summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMike Bayer <mike_mp@zzzcomputing.com>2014-01-21 20:10:23 -0500
committerMike Bayer <mike_mp@zzzcomputing.com>2014-01-21 20:10:23 -0500
commit07fb90c6cc14de6d02cf4be592c57d56831f59f7 (patch)
tree050ef65db988559c60f7aa40f2d0bfe24947e548
parent560fd1d5ed643a1b0f95296f3b840c1963bbe67f (diff)
parentee1f4d21037690ad996c5eacf7e1200e92f2fbaa (diff)
downloadsqlalchemy-ticket_2501.tar.gz
Merge branch 'master' into ticket_2501ticket_2501
Conflicts: lib/sqlalchemy/orm/mapper.py
-rw-r--r--.gitignore2
-rw-r--r--LICENSE2
-rw-r--r--MANIFEST.in2
-rw-r--r--README.py3k6
-rw-r--r--README.unittests.rst39
-rw-r--r--doc/build/builder/autodoc_mods.py50
-rw-r--r--doc/build/builder/changelog.py296
-rw-r--r--doc/build/builder/mako.py52
-rw-r--r--doc/build/builder/viewsource.py209
-rw-r--r--doc/build/changelog/changelog_01.rst27
-rw-r--r--doc/build/changelog/changelog_02.rst548
-rw-r--r--doc/build/changelog/changelog_03.rst35
-rw-r--r--doc/build/changelog/changelog_04.rst50
-rw-r--r--doc/build/changelog/changelog_05.rst10
-rw-r--r--doc/build/changelog/changelog_06.rst56
-rw-r--r--doc/build/changelog/changelog_07.rst137
-rw-r--r--doc/build/changelog/changelog_08.rst643
-rw-r--r--doc/build/changelog/changelog_09.rst1294
-rw-r--r--doc/build/changelog/migration_04.rst6
-rw-r--r--doc/build/changelog/migration_07.rst10
-rw-r--r--doc/build/changelog/migration_08.rst4
-rw-r--r--doc/build/changelog/migration_09.rst1149
-rw-r--r--doc/build/conf.py22
-rw-r--r--doc/build/copyright.rst4
-rw-r--r--doc/build/core/connections.rst6
-rw-r--r--doc/build/core/constraints.rst409
-rw-r--r--doc/build/core/ddl.rst287
-rw-r--r--doc/build/core/defaults.rst345
-rw-r--r--doc/build/core/dml.rst37
-rw-r--r--doc/build/core/engines.rst4
-rw-r--r--doc/build/core/event.rst5
-rw-r--r--doc/build/core/events.rst3
-rw-r--r--doc/build/core/exceptions.rst1
-rw-r--r--doc/build/core/expression_api.rst250
-rw-r--r--doc/build/core/functions.rst27
-rw-r--r--doc/build/core/index.rst3
-rw-r--r--doc/build/core/internals.rst10
-rw-r--r--doc/build/core/metadata.rst330
-rw-r--r--doc/build/core/pooling.rst63
-rw-r--r--doc/build/core/reflection.rst168
-rw-r--r--doc/build/core/schema.rst1462
-rw-r--r--doc/build/core/selectable.rst85
-rw-r--r--doc/build/core/sqlelement.rst140
-rw-r--r--doc/build/core/tutorial.rst26
-rw-r--r--doc/build/core/types.rst102
-rw-r--r--doc/build/dialects/drizzle.rst24
-rw-r--r--doc/build/dialects/index.rst35
-rw-r--r--doc/build/dialects/informix.rst11
-rw-r--r--doc/build/dialects/mssql.rst36
-rw-r--r--doc/build/dialects/mysql.rst66
-rw-r--r--doc/build/dialects/oracle.rst14
-rw-r--r--doc/build/dialects/postgresql.rst50
-rw-r--r--doc/build/faq.rst942
-rw-r--r--doc/build/glossary.rst641
-rw-r--r--doc/build/index.rst14
-rw-r--r--doc/build/intro.rst49
-rw-r--r--doc/build/orm/deprecated.rst2
-rw-r--r--doc/build/orm/events.rst2
-rw-r--r--doc/build/orm/examples.rst144
-rw-r--r--doc/build/orm/exceptions.rst1
-rw-r--r--doc/build/orm/extensions/associationproxy.rst1
-rw-r--r--doc/build/orm/extensions/automap.rst22
-rw-r--r--doc/build/orm/extensions/declarative.rst3
-rw-r--r--doc/build/orm/extensions/hybrid.rst2
-rw-r--r--doc/build/orm/extensions/instrumentation.rst3
-rw-r--r--doc/build/orm/extensions/mutable.rst5
-rw-r--r--doc/build/orm/inheritance.rst7
-rw-r--r--doc/build/orm/internals.rst39
-rw-r--r--doc/build/orm/loading.rst181
-rw-r--r--doc/build/orm/mapper_config.rst558
-rw-r--r--doc/build/orm/query.rst8
-rw-r--r--doc/build/orm/relationships.rst2
-rw-r--r--doc/build/orm/session.rst546
-rw-r--r--doc/build/orm/tutorial.rst627
-rw-r--r--doc/build/requirements.txt2
-rw-r--r--doc/build/static/docs.css36
-rw-r--r--doc/build/templates/genindex.mako2
-rw-r--r--doc/build/templates/layout.mako27
-rw-r--r--doc/build/templates/page.mako2
-rw-r--r--doc/build/templates/rtd_layout.mako164
-rw-r--r--doc/build/templates/search.mako2
-rw-r--r--doc/build/templates/static_base.mako9
-rw-r--r--doc/build/testdocs.py3
-rw-r--r--examples/adjacency_list/__init__.py2
-rw-r--r--examples/adjacency_list/adjacency_list.py29
-rw-r--r--examples/association/__init__.py14
-rw-r--r--examples/association/basic_association.py6
-rw-r--r--examples/association/dict_of_sets_with_default.py7
-rw-r--r--examples/association/proxied_association.py8
-rw-r--r--examples/custom_attributes/__init__.py14
-rw-r--r--examples/dogpile_caching/__init__.py45
-rw-r--r--examples/dogpile_caching/advanced.py8
-rw-r--r--examples/dogpile_caching/caching_query.py5
-rw-r--r--examples/dogpile_caching/environment.py9
-rw-r--r--examples/dogpile_caching/helloworld.py12
-rw-r--r--examples/dogpile_caching/local_session_caching.py5
-rw-r--r--examples/dogpile_caching/model.py7
-rw-r--r--examples/dogpile_caching/relationship_caching.py (renamed from examples/dogpile_caching/relation_caching.py)3
-rw-r--r--examples/dynamic_dict/__init__.py2
-rw-r--r--examples/elementtree/__init__.py23
-rw-r--r--examples/elementtree/adjacency_list.py10
-rw-r--r--examples/elementtree/optimized_al.py10
-rw-r--r--examples/generic_associations/__init__.py15
-rw-r--r--examples/generic_associations/discriminator_on_association.py84
-rw-r--r--examples/generic_associations/generic_fk.py140
-rw-r--r--examples/generic_associations/table_per_association.py11
-rw-r--r--examples/generic_associations/table_per_related.py24
-rw-r--r--examples/graphs/__init__.py2
-rw-r--r--examples/inheritance/__init__.py2
-rw-r--r--examples/inheritance/concrete.py2
-rw-r--r--examples/inheritance/joined.py3
-rw-r--r--examples/inheritance/single.py2
-rw-r--r--examples/join_conditions/__init__.py7
-rw-r--r--examples/join_conditions/cast.py95
-rw-r--r--examples/join_conditions/threeway.py108
-rw-r--r--examples/large_collection/__init__.py2
-rw-r--r--examples/nested_sets/__init__.py2
-rw-r--r--examples/nested_sets/nested_sets.py4
-rw-r--r--examples/postgis/__init__.py2
-rw-r--r--examples/sharding/__init__.py2
-rw-r--r--examples/versioned_history/__init__.py (renamed from examples/versioning/__init__.py)6
-rw-r--r--examples/versioned_history/history_meta.py (renamed from examples/versioning/history_meta.py)11
-rw-r--r--examples/versioned_history/test_versioning.py (renamed from examples/versioning/test_versioning.py)165
-rw-r--r--examples/versioned_rows/__init__.py7
-rw-r--r--examples/versioned_rows/versioned_map.py284
-rw-r--r--examples/versioned_rows/versioned_rows.py105
-rw-r--r--examples/versioning/_lib.py96
-rw-r--r--examples/vertical/__init__.py1
-rw-r--r--examples/vertical/dictlike-polymorphic.py214
-rw-r--r--examples/vertical/dictlike.py179
-rw-r--r--lib/sqlalchemy/__init__.py29
-rw-r--r--lib/sqlalchemy/cextension/processors.c287
-rw-r--r--lib/sqlalchemy/cextension/resultproxy.c109
-rw-r--r--lib/sqlalchemy/cextension/utils.c48
-rw-r--r--lib/sqlalchemy/connectors/__init__.py2
-rw-r--r--lib/sqlalchemy/connectors/mxodbc.py2
-rw-r--r--lib/sqlalchemy/connectors/mysqldb.py7
-rw-r--r--lib/sqlalchemy/connectors/pyodbc.py2
-rw-r--r--lib/sqlalchemy/connectors/zxJDBC.py2
-rw-r--r--lib/sqlalchemy/databases/__init__.py4
-rw-r--r--lib/sqlalchemy/dialects/__init__.py3
-rw-r--r--lib/sqlalchemy/dialects/drizzle/base.py4
-rw-r--r--lib/sqlalchemy/dialects/firebird/__init__.py2
-rw-r--r--lib/sqlalchemy/dialects/firebird/base.py33
-rw-r--r--lib/sqlalchemy/dialects/firebird/fdb.py2
-rw-r--r--lib/sqlalchemy/dialects/firebird/kinterbasdb.py11
-rw-r--r--lib/sqlalchemy/dialects/informix/__init__.py9
-rw-r--r--lib/sqlalchemy/dialects/informix/base.py590
-rw-r--r--lib/sqlalchemy/dialects/informix/informixdb.py69
-rw-r--r--lib/sqlalchemy/dialects/mssql/__init__.py2
-rw-r--r--lib/sqlalchemy/dialects/mssql/adodbapi.py2
-rw-r--r--lib/sqlalchemy/dialects/mssql/base.py110
-rw-r--r--lib/sqlalchemy/dialects/mssql/information_schema.py2
-rw-r--r--lib/sqlalchemy/dialects/mssql/mxodbc.py2
-rw-r--r--lib/sqlalchemy/dialects/mssql/pymssql.py3
-rw-r--r--lib/sqlalchemy/dialects/mssql/pyodbc.py14
-rw-r--r--lib/sqlalchemy/dialects/mssql/zxjdbc.py2
-rw-r--r--lib/sqlalchemy/dialects/mysql/__init__.py2
-rw-r--r--lib/sqlalchemy/dialects/mysql/base.py350
-rw-r--r--lib/sqlalchemy/dialects/mysql/cymysql.py6
-rw-r--r--lib/sqlalchemy/dialects/mysql/gaerdbms.py2
-rw-r--r--lib/sqlalchemy/dialects/mysql/mysqlconnector.py7
-rw-r--r--lib/sqlalchemy/dialects/mysql/mysqldb.py28
-rw-r--r--lib/sqlalchemy/dialects/mysql/oursql.py2
-rw-r--r--lib/sqlalchemy/dialects/mysql/pymysql.py2
-rw-r--r--lib/sqlalchemy/dialects/mysql/pyodbc.py2
-rw-r--r--lib/sqlalchemy/dialects/mysql/zxjdbc.py2
-rw-r--r--lib/sqlalchemy/dialects/oracle/__init__.py2
-rw-r--r--lib/sqlalchemy/dialects/oracle/base.py87
-rw-r--r--lib/sqlalchemy/dialects/oracle/cx_oracle.py46
-rw-r--r--lib/sqlalchemy/dialects/oracle/zxjdbc.py2
-rw-r--r--lib/sqlalchemy/dialects/postgres.py2
-rw-r--r--lib/sqlalchemy/dialects/postgresql/__init__.py8
-rw-r--r--lib/sqlalchemy/dialects/postgresql/base.py252
-rw-r--r--lib/sqlalchemy/dialects/postgresql/constraints.py6
-rw-r--r--lib/sqlalchemy/dialects/postgresql/hstore.py13
-rw-r--r--lib/sqlalchemy/dialects/postgresql/json.py199
-rw-r--r--lib/sqlalchemy/dialects/postgresql/pg8000.py6
-rw-r--r--lib/sqlalchemy/dialects/postgresql/psycopg2.py52
-rw-r--r--lib/sqlalchemy/dialects/postgresql/pypostgresql.py2
-rw-r--r--lib/sqlalchemy/dialects/postgresql/ranges.py2
-rw-r--r--lib/sqlalchemy/dialects/postgresql/zxjdbc.py2
-rw-r--r--lib/sqlalchemy/dialects/sqlite/__init__.py2
-rw-r--r--lib/sqlalchemy/dialects/sqlite/base.py34
-rw-r--r--lib/sqlalchemy/dialects/sqlite/pysqlite.py8
-rw-r--r--lib/sqlalchemy/dialects/sybase/__init__.py2
-rw-r--r--lib/sqlalchemy/dialects/sybase/base.py4
-rw-r--r--lib/sqlalchemy/dialects/sybase/mxodbc.py2
-rw-r--r--lib/sqlalchemy/dialects/sybase/pyodbc.py2
-rw-r--r--lib/sqlalchemy/dialects/sybase/pysybase.py2
-rw-r--r--lib/sqlalchemy/engine/__init__.py24
-rw-r--r--lib/sqlalchemy/engine/base.py139
-rw-r--r--lib/sqlalchemy/engine/ddl.py193
-rw-r--r--lib/sqlalchemy/engine/default.py165
-rw-r--r--lib/sqlalchemy/engine/interfaces.py140
-rw-r--r--lib/sqlalchemy/engine/reflection.py164
-rw-r--r--lib/sqlalchemy/engine/result.py33
-rw-r--r--lib/sqlalchemy/engine/strategies.py29
-rw-r--r--lib/sqlalchemy/engine/threadlocal.py2
-rw-r--r--lib/sqlalchemy/engine/url.py48
-rw-r--r--lib/sqlalchemy/engine/util.py24
-rw-r--r--lib/sqlalchemy/event.py735
-rw-r--r--lib/sqlalchemy/event/__init__.py10
-rw-r--r--lib/sqlalchemy/event/api.py107
-rw-r--r--lib/sqlalchemy/event/attr.py376
-rw-r--r--lib/sqlalchemy/event/base.py217
-rw-r--r--lib/sqlalchemy/event/legacy.py156
-rw-r--r--lib/sqlalchemy/event/registry.py236
-rw-r--r--lib/sqlalchemy/events.py143
-rw-r--r--lib/sqlalchemy/exc.py28
-rw-r--r--lib/sqlalchemy/ext/__init__.py2
-rw-r--r--lib/sqlalchemy/ext/associationproxy.py8
-rw-r--r--lib/sqlalchemy/ext/automap.py840
-rw-r--r--lib/sqlalchemy/ext/compiler.py4
-rw-r--r--lib/sqlalchemy/ext/declarative/__init__.py60
-rw-r--r--lib/sqlalchemy/ext/declarative/api.py106
-rw-r--r--lib/sqlalchemy/ext/declarative/base.py95
-rw-r--r--lib/sqlalchemy/ext/declarative/clsregistry.py93
-rw-r--r--lib/sqlalchemy/ext/horizontal_shard.py2
-rw-r--r--lib/sqlalchemy/ext/hybrid.py6
-rw-r--r--lib/sqlalchemy/ext/instrumentation.py10
-rw-r--r--lib/sqlalchemy/ext/mutable.py22
-rw-r--r--lib/sqlalchemy/ext/orderinglist.py2
-rw-r--r--lib/sqlalchemy/ext/serializer.py10
-rw-r--r--lib/sqlalchemy/inspection.py13
-rw-r--r--lib/sqlalchemy/interfaces.py2
-rw-r--r--lib/sqlalchemy/log.py10
-rw-r--r--lib/sqlalchemy/orm/__init__.py1639
-rw-r--r--lib/sqlalchemy/orm/attributes.py307
-rw-r--r--lib/sqlalchemy/orm/base.py453
-rw-r--r--lib/sqlalchemy/orm/collections.py150
-rw-r--r--lib/sqlalchemy/orm/dependency.py2
-rw-r--r--lib/sqlalchemy/orm/deprecated_interfaces.py6
-rw-r--r--lib/sqlalchemy/orm/descriptor_props.py216
-rw-r--r--lib/sqlalchemy/orm/dynamic.py22
-rw-r--r--lib/sqlalchemy/orm/evaluator.py2
-rw-r--r--lib/sqlalchemy/orm/events.py311
-rw-r--r--lib/sqlalchemy/orm/exc.py20
-rw-r--r--lib/sqlalchemy/orm/identity.py4
-rw-r--r--lib/sqlalchemy/orm/instrumentation.py85
-rw-r--r--lib/sqlalchemy/orm/interfaces.py452
-rw-r--r--lib/sqlalchemy/orm/loading.py28
-rw-r--r--lib/sqlalchemy/orm/mapper.py582
-rw-r--r--lib/sqlalchemy/orm/path_registry.py261
-rw-r--r--lib/sqlalchemy/orm/persistence.py134
-rw-r--r--lib/sqlalchemy/orm/properties.py1212
-rw-r--r--lib/sqlalchemy/orm/query.py562
-rw-r--r--lib/sqlalchemy/orm/relationships.py1618
-rw-r--r--lib/sqlalchemy/orm/scoping.py4
-rw-r--r--lib/sqlalchemy/orm/session.py152
-rw-r--r--lib/sqlalchemy/orm/state.py129
-rw-r--r--lib/sqlalchemy/orm/strategies.py395
-rw-r--r--lib/sqlalchemy/orm/strategy_options.py924
-rw-r--r--lib/sqlalchemy/orm/sync.py2
-rw-r--r--lib/sqlalchemy/orm/unitofwork.py10
-rw-r--r--lib/sqlalchemy/orm/util.py536
-rw-r--r--lib/sqlalchemy/pool.py269
-rw-r--r--lib/sqlalchemy/processors.py29
-rw-r--r--lib/sqlalchemy/schema.py3682
-rw-r--r--lib/sqlalchemy/sql/__init__.py27
-rw-r--r--lib/sqlalchemy/sql/annotation.py182
-rw-r--r--lib/sqlalchemy/sql/base.py460
-rw-r--r--lib/sqlalchemy/sql/compiler.py714
-rw-r--r--lib/sqlalchemy/sql/ddl.py864
-rw-r--r--lib/sqlalchemy/sql/default_comparator.py278
-rw-r--r--lib/sqlalchemy/sql/dml.py769
-rw-r--r--lib/sqlalchemy/sql/elements.py2880
-rw-r--r--lib/sqlalchemy/sql/expression.py6688
-rw-r--r--lib/sqlalchemy/sql/functions.py296
-rw-r--r--lib/sqlalchemy/sql/operators.py25
-rw-r--r--lib/sqlalchemy/sql/schema.py3273
-rw-r--r--lib/sqlalchemy/sql/selectable.py3001
-rw-r--r--lib/sqlalchemy/sql/sqltypes.py1628
-rw-r--r--lib/sqlalchemy/sql/type_api.py1053
-rw-r--r--lib/sqlalchemy/sql/util.py410
-rw-r--r--lib/sqlalchemy/sql/visitors.py2
-rw-r--r--lib/sqlalchemy/testing/__init__.py7
-rw-r--r--lib/sqlalchemy/testing/assertions.py73
-rw-r--r--lib/sqlalchemy/testing/assertsql.py5
-rw-r--r--lib/sqlalchemy/testing/config.py6
-rw-r--r--lib/sqlalchemy/testing/engines.py18
-rw-r--r--lib/sqlalchemy/testing/entities.py6
-rw-r--r--lib/sqlalchemy/testing/exclusions.py19
-rw-r--r--lib/sqlalchemy/testing/fixtures.py6
-rw-r--r--lib/sqlalchemy/testing/mock.py10
-rw-r--r--lib/sqlalchemy/testing/pickleable.py6
-rw-r--r--lib/sqlalchemy/testing/plugin/noseplugin.py23
-rw-r--r--lib/sqlalchemy/testing/profiling.py47
-rw-r--r--lib/sqlalchemy/testing/requirements.py47
-rw-r--r--lib/sqlalchemy/testing/runner.py5
-rw-r--r--lib/sqlalchemy/testing/schema.py5
-rw-r--r--lib/sqlalchemy/testing/suite/test_insert.py33
-rw-r--r--lib/sqlalchemy/testing/suite/test_reflection.py8
-rw-r--r--lib/sqlalchemy/testing/suite/test_types.py139
-rw-r--r--lib/sqlalchemy/testing/util.py6
-rw-r--r--lib/sqlalchemy/testing/warnings.py6
-rw-r--r--lib/sqlalchemy/types.py2544
-rw-r--r--lib/sqlalchemy/util/__init__.py13
-rw-r--r--lib/sqlalchemy/util/_collections.py34
-rw-r--r--lib/sqlalchemy/util/compat.py29
-rw-r--r--lib/sqlalchemy/util/deprecations.py2
-rw-r--r--lib/sqlalchemy/util/langhelpers.py328
-rw-r--r--lib/sqlalchemy/util/queue.py21
-rw-r--r--lib/sqlalchemy/util/topological.py2
-rw-r--r--setup.cfg3
-rw-r--r--setup.py2
-rw-r--r--test/aaa_profiling/test_memusage.py4
-rw-r--r--test/aaa_profiling/test_orm.py60
-rw-r--r--test/aaa_profiling/test_resultset.py1
-rw-r--r--test/aaa_profiling/test_zoomark.py1
-rw-r--r--test/aaa_profiling/test_zoomark_orm.py1
-rw-r--r--test/base/test_events.py248
-rw-r--r--test/base/test_utils.py114
-rw-r--r--test/dialect/mssql/test_compiler.py44
-rw-r--r--test/dialect/mssql/test_engine.py7
-rw-r--r--test/dialect/mssql/test_query.py3
-rw-r--r--test/dialect/mysql/test_compiler.py47
-rw-r--r--test/dialect/mysql/test_dialect.py59
-rw-r--r--test/dialect/mysql/test_reflection.py73
-rw-r--r--test/dialect/mysql/test_types.py299
-rw-r--r--test/dialect/postgresql/test_compiler.py123
-rw-r--r--test/dialect/postgresql/test_dialect.py39
-rw-r--r--test/dialect/postgresql/test_reflection.py76
-rw-r--r--test/dialect/postgresql/test_types.py400
-rw-r--r--test/dialect/test_firebird.py9
-rw-r--r--test/dialect/test_informix.py25
-rw-r--r--test/dialect/test_oracle.py635
-rw-r--r--test/engine/test_bind.py42
-rw-r--r--test/engine/test_execute.py146
-rw-r--r--test/engine/test_parseconnect.py183
-rw-r--r--test/engine/test_pool.py248
-rw-r--r--test/engine/test_reconnect.py10
-rw-r--r--test/engine/test_reflection.py139
-rw-r--r--test/engine/test_transaction.py142
-rw-r--r--test/ext/declarative/test_basic.py157
-rw-r--r--test/ext/declarative/test_reflection.py132
-rw-r--r--test/ext/test_associationproxy.py190
-rw-r--r--test/ext/test_automap.py146
-rw-r--r--test/ext/test_compiler.py18
-rw-r--r--test/ext/test_extendedattr.py2
-rw-r--r--test/ext/test_mutable.py6
-rw-r--r--test/ext/test_serializer.py51
-rw-r--r--test/orm/_fixtures.py11
-rw-r--r--test/orm/inheritance/test_assorted_poly.py1
-rw-r--r--test/orm/inheritance/test_basic.py1
-rw-r--r--test/orm/inheritance/test_manytomany.py1
-rw-r--r--test/orm/inheritance/test_poly_linked_list.py4
-rw-r--r--test/orm/inheritance/test_relationship.py137
-rw-r--r--test/orm/inheritance/test_selects.py79
-rw-r--r--test/orm/test_assorted_eager.py9
-rw-r--r--test/orm/test_attributes.py45
-rw-r--r--test/orm/test_backref_mutations.py11
-rw-r--r--test/orm/test_bundle.py289
-rw-r--r--test/orm/test_cascade.py4
-rw-r--r--test/orm/test_collection.py63
-rw-r--r--test/orm/test_composites.py81
-rw-r--r--test/orm/test_default_strategies.py41
-rw-r--r--test/orm/test_deferred.py566
-rw-r--r--test/orm/test_eager_relations.py62
-rw-r--r--test/orm/test_events.py148
-rw-r--r--test/orm/test_expire.py163
-rw-r--r--test/orm/test_froms.py86
-rw-r--r--test/orm/test_generative.py1
-rw-r--r--test/orm/test_inspect.py48
-rw-r--r--test/orm/test_joins.py35
-rw-r--r--test/orm/test_lazy_relations.py2
-rw-r--r--test/orm/test_lockmode.py173
-rw-r--r--test/orm/test_mapper.py526
-rw-r--r--test/orm/test_naturalpks.py2
-rw-r--r--test/orm/test_of_type.py4
-rw-r--r--test/orm/test_options.py760
-rw-r--r--test/orm/test_pickled.py5
-rw-r--r--test/orm/test_query.py632
-rw-r--r--test/orm/test_relationships.py122
-rw-r--r--test/orm/test_session.py22
-rw-r--r--test/orm/test_subquery_relations.py253
-rw-r--r--test/orm/test_unitofwork.py23
-rw-r--r--test/orm/test_update_delete.py34
-rw-r--r--test/orm/test_utils.py190
-rw-r--r--test/orm/test_validators.py281
-rw-r--r--test/orm/test_versioning.py281
-rw-r--r--test/perf/orm2010.py179
-rw-r--r--test/profiles.txt178
-rw-r--r--test/requirements.py98
-rw-r--r--test/sql/test_case_statement.py2
-rw-r--r--test/sql/test_compiler.py392
-rw-r--r--test/sql/test_constraints.py43
-rw-r--r--test/sql/test_cte.py45
-rw-r--r--test/sql/test_ddlemit.py (renamed from test/engine/test_ddlemit.py)2
-rw-r--r--test/sql/test_defaults.py64
-rw-r--r--test/sql/test_functions.py5
-rw-r--r--test/sql/test_generative.py6
-rw-r--r--test/sql/test_insert.py29
-rw-r--r--test/sql/test_join_rewriting.py120
-rw-r--r--test/sql/test_metadata.py520
-rw-r--r--test/sql/test_operators.py274
-rw-r--r--test/sql/test_query.py18
-rw-r--r--test/sql/test_quote.py162
-rw-r--r--test/sql/test_returning.py124
-rw-r--r--test/sql/test_selectable.py80
-rw-r--r--test/sql/test_text.py371
-rw-r--r--test/sql/test_types.py442
-rw-r--r--test/sql/test_unicode.py16
-rw-r--r--test/sql/test_update.py156
404 files changed, 45728 insertions, 28113 deletions
diff --git a/.gitignore b/.gitignore
index 69da39e02..ba308575e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,10 +1,12 @@
*.pyc
+*.pyo
/build/
/dist/
/doc/build/output/
/dogpile_data/
*.orig
tox.ini
+/.tox
.venv
*.egg-info
.coverage
diff --git a/LICENSE b/LICENSE
index e937253d5..4a8757fda 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,6 @@
This is the MIT license: http://www.opensource.org/licenses/mit-license.php
-Copyright (c) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>.
+Copyright (c) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>.
SQLAlchemy is a trademark of Michael Bayer.
Permission is hereby granted, free of charge, to any person obtaining a copy of this
diff --git a/MANIFEST.in b/MANIFEST.in
index d520be179..23aa88268 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -9,5 +9,5 @@ recursive-include test *.py *.dat
# don't come in if --with-cextensions isn't specified.
recursive-include lib *.c *.txt
-include README* LICENSE distribute_setup.py sa2to3.py ez_setup.py sqla_nose.py CHANGES*
+include README* AUTHORS LICENSE distribute_setup.py sa2to3.py ez_setup.py sqla_nose.py CHANGES*
prune doc/build/output
diff --git a/README.py3k b/README.py3k
deleted file mode 100644
index 2afaeb688..000000000
--- a/README.py3k
+++ /dev/null
@@ -1,6 +0,0 @@
-=================
-PYTHON 3 SUPPORT
-=================
-
-As of SQLAlchemy 0.9, SQLAlchemy installs and runs with
-Python 3 directly, with no code changes.
diff --git a/README.unittests.rst b/README.unittests.rst
index 7d052cfd7..0bac0bcc5 100644
--- a/README.unittests.rst
+++ b/README.unittests.rst
@@ -214,3 +214,42 @@ DEVELOPING AND TESTING NEW DIALECTS
See the new file README.dialects.rst for detail on dialects.
+
+TESTING WITH MULTIPLE PYTHON VERSIONS USING TOX
+-----------------------------------------------
+
+If you want to test across multiple versions of Python, you may find `tox
+<http://tox.testrun.org/>`_ useful. To use it:
+
+1. Create a ``tox.ini`` file with the following:
+
+.. code-block:: ini
+
+ # Tox (http://tox.testrun.org/) is a tool for running tests
+ # in multiple virtualenvs. This configuration file will run the
+ # test suite on all supported python versions. To use it, "pip install tox"
+ # and then run "tox" from this directory.
+
+ [tox]
+ envlist = py26, py27, py33, py34, pypy
+
+ [testenv]
+ deps =
+ mock
+ nose
+ commands = {envpython} ./sqla_nose.py
+
+2. Run::
+
+ pip install tox
+
+3. Run::
+
+ tox
+
+This will run the test suite on all the Python versions listed in the
+``envlist`` in the ``tox.ini`` file. You can also manually specify the versions
+to test against::
+
+ tox -e py26,py27,py33
+
diff --git a/doc/build/builder/autodoc_mods.py b/doc/build/builder/autodoc_mods.py
index 576b4c339..93e2596be 100644
--- a/doc/build/builder/autodoc_mods.py
+++ b/doc/build/builder/autodoc_mods.py
@@ -9,6 +9,30 @@ def autodoc_skip_member(app, what, name, obj, skip, options):
else:
return skip
+
+_convert_modname = {
+ "sqlalchemy.sql.sqltypes": "sqlalchemy.types",
+ "sqlalchemy.sql.type_api": "sqlalchemy.types",
+ "sqlalchemy.sql.schema": "sqlalchemy.schema",
+ "sqlalchemy.sql.elements": "sqlalchemy.sql.expression",
+ "sqlalchemy.sql.selectable": "sqlalchemy.sql.expression",
+ "sqlalchemy.sql.dml": "sqlalchemy.sql.expression",
+ "sqlalchemy.sql.ddl": "sqlalchemy.schema",
+ "sqlalchemy.sql.base": "sqlalchemy.sql.expression"
+}
+
+_convert_modname_w_class = {
+ ("sqlalchemy.engine.interfaces", "Connectable"): "sqlalchemy.engine"
+}
+
+def _adjust_rendered_mod_name(modname, objname):
+ if modname in _convert_modname:
+ return _convert_modname[modname]
+ elif (modname, objname) in _convert_modname_w_class:
+ return _convert_modname_w_class[(modname, objname)]
+ else:
+ return modname
+
# im sure this is in the app somewhere, but I don't really
# know where, so we're doing it here.
_track_autodoced = {}
@@ -16,6 +40,24 @@ _inherited_names = set()
def autodoc_process_docstring(app, what, name, obj, options, lines):
if what == "class":
_track_autodoced[name] = obj
+
+ # need to translate module names for bases, others
+ # as we document lots of symbols in namespace modules
+ # outside of their source
+ bases = []
+ for base in obj.__bases__:
+ if base is not object:
+ bases.append(":class:`%s.%s`" % (
+ _adjust_rendered_mod_name(base.__module__, base.__name__),
+ base.__name__))
+
+ if bases:
+ lines[:0] = [
+ "Bases: %s" % (", ".join(bases)),
+ ""
+ ]
+
+
elif what in ("attribute", "method") and \
options.get("inherited-members"):
m = re.match(r'(.*?)\.([\w_]+)$', name)
@@ -35,15 +77,16 @@ def autodoc_process_docstring(app, what, name, obj, options, lines):
" *inherited from the* :%s:`~%s.%s.%s` *%s of* :class:`~%s.%s`" % (
"attr" if what == "attribute"
else "meth",
- supercls.__module__, supercls.__name__,
+ _adjust_rendered_mod_name(supercls.__module__, supercls.__name__),
+ supercls.__name__,
attrname,
what,
- supercls.__module__, supercls.__name__
+ _adjust_rendered_mod_name(supercls.__module__, supercls.__name__),
+ supercls.__name__
),
""
]
-from docutils import nodes
def missing_reference(app, env, node, contnode):
if node.attributes['reftarget'] in _inherited_names:
return node.children[0]
@@ -51,7 +94,6 @@ def missing_reference(app, env, node, contnode):
return None
-
def setup(app):
app.connect('autodoc-skip-member', autodoc_skip_member)
app.connect('autodoc-process-docstring', autodoc_process_docstring)
diff --git a/doc/build/builder/changelog.py b/doc/build/builder/changelog.py
deleted file mode 100644
index 41a403ad0..000000000
--- a/doc/build/builder/changelog.py
+++ /dev/null
@@ -1,296 +0,0 @@
-import re
-from sphinx.util.compat import Directive
-from docutils.statemachine import StringList
-from docutils import nodes, utils
-import textwrap
-import itertools
-import collections
-import md5
-
-def _comma_list(text):
- return re.split(r"\s*,\s*", text.strip())
-
-def _parse_content(content):
- d = {}
- d['text'] = []
- idx = 0
- for line in content:
- idx += 1
- m = re.match(r' *\:(.+?)\:(?: +(.+))?', line)
- if m:
- attrname, value = m.group(1, 2)
- d[attrname] = value or ''
- else:
- break
- d["text"] = content[idx:]
- return d
-
-
-class EnvDirective(object):
- @property
- def env(self):
- return self.state.document.settings.env
-
-class ChangeLogDirective(EnvDirective, Directive):
- has_content = True
-
- type_ = "change"
-
- default_section = 'misc'
-
- def _organize_by_section(self, changes):
- compound_sections = [(s, s.split(" ")) for s in
- self.sections if " " in s]
-
- bysection = collections.defaultdict(list)
- all_sections = set()
- for rec in changes:
- inner_tag = rec['tags'].intersection(self.inner_tag_sort)
- if inner_tag:
- inner_tag = inner_tag.pop()
- else:
- inner_tag = ""
-
- for compound, comp_words in compound_sections:
- if rec['tags'].issuperset(comp_words):
- bysection[(compound, inner_tag)].append(rec)
- all_sections.add(compound)
- break
- else:
- intersect = rec['tags'].intersection(self.sections)
- if intersect:
- for sec in rec['sorted_tags']:
- if sec in intersect:
- bysection[(sec, inner_tag)].append(rec)
- all_sections.add(sec)
- break
- else:
- bysection[(self.default_section, inner_tag)].append(rec)
- return bysection, all_sections
-
- @classmethod
- def changes(cls, env):
- return env.temp_data['ChangeLogDirective_%s_changes' % cls.type_]
-
- def _setup_run(self):
- self.sections = self.env.config.changelog_sections
- self.inner_tag_sort = self.env.config.changelog_inner_tag_sort + [""]
- self.env.temp_data['ChangeLogDirective_%s_changes' % self.type_] = []
- self._parsed_content = _parse_content(self.content)
-
- p = nodes.paragraph('', '',)
- self.state.nested_parse(self.content[1:], 0, p)
-
- def run(self):
- self._setup_run()
- changes = self.changes(self.env)
- output = []
-
- self.version = version = self._parsed_content.get('version', '')
- id_prefix = "%s-%s" % (self.type_, version)
- topsection = self._run_top(id_prefix)
- output.append(topsection)
-
- bysection, all_sections = self._organize_by_section(changes)
-
- counter = itertools.count()
-
- sections_to_render = [s for s in self.sections if s in all_sections]
- if not sections_to_render:
- for cat in self.inner_tag_sort:
- append_sec = self._append_node()
-
- for rec in bysection[(self.default_section, cat)]:
- rec["id"] = "%s-%s" % (id_prefix, next(counter))
-
- self._render_rec(rec, None, cat, append_sec)
-
- if append_sec.children:
- topsection.append(append_sec)
- else:
- for section in sections_to_render + [self.default_section]:
- sec = nodes.section('',
- nodes.title(section, section),
- ids=["%s-%s" % (id_prefix, section.replace(" ", "-"))]
- )
-
- append_sec = self._append_node()
- sec.append(append_sec)
-
- for cat in self.inner_tag_sort:
- for rec in bysection[(section, cat)]:
- rec["id"] = "%s-%s" % (id_prefix, next(counter))
- self._render_rec(rec, section, cat, append_sec)
-
- if append_sec.children:
- topsection.append(sec)
-
- return output
-
- def _append_node(self):
- return nodes.bullet_list()
-
- def _run_top(self, id_prefix):
- version = self._parsed_content.get('version', '')
- topsection = nodes.section('',
- nodes.title(version, version),
- ids=[id_prefix]
- )
-
- if self._parsed_content.get("released"):
- topsection.append(nodes.Text("Released: %s" %
- self._parsed_content['released']))
- else:
- topsection.append(nodes.Text("no release date"))
-
- intro_para = nodes.paragraph('', '')
- for len_, text in enumerate(self._parsed_content['text']):
- if ".. change::" in text:
- break
- if len_:
- self.state.nested_parse(self._parsed_content['text'][0:len_], 0,
- intro_para)
- topsection.append(intro_para)
-
- return topsection
-
-
- def _render_rec(self, rec, section, cat, append_sec):
- para = rec['node'].deepcopy()
-
- text = _text_rawsource_from_node(para)
-
- to_hash = "%s %s" % (self.version, text[0:100])
- targetid = "%s-%s" % (self.type_,
- md5.md5(to_hash.encode('ascii', 'ignore')
- ).hexdigest())
- targetnode = nodes.target('', '', ids=[targetid])
- para.insert(0, targetnode)
- permalink = nodes.reference('', '',
- nodes.Text("(link)", "(link)"),
- refid=targetid,
- classes=['changeset-link']
- )
- para.append(permalink)
-
- insert_ticket = nodes.paragraph('')
- para.append(insert_ticket)
-
- i = 0
- for collection, render, prefix in (
- (rec['tickets'], self.env.config.changelog_render_ticket, "#%s"),
- (rec['pullreq'], self.env.config.changelog_render_pullreq,
- "pull request %s"),
- (rec['changeset'], self.env.config.changelog_render_changeset, "r%s"),
- ):
- for refname in collection:
- if i > 0:
- insert_ticket.append(nodes.Text(", ", ", "))
- else:
- insert_ticket.append(nodes.Text(" ", " "))
- i += 1
- if render is not None:
- refuri = render % refname
- node = nodes.reference('', '',
- nodes.Text(prefix % refname, prefix % refname),
- refuri=refuri
- )
- else:
- node = nodes.Text(prefix % refname, prefix % refname)
- insert_ticket.append(node)
-
- if rec['tags']:
- tag_node = nodes.strong('',
- " ".join("[%s]" % t for t
- in
- [t1 for t1 in [section, cat]
- if t1 in rec['tags']] +
-
- list(rec['tags'].difference([section, cat]))
- ) + " "
- )
- para.children[0].insert(0, tag_node)
-
- append_sec.append(
- nodes.list_item('',
- nodes.target('', '', ids=[rec['id']]),
- para
- )
- )
-
-
-class ChangeDirective(EnvDirective, Directive):
- has_content = True
-
- type_ = "change"
- parent_cls = ChangeLogDirective
-
- def run(self):
- content = _parse_content(self.content)
- p = nodes.paragraph('', '',)
- sorted_tags = _comma_list(content.get('tags', ''))
- rec = {
- 'tags': set(sorted_tags).difference(['']),
- 'tickets': set(_comma_list(content.get('tickets', ''))).difference(['']),
- 'pullreq': set(_comma_list(content.get('pullreq', ''))).difference(['']),
- 'changeset': set(_comma_list(content.get('changeset', ''))).difference(['']),
- 'node': p,
- 'type': self.type_,
- "title": content.get("title", None),
- 'sorted_tags': sorted_tags
- }
-
- if "declarative" in rec['tags']:
- rec['tags'].add("orm")
-
- self.state.nested_parse(content['text'], 0, p)
- self.parent_cls.changes(self.env).append(rec)
-
- return []
-
-def _text_rawsource_from_node(node):
- src = []
- stack = [node]
- while stack:
- n = stack.pop(0)
- if isinstance(n, nodes.Text):
- src.append(n.rawsource)
- stack.extend(n.children)
- return "".join(src)
-
-def _rst2sphinx(text):
- return StringList(
- [line.strip() for line in textwrap.dedent(text).split("\n")]
- )
-
-
-def make_ticket_link(name, rawtext, text, lineno, inliner,
- options={}, content=[]):
- env = inliner.document.settings.env
- render_ticket = env.config.changelog_render_ticket or "%s"
- prefix = "#%s"
- if render_ticket:
- ref = render_ticket % text
- node = nodes.reference(rawtext, prefix % text, refuri=ref, **options)
- else:
- node = nodes.Text(prefix % text, prefix % text)
- return [node], []
-
-def setup(app):
- app.add_directive('changelog', ChangeLogDirective)
- app.add_directive('change', ChangeDirective)
- app.add_config_value("changelog_sections", [], 'env')
- app.add_config_value("changelog_inner_tag_sort", [], 'env')
- app.add_config_value("changelog_render_ticket",
- None,
- 'env'
- )
- app.add_config_value("changelog_render_pullreq",
- None,
- 'env'
- )
- app.add_config_value("changelog_render_changeset",
- None,
- 'env'
- )
- app.add_role('ticket', make_ticket_link)
diff --git a/doc/build/builder/mako.py b/doc/build/builder/mako.py
index 8003ed417..0367bf018 100644
--- a/doc/build/builder/mako.py
+++ b/doc/build/builder/mako.py
@@ -23,49 +23,31 @@ class MakoBridge(TemplateBridge):
)
if rtd:
+ # RTD layout, imported from sqlalchemy.org
import urllib2
- template_url = builder.config['site_base'] + "/docs_base.mako"
- template = urllib2.urlopen(template_url).read()
- self.lookup.put_string("/rtd_base.mako", template)
+ template = urllib2.urlopen(builder.config['site_base'] + "/docs_adapter.mako").read()
+ self.lookup.put_string("docs_adapter.mako", template)
+
+ setup_ctx = urllib2.urlopen(builder.config['site_base'] + "/docs_adapter.py").read()
+ lcls = {}
+ exec(setup_ctx, lcls)
+ self.setup_ctx = lcls['setup_context']
+
+ def setup_ctx(self, context):
+ pass
def render(self, template, context):
template = template.replace(".html", ".mako")
context['prevtopic'] = context.pop('prev', None)
context['nexttopic'] = context.pop('next', None)
- # RTD layout
- if rtd:
- # add variables if not present, such
- # as if local test of READTHEDOCS variable
- if 'MEDIA_URL' not in context:
- context['MEDIA_URL'] = "http://media.readthedocs.org/"
- if 'slug' not in context:
- context['slug'] = context['project'].lower()
- if 'url' not in context:
- context['url'] = "/some/test/url"
- if 'current_version' not in context:
- context['current_version'] = "latest"
-
- if 'name' not in context:
- context['name'] = context['project'].lower()
-
- context['rtd'] = True
- context['toolbar'] = True
- context['layout'] = "rtd_layout.mako"
- context['base'] = "rtd_base.mako"
-
- context['pdf_url'] = "%spdf/%s/%s/%s.pdf" % (
- context['MEDIA_URL'],
- context['slug'],
- context['current_version'],
- context['slug']
- )
# local docs layout
- else:
- context['rtd'] = False
- context['toolbar'] = False
- context['layout'] = "layout.mako"
- context['base'] = "static_base.mako"
+ context['rtd'] = False
+ context['toolbar'] = False
+ context['base'] = "static_base.mako"
+
+ # override context attributes
+ self.setup_ctx(context)
context.setdefault('_', lambda x: x)
return self.lookup.get_template(template).render_unicode(**context)
diff --git a/doc/build/builder/viewsource.py b/doc/build/builder/viewsource.py
new file mode 100644
index 000000000..3f6b8263a
--- /dev/null
+++ b/doc/build/builder/viewsource.py
@@ -0,0 +1,209 @@
+from docutils import nodes
+from sphinx.ext.viewcode import collect_pages
+from sphinx.pycode import ModuleAnalyzer
+import imp
+from sphinx import addnodes
+import re
+from sphinx.util.compat import Directive
+import os
+from docutils.statemachine import StringList
+from sphinx.environment import NoUri
+
+import sys
+
+py2k = sys.version_info < (3, 0)
+if py2k:
+ text_type = unicode
+else:
+ text_type = str
+
+def view_source(name, rawtext, text, lineno, inliner,
+ options={}, content=[]):
+
+ env = inliner.document.settings.env
+
+ node = _view_source_node(env, text, None)
+ return [node], []
+
+def _view_source_node(env, text, state):
+ # pretend we're using viewcode fully,
+ # install the context it looks for
+ if not hasattr(env, '_viewcode_modules'):
+ env._viewcode_modules = {}
+
+ modname = text
+ text = modname.split(".")[-1] + ".py"
+
+ # imitate sphinx .<modname> syntax
+ if modname.startswith("."):
+ # see if the modname needs to be corrected in terms
+ # of current module context
+ base_module = env.temp_data.get('autodoc:module')
+ if base_module is None:
+ base_module = env.temp_data.get('py:module')
+
+ if base_module:
+ modname = base_module + modname
+
+ urito = env.app.builder.get_relative_uri
+
+ # we're showing code examples which may have dependencies
+ # which we really don't want to have required so load the
+ # module by file, not import (though we are importing)
+ # the top level module here...
+ pathname = None
+ for token in modname.split("."):
+ file_, pathname, desc = imp.find_module(token, [pathname] if pathname else None)
+ if file_:
+ file_.close()
+
+ # unlike viewcode which silently traps exceptions,
+ # I want this to totally barf if the file can't be loaded.
+ # a failed build better than a complete build missing
+ # key content
+ analyzer = ModuleAnalyzer.for_file(pathname, modname)
+ # copied from viewcode
+ analyzer.find_tags()
+ if not isinstance(analyzer.code, text_type):
+ code = analyzer.code.decode(analyzer.encoding)
+ else:
+ code = analyzer.code
+
+ if state is not None:
+ docstring = _find_mod_docstring(analyzer)
+ if docstring:
+ # get rid of "foo.py" at the top
+ docstring = re.sub(r"^[a-zA-Z_0-9]+\.py", "", docstring)
+
+ # strip
+ docstring = docstring.strip()
+
+ # yank only first paragraph
+ docstring = docstring.split("\n\n")[0].strip()
+ else:
+ docstring = None
+
+ entry = code, analyzer.tags, {}
+ env._viewcode_modules[modname] = entry
+ pagename = '_modules/' + modname.replace('.', '/')
+
+ try:
+ refuri = urito(env.docname, pagename)
+ except NoUri:
+ # if we're in the latex builder etc., this seems
+ # to be what we get
+ refuri = None
+
+
+ if docstring:
+ # embed the ref with the doc text so that it isn't
+ # a separate paragraph
+ if refuri:
+ docstring = "`%s <%s>`_ - %s" % (text, refuri, docstring)
+ else:
+ docstring = "``%s`` - %s" % (text, docstring)
+ para = nodes.paragraph('', '')
+ state.nested_parse(StringList([docstring]), 0, para)
+ return_node = para
+ else:
+ if refuri:
+ refnode = nodes.reference('', '',
+ nodes.Text(text, text),
+ refuri=urito(env.docname, pagename)
+ )
+ else:
+ refnode = nodes.Text(text, text)
+
+ if state:
+ return_node = nodes.paragraph('', '', refnode)
+ else:
+ return_node = refnode
+
+ return return_node
+
+from sphinx.pycode.pgen2 import token
+
+def _find_mod_docstring(analyzer):
+ """attempt to locate the module-level docstring.
+
+ Note that sphinx autodoc just uses ``__doc__``. But we don't want
+ to import the module, so we need to parse for it.
+
+ """
+ analyzer.tokenize()
+ for type_, parsed_line, start_pos, end_pos, raw_line in analyzer.tokens:
+ if type_ == token.COMMENT:
+ continue
+ elif type_ == token.STRING:
+ return eval(parsed_line)
+ else:
+ return None
+
+def _parse_content(content):
+ d = {}
+ d['text'] = []
+ idx = 0
+ for line in content:
+ idx += 1
+ m = re.match(r' *\:(.+?)\:(?: +(.+))?', line)
+ if m:
+ attrname, value = m.group(1, 2)
+ d[attrname] = value or ''
+ else:
+ break
+ d["text"] = content[idx:]
+ return d
+
+def _comma_list(text):
+ return re.split(r"\s*,\s*", text.strip())
+
+class AutoSourceDirective(Directive):
+ has_content = True
+
+ def run(self):
+ content = _parse_content(self.content)
+
+
+ env = self.state.document.settings.env
+ self.docname = env.docname
+
+ sourcefile = self.state.document.current_source.split(":")[0]
+ dir_ = os.path.dirname(sourcefile)
+ files = [
+ f for f in os.listdir(dir_) if f.endswith(".py")
+ and f != "__init__.py"
+ ]
+
+ if "files" in content:
+ # ordered listing of files to include
+ files = [fname for fname in _comma_list(content["files"])
+ if fname in set(files)]
+
+ node = nodes.paragraph('', '',
+ nodes.Text("Listing of files:", "Listing of files:")
+ )
+
+ bullets = nodes.bullet_list()
+ for fname in files:
+ modname, ext = os.path.splitext(fname)
+ # relative lookup
+ modname = "." + modname
+
+ link = _view_source_node(env, modname, self.state)
+
+ list_node = nodes.list_item('',
+ link
+ )
+ bullets += list_node
+
+ node += bullets
+
+ return [node]
+
+def setup(app):
+ app.add_role('viewsource', view_source)
+
+ app.add_directive('autosource', AutoSourceDirective)
+
+ # from sphinx.ext.viewcode
+ app.connect('html-collect-pages', collect_pages)
diff --git a/doc/build/changelog/changelog_01.rst b/doc/build/changelog/changelog_01.rst
index 156599a13..0f66f99e4 100644
--- a/doc/build/changelog/changelog_01.rst
+++ b/doc/build/changelog/changelog_01.rst
@@ -182,7 +182,7 @@
:tickets:
added a "mods" system which allows pluggable modules that modify/augment
- core functionality, using the function "install_mods(*modnames)".
+ core functionality, using the function "install_mods(\*modnames)".
.. change::
:tags:
@@ -421,7 +421,7 @@
:tags:
:tickets:
- added *args, **kwargs pass-thru to engine.transaction(func) allowing easier
+ added \*args, \**kwargs pass-thru to engine.transaction(func) allowing easier
creation of transactionalizing decorator functions
.. change::
@@ -520,7 +520,7 @@
create_engine() now uses genericized parameters; host/hostname,
db/dbname/database, password/passwd, etc. for all engine connections. makes
- engine URIs much more "universal"
+ engine URIs much more "universal"
.. change::
:tags:
@@ -672,7 +672,7 @@
:tags:
:tickets:
- added 'get_session().invalidate(*obj)' method to objectstore, instances will
+ added 'get_session().invalidate(\*obj)' method to objectstore, instances will
refresh() themselves upon the next attribute access.
.. change::
@@ -805,7 +805,7 @@
:tags:
:tickets:
- added "refresh(*obj)" method to objectstore / Session to reload the attributes of
+ added "refresh(\*obj)" method to objectstore / Session to reload the attributes of
any set of objects from the database unconditionally
.. changelog::
@@ -856,14 +856,15 @@
two issues related to postgres, which doesnt want to give you the "lastrowid"
since oids are deprecated:
- * postgres database-side defaults that are on primary key cols *do* execute
- explicitly beforehand, even though thats not the idea of a PassiveDefault. this is
- because sequences on columns get reflected as PassiveDefaults, but need to be explicitly
- executed on a primary key col so we know what we just inserted.
- * if you did add a row that has a bunch of database-side defaults on it,
- and the PassiveDefault thing was working the old way, i.e. they just execute on
- the DB side, the "cant get the row back without an OID" exception that occurred
- also will not happen unless someone (usually the ORM) explicitly asks for it.
+
+ * postgres database-side defaults that are on primary key cols *do* execute
+ explicitly beforehand, even though thats not the idea of a PassiveDefault. this is
+ because sequences on columns get reflected as PassiveDefaults, but need to be explicitly
+ executed on a primary key col so we know what we just inserted.
+ * if you did add a row that has a bunch of database-side defaults on it,
+ and the PassiveDefault thing was working the old way, i.e. they just execute on
+ the DB side, the "cant get the row back without an OID" exception that occurred
+ also will not happen unless someone (usually the ORM) explicitly asks for it.
.. change::
:tags:
diff --git a/doc/build/changelog/changelog_02.rst b/doc/build/changelog/changelog_02.rst
index 600dcc6eb..c3b91f1bd 100644
--- a/doc/build/changelog/changelog_02.rst
+++ b/doc/build/changelog/changelog_02.rst
@@ -3,14 +3,14 @@
0.2 Changelog
==============
-
+
.. changelog::
:version: 0.2.8
:released: Tue Sep 05 2006
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
cleanup on connection methods + documentation. custom DBAPI
arguments specified in query string, 'connect_args' argument
@@ -18,7 +18,7 @@
function to 'create_engine'.
.. change::
- :tags:
+ :tags:
:tickets: 274
added "recycle" argument to Pool, is "pool_recycle" on create_engine,
@@ -27,7 +27,7 @@
stale connections
.. change::
- :tags:
+ :tags:
:tickets: 121
changed "invalidate" semantics with pooled connection; will
@@ -39,28 +39,28 @@
the connecting application
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
eesh ! the tutorial doctest was broken for quite some time.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
add_property() method on mapper does a "compile all mappers"
step in case the given property references a non-compiled mapper
(as it did in the case of the tutorial !)
.. change::
- :tags:
+ :tags:
:tickets: 277
check for pg sequence already existing before create
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
if a contextual session is established via MapperExtension.get_session
(as it is using the sessioncontext plugin, etc), a lazy load operation
@@ -68,24 +68,24 @@
persistent with a session already.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
lazy loads will not fire off for an object that does not have a
database identity (why?
see http://www.sqlalchemy.org/trac/wiki/WhyDontForeignKeysLoadData)
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
unit-of-work does a better check for "orphaned" objects that are
part of a "delete-orphan" cascade, for certain conditions where the
parent isnt available to cascade from.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
mappers can tell if one of their objects is an "orphan" based
on interactions with the attribute package. this check is based
@@ -93,22 +93,22 @@
when objects are attached and detached from each other.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
it is now invalid to declare a self-referential relationship with
"delete-orphan" (as the abovementioned check would make them impossible
to save)
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
improved the check for objects being part of a session when the
unit of work seeks to flush() them as part of a relationship..
.. change::
- :tags:
+ :tags:
:tickets: 280
statement execution supports using the same BindParam
@@ -116,14 +116,14 @@
parameters. nice job by Bill Noon figuring out the basic idea.
.. change::
- :tags:
+ :tags:
:tickets: 60, 71
postgres reflection moved to use pg_schema tables, can be overridden
with use_information_schema=True argument to create_engine.
.. change::
- :tags:
+ :tags:
:tickets: 155
added case_sensitive argument to MetaData, Table, Column, determines
@@ -138,27 +138,27 @@
work with
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
unit tests updated to run without any pysqlite installed; pool
test uses a mock DBAPI
.. change::
- :tags:
+ :tags:
:tickets: 281
urls support escaped characters in passwords
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
added limit/offset to UNION queries (though not yet in oracle)
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
added "timezone=True" flag to DateTime and Time types. postgres
so far will convert this to "TIME[STAMP] (WITH|WITHOUT) TIME ZONE",
@@ -167,26 +167,26 @@
against datetimes that dont).
.. change::
- :tags:
+ :tags:
:tickets: 287
- fix to using query.count() with distinct, **kwargs with SelectResults
+ fix to using query.count() with distinct, \**kwargs with SelectResults
count()
.. change::
- :tags:
+ :tags:
:tickets: 289
deregister Table from MetaData when autoload fails;
.. change::
- :tags:
+ :tags:
:tickets: 293
import of py2.5s sqlite3
.. change::
- :tags:
+ :tags:
:tickets: 296
unicode fix for startswith()/endswith()
@@ -196,32 +196,32 @@
:released: Sat Aug 12 2006
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
quoting facilities set up so that database-specific quoting can be
turned on for individual table, schema, and column identifiers when
used in all queries/creates/drops. Enabled via "quote=True" in
Table or Column, as well as "quote_schema=True" in Table. Thanks to
- Aaron Spike for his excellent efforts.
+ Aaron Spike for the excellent efforts.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
assignmapper was setting is_primary=True, causing all sorts of mayhem
by not raising an error when redundant mappers were set up, fixed
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
added allow_null_pks option to Mapper, allows rows where some
primary key columns are null (i.e. when mapping to outer joins etc)
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
modifcation to unitofwork to not maintain ordering within the
"new" list or within the UOWTask "objects" list; instead, new objects
@@ -233,48 +233,48 @@
sort) dont have to worry about maintaining order (which they werent anyway)
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed reflection of foreign keys to autoload the referenced table
if it was not loaded already
.. change::
- :tags:
+ :tags:
:tickets: 256
- pass URL query string arguments to connect() function
.. change::
- :tags:
+ :tags:
:tickets: 257
- oracle boolean type
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
custom primary/secondary join conditions in a relation *will* be propagated
to backrefs by default. specifying a backref() will override this behavior.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
better check for ambiguous join conditions in sql.Join; propagates to a
better error message in PropertyLoader (i.e. relation()/backref()) for when
the join condition can't be reasonably determined.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
sqlite creates ForeignKeyConstraint objects properly upon table
reflection.
.. change::
- :tags:
+ :tags:
:tickets: 224
adjustments to pool stemming from changes made for.
@@ -282,65 +282,65 @@
succeeded. added a test script to attempt testing this.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed mysql reflection of default values to be PassiveDefault
.. change::
- :tags:
+ :tags:
:tickets: 263, 264
added reflected 'tinyint', 'mediumint' type to MS-SQL.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
SingletonThreadPool has a size and does a cleanup pass, so that
only a given number of thread-local connections stay around (needed
for sqlite applications that dispose of threads en masse)
.. change::
- :tags:
+ :tags:
:tickets: 267, 265
fixed small pickle bug(s) with lazy loaders
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed possible error in mysql reflection where certain versions
return an array instead of string for SHOW CREATE TABLE call
.. change::
- :tags:
+ :tags:
:tickets: 1770
fix to lazy loads when mapping to joins
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
all create()/drop() calls have a keyword argument of "connectable".
"engine" is deprecated.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed ms-sql connect() to work with adodbapi
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
added "nowait" flag to Select()
.. change::
- :tags:
+ :tags:
:tickets: 271
inheritance check uses issubclass() instead of direct __mro__ check
@@ -348,27 +348,27 @@
flexibly correspond to class inheritance
.. change::
- :tags:
+ :tags:
:tickets: 252
SelectResults will use a subselect, when calling an aggregate (i.e.
max, min, etc.) on a SelectResults that has an ORDER BY clause
.. change::
- :tags:
+ :tags:
:tickets: 269
fixes to types so that database-specific types more easily used;
fixes to mysql text types to work with this methodology
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
some fixes to sqlite date type organization
.. change::
- :tags:
+ :tags:
:tickets: 263
added MSTinyInteger to MS-SQL
@@ -378,7 +378,7 @@
:released: Thu Jul 20 2006
.. change::
- :tags:
+ :tags:
:tickets: 76
big overhaul to schema to allow truly composite primary and foreign
@@ -389,73 +389,73 @@
and reflection is now more table oriented rather than column oriented.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
overhaul to MapperExtension calling scheme, wasnt working very well
previously
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
tweaks to ActiveMapper, supports self-referential relationships
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
slight rearrangement to objectstore (in activemapper/threadlocal)
so that the SessionContext is referenced by '.context' instead
of subclassed directly.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
activemapper will use threadlocal's objectstore if the mod is
activated when activemapper is imported
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
small fix to URL regexp to allow filenames with '@' in them
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixes to Session expunge/update/etc...needs more cleanup.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
select_table mappers *still* werent always compiling
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed up Boolean datatype
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
added count()/count_by() to list of methods proxied by assignmapper;
this also adds them to activemapper
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
connection exceptions wrapped in DBAPIError
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
ActiveMapper now supports autoloading column definitions from the
database if you supply a __autoload__ = True attribute in your
@@ -463,43 +463,43 @@
any relationships.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
deferred column load could screw up the connection status in
a flush() under some circumstances, this was fixed
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
expunge() was not working with cascade, fixed.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
potential endless loop in cascading operations fixed.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
added "synonym()" function, applied to properties to have a
propname the same as another, for the purposes of overriding props
and allowing the original propname to be accessible in select_by().
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fix to typing in clause construction which specifically helps
type issues with polymorphic_union (CAST/ColumnClause propagates
its type to proxy columns)
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
mapper compilation work ongoing, someday it'll work....moved
around the initialization of MapperProperty objects to be after
@@ -508,34 +508,34 @@
aware of their "inherited" status if so.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
eager loads explicitly disallowed on self-referential relationships, or
relationships to an inheriting mapper (which is also self-referential)
.. change::
- :tags:
+ :tags:
:tickets: 244
reduced bind param size in query._get to appease the picky oracle
.. change::
- :tags:
+ :tags:
:tickets: 234
added 'checkfirst' argument to table.create()/table.drop(), as
well as table.exists()
.. change::
- :tags:
+ :tags:
:tickets: 245
some other ongoing fixes to inheritance
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
attribute/backref/orphan/history-tracking tweaks as usual...
@@ -544,54 +544,54 @@
:released: Sat Jul 08 2006
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed endless loop bug in select_by(), if the traversal hit
two mappers that referenced each other
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
upgraded all unittests to insert './lib/' into sys.path,
working around new setuptools PYTHONPATH-killing behavior
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
further fixes with attributes/dependencies/etc....
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
improved error handling for when DynamicMetaData is not connected
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
MS-SQL support largely working (tested with pymssql)
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
ordering of UPDATE and DELETE statements within groups is now
in order of primary key values, for more deterministic ordering
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
after_insert/delete/update mapper extensions now called per object,
not per-object-per-table
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
further fixes/refactorings to mapper compilation
@@ -600,30 +600,30 @@
:released: Tue Jun 27 2006
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
try/except when the mapper sets init.__name__ on a mapped class,
supports python 2.3
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed bug where threadlocal engine would still autocommit
despite a transaction in progress
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
lazy load and deferred load operations require the parent object
to be in a Session to do the operation; whereas before the operation
would just return a blank list or None, it now raises an exception.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
Session.update() is slightly more lenient if the session to which
the given object was formerly attached to was garbage collected;
@@ -631,19 +631,19 @@
the previous Session.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixes to mapper compilation, checking for more error conditions
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
small fix to eager loading combined with ordering/limit/offset
.. change::
- :tags:
+ :tags:
:tickets: 206
utterly remarkable: added a single space between 'CREATE TABLE'
@@ -651,28 +651,28 @@
reserved word tablename.....*
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
more fixes to inheritance, related to many-to-many relations
properly saving
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed bug when specifying explicit module to mysql dialect
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
when QueuePool times out it raises a TimeoutError instead of
erroneously making another connection
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
Queue.Queue usage in pool has been replaced with a locally
modified version (works in py2.3/2.4!) that uses a threading.RLock
@@ -682,35 +682,35 @@
causing a reentrant hang unless threading.RLock is used.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
postgres will not place SERIAL keyword on a primary key column
if it has a foreign key constraint
.. change::
- :tags:
+ :tags:
:tickets: 221
cursor() method on ConnectionFairy allows db-specific extension
arguments to be propagated
.. change::
- :tags:
+ :tags:
:tickets: 225
lazy load bind params properly propagate column type
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
new MySQL types: MSEnum, MSTinyText, MSMediumText, MSLongText, etc.
more support for MS-specific length/precision params in numeric types
patch courtesy Mike Bernson
.. change::
- :tags:
+ :tags:
:tickets: 224
some fixes to connection pool invalidate()
@@ -720,23 +720,23 @@
:released: Sat Jun 17 2006
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
overhaul to mapper compilation to be deferred. this allows mappers
to be constructed in any order, and their relationships to each
other are compiled when the mappers are first used.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed a pretty big speed bottleneck in cascading behavior particularly
when backrefs were in use
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
the attribute instrumentation module has been completely rewritten; its
now a large degree simpler and clearer, slightly faster. the "history"
@@ -746,117 +746,117 @@
list attributes is now more open ended (i.e. theyre not sets anymore).
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
py2.4 "set" construct used internally, falls back to sets.Set when
"set" not available/ordering is needed.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fix to transaction control, so that repeated rollback() calls
dont fail (was failing pretty badly when flush() would raise
an exception in a larger try/except transaction block)
.. change::
- :tags:
+ :tags:
:tickets: 151
"foreignkey" argument to relation() can also be a list. fixed
auto-foreignkey detection
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed bug where tables with schema names werent getting indexed in
the MetaData object properly
.. change::
- :tags:
+ :tags:
:tickets: 207
fixed bug where Column with redefined "key" property wasnt getting
type conversion happening in the ResultProxy
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed 'port' attribute of URL to be an integer if present
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed old bug where if a many-to-many table mapped as "secondary"
had extra columns, delete operations didnt work
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
bugfixes for mapping against UNION queries
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed incorrect exception class thrown when no DB driver present
.. change::
- :tags:
+ :tags:
:tickets: 138
added NonExistentTable exception thrown when reflecting a table
that doesnt exist
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
small fix to ActiveMapper regarding one-to-one backrefs, other
refactorings
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
overridden constructor in mapped classes gets __name__ and
__doc__ from the original class
.. change::
- :tags:
+ :tags:
:tickets: 200
fixed small bug in selectresult.py regarding mapper extension
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
small tweak to cascade_mappers, not very strongly supported
function at the moment
.. change::
- :tags:
+ :tags:
:tickets: 202
some fixes to between(), column.between() to propagate typing
information better
.. change::
- :tags:
+ :tags:
:tickets: 203
if an object fails to be constructed, is not added to the
session
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
CAST function has been made into its own clause object with
its own compilation function in ansicompiler; allows MySQL
@@ -869,97 +869,97 @@
:released: Mon Jun 05 2006
.. change::
- :tags:
+ :tags:
:tickets: 190
big improvements to polymorphic inheritance behavior, enabling it
to work with adjacency list table structures
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
major fixes and refactorings to inheritance relationships overall,
more unit tests
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed "echo_pool" flag on create_engine()
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fix to docs, removed incorrect info that close() is unsafe to use
with threadlocal strategy (its totally safe !)
.. change::
- :tags:
+ :tags:
:tickets: 188
create_engine() can take URLs as string or unicode
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
firebird support partially completed;
thanks to James Ralston and Brad Clements for their efforts.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
Oracle url translation was broken, fixed, will feed host/port/sid
into cx_oracle makedsn() if 'database' field is present, else uses
straight TNS name from the 'host' field
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fix to using unicode criterion for query.get()/query.load()
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
count() function on selectables now uses table primary key or
first column instead of "1" for criterion, also uses label "rowcount"
instead of "count".
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
got rudimental "mapping to multiple tables" functionality cleaned up,
more correctly documented
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
restored global_connect() function, attaches to a DynamicMetaData
instance called "default_metadata". leaving MetaData arg to Table
out will use the default metadata.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixes to session cascade behavior, entity_name propigation
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
reorganized unittests into subdirectories
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
more fixes to threadlocal connection nesting patterns
@@ -968,29 +968,29 @@
:released: Mon May 29 2006
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
"pool" argument to create_engine() properly propagates
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixes to URL, raises exception if not parsed, does not pass blank
fields along to the DB connect string (a string such as
user:host@/db was breaking on postgres)
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
small fixes to Mapper when it inserts and tries to get
new primary key values back
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
rewrote half of TLEngine, the ComposedSQLEngine used with
'strategy="threadlocal"'. it now properly implements engine.begin()/
@@ -998,8 +998,8 @@
added about six unittests.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
major "duh" in pool.Pool, forgot to put back the WeakValueDictionary.
unittest which was supposed to check for this was also silently missing
@@ -1007,35 +1007,35 @@
of scope.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
placeholder dispose() method added to SingletonThreadPool, doesn't
do anything yet
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
rollback() is automatically called when an exception is raised,
but only if there's no transaction in process (i.e. works more like
autocommit).
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed exception raise in sqlite if no sqlite module present
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
added extra example detail for association object doc
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
Connection adds checks for already being closed
@@ -1044,8 +1044,8 @@
:released: Sat May 27 2006
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
overhaul to Engine system so that what was formerly the SQLEngine
is now a ComposedSQLEngine which consists of a variety of components,
@@ -1053,14 +1053,14 @@
db modules as well as Session and Mapper.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
create_engine now takes only RFC-1738-style strings:
driver://user:password@host:port/database
.. change::
- :tags:
+ :tags:
:tickets: 152
total rewrite of connection-scoping methodology, Connection objects
@@ -1070,8 +1070,8 @@
to the pool.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
overhaul to Session interface and scoping. uses hibernate-style
methods, including query(class), save(), save_or_update(), etc.
@@ -1082,8 +1082,8 @@
across multiple engines.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
overhaul to mapper's dependency and "cascade" behavior; dependency logic
factored out of properties.py into a separate module "dependency.py".
@@ -1093,8 +1093,8 @@
decisions on how that child should be updated in the DB with regards to deletes.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
overhaul to Schema to build upon MetaData object instead of an Engine.
Entire SQL/Schema system can be used with no Engines whatsoever, executed
@@ -1103,13 +1103,13 @@
anymore and is replaced by DynamicMetaData.
.. change::
- :tags:
+ :tags:
:tickets: 167
true polymorphic behavior implemented, fixes
.. change::
- :tags:
+ :tags:
:tickets: 147
"oid" system has been totally moved into compile-time behavior;
@@ -1117,71 +1117,71 @@
doesn't get compiled, fixes
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
overhaul to packaging; "mapping" is now "orm", "objectstore" is now
"session", the old "objectstore" namespace gets loaded in via the
"threadlocal" mod if used
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
mods now called in via "import <modname>". extensions favored over
mods as mods are globally-monkeypatching
.. change::
- :tags:
+ :tags:
:tickets: 154
fix to add_property so that it propagates properties to inheriting
mappers
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
backrefs create themselves against primary mapper of its originating
property, primary/secondary join arguments can be specified to override.
helps their usage with polymorphic mappers
.. change::
- :tags:
+ :tags:
:tickets: 31
"table exists" function has been implemented
.. change::
- :tags:
+ :tags:
:tickets: 98
"create_all/drop_all" added to MetaData object
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
improvements and fixes to topological sort algorithm, as well as more
unit tests
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
tutorial page added to docs which also can be run with a custom doctest
runner to ensure its properly working. docs generally overhauled to
deal with new code patterns
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
many more fixes, refactorings.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
migration guide is available on the Wiki at
http://www.sqlalchemy.org/trac/wiki/02Migration
diff --git a/doc/build/changelog/changelog_03.rst b/doc/build/changelog/changelog_03.rst
index e47da340a..05e10e664 100644
--- a/doc/build/changelog/changelog_03.rst
+++ b/doc/build/changelog/changelog_03.rst
@@ -695,7 +695,7 @@
:tags: orm
:tickets:
- session.get() and session.load() propagate **kwargs through to
+ session.get() and session.load() propagate \**kwargs through to
query
.. change::
@@ -979,7 +979,7 @@
:tickets:
query strings in unicode URLs get keys encoded to ascii
- for **kwargs compat
+ for \**kwargs compat
.. change::
:tags: sql
@@ -1113,7 +1113,7 @@
:tickets:
support for SSL arguments given as inline within URL query string,
- prefixed with "ssl_", courtesy terjeros@gmail.com.
+ prefixed with "ssl\_", courtesy terjeros@gmail.com.
.. change::
:tags: <schemaname>, mysql
@@ -1266,7 +1266,7 @@
:tags: sql
:tickets:
- the "else_" parameter to the case statement now properly works when
+ the "else\_" parameter to the case statement now properly works when
set to zero.
.. change::
@@ -1279,16 +1279,15 @@
and a new one returned with additional criterion added.
The new methods include:
- filter() - applies select criterion to the query
- filter_by() - applies "by"-style criterion to the query
- avg() - return the avg() function on the given column
- join() - join to a property (or across a list of properties)
- outerjoin() - like join() but uses LEFT OUTER JOIN
- limit()/offset() - apply LIMIT/OFFSET
- range-based access which applies limit/offset:
- session.query(Foo)[3:5]
- distinct() - apply DISTINCT
- list() - evaluate the criterion and return results
+ * filter() - applies select criterion to the query
+ * filter_by() - applies "by"-style criterion to the query
+ * avg() - return the avg() function on the given column
+ * join() - join to a property (or across a list of properties)
+ * outerjoin() - like join() but uses LEFT OUTER JOIN
+ * limit()/offset() - apply LIMIT/OFFSET range-based access
+ which applies limit/offset: session.query(Foo)[3:5]
+ * distinct() - apply DISTINCT
+ * list() - evaluate the criterion and return results
no incompatible changes have been made to Query's API and no methods
have been deprecated. Existing methods like select(), select_by(),
@@ -1321,7 +1320,7 @@
:tags: orm
:tickets:
- strings and columns can also be sent to the *args of instances()
+ strings and columns can also be sent to the \*args of instances()
where those exact result columns will be part of the result tuples.
.. change::
@@ -1488,7 +1487,7 @@
:tags: mysql
:tickets:
- added a catchall **kwargs to MSString, to help reflection of
+ added a catchall \**kwargs to MSString, to help reflection of
obscure types (like "varchar() binary" in MS 4.0)
.. change::
@@ -1526,7 +1525,7 @@
:tickets:
fixed argument passing to straight textual execute() on engine,
- connection. can handle *args or a list instance for positional, **kwargs
+ connection. can handle \*args or a list instance for positional, \**kwargs
or a dict instance for named args, or a list of list or dicts to invoke
executemany()
@@ -2364,7 +2363,7 @@
:tags: engine/pool
:tickets:
- create_engine() reworked to be strict about incoming **kwargs. all keyword
+ create_engine() reworked to be strict about incoming \**kwargs. all keyword
arguments must be consumed by one of the dialect, connection pool, and engine
constructors, else a TypeError is thrown which describes the full set of
invalid kwargs in relation to the selected dialect/pool/engine configuration.
diff --git a/doc/build/changelog/changelog_04.rst b/doc/build/changelog/changelog_04.rst
index e1acfe4c0..61ea28c11 100644
--- a/doc/build/changelog/changelog_04.rst
+++ b/doc/build/changelog/changelog_04.rst
@@ -83,14 +83,11 @@
:tickets:
Added "add()" and "add_all()" to scoped_session
- methods. Workaround for 0.4.7:
+ methods. Workaround for 0.4.7::
- from sqlalchemy.orm.scoping import ScopedSession,\
- instrument
- setattr(
- ScopedSession, "add", instrument("add"))
- setattr(
- ScopedSession, "add_all", instrument("add_all"))
+ from sqlalchemy.orm.scoping import ScopedSession, instrument
+ setattr(ScopedSession, "add", instrument("add"))
+ setattr(ScopedSession, "add_all", instrument("add_all"))
.. change::
:tags: orm
@@ -344,7 +341,7 @@
:tags: orm
:tickets:
- set-based collections |=, -=, ^= and &= are stricter about
+ set-based collections \|=, -=, ^= and &= are stricter about
their operands and only operate on sets, frozensets or
subclasses of the collection type. Previously, they would
accept any duck-typed set.
@@ -424,7 +421,7 @@
:tags: ext
:tickets:
- set-based association proxies |=, -=, ^= and &= are
+ set-based association proxies \|=, -=, ^= and &= are
stricter about their operands and only operate on sets,
frozensets or other association proxies. Previously, they
would accept any duck-typed set.
@@ -541,11 +538,12 @@
The new approach also automatically allows eager loads
to work for subclasses, if they are present, for
- example
+ example::
+
sess.query(Company).options(
eagerload_all(
-
))
+
to load Company objects, their employees, and the
'machines' collection of employees who happen to be
Engineers. A "with_polymorphic" Query option should be
@@ -561,7 +559,7 @@
is not carved in stone just yet: _values() and
_from_self(). We'd like feedback on these.
- - _values(*columns) is given a list of column
+ - _values(\*columns) is given a list of column
expressions, and returns a new Query that only
returns those columns. When evaluated, the return
value is a list of tuples just like when using
@@ -594,7 +592,7 @@
:tickets:
query.order_by() and query.group_by() will accept
- multiple arguments using *args (like select()
+ multiple arguments using \*args (like select()
already does).
.. change::
@@ -1780,7 +1778,7 @@
:tags: ext
:tickets:
- '+', '*', '+=' and '*=' support for association
+ '+', '*', '+=' and '\*=' support for association
proxied lists.
.. change::
@@ -1866,7 +1864,7 @@
:tickets:
added new flag to String and create_engine(),
- assert_unicode=(True|False|'warn'|None). Defaults to `False` or `None` on
+ assert_unicode=(True|False|'warn'\|None). Defaults to `False` or `None` on
create_engine() and String, `'warn'` on the Unicode type. When `True`,
results in all unicode conversion operations raising an exception when a
non-unicode bytestring is passed as a bind parameter. 'warn' results
@@ -2010,8 +2008,8 @@
:tickets: 908
mapped classes which extend "object" and do not provide an
- __init__() method will now raise TypeError if non-empty *args
- or **kwargs are present at instance construction time (and are
+ __init__() method will now raise TypeError if non-empty \*args
+ or \**kwargs are present at instance construction time (and are
not consumed by any extensions such as the scoped_session mapper),
consistent with the behavior of normal Python classes
@@ -2818,10 +2816,10 @@
:tickets:
Improvements and fixes on Firebird reflection:
- . FBDialect now mimics OracleDialect, regarding case-sensitivity of TABLE and
- COLUMN names (see 'case_sensitive remotion' topic on this current file).
- . FBDialect.table_names() doesn't bring system tables (ticket:796).
- . FB now reflects Column's nullable property correctly.
+ * FBDialect now mimics OracleDialect, regarding case-sensitivity of TABLE and
+ COLUMN names (see 'case_sensitive remotion' topic on this current file).
+ * FBDialect.table_names() doesn't bring system tables (ticket:796).
+ * FB now reflects Column's nullable property correctly.
.. change::
:tags:
@@ -2963,7 +2961,7 @@
:tags:
:tickets:
- Changed the API for the in_ operator. in_() now accepts a single argument
+ Changed the API for the in\_ operator. in_() now accepts a single argument
that is a sequence of values or a selectable. The old API of passing in
values as varargs still works but is deprecated.
@@ -3246,7 +3244,7 @@
:tags:
:tickets:
- Tidied up what ends up in your namespace when you 'from sqlalchemy import *':
+ Tidied up what ends up in your namespace when you 'from sqlalchemy import \*':
.. change::
:tags:
@@ -3816,10 +3814,10 @@
is represented by more than one column, when using the ORM. Objects of
the new type are fully functional in query expressions, comparisons,
query.get() clauses, etc. and act as though they are regular single-column
- scalars... except they're not! Use the function composite(cls, *columns)
+ scalars... except they're not! Use the function composite(cls, \*columns)
inside of the mapper's "properties" dict, and instances of cls will be
created/mapped to a single attribute, comprised of the values corresponding
- to *columns.
+ to \*columns.
.. change::
:tags: orm
@@ -3912,7 +3910,7 @@
:tickets:
All "type" keyword arguments, such as those to bindparam(), column(),
- Column(), and func.<something>(), renamed to "type_". Those objects still
+ Column(), and func.<something>(), renamed to "type\_". Those objects still
name their "type" attribute as "type".
.. change::
diff --git a/doc/build/changelog/changelog_05.rst b/doc/build/changelog/changelog_05.rst
index 0bcc1aa3f..debcc29fd 100644
--- a/doc/build/changelog/changelog_05.rst
+++ b/doc/build/changelog/changelog_05.rst
@@ -1093,7 +1093,7 @@
Session.scalar() now converts raw SQL strings to text()
the same way Session.execute() does and accepts same
- alternative **kw args.
+ alternative \**kw args.
.. change::
:tags: orm
@@ -1506,7 +1506,7 @@
:tickets:
ColumnProperty (and front-end helpers such as ``deferred``) no
- longer ignores unknown **keyword arguments.
+ longer ignores unknown \**keyword arguments.
.. change::
:tags: orm
@@ -2903,7 +2903,7 @@
:tags: orm
:tickets:
- The RowTuple object returned by Query(*cols) now features
+ The RowTuple object returned by Query(\*cols) now features
keynames which prefer mapped attribute names over column keys,
column keys over column names, i.e. Query(Class.foo,
Class.bar) will have names "foo" and "bar" even if those are
@@ -2984,7 +2984,7 @@
:tickets: 1140
class.someprop.in_() raises NotImplementedError pending the
- implementation of "in_" for relation
+ implementation of "in\_" for relation
.. change::
:tags: orm
@@ -3499,7 +3499,7 @@
Unicode, UnicodeText types now set "assert_unicode" and
"convert_unicode" by default, but accept overriding
- **kwargs for these values.
+ \**kwargs for these values.
.. change::
:tags: sql
diff --git a/doc/build/changelog/changelog_06.rst b/doc/build/changelog/changelog_06.rst
index c7f4dcdea..18d61019a 100644
--- a/doc/build/changelog/changelog_06.rst
+++ b/doc/build/changelog/changelog_06.rst
@@ -1013,7 +1013,7 @@
New Query methods: query.label(name), query.as_scalar(),
return the query's statement as a scalar subquery
with /without label;
- query.with_entities(*ent), replaces the SELECT list of
+ query.with_entities(\*ent), replaces the SELECT list of
the query with new entities.
Roughly equivalent to a generative form of query.values()
which accepts mapped entities as well as column
@@ -1246,7 +1246,7 @@
:tags: sql
:tickets:
- Added type_coerce(expr, type_) expression element.
+ Added type_coerce(expr, type\_) expression element.
Treats the given expression as the given type when evaluating
expressions and processing result rows, but does not
affect the generation of SQL, other than an anonymous
@@ -3005,7 +3005,7 @@
:tags: orm
:tickets:
- Query gains an add_columns(*columns) method which is a multi-
+ Query gains an add_columns(\*columns) method which is a multi-
version of add_column(col). add_column(col) is future
deprecated.
@@ -3641,9 +3641,9 @@
:tags: declarative
:tickets:
- DeclarativeMeta exclusively uses cls.__dict__ (not dict_)
+ DeclarativeMeta exclusively uses cls.__dict__ (not dict\_)
as the source of class information; _as_declarative exclusively
- uses the dict_ passed to it as the source of class information
+ uses the dict\_ passed to it as the source of class information
(which when using DeclarativeMeta is cls.__dict__). This should
in theory make it easier for custom metaclasses to modify
the state passed into _as_declarative.
@@ -4190,10 +4190,10 @@
* Passing a single list of elements to eagerload(),
eagerload_all(), contains_eager(), lazyload(),
defer(), and undefer() instead of multiple positional
- *args is deprecated.
+ \*args is deprecated.
* Passing a single list of elements to query.order_by(),
query.group_by(), query.join(), or query.outerjoin()
- instead of multiple positional *args is deprecated.
+ instead of multiple positional \*args is deprecated.
* query.iterate_instances() is removed. Use query.instances().
* Query.query_from_parent() is removed. Use the
sqlalchemy.orm.with_parent() function to produce a
@@ -4363,7 +4363,7 @@
"expr != expr" can be very expensive, and it's preferred
that the user not issue in_() if the list is empty,
instead simply not querying, or modifying the criterion
- as appropriate for more complex situations.
+ as appropriate for more complex situations.
.. change::
:tags: sql
@@ -4523,7 +4523,7 @@
* the "connection" argument from engine.transaction() and
engine.run_callable() is removed - Connection itself
now has those methods. All four methods accept
- *args and **kwargs which are passed to the given callable,
+ \*args and \**kwargs which are passed to the given callable,
as well as the operating connection.
.. change::
@@ -4570,11 +4570,13 @@
Removed public mutability from Index and Constraint
objects:
- - ForeignKeyConstraint.append_element()
- - Index.append_column()
- - UniqueConstraint.append_column()
- - PrimaryKeyConstraint.add()
- - PrimaryKeyConstraint.remove()
+
+ * ForeignKeyConstraint.append_element()
+ * Index.append_column()
+ * UniqueConstraint.append_column()
+ * PrimaryKeyConstraint.add()
+ * PrimaryKeyConstraint.remove()
+
These should be constructed declaratively (i.e. in one
construction).
@@ -4682,18 +4684,22 @@
The signature of the "on" callable passed to DDL() and
DDLElement() is revised as follows:
- "ddl" - the DDLElement object itself.
- "event" - the string event name.
- "target" - previously "schema_item", the Table or
- MetaData object triggering the event.
- "connection" - the Connection object in use for the operation.
- **kw - keyword arguments. In the case of MetaData before/after
- create/drop, the list of Table objects for which
- CREATE/DROP DDL is to be issued is passed as the kw
- argument "tables". This is necessary for metadata-level
- DDL that is dependent on the presence of specific tables.
+ ddl
+ the DDLElement object itself
+ event
+ the string event name.
+ target
+ previously "schema_item", the Table or MetaData object triggering the event.
+ connection
+ the Connection object in use for the operation.
+ \**kw
+ keyword arguments. In the case of MetaData before/after
+ create/drop, the list of Table objects for which
+ CREATE/DROP DDL is to be issued is passed as the kw
+ argument "tables". This is necessary for metadata-level
+ DDL that is dependent on the presence of specific tables.
- - the "schema_item" attribute of DDL has been renamed to
+ The "schema_item" attribute of DDL has been renamed to
"target".
.. change::
diff --git a/doc/build/changelog/changelog_07.rst b/doc/build/changelog/changelog_07.rst
index 99702a2f0..da89bbde3 100644
--- a/doc/build/changelog/changelog_07.rst
+++ b/doc/build/changelog/changelog_07.rst
@@ -7,15 +7,61 @@
:version: 0.7.11
.. change::
+ :tags: bug, engine
+ :tickets: 2851
+ :versions: 0.8.3, 0.9.0b1
+
+ The regexp used by the :func:`~sqlalchemy.engine.url.make_url` function now parses
+ ipv6 addresses, e.g. surrounded by brackets.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2807
+ :versions: 0.8.3, 0.9.0b1
+
+ Fixed bug where list instrumentation would fail to represent a
+ setslice of ``[0:0]`` correctly, which in particular could occur
+ when using ``insert(0, item)`` with the association proxy. Due
+ to some quirk in Python collections, the issue was much more likely
+ with Python 3 rather than 2.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2801
+ :versions: 0.8.3, 0.9.0b1
+
+ Fixed regression dating back to 0.7.9 whereby the name of a CTE might
+ not be properly quoted if it was referred to in multiple FROM clauses.
+
+ .. change::
+ :tags: mysql, bug
+ :tickets: 2791
+ :versions: 0.8.3, 0.9.0b1
+
+ Updates to MySQL reserved words for versions 5.5, 5.6, courtesy
+ Hanno Schlichting.
+
+ .. change::
+ :tags: sql, bug, cte
+ :tickets: 2783
+ :versions: 0.8.3, 0.9.0b1
+
+ Fixed bug in common table expression system where if the CTE were
+ used only as an ``alias()`` construct, it would not render using the
+ WITH keyword.
+
+ .. change::
:tags: bug, sql
:tickets: 2784
+ :versions: 0.8.3, 0.9.0b1
Fixed bug in :class:`.CheckConstraint` DDL where the "quote" flag from a
:class:`.Column` object would not be propagated.
.. change::
- :tags: bug, orm
- :tickets: 2699
+ :tags: bug, orm
+ :tickets: 2699
+ :versions: 0.8.1
Fixed bug when a query of the form:
``query(SubClass).options(subqueryload(Baseclass.attrname))``,
@@ -30,6 +76,7 @@
.. change::
:tags: bug, orm
:tickets: 2689
+ :versions: 0.8.1
Fixed bug in unit of work whereby a joined-inheritance
subclass could insert the row for the "sub" table
@@ -39,6 +86,7 @@
.. change::
:tags: feature, postgresql
:tickets: 2676
+ :versions: 0.8.0
Added support for Postgresql's traditional SUBSTRING
function syntax, renders as "SUBSTRING(x FROM y FOR z)"
@@ -81,6 +129,27 @@
:released: Thu Feb 7 2013
.. change::
+ :tags: engine, bug
+ :tickets: 2604
+ :versions: 0.8.0b2
+
+ Fixed :meth:`.MetaData.reflect` to correctly use
+ the given :class:`.Connection`, if given, without
+ opening a second connection from that connection's
+ :class:`.Engine`.
+
+ .. change::
+ :tags: mssql, bug
+ :tickets: 2607
+ :versions: 0.8.0b2
+
+ Fixed bug whereby using "key" with Column
+ in conjunction with "schema" for the owning
+ Table would fail to locate result rows due
+ to the MSSQL dialect's "schema rendering"
+ logic's failure to take .key into account.
+
+ .. change::
:tags: sql, mysql, gae
:tickets: 2649
@@ -138,6 +207,7 @@
.. change::
:tags: sqlite, bug
:tickets: 2568
+ :versions: 0.8.0b2
More adjustment to this SQLite related issue which was released in
0.7.9, to intercept legacy SQLite quoting characters when reflecting
@@ -148,6 +218,7 @@
.. change::
:tags: sql, bug
:tickets: 2631
+ :versions: 0.8.0b2
Fixed bug where using server_onupdate=<FetchedValue|DefaultClause>
without passing the "for_update=True" flag would apply the default
@@ -1138,12 +1209,12 @@
:tickets:
Added some decent context managers
- to Engine, Connection:
+ to Engine, Connection::
with engine.begin() as conn:
<work with conn in a transaction>
- and:
+ and::
with engine.connect() as conn:
<work with conn>
@@ -1394,10 +1465,10 @@
:tickets: 2361
Dialect-specific compilers now raise
- CompileException for all type/statement compilation
+ CompileError for all type/statement compilation
issues, instead of InvalidRequestError or ArgumentError.
The DDL for CREATE TABLE will re-raise
- CompileExceptions to include table/column information
+ CompileError to include table/column information
for the problematic column.
.. change::
@@ -1728,10 +1799,10 @@
polymorphic_on now accepts many
new kinds of values:
- - standalone expressions that aren't
+ * standalone expressions that aren't
otherwise mapped
- - column_property() objects
- - string names of any column_property()
+ * column_property() objects
+ * string names of any column_property()
or attribute name of a mapped Column
The docs include an example using
@@ -1897,7 +1968,7 @@
:tickets: 1679
a "has_schema" method has been implemented
- on dialect, but only works on Postgresql so far.
+ on dialect, but only works on Postgresql so far.
Courtesy Manlio Perillo.
.. change::
@@ -2124,7 +2195,7 @@
Enhanced the instrumentation in the ORM to support
Py3K's new argument style of "required kw arguments",
- i.e. fn(a, b, *, c, d), fn(a, b, *args, c, d).
+ i.e. fn(a, b, \*, c, d), fn(a, b, \*args, c, d).
Argument signatures of mapped object's __init__
method will be preserved, including required kw rules.
@@ -2146,8 +2217,9 @@
Fixed a variety of synonym()-related regressions
from 0.6:
- - making a synonym against a synonym now works.
- - synonyms made against a relationship() can
+
+ * making a synonym against a synonym now works.
+ * synonyms made against a relationship() can
be passed to query.join(), options sent
to query.options(), passed by name
to query.with_parent().
@@ -2256,19 +2328,20 @@
:tickets: 2239
New declarative features:
- - __declare_last__() method, establishes an event
- listener for the class method that will be called
- when mappers are completed with the final "configure"
- step.
- - __abstract__ flag. The class will not be mapped
- at all when this flag is present on the class.
- - New helper classes ConcreteBase, AbstractConcreteBase.
- Allow concrete mappings using declarative which automatically
- set up the "polymorphic_union" when the "configure"
- mapper step is invoked.
- - The mapper itself has semi-private methods that allow
- the "with_polymorphic" selectable to be assigned
- to the mapper after it has already been configured.
+
+ * __declare_last__() method, establishes an event
+ listener for the class method that will be called
+ when mappers are completed with the final "configure"
+ step.
+ * __abstract__ flag. The class will not be mapped
+ at all when this flag is present on the class.
+ * New helper classes ConcreteBase, AbstractConcreteBase.
+ Allow concrete mappings using declarative which automatically
+ set up the "polymorphic_union" when the "configure"
+ mapper step is invoked.
+ * The mapper itself has semi-private methods that allow
+ the "with_polymorphic" selectable to be assigned
+ to the mapper after it has already been configured.
.. change::
:tags: orm
@@ -2806,7 +2879,7 @@
:tickets: 2206
Fixed bug whereby adaptation of old append_ddl_listener()
- function was passing unexpected **kw through
+ function was passing unexpected \**kw through
to the Table event. Table gets no kws, the MetaData
event in 0.6 would get "tables=somecollection",
this behavior is preserved.
@@ -4108,6 +4181,10 @@
Mutation Event Extension, supersedes "mutable=True"
+ .. seealso::
+
+ :ref:`07_migration_mutation_extension`
+
.. change::
:tags: orm
:tickets: 1980
@@ -4271,7 +4348,7 @@
:tickets: 1069
Query.distinct() now accepts column expressions
- as *args, interpreted by the Postgresql dialect
+ as \*args, interpreted by the Postgresql dialect
as DISTINCT ON (<expr>).
.. change::
@@ -4371,7 +4448,7 @@
:tickets: 1069
select.distinct() now accepts column expressions
- as *args, interpreted by the Postgresql dialect
+ as \*args, interpreted by the Postgresql dialect
as DISTINCT ON (<expr>). Note this was already
available via passing a list to the `distinct`
keyword argument to select().
@@ -4381,7 +4458,7 @@
:tickets:
select.prefix_with() accepts multiple expressions
- (i.e. *expr), 'prefix' keyword argument to select()
+ (i.e. \*expr), 'prefix' keyword argument to select()
accepts a list or tuple.
.. change::
diff --git a/doc/build/changelog/changelog_08.rst b/doc/build/changelog/changelog_08.rst
index 4e5454180..e4e5c11ab 100644
--- a/doc/build/changelog/changelog_08.rst
+++ b/doc/build/changelog/changelog_08.rst
@@ -3,19 +3,516 @@
0.8 Changelog
==============
+.. changelog_imports::
+
+ .. include:: changelog_07.rst
+ :start-line: 5
+
+.. changelog::
+ :version: 0.8.5
+
+ .. change::
+ :tags: bug, orm
+ :versions: 0.9.2
+ :pullreq: github:58
+
+ Fixed error message when an iterator object is passed to
+ :func:`.class_mapper` or similar, where the error would fail to
+ render on string formatting. Pullreq courtesy Kyle Stark.
+
+ .. change::
+ :tags: bug, firebird
+ :versions: 0.9.0
+ :tickets: 2897
+
+ The firebird dialect will quote identifiers which begin with an
+ underscore. Courtesy Treeve Jelbert.
+
+ .. change::
+ :tags: bug, firebird
+ :versions: 0.9.0
+
+ Fixed bug in Firebird index reflection where the columns within the
+ index were not sorted correctly; they are now sorted
+ in order of RDB$FIELD_POSITION.
+
+ .. change::
+ :tags: bug, mssql, firebird
+ :versions: 0.9.0
+
+ The "asdecimal" flag used with the :class:`.Float` type will now
+ work with Firebird as well as the mssql+pyodbc dialects; previously the
+ decimal conversion was not occurring.
+
+ .. change::
+ :tags: bug, mssql, pymssql
+ :versions: 0.9.0
+ :pullreq: github:51
+
+ Added "Net-Lib error during Connection reset by peer" message
+ to the list of messages checked for "disconnect" within the
+ pymssql dialect. Courtesy John Anderson.
+
+ .. change::
+ :tags: bug, sql
+ :versions: 0.9.0
+ :tickets: 2896
+
+ Fixed issue where a primary key column that has a Sequence on it,
+ yet the column is not the "auto increment" column, either because
+ it has a foreign key constraint or ``autoincrement=False`` set,
+ would attempt to fire the Sequence on INSERT for backends that don't
+ support sequences, when presented with an INSERT missing the primary
+ key value. This would take place on non-sequence backends like
+ SQLite, MySQL.
+
+ .. change::
+ :tags: bug, sql
+ :versions: 0.9.0
+ :tickets: 2895
+
+ Fixed bug with :meth:`.Insert.from_select` method where the order
+ of the given names would not be taken into account when generating
+ the INSERT statement, thus producing a mismatch versus the column
+ names in the given SELECT statement. Also noted that
+ :meth:`.Insert.from_select` implies that Python-side insert defaults
+ cannot be used, since the statement has no VALUES clause.
+
+ .. change::
+ :tags: enhancement, sql
+ :versions: 0.9.0
+
+ The exception raised when a :class:`.BindParameter` is present
+ in a compiled statement without a value now includes the key name
+ of the bound parameter in the error message.
+
+ .. change::
+ :tags: bug, orm
+ :versions: 0.9.0
+ :tickets: 2887
+
+ An adjustment to the :func:`.subqueryload` strategy which ensures that
+ the query runs after the loading process has begun; this is so that
+ the subqueryload takes precedence over other loaders that may be
+ hitting the same attribute due to other eager/noload situations
+ at the wrong time.
+
+ .. change::
+ :tags: bug, orm
+ :versions: 0.9.0
+ :tickets: 2885
+
+ Fixed bug when using joined table inheritance from a table to a
+ select/alias on the base, where the PK columns were also not same
+ named; the persistence system would fail to copy primary key values
+ from the base table to the inherited table upon INSERT.
+
+ .. change::
+ :tags: bug, orm
+ :versions: 0.9.0
+ :tickets: 2889
+
+ :func:`.composite` will raise an informative error message when the
+ columns/attribute (names) passed don't resolve to a Column or mapped
+ attribute (such as an erroneous tuple); previously raised an unbound
+ local.
+
+ .. change::
+ :tags: bug, declarative
+ :versions: 0.9.0
+ :tickets: 2888
+
+ Error message when a string arg sent to :func:`.relationship` which
+ doesn't resolve to a class or mapper has been corrected to work
+ the same way as when a non-string arg is received, which indicates
+ the name of the relationship which had the configurational error.
+
+.. changelog::
+ :version: 0.8.4
+
+ .. change::
+ :tags: bug, engine
+ :versions: 0.9.0
+ :tickets: 2881
+
+ A DBAPI that raises an error on ``connect()`` which is not a subclass
+ of dbapi.Error (such as ``TypeError``, ``NotImplementedError``, etc.)
+ will propagate the exception unchanged. Previously,
+ the error handling specific to the ``connect()`` routine would both
+ inappropriately run the exception through the dialect's
+ :meth:`.Dialect.is_disconnect` routine as well as wrap it in
+ a :class:`sqlalchemy.exc.DBAPIError`. It is now propagated unchanged
+ in the same way as occurs within the execute process.
+
+ .. change::
+ :tags: bug, engine, pool
+ :versions: 0.9.0
+ :tickets: 2880
+
+ The :class:`.QueuePool` has been enhanced to not block new connection
+ attempts when an existing connection attempt is blocking. Previously,
+ the production of new connections was serialized within the block
+ that monitored overflow; the overflow counter is now altered within
+ its own critical section outside of the connection process itself.
+
+ .. change::
+ :tags: bug, engine, pool
+ :versions: 0.9.0
+ :tickets: 2522
+
+ Made a slight adjustment to the logic which waits for a pooled
+ connection to be available, such that for a connection pool
+ with no timeout specified, it will every half a second break out of
+ the wait to check for the so-called "abort" flag, which allows the
+ waiter to break out in case the whole connection pool was dumped;
+ normally the waiter should break out due to a notify_all() but it's
+ possible this notify_all() is missed in very slim cases.
+ This is an extension of logic first introduced in 0.8.0, and the
+ issue has only been observed occasionally in stress tests.
+
+ .. change::
+ :tags: bug, mssql
+ :versions: 0.9.0
+ :pullreq: bitbucket:7
+
+ Fixed bug introduced in 0.8.0 where the ``DROP INDEX``
+ statement for an index in MSSQL would render incorrectly if the
+ index were in an alternate schema; the schemaname/tablename
+ would be reversed. The format has also been revised to
+ match current MSSQL documentation. Courtesy Derek Harland.
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 1443
+ :versions: 0.9.0b1
+
+ Added support for "unique constraint" reflection, via the
+ :meth:`.Inspector.get_unique_constraints` method.
+ Thanks to Roman Podolyaka for the patch.
+
+ .. change::
+ :tags: bug, oracle
+ :tickets: 2864
+ :versions: 0.9.0
+
+ Added ORA-02396 "maximum idle time" error code to list of
+ "is disconnect" codes with cx_oracle.
+
+ .. change::
+ :tags: bug, engine
+ :tickets: 2871
+ :versions: 0.9.0
+
+ Fixed bug where SQL statement would be improperly ASCII-encoded
+ when a pre-DBAPI :class:`.StatementError` were raised within
+ :meth:`.Connection.execute`, causing encoding errors for
+ non-ASCII statements. The stringification now remains within
+ Python unicode thus avoiding encoding errors.
+
+ .. change::
+ :tags: bug, oracle
+ :tickets: 2870
+ :versions: 0.9.0
+
+ Fixed bug where Oracle ``VARCHAR`` types given with no length
+ (e.g. for a ``CAST`` or similar) would incorrectly render ``None CHAR``
+ or similar.
+
+ .. change::
+ :tags: bug, ext
+ :tickets: 2869
+ :versions: 0.9.0
+
+ Fixed bug which prevented the ``serializer`` extension from working
+ correctly with table or column names that contain non-ASCII
+ characters.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2818
+ :versions: 0.9.0
+
+ Fixed a regression introduced by :ticket:`2818` where the EXISTS
+ query being generated would produce a "columns being replaced"
+ warning for a statement with two same-named columns,
+ as the internal SELECT wouldn't have use_labels set.
+
+ .. change::
+ :tags: bug, postgresql
+ :tickets: 2855
+ :versions: 0.9.0
+
+ Fixed bug where index reflection would mis-interpret indkey values
+ when using the pypostgresql adapter, which returns these values
+ as lists vs. psycopg2's return type of string.
+
.. changelog::
:version: 0.8.3
+ :released: October 26, 2013
+
+ .. change::
+ :tags: bug, oracle
+ :tickets: 2853
+ :versions: 0.9.0b1
+
+ Fixed bug where Oracle table reflection using synonyms would fail
+ if the synonym and the table were in different remote schemas.
+ Patch to fix courtesy Kyle Derr.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2849
+ :versions: 0.9.0b1
+
+ Fixed bug where :func:`.type_coerce` would not interpret ORM
+ elements with a ``__clause_element__()`` method properly.
.. change::
:tags: bug, sql
- :tickets: 2784
+ :tickets: 2842
+ :versions: 0.9.0b1
- Fixed bug in :class:`.CheckConstraint` DDL where the "quote" flag from a
- :class:`.Column` object would not be propagated. Also in 0.7.11.
+ The :class:`.Enum` and :class:`.Boolean` types now bypass
+ any custom (e.g. TypeDecorator) type in use when producing the
+ CHECK constraint for the "non native" type. This so that the custom type
+ isn't involved in the expression within the CHECK, since this
+ expression is against the "impl" value and not the "decorated" value.
+
+ .. change::
+ :tags: bug, postgresql
+ :tickets: 2844
+ :versions: 0.9.0b1
+
+ Removed a 128-character truncation from the reflection of the
+ server default for a column; this code was originally from
+ PG system views which truncated the string for readability.
+
+ .. change::
+ :tags: bug, mysql
+ :tickets: 2721, 2839
+ :versions: 0.9.0b1
+
+ The change in :ticket:`2721`, which is that the ``deferrable`` keyword
+ of :class:`.ForeignKeyConstraint` is silently ignored on the MySQL
+ backend, will be reverted as of 0.9; this keyword will now render again, raising
+ errors on MySQL as it is not understood - the same behavior will also
+ apply to the ``initially`` keyword. In 0.8, the keywords will remain
+ ignored but a warning is emitted. Additionally, the ``match`` keyword
+ now raises a :exc:`.CompileError` on 0.9 and emits a warning on 0.8;
+ this keyword is not only silently ignored by MySQL but also breaks
+ the ON UPDATE/ON DELETE options.
+
+ To use a :class:`.ForeignKeyConstraint`
+ that does not render or renders differently on MySQL, use a custom
+ compilation option. An example of this usage has been added to the
+ documentation, see :ref:`mysql_foreign_keys`.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2825
+ :versions: 0.9.0b1
+
+ The ``.unique`` flag on :class:`.Index` could be produced as ``None``
+ if it was generated from a :class:`.Column` that didn't specify ``unique``
+ (where it defaults to ``None``). The flag will now always be ``True`` or
+ ``False``.
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 2836
+ :versions: 0.9.0b1
+
+ Added new option to :func:`.relationship` ``distinct_target_key``.
+ This enables the subquery eager loader strategy to apply a DISTINCT
+ to the innermost SELECT subquery, to assist in the case where
+ duplicate rows are generated by the innermost query which corresponds
+ to this relationship (there's not yet a general solution to the issue
+ of dupe rows within subquery eager loading, however, when joins outside
+ of the innermost subquery produce dupes). When the flag
+ is set to ``True``, the DISTINCT is rendered unconditionally, and when
+ it is set to ``None``, DISTINCT is rendered if the innermost relationship
+ targets columns that do not comprise a full primary key.
+ The option defaults to False in 0.8 (e.g. off by default in all cases),
+ None in 0.9 (e.g. automatic by default). Thanks to Alexander Koval
+ for help with this.
+
+ .. seealso::
+
+ :ref:`change_2836`
+
+ .. change::
+ :tags: bug, mysql
+ :tickets: 2515
+ :versions: 0.9.0b1
+
+ MySQL-connector dialect now allows options in the create_engine
+ query string to override those defaults set up in the connect,
+ including "buffered" and "raise_on_warnings".
+
+ .. change::
+ :tags: bug, postgresql
+ :tickets: 2742
+ :versions: 0.9.0b1
+
+ Parenthesis will be applied to a compound SQL expression as
+ rendered in the column list of a CREATE INDEX statement.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2742
+ :versions: 0.9.0b1
+
+ Fixed bug in default compiler plus those of postgresql, mysql, and
+ mssql to ensure that any literal SQL expression values are
+ rendered directly as literals, instead of as bound parameters,
+ within a CREATE INDEX statement. This also changes the rendering
+ scheme for other DDL such as constraints.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2815
+ :versions: 0.9.0b1
+
+ A :func:`.select` that is made to refer to itself in its FROM clause,
+ typically via in-place mutation, will raise an informative error
+ message rather than causing a recursion overflow.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2813
+ :versions: 0.9.0b1
+
+ Fixed bug where using an annotation such as :func:`.remote` or
+ :func:`.foreign` on a :class:`.Column` before association with a parent
+ :class:`.Table` could produce issues related to the parent table not
+ rendering within joins, due to the inherent copy operation performed
+ by an annotation.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2831
+
+ Non-working "schema" argument on :class:`.ForeignKey` is deprecated;
+ raises a warning. Removed in 0.9.
+
+ .. change::
+ :tags: bug, postgresql
+ :tickets: 2819
+ :versions: 0.9.0b1
+
+ Fixed bug where Postgresql version strings that had a prefix preceding
+ the words "Postgresql" or "EnterpriseDB" would not parse.
+ Courtesy Scott Schaefer.
+
+ .. change::
+ :tags: feature, engine
+ :tickets: 2821
+ :versions: 0.9.0b1
+
+ ``repr()`` for the :class:`.URL` of an :class:`.Engine`
+ will now conceal the password using asterisks.
+ Courtesy Gunnlaugur Þór Briem.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2818
+ :versions: 0.9.0b1
+
+ Fixed bug where :meth:`.Query.exists` failed to work correctly
+ without any WHERE criterion. Courtesy Vladimir Magamedov.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2811
+ :versions: 0.9.0b1
+
+ Fixed bug where using the ``column_reflect`` event to change the ``.key``
+ of the incoming :class:`.Column` would prevent primary key constraints,
+ indexes, and foreign key constraints from being correctly reflected.
+
+ .. change::
+ :tags: feature
+ :versions: 0.9.0b1
+
+ Added a new flag ``system=True`` to :class:`.Column`, which marks
+ the column as a "system" column which is automatically made present
+ by the database (such as Postgresql ``oid`` or ``xmin``). The
+ column will be omitted from the ``CREATE TABLE`` statement but will
+ otherwise be available for querying. In addition, the
+ :class:`.CreateColumn` construct can be applied to a custom
+ compilation rule which allows skipping of columns, by producing
+ a rule that returns ``None``.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2779
+
+ Backported a change from 0.9 whereby the iteration of a hierarchy
+ of mappers used in polymorphic inheritance loads is sorted,
+ which allows the SELECT statements generated for polymorphic queries
+ to have deterministic rendering, which in turn helps with caching
+ schemes that cache on the SQL string itself.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2794
+ :versions: 0.9.0b1
+
+ Fixed a potential issue in an ordered sequence implementation used
+ by the ORM to iterate mapper hierarchies; under the Jython interpreter
+ this implementation wasn't ordered, even though cPython and Pypy
+ maintained ordering.
+
+ .. change::
+ :tags: bug, examples
+ :versions: 0.9.0b1
+
+ Added "autoincrement=False" to the history table created in the
+ versioning example, as this table shouldn't have autoinc on it
+ in any case, courtesy Patrick Schmid.
+
+ .. change::
+ :tags: bug, sql
+ :versions: 0.9.0b1
+
+ The :meth:`.ColumnOperators.notin_` operator added in 0.8 now properly
+ produces the negation of the expression "IN" returns
+ when used against an empty collection.
+
+ .. change::
+ :tags: feature, examples
+ :versions: 0.9.0b1
+
+ Improved the examples in ``examples/generic_associations``, including
+ that ``discriminator_on_association.py`` makes use of single table
+ inheritance to do the work with the "discriminator". Also
+ added a true "generic foreign key" example, which works similarly
+ to other popular frameworks in that it uses an open-ended integer
+ to point to any other table, foregoing traditional referential
+ integrity. While we don't recommend this pattern, information wants
+ to be free.
+
+ .. change::
+ :tags: feature, orm, declarative
+ :versions: 0.9.0b1
+
+ Added a convenience class decorator :func:`.as_declarative`, which is
+ a wrapper for :func:`.declarative_base` which allows an existing base
+ class to be applied using a nifty class-decorated approach.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2786
+ :versions: 0.9.0b1
+
+ Fixed bug in ORM-level event registration where the "raw" or
+ "propagate" flags could potentially be mis-configured in some
+ "unmapped base class" configurations.
.. change::
:tags: bug, orm
:tickets: 2778
+ :versions: 0.9.0b1
A performance fix related to the usage of the :func:`.defer` option
when loading mapped entities. The function overhead of applying
@@ -31,6 +528,7 @@
.. change::
:tags: bug, sqlite
:tickets: 2781
+ :versions: 0.9.0b1
The newly added SQLite DATETIME arguments storage_format and
regexp apparently were not fully implemented correctly; while the
@@ -40,6 +538,7 @@
.. change::
:tags: bug, sql, postgresql
:tickets: 2780
+ :versions: 0.9.0b1
Fixed bug where the expression system relied upon the ``str()``
form of a some expressions when referring to the ``.c`` collection
@@ -47,13 +546,14 @@
since the element relies on dialect-specific compilation constructs,
notably the ``__getitem__()`` operator as used with a Postgresql
``ARRAY`` element. The fix also adds a new exception class
- :class:`.UnsupportedCompilationError` which is raised in those cases
+ :exc:`.UnsupportedCompilationError` which is raised in those cases
where a compiler is asked to compile something it doesn't know
how to.
.. change::
:tags: bug, engine, oracle
:tickets: 2776
+ :versions: 0.9.0b1
Dialect.initialize() is not called a second time if an :class:`.Engine`
is recreated, due to a disconnect error. This fixes a particular
@@ -70,6 +570,7 @@
.. change::
:tags: feature, sql
+ :versions: 0.9.0b1
The :func:`.update`, :func:`.insert`, and :func:`.delete` constructs
will now interpret ORM entities as target tables to be operated upon,
@@ -86,6 +587,7 @@
.. change::
:tags: bug, orm
:tickets: 2773
+ :versions: 0.9.0b1
Fixed bug whereby attribute history functions would fail
when an object we moved from "persistent" to "pending"
@@ -95,6 +597,7 @@
.. change::
:tags: bug, engine, pool
:tickets: 2772
+ :versions: 0.9.0b1
Fixed bug where :class:`.QueuePool` would lose the correct
checked out count if an existing pooled connection failed to reconnect
@@ -107,6 +610,7 @@
.. change::
:tags: bug, mysql
:tickets: 2768
+ :versions: 0.9.0b1
Fixed bug when using multi-table UPDATE where a supplemental
table is a SELECT with its own bound parameters, where the positioning
@@ -116,13 +620,15 @@
.. change::
:tags: bug, sqlite
:tickets: 2764
+ :versions: 0.9.0b1
- Added :class:`.BIGINT` to the list of type names that can be
+ Added :class:`sqlalchemy.types.BIGINT` to the list of type names that can be
reflected by the SQLite dialect; courtesy Russell Stuart.
.. change::
:tags: feature, orm, declarative
:tickets: 2761
+ :versions: 0.9.0b1
ORM descriptors such as hybrid properties can now be referenced
by name in a string argument used with ``order_by``,
@@ -132,15 +638,17 @@
.. change::
:tags: feature, firebird
:tickets: 2763
+ :versions: 0.9.0b1
Added new flag ``retaining=True`` to the kinterbasdb and fdb dialects.
This controls the value of the ``retaining`` flag sent to the
``commit()`` and ``rollback()`` methods of the DBAPI connection.
- Due to historical concerns, this flag defaults to ``True``, however
- in 0.9 this flag will be defaulted to ``False``.
+ Due to historical concerns, this flag defaults to ``True`` in 0.8.2,
+ however in 0.9.0b1 this flag defaults to ``False``.
.. change::
:tags: requirements
+ :versions: 0.9.0b1
The Python `mock <https://pypi.python.org/pypi/mock>`_ library
is now required in order to run the unit test suite. While part
@@ -151,6 +659,7 @@
.. change::
:tags: bug, orm
:tickets: 2750
+ :versions: 0.9.0b1
A warning is emitted when trying to flush an object of an inherited
class where the polymorphic discriminator has been assigned
@@ -159,6 +668,7 @@
.. change::
:tags: bug, postgresql
:tickets: 2740
+ :versions: 0.9.0b1
The behavior of :func:`.extract` has been simplified on the
Postgresql dialect to no longer inject a hardcoded ``::timestamp``
@@ -171,6 +681,7 @@
.. change::
:tags: bug, firebird
:tickets: 2757
+ :versions: 0.9.0b1
Type lookup when reflecting the Firebird types LONG and
INT64 has been fixed so that LONG is treated as INTEGER,
@@ -181,6 +692,7 @@
.. change::
:tags: bug, postgresql
:tickets: 2766
+ :versions: 0.9.0b1
Fixed bug in HSTORE type where keys/values that contained
backslashed quotes would not be escaped correctly when
@@ -190,6 +702,7 @@
.. change::
:tags: bug, postgresql
:tickets: 2767
+ :versions: 0.9.0b1
Fixed bug where the order of columns in a multi-column
Postgresql index would be reflected in the wrong order.
@@ -198,6 +711,7 @@
.. change::
:tags: bug, sql
:tickets: 2746, 2668
+ :versions: 0.9.0b1
Multiple fixes to the correlation behavior of
:class:`.Select` constructs, first introduced in 0.8.0:
@@ -232,6 +746,7 @@
.. change::
:tags: bug, ext
+ :versions: 0.9.0b1
Fixed bug whereby if a composite type were set up
with a function instead of a class, the mutable extension
@@ -252,6 +767,7 @@
.. change::
:tags: feature, postgresql
+ :versions: 0.9.0b1
Support for Postgresql 9.2 range types has been added.
Currently, no type translation is provided, so works
@@ -260,6 +776,7 @@
.. change::
:tags: bug, examples
+ :versions: 0.9.0b1
Fixed an issue with the "versioning" recipe whereby a many-to-one
reference could produce a meaningless version for the target,
@@ -269,6 +786,7 @@
.. change::
:tags: feature, postgresql
:tickets: 2072
+ :versions: 0.9.0b1
Added support for "AUTOCOMMIT" isolation when using the psycopg2
DBAPI. The keyword is available via the ``isolation_level``
@@ -277,6 +795,7 @@
.. change::
:tags: bug, orm
:tickets: 2759
+ :versions: 0.9.0b1
Fixed bug in polymorphic SQL generation where multiple joined-inheritance
entities against the same base class joined to each other as well
@@ -285,7 +804,8 @@
.. change::
:tags: bug, engine
- :pullreq: 6
+ :pullreq: github:6
+ :versions: 0.9.0b1
Fixed bug where the ``reset_on_return`` argument to various :class:`.Pool`
implementations would not be propagated when the pool was regenerated.
@@ -294,6 +814,7 @@
.. change::
:tags: bug, orm
:tickets: 2754
+ :versions: 0.9.0b1
Fixed bug where sending a composite attribute into :meth:`.Query.order_by`
would produce a parenthesized expression not accepted by some databases.
@@ -301,6 +822,7 @@
.. change::
:tags: bug, orm
:tickets: 2755
+ :versions: 0.9.0b1
Fixed the interaction between composite attributes and
the :func:`.aliased` function. Previously, composite attributes
@@ -310,6 +832,7 @@
.. change::
:tags: bug, mysql
:tickets: 2715
+ :versions: 0.9.0b1
Added another conditional to the ``mysql+gaerdbms`` dialect to
detect so-called "development" mode, where we should use the
@@ -318,6 +841,7 @@
.. change::
:tags: feature, mysql
:tickets: 2704
+ :versions: 0.9.0b1
The ``mysql_length`` parameter used with :class:`.Index` can now
be passed as a dictionary of column names/lengths, for use
@@ -327,6 +851,7 @@
.. change::
:tags: bug, mssql
:tickets: 2747
+ :versions: 0.9.0b1
When querying the information schema on SQL Server 2000, removed
a CAST call that was added in 0.8.1 to help with driver issues,
@@ -336,6 +861,7 @@
.. change::
:tags: bug, mysql
:tickets: 2721
+ :versions: 0.9.0b1
The ``deferrable`` keyword argument on :class:`.ForeignKey` and
:class:`.ForeignKeyConstraint` will not render the ``DEFERRABLE`` keyword
@@ -347,6 +873,7 @@
.. change::
:tags: bug, ext, orm
:tickets: 2730
+ :versions: 0.9.0b1
Fixed bug where :class:`.MutableDict` didn't report a change event
when ``clear()`` was called.
@@ -354,6 +881,7 @@
.. change::
:tags: bug, sql
:tickets: 2738
+ :versions: 0.9.0b1
Fixed bug whereby joining a select() of a table "A" with multiple
foreign key paths to a table "B", to that table "B", would fail
@@ -364,6 +892,7 @@
.. change::
:tags: bug, sql, reflection
:tickets: 2728
+ :versions: 0.9.0b1
Fixed bug whereby using :meth:`.MetaData.reflect` across a remote
schema as well as a local schema could produce wrong results
@@ -372,6 +901,7 @@
.. change::
:tags: bug, sql
:tickets: 2726
+ :versions: 0.9.0b1
Removed the "not implemented" ``__iter__()`` call from the base
:class:`.ColumnOperators` class, while this was introduced
@@ -406,6 +936,7 @@
.. change::
:tags: bug, postgresql
+ :pullreq: github:2
:tickets: 2735
Fixed the HSTORE type to correctly encode/decode for unicode.
@@ -541,7 +1072,6 @@
.. change::
:tags: bug, mysql
- :pullreq: 55
Fixes to support the latest cymysql DBAPI, courtesy
Hajime Nakagami.
@@ -603,7 +1133,6 @@
.. change::
:tags: bug, mysql
- :pullreq: 54
Updated a regexp to correctly extract error code on
google app engine v1.7.5 and newer. Courtesy
@@ -712,32 +1241,7 @@
(obviously assuming the state of the superclass).
.. change::
- :tags: bug, orm
- :tickets: 2699
-
- Fixed bug when a query of the form:
- ``query(SubClass).options(subqueryload(Baseclass.attrname))``,
- where ``SubClass`` is a joined inh of ``BaseClass``,
- would fail to apply the ``JOIN`` inside the subquery
- on the attribute load, producing a cartesian product.
- The populated results still tended to be correct as additional
- rows are just ignored, so this issue may be present as a
- performance degradation in applications that are
- otherwise working correctly. Also in 0.7.11.
-
- .. change::
- :tags: bug, orm
- :tickets: 2689
-
- Fixed bug in unit of work whereby a joined-inheritance
- subclass could insert the row for the "sub" table
- before the parent table, if the two tables had no
- ForeignKey constraints set up between them.
- Also in 0.7.11.
-
- .. change::
:tags: bug, mssql
- :pullreq: 47
Added support for additional "disconnect" messages
to the pymssql dialect. Courtesy John Anderson.
@@ -761,7 +1265,6 @@
.. change::
:tags: bug, mssql
:tickets: 2683
- :pullreq: 46
Fixed Py3K bug regarding "binary" types and
pymssql. Courtesy Marc Abramowitz.
@@ -790,15 +1293,6 @@
* :ref:`correlation_context_specific`
.. change::
- :tags: feature, postgresql
- :tickets: 2676
-
- Added support for Postgresql's traditional SUBSTRING
- function syntax, renders as "SUBSTRING(x FROM y FOR z)"
- when regular ``func.substring()`` is used.
- Also in 0.7.11. Courtesy Gunnlaugur Þór Briem.
-
- .. change::
:tags: feature, orm
:tickets: 2675
@@ -824,7 +1318,6 @@
.. change::
:tags: feature, mysql
- :pullreq: 42
New dialect for CyMySQL added, courtesy Hajime Nakagami.
@@ -880,7 +1373,6 @@
.. change::
:tags: bug, tests
:tickets: 2669
- :pullreq: 41
Fixed an import of "logging" in test_execute which was not
working on some linux platforms. Also in 0.7.11.
@@ -987,7 +1479,6 @@
.. change::
:tags: feature, postgresql
- :pullreq: 40
Added :meth:`.postgresql.ARRAY.Comparator.any` and
:meth:`.postgresql.ARRAY.Comparator.all`
@@ -1061,7 +1552,6 @@
.. change::
:tags: mssql, feature
- :pullreq: 35
Added ``mssql_include`` and ``mssql_clustered`` options to
:class:`.Index`, renders the ``INCLUDE`` and ``CLUSTERED`` keywords,
@@ -1110,7 +1600,6 @@
.. change::
:tags: mssql, feature
:tickets: 2644
- :pullreq: 32
DDL for IDENTITY columns is now supported on
non-primary key columns, by establishing a
@@ -1143,7 +1632,7 @@
.. change::
:tags: postgresql, bug
- Fixed bug in :func:`.postgresql.array` construct whereby using it
+ Fixed bug in :class:`~sqlalchemy.dialects.postgresql.array()` construct whereby using it
inside of an :func:`.expression.insert` construct would produce an
error regarding a parameter issue in the ``self_group()`` method.
@@ -1159,14 +1648,12 @@
.. change::
:tags: mysql, feature
- :pullreq: 33
GAE dialect now accepts username/password arguments in the URL,
courtesy Owen Nelson.
.. change::
:tags: mysql, bug
- :pullreq: 33
GAE dialect won't fail on None match if the error code can't be extracted
from the exception throw; courtesy Owen Nelson.
@@ -1185,16 +1672,6 @@
:released: December 14, 2012
.. change::
- :tags: sqlite, bug
- :tickets: 2568
-
- More adjustment to this SQLite related issue which was released in
- 0.7.9, to intercept legacy SQLite quoting characters when reflecting
- foreign keys. In addition to intercepting double quotes, other
- quoting characters such as brackets, backticks, and single quotes
- are now also intercepted. Also in 0.7.10.
-
- .. change::
:tags: orm, bug
:tickets: 2635
@@ -1265,19 +1742,6 @@
.. change::
:tags: sql, bug
- :tickets: 2631
-
- Fixed bug where using server_onupdate=<FetchedValue|DefaultClause>
- without passing the "for_update=True" flag would apply the default
- object to the server_default, blowing away whatever was there.
- The explicit for_update=True argument shouldn't be needed with this usage
- (especially since the documentation shows an example without it being
- used) so it is now arranged internally using a copy of the given default
- object, if the flag isn't set to what corresponds to that argument.
- Also in 0.7.10.
-
- .. change::
- :tags: sql, bug
:tickets: 2610
Fixed bug whereby using a label_length on dialect that was smaller
@@ -1320,7 +1784,7 @@
:tags: sql, bug
:tickets: 2618
- The :class:`.DECIMAL` type now honors the "precision" and
+ The :class:`~sqlalchemy.types.DECIMAL` type now honors the "precision" and
"scale" arguments when rendering DDL.
.. change::
@@ -1436,30 +1900,10 @@
.. change::
:tags: engine
- The "reflect=True" argument to :class:`MetaData` is deprecated.
+ The "reflect=True" argument to :class:`~sqlalchemy.schema.MetaData` is deprecated.
Please use the :meth:`.MetaData.reflect` method.
.. change::
- :tags: engine, bug
- :tickets: 2604
-
- Fixed :meth:`.MetaData.reflect` to correctly use
- the given :class:`.Connection`, if given, without
- opening a second connection from that connection's
- :class:`.Engine`. Also in 0.7.10.
-
- .. change::
- :tags: mssql, bug
- :tickets: 2607
-
- Fixed bug whereby using "key" with Column
- in conjunction with "schema" for the owning
- Table would fail to locate result rows due
- to the MSSQL dialect's "schema rendering"
- logic's failure to take .key into account.
- Also in 0.7.10.
-
- .. change::
:tags: sql, bug
:tickets: 2603
@@ -1573,7 +2017,7 @@
:tickets: 2595
The auto-correlation feature of :func:`.select`, and
- by proxy that of :class:`.orm.Query`, will not
+ by proxy that of :class:`.Query`, will not
take effect for a SELECT statement that is being
rendered directly in the FROM list of the enclosing
SELECT. Correlation in SQL only applies to column
@@ -1582,7 +2026,6 @@
.. change::
:tags: sqlite
- :pullreq: 23
:changeset: c3addcc9ffad
Added :class:`.types.NCHAR`, :class:`.types.NVARCHAR`
diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst
index 02a5c1d81..d59f3ec60 100644
--- a/doc/build/changelog/changelog_09.rst
+++ b/doc/build/changelog/changelog_09.rst
@@ -3,387 +3,1115 @@
0.9 Changelog
==============
+.. changelog_imports::
+
+ .. include:: changelog_08.rst
+ :start-line: 5
+
+ .. include:: changelog_07.rst
+ :start-line: 5
+
.. changelog::
- :version: 0.9.0
+ :version: 0.9.2
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 2910
+
+ Options can now be specified on a :class:`.PrimaryKeyConstraint` object
+ independently of the specification of columns in the table with
+ the ``primary_key=True`` flag; use a :class:`.PrimaryKeyConstraint`
+ object with no columns in it to achieve this result.
+
+ Previously, an explicit :class:`.PrimaryKeyConstraint` would have the
+ effect of those columns marked as ``primary_key=True`` being ignored;
+ since this is no longer the case, the :class:`.PrimaryKeyConstraint`
+ will now assert that either one style or the other is used to specify
+ the columns, or if both are present, that the column lists match
+ exactly. If an inconsistent set of columns in the
+ :class:`.PrimaryKeyConstraint`
+ and within the :class:`.Table` marked as ``primary_key=True`` are
+ present, a warning is emitted, and the list of columns is taken
+ only from the :class:`.PrimaryKeyConstraint` alone as was the case
+ in previous releases.
+
+
+
+ .. seealso::
+
+ :class:`.PrimaryKeyConstraint`
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 2866
+
+ The system by which schema constructs and certain SQL constructs
+ accept dialect-specific keyword arguments has been enhanced. This
+ system includes commonly the :class:`.Table` and :class:`.Index` constructs,
+ which accept a wide variety of dialect-specific arguments such as
+ ``mysql_engine`` and ``postgresql_where``, as well as the constructs
+ :class:`.PrimaryKeyConstraint`, :class:`.UniqueConstraint`,
+ :class:`.Update`, :class:`.Insert` and :class:`.Delete`, and also
+ newly added kwarg capability to :class:`.ForeignKeyConstraint`
+ and :class:`.ForeignKey`. The change is that participating dialects
+ can now specify acceptable argument lists for these constructs, allowing
+ an argument error to be raised if an invalid keyword is specified for
+ a particular dialect. If the dialect portion of the keyword is unrecognized,
+ a warning is emitted only; while the system will actually make use
+ of setuptools entrypoints in order to locate non-local dialects,
+ the use case where certain dialect-specific arguments are used
+ in an environment where that third-party dialect is uninstalled remains
+ supported. Dialects also have to explicitly opt-in to this system,
+ so that external dialects which aren't making use of this system
+ will remain unaffected.
.. change::
:tags: bug, sql
- :tickets: 2784
+ :pullreq: bitbucket:11
+
+ A :class:`.UniqueConstraint` created inline with a :class:`.Table`
+ that has no columns within it will be skipped. Pullreq courtesy
+ Derek Harland.
+
+ .. change::
+ :tags: feature, mssql
+ :pullreq: bitbucket:11
+
+ Added an option ``mssql_clustered`` to the :class:`.UniqueConstraint`
+ and :class:`.PrimaryKeyConstraint` constructs; on SQL Server, this adds
+ the ``CLUSTERED`` keyword to the constraint construct within DDL.
+ Pullreq courtesy Derek Harland.
+
+ .. change::
+ :tags: bug, sql, orm
+ :tickets: 2912
+
+ Fixed the multiple-table "UPDATE..FROM" construct, only usable on
+ MySQL, to correctly render the SET clause among multiple columns
+ with the same name across tables. This also changes the name used for
+ the bound parameter in the SET clause to "<tablename>_<colname>" for
+ the non-primary table only; as this parameter is typically specified
+ using the :class:`.Column` object directly this should not have an
+ impact on applications. The fix takes effect for both
+ :meth:`.Table.update` as well as :meth:`.Query.update` in the ORM.
- Fixed bug in :class:`.CheckConstraint` DDL where the "quote" flag from a
- :class:`.Column` object would not be propagated. Also in 0.8.3, 0.7.11.
+ .. change::
+ :tags: bug, oracle
+ :tickets: 2911
+
+ It's been observed that the usage of a cx_Oracle "outputtypehandler"
+ in Python 2.xx in order to coerce string values to Unicode is inordinately
+ expensive; even though cx_Oracle is written in C, when you pass the
+ Python ``unicode`` primitive to cursor.var() and associate with an output
+ handler, the library counts every conversion as a Python function call
+ with all the requisite overhead being recorded; this *despite* the fact
+ when running in Python 3, all strings are also unconditionally coerced
+ to unicode but it does *not* incur this overhead,
+ meaning that cx_Oracle is failing to use performant techniques in Py2K.
+ As SQLAlchemy cannot easily select for this style of type handler on a
+ per-column basis, the handler was assembled unconditionally thereby
+ adding the overhead to all string access.
+
+ So this logic has been replaced with SQLAlchemy's own unicode
+ conversion system, which now
+ only takes effect in Py2K for columns that are requested as unicode.
+ When C extensions are used, SQLAlchemy's system appears to be 2-3x faster than
+ cx_Oracle's. Additionally, SQLAlchemy's unicode conversion has been
+ enhanced such that when the "conditional" converter is required
+ (now needed for the Oracle backend), the check for "already unicode" is now
+ performed in C and no longer introduces significant overhead.
+
+ This change has two impacts on the cx_Oracle backend. One is that
+ string values in Py2K which aren't specifically requested with the
+ Unicode type or convert_unicode=True will now come back as ``str``,
+ not ``unicode`` - this behavior is similar to a backend such as
+ MySQL. Additionally, when unicode values are requested with the cx_Oracle
+ backend, if the C extensions are *not* used, there is now an additional
+ overhead of an isinstance() check per column. This tradeoff has been
+ made as it can be worked around and no longer places a performance burden
+ on the likely majority of Oracle result columns that are non-unicode
+ strings.
.. change::
:tags: bug, orm
- :tickets: 2778
+ :tickets: 2908
+
+ Fixed a bug involving the new flattened JOIN structures which
+ are used with :func:`.joinedload()` (thereby causing a regression
+ in joined eager loading) as well as :func:`.aliased`
+ in conjunction with the ``flat=True`` flag and joined-table inheritance;
+ basically multiple joins across a "parent JOIN sub" entity using different
+ paths to get to a target class wouldn't form the correct ON conditions.
+ An adjustment / simplification made in the mechanics of figuring
+ out the "left side" of the join in the case of an aliased, joined-inh
+ class repairs the issue.
- A performance fix related to the usage of the :func:`.defer` option
- when loading mapped entities. The function overhead of applying
- a per-object deferred callable to an instance at load time was
- significantly higher than that of just loading the data from the row
- (note that ``defer()`` is meant to reduce DB/network overhead, not
- necessarily function call count); the function call overhead is now
- less than that of loading data from the column in all cases. There
- is also a reduction in the number of "lazy callable" objects created
- per load from N (total deferred values in the result) to 1 (total
- number of deferred cols). Also in 0.8.3.
+ .. change::
+ :tags: bug, mysql
+
+ The MySQL CAST compilation now takes into account aspects of a string
+ type such as "charset" and "collation". While MySQL wants all character-
+ based CAST calls to use the CHAR type, we now create a real CHAR
+ object at CAST time and copy over all the parameters it has, so that
+ an expression like ``cast(x, mysql.TEXT(charset='utf8'))`` will
+ render ``CAST(t.col AS CHAR CHARACTER SET utf8)``.
.. change::
- :tags: bug, sqlite
- :tickets: 2781
+ :tags: bug, mysql
+ :tickets: 2906
+
+ Added new "unicode returns" detection to the MySQL dialect and
+ to the default dialect system overall, such that any dialect
+ can add extra "tests" to the on-first-connect "does this DBAPI
+ return unicode directly?" detection. In this case, we are
+ adding a check specifically against the "utf8" encoding with
+ an explicit "utf8_bin" collation type (after checking that
+ this collation is available) to test for some buggy unicode
+ behavior observed with MySQLdb version 1.2.3. While MySQLdb
+ has resolved this issue as of 1.2.4, the check here should
+ guard against regressions. The change also allows the "unicode"
+ checks to log in the engine logs, which was not previously
+ the case.
- The newly added SQLite DATETIME arguments storage_format and
- regexp apparently were not fully implemented correctly; while the
- arguments were accepted, in practice they would have no effect;
- this has been fixed. Also in 0.8.3.
+ .. change::
+ :tags: bug, mysql, pool, engine
+ :tickets: 2907
+
+ :class:`.Connection` now associates a new
+ :class:`.RootTransaction` or :class:`.TwoPhaseTransaction`
+ with its immediate :class:`._ConnectionFairy` as a "reset handler"
+ for the span of that transaction, which takes over the task
+ of calling commit() or rollback() for the "reset on return" behavior
+ of :class:`.Pool` if the transaction was not otherwise completed.
+ This resolves the issue that a picky transaction
+ like that of MySQL two-phase will be
+ properly closed out when the connection is closed without an
+ explicit rollback or commit (e.g. no longer raises "XAER_RMFAIL"
+ in this case - note this only shows up in logging as the exception
+ is not propagated within pool reset).
+ This issue would arise e.g. when using an orm
+ :class:`.Session` with ``twophase`` set, and then
+ :meth:`.Session.close` is called without an explicit rollback or
+ commit. The change also has the effect that you will now see
+ an explicit "ROLLBACK" in the logs when using a :class:`.Session`
+ object in non-autocommit mode regardless of how that session was
+ discarded. Thanks to Jeff Dairiki and Laurence Rowe for isolating
+ the issue here.
.. change::
- :tags: bug, sql, postgresql
- :tickets: 2780
+ :tags: feature, pool, engine
- Fixed bug where the expression system relied upon the ``str()``
- form of a some expressions when referring to the ``.c`` collection
- on a ``select()`` construct, but the ``str()`` form isn't available
- since the element relies on dialect-specific compilation constructs,
- notably the ``__getitem__()`` operator as used with a Postgresql
- ``ARRAY`` element. The fix also adds a new exception class
- :class:`.UnsupportedCompilationError` which is raised in those cases
- where a compiler is asked to compile something it doesn't know
- how to. Also in 0.8.3.
+ Added a new pool event :meth:`.PoolEvents.invalidate`. Called when
+ a DBAPI connection is to be marked as "invalidated" and discarded
+ from the pool.
.. change::
- :tags: bug, engine, oracle
- :tickets: 2776
+ :tags: bug, pool
- Dialect.initialize() is not called a second time if an :class:`.Engine`
- is recreated, due to a disconnect error. This fixes a particular
- issue in the Oracle 8 dialect, but in general the dialect.initialize()
- phase should only be once per dialect. Also in 0.8.3.
+ The argument names for the :meth:`.PoolEvents.reset` event have been
+ renamed to ``dbapi_connection`` and ``connection_record`` in order
+ to maintain consistency with all the other pool events. It is expected
+ that any existing listeners for this relatively new and
+ seldom-used event are using positional style to receive arguments in
+ any case.
.. change::
- :tags: feature, sql
- :tickets: 722
+ :tags: bug, py3k, cextensions
+ :pullreq: github:55
- Added new method to the :func:`.insert` construct
- :meth:`.Insert.from_select`. Given a list of columns and
- a selectable, renders ``INSERT INTO (table) (columns) SELECT ..``.
- While this feature is highlighted as part of 0.9 it is also
- backported to 0.8.3.
+ Fixed an issue where the C extensions in Py3K are using the wrong API
+ to specify the top-level module function, which breaks
+ in Python 3.4b2. Py3.4b2 changes PyMODINIT_FUNC to return
+ "void" instead of "PyObject *", so we now make sure to use
+ "PyMODINIT_FUNC" instead of "PyObject *" directly. Pull request
+ courtesy cgohlke.
+
+ .. change::
+ :tags: bug, schema
+ :pullreq: github:57
+
+ Restored :class:`sqlalchemy.schema.SchemaVisitor` to the ``.schema``
+ module. Pullreq courtesy Sean Dague.
+
+.. changelog::
+ :version: 0.9.1
+ :released: January 5, 2014
+
+ .. change::
+ :tags: bug, orm, events
+ :tickets: 2905
+
+ Fixed regression where using a ``functools.partial()`` with the event
+ system would cause a recursion overflow due to usage of inspect.getargspec()
+ on it in order to detect a legacy calling signature for certain events,
+ and apparently there's no way to do this with a partial object. Instead
+ we skip the legacy check and assume the modern style; the check itself
+ now only occurs for the SessionEvents.after_bulk_update and
+ SessionEvents.after_bulk_delete events. Those two events will require
+ the new signature style if assigned to a "partial" event listener.
+
+ .. change::
+ :tags: feature, orm, extensions
+
+ A new, **experimental** extension :mod:`sqlalchemy.ext.automap` is added.
+ This extension expands upon the functionality of Declarative as well as
+ the :class:`.DeferredReflection` class to produce a base class which
+ automatically generates mapped classes *and relationships* based on
+ table metadata.
.. seealso::
- :ref:`feature_722`
+ :ref:`feature_automap`
+
+ :ref:`automap_toplevel`
.. change::
:tags: feature, sql
- The :func:`.update`, :func:`.insert`, and :func:`.delete` constructs
- will now interpret ORM entities as target tables to be operated upon,
- e.g.::
+ Conjunctions like :func:`.and_` and :func:`.or_` can now accept
+ Python generators as a single argument, e.g.::
+
+ and_(x == y for x, y in tuples)
- from sqlalchemy import insert, update, delete
+ The logic here looks for a single argument ``*args`` where the first
+ element is an instance of ``types.GeneratorType``.
- ins = insert(SomeMappedClass).values(x=5)
+ .. change::
+ :tags: feature, schema
- del_ = delete(SomeMappedClass).where(SomeMappedClass.id == 5)
+ The :paramref:`.Table.extend_existing` and :paramref:`.Table.autoload_replace`
+ parameters are now available on the :meth:`.MetaData.reflect`
+ method.
- upd = update(SomeMappedClass).where(SomeMappedClass.id == 5).values(name='ed')
+ .. change::
+ :tags: bug, orm, declarative
- Also in 0.8.3.
+ Fixed an extremely unlikely memory issue where when using
+ :class:`.DeferredReflection`
+ to define classes pending for reflection, if some subset of those
+ classes were discarded before the :meth:`.DeferredReflection.prepare`
+ method were called to reflect and map the class, a strong reference
+ to the class would remain held within the declarative internals.
+ This internal collection of "classes to map" now uses weak
+ references against the classes themselves.
.. change::
:tags: bug, orm
- :tickets: 2773
+ :pullreq: bitbucket:9
- Fixed bug whereby attribute history functions would fail
- when an object we moved from "persistent" to "pending"
- using the :func:`.make_transient` function, for operations
- involving collection-based backrefs. Also in 0.8.3.
+ Fixed bug where using new :attr:`.Session.info` attribute would fail
+ if the ``.info`` argument were only passed to the :class:`.sessionmaker`
+ creation call but not to the object itself. Courtesy Robin Schoonover.
.. change::
- :tags: bug, engine, pool
- :tickets: 2772
+ :tags: bug, orm
+ :tickets: 2901
- Fixed bug where :class:`.QueuePool` would lose the correct
- checked out count if an existing pooled connection failed to reconnect
- after an invalidate or recycle event. Also in 0.8.3.
+ Fixed regression where we don't check the given name against the
+ correct string class when setting up a backref based on a name,
+ therefore causing the error "too many values to unpack". This was
+ related to the Py3k conversion.
+
+ .. change::
+ :tags: bug, orm, declarative
+ :tickets: 2900
+
+ A quasi-regression where apparently in 0.8 you can set a class-level
+ attribute on declarative to simply refer directly to an :class:`.InstrumentedAttribute`
+ on a superclass or on the class itself, and it
+ acts more or less like a synonym; in 0.9, this fails to set up enough
+ bookkeeping to keep up with the more liberalized backref logic
+ from :ticket:`2789`. Even though this use case was never directly
+ considered, it is now detected by declarative at the "setattr()" level
+ as well as when setting up a subclass, and the mirrored/renamed attribute
+ is now set up as a :func:`.synonym` instead.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2903
+
+ Fixed regression where we apparently still create an implicit
+ alias when saying query(B).join(B.cs), where "C" is a joined inh
+ class; however, this implicit alias was created only considering
+ the immediate left side, and not a longer chain of joins along different
+ joined-inh subclasses of the same base. As long as we're still
+ implicitly aliasing in this case, the behavior is dialed back a bit
+ so that it will alias the right side in a wider variety of cases.
+
+.. changelog::
+ :version: 0.9.0
+ :released: December 30, 2013
+
+ .. change::
+ :tags: bug, orm, declarative
+ :tickets: 2828
+
+ Declarative does an extra check to detect if the same
+ :class:`.Column` is mapped multiple times under different properties
+ (which typically should be a :func:`.synonym` instead) or if two
+ or more :class:`.Column` objects are given the same name, raising
+ a warning if this condition is detected.
+
+ .. change::
+ :tags: bug, firebird
+ :tickets: 2898
+
+ Changed the queries used by Firebird to list table and view names
+ to query from the ``rdb$relations`` view instead of the
+ ``rdb$relation_fields`` and ``rdb$view_relations`` views.
+ Variants of both the old and new queries are mentioned on many
+ FAQ and blogs, however the new queries are taken straight from
+ the "Firebird FAQ" which appears to be the most official source
+ of info.
.. change::
:tags: bug, mysql
- :tickets: 2768
+ :tickets: 2893
- Fixed bug when using multi-table UPDATE where a supplemental
- table is a SELECT with its own bound parameters, where the positioning
- of the bound parameters would be reversed versus the statement
- itself when using MySQL's special syntax. Also in 0.8.2.
+ Improvements to the system by which SQL types generate within
+ ``__repr__()``, particularly with regards to the MySQL integer/numeric/
+ character types which feature a wide variety of keyword arguments.
+ The ``__repr__()`` is important for use with Alembic autogenerate
+ for when Python code is rendered in a migration script.
.. change::
- :tags: bug, sqlite
- :tickets: 2764
+ :tags: feature, postgresql
+ :tickets: 2581
+ :pullreq: github:50
- Added :class:`.BIGINT` to the list of type names that can be
- reflected by the SQLite dialect; courtesy Russell Stuart.
- Also in 0.8.2.
+ Support for Postgresql JSON has been added, using the new
+ :class:`.JSON` type. Huge thanks to Nathan Rice for
+ implementing and testing this.
.. change::
- :tags: feature, orm, declarative
- :tickets: 2761
+ :tags: bug, sql
- ORM descriptors such as hybrid properties can now be referenced
- by name in a string argument used with ``order_by``,
- ``primaryjoin``, or similar in :func:`.relationship`,
- in addition to column-bound attributes. Also in 0.8.2.
+ The :func:`.cast` function, when given a plain literal value,
+ will now apply the given type to the given literal value on the
+ bind parameter side according to the type given to the cast,
+ in the same manner as that of the :func:`.type_coerce` function.
+ However unlike :func:`.type_coerce`, this only takes effect if a
+ non-clauseelement value is passed to :func:`.cast`; an existing typed
+ construct will retain its type.
.. change::
- :tags: feature, engine
- :tickets: 2770
+ :tags: bug, postgresql
- New events added to :class:`.ConnectionEvents`:
+ Now using psycopg2 UNICODEARRAY extension for handling unicode arrays
+ with psycopg2 + normal "native unicode" mode, in the same way the
+ UNICODE extension is used.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2883
+
+ The :class:`.ForeignKey` class more aggressively checks the given
+ column argument. If not a string, it checks that the object is
+ at least a :class:`.ColumnClause`, or an object that resolves to one,
+ and that the ``.table`` attribute, if present, refers to a
+ :class:`.TableClause` or subclass, and not something like an
+ :class:`.Alias`. Otherwise, an :class:`.ArgumentError` is raised.
- * :meth:`.ConnectionEvents.engine_connect`
- * :meth:`.ConnectionEvents.set_connection_execution_options`
- * :meth:`.ConnectionEvents.set_engine_execution_options`
.. change::
- :tags: feature, firebird
- :tickets: 2763
+ :tags: feature, orm
- Added new flag ``retaining=False`` to the kinterbasdb and fdb dialects.
- This controls the value of the ``retaining`` flag sent to the
- ``commit()`` and ``rollback()`` methods of the DBAPI connection.
- Defaults to False. Also in 0.8.2, where it defaults to True.
+ The :class:`.exc.StatementError` or DBAPI-related subclass
+ now can accommodate additional information about the "reason" for
+ the exception; the :class:`.Session` now adds some detail to it
+ when the exception occurs within an autoflush. This approach
+ is taken as opposed to combining :class:`.FlushError` with
+ a Python 3 style "chained exception" approach so as to maintain
+ compatibility both with Py2K code as well as code that already
+ catches ``IntegrityError`` or similar.
.. change::
- :tags: requirements
+ :tags: feature, postgresql
+ :pullreq: bitbucket:8
- The Python `mock <https://pypi.python.org/pypi/mock>`_ library
- is now required in order to run the unit test suite. While part
- of the standard library as of Python 3.3, previous Python installations
- will need to install this in order to run unit tests or to
- use the ``sqlalchemy.testing`` package for external dialects.
- This applies to 0.8.2 as well.
+ Added support for Postgresql TSVECTOR via the
+ :class:`.postgresql.TSVECTOR` type. Pull request courtesy
+ Noufal Ibrahim.
.. change::
- :tags: bug, orm
- :tickets: 2750
+ :tags: feature, engine
+ :tickets: 2875
- A warning is emitted when trying to flush an object of an inherited
- mapped class where the polymorphic discriminator has been assigned
- to a value that is invalid for the class. Also in 0.8.2.
+ The :func:`.engine_from_config` function has been improved so that
+ we will be able to parse dialect-specific arguments from string
+ configuration dictionaries. Dialect classes can now provide their
+ own list of parameter types and string-conversion routines.
+ The feature is not yet used by the built-in dialects, however.
.. change::
- :tags: bug, postgresql
- :tickets: 2740
+ :tags: bug, sql
+ :tickets: 2879
+
+ The precedence rules for the :meth:`.ColumnOperators.collate` operator
+ have been modified, such that the COLLATE operator is now of lower
+ precedence than the comparison operators. This has the effect that
+ a COLLATE applied to a comparison will not render parenthesis
+ around the comparison, which is not parsed by backends such as
+ MSSQL. The change is backwards incompatible for those setups that
+ were working around the issue by applying :meth:`.Operators.collate`
+ to an individual element of the comparison expression,
+ rather than the comparison expression as a whole.
- The behavior of :func:`.extract` has been simplified on the
- Postgresql dialect to no longer inject a hardcoded ``::timestamp``
- or similar cast into the given expression, as this interfered
- with types such as timezone-aware datetimes, but also
- does not appear to be at all necessary with modern versions
- of psycopg2. Also in 0.8.2.
+ .. seealso::
+
+ :ref:`migration_2879`
.. change::
- :tags: bug, firebird
- :tickets: 2757
+ :tags: bug, orm, declarative
+ :tickets: 2865
- Type lookup when reflecting the Firebird types LONG and
- INT64 has been fixed so that LONG is treated as INTEGER,
- INT64 treated as BIGINT, unless the type has a "precision"
- in which case it's treated as NUMERIC. Patch courtesy
- Russell Stuart. Also in 0.8.2.
+ The :class:`.DeferredReflection` class has been enhanced to provide
+ automatic reflection support for the "secondary" table referred
+ to by a :func:`.relationship`. "secondary", when specified
+ either as a string table name, or as a :class:`.Table` object with
+ only a name and :class:`.MetaData` object will also be included
+ in the reflection process when :meth:`.DeferredReflection.prepare`
+ is called.
.. change::
- :tags: bug, postgresql
- :tickets: 2766
+ :tags: feature, orm, backrefs
+ :tickets: 1535
+
+ Added new argument ``include_backrefs=True`` to the
+ :func:`.validates` function; when set to False, a validation event
+ will not be triggered if the event was initiated as a backref to
+ an attribute operation from the other side.
+
+ .. seealso::
+
+ :ref:`feature_1535`
+
+ .. change::
+ :tags: bug, orm, collections, py3k
+ :pullreq: github:40
- Fixed bug in HSTORE type where keys/values that contained
- backslashed quotes would not be escaped correctly when
- using the "non native" (i.e. non-psycopg2) means
- of translating HSTORE data. Patch courtesy Ryan Kelly.
- Also in 0.8.2.
+ Added support for the Python 3 method ``list.clear()`` within
+ the ORM collection instrumentation system; pull request
+ courtesy Eduardo Schettino.
.. change::
:tags: bug, postgresql
- :tickets: 2767
+ :tickets: 2878
- Fixed bug where the order of columns in a multi-column
- Postgresql index would be reflected in the wrong order.
- Courtesy Roman Podolyaka. Also in 0.8.2.
+ Fixed bug where values within an ENUM weren't escaped for single
+ quote signs. Note that this is backwards-incompatible for existing
+ workarounds that manually escape the single quotes.
+
+ .. seealso::
+
+ :ref:`migration_2878`
.. change::
- :tags: bug, sql
- :tickets: 2746, 2668
+ :tags: bug, orm, declarative
- Multiple fixes to the correlation behavior of
- :class:`.Select` constructs, first introduced in 0.8.0:
+ Fixed bug where in Py2K a unicode literal would not be accepted
+ as the string name of a class or other argument within
+ declarative using :func:`.relationship`.
- * To satisfy the use case where FROM entries should be
- correlated outwards to a SELECT that encloses another,
- which then encloses this one, correlation now works
- across multiple levels when explicit correlation is
- established via :meth:`.Select.correlate`, provided
- that the target select is somewhere along the chain
- contained by a WHERE/ORDER BY/columns clause, not
- just nested FROM clauses. This makes
- :meth:`.Select.correlate` act more compatibly to
- that of 0.7 again while still maintaining the new
- "smart" correlation.
+ .. change::
+ :tags: feature, sql
+ :tickets: 2877, 2882
- * When explicit correlation is not used, the usual
- "implicit" correlation limits its behavior to just
- the immediate enclosing SELECT, to maximize compatibility
- with 0.7 applications, and also prevents correlation
- across nested FROMs in this case, maintaining compatibility
- with 0.8.0/0.8.1.
+ New improvements to the :func:`.text` construct, including
+ more flexible ways to set up bound parameters and return types;
+ in particular, a :func:`.text` can now be turned into a full
+ FROM-object, embeddable in other statements as an alias or CTE
+ using the new method :meth:`.TextClause.columns`. The :func:`.text`
+ construct can also render "inline" bound parameters when the construct
+ is compiled in a "literal bound" context.
- * The :meth:`.Select.correlate_except` method was not
- preventing the given FROM clauses from correlation in
- all cases, and also would cause FROM clauses to be incorrectly
- omitted entirely (more like what 0.7 would do),
- this has been fixed.
+ .. seealso::
- * Calling `select.correlate_except(None)` will enter
- all FROM clauses into correlation as would be expected.
+ :ref:`feature_2877`
+
+ .. change::
+ :tags: feature, sql
+ :pullreq: github:42
+
+ A new API for specifying the ``FOR UPDATE`` clause of a ``SELECT``
+ is added with the new :meth:`.GenerativeSelect.with_for_update` method.
+ This method supports a more straightforward system of setting
+ dialect-specific options compared to the ``for_update`` keyword
+ argument of :func:`.select`, and also includes support for the
+ SQL standard ``FOR UPDATE OF`` clause. The ORM also includes
+ a new corresponding method :meth:`.Query.with_for_update`.
+ Pull request courtesy Mario Lassnig.
+
+ .. seealso::
- Also in 0.8.2.
+ :ref:`feature_github_42`
.. change::
- :tags: bug, ext
+ :tags: feature, orm
+ :pullreq: github:42
+
+ A new API for specifying the ``FOR UPDATE`` clause of a ``SELECT``
+ is added with the new :meth:`.Query.with_for_update` method,
+ to complement the new :meth:`.GenerativeSelect.with_for_update` method.
+ Pull request courtesy Mario Lassnig.
+
+ .. seealso::
- Fixed bug whereby if a composite type were set up
- with a function instead of a class, the mutable extension
- would trip up when it tried to check that column
- for being a :class:`.MutableComposite` (which it isn't).
- Courtesy asldevi. Also in 0.8.2.
+ :ref:`feature_github_42`
.. change::
- :tags: bug, sql
- :tickets: 1765
+ :tags: bug, engine
+ :tickets: 2873
- The resolution of :class:`.ForeignKey` objects to their
- target :class:`.Column` has been reworked to be as
- immediate as possible, based on the moment that the
- target :class:`.Column` is associated with the same
- :class:`.MetaData` as this :class:`.ForeignKey`, rather
- than waiting for the first time a join is constructed,
- or similar. This along with other improvements allows
- earlier detection of some foreign key configuration
- issues. Also included here is a rework of the
- type-propagation system, so that
- it should be reliable now to set the type as ``None``
- on any :class:`.Column` that refers to another via
- :class:`.ForeignKey` - the type will be copied from the
- target column as soon as that other column is associated,
- and now works for composite foreign keys as well.
+ The :func:`.create_engine` routine and the related
+ :func:`.make_url` function no longer considers the ``+`` sign
+ to be a space within the password field. The parsing has been
+ adjusted to match RFC 1738 exactly, in that both ``username``
+ and ``password`` expect only ``:``, ``@``, and ``/`` to be
+ encoded.
.. seealso::
- :ref:`migration_1765`
+ :ref:`migration_2873`
+
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2872
+
+ Some refinements to the :class:`.AliasedClass` construct with regards
+ to descriptors, like hybrids, synonyms, composites, user-defined
+ descriptors, etc. The attribute
+ adaptation which goes on has been made more robust, such that if a descriptor
+ returns another instrumented attribute, rather than a compound SQL
+ expression element, the operation will still proceed.
+ Additionally, the "adapted" operator will retain its class; previously,
+ a change in class from ``InstrumentedAttribute`` to ``QueryableAttribute``
+ (a superclass) would interact with Python's operator system such that
+ an expression like ``aliased(MyClass.x) > MyClass.x`` would reverse itself
+ to read ``myclass.x < myclass_1.x``. The adapted attribute will also
+ refer to the new :class:`.AliasedClass` as its parent which was not
+ always the case before.
.. change::
:tags: feature, sql
- :tickets: 2744, 2734
+ :tickets: 2867
- Provided a new attribute for :class:`.TypeDecorator`
- called :attr:`.TypeDecorator.coerce_to_is_types`,
- to make it easier to control how comparisons using
- ``==`` or ``!=`` to ``None`` and boolean types goes
- about producing an ``IS`` expression, or a plain
- equality expression with a bound parameter.
+ The precision used when coercing a returned floating point value to
+ Python ``Decimal`` via string is now configurable. The
+ flag ``decimal_return_scale`` is now supported by all :class:`.Numeric`
+ and :class:`.Float` types, which will ensure this many digits are taken
+ from the native floating point value when it is converted to string.
+ If not present, the type will make use of the value of ``.scale``, if
+ the type supports this setting and it is non-None. Otherwise the original
+ default length of 10 is used.
+
+ .. seealso::
+ :ref:`feature_2867`
.. change::
- :tags: feature, postgresql
+ :tags: bug, schema
+ :tickets: 2868
- Support for Postgresql 9.2 range types has been added.
- Currently, no type translation is provided, so works
- directly with strings or psycopg2 2.5 range extension types
- at the moment. Patch courtesy Chris Withers.
+ Fixed a regression caused by :ticket:`2812` where the repr() for
+ table and column names would fail if the name contained non-ascii
+ characters.
.. change::
- :tags: bug, examples
+ :tags: bug, engine
+ :tickets: 2848
- Fixed an issue with the "versioning" recipe whereby a many-to-one
- reference could produce a meaningless version for the target,
- even though it was not changed, when backrefs were present.
- Patch courtesy Matt Chisholm. Also in 0.8.2.
+ The :class:`.RowProxy` object is now sortable in Python as a regular
+ tuple is; this is accomplished via ensuring tuple() conversion on
+ both sides within the ``__eq__()`` method as well as
+ the addition of a ``__lt__()`` method.
+
+ .. seealso::
+
+ :ref:`migration_2848`
.. change::
- :tags: feature, postgresql
- :tickets: 2072
+ :tags: bug, orm
+ :tickets: 2833
+
+ The ``viewonly`` flag on :func:`.relationship` will now prevent
+ attribute history from being written on behalf of the target attribute.
+ This has the effect of the object not being written to the
+ Session.dirty list if it is mutated. Previously, the object would
+ be present in Session.dirty, but no change would take place on behalf
+ of the modified attribute during flush. The attribute still emits
+ events such as backref events and user-defined events and will still
+ receive mutations from backrefs.
+
+ .. seealso::
- Added support for "AUTOCOMMIT" isolation when using the psycopg2
- DBAPI. The keyword is available via the ``isolation_level``
- execution option. Patch courtesy Roman Podolyaka.
- Also in 0.8.2.
+ :ref:`migration_2833`
.. change::
:tags: bug, orm
- :tickets: 2759
- Fixed bug in polymorphic SQL generation where multiple joined-inheritance
- entities against the same base class joined to each other as well
- would not track columns on the base table independently of each other if
- the string of joins were more than two entities long. Also in 0.8.2.
+ Added support for new :attr:`.Session.info` attribute to
+ :class:`.scoped_session`.
.. change::
- :tags: bug, engine
- :pullreq: 6
+ :tags: removed
- Fixed bug where the ``reset_on_return`` argument to various :class:`.Pool`
- implementations would not be propagated when the pool was regenerated.
- Courtesy Eevee. Also in 0.8.2.
+ The "informix" and "informixdb" dialects have been removed; the code
+ is now available as a separate repository on Bitbucket. The IBM-DB
+ project has provided production-level Informix support since the
+ informixdb dialect was first added.
.. change::
:tags: bug, orm
- :tickets: 2754
- Fixed bug where sending a composite attribute into :meth:`.Query.order_by`
- would produce a parenthesized expression not accepted by some databases.
- Also in 0.8.2.
+ Fixed bug where usage of new :class:`.Bundle` object would cause
+ the :attr:`.Query.column_descriptions` attribute to fail.
+
+ .. change::
+ :tags: bug, examples
+
+ Fixed bug which prevented history_meta recipe from working with
+ joined inheritance schemes more than one level deep.
+
+ .. change::
+ :tags: bug, orm, sql, sqlite
+ :tickets: 2858
+
+ Fixed a regression introduced by the join rewriting feature of
+ :ticket:`2369` and :ticket:`2587` where a nested join with one side
+ already an aliased select would fail to translate the ON clause on the
+ outside correctly; in the ORM this could be seen when using a
+ SELECT statement as a "secondary" table.
+
+.. changelog::
+ :version: 0.9.0b1
+ :released: October 26, 2013
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 2810
+
+ The association proxy now returns ``None`` when fetching a scalar
+ attribute off of a scalar relationship, where the scalar relationship
+ itself points to ``None``, instead of raising an ``AttributeError``.
+
+ .. seealso::
+
+ :ref:`migration_2810`
+
+ .. change::
+ :tags: feature, sql, postgresql, mysql
+ :tickets: 2183
+
+ The Postgresql and MySQL dialects now support reflection/inspection
+ of foreign key options, including ON UPDATE, ON DELETE. Postgresql
+ also reflects MATCH, DEFERRABLE, and INITIALLY. Courtesy ijl.
+
+ .. change::
+ :tags: bug, mysql
+ :tickets: 2839
+
+ Fix and test parsing of MySQL foreign key options within reflection;
+ this complements the work in :ticket:`2183` where we begin to support
+ reflection of foreign key options such as ON UPDATE/ON DELETE
+ cascade.
.. change::
:tags: bug, orm
- :tickets: 2755
+ :tickets: 2787
+
+ :func:`.attributes.get_history()` when used with a scalar column-mapped
+ attribute will now honor the "passive" flag
+ passed to it; as this defaults to ``PASSIVE_OFF``, the function will
+ by default query the database if the value is not present.
+ This is a behavioral change vs. 0.8.
+
+ .. seealso::
+
+ :ref:`change_2787`
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 2787
+
+ Added new method :meth:`.AttributeState.load_history`, works like
+ :attr:`.AttributeState.history` but also fires loader callables.
+
+ .. seealso::
+
+ :ref:`change_2787`
+
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 2850
+
+ A :func:`.bindparam` construct with a "null" type (e.g. no type
+ specified) is now copied when used in a typed expression, and the
+ new copy is assigned the actual type of the compared column. Previously,
+ this logic would occur on the given :func:`.bindparam` in place.
+ Additionally, a similar process now occurs for :func:`.bindparam` constructs
+ passed to :meth:`.ValuesBase.values` for an :class:`.Insert` or
+ :class:`.Update` construct, within the compilation phase of the
+ construct.
- Fixed the interaction between composite attributes and
- the :func:`.aliased` function. Previously, composite attributes
- wouldn't work correctly in comparison operations when aliasing
- was applied. Also in 0.8.2.
+ These are both subtle behavioral changes which may impact some
+ usages.
+
+ .. seealso::
+
+ :ref:`migration_2850`
.. change::
:tags: feature, sql
- :tickets: 1443
+ :tickets: 2804, 2823, 2734
+
+ An overhaul of expression handling for special symbols particularly
+ with conjunctions, e.g.
+ ``None`` :func:`.expression.null` :func:`.expression.true`
+ :func:`.expression.false`, including consistency in rendering NULL
+ in conjunctions, "short-circuiting" of :func:`.and_` and :func:`.or_`
+ expressions which contain boolean constants, and rendering of
+ boolean constants and expressions as compared to "1" or "0" for backends
+ that don't feature ``true``/``false`` constants.
- Added support for "unique constraint" reflection, via the
- :meth:`.Inspector.get_unique_constraints` method.
- Thanks for Roman Podolyaka for the patch.
+ .. seealso::
+
+ :ref:`migration_2804`
.. change::
- :tags: feature, pool
- :tickets: 2752
+ :tags: feature, sql
+ :tickets: 2838
- Added pool logging for "rollback-on-return" and the less used
- "commit-on-return". This is enabled with the rest of pool
- "debug" logging.
+ The typing system now handles the task of rendering "literal bind" values,
+ e.g. values that are normally bound parameters but due to context must
+ be rendered as strings, typically within DDL constructs such as
+ CHECK constraints and indexes (note that "literal bind" values
+ become used by DDL as of :ticket:`2742`). A new method
+ :meth:`.TypeEngine.literal_processor` serves as the base, and
+ :meth:`.TypeDecorator.process_literal_param` is added to allow wrapping
+ of a native literal rendering method.
+
+ .. seealso::
+
+ :ref:`change_2838`
.. change::
- :tags: bug, mysql
- :tickets: 2715
+ :tags: feature, sql
+ :tickets: 2716
- Added another conditional to the ``mysql+gaerdbms`` dialect to
- detect so-called "development" mode, where we should use the
- ``rdbms_mysqldb`` DBAPI. Patch courtesy Brett Slatkin.
- Also in 0.8.2.
+ The :meth:`.Table.tometadata` method now produces copies of
+ all :attr:`.SchemaItem.info` dictionaries from all :class:`.SchemaItem`
+ objects within the structure including columns, constraints,
+ foreign keys, etc. As these dictionaries
+ are copies, they are independent of the original dictionary.
+ Previously, only the ``.info`` dictionary of :class:`.Column` was transferred
+ within this operation, and it was only linked in place, not copied.
+
+ .. change::
+ :tags: feature, postgresql
+ :tickets: 2840
+
+ Added support for rendering ``SMALLSERIAL`` when a :class:`.SmallInteger`
+ type is used on a primary key autoincrement column, based on server
+ version detection of Postgresql version 9.2 or greater.
.. change::
:tags: feature, mysql
- :tickets: 2704
+ :tickets: 2817
+
+ The MySQL :class:`.mysql.SET` type now features the same auto-quoting
+ behavior as that of :class:`.mysql.ENUM`. Quotes are not required when
+ setting up the value, but quotes that are present will be auto-detected
+ along with a warning. This also helps with Alembic where
+ the SET type doesn't render with quotes.
+
+ .. change::
+ :tags: feature, sql
+
+ The ``default`` argument of :class:`.Column` now accepts a class
+ or object method as an argument, in addition to a standalone function;
+ will properly detect if the "context" argument is accepted or not.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2835
+
+ The "name" attribute is set on :class:`.Index` before the "attach"
+ events are called, so that attachment events can be used to dynamically
+ generate a name for the index based on the parent table and/or
+ columns.
+
+ .. change::
+ :tags: bug, engine
+ :tickets: 2748
+
+ The method signature of :meth:`.Dialect.reflecttable`, which in
+ all known cases is provided by :class:`.DefaultDialect`, has been
+ tightened to expect ``include_columns`` and ``exclude_columns``
+ arguments without any kw option, reducing ambiguity - previously
+ ``exclude_columns`` was missing.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2831
+
+ The erroneous kw arg "schema" has been removed from the :class:`.ForeignKey`
+ object. This was an accidental commit that did nothing; a warning is raised
+ in 0.8.3 when this kw arg is used.
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 1418
+
+ Added a new load option :func:`.orm.load_only`. This allows a series
+ of column names to be specified as loading "only" those attributes,
+ deferring the rest.
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 1418
+
+ The system of loader options has been entirely rearchitected to build
+ upon a much more comprehensive base, the :class:`.Load` object. This
+ base allows any common loader option like :func:`.joinedload`,
+ :func:`.defer`, etc. to be used in a "chained" style for the purpose
+ of specifying options down a path, such as ``joinedload("foo").subqueryload("bar")``.
+ The new system supersedes the usage of dot-separated path names,
+ multiple attributes within options, and the usage of ``_all()`` options.
+
+ .. seealso::
+
+ :ref:`feature_1418`
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 2824
+
+ The :func:`.composite` construct now maintains the return object
+ when used in a column-oriented :class:`.Query`, rather than expanding
+ out into individual columns. This makes use of the new :class:`.Bundle`
+ feature internally. This behavior is backwards incompatible; to
+ select from a composite column which will expand out, use
+ ``MyClass.some_composite.clauses``.
+
+ .. seealso::
+
+ :ref:`migration_2824`
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 2824
+
+ A new construct :class:`.Bundle` is added, which allows for specification
+ of groups of column expressions to a :class:`.Query` construct.
+ The group of columns are returned as a single tuple by default. The
+ behavior of :class:`.Bundle` can be overridden however to provide
+ any sort of result processing to the returned row. The behavior
+ of :class:`.Bundle` is also embedded into composite attributes now
+ when they are used in a column-oriented :class:`.Query`.
+
+ .. seealso::
+
+ :ref:`change_2824`
+
+ :ref:`migration_2824`
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2812
+
+ A rework to the way that "quoted" identifiers are handled, in that
+ instead of relying upon various ``quote=True`` flags being passed around,
+ these flags are converted into rich string objects with quoting information
+ included at the point at which they are passed to common schema constructs
+ like :class:`.Table`, :class:`.Column`, etc. This solves the issue
+ of various methods that don't correctly honor the "quote" flag such
+ as :meth:`.Engine.has_table` and related methods. The :class:`.quoted_name`
+ object is a string subclass that can also be used explicitly if needed;
+ the object will hold onto the quoting preferences passed and will
+ also bypass the "name normalization" performed by dialects that
+ standardize on uppercase symbols, such as Oracle, Firebird and DB2.
+ The upshot is that the "uppercase" backends can now work with force-quoted
+ names, such as lowercase-quoted names and new reserved words.
+
+ .. seealso::
+
+ :ref:`change_2812`
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 2793
+
+ The ``version_id_generator`` parameter of ``Mapper`` can now be specified
+ to rely upon server generated version identifiers, using triggers
+ or other database-provided versioning features, or via an optional programmatic
+ value, by setting ``version_id_generator=False``.
+ When using a server-generated version identifier, the ORM will use RETURNING when
+ available to immediately
+ load the new version value, else it will emit a second SELECT.
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 2793
+
+ The ``eager_defaults`` flag of :class:`.Mapper` will now allow the
+ newly generated default values to be fetched using an inline
+ RETURNING clause, rather than a second SELECT statement, for backends
+ that support RETURNING.
+
+ .. change::
+ :tags: feature, core
+ :tickets: 2793
+
+ Added a new variant to :meth:`.UpdateBase.returning` called
+ :meth:`.ValuesBase.return_defaults`; this allows arbitrary columns
+ to be added to the RETURNING clause of the statement without interfering
+ with the compiler's usual "implicit returning" feature, which is used to
+ efficiently fetch newly generated primary key values. For supporting
+ backends, a dictionary of all fetched values is present at
+ :attr:`.ResultProxy.returned_defaults`.
+
+ .. change::
+ :tags: bug, mysql
+
+ Improved support for the cymysql driver, supporting version 0.6.5,
+ courtesy Hajime Nakagami.
+
+ .. change::
+ :tags: general
+
+ A large refactoring of packages has reorganized
+ the import structure of many Core modules as well as some aspects
+ of the ORM modules. In particular ``sqlalchemy.sql`` has been broken
+ out into several more modules than before so that the very large size
+ of ``sqlalchemy.sql.expression`` is now pared down. The effort
+ has focused on a large reduction in import cycles. Additionally,
+ the system of API functions in ``sqlalchemy.sql.expression`` and
+ ``sqlalchemy.orm`` has been reorganized to eliminate redundancy
+ in documentation between the functions vs. the objects they produce.
+
+ .. change::
+ :tags: orm, feature, orm
+
+ Added a new attribute :attr:`.Session.info` to :class:`.Session`;
+ this is a dictionary where applications can store arbitrary
+ data local to a :class:`.Session`.
+ The contents of :attr:`.Session.info` can be also be initialized
+ using the ``info`` argument of :class:`.Session` or
+ :class:`.sessionmaker`.
+
+
+ .. change::
+ :tags: feature, general, py3k
+ :tickets: 2161
+
+ The C extensions are ported to Python 3 and will build under
+ any supported CPython 2 or 3 environment.
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 2268
+
+ Removal of event listeners is now implemented. The feature is
+ provided via the :func:`.event.remove` function.
+
+ .. seealso::
+
+ :ref:`feature_2268`
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 2789
+
+ The mechanism by which attribute events pass along an
+ :class:`.AttributeImpl` as an "initiator" token has been changed;
+ the object is now an event-specific object called :class:`.attributes.Event`.
+ Additionally, the attribute system no longer halts events based
+ on a matching "initiator" token; this logic has been moved to be
+ specific to ORM backref event handlers, which are the typical source
+ of the re-propagation of an attribute event onto subsequent append/set/remove
+ operations. End user code which emulates the behavior of backrefs
+ must now ensure that recursive event propagation schemes are halted,
+ if the scheme does not use the backref handlers. Using this new system,
+ backref handlers can now perform a
+ "two-hop" operation when an object is appended to a collection,
+ associated with a new many-to-one, de-associated with the previous
+ many-to-one, and then removed from a previous collection. Before this
+ change, the last step of removal from the previous collection would
+ not occur.
+
+ .. seealso::
+
+ :ref:`migration_2789`
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 722
+
+ Added new method to the :func:`.insert` construct
+ :meth:`.Insert.from_select`. Given a list of columns and
+ a selectable, renders ``INSERT INTO (table) (columns) SELECT ..``.
+ While this feature is highlighted as part of 0.9 it is also
+ backported to 0.8.3.
+
+ .. seealso::
+
+ :ref:`feature_722`
+
+ .. change::
+ :tags: feature, engine
+ :tickets: 2770
+
+ New events added to :class:`.ConnectionEvents`:
+
+ * :meth:`.ConnectionEvents.engine_connect`
+ * :meth:`.ConnectionEvents.set_connection_execution_options`
+ * :meth:`.ConnectionEvents.set_engine_execution_options`
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 1765
+
+ The resolution of :class:`.ForeignKey` objects to their
+ target :class:`.Column` has been reworked to be as
+ immediate as possible, based on the moment that the
+ target :class:`.Column` is associated with the same
+ :class:`.MetaData` as this :class:`.ForeignKey`, rather
+ than waiting for the first time a join is constructed,
+ or similar. This along with other improvements allows
+ earlier detection of some foreign key configuration
+ issues. Also included here is a rework of the
+ type-propagation system, so that
+ it should be reliable now to set the type as ``None``
+ on any :class:`.Column` that refers to another via
+ :class:`.ForeignKey` - the type will be copied from the
+ target column as soon as that other column is associated,
+ and now works for composite foreign keys as well.
- The ``mysql_length`` parameter used with :class:`.Index` can now
- be passed as a dictionary of column names/lengths, for use
- with composite indexes. Big thanks to Roman Podolyaka for the
- patch. Also in 0.8.2.
+ .. seealso::
+
+ :ref:`migration_1765`
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 2744, 2734
+
+ Provided a new attribute for :class:`.TypeDecorator`
+ called :attr:`.TypeDecorator.coerce_to_is_types`,
+ to make it easier to control how comparisons using
+ ``==`` or ``!=`` to ``None`` and boolean types goes
+ about producing an ``IS`` expression, or a plain
+ equality expression with a bound parameter.
+
+ .. change::
+ :tags: feature, pool
+ :tickets: 2752
+
+ Added pool logging for "rollback-on-return" and the less used
+ "commit-on-return". This is enabled with the rest of pool
+ "debug" logging.
.. change::
:tags: bug, orm, associationproxy
@@ -472,72 +1200,10 @@
are passed correctly.
.. change::
- :tags: bug, mssql
- :tickets: 2747
-
- When querying the information schema on SQL Server 2000, removed
- a CAST call that was added in 0.8.1 to help with driver issues,
- which apparently is not compatible on 2000.
- The CAST remains in place for SQL Server 2005 and greater.
- Also in 0.8.2.
-
- .. change::
- :tags: bug, mysql
- :tickets: 2721
-
- The ``deferrable`` keyword argument on :class:`.ForeignKey` and
- :class:`.ForeignKeyConstraint` will not render the ``DEFERRABLE`` keyword
- on the MySQL dialect. For a long time we left this in place because
- a non-deferrable foreign key would act very differently than a deferrable
- one, but some environments just disable FKs on MySQL, so we'll be less
- opinionated here. Also in 0.8.2.
-
- .. change::
- :tags: bug, ext, orm
- :tickets: 2730
-
- Fixed bug where :class:`.MutableDict` didn't report a change event
- when ``clear()`` was called. Also in 0.8.2
-
- .. change::
- :tags: bug, sql
- :tickets: 2738
-
- Fixed bug whereby joining a select() of a table "A" with multiple
- foreign key paths to a table "B", to that table "B", would fail
- to produce the "ambiguous join condition" error that would be
- reported if you join table "A" directly to "B"; it would instead
- produce a join condition with multiple criteria. Also in 0.8.2.
-
- .. change::
- :tags: bug, sql, reflection
- :tickets: 2728
-
- Fixed bug whereby using :meth:`.MetaData.reflect` across a remote
- schema as well as a local schema could produce wrong results
- in the case where both schemas had a table of the same name.
- Also in 0.8.2.
-
- .. change::
- :tags: bug, sql
- :tickets: 2726
-
- Removed the "not implemented" ``__iter__()`` call from the base
- :class:`.ColumnOperators` class, while this was introduced
- in 0.8.0 to prevent an endless, memory-growing loop when one also
- implements a ``__getitem__()`` method on a custom
- operator and then calls erroneously ``list()`` on that object,
- it had the effect of causing column elements to report that they
- were in fact iterable types which then throw an error when you try
- to iterate. There's no real way to have both sides here so we
- stick with Python best practices. Careful with implementing
- ``__getitem__()`` on your custom operators! Also in 0.8.2.
-
- .. change::
:tags: feature, sql
:tickets: 1068
- A :class:`.Label` construct will now render as its name alone
+ A :func:`~sqlalchemy.sql.expression.label` construct will now render as its name alone
in an ``ORDER BY`` clause, if that label is also referred to
in the columns clause of the select, instead of rewriting the
full expression. This gives the database a better chance to
@@ -558,7 +1224,7 @@
official Python driver.
.. change::
- :tags: feature, general
+ :tags: feature, general, py3k
:tickets: 2671
The codebase is now "in-place" for Python
@@ -575,16 +1241,16 @@
:tags: bug, orm
:tickets: 2736
- The "auto-aliasing" behavior of the :class:`.Query.select_from`
+ The "auto-aliasing" behavior of the :meth:`.Query.select_from`
method has been turned off. The specific behavior is now
- availble via a new method :class:`.Query.select_entity_from`.
+ available via a new method :meth:`.Query.select_entity_from`.
The auto-aliasing behavior here was never well documented and
- is generally not what's desired, as :class:`.Query.select_from`
+ is generally not what's desired, as :meth:`.Query.select_from`
has become more oriented towards controlling how a JOIN is
- rendered. :class:`.Query.select_entity_from` will also be made
+ rendered. :meth:`.Query.select_entity_from` will also be made
available in 0.8 so that applications which rely on the auto-aliasing
can shift their applications to use this method.
.. seealso::
- :ref:`migration_2736` \ No newline at end of file
+ :ref:`migration_2736`
diff --git a/doc/build/changelog/migration_04.rst b/doc/build/changelog/migration_04.rst
index 236bfc3ce..cb53534af 100644
--- a/doc/build/changelog/migration_04.rst
+++ b/doc/build/changelog/migration_04.rst
@@ -749,10 +749,10 @@ Just like it says:
b = bindparam('foo', type_=String)
-in_ Function Changed to Accept Sequence or Selectable
------------------------------------------------------
+in\_ Function Changed to Accept Sequence or Selectable
+------------------------------------------------------
-The in_ function now takes a sequence of values or a
+The in\_ function now takes a sequence of values or a
selectable as its sole argument. The previous API of passing
in values as positional arguments still works, but is now
deprecated. This means that
diff --git a/doc/build/changelog/migration_07.rst b/doc/build/changelog/migration_07.rst
index 5bc7e74aa..207397f52 100644
--- a/doc/build/changelog/migration_07.rst
+++ b/doc/build/changelog/migration_07.rst
@@ -278,18 +278,18 @@ unchanged:
:ticket:`1923`
+.. _07_migration_mutation_extension:
+
Mutation event extension, supersedes "mutable=True"
---------------------------------------------------
-A new extension, `Mutation Tracking <http://www.sqlalchemy.o
-rg/docs/07/orm/extensions/mutable.html>`_, provides a
+A new extension, :ref:`mutable_toplevel`, provides a
mechanism by which user-defined datatypes can provide change
events back to the owning parent or parents. The extension
includes an approach for scalar database values, such as
-those managed by ``PickleType``, ``postgresql.ARRAY``, or
+those managed by :class:`.PickleType`, ``postgresql.ARRAY``, or
other custom ``MutableType`` classes, as well as an approach
-for ORM "composites", those configured using :ref:`composite()
-<mapper_composite>`_.
+for ORM "composites", those configured using :func:`~.sqlalchemy.orm.composite`.
.. seealso::
diff --git a/doc/build/changelog/migration_08.rst b/doc/build/changelog/migration_08.rst
index 971dd2f51..9f1715e28 100644
--- a/doc/build/changelog/migration_08.rst
+++ b/doc/build/changelog/migration_08.rst
@@ -522,7 +522,7 @@ accepted as a FROM clause within the core::
stmt = select([User]).where(User.id == 5)
Above, the mapped ``User`` class will expand into
-:class:`.Table` to which :class:`.User` is mapped.
+the :class:`.Table` to which ``User`` is mapped.
:ticket:`2245`
@@ -1235,7 +1235,7 @@ No more magic coercion of "=" to IN when comparing to subquery in MS-SQL
------------------------------------------------------------------------
We found a very old behavior in the MSSQL dialect which
-would attempt to rescue the user from his or herself when
+would attempt to rescue users from themselves when
doing something like this:
::
diff --git a/doc/build/changelog/migration_09.rst b/doc/build/changelog/migration_09.rst
index 424802c3d..f318b0346 100644
--- a/doc/build/changelog/migration_09.rst
+++ b/doc/build/changelog/migration_09.rst
@@ -9,7 +9,7 @@ What's New in SQLAlchemy 0.9?
and SQLAlchemy version 0.9, which is expected for release
in late 2013.
- Document date: May 29, 2013
+ Document last updated: January 8, 2014
Introduction
============
@@ -18,9 +18,8 @@ This guide introduces what's new in SQLAlchemy version 0.9,
and also documents changes which affect users migrating
their applications from the 0.8 series of SQLAlchemy to 0.9.
-Version 0.9 is a faster-than-usual push from version 0.8,
-featuring a more versatile codebase with regards to modern
-Python versions. See :ref:`behavioral_changes_09` for
+Please carefully review
+:ref:`behavioral_changes_orm_09` and :ref:`behavioral_changes_core_09` for
potentially backwards-incompatible changes.
Platform Support
@@ -37,15 +36,49 @@ Python 3. All SQLAlchemy modules and unit tests are now interpreted
equally well with any Python interpreter from 2.6 forward, including
the 3.1 and 3.2 interpreters.
-At the moment, the C extensions are still not fully ported to
-Python 3.
+:ticket:`2671`
+C Extensions Supported on Python 3
+-----------------------------------
+The C extensions have been ported to support Python 3 and now build
+in both Python 2 and Python 3 environments.
-.. _behavioral_changes_09:
+:ticket:`2161`
+
+.. _behavioral_changes_orm_09:
+
+Behavioral Changes - ORM
+========================
+
+.. _migration_2824:
+
+Composite attributes are now returned as their object form when queried on a per-attribute basis
+------------------------------------------------------------------------------------------------
+
+Using a :class:`.Query` in conjunction with a composite attribute now returns the object
+type maintained by that composite, rather than being broken out into individual
+columns. Using the mapping setup at :ref:`mapper_composite`::
+
+ >>> session.query(Vertex.start, Vertex.end).\
+ ... filter(Vertex.start == Point(3, 4)).all()
+ [(Point(x=3, y=4), Point(x=5, y=6))]
+
+This change is backwards-incompatible with code that expects the individual attribute
+to be expanded into individual columns. To get that behavior, use the ``.clauses``
+accessor::
+
+
+ >>> session.query(Vertex.start.clauses, Vertex.end.clauses).\
+ ... filter(Vertex.start == Point(3, 4)).all()
+ [(3, 4, 5, 6)]
+
+.. seealso::
+
+ :ref:`change_2824`
+
+:ticket:`2824`
-Behavioral Changes
-==================
.. _migration_2736:
@@ -124,6 +157,65 @@ to 0.9 without issue.
:ticket:`2736`
+
+.. _migration_2833:
+
+``viewonly=True`` on ``relationship()`` prevents history from taking effect
+---------------------------------------------------------------------------
+
+The ``viewonly`` flag on :func:`.relationship` is applied to prevent changes
+to the target attribute from having any effect within the flush process.
+This is achieved by eliminating the attribute from being considered during
+the flush. However, up until now, changes to the attribute would still
+register the parent object as "dirty" and trigger a potential flush. The change
+is that the ``viewonly`` flag now prevents history from being set for the
+target attribute as well. Attribute events like backrefs and user-defined events
+still continue to function normally.
+
+The change is illustrated as follows::
+
+ from sqlalchemy import Column, Integer, ForeignKey, create_engine
+ from sqlalchemy.orm import backref, relationship, Session
+ from sqlalchemy.ext.declarative import declarative_base
+ from sqlalchemy import inspect
+
+ Base = declarative_base()
+
+ class A(Base):
+ __tablename__ = 'a'
+ id = Column(Integer, primary_key=True)
+
+ class B(Base):
+ __tablename__ = 'b'
+
+ id = Column(Integer, primary_key=True)
+ a_id = Column(Integer, ForeignKey('a.id'))
+ a = relationship("A", backref=backref("bs", viewonly=True))
+
+ e = create_engine("sqlite://")
+ Base.metadata.create_all(e)
+
+ a = A()
+ b = B()
+
+ sess = Session(e)
+ sess.add_all([a, b])
+ sess.commit()
+
+ b.a = a
+
+ assert b in sess.dirty
+
+ # before 0.9.0
+ # assert a in sess.dirty
+ # assert inspect(a).attrs.bs.history.has_changes()
+
+ # after 0.9.0
+ assert a not in sess.dirty
+ assert not inspect(a).attrs.bs.history.has_changes()
+
+:ticket:`2833`
+
.. _migration_2751:
Association Proxy SQL Expression Improvements and Fixes
@@ -215,9 +307,473 @@ against ``b_value`` directly.
:ticket:`2751`
+.. _migration_2810:
+
+Association Proxy Missing Scalar returns None
+---------------------------------------------
+
+An association proxy from a scalar attribute to a scalar will now return
+``None`` if the proxied object isn't present. This is consistent with the
+fact that missing many-to-ones return None in SQLAlchemy, so should the
+proxied value. E.g.::
+
+ from sqlalchemy import *
+ from sqlalchemy.orm import *
+ from sqlalchemy.ext.declarative import declarative_base
+ from sqlalchemy.ext.associationproxy import association_proxy
+
+ Base = declarative_base()
+
+ class A(Base):
+ __tablename__ = 'a'
+
+ id = Column(Integer, primary_key=True)
+ b = relationship("B", uselist=False)
+
+ bname = association_proxy("b", "name")
+
+ class B(Base):
+ __tablename__ = 'b'
+
+ id = Column(Integer, primary_key=True)
+ a_id = Column(Integer, ForeignKey('a.id'))
+ name = Column(String)
+
+ a1 = A()
+
+ # this is how m2o's always have worked
+ assert a1.b is None
+
+ # but prior to 0.9, this would raise AttributeError,
+ # now returns None just like the proxied value.
+ assert a1.bname is None
+
+:ticket:`2810`
+
+
+.. _change_2787:
+
+attributes.get_history() will query from the DB by default if value not present
+-------------------------------------------------------------------------------
+
+A bugfix regarding :func:`.attributes.get_history` allows a column-based attribute
+to query out to the database for an unloaded value, assuming the ``passive``
+flag is left at its default of ``PASSIVE_OFF``. Previously, this flag would
+not be honored. Additionally, a new method :meth:`.AttributeState.load_history`
+is added to complement the :attr:`.AttributeState.history` attribute, which
+will emit loader callables for an unloaded attribute.
+
+This is a small change demonstrated as follows::
+
+ from sqlalchemy import Column, Integer, String, create_engine, inspect
+ from sqlalchemy.orm import Session, attributes
+ from sqlalchemy.ext.declarative import declarative_base
+
+ Base = declarative_base()
+
+ class A(Base):
+ __tablename__ = 'a'
+ id = Column(Integer, primary_key=True)
+ data = Column(String)
+
+ e = create_engine("sqlite://", echo=True)
+ Base.metadata.create_all(e)
+
+ sess = Session(e)
+
+ a1 = A(data='a1')
+ sess.add(a1)
+ sess.commit() # a1 is now expired
+
+ # history doesn't emit loader callables
+ assert inspect(a1).attrs.data.history == (None, None, None)
+
+ # in 0.8, this would fail to load the unloaded state.
+ assert attributes.get_history(a1, 'data') == ((), ['a1',], ())
+
+    # load_history() is now equivalent to get_history() with
+ # passive=PASSIVE_OFF ^ INIT_OK
+ assert inspect(a1).attrs.data.load_history() == ((), ['a1',], ())
+
+:ticket:`2787`
+
+.. _behavioral_changes_core_09:
+
+Behavioral Changes - Core
+=========================
+
+``None`` can no longer be used as a "partial AND" constructor
+--------------------------------------------------------------
+
+``None`` can no longer be used as the "backstop" to form an AND condition piecemeal.
+This pattern was not a documented pattern even though some SQLAlchemy internals
+made use of it::
+
+ condition = None
+
+ for cond in conditions:
+ condition = condition & cond
+
+ if condition is not None:
+ stmt = stmt.where(condition)
+
+The above sequence, when ``conditions`` is non-empty, will on 0.9 produce
+``SELECT .. WHERE <condition> AND NULL``. The ``None`` is no longer implicitly
+ignored, and is instead consistent with when ``None`` is interpreted in other
+contexts besides that of a conjunction.
+
+The correct code for both 0.8 and 0.9 should read::
+
+ from sqlalchemy.sql import and_
+
+ if conditions:
+ stmt = stmt.where(and_(*conditions))
+
+Another variant that works on all backends on 0.9, but on 0.8 only works on
+backends that support boolean constants::
+
+ from sqlalchemy.sql import true
+
+ condition = true()
+
+ for cond in conditions:
+ condition = cond & condition
+
+ stmt = stmt.where(condition)
+
+On 0.8, this will produce a SELECT statement that always has ``AND true``
+in the WHERE clause, which is not accepted by backends that don't support
+boolean constants (MySQL, MSSQL). On 0.9, the ``true`` constant will be dropped
+within an ``and_()`` conjunction.
+
+.. seealso::
+
+ :ref:`migration_2804`
+
+.. _migration_2873:
+
+The "password" portion of a ``create_engine()`` no longer considers the ``+`` sign as an encoded space
+------------------------------------------------------------------------------------------------------
+
+For whatever reason, the Python function ``unquote_plus()`` was applied to the
+"password" field of a URL, which is an incorrect application of the
+encoding rules described in `RFC 1738 <http://www.ietf.org/rfc/rfc1738.txt>`_
+in that it escaped spaces as plus signs. The stringification of a URL
+now only encodes ":", "@", or "/" and nothing else, and is now applied to both the
+``username`` and ``password`` fields (previously it only applied to the
+password). On parsing, encoded characters are converted, but plus signs and
+spaces are passed through as is::
+
+ # password: "pass word + other:words"
+ dbtype://user:pass word + other%3Awords@host/dbname
+
+ # password: "apples/oranges"
+ dbtype://username:apples%2Foranges@hostspec/database
+
+ # password: "apples@oranges@@"
+ dbtype://username:apples%40oranges%40%40@hostspec/database
+
+ # password: '', username is "username@"
+ dbtype://username%40:@hostspec/database
+
+
+:ticket:`2873`
+
+.. _migration_2879:
+
+The precedence rules for COLLATE have been changed
+--------------------------------------------------
+
+Previously, an expression like the following::
+
+ print (column('x') == 'somevalue').collate("en_EN")
+
+would produce an expression like this::
+
+ -- 0.8 behavior
+ (x = :x_1) COLLATE en_EN
+
+The above is misunderstood by MSSQL and is generally not the syntax suggested
+for any database. The expression will now produce the syntax illustrated
+by that of most database documentation::
+
+ -- 0.9 behavior
+ x = :x_1 COLLATE en_EN
+
+The potentially backwards incompatible change arises if the :meth:`.collate`
+operator is being applied to the right-hand column, as follows::
+
+ print column('x') == literal('somevalue').collate("en_EN")
+
+In 0.8, this produces::
+
+ x = :param_1 COLLATE en_EN
+
+However in 0.9, will now produce the more accurate, but probably not what you
+want, form of::
+
+ x = (:param_1 COLLATE en_EN)
+
+The :meth:`.ColumnOperators.collate` operator now works more appropriately within an
+``ORDER BY`` expression as well, as a specific precedence has been given to the
+``ASC`` and ``DESC`` operators which will again ensure no parentheses are
+generated::
+
+ >>> # 0.8
+ >>> print column('x').collate('en_EN').desc()
+ (x COLLATE en_EN) DESC
+
+ >>> # 0.9
+ >>> print column('x').collate('en_EN').desc()
+ x COLLATE en_EN DESC
+
+:ticket:`2879`
+
+
+
+.. _migration_2878:
+
+Postgresql CREATE TYPE <x> AS ENUM now applies quoting to values
+----------------------------------------------------------------
+
+The :class:`.postgresql.ENUM` type will now apply escaping to single quote
+signs within the enumerated values::
+
+ >>> from sqlalchemy.dialects import postgresql
+ >>> type = postgresql.ENUM('one', 'two', "three's", name="myenum")
+ >>> from sqlalchemy.dialects.postgresql import base
+ >>> print base.CreateEnumType(type).compile(dialect=postgresql.dialect())
+ CREATE TYPE myenum AS ENUM ('one','two','three''s')
+
+Existing workarounds which already escape single quote signs will need to be
+modified, else they will now double-escape.
+
+:ticket:`2878`
+
New Features
============
+.. _feature_2268:
+
+Event Removal API
+-----------------
+
+Events established using :func:`.event.listen` or :func:`.event.listens_for`
+can now be removed using the new :func:`.event.remove` function. The ``target``,
+``identifier`` and ``fn`` arguments sent to :func:`.event.remove` need to match
+exactly those which were sent for listening, and the event will be removed
+from all locations in which it had been established::
+
+ @event.listens_for(MyClass, "before_insert", propagate=True)
+ def my_before_insert(mapper, connection, target):
+ """listen for before_insert"""
+ # ...
+
+ event.remove(MyClass, "before_insert", my_before_insert)
+
+In the example above, the ``propagate=True`` flag is set. This
+means ``my_before_insert()`` is established as a listener for ``MyClass``
+as well as all subclasses of ``MyClass``.
+The system tracks everywhere that the ``my_before_insert()``
+listener function had been placed as a result of this call and removes it as
+a result of calling :func:`.event.remove`.
+
+The removal system uses a registry to associate arguments passed to
+:func:`.event.listen` with collections of event listeners, which are in many
+cases wrapped versions of the original user-supplied function. This registry
+makes heavy use of weak references in order to allow all the contained contents,
+such as listener targets, to be garbage collected when they go out of scope.
+
+:ticket:`2268`
+
+.. _feature_1418:
+
+New Query Options API; ``load_only()`` option
+---------------------------------------------
+
+The system of loader options such as :func:`.orm.joinedload`,
+:func:`.orm.subqueryload`, :func:`.orm.lazyload`, :func:`.orm.defer`, etc.
+all build upon a new system known as :class:`.Load`. :class:`.Load` provides
+a "method chained" (a.k.a. :term:`generative`) approach to loader options, so that
+instead of joining together long paths using dots or multiple attribute names,
+an explicit loader style is given for each path.
+
+While the new way is slightly more verbose, it is simpler to understand
+in that there is no ambiguity in what options are being applied to which paths;
+it simplifies the method signatures of the options and provides greater flexibility
+particularly for column-based options. The old systems are to remain functional
+indefinitely as well and all styles can be mixed.
+
+**Old Way**
+
+To set a certain style of loading along every link in a multi-element path, the ``_all()``
+option has to be used::
+
+ query(User).options(joinedload_all("orders.items.keywords"))
+
+**New Way**
+
+Loader options are now chainable, so the same ``joinedload(x)`` method is applied
+equally to each link, without the need to keep straight between
+:func:`.joinedload` and :func:`.joinedload_all`::
+
+ query(User).options(joinedload("orders").joinedload("items").joinedload("keywords"))
+
+**Old Way**
+
+Setting an option on path that is based on a subclass requires that all
+links in the path be spelled out as class bound attributes, since the
+:meth:`.PropComparator.of_type` method needs to be called::
+
+ session.query(Company).\
+ options(
+ subqueryload_all(
+ Company.employees.of_type(Engineer),
+ Engineer.machines
+ )
+ )
+
+**New Way**
+
+Only those elements in the path that actually need :meth:`.PropComparator.of_type`
+need to be set as a class-bound attribute, string-based names can be resumed
+afterwards::
+
+    session.query(Company).\
+        options(
+            subqueryload(Company.employees.of_type(Engineer)).
+                subqueryload("machines")
+        )
+
+**Old Way**
+
+Setting the loader option on the last link in a long path uses a syntax
+that looks a lot like it should be setting the option for all links in the
+path, causing confusion::
+
+ query(User).options(subqueryload("orders.items.keywords"))
+
+**New Way**
+
+A path can now be spelled out using :func:`.defaultload` for entries in the
+path where the existing loader style should be unchanged. More verbose
+but the intent is clearer::
+
+ query(User).options(defaultload("orders").defaultload("items").subqueryload("keywords"))
+
+
+The dotted style can still be taken advantage of, particularly in the case
+of skipping over several path elements::
+
+ query(User).options(defaultload("orders.items").subqueryload("keywords"))
+
+**Old Way**
+
+The :func:`.defer` option on a path needed to be spelled out with the full
+path for each column::
+
+ query(User).options(defer("orders.description"), defer("orders.isopen"))
+
+**New Way**
+
+A single :class:`.Load` object that arrives at the target path can have
+:meth:`.Load.defer` called upon it repeatedly::
+
+ query(User).options(defaultload("orders").defer("description").defer("isopen"))
+
+The Load Class
+^^^^^^^^^^^^^^^
+
+The :class:`.Load` class can be used directly to provide a "bound" target,
+especially when multiple parent entities are present::
+
+ from sqlalchemy.orm import Load
+
+ query(User, Address).options(Load(Address).joinedload("entries"))
+
+Load Only
+^^^^^^^^^
+
+A new option :func:`.load_only` achieves a "defer everything but" style of load,
+loading only the given columns and deferring the rest::
+
+ from sqlalchemy.orm import load_only
+
+ query(User).options(load_only("name", "fullname"))
+
+ # specify explicit parent entity
+ query(User, Address).options(Load(User).load_only("name", "fullname"))
+
+ # specify path
+ query(User).options(joinedload(User.addresses).load_only("email_address"))
+
+Class-specific Wildcards
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Using :class:`.Load`, a wildcard may be used to set the loading for all
+relationships (or perhaps columns) on a given entity, without affecting any
+others::
+
+ # lazyload all User relationships
+ query(User).options(Load(User).lazyload("*"))
+
+ # undefer all User columns
+ query(User).options(Load(User).undefer("*"))
+
+ # lazyload all Address relationships
+ query(User).options(defaultload(User.addresses).lazyload("*"))
+
+ # undefer all Address columns
+ query(User).options(defaultload(User.addresses).undefer("*"))
+
+
+:ticket:`1418`
+
+
+.. _feature_2877:
+
+New ``text()`` Capabilities
+---------------------------
+
+The :func:`.text` construct gains new methods:
+
+* :meth:`.TextClause.bindparams` allows bound parameter types and values
+ to be set flexibly::
+
+ # setup values
+ stmt = text("SELECT id, name FROM user "
+ "WHERE name=:name AND timestamp=:timestamp").\
+ bindparams(name="ed", timestamp=datetime(2012, 11, 10, 15, 12, 35))
+
+ # setup types and/or values
+ stmt = text("SELECT id, name FROM user "
+ "WHERE name=:name AND timestamp=:timestamp").\
+        bindparams(
+            bindparam("name", value="ed"),
+            bindparam("timestamp", type_=DateTime())
+        ).bindparams(timestamp=datetime(2012, 11, 10, 15, 12, 35))
+
+* :meth:`.TextClause.columns` supersedes the ``typemap`` option
+ of :func:`.text`, returning a new construct :class:`.TextAsFrom`::
+
+ # turn a text() into an alias(), with a .c. collection:
+ stmt = text("SELECT id, name FROM user").columns(id=Integer, name=String)
+ stmt = stmt.alias()
+
+    stmt = select([addresses]).select_from(
+        addresses.join(stmt, addresses.c.user_id == stmt.c.id))
+
+
+ # or into a cte():
+ stmt = text("SELECT id, name FROM user").columns(id=Integer, name=String)
+ stmt = stmt.cte("x")
+
+    stmt = select([addresses]).select_from(
+        addresses.join(stmt, addresses.c.user_id == stmt.c.id))
+
+:ticket:`2877`
+
.. _feature_722:
INSERT from SELECT
@@ -252,11 +808,247 @@ rendering::
:ticket:`722`
+.. _feature_github_42:
+
+New FOR UPDATE support on ``select()``, ``Query()``
+---------------------------------------------------
+
+An attempt is made to simplify the specification of the ``FOR UPDATE``
+clause on ``SELECT`` statements made within Core and ORM, and support is added
+for the ``FOR UPDATE OF`` SQL supported by Postgresql and Oracle.
+
+Using the core :meth:`.GenerativeSelect.with_for_update`, options like ``FOR SHARE`` and
+``NOWAIT`` can be specified individually, rather than linking to arbitrary
+string codes::
+
+ stmt = select([table]).with_for_update(read=True, nowait=True, of=table)
+
+On Postgresql the above statement might render like::
+
+ SELECT table.a, table.b FROM table FOR SHARE OF table NOWAIT
+
+The :class:`.Query` object gains a similar method :meth:`.Query.with_for_update`
+which behaves in the same way. This method supersedes the existing
+:meth:`.Query.with_lockmode` method, which translated ``FOR UPDATE`` clauses
+using a different system. At the moment, the "lockmode" string argument is still
+accepted by the :meth:`.Session.refresh` method.
+
+
+.. _feature_2867:
+
+Floating Point String-Conversion Precision Configurable for Native Floating Point Types
+---------------------------------------------------------------------------------------
+
+The conversion which SQLAlchemy does whenever a DBAPI returns a Python
+floating point type which is to be converted into a Python ``Decimal()``
+necessarily involves an intermediary step which converts the floating point
+value to a string. The scale used for this string conversion was previously
+hardcoded to 10, and is now configurable. The setting is available on
+both the :class:`.Numeric` as well as the :class:`.Float`
+type, as well as all SQL- and dialect-specific descendant types, using the
+parameter ``decimal_return_scale``. If the type supports a ``.scale`` parameter,
+as is the case with :class:`.Numeric` and some float types such as
+:class:`.mysql.DOUBLE`, the value of ``.scale`` is used as the default
+for ``.decimal_return_scale`` if it is not otherwise specified. If both
+``.scale`` and ``.decimal_return_scale`` are absent, then the default of
+10 takes place. E.g.::
+
+ from sqlalchemy.dialects.mysql import DOUBLE
+ import decimal
+
+ data = Table('data', metadata,
+ Column('double_value',
+ mysql.DOUBLE(decimal_return_scale=12, asdecimal=True))
+ )
+
+ conn.execute(
+ data.insert(),
+ double_value=45.768392065789,
+ )
+ result = conn.scalar(select([data.c.double_value]))
+
+ # previously, this would typically be Decimal("45.7683920658"),
+ # e.g. trimmed to 10 decimal places
+
+ # now we get 12, as requested, as MySQL can support this
+ # much precision for DOUBLE
+ assert result == decimal.Decimal("45.768392065789")
+
+
+:ticket:`2867`
+
+
+.. _change_2824:
+
+Column Bundles for ORM queries
+------------------------------
+
+The :class:`.Bundle` allows for querying of sets of columns, which are then
+grouped into one name under the tuple returned by the query. The initial
+purposes of :class:`.Bundle` are 1. to allow "composite" ORM columns to be
+returned as a single value in a column-based result set, rather than expanding
+them out into individual columns and 2. to allow the creation of custom result-set
+constructs within the ORM, using ad-hoc columns and return types, without involving
+the more heavyweight mechanics of mapped classes.
+
+.. seealso::
+
+ :ref:`migration_2824`
+
+ :ref:`bundles`
+
+:ticket:`2824`
+
+
+Server Side Version Counting
+-----------------------------
+
+The versioning feature of the ORM (now also documented at :ref:`mapper_version_counter`)
+can now make use of server-side version counting schemes, such as those produced
+by triggers or database system columns, as well as conditional programmatic schemes outside
+of the ``version_id_generator`` function itself. By providing the value ``False``
+to the ``version_id_generator`` parameter, the ORM will use the already-set version
+identifier, or alternatively fetch the version identifier
+from each row at the same time the INSERT or UPDATE is emitted. When using a
+server-generated version identifier, it is strongly
+recommended that this feature be used only on a backend with strong RETURNING
+support (Postgresql, SQL Server; Oracle also supports RETURNING but the cx_oracle
+driver has only limited support), else the additional SELECT statements will
+add significant performance
+overhead. The example provided at :ref:`server_side_version_counter` illustrates
+the usage of the Postgresql ``xmin`` system column in order to integrate it with
+the ORM's versioning feature.
+
+.. seealso::
+
+ :ref:`server_side_version_counter`
+
+:ticket:`2793`
+
+.. _feature_1535:
+
+``include_backrefs=False`` option for ``@validates``
+----------------------------------------------------
+
+The :func:`.validates` function now accepts an option ``include_backrefs=False``,
+which will bypass firing the validator for the case where the event initiated
+from a backref::
+
+ from sqlalchemy import Column, Integer, ForeignKey
+ from sqlalchemy.orm import relationship, validates
+ from sqlalchemy.ext.declarative import declarative_base
+
+ Base = declarative_base()
+
+ class A(Base):
+ __tablename__ = 'a'
+
+ id = Column(Integer, primary_key=True)
+ bs = relationship("B", backref="a")
+
+ @validates("bs")
+ def validate_bs(self, key, item):
+ print("A.bs validator")
+ return item
+
+ class B(Base):
+ __tablename__ = 'b'
+
+ id = Column(Integer, primary_key=True)
+ a_id = Column(Integer, ForeignKey('a.id'))
+
+ @validates("a", include_backrefs=False)
+ def validate_a(self, key, item):
+ print("B.a validator")
+ return item
+
+ a1 = A()
+ a1.bs.append(B()) # prints only "A.bs validator"
+
+
+:ticket:`1535`
+
+
+Postgresql JSON Type
+--------------------
+
+The Postgresql dialect now features a :class:`.postgresql.JSON` type to
+complement the :class:`.postgresql.HSTORE` type.
+
+.. seealso::
+
+ :class:`.postgresql.JSON`
+
+:ticket:`2581`
+
+.. _feature_automap:
+
+Automap Extension
+-----------------
+
+A new extension is added in **0.9.1** known as :mod:`sqlalchemy.ext.automap`. This is an
+**experimental** extension which expands upon the functionality of Declarative
+as well as the :class:`.DeferredReflection` class. Essentially, the extension
+provides a base class :class:`.AutomapBase` which automatically generates
+mapped classes and relationships between them based on given table metadata.
+
+The :class:`.MetaData` in use normally might be produced via reflection, but
+there is no requirement that reflection is used. The most basic usage
+illustrates how :mod:`sqlalchemy.ext.automap` is able to deliver mapped
+classes, including relationships, based on a reflected schema::
+
+ from sqlalchemy.ext.automap import automap_base
+ from sqlalchemy.orm import Session
+ from sqlalchemy import create_engine
+
+ Base = automap_base()
+
+ # engine, suppose it has two tables 'user' and 'address' set up
+ engine = create_engine("sqlite:///mydatabase.db")
+
+ # reflect the tables
+ Base.prepare(engine, reflect=True)
+
+ # mapped classes are now created with names matching that of the table
+ # name.
+ User = Base.classes.user
+ Address = Base.classes.address
+
+ session = Session(engine)
+
+ # rudimentary relationships are produced
+    u1 = User(name="foo")
+    session.add(Address(email_address="foo@bar.com", user=u1))
+ session.commit()
+
+ # collection-based relationships are by default named "<classname>_collection"
+ print (u1.address_collection)
+
+Beyond that, the :class:`.AutomapBase` class is a declarative base, and supports
+all the features that declarative does. The "automapping" feature can be used
+with an existing, explicitly declared schema to generate relationships and
+missing classes only. Naming schemes and relationship-production routines
+can be dropped in using callable functions.
+
+It is hoped that the :class:`.AutomapBase` system provides a quick
+and modernized solution to the problem that the very famous
+`SQLSoup <https://sqlsoup.readthedocs.org/en/latest/>`_
+also tries to solve, that of generating a quick and rudimentary object
+model from an existing database on the fly. By addressing the issue strictly
+at the mapper configuration level, and integrating fully with existing
+Declarative class techniques, :class:`.AutomapBase` seeks to provide
+a well-integrated approach to the issue of expediently auto-generating ad-hoc
+mappings.
+
+.. seealso::
+
+ :ref:`automap_toplevel`
+
Behavioral Improvements
=======================
-Improvements that should produce no compatibility issues, but are good
-to be aware of in case there are unexpected issues.
+Improvements that should produce no compatibility issues except in exceedingly
+rare and unusual hypothetical cases, but are good to be aware of in case there are
+unexpected issues.
.. _feature_joins_09:
@@ -422,6 +1214,254 @@ Generates (everywhere except SQLite)::
:ticket:`2369` :ticket:`2587`
+ORM can efficiently fetch just-generated INSERT/UPDATE defaults using RETURNING
+-------------------------------------------------------------------------------
+
+The :class:`.Mapper` has long supported an undocumented flag known as
+``eager_defaults=True``. The effect of this flag is that when an INSERT or UPDATE
+proceeds, and the row is known to have server-generated default values,
+a SELECT would immediately follow it in order to "eagerly" load those new values.
+Normally, the server-generated columns are marked as "expired" on the object,
+so that no overhead is incurred unless the application actually accesses these
+columns soon after the flush. The ``eager_defaults`` flag was therefore not
+of much use as it could only decrease performance, and was present only to support
+exotic event schemes where users needed default values to be available
+immediately within the flush process.
+
+In 0.9, as a result of the version id enhancements, ``eager_defaults`` can now
+emit a RETURNING clause for these values, so on a backend with strong RETURNING
+support in particular Postgresql, the ORM can fetch newly generated default
+and SQL expression values inline with the INSERT or UPDATE. ``eager_defaults``,
+when enabled, makes use of RETURNING automatically when the target backend
+and :class:`.Table` supports "implicit returning".
+
+.. _change_2836:
+
+Subquery Eager Loading will apply DISTINCT to the innermost SELECT for some queries
+------------------------------------------------------------------------------------
+
+In an effort to reduce the number of duplicate rows that can be generated
+by subquery eager loading when a many-to-one relationship is involved, a
+DISTINCT keyword will be applied to the innermost SELECT when the join is
+targeting columns that do not comprise the primary key, as in when loading
+along a many to one.
+
+That is, when subquery loading on a many-to-one from A->B::
+
+ SELECT b.id AS b_id, b.name AS b_name, anon_1.b_id AS a_b_id
+ FROM (SELECT DISTINCT a_b_id FROM a) AS anon_1
+ JOIN b ON b.id = anon_1.a_b_id
+
+Since ``a.b_id`` is a non-distinct foreign key, DISTINCT is applied so that
+redundant ``a.b_id`` are eliminated. The behavior can be turned on or off
+unconditionally for a particular :func:`.relationship` using the flag
+``distinct_target_key``, setting the value to ``True`` for unconditionally
+on, ``False`` for unconditionally off, and ``None`` for the feature to take
+effect when the target SELECT is against columns that do not comprise a full
+primary key. In 0.9, ``None`` is the default.
+
+The option is also backported to 0.8 where the ``distinct_target_key``
+option defaults to ``False``.
+
+While the feature here is designed to help performance by eliminating
+duplicate rows, the ``DISTINCT`` keyword in SQL itself can have a negative
+performance impact. If columns in the SELECT are not indexed, ``DISTINCT``
+will likely perform an ``ORDER BY`` on the rowset which can be expensive.
+By keeping the feature limited just to foreign keys which are hopefully
+indexed in any case, it's expected that the new defaults are reasonable.
+
+The feature also does not eliminate every possible dupe-row scenario; if
+a many-to-one is present elsewhere in the chain of joins, dupe rows may still
+be present.
+
+:ticket:`2836`
+
+.. _migration_2789:
+
+Backref handlers can now propagate more than one level deep
+-----------------------------------------------------------
+
+The mechanism by which attribute events pass along their "initiator", that is
+the object associated with the start of the event, has been changed; instead
+of a :class:`.AttributeImpl` being passed, a new object :class:`.attributes.Event`
+is passed instead; this object refers to the :class:`.AttributeImpl` as well as
+to an "operation token", representing if the operation is an append, remove,
+or replace operation.
+
+The attribute event system no longer looks at this "initiator" object in order to halt a
+recursive series of attribute events. Instead, the system of preventing endless
+recursion due to mutually-dependent backref handlers has been moved
+to the ORM backref event handlers specifically, which now take over the role
+of ensuring that a chain of mutually-dependent events (such as append to collection
+A.bs, set many-to-one attribute B.a in response) doesn't go into an endless recursion
+stream. The rationale here is that the backref system, given more detail and control
+over event propagation, can finally allow operations more than one level deep
+to occur; the typical scenario is when a collection append results in a many-to-one
+replacement operation, which in turn should cause the item to be removed from a
+previous collection::
+
+ class Parent(Base):
+ __tablename__ = 'parent'
+
+ id = Column(Integer, primary_key=True)
+ children = relationship("Child", backref="parent")
+
+ class Child(Base):
+ __tablename__ = 'child'
+
+ id = Column(Integer, primary_key=True)
+ parent_id = Column(ForeignKey('parent.id'))
+
+ p1 = Parent()
+ p2 = Parent()
+ c1 = Child()
+
+ p1.children.append(c1)
+
+ assert c1.parent is p1 # backref event establishes c1.parent as p1
+
+ p2.children.append(c1)
+
+ assert c1.parent is p2 # backref event establishes c1.parent as p2
+ assert c1 not in p1.children # second backref event removes c1 from p1.children
+
+Above, prior to this change, the ``c1`` object would still have been present
+in ``p1.children``, even though it is also present in ``p2.children`` at the
+same time; the backref handlers would have stopped at replacing ``c1.parent`` with
+``p2`` instead of ``p1``. In 0.9, using the more detailed :class:`.Event`
+object as well as letting the backref handlers make more detailed decisions about
+these objects, the propagation can continue onto removing ``c1`` from ``p1.children``
+while maintaining a check against the propagation from going into an endless
+recursive loop.
+
+End-user code which a. makes use of the :meth:`.AttributeEvents.set`,
+:meth:`.AttributeEvents.append`, or :meth:`.AttributeEvents.remove` events,
+and b. initiates further attribute modification operations as a result of these
+events may need to be modified to prevent recursive loops, as the attribute system
+no longer stops a chain of events from propagating endlessly in the absence of the backref
+event handlers. Additionally, code which depends upon the value of the ``initiator``
+will need to be adjusted to the new API, and furthermore must be ready for the
+value of ``initiator`` to change from its original value within a string of
+backref-initiated events, as the backref handlers may now swap in a
+new ``initiator`` value for some operations.
+
+:ticket:`2789`
+
+.. _change_2838:
+
+The typing system now handles the task of rendering "literal bind" values
+-------------------------------------------------------------------------
+
+A new method is added to :class:`.TypeEngine` :meth:`.TypeEngine.literal_processor`
+as well as :meth:`.TypeDecorator.process_literal_param` for :class:`.TypeDecorator`
+which take on the task of rendering so-called "inline literal parameters" - parameters
+that normally render as "bound" values, but are instead being rendered inline
+into the SQL statement due to the compiler configuration. This feature is used
+when generating DDL for constructs such as :class:`.CheckConstraint`, as well
+as by Alembic when using constructs such as ``op.inline_literal()``. Previously,
+a simple "isinstance" check checked for a few basic types, and the "bind processor"
+was used unconditionally, leading to such issues as strings being encoded into utf-8
+prematurely.
+
+Custom types written with :class:`.TypeDecorator` should continue to work in
+"inline literal" scenarios, as the :meth:`.TypeDecorator.process_literal_param`
+falls back to :meth:`.TypeDecorator.process_bind_param` by default, as these methods
+usually handle a data manipulation, not as much how the data is presented to the
+database. :meth:`.TypeDecorator.process_literal_param` can be specified to
+specifically produce a string representing how a value should be rendered
+into an inline DDL statement.
+
+:ticket:`2838`
+
+
+.. _change_2812:
+
+Schema identifiers now carry along their own quoting information
+---------------------------------------------------------------------
+
+This change simplifies the Core's usage of so-called "quote" flags, such
+as the ``quote`` flag passed to :class:`.Table` and :class:`.Column`. The flag
+is now internalized within the string name itself, which is now represented
+as an instance of :class:`.quoted_name`, a string subclass. The
+:class:`.IdentifierPreparer` now relies solely on the quoting preferences
+reported by the :class:`.quoted_name` object rather than checking for any
+explicit ``quote`` flags in most cases. The issue resolved here includes
+that various case-sensitive methods such as :meth:`.Engine.has_table` as well
+as similar methods within dialects now function with explicitly quoted names,
+without the need to complicate or introduce backwards-incompatible changes
+to those APIs (many of which are 3rd party) with the details of quoting flags -
+in particular, a wider range of identifiers now function correctly with the
+so-called "uppercase" backends like Oracle, Firebird, and DB2 (backends that
+store and report upon table and column names using all uppercase for case
+insensitive names).
+
+The :class:`.quoted_name` object is used internally as needed; however if
+other keywords require fixed quoting preferences, the class is available
+publicly.
+
+:ticket:`2812`
+
+.. _migration_2804:
+
+Improved rendering of Boolean constants, NULL constants, conjunctions
+----------------------------------------------------------------------
+
+New capabilities have been added to the :func:`.true` and :func:`.false`
+constants, in particular in conjunction with :func:`.and_` and :func:`.or_`
+functions as well as the behavior of the WHERE/HAVING clauses in conjunction
+with these types, boolean types overall, and the :func:`.null` constant.
+
+Starting with a table such as this::
+
+ from sqlalchemy import Table, Boolean, Integer, Column, MetaData
+
+ t1 = Table('t', MetaData(), Column('x', Boolean()), Column('y', Integer))
+
+A select construct will now render the boolean column as a binary expression
+on backends that don't feature ``true``/``false`` constant behavior::
+
+ >>> from sqlalchemy import select, and_, false, true
+ >>> from sqlalchemy.dialects import mysql, postgresql
+
+ >>> print select([t1]).where(t1.c.x).compile(dialect=mysql.dialect())
+ SELECT t.x, t.y FROM t WHERE t.x = 1
+
+The :func:`.and_` and :func:`.or_` constructs will now exhibit quasi
+"short circuit" behavior, that is truncating a rendered expression, when a
+:func:`.true` or :func:`.false` constant is present::
+
+ >>> print select([t1]).where(and_(t1.c.y > 5, false())).compile(
+ ... dialect=postgresql.dialect())
+ SELECT t.x, t.y FROM t WHERE false
+
+:func:`.true` can be used as the base to build up an expression::
+
+ >>> expr = true()
+ >>> expr = expr & (t1.c.y > 5)
+ >>> print select([t1]).where(expr)
+ SELECT t.x, t.y FROM t WHERE t.y > :y_1
+
+The boolean constants :func:`.true` and :func:`.false` themselves render as
+``1 = 1`` and ``0 = 1`` for a backend with no boolean constants::
+
+ >>> print select([t1]).where(and_(t1.c.y > 5, false())).compile(
+ ... dialect=mysql.dialect())
+ SELECT t.x, t.y FROM t WHERE 0 = 1
+
+Interpretation of ``None``, while not particularly valid SQL, is at least
+now consistent::
+
+ >>> print select([t1.c.x]).where(None)
+ SELECT t.x FROM t WHERE NULL
+
+ >>> print select([t1.c.x]).where(None).where(None)
+ SELECT t.x FROM t WHERE NULL AND NULL
+
+ >>> print select([t1.c.x]).where(and_(None, None))
+ SELECT t.x FROM t WHERE NULL AND NULL
+
+:ticket:`2804`
+
.. _migration_1068:
Label constructs can now render as their name alone in an ORDER BY
@@ -453,13 +1493,93 @@ And now renders as::
SELECT foo(t.c1) + t.c2 AS expr
FROM t ORDER BY expr
-The ORDER BY only renders the label if the label isn't further embedded into an expression within the ORDER BY, other than a simple ``ASC`` or ``DESC``.
+The ORDER BY only renders the label if the label isn't further
+embedded into an expression within the ORDER BY, other than a simple
+``ASC`` or ``DESC``.
-The above format works on all databases tested, but might have compatibility issues with older database versions (MySQL 4? Oracle 8? etc.). Based on user reports we can add rules
-that will disable the feature based on database version detection.
+The above format works on all databases tested, but might have
+compatibility issues with older database versions (MySQL 4? Oracle 8?
+etc.). Based on user reports we can add rules that will disable the
+feature based on database version detection.
:ticket:`1068`
+.. _migration_2848:
+
+``RowProxy`` now has tuple-sorting behavior
+-------------------------------------------
+
+The :class:`.RowProxy` object acts much like a tuple, but up until now
+would not sort as a tuple if a list of them were sorted using ``sorted()``.
+The ``__eq__()`` method now compares both sides as a tuple and also
+an ``__lt__()`` method has been added::
+
+ users.insert().execute(
+ dict(user_id=1, user_name='foo'),
+ dict(user_id=2, user_name='bar'),
+ dict(user_id=3, user_name='def'),
+ )
+
+ rows = users.select().order_by(users.c.user_name).execute().fetchall()
+
+ eq_(rows, [(2, 'bar'), (3, 'def'), (1, 'foo')])
+
+ eq_(sorted(rows), [(1, 'foo'), (2, 'bar'), (3, 'def')])
+
+:ticket:`2848`
+
+.. _migration_2850:
+
+A bindparam() construct with no type gets upgraded via copy when a type is available
+------------------------------------------------------------------------------------
+
+The logic which "upgrades" a :func:`.bindparam` construct to take on the
+type of the enclosing expression has been improved in two ways. First, the
+:func:`.bindparam` object is **copied** before the new type is assigned, so that
+the given :func:`.bindparam` is not mutated in place. Secondly, this same
+operation occurs when an :class:`.Insert` or :class:`.Update` construct is compiled,
+regarding the "values" that were set in the statement via the :meth:`.ValuesBase.values`
+method.
+
+If given an untyped :func:`.bindparam`::
+
+ bp = bindparam("some_col")
+
+If we use this parameter as follows::
+
+ expr = mytable.c.col == bp
+
+The type for ``bp`` remains as ``NullType``, however if ``mytable.c.col``
+is of type ``String``, then ``expr.right``, that is the right side of the
+binary expression, will take on the ``String`` type. Previously, ``bp`` itself
+would have been changed in place to have ``String`` as its type.
+
+Similarly, this operation occurs in an :class:`.Insert` or :class:`.Update`::
+
+ stmt = mytable.update().values(col=bp)
+
+Above, ``bp`` remains unchanged, but the ``String`` type will be used when
+the statement is executed, which we can see by examining the ``binds`` dictionary::
+
+ >>> compiled = stmt.compile()
+ >>> compiled.binds['some_col'].type
+ String
+
+The feature allows custom types to take their expected effect within INSERT/UPDATE
+statements without needing to explicitly specify those types within every
+:func:`.bindparam` expression.
+
+The potentially backwards-incompatible changes involve two unlikely
+scenarios. Since the bound parameter is
+**cloned**, users should not be relying upon making in-place changes to a
+:func:`.bindparam` construct once created. Additionally, code which uses
+:func:`.bindparam` within an :class:`.Insert` or :class:`.Update` statement
+which is relying on the fact that the :func:`.bindparam` is not typed according
+to the column being assigned towards will no longer function in that way.
+
+:ticket:`2850`
+
+
.. _migration_1765:
Columns can reliably get their type from a column referred to via ForeignKey
@@ -547,6 +1667,7 @@ Scenarios which now work correctly include:
:ticket:`1765`
+
Dialect Changes
===============
diff --git a/doc/build/conf.py b/doc/build/conf.py
index e7c116c18..1546177a6 100644
--- a/doc/build/conf.py
+++ b/doc/build/conf.py
@@ -18,7 +18,7 @@ import os
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../../lib'))
-sys.path.insert(0, os.path.abspath('../../examples'))
+sys.path.insert(0, os.path.abspath('../..')) # examples
sys.path.insert(0, os.path.abspath('.'))
import sqlalchemy
@@ -34,10 +34,12 @@ import sqlalchemy
extensions = [
'sphinx.ext.autodoc',
'builder.autodoc_mods',
- 'builder.changelog',
+ 'changelog',
+ 'sphinx_paramlinks',
'builder.dialect_info',
'builder.mako',
'builder.sqlformatter',
+ 'builder.viewsource',
]
# Add any paths that contain templates here, relative to this directory.
@@ -62,7 +64,13 @@ changelog_inner_tag_sort = ["feature", "bug", "moved", "changed", "removed"]
# how to render changelog links
changelog_render_ticket = "http://www.sqlalchemy.org/trac/ticket/%s"
-changelog_render_pullreq = "https://bitbucket.org/sqlalchemy/sqlalchemy/pull-request/%s"
+
+changelog_render_pullreq = {
+ "bitbucket": "https://bitbucket.org/zzzeek/sqlalchemy/pull-request/%s",
+ "default": "https://bitbucket.org/zzzeek/sqlalchemy/pull-request/%s",
+ "github": "https://github.com/zzzeek/sqlalchemy/pull/%s",
+}
+
changelog_render_changeset = "http://www.sqlalchemy.org/trac/changeset/%s"
@@ -74,7 +82,7 @@ master_doc = 'contents'
# General information about the project.
project = u'SQLAlchemy'
-copyright = u'2007-2013, the SQLAlchemy authors and contributors'
+copyright = u'2007-2014, the SQLAlchemy authors and contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -83,9 +91,9 @@ copyright = u'2007-2013, the SQLAlchemy authors and contributors'
# The short X.Y version.
version = "0.9"
# The full version, including alpha/beta/rc tags.
-release = "0.9.0"
+release = "0.9.1"
-release_date = "(not released)"
+release_date = "January 5, 2014"
site_base = "http://www.sqlalchemy.org"
@@ -280,7 +288,7 @@ man_pages = [
epub_title = u'SQLAlchemy'
epub_author = u'SQLAlchemy authors'
epub_publisher = u'SQLAlchemy authors'
-epub_copyright = u'2013, SQLAlchemy authors'
+epub_copyright = u'2007-2014, SQLAlchemy authors'
# The language of the text. It defaults to the language option
# or en if the language is not set.
diff --git a/doc/build/copyright.rst b/doc/build/copyright.rst
index 082beb3a3..659ce6c74 100644
--- a/doc/build/copyright.rst
+++ b/doc/build/copyright.rst
@@ -1,10 +1,12 @@
+:orphan:
+
====================
Appendix: Copyright
====================
This is the MIT license: `<http://www.opensource.org/licenses/mit-license.php>`_
-Copyright (c) 2005-2013 Michael Bayer and contributors.
+Copyright (c) 2005-2014 Michael Bayer and contributors.
SQLAlchemy is a trademark of Michael Bayer.
Permission is hereby granted, free of charge, to any person obtaining a copy of this
diff --git a/doc/build/core/connections.rst b/doc/build/core/connections.rst
index b55ca2a82..c05bf18d0 100644
--- a/doc/build/core/connections.rst
+++ b/doc/build/core/connections.rst
@@ -495,19 +495,15 @@ Connection / Engine API
=======================
.. autoclass:: Connection
- :show-inheritance:
:members:
.. autoclass:: Connectable
- :show-inheritance:
:members:
.. autoclass:: Engine
- :show-inheritance:
:members:
.. autoclass:: NestedTransaction
- :show-inheritance:
:members:
.. autoclass:: sqlalchemy.engine.ResultProxy
@@ -517,10 +513,8 @@ Connection / Engine API
:members:
.. autoclass:: Transaction
- :show-inheritance:
:members:
.. autoclass:: TwoPhaseTransaction
- :show-inheritance:
:members:
diff --git a/doc/build/core/constraints.rst b/doc/build/core/constraints.rst
new file mode 100644
index 000000000..13ead6fbf
--- /dev/null
+++ b/doc/build/core/constraints.rst
@@ -0,0 +1,409 @@
+.. _metadata_constraints_toplevel:
+.. _metadata_constraints:
+
+.. module:: sqlalchemy.schema
+
+=================================
+Defining Constraints and Indexes
+=================================
+
+.. _metadata_foreignkeys:
+
+This section will discuss SQL :term:`constraints` and indexes. In SQLAlchemy
+the key classes include :class:`.ForeignKeyConstraint` and :class:`.Index`.
+
+Defining Foreign Keys
+---------------------
+
+A *foreign key* in SQL is a table-level construct that constrains one or more
+columns in that table to only allow values that are present in a different set
+of columns, typically but not always located on a different table. We call the
+columns which are constrained the *foreign key* columns and the columns which
+they are constrained towards the *referenced* columns. The referenced columns
+almost always define the primary key for their owning table, though there are
+exceptions to this. The foreign key is the "joint" that connects together
+pairs of rows which have a relationship with each other, and SQLAlchemy
+assigns very deep importance to this concept in virtually every area of its
+operation.
+
+In SQLAlchemy as well as in DDL, foreign key constraints can be defined as
+additional attributes within the table clause, or for single-column foreign
+keys they may optionally be specified within the definition of a single
+column. The single column foreign key is more common, and at the column level
+is specified by constructing a :class:`~sqlalchemy.schema.ForeignKey` object
+as an argument to a :class:`~sqlalchemy.schema.Column` object::
+
+ user_preference = Table('user_preference', metadata,
+ Column('pref_id', Integer, primary_key=True),
+ Column('user_id', Integer, ForeignKey("user.user_id"), nullable=False),
+ Column('pref_name', String(40), nullable=False),
+ Column('pref_value', String(100))
+ )
+
+Above, we define a new table ``user_preference`` for which each row must
+contain a value in the ``user_id`` column that also exists in the ``user``
+table's ``user_id`` column.
+
+The argument to :class:`~sqlalchemy.schema.ForeignKey` is most commonly a
+string of the form *<tablename>.<columnname>*, or for a table in a remote
+schema or "owner" of the form *<schemaname>.<tablename>.<columnname>*. It may
+also be an actual :class:`~sqlalchemy.schema.Column` object, which as we'll
+see later is accessed from an existing :class:`~sqlalchemy.schema.Table`
+object via its ``c`` collection::
+
+ ForeignKey(user.c.user_id)
+
+The advantage to using a string is that the in-python linkage between ``user``
+and ``user_preference`` is resolved only when first needed, so that table
+objects can be easily spread across multiple modules and defined in any order.
+
+Foreign keys may also be defined at the table level, using the
+:class:`~sqlalchemy.schema.ForeignKeyConstraint` object. This object can
+describe a single- or multi-column foreign key. A multi-column foreign key is
+known as a *composite* foreign key, and almost always references a table that
+has a composite primary key. Below we define a table ``invoice`` which has a
+composite primary key::
+
+ invoice = Table('invoice', metadata,
+ Column('invoice_id', Integer, primary_key=True),
+ Column('ref_num', Integer, primary_key=True),
+ Column('description', String(60), nullable=False)
+ )
+
+And then a table ``invoice_item`` with a composite foreign key referencing
+``invoice``::
+
+ invoice_item = Table('invoice_item', metadata,
+ Column('item_id', Integer, primary_key=True),
+ Column('item_name', String(60), nullable=False),
+ Column('invoice_id', Integer, nullable=False),
+ Column('ref_num', Integer, nullable=False),
+ ForeignKeyConstraint(['invoice_id', 'ref_num'], ['invoice.invoice_id', 'invoice.ref_num'])
+ )
+
+It's important to note that the
+:class:`~sqlalchemy.schema.ForeignKeyConstraint` is the only way to define a
+composite foreign key. While we could also have placed individual
+:class:`~sqlalchemy.schema.ForeignKey` objects on both the
+``invoice_item.invoice_id`` and ``invoice_item.ref_num`` columns, SQLAlchemy
+would not be aware that these two values should be paired together - it would
+be two individual foreign key constraints instead of a single composite
+foreign key referencing two columns.
+
+.. _use_alter:
+
+Creating/Dropping Foreign Key Constraints via ALTER
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In all the above examples, the :class:`~sqlalchemy.schema.ForeignKey` object
+causes the "REFERENCES" keyword to be added inline to a column definition
+within a "CREATE TABLE" statement when
+:func:`~sqlalchemy.schema.MetaData.create_all` is issued, and
+:class:`~sqlalchemy.schema.ForeignKeyConstraint` invokes the "CONSTRAINT"
+keyword inline with "CREATE TABLE". There are some cases where this is
+undesirable, particularly when two tables reference each other mutually, each
+with a foreign key referencing the other. In such a situation at least one of
+the foreign key constraints must be generated after both tables have been
+built. To support such a scheme, :class:`~sqlalchemy.schema.ForeignKey` and
+:class:`~sqlalchemy.schema.ForeignKeyConstraint` offer the flag
+``use_alter=True``. When using this flag, the constraint will be generated
+using a definition similar to "ALTER TABLE <tablename> ADD CONSTRAINT <name>
+...". Since a name is required, the ``name`` attribute must also be specified.
+For example::
+
+ node = Table('node', meta,
+ Column('node_id', Integer, primary_key=True),
+ Column('primary_element', Integer,
+ ForeignKey('element.element_id', use_alter=True, name='fk_node_element_id')
+ )
+ )
+
+ element = Table('element', meta,
+ Column('element_id', Integer, primary_key=True),
+ Column('parent_node_id', Integer),
+ ForeignKeyConstraint(
+ ['parent_node_id'],
+ ['node.node_id'],
+ use_alter=True,
+ name='fk_element_parent_node_id'
+ )
+ )
+
+ON UPDATE and ON DELETE
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Most databases support *cascading* of foreign key values, that is the when a
+parent row is updated the new value is placed in child rows, or when the
+parent row is deleted all corresponding child rows are set to null or deleted.
+In data definition language these are specified using phrases like "ON UPDATE
+CASCADE", "ON DELETE CASCADE", and "ON DELETE SET NULL", corresponding to
+foreign key constraints. The phrase after "ON UPDATE" or "ON DELETE" may also
+allow other phrases that are specific to the database in use. The
+:class:`~sqlalchemy.schema.ForeignKey` and
+:class:`~sqlalchemy.schema.ForeignKeyConstraint` objects support the
+generation of this clause via the ``onupdate`` and ``ondelete`` keyword
+arguments. The value is any string which will be output after the appropriate
+"ON UPDATE" or "ON DELETE" phrase::
+
+ child = Table('child', meta,
+ Column('id', Integer,
+ ForeignKey('parent.id', onupdate="CASCADE", ondelete="CASCADE"),
+ primary_key=True
+ )
+ )
+
+ composite = Table('composite', meta,
+ Column('id', Integer, primary_key=True),
+ Column('rev_id', Integer),
+ Column('note_id', Integer),
+ ForeignKeyConstraint(
+ ['rev_id', 'note_id'],
+ ['revisions.id', 'revisions.note_id'],
+ onupdate="CASCADE", ondelete="SET NULL"
+ )
+ )
+
+Note that these clauses are not supported on SQLite, and require ``InnoDB``
+tables when used with MySQL. They may also not be supported on other
+databases.
+
+
+UNIQUE Constraint
+-----------------
+
+Unique constraints can be created anonymously on a single column using the
+``unique`` keyword on :class:`~sqlalchemy.schema.Column`. Explicitly named
+unique constraints and/or those with multiple columns are created via the
+:class:`~sqlalchemy.schema.UniqueConstraint` table-level construct.
+
+.. sourcecode:: python+sql
+
+ from sqlalchemy import UniqueConstraint
+
+ meta = MetaData()
+ mytable = Table('mytable', meta,
+
+ # per-column anonymous unique constraint
+ Column('col1', Integer, unique=True),
+
+ Column('col2', Integer),
+ Column('col3', Integer),
+
+ # explicit/composite unique constraint. 'name' is optional.
+ UniqueConstraint('col2', 'col3', name='uix_1')
+ )
+
+CHECK Constraint
+----------------
+
+Check constraints can be named or unnamed and can be created at the Column or
+Table level, using the :class:`~sqlalchemy.schema.CheckConstraint` construct.
+The text of the check constraint is passed directly through to the database,
+so there is limited "database independent" behavior. Column level check
+constraints generally should only refer to the column to which they are
+placed, while table level constraints can refer to any columns in the table.
+
+Note that some databases, such as MySQL, do not actively support check
+constraints.
+
+.. sourcecode:: python+sql
+
+ from sqlalchemy import CheckConstraint
+
+ meta = MetaData()
+ mytable = Table('mytable', meta,
+
+ # per-column CHECK constraint
+ Column('col1', Integer, CheckConstraint('col1>5')),
+
+ Column('col2', Integer),
+ Column('col3', Integer),
+
+ # table level CHECK constraint. 'name' is optional.
+ CheckConstraint('col2 > col3 + 5', name='check1')
+ )
+
+ {sql}mytable.create(engine)
+ CREATE TABLE mytable (
+ col1 INTEGER CHECK (col1>5),
+ col2 INTEGER,
+ col3 INTEGER,
+ CONSTRAINT check1 CHECK (col2 > col3 + 5)
+ ){stop}
+
+PRIMARY KEY Constraint
+----------------------
+
+The primary key constraint of any :class:`.Table` object is implicitly
+present, based on the :class:`.Column` objects that are marked with the
+:paramref:`.Column.primary_key` flag. The :class:`.PrimaryKeyConstraint`
+object provides explicit access to this constraint, which includes the
+option of being configured directly::
+
+ from sqlalchemy import PrimaryKeyConstraint
+
+ my_table = Table('mytable', metadata,
+ Column('id', Integer),
+ Column('version_id', Integer),
+ Column('data', String(50)),
+ PrimaryKeyConstraint('id', 'version_id', name='mytable_pk')
+ )
+
+.. seealso::
+
+ :class:`.PrimaryKeyConstraint` - detailed API documentation.
+
+Setting up Constraints when using the Declarative ORM Extension
+----------------------------------------------------------------
+
+The :class:`.Table` is the SQLAlchemy Core construct that allows one to define
+table metadata, which among other things can be used by the SQLAlchemy ORM
+as a target to map a class. The :ref:`Declarative <declarative_toplevel>`
+extension allows the :class:`.Table` object to be created automatically, given
+the contents of the table primarily as a mapping of :class:`.Column` objects.
+
+To apply table-level constraint objects such as :class:`.ForeignKeyConstraint`
+to a table defined using Declarative, use the ``__table_args__`` attribute,
+described at :ref:`declarative_table_args`.
+
+Constraints API
+---------------
+.. autoclass:: Constraint
+
+
+.. autoclass:: CheckConstraint
+
+
+.. autoclass:: ColumnCollectionConstraint
+
+
+.. autoclass:: ForeignKey
+ :members:
+
+
+.. autoclass:: ForeignKeyConstraint
+ :members:
+
+
+.. autoclass:: PrimaryKeyConstraint
+
+
+.. autoclass:: UniqueConstraint
+
+
+.. _schema_indexes:
+
+Indexes
+-------
+
+Indexes can be created anonymously (using an auto-generated name ``ix_<column
+label>``) for a single column using the inline ``index`` keyword on
+:class:`~sqlalchemy.schema.Column`, which also modifies the usage of
+``unique`` to apply the uniqueness to the index itself, instead of adding a
+separate UNIQUE constraint. For indexes with specific names or which encompass
+more than one column, use the :class:`~sqlalchemy.schema.Index` construct,
+which requires a name.
+
+Below we illustrate a :class:`~sqlalchemy.schema.Table` with several
+:class:`~sqlalchemy.schema.Index` objects associated. The DDL for "CREATE
+INDEX" is issued right after the create statements for the table:
+
+.. sourcecode:: python+sql
+
+ meta = MetaData()
+ mytable = Table('mytable', meta,
+ # an indexed column, with index "ix_mytable_col1"
+ Column('col1', Integer, index=True),
+
+ # a uniquely indexed column with index "ix_mytable_col2"
+ Column('col2', Integer, index=True, unique=True),
+
+ Column('col3', Integer),
+ Column('col4', Integer),
+
+ Column('col5', Integer),
+ Column('col6', Integer),
+ )
+
+ # place an index on col3, col4
+ Index('idx_col34', mytable.c.col3, mytable.c.col4)
+
+ # place a unique index on col5, col6
+ Index('myindex', mytable.c.col5, mytable.c.col6, unique=True)
+
+ {sql}mytable.create(engine)
+ CREATE TABLE mytable (
+ col1 INTEGER,
+ col2 INTEGER,
+ col3 INTEGER,
+ col4 INTEGER,
+ col5 INTEGER,
+ col6 INTEGER
+ )
+ CREATE INDEX ix_mytable_col1 ON mytable (col1)
+ CREATE UNIQUE INDEX ix_mytable_col2 ON mytable (col2)
+ CREATE UNIQUE INDEX myindex ON mytable (col5, col6)
+ CREATE INDEX idx_col34 ON mytable (col3, col4){stop}
+
+Note in the example above, the :class:`.Index` construct is created
+externally to the table to which it corresponds, using :class:`.Column`
+objects directly. :class:`.Index` also supports
+"inline" definition inside the :class:`.Table`, using string names to
+identify columns::
+
+ meta = MetaData()
+ mytable = Table('mytable', meta,
+ Column('col1', Integer),
+
+ Column('col2', Integer),
+
+ Column('col3', Integer),
+ Column('col4', Integer),
+
+ # place an index on col1, col2
+ Index('idx_col12', 'col1', 'col2'),
+
+ # place a unique index on col3, col4
+ Index('idx_col34', 'col3', 'col4', unique=True)
+ )
+
+.. versionadded:: 0.7
+ Support of "inline" definition inside the :class:`.Table`
+ for :class:`.Index`\ .
+
+The :class:`~sqlalchemy.schema.Index` object also supports its own ``create()`` method:
+
+.. sourcecode:: python+sql
+
+ i = Index('someindex', mytable.c.col5)
+ {sql}i.create(engine)
+ CREATE INDEX someindex ON mytable (col5){stop}
+
+.. _schema_indexes_functional:
+
+Functional Indexes
+~~~~~~~~~~~~~~~~~~~
+
+:class:`.Index` supports SQL and function expressions, as supported by the
+target backend. To create an index against a column using a descending
+value, the :meth:`.ColumnElement.desc` modifier may be used::
+
+ from sqlalchemy import Index
+
+ Index('someindex', mytable.c.somecol.desc())
+
+Or with a backend that supports functional indexes such as Postgresql,
+a "case insensitive" index can be created using the ``lower()`` function::
+
+ from sqlalchemy import func, Index
+
+ Index('someindex', func.lower(mytable.c.somecol))
+
+.. versionadded:: 0.8 :class:`.Index` supports SQL expressions and functions
+ as well as plain columns.
+
+Index API
+---------
+
+.. autoclass:: Index
+ :members:
diff --git a/doc/build/core/ddl.rst b/doc/build/core/ddl.rst
new file mode 100644
index 000000000..cee6f876e
--- /dev/null
+++ b/doc/build/core/ddl.rst
@@ -0,0 +1,287 @@
+.. _metadata_ddl_toplevel:
+.. _metadata_ddl:
+.. module:: sqlalchemy.schema
+
+Customizing DDL
+===============
+
+In the preceding sections we've discussed a variety of schema constructs
+including :class:`~sqlalchemy.schema.Table`,
+:class:`~sqlalchemy.schema.ForeignKeyConstraint`,
+:class:`~sqlalchemy.schema.CheckConstraint`, and
+:class:`~sqlalchemy.schema.Sequence`. Throughout, we've relied upon the
+``create()`` and :func:`~sqlalchemy.schema.MetaData.create_all` methods of
+:class:`~sqlalchemy.schema.Table` and :class:`~sqlalchemy.schema.MetaData` in
+order to issue data definition language (DDL) for all constructs. When issued,
+a pre-determined order of operations is invoked, and DDL to create each table
+is created unconditionally including all constraints and other objects
+associated with it. For more complex scenarios where database-specific DDL is
+required, SQLAlchemy offers two techniques which can be used to add any DDL
+based on any condition, either accompanying the standard generation of tables
+or by itself.
+
+.. _schema_ddl_sequences:
+
+Controlling DDL Sequences
+-------------------------
+
+The ``sqlalchemy.schema`` package contains SQL expression constructs that
+provide DDL expressions. For example, to produce a ``CREATE TABLE`` statement:
+
+.. sourcecode:: python+sql
+
+ from sqlalchemy.schema import CreateTable
+ {sql}engine.execute(CreateTable(mytable))
+ CREATE TABLE mytable (
+ col1 INTEGER,
+ col2 INTEGER,
+ col3 INTEGER,
+ col4 INTEGER,
+ col5 INTEGER,
+ col6 INTEGER
+ ){stop}
+
+Above, the :class:`~sqlalchemy.schema.CreateTable` construct works like any
+other expression construct (such as ``select()``, ``table.insert()``, etc.). A
+full reference of available constructs is in :ref:`schema_api_ddl`.
+
+The DDL constructs all extend a common base class which provides the
+capability to be associated with an individual
+:class:`~sqlalchemy.schema.Table` or :class:`~sqlalchemy.schema.MetaData`
+object, to be invoked upon create/drop events. Consider the example of a table
+which contains a CHECK constraint:
+
+.. sourcecode:: python+sql
+
+ users = Table('users', metadata,
+ Column('user_id', Integer, primary_key=True),
+ Column('user_name', String(40), nullable=False),
+ CheckConstraint('length(user_name) >= 8',name="cst_user_name_length")
+ )
+
+ {sql}users.create(engine)
+ CREATE TABLE users (
+ user_id SERIAL NOT NULL,
+ user_name VARCHAR(40) NOT NULL,
+ PRIMARY KEY (user_id),
+ CONSTRAINT cst_user_name_length CHECK (length(user_name) >= 8)
+ ){stop}
+
+The above table contains a column "user_name" which is subject to a CHECK
+constraint that validates that the length of the string is at least eight
+characters. When a ``create()`` is issued for this table, DDL for the
+:class:`~sqlalchemy.schema.CheckConstraint` will also be issued inline within
+the table definition.
+
+The :class:`~sqlalchemy.schema.CheckConstraint` construct can also be
+constructed externally and associated with the
+:class:`~sqlalchemy.schema.Table` afterwards::
+
+ constraint = CheckConstraint('length(user_name) >= 8',name="cst_user_name_length")
+ users.append_constraint(constraint)
+
+So far, the effect is the same. However, if we create DDL elements
+corresponding to the creation and removal of this constraint, and associate
+them with the :class:`.Table` as events, these new events
+will take over the job of issuing DDL for the constraint. Additionally, the
+constraint will be added via ALTER:
+
+.. sourcecode:: python+sql
+
+ from sqlalchemy import event
+
+ event.listen(
+ users,
+ "after_create",
+ AddConstraint(constraint)
+ )
+ event.listen(
+ users,
+ "before_drop",
+ DropConstraint(constraint)
+ )
+
+ {sql}users.create(engine)
+ CREATE TABLE users (
+ user_id SERIAL NOT NULL,
+ user_name VARCHAR(40) NOT NULL,
+ PRIMARY KEY (user_id)
+ )
+
+ ALTER TABLE users ADD CONSTRAINT cst_user_name_length CHECK (length(user_name) >= 8){stop}
+
+ {sql}users.drop(engine)
+ ALTER TABLE users DROP CONSTRAINT cst_user_name_length
+ DROP TABLE users{stop}
+
+The real usefulness of the above becomes clearer once we illustrate the
+:meth:`.DDLElement.execute_if` method. This method returns a modified form of
+the DDL callable which will filter on criteria before responding to a
+received event. It accepts a parameter ``dialect``, which is the string
+name of a dialect or a tuple of such, which will limit the execution of the
+item to just those dialects. It also accepts a ``callable_`` parameter which
+may reference a Python callable which will be invoked upon event reception,
+returning ``True`` or ``False`` indicating if the event should proceed.
+
+If our :class:`~sqlalchemy.schema.CheckConstraint` was only supported by
+Postgresql and not other databases, we could limit its usage to just that dialect::
+
+ event.listen(
+ users,
+ 'after_create',
+ AddConstraint(constraint).execute_if(dialect='postgresql')
+ )
+ event.listen(
+ users,
+ 'before_drop',
+ DropConstraint(constraint).execute_if(dialect='postgresql')
+ )
+
+Or to any set of dialects::
+
+ event.listen(
+ users,
+ "after_create",
+ AddConstraint(constraint).execute_if(dialect=('postgresql', 'mysql'))
+ )
+ event.listen(
+ users,
+ "before_drop",
+ DropConstraint(constraint).execute_if(dialect=('postgresql', 'mysql'))
+ )
+
+When using a callable, the callable is passed the ddl element, the
+:class:`.Table` or :class:`.MetaData`
+object whose "create" or "drop" event is in progress, and the
+:class:`.Connection` object being used for the
+operation, as well as additional information as keyword arguments. The
+callable can perform checks, such as whether or not a given item already
+exists. Below we define ``should_create()`` and ``should_drop()`` callables
+that check for the presence of our named constraint:
+
+.. sourcecode:: python+sql
+
+ def should_create(ddl, target, connection, **kw):
+ row = connection.execute("select conname from pg_constraint where conname='%s'" % ddl.element.name).scalar()
+ return not bool(row)
+
+ def should_drop(ddl, target, connection, **kw):
+ return not should_create(ddl, target, connection, **kw)
+
+ event.listen(
+ users,
+ "after_create",
+ AddConstraint(constraint).execute_if(callable_=should_create)
+ )
+ event.listen(
+ users,
+ "before_drop",
+ DropConstraint(constraint).execute_if(callable_=should_drop)
+ )
+
+ {sql}users.create(engine)
+ CREATE TABLE users (
+ user_id SERIAL NOT NULL,
+ user_name VARCHAR(40) NOT NULL,
+ PRIMARY KEY (user_id)
+ )
+
+ select conname from pg_constraint where conname='cst_user_name_length'
+ ALTER TABLE users ADD CONSTRAINT cst_user_name_length CHECK (length(user_name) >= 8){stop}
+
+ {sql}users.drop(engine)
+ select conname from pg_constraint where conname='cst_user_name_length'
+ ALTER TABLE users DROP CONSTRAINT cst_user_name_length
+ DROP TABLE users{stop}
+
+Custom DDL
+----------
+
+Custom DDL phrases are most easily achieved using the
+:class:`~sqlalchemy.schema.DDL` construct. This construct works like all the
+other DDL elements except it accepts a string which is the text to be emitted:
+
+.. sourcecode:: python+sql
+
+ event.listen(
+ metadata,
+ "after_create",
+ DDL("ALTER TABLE users ADD CONSTRAINT "
+ "cst_user_name_length "
+ " CHECK (length(user_name) >= 8)")
+ )
+
+A more comprehensive method of creating libraries of DDL constructs is to use
+custom compilation - see :ref:`sqlalchemy.ext.compiler_toplevel` for
+details.
+
+.. _schema_api_ddl:
+
+DDL Expression Constructs API
+-----------------------------
+
+.. autoclass:: DDLElement
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: DDL
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: CreateTable
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: DropTable
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: CreateColumn
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: CreateSequence
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: DropSequence
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: CreateIndex
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: DropIndex
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: AddConstraint
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: DropConstraint
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: CreateSchema
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: DropSchema
+ :members:
+ :undoc-members:
+
+
diff --git a/doc/build/core/defaults.rst b/doc/build/core/defaults.rst
new file mode 100644
index 000000000..166273c18
--- /dev/null
+++ b/doc/build/core/defaults.rst
@@ -0,0 +1,345 @@
+.. _metadata_defaults_toplevel:
+.. _metadata_defaults:
+.. module:: sqlalchemy.schema
+
+Column Insert/Update Defaults
+==============================
+
+SQLAlchemy provides a very rich featureset regarding column level events which
+take place during INSERT and UPDATE statements. Options include:
+
+* Scalar values used as defaults during INSERT and UPDATE operations
+* Python functions which execute upon INSERT and UPDATE operations
+* SQL expressions which are embedded in INSERT statements (or in some cases execute beforehand)
+* SQL expressions which are embedded in UPDATE statements
+* Server side default values used during INSERT
+* Markers for server-side triggers used during UPDATE
+
+The general rule for all insert/update defaults is that they only take effect
+if no value for a particular column is passed as an ``execute()`` parameter;
+otherwise, the given value is used.
+
+Scalar Defaults
+---------------
+
+The simplest kind of default is a scalar value used as the default value of a column::
+
+ Table("mytable", meta,
+ Column("somecolumn", Integer, default=12)
+ )
+
+Above, the value "12" will be bound as the column value during an INSERT if no
+other value is supplied.
+
+A scalar value may also be associated with an UPDATE statement, though this is
+not very common (as UPDATE statements are usually looking for dynamic
+defaults)::
+
+ Table("mytable", meta,
+ Column("somecolumn", Integer, onupdate=25)
+ )
+
+
+Python-Executed Functions
+-------------------------
+
+The ``default`` and ``onupdate`` keyword arguments also accept Python
+functions. These functions are invoked at the time of insert or update if no
+other value for that column is supplied, and the value returned is used for
+the column's value. Below illustrates a crude "sequence" that assigns an
+incrementing counter to a primary key column::
+
+ # a function which counts upwards
+ i = 0
+ def mydefault():
+ global i
+ i += 1
+ return i
+
+ t = Table("mytable", meta,
+ Column('id', Integer, primary_key=True, default=mydefault),
+ )
+
+It should be noted that for real "incrementing sequence" behavior, the
+built-in capabilities of the database should normally be used, which may
+include sequence objects or other autoincrementing capabilities. For primary
+key columns, SQLAlchemy will in most cases use these capabilities
+automatically. See the API documentation for
+:class:`~sqlalchemy.schema.Column` including the ``autoincrement`` flag, as
+well as the section on :class:`~sqlalchemy.schema.Sequence` later in this
+chapter for background on standard primary key generation techniques.
+
+To illustrate onupdate, we assign the Python ``datetime`` function ``now`` to
+the ``onupdate`` attribute::
+
+ import datetime
+
+ t = Table("mytable", meta,
+ Column('id', Integer, primary_key=True),
+
+ # define 'last_updated' to be populated with datetime.now()
+ Column('last_updated', DateTime, onupdate=datetime.datetime.now),
+ )
+
+When an update statement executes and no value is passed for ``last_updated``,
+the ``datetime.datetime.now()`` Python function is executed and its return
+value used as the value for ``last_updated``. Notice that we provide ``now``
+as the function itself without calling it (i.e. there are no parentheses
+following) - SQLAlchemy will execute the function at the time the statement
+executes.
+
+Context-Sensitive Default Functions
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The Python functions used by ``default`` and ``onupdate`` may also make use of
+the current statement's context in order to determine a value. The `context`
+of a statement is an internal SQLAlchemy object which contains all information
+about the statement being executed, including its source expression, the
+parameters associated with it and the cursor. The typical use case for this
+context with regards to default generation is to have access to the other
+values being inserted or updated on the row. To access the context, provide a
+function that accepts a single ``context`` argument::
+
+ def mydefault(context):
+ return context.current_parameters['counter'] + 12
+
+ t = Table('mytable', meta,
+ Column('counter', Integer),
+ Column('counter_plus_twelve', Integer, default=mydefault, onupdate=mydefault)
+ )
+
+Above we illustrate a default function which will execute for all INSERT and
+UPDATE statements where a value for ``counter_plus_twelve`` was otherwise not
+provided, and the value will be that of whatever value is present in the
+execution for the ``counter`` column, plus the number 12.
+
+While the context object passed to the default function has many attributes,
+the ``current_parameters`` member is a special member provided only during the
+execution of a default function for the purposes of deriving defaults from its
+existing values. For a single statement that is executing many sets of bind
+parameters, the user-defined function is called for each set of parameters,
+and ``current_parameters`` will be provided with each individual parameter set
+for each execution.
+
+SQL Expressions
+---------------
+
+The "default" and "onupdate" keywords may also be passed SQL expressions,
+including select statements or direct function calls::
+
+ t = Table("mytable", meta,
+ Column('id', Integer, primary_key=True),
+
+ # define 'create_date' to default to now()
+ Column('create_date', DateTime, default=func.now()),
+
+ # define 'key' to pull its default from the 'keyvalues' table
+        Column('key', String(20), default=keyvalues.select(keyvalues.c.type == 'type1', limit=1)),
+
+ # define 'last_modified' to use the current_timestamp SQL function on update
+ Column('last_modified', DateTime, onupdate=func.utc_timestamp())
+ )
+
+Above, the ``create_date`` column will be populated with the result of the
+``now()`` SQL function (which, depending on backend, compiles into ``NOW()``
+or ``CURRENT_TIMESTAMP`` in most cases) during an INSERT statement, and the
+``key`` column with the result of a SELECT subquery from another table. The
+``last_modified`` column will be populated with the value of
+``UTC_TIMESTAMP()``, a function specific to MySQL, when an UPDATE statement is
+emitted for this table.
+
+Note that when using ``func`` functions, unlike when using Python `datetime`
+functions we *do* call the function, i.e. with parentheses "()" - this is
+because what we want in this case is the return value of the function, which
+is the SQL expression construct that will be rendered into the INSERT or
+UPDATE statement.
+
+The above SQL functions are usually executed "inline" with the INSERT or
+UPDATE statement being executed, meaning, a single statement is executed which
+embeds the given expressions or subqueries within the VALUES or SET clause of
+the statement. In some cases, however, the function is "pre-executed" in a
+SELECT statement of its own beforehand. This happens when all of the following
+are true:
+
+* the column is a primary key column
+* the database dialect does not support a usable ``cursor.lastrowid`` accessor
+ (or equivalent); this currently includes PostgreSQL, Oracle, and Firebird, as
+ well as some MySQL dialects.
+* the dialect does not support the "RETURNING" clause or similar, or the
+ ``implicit_returning`` flag is set to ``False`` for the dialect. Dialects
+ which support RETURNING currently include Postgresql, Oracle, Firebird, and
+ MS-SQL.
+* the statement is a single execution, i.e. only supplies one set of
+ parameters and doesn't use "executemany" behavior
+* the ``inline=True`` flag is not set on the
+ :class:`~sqlalchemy.sql.expression.Insert()` or
+ :class:`~sqlalchemy.sql.expression.Update()` construct, and the statement has
+ not defined an explicit `returning()` clause.
+
+Whether or not the default generation clause "pre-executes" is not something
+that normally needs to be considered, unless it is being addressed for
+performance reasons.
+
+When the statement is executed with a single set of parameters (that is, it is
+not an "executemany" style execution), the returned
+:class:`~sqlalchemy.engine.ResultProxy` will contain a collection
+accessible via ``result.postfetch_cols()`` which contains a list of all
+:class:`~sqlalchemy.schema.Column` objects which had an inline-executed
+default. Similarly, all parameters which were bound to the statement,
+including all Python and SQL expressions which were pre-executed, are present
+in the ``last_inserted_params()`` or ``last_updated_params()`` collections on
+:class:`~sqlalchemy.engine.ResultProxy`. The ``inserted_primary_key``
+collection contains a list of primary key values for the row inserted (a list
+so that single-column and composite-column primary keys are represented in the
+same format).
+
+Server Side Defaults
+--------------------
+
+A variant on the SQL expression default is the ``server_default``, which gets
+placed in the CREATE TABLE statement during a ``create()`` operation:
+
+.. sourcecode:: python+sql
+
+ t = Table('test', meta,
+ Column('abc', String(20), server_default='abc'),
+ Column('created_at', DateTime, server_default=text("sysdate"))
+ )
+
+A create call for the above table will produce::
+
+ CREATE TABLE test (
+ abc varchar(20) default 'abc',
+ created_at datetime default sysdate
+ )
+
+The behavior of ``server_default`` is similar to that of a regular SQL
+default; if it's placed on a primary key column for a database which doesn't
+have a way to "postfetch" the ID, and the statement is not "inlined", the SQL
+expression is pre-executed; otherwise, SQLAlchemy lets the default fire off on
+the database side normally.
+
+
+.. _triggered_columns:
+
+Triggered Columns
+------------------
+
+Columns with values set by a database trigger or other external process may be
+called out using :class:`.FetchedValue` as a marker::
+
+ t = Table('test', meta,
+ Column('abc', String(20), server_default=FetchedValue()),
+ Column('def', String(20), server_onupdate=FetchedValue())
+ )
+
+.. versionchanged:: 0.8.0b2,0.7.10
+ The ``for_update`` argument on :class:`.FetchedValue` is set automatically
+ when specified as the ``server_onupdate`` argument. If using an older version,
+ specify the onupdate above as ``server_onupdate=FetchedValue(for_update=True)``.
+
+These markers do not emit a "default" clause when the table is created,
+however they do set the same internal flags as a static ``server_default``
+clause, providing hints to higher-level tools that a "post-fetch" of these
+rows should be performed after an insert or update.
+
+.. note::
+
+ It's generally not appropriate to use :class:`.FetchedValue` in
+ conjunction with a primary key column, particularly when using the
+ ORM or any other scenario where the :attr:`.ResultProxy.inserted_primary_key`
+   attribute is required. This is because the "post-fetch" operation requires
+ that the primary key value already be available, so that the
+ row can be selected on its primary key.
+
+ For a server-generated primary key value, all databases provide special
+ accessors or other techniques in order to acquire the "last inserted
+ primary key" column of a table. These mechanisms aren't affected by the presence
+ of :class:`.FetchedValue`. For special situations where triggers are
+ used to generate primary key values, and the database in use does not
+ support the ``RETURNING`` clause, it may be necessary to forego the usage
+ of the trigger and instead apply the SQL expression or function as a
+ "pre execute" expression::
+
+ t = Table('test', meta,
+ Column('abc', MyType, default=func.generate_new_value(), primary_key=True)
+ )
+
+ Where above, when :meth:`.Table.insert` is used,
+ the ``func.generate_new_value()`` expression will be pre-executed
+ in the context of a scalar ``SELECT`` statement, and the new value will
+ be applied to the subsequent ``INSERT``, while at the same time being
+ made available to the :attr:`.ResultProxy.inserted_primary_key`
+ attribute.
+
+
+Defining Sequences
+-------------------
+
+SQLAlchemy represents database sequences using the
+:class:`~sqlalchemy.schema.Sequence` object, which is considered to be a
+special case of "column default". It only has an effect on databases which
+have explicit support for sequences, which currently includes Postgresql,
+Oracle, and Firebird. The :class:`~sqlalchemy.schema.Sequence` object is
+otherwise ignored.
+
+The :class:`~sqlalchemy.schema.Sequence` may be placed on any column as a
+"default" generator to be used during INSERT operations, and can also be
+configured to fire off during UPDATE operations if desired. It is most
+commonly used in conjunction with a single integer primary key column::
+
+ table = Table("cartitems", meta,
+ Column("cart_id", Integer, Sequence('cart_id_seq'), primary_key=True),
+ Column("description", String(40)),
+ Column("createdate", DateTime())
+ )
+
+Where above, the table "cartitems" is associated with a sequence named
+"cart_id_seq". When INSERT statements take place for "cartitems", and no value
+is passed for the "cart_id" column, the "cart_id_seq" sequence will be used to
+generate a value.
+
+When the :class:`~sqlalchemy.schema.Sequence` is associated with a table,
+CREATE and DROP statements issued for that table will also issue CREATE/DROP
+for the sequence object as well, thus "bundling" the sequence object with its
+parent table.
+
+The :class:`~sqlalchemy.schema.Sequence` object also implements special
+functionality to accommodate Postgresql's SERIAL datatype. The SERIAL type in
+PG automatically generates a sequence that is used implicitly during inserts.
+This means that if a :class:`~sqlalchemy.schema.Table` object defines a
+:class:`~sqlalchemy.schema.Sequence` on its primary key column so that it
+works with Oracle and Firebird, the :class:`~sqlalchemy.schema.Sequence` would
+get in the way of the "implicit" sequence that PG would normally use. For this
+use case, add the flag ``optional=True`` to the
+:class:`~sqlalchemy.schema.Sequence` object - this indicates that the
+:class:`~sqlalchemy.schema.Sequence` should only be used if the database
+provides no other option for generating primary key identifiers.
+
+The :class:`~sqlalchemy.schema.Sequence` object also has the ability to be
+executed standalone like a SQL expression, which has the effect of calling its
+"next value" function::
+
+ seq = Sequence('some_sequence')
+ nextid = connection.execute(seq)
+
+Default Objects API
+-------------------
+
+.. autoclass:: ColumnDefault
+
+
+.. autoclass:: DefaultClause
+
+
+.. autoclass:: DefaultGenerator
+
+
+.. autoclass:: FetchedValue
+
+
+.. autoclass:: PassiveDefault
+
+
+.. autoclass:: Sequence
+ :members:
diff --git a/doc/build/core/dml.rst b/doc/build/core/dml.rst
new file mode 100644
index 000000000..3b6949b79
--- /dev/null
+++ b/doc/build/core/dml.rst
@@ -0,0 +1,37 @@
+Insert, Updates, Deletes
+========================
+
+INSERT, UPDATE and DELETE statements build on a hierarchy starting
+with :class:`.UpdateBase`. The :class:`.Insert` and :class:`.Update`
+constructs build on the intermediary :class:`.ValuesBase`.
+
+.. module:: sqlalchemy.sql.expression
+
+.. autofunction:: delete
+
+.. autofunction:: insert
+
+.. autofunction:: update
+
+
+.. autoclass:: Delete
+ :members:
+ :inherited-members:
+
+.. autoclass:: Insert
+ :members:
+ :inherited-members:
+
+.. autoclass:: Update
+ :members:
+ :inherited-members:
+
+.. autoclass:: sqlalchemy.sql.expression.UpdateBase
+ :members:
+ :inherited-members:
+
+.. autoclass:: sqlalchemy.sql.expression.ValuesBase
+ :members:
+
+
+
diff --git a/doc/build/core/engines.rst b/doc/build/core/engines.rst
index eea5041d7..8d34ab5c6 100644
--- a/doc/build/core/engines.rst
+++ b/doc/build/core/engines.rst
@@ -87,6 +87,8 @@ known driver available for that backend (i.e. cx_oracle, pysqlite/sqlite3,
psycopg2, mysqldb). For Jython connections, specify the `zxjdbc` driver, which
is the JDBC-DBAPI bridge included with Jython.
+.. autofunction:: sqlalchemy.engine.url.make_url
+
Postgresql
----------
@@ -125,7 +127,7 @@ More notes on connecting to MySQL at :ref:`mysql_toplevel`.
Oracle
------
-cx_oracle is usualjy used here::
+cx_oracle is usually used here::
engine = create_engine('oracle://scott:tiger@127.0.0.1:1521/sidname')
diff --git a/doc/build/core/event.rst b/doc/build/core/event.rst
index 73d0dab4c..1b873972a 100644
--- a/doc/build/core/event.rst
+++ b/doc/build/core/event.rst
@@ -75,7 +75,7 @@ as long as the names match up::
print("New DBAPI connection:", dbapi_connection)
print("Connection record:", kw['connection_record'])
-Above, the presence of ``**kw`` tells :func:`.event.listen_for` that
+Above, the presence of ``**kw`` tells :func:`.listens_for` that
arguments should be passed to the function by name, rather than positionally.
.. versionadded:: 0.9.0 Added optional ``named`` argument dispatch to
@@ -159,3 +159,6 @@ API Reference
.. autofunction:: sqlalchemy.event.listens_for
+.. autofunction:: sqlalchemy.event.remove
+
+.. autofunction:: sqlalchemy.event.contains
diff --git a/doc/build/core/events.rst b/doc/build/core/events.rst
index f43aa09f6..d52d50c5a 100644
--- a/doc/build/core/events.rst
+++ b/doc/build/core/events.rst
@@ -8,6 +8,9 @@ SQLAlchemy Core.
For an introduction to the event listening API, see :ref:`event_toplevel`.
ORM events are described in :ref:`orm_event_toplevel`.
+.. autoclass:: sqlalchemy.event.base.Events
+ :members:
+
.. versionadded:: 0.7
The event system supercedes the previous system of "extension", "listener",
and "proxy" classes.
diff --git a/doc/build/core/exceptions.rst b/doc/build/core/exceptions.rst
index f7d384ad9..30270f8b0 100644
--- a/doc/build/core/exceptions.rst
+++ b/doc/build/core/exceptions.rst
@@ -2,5 +2,4 @@ Core Exceptions
===============
.. automodule:: sqlalchemy.exc
- :show-inheritance:
:members: \ No newline at end of file
diff --git a/doc/build/core/expression_api.rst b/doc/build/core/expression_api.rst
index b17145c53..99bb98881 100644
--- a/doc/build/core/expression_api.rst
+++ b/doc/build/core/expression_api.rst
@@ -8,249 +8,13 @@ SQL Statements and Expressions API
This section presents the API reference for the SQL Expression Language. For a full introduction to its usage,
see :ref:`sqlexpression_toplevel`.
-Functions
----------
-The expression package uses functions to construct SQL expressions. The return value of each function is an object instance which is a subclass of :class:`~sqlalchemy.sql.expression.ClauseElement`.
-
-.. autofunction:: alias
-
-.. autofunction:: and_
-
-.. autofunction:: asc
-
-.. autofunction:: between
-
-.. autofunction:: bindparam
-
-.. autofunction:: case
-
-.. autofunction:: cast
-
-.. autofunction:: sqlalchemy.sql.expression.column
-
-.. autofunction:: collate
-
-.. autofunction:: delete
-
-.. autofunction:: desc
-
-.. autofunction:: distinct
-
-.. autofunction:: except_
-
-.. autofunction:: except_all
-
-.. autofunction:: exists
-
-.. autofunction:: extract
-
-.. autofunction:: false
-
-.. autodata:: func
-
-.. autofunction:: insert
-
-.. autofunction:: intersect
-
-.. autofunction:: intersect_all
-
-.. autofunction:: join
-
-.. autofunction:: label
-
-.. autofunction:: literal
-
-.. autofunction:: literal_column
-
-.. autofunction:: not_
-
-.. autofunction:: null
-
-.. autofunction:: nullsfirst
-
-.. autofunction:: nullslast
-
-.. autofunction:: or_
-
-.. autofunction:: outparam
-
-.. autofunction:: outerjoin
-
-.. autofunction:: over
-
-.. autofunction:: select
-
-.. autofunction:: subquery
-
-.. autofunction:: sqlalchemy.sql.expression.table
-
-.. autofunction:: text
-
-.. autofunction:: true
-
-.. autofunction:: tuple_
-
-.. autofunction:: type_coerce
-
-.. autofunction:: union
-
-.. autofunction:: union_all
-
-.. autofunction:: update
-
-Classes
--------
-
-.. autoclass:: Alias
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: BinaryExpression
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: BindParameter
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: ClauseElement
- :members:
- :show-inheritance:
-
-.. autoclass:: ClauseList
- :members:
- :show-inheritance:
-
-.. autoclass:: ColumnClause
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: ColumnCollection
- :members:
- :show-inheritance:
-
-.. autoclass:: ColumnElement
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: sqlalchemy.sql.operators.ColumnOperators
- :members:
- :special-members:
- :inherited-members:
- :show-inheritance:
-
-
-.. autoclass:: CompoundSelect
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: sqlalchemy.sql.operators.custom_op
- :members:
-
-.. autoclass:: CTE
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: Delete
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: Executable
- :members:
- :show-inheritance:
-
-.. autoclass:: FunctionElement
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: Function
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: FromClause
- :members:
- :show-inheritance:
-
-.. autoclass:: Insert
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: Join
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: sqlalchemy.sql.operators.Operators
- :members:
- :special-members:
-
-.. autoclass:: Select
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: Selectable
- :members:
- :show-inheritance:
-
-.. autoclass:: SelectBase
- :members:
- :show-inheritance:
-
-.. autoclass:: TableClause
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: UnaryExpression
- :members:
- :show-inheritance:
-
-.. autoclass:: Update
- :members:
- :inherited-members:
- :show-inheritance:
-
-.. autoclass:: UpdateBase
- :members:
- :show-inheritance:
-
-.. autoclass:: ValuesBase
- :members:
- :show-inheritance:
-
-.. _generic_functions:
-
-Generic Functions
------------------
-
-SQL functions which are known to SQLAlchemy with regards to database-specific
-rendering, return types and argument behavior. Generic functions are invoked
-like all SQL functions, using the :attr:`func` attribute::
-
- select([func.count()]).select_from(sometable)
-
-Note that any name not known to :attr:`func` generates the function name as is
-- there is no restriction on what SQL functions can be called, known or
-unknown to SQLAlchemy, built-in or user defined. The section here only
-describes those functions where SQLAlchemy already knows what argument and
-return types are in use.
-
-.. automodule:: sqlalchemy.sql.functions
- :members:
- :undoc-members:
- :show-inheritance:
+.. toctree::
+ :maxdepth: 1
+ sqlelement
+ selectable
+ dml
+ functions
+ types
diff --git a/doc/build/core/functions.rst b/doc/build/core/functions.rst
new file mode 100644
index 000000000..d284d125f
--- /dev/null
+++ b/doc/build/core/functions.rst
@@ -0,0 +1,27 @@
+.. _functions_toplevel:
+.. _generic_functions:
+
+=========================
+SQL and Generic Functions
+=========================
+
+.. module:: sqlalchemy.sql.expression
+
+SQL functions which are known to SQLAlchemy with regards to database-specific
+rendering, return types and argument behavior. Generic functions are invoked
+like all SQL functions, using the :attr:`func` attribute::
+
+ select([func.count()]).select_from(sometable)
+
+Note that any name not known to :attr:`func` generates the function name as is
+- there is no restriction on what SQL functions can be called, known or
+unknown to SQLAlchemy, built-in or user defined. The section here only
+describes those functions where SQLAlchemy already knows what argument and
+return types are in use.
+
+.. automodule:: sqlalchemy.sql.functions
+ :members:
+ :undoc-members:
+
+
+
diff --git a/doc/build/core/index.rst b/doc/build/core/index.rst
index 079a4b97a..210f28412 100644
--- a/doc/build/core/index.rst
+++ b/doc/build/core/index.rst
@@ -13,11 +13,10 @@ Language provides a schema-centric usage paradigm.
tutorial
expression_api
+ schema
engines
connections
pooling
- schema
- types
event
events
compiler
diff --git a/doc/build/core/internals.rst b/doc/build/core/internals.rst
index 64dc34183..1a85e9e6c 100644
--- a/doc/build/core/internals.rst
+++ b/doc/build/core/internals.rst
@@ -12,12 +12,10 @@ Some key internal constructs are listed here.
.. autoclass:: sqlalchemy.sql.compiler.DDLCompiler
:members:
- :show-inheritance:
:inherited-members:
.. autoclass:: sqlalchemy.engine.default.DefaultDialect
:members:
- :show-inheritance:
:inherited-members:
.. autoclass:: sqlalchemy.engine.interfaces.Dialect
@@ -25,17 +23,17 @@ Some key internal constructs are listed here.
.. autoclass:: sqlalchemy.engine.default.DefaultExecutionContext
:members:
- :show-inheritance:
+
.. autoclass:: sqlalchemy.engine.interfaces.ExecutionContext
:members:
- :show-inheritance:
+
.. autoclass:: sqlalchemy.sql.compiler.IdentifierPreparer
:members:
- :show-inheritance:
+
.. autoclass:: sqlalchemy.sql.compiler.SQLCompiler
:members:
- :show-inheritance:
+
diff --git a/doc/build/core/metadata.rst b/doc/build/core/metadata.rst
new file mode 100644
index 000000000..d6fc8c6af
--- /dev/null
+++ b/doc/build/core/metadata.rst
@@ -0,0 +1,330 @@
+.. _metadata_toplevel:
+
+.. _metadata_describing_toplevel:
+
+.. _metadata_describing:
+
+==================================
+Describing Databases with MetaData
+==================================
+
+.. module:: sqlalchemy.schema
+
+This section discusses the fundamental :class:`.Table`, :class:`.Column`
+and :class:`.MetaData` objects.
+
+A collection of metadata entities is stored in an object aptly named
+:class:`~sqlalchemy.schema.MetaData`::
+
+ from sqlalchemy import *
+
+ metadata = MetaData()
+
+:class:`~sqlalchemy.schema.MetaData` is a container object that keeps together
+many different features of a database (or multiple databases) being described.
+
+To represent a table, use the :class:`~sqlalchemy.schema.Table` class. Its two
+primary arguments are the table name, then the
+:class:`~sqlalchemy.schema.MetaData` object which it will be associated with.
+The remaining positional arguments are mostly
+:class:`~sqlalchemy.schema.Column` objects describing each column::
+
+ user = Table('user', metadata,
+ Column('user_id', Integer, primary_key = True),
+ Column('user_name', String(16), nullable = False),
+ Column('email_address', String(60)),
+ Column('password', String(20), nullable = False)
+ )
+
+Above, a table called ``user`` is described, which contains four columns. The
+primary key of the table consists of the ``user_id`` column. Multiple columns
+may be assigned the ``primary_key=True`` flag which denotes a multi-column
+primary key, known as a *composite* primary key.
+
+Note also that each column describes its datatype using objects corresponding
+to genericized types, such as :class:`~sqlalchemy.types.Integer` and
+:class:`~sqlalchemy.types.String`. SQLAlchemy features dozens of types of
+varying levels of specificity as well as the ability to create custom types.
+Documentation on the type system can be found at :ref:`types`.
+
+Accessing Tables and Columns
+----------------------------
+
+The :class:`~sqlalchemy.schema.MetaData` object contains all of the schema
+constructs we've associated with it. It supports a few methods of accessing
+these table objects, such as the ``sorted_tables`` accessor which returns a
+list of each :class:`~sqlalchemy.schema.Table` object in order of foreign key
+dependency (that is, each table is preceded by all tables which it
+references)::
+
+ >>> for t in metadata.sorted_tables:
+ ... print t.name
+ user
+ user_preference
+ invoice
+ invoice_item
+
+In most cases, individual :class:`~sqlalchemy.schema.Table` objects have been
+explicitly declared, and these objects are typically accessed directly as
+module-level variables in an application. Once a
+:class:`~sqlalchemy.schema.Table` has been defined, it has a full set of
+accessors which allow inspection of its properties. Given the following
+:class:`~sqlalchemy.schema.Table` definition::
+
+ employees = Table('employees', metadata,
+ Column('employee_id', Integer, primary_key=True),
+ Column('employee_name', String(60), nullable=False),
+ Column('employee_dept', Integer, ForeignKey("departments.department_id"))
+ )
+
+Note the :class:`~sqlalchemy.schema.ForeignKey` object used in this table -
+this construct defines a reference to a remote table, and is fully described
+in :ref:`metadata_foreignkeys`. Methods of accessing information about this
+table include::
+
+ # access the column "EMPLOYEE_ID":
+ employees.columns.employee_id
+
+ # or just
+ employees.c.employee_id
+
+ # via string
+ employees.c['employee_id']
+
+ # iterate through all columns
+ for c in employees.c:
+ print c
+
+ # get the table's primary key columns
+ for primary_key in employees.primary_key:
+ print primary_key
+
+ # get the table's foreign key objects:
+ for fkey in employees.foreign_keys:
+ print fkey
+
+ # access the table's MetaData:
+ employees.metadata
+
+ # access the table's bound Engine or Connection, if its MetaData is bound:
+ employees.bind
+
+ # access a column's name, type, nullable, primary key, foreign key
+ employees.c.employee_id.name
+ employees.c.employee_id.type
+ employees.c.employee_id.nullable
+ employees.c.employee_id.primary_key
+ employees.c.employee_dept.foreign_keys
+
+ # get the "key" of a column, which defaults to its name, but can
+ # be any user-defined string:
+ employees.c.employee_name.key
+
+ # access a column's table:
+ employees.c.employee_id.table is employees
+
+ # get the table related by a foreign key
+ list(employees.c.employee_dept.foreign_keys)[0].column.table
+
+Creating and Dropping Database Tables
+-------------------------------------
+
+Once you've defined some :class:`~sqlalchemy.schema.Table` objects, assuming
+you're working with a brand new database one thing you might want to do is
+issue CREATE statements for those tables and their related constructs (as an
+aside, it's also quite possible that you *don't* want to do this, if you
+already have some preferred methodology such as tools included with your
+database or an existing scripting system - if that's the case, feel free to
+skip this section - SQLAlchemy has no requirement that it be used to create
+your tables).
+
+The usual way to issue CREATE is to use
+:func:`~sqlalchemy.schema.MetaData.create_all` on the
+:class:`~sqlalchemy.schema.MetaData` object. This method will issue queries
+that first check for the existence of each individual table, and if not found
+will issue the CREATE statements:
+
+ .. sourcecode:: python+sql
+
+ engine = create_engine('sqlite:///:memory:')
+
+ metadata = MetaData()
+
+ user = Table('user', metadata,
+ Column('user_id', Integer, primary_key = True),
+ Column('user_name', String(16), nullable = False),
+ Column('email_address', String(60), key='email'),
+ Column('password', String(20), nullable = False)
+ )
+
+ user_prefs = Table('user_prefs', metadata,
+ Column('pref_id', Integer, primary_key=True),
+ Column('user_id', Integer, ForeignKey("user.user_id"), nullable=False),
+ Column('pref_name', String(40), nullable=False),
+ Column('pref_value', String(100))
+ )
+
+ {sql}metadata.create_all(engine)
+ PRAGMA table_info(user){}
+ CREATE TABLE user(
+ user_id INTEGER NOT NULL PRIMARY KEY,
+ user_name VARCHAR(16) NOT NULL,
+ email_address VARCHAR(60),
+ password VARCHAR(20) NOT NULL
+ )
+ PRAGMA table_info(user_prefs){}
+ CREATE TABLE user_prefs(
+ pref_id INTEGER NOT NULL PRIMARY KEY,
+ user_id INTEGER NOT NULL REFERENCES user(user_id),
+ pref_name VARCHAR(40) NOT NULL,
+ pref_value VARCHAR(100)
+ )
+
+:func:`~sqlalchemy.schema.MetaData.create_all` creates foreign key constraints
+between tables usually inline with the table definition itself, and for this
+reason it also generates the tables in order of their dependency. There are
+options to change this behavior such that ``ALTER TABLE`` is used instead.
+
+Dropping all tables is similarly achieved using the
+:func:`~sqlalchemy.schema.MetaData.drop_all` method. This method does the
+exact opposite of :func:`~sqlalchemy.schema.MetaData.create_all` - the
+presence of each table is checked first, and tables are dropped in reverse
+order of dependency.
+
+Creating and dropping individual tables can be done via the ``create()`` and
+``drop()`` methods of :class:`~sqlalchemy.schema.Table`. These methods by
+default issue the CREATE or DROP regardless of the table being present:
+
+.. sourcecode:: python+sql
+
+ engine = create_engine('sqlite:///:memory:')
+
+ meta = MetaData()
+
+ employees = Table('employees', meta,
+ Column('employee_id', Integer, primary_key=True),
+ Column('employee_name', String(60), nullable=False, key='name'),
+ Column('employee_dept', Integer, ForeignKey("departments.department_id"))
+ )
+ {sql}employees.create(engine)
+ CREATE TABLE employees(
+ employee_id SERIAL NOT NULL PRIMARY KEY,
+ employee_name VARCHAR(60) NOT NULL,
+ employee_dept INTEGER REFERENCES departments(department_id)
+ )
+ {}
+
+``drop()`` method:
+
+.. sourcecode:: python+sql
+
+ {sql}employees.drop(engine)
+ DROP TABLE employees
+ {}
+
+To enable the "check first for the table existing" logic, add the
+``checkfirst=True`` argument to ``create()`` or ``drop()``::
+
+ employees.create(engine, checkfirst=True)
+    employees.drop(engine, checkfirst=True)
+
+.. _schema_migrations:
+
+Altering Schemas through Migrations
+-----------------------------------
+
+While SQLAlchemy directly supports emitting CREATE and DROP statements for schema
+constructs, the ability to alter those constructs, usually via the ALTER statement
+as well as other database-specific constructs, is outside of the scope of SQLAlchemy
+itself. While it's easy enough to emit ALTER statements and similar by hand,
+such as by passing a string to :meth:`.Connection.execute` or by using the
+:class:`.DDL` construct, it's a common practice to automate the maintenance of
+database schemas in relation to application code using schema migration tools.
+
+There are two major migration tools available for SQLAlchemy:
+
+* `Alembic <http://alembic.readthedocs.org>`_ - Written by the author of SQLAlchemy,
+ Alembic features a highly customizable environment and a minimalistic usage pattern,
+ supporting such features as transactional DDL, automatic generation of "candidate"
+ migrations, an "offline" mode which generates SQL scripts, and support for branch
+ resolution.
+* `SQLAlchemy-Migrate <http://code.google.com/p/sqlalchemy-migrate/>`_ - The original
+ migration tool for SQLAlchemy, SQLAlchemy-Migrate is widely used and continues
+ under active development. SQLAlchemy-Migrate includes features such as
+ SQL script generation, ORM class generation, ORM model comparison, and extensive
+ support for SQLite migrations.
+
+
+Specifying the Schema Name
+---------------------------
+
+Some databases support the concept of multiple schemas. A
+:class:`~sqlalchemy.schema.Table` can reference this by specifying the
+``schema`` keyword argument::
+
+ financial_info = Table('financial_info', meta,
+ Column('id', Integer, primary_key=True),
+ Column('value', String(100), nullable=False),
+ schema='remote_banks'
+ )
+
+Within the :class:`~sqlalchemy.schema.MetaData` collection, this table will be
+identified by the combination of ``financial_info`` and ``remote_banks``. If
+another table called ``financial_info`` is referenced without the
+``remote_banks`` schema, it will refer to a different
+:class:`~sqlalchemy.schema.Table`. :class:`~sqlalchemy.schema.ForeignKey`
+objects can specify references to columns in this table using the form
+``remote_banks.financial_info.id``.
+
+The ``schema`` argument should be used for any name qualifiers required,
+including Oracle's "owner" attribute and similar. It also can accommodate a
+dotted name for longer schemes::
+
+ schema="dbo.scott"
+
+Backend-Specific Options
+------------------------
+
+:class:`~sqlalchemy.schema.Table` supports database-specific options. For
+example, MySQL has different table backend types, including "MyISAM" and
+"InnoDB". This can be expressed with :class:`~sqlalchemy.schema.Table` using
+``mysql_engine``::
+
+ addresses = Table('engine_email_addresses', meta,
+ Column('address_id', Integer, primary_key = True),
+ Column('remote_user_id', Integer, ForeignKey(users.c.user_id)),
+ Column('email_address', String(20)),
+ mysql_engine='InnoDB'
+ )
+
+Other backends may support table-level options as well - these would be
+described in the individual documentation sections for each dialect.
+
+Column, Table, MetaData API
+---------------------------
+
+.. autoclass:: Column
+ :members:
+ :inherited-members:
+ :undoc-members:
+
+
+.. autoclass:: MetaData
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: SchemaItem
+ :members:
+
+.. autoclass:: Table
+ :members:
+ :inherited-members:
+ :undoc-members:
+
+
+.. autoclass:: ThreadLocalMetaData
+ :members:
+ :undoc-members:
+
+
diff --git a/doc/build/core/pooling.rst b/doc/build/core/pooling.rst
index eb5463075..fcd8fd55c 100644
--- a/doc/build/core/pooling.rst
+++ b/doc/build/core/pooling.rst
@@ -282,6 +282,51 @@ server at the point at which the script pauses for input::
print c.execute("select 1").fetchall()
c.close()
+.. _pool_connection_invalidation:
+
+More on Invalidation
+^^^^^^^^^^^^^^^^^^^^
+
+The :class:`.Pool` provides "connection invalidation" services which allow
+both explicit invalidation of a connection as well as automatic invalidation
+in response to conditions that are determined to render a connection unusable.
+
+"Invalidation" means that a particular DBAPI connection is removed from the
+pool and discarded.  The ``.close()`` method is called on this connection,
+even though the connection itself may already be closed; if this method
+fails, the exception is logged, but the operation still proceeds.
+
+When using a :class:`.Engine`, the :meth:`.Connection.invalidate` method is
+the usual entrypoint to explicit invalidation. Other conditions by which
+a DBAPI connection might be invalidated include:
+
+* a DBAPI exception such as :class:`.OperationalError`, raised when a
+ method like ``connection.execute()`` is called, is detected as indicating
+ a so-called "disconnect" condition. As the Python DBAPI provides no
+ standard system for determining the nature of an exception, all SQLAlchemy
+ dialects include a system called ``is_disconnect()`` which will examine
+ the contents of an exception object, including the string message and
+ any potential error codes included with it, in order to determine if this
+ exception indicates that the connection is no longer usable. If this is the
+ case, the :meth:`._ConnectionFairy.invalidate` method is called and the
+ DBAPI connection is then discarded.
+
+* When the connection is returned to the pool, and
+ calling the ``connection.rollback()`` or ``connection.commit()`` methods,
+ as dictated by the pool's "reset on return" behavior, throws an exception.
+ A final attempt at calling ``.close()`` on the connection will be made,
+ and it is then discarded.
+
+* When a listener implementing :meth:`.PoolEvents.checkout` raises the
+ :class:`~sqlalchemy.exc.DisconnectionError` exception, indicating that the connection
+ won't be usable and a new connection attempt needs to be made.
+
+All invalidations which occur will invoke the :meth:`.PoolEvents.invalidate`
+event.
+
+
+
+
API Documentation - Available Pool Implementations
---------------------------------------------------
@@ -291,25 +336,33 @@ API Documentation - Available Pool Implementations
.. automethod:: connect
.. automethod:: dispose
.. automethod:: recreate
+ .. automethod:: unique_connection
.. autoclass:: sqlalchemy.pool.QueuePool
- :show-inheritance:
.. automethod:: __init__
+ .. automethod:: connect
+ .. automethod:: unique_connection
.. autoclass:: SingletonThreadPool
- :show-inheritance:
.. automethod:: __init__
.. autoclass:: AssertionPool
- :show-inheritance:
+
.. autoclass:: NullPool
- :show-inheritance:
+
.. autoclass:: StaticPool
- :show-inheritance:
+
+.. autoclass:: _ConnectionFairy
+ :members:
+
+ .. autoattribute:: _connection_record
+
+.. autoclass:: _ConnectionRecord
+ :members:
Pooling Plain DB-API Connections
diff --git a/doc/build/core/reflection.rst b/doc/build/core/reflection.rst
new file mode 100644
index 000000000..952f48f74
--- /dev/null
+++ b/doc/build/core/reflection.rst
@@ -0,0 +1,168 @@
+.. module:: sqlalchemy.schema
+
+.. _metadata_reflection_toplevel:
+.. _metadata_reflection:
+
+
+Reflecting Database Objects
+===========================
+
+A :class:`~sqlalchemy.schema.Table` object can be instructed to load
+information about itself from the corresponding database schema object already
+existing within the database. This process is called *reflection*. In the
+most simple case you need only specify the table name, a :class:`~sqlalchemy.schema.MetaData`
+object, and the ``autoload=True`` flag. If the
+:class:`~sqlalchemy.schema.MetaData` is not persistently bound, also add the
+``autoload_with`` argument::
+
+ >>> messages = Table('messages', meta, autoload=True, autoload_with=engine)
+ >>> [c.name for c in messages.columns]
+ ['message_id', 'message_name', 'date']
+
+The above operation will use the given engine to query the database for
+information about the ``messages`` table, and will then generate
+:class:`~sqlalchemy.schema.Column`, :class:`~sqlalchemy.schema.ForeignKey`,
+and other objects corresponding to this information as though the
+:class:`~sqlalchemy.schema.Table` object were hand-constructed in Python.
+
+When tables are reflected, if a given table references another one via foreign
+key, a second :class:`~sqlalchemy.schema.Table` object is created within the
+:class:`~sqlalchemy.schema.MetaData` object representing the connection.
+Below, assume the table ``shopping_cart_items`` references a table named
+``shopping_carts``. Reflecting the ``shopping_cart_items`` table has the
+effect such that the ``shopping_carts`` table will also be loaded::
+
+ >>> shopping_cart_items = Table('shopping_cart_items', meta, autoload=True, autoload_with=engine)
+    >>> 'shopping_carts' in meta.tables
+ True
+
+The :class:`~sqlalchemy.schema.MetaData` has an interesting "singleton-like"
+behavior such that if you requested both tables individually,
+:class:`~sqlalchemy.schema.MetaData` will ensure that exactly one
+:class:`~sqlalchemy.schema.Table` object is created for each distinct table
+name. The :class:`~sqlalchemy.schema.Table` constructor actually returns to
+you the already-existing :class:`~sqlalchemy.schema.Table` object if one
+already exists with the given name. Such as below, we can access the already
+generated ``shopping_carts`` table just by naming it::
+
+ shopping_carts = Table('shopping_carts', meta)
+
+Of course, it's a good idea to use ``autoload=True`` with the above table
+regardless. This is so that the table's attributes will be loaded if they have
+not been already. The autoload operation only occurs for the table if it
+hasn't already been loaded; once loaded, new calls to
+:class:`~sqlalchemy.schema.Table` with the same name will not re-issue any
+reflection queries.
+
+Overriding Reflected Columns
+-----------------------------
+
+Individual columns can be overridden with explicit values when reflecting
+tables; this is handy for specifying custom datatypes, constraints such as
+primary keys that may not be configured within the database, etc.::
+
+ >>> mytable = Table('mytable', meta,
+ ... Column('id', Integer, primary_key=True), # override reflected 'id' to have primary key
+ ... Column('mydata', Unicode(50)), # override reflected 'mydata' to be Unicode
+ ... autoload=True)
+
+Reflecting Views
+-----------------
+
+The reflection system can also reflect views. Basic usage is the same as that
+of a table::
+
+ my_view = Table("some_view", metadata, autoload=True)
+
+Above, ``my_view`` is a :class:`~sqlalchemy.schema.Table` object with
+:class:`~sqlalchemy.schema.Column` objects representing the names and types of
+each column within the view "some_view".
+
+Usually, it's desired to have at least a primary key constraint when
+reflecting a view, if not foreign keys as well. View reflection doesn't
+extrapolate these constraints.
+
+Use the "override" technique for this, specifying explicitly those columns
+which are part of the primary key or have foreign key constraints::
+
+ my_view = Table("some_view", metadata,
+ Column("view_id", Integer, primary_key=True),
+ Column("related_thing", Integer, ForeignKey("othertable.thing_id")),
+ autoload=True
+ )
+
+Reflecting All Tables at Once
+-----------------------------
+
+The :class:`~sqlalchemy.schema.MetaData` object can also get a listing of
+tables and reflect the full set. This is achieved by using the
+:func:`~sqlalchemy.schema.MetaData.reflect` method. After calling it, all
+located tables are present within the :class:`~sqlalchemy.schema.MetaData`
+object's dictionary of tables::
+
+ meta = MetaData()
+ meta.reflect(bind=someengine)
+ users_table = meta.tables['users']
+ addresses_table = meta.tables['addresses']
+
+``metadata.reflect()`` also provides a handy way to clear or delete all the rows in a database::
+
+ meta = MetaData()
+ meta.reflect(bind=someengine)
+ for table in reversed(meta.sorted_tables):
+ someengine.execute(table.delete())
+
+.. _metadata_reflection_inspector:
+
+Fine Grained Reflection with Inspector
+--------------------------------------
+
+A low level interface which provides a backend-agnostic system of loading
+lists of schema, table, column, and constraint descriptions from a given
+database is also available. This is known as the "Inspector"::
+
+ from sqlalchemy import create_engine
+ from sqlalchemy.engine import reflection
+ engine = create_engine('...')
+ insp = reflection.Inspector.from_engine(engine)
+ print insp.get_table_names()
+
+.. autoclass:: sqlalchemy.engine.reflection.Inspector
+ :members:
+ :undoc-members:
+
+Limitations of Reflection
+-------------------------
+
+It's important to note that the reflection process recreates :class:`.Table`
+metadata using only information which is represented in the relational database.
+This process by definition cannot restore aspects of a schema that aren't
+actually stored in the database. State which is not available from reflection
+includes but is not limited to:
+
+* Client side defaults, either Python functions or SQL expressions defined using
+ the ``default`` keyword of :class:`.Column` (note this is separate from ``server_default``,
+ which specifically is what's available via reflection).
+
+* Column information, e.g. data that might have been placed into the
+ :attr:`.Column.info` dictionary
+
+* The value of the ``.quote`` setting for :class:`.Column` or :class:`.Table`
+
+* The association of a particular :class:`.Sequence` with a given :class:`.Column`
+
+The relational database also in many cases reports on table metadata in a
+different format than what was specified in SQLAlchemy. The :class:`.Table`
+objects returned from reflection cannot always be relied upon to produce the identical
+DDL as the original Python-defined :class:`.Table` objects. Areas where
+this occurs include server defaults, column-associated sequences and various
+idiosyncrasies regarding constraints and datatypes. Server side defaults may
+be returned with cast directives (typically Postgresql will include a ``::<type>``
+cast) or different quoting patterns than originally specified.
+
+Another category of limitation includes schema structures for which reflection
+is only partially or not yet defined. Recent improvements to reflection allow
+things like views, indexes and foreign key options to be reflected. As of this
+writing, structures like CHECK constraints, table comments, and triggers are
+not reflected.
+
diff --git a/doc/build/core/schema.rst b/doc/build/core/schema.rst
index b2caf870f..aeb04be18 100644
--- a/doc/build/core/schema.rst
+++ b/doc/build/core/schema.rst
@@ -1,4 +1,4 @@
-.. _metadata_toplevel:
+.. _schema_toplevel:
==========================
Schema Definition Language
@@ -6,11 +6,8 @@ Schema Definition Language
.. module:: sqlalchemy.schema
-
-.. _metadata_describing:
-
-Describing Databases with MetaData
-==================================
+This section references SQLAlchemy **schema metadata**, a comprehensive system of describing and inspecting
+database schemas.
The core of SQLAlchemy's query and object mapping operations are supported by
*database metadata*, which is comprised of Python objects that describe tables
@@ -35,1453 +32,14 @@ designed to be used in a *declarative* style which closely resembles that of
real DDL. They are therefore most intuitive to those who have some background
in creating real schema generation scripts.
-A collection of metadata entities is stored in an object aptly named
-:class:`~sqlalchemy.schema.MetaData`::
-
- from sqlalchemy import *
-
- metadata = MetaData()
-
-:class:`~sqlalchemy.schema.MetaData` is a container object that keeps together
-many different features of a database (or multiple databases) being described.
-
-To represent a table, use the :class:`~sqlalchemy.schema.Table` class. Its two
-primary arguments are the table name, then the
-:class:`~sqlalchemy.schema.MetaData` object which it will be associated with.
-The remaining positional arguments are mostly
-:class:`~sqlalchemy.schema.Column` objects describing each column::
-
- user = Table('user', metadata,
- Column('user_id', Integer, primary_key = True),
- Column('user_name', String(16), nullable = False),
- Column('email_address', String(60)),
- Column('password', String(20), nullable = False)
- )
-
-Above, a table called ``user`` is described, which contains four columns. The
-primary key of the table consists of the ``user_id`` column. Multiple columns
-may be assigned the ``primary_key=True`` flag which denotes a multi-column
-primary key, known as a *composite* primary key.
-
-Note also that each column describes its datatype using objects corresponding
-to genericized types, such as :class:`~sqlalchemy.types.Integer` and
-:class:`~sqlalchemy.types.String`. SQLAlchemy features dozens of types of
-varying levels of specificity as well as the ability to create custom types.
-Documentation on the type system can be found at :ref:`types`.
-
-Accessing Tables and Columns
-----------------------------
-
-The :class:`~sqlalchemy.schema.MetaData` object contains all of the schema
-constructs we've associated with it. It supports a few methods of accessing
-these table objects, such as the ``sorted_tables`` accessor which returns a
-list of each :class:`~sqlalchemy.schema.Table` object in order of foreign key
-dependency (that is, each table is preceded by all tables which it
-references)::
-
- >>> for t in metadata.sorted_tables:
- ... print t.name
- user
- user_preference
- invoice
- invoice_item
-
-In most cases, individual :class:`~sqlalchemy.schema.Table` objects have been
-explicitly declared, and these objects are typically accessed directly as
-module-level variables in an application. Once a
-:class:`~sqlalchemy.schema.Table` has been defined, it has a full set of
-accessors which allow inspection of its properties. Given the following
-:class:`~sqlalchemy.schema.Table` definition::
-
- employees = Table('employees', metadata,
- Column('employee_id', Integer, primary_key=True),
- Column('employee_name', String(60), nullable=False),
- Column('employee_dept', Integer, ForeignKey("departments.department_id"))
- )
-
-Note the :class:`~sqlalchemy.schema.ForeignKey` object used in this table -
-this construct defines a reference to a remote table, and is fully described
-in :ref:`metadata_foreignkeys`. Methods of accessing information about this
-table include::
-
- # access the column "EMPLOYEE_ID":
- employees.columns.employee_id
-
- # or just
- employees.c.employee_id
-
- # via string
- employees.c['employee_id']
-
- # iterate through all columns
- for c in employees.c:
- print c
-
- # get the table's primary key columns
- for primary_key in employees.primary_key:
- print primary_key
-
- # get the table's foreign key objects:
- for fkey in employees.foreign_keys:
- print fkey
-
- # access the table's MetaData:
- employees.metadata
-
- # access the table's bound Engine or Connection, if its MetaData is bound:
- employees.bind
-
- # access a column's name, type, nullable, primary key, foreign key
- employees.c.employee_id.name
- employees.c.employee_id.type
- employees.c.employee_id.nullable
- employees.c.employee_id.primary_key
- employees.c.employee_dept.foreign_keys
-
- # get the "key" of a column, which defaults to its name, but can
- # be any user-defined string:
- employees.c.employee_name.key
-
- # access a column's table:
- employees.c.employee_id.table is employees
-
- # get the table related by a foreign key
- list(employees.c.employee_dept.foreign_keys)[0].column.table
-
-Creating and Dropping Database Tables
--------------------------------------
-
-Once you've defined some :class:`~sqlalchemy.schema.Table` objects, assuming
-you're working with a brand new database one thing you might want to do is
-issue CREATE statements for those tables and their related constructs (as an
-aside, it's also quite possible that you *don't* want to do this, if you
-already have some preferred methodology such as tools included with your
-database or an existing scripting system - if that's the case, feel free to
-skip this section - SQLAlchemy has no requirement that it be used to create
-your tables).
-
-The usual way to issue CREATE is to use
-:func:`~sqlalchemy.schema.MetaData.create_all` on the
-:class:`~sqlalchemy.schema.MetaData` object. This method will issue queries
-that first check for the existence of each individual table, and if not found
-will issue the CREATE statements:
-
- .. sourcecode:: python+sql
-
- engine = create_engine('sqlite:///:memory:')
-
- metadata = MetaData()
-
- user = Table('user', metadata,
- Column('user_id', Integer, primary_key = True),
- Column('user_name', String(16), nullable = False),
- Column('email_address', String(60), key='email'),
- Column('password', String(20), nullable = False)
- )
-
- user_prefs = Table('user_prefs', metadata,
- Column('pref_id', Integer, primary_key=True),
- Column('user_id', Integer, ForeignKey("user.user_id"), nullable=False),
- Column('pref_name', String(40), nullable=False),
- Column('pref_value', String(100))
- )
-
- {sql}metadata.create_all(engine)
- PRAGMA table_info(user){}
- CREATE TABLE user(
- user_id INTEGER NOT NULL PRIMARY KEY,
- user_name VARCHAR(16) NOT NULL,
- email_address VARCHAR(60),
- password VARCHAR(20) NOT NULL
- )
- PRAGMA table_info(user_prefs){}
- CREATE TABLE user_prefs(
- pref_id INTEGER NOT NULL PRIMARY KEY,
- user_id INTEGER NOT NULL REFERENCES user(user_id),
- pref_name VARCHAR(40) NOT NULL,
- pref_value VARCHAR(100)
- )
-
-:func:`~sqlalchemy.schema.MetaData.create_all` creates foreign key constraints
-between tables usually inline with the table definition itself, and for this
-reason it also generates the tables in order of their dependency. There are
-options to change this behavior such that ``ALTER TABLE`` is used instead.
-
-Dropping all tables is similarly achieved using the
-:func:`~sqlalchemy.schema.MetaData.drop_all` method. This method does the
-exact opposite of :func:`~sqlalchemy.schema.MetaData.create_all` - the
-presence of each table is checked first, and tables are dropped in reverse
-order of dependency.
-
-Creating and dropping individual tables can be done via the ``create()`` and
-``drop()`` methods of :class:`~sqlalchemy.schema.Table`. These methods by
-default issue the CREATE or DROP regardless of the table being present:
-
-.. sourcecode:: python+sql
-
- engine = create_engine('sqlite:///:memory:')
-
- meta = MetaData()
-
- employees = Table('employees', meta,
- Column('employee_id', Integer, primary_key=True),
- Column('employee_name', String(60), nullable=False, key='name'),
- Column('employee_dept', Integer, ForeignKey("departments.department_id"))
- )
- {sql}employees.create(engine)
- CREATE TABLE employees(
- employee_id SERIAL NOT NULL PRIMARY KEY,
- employee_name VARCHAR(60) NOT NULL,
- employee_dept INTEGER REFERENCES departments(department_id)
- )
- {}
-
-``drop()`` method:
-
-.. sourcecode:: python+sql
-
- {sql}employees.drop(engine)
- DROP TABLE employees
- {}
-
-To enable the "check first for the table existing" logic, add the
-``checkfirst=True`` argument to ``create()`` or ``drop()``::
-
- employees.create(engine, checkfirst=True)
- employees.drop(engine, checkfirst=False)
-
-Altering Schemas through Migrations
------------------------------------
-
-While SQLAlchemy directly supports emitting CREATE and DROP statements for schema
-constructs, the ability to alter those constructs, usually via the ALTER statement
-as well as other database-specific constructs, is outside of the scope of SQLAlchemy
-itself. While it's easy enough to emit ALTER statements and similar by hand,
-such as by passing a string to :meth:`.Connection.execute` or by using the
-:class:`.DDL` construct, it's a common practice to automate the maintenance of
-database schemas in relation to application code using schema migration tools.
-
-There are two major migration tools available for SQLAlchemy:
-
-* `Alembic <http://alembic.readthedocs.org>`_ - Written by the author of SQLAlchemy,
- Alembic features a highly customizable environment and a minimalistic usage pattern,
- supporting such features as transactional DDL, automatic generation of "candidate"
- migrations, an "offline" mode which generates SQL scripts, and support for branch
- resolution.
-* `SQLAlchemy-Migrate <http://code.google.com/p/sqlalchemy-migrate/>`_ - The original
- migration tool for SQLAlchemy, SQLAlchemy-Migrate is widely used and continues
- under active development. SQLAlchemy-Migrate includes features such as
- SQL script generation, ORM class generation, ORM model comparison, and extensive
- support for SQLite migrations.
-
-
-Specifying the Schema Name
----------------------------
-
-Some databases support the concept of multiple schemas. A
-:class:`~sqlalchemy.schema.Table` can reference this by specifying the
-``schema`` keyword argument::
-
- financial_info = Table('financial_info', meta,
- Column('id', Integer, primary_key=True),
- Column('value', String(100), nullable=False),
- schema='remote_banks'
- )
-
-Within the :class:`~sqlalchemy.schema.MetaData` collection, this table will be
-identified by the combination of ``financial_info`` and ``remote_banks``. If
-another table called ``financial_info`` is referenced without the
-``remote_banks`` schema, it will refer to a different
-:class:`~sqlalchemy.schema.Table`. :class:`~sqlalchemy.schema.ForeignKey`
-objects can specify references to columns in this table using the form
-``remote_banks.financial_info.id``.
-
-The ``schema`` argument should be used for any name qualifiers required,
-including Oracle's "owner" attribute and similar. It also can accommodate a
-dotted name for longer schemes::
-
- schema="dbo.scott"
-
-Backend-Specific Options
-------------------------
-
-:class:`~sqlalchemy.schema.Table` supports database-specific options. For
-example, MySQL has different table backend types, including "MyISAM" and
-"InnoDB". This can be expressed with :class:`~sqlalchemy.schema.Table` using
-``mysql_engine``::
-
- addresses = Table('engine_email_addresses', meta,
- Column('address_id', Integer, primary_key = True),
- Column('remote_user_id', Integer, ForeignKey(users.c.user_id)),
- Column('email_address', String(20)),
- mysql_engine='InnoDB'
- )
-
-Other backends may support table-level options as well - these would be
-described in the individual documentation sections for each dialect.
-
-Column, Table, MetaData API
----------------------------
-
-.. autoclass:: Column
- :members:
- :inherited-members:
- :undoc-members:
- :show-inheritance:
-
-.. autoclass:: MetaData
- :members:
- :undoc-members:
- :show-inheritance:
-
-.. autoclass:: SchemaItem
- :show-inheritance:
- :members:
-
-.. autoclass:: Table
- :members:
- :inherited-members:
- :undoc-members:
- :show-inheritance:
-
-.. autoclass:: ThreadLocalMetaData
- :members:
- :undoc-members:
- :show-inheritance:
-
-
-.. _metadata_reflection:
-
-Reflecting Database Objects
-===========================
-
-A :class:`~sqlalchemy.schema.Table` object can be instructed to load
-information about itself from the corresponding database schema object already
-existing within the database. This process is called *reflection*. In the
-most simple case you need only specify the table name, a :class:`~sqlalchemy.schema.MetaData`
-object, and the ``autoload=True`` flag. If the
-:class:`~sqlalchemy.schema.MetaData` is not persistently bound, also add the
-``autoload_with`` argument::
-
- >>> messages = Table('messages', meta, autoload=True, autoload_with=engine)
- >>> [c.name for c in messages.columns]
- ['message_id', 'message_name', 'date']
-
-The above operation will use the given engine to query the database for
-information about the ``messages`` table, and will then generate
-:class:`~sqlalchemy.schema.Column`, :class:`~sqlalchemy.schema.ForeignKey`,
-and other objects corresponding to this information as though the
-:class:`~sqlalchemy.schema.Table` object were hand-constructed in Python.
-
-When tables are reflected, if a given table references another one via foreign
-key, a second :class:`~sqlalchemy.schema.Table` object is created within the
-:class:`~sqlalchemy.schema.MetaData` object representing the connection.
-Below, assume the table ``shopping_cart_items`` references a table named
-``shopping_carts``. Reflecting the ``shopping_cart_items`` table has the
-effect such that the ``shopping_carts`` table will also be loaded::
-
- >>> shopping_cart_items = Table('shopping_cart_items', meta, autoload=True, autoload_with=engine)
- >>> 'shopping_carts' in meta.tables:
- True
-
-The :class:`~sqlalchemy.schema.MetaData` has an interesting "singleton-like"
-behavior such that if you requested both tables individually,
-:class:`~sqlalchemy.schema.MetaData` will ensure that exactly one
-:class:`~sqlalchemy.schema.Table` object is created for each distinct table
-name. The :class:`~sqlalchemy.schema.Table` constructor actually returns to
-you the already-existing :class:`~sqlalchemy.schema.Table` object if one
-already exists with the given name. Such as below, we can access the already
-generated ``shopping_carts`` table just by naming it::
-
- shopping_carts = Table('shopping_carts', meta)
-
-Of course, it's a good idea to use ``autoload=True`` with the above table
-regardless. This is so that the table's attributes will be loaded if they have
-not been already. The autoload operation only occurs for the table if it
-hasn't already been loaded; once loaded, new calls to
-:class:`~sqlalchemy.schema.Table` with the same name will not re-issue any
-reflection queries.
-
-Overriding Reflected Columns
------------------------------
-
-Individual columns can be overridden with explicit values when reflecting
-tables; this is handy for specifying custom datatypes, constraints such as
-primary keys that may not be configured within the database, etc.::
-
- >>> mytable = Table('mytable', meta,
- ... Column('id', Integer, primary_key=True), # override reflected 'id' to have primary key
- ... Column('mydata', Unicode(50)), # override reflected 'mydata' to be Unicode
- ... autoload=True)
-
-Reflecting Views
------------------
-
-The reflection system can also reflect views. Basic usage is the same as that
-of a table::
-
- my_view = Table("some_view", metadata, autoload=True)
-
-Above, ``my_view`` is a :class:`~sqlalchemy.schema.Table` object with
-:class:`~sqlalchemy.schema.Column` objects representing the names and types of
-each column within the view "some_view".
-
-Usually, it's desired to have at least a primary key constraint when
-reflecting a view, if not foreign keys as well. View reflection doesn't
-extrapolate these constraints.
-
-Use the "override" technique for this, specifying explicitly those columns
-which are part of the primary key or have foreign key constraints::
-
- my_view = Table("some_view", metadata,
- Column("view_id", Integer, primary_key=True),
- Column("related_thing", Integer, ForeignKey("othertable.thing_id")),
- autoload=True
- )
-
-Reflecting All Tables at Once
------------------------------
-
-The :class:`~sqlalchemy.schema.MetaData` object can also get a listing of
-tables and reflect the full set. This is achieved by using the
-:func:`~sqlalchemy.schema.MetaData.reflect` method. After calling it, all
-located tables are present within the :class:`~sqlalchemy.schema.MetaData`
-object's dictionary of tables::
-
- meta = MetaData()
- meta.reflect(bind=someengine)
- users_table = meta.tables['users']
- addresses_table = meta.tables['addresses']
-
-``metadata.reflect()`` also provides a handy way to clear or delete all the rows in a database::
-
- meta = MetaData()
- meta.reflect(bind=someengine)
- for table in reversed(meta.sorted_tables):
- someengine.execute(table.delete())
-
-Fine Grained Reflection with Inspector
---------------------------------------
-
-A low level interface which provides a backend-agnostic system of loading
-lists of schema, table, column, and constraint descriptions from a given
-database is also available. This is known as the "Inspector"::
-
- from sqlalchemy import create_engine
- from sqlalchemy.engine import reflection
- engine = create_engine('...')
- insp = reflection.Inspector.from_engine(engine)
- print insp.get_table_names()
-
-.. autoclass:: sqlalchemy.engine.reflection.Inspector
- :members:
- :undoc-members:
- :show-inheritance:
-
-
-.. _metadata_defaults:
-
-Column Insert/Update Defaults
-==============================
-
-SQLAlchemy provides a very rich featureset regarding column level events which
-take place during INSERT and UPDATE statements. Options include:
-
-* Scalar values used as defaults during INSERT and UPDATE operations
-* Python functions which execute upon INSERT and UPDATE operations
-* SQL expressions which are embedded in INSERT statements (or in some cases execute beforehand)
-* SQL expressions which are embedded in UPDATE statements
-* Server side default values used during INSERT
-* Markers for server-side triggers used during UPDATE
-
-The general rule for all insert/update defaults is that they only take effect
-if no value for a particular column is passed as an ``execute()`` parameter;
-otherwise, the given value is used.
-
-Scalar Defaults
----------------
-
-The simplest kind of default is a scalar value used as the default value of a column::
-
- Table("mytable", meta,
- Column("somecolumn", Integer, default=12)
- )
-
-Above, the value "12" will be bound as the column value during an INSERT if no
-other value is supplied.
-
-A scalar value may also be associated with an UPDATE statement, though this is
-not very common (as UPDATE statements are usually looking for dynamic
-defaults)::
-
- Table("mytable", meta,
- Column("somecolumn", Integer, onupdate=25)
- )
-
-
-Python-Executed Functions
--------------------------
-
-The ``default`` and ``onupdate`` keyword arguments also accept Python
-functions. These functions are invoked at the time of insert or update if no
-other value for that column is supplied, and the value returned is used for
-the column's value. Below illustrates a crude "sequence" that assigns an
-incrementing counter to a primary key column::
-
- # a function which counts upwards
- i = 0
- def mydefault():
- global i
- i += 1
- return i
-
- t = Table("mytable", meta,
- Column('id', Integer, primary_key=True, default=mydefault),
- )
-
-It should be noted that for real "incrementing sequence" behavior, the
-built-in capabilities of the database should normally be used, which may
-include sequence objects or other autoincrementing capabilities. For primary
-key columns, SQLAlchemy will in most cases use these capabilities
-automatically. See the API documentation for
-:class:`~sqlalchemy.schema.Column` including the ``autoincrement`` flag, as
-well as the section on :class:`~sqlalchemy.schema.Sequence` later in this
-chapter for background on standard primary key generation techniques.
-
-To illustrate onupdate, we assign the Python ``datetime`` function ``now`` to
-the ``onupdate`` attribute::
-
- import datetime
-
- t = Table("mytable", meta,
- Column('id', Integer, primary_key=True),
-
- # define 'last_updated' to be populated with datetime.now()
- Column('last_updated', DateTime, onupdate=datetime.datetime.now),
- )
-
-When an update statement executes and no value is passed for ``last_updated``,
-the ``datetime.datetime.now()`` Python function is executed and its return
-value used as the value for ``last_updated``. Notice that we provide ``now``
-as the function itself without calling it (i.e. there are no parentheses
-following) - SQLAlchemy will execute the function at the time the statement
-executes.
-
-Context-Sensitive Default Functions
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-The Python functions used by ``default`` and ``onupdate`` may also make use of
-the current statement's context in order to determine a value. The `context`
-of a statement is an internal SQLAlchemy object which contains all information
-about the statement being executed, including its source expression, the
-parameters associated with it and the cursor. The typical use case for this
-context with regards to default generation is to have access to the other
-values being inserted or updated on the row. To access the context, provide a
-function that accepts a single ``context`` argument::
-
- def mydefault(context):
- return context.current_parameters['counter'] + 12
-
- t = Table('mytable', meta,
- Column('counter', Integer),
- Column('counter_plus_twelve', Integer, default=mydefault, onupdate=mydefault)
- )
-
-Above we illustrate a default function which will execute for all INSERT and
-UPDATE statements where a value for ``counter_plus_twelve`` was otherwise not
-provided, and the value will be that of whatever value is present in the
-execution for the ``counter`` column, plus the number 12.
-
-While the context object passed to the default function has many attributes,
-the ``current_parameters`` member is a special member provided only during the
-execution of a default function for the purposes of deriving defaults from its
-existing values. For a single statement that is executing many sets of bind
-parameters, the user-defined function is called for each set of parameters,
-and ``current_parameters`` will be provided with each individual parameter set
-for each execution.
-
-SQL Expressions
----------------
-
-The "default" and "onupdate" keywords may also be passed SQL expressions,
-including select statements or direct function calls::
-
- t = Table("mytable", meta,
- Column('id', Integer, primary_key=True),
-
- # define 'create_date' to default to now()
- Column('create_date', DateTime, default=func.now()),
-
- # define 'key' to pull its default from the 'keyvalues' table
-        Column('key', String(20), default=keyvalues.select(keyvalues.c.type == 'type1', limit=1)),
-
- # define 'last_modified' to use the current_timestamp SQL function on update
- Column('last_modified', DateTime, onupdate=func.utc_timestamp())
- )
-
-Above, the ``create_date`` column will be populated with the result of the
-``now()`` SQL function (which, depending on backend, compiles into ``NOW()``
-or ``CURRENT_TIMESTAMP`` in most cases) during an INSERT statement, and the
-``key`` column with the result of a SELECT subquery from another table. The
-``last_modified`` column will be populated with the value of
-``UTC_TIMESTAMP()``, a function specific to MySQL, when an UPDATE statement is
-emitted for this table.
-
-Note that when using ``func`` functions, unlike when using Python `datetime`
-functions we *do* call the function, i.e. with parentheses "()" - this is
-because what we want in this case is the return value of the function, which
-is the SQL expression construct that will be rendered into the INSERT or
-UPDATE statement.
-
-The above SQL functions are usually executed "inline" with the INSERT or
-UPDATE statement being executed, meaning, a single statement is executed which
-embeds the given expressions or subqueries within the VALUES or SET clause of
-the statement. Although in some cases, the function is "pre-executed" in a
-SELECT statement of its own beforehand. This happens when all of the following
-is true:
-
-* the column is a primary key column
-* the database dialect does not support a usable ``cursor.lastrowid`` accessor
- (or equivalent); this currently includes PostgreSQL, Oracle, and Firebird, as
- well as some MySQL dialects.
-* the dialect does not support the "RETURNING" clause or similar, or the
- ``implicit_returning`` flag is set to ``False`` for the dialect. Dialects
- which support RETURNING currently include Postgresql, Oracle, Firebird, and
- MS-SQL.
-* the statement is a single execution, i.e. only supplies one set of
- parameters and doesn't use "executemany" behavior
-* the ``inline=True`` flag is not set on the
- :class:`~sqlalchemy.sql.expression.Insert()` or
- :class:`~sqlalchemy.sql.expression.Update()` construct, and the statement has
- not defined an explicit `returning()` clause.
-
-Whether or not the default generation clause "pre-executes" is not something
-that normally needs to be considered, unless it is being addressed for
-performance reasons.
-
-When the statement is executed with a single set of parameters (that is, it is
-not an "executemany" style execution), the returned
-:class:`~sqlalchemy.engine.ResultProxy` will contain a collection
-accessible via ``result.postfetch_cols()`` which contains a list of all
-:class:`~sqlalchemy.schema.Column` objects which had an inline-executed
-default. Similarly, all parameters which were bound to the statement,
-including all Python and SQL expressions which were pre-executed, are present
-in the ``last_inserted_params()`` or ``last_updated_params()`` collections on
-:class:`~sqlalchemy.engine.ResultProxy`. The ``inserted_primary_key``
-collection contains a list of primary key values for the row inserted (a list
-so that single-column and composite-column primary keys are represented in the
-same format).
-
-Server Side Defaults
---------------------
-
-A variant on the SQL expression default is the ``server_default``, which gets
-placed in the CREATE TABLE statement during a ``create()`` operation:
-
-.. sourcecode:: python+sql
-
- t = Table('test', meta,
- Column('abc', String(20), server_default='abc'),
- Column('created_at', DateTime, server_default=text("sysdate"))
- )
-
-A create call for the above table will produce::
-
- CREATE TABLE test (
- abc varchar(20) default 'abc',
- created_at datetime default sysdate
- )
-
-The behavior of ``server_default`` is similar to that of a regular SQL
-default; if it's placed on a primary key column for a database which doesn't
-have a way to "postfetch" the ID, and the statement is not "inlined", the SQL
-expression is pre-executed; otherwise, SQLAlchemy lets the default fire off on
-the database side normally.
-
-.. _triggered_columns:
-
-Triggered Columns
-------------------
-
-Columns with values set by a database trigger or other external process may be
-called out using :class:`.FetchedValue` as a marker::
-
- t = Table('test', meta,
- Column('abc', String(20), server_default=FetchedValue()),
- Column('def', String(20), server_onupdate=FetchedValue())
- )
-
-.. versionchanged:: 0.8.0b2,0.7.10
- The ``for_update`` argument on :class:`.FetchedValue` is set automatically
- when specified as the ``server_onupdate`` argument. If using an older version,
- specify the onupdate above as ``server_onupdate=FetchedValue(for_update=True)``.
-
-These markers do not emit a "default" clause when the table is created,
-however they do set the same internal flags as a static ``server_default``
-clause, providing hints to higher-level tools that a "post-fetch" of these
-rows should be performed after an insert or update.
-
-.. note::
-
- It's generally not appropriate to use :class:`.FetchedValue` in
- conjunction with a primary key column, particularly when using the
- ORM or any other scenario where the :attr:`.ResultProxy.inserted_primary_key`
-    attribute is required. This is because the "post-fetch" operation requires
- that the primary key value already be available, so that the
- row can be selected on its primary key.
-
- For a server-generated primary key value, all databases provide special
- accessors or other techniques in order to acquire the "last inserted
- primary key" column of a table. These mechanisms aren't affected by the presence
- of :class:`.FetchedValue`. For special situations where triggers are
- used to generate primary key values, and the database in use does not
- support the ``RETURNING`` clause, it may be necessary to forego the usage
- of the trigger and instead apply the SQL expression or function as a
- "pre execute" expression::
-
- t = Table('test', meta,
- Column('abc', MyType, default=func.generate_new_value(), primary_key=True)
- )
-
- Where above, when :meth:`.Table.insert` is used,
- the ``func.generate_new_value()`` expression will be pre-executed
- in the context of a scalar ``SELECT`` statement, and the new value will
- be applied to the subsequent ``INSERT``, while at the same time being
- made available to the :attr:`.ResultProxy.inserted_primary_key`
- attribute.
-
-
-Defining Sequences
--------------------
-
-SQLAlchemy represents database sequences using the
-:class:`~sqlalchemy.schema.Sequence` object, which is considered to be a
-special case of "column default". It only has an effect on databases which
-have explicit support for sequences, which currently includes Postgresql,
-Oracle, and Firebird. The :class:`~sqlalchemy.schema.Sequence` object is
-otherwise ignored.
-
-The :class:`~sqlalchemy.schema.Sequence` may be placed on any column as a
-"default" generator to be used during INSERT operations, and can also be
-configured to fire off during UPDATE operations if desired. It is most
-commonly used in conjunction with a single integer primary key column::
-
- table = Table("cartitems", meta,
- Column("cart_id", Integer, Sequence('cart_id_seq'), primary_key=True),
- Column("description", String(40)),
- Column("createdate", DateTime())
- )
-
-Where above, the table "cartitems" is associated with a sequence named
-"cart_id_seq". When INSERT statements take place for "cartitems", and no value
-is passed for the "cart_id" column, the "cart_id_seq" sequence will be used to
-generate a value.
-
-When the :class:`~sqlalchemy.schema.Sequence` is associated with a table,
-CREATE and DROP statements issued for that table will also issue CREATE/DROP
-for the sequence object as well, thus "bundling" the sequence object with its
-parent table.
-
-The :class:`~sqlalchemy.schema.Sequence` object also implements special
-functionality to accommodate Postgresql's SERIAL datatype. The SERIAL type in
-PG automatically generates a sequence that is used implicitly during inserts.
-This means that if a :class:`~sqlalchemy.schema.Table` object defines a
-:class:`~sqlalchemy.schema.Sequence` on its primary key column so that it
-works with Oracle and Firebird, the :class:`~sqlalchemy.schema.Sequence` would
-get in the way of the "implicit" sequence that PG would normally use. For this
-use case, add the flag ``optional=True`` to the
-:class:`~sqlalchemy.schema.Sequence` object - this indicates that the
-:class:`~sqlalchemy.schema.Sequence` should only be used if the database
-provides no other option for generating primary key identifiers.
-
-The :class:`~sqlalchemy.schema.Sequence` object also has the ability to be
-executed standalone like a SQL expression, which has the effect of calling its
-"next value" function::
-
- seq = Sequence('some_sequence')
- nextid = connection.execute(seq)
-
-Default Objects API
--------------------
-
-.. autoclass:: ColumnDefault
- :show-inheritance:
-
-.. autoclass:: DefaultClause
- :show-inheritance:
-
-.. autoclass:: DefaultGenerator
- :show-inheritance:
-
-.. autoclass:: FetchedValue
- :show-inheritance:
-
-.. autoclass:: PassiveDefault
- :show-inheritance:
-
-.. autoclass:: Sequence
- :show-inheritance:
- :members:
-
-Defining Constraints and Indexes
-=================================
-
-.. _metadata_foreignkeys:
-.. _metadata_constraints:
-
-Defining Foreign Keys
----------------------
-
-A *foreign key* in SQL is a table-level construct that constrains one or more
-columns in that table to only allow values that are present in a different set
-of columns, typically but not always located on a different table. We call the
-columns which are constrained the *foreign key* columns and the columns which
-they are constrained towards the *referenced* columns. The referenced columns
-almost always define the primary key for their owning table, though there are
-exceptions to this. The foreign key is the "joint" that connects together
-pairs of rows which have a relationship with each other, and SQLAlchemy
-assigns very deep importance to this concept in virtually every area of its
-operation.
-
-In SQLAlchemy as well as in DDL, foreign key constraints can be defined as
-additional attributes within the table clause, or for single-column foreign
-keys they may optionally be specified within the definition of a single
-column. The single column foreign key is more common, and at the column level
-is specified by constructing a :class:`~sqlalchemy.schema.ForeignKey` object
-as an argument to a :class:`~sqlalchemy.schema.Column` object::
-
- user_preference = Table('user_preference', metadata,
- Column('pref_id', Integer, primary_key=True),
- Column('user_id', Integer, ForeignKey("user.user_id"), nullable=False),
- Column('pref_name', String(40), nullable=False),
- Column('pref_value', String(100))
- )
-
-Above, we define a new table ``user_preference`` for which each row must
-contain a value in the ``user_id`` column that also exists in the ``user``
-table's ``user_id`` column.
-
-The argument to :class:`~sqlalchemy.schema.ForeignKey` is most commonly a
-string of the form *<tablename>.<columnname>*, or for a table in a remote
-schema or "owner" of the form *<schemaname>.<tablename>.<columnname>*. It may
-also be an actual :class:`~sqlalchemy.schema.Column` object, which as we'll
-see later is accessed from an existing :class:`~sqlalchemy.schema.Table`
-object via its ``c`` collection::
-
- ForeignKey(user.c.user_id)
-
-The advantage to using a string is that the in-python linkage between ``user``
-and ``user_preference`` is resolved only when first needed, so that table
-objects can be easily spread across multiple modules and defined in any order.
-
-Foreign keys may also be defined at the table level, using the
-:class:`~sqlalchemy.schema.ForeignKeyConstraint` object. This object can
-describe a single- or multi-column foreign key. A multi-column foreign key is
-known as a *composite* foreign key, and almost always references a table that
-has a composite primary key. Below we define a table ``invoice`` which has a
-composite primary key::
-
- invoice = Table('invoice', metadata,
- Column('invoice_id', Integer, primary_key=True),
- Column('ref_num', Integer, primary_key=True),
- Column('description', String(60), nullable=False)
- )
-
-And then a table ``invoice_item`` with a composite foreign key referencing
-``invoice``::
-
- invoice_item = Table('invoice_item', metadata,
- Column('item_id', Integer, primary_key=True),
- Column('item_name', String(60), nullable=False),
- Column('invoice_id', Integer, nullable=False),
- Column('ref_num', Integer, nullable=False),
- ForeignKeyConstraint(['invoice_id', 'ref_num'], ['invoice.invoice_id', 'invoice.ref_num'])
- )
-
-It's important to note that the
-:class:`~sqlalchemy.schema.ForeignKeyConstraint` is the only way to define a
-composite foreign key. While we could also have placed individual
-:class:`~sqlalchemy.schema.ForeignKey` objects on both the
-``invoice_item.invoice_id`` and ``invoice_item.ref_num`` columns, SQLAlchemy
-would not be aware that these two values should be paired together - it would
-be two individual foreign key constraints instead of a single composite
-foreign key referencing two columns.
-
-.. _use_alter:
-
-Creating/Dropping Foreign Key Constraints via ALTER
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-In all the above examples, the :class:`~sqlalchemy.schema.ForeignKey` object
-causes the "REFERENCES" keyword to be added inline to a column definition
-within a "CREATE TABLE" statement when
-:func:`~sqlalchemy.schema.MetaData.create_all` is issued, and
-:class:`~sqlalchemy.schema.ForeignKeyConstraint` invokes the "CONSTRAINT"
-keyword inline with "CREATE TABLE". There are some cases where this is
-undesirable, particularly when two tables reference each other mutually, each
-with a foreign key referencing the other. In such a situation at least one of
-the foreign key constraints must be generated after both tables have been
-built. To support such a scheme, :class:`~sqlalchemy.schema.ForeignKey` and
-:class:`~sqlalchemy.schema.ForeignKeyConstraint` offer the flag
-``use_alter=True``. When using this flag, the constraint will be generated
-using a definition similar to "ALTER TABLE <tablename> ADD CONSTRAINT <name>
-...". Since a name is required, the ``name`` attribute must also be specified.
-For example::
-
- node = Table('node', meta,
- Column('node_id', Integer, primary_key=True),
- Column('primary_element', Integer,
- ForeignKey('element.element_id', use_alter=True, name='fk_node_element_id')
- )
- )
-
- element = Table('element', meta,
- Column('element_id', Integer, primary_key=True),
- Column('parent_node_id', Integer),
- ForeignKeyConstraint(
- ['parent_node_id'],
- ['node.node_id'],
- use_alter=True,
- name='fk_element_parent_node_id'
- )
- )
-
-ON UPDATE and ON DELETE
-~~~~~~~~~~~~~~~~~~~~~~~
-
-Most databases support *cascading* of foreign key values, that is, when a
-parent row is updated the new value is placed in child rows, or when the
-parent row is deleted all corresponding child rows are set to null or deleted.
-In data definition language these are specified using phrases like "ON UPDATE
-CASCADE", "ON DELETE CASCADE", and "ON DELETE SET NULL", corresponding to
-foreign key constraints. The phrase after "ON UPDATE" or "ON DELETE" may also
-allow other phrases that are specific to the database in use. The
-:class:`~sqlalchemy.schema.ForeignKey` and
-:class:`~sqlalchemy.schema.ForeignKeyConstraint` objects support the
-generation of this clause via the ``onupdate`` and ``ondelete`` keyword
-arguments. The value is any string which will be output after the appropriate
-"ON UPDATE" or "ON DELETE" phrase::
-
- child = Table('child', meta,
- Column('id', Integer,
- ForeignKey('parent.id', onupdate="CASCADE", ondelete="CASCADE"),
- primary_key=True
- )
- )
-
- composite = Table('composite', meta,
- Column('id', Integer, primary_key=True),
- Column('rev_id', Integer),
- Column('note_id', Integer),
- ForeignKeyConstraint(
- ['rev_id', 'note_id'],
- ['revisions.id', 'revisions.note_id'],
- onupdate="CASCADE", ondelete="SET NULL"
- )
- )
-
-Note that these clauses are not supported on SQLite, and require ``InnoDB``
-tables when used with MySQL. They may also not be supported on other
-databases.
-
-
-UNIQUE Constraint
------------------
-
-Unique constraints can be created anonymously on a single column using the
-``unique`` keyword on :class:`~sqlalchemy.schema.Column`. Explicitly named
-unique constraints and/or those with multiple columns are created via the
-:class:`~sqlalchemy.schema.UniqueConstraint` table-level construct.
-
-.. sourcecode:: python+sql
-
- meta = MetaData()
- mytable = Table('mytable', meta,
-
- # per-column anonymous unique constraint
- Column('col1', Integer, unique=True),
-
- Column('col2', Integer),
- Column('col3', Integer),
-
- # explicit/composite unique constraint. 'name' is optional.
- UniqueConstraint('col2', 'col3', name='uix_1')
- )
-
-CHECK Constraint
-----------------
-
-Check constraints can be named or unnamed and can be created at the Column or
-Table level, using the :class:`~sqlalchemy.schema.CheckConstraint` construct.
-The text of the check constraint is passed directly through to the database,
-so there is limited "database independent" behavior. Column level check
-constraints generally should only refer to the column to which they are
-placed, while table level constraints can refer to any columns in the table.
-
-Note that some databases do not actively support check constraints such as
-MySQL.
-
-.. sourcecode:: python+sql
-
- meta = MetaData()
- mytable = Table('mytable', meta,
-
- # per-column CHECK constraint
- Column('col1', Integer, CheckConstraint('col1>5')),
-
- Column('col2', Integer),
- Column('col3', Integer),
-
- # table level CHECK constraint. 'name' is optional.
- CheckConstraint('col2 > col3 + 5', name='check1')
- )
-
- {sql}mytable.create(engine)
- CREATE TABLE mytable (
- col1 INTEGER CHECK (col1>5),
- col2 INTEGER,
- col3 INTEGER,
- CONSTRAINT check1 CHECK (col2 > col3 + 5)
- ){stop}
-
-Setting up Constraints when using the Declarative ORM Extension
-----------------------------------------------------------------
-
-The :class:`.Table` is the SQLAlchemy Core construct that allows one to define
-table metadata, which among other things can be used by the SQLAlchemy ORM
-as a target to map a class. The :ref:`Declarative <declarative_toplevel>`
-extension allows the :class:`.Table` object to be created automatically, given
-the contents of the table primarily as a mapping of :class:`.Column` objects.
-
-To apply table-level constraint objects such as :class:`.ForeignKeyConstraint`
-to a table defined using Declarative, use the ``__table_args__`` attribute,
-described at :ref:`declarative_table_args`.
-
-Constraints API
----------------
-.. autoclass:: Constraint
- :show-inheritance:
-
-.. autoclass:: CheckConstraint
- :show-inheritance:
-
-.. autoclass:: ColumnCollectionConstraint
- :show-inheritance:
-
-.. autoclass:: ForeignKey
- :members:
- :show-inheritance:
-
-.. autoclass:: ForeignKeyConstraint
- :members:
- :show-inheritance:
-
-.. autoclass:: PrimaryKeyConstraint
- :show-inheritance:
-
-.. autoclass:: UniqueConstraint
- :show-inheritance:
-
-.. _schema_indexes:
-
-Indexes
--------
-
-Indexes can be created anonymously (using an auto-generated name ``ix_<column
-label>``) for a single column using the inline ``index`` keyword on
-:class:`~sqlalchemy.schema.Column`, which also modifies the usage of
-``unique`` to apply the uniqueness to the index itself, instead of adding a
-separate UNIQUE constraint. For indexes with specific names or which encompass
-more than one column, use the :class:`~sqlalchemy.schema.Index` construct,
-which requires a name.
-
-Below we illustrate a :class:`~sqlalchemy.schema.Table` with several
-:class:`~sqlalchemy.schema.Index` objects associated. The DDL for "CREATE
-INDEX" is issued right after the create statements for the table:
-
-.. sourcecode:: python+sql
-
- meta = MetaData()
- mytable = Table('mytable', meta,
- # an indexed column, with index "ix_mytable_col1"
- Column('col1', Integer, index=True),
-
- # a uniquely indexed column with index "ix_mytable_col2"
- Column('col2', Integer, index=True, unique=True),
-
- Column('col3', Integer),
- Column('col4', Integer),
-
- Column('col5', Integer),
- Column('col6', Integer),
- )
-
- # place an index on col3, col4
- Index('idx_col34', mytable.c.col3, mytable.c.col4)
-
- # place a unique index on col5, col6
- Index('myindex', mytable.c.col5, mytable.c.col6, unique=True)
-
- {sql}mytable.create(engine)
- CREATE TABLE mytable (
- col1 INTEGER,
- col2 INTEGER,
- col3 INTEGER,
- col4 INTEGER,
- col5 INTEGER,
- col6 INTEGER
- )
- CREATE INDEX ix_mytable_col1 ON mytable (col1)
- CREATE UNIQUE INDEX ix_mytable_col2 ON mytable (col2)
- CREATE UNIQUE INDEX myindex ON mytable (col5, col6)
- CREATE INDEX idx_col34 ON mytable (col3, col4){stop}
-
-Note in the example above, the :class:`.Index` construct is created
-externally to the table to which it corresponds, using :class:`.Column`
-objects directly. :class:`.Index` also supports
-"inline" definition inside the :class:`.Table`, using string names to
-identify columns::
-
- meta = MetaData()
- mytable = Table('mytable', meta,
- Column('col1', Integer),
-
- Column('col2', Integer),
-
- Column('col3', Integer),
- Column('col4', Integer),
-
- # place an index on col1, col2
- Index('idx_col12', 'col1', 'col2'),
-
- # place a unique index on col3, col4
- Index('idx_col34', 'col3', 'col4', unique=True)
- )
-
-.. versionadded:: 0.7
- Support of "inline" definition inside the :class:`.Table`
- for :class:`.Index`\ .
-
-The :class:`~sqlalchemy.schema.Index` object also supports its own ``create()`` method:
-
-.. sourcecode:: python+sql
-
- i = Index('someindex', mytable.c.col5)
- {sql}i.create(engine)
- CREATE INDEX someindex ON mytable (col5){stop}
-
-.. _schema_indexes_functional:
-
-Functional Indexes
-~~~~~~~~~~~~~~~~~~~
-
-:class:`.Index` supports SQL and function expressions, as supported by the
-target backend. To create an index against a column using a descending
-value, the :meth:`.ColumnElement.desc` modifier may be used::
-
- from sqlalchemy import Index
-
- Index('someindex', mytable.c.somecol.desc())
-
-Or with a backend that supports functional indexes such as Postgresql,
-a "case insensitive" index can be created using the ``lower()`` function::
-
- from sqlalchemy import func, Index
-
- Index('someindex', func.lower(mytable.c.somecol))
-
-.. versionadded:: 0.8 :class:`.Index` supports SQL expressions and functions
- as well as plain columns.
-
-Index API
----------
-
-.. autoclass:: Index
- :show-inheritance:
- :members:
-
-.. _metadata_ddl:
-
-Customizing DDL
-===============
-
-In the preceding sections we've discussed a variety of schema constructs
-including :class:`~sqlalchemy.schema.Table`,
-:class:`~sqlalchemy.schema.ForeignKeyConstraint`,
-:class:`~sqlalchemy.schema.CheckConstraint`, and
-:class:`~sqlalchemy.schema.Sequence`. Throughout, we've relied upon the
-``create()`` and :func:`~sqlalchemy.schema.MetaData.create_all` methods of
-:class:`~sqlalchemy.schema.Table` and :class:`~sqlalchemy.schema.MetaData` in
-order to issue data definition language (DDL) for all constructs. When issued,
-a pre-determined order of operations is invoked, and DDL to create each table
-is created unconditionally including all constraints and other objects
-associated with it. For more complex scenarios where database-specific DDL is
-required, SQLAlchemy offers two techniques which can be used to add any DDL
-based on any condition, either accompanying the standard generation of tables
-or by itself.
-
-.. _schema_ddl_sequences:
-
-Controlling DDL Sequences
--------------------------
-
-The ``sqlalchemy.schema`` package contains SQL expression constructs that
-provide DDL expressions. For example, to produce a ``CREATE TABLE`` statement:
-
-.. sourcecode:: python+sql
-
- from sqlalchemy.schema import CreateTable
- {sql}engine.execute(CreateTable(mytable))
- CREATE TABLE mytable (
- col1 INTEGER,
- col2 INTEGER,
- col3 INTEGER,
- col4 INTEGER,
- col5 INTEGER,
- col6 INTEGER
- ){stop}
-
-Above, the :class:`~sqlalchemy.schema.CreateTable` construct works like any
-other expression construct (such as ``select()``, ``table.insert()``, etc.). A
-full reference of available constructs is in :ref:`schema_api_ddl`.
-
-The DDL constructs all extend a common base class which provides the
-capability to be associated with an individual
-:class:`~sqlalchemy.schema.Table` or :class:`~sqlalchemy.schema.MetaData`
-object, to be invoked upon create/drop events. Consider the example of a table
-which contains a CHECK constraint:
-
-.. sourcecode:: python+sql
-
- users = Table('users', metadata,
- Column('user_id', Integer, primary_key=True),
- Column('user_name', String(40), nullable=False),
- CheckConstraint('length(user_name) >= 8',name="cst_user_name_length")
- )
-
- {sql}users.create(engine)
- CREATE TABLE users (
- user_id SERIAL NOT NULL,
- user_name VARCHAR(40) NOT NULL,
- PRIMARY KEY (user_id),
- CONSTRAINT cst_user_name_length CHECK (length(user_name) >= 8)
- ){stop}
-
-The above table contains a column "user_name" which is subject to a CHECK
-constraint that validates that the length of the string is at least eight
-characters. When a ``create()`` is issued for this table, DDL for the
-:class:`~sqlalchemy.schema.CheckConstraint` will also be issued inline within
-the table definition.
-
-The :class:`~sqlalchemy.schema.CheckConstraint` construct can also be
-constructed externally and associated with the
-:class:`~sqlalchemy.schema.Table` afterwards::
-
- constraint = CheckConstraint('length(user_name) >= 8',name="cst_user_name_length")
- users.append_constraint(constraint)
-
-So far, the effect is the same. However, if we create DDL elements
-corresponding to the creation and removal of this constraint, and associate
-them with the :class:`.Table` as events, these new events
-will take over the job of issuing DDL for the constraint. Additionally, the
-constraint will be added via ALTER:
-
-.. sourcecode:: python+sql
-
- from sqlalchemy import event
-
- event.listen(
- users,
- "after_create",
- AddConstraint(constraint)
- )
- event.listen(
- users,
- "before_drop",
- DropConstraint(constraint)
- )
-
- {sql}users.create(engine)
- CREATE TABLE users (
- user_id SERIAL NOT NULL,
- user_name VARCHAR(40) NOT NULL,
- PRIMARY KEY (user_id)
- )
-
- ALTER TABLE users ADD CONSTRAINT cst_user_name_length CHECK (length(user_name) >= 8){stop}
-
- {sql}users.drop(engine)
- ALTER TABLE users DROP CONSTRAINT cst_user_name_length
- DROP TABLE users{stop}
-
-The real usefulness of the above becomes clearer once we illustrate the
-:meth:`.DDLElement.execute_if` method. This method returns a modified form of
-the DDL callable which will filter on criteria before responding to a
-received event. It accepts a parameter ``dialect``, which is the string
-name of a dialect or a tuple of such, which will limit the execution of the
-item to just those dialects. It also accepts a ``callable_`` parameter which
-may reference a Python callable which will be invoked upon event reception,
-returning ``True`` or ``False`` indicating if the event should proceed.
-
-If our :class:`~sqlalchemy.schema.CheckConstraint` was only supported by
-Postgresql and not other databases, we could limit its usage to just that dialect::
-
- event.listen(
- users,
- 'after_create',
- AddConstraint(constraint).execute_if(dialect='postgresql')
- )
- event.listen(
- users,
- 'before_drop',
- DropConstraint(constraint).execute_if(dialect='postgresql')
- )
-
-Or to any set of dialects::
-
- event.listen(
- users,
- "after_create",
- AddConstraint(constraint).execute_if(dialect=('postgresql', 'mysql'))
- )
- event.listen(
- users,
- "before_drop",
- DropConstraint(constraint).execute_if(dialect=('postgresql', 'mysql'))
- )
-
-When using a callable, the callable is passed the ddl element, the
-:class:`.Table` or :class:`.MetaData`
-object whose "create" or "drop" event is in progress, and the
-:class:`.Connection` object being used for the
-operation, as well as additional information as keyword arguments. The
-callable can perform checks, such as whether or not a given item already
-exists. Below we define ``should_create()`` and ``should_drop()`` callables
-that check for the presence of our named constraint:
-
-.. sourcecode:: python+sql
-
- def should_create(ddl, target, connection, **kw):
- row = connection.execute("select conname from pg_constraint where conname='%s'" % ddl.element.name).scalar()
- return not bool(row)
-
- def should_drop(ddl, target, connection, **kw):
- return not should_create(ddl, target, connection, **kw)
-
- event.listen(
- users,
- "after_create",
- AddConstraint(constraint).execute_if(callable_=should_create)
- )
- event.listen(
- users,
- "before_drop",
- DropConstraint(constraint).execute_if(callable_=should_drop)
- )
-
- {sql}users.create(engine)
- CREATE TABLE users (
- user_id SERIAL NOT NULL,
- user_name VARCHAR(40) NOT NULL,
- PRIMARY KEY (user_id)
- )
-
- select conname from pg_constraint where conname='cst_user_name_length'
- ALTER TABLE users ADD CONSTRAINT cst_user_name_length CHECK (length(user_name) >= 8){stop}
-
- {sql}users.drop(engine)
- select conname from pg_constraint where conname='cst_user_name_length'
- ALTER TABLE users DROP CONSTRAINT cst_user_name_length
- DROP TABLE users{stop}
-
-Custom DDL
-----------
-
-Custom DDL phrases are most easily achieved using the
-:class:`~sqlalchemy.schema.DDL` construct. This construct works like all the
-other DDL elements except it accepts a string which is the text to be emitted:
-
-.. sourcecode:: python+sql
-
- event.listen(
- metadata,
- "after_create",
- DDL("ALTER TABLE users ADD CONSTRAINT "
- "cst_user_name_length "
- " CHECK (length(user_name) >= 8)")
- )
-
-A more comprehensive method of creating libraries of DDL constructs is to use
-custom compilation - see :ref:`sqlalchemy.ext.compiler_toplevel` for
-details.
-
-.. _schema_api_ddl:
-
-DDL Expression Constructs API
------------------------------
-
-.. autoclass:: DDLElement
- :members:
- :undoc-members:
- :show-inheritance:
-
-.. autoclass:: DDL
- :members:
- :undoc-members:
- :show-inheritance:
-
-.. autoclass:: CreateTable
- :members:
- :undoc-members:
- :show-inheritance:
-
-.. autoclass:: DropTable
- :members:
- :undoc-members:
- :show-inheritance:
-
-.. autoclass:: CreateColumn
- :members:
- :undoc-members:
- :show-inheritance:
-
-.. autoclass:: CreateSequence
- :members:
- :undoc-members:
- :show-inheritance:
-
-.. autoclass:: DropSequence
- :members:
- :undoc-members:
- :show-inheritance:
-
-.. autoclass:: CreateIndex
- :members:
- :undoc-members:
- :show-inheritance:
-
-.. autoclass:: DropIndex
- :members:
- :undoc-members:
- :show-inheritance:
-
-.. autoclass:: AddConstraint
- :members:
- :undoc-members:
- :show-inheritance:
+.. toctree::
+ :maxdepth: 1
-.. autoclass:: DropConstraint
- :members:
- :undoc-members:
- :show-inheritance:
+ metadata
+ reflection
+ defaults
+ constraints
+ ddl
-.. autoclass:: CreateSchema
- :members:
- :undoc-members:
- :show-inheritance:
-.. autoclass:: DropSchema
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/doc/build/core/selectable.rst b/doc/build/core/selectable.rst
new file mode 100644
index 000000000..52acb28e5
--- /dev/null
+++ b/doc/build/core/selectable.rst
@@ -0,0 +1,85 @@
+Selectables, Tables, FROM objects
+=================================
+
+The term "selectable" refers to any object that rows can be selected from;
+in SQLAlchemy, these objects descend from :class:`.FromClause` and their
+distinguishing feature is their :attr:`.FromClause.c` attribute, which is
+a namespace of all the columns contained within the FROM clause (these
+elements are themselves :class:`.ColumnElement` subclasses).
+
+.. module:: sqlalchemy.sql.expression
+
+.. autofunction:: alias
+
+.. autofunction:: except_
+
+.. autofunction:: except_all
+
+.. autofunction:: exists
+
+.. autofunction:: intersect
+
+.. autofunction:: intersect_all
+
+.. autofunction:: join
+
+.. autofunction:: outerjoin
+
+.. autofunction:: select
+
+.. autofunction:: subquery
+
+.. autofunction:: sqlalchemy.sql.expression.table
+
+.. autofunction:: union
+
+.. autofunction:: union_all
+
+.. autoclass:: Alias
+ :members:
+ :inherited-members:
+
+.. autoclass:: CompoundSelect
+ :members:
+ :inherited-members:
+
+.. autoclass:: CTE
+ :members:
+ :inherited-members:
+
+.. autoclass:: Executable
+ :members:
+
+.. autoclass:: FromClause
+ :members:
+
+.. autoclass:: GenerativeSelect
+ :members:
+ :inherited-members:
+
+.. autoclass:: HasPrefixes
+ :members:
+
+.. autoclass:: Join
+ :members:
+ :inherited-members:
+
+.. autoclass:: ScalarSelect
+ :members:
+
+.. autoclass:: Select
+ :members:
+ :inherited-members:
+
+.. autoclass:: Selectable
+ :members:
+
+.. autoclass:: SelectBase
+ :members:
+
+.. autoclass:: TableClause
+ :members:
+ :inherited-members:
+
+.. autoclass:: TextAsFrom
+ :members:
diff --git a/doc/build/core/sqlelement.rst b/doc/build/core/sqlelement.rst
new file mode 100644
index 000000000..47855a6a3
--- /dev/null
+++ b/doc/build/core/sqlelement.rst
@@ -0,0 +1,140 @@
+Column Elements and Expressions
+===============================
+
+.. module:: sqlalchemy.sql.expression
+
+The most fundamental part of the SQL expression API is the "column elements",
+which allow for basic SQL expression support. The core of all SQL expression
+constructs is the :class:`.ClauseElement`, which is the base for several
+sub-branches. The :class:`.ColumnElement` class is the fundamental unit
+used to construct any kind of typed SQL expression.
+
+.. autofunction:: and_
+
+.. autofunction:: asc
+
+.. autofunction:: between
+
+.. autofunction:: bindparam
+
+.. autofunction:: case
+
+.. autofunction:: cast
+
+.. autofunction:: sqlalchemy.sql.expression.column
+
+.. autofunction:: collate
+
+.. autofunction:: desc
+
+.. autofunction:: distinct
+
+.. autofunction:: extract
+
+.. autofunction:: false
+
+.. autodata:: func
+
+.. autofunction:: label
+
+.. autofunction:: literal
+
+.. autofunction:: literal_column
+
+.. autofunction:: not_
+
+.. autofunction:: null
+
+.. autofunction:: nullsfirst
+
+.. autofunction:: nullslast
+
+.. autofunction:: or_
+
+.. autofunction:: outparam
+
+.. autofunction:: over
+
+.. autofunction:: text
+
+.. autofunction:: true
+
+.. autofunction:: tuple_
+
+.. autofunction:: type_coerce
+
+.. autoclass:: BinaryExpression
+ :members:
+
+.. autoclass:: BindParameter
+ :members:
+
+.. autoclass:: Case
+ :members:
+
+.. autoclass:: Cast
+ :members:
+
+.. autoclass:: ClauseElement
+ :members:
+
+
+.. autoclass:: ClauseList
+ :members:
+
+
+.. autoclass:: ColumnClause
+ :members:
+
+.. autoclass:: ColumnCollection
+ :members:
+
+
+.. autoclass:: ColumnElement
+ :members:
+ :inherited-members:
+ :undoc-members:
+
+.. autoclass:: sqlalchemy.sql.operators.ColumnOperators
+ :members:
+ :special-members:
+ :inherited-members:
+
+.. autoclass:: Extract
+ :members:
+
+.. autoclass:: sqlalchemy.sql.elements.False_
+ :members:
+
+.. autoclass:: Label
+ :members:
+
+.. autoclass:: sqlalchemy.sql.elements.Null
+ :members:
+
+.. autoclass:: Over
+ :members:
+
+.. autoclass:: TextClause
+ :members:
+
+.. autoclass:: Tuple
+ :members:
+
+.. autoclass:: sqlalchemy.sql.elements.True_
+ :members:
+
+.. autoclass:: sqlalchemy.sql.operators.custom_op
+ :members:
+
+.. autoclass:: sqlalchemy.sql.operators.Operators
+ :members:
+ :special-members:
+
+.. autoclass:: sqlalchemy.sql.elements.quoted_name
+
+.. autoclass:: UnaryExpression
+ :members:
+
+
+
diff --git a/doc/build/core/tutorial.rst b/doc/build/core/tutorial.rst
index 0203248ae..c2a55233d 100644
--- a/doc/build/core/tutorial.rst
+++ b/doc/build/core/tutorial.rst
@@ -50,13 +50,13 @@ Version Check
=============
-A quick check to verify that we are on at least **version 0.8** of SQLAlchemy:
+A quick check to verify that we are on at least **version 0.9** of SQLAlchemy:
.. sourcecode:: pycon+sql
>>> import sqlalchemy
>>> sqlalchemy.__version__ # doctest:+SKIP
- 0.8.0
+ 0.9.0
Connecting
==========
@@ -238,7 +238,7 @@ we use the ``connect()`` method::
>>> conn #doctest: +ELLIPSIS
<sqlalchemy.engine.base.Connection object at 0x...>
-The :class:`~sqlalchemy.engine.base.Connection` object represents an actively
+The :class:`~sqlalchemy.engine.Connection` object represents an actively
checked out DBAPI connection resource. Lets feed it our
:class:`~sqlalchemy.sql.expression.Insert` object and see what happens:
@@ -252,7 +252,7 @@ checked out DBAPI connection resource. Lets feed it our
So the INSERT statement was now issued to the database. Although we got
positional "qmark" bind parameters instead of "named" bind parameters in the
output. How come ? Because when executed, the
-:class:`~sqlalchemy.engine.base.Connection` used the SQLite **dialect** to
+:class:`~sqlalchemy.engine.Connection` used the SQLite **dialect** to
help generate the statement; when we use the ``str()`` function, the statement
isn't aware of this dialect, and falls back onto a default which uses named
parameters. We can view this manually as follows:
@@ -264,9 +264,9 @@ parameters. We can view this manually as follows:
'INSERT INTO users (name, fullname) VALUES (?, ?)'
What about the ``result`` variable we got when we called ``execute()`` ? As
-the SQLAlchemy :class:`~sqlalchemy.engine.base.Connection` object references a
+the SQLAlchemy :class:`~sqlalchemy.engine.Connection` object references a
DBAPI connection, the result, known as a
-:class:`~sqlalchemy.engine.result.ResultProxy` object, is analogous to the DBAPI
+:class:`~sqlalchemy.engine.ResultProxy` object, is analogous to the DBAPI
cursor object. In the case of an INSERT, we can get important information from
it, such as the primary key values which were generated from our statement:
@@ -281,7 +281,7 @@ did not specify the ``id`` column in our
value would have been used. In either case, SQLAlchemy always knows how to get
at a newly generated primary key value, even though the method of generating
them is different across different databases; each database's
-:class:`~sqlalchemy.engine.base.Dialect` knows the specific steps needed to
+:class:`~sqlalchemy.engine.interfaces.Dialect` knows the specific steps needed to
determine the correct value (or values; note that ``inserted_primary_key``
returns a list so that it supports composite primary keys).
@@ -292,7 +292,7 @@ Our insert example above was intentionally a little drawn out to show some
various behaviors of expression language constructs. In the usual case, an
:class:`~sqlalchemy.sql.expression.Insert` statement is usually compiled
against the parameters sent to the ``execute()`` method on
-:class:`~sqlalchemy.engine.base.Connection`, so that there's no need to use
+:class:`~sqlalchemy.engine.Connection`, so that there's no need to use
the ``values`` keyword with :class:`~sqlalchemy.sql.expression.Insert`. Lets
create a generic :class:`~sqlalchemy.sql.expression.Insert` statement again
and use it in the "normal" way:
@@ -363,10 +363,10 @@ Above, we issued a basic :func:`.select` call, placing the ``users`` table
within the COLUMNS clause of the select, and then executing. SQLAlchemy
expanded the ``users`` table into the set of each of its columns, and also
generated a FROM clause for us. The result returned is again a
-:class:`~sqlalchemy.engine.result.ResultProxy` object, which acts much like a
+:class:`~sqlalchemy.engine.ResultProxy` object, which acts much like a
DBAPI cursor, including methods such as
-:func:`~sqlalchemy.engine.result.ResultProxy.fetchone` and
-:func:`~sqlalchemy.engine.result.ResultProxy.fetchall`. The easiest way to get
+:func:`~sqlalchemy.engine.ResultProxy.fetchone` and
+:func:`~sqlalchemy.engine.ResultProxy.fetchall`. The easiest way to get
rows from it is to just iterate:
.. sourcecode:: pycon+sql
@@ -414,7 +414,7 @@ But another way, whose usefulness will become apparent later on, is to use the
Result sets which have pending rows remaining should be explicitly closed
before discarding. While the cursor and connection resources referenced by the
-:class:`~sqlalchemy.engine.result.ResultProxy` will be respectively closed and
+:class:`~sqlalchemy.engine.ResultProxy` will be respectively closed and
returned to the connection pool when the object is garbage collected, it's
better to make it explicit as some database APIs are very picky about such
things:
@@ -1593,6 +1593,8 @@ table, or the same table:
COMMIT
{stop}<sqlalchemy.engine.result.ResultProxy object at 0x...>
+.. _multi_table_updates:
+
Multiple Table Updates
----------------------
diff --git a/doc/build/core/types.rst b/doc/build/core/types.rst
index 131e8e64d..b4781ee51 100644
--- a/doc/build/core/types.rst
+++ b/doc/build/core/types.rst
@@ -9,7 +9,7 @@ SQLAlchemy provides abstractions for most common database data types,
and a mechanism for specifying your own custom data types.
The methods and attributes of type objects are rarely used directly.
-Type objects are supplied to :class:`~sqlalchemy.Table` definitions
+Type objects are supplied to :class:`~sqlalchemy.schema.Table` definitions
and can be supplied as type hints to `functions` for occasions where
the database driver returns an incorrect type.
@@ -24,7 +24,7 @@ the database driver returns an incorrect type.
SQLAlchemy will use the ``Integer`` and ``String(32)`` type
information when issuing a ``CREATE TABLE`` statement and will use it
again when reading back rows ``SELECTed`` from the database.
-Functions that accept a type (such as :func:`~sqlalchemy.Column`) will
+Functions that accept a type (such as :func:`~sqlalchemy.schema.Column`) will
typically accept a type class or instance; ``Integer`` is equivalent
to ``Integer()`` with no construction arguments in this case.
@@ -41,76 +41,58 @@ type is emitted in ``CREATE TABLE``, such as ``VARCHAR`` see `SQL
Standard Types`_ and the other sections of this chapter.
.. autoclass:: BigInteger
- :show-inheritance:
- :members:
+ :members:
.. autoclass:: Boolean
- :show-inheritance:
- :members:
+ :members:
.. autoclass:: Date
- :show-inheritance:
- :members:
+ :members:
.. autoclass:: DateTime
- :show-inheritance:
:members:
.. autoclass:: Enum
- :show-inheritance:
:members: __init__, create, drop
.. autoclass:: Float
- :show-inheritance:
:members:
.. autoclass:: Integer
- :show-inheritance:
:members:
.. autoclass:: Interval
- :show-inheritance:
- :members:
+ :members:
.. autoclass:: LargeBinary
- :show-inheritance:
- :members:
+ :members:
.. autoclass:: Numeric
- :show-inheritance:
:members:
.. autoclass:: PickleType
- :show-inheritance:
- :members:
+ :members:
.. autoclass:: SchemaType
- :show-inheritance:
:members:
:undoc-members:
.. autoclass:: SmallInteger
- :show-inheritance:
- :members:
+ :members:
.. autoclass:: String
- :show-inheritance:
:members:
.. autoclass:: Text
- :show-inheritance:
:members:
.. autoclass:: Time
- :show-inheritance:
:members:
.. autoclass:: Unicode
- :show-inheritance:
:members:
.. autoclass:: UnicodeText
- :show-inheritance:
:members:
.. _types_sqlstandard:
@@ -123,70 +105,70 @@ name when ``CREATE TABLE`` is issued. Some types may not be supported
on all databases.
.. autoclass:: BIGINT
- :show-inheritance:
+
.. autoclass:: BINARY
- :show-inheritance:
+
.. autoclass:: BLOB
- :show-inheritance:
+
.. autoclass:: BOOLEAN
- :show-inheritance:
+
.. autoclass:: CHAR
- :show-inheritance:
+
.. autoclass:: CLOB
- :show-inheritance:
+
.. autoclass:: DATE
- :show-inheritance:
+
.. autoclass:: DATETIME
- :show-inheritance:
+
.. autoclass:: DECIMAL
- :show-inheritance:
+
.. autoclass:: FLOAT
- :show-inheritance:
+
.. autoclass:: INT
- :show-inheritance:
+
.. autoclass:: sqlalchemy.types.INTEGER
- :show-inheritance:
+
.. autoclass:: NCHAR
- :show-inheritance:
+
.. autoclass:: NVARCHAR
- :show-inheritance:
+
.. autoclass:: NUMERIC
- :show-inheritance:
+
.. autoclass:: REAL
- :show-inheritance:
+
.. autoclass:: SMALLINT
- :show-inheritance:
+
.. autoclass:: TEXT
- :show-inheritance:
+
.. autoclass:: TIME
- :show-inheritance:
+
.. autoclass:: TIMESTAMP
- :show-inheritance:
+
.. autoclass:: VARBINARY
- :show-inheritance:
+
.. autoclass:: VARCHAR
- :show-inheritance:
+
.. _types_vendor:
@@ -194,7 +176,7 @@ Vendor-Specific Types
---------------------
Database-specific types are also available for import from each
-database's dialect module. See the :ref:`sqlalchemy.dialects_toplevel`
+database's dialect module. See the :ref:`dialect_toplevel`
reference for the database you're interested in.
For example, MySQL has a ``BIGINT`` type and PostgreSQL has an
@@ -300,7 +282,7 @@ to and from the database is required.
.. autoclass:: TypeDecorator
:members:
:inherited-members:
- :show-inheritance:
+
TypeDecorator Recipes
~~~~~~~~~~~~~~~~~~~~~
@@ -361,6 +343,8 @@ many decimal places. Here's a recipe that rounds them down::
value = value.quantize(self.quantize)
return value
+.. _custom_guid_type:
+
Backend-agnostic GUID Type
^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -690,7 +674,7 @@ to integers::
class MyInt(Integer):
class comparator_factory(Integer.Comparator):
def log(self, other):
- return func.log(self, other)
+ return func.log(self.expr, other)
Using the above type::
@@ -738,29 +722,25 @@ is needed, use :class:`.TypeDecorator` instead.
.. autoclass:: UserDefinedType
:members:
- :show-inheritance:
+
.. _types_api:
Base Type API
--------------
-.. autoclass:: AbstractType
- :members:
- :show-inheritance:
-
.. autoclass:: TypeEngine
:members:
- :show-inheritance:
+
.. autoclass:: Concatenable
:members:
:inherited-members:
- :show-inheritance:
+
.. autoclass:: NullType
- :show-inheritance:
+
.. autoclass:: Variant
- :show-inheritance:
+
:members: with_variant, __init__
diff --git a/doc/build/dialects/drizzle.rst b/doc/build/dialects/drizzle.rst
index 99ff596d7..c89bba032 100644
--- a/doc/build/dialects/drizzle.rst
+++ b/doc/build/dialects/drizzle.rst
@@ -23,51 +23,51 @@ construction arguments, are as follows:
.. autoclass:: BIGINT
:members: __init__
- :show-inheritance:
+
.. autoclass:: CHAR
:members: __init__
- :show-inheritance:
+
.. autoclass:: DECIMAL
:members: __init__
- :show-inheritance:
+
.. autoclass:: DOUBLE
:members: __init__
- :show-inheritance:
+
.. autoclass:: ENUM
:members: __init__
- :show-inheritance:
+
.. autoclass:: FLOAT
:members: __init__
- :show-inheritance:
+
.. autoclass:: INTEGER
:members: __init__
- :show-inheritance:
+
.. autoclass:: NUMERIC
:members: __init__
- :show-inheritance:
+
.. autoclass:: REAL
:members: __init__
- :show-inheritance:
+
.. autoclass:: TEXT
:members: __init__
- :show-inheritance:
+
.. autoclass:: TIMESTAMP
:members: __init__
- :show-inheritance:
+
.. autoclass:: VARCHAR
:members: __init__
- :show-inheritance:
+
MySQL-Python
diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst
index abf6e44f6..865d0714f 100644
--- a/doc/build/dialects/index.rst
+++ b/doc/build/dialects/index.rst
@@ -18,7 +18,6 @@ Included Dialects
drizzle
firebird
- informix
mssql
mysql
oracle
@@ -42,11 +41,35 @@ External Dialects
Current external dialect projects for SQLAlchemy include:
-* `ibm_db_sa <http://code.google.com/p/ibm-db/wiki/README>`_ - driver for IBM DB2, developed jointly by IBM and SQLAlchemy developers.
-* `sqlalchemy-access <https://bitbucket.org/zzzeek/sqlalchemy-access>`_ - driver for Microsoft Access.
-* `sqlalchemy-akiban <https://github.com/zzzeek/sqlalchemy_akiban>`_ - driver and ORM extensions for the `Akiban <http://www.akiban.com>`_ database.
-* `sqlalchemy-cubrid <https://bitbucket.org/zzzeek/sqlalchemy-cubrid>`_ - driver for the CUBRID database.
-* `sqlalchemy-maxdb <https://bitbucket.org/zzzeek/sqlalchemy-maxdb>`_ - driver for the MaxDB database.
+Production Ready
+^^^^^^^^^^^^^^^^
+
+* `ibm_db_sa <http://code.google.com/p/ibm-db/wiki/README>`_ - driver for IBM DB2 and Informix, developed jointly by IBM and SQLAlchemy developers.
+* `redshift-sqlalchemy <https://pypi.python.org/pypi/redshift-sqlalchemy>`_ - driver for Amazon Redshift, adapts
+  the existing Postgresql/psycopg2 driver.
+* `sqlalchemy-sqlany <https://github.com/sqlanywhere/sqlalchemy-sqlany>`_ - driver for SAP Sybase SQL Anywhere, developed by SAP.
+* `sqlalchemy-monetdb <https://github.com/gijzelaerr/sqlalchemy-monetdb>`_ - driver for MonetDB.
+
+Experimental / Incomplete
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Dialects that are in an incomplete state or are considered somewhat experimental.
+
+* `sqlalchemy-foundationdb <https://github.com/FoundationDB/sql-layer-adapter-sqlalchemy>`_ - driver and ORM extensions for the `Foundation DB <http://foundationdb.com/>`_ database, making use of the `FoundationDB SQL Layer <https://foundationdb.com/layers/sql/index.html>`_.
* `CALCHIPAN <https://bitbucket.org/zzzeek/calchipan/>`_ - Adapts `Pandas <http://pandas.pydata.org/>`_ dataframes to SQLAlchemy.
+* `sqlalchemy-cubrid <https://bitbucket.org/zzzeek/sqlalchemy-cubrid>`_ - driver for the CUBRID database.
+
+Attic
+^^^^^
+
+Dialects in the "attic" are those that were contributed for SQLAlchemy long ago
+but have received little attention or demand since then, and are now moved out to
+their own repositories in at best a semi-working state.
+Community members interested in these dialects should feel free to pick up on
+their current codebase and fork off into working libraries.
+
+* `sqlalchemy-access <https://bitbucket.org/zzzeek/sqlalchemy-access>`_ - driver for Microsoft Access.
+* `sqlalchemy-informixdb <https://bitbucket.org/zzzeek/sqlalchemy-informixdb>`_ - driver for the informixdb DBAPI.
+* `sqlalchemy-maxdb <https://bitbucket.org/zzzeek/sqlalchemy-maxdb>`_ - driver for the MaxDB database.
diff --git a/doc/build/dialects/informix.rst b/doc/build/dialects/informix.rst
deleted file mode 100644
index f37ae6cf5..000000000
--- a/doc/build/dialects/informix.rst
+++ /dev/null
@@ -1,11 +0,0 @@
-.. _informix_toplevel:
-
-Informix
-========
-
-.. automodule:: sqlalchemy.dialects.informix.base
-
-informixdb
-----------
-
-.. automodule:: sqlalchemy.dialects.informix.informixdb \ No newline at end of file
diff --git a/doc/build/dialects/mssql.rst b/doc/build/dialects/mssql.rst
index 615d1a11d..6173ffba1 100644
--- a/doc/build/dialects/mssql.rst
+++ b/doc/build/dialects/mssql.rst
@@ -26,75 +26,75 @@ construction arguments, are as follows:
.. autoclass:: BIT
:members: __init__
- :show-inheritance:
+
.. autoclass:: CHAR
:members: __init__
- :show-inheritance:
+
.. autoclass:: DATETIME2
:members: __init__
- :show-inheritance:
+
.. autoclass:: DATETIMEOFFSET
:members: __init__
- :show-inheritance:
+
.. autoclass:: IMAGE
:members: __init__
- :show-inheritance:
+
.. autoclass:: MONEY
:members: __init__
- :show-inheritance:
+
.. autoclass:: NCHAR
:members: __init__
- :show-inheritance:
+
.. autoclass:: NTEXT
:members: __init__
- :show-inheritance:
+
.. autoclass:: NVARCHAR
:members: __init__
- :show-inheritance:
+
.. autoclass:: REAL
:members: __init__
- :show-inheritance:
+
.. autoclass:: SMALLDATETIME
:members: __init__
- :show-inheritance:
+
.. autoclass:: SMALLMONEY
:members: __init__
- :show-inheritance:
+
.. autoclass:: SQL_VARIANT
:members: __init__
- :show-inheritance:
+
.. autoclass:: TEXT
:members: __init__
- :show-inheritance:
+
.. autoclass:: TIME
:members: __init__
- :show-inheritance:
+
.. autoclass:: TINYINT
:members: __init__
- :show-inheritance:
+
.. autoclass:: UNIQUEIDENTIFIER
:members: __init__
- :show-inheritance:
+
.. autoclass:: VARCHAR
:members: __init__
- :show-inheritance:
+
PyODBC
diff --git a/doc/build/dialects/mysql.rst b/doc/build/dialects/mysql.rst
index 1e2784554..de71a99ac 100644
--- a/doc/build/dialects/mysql.rst
+++ b/doc/build/dialects/mysql.rst
@@ -25,135 +25,135 @@ construction arguments, are as follows:
.. autoclass:: BIGINT
:members: __init__
- :show-inheritance:
+
.. autoclass:: BINARY
:members: __init__
- :show-inheritance:
+
.. autoclass:: BIT
:members: __init__
- :show-inheritance:
+
.. autoclass:: BLOB
:members: __init__
- :show-inheritance:
+
.. autoclass:: BOOLEAN
:members: __init__
- :show-inheritance:
+
.. autoclass:: CHAR
:members: __init__
- :show-inheritance:
+
.. autoclass:: DATE
:members: __init__
- :show-inheritance:
+
.. autoclass:: DATETIME
:members: __init__
- :show-inheritance:
+
.. autoclass:: DECIMAL
:members: __init__
- :show-inheritance:
+
.. autoclass:: DOUBLE
:members: __init__
- :show-inheritance:
+
.. autoclass:: ENUM
:members: __init__
- :show-inheritance:
+
.. autoclass:: FLOAT
:members: __init__
- :show-inheritance:
+
.. autoclass:: INTEGER
:members: __init__
- :show-inheritance:
+
.. autoclass:: LONGBLOB
:members: __init__
- :show-inheritance:
+
.. autoclass:: LONGTEXT
:members: __init__
- :show-inheritance:
+
.. autoclass:: MEDIUMBLOB
:members: __init__
- :show-inheritance:
+
.. autoclass:: MEDIUMINT
:members: __init__
- :show-inheritance:
+
.. autoclass:: MEDIUMTEXT
:members: __init__
- :show-inheritance:
+
.. autoclass:: NCHAR
:members: __init__
- :show-inheritance:
+
.. autoclass:: NUMERIC
:members: __init__
- :show-inheritance:
+
.. autoclass:: NVARCHAR
:members: __init__
- :show-inheritance:
+
.. autoclass:: REAL
:members: __init__
- :show-inheritance:
+
.. autoclass:: SET
:members: __init__
- :show-inheritance:
+
.. autoclass:: SMALLINT
:members: __init__
- :show-inheritance:
+
.. autoclass:: TEXT
:members: __init__
- :show-inheritance:
+
.. autoclass:: TIME
:members: __init__
- :show-inheritance:
+
.. autoclass:: TIMESTAMP
:members: __init__
- :show-inheritance:
+
.. autoclass:: TINYBLOB
:members: __init__
- :show-inheritance:
+
.. autoclass:: TINYINT
:members: __init__
- :show-inheritance:
+
.. autoclass:: TINYTEXT
:members: __init__
- :show-inheritance:
+
.. autoclass:: VARBINARY
:members: __init__
- :show-inheritance:
+
.. autoclass:: VARCHAR
:members: __init__
- :show-inheritance:
+
.. autoclass:: YEAR
:members: __init__
- :show-inheritance:
+
MySQL-Python
--------------------
diff --git a/doc/build/dialects/oracle.rst b/doc/build/dialects/oracle.rst
index 4be8c5b51..32a544877 100644
--- a/doc/build/dialects/oracle.rst
+++ b/doc/build/dialects/oracle.rst
@@ -25,31 +25,31 @@ construction arguments, are as follows:
.. autoclass:: BFILE
:members: __init__
- :show-inheritance:
+
.. autoclass:: DOUBLE_PRECISION
:members: __init__
- :show-inheritance:
+
.. autoclass:: INTERVAL
:members: __init__
- :show-inheritance:
+
.. autoclass:: NCLOB
:members: __init__
- :show-inheritance:
+
.. autoclass:: NUMBER
:members: __init__
- :show-inheritance:
+
.. autoclass:: LONG
:members: __init__
- :show-inheritance:
+
.. autoclass:: RAW
:members: __init__
- :show-inheritance:
+
cx_Oracle
----------
diff --git a/doc/build/dialects/postgresql.rst b/doc/build/dialects/postgresql.rst
index 3c151483f..05b63506e 100644
--- a/doc/build/dialects/postgresql.rst
+++ b/doc/build/dialects/postgresql.rst
@@ -15,9 +15,9 @@ they originate from :mod:`sqlalchemy.types` or from the local dialect::
from sqlalchemy.dialects.postgresql import \
ARRAY, BIGINT, BIT, BOOLEAN, BYTEA, CHAR, CIDR, DATE, \
DOUBLE_PRECISION, ENUM, FLOAT, HSTORE, INET, INTEGER, \
- INTERVAL, MACADDR, NUMERIC, REAL, SMALLINT, TEXT, TIME, \
+ INTERVAL, JSON, MACADDR, NUMERIC, REAL, SMALLINT, TEXT, TIME, \
TIMESTAMP, UUID, VARCHAR, INT4RANGE, INT8RANGE, NUMRANGE, \
- DATERANGE, TSRANGE, TSTZRANGE
+ DATERANGE, TSRANGE, TSTZRANGE, TSVECTOR
Types which are specific to PostgreSQL, or have PostgreSQL-specific
construction arguments, are as follows:
@@ -28,7 +28,7 @@ construction arguments, are as follows:
.. autoclass:: ARRAY
:members: __init__, Comparator
- :show-inheritance:
+
.. autoclass:: Any
@@ -36,51 +36,58 @@ construction arguments, are as follows:
.. autoclass:: BIT
:members: __init__
- :show-inheritance:
+
.. autoclass:: BYTEA
:members: __init__
- :show-inheritance:
+
.. autoclass:: CIDR
:members: __init__
- :show-inheritance:
+
.. autoclass:: DOUBLE_PRECISION
:members: __init__
- :show-inheritance:
+
.. autoclass:: ENUM
:members: __init__, create, drop
- :show-inheritance:
+
.. autoclass:: HSTORE
:members:
- :show-inheritance:
+
.. autoclass:: hstore
:members:
- :show-inheritance:
+
.. autoclass:: INET
:members: __init__
- :show-inheritance:
+
.. autoclass:: INTERVAL
:members: __init__
- :show-inheritance:
+
+.. autoclass:: JSON
+ :members:
+
+.. autoclass:: JSONElement
+ :members:
.. autoclass:: MACADDR
:members: __init__
- :show-inheritance:
+
.. autoclass:: REAL
:members: __init__
- :show-inheritance:
+
+.. autoclass:: TSVECTOR
+ :members: __init__
.. autoclass:: UUID
:members: __init__
- :show-inheritance:
+
Range Types
~~~~~~~~~~~
@@ -89,22 +96,22 @@ The new range column types founds in PostgreSQL 9.2 onwards are
catered for by the following types:
.. autoclass:: INT4RANGE
- :show-inheritance:
+
.. autoclass:: INT8RANGE
- :show-inheritance:
+
.. autoclass:: NUMRANGE
- :show-inheritance:
+
.. autoclass:: DATERANGE
- :show-inheritance:
+
.. autoclass:: TSRANGE
- :show-inheritance:
+
.. autoclass:: TSTZRANGE
- :show-inheritance:
+
The types above get most of their functionality from the following
mixin:
@@ -127,7 +134,6 @@ SQLAlchemy supports Postgresql EXCLUDE constraints via the
:class:`ExcludeConstraint` class:
.. autoclass:: ExcludeConstraint
- :show-inheritance:
:members: __init__
For example::
diff --git a/doc/build/faq.rst b/doc/build/faq.rst
new file mode 100644
index 000000000..dd7347b0b
--- /dev/null
+++ b/doc/build/faq.rst
@@ -0,0 +1,942 @@
+:orphan:
+
+.. _faq_toplevel:
+
+============================
+Frequently Asked Questions
+============================
+
+.. contents::
+ :local:
+ :class: faq
+ :backlinks: none
+
+
+Connections / Engines
+=====================
+
+How do I configure logging?
+---------------------------
+
+See :ref:`dbengine_logging`.
+
+How do I pool database connections? Are my connections pooled?
+----------------------------------------------------------------
+
+SQLAlchemy performs application-level connection pooling automatically
+in most cases. With the exception of SQLite, a :class:`.Engine` object
+refers to a :class:`.QueuePool` as a source of connectivity.
+
+For more detail, see :ref:`engines_toplevel` and :ref:`pooling_toplevel`.
+
+How do I pass custom connect arguments to my database API?
+-----------------------------------------------------------
+
+The :func:`.create_engine` call accepts additional arguments either
+directly via the ``connect_args`` keyword argument::
+
+ e = create_engine("mysql://scott:tiger@localhost/test",
+ connect_args={"encoding": "utf8"})
+
+Or for basic string and integer arguments, they can usually be specified
+in the query string of the URL::
+
+ e = create_engine("mysql://scott:tiger@localhost/test?encoding=utf8")
+
+.. seealso::
+
+ :ref:`custom_dbapi_args`
+
+"MySQL Server has gone away"
+----------------------------
+
+There are two major causes for this error:
+
+1. The MySQL client closes connections which have been idle for a set period
+of time, defaulting to eight hours. This can be avoided by using the ``pool_recycle``
+setting with :func:`.create_engine`, described at :ref:`mysql_connection_timeouts`.
+
+2. Usage of the MySQLdb :term:`DBAPI`, or a similar DBAPI, in a non-threadsafe manner, or in an otherwise
+inappropriate way. The MySQLdb connection object is not threadsafe - this expands
+out to any SQLAlchemy system that links to a single connection, which includes the ORM
+:class:`.Session`. For background
+on how :class:`.Session` should be used in a multithreaded environment,
+see :ref:`session_faq_threadsafe`.
+
+Why does SQLAlchemy issue so many ROLLBACKs?
+---------------------------------------------
+
+SQLAlchemy currently assumes DBAPI connections are in "non-autocommit" mode -
+this is the default behavior of the Python database API, meaning it
+must be assumed that a transaction is always in progress. The
+connection pool issues ``connection.rollback()`` when a connection is returned.
+This is so that any transactional resources remaining on the connection are
+released. On a database like Postgresql or MSSQL where table resources are
+aggressively locked, this is critical so that rows and tables don't remain
+locked within connections that are no longer in use. An application can
+otherwise hang. It's not just for locks, however, and is equally critical on
+any database that has any kind of transaction isolation, including MySQL with
+InnoDB. Any connection that is still inside an old transaction will return
+stale data, if that data was already queried on that connection within
+isolation. For background on why you might see stale data even on MySQL, see
+http://dev.mysql.com/doc/refman/5.1/en/innodb-transaction-model.html
+
+I'm on MyISAM - how do I turn it off?
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The behavior of the connection pool's connection return behavior can be
+configured using ``reset_on_return``::
+
+ from sqlalchemy import create_engine
+ from sqlalchemy.pool import QueuePool
+
+ engine = create_engine('mysql://scott:tiger@localhost/myisam_database', pool=QueuePool(reset_on_return=False))
+
+I'm on SQL Server - how do I turn those ROLLBACKs into COMMITs?
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+``reset_on_return`` accepts the values ``commit``, ``rollback`` in addition
+to ``True``, ``False``, and ``None``. Setting to ``commit`` will cause
+a COMMIT as any connection is returned to the pool::
+
+ engine = create_engine('mssql://scott:tiger@mydsn', pool=QueuePool(reset_on_return='commit'))
+
+
+I am using multiple connections with a SQLite database (typically to test transaction operation), and my test program is not working!
+----------------------------------------------------------------------------------------------------------------------------------------------------------
+
+If using a SQLite ``:memory:`` database, or a version of SQLAlchemy prior
+to version 0.7, the default connection pool is the :class:`.SingletonThreadPool`,
+which maintains exactly one SQLite connection per thread. So two
+connections in use in the same thread will actually be the same SQLite
+connection. Make sure you're not using a ``:memory:`` database and
+use :class:`.NullPool`, which is the default for non-memory databases in
+current SQLAlchemy versions.
+
+.. seealso::
+
+ :ref:`pysqlite_threading_pooling` - info on PySQLite's behavior.
+
+How do I get at the raw DBAPI connection when using an Engine?
+--------------------------------------------------------------
+
+With a regular SA engine-level Connection, you can get at a pool-proxied
+version of the DBAPI connection via the :attr:`.Connection.connection` attribute on
+:class:`.Connection`, and for the really-real DBAPI connection you can call the
+:attr:`.ConnectionFairy.connection` attribute on that - but there should never be any need to access
+the non-pool-proxied DBAPI connection, as all methods are proxied through::
+
+ engine = create_engine(...)
+ conn = engine.connect()
+ conn.connection.<do DBAPI things>
+ cursor = conn.connection.cursor(<DBAPI specific arguments..>)
+
+You must ensure that you revert any isolation level settings or other
+operation-specific settings on the connection back to normal before returning
+it to the pool.
+
+As an alternative to reverting settings, you can call the :meth:`.Connection.detach` method on
+either :class:`.Connection` or the proxied connection, which will de-associate
+the connection from the pool such that it will be closed and discarded
+when :meth:`.Connection.close` is called::
+
+ conn = engine.connect()
+ conn.detach() # detaches the DBAPI connection from the connection pool
+ conn.connection.<go nuts>
+ conn.close() # connection is closed for real, the pool replaces it with a new connection
+
+MetaData / Schema
+==================
+
+My program is hanging when I say ``table.drop()`` / ``metadata.drop_all()``
+----------------------------------------------------------------------------
+
+This usually corresponds to two conditions: 1. using PostgreSQL, which is really
+strict about table locks, and 2. you have a connection still open which
+contains locks on the table and is distinct from the connection being used for
+the DROP statement. Here's the most minimal version of the pattern::
+
+ connection = engine.connect()
+ result = connection.execute(mytable.select())
+
+ mytable.drop(engine)
+
+Above, a connection pool connection is still checked out; furthermore, the
+result object above also maintains a link to this connection. If
+"implicit execution" is used, the result will hold this connection opened until
+the result object is closed or all rows are exhausted.
+
+The call to ``mytable.drop(engine)`` attempts to emit DROP TABLE on a second
+connection procured from the :class:`.Engine` which will lock.
+
+The solution is to close out all connections before emitting DROP TABLE::
+
+ connection = engine.connect()
+ result = connection.execute(mytable.select())
+
+ # fully read result sets
+ result.fetchall()
+
+ # close connections
+ connection.close()
+
+ # now locks are removed
+ mytable.drop(engine)
+
+Does SQLAlchemy support ALTER TABLE, CREATE VIEW, CREATE TRIGGER, Schema Upgrade Functionality?
+-----------------------------------------------------------------------------------------------
+
+General ALTER support isn't present in SQLAlchemy directly. For special DDL
+on an ad-hoc basis, the :class:`.DDL` and related constructs can be used.
+See :doc:`core/ddl` for a discussion on this subject.
+
+A more comprehensive option is to use schema migration tools, such as Alembic
+or SQLAlchemy-Migrate; see :ref:`schema_migrations` for discussion on this.
+
+How can I sort Table objects in order of their dependency?
+-----------------------------------------------------------
+
+This is available via the :attr:`.MetaData.sorted_tables` attribute::
+
+ metadata = MetaData()
+ # ... add Table objects to metadata
+ ti = metadata.sorted_tables
+ for t in ti:
+ print t
+
+How can I get the CREATE TABLE/ DROP TABLE output as a string?
+---------------------------------------------------------------
+
+Modern SQLAlchemy has clause constructs which represent DDL operations. These
+can be rendered to strings like any other SQL expression::
+
+ from sqlalchemy.schema import CreateTable
+
+ print CreateTable(mytable)
+
+To get the string specific to a certain engine::
+
+ print CreateTable(mytable).compile(engine)
+
+There's also a special form of :class:`.Engine` that can let you dump an entire
+metadata creation sequence, using this recipe::
+
+ def dump(sql, *multiparams, **params):
+ print sql.compile(dialect=engine.dialect)
+ engine = create_engine('postgresql://', strategy='mock', executor=dump)
+ metadata.create_all(engine, checkfirst=False)
+
+The `Alembic <https://bitbucket.org/zzzeek/alembic>`_ tool also supports
+an "offline" SQL generation mode that renders database migrations as SQL scripts.
+
+How can I subclass Table/Column to provide certain behaviors/configurations?
+------------------------------------------------------------------------------
+
+:class:`.Table` and :class:`.Column` are not good targets for direct subclassing.
+However, there are simple ways to get on-construction behaviors using creation
+functions, and behaviors related to the linkages between schema objects such as
+constraint conventions or naming conventions using attachment events.
+An example of many of these
+techniques can be seen at `Naming Conventions <http://www.sqlalchemy.org/trac/wiki/UsageRecipes/NamingConventions>`_.
+
+
+SQL Expressions
+=================
+
+Why does ``.col.in_([])`` Produce ``col != col``? Why not ``1=0``?
+-------------------------------------------------------------------
+
+A little introduction to the issue. The IN operator in SQL, given a list of
+elements to compare against a column, generally does not accept an empty list,
+that is while it is valid to say::
+
+ column IN (1, 2, 3)
+
+it's not valid to say::
+
+ column IN ()
+
+SQLAlchemy's :meth:`.Operators.in_` operator, when given an empty list, produces this
+expression::
+
+ column != column
+
+As of version 0.6, it also produces a warning stating that a less efficient
+comparison operation will be rendered. This expression is the only one that is
+both database agnostic and produces correct results.
+
+For example, the naive approach of "just evaluate to false, by comparing 1=0
+or 1!=1", does not handle nulls properly. An expression like::
+
+ NOT column != column
+
+will not return a row when "column" is null, but an expression which does not
+take the column into account::
+
+ NOT 1=0
+
+will.
+
+Closer to the mark is the following CASE expression::
+
+ CASE WHEN column IS NOT NULL THEN 1=0 ELSE NULL END
+
+We don't use this expression due to its verbosity, and it's also not
+typically accepted by Oracle within a WHERE clause - depending
+on how you phrase it, you'll either get "ORA-00905: missing keyword" or
+"ORA-00920: invalid relational operator". It's also still less efficient than
+just rendering SQL without the clause altogether (or not issuing the SQL at
+all, if the statement is just a simple search).
+
+The best approach therefore is to avoid the usage of IN given an argument list
+of zero length. Instead, don't emit the Query in the first place, if no rows
+should be returned. The warning is best promoted to a full error condition
+using the Python warnings filter (see http://docs.python.org/library/warnings.html).
+
+ORM Configuration
+==================
+
+.. _faq_mapper_primary_key:
+
+How do I map a table that has no primary key?
+---------------------------------------------
+
+The SQLAlchemy ORM, in order to map to a particular table, needs there to be
+at least one column denoted as a primary key column; multiple-column,
+i.e. composite, primary keys are of course entirely feasible as well. These
+columns do **not** need to be actually known to the database as primary key
+columns, though it's a good idea that they are. It's only necessary that the columns
+*behave* as a primary key does, e.g. as a unique and not nullable identifier
+for a row.
+
+Most ORMs require that objects have some kind of primary key defined
+because the object in memory must correspond to a uniquely identifiable
+row in the database table; at the very least, this allows the
+object to be targeted for UPDATE and DELETE statements which will affect only
+that object's row and no other. However, the importance of the primary key
+goes far beyond that. In SQLAlchemy, all ORM-mapped objects are at all times
+linked uniquely within a :class:`.Session`
+to their specific database row using a pattern called the :term:`identity map`,
+a pattern that's central to the unit of work system employed by SQLAlchemy,
+and is also key to the most common (and not-so-common) patterns of ORM usage.
+
+
+.. note::
+
+ It's important to note that we're only talking about the SQLAlchemy ORM; an
+ application which builds on Core and deals only with :class:`.Table` objects,
+ :func:`.select` constructs and the like, **does not** need any primary key
+ to be present on or associated with a table in any way (though again, in SQL, all tables
+ should really have some kind of primary key, lest you need to actually
+ update or delete specific rows).
+
+In almost all cases, a table does have a so-called :term:`candidate key`, which is a column or series
+of columns that uniquely identify a row. If a table truly doesn't have this, and has actual
+fully duplicate rows, the table does not correspond to `first normal form <http://en.wikipedia.org/wiki/First_normal_form>`_ and cannot be mapped. Otherwise, whatever columns comprise the best candidate key can be
+applied directly to the mapper::
+
+ class SomeClass(Base):
+ __table__ = some_table_with_no_pk
+ __mapper_args__ = {
+ 'primary_key':[some_table_with_no_pk.c.uid, some_table_with_no_pk.c.bar]
+ }
+
+Better yet is when using fully declared table metadata, use the ``primary_key=True``
+flag on those columns::
+
+ class SomeClass(Base):
+ __tablename__ = "some_table_with_no_pk"
+
+ uid = Column(Integer, primary_key=True)
+ bar = Column(String, primary_key=True)
+
+All tables in a relational database should have primary keys. Even a many-to-many
+association table - the primary key would be the composite of the two association
+columns::
+
+ CREATE TABLE my_association (
+ user_id INTEGER REFERENCES user(id),
+ account_id INTEGER REFERENCES account(id),
+ PRIMARY KEY (user_id, account_id)
+ )
+
+
+How do I configure a Column that is a Python reserved word or similar?
+----------------------------------------------------------------------------
+
+Column-based attributes can be given any name desired in the mapping. See
+:ref:`mapper_column_distinct_names`.
+
+How do I get a list of all columns, relationships, mapped attributes, etc. given a mapped class?
+-------------------------------------------------------------------------------------------------
+
+This information is all available from the :class:`.Mapper` object.
+
+To get at the :class:`.Mapper` for a particular mapped class, call the
+:func:`.inspect` function on it::
+
+ from sqlalchemy import inspect
+
+ mapper = inspect(MyClass)
+
+From there, all information about the class can be acquired using such methods as:
+
+* :attr:`.Mapper.attrs` - a namespace of all mapped attributes. The attributes
+ themselves are instances of :class:`.MapperProperty`, which contain additional
+ attributes that can lead to the mapped SQL expression or column, if applicable.
+
+* :attr:`.Mapper.column_attrs` - the mapped attribute namespace
+ limited to column and SQL expression attributes. You might want to use
+ :attr:`.Mapper.columns` to get at the :class:`.Column` objects directly.
+
+* :attr:`.Mapper.relationships` - namespace of all :class:`.RelationshipProperty` attributes.
+
+* :attr:`.Mapper.all_orm_descriptors` - namespace of all mapped attributes, plus user-defined
+ attributes defined using systems such as :class:`.hybrid_property`, :class:`.AssociationProxy` and others.
+
+* :attr:`.Mapper.columns` - A namespace of :class:`.Column` objects and other named
+ SQL expressions associated with the mapping.
+
+* :attr:`.Mapper.mapped_table` - The :class:`.Table` or other selectable to which
+ this mapper is mapped.
+
+* :attr:`.Mapper.local_table` - The :class:`.Table` that is "local" to this mapper;
+ this differs from :attr:`.Mapper.mapped_table` in the case of a mapper mapped
+ using inheritance to a composed selectable.
+
+I'm using Declarative and setting primaryjoin/secondaryjoin using an ``and_()`` or ``or_()``, and I am getting an error message about foreign keys.
+------------------------------------------------------------------------------------------------------------------------------------------------------------------
+
+Are you doing this?::
+
+ class MyClass(Base):
+ # ....
+
+ foo = relationship("Dest", primaryjoin=and_("MyClass.id==Dest.foo_id", "MyClass.foo==Dest.bar"))
+
+That's an ``and_()`` of two string expressions, which SQLAlchemy cannot apply any mapping towards. Declarative allows :func:`.relationship` arguments to be specified as strings, which are converted into expression objects using ``eval()``. But this doesn't occur inside of an ``and_()`` expression - it's a special operation declarative applies only to the *entirety* of what's passed to primaryjoin or other arguments as a string::
+
+ class MyClass(Base):
+ # ....
+
+ foo = relationship("Dest", primaryjoin="and_(MyClass.id==Dest.foo_id, MyClass.foo==Dest.bar)")
+
+Or if the objects you need are already available, skip the strings::
+
+ class MyClass(Base):
+ # ....
+
+ foo = relationship(Dest, primaryjoin=and_(MyClass.id==Dest.foo_id, MyClass.foo==Dest.bar))
+
+The same idea applies to all the other arguments, such as ``foreign_keys``::
+
+ # wrong !
+ foo = relationship(Dest, foreign_keys=["Dest.foo_id", "Dest.bar_id"])
+
+ # correct !
+ foo = relationship(Dest, foreign_keys="[Dest.foo_id, Dest.bar_id]")
+
+ # also correct !
+ foo = relationship(Dest, foreign_keys=[Dest.foo_id, Dest.bar_id])
+
+ # if you're using columns from the class that you're inside of, just use the column objects !
+ class MyClass(Base):
+ foo_id = Column(...)
+ bar_id = Column(...)
+ # ...
+
+ foo = relationship(Dest, foreign_keys=[foo_id, bar_id])
+
+
+Sessions / Queries
+===================
+
+"This Session's transaction has been rolled back due to a previous exception during flush." (or similar)
+---------------------------------------------------------------------------------------------------------
+
+This is an error that occurs when a :meth:`.Session.flush` raises an exception, rolls back
+the transaction, but further commands upon the `Session` are called without an
+explicit call to :meth:`.Session.rollback` or :meth:`.Session.close`.
+
+It usually corresponds to an application that catches an exception
+upon :meth:`.Session.flush` or :meth:`.Session.commit` and
+does not properly handle the exception. For example::
+
+ from sqlalchemy import create_engine, Column, Integer
+ from sqlalchemy.orm import sessionmaker
+ from sqlalchemy.ext.declarative import declarative_base
+
+ Base = declarative_base(create_engine('sqlite://'))
+
+ class Foo(Base):
+ __tablename__ = 'foo'
+ id = Column(Integer, primary_key=True)
+
+ Base.metadata.create_all()
+
+ session = sessionmaker()()
+
+ # constraint violation
+ session.add_all([Foo(id=1), Foo(id=1)])
+
+ try:
+ session.commit()
+ except:
+ # ignore error
+ pass
+
+ # continue using session without rolling back
+ session.commit()
+
+
+The usage of the :class:`.Session` should fit within a structure similar to this::
+
+ try:
+ <use session>
+ session.commit()
+ except:
+ session.rollback()
+ raise
+ finally:
+ session.close() # optional, depends on use case
+
+Many things can cause a failure within the try/except besides flushes. You
+should always have some kind of "framing" of your session operations so that
+connection and transaction resources have a definitive boundary, otherwise
+your application doesn't really have its usage of resources under control.
+This is not to say that you need to put try/except blocks all throughout your
+application - on the contrary, this would be a terrible idea. You should
+architect your application such that there is one (or few) point(s) of
+"framing" around session operations.
+
+For a detailed discussion on how to organize usage of the :class:`.Session`,
+please see :ref:`session_faq_whentocreate`.
+
+But why does flush() insist on issuing a ROLLBACK?
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+It would be great if :meth:`.Session.flush` could partially complete and then not roll
+back, however this is beyond its current capabilities since its internal
+bookkeeping would have to be modified such that it can be halted at any time
+and be exactly consistent with what's been flushed to the database. While this
+is theoretically possible, the usefulness of the enhancement is greatly
+decreased by the fact that many database operations require a ROLLBACK in any
+case. Postgres in particular has operations which, once they fail, do not
+allow the transaction to continue::
+
+ test=> create table foo(id integer primary key);
+ NOTICE: CREATE TABLE / PRIMARY KEY will create implicit index "foo_pkey" for table "foo"
+ CREATE TABLE
+ test=> begin;
+ BEGIN
+ test=> insert into foo values(1);
+ INSERT 0 1
+ test=> commit;
+ COMMIT
+ test=> begin;
+ BEGIN
+ test=> insert into foo values(1);
+ ERROR: duplicate key value violates unique constraint "foo_pkey"
+ test=> insert into foo values(2);
+ ERROR: current transaction is aborted, commands ignored until end of transaction block
+
+What SQLAlchemy offers that solves both issues is support of SAVEPOINT, via
+:meth:`.Session.begin_nested`. Using :meth:`.Session.begin_nested`, you can frame an operation that may
+potentially fail within a transaction, and then "roll back" to the point
+before its failure while maintaining the enclosing transaction.
+
+But why isn't the one automatic call to ROLLBACK enough? Why must I ROLLBACK again?
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+This is again a matter of the :class:`.Session` providing a consistent interface and
+refusing to guess about the context in which it's being used. For example, the
+:class:`.Session` supports "framing" above within multiple levels. Suppose
+you had a decorator ``@with_session()``, which did this::
+
+ def with_session(fn):
+ def go(*args, **kw):
+ session.begin(subtransactions=True)
+ try:
+ ret = fn(*args, **kw)
+ session.commit()
+ return ret
+ except:
+ session.rollback()
+ raise
+ return go
+
+The above decorator begins a transaction if one does not exist already, and
+then commits it, if it were the creator. The "subtransactions" flag means that
+if :meth:`.Session.begin` were already called by an enclosing function, nothing happens
+except a counter is incremented - this counter is decremented when :meth:`.Session.commit`
+is called and only when it goes back to zero does the actual COMMIT happen. It
+allows this usage pattern::
+
+ @with_session
+ def one():
+ # do stuff
+ two()
+
+
+ @with_session
+ def two():
+ # etc.
+
+ one()
+
+ two()
+
+``one()`` can call ``two()``, or ``two()`` can be called by itself, and the
+``@with_session`` decorator ensures the appropriate "framing" - the transaction
+boundaries stay on the outermost call level. As you can see, if ``two()`` calls
+``flush()`` which throws an exception and then issues a ``rollback()``, there will
+*always* be a second ``rollback()`` performed by the decorator, and possibly a
+third corresponding to two levels of decorator. If the ``flush()`` pushed the
+``rollback()`` all the way out to the top of the stack, and then we said that
+all remaining ``rollback()`` calls are moot, there is some silent behavior going
+on there. A poorly written enclosing method might suppress the exception, and
+then call ``commit()`` assuming nothing is wrong, and then you have a silent
+failure condition. The main reason people get this error in fact is because
+they didn't write clean "framing" code and they would have had other problems
+down the road.
+
+If you think the above use case is a little exotic, the same kind of thing
+comes into play if you want to SAVEPOINT- you might call ``begin_nested()``
+several times, and the ``commit()``/``rollback()`` calls each resolve the most
+recent ``begin_nested()``. The meaning of ``rollback()`` or ``commit()`` is
+dependent upon the enclosing block in which it is called, and you might have any
+sequence of ``rollback()``/``commit()`` in any order, and it's the level of nesting
+that determines their behavior.
+
+In both of the above cases, if ``flush()`` broke the nesting of transaction
+blocks, the behavior is, depending on scenario, anywhere from "magic" to
+silent failure to blatant interruption of code flow.
+
+``flush()`` makes its own "subtransaction", so that a transaction is started up
+regardless of the external transactional state, and when complete it calls
+``commit()``, or ``rollback()`` upon failure - but that ``rollback()`` corresponds
+to its own subtransaction - it doesn't want to guess how you'd like to handle
+the external "framing" of the transaction, which could be nested many levels
+with any combination of subtransactions and real SAVEPOINTs. The job of
+starting/ending the "frame" is kept consistently with the code external to the
+``flush()``, and we made a decision that this was the most consistent approach.
+
+I'm inserting 400,000 rows with the ORM and it's really slow!
+--------------------------------------------------------------
+
+The SQLAlchemy ORM uses the :term:`unit of work` pattern when synchronizing
+changes to the database. This pattern goes far beyond simple "inserts"
+of data. It includes that attributes which are assigned on objects are
+received using an attribute instrumentation system which tracks
+changes on objects as they are made, includes that all rows inserted
+are tracked in an identity map which has the effect that for each row
+SQLAlchemy must retrieve its "last inserted id" if not already given,
+and also involves that rows to be inserted are scanned and sorted for
+dependencies as needed. Objects are also subject to a fair degree of
+bookkeeping in order to keep all of this running, which for a very
+large number of rows at once can create an inordinate amount of time
+spent with large data structures, hence it's best to chunk these.
+
+Basically, unit of work is a large degree of automation in order to
+automate the task of persisting a complex object graph into a
+relational database with no explicit persistence code, and this
+automation has a price.
+
+ORMs are basically not intended for high-performance bulk inserts -
+this is the whole reason SQLAlchemy offers the Core in addition to the
+ORM as a first-class component.
+
+For the use case of fast bulk inserts, the
+SQL generation and execution system that the ORM builds on top of
+is part of the Core. Using this system directly, we can produce an INSERT that
+is competitive with using the raw database API directly.
+
+The example below illustrates time-based tests for four different
+methods of inserting rows, going from the most automated to the least.
+With cPython 2.7, runtimes observed::
+
+ classics-MacBook-Pro:sqlalchemy classic$ python test.py
+ SQLAlchemy ORM: Total time for 100000 records 14.3528850079 secs
+ SQLAlchemy ORM pk given: Total time for 100000 records 10.0164160728 secs
+ SQLAlchemy Core: Total time for 100000 records 0.775382995605 secs
+ sqlite3: Total time for 100000 records 0.676795005798 sec
+
+We can reduce the time by a factor of three using recent versions of `Pypy <http://pypy.org/>`_::
+
+ classics-MacBook-Pro:sqlalchemy classic$ /usr/local/src/pypy-2.1-beta2-osx64/bin/pypy test.py
+ SQLAlchemy ORM: Total time for 100000 records 5.88369488716 secs
+ SQLAlchemy ORM pk given: Total time for 100000 records 3.52294301987 secs
+ SQLAlchemy Core: Total time for 100000 records 0.613556146622 secs
+ sqlite3: Total time for 100000 records 0.442467927933 sec
+
+Script::
+
+ import time
+ import sqlite3
+
+ from sqlalchemy.ext.declarative import declarative_base
+ from sqlalchemy import Column, Integer, String, create_engine
+ from sqlalchemy.orm import scoped_session, sessionmaker
+
+ Base = declarative_base()
+ DBSession = scoped_session(sessionmaker())
+ engine = None
+
+ class Customer(Base):
+ __tablename__ = "customer"
+ id = Column(Integer, primary_key=True)
+ name = Column(String(255))
+
+ def init_sqlalchemy(dbname='sqlite:///sqlalchemy.db'):
+ global engine
+ engine = create_engine(dbname, echo=False)
+ DBSession.remove()
+ DBSession.configure(bind=engine, autoflush=False, expire_on_commit=False)
+ Base.metadata.drop_all(engine)
+ Base.metadata.create_all(engine)
+
+ def test_sqlalchemy_orm(n=100000):
+ init_sqlalchemy()
+ t0 = time.time()
+ for i in range(n):
+ customer = Customer()
+ customer.name = 'NAME ' + str(i)
+ DBSession.add(customer)
+ if i % 1000 == 0:
+ DBSession.flush()
+ DBSession.commit()
+ print("SQLAlchemy ORM: Total time for " + str(n) +
+ " records " + str(time.time() - t0) + " secs")
+
+ def test_sqlalchemy_orm_pk_given(n=100000):
+ init_sqlalchemy()
+ t0 = time.time()
+ for i in range(n):
+ customer = Customer(id=i+1, name="NAME " + str(i))
+ DBSession.add(customer)
+ if i % 1000 == 0:
+ DBSession.flush()
+ DBSession.commit()
+ print("SQLAlchemy ORM pk given: Total time for " + str(n) +
+ " records " + str(time.time() - t0) + " secs")
+
+ def test_sqlalchemy_core(n=100000):
+ init_sqlalchemy()
+ t0 = time.time()
+ engine.execute(
+ Customer.__table__.insert(),
+ [{"name": 'NAME ' + str(i)} for i in range(n)]
+ )
+ print("SQLAlchemy Core: Total time for " + str(n) +
+ " records " + str(time.time() - t0) + " secs")
+
+ def init_sqlite3(dbname):
+ conn = sqlite3.connect(dbname)
+ c = conn.cursor()
+ c.execute("DROP TABLE IF EXISTS customer")
+ c.execute("CREATE TABLE customer (id INTEGER NOT NULL, "
+ "name VARCHAR(255), PRIMARY KEY(id))")
+ conn.commit()
+ return conn
+
+ def test_sqlite3(n=100000, dbname='sqlite3.db'):
+ conn = init_sqlite3(dbname)
+ c = conn.cursor()
+ t0 = time.time()
+ for i in range(n):
+ row = ('NAME ' + str(i),)
+ c.execute("INSERT INTO customer (name) VALUES (?)", row)
+ conn.commit()
+ print("sqlite3: Total time for " + str(n) +
+ " records " + str(time.time() - t0) + " sec")
+
+ if __name__ == '__main__':
+ test_sqlalchemy_orm(100000)
+ test_sqlalchemy_orm_pk_given(100000)
+ test_sqlalchemy_core(100000)
+ test_sqlite3(100000)
+
+
+
+How do I make a Query that always adds a certain filter to every query?
+------------------------------------------------------------------------------------------------
+
+See the recipe at `PreFilteredQuery <http://www.sqlalchemy.org/trac/wiki/UsageRecipes/PreFilteredQuery>`_.
+
+I've created a mapping against an Outer Join, and while the query returns rows, no objects are returned. Why not?
+------------------------------------------------------------------------------------------------------------------
+
+Rows returned by an outer join may contain NULL for part of the primary key,
+as the primary key is the composite of both tables. The :class:`.Query` object ignores incoming rows
+that don't have an acceptable primary key. Based on the setting of the ``allow_partial_pks``
+flag on :func:`.mapper`, a primary key is accepted if the value has at least one non-NULL
+value, or alternatively if the value has no NULL values. See ``allow_partial_pks``
+at :func:`.mapper`.
+
+
+I'm using ``joinedload()`` or ``lazy=False`` to create a JOIN/OUTER JOIN and SQLAlchemy is not constructing the correct query when I try to add a WHERE, ORDER BY, LIMIT, etc. (which relies upon the (OUTER) JOIN)
+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+
+The joins generated by joined eager loading are only used to fully load related
+collections, and are designed to have no impact on the primary results of the query.
+Since they are anonymously aliased, they cannot be referenced directly.
+
+For details on this behavior, see :doc:`orm/loading`.
+
+Query has no ``__len__()``, why not?
+------------------------------------
+
+The Python ``__len__()`` magic method applied to an object allows the ``len()``
+builtin to be used to determine the length of the collection. It's intuitive
+that a SQL query object would link ``__len__()`` to the :meth:`.Query.count`
+method, which emits a `SELECT COUNT`. The reason this is not possible is
+because evaluating the query as a list would incur two SQL calls instead of
+one::
+
+ class Iterates(object):
+ def __len__(self):
+ print("LEN!")
+ return 5
+
+ def __iter__(self):
+ print("ITER!")
+ return iter([1, 2, 3, 4, 5])
+
+ list(Iterates())
+
+output::
+
+ ITER!
+ LEN!
+
+How Do I use Textual SQL with ORM Queries?
+-------------------------------------------
+
+See:
+
+* :ref:`orm_tutorial_literal_sql` - Ad-hoc textual blocks with :class:`.Query`
+
+* :ref:`session_sql_expressions` - Using :class:`.Session` with textual SQL directly.
+
+I'm calling ``Session.delete(myobject)`` and it isn't removed from the parent collection!
+------------------------------------------------------------------------------------------
+
+See :ref:`session_deleting_from_collections` for a description of this behavior.
+
+why isn't my ``__init__()`` called when I load objects?
+--------------------------------------------------------
+
+See :ref:`mapping_constructors` for a description of this behavior.
+
+how do I use ON DELETE CASCADE with SA's ORM?
+----------------------------------------------
+
+SQLAlchemy will always issue UPDATE or DELETE statements for dependent
+rows which are currently loaded in the :class:`.Session`. For rows which
+are not loaded, it will by default issue SELECT statements to load
+those rows and update/delete those as well; in other words it assumes
+there is no ON DELETE CASCADE configured.
+To configure SQLAlchemy to cooperate with ON DELETE CASCADE, see
+:ref:`passive_deletes`.
+
+I set the "foo_id" attribute on my instance to "7", but the "foo" attribute is still ``None`` - shouldn't it have loaded Foo with id #7?
+----------------------------------------------------------------------------------------------------------------------------------------------------
+
+The ORM is not constructed in such a way as to support
+immediate population of relationships driven from foreign
+key attribute changes - instead, it is designed to work the
+other way around - foreign key attributes are handled by the
+ORM behind the scenes, the end user sets up object
+relationships naturally. Therefore, the recommended way to
+set ``o.foo`` is to do just that - set it!::
+
+ foo = Session.query(Foo).get(7)
+ o.foo = foo
+ Session.commit()
+
+Manipulation of foreign key attributes is of course entirely legal. However,
+setting a foreign-key attribute to a new value currently does not trigger
+an "expire" event of the :func:`.relationship` in which it's involved. This means
+that for the following sequence::
+
+ o = Session.query(SomeClass).first()
+ assert o.foo is None # accessing an un-set attribute sets it to None
+ o.foo_id = 7
+
+``o.foo`` is initialized to ``None`` when we first accessed it. Setting
+``o.foo_id = 7`` will have the value of "7" as pending, but no flush
+has occurred - so ``o.foo`` is still ``None``::
+
+ # attribute is already set to None, has not been
+ # reconciled with o.foo_id = 7 yet
+ assert o.foo is None
+
+For ``o.foo`` to load based on the foreign key mutation is usually achieved
+naturally after the commit, which both flushes the new foreign key value
+and expires all state::
+
+ Session.commit() # expires all attributes
+
+ foo_7 = Session.query(Foo).get(7)
+
+ assert o.foo is foo_7 # o.foo lazyloads on access
+
+A more minimal operation is to expire the attribute individually - this can
+be performed for any :term:`persistent` object using :meth:`.Session.expire`::
+
+ o = Session.query(SomeClass).first()
+ o.foo_id = 7
+ Session.expire(o, ['foo']) # object must be persistent for this
+
+ foo_7 = Session.query(Foo).get(7)
+
+ assert o.foo is foo_7 # o.foo lazyloads on access
+
+Note that if the object is not persistent but present in the :class:`.Session`,
+it's known as :term:`pending`. This means the row for the object has not been
+INSERTed into the database yet. For such an object, setting ``foo_id`` does not
+have meaning until the row is inserted; otherwise there is no row yet::
+
+ new_obj = SomeClass()
+ new_obj.foo_id = 7
+
+ Session.add(new_obj)
+
+ # accessing an un-set attribute sets it to None
+ assert new_obj.foo is None
+
+ Session.flush() # emits INSERT
+
+ # expire this because we already set .foo to None
+ Session.expire(new_obj, ['foo'])
+
+ assert new_obj.foo is foo_7 # now it loads
+
+
+.. topic:: Attribute loading for non-persistent objects
+
+ One variant on the "pending" behavior above is if we use the flag
+ ``load_on_pending`` on :func:`.relationship`. When this flag is set, the
+ lazy loader will emit for ``new_obj.foo`` before the INSERT proceeds; another
+ variant of this is to use the :meth:`.Session.enable_relationship_loading`
+ method, which can "attach" an object to a :class:`.Session` in such a way that
+ many-to-one relationships load as according to foreign key attributes
+ regardless of the object being in any particular state.
+ Both techniques are **not recommended for general use**; they were added to suit
+ specific programming scenarios encountered by users which involve the repurposing
+ of the ORM's usual object states.
+
+The recipe `ExpireRelationshipOnFKChange <http://www.sqlalchemy.org/trac/wiki/UsageRecipes/ExpireRelationshipOnFKChange>`_ features an example using SQLAlchemy events
+in order to coordinate the setting of foreign key attributes with many-to-one
+relationships.
+
+Is there a way to automagically have only unique keywords (or other kinds of objects) without doing a query for the keyword and getting a reference to the row containing that keyword?
+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+
+When people read the many-to-many example in the docs, they get hit with the
+fact that if you create the same ``Keyword`` twice, it gets put in the DB twice.
+Which is somewhat inconvenient.
+
+This `UniqueObject <http://www.sqlalchemy.org/trac/wiki/UsageRecipes/UniqueObject>`_ recipe was created to address this issue.
+
+
diff --git a/doc/build/glossary.rst b/doc/build/glossary.rst
index 564668691..defeabcff 100644
--- a/doc/build/glossary.rst
+++ b/doc/build/glossary.rst
@@ -1,15 +1,11 @@
+:orphan:
+
.. _glossary:
========
Glossary
========
-.. note::
-
- The Glossary is a brand new addition to the documentation. While
- sparse at the moment we hope to fill it up with plenty of new
- terms soon!
-
.. glossary::
:sorted:
@@ -95,8 +91,26 @@ Glossary
class which each represent a particular database column
or relationship to a related class.
+ identity map
+ A mapping between Python objects and their database identities.
+ The identity map is a collection that's associated with an
+ ORM :term:`session` object, and maintains a single instance
+ of every database object keyed to its identity. The advantage
+ to this pattern is that all operations which occur for a particular
+ database identity are transparently coordinated onto a single
+ object instance. When using an identity map in conjunction with
+ an :term:`isolated` transaction, having a reference
+ to an object that's known to have a particular primary key can
+ be considered from a practical standpoint to be a
+ proxy to the actual database row.
+
+ .. seealso::
+
+ Martin Fowler - Identity Map - http://martinfowler.com/eaaCatalog/identityMap.html
+
lazy load
lazy loads
+ lazy loading
In object relational mapping, a "lazy load" refers to an
attribute that does not contain its database-side value
for some period of time, typically when the object is
@@ -249,6 +263,15 @@ Glossary
`PEP 249 - Python Database API Specification v2.0 <http://www.python.org/dev/peps/pep-0249/>`_
+ domain model
+
+ A domain model in problem solving and software engineering is a conceptual model of all the topics related to a specific problem. It describes the various entities, their attributes, roles, and relationships, plus the constraints that govern the problem domain.
+
+ (via Wikipedia)
+
+ .. seealso::
+
+ `Domain Model (wikipedia) <http://en.wikipedia.org/wiki/Domain_model>`_
unit of work
This pattern is where the system transparently keeps
@@ -263,6 +286,16 @@ Glossary
:doc:`orm/session`
+ Session
+ The container or scope for ORM database operations. Sessions
+ load instances from the database, track changes to mapped
+ instances and persist changes in a single unit of work when
+ flushed.
+
+ .. seealso::
+
+ :doc:`orm/session`
+
columns clause
The portion of the ``SELECT`` statement which enumerates the
SQL expressions to be returned in the result set. The expressions
@@ -411,3 +444,599 @@ Glossary
query via its ``FROM``
clause is not possible, because the correlation can only proceed once the
original source rows from the enclosing statement's FROM clause are available.
+
+
+ ACID
+ ACID model
+ An acronym for "Atomicity, Consistency, Isolation,
+ Durability"; a set of properties that guarantee that
+ database transactions are processed reliably.
+ (via Wikipedia)
+
+ .. seealso::
+
+ :term:`atomicity`
+
+ :term:`consistency`
+
+ :term:`isolation`
+
+ :term:`durability`
+
+ http://en.wikipedia.org/wiki/ACID_Model
+
+ atomicity
+ Atomicity is one of the components of the :term:`ACID` model,
+ and requires that each transaction is "all or nothing":
+ if one part of the transaction fails, the entire transaction
+ fails, and the database state is left unchanged. An atomic
+ system must guarantee atomicity in each and every situation,
+ including power failures, errors, and crashes.
+ (via Wikipedia)
+
+ .. seealso::
+
+ :term:`ACID`
+
+ http://en.wikipedia.org/wiki/Atomicity_(database_systems)
+
+ consistency
+ Consistency is one of the components of the :term:`ACID` model,
+ and ensures that any transaction will
+ bring the database from one valid state to another. Any data
+ written to the database must be valid according to all defined
+ rules, including but not limited to :term:`constraints`, cascades,
+ triggers, and any combination thereof.
+ (via Wikipedia)
+
+ .. seealso::
+
+ :term:`ACID`
+
+ http://en.wikipedia.org/wiki/Consistency_(database_systems)
+
+ isolation
+ isolated
+ The isolation property of the :term:`ACID` model
+ ensures that the concurrent execution
+ of transactions results in a system state that would be
+ obtained if transactions were executed serially, i.e. one
+ after the other. Each transaction must execute in total
+ isolation i.e. if T1 and T2 execute concurrently then each
+ should remain independent of the other.
+ (via Wikipedia)
+
+ .. seealso::
+
+ :term:`ACID`
+
+ http://en.wikipedia.org/wiki/Isolation_(database_systems)
+
+ durability
+ Durability is a property of the :term:`ACID` model
+ which means that once a transaction has been committed,
+ it will remain so, even in the event of power loss, crashes,
+ or errors. In a relational database, for instance, once a
+ group of SQL statements execute, the results need to be stored
+ permanently (even if the database crashes immediately
+ thereafter).
+ (via Wikipedia)
+
+ .. seealso::
+
+ :term:`ACID`
+
+ http://en.wikipedia.org/wiki/Durability_(database_systems)
+
+ RETURNING
+ This is a non-SQL standard clause provided in various forms by
+ certain backends, which provides the service of returning a result
+ set upon execution of an INSERT, UPDATE or DELETE statement. Any set
+ of columns from the matched rows can be returned, as though they were
+ produced from a SELECT statement.
+
+ The RETURNING clause provides both a dramatic performance boost to
+ common update/select scenarios, including retrieval of inline- or
+ default- generated primary key values and defaults at the moment they
+ were created, as well as a way to get at server-generated
+ default values in an atomic way.
+
+ An example of RETURNING, idiomatic to Postgresql, looks like::
+
+ INSERT INTO user_account (name) VALUES ('new name') RETURNING id, timestamp
+
+ Above, the INSERT statement will provide upon execution a result set
+ which includes the values of the columns ``user_account.id`` and
+ ``user_account.timestamp``, which above should have been generated as default
+ values as they are not included otherwise (but note any series of columns
+ or SQL expressions can be placed into RETURNING, not just default-value columns).
+
+ The backends that currently support
+ RETURNING or a similar construct are Postgresql, SQL Server, Oracle,
+ and Firebird. The Postgresql and Firebird implementations are generally
+ full featured, whereas the implementations of SQL Server and Oracle
+ have caveats. On SQL Server, the clause is known as "OUTPUT INSERTED"
+ for INSERT and UPDATE statements and "OUTPUT DELETED" for DELETE statements;
+ the key caveat is that triggers are not supported in conjunction with this
+ keyword. On Oracle, it is known as "RETURNING...INTO", and requires that the
+ value be placed into an OUT parameter, meaning not only is the syntax awkward,
+ but it can also only be used for one row at a time.
+
+ SQLAlchemy's :meth:`.UpdateBase.returning` system provides a layer of abstraction
+ on top of the RETURNING systems of these backends to provide a consistent
+ interface for returning columns. The ORM also includes many optimizations
+ that make use of RETURNING when available.
+
+ one to many
+ A style of :func:`~sqlalchemy.orm.relationship` which links
+ the primary key of the parent mapper's table to the foreign
+ key of a related table. Each unique parent object can
+ then refer to zero or more unique related objects.
+
+ The related objects in turn will have an implicit or
+ explicit :term:`many to one` relationship to their parent
+ object.
+
+ An example one to many schema (which, note, is identical
+ to the :term:`many to one` schema):
+
+ .. sourcecode:: sql
+
+ CREATE TABLE department (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(30)
+ )
+
+ CREATE TABLE employee (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(30),
+ dep_id INTEGER REFERENCES department(id)
+ )
+
+ The relationship from ``department`` to ``employee`` is
+ one to many, since many employee records can be associated with a
+ single department. A SQLAlchemy mapping might look like::
+
+ class Department(Base):
+ __tablename__ = 'department'
+ id = Column(Integer, primary_key=True)
+ name = Column(String(30))
+ employees = relationship("Employee")
+
+ class Employee(Base):
+ __tablename__ = 'employee'
+ id = Column(Integer, primary_key=True)
+ name = Column(String(30))
+ dep_id = Column(Integer, ForeignKey('department.id'))
+
+ .. seealso::
+
+ :term:`relationship`
+
+ :term:`many to one`
+
+ :term:`backref`
+
+ many to one
+ A style of :func:`~sqlalchemy.orm.relationship` which links
+ a foreign key in the parent mapper's table to the primary
+ key of a related table. Each parent object can
+ then refer to exactly zero or one related object.
+
+ The related objects in turn will have an implicit or
+ explicit :term:`one to many` relationship to any number
+ of parent objects that refer to them.
+
+ An example many to one schema (which, note, is identical
+ to the :term:`one to many` schema):
+
+ .. sourcecode:: sql
+
+ CREATE TABLE department (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(30)
+ )
+
+ CREATE TABLE employee (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(30),
+ dep_id INTEGER REFERENCES department(id)
+ )
+
+
+ The relationship from ``employee`` to ``department`` is
+ many to one, since many employee records can be associated with a
+ single department. A SQLAlchemy mapping might look like::
+
+ class Department(Base):
+ __tablename__ = 'department'
+ id = Column(Integer, primary_key=True)
+ name = Column(String(30))
+
+ class Employee(Base):
+ __tablename__ = 'employee'
+ id = Column(Integer, primary_key=True)
+ name = Column(String(30))
+ dep_id = Column(Integer, ForeignKey('department.id'))
+ department = relationship("Department")
+
+ .. seealso::
+
+ :term:`relationship`
+
+ :term:`one to many`
+
+ :term:`backref`
+
+ backref
+ bidirectional relationship
+ An extension to the :term:`relationship` system whereby two
+ distinct :func:`~sqlalchemy.orm.relationship` objects can be
+ mutually associated with each other, such that they coordinate
+ in memory as changes occur to either side. The most common
+ way these two relationships are constructed is by using
+ the :func:`~sqlalchemy.orm.relationship` function explicitly
+ for one side and specifying the ``backref`` keyword to it so that
+ the other :func:`~sqlalchemy.orm.relationship` is created
+ automatically. We can illustrate this against the example we've
+ used in :term:`one to many` as follows::
+
+ class Department(Base):
+ __tablename__ = 'department'
+ id = Column(Integer, primary_key=True)
+ name = Column(String(30))
+ employees = relationship("Employee", backref="department")
+
+ class Employee(Base):
+ __tablename__ = 'employee'
+ id = Column(Integer, primary_key=True)
+ name = Column(String(30))
+ dep_id = Column(Integer, ForeignKey('department.id'))
+
+ A backref can be applied to any relationship, including one to many,
+ many to one, and :term:`many to many`.
+
+ .. seealso::
+
+ :term:`relationship`
+
+ :term:`one to many`
+
+ :term:`many to one`
+
+ :term:`many to many`
+
+ many to many
+ A style of :func:`sqlalchemy.orm.relationship` which links two tables together
+ via an intermediary table in the middle. Using this configuration,
+ any number of rows on the left side may refer to any number of
+ rows on the right, and vice versa.
+
+ A schema where employees can be associated with projects:
+
+ .. sourcecode:: sql
+
+ CREATE TABLE employee (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(30)
+ )
+
+ CREATE TABLE project (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(30)
+ )
+
+ CREATE TABLE employee_project (
+ employee_id INTEGER PRIMARY KEY,
+ project_id INTEGER PRIMARY KEY,
+ FOREIGN KEY (employee_id) REFERENCES employee(id),
+ FOREIGN KEY (project_id) REFERENCES project(id)
+ )
+
+ Above, the ``employee_project`` table is the many-to-many table,
+ which naturally forms a composite primary key consisting
+ of the primary key from each related table.
+
+ In SQLAlchemy, the :func:`sqlalchemy.orm.relationship` function
+ can represent this style of relationship in a mostly
+ transparent fashion, where the many-to-many table is
+ specified using plain table metadata::
+
+ class Employee(Base):
+ __tablename__ = 'employee'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(30))
+
+ projects = relationship(
+ "Project",
+ secondary=Table('employee_project', Base.metadata,
+ Column("employee_id", Integer, ForeignKey('employee.id'),
+ primary_key=True),
+ Column("project_id", Integer, ForeignKey('project.id'),
+ primary_key=True)
+ ),
+ backref="employees"
+ )
+
+ class Project(Base):
+ __tablename__ = 'project'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(30))
+
+ Above, the ``Employee.projects`` and back-referencing ``Project.employees``
+ collections are defined::
+
+ proj = Project(name="Client A")
+
+ emp1 = Employee(name="emp1")
+ emp2 = Employee(name="emp2")
+
+ proj.employees.extend([emp1, emp2])
+
+ .. seealso::
+
+ :term:`association relationship`
+
+ :term:`relationship`
+
+ :term:`one to many`
+
+ :term:`many to one`
+
+ relationship
+ relationships
+ A connecting unit between two mapped classes, corresponding
+ to some relationship between the two tables in the database.
+
+ The relationship is defined using the SQLAlchemy function
+ :func:`~sqlalchemy.orm.relationship`. Once created, SQLAlchemy
+ inspects the arguments and underlying mappings involved
+ in order to classify the relationship as one of three types:
+ :term:`one to many`, :term:`many to one`, or :term:`many to many`.
+ With this classification, the relationship construct
+ handles the task of persisting the appropriate linkages
+ in the database in response to in-memory object associations,
+ as well as the job of loading object references and collections
+ into memory based on the current linkages in the
+ database.
+
+ .. seealso::
+
+ :ref:`relationship_config_toplevel`
+
+ association relationship
+ A two-tiered :term:`relationship` which links two tables
+ together using an association table in the middle. The
+ association relationship differs from a :term:`many to many`
+ relationship in that the many-to-many table is mapped
+ by a full class, rather than invisibly handled by the
+ :func:`sqlalchemy.orm.relationship` construct as in the case
+ with many-to-many, so that additional attributes are
+ explicitly available.
+
+ For example, if we wanted to associate employees with
+ projects, also storing the specific role for that employee
+ with the project, the relational schema might look like:
+
+ .. sourcecode:: sql
+
+ CREATE TABLE employee (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(30)
+ )
+
+ CREATE TABLE project (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(30)
+ )
+
+ CREATE TABLE employee_project (
+ employee_id INTEGER PRIMARY KEY,
+ project_id INTEGER PRIMARY KEY,
+ role_name VARCHAR(30),
+ FOREIGN KEY (employee_id) REFERENCES employee(id),
+ FOREIGN KEY (project_id) REFERENCES project(id)
+ )
+
+ A SQLAlchemy declarative mapping for the above might look like::
+
+ class Employee(Base):
+ __tablename__ = 'employee'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(30))
+
+
+ class Project(Base):
+ __tablename__ = 'project'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(30))
+
+
+ class EmployeeProject(Base):
+ __tablename__ = 'employee_project'
+
+ employee_id = Column(Integer, ForeignKey('employee.id'), primary_key=True)
+ project_id = Column(Integer, ForeignKey('project.id'), primary_key=True)
+ role_name = Column(String(30))
+
+ project = relationship("Project", backref="project_employees")
+ employee = relationship("Employee", backref="employee_projects")
+
+
+ Employees can be added to a project given a role name::
+
+ proj = Project(name="Client A")
+
+ emp1 = Employee(name="emp1")
+ emp2 = Employee(name="emp2")
+
+ proj.project_employees.extend([
+ EmployeeProject(employee=emp1, role_name="tech lead"),
+ EmployeeProject(employee=emp2, role_name="account executive")
+ ])
+
+ .. seealso::
+
+ :term:`many to many`
+
+ constraint
+ constraints
+ constrained
+ Rules established within a relational database that ensure
+ the validity and consistency of data. Common forms
+ of constraint include :term:`primary key constraint`,
+ :term:`foreign key constraint`, and :term:`check constraint`.
+
+ candidate key
+
+ A :term:`relational algebra` term referring to an attribute or set
+ of attributes that form a uniquely identifying key for a
+ row. A row may have more than one candidate key, each of which
+ is suitable for use as the primary key of that row.
+ The primary key of a table is always a candidate key.
+
+ .. seealso::
+
+ :term:`primary key`
+
+ http://en.wikipedia.org/wiki/Candidate_key
+
+ primary key
+ primary key constraint
+
+ A :term:`constraint` that uniquely defines the characteristics
+ of each :term:`row`. The primary key has to consist of
+ characteristics that cannot be duplicated by any other row.
+ The primary key may consist of a single attribute or
+ multiple attributes in combination.
+ (via Wikipedia)
+
+ The primary key of a table is typically, though not always,
+ defined within the ``CREATE TABLE`` :term:`DDL`:
+
+ .. sourcecode:: sql
+
+ CREATE TABLE employee (
+ emp_id INTEGER,
+ emp_name VARCHAR(30),
+ dep_id INTEGER,
+ PRIMARY KEY (emp_id)
+ )
+
+ .. seealso::
+
+ http://en.wikipedia.org/wiki/Primary_Key
+
+ foreign key constraint
+ A referential constraint between two tables. A foreign key is a field or set of fields in a
+ relational table that matches a :term:`candidate key` of another table.
+ The foreign key can be used to cross-reference tables.
+ (via Wikipedia)
+
+ A foreign key constraint can be added to a table in standard
+ SQL using :term:`DDL` like the following:
+
+ .. sourcecode:: sql
+
+ ALTER TABLE employee ADD CONSTRAINT dep_id_fk
+ FOREIGN KEY (employee) REFERENCES department (dep_id)
+
+ .. seealso::
+
+ http://en.wikipedia.org/wiki/Foreign_key_constraint
+
+ check constraint
+
+ A check constraint is a
+ condition that defines valid data when adding or updating an
+ entry in a table of a relational database. A check constraint
+ is applied to each row in the table.
+
+ (via Wikipedia)
+
+ A check constraint can be added to a table in standard
+ SQL using :term:`DDL` like the following:
+
+ .. sourcecode:: sql
+
+ ALTER TABLE distributors ADD CONSTRAINT zipchk CHECK (char_length(zipcode) = 5);
+
+ .. seealso::
+
+ http://en.wikipedia.org/wiki/Check_constraint
+
+ unique constraint
+ unique key index
+ A unique key index can uniquely identify each row of data
+ values in a database table. A unique key index comprises a
+ single column or a set of columns in a single database table.
+ No two distinct rows or data records in a database table can
+ have the same data value (or combination of data values) in
+ those unique key index columns if NULL values are not used.
+ Depending on its design, a database table may have many unique
+ key indexes but at most one primary key index.
+
+ (via Wikipedia)
+
+ .. seealso::
+
+ http://en.wikipedia.org/wiki/Unique_key#Defining_unique_keys
+
+ transient
+ This describes one of the four major object states which
+ an object can have within a :term:`session`; a transient object
+ is a new object that doesn't have any database identity
+ and has not been associated with a session yet. When the
+ object is added to the session, it moves to the
+ :term:`pending` state.
+
+ .. seealso::
+
+ :ref:`session_object_states`
+
+ pending
+ This describes one of the four major object states which
+ an object can have within a :term:`session`; a pending object
+ is a new object that doesn't have any database identity,
+ but has been recently associated with a session. When
+ the session emits a flush and the row is inserted, the
+ object moves to the :term:`persistent` state.
+
+ .. seealso::
+
+ :ref:`session_object_states`
+
+ persistent
+ This describes one of the four major object states which
+ an object can have within a :term:`session`; a persistent object
+ is an object that has a database identity (i.e. a primary key)
+ and is currently associated with a session. Any object
+ that was previously :term:`pending` and has now been inserted
+ is in the persistent state, as is any object that's
+ been loaded by the session from the database. When a
+ persistent object is removed from a session, it is known
+ as :term:`detached`.
+
+ .. seealso::
+
+ :ref:`session_object_states`
+
+ detached
+ This describes one of the four major object states which
+ an object can have within a :term:`session`; a detached object
+ is an object that has a database identity (i.e. a primary key)
+ but is not associated with any session. An object that
+ was previously :term:`persistent` and was removed from its
+ session either because it was expunged, or the owning
+ session was closed, moves into the detached state.
+ The detached state is generally used when objects are being
+ moved between sessions or when being moved to/from an external
+ object cache.
+
+ .. seealso::
+
+ :ref:`session_object_states`
diff --git a/doc/build/index.rst b/doc/build/index.rst
index c8ccc430c..716a83d0e 100644
--- a/doc/build/index.rst
+++ b/doc/build/index.rst
@@ -1,3 +1,5 @@
+:orphan:
+
.. _index_toplevel:
========================
@@ -11,6 +13,7 @@ A high level view and getting set up.
:ref:`Overview <overview>` |
:ref:`Installation Guide <installation>` |
+:doc:`Frequently Asked Questions <faq>` |
:doc:`Migration from 0.8 <changelog/migration_09>` |
:doc:`Glossary <glossary>` |
:doc:`Changelog catalog <changelog/index>`
@@ -36,6 +39,7 @@ of Python objects, proceed first to the tutorial.
:doc:`Declarative Extension <orm/extensions/declarative>` |
:doc:`Association Proxy <orm/extensions/associationproxy>` |
:doc:`Hybrid Attributes <orm/extensions/hybrid>` |
+ :doc:`Automap <orm/extensions/automap>` (**new**) |
:doc:`Mutable Scalars <orm/extensions/mutable>` |
:doc:`Ordered List <orm/extensions/orderinglist>`
@@ -74,11 +78,11 @@ are documented here. In contrast to the ORM's domain-centric mode of usage, the
:doc:`Connection Pooling <core/pooling>`
* **Schema Definition:**
- :ref:`Tables and Columns <metadata_describing>` |
- :ref:`Database Introspection (Reflection) <metadata_reflection>` |
- :ref:`Insert/Update Defaults <metadata_defaults>` |
- :ref:`Constraints and Indexes <metadata_constraints>` |
- :ref:`Using Data Definition Language (DDL) <metadata_ddl>`
+ :ref:`Tables and Columns <metadata_describing_toplevel>` |
+ :ref:`Database Introspection (Reflection) <metadata_reflection_toplevel>` |
+ :ref:`Insert/Update Defaults <metadata_defaults_toplevel>` |
+ :ref:`Constraints and Indexes <metadata_constraints_toplevel>` |
+ :ref:`Using Data Definition Language (DDL) <metadata_ddl_toplevel>`
* **Datatypes:**
:ref:`Overview <types_toplevel>` |
diff --git a/doc/build/intro.rst b/doc/build/intro.rst
index c5e7f7425..588701ce2 100644
--- a/doc/build/intro.rst
+++ b/doc/build/intro.rst
@@ -74,13 +74,12 @@ Supported Platforms
SQLAlchemy has been tested against the following platforms:
-* cPython since version 2.5, through the 2.xx series
+* cPython since version 2.6, through the 2.xx series
* cPython version 3, throughout all 3.xx series
-* `Jython <http://www.jython.org/>`_ 2.5 or greater
-* `Pypy <http://pypy.org/>`_ 1.5 or greater
+* `Pypy <http://pypy.org/>`_ 2.1 or greater
-.. versionchanged:: 0.8
- Python 2.5 is now the minimum Python version supported.
+.. versionchanged:: 0.9
+ Python 2.6 is now the minimum Python version supported.
Supported Installation Methods
-------------------------------
@@ -91,13 +90,9 @@ SQLAlchemy supports installation using standard Python "distutils" or
* **Plain Python Distutils** - SQLAlchemy can be installed with a clean
Python install using the services provided via `Python Distutils <http://docs.python.org/distutils/>`_,
using the ``setup.py`` script. The C extensions as well as Python 3 builds are supported.
-* **Standard Setuptools** - When using `setuptools <http://pypi.python.org/pypi/setuptools/>`_,
+* **Setuptools or Distribute** - When using `setuptools <http://pypi.python.org/pypi/setuptools/>`_,
SQLAlchemy can be installed via ``setup.py`` or ``easy_install``, and the C
- extensions are supported. setuptools is not supported on Python 3 at the time
- of this writing.
-* **Distribute** - With `distribute <http://pypi.python.org/pypi/distribute/>`_,
- SQLAlchemy can be installed via ``setup.py`` or ``easy_install``, and the C
- extensions as well as Python 3 builds are supported.
+ extensions are supported.
* **pip** - `pip <http://pypi.python.org/pypi/pip/>`_ is an installer that
rides on top of ``setuptools`` or ``distribute``, replacing the usage
of ``easy_install``. It is often preferred for its simpler mode of usage.
@@ -117,6 +112,11 @@ Or with pip::
This command will download the latest version of SQLAlchemy from the `Python
Cheese Shop <http://pypi.python.org/pypi/SQLAlchemy>`_ and install it to your system.
+.. note::
+
+ Beta releases of SQLAlchemy may not be present on Pypi, and may instead
+ require a direct download first.
+
Installing using setup.py
----------------------------------
@@ -128,8 +128,12 @@ Installing the C Extensions
----------------------------------
SQLAlchemy includes C extensions which provide an extra speed boost for
-dealing with result sets. Currently, the extensions are only supported on the
-2.xx series of cPython, not Python 3 or Pypy.
+dealing with result sets. The extensions are supported on both the 2.xx
+and 3.xx series of cPython.
+
+.. versionchanged:: 0.9.0
+
+ The C extensions now compile on Python 3 as well as Python 2.
setup.py will automatically build the extensions if an appropriate platform is
detected. If the build of the C extensions fails, due to missing compiler or
@@ -155,11 +159,12 @@ Or with pip::
Installing on Python 3
----------------------------------
-SQLAlchemy ships as Python 2 code. For Python 3 usage, the ``setup.py`` script
-will invoke the Python ``2to3`` tool on the build, plugging in an extra
-"preprocessor" as well. The 2to3 step works with Python distutils
-(part of the standard Python install) and Distribute - it will **not**
-work with a non-Distribute setuptools installation.
+SQLAlchemy runs directly on Python 2 or Python 3, and can be installed in
+either environment without any adjustments or code conversion.
+
+.. versionchanged:: 0.9.0 Python 3 is now supported in place with no 2to3 step
+ required.
+
Installing a Database API
----------------------------------
@@ -172,7 +177,7 @@ the available DBAPIs for each database, including external links.
Checking the Installed SQLAlchemy Version
------------------------------------------
-This documentation covers SQLAlchemy version 0.8. If you're working on a
+This documentation covers SQLAlchemy version 0.9. If you're working on a
system that already has SQLAlchemy installed, check the version from your
Python prompt like this:
@@ -180,11 +185,11 @@ Python prompt like this:
>>> import sqlalchemy
>>> sqlalchemy.__version__ # doctest: +SKIP
- 0.8.0
+ 0.9.0
.. _migration:
-0.7 to 0.8 Migration
+0.8 to 0.9 Migration
=====================
-Notes on what's changed from 0.7 to 0.8 is available here at :doc:`changelog/migration_08`.
+Notes on what's changed from 0.8 to 0.9 are available here at :doc:`changelog/migration_09`.
diff --git a/doc/build/orm/deprecated.rst b/doc/build/orm/deprecated.rst
index 943059747..8d277011c 100644
--- a/doc/build/orm/deprecated.rst
+++ b/doc/build/orm/deprecated.rst
@@ -1,3 +1,5 @@
+:orphan:
+
.. _dep_interfaces_orm_toplevel:
Deprecated ORM Event Interfaces
diff --git a/doc/build/orm/events.rst b/doc/build/orm/events.rst
index 235861952..2be74bf57 100644
--- a/doc/build/orm/events.rst
+++ b/doc/build/orm/events.rst
@@ -39,6 +39,8 @@ Session Events
Instrumentation Events
-----------------------
+.. automodule:: sqlalchemy.orm.instrumentation
+
.. autoclass:: sqlalchemy.orm.events.InstrumentationEvents
:members:
diff --git a/doc/build/orm/examples.rst b/doc/build/orm/examples.rst
index e0c87dadf..99ca4bb8d 100644
--- a/doc/build/orm/examples.rst
+++ b/doc/build/orm/examples.rst
@@ -1,140 +1,134 @@
.. _examples_toplevel:
-Examples
-========
+============
+ORM Examples
+============
The SQLAlchemy distribution includes a variety of code examples illustrating
a select set of patterns, some typical and some not so typical. All are
runnable and can be found in the ``/examples`` directory of the
-distribution. Each example contains a README in its ``__init__.py`` file,
-each of which are listed below.
+distribution. Descriptions and source code for all can be found here.
Additional SQLAlchemy examples, some user contributed, are available on the
wiki at `<http://www.sqlalchemy.org/trac/wiki/UsageRecipes>`_.
+
+Mapping Recipes
+===============
+
.. _examples_adjacencylist:
Adjacency List
--------------
-Location: /examples/adjacency_list/
-
-.. automodule:: adjacency_list
+.. automodule:: examples.adjacency_list
.. _examples_associations:
Associations
------------
-Location: /examples/association/
+.. automodule:: examples.association
-.. automodule:: association
+Directed Graphs
+---------------
+.. automodule:: examples.graphs
-.. _examples_instrumentation:
+Dynamic Relations as Dictionaries
+------------------------------------
-Attribute Instrumentation
--------------------------
+.. automodule:: examples.dynamic_dict
-Location: /examples/custom_attributes/
+.. _examples_generic_associations:
-.. automodule:: custom_attributes
+Generic Associations
+------------------------
-.. _examples_caching:
+.. automodule:: examples.generic_associations
-Dogpile Caching
----------------
+Large Collections
+------------------------
-Location: /examples/dogpile_caching/
+.. automodule:: examples.large_collection
-.. automodule:: dogpile_caching
+Nested Sets
+------------
-Directed Graphs
----------------
+.. automodule:: examples.nested_sets
-Location: /examples/graphs/
+.. _examples_relationships:
-.. automodule:: graphs
+Relationship Join Conditions
+----------------------------
-Dynamic Relations as Dictionaries
-----------------------------------
+.. automodule:: examples.join_conditions
-Location: /examples/dynamic_dict/
+.. _examples_xmlpersistence:
-.. automodule:: dynamic_dict
+XML Persistence
+------------------------
-.. _examples_generic_associations:
+.. automodule:: examples.elementtree
-Generic Associations
---------------------
+Versioning Objects
+------------------------
-Location: /examples/generic_associations
+Versioning with a History Table
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-.. automodule:: generic_associations
+.. automodule:: examples.versioned_history
-.. _examples_sharding:
+Versioning using Temporal Rows
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Horizontal Sharding
--------------------
+.. automodule:: examples.versioned_rows
-Location: /examples/sharding
+Vertical Attribute Mapping
+------------------------------------
-.. automodule:: sharding
+.. automodule:: examples.vertical
-Inheritance Mappings
---------------------
-Location: /examples/inheritance/
+Inheritance Mapping Recipes
+============================
-.. automodule:: inheritance
+Basic Inheritance Mappings
+----------------------------------
-Large Collections
------------------
+.. automodule:: examples.inheritance
-Location: /examples/large_collection/
+Special APIs
+============
-.. automodule:: large_collection
+.. _examples_instrumentation:
-Nested Sets
------------
+Attribute Instrumentation
+------------------------------------
-Location: /examples/nested_sets/
+.. automodule:: examples.custom_attributes
-.. automodule:: nested_sets
+.. _examples_sharding:
-Polymorphic Associations
+Horizontal Sharding
------------------------
-See :ref:`examples_generic_associations` for a modern version of polymorphic associations.
-
-.. _examples_postgis:
-
-PostGIS Integration
--------------------
-
-Location: /examples/postgis
-
-.. automodule:: postgis
-
-Versioned Objects
------------------
-
-Location: /examples/versioning
+.. automodule:: examples.sharding
-.. automodule:: versioning
+Extending the ORM
+=================
-Vertical Attribute Mapping
---------------------------
+.. _examples_caching:
-Location: /examples/vertical
+Dogpile Caching
+------------------------
-.. automodule:: vertical
+.. automodule:: examples.dogpile_caching
-.. _examples_xmlpersistence:
+.. _examples_postgis:
-XML Persistence
----------------
+PostGIS Integration
+------------------------
-Location: /examples/elementtree/
+.. automodule:: examples.postgis
-.. automodule:: elementtree
diff --git a/doc/build/orm/exceptions.rst b/doc/build/orm/exceptions.rst
index 1dde4248f..f95b26eed 100644
--- a/doc/build/orm/exceptions.rst
+++ b/doc/build/orm/exceptions.rst
@@ -2,5 +2,4 @@ ORM Exceptions
==============
.. automodule:: sqlalchemy.orm.exc
- :show-inheritance:
:members: \ No newline at end of file
diff --git a/doc/build/orm/extensions/associationproxy.rst b/doc/build/orm/extensions/associationproxy.rst
index 90bb29ebf..9b25c4a68 100644
--- a/doc/build/orm/extensions/associationproxy.rst
+++ b/doc/build/orm/extensions/associationproxy.rst
@@ -15,6 +15,7 @@ the construction of sophisticated collections and dictionary
views of virtually any geometry, persisted to the database using
standard, transparently configured relational patterns.
+
Simplifying Scalar Collections
------------------------------
diff --git a/doc/build/orm/extensions/automap.rst b/doc/build/orm/extensions/automap.rst
new file mode 100644
index 000000000..d1d200609
--- /dev/null
+++ b/doc/build/orm/extensions/automap.rst
@@ -0,0 +1,22 @@
+.. _automap_toplevel:
+
+Automap
+=======
+
+.. automodule:: sqlalchemy.ext.automap
+
+API Reference
+-------------
+
+.. autofunction:: automap_base
+
+.. autoclass:: AutomapBase
+ :members:
+
+.. autofunction:: classname_for_table
+
+.. autofunction:: name_for_scalar_relationship
+
+.. autofunction:: name_for_collection_relationship
+
+.. autofunction:: generate_relationship
diff --git a/doc/build/orm/extensions/declarative.rst b/doc/build/orm/extensions/declarative.rst
index 35895e8df..636bb451b 100644
--- a/doc/build/orm/extensions/declarative.rst
+++ b/doc/build/orm/extensions/declarative.rst
@@ -10,6 +10,8 @@ API Reference
.. autofunction:: declarative_base
+.. autofunction:: as_declarative
+
.. autoclass:: declared_attr
.. autofunction:: sqlalchemy.ext.declarative.api._declarative_constructor
@@ -27,3 +29,4 @@ API Reference
.. autoclass:: ConcreteBase
.. autoclass:: DeferredReflection
+ :members:
diff --git a/doc/build/orm/extensions/hybrid.rst b/doc/build/orm/extensions/hybrid.rst
index 3ee76fd9b..16cdafebc 100644
--- a/doc/build/orm/extensions/hybrid.rst
+++ b/doc/build/orm/extensions/hybrid.rst
@@ -15,7 +15,7 @@ API Reference
:members:
.. autoclass:: Comparator
- :show-inheritance:
+
.. autodata:: HYBRID_METHOD
diff --git a/doc/build/orm/extensions/instrumentation.rst b/doc/build/orm/extensions/instrumentation.rst
index 94946b1ae..16084e319 100644
--- a/doc/build/orm/extensions/instrumentation.rst
+++ b/doc/build/orm/extensions/instrumentation.rst
@@ -10,6 +10,8 @@ API Reference
.. autodata:: INSTRUMENTATION_MANAGER
+.. autoclass:: sqlalchemy.orm.instrumentation.InstrumentationFactory
+
.. autoclass:: InstrumentationManager
:members:
:undoc-members:
@@ -17,7 +19,6 @@ API Reference
.. autodata:: instrumentation_finders
.. autoclass:: ExtendedInstrumentationRegistry
- :show-inheritance:
:members:
diff --git a/doc/build/orm/extensions/mutable.rst b/doc/build/orm/extensions/mutable.rst
index ba3e10542..14875cd3c 100644
--- a/doc/build/orm/extensions/mutable.rst
+++ b/doc/build/orm/extensions/mutable.rst
@@ -12,15 +12,14 @@ API Reference
:members: _parents, coerce
.. autoclass:: Mutable
- :show-inheritance:
:members:
+ :inherited-members:
+ :private-members:
.. autoclass:: MutableComposite
- :show-inheritance:
:members:
.. autoclass:: MutableDict
- :show-inheritance:
:members:
diff --git a/doc/build/orm/inheritance.rst b/doc/build/orm/inheritance.rst
index a82fcf675..e6c1e378b 100644
--- a/doc/build/orm/inheritance.rst
+++ b/doc/build/orm/inheritance.rst
@@ -478,8 +478,11 @@ Below we load ``Company`` rows while eagerly loading related ``Engineer``
objects, querying the ``employee`` and ``engineer`` tables simultaneously::
session.query(Company).\
- options(subqueryload_all(Company.employees.of_type(Engineer),
- Engineer.machines))
+ options(
+ subqueryload(Company.employees.of_type(Engineer)).
+ subqueryload("machines")
+        )
.. versionadded:: 0.8
:func:`.joinedload` and :func:`.subqueryload` support
diff --git a/doc/build/orm/internals.rst b/doc/build/orm/internals.rst
index 38efdb08a..250bc777d 100644
--- a/doc/build/orm/internals.rst
+++ b/doc/build/orm/internals.rst
@@ -10,62 +10,75 @@ sections, are listed here.
.. autoclass:: sqlalchemy.orm.state.AttributeState
:members:
- :show-inheritance:
+
:inherited-members:
.. autoclass:: sqlalchemy.orm.instrumentation.ClassManager
:members:
- :show-inheritance:
+
:inherited-members:
.. autoclass:: sqlalchemy.orm.properties.ColumnProperty
:members:
- :show-inheritance:
+
:inherited-members:
.. autoclass:: sqlalchemy.orm.descriptor_props.CompositeProperty
:members:
- :show-inheritance:
+
+
+.. autoclass:: sqlalchemy.orm.attributes.Event
+ :members:
+
.. autoclass:: sqlalchemy.orm.interfaces._InspectionAttr
:members:
- :show-inheritance:
+
.. autoclass:: sqlalchemy.orm.state.InstanceState
:members:
- :show-inheritance:
+
.. autoclass:: sqlalchemy.orm.attributes.InstrumentedAttribute
:members: __get__, __set__, __delete__
- :show-inheritance:
+
:undoc-members:
+.. autodata:: sqlalchemy.orm.interfaces.MANYTOONE
+
+.. autodata:: sqlalchemy.orm.interfaces.MANYTOMANY
+
.. autoclass:: sqlalchemy.orm.interfaces.MapperProperty
:members:
- :show-inheritance:
.. autodata:: sqlalchemy.orm.interfaces.NOT_EXTENSION
+
+.. autodata:: sqlalchemy.orm.interfaces.ONETOMANY
+
.. autoclass:: sqlalchemy.orm.interfaces.PropComparator
:members:
- :show-inheritance:
+
:inherited-members:
.. autoclass:: sqlalchemy.orm.properties.RelationshipProperty
:members:
- :show-inheritance:
+
:inherited-members:
.. autoclass:: sqlalchemy.orm.descriptor_props.SynonymProperty
:members:
- :show-inheritance:
+
:inherited-members:
.. autoclass:: sqlalchemy.orm.query.QueryContext
:members:
- :show-inheritance:
+
.. autoclass:: sqlalchemy.orm.attributes.QueryableAttribute
:members:
- :show-inheritance:
:inherited-members:
+
+.. autoclass:: sqlalchemy.orm.session.UOWTransaction
+ :members:
+
diff --git a/doc/build/orm/loading.rst b/doc/build/orm/loading.rst
index e84179558..7be25de4a 100644
--- a/doc/build/orm/loading.rst
+++ b/doc/build/orm/loading.rst
@@ -1,3 +1,5 @@
+.. _loading_toplevel:
+
.. currentmodule:: sqlalchemy.orm
Relationship Loading Techniques
@@ -82,24 +84,25 @@ The default **loader strategy** for any :func:`~sqlalchemy.orm.relationship`
is configured by the ``lazy`` keyword argument, which defaults to ``select`` - this indicates
a "select" statement .
Below we set it as ``joined`` so that the ``children`` relationship is eager
-loading, using a join:
-
-.. sourcecode:: python+sql
+loaded using a JOIN::
# load the 'children' collection using LEFT OUTER JOIN
- mapper(Parent, parent_table, properties={
- 'children': relationship(Child, lazy='joined')
- })
+ class Parent(Base):
+ __tablename__ = 'parent'
+
+ id = Column(Integer, primary_key=True)
+ children = relationship("Child", lazy='joined')
We can also set it to eagerly load using a second query for all collections,
-using ``subquery``:
+using ``subquery``::
-.. sourcecode:: python+sql
+ # load the 'children' collection using a second query which
+ # JOINS to a subquery of the original
+ class Parent(Base):
+ __tablename__ = 'parent'
- # load the 'children' attribute using a join to a subquery
- mapper(Parent, parent_table, properties={
- 'children': relationship(Child, lazy='subquery')
- })
+ id = Column(Integer, primary_key=True)
+ children = relationship("Child", lazy='subquery')
When querying, all three choices of loader strategy are available on a
per-query basis, using the :func:`~sqlalchemy.orm.joinedload`,
@@ -117,42 +120,37 @@ query options:
# set children to load eagerly with a second statement
session.query(Parent).options(subqueryload('children')).all()
-To reference a relationship that is deeper than one level, separate the names by periods:
-
-.. sourcecode:: python+sql
-
- session.query(Parent).options(joinedload('foo.bar.bat')).all()
-
-When using dot-separated names with :func:`~sqlalchemy.orm.joinedload` or
-:func:`~sqlalchemy.orm.subqueryload`, the option applies **only** to the actual
-attribute named, and **not** its ancestors. For example, suppose a mapping
-from ``A`` to ``B`` to ``C``, where the relationships, named ``atob`` and
-``btoc``, are both lazy-loading. A statement like the following:
-
-.. sourcecode:: python+sql
-
- session.query(A).options(joinedload('atob.btoc')).all()
-
-will load only ``A`` objects to start. When the ``atob`` attribute on each
-``A`` is accessed, the returned ``B`` objects will *eagerly* load their ``C``
-objects.
-
-Therefore, to modify the eager load to load both ``atob`` as well as ``btoc``,
-place joinedloads for both:
-
-.. sourcecode:: python+sql
-
- session.query(A).options(joinedload('atob'), joinedload('atob.btoc')).all()
-
-or more succinctly just use :func:`~sqlalchemy.orm.joinedload_all` or
-:func:`~sqlalchemy.orm.subqueryload_all`:
-
-.. sourcecode:: python+sql
-
- session.query(A).options(joinedload_all('atob.btoc')).all()
-
-There are two other loader strategies available, **dynamic loading** and **no
-loading**; these are described in :ref:`largecollections`.
+Loading Along Paths
+-------------------
+
+To reference a relationship that is deeper than one level, method chaining
+may be used. The object returned by all loader options is an instance of
+the :class:`.Load` class, which provides a so-called "generative" interface::
+
+ session.query(Parent).options(
+ joinedload('foo').
+ joinedload('bar').
+ joinedload('bat')
+ ).all()
+
+Using method chaining, the loader style of each link in the path is explicitly
+stated. To navigate along a path without changing the existing loader style
+of a particular attribute, the :func:`.defaultload` method/function may be used::
+
+ session.query(A).options(
+ defaultload("atob").joinedload("btoc")
+ ).all()
+
+.. versionchanged:: 0.9.0
+ The previous approach of specifying dot-separated paths within loader
+ options has been superseded by the less ambiguous approach of the
+ :class:`.Load` object and related methods. With this system, the user
+ specifies the style of loading for each link along the chain explicitly,
+ rather than guessing between options like ``joinedload()`` vs. ``joinedload_all()``.
+ The :func:`.orm.defaultload` is provided to allow path navigation without
+ modification of existing loader options. The dot-separated path system
+    as well as the ``_all()`` functions will remain available for
+    backwards-compatibility indefinitely.
Default Loading Strategies
--------------------------
@@ -175,8 +173,9 @@ of all :func:`.relationship` constructs in use for that query,
except for those which use the ``'dynamic'`` style of loading.
If some relationships specify
``lazy='joined'`` or ``lazy='subquery'``, for example,
-using ``default_strategy(lazy='select')`` will unilaterally
-cause all those relationships to use ``'select'`` loading.
+using ``lazyload('*')`` will unilaterally
+cause all those relationships to use ``'select'`` loading, e.g. emit a
+SELECT statement when each attribute is accessed.
The option does not supercede loader options stated in the
query, such as :func:`.eagerload`,
@@ -191,6 +190,22 @@ for the ``widget`` relationship::
If multiple ``'*'`` options are passed, the last one overrides
those previously passed.
+Per-Entity Default Loading Strategies
+-------------------------------------
+
+.. versionadded:: 0.9.0
+ Per-entity default loader strategies.
+
+A variant of the default loader strategy is the ability to set the strategy
+on a per-entity basis. For example, if querying for ``User`` and ``Address``,
+we can instruct all relationships on ``Address`` only to use lazy loading
+by first applying the :class:`.Load` object, then specifying the ``*`` as a
+chained option::
+
+ session.query(User, Address).options(Load(Address).lazyload('*'))
+
+Above, all relationships on ``Address`` will be set to a lazy load.
+
.. _zen_of_eager_loading:
The Zen of Eager Loading
@@ -402,31 +417,27 @@ For this SQLAlchemy supplies the :func:`~sqlalchemy.orm.contains_eager()`
option. This option is used in the same manner as the
:func:`~sqlalchemy.orm.joinedload()` option except it is assumed that the
:class:`~sqlalchemy.orm.query.Query` will specify the appropriate joins
-explicitly. Below it's used with a ``from_statement`` load::
+explicitly. Below, we specify a join between ``User`` and ``Address``
+and additionally establish this as the basis for eager loading of ``User.addresses``::
- # mapping is the users->addresses mapping
- mapper(User, users_table, properties={
- 'addresses': relationship(Address, addresses_table)
- })
+ class User(Base):
+ __tablename__ = 'user'
+ id = Column(Integer, primary_key=True)
+ addresses = relationship("Address")
- # define a query on USERS with an outer join to ADDRESSES
- statement = users_table.outerjoin(addresses_table).select().apply_labels()
+ class Address(Base):
+ __tablename__ = 'address'
- # construct a Query object which expects the "addresses" results
- query = session.query(User).options(contains_eager('addresses'))
-
- # get results normally
- r = query.from_statement(statement)
+ # ...
-It works just as well with an inline :meth:`.Query.join` or
-:meth:`.Query.outerjoin`::
+ q = session.query(User).join(User.addresses).\
+ options(contains_eager(User.addresses))
- session.query(User).outerjoin(User.addresses).options(contains_eager(User.addresses)).all()
If the "eager" portion of the statement is "aliased", the ``alias`` keyword
argument to :func:`~sqlalchemy.orm.contains_eager` may be used to indicate it.
-This is a string alias name or reference to an actual
-:class:`~sqlalchemy.sql.expression.Alias` (or other selectable) object:
+This is sent as a reference to an :func:`.aliased` or :class:`.Alias`
+construct:
.. sourcecode:: python+sql
@@ -444,10 +455,23 @@ This is a string alias name or reference to an actual
adalias.user_id AS adalias_user_id, adalias.email_address AS adalias_email_address, (...other columns...)
FROM users LEFT OUTER JOIN email_addresses AS email_addresses_1 ON users.user_id = email_addresses_1.user_id
-The ``alias`` argument is used only as a source of columns to match up to the
-result set. You can use it to match up the result to arbitrary label
-names in a string SQL statement, by passing a :func:`.select` which links those
-labels to the mapped :class:`.Table`::
+The path given as the argument to :func:`.contains_eager` needs
+to be a full path from the starting entity. For example if we were loading
+``Users->orders->Order->items->Item``, the string version would look like::
+
+ query(User).options(contains_eager('orders').contains_eager('items'))
+
+Or using the class-bound descriptor::
+
+ query(User).options(contains_eager(User.orders).contains_eager(Order.items))
+
+Advanced Usage with Arbitrary Statements
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The ``alias`` argument can be more creatively used, in that it can be made
+to represent any set of arbitrary names to match up into a statement.
+Below it is linked to a :func:`.select` which links a set of column objects
+to a string SQL statement::
# label the columns of the addresses table
eager_columns = select([
@@ -463,24 +487,17 @@ labels to the mapped :class:`.Table`::
"from users left outer join addresses on users.user_id=addresses.user_id").\
options(contains_eager(User.addresses, alias=eager_columns))
-The path given as the argument to :func:`.contains_eager` needs
-to be a full path from the starting entity. For example if we were loading
-``Users->orders->Order->items->Item``, the string version would look like::
- query(User).options(contains_eager('orders', 'items'))
-
-Or using the class-bound descriptor::
-
- query(User).options(contains_eager(User.orders, Order.items))
-
-Relation Loader API
---------------------
+Relationship Loader API
+------------------------
.. autofunction:: contains_alias
.. autofunction:: contains_eager
+.. autofunction:: defaultload
+
.. autofunction:: eagerload
.. autofunction:: eagerload_all
diff --git a/doc/build/orm/mapper_config.rst b/doc/build/orm/mapper_config.rst
index 2560c6f41..17bd31a6f 100644
--- a/doc/build/orm/mapper_config.rst
+++ b/doc/build/orm/mapper_config.rst
@@ -88,13 +88,19 @@ named according to the name of the column itself (specifically, the ``key``
attribute of :class:`.Column`). This behavior can be
modified in several ways.
+.. _mapper_column_distinct_names:
+
Naming Columns Distinctly from Attribute Names
----------------------------------------------
A mapping by default shares the same name for a
-:class:`.Column` as that of the mapped attribute.
-The name assigned to the :class:`.Column` can be different,
-as we illustrate here in a Declarative mapping::
+:class:`.Column` as that of the mapped attribute - specifically
+it matches the :attr:`.Column.key` attribute on :class:`.Column`, which
+by default is the same as the :attr:`.Column.name`.
+
+The name assigned to the Python attribute which maps to
+:class:`.Column` can be different from either :attr:`.Column.name` or :attr:`.Column.key`
+just by assigning it that way, as we illustrate here in a Declarative mapping::
class User(Base):
__tablename__ = 'user'
@@ -120,14 +126,50 @@ with the desired key::
'name': user_table.c.user_name,
})
+In the next section we'll examine the usage of ``.key`` more closely.
+
+.. _mapper_automated_reflection_schemes:
+
+Automating Column Naming Schemes from Reflected Tables
+------------------------------------------------------
+
+In the previous section :ref:`mapper_column_distinct_names`, we showed how
+a :class:`.Column` explicitly mapped to a class can have a different attribute
+name than the column. But what if we aren't listing out :class:`.Column`
+objects explicitly, and instead are automating the production of :class:`.Table`
+objects using reflection (e.g. as described in :ref:`metadata_reflection_toplevel`)?
+In this case we can make use of the :meth:`.DDLEvents.column_reflect` event
+to intercept the production of :class:`.Column` objects and provide them
+with the :attr:`.Column.key` of our choice::
+
+ @event.listens_for(Table, "column_reflect")
+ def column_reflect(inspector, table, column_info):
+ # set column.key = "attr_<lower_case_name>"
+ column_info['key'] = "attr_%s" % column_info['name'].lower()
+
+With the above event, the reflection of :class:`.Column` objects will be intercepted
+with our event that adds a new ".key" element, such as in a mapping as below::
+
+ class MyClass(Base):
+ __table__ = Table("some_table", Base.metadata,
+ autoload=True, autoload_with=some_engine)
+
+If we want to qualify our event to only react for the specific :class:`.MetaData`
+object above, we can check for it in our event::
+
+ @event.listens_for(Table, "column_reflect")
+ def column_reflect(inspector, table, column_info):
+ if table.metadata is Base.metadata:
+ # set column.key = "attr_<lower_case_name>"
+ column_info['key'] = "attr_%s" % column_info['name'].lower()
+
.. _column_prefix:
Naming All Columns with a Prefix
--------------------------------
-A way to automate the assignment of a prefix to
-the mapped attribute names relative to the column name
-is to use ``column_prefix``::
+A quick approach to prefix column names, typically when mapping
+to an existing :class:`.Table` object, is to use ``column_prefix``::
class User(Base):
__table__ = user_table
@@ -136,9 +178,10 @@ is to use ``column_prefix``::
The above will place attribute names such as ``_user_id``, ``_user_name``,
``_password`` etc. on the mapped ``User`` class.
-The classical version of the above::
+This approach is uncommon in modern usage. For dealing with reflected
+tables, a more flexible approach is to use that described in
+:ref:`mapper_automated_reflection_schemes`.
- mapper(User, user_table, column_prefix='_')
Using column_property for column level options
-----------------------------------------------
@@ -308,23 +351,75 @@ separately when it is accessed::
photo3 = deferred(Column(Binary), group='photos')
You can defer or undefer columns at the :class:`~sqlalchemy.orm.query.Query`
-level using the :func:`.orm.defer` and :func:`.orm.undefer` query options::
+level using options, including :func:`.orm.defer` and :func:`.orm.undefer`::
from sqlalchemy.orm import defer, undefer
query = session.query(Book)
- query.options(defer('summary')).all()
- query.options(undefer('excerpt')).all()
+ query = query.options(defer('summary'))
+ query = query.options(undefer('excerpt'))
+ query.all()
-And an entire "deferred group", i.e. which uses the ``group`` keyword argument
-to :func:`.orm.deferred`, can be undeferred using
-:func:`.orm.undefer_group`, sending in the group name::
+:func:`.orm.deferred` attributes which are marked with a "group" can be undeferred
+using :func:`.orm.undefer_group`, sending in the group name::
from sqlalchemy.orm import undefer_group
query = session.query(Book)
query.options(undefer_group('photos')).all()
+Load Only Columns
+------------------
+
+An arbitrary set of columns can be selected as "load only" columns, which will
+be loaded while deferring all other columns on a given entity, using :func:`.orm.load_only`::
+
+ from sqlalchemy.orm import load_only
+
+ session.query(Book).options(load_only("summary", "excerpt"))
+
+.. versionadded:: 0.9.0
+
+Deferred Loading with Multiple Entities
+---------------------------------------
+
+To specify column deferral options within a :class:`.Query` that loads multiple types
+of entity, the :class:`.Load` object can specify which parent entity to start with::
+
+ from sqlalchemy.orm import Load
+
+ query = session.query(Book, Author).join(Book.author)
+ query = query.options(
+ Load(Book).load_only("summary", "excerpt"),
+ Load(Author).defer("bio")
+ )
+
+To specify column deferral options along the path of various relationships,
+the options support chaining, where the loading style of each relationship
+is specified first, then is chained to the deferral options. Such as, to load
+``Book`` instances, then joined-eager-load the ``Author``, then apply deferral
+options to the ``Author`` entity::
+
+ from sqlalchemy.orm import joinedload
+
+ query = session.query(Book)
+ query = query.options(
+ joinedload(Book.author).load_only("summary", "excerpt"),
+ )
+
+In the case where the loading style of parent relationships should be left
+unchanged, use :func:`.orm.defaultload`::
+
+ from sqlalchemy.orm import defaultload
+
+ query = session.query(Book)
+ query = query.options(
+ defaultload(Book.author).load_only("summary", "excerpt"),
+ )
+
+.. versionadded:: 0.9.0 support for :class:`.Load` and other options which
+ allow for better targeting of deferral options.
+
Column Deferral API
-------------------
@@ -332,6 +427,8 @@ Column Deferral API
.. autofunction:: defer
+.. autofunction:: load_only
+
.. autofunction:: undefer
.. autofunction:: undefer_group
@@ -570,7 +667,7 @@ issued when the ORM is populating the object::
assert '@' in address
return address
-Validators also receive collection events, when items are added to a
+Validators also receive collection append events, when items are added to a
collection::
from sqlalchemy.orm import validates
@@ -585,6 +682,51 @@ collection::
assert '@' in address.email
return address
+
+The validation function by default does not get emitted for collection
+remove events, as the typical expectation is that a value being discarded
+doesn't require validation. However, :func:`.validates` supports reception
+of these events by specifying ``include_removes=True`` to the decorator. When
+this flag is set, the validation function must receive an additional boolean
+argument which if ``True`` indicates that the operation is a removal::
+
+ from sqlalchemy.orm import validates
+
+ class User(Base):
+ # ...
+
+ addresses = relationship("Address")
+
+ @validates('addresses', include_removes=True)
+ def validate_address(self, key, address, is_remove):
+ if is_remove:
+ raise ValueError(
+ "not allowed to remove items from the collection")
+ else:
+ assert '@' in address.email
+ return address
+
+The case where mutually dependent validators are linked via a backref
+can also be tailored, using the ``include_backrefs=False`` option; this option,
+when set to ``False``, prevents a validation function from emitting if the
+event occurs as a result of a backref::
+
+ from sqlalchemy.orm import validates
+
+ class User(Base):
+ # ...
+
+ addresses = relationship("Address", backref='user')
+
+ @validates('addresses', include_backrefs=False)
+ def validate_address(self, key, address):
+ assert '@' in address.email
+ return address
+
+Above, if we were to assign to ``Address.user`` as in ``some_address.user = some_user``,
+the ``validate_address()`` function would *not* be emitted, even though an append
+occurs to ``some_user.addresses`` - the event is caused by a backref.
+
Note that the :func:`~.validates` decorator is a convenience function built on
top of attribute events. An application that requires more control over
configuration of attribute change behavior can make use of this system,
@@ -592,13 +734,13 @@ described at :class:`~.AttributeEvents`.
.. autofunction:: validates
-.. _synonyms:
+.. _mapper_hybrids:
Using Descriptors and Hybrids
-----------------------------
A more comprehensive way to produce modified behavior for an attribute is to
-use descriptors. These are commonly used in Python using the ``property()``
+use :term:`descriptors`. These are commonly used in Python using the ``property()``
function. The standard SQLAlchemy technique for descriptors is to create a
plain descriptor, and to have it read/write from a mapped attribute with a
different name. Below we illustrate this using Python 2.6-style properties::
@@ -722,14 +864,88 @@ attribute, a SQL function is rendered which produces the same effect:
Read more about Hybrids at :ref:`hybrids_toplevel`.
+.. _synonyms:
+
Synonyms
--------
-Synonyms are a mapper-level construct that applies expression behavior to a descriptor
-based attribute.
+Synonyms are a mapper-level construct that allow any attribute on a class
+to "mirror" another attribute that is mapped.
-.. versionchanged:: 0.7
- The functionality of synonym is superceded as of 0.7 by hybrid attributes.
+In the most basic sense, the synonym is an easy way to make a certain
+attribute available by an additional name::
+
+ class MyClass(Base):
+ __tablename__ = 'my_table'
+
+ id = Column(Integer, primary_key=True)
+ job_status = Column(String(50))
+
+ status = synonym("job_status")
+
+The above class ``MyClass`` has two attributes, ``.job_status`` and
+``.status`` that will behave as one attribute, both at the expression
+level::
+
+ >>> print MyClass.job_status == 'some_status'
+ my_table.job_status = :job_status_1
+
+ >>> print MyClass.status == 'some_status'
+ my_table.job_status = :job_status_1
+
+and at the instance level::
+
+ >>> m1 = MyClass(status='x')
+ >>> m1.status, m1.job_status
+ ('x', 'x')
+
+ >>> m1.job_status = 'y'
+ >>> m1.status, m1.job_status
+ ('y', 'y')
+
+The :func:`.synonym` can be used for any kind of mapped attribute that
+subclasses :class:`.MapperProperty`, including mapped columns and relationships,
+as well as synonyms themselves.
+
+Beyond a simple mirror, :func:`.synonym` can also be made to reference
+a user-defined :term:`descriptor`. We can supply our
+``status`` synonym with a ``@property``::
+
+ class MyClass(Base):
+ __tablename__ = 'my_table'
+
+ id = Column(Integer, primary_key=True)
+ status = Column(String(50))
+
+ @property
+ def job_status(self):
+ return "Status: " + self.status
+
+ job_status = synonym("status", descriptor=job_status)
+
+When using Declarative, the above pattern can be expressed more succinctly
+using the :func:`.synonym_for` decorator::
+
+ from sqlalchemy.ext.declarative import synonym_for
+
+ class MyClass(Base):
+ __tablename__ = 'my_table'
+
+ id = Column(Integer, primary_key=True)
+ status = Column(String(50))
+
+ @synonym_for("status")
+ @property
+ def job_status(self):
+ return "Status: " + self.status
+
+While the :func:`.synonym` is useful for simple mirroring, the use case
+of augmenting attribute behavior with descriptors is better handled in modern
+usage using the :ref:`hybrid attribute <mapper_hybrids>` feature, which
+is more oriented towards Python descriptors. Technically, a :func:`.synonym`
+can do everything that a :class:`.hybrid_property` can do, as it also supports
+injection of custom SQL capabilities, but the hybrid is more straightforward
+to use in more complex situations.
.. autofunction:: synonym
@@ -770,6 +986,10 @@ class you provide.
in-place mutation is no longer automatic; see the section below on
enabling mutability to support tracking of in-place changes.
+.. versionchanged:: 0.9
+ Composites will return their object-form, rather than as individual columns,
+ when used in a column-oriented :class:`.Query` construct. See :ref:`migration_2824`.
+
A simple example represents pairs of columns as a ``Point`` object.
``Point`` represents such a pair as ``.x`` and ``.y``::
@@ -909,6 +1129,54 @@ the same expression that the base "greater than" does::
end = composite(Point, x2, y2,
comparator_factory=PointComparator)
+.. _bundles:
+
+Column Bundles
+===============
+
+The :class:`.Bundle` may be used to query for groups of columns under one
+namespace.
+
+.. versionadded:: 0.9.0
+
+The bundle allows columns to be grouped together::
+
+ from sqlalchemy.orm import Bundle
+
+ bn = Bundle('mybundle', MyClass.data1, MyClass.data2)
+ for row in session.query(bn).filter(bn.c.data1 == 'd1'):
+ print row.mybundle.data1, row.mybundle.data2
+
+The bundle can be subclassed to provide custom behaviors when results
+are fetched. The method :meth:`.Bundle.create_row_processor` is given
+the :class:`.Query` and a set of "row processor" functions at query execution
+time; these processor functions when given a result row will return the
+individual attribute value, which can then be adapted into any kind of
+return data structure. Below illustrates replacing the usual :class:`.KeyedTuple`
+return structure with a straight Python dictionary::
+
+ from sqlalchemy.orm import Bundle
+
+ class DictBundle(Bundle):
+ def create_row_processor(self, query, procs, labels):
+ """Override create_row_processor to return values as dictionaries"""
+ def proc(row, result):
+ return dict(
+ zip(labels, (proc(row, result) for proc in procs))
+ )
+ return proc
+
+A result from the above bundle will return dictionary values::
+
+ bn = DictBundle('mybundle', MyClass.data1, MyClass.data2)
+ for row in session.query(bn).filter(bn.c.data1 == 'd1'):
+ print row.mybundle['data1'], row.mybundle['data2']
+
+The :class:`.Bundle` construct is also integrated into the behavior
+of :func:`.composite`, where it is used to return composite attributes as objects
+when queried as individual attributes.
+
+
.. _maptojoin:
Mapping a Class against Multiple Tables
@@ -1055,6 +1323,9 @@ for each target table. SQLAlchemy refers to this as the "entity name"
pattern, which is described as a recipe at `Entity Name
<http://www.sqlalchemy.org/trac/wiki/UsageRecipes/EntityName>`_.
+
+.. _mapping_constructors:
+
Constructors and Object Initialization
=======================================
@@ -1110,6 +1381,251 @@ of these events.
.. autofunction:: reconstructor
+
+.. _mapper_version_counter:
+
+Configuring a Version Counter
+=============================
+
+The :class:`.Mapper` supports management of a :term:`version id column`, which
+is a single table column that increments or otherwise updates its value
+each time an ``UPDATE`` to the mapped table occurs. This value is checked each
+time the ORM emits an ``UPDATE`` or ``DELETE`` against the row to ensure that
+the value held in memory matches the database value.
+
+The purpose of this feature is to detect when two concurrent transactions
+are modifying the same row at roughly the same time, or alternatively to provide
+a guard against the usage of a "stale" row in a system that might be re-using
+data from a previous transaction without refreshing (e.g. if one sets ``expire_on_commit=False``
+with a :class:`.Session`, it is possible to re-use the data from a previous
+transaction).
+
+.. topic:: Concurrent transaction updates
+
+ When detecting concurrent updates within transactions, it is typically the
+ case that the database's transaction isolation level is below the level of
+ :term:`repeatable read`; otherwise, the transaction will not be exposed
+ to a new row value created by a concurrent update which conflicts with
+ the locally updated value. In this case, the SQLAlchemy versioning
+ feature will typically not be useful for in-transaction conflict detection,
+ though it still can be used for cross-transaction staleness detection.
+
+ The database that enforces repeatable reads will typically either have locked the
+ target row against a concurrent update, or is employing some form
+ of multi version concurrency control such that it will emit an error
+ when the transaction is committed. SQLAlchemy's version_id_col is an alternative
+ which allows version tracking to occur for specific tables within a transaction
+ that otherwise might not have this isolation level set.
+
+ .. seealso::
+
+ `Repeatable Read Isolation Level <http://www.postgresql.org/docs/9.1/static/transaction-iso.html#XACT-REPEATABLE-READ>`_ - Postgresql's implementation of repeatable read, including a description of the error condition.
+
+Simple Version Counting
+-----------------------
+
+The most straightforward way to track versions is to add an integer column
+to the mapped table, then establish it as the ``version_id_col`` within the
+mapper options::
+
+ class User(Base):
+ __tablename__ = 'user'
+
+ id = Column(Integer, primary_key=True)
+ version_id = Column(Integer, nullable=False)
+ name = Column(String(50), nullable=False)
+
+ __mapper_args__ = {
+ "version_id_col": version_id
+ }
+
+Above, the ``User`` mapping tracks integer versions using the column
+``version_id``. When an object of type ``User`` is first flushed, the
+``version_id`` column will be given a value of "1". Then, an UPDATE
+of the table later on will always be emitted in a manner similar to the
+following::
+
+ UPDATE user SET version_id=:version_id, name=:name
+ WHERE user.id = :user_id AND user.version_id = :user_version_id
+ {"name": "new name", "version_id": 2, "user_id": 1, "user_version_id": 1}
+
+The above UPDATE statement is updating the row that not only matches
+``user.id = 1``, it also is requiring that ``user.version_id = 1``, where "1"
+is the last version identifier we've been known to use on this object.
+If a transaction elsewhere has modified the row independently, this version id
+will no longer match, and the UPDATE statement will report that no rows matched;
+this is the condition that SQLAlchemy tests, that exactly one row matched our
+UPDATE (or DELETE) statement. If zero rows match, that indicates our version
+of the data is stale, and a :exc:`.StaleDataError` is raised.
+
+.. _custom_version_counter:
+
+Custom Version Counters / Types
+-------------------------------
+
+Other kinds of values or counters can be used for versioning. Common types include
+dates and GUIDs. When using an alternate type or counter scheme, SQLAlchemy
+provides a hook for this scheme using the ``version_id_generator`` argument,
+which accepts a version generation callable. This callable is passed the value of the current
+known version, and is expected to return the subsequent version.
+
+For example, if we wanted to track the versioning of our ``User`` class
+using a randomly generated GUID, we could do this (note that some backends
+support a native GUID type, but we illustrate here using a simple string)::
+
+ import uuid
+
+ class User(Base):
+ __tablename__ = 'user'
+
+ id = Column(Integer, primary_key=True)
+ version_uuid = Column(String(32))
+ name = Column(String(50), nullable=False)
+
+ __mapper_args__ = {
+ 'version_id_col':version_uuid,
+ 'version_id_generator':lambda version: uuid.uuid4().hex
+ }
+
+The persistence engine will call upon ``uuid.uuid4()`` each time a
+``User`` object is subject to an INSERT or an UPDATE. In this case, our
+version generation function can disregard the incoming value of ``version``,
+as the ``uuid4()`` function
+generates identifiers without any prerequisite value. If we were using
+a sequential versioning scheme such as numeric or a special character system,
+we could make use of the given ``version`` in order to help determine the
+subsequent value.
+
+.. seealso::
+
+ :ref:`custom_guid_type`
+
+.. _server_side_version_counter:
+
+Server Side Version Counters
+----------------------------
+
+The ``version_id_generator`` can also be configured to rely upon a value
+that is generated by the database. In this case, the database would need
+some means of generating new identifiers when a row is subject to an INSERT
+as well as with an UPDATE. For the UPDATE case, typically an update trigger
+is needed, unless the database in question supports some other native
+version identifier. The Postgresql database in particular supports a system
+column called `xmin <http://www.postgresql.org/docs/9.1/static/ddl-system-columns.html>`_
+which provides UPDATE versioning. We can make use
+of the Postgresql ``xmin`` column to version our ``User``
+class as follows::
+
+ class User(Base):
+ __tablename__ = 'user'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50), nullable=False)
+ xmin = Column("xmin", Integer, system=True)
+
+ __mapper_args__ = {
+ 'version_id_col': xmin,
+ 'version_id_generator': False
+ }
+
+With the above mapping, the ORM will rely upon the ``xmin`` column for
+automatically providing the new value of the version id counter.
+
+.. topic:: creating tables that refer to system columns
+
+ In the above scenario, as ``xmin`` is a system column provided by Postgresql,
+ we use the ``system=True`` argument to mark it as a system-provided
+ column, omitted from the ``CREATE TABLE`` statement.
+
+
+The ORM typically does not actively fetch the values of database-generated
+values when it emits an INSERT or UPDATE, instead leaving these columns as
+"expired" and to be fetched when they are next accessed, unless the ``eager_defaults``
+:func:`.mapper` flag is set. However, when a
+server side version column is used, the ORM needs to actively fetch the newly
+generated value. This is so that the version counter is set up *before*
+any concurrent transaction may update it again. This fetching is also
+best done simultaneously within the INSERT or UPDATE statement using :term:`RETURNING`,
+otherwise if emitting a SELECT statement afterwards, there is still a potential
+race condition where the version counter may change before it can be fetched.
+
+When the target database supports RETURNING, an INSERT statement for our ``User`` class will look
+like this::
+
+ INSERT INTO "user" (name) VALUES (%(name)s) RETURNING "user".id, "user".xmin
+ {'name': 'ed'}
+
+Where above, the ORM can acquire any newly generated primary key values along
+with server-generated version identifiers in one statement. When the backend
+does not support RETURNING, an additional SELECT must be emitted for **every**
+INSERT and UPDATE, which is much less efficient, and also introduces the possibility of
+missed version counters::
+
+ INSERT INTO "user" (name) VALUES (%(name)s)
+ {'name': 'ed'}
+
+ SELECT "user".version_id AS user_version_id FROM "user" where
+ "user".id = :param_1
+ {"param_1": 1}
+
+It is *strongly recommended* that server side version counters only be used
+when absolutely necessary and only on backends that support :term:`RETURNING`,
+e.g. Postgresql, Oracle, SQL Server (though SQL Server has
+`major caveats <http://blogs.msdn.com/b/sqlprogrammability/archive/2008/07/11/update-with-output-clause-triggers-and-sqlmoreresults.aspx>`_ when triggers are used), Firebird.
+
+.. versionadded:: 0.9.0
+
+ Support for server side version identifier tracking.
+
+Programmatic or Conditional Version Counters
+---------------------------------------------
+
+When ``version_id_generator`` is set to False, we can also programmatically
+(and conditionally) set the version identifier on our object in the same way
+we assign any other mapped attribute. Such as if we used our UUID example, but
+set ``version_id_generator`` to ``False``, we can set the version identifier
+at our choosing::
+
+ import uuid
+
+ class User(Base):
+ __tablename__ = 'user'
+
+ id = Column(Integer, primary_key=True)
+ version_uuid = Column(String(32))
+ name = Column(String(50), nullable=False)
+
+ __mapper_args__ = {
+ 'version_id_col':version_uuid,
+ 'version_id_generator': False
+ }
+
+ u1 = User(name='u1', version_uuid=uuid.uuid4())
+
+ session.add(u1)
+
+ session.commit()
+
+ u1.name = 'u2'
+ u1.version_uuid = uuid.uuid4()
+
+ session.commit()
+
+We can update our ``User`` object without incrementing the version counter
+as well; the value of the counter will remain unchanged, and the UPDATE
+statement will still check against the previous value. This may be useful
+for schemes where only certain classes of UPDATE are sensitive to concurrency
+issues::
+
+ # will leave version_uuid unchanged
+ u1.name = 'u3'
+ session.commit()
+
+.. versionadded:: 0.9.0
+
+ Support for programmatic and conditional version identifier tracking.
+
+
Class Mapping API
=================
diff --git a/doc/build/orm/query.rst b/doc/build/orm/query.rst
index 73aa5c555..5e31d710f 100644
--- a/doc/build/orm/query.rst
+++ b/doc/build/orm/query.rst
@@ -13,7 +13,7 @@ For an in-depth introduction to querying with the SQLAlchemy ORM, please see the
The Query Object
----------------
-:class:`~.Query` is produced in terms of a given :class:`~.Session`, using the :func:`~.Query.query` function::
+:class:`~.Query` is produced in terms of a given :class:`~.Session`, using the :meth:`~.Session.query` method::
q = session.query(SomeMappedClass)
@@ -31,9 +31,15 @@ ORM-Specific Query Constructs
.. autoclass:: sqlalchemy.orm.util.AliasedInsp
+.. autoclass:: sqlalchemy.orm.query.Bundle
+ :members:
+
.. autoclass:: sqlalchemy.util.KeyedTuple
:members: keys, _fields, _asdict
+.. autoclass:: sqlalchemy.orm.strategy_options.Load
+ :members:
+
.. autofunction:: join
.. autofunction:: outerjoin
diff --git a/doc/build/orm/relationships.rst b/doc/build/orm/relationships.rst
index e98ec657c..67a41c808 100644
--- a/doc/build/orm/relationships.rst
+++ b/doc/build/orm/relationships.rst
@@ -967,7 +967,7 @@ load those ``Address`` objects which specify a city of "Boston"::
__tablename__ = 'user'
id = Column(Integer, primary_key=True)
name = Column(String)
- addresses = relationship("Address",
+ boston_addresses = relationship("Address",
primaryjoin="and_(User.id==Address.user_id, "
"Address.city=='Boston')")
diff --git a/doc/build/orm/session.rst b/doc/build/orm/session.rst
index 3e35f02cb..c979586f2 100644
--- a/doc/build/orm/session.rst
+++ b/doc/build/orm/session.rst
@@ -198,196 +198,319 @@ at the same time).
.. _session_faq:
-.. _session_faq_whentocreate:
-
Session Frequently Asked Questions
-----------------------------------
-* When do I make a :class:`.sessionmaker` ?
-
- Just one time, somewhere in your application's global scope. It should be
- looked upon as part of your application's configuration. If your
- application has three .py files in a package, you could, for example,
- place the :class:`.sessionmaker` line in your ``__init__.py`` file; from
- that point on your other modules say "from mypackage import Session". That
- way, everyone else just uses :class:`.Session()`,
- and the configuration of that session is controlled by that central point.
-
- If your application starts up, does imports, but does not know what
- database it's going to be connecting to, you can bind the
- :class:`.Session` at the "class" level to the
- engine later on, using :meth:`.sessionmaker.configure`.
-
- In the examples in this section, we will frequently show the
- :class:`.sessionmaker` being created right above the line where we actually
- invoke :class:`.Session`. But that's just for
- example's sake! In reality, the :class:`.sessionmaker` would be somewhere
- at the module level. The calls to instantiate :class:`.Session`
- would then be placed at the point in the application where database
- conversations begin.
-
-* When do I construct a :class:`.Session`, when do I commit it, and when do I close it ?
-
- A :class:`.Session` is typically constructed at the beginning of a logical
- operation where database access is potentially anticipated.
-
- The :class:`.Session`, whenever it is used to talk to the database,
- begins a database transaction as soon as it starts communicating.
- Assuming the ``autocommit`` flag is left at its recommended default
- of ``False``, this transaction remains in progress until the :class:`.Session`
- is rolled back, committed, or closed. The :class:`.Session` will
- begin a new transaction if it is used again, subsequent to the previous
- transaction ending; from this it follows that the :class:`.Session`
- is capable of having a lifespan across many transactions, though only
- one at a time. We refer to these two concepts as **transaction scope**
- and **session scope**.
-
- The implication here is that the SQLAlchemy ORM is encouraging the
- developer to establish these two scopes in his or her application,
- including not only when the scopes begin and end, but also the
- expanse of those scopes, for example should a single
- :class:`.Session` instance be local to the execution flow within a
- function or method, should it be a global object used by the
- entire application, or somewhere in between these two.
-
- The burden placed on the developer to determine this scope is one
- area where the SQLAlchemy ORM necessarily has a strong opinion
- about how the database should be used. The unit-of-work pattern
- is specifically one of accumulating changes over time and flushing
- them periodically, keeping in-memory state in sync with what's
- known to be present in a local transaction. This pattern is only
- effective when meaningful transaction scopes are in place.
-
- It's usually not very hard to determine the best points at which
- to begin and end the scope of a :class:`.Session`, though the wide
- variety of application architectures possible can introduce
- challenging situations.
-
- A common choice is to tear down the :class:`.Session` at the same
- time the transaction ends, meaning the transaction and session scopes
- are the same. This is a great choice to start out with as it
- removes the need to consider session scope as separate from transaction
- scope.
-
- While there's no one-size-fits-all recommendation for how transaction
- scope should be determined, there are common patterns. Especially
- if one is writing a web application, the choice is pretty much established.
-
- A web application is the easiest case because such an appication is already
- constructed around a single, consistent scope - this is the **request**,
- which represents an incoming request from a browser, the processing
- of that request to formulate a response, and finally the delivery of that
- response back to the client. Integrating web applications with the
- :class:`.Session` is then the straightforward task of linking the
- scope of the :class:`.Session` to that of the request. The :class:`.Session`
- can be established as the request begins, or using a **lazy initialization**
- pattern which establishes one as soon as it is needed. The request
- then proceeds, with some system in place where application logic can access
- the current :class:`.Session` in a manner associated with how the actual
- request object is accessed. As the request ends, the :class:`.Session`
- is torn down as well, usually through the usage of event hooks provided
- by the web framework. The transaction used by the :class:`.Session`
- may also be committed at this point, or alternatively the application may
- opt for an explicit commit pattern, only committing for those requests
- where one is warranted, but still always tearing down the :class:`.Session`
- unconditionally at the end.
-
- Most web frameworks include infrastructure to establish a single
- :class:`.Session`, associated with the request, which is correctly
- constructed and torn down corresponding
- torn down at the end of a request. Such infrastructure pieces
- include products such as `Flask-SQLAlchemy <http://packages.python.org/Flask-SQLAlchemy/>`_,
- for usage in conjunction with the Flask web framework,
- and `Zope-SQLAlchemy <http://pypi.python.org/pypi/zope.sqlalchemy>`_,
- for usage in conjunction with the Pyramid and Zope frameworks.
- SQLAlchemy strongly recommends that these products be used as
- available.
-
- In those situations where integration libraries are not available,
- SQLAlchemy includes its own "helper" class known as
- :class:`.scoped_session`. A tutorial on the usage of this object
- is at :ref:`unitofwork_contextual`. It provides both a quick way
- to associate a :class:`.Session` with the current thread, as well as
- patterns to associate :class:`.Session` objects with other kinds of
- scopes.
-
- As mentioned before, for non-web applications there is no one clear
- pattern, as applications themselves don't have just one pattern
- of architecture. The best strategy is to attempt to demarcate
- "operations", points at which a particular thread begins to perform
- a series of operations for some period of time, which can be committed
- at the end. Some examples:
-
- * A background daemon which spawns off child forks
- would want to create a :class:`.Session` local to each child
- process work with that :class:`.Session` through the life of the "job"
- that the fork is handling, then tear it down when the job is completed.
-
- * For a command-line script, the application would create a single, global
- :class:`.Session` that is established when the program begins to do its
- work, and commits it right as the program is completing its task.
-
- * For a GUI interface-driven application, the scope of the :class:`.Session`
- may best be within the scope of a user-generated event, such as a button
- push. Or, the scope may correspond to explicit user interaction, such as
- the user "opening" a series of records, then "saving" them.
-
-* Is the Session a cache ?
-
- Yeee...no. It's somewhat used as a cache, in that it implements the
- identity map pattern, and stores objects keyed to their primary key.
- However, it doesn't do any kind of query caching. This means, if you say
- ``session.query(Foo).filter_by(name='bar')``, even if ``Foo(name='bar')``
- is right there, in the identity map, the session has no idea about that.
- It has to issue SQL to the database, get the rows back, and then when it
- sees the primary key in the row, *then* it can look in the local identity
- map and see that the object is already there. It's only when you say
- ``query.get({some primary key})`` that the
- :class:`~sqlalchemy.orm.session.Session` doesn't have to issue a query.
-
- Additionally, the Session stores object instances using a weak reference
- by default. This also defeats the purpose of using the Session as a cache.
-
- The :class:`.Session` is not designed to be a
- global object from which everyone consults as a "registry" of objects.
- That's more the job of a **second level cache**. SQLAlchemy provides
- a pattern for implementing second level caching using `dogpile.cache <http://dogpilecache.readthedocs.org/>`_,
- via the :ref:`examples_caching` example.
-
-* How can I get the :class:`~sqlalchemy.orm.session.Session` for a certain object ?
-
- Use the :meth:`~.Session.object_session` classmethod
- available on :class:`~sqlalchemy.orm.session.Session`::
-
- session = Session.object_session(someobject)
-
-* Is the session thread-safe?
-
- The :class:`.Session` is very much intended to be used in a
- **non-concurrent** fashion, which usually means in only one thread at a
- time.
-
- The :class:`.Session` should be used in such a way that one
- instance exists for a single series of operations within a single
- transaction. One expedient way to get this effect is by associating
- a :class:`.Session` with the current thread (see :ref:`unitofwork_contextual`
- for background). Another is to use a pattern
- where the :class:`.Session` is passed between functions and is otherwise
- not shared with other threads.
-
- The bigger point is that you should not *want* to use the session
- with multiple concurrent threads. That would be like having everyone at a
- restaurant all eat from the same plate. The session is a local "workspace"
- that you use for a specific set of tasks; you don't want to, or need to,
- share that session with other threads who are doing some other task.
-
- If there are in fact multiple threads participating
- in the same task, then you may consider sharing the session between
- those threads, though this would be an extremely unusual scenario.
- In this case it would be necessary
- to implement a proper locking scheme so that the :class:`.Session` is still not
- exposed to concurrent access.
+When do I make a :class:`.sessionmaker`?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Just one time, somewhere in your application's global scope. It should be
+looked upon as part of your application's configuration. If your
+application has three .py files in a package, you could, for example,
+place the :class:`.sessionmaker` line in your ``__init__.py`` file; from
+that point on your other modules say "from mypackage import Session". That
+way, everyone else just uses :class:`.Session()`,
+and the configuration of that session is controlled by that central point.
+
+If your application starts up, does imports, but does not know what
+database it's going to be connecting to, you can bind the
+:class:`.Session` at the "class" level to the
+engine later on, using :meth:`.sessionmaker.configure`.
+
+In the examples in this section, we will frequently show the
+:class:`.sessionmaker` being created right above the line where we actually
+invoke :class:`.Session`. But that's just for
+example's sake! In reality, the :class:`.sessionmaker` would be somewhere
+at the module level. The calls to instantiate :class:`.Session`
+would then be placed at the point in the application where database
+conversations begin.
+
+.. _session_faq_whentocreate:
+
+When do I construct a :class:`.Session`, when do I commit it, and when do I close it?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. topic:: tl;dr;
+
+ As a general rule, keep the lifecycle of the session **separate and
+ external** from functions and objects that access and/or manipulate
+ database data.
+
+A :class:`.Session` is typically constructed at the beginning of a logical
+operation where database access is potentially anticipated.
+
+The :class:`.Session`, whenever it is used to talk to the database,
+begins a database transaction as soon as it starts communicating.
+Assuming the ``autocommit`` flag is left at its recommended default
+of ``False``, this transaction remains in progress until the :class:`.Session`
+is rolled back, committed, or closed. The :class:`.Session` will
+begin a new transaction if it is used again, subsequent to the previous
+transaction ending; from this it follows that the :class:`.Session`
+is capable of having a lifespan across many transactions, though only
+one at a time. We refer to these two concepts as **transaction scope**
+and **session scope**.
+
+The implication here is that the SQLAlchemy ORM is encouraging the
+developer to establish these two scopes in their application,
+including not only when the scopes begin and end, but also the
+expanse of those scopes, for example should a single
+:class:`.Session` instance be local to the execution flow within a
+function or method, should it be a global object used by the
+entire application, or somewhere in between these two.
+
+The burden placed on the developer to determine this scope is one
+area where the SQLAlchemy ORM necessarily has a strong opinion
+about how the database should be used. The :term:`unit of work` pattern
+is specifically one of accumulating changes over time and flushing
+them periodically, keeping in-memory state in sync with what's
+known to be present in a local transaction. This pattern is only
+effective when meaningful transaction scopes are in place.
+
+It's usually not very hard to determine the best points at which
+to begin and end the scope of a :class:`.Session`, though the wide
+variety of application architectures possible can introduce
+challenging situations.
+
+A common choice is to tear down the :class:`.Session` at the same
+time the transaction ends, meaning the transaction and session scopes
+are the same. This is a great choice to start out with as it
+removes the need to consider session scope as separate from transaction
+scope.
+
+While there's no one-size-fits-all recommendation for how transaction
+scope should be determined, there are common patterns. Especially
+if one is writing a web application, the choice is pretty much established.
+
+A web application is the easiest case because such an application is already
+constructed around a single, consistent scope - this is the **request**,
+which represents an incoming request from a browser, the processing
+of that request to formulate a response, and finally the delivery of that
+response back to the client. Integrating web applications with the
+:class:`.Session` is then the straightforward task of linking the
+scope of the :class:`.Session` to that of the request. The :class:`.Session`
+can be established as the request begins, or using a :term:`lazy initialization`
+pattern which establishes one as soon as it is needed. The request
+then proceeds, with some system in place where application logic can access
+the current :class:`.Session` in a manner associated with how the actual
+request object is accessed. As the request ends, the :class:`.Session`
+is torn down as well, usually through the usage of event hooks provided
+by the web framework. The transaction used by the :class:`.Session`
+may also be committed at this point, or alternatively the application may
+opt for an explicit commit pattern, only committing for those requests
+where one is warranted, but still always tearing down the :class:`.Session`
+unconditionally at the end.
+
+Most web frameworks include infrastructure to establish a single
+:class:`.Session`, associated with the request, which is correctly
+constructed and torn down at the end of a request. Such infrastructure pieces
+include products such as `Flask-SQLAlchemy <http://packages.python.org/Flask-SQLAlchemy/>`_,
+for usage in conjunction with the Flask web framework,
+and `Zope-SQLAlchemy <http://pypi.python.org/pypi/zope.sqlalchemy>`_,
+for usage in conjunction with the Pyramid and Zope frameworks.
+SQLAlchemy strongly recommends that these products be used as
+available.
+
+In those situations where integration libraries are not available,
+SQLAlchemy includes its own "helper" class known as
+:class:`.scoped_session`. A tutorial on the usage of this object
+is at :ref:`unitofwork_contextual`. It provides both a quick way
+to associate a :class:`.Session` with the current thread, as well as
+patterns to associate :class:`.Session` objects with other kinds of
+scopes.
+
+As mentioned before, for non-web applications there is no one clear
+pattern, as applications themselves don't have just one pattern
+of architecture. The best strategy is to attempt to demarcate
+"operations", points at which a particular thread begins to perform
+a series of operations for some period of time, which can be committed
+at the end. Some examples:
+
+* A background daemon which spawns off child forks
+ would want to create a :class:`.Session` local to each child
+ process, work with that :class:`.Session` through the life of the "job"
+ that the fork is handling, then tear it down when the job is completed.
+
+* For a command-line script, the application would create a single, global
+ :class:`.Session` that is established when the program begins to do its
+ work, and commits it right as the program is completing its task.
+
+* For a GUI interface-driven application, the scope of the :class:`.Session`
+ may best be within the scope of a user-generated event, such as a button
+ push. Or, the scope may correspond to explicit user interaction, such as
+ the user "opening" a series of records, then "saving" them.
+
+As a general rule, the application should manage the lifecycle of the
+session *externally* to functions that deal with specific data. This is a
+fundamental separation of concerns which keeps data-specific operations
+agnostic of the context in which they access and manipulate that data.
+
+E.g. **don't do this**::
+
+ ### this is the **wrong way to do it** ###
+
+ class ThingOne(object):
+ def go(self):
+ session = Session()
+ try:
+ session.query(FooBar).update({"x": 5})
+ session.commit()
+ except:
+ session.rollback()
+ raise
+
+ class ThingTwo(object):
+ def go(self):
+ session = Session()
+ try:
+ session.query(Widget).update({"q": 18})
+ session.commit()
+ except:
+ session.rollback()
+ raise
+
+ def run_my_program():
+ ThingOne().go()
+ ThingTwo().go()
+
+Keep the lifecycle of the session (and usually the transaction)
+**separate and external**::
+
+ ### this is a **better** (but not the only) way to do it ###
+
+ class ThingOne(object):
+ def go(self, session):
+ session.query(FooBar).update({"x": 5})
+
+ class ThingTwo(object):
+ def go(self, session):
+ session.query(Widget).update({"q": 18})
+
+ def run_my_program():
+ session = Session()
+ try:
+ ThingOne().go(session)
+ ThingTwo().go(session)
+
+ session.commit()
+ except:
+ session.rollback()
+ raise
+ finally:
+ session.close()
+
+The advanced developer will try to keep the details of session, transaction
+and exception management as far as possible from the details of the program
+doing its work. For example, we can further separate concerns using a `context manager <http://docs.python.org/3/library/contextlib.html#contextlib.contextmanager>`_::
+
+ ### another way (but again *not the only way*) to do it ###
+
+ from contextlib import contextmanager
+
+ @contextmanager
+ def session_scope():
+ """Provide a transactional scope around a series of operations."""
+ session = Session()
+ try:
+ yield session
+ session.commit()
+ except:
+ session.rollback()
+ raise
+ finally:
+ session.close()
+
+
+ def run_my_program():
+ with session_scope() as session:
+ ThingOne().go(session)
+ ThingTwo().go(session)
+
+
+Is the Session a cache?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Yeee...no. It's somewhat used as a cache, in that it implements the
+:term:`identity map` pattern, and stores objects keyed to their primary key.
+However, it doesn't do any kind of query caching. This means, if you say
+``session.query(Foo).filter_by(name='bar')``, even if ``Foo(name='bar')``
+is right there, in the identity map, the session has no idea about that.
+It has to issue SQL to the database, get the rows back, and then when it
+sees the primary key in the row, *then* it can look in the local identity
+map and see that the object is already there. It's only when you say
+``query.get({some primary key})`` that the
+:class:`~sqlalchemy.orm.session.Session` doesn't have to issue a query.
+
+Additionally, the Session stores object instances using a weak reference
+by default. This also defeats the purpose of using the Session as a cache.
+
+The :class:`.Session` is not designed to be a
+global object from which everyone consults as a "registry" of objects.
+That's more the job of a **second level cache**. SQLAlchemy provides
+a pattern for implementing second level caching using `dogpile.cache <http://dogpilecache.readthedocs.org/>`_,
+via the :ref:`examples_caching` example.
+
+How can I get the :class:`~sqlalchemy.orm.session.Session` for a certain object?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Use the :meth:`~.Session.object_session` classmethod
+available on :class:`~sqlalchemy.orm.session.Session`::
+
+ session = Session.object_session(someobject)
+
+The newer :ref:`core_inspection_toplevel` system can also be used::
+
+ from sqlalchemy import inspect
+ session = inspect(someobject).session
+
+.. _session_faq_threadsafe:
+
+Is the session thread-safe?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The :class:`.Session` is very much intended to be used in a
+**non-concurrent** fashion, which usually means in only one thread at a
+time.
+
+The :class:`.Session` should be used in such a way that one
+instance exists for a single series of operations within a single
+transaction. One expedient way to get this effect is by associating
+a :class:`.Session` with the current thread (see :ref:`unitofwork_contextual`
+for background). Another is to use a pattern
+where the :class:`.Session` is passed between functions and is otherwise
+not shared with other threads.
+
+The bigger point is that you should not *want* to use the session
+with multiple concurrent threads. That would be like having everyone at a
+restaurant all eat from the same plate. The session is a local "workspace"
+that you use for a specific set of tasks; you don't want to, or need to,
+share that session with other threads who are doing some other task.
+
+Making sure the :class:`.Session` is only used in a single concurrent thread at a time
+is called a "share nothing" approach to concurrency. But actually, not
+sharing the :class:`.Session` implies a more significant pattern; it
+means not just the :class:`.Session` object itself, but
+also **all objects that are associated with that Session**, must be kept within
+the scope of a single concurrent thread. The set of mapped
+objects associated with a :class:`.Session` are essentially proxies for data
+within database rows accessed over a database connection, and so just like
+the :class:`.Session` itself, the whole
+set of objects is really just a large-scale proxy for a database connection
+(or connections). Ultimately, it's mostly the DBAPI connection itself that
+we're keeping away from concurrent access; but since the :class:`.Session`
+and all the objects associated with it are all proxies for that DBAPI connection,
+the entire graph is essentially not safe for concurrent access.
+
+If there are in fact multiple threads participating
+in the same task, then you may consider sharing the session and its objects between
+those threads; however, in this extremely unusual scenario the application would
+need to ensure that a proper locking scheme is implemented so that there isn't
+*concurrent* access to the :class:`.Session` or its state. A more common approach
+to this situation is to maintain a single :class:`.Session` per concurrent thread,
+but to instead *copy* objects from one :class:`.Session` to another, often
+using the :meth:`.Session.merge` method to copy the state of an object into
+a new object local to a different :class:`.Session`.
Querying
--------
@@ -679,6 +802,8 @@ into the Session's list of objects to be marked as deleted::
# commit (or flush)
session.commit()
+.. _session_deleting_from_collections:
+
Deleting from Collections
~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1472,6 +1597,8 @@ flush/commit operation, the ``value`` attribute on ``someobject`` above is
expired, so that when next accessed the newly generated value will be loaded
from the database.
+.. _session_sql_expressions:
+
Using SQL Expressions with Sessions
====================================
@@ -1521,8 +1648,8 @@ proper context for the desired engine::
.. _session_external_transaction:
-Joining a Session into an External Transaction
-===============================================
+Joining a Session into an External Transaction (such as for test suites)
+========================================================================
If a :class:`.Connection` is being used which is already in a transactional
state (i.e. has a :class:`.Transaction` established), a :class:`.Session` can
@@ -1559,11 +1686,12 @@ entire database interaction is rolled back::
self.session.commit()
def tearDown(self):
+ self.session.close()
+
# rollback - everything that happened with the
# Session above (including calls to commit())
# is rolled back.
self.trans.rollback()
- self.session.close()
# return connection to the Engine
self.connection.close()
@@ -1575,6 +1703,42 @@ nested begin/commit-or-rollback pairs where only the outermost begin/commit
pair actually commits the transaction, or if the outermost block rolls back,
everything is rolled back.
+.. topic:: Supporting Tests with Rollbacks
+
+ The above recipe works well for any kind of database enabled test, except
+ for a test that needs to actually invoke :meth:`.Session.rollback` within
+ the scope of the test itself. The above recipe can be expanded, such
+ that the :class:`.Session` always runs all operations within the scope
+ of a SAVEPOINT, which is established at the start of each transaction,
+ so that tests can also rollback the "transaction" as well while still
+ remaining in the scope of a larger "transaction" that's never committed,
+ using two extra events::
+
+ from sqlalchemy import event
+
+ class SomeTest(TestCase):
+ def setUp(self):
+ # connect to the database
+ self.connection = engine.connect()
+
+ # begin a non-ORM transaction
+ self.trans = self.connection.begin()
+
+ # bind an individual Session to the connection
+ self.session = Session(bind=self.connection)
+
+ # start the session in a SAVEPOINT...
+ self.session.begin_nested()
+
+ # then each time that SAVEPOINT ends, reopen it
+ @event.listens_for(self.session, "after_transaction_end")
+ def restart_savepoint(session, transaction):
+ if transaction.nested and not transaction._parent.nested:
+ session.begin_nested()
+
+
+ # ... the tearDown() method stays the same
+
.. _unitofwork_contextual:
Contextual/Thread-local Sessions
@@ -1636,7 +1800,7 @@ we call upon the registry a second time, we get back the **same** :class:`.Sessi
This pattern allows disparate sections of the application to call upon a global
:class:`.scoped_session`, so that all those areas may share the same session
without the need to pass it explicitly. The :class:`.Session` we've established
-in our registry will remain, until we explicitly tell our regsitry to dispose of it,
+in our registry will remain, until we explicitly tell our registry to dispose of it,
by calling :meth:`.scoped_session.remove`::
>>> Session.remove()
@@ -1928,12 +2092,10 @@ Session and sessionmaker()
.. autoclass:: sessionmaker
:members:
- :show-inheritance:
:inherited-members:
.. autoclass:: sqlalchemy.orm.session.Session
:members:
- :show-inheritance:
:inherited-members:
.. autoclass:: sqlalchemy.orm.session.SessionTransaction
diff --git a/doc/build/orm/tutorial.rst b/doc/build/orm/tutorial.rst
index 9686d7c85..aa9a51178 100644
--- a/doc/build/orm/tutorial.rst
+++ b/doc/build/orm/tutorial.rst
@@ -8,8 +8,7 @@ The SQLAlchemy Object Relational Mapper presents a method of associating
user-defined Python classes with database tables, and instances of those
classes (objects) with rows in their corresponding tables. It includes a
system that transparently synchronizes all changes in state between objects
-and their related rows, called a `unit of work
-<http://martinfowler.com/eaaCatalog/unitOfWork.html>`_, as well as a system
+and their related rows, called a :term:`unit of work`, as well as a system
for expressing database queries in terms of the user defined classes and their
defined relationships between each other.
@@ -23,8 +22,7 @@ example of applied usage of the Expression Language.
While there is overlap among the usage patterns of the ORM and the Expression
Language, the similarities are more superficial than they may at first appear.
One approaches the structure and content of data from the perspective of a
-user-defined `domain model
-<http://en.wikipedia.org/wiki/Domain_model>`_ which is transparently
+user-defined :term:`domain model` which is transparently
persisted and refreshed from its underlying storage model. The other
approaches it from the perspective of literal schema and SQL expression
representations which are explicitly composed into messages consumed
@@ -42,11 +40,11 @@ following text represents the expected return value.
Version Check
=============
-A quick check to verify that we are on at least **version 0.8** of SQLAlchemy::
+A quick check to verify that we are on at least **version 0.9** of SQLAlchemy::
>>> import sqlalchemy
>>> sqlalchemy.__version__ # doctest:+SKIP
- 0.8.0
+ 0.9.0
Connecting
==========
@@ -65,26 +63,21 @@ the SQL behind a popup window so it doesn't get in our way; just click the
"SQL" links to see what's being generated.
The return value of :func:`.create_engine` is an instance of :class:`.Engine`, and it represents
-the core interface to the database, adapted through a **dialect** that handles the details
-of the database and DBAPI in use. In this case the SQLite dialect will interpret instructions
+the core interface to the database, adapted through a :term:`dialect` that handles the details
+of the database and :term:`DBAPI` in use. In this case the SQLite dialect will interpret instructions
to the Python built-in ``sqlite3`` module.
-The :class:`.Engine` has not actually tried to connect to the database yet; that happens
-only the first time it is asked to perform a task against the database. We can illustrate
-this by asking it to perform a simple SELECT statement:
+.. sidebar:: Lazy Connecting
-.. sourcecode:: python+sql
+ The :class:`.Engine`, when first returned by :func:`.create_engine`,
+ has not actually tried to connect to the database yet; that happens
+ only the first time it is asked to perform a task against the database.
- {sql}>>> engine.execute("select 1").scalar()
- select 1
- ()
- {stop}1
-
-As the :meth:`.Engine.execute` method is called, the :class:`.Engine` establishes a connection to the
-SQLite database, which is then used to emit the SQL. The connection is then returned to an internal
-connection pool where it will be reused on subsequent statement executions. While we illustrate direct usage of the
-:class:`.Engine` here, this isn't typically necessary when using the ORM, where the :class:`.Engine`,
-once created, is used behind the scenes by the ORM as we'll see shortly.
+The first time a method like :meth:`.Engine.execute` or :meth:`.Engine.connect`
+is called, the :class:`.Engine` establishes a real :term:`DBAPI` connection to the
+database, which is then used to emit the SQL. When using the ORM, we typically
+don't use the :class:`.Engine` directly once created; instead, it's used
+behind the scenes by the ORM as we'll see shortly.
Declare a Mapping
=================
@@ -111,11 +104,9 @@ function, as follows::
Now that we have a "base", we can define any number of mapped classes in terms
of it. We will start with just a single table called ``users``, which will store
records for the end-users using our application.
-A new class called ``User`` will be the class to which we map this table. The
-imports we'll need to accomplish this include objects that represent the components
-of our table, including the :class:`.Column` class which represents a database column,
-as well as the :class:`.Integer` and :class:`.String` classes that
-represent basic datatypes used in columns::
+A new class called ``User`` will be the class to which we map this table. Within
+the class, we define details about the table to which we'll be mapping, primarily
+the table name, and names and datatypes of columns::
>>> from sqlalchemy import Column, Integer, String
>>> class User(Base):
@@ -126,71 +117,84 @@ represent basic datatypes used in columns::
... fullname = Column(String)
... password = Column(String)
...
- ... def __init__(self, name, fullname, password):
- ... self.name = name
- ... self.fullname = fullname
- ... self.password = password
- ...
... def __repr__(self):
- ... return "<User('%s','%s', '%s')>" % (self.name, self.fullname, self.password)
-
-The above ``User`` class establishes details about the table being mapped, including the name of the table denoted
-by the ``__tablename__`` attribute, a set of columns ``id``, ``name``, ``fullname`` and ``password``,
-where the ``id`` column will also be the primary key of the table. While its certainly possible
-that some database tables don't have primary key columns (as is also the case with views, which can
-also be mapped), the ORM in order to actually map to a particular table needs there
-to be at least one column denoted as a primary key column; multiple-column, i.e. composite, primary keys
-are of course entirely feasible as well.
-
-We define a constructor via ``__init__()`` and also a ``__repr__()`` method - both are optional. The
-class of course can have any number of other methods and attributes as required by the application,
-as it's basically just a plain Python class. Inheriting from ``Base`` is also only a requirement
-of the declarative configurational system, which itself is optional and relatively open ended; at its
-core, the SQLAlchemy ORM only requires that a class be a so-called "new style class", that is, it inherits
-from ``object`` in Python 2, in order to be mapped. All classes in Python 3 are "new style" classes.
-
-.. topic:: The Non Opinionated Philosophy
-
- In our ``User`` mapping example, it was required that we identify the name of the table
- in use, as well as the names and characteristics of all columns which we care about,
- including which column or columns
- represent the primary key, as well as some basic information about the types in use.
- SQLAlchemy never makes assumptions about these decisions - the developer must
- always be explicit about specific conventions in use. However, that doesn't mean the
- task can't be automated. While this tutorial will keep things explicit, developers are
- encouraged to make use of helper functions as well as "Declarative Mixins" to
- automate their tasks in large scale applications. The section :ref:`declarative_mixins`
- introduces many of these techniques.
+ ... return "<User(name='%s', fullname='%s', password='%s')>" % (
+ ... self.name, self.fullname, self.password)
+
+.. sidebar:: Tip
+
+ The ``User`` class defines a ``__repr__()`` method,
+ but note that it is **optional**; we only implement it in
+ this tutorial so that our examples show nicely
+ formatted ``User`` objects.
+
+A class using Declarative at a minimum
+needs a ``__tablename__`` attribute, and at least one
+:class:`.Column` which is part of a primary key [#]_. SQLAlchemy never makes any
+assumptions by itself about the table to which
+a class refers; it has no built-in conventions for names,
+datatypes, or constraints. But this doesn't mean
+boilerplate is required; instead, you're encouraged to create your
+own automated conventions using helper functions and mixin classes, which
+is described in detail at :ref:`declarative_mixins`.
+
+When our class is constructed, Declarative replaces all the :class:`.Column`
+objects with special Python accessors known as :term:`descriptors`; this is a
+process known as :term:`instrumentation`. The "instrumented" mapped class
+will provide us with the means to refer to our table in a SQL context as well
+as to persist and load the values of columns from the database.
+
+Outside of what the mapping process does to our class, the class remains
+otherwise mostly a normal Python class, to which we can define any
+number of ordinary attributes and methods needed by our application.
+
+.. [#] For information on why a primary key is required, see
+ :ref:`faq_mapper_primary_key`.
+
+
+Create a Schema
+===============
With our ``User`` class constructed via the Declarative system, we have defined information about
-our table, known as **table metadata**, as well as a user-defined class which is linked to this
-table, known as a **mapped class**. Declarative has provided for us a shorthand system for what in SQLAlchemy is
-called a "Classical Mapping", which specifies these two units separately and is discussed
-in :ref:`classical_mapping`. The table
-is actually represented by a datastructure known as :class:`.Table`, and the mapping represented
-by a :class:`.Mapper` object generated by a function called :func:`.mapper`. Declarative performs both of
-these steps for us, making available the
-:class:`.Table` it has created via the ``__table__`` attribute::
+our table, known as :term:`table metadata`. The object used by SQLAlchemy to represent
+this information for a specific table is called the :class:`.Table` object, and here Declarative has made
+one for us. We can see this object by inspecting the ``__table__`` attribute::
>>> User.__table__ # doctest: +NORMALIZE_WHITESPACE
- Table('users', MetaData(None),
+ Table('users', MetaData(bind=None),
Column('id', Integer(), table=<users>, primary_key=True, nullable=False),
Column('name', String(), table=<users>),
Column('fullname', String(), table=<users>),
Column('password', String(), table=<users>), schema=None)
-and while rarely needed, making available the :class:`.Mapper` object via the ``__mapper__`` attribute::
-
- >>> User.__mapper__ # doctest: +ELLIPSIS
- <Mapper at 0x...; User>
-
-The Declarative base class also contains a catalog of all the :class:`.Table` objects
-that have been defined called :class:`.MetaData`, available via the ``.metadata``
-attribute. In this example, we are defining
-new tables that have yet to be created in our SQLite database, so one helpful feature
-the :class:`.MetaData` object offers is the ability to issue CREATE TABLE statements
-to the database for all tables that don't yet exist. We illustrate this
-by calling the :meth:`.MetaData.create_all` method, passing in our :class:`.Engine`
+.. sidebar:: Classical Mappings
+
+ The Declarative system, though highly recommended,
+ is not required in order to use SQLAlchemy's ORM.
+ Outside of Declarative, any
+ plain Python class can be mapped to any :class:`.Table`
+ using the :func:`.mapper` function directly; this
+ less common usage is described at :ref:`classical_mapping`.
+
+When we declared our class, Declarative used a Python metaclass in order to
+perform additional activities once the class declaration was complete; within
+this phase, it then created a :class:`.Table` object according to our
+specifications, and associated it with the class by constructing
+a :class:`.Mapper` object. This object is a behind-the-scenes object we normally
+don't need to deal with directly (though it can provide plenty of information
+about our mapping when we need it).
+
+The :class:`.Table` object is a member of a larger collection
+known as :class:`.MetaData`. When using Declarative,
+this object is available using the ``.metadata``
+attribute of our declarative base class.
+
+The :class:`.MetaData`
+is a :term:`registry` which includes the ability to emit a limited set
+of schema generation commands to the database. As our SQLite database
+does not actually have a ``users`` table present, we can use :class:`.MetaData`
+to issue CREATE TABLE statements to the database for all tables that don't yet exist.
+Below, we call the :meth:`.MetaData.create_all` method, passing in our :class:`.Engine`
as a source of database connectivity. We will see that special commands are
first emitted to check for the presence of the ``users`` table, and following that
the actual ``CREATE TABLE`` statement:
@@ -242,13 +246,9 @@ the actual ``CREATE TABLE`` statement:
fullname = Column(String(50))
password = Column(String(12))
- def __init__(self, name, fullname, password):
- self.name = name
- self.fullname = fullname
- self.password = password
-
def __repr__(self):
- return "<User('%s','%s', '%s')>" % (self.name, self.fullname, self.password)
+ return "<User(name='%s', fullname='%s', password='%s')>" % (
+ self.name, self.fullname, self.password)
We include this more verbose table definition separately
to highlight the difference between a minimal construct geared primarily
@@ -261,7 +261,7 @@ Create an Instance of the Mapped Class
With mappings complete, let's now create and inspect a ``User`` object::
- >>> ed_user = User('ed', 'Ed Jones', 'edspassword')
+ >>> ed_user = User(name='ed', fullname='Ed Jones', password='edspassword')
>>> ed_user.name
'ed'
>>> ed_user.password
@@ -269,41 +269,23 @@ With mappings complete, let's now create and inspect a ``User`` object::
>>> str(ed_user.id)
'None'
-The ``id`` attribute, which while not defined by our ``__init__()`` method,
-exists with a value of ``None`` on our ``User`` instance due to the ``id``
-column we declared in our mapping. By
-default, the ORM creates class attributes for all columns present
-in the table being mapped. These class attributes exist as
-:term:`descriptors`, and
-define **instrumentation** for the mapped class. The
-functionality of this instrumentation includes the ability to fire on change
-events, track modifications, and to automatically load new data from the database when
-needed.
-
-Since we have not yet told SQLAlchemy to persist ``Ed Jones`` within the
-database, its id is ``None``. When we persist the object later, this attribute
-will be populated with a newly generated value.
-
-.. topic:: The default ``__init__()`` method
-
- Note that in our ``User`` example we supplied an ``__init__()`` method,
- which receives ``name``, ``fullname`` and ``password`` as positional arguments.
- The Declarative system supplies for us a default constructor if one is
- not already present, which accepts keyword arguments of the same name
- as that of the mapped attributes. Below we define ``User`` without
- specifying a constructor::
-
- class User(Base):
- __tablename__ = 'users'
- id = Column(Integer, primary_key=True)
- name = Column(String)
- fullname = Column(String)
- password = Column(String)
-
- Our ``User`` class above will make usage of the default constructor, and provide
- ``id``, ``name``, ``fullname``, and ``password`` as keyword arguments::
-
- u1 = User(name='ed', fullname='Ed Jones', password='foobar')
+
+.. sidebar:: the ``__init__()`` method
+
+ Our ``User`` class, as defined using the Declarative system, has
+ been provided with a constructor (e.g. ``__init__()`` method) which automatically
+ accepts keyword names that match the columns we've mapped. We are free
+ to define any explicit ``__init__()`` method we prefer on our class, which
+ will override the default method provided by Declarative.
+
+Even though we didn't specify it in the constructor, the ``id`` attribute
+still produces a value of ``None`` when we access it (as opposed to Python's
+usual behavior of raising ``AttributeError`` for an undefined attribute).
+SQLAlchemy's :term:`instrumentation` normally produces this default value for
+column-mapped attributes when first accessed. For those attributes where
+we've actually assigned a value, the instrumentation system is tracking
+those assignments for use within an eventual INSERT statement to be emitted to the
+database.
Creating a Session
==================
@@ -330,10 +312,21 @@ connect it to the :class:`~sqlalchemy.orm.session.Session` using
>>> Session.configure(bind=engine) # once engine is available
+.. sidebar:: Session Lifecycle Patterns
+
+ The question of when to make a :class:`.Session` depends a lot on what
+ kind of application is being built. Keep in mind,
+ the :class:`.Session` is just a workspace for your objects,
+ local to a particular database connection - if you think of
+ an application thread as a guest at a dinner party, the :class:`.Session`
+ is the guest's plate and the objects it holds are the food
+ (and the database...the kitchen?)! More on this topic
+ available at :ref:`session_faq_whentocreate`.
+
This custom-made :class:`~sqlalchemy.orm.session.Session` class will create
new :class:`~sqlalchemy.orm.session.Session` objects which are bound to our
database. Other transactional characteristics may be defined when calling
-:func:`~.sessionmaker` as well; these are described in a later
+:class:`~.sessionmaker` as well; these are described in a later
chapter. Then, whenever you need to have a conversation with the database, you
instantiate a :class:`~sqlalchemy.orm.session.Session`::
@@ -345,24 +338,13 @@ used, it retrieves a connection from a pool of connections maintained by the
:class:`.Engine`, and holds onto it until we commit all changes and/or close the
session object.
-.. topic:: Session Creational Patterns
-
- The business of acquiring a :class:`.Session` has a good deal of variety based
- on the variety of types of applications and frameworks out there.
- Keep in mind the :class:`.Session` is just a workspace for your objects,
- local to a particular database connection - if you think of
- an application thread as a guest at a dinner party, the :class:`.Session`
- is the guest's plate and the objects it holds are the food
- (and the database...the kitchen?)! Hints on
- how :class:`.Session` is integrated into an application are at
- :ref:`session_faq`.
Adding New Objects
==================
To persist our ``User`` object, we :meth:`~.Session.add` it to our :class:`~sqlalchemy.orm.session.Session`::
- >>> ed_user = User('ed', 'Ed Jones', 'edspassword')
+ >>> ed_user = User(name='ed', fullname='Ed Jones', password='edspassword')
>>> session.add(ed_user)
At this point, we say that the instance is **pending**; no SQL has yet been issued
@@ -393,7 +375,7 @@ added:
LIMIT ? OFFSET ?
('ed', 1, 0)
{stop}>>> our_user
- <User('ed','Ed Jones', 'edspassword')>
+ <User(name='ed', fullname='Ed Jones', password='edspassword')>
In fact, the :class:`~sqlalchemy.orm.session.Session` has identified that the
row returned is the **same** row as one already represented within its
@@ -403,7 +385,7 @@ that which we just added::
>>> ed_user is our_user
True
-The ORM concept at work here is known as an `identity map <http://martinfowler.com/eaaCatalog/identityMap.html>`_
+The ORM concept at work here is known as an :term:`identity map`
and ensures that
all operations upon a particular row within a
:class:`~sqlalchemy.orm.session.Session` operate upon the same set of data.
@@ -420,11 +402,11 @@ We can add more ``User`` objects at once using
.. sourcecode:: python+sql
>>> session.add_all([
- ... User('wendy', 'Wendy Williams', 'foobar'),
- ... User('mary', 'Mary Contrary', 'xxg527'),
- ... User('fred', 'Fred Flinstone', 'blah')])
+ ... User(name='wendy', fullname='Wendy Williams', password='foobar'),
+ ... User(name='mary', fullname='Mary Contrary', password='xxg527'),
+ ... User(name='fred', fullname='Fred Flinstone', password='blah')])
-Also, Ed has already decided his password isn't too secure, so lets change it:
+Also, we've decided the password for Ed isn't too secure, so let's change it:
.. sourcecode:: python+sql
@@ -436,16 +418,16 @@ for example, that ``Ed Jones`` has been modified:
.. sourcecode:: python+sql
>>> session.dirty
- IdentitySet([<User('ed','Ed Jones', 'f8s7ccs')>])
+ IdentitySet([<User(name='ed', fullname='Ed Jones', password='f8s7ccs')>])
and that three new ``User`` objects are pending:
.. sourcecode:: python+sql
>>> session.new # doctest: +SKIP
- IdentitySet([<User('wendy','Wendy Williams', 'foobar')>,
- <User('mary','Mary Contrary', 'xxg527')>,
- <User('fred','Fred Flinstone', 'blah')>])
+ IdentitySet([<User(name='wendy', fullname='Wendy Williams', password='foobar')>,
+ <User(name='mary', fullname='Mary Contrary', password='xxg527')>,
+ <User(name='fred', fullname='Fred Flinstone', password='blah')>])
We tell the :class:`~sqlalchemy.orm.session.Session` that we'd like to issue
all remaining changes to the database and commit the transaction, which has
@@ -517,7 +499,7 @@ and we'll add another erroneous user, ``fake_user``:
.. sourcecode:: python+sql
- >>> fake_user = User('fakeuser', 'Invalid', '12345')
+ >>> fake_user = User(name='fakeuser', fullname='Invalid', password='12345')
>>> session.add(fake_user)
Querying the session, we can see that they're flushed into the current transaction:
@@ -536,7 +518,7 @@ Querying the session, we can see that they're flushed into the current transacti
FROM users
WHERE users.name IN (?, ?)
('Edwardo', 'fakeuser')
- {stop}[<User('Edwardo','Ed Jones', 'f8s7ccs')>, <User('fakeuser','Invalid', '12345')>]
+ {stop}[<User(name='Edwardo', fullname='Ed Jones', password='f8s7ccs')>, <User(name='fakeuser', fullname='Invalid', password='12345')>]
Rolling back, we can see that ``ed_user``'s name is back to ``ed``, and
``fake_user`` has been kicked out of the session:
@@ -572,7 +554,7 @@ issuing a SELECT illustrates the changes made to the database:
FROM users
WHERE users.name IN (?, ?)
('ed', 'fakeuser')
- {stop}[<User('ed','Ed Jones', 'f8s7ccs')>]
+ {stop}[<User(name='ed', fullname='Ed Jones', password='f8s7ccs')>]
.. _ormtutorial_querying:
@@ -638,13 +620,13 @@ class:
users.password AS users_password
FROM users
()
- {stop}<User('ed','Ed Jones', 'f8s7ccs')> ed
- <User('wendy','Wendy Williams', 'foobar')> wendy
- <User('mary','Mary Contrary', 'xxg527')> mary
- <User('fred','Fred Flinstone', 'blah')> fred
+ {stop}<User(name='ed', fullname='Ed Jones', password='f8s7ccs')> ed
+ <User(name='wendy', fullname='Wendy Williams', password='foobar')> wendy
+ <User(name='mary', fullname='Mary Contrary', password='xxg527')> mary
+ <User(name='fred', fullname='Fred Flinstone', password='blah')> fred
You can control the names of individual column expressions using the
-:meth:`~.CompareMixin.label` construct, which is available from
+:meth:`~.ColumnElement.label` construct, which is available from
any :class:`.ColumnElement`-derived object, as well as any class attribute which
is mapped to one (such as ``User.name``):
@@ -662,7 +644,7 @@ is mapped to one (such as ``User.name``):
The name given to a full entity such as ``User``, assuming that multiple
entities are present in the call to :meth:`~.Session.query`, can be controlled using
-:class:`~.orm.aliased` :
+:func:`~.sqlalchemy.orm.aliased` :
.. sourcecode:: python+sql
@@ -677,10 +659,10 @@ entities are present in the call to :meth:`~.Session.query`, can be controlled u
user_alias.password AS user_alias_password
FROM users AS user_alias
(){stop}
- <User('ed','Ed Jones', 'f8s7ccs')>
- <User('wendy','Wendy Williams', 'foobar')>
- <User('mary','Mary Contrary', 'xxg527')>
- <User('fred','Fred Flinstone', 'blah')>
+ <User(name='ed', fullname='Ed Jones', password='f8s7ccs')>
+ <User(name='wendy', fullname='Wendy Williams', password='foobar')>
+ <User(name='mary', fullname='Mary Contrary', password='xxg527')>
+ <User(name='fred', fullname='Fred Flinstone', password='blah')>
Basic operations with :class:`~sqlalchemy.orm.query.Query` include issuing
LIMIT and OFFSET, most conveniently using Python array slices and typically in
@@ -697,8 +679,8 @@ conjunction with ORDER BY:
FROM users ORDER BY users.id
LIMIT ? OFFSET ?
(2, 1){stop}
- <User('wendy','Wendy Williams', 'foobar')>
- <User('mary','Mary Contrary', 'xxg527')>
+ <User(name='wendy', fullname='Wendy Williams', password='foobar')>
+ <User(name='mary', fullname='Mary Contrary', password='xxg527')>
and filtering results, which is accomplished either with
:func:`~sqlalchemy.orm.query.Query.filter_by`, which uses keyword arguments:
@@ -747,13 +729,13 @@ users named "ed" with a full name of "Ed Jones", you can call
FROM users
WHERE users.name = ? AND users.fullname = ?
('ed', 'Ed Jones')
- {stop}<User('ed','Ed Jones', 'f8s7ccs')>
-
+ {stop}<User(name='ed', fullname='Ed Jones', password='f8s7ccs')>
Common Filter Operators
-----------------------
-Here's a rundown of some of the most common operators used in :func:`~sqlalchemy.orm.query.Query.filter`:
+Here's a rundown of some of the most common operators used in
+:func:`~sqlalchemy.orm.query.Query.filter`:
* equals::
@@ -772,8 +754,9 @@ Here's a rundown of some of the most common operators used in :func:`~sqlalchemy
query.filter(User.name.in_(['ed', 'wendy', 'jack']))
# works with query objects too:
-
- query.filter(User.name.in_(session.query(User.name).filter(User.name.like('%ed%'))))
+ query.filter(User.name.in_(
+ session.query(User.name).filter(User.name.like('%ed%'))
+ ))
* NOT IN::
@@ -781,24 +764,28 @@ Here's a rundown of some of the most common operators used in :func:`~sqlalchemy
* IS NULL::
- filter(User.name == None)
+ query.filter(User.name == None)
* IS NOT NULL::
- filter(User.name != None)
+ query.filter(User.name != None)
* AND::
+ # use and_()
from sqlalchemy import and_
- filter(and_(User.name == 'ed', User.fullname == 'Ed Jones'))
+ query.filter(and_(User.name == 'ed', User.fullname == 'Ed Jones'))
- # or call filter()/filter_by() multiple times
- filter(User.name == 'ed').filter(User.fullname == 'Ed Jones')
+ # or send multiple expressions to .filter()
+ query.filter(User.name == 'ed', User.fullname == 'Ed Jones')
+
+ # or chain multiple filter()/filter_by() calls
+ query.filter(User.name == 'ed').filter(User.fullname == 'Ed Jones')
* OR::
from sqlalchemy import or_
- filter(or_(User.name == 'ed', User.name == 'wendy'))
+ query.filter(or_(User.name == 'ed', User.name == 'wendy'))
* match::
@@ -809,76 +796,101 @@ Here's a rundown of some of the most common operators used in :func:`~sqlalchemy
Returning Lists and Scalars
---------------------------
-The :meth:`~sqlalchemy.orm.query.Query.all()`,
-:meth:`~sqlalchemy.orm.query.Query.one()`, and
-:meth:`~sqlalchemy.orm.query.Query.first()` methods of
-:class:`~sqlalchemy.orm.query.Query` immediately issue SQL and return a
-non-iterator value. :meth:`~sqlalchemy.orm.query.Query.all()` returns a list:
-
-.. sourcecode:: python+sql
-
- >>> query = session.query(User).filter(User.name.like('%ed')).order_by(User.id)
- {sql}>>> query.all() #doctest: +NORMALIZE_WHITESPACE
- SELECT users.id AS users_id,
- users.name AS users_name,
- users.fullname AS users_fullname,
- users.password AS users_password
- FROM users
- WHERE users.name LIKE ? ORDER BY users.id
- ('%ed',)
- {stop}[<User('ed','Ed Jones', 'f8s7ccs')>, <User('fred','Fred Flinstone', 'blah')>]
-
-:meth:`~sqlalchemy.orm.query.Query.first()` applies a limit of one and returns
-the first result as a scalar:
-
-.. sourcecode:: python+sql
-
- {sql}>>> query.first() #doctest: +NORMALIZE_WHITESPACE
- SELECT users.id AS users_id,
- users.name AS users_name,
- users.fullname AS users_fullname,
- users.password AS users_password
- FROM users
- WHERE users.name LIKE ? ORDER BY users.id
- LIMIT ? OFFSET ?
- ('%ed', 1, 0)
- {stop}<User('ed','Ed Jones', 'f8s7ccs')>
-
-:meth:`~sqlalchemy.orm.query.Query.one()`, fully fetches all rows, and if not
-exactly one object identity or composite row is present in the result, raises
-an error:
-
-.. sourcecode:: python+sql
-
- {sql}>>> from sqlalchemy.orm.exc import MultipleResultsFound
- >>> try: #doctest: +NORMALIZE_WHITESPACE
- ... user = query.one()
- ... except MultipleResultsFound, e:
- ... print e
- SELECT users.id AS users_id,
- users.name AS users_name,
- users.fullname AS users_fullname,
- users.password AS users_password
- FROM users
- WHERE users.name LIKE ? ORDER BY users.id
- ('%ed',)
- {stop}Multiple rows were found for one()
-
-.. sourcecode:: python+sql
-
- {sql}>>> from sqlalchemy.orm.exc import NoResultFound
- >>> try: #doctest: +NORMALIZE_WHITESPACE
- ... user = query.filter(User.id == 99).one()
- ... except NoResultFound, e:
- ... print e
- SELECT users.id AS users_id,
- users.name AS users_name,
- users.fullname AS users_fullname,
- users.password AS users_password
- FROM users
- WHERE users.name LIKE ? AND users.id = ? ORDER BY users.id
- ('%ed', 99)
- {stop}No row was found for one()
+A number of methods on :class:`.Query`
+immediately issue SQL and return a value containing loaded
+database results. Here's a brief tour:
+
+* :meth:`~.Query.all()` returns a list:
+
+ .. sourcecode:: python+sql
+
+ >>> query = session.query(User).filter(User.name.like('%ed')).order_by(User.id)
+ {sql}>>> query.all() #doctest: +NORMALIZE_WHITESPACE
+ SELECT users.id AS users_id,
+ users.name AS users_name,
+ users.fullname AS users_fullname,
+ users.password AS users_password
+ FROM users
+ WHERE users.name LIKE ? ORDER BY users.id
+ ('%ed',)
+ {stop}[<User(name='ed', fullname='Ed Jones', password='f8s7ccs')>,
+ <User(name='fred', fullname='Fred Flinstone', password='blah')>]
+
+* :meth:`~.Query.first()` applies a limit of one and returns
+ the first result as a scalar:
+
+ .. sourcecode:: python+sql
+
+ {sql}>>> query.first() #doctest: +NORMALIZE_WHITESPACE
+ SELECT users.id AS users_id,
+ users.name AS users_name,
+ users.fullname AS users_fullname,
+ users.password AS users_password
+ FROM users
+ WHERE users.name LIKE ? ORDER BY users.id
+ LIMIT ? OFFSET ?
+ ('%ed', 1, 0)
+ {stop}<User(name='ed', fullname='Ed Jones', password='f8s7ccs')>
+
+* :meth:`~.Query.one()` fully fetches all rows, and if not
+ exactly one object identity or composite row is present in the result, raises
+ an error. With multiple rows found:
+
+ .. sourcecode:: python+sql
+
+ {sql}>>> from sqlalchemy.orm.exc import MultipleResultsFound
+ >>> try: #doctest: +NORMALIZE_WHITESPACE
+ ... user = query.one()
+ ... except MultipleResultsFound, e:
+ ... print e
+ SELECT users.id AS users_id,
+ users.name AS users_name,
+ users.fullname AS users_fullname,
+ users.password AS users_password
+ FROM users
+ WHERE users.name LIKE ? ORDER BY users.id
+ ('%ed',)
+ {stop}Multiple rows were found for one()
+
+ With no rows found:
+
+ .. sourcecode:: python+sql
+
+ {sql}>>> from sqlalchemy.orm.exc import NoResultFound
+ >>> try: #doctest: +NORMALIZE_WHITESPACE
+ ... user = query.filter(User.id == 99).one()
+ ... except NoResultFound, e:
+ ... print e
+ SELECT users.id AS users_id,
+ users.name AS users_name,
+ users.fullname AS users_fullname,
+ users.password AS users_password
+ FROM users
+ WHERE users.name LIKE ? AND users.id = ? ORDER BY users.id
+ ('%ed', 99)
+ {stop}No row was found for one()
+
+ The :meth:`~.Query.one` method is great for systems that expect to handle
+ "no items found" versus "multiple items found" differently; such as a RESTful
+ web service, which may want to raise a "404 not found" when no results are found,
+ but raise an application error when multiple results are found.
+
+* :meth:`~.Query.scalar` invokes the :meth:`~.Query.one` method, and upon
+ success returns the first column of the row:
+
+ .. sourcecode:: python+sql
+
+ >>> query = session.query(User.id).filter(User.name.like('%ed')).\
+ ... order_by(User.id)
+ {sql}>>> query.scalar() #doctest: +NORMALIZE_WHITESPACE
+ SELECT users.id AS users_id
+ FROM users
+ WHERE users.name LIKE ? ORDER BY users.id
+ LIMIT ? OFFSET ?
+ ('%ed', 1, 0)
+ {stop}7
+
+.. _orm_tutorial_literal_sql:
Using Literal SQL
-----------------
@@ -922,7 +934,7 @@ method:
FROM users
WHERE id<? and name=? ORDER BY users.id
(224, 'fred')
- {stop}<User('fred','Fred Flinstone', 'blah')>
+ {stop}<User(name='fred', fullname='Fred Flinstone', password='blah')>
To use an entirely string-based statement, using
:meth:`~sqlalchemy.orm.query.Query.from_statement()`; just ensure that the
@@ -936,7 +948,7 @@ mapper (below illustrated using an asterisk):
... params(name='ed').all()
SELECT * FROM users where name=?
('ed',)
- {stop}[<User('ed','Ed Jones', 'f8s7ccs')>]
+ {stop}[<User(name='ed', fullname='Ed Jones', password='f8s7ccs')>]
You can use :meth:`~sqlalchemy.orm.query.Query.from_statement()` to go
completely "raw", using string names to identify desired columns:
@@ -1056,6 +1068,16 @@ counting called :meth:`~sqlalchemy.orm.query.Query.count()`:
('%ed',)
{stop}2
+.. sidebar:: Counting on ``count()``
+
+ :meth:`.Query.count` used to be a very complicated method
+ when it would try to guess whether or not a subquery was needed
+ around the
+ existing query, and in some exotic cases it wouldn't do the right thing.
+ Now that it uses a simple subquery every time, it's only two lines long
+ and always returns the right answer. Use ``func.count()`` if a
+ particular statement absolutely cannot tolerate the subquery being present.
+
The :meth:`~.Query.count()` method is used to determine
how many rows the SQL statement would return. Looking
at the generated SQL above, SQLAlchemy always places whatever it is we are
@@ -1123,15 +1145,12 @@ declarative, we define this table along with its mapped class, ``Address``:
...
... user = relationship("User", backref=backref('addresses', order_by=id))
...
- ... def __init__(self, email_address):
- ... self.email_address = email_address
- ...
... def __repr__(self):
- ... return "<Address('%s')>" % self.email_address
+ ... return "<Address(email_address='%s')>" % self.email_address
The above class introduces the :class:`.ForeignKey` construct, which is a
directive applied to :class:`.Column` that indicates that values in this
-column should be **constrained** to be values present in the named remote
+column should be :term:`constrained` to be values present in the named remote
column. This is a core feature of relational databases, and is the "glue" that
transforms an otherwise unconnected collection of tables to have rich
overlapping relationships. The :class:`.ForeignKey` above expresses that
@@ -1143,17 +1162,17 @@ tells the ORM that the ``Address`` class itself should be linked
to the ``User`` class, using the attribute ``Address.user``.
:func:`.relationship` uses the foreign key
relationships between the two tables to determine the nature of
-this linkage, determining that ``Address.user`` will be **many-to-one**.
+this linkage, determining that ``Address.user`` will be :term:`many to one`.
A subdirective of :func:`.relationship` called :func:`.backref` is
placed inside of :func:`.relationship`, providing details about
the relationship as expressed in reverse, that of a collection of ``Address``
objects on ``User`` referenced by ``User.addresses``. The reverse
-side of a many-to-one relationship is always **one-to-many**.
+side of a many-to-one relationship is always :term:`one to many`.
A full catalog of available :func:`.relationship` configurations
is at :ref:`relationship_patterns`.
The two complementing relationships ``Address.user`` and ``User.addresses``
-are referred to as a **bidirectional relationship**, and is a key
+are referred to as a :term:`bidirectional relationship`, and is a key
feature of the SQLAlchemy ORM. The section :ref:`relationships_backref`
discusses the "backref" feature in detail.
@@ -1218,7 +1237,7 @@ default, the collection is a Python list.
.. sourcecode:: python+sql
- >>> jack = User('jack', 'Jack Bean', 'gjffdd')
+ >>> jack = User(name='jack', fullname='Jack Bean', password='gjffdd')
>>> jack.addresses
[]
@@ -1239,14 +1258,15 @@ using any SQL:
.. sourcecode:: python+sql
>>> jack.addresses[1]
- <Address('j25@yahoo.com')>
+ <Address(email_address='j25@yahoo.com')>
>>> jack.addresses[1].user
- <User('jack','Jack Bean', 'gjffdd')>
+ <User(name='jack', fullname='Jack Bean', password='gjffdd')>
-Let's add and commit ``Jack Bean`` to the database. ``jack`` as well as the
-two ``Address`` members in his ``addresses`` collection are both added to the
-session at once, using a process known as **cascading**:
+Let's add and commit ``Jack Bean`` to the database. ``jack`` as well
+as the two ``Address`` members in the corresponding ``addresses``
+collection are both added to the session at once, using a process
+known as **cascading**:
.. sourcecode:: python+sql
@@ -1276,7 +1296,7 @@ Querying for Jack, we get just Jack back. No SQL is yet issued for Jack's addre
('jack',)
{stop}>>> jack
- <User('jack','Jack Bean', 'gjffdd')>
+ <User(name='jack', fullname='Jack Bean', password='gjffdd')>
Let's look at the ``addresses`` collection. Watch the SQL:
@@ -1290,10 +1310,10 @@ Let's look at the ``addresses`` collection. Watch the SQL:
FROM addresses
WHERE ? = addresses.user_id ORDER BY addresses.id
(5,)
- {stop}[<Address('jack@google.com')>, <Address('j25@yahoo.com')>]
+ {stop}[<Address(email_address='jack@google.com')>, <Address(email_address='j25@yahoo.com')>]
When we accessed the ``addresses`` collection, SQL was suddenly issued. This
-is an example of a **lazy loading relationship**. The ``addresses`` collection
+is an example of a :term:`lazy loading` relationship. The ``addresses`` collection
is now loaded and behaves just like an ordinary list. We'll cover ways
to optimize the loading of this collection in a bit.
@@ -1318,7 +1338,8 @@ Below we load the ``User`` and ``Address`` entities at once using this method:
... filter(User.id==Address.user_id).\
... filter(Address.email_address=='jack@google.com').\
... all(): # doctest: +NORMALIZE_WHITESPACE
- ... print u, a
+ ... print u
+ ... print a
SELECT users.id AS users_id,
users.name AS users_name,
users.fullname AS users_fullname,
@@ -1330,10 +1351,11 @@ Below we load the ``User`` and ``Address`` entities at once using this method:
WHERE users.id = addresses.user_id
AND addresses.email_address = ?
('jack@google.com',)
- {stop}<User('jack','Jack Bean', 'gjffdd')> <Address('jack@google.com')>
+ {stop}<User(name='jack', fullname='Jack Bean', password='gjffdd')>
+ <Address(email_address='jack@google.com')>
-The actual SQL JOIN syntax, on the other hand, is most easily achieved using the :meth:`.Query.join`
-method:
+The actual SQL JOIN syntax, on the other hand, is most easily achieved
+using the :meth:`.Query.join` method:
.. sourcecode:: python+sql
@@ -1347,7 +1369,7 @@ method:
FROM users JOIN addresses ON users.id = addresses.user_id
WHERE addresses.email_address = ?
('jack@google.com',)
- {stop}[<User('jack','Jack Bean', 'gjffdd')>]
+ {stop}[<User(name='jack', fullname='Jack Bean', password='gjffdd')>]
:meth:`.Query.join` knows how to join between ``User``
and ``Address`` because there's only one foreign key between them. If there
@@ -1457,11 +1479,11 @@ accessible through an attribute called ``c``:
ON users.id = anon_1.user_id
ORDER BY users.id
('*',)
- {stop}<User('ed','Ed Jones', 'f8s7ccs')> None
- <User('wendy','Wendy Williams', 'foobar')> None
- <User('mary','Mary Contrary', 'xxg527')> None
- <User('fred','Fred Flinstone', 'blah')> None
- <User('jack','Jack Bean', 'gjffdd')> 2
+ {stop}<User(name='ed', fullname='Ed Jones', password='f8s7ccs')> None
+ <User(name='wendy', fullname='Wendy Williams', password='foobar')> None
+ <User(name='mary', fullname='Mary Contrary', password='xxg527')> None
+ <User(name='fred', fullname='Fred Flinstone', password='blah')> None
+ <User(name='jack', fullname='Jack Bean', password='gjffdd')> 2
Selecting Entities from Subqueries
----------------------------------
@@ -1478,7 +1500,8 @@ to associate an "alias" of a mapped class to a subquery:
>>> adalias = aliased(Address, stmt)
>>> for user, address in session.query(User, adalias).\
... join(adalias, User.addresses): # doctest: +NORMALIZE_WHITESPACE
- ... print user, address
+ ... print user
+ ... print address
SELECT users.id AS users_id,
users.name AS users_name,
users.fullname AS users_fullname,
@@ -1494,7 +1517,8 @@ to associate an "alias" of a mapped class to a subquery:
WHERE addresses.email_address != ?) AS anon_1
ON users.id = anon_1.user_id
('j25@yahoo.com',)
- {stop}<User('jack','Jack Bean', 'gjffdd')> <Address('jack@google.com')>
+ {stop}<User(name='jack', fullname='Jack Bean', password='gjffdd')>
+ <Address(email_address='jack@google.com')>
Using EXISTS
------------
@@ -1611,13 +1635,13 @@ and behavior:
Eager Loading
=============
-Recall earlier that we illustrated a **lazy loading** operation, when
+Recall earlier that we illustrated a :term:`lazy loading` operation, when
we accessed the ``User.addresses`` collection of a ``User`` and SQL
was emitted. If you want to reduce the number of queries (dramatically, in many cases),
-we can apply an **eager load** to the query operation. SQLAlchemy
+we can apply an :term:`eager load` to the query operation. SQLAlchemy
offers three types of eager loading, two of which are automatic, and a third
which involves custom criterion. All three are usually invoked via functions known
-as **query options** which give additional instructions to the :class:`.Query` on how
+as :term:`query options` which give additional instructions to the :class:`.Query` on how
we would like various attributes to be loaded, via the :meth:`.Query.options` method.
Subquery Load
@@ -1655,10 +1679,10 @@ very easy to use:
ORDER BY anon_1.users_id, addresses.id
('jack',)
{stop}>>> jack
- <User('jack','Jack Bean', 'gjffdd')>
+ <User(name='jack', fullname='Jack Bean', password='gjffdd')>
>>> jack.addresses
- [<Address('jack@google.com')>, <Address('j25@yahoo.com')>]
+ [<Address(email_address='jack@google.com')>, <Address(email_address='j25@yahoo.com')>]
Joined Load
-------------
@@ -1691,10 +1715,10 @@ will emit the extra join regardless:
('jack',)
{stop}>>> jack
- <User('jack','Jack Bean', 'gjffdd')>
+ <User(name='jack', fullname='Jack Bean', password='gjffdd')>
>>> jack.addresses
- [<Address('jack@google.com')>, <Address('j25@yahoo.com')>]
+ [<Address(email_address='jack@google.com')>, <Address(email_address='j25@yahoo.com')>]
Note that even though the OUTER JOIN resulted in two rows, we still only got
one instance of ``User`` back. This is because :class:`.Query` applies a "uniquing"
@@ -1752,10 +1776,10 @@ attribute:
('jack',)
{stop}>>> jacks_addresses
- [<Address('jack@google.com')>, <Address('j25@yahoo.com')>]
+ [<Address(email_address='jack@google.com')>, <Address(email_address='j25@yahoo.com')>]
>>> jacks_addresses[0].user
- <User('jack','Jack Bean', 'gjffdd')>
+ <User(name='jack', fullname='Jack Bean', password='gjffdd')>
For more information on eager loading, including how to configure various forms
of loading by default, see the section :doc:`/orm/loading`.
@@ -1835,13 +1859,15 @@ including the cascade configuration (we'll leave the constructor out too)::
... fullname = Column(String)
... password = Column(String)
...
- ... addresses = relationship("Address", backref='user', cascade="all, delete, delete-orphan")
+ ... addresses = relationship("Address", backref='user',
+ ... cascade="all, delete, delete-orphan")
...
... def __repr__(self):
- ... return "<User('%s','%s', '%s')>" % (self.name, self.fullname, self.password)
+ ... return "<User(name='%s', fullname='%s', password='%s')>" % (
+ ... self.name, self.fullname, self.password)
-Then we recreate ``Address``, noting that in this case we've created the ``Address.user`` relationship
-via the ``User`` class already::
+Then we recreate ``Address``, noting that in this case we've created
+the ``Address.user`` relationship via the ``User`` class already::
>>> class Address(Base):
... __tablename__ = 'addresses'
@@ -1850,11 +1876,12 @@ via the ``User`` class already::
... user_id = Column(Integer, ForeignKey('users.id'))
...
... def __repr__(self):
- ... return "<Address('%s')>" % self.email_address
+ ... return "<Address(email_address='%s')>" % self.email_address
-Now when we load Jack (below using :meth:`~.Query.get`, which loads by primary key),
-removing an address from his ``addresses`` collection will result in that
-``Address`` being deleted:
+Now when we load the user ``jack`` (below using :meth:`~.Query.get`,
+which loads by primary key), removing an address from the
+corresponding ``addresses`` collection will result in that ``Address``
+being deleted:
.. sourcecode:: python+sql
@@ -1895,7 +1922,8 @@ removing an address from his ``addresses`` collection will result in that
('jack@google.com', 'j25@yahoo.com')
{stop}1
-Deleting Jack will delete both Jack and his remaining ``Address``:
+Deleting Jack will delete both Jack and the remaining ``Address`` associated
+with the user:
.. sourcecode:: python+sql
@@ -1991,6 +2019,11 @@ via the ``post_keywords`` table::
... def __init__(self, keyword):
... self.keyword = keyword
+.. note::
+
+ The above class declarations illustrate explicit ``__init__()`` methods.
+ Remember, when using Declarative, it's optional!
+
Above, the many-to-many relationship is ``BlogPost.keywords``. The defining
feature of a many-to-many relationship is the ``secondary`` keyword argument
which references a :class:`~sqlalchemy.schema.Table` object representing the
@@ -2112,10 +2145,10 @@ keyword string 'firstpost'":
AND keywords.id = post_keywords.keyword_id
AND keywords.keyword = ?)
('firstpost',)
- {stop}[BlogPost("Wendy's Blog Post", 'This is a test', <User('wendy','Wendy Williams', 'foobar')>)]
+ {stop}[BlogPost("Wendy's Blog Post", 'This is a test', <User(name='wendy', fullname='Wendy Williams', password='foobar')>)]
-If we want to look up just Wendy's posts, we can tell the query to narrow down
-to her as a parent:
+If we want to look up posts owned by the user ``wendy``, we can tell
+the query to narrow down to that ``User`` object as a parent:
.. sourcecode:: python+sql
@@ -2134,7 +2167,7 @@ to her as a parent:
AND keywords.id = post_keywords.keyword_id
AND keywords.keyword = ?))
(2, 'firstpost')
- {stop}[BlogPost("Wendy's Blog Post", 'This is a test', <User('wendy','Wendy Williams', 'foobar')>)]
+ {stop}[BlogPost("Wendy's Blog Post", 'This is a test', <User(name='wendy', fullname='Wendy Williams', password='foobar')>)]
Or we can use Wendy's own ``posts`` relationship, which is a "dynamic"
relationship, to query straight from there:
@@ -2155,7 +2188,7 @@ relationship, to query straight from there:
AND keywords.id = post_keywords.keyword_id
AND keywords.keyword = ?))
(2, 'firstpost')
- {stop}[BlogPost("Wendy's Blog Post", 'This is a test', <User('wendy','Wendy Williams', 'foobar')>)]
+ {stop}[BlogPost("Wendy's Blog Post", 'This is a test', <User(name='wendy', fullname='Wendy Williams', password='foobar')>)]
Further Reference
==================
diff --git a/doc/build/requirements.txt b/doc/build/requirements.txt
index 2951cdd49..34f031b0b 100644
--- a/doc/build/requirements.txt
+++ b/doc/build/requirements.txt
@@ -1 +1,3 @@
mako
+changelog>=0.3.4
+sphinx-paramlinks>=0.2.2
diff --git a/doc/build/static/docs.css b/doc/build/static/docs.css
index 62e102c50..bd08f3b20 100644
--- a/doc/build/static/docs.css
+++ b/doc/build/static/docs.css
@@ -151,7 +151,6 @@ a.headerlink:hover {
#docs-body-container {
background-color:#EFEFEF;
border: solid 1px #CCC;
-
}
#docs-body,
@@ -166,6 +165,10 @@ a.headerlink:hover {
}
+#docs-body {
+ min-height: 700px;
+}
+
#docs-sidebar > ul {
font-size:.85em;
}
@@ -319,10 +322,24 @@ th.field-name {
text-align:right;
}
+div.section {
+ clear:right;
+}
div.note, div.warning, p.deprecated, div.topic, div.admonition {
background-color:#EEFFEF;
}
+.footnote {
+ font-size: .95em;
+}
+
+div.faq {
+ background-color: #EFEFEF;
+}
+
+div.faq ul {
+ list-style: square outside none;
+}
div.admonition, div.topic, .deprecated, .versionadded, .versionchanged {
border:1px solid #CCCCCC;
@@ -332,6 +349,21 @@ div.admonition, div.topic, .deprecated, .versionadded, .versionchanged {
box-shadow: 2px 2px 3px #DFDFDF;
}
+
+div.sidebar {
+ background-color: #FFFFEE;
+ border: 1px solid #DDDDBB;
+ float: right;
+ margin: 10px 0 10px 1em;
+ padding: 7px 7px 0;
+ width: 40%;
+ font-size:.9em;
+}
+
+p.sidebar-title {
+ font-weight: bold;
+}
+
/* grrr sphinx changing your document structures, removing classes.... */
.versionadded .versionmodified,
@@ -339,7 +371,7 @@ div.admonition, div.topic, .deprecated, .versionadded, .versionchanged {
.deprecated .versionmodified,
.versionadded > p:first-child > span:first-child,
.versionchanged > p:first-child > span:first-child,
-.deprecated > p:first-child > span:first-child
+.deprecated > p:first-child > span:first-child
{
background-color: #ECF0F3;
color: #990000;
diff --git a/doc/build/templates/genindex.mako b/doc/build/templates/genindex.mako
index 0a25b6724..9ea6795bc 100644
--- a/doc/build/templates/genindex.mako
+++ b/doc/build/templates/genindex.mako
@@ -1,4 +1,4 @@
-<%inherit file="${context['layout']}"/>
+<%inherit file="layout.mako"/>
<%block name="show_title" filter="util.striptags">
${_('Index')}
diff --git a/doc/build/templates/layout.mako b/doc/build/templates/layout.mako
index ba3e81c01..a879fa481 100644
--- a/doc/build/templates/layout.mako
+++ b/doc/build/templates/layout.mako
@@ -2,13 +2,19 @@
<%!
local_script_files = []
+
+ default_css_files = [
+ '_static/pygments.css',
+ '_static/docs.css',
+ ]
%>
+
<%doc>
Structural elements are all prefixed with "docs-"
- to prevent conflicts when the structure is integrated into the
+ to prevent conflicts when the structure is integrated into the
main site.
-
+
docs-container ->
docs-header ->
docs-search
@@ -31,16 +37,19 @@ withsidebar = bool(toc) and current_page_name != 'index'
<%block name="head_title">
% if current_page_name != 'index':
- ${capture(self.show_title) | util.striptags} &mdash;
+ ${capture(self.show_title) | util.striptags} &mdash;
% endif
${docstitle|h}
</%block>
<div id="docs-container">
+
<%block name="headers">
- <link rel="stylesheet" href="${pathto('_static/pygments.css', 1)}" type="text/css" />
- <link rel="stylesheet" href="${pathto('_static/docs.css', 1)}" type="text/css" />
+
+ ${parent.headers()}
+
+ <!-- begin layout.mako headers -->
<script type="text/javascript">
var DOCUMENTATION_OPTIONS = {
@@ -50,9 +59,13 @@ withsidebar = bool(toc) and current_page_name != 'index'
FILE_SUFFIX: '${file_suffix}'
};
</script>
+
+ <!-- begin iterate through sphinx environment script_files -->
% for scriptfile in script_files + self.attr.local_script_files:
<script type="text/javascript" src="${pathto(scriptfile, 1)}"></script>
% endfor
+ <!-- end iterate through sphinx environment script_files -->
+
<script type="text/javascript" src="${pathto('_static/init.js', 1)}"></script>
% if hasdoc('about'):
<link rel="author" title="${_('About these documents')}" href="${pathto('about')}" />
@@ -72,6 +85,8 @@ withsidebar = bool(toc) and current_page_name != 'index'
% if prevtopic:
<link rel="prev" title="${prevtopic['title']|util.striptags}" href="${prevtopic['link']|h}" />
% endif
+ <!-- end layout.mako headers -->
+
</%block>
<div id="docs-header">
@@ -129,7 +144,7 @@ withsidebar = bool(toc) and current_page_name != 'index'
% endfor
% endif
% if current_page_name != 'index':
- » ${self.show_title()}
+ » ${self.show_title()}
% endif
<h2>
diff --git a/doc/build/templates/page.mako b/doc/build/templates/page.mako
index 61cf9a05e..e0f98cf64 100644
--- a/doc/build/templates/page.mako
+++ b/doc/build/templates/page.mako
@@ -1,2 +1,2 @@
-<%inherit file="${context['layout']}"/>
+<%inherit file="layout.mako"/>
${body| util.strip_toplevel_anchors} \ No newline at end of file
diff --git a/doc/build/templates/rtd_layout.mako b/doc/build/templates/rtd_layout.mako
deleted file mode 100644
index a3083bd7b..000000000
--- a/doc/build/templates/rtd_layout.mako
+++ /dev/null
@@ -1,164 +0,0 @@
-<%inherit file="/layout.mako"/>
-
-<%
- newscript = []
- # strip out script files that RTD wants to provide
- for script in script_files:
- for token in ("jquery.js", "underscore.js", "doctools.js"):
- if token in script:
- break
- else:
- newscript.append(script)
- script_files[:] = newscript
-%>
-
-<%block name="headers">
-<!-- RTD <head> -->
-<script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/1.4/jquery.min.js"></script>
-<script type="text/javascript" src="${MEDIA_URL}javascript/underscore.js"></script>
-<script type="text/javascript" src="${MEDIA_URL}javascript/doctools.js"></script>
-<script type="text/javascript" src="${MEDIA_URL}javascript/searchtools.js"></script>
- <script type="text/javascript">
- var doc_version = "${current_version}";
- var doc_slug = "${slug}";
- var static_root = "${pathto('_static', 1)}"
- </script>
-<!-- end RTD <head> -->
- ${parent.headers()}
-</%block>
-
-${next.body()}
-
-<%block name="footer">
-${parent.footer()}
- <!-- End original user content -->
-## Keep this here, so that the RTD logo doesn't stomp on the bottom of the theme.
-<br>
-<br>
-<br>
-
-<style type="text/css">
- .badge {
- position: fixed;
- display: block;
- bottom: 5px;
- height: 40px;
- text-indent: -9999em;
- border-radius: 3px;
- -moz-border-radius: 3px;
- -webkit-border-radius: 3px;
- box-shadow: 0 1px 0 rgba(0, 0, 0, 0.2), 0 1px 0 rgba(255, 255, 255, 0.2) inset;
- -moz-box-shadow: 0 1px 0 rgba(0, 0, 0, 0.2), 0 1px 0 rgba(255, 255, 255, 0.2) inset;
- -webkit-box-shadow: 0 1px 0 rgba(0, 0, 0, 0.2), 0 1px 0 rgba(255, 255, 255, 0.2) inset;
- }
- #version_menu {
- position: fixed;
- display: none;
- bottom: 11px;
- right: 166px;
- list-style-type: none;
- margin: 0;
- }
- .footer_popout:hover #version_menu {
- display: block;
- }
- #version_menu li {
- display: block;
- float: right;
- }
- #version_menu li a {
- display: block;
- padding: 6px 10px 4px 10px;
- margin: 7px 7px 0 0;
- font-weight: bold;
- font-size: 14px;
- height: 20px;
- line-height: 17px;
- text-decoration: none;
- color: #fff;
- background: #8ca1af url(http://media.readthedocs.org/images/gradient-light.png) bottom left repeat-x;
- border-radius: 3px;
- -moz-border-radius: 3px;
- -webkit-border-radius: 3px;
- box-shadow: 0 1px 1px #465158;
- -moz-box-shadow: 0 1px 1px #465158;
- -webkit-box-shadow: 0 1px 1px #465158;
- text-shadow: 0 1px 1px rgba(0, 0, 0, 0.5);
- }
- #version_menu li a:hover {
- text-decoration: none;
- background-color: #697983;
- box-shadow: 0 1px 0px #465158;
- -moz-box-shadow: 0 1px 0px #465158;
- -webkit-box-shadow: 0 1px 0px #465158;
- }
- .badge.rtd {
- background: #257597 url(http://media.readthedocs.org/images/badge-rtd.png) top left no-repeat;
- border: 1px solid #282E32;
- width: 160px;
- right: 5px;
- }
- .badge.revsys { background: #465158 url(http://media.readthedocs.org/images/badge-revsys.png) top left no-repeat;
- border: 1px solid #1C5871;
- width: 290px;
- right: 173px;
- }
- .badge.revsys-inline-sponsored {
- position: inherit;
- margin-left: auto;
- margin-right: 175px;
- margin-bottom: 5px;
- background: #465158 url(http://media.readthedocs.org/images/badge-revsys.png) top left no-repeat;
- border: 1px solid #1C5871;
- width: 290px;
- right: 173px;
- }
- .badge.revsys-inline {
- position: inherit;
- margin-left: auto;
- margin-right: 175px;
- margin-bottom: 5px;
- background: #465158 url(http://media.readthedocs.org/images/badge-revsys-sm.png) top left no-repeat;
- border: 1px solid #1C5871;
- width: 205px;
- right: 173px;
- }
-
-</style>
-<div class="rtd_doc_footer">
- <div class="footer_popout">
- <a href="http://readthedocs.org/projects/${slug}/?fromdocs=${slug}" class="badge rtd">Brought to you by Read the Docs</a>
- <ul id="version_menu">
- ## rtd fills this in client side
- </ul>
- </div>
-</div>
-<!-- RTD Analytics Code -->
-<script type="text/javascript">
- var _gaq = _gaq || [];
- _gaq.push(['_setAccount', 'UA-17997319-1']);
- _gaq.push(['_trackPageview']);
-
- (function() {
- var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
- ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
- var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
- })();
-</script>
-
-% if analytics_code:
-<!-- User Analytics Code -->
-<script type="text/javascript">
- var _gaq = _gaq || [];
- _gaq.push(['_setAccount', '${analytics_code}']);
- _gaq.push(['_trackPageview']);
-
- (function() {
- var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
- ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
- var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
- })();
-</script>
-% endif
-
-</%block>
diff --git a/doc/build/templates/search.mako b/doc/build/templates/search.mako
index 68e3fb991..83a3fcd84 100644
--- a/doc/build/templates/search.mako
+++ b/doc/build/templates/search.mako
@@ -1,4 +1,4 @@
-<%inherit file="${context['layout']}"/>
+<%inherit file="layout.mako"/>
<%!
local_script_files = ['_static/searchtools.js']
diff --git a/doc/build/templates/static_base.mako b/doc/build/templates/static_base.mako
index 40bf1c68f..9eb5ec046 100644
--- a/doc/build/templates/static_base.mako
+++ b/doc/build/templates/static_base.mako
@@ -9,6 +9,15 @@
<%block name="head_title">
</%block>
</title>
+
+ <%block name="css">
+ <!-- begin iterate through SQLA + sphinx environment css_files -->
+ % for cssfile in self.attr.default_css_files + css_files:
+ <link rel="stylesheet" href="${pathto(cssfile, 1)}" type="text/css" />
+ % endfor
+ <!-- end iterate through SQLA + sphinx environment css_files -->
+ </%block>
+
<%block name="headers"/>
</head>
<body>
diff --git a/doc/build/testdocs.py b/doc/build/testdocs.py
index 9d84808e5..815aa8669 100644
--- a/doc/build/testdocs.py
+++ b/doc/build/testdocs.py
@@ -60,8 +60,7 @@ def replace_file(s, newfile):
raise ValueError("Couldn't find suitable create_engine call to replace '%s' in it" % oldfile)
return s
-#for filename in 'orm/tutorial','core/tutorial',:
-for filename in 'core/tutorial',:
+for filename in 'orm/tutorial','core/tutorial',:
filename = '%s.rst' % filename
s = open(filename).read()
#s = replace_file(s, ':memory:')
diff --git a/examples/adjacency_list/__init__.py b/examples/adjacency_list/__init__.py
index 44f27090b..5d80363e4 100644
--- a/examples/adjacency_list/__init__.py
+++ b/examples/adjacency_list/__init__.py
@@ -12,5 +12,7 @@ E.g.::
dump_tree(node)
+.. autosource::
+
"""
diff --git a/examples/adjacency_list/adjacency_list.py b/examples/adjacency_list/adjacency_list.py
index a0683ea0c..9e62bc0be 100644
--- a/examples/adjacency_list/adjacency_list.py
+++ b/examples/adjacency_list/adjacency_list.py
@@ -1,7 +1,5 @@
-from sqlalchemy import MetaData, Table, Column, Sequence, ForeignKey,\
- Integer, String, create_engine
-
-from sqlalchemy.orm import sessionmaker, relationship, backref,\
+from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
+from sqlalchemy.orm import Session, relationship, backref,\
joinedload_all
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm.collections import attribute_mapped_collection
@@ -18,7 +16,7 @@ class TreeNode(Base):
children = relationship("TreeNode",
# cascade deletions
- cascade="all",
+ cascade="all, delete-orphan",
# many to one + adjacency list - remote_side
# is required to reference the 'remote'
@@ -42,11 +40,10 @@ class TreeNode(Base):
)
def dump(self, _indent=0):
-
return " " * _indent + repr(self) + \
"\n" + \
"".join([
- c.dump(_indent +1)
+ c.dump(_indent + 1)
for c in self.children.values()]
)
@@ -63,10 +60,7 @@ if __name__ == '__main__':
Base.metadata.create_all(engine)
- # session. using expire_on_commit=False
- # so that the session's contents are not expired
- # after each transaction commit.
- session = sessionmaker(engine, expire_on_commit=False)()
+ session = Session(engine)
node = TreeNode('rootnode')
TreeNode('node1', parent=node)
@@ -91,16 +85,13 @@ if __name__ == '__main__':
TreeNode('subnode4', parent=node.children['node4'])
TreeNode('subsubnode1', parent=node.children['node4'].children['subnode3'])
- # mark node1 as deleted and remove
- session.delete(node.children['node1'])
+ # remove node1 from the parent, which will trigger a delete
+ # via the delete-orphan cascade.
+ del node.children['node1']
msg("Removed node1. flush + commit:")
session.commit()
- # expire the "children" collection so that
- # it reflects the deletion of "node1".
- session.expire(node, ['children'])
-
msg("Tree after save:\n %s", node.dump())
msg("Emptying out the session entirely, "
@@ -109,12 +100,12 @@ if __name__ == '__main__':
node = session.query(TreeNode).\
options(joinedload_all("children", "children",
"children", "children")).\
- filter(TreeNode.name=="rootnode").\
+ filter(TreeNode.name == "rootnode").\
first()
msg("Full Tree:\n%s", node.dump())
- msg( "Marking root node as deleted, flush + commit:" )
+ msg("Marking root node as deleted, flush + commit:")
session.delete(node)
session.commit()
diff --git a/examples/association/__init__.py b/examples/association/__init__.py
index df736f4fb..4cd64c22f 100644
--- a/examples/association/__init__.py
+++ b/examples/association/__init__.py
@@ -1,20 +1,8 @@
"""
-
Examples illustrating the usage of the "association object" pattern,
where an intermediary class mediates the relationship between two
classes that are associated in a many-to-many pattern.
-This directory includes the following examples:
-
-* basic_association.py - illustrate a many-to-many relationship between an
- "Order" and a collection of "Item" objects, associating a purchase price
- with each via an association object called "OrderItem"
-* proxied_association.py - same example as basic_association, adding in
- usage of :mod:`sqlalchemy.ext.associationproxy` to make explicit references
- to "OrderItem" optional.
-* dict_of_sets_with_default.py - an advanced association proxy example which
- illustrates nesting of association proxies to produce multi-level Python
- collections, in this case a dictionary with string keys and sets of integers
- as values, which conceal the underlying mapped classes.
+.. autosource::
""" \ No newline at end of file
diff --git a/examples/association/basic_association.py b/examples/association/basic_association.py
index a175b1b89..8a8a54ad1 100644
--- a/examples/association/basic_association.py
+++ b/examples/association/basic_association.py
@@ -1,4 +1,8 @@
-"""A basic example of using the association object pattern.
+"""basic_association.py
+
+illustrate a many-to-many relationship between an
+"Order" and a collection of "Item" objects, associating a purchase price
+with each via an association object called "OrderItem"
The association object pattern is a form of many-to-many which
associates additional data with each association between parent/child.
diff --git a/examples/association/dict_of_sets_with_default.py b/examples/association/dict_of_sets_with_default.py
index f541727e7..fc4aebfca 100644
--- a/examples/association/dict_of_sets_with_default.py
+++ b/examples/association/dict_of_sets_with_default.py
@@ -1,4 +1,9 @@
-"""Illustrate a 'dict of sets of integers' model.
+"""dict_of_sets_with_default.py
+
+an advanced association proxy example which
+illustrates nesting of association proxies to produce multi-level Python
+collections, in this case a dictionary with string keys and sets of integers
+as values, which conceal the underlying mapped classes.
This is a three table model which represents a parent table referencing a
dictionary of string keys and sets as values, where each set stores a
diff --git a/examples/association/proxied_association.py b/examples/association/proxied_association.py
index 4cf1c51be..7cb4c9338 100644
--- a/examples/association/proxied_association.py
+++ b/examples/association/proxied_association.py
@@ -1,5 +1,9 @@
-"""An extension to the basic_association.py example, which illustrates
-the usage of sqlalchemy.ext.associationproxy.
+"""proxied_association.py
+
+same example as basic_association, adding in
+usage of :mod:`sqlalchemy.ext.associationproxy` to make explicit references
+to ``OrderItem`` optional.
+
"""
diff --git a/examples/custom_attributes/__init__.py b/examples/custom_attributes/__init__.py
index b28e97d95..2072c051f 100644
--- a/examples/custom_attributes/__init__.py
+++ b/examples/custom_attributes/__init__.py
@@ -2,18 +2,6 @@
Two examples illustrating modifications to SQLAlchemy's attribute management
system.
-``listen_for_events.py`` illustrates the usage of
-:class:`~sqlalchemy.orm.interfaces.AttributeExtension` to intercept attribute
-events. It additionally illustrates a way to automatically attach these
-listeners to all class attributes using a
-:class:`.InstrumentationManager`.
-
-``custom_management.py`` illustrates much deeper usage of
-:class:`.InstrumentationManager` as well as
-collection adaptation, to completely change the underlying method used to
-store state on an object. This example was developed to illustrate
-techniques which would be used by other third party object instrumentation
-systems to interact with SQLAlchemy's event system and is only intended for
-very intricate framework integrations.
+.. autosource::
""" \ No newline at end of file
diff --git a/examples/dogpile_caching/__init__.py b/examples/dogpile_caching/__init__.py
index 00c386bda..bf67eeb17 100644
--- a/examples/dogpile_caching/__init__.py
+++ b/examples/dogpile_caching/__init__.py
@@ -40,45 +40,20 @@ exactly one SQL statement against two tables will be emitted - the
displayed result however will utilize dozens of lazyloads that all
pull from cache.
-The demo scripts themselves, in order of complexity, are run as follows::
+The demo scripts themselves, in order of complexity, are run as Python
+modules so that relative imports work::
- python examples/dogpile_caching/helloworld.py
+ python -m examples.dogpile_caching.helloworld
- python examples/dogpile_caching/relationship_caching.py
+ python -m examples.dogpile_caching.relationship_caching
- python examples/dogpile_caching/advanced.py
+ python -m examples.dogpile_caching.advanced
- python examples/dogpile_caching/local_session_caching.py
+ python -m examples.dogpile_caching.local_session_caching
-
-Listing of files:
-
- environment.py - Establish the Session, a dictionary
- of "regions", a sample cache region against a .dbm
- file, data / cache file paths, and configurations,
- bootstrap fixture data if necessary.
-
- caching_query.py - Represent functions and classes
- which allow the usage of Dogpile caching with SQLAlchemy.
- Introduces a query option called FromCache.
-
- model.py - The datamodel, which represents Person that has multiple
- Address objects, each with PostalCode, City, Country
-
- fixture_data.py - creates demo PostalCode, Address, Person objects
- in the database.
-
- helloworld.py - the basic idea.
-
- relationship_caching.py - Illustrates how to add cache options on
- relationship endpoints, so that lazyloads load from cache.
-
- advanced.py - Further examples of how to use FromCache. Combines
- techniques from the first two scripts.
-
- local_session_caching.py - Grok everything so far ? This example
- creates a new dogpile.cache backend that will persist data in a dictionary
- which is local to the current session. remove() the session
- and the cache is gone.
+.. autosource::
+ :files: environment.py, caching_query.py, model.py, fixture_data.py, \
+ helloworld.py, relationship_caching.py, advanced.py, \
+ local_session_caching.py
"""
diff --git a/examples/dogpile_caching/advanced.py b/examples/dogpile_caching/advanced.py
index f1a18a4d7..feccaa3ba 100644
--- a/examples/dogpile_caching/advanced.py
+++ b/examples/dogpile_caching/advanced.py
@@ -1,15 +1,13 @@
"""advanced.py
Illustrate usage of Query combined with the FromCache option,
-including front-end loading, cache invalidation, namespace techniques
-and collection caching.
+including front-end loading, cache invalidation and collection caching.
"""
from .environment import Session
-from .model import Person, Address, cache_address_bits
+from .model import Person, cache_address_bits
from .caching_query import FromCache, RelationshipCache
-from sqlalchemy.orm import joinedload
def load_name_range(start, end, invalidate=False):
"""Load Person objects on a range of names.
@@ -23,7 +21,7 @@ def load_name_range(start, end, invalidate=False):
The `Person.addresses` collections are also cached. Its basically
another level of tuning here, as that particular cache option
can be transparently replaced with joinedload(Person.addresses).
- The effect is that each Person and his/her Address collection
+ The effect is that each Person and their Address collection
is cached either together or separately, affecting the kind of
SQL that emits for unloaded Person objects as well as the distribution
of data within the cache.
diff --git a/examples/dogpile_caching/caching_query.py b/examples/dogpile_caching/caching_query.py
index 7fe84bede..9ac0d431a 100644
--- a/examples/dogpile_caching/caching_query.py
+++ b/examples/dogpile_caching/caching_query.py
@@ -1,7 +1,8 @@
"""caching_query.py
-Represent persistence structures which allow the usage of
-dogpile.cache caching with SQLAlchemy.
+Represent functions and classes
+which allow the usage of Dogpile caching with SQLAlchemy.
+Introduces a query option called FromCache.
The three new concepts introduced here are:
diff --git a/examples/dogpile_caching/environment.py b/examples/dogpile_caching/environment.py
index 36b9585b2..aeba65e19 100644
--- a/examples/dogpile_caching/environment.py
+++ b/examples/dogpile_caching/environment.py
@@ -10,7 +10,12 @@ from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from dogpile.cache.region import make_region
import os
-import md5
+from hashlib import md5
+import sys
+py2k = sys.version_info < (3, 0)
+
+if py2k:
+ input = raw_input
# dogpile cache regions. A home base for cache configurations.
regions = {}
@@ -47,7 +52,7 @@ def md5_key_mangler(key):
distill them into an md5 hash.
"""
- return md5.md5(key).hexdigest()
+ return md5(key.encode('ascii')).hexdigest()
# configure the "default" cache region.
regions['default'] = make_region(
diff --git a/examples/dogpile_caching/helloworld.py b/examples/dogpile_caching/helloworld.py
index 4561097b6..22d7f97be 100644
--- a/examples/dogpile_caching/helloworld.py
+++ b/examples/dogpile_caching/helloworld.py
@@ -8,27 +8,27 @@ from .environment import Session
from .model import Person
from .caching_query import FromCache
-# load Person objects. cache the result under the namespace "all_people".
+# load Person objects. cache the result in the "default" cache region
print("loading people....")
people = Session.query(Person).options(FromCache("default")).all()
# remove the Session. next query starts from scratch.
Session.remove()
-# load again, using the same FromCache option. now they're cached
-# under "all_people", no SQL is emitted.
+# load again, using the same FromCache option. now they're cached,
+# so no SQL is emitted.
print("loading people....again!")
people = Session.query(Person).options(FromCache("default")).all()
-# want to load on some different kind of query ? change the namespace
-# you send to FromCache
+# Specifying a different query produces a different cache key, so
+# these results are independently cached.
print("loading people two through twelve")
people_two_through_twelve = Session.query(Person).\
options(FromCache("default")).\
filter(Person.name.between("person 02", "person 12")).\
all()
-# the data is cached under the "namespace" you send to FromCache, *plus*
+# the data is cached under string structure of the SQL statement, *plus*
# the bind parameters of the query. So this query, having
# different literal parameters under "Person.name.between()" than the
# previous one, issues new SQL...
diff --git a/examples/dogpile_caching/local_session_caching.py b/examples/dogpile_caching/local_session_caching.py
index cf0083d2e..e6c712b4a 100644
--- a/examples/dogpile_caching/local_session_caching.py
+++ b/examples/dogpile_caching/local_session_caching.py
@@ -1,5 +1,10 @@
"""local_session_caching.py
+Grok everything so far ? This example
+creates a new dogpile.cache backend that will persist data in a dictionary
+which is local to the current session. remove() the session
+and the cache is gone.
+
Create a new Dogpile cache backend that will store
cached data local to the current Session.
diff --git a/examples/dogpile_caching/model.py b/examples/dogpile_caching/model.py
index 622d31e6a..75c0ad28a 100644
--- a/examples/dogpile_caching/model.py
+++ b/examples/dogpile_caching/model.py
@@ -1,6 +1,7 @@
-"""Model. We are modeling Person objects with a collection
-of Address objects. Each Address has a PostalCode, which
-in turn references a City and then a Country:
+"""model.py
+
+The datamodel, which represents Person that has multiple
+Address objects, each with PostalCode, City, Country.
Person --(1..n)--> Address
Address --(has a)--> PostalCode
diff --git a/examples/dogpile_caching/relation_caching.py b/examples/dogpile_caching/relationship_caching.py
index d40752e48..320ced48a 100644
--- a/examples/dogpile_caching/relation_caching.py
+++ b/examples/dogpile_caching/relationship_caching.py
@@ -1,5 +1,8 @@
"""relationship_caching.py
+Illustrates how to add cache options on
+relationship endpoints, so that lazyloads load from cache.
+
Load a set of Person and Address objects, specifying that
related PostalCode, City, Country objects should be pulled from long
term cache.
diff --git a/examples/dynamic_dict/__init__.py b/examples/dynamic_dict/__init__.py
index 7f7b0691d..e592ea200 100644
--- a/examples/dynamic_dict/__init__.py
+++ b/examples/dynamic_dict/__init__.py
@@ -3,4 +3,6 @@
string keys) can operate upon a large collection without loading the
full collection at once.
+.. autosource::
+
""" \ No newline at end of file
diff --git a/examples/elementtree/__init__.py b/examples/elementtree/__init__.py
index 6462dd562..66e9cfbbe 100644
--- a/examples/elementtree/__init__.py
+++ b/examples/elementtree/__init__.py
@@ -7,26 +7,6 @@ native cElementTree as well as lxml, and can be adapted to
suit any kind of DOM representation system. Querying along
xpath-like strings is illustrated as well.
-In order of complexity:
-
-* ``pickle.py`` - Quick and dirty, serialize the whole DOM into a BLOB
- column. While the example is very brief, it has very limited
- functionality.
-
-* ``adjacency_list.py`` - Each DOM node is stored in an individual
- table row, with attributes represented in a separate table. The
- nodes are associated in a hierarchy using an adjacency list
- structure. A query function is introduced which can search for nodes
- along any path with a given structure of attributes, basically a
- (very narrow) subset of xpath.
-
-* ``optimized_al.py`` - Uses the same strategy as
- ``adjacency_list.py``, but associates each DOM row with its owning
- document row, so that a full document of DOM nodes can be loaded
- using O(1) queries - the construction of the "hierarchy" is performed
- after the load in a non-recursive fashion and is much more
- efficient.
-
E.g.::
# parse an XML file and persist in the database
@@ -39,4 +19,7 @@ E.g.::
# dump the XML
print document
+.. autosource::
+ :files: pickle.py, adjacency_list.py, optimized_al.py
+
""" \ No newline at end of file
diff --git a/examples/elementtree/adjacency_list.py b/examples/elementtree/adjacency_list.py
index a3ad42778..5e27ba9ca 100644
--- a/examples/elementtree/adjacency_list.py
+++ b/examples/elementtree/adjacency_list.py
@@ -1,9 +1,17 @@
-"""illustrates an explicit way to persist an XML document expressed using ElementTree.
+"""Illustrates an explicit way to persist an XML document expressed using ElementTree.
+
+Each DOM node is stored in an individual
+table row, with attributes represented in a separate table. The
+nodes are associated in a hierarchy using an adjacency list
+structure. A query function is introduced which can search for nodes
+along any path with a given structure of attributes, basically a
+(very narrow) subset of xpath.
This example explicitly marshals/unmarshals the ElementTree document into
mapped entities which have their own tables. Compare to pickle.py which
uses pickle to accomplish the same task. Note that the usage of both
styles of persistence are identical, as is the structure of the main Document class.
+
"""
################################# PART I - Imports/Coniguration ####################################
diff --git a/examples/elementtree/optimized_al.py b/examples/elementtree/optimized_al.py
index 1dbad0943..e13f5b0ee 100644
--- a/examples/elementtree/optimized_al.py
+++ b/examples/elementtree/optimized_al.py
@@ -1,7 +1,9 @@
-"""This script duplicates adjacency_list.py, but optimizes the loading
-of XML nodes to be based on a "flattened" datamodel. Any number of XML documents,
-each of arbitrary complexity, can be loaded in their entirety via a single query
-which joins on only three tables.
+"""Uses the same strategy as
+ ``adjacency_list.py``, but associates each DOM row with its owning
+ document row, so that a full document of DOM nodes can be loaded
+ using O(1) queries - the construction of the "hierarchy" is performed
+ after the load in a non-recursive fashion and is more
+ efficient.
"""
diff --git a/examples/generic_associations/__init__.py b/examples/generic_associations/__init__.py
index b6cb24088..b6593b4f4 100644
--- a/examples/generic_associations/__init__.py
+++ b/examples/generic_associations/__init__.py
@@ -9,17 +9,10 @@ subclassing the ``HasAddresses`` mixin, which ensures that the
parent class is provided with an ``addresses`` collection
which contains ``Address`` objects.
-The configurations include:
+The :viewsource:`.discriminator_on_association` and :viewsource:`.generic_fk` scripts
+are modernized versions of recipes presented in the 2007 blog post
+`Polymorphic Associations with SQLAlchemy <http://techspot.zzzeek.org/2007/05/29/polymorphic-associations-with-sqlalchemy/>`_.
-* ``table_per_related.py`` - illustrates a distinct table per related collection.
-* ``table_per_association.py`` - illustrates a shared collection table, using a
- table per association.
-* ``discriminator_on_association.py`` - shared collection table and shared
- association table, including a discriminator column.
-
-The ``discriminator_on_association.py`` script in particular is a modernized
-version of the "polymorphic associations" example present in older versions of
-SQLAlchemy, originally from the blog post at
-http://techspot.zzzeek.org/2007/05/29/polymorphic-associations-with-sqlalchemy/.
+.. autosource::
""" \ No newline at end of file
diff --git a/examples/generic_associations/discriminator_on_association.py b/examples/generic_associations/discriminator_on_association.py
index 7b4565a85..e03cfec00 100644
--- a/examples/generic_associations/discriminator_on_association.py
+++ b/examples/generic_associations/discriminator_on_association.py
@@ -1,28 +1,29 @@
"""discriminator_on_related.py
-The HasAddresses mixin will provide a relationship
-to the fixed Address table based on a fixed association table.
-
-The association table will also contain a "discriminator"
-which determines what type of parent object associates to the
-Address row.
-
-This is a "polymorphic association". Even though a "discriminator"
-that refers to a particular table is present, the extra association
-table is used so that traditional foreign key constraints may be used.
-
-This configuration has the advantage that a fixed set of tables
-are used, with no extra-table-per-parent needed. The individual
-Address record can also locate its parent with no need to scan
-amongst many tables.
+Illustrates a mixin which provides a generic association
+using a single target table and a single association table,
+referred to by all parent tables. The association table
+contains a "discriminator" column which determines what type of
+parent object associates to each particular row in the association
+table.
+
+SQLAlchemy's single-table-inheritance feature is used
+to target different association types.
+
+This configuration attempts to simulate a so-called "generic foreign key"
+as closely as possible without actually foregoing the use of real
+foreign keys. Unlike table-per-related and table-per-association,
+it uses a fixed number of tables to serve any number of potential parent
+objects, but is also slightly more complex.
"""
-from sqlalchemy.ext.declarative import declarative_base, declared_attr
+from sqlalchemy.ext.declarative import as_declarative, declared_attr
from sqlalchemy import create_engine, Integer, Column, \
- String, ForeignKey, Table
+ String, ForeignKey
from sqlalchemy.orm import Session, relationship, backref
from sqlalchemy.ext.associationproxy import association_proxy
+@as_declarative()
class Base(object):
"""Base class which provides automated table name
and surrogate primary key column.
@@ -32,7 +33,6 @@ class Base(object):
def __tablename__(cls):
return cls.__name__.lower()
id = Column(Integer, primary_key=True)
-Base = declarative_base(cls=Base)
class AddressAssociation(Base):
"""Associates a collection of Address objects
@@ -41,22 +41,10 @@ class AddressAssociation(Base):
"""
__tablename__ = "address_association"
- @classmethod
- def creator(cls, discriminator):
- """Provide a 'creator' function to use with
- the association proxy."""
-
- return lambda addresses:AddressAssociation(
- addresses=addresses,
- discriminator=discriminator)
-
discriminator = Column(String)
"""Refers to the type of parent."""
- @property
- def parent(self):
- """Return the parent object."""
- return getattr(self, "%s_parent" % self.discriminator)
+ __mapper_args__ = {"polymorphic_on": discriminator}
class Address(Base):
"""The Address class.
@@ -65,15 +53,11 @@ class Address(Base):
single table.
"""
- association_id = Column(Integer,
- ForeignKey("address_association.id")
- )
+ association_id = Column(Integer, ForeignKey("address_association.id"))
street = Column(String)
city = Column(String)
zip = Column(String)
- association = relationship(
- "AddressAssociation",
- backref="addresses")
+ association = relationship("AddressAssociation", backref="addresses")
parent = association_proxy("association", "parent")
@@ -89,19 +73,29 @@ class HasAddresses(object):
"""
@declared_attr
def address_association_id(cls):
- return Column(Integer,
- ForeignKey("address_association.id"))
+ return Column(Integer, ForeignKey("address_association.id"))
@declared_attr
def address_association(cls):
- discriminator = cls.__name__.lower()
- cls.addresses= association_proxy(
+ name = cls.__name__
+ discriminator = name.lower()
+
+ assoc_cls = type(
+ "%sAddressAssociation" % name,
+ (AddressAssociation, ),
+ dict(
+ __mapper_args__={
+ "polymorphic_identity": discriminator
+ }
+ )
+ )
+
+ cls.addresses = association_proxy(
"address_association", "addresses",
- creator=AddressAssociation.creator(discriminator)
+ creator=lambda addresses: assoc_cls(addresses=addresses)
)
- return relationship("AddressAssociation",
- backref=backref("%s_parent" % discriminator,
- uselist=False))
+ return relationship(assoc_cls,
+ backref=backref("parent", uselist=False))
class Customer(HasAddresses, Base):
diff --git a/examples/generic_associations/generic_fk.py b/examples/generic_associations/generic_fk.py
new file mode 100644
index 000000000..e228c6ba4
--- /dev/null
+++ b/examples/generic_associations/generic_fk.py
@@ -0,0 +1,140 @@
+"""generic_fk.py
+
+Illustrates a so-called "generic foreign key", in a similar fashion
+to that of popular frameworks such as Django, ROR, etc. This
+approach bypasses standard referential integrity
+practices, in that the "foreign key" column is not actually
+constrained to refer to any particular table; instead,
+in-application logic is used to determine which table is referenced.
+
+This approach is not in line with SQLAlchemy's usual style, as foregoing
+foreign key integrity means that the tables can easily contain invalid
+references and also have no ability to use in-database cascade functionality.
+
+However, due to the popularity of these systems, as well as that it uses
+the fewest number of tables (which doesn't really offer any "advantage",
+though seems to be comforting to many) this recipe remains in
+high demand, so in the interests of having an easy StackOverflow answer
+queued up, here it is. The author recommends "table_per_related"
+or "table_per_association" instead of this approach.
+
+.. versionadded:: 0.8.3
+
+"""
+from sqlalchemy.ext.declarative import as_declarative, declared_attr
+from sqlalchemy import create_engine, Integer, Column, \
+ String, and_
+from sqlalchemy.orm import Session, relationship, foreign, remote, backref
+from sqlalchemy import event
+
+
+@as_declarative()
+class Base(object):
+ """Base class which provides automated table name
+ and surrogate primary key column.
+
+ """
+ @declared_attr
+ def __tablename__(cls):
+ return cls.__name__.lower()
+ id = Column(Integer, primary_key=True)
+
+class Address(Base):
+ """The Address class.
+
+ This represents all address records in a
+ single table.
+
+ """
+ street = Column(String)
+ city = Column(String)
+ zip = Column(String)
+
+ discriminator = Column(String)
+ """Refers to the type of parent."""
+
+ parent_id = Column(Integer)
+ """Refers to the primary key of the parent.
+
+ This could refer to any table.
+ """
+
+ @property
+ def parent(self):
+ """Provides in-Python access to the "parent" by choosing
+ the appropriate relationship.
+
+ """
+ return getattr(self, "parent_%s" % self.discriminator)
+
+ def __repr__(self):
+ return "%s(street=%r, city=%r, zip=%r)" % \
+ (self.__class__.__name__, self.street,
+ self.city, self.zip)
+
+class HasAddresses(object):
+ """HasAddresses mixin, creates a relationship to
+ the address_association table for each parent.
+
+ """
+
+@event.listens_for(HasAddresses, "mapper_configured", propagate=True)
+def setup_listener(mapper, class_):
+ name = class_.__name__
+ discriminator = name.lower()
+ class_.addresses = relationship(Address,
+ primaryjoin=and_(
+ class_.id == foreign(remote(Address.parent_id)),
+ Address.discriminator == discriminator
+ ),
+ backref=backref(
+ "parent_%s" % discriminator,
+ primaryjoin=remote(class_.id) == foreign(Address.parent_id)
+ )
+ )
+ @event.listens_for(class_.addresses, "append")
+ def append_address(target, value, initiator):
+ value.discriminator = discriminator
+
+class Customer(HasAddresses, Base):
+ name = Column(String)
+
+class Supplier(HasAddresses, Base):
+ company_name = Column(String)
+
+engine = create_engine('sqlite://', echo=True)
+Base.metadata.create_all(engine)
+
+session = Session(engine)
+
+session.add_all([
+ Customer(
+ name='customer 1',
+ addresses=[
+ Address(
+ street='123 anywhere street',
+ city="New York",
+ zip="10110"),
+ Address(
+ street='40 main street',
+ city="San Francisco",
+ zip="95732")
+ ]
+ ),
+ Supplier(
+ company_name="Ace Hammers",
+ addresses=[
+ Address(
+ street='2569 west elm',
+ city="Detroit",
+ zip="56785")
+ ]
+ ),
+])
+
+session.commit()
+
+for customer in session.query(Customer):
+ for address in customer.addresses:
+ print(address)
+ print(address.parent) \ No newline at end of file
diff --git a/examples/generic_associations/table_per_association.py b/examples/generic_associations/table_per_association.py
index 84e85de2f..4993492a4 100644
--- a/examples/generic_associations/table_per_association.py
+++ b/examples/generic_associations/table_per_association.py
@@ -1,8 +1,9 @@
"""table_per_association.py
-The HasAddresses mixin will provide a new "address_association" table for
-each parent class. The "address" table will be shared
-for all parents.
+Illustrates a mixin which provides a generic association
+via individually generated association tables, one for each parent class.
+The associated objects themselves are persisted in a single table
+shared among all parents.
This configuration has the advantage that all Address
rows are in one table, so that the definition of "Address"
@@ -12,11 +13,12 @@ has no dependency on the system.
"""
-from sqlalchemy.ext.declarative import declarative_base, declared_attr
+from sqlalchemy.ext.declarative import as_declarative, declared_attr
from sqlalchemy import create_engine, Integer, Column, \
String, ForeignKey, Table
from sqlalchemy.orm import Session, relationship
+@as_declarative()
class Base(object):
"""Base class which provides automated table name
and surrogate primary key column.
@@ -26,7 +28,6 @@ class Base(object):
def __tablename__(cls):
return cls.__name__.lower()
id = Column(Integer, primary_key=True)
-Base = declarative_base(cls=Base)
class Address(Base):
"""The Address class.
diff --git a/examples/generic_associations/table_per_related.py b/examples/generic_associations/table_per_related.py
index 0ec5f29b0..aff6e40ce 100644
--- a/examples/generic_associations/table_per_related.py
+++ b/examples/generic_associations/table_per_related.py
@@ -1,7 +1,8 @@
"""table_per_related.py
-The HasAddresses mixin will provide a new "address" table for
-each parent class, as well as a distinct "Address" subclass.
+Illustrates a generic association which persists association
+objects within individual tables, each one generated to persist
+those objects on behalf of a particular parent class.
This configuration has the advantage that each type of parent
maintains its "Address" rows separately, so that collection
@@ -9,11 +10,19 @@ size for one type of parent will have no impact on other types
of parent. Navigation between parent and "Address" is simple,
direct, and bidirectional.
+This recipe is the simplest and most efficient (speed-wise and
+storage-wise) of all of them.
+
+The creation of many related tables may seem at first like an issue
+but there really isn't any - the management and targeting of these tables
+is completely automated.
+
"""
-from sqlalchemy.ext.declarative import declarative_base, declared_attr
+from sqlalchemy.ext.declarative import as_declarative, declared_attr
from sqlalchemy import create_engine, Integer, Column, String, ForeignKey
from sqlalchemy.orm import Session, relationship
+@as_declarative()
class Base(object):
"""Base class which provides automated table name
and surrogate primary key column.
@@ -23,7 +32,6 @@ class Base(object):
def __tablename__(cls):
return cls.__name__.lower()
id = Column(Integer, primary_key=True)
-Base = declarative_base(cls=Base)
class Address(object):
"""Define columns that will be present in each
@@ -54,11 +62,11 @@ class HasAddresses(object):
"%sAddress" % cls.__name__,
(Address, Base,),
dict(
- __tablename__ = "%s_address" %
+ __tablename__="%s_address" %
cls.__tablename__,
- parent_id = Column(Integer,
- ForeignKey("%s.id" % cls.__tablename__)),
- parent = relationship(cls)
+ parent_id=Column(Integer,
+ ForeignKey("%s.id" % cls.__tablename__)),
+ parent=relationship(cls)
)
)
return relationship(cls.Address)
diff --git a/examples/graphs/__init__.py b/examples/graphs/__init__.py
index 629808abe..57d41453b 100644
--- a/examples/graphs/__init__.py
+++ b/examples/graphs/__init__.py
@@ -8,4 +8,6 @@ and querying for lower- and upper- neighbors are illustrated::
n2.add_neighbor(n5)
print n2.higher_neighbors()
+.. autosource::
+
""" \ No newline at end of file
diff --git a/examples/inheritance/__init__.py b/examples/inheritance/__init__.py
index 09519a679..eb3e843ca 100644
--- a/examples/inheritance/__init__.py
+++ b/examples/inheritance/__init__.py
@@ -1,4 +1,6 @@
"""Working examples of single-table, joined-table, and concrete-table
inheritance as described in :ref:`datamapping_inheritance`.
+.. autosource::
+
""" \ No newline at end of file
diff --git a/examples/inheritance/concrete.py b/examples/inheritance/concrete.py
index b05afa5ea..f9bdc81b4 100644
--- a/examples/inheritance/concrete.py
+++ b/examples/inheritance/concrete.py
@@ -1,3 +1,5 @@
+"""Concrete (table-per-class) inheritance example."""
+
from sqlalchemy import create_engine, MetaData, Table, Column, Integer, \
String
from sqlalchemy.orm import mapper, sessionmaker, polymorphic_union
diff --git a/examples/inheritance/joined.py b/examples/inheritance/joined.py
index c6ce37146..6e0205e04 100644
--- a/examples/inheritance/joined.py
+++ b/examples/inheritance/joined.py
@@ -1,4 +1,4 @@
-"""this example illustrates a polymorphic load of two classes"""
+"""Joined-table (table-per-subclass) inheritance example."""
from sqlalchemy import Table, Column, Integer, String, \
ForeignKey, create_engine, inspect, or_
@@ -133,3 +133,4 @@ print(session.query(Company).\
session.commit()
+
diff --git a/examples/inheritance/single.py b/examples/inheritance/single.py
index b445f74a6..22a6fe027 100644
--- a/examples/inheritance/single.py
+++ b/examples/inheritance/single.py
@@ -1,3 +1,5 @@
+"""Single-table inheritance example."""
+
from sqlalchemy import MetaData, Table, Column, Integer, String, \
ForeignKey, create_engine
from sqlalchemy.orm import mapper, relationship, sessionmaker
diff --git a/examples/join_conditions/__init__.py b/examples/join_conditions/__init__.py
new file mode 100644
index 000000000..3a561d084
--- /dev/null
+++ b/examples/join_conditions/__init__.py
@@ -0,0 +1,7 @@
+"""Examples of various :func:`.orm.relationship` configurations,
+which make use of the ``primaryjoin`` argument to compose special types
+of join conditions.
+
+.. autosource::
+
+""" \ No newline at end of file
diff --git a/examples/join_conditions/cast.py b/examples/join_conditions/cast.py
new file mode 100644
index 000000000..246bc1d57
--- /dev/null
+++ b/examples/join_conditions/cast.py
@@ -0,0 +1,95 @@
+"""Illustrate a :func:`.relationship` that joins two columns where those
+columns are not of the same type, and a CAST must be used on the SQL
+side in order to match them.
+
+When complete, we'd like to see a load of the relationship to look like::
+
+ -- load the primary row, a_id is a string
+ SELECT a.id AS a_id_1, a.a_id AS a_a_id
+ FROM a
+ WHERE a.a_id = '2'
+
+ -- then load the collection using CAST, b.a_id is an integer
+ SELECT b.id AS b_id, b.a_id AS b_a_id
+ FROM b
+ WHERE CAST('2' AS INTEGER) = b.a_id
+
+The relationship is essentially configured as follows::
+
+ class B(Base):
+ # ...
+
+ a = relationship(A,
+ primaryjoin=cast(A.a_id, Integer) == foreign(B.a_id),
+ backref="bs")
+
+Where above, we are making use of the :func:`.cast` function in order
+to produce CAST, as well as the :func:`.foreign` :term:`annotation` function
+in order to note to the ORM that ``B.a_id`` should be treated like the
+"foreign key" column.
+
+"""
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from sqlalchemy.ext.declarative import declarative_base
+
+Base = declarative_base()
+
+class StringAsInt(TypeDecorator):
+ """Coerce string->integer type.
+
+ This is needed only if the relationship() from
+ int to string is writable, as SQLAlchemy will copy
+ the string parent values into the integer attribute
+ on the child during a flush.
+
+ """
+ impl = Integer
+ def process_bind_param(self, value, dialect):
+ if value is not None:
+ value = int(value)
+ return value
+
+class A(Base):
+ """Parent. The referenced column is a string type."""
+
+ __tablename__ = 'a'
+
+ id = Column(Integer, primary_key=True)
+ a_id = Column(String)
+
+class B(Base):
+ """Child. The column we reference 'A' with is an integer."""
+
+ __tablename__ = 'b'
+
+ id = Column(Integer, primary_key=True)
+ a_id = Column(StringAsInt)
+ a = relationship("A",
+ # specify primaryjoin. The string form is optional
+ # here, but note that Declarative makes available all
+ # of the built-in functions we might need, including
+ # cast() and foreign().
+ primaryjoin="cast(A.a_id, Integer) == foreign(B.a_id)",
+ backref="bs")
+
+# we demonstrate with SQLite, but the important part
+# is the CAST rendered in the SQL output.
+
+e = create_engine('sqlite://', echo=True)
+Base.metadata.create_all(e)
+
+s = Session(e)
+
+s.add_all([
+ A(a_id="1"),
+ A(a_id="2", bs=[B(), B()]),
+ A(a_id="3", bs=[B()]),
+])
+s.commit()
+
+b1 = s.query(B).filter_by(a_id="2").first()
+print(b1.a)
+
+a1 = s.query(A).filter_by(a_id="2").first()
+print(a1.bs) \ No newline at end of file
diff --git a/examples/join_conditions/threeway.py b/examples/join_conditions/threeway.py
new file mode 100644
index 000000000..13df0f349
--- /dev/null
+++ b/examples/join_conditions/threeway.py
@@ -0,0 +1,108 @@
+"""Illustrate a "three way join" - where a primary table joins to a remote
+table via an association table, but then the primary table also needs
+to refer to some columns in the remote table directly.
+
+E.g.::
+
+ first.first_id -> second.first_id
+ second.other_id --> partitioned.other_id
+ first.partition_key ---------------------> partitioned.partition_key
+
+For a relationship like this, "second" is a lot like a "secondary" table,
+but the mechanics aren't present within the "secondary" feature to allow
+for the join directly between first and partitioned. Instead, we
+will derive a selectable from partitioned and second combined together, then
+link first to that derived selectable.
+
+If we define the derived selectable as::
+
+ second JOIN partitioned ON second.other_id = partitioned.other_id
+
+A JOIN from first to this derived selectable is then::
+
+ first JOIN (second JOIN partitioned
+ ON second.other_id = partitioned.other_id)
+ ON first.first_id = second.first_id AND
+ first.partition_key = partitioned.partition_key
+
+We will use the "non primary mapper" feature in order to produce this.
+A non primary mapper is essentially an "extra" :func:`.mapper` that we can
+use to associate a particular class with some selectable that is
+not its usual mapped table. It is used only when called upon within
+a Query (or a :func:`.relationship`).
+
+
+"""
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from sqlalchemy.ext.declarative import declarative_base
+
+Base = declarative_base()
+
+class First(Base):
+ __tablename__ = 'first'
+
+ first_id = Column(Integer, primary_key=True)
+ partition_key = Column(String)
+
+ def __repr__(self):
+ return ("First(%s, %s)" % (self.first_id, self.partition_key))
+
+class Second(Base):
+ __tablename__ = 'second'
+
+ first_id = Column(Integer, primary_key=True)
+ other_id = Column(Integer, primary_key=True)
+
+class Partitioned(Base):
+ __tablename__ = 'partitioned'
+
+ other_id = Column(Integer, primary_key=True)
+ partition_key = Column(String, primary_key=True)
+
+ def __repr__(self):
+ return ("Partitioned(%s, %s)" % (self.other_id, self.partition_key))
+
+
+j = join(Partitioned, Second, Partitioned.other_id == Second.other_id)
+
+partitioned_second = mapper(Partitioned, j, non_primary=True, properties={
+ # note we need to disambiguate columns here - the join()
+ # will provide them as j.c.<tablename>_<colname> for access,
+ # but they retain their real names in the mapping
+ "other_id": [j.c.partitioned_other_id, j.c.second_other_id],
+ })
+
+First.partitioned = relationship(
+ partitioned_second,
+ primaryjoin=and_(
+ First.partition_key == partitioned_second.c.partition_key,
+ First.first_id == foreign(partitioned_second.c.first_id)
+ ), innerjoin=True)
+
+# when using any database other than SQLite, we will get a nested
+# join, e.g. "first JOIN (partitioned JOIN second ON ..) ON ..".
+# On SQLite, SQLAlchemy needs to render a full subquery.
+e = create_engine("sqlite://", echo=True)
+
+Base.metadata.create_all(e)
+s = Session(e)
+s.add_all([
+ First(first_id=1, partition_key='p1'),
+ First(first_id=2, partition_key='p1'),
+ First(first_id=3, partition_key='p2'),
+ Second(first_id=1, other_id=1),
+ Second(first_id=2, other_id=1),
+ Second(first_id=3, other_id=2),
+ Partitioned(partition_key='p1', other_id=1),
+ Partitioned(partition_key='p1', other_id=2),
+ Partitioned(partition_key='p2', other_id=2),
+])
+s.commit()
+
+for row in s.query(First, Partitioned).join(First.partitioned):
+ print(row)
+
+for f in s.query(First):
+ for p in f.partitioned:
+ print(f.partition_key, p.partition_key)
diff --git a/examples/large_collection/__init__.py b/examples/large_collection/__init__.py
index 4098cd53a..432d9196f 100644
--- a/examples/large_collection/__init__.py
+++ b/examples/large_collection/__init__.py
@@ -9,4 +9,6 @@ objects is very large, including:
``passive_deletes=True`` to greatly improve the performance of
related collection deletion.
+.. autosource::
+
"""
diff --git a/examples/nested_sets/__init__.py b/examples/nested_sets/__init__.py
index 1a97b9aef..3e73bb13e 100644
--- a/examples/nested_sets/__init__.py
+++ b/examples/nested_sets/__init__.py
@@ -1,4 +1,6 @@
""" Illustrates a rudimentary way to implement the "nested sets"
pattern for hierarchical data using the SQLAlchemy ORM.
+.. autosource::
+
""" \ No newline at end of file
diff --git a/examples/nested_sets/nested_sets.py b/examples/nested_sets/nested_sets.py
index 8225a09f2..c64b15b61 100644
--- a/examples/nested_sets/nested_sets.py
+++ b/examples/nested_sets/nested_sets.py
@@ -88,13 +88,13 @@ session.commit()
print(session.query(Employee).all())
-# 1. Find an employee and all his/her supervisors, no matter how deep the tree.
+# 1. Find an employee and all their supervisors, no matter how deep the tree.
ealias = aliased(Employee)
print(session.query(Employee).\
filter(ealias.left.between(Employee.left, Employee.right)).\
filter(ealias.emp == 'Eddie').all())
-#2. Find the employee and all his/her subordinates.
+#2. Find the employee and all their subordinates.
# (This query has a nice symmetry with the first query.)
print(session.query(Employee).\
filter(Employee.left.between(ealias.left, ealias.right)).\
diff --git a/examples/postgis/__init__.py b/examples/postgis/__init__.py
index cec5ad48a..250d9ce87 100644
--- a/examples/postgis/__init__.py
+++ b/examples/postgis/__init__.py
@@ -33,5 +33,7 @@ E.g.::
print session.query(Road).filter(Road.road_geom.intersects(r1.road_geom)).all()
+.. autosource::
+
"""
diff --git a/examples/sharding/__init__.py b/examples/sharding/__init__.py
index dacc815f9..59d26a217 100644
--- a/examples/sharding/__init__.py
+++ b/examples/sharding/__init__.py
@@ -27,4 +27,6 @@ is a simple method of assigning objects to different tables (and potentially
database nodes) in an explicit way - described on the wiki at
`EntityName <http://www.sqlalchemy.org/trac/wiki/UsageRecipes/EntityName>`_.
+.. autosource::
+
"""
diff --git a/examples/versioning/__init__.py b/examples/versioned_history/__init__.py
index 4621fae3b..d630b994b 100644
--- a/examples/versioning/__init__.py
+++ b/examples/versioned_history/__init__.py
@@ -1,8 +1,6 @@
"""
Illustrates an extension which creates version tables for entities and stores
-records for each change. The same idea as Elixir's versioned extension, but
-more efficient (uses attribute API to get history) and handles class
-inheritance. The given extensions generate an anonymous "history" class which
+records for each change. The given extensions generate an anonymous "history" class which
represents historical versions of the target object.
Usage is illustrated via a unit test module ``test_versioning.py``, which can
@@ -57,4 +55,6 @@ can be applied::
SomeHistoryClass = SomeClass.__history_mapper__.class_
+.. autosource::
+
""" \ No newline at end of file
diff --git a/examples/versioning/history_meta.py b/examples/versioned_history/history_meta.py
index deef67a0a..8cb523434 100644
--- a/examples/versioning/history_meta.py
+++ b/examples/versioned_history/history_meta.py
@@ -1,3 +1,5 @@
+"""Versioned mixin class and other utilities."""
+
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import mapper, class_mapper, attributes, object_mapper
from sqlalchemy.orm.exc import UnmappedClassError, UnmappedColumnError
@@ -25,6 +27,7 @@ def _history_mapper(local_mapper):
polymorphic_on = None
super_fks = []
+
if not super_mapper or local_mapper.local_table is not super_mapper.local_table:
cols = []
for column in local_mapper.local_table.c:
@@ -43,10 +46,10 @@ def _history_mapper(local_mapper):
polymorphic_on = col
if super_mapper:
- super_fks.append(('version', super_history_mapper.base_mapper.local_table.c.version))
- cols.append(Column('version', Integer, primary_key=True))
+ super_fks.append(('version', super_history_mapper.local_table.c.version))
+ cols.append(Column('version', Integer, primary_key=True, autoincrement=False))
else:
- cols.append(Column('version', Integer, primary_key=True))
+ cols.append(Column('version', Integer, primary_key=True, autoincrement=False))
if super_fks:
cols.append(ForeignKeyConstraint(*zip(*super_fks)))
@@ -181,4 +184,4 @@ def versioned_session(session):
for obj in versioned_objects(session.dirty):
create_version(obj, session)
for obj in versioned_objects(session.deleted):
- create_version(obj, session, deleted = True) \ No newline at end of file
+ create_version(obj, session, deleted = True)
diff --git a/examples/versioning/test_versioning.py b/examples/versioned_history/test_versioning.py
index 297d9e47e..906280555 100644
--- a/examples/versioning/test_versioning.py
+++ b/examples/versioned_history/test_versioning.py
@@ -1,40 +1,46 @@
+"""Unit tests illustrating usage of the ``history_meta.py`` module functions."""
+
from unittest import TestCase
from sqlalchemy.ext.declarative import declarative_base
from .history_meta import Versioned, versioned_session
from sqlalchemy import create_engine, Column, Integer, String, ForeignKey
-from sqlalchemy.orm import clear_mappers, sessionmaker, deferred, relationship
-from ._lib import ComparableEntity, eq_
+from sqlalchemy.orm import clear_mappers, Session, deferred, relationship
+from sqlalchemy.testing import AssertsCompiledSQL, eq_, assert_raises
+from sqlalchemy.testing.entities import BasicEntity, ComparableEntity
+from sqlalchemy.orm import exc as orm_exc
-engine = Session = None
+engine = None
def setup():
global engine
engine = create_engine('sqlite://', echo=True)
-class TestVersioning(TestCase):
+class TestVersioning(TestCase, AssertsCompiledSQL):
+ __dialect__ = 'default'
+
def setUp(self):
- global Base, Session, Versioned
- Base = declarative_base()
- Session = sessionmaker(engine)
- versioned_session(Session)
+ self.session = Session(engine)
+ self.Base = declarative_base()
+ versioned_session(self.session)
def tearDown(self):
+ self.session.close()
clear_mappers()
- Base.metadata.drop_all(engine)
+ self.Base.metadata.drop_all(engine)
def create_tables(self):
- Base.metadata.create_all(engine)
+ self.Base.metadata.create_all(engine)
def test_plain(self):
- class SomeClass(Versioned, Base, ComparableEntity):
+ class SomeClass(Versioned, self.Base, ComparableEntity):
__tablename__ = 'sometable'
id = Column(Integer, primary_key=True)
name = Column(String(50))
self.create_tables()
- sess = Session()
+ sess = self.session
sc = SomeClass(name='sc1')
sess.add(sc)
sess.commit()
@@ -90,15 +96,44 @@ class TestVersioning(TestCase):
]
)
+ def test_w_mapper_versioning(self):
+ class SomeClass(Versioned, self.Base, ComparableEntity):
+ __tablename__ = 'sometable'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50))
+
+ SomeClass.__mapper__.version_id_col = SomeClass.__table__.c.version
+
+ self.create_tables()
+ sess = self.session
+ sc = SomeClass(name='sc1')
+ sess.add(sc)
+ sess.commit()
+
+ s2 = Session(sess.bind)
+ sc2 = s2.query(SomeClass).first()
+ sc2.name = 'sc1modified'
+
+ sc.name = 'sc1modified_again'
+ sess.commit()
+
+ eq_(sc.version, 2)
+
+ assert_raises(
+ orm_exc.StaleDataError,
+ s2.flush
+ )
+
def test_from_null(self):
- class SomeClass(Versioned, Base, ComparableEntity):
+ class SomeClass(Versioned, self.Base, ComparableEntity):
__tablename__ = 'sometable'
id = Column(Integer, primary_key=True)
name = Column(String(50))
self.create_tables()
- sess = Session()
+ sess = self.session
sc = SomeClass()
sess.add(sc)
sess.commit()
@@ -111,7 +146,7 @@ class TestVersioning(TestCase):
def test_deferred(self):
"""test versioning of unloaded, deferred columns."""
- class SomeClass(Versioned, Base, ComparableEntity):
+ class SomeClass(Versioned, self.Base, ComparableEntity):
__tablename__ = 'sometable'
id = Column(Integer, primary_key=True)
@@ -119,7 +154,7 @@ class TestVersioning(TestCase):
data = deferred(Column(String(25)))
self.create_tables()
- sess = Session()
+ sess = self.session
sc = SomeClass(name='sc1', data='somedata')
sess.add(sc)
sess.commit()
@@ -142,7 +177,7 @@ class TestVersioning(TestCase):
def test_joined_inheritance(self):
- class BaseClass(Versioned, Base, ComparableEntity):
+ class BaseClass(Versioned, self.Base, ComparableEntity):
__tablename__ = 'basetable'
id = Column(Integer, primary_key=True)
@@ -169,7 +204,7 @@ class TestVersioning(TestCase):
__mapper_args__ = {'polymorphic_identity':'same'}
self.create_tables()
- sess = Session()
+ sess = self.session
sep1 = SubClassSeparatePk(name='sep1', subdata1='sep1subdata')
base1 = BaseClass(name='base1')
@@ -218,8 +253,82 @@ class TestVersioning(TestCase):
]
)
+ def test_joined_inheritance_multilevel(self):
+ class BaseClass(Versioned, self.Base, ComparableEntity):
+ __tablename__ = 'basetable'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50))
+ type = Column(String(20))
+
+ __mapper_args__ = {'polymorphic_on': type,
+ 'polymorphic_identity': 'base'}
+
+ class SubClass(BaseClass):
+ __tablename__ = 'subtable'
+
+ id = Column(Integer, primary_key=True)
+ base_id = Column(Integer, ForeignKey('basetable.id'))
+ subdata1 = Column(String(50))
+
+ __mapper_args__ = {'polymorphic_identity': 'sub'}
+
+ class SubSubClass(SubClass):
+ __tablename__ = 'subsubtable'
+
+ id = Column(Integer, ForeignKey('subtable.id'), primary_key=True)
+ subdata2 = Column(String(50))
+
+ __mapper_args__ = {'polymorphic_identity': 'subsub'}
+
+ self.create_tables()
+
+ SubSubHistory = SubSubClass.__history_mapper__.class_
+ sess = self.session
+ q = sess.query(SubSubHistory)
+ self.assert_compile(
+ q,
+ "SELECT subsubtable_history.id AS subsubtable_history_id, "
+ "subtable_history.id AS subtable_history_id, "
+ "basetable_history.id AS basetable_history_id, "
+ "basetable_history.name AS basetable_history_name, "
+ "basetable_history.type AS basetable_history_type, "
+ "subsubtable_history.version AS subsubtable_history_version, "
+ "subtable_history.version AS subtable_history_version, "
+ "basetable_history.version AS basetable_history_version, "
+ "subtable_history.base_id AS subtable_history_base_id, "
+ "subtable_history.subdata1 AS subtable_history_subdata1, "
+ "subsubtable_history.subdata2 AS subsubtable_history_subdata2 "
+ "FROM basetable_history "
+ "JOIN subtable_history "
+ "ON basetable_history.id = subtable_history.base_id "
+ "AND basetable_history.version = subtable_history.version "
+ "JOIN subsubtable_history ON subtable_history.id = "
+ "subsubtable_history.id AND subtable_history.version = subsubtable_history.version"
+ )
+
+ ssc = SubSubClass(name='ss1', subdata1='sd1', subdata2='sd2')
+ sess.add(ssc)
+ sess.commit()
+ eq_(
+ sess.query(SubSubHistory).all(),
+ []
+ )
+ ssc.subdata1 = 'sd11'
+ ssc.subdata2 = 'sd22'
+ sess.commit()
+ eq_(
+ sess.query(SubSubHistory).all(),
+ [SubSubHistory(name='ss1', subdata1='sd1',
+ subdata2='sd2', type='subsub', version=1)]
+ )
+ eq_(ssc, SubSubClass(name='ss1', subdata1='sd11',
+ subdata2='sd22', version=2))
+
+
+
def test_single_inheritance(self):
- class BaseClass(Versioned, Base, ComparableEntity):
+ class BaseClass(Versioned, self.Base, ComparableEntity):
__tablename__ = 'basetable'
id = Column(Integer, primary_key=True)
@@ -233,7 +342,7 @@ class TestVersioning(TestCase):
__mapper_args__ = {'polymorphic_identity':'sub'}
self.create_tables()
- sess = Session()
+ sess = self.session
b1 = BaseClass(name='b1')
sc = SubClass(name='s1', subname='sc1')
@@ -270,7 +379,7 @@ class TestVersioning(TestCase):
sess.flush()
def test_unique(self):
- class SomeClass(Versioned, Base, ComparableEntity):
+ class SomeClass(Versioned, self.Base, ComparableEntity):
__tablename__ = 'sometable'
id = Column(Integer, primary_key=True)
@@ -278,7 +387,7 @@ class TestVersioning(TestCase):
data = Column(String(50))
self.create_tables()
- sess = Session()
+ sess = self.session
sc = SomeClass(name='sc1', data='sc1')
sess.add(sc)
sess.commit()
@@ -295,12 +404,12 @@ class TestVersioning(TestCase):
def test_relationship(self):
- class SomeRelated(Base, ComparableEntity):
+ class SomeRelated(self.Base, ComparableEntity):
__tablename__ = 'somerelated'
id = Column(Integer, primary_key=True)
- class SomeClass(Versioned, Base, ComparableEntity):
+ class SomeClass(Versioned, self.Base, ComparableEntity):
__tablename__ = 'sometable'
id = Column(Integer, primary_key=True)
@@ -311,7 +420,7 @@ class TestVersioning(TestCase):
SomeClassHistory = SomeClass.__history_mapper__.class_
self.create_tables()
- sess = Session()
+ sess = self.session
sc = SomeClass(name='sc1')
sess.add(sc)
sess.commit()
@@ -343,7 +452,7 @@ class TestVersioning(TestCase):
def test_backref_relationship(self):
- class SomeRelated(Base, ComparableEntity):
+ class SomeRelated(self.Base, ComparableEntity):
__tablename__ = 'somerelated'
id = Column(Integer, primary_key=True)
@@ -351,13 +460,13 @@ class TestVersioning(TestCase):
related_id = Column(Integer, ForeignKey('sometable.id'))
related = relationship("SomeClass", backref='related')
- class SomeClass(Versioned, Base, ComparableEntity):
+ class SomeClass(Versioned, self.Base, ComparableEntity):
__tablename__ = 'sometable'
id = Column(Integer, primary_key=True)
self.create_tables()
- sess = Session()
+ sess = self.session
sc = SomeClass()
sess.add(sc)
sess.commit()
diff --git a/examples/versioned_rows/__init__.py b/examples/versioned_rows/__init__.py
new file mode 100644
index 000000000..7a4e89c33
--- /dev/null
+++ b/examples/versioned_rows/__init__.py
@@ -0,0 +1,7 @@
+"""
+Illustrates an extension which versions data by storing new rows for each change;
+that is, what would normally be an UPDATE becomes an INSERT.
+
+.. autosource::
+
+""" \ No newline at end of file
diff --git a/examples/versioned_rows/versioned_map.py b/examples/versioned_rows/versioned_map.py
new file mode 100644
index 000000000..774bfbe05
--- /dev/null
+++ b/examples/versioned_rows/versioned_map.py
@@ -0,0 +1,284 @@
+"""A variant of the versioned_rows example. Here
+we store a dictionary of key/value pairs, storing the k/v's in a
+"vertical" fashion where each key gets a row. The value is split out
+into two separate datatypes, string and int - the range of datatype
+storage can be adjusted for individual needs.
+
+Changes to the "data" attribute of a ConfigData object result in the
+ConfigData object being copied into a new one, and new associations to
+its data are created. Values which aren't changed between versions are
+referenced by both the former and the newer ConfigData object.
+Overall, only INSERT statements are emitted - no rows are UPDATed or
+DELETEd.
+
+An optional feature is also illustrated which associates individual
+key/value pairs with the ConfigData object in which it first
+originated. Since a new row is only persisted when a new value is
+created for a particular key, the recipe provides a way to query among
+the full series of changes which occurred for any particular key in
+the dictionary.
+
+The set of all ConfigData in a particular table represents a single
+series of versions. By adding additional columns to ConfigData, the
+system can be made to store multiple version streams distinguished by
+those additional values.
+
+"""
+
+from sqlalchemy import Column, String, Integer, ForeignKey, \
+ create_engine
+from sqlalchemy.orm.interfaces import SessionExtension
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import attributes, relationship, backref, \
+ sessionmaker, make_transient, validates
+from sqlalchemy.ext.associationproxy import association_proxy
+from sqlalchemy.orm.collections import attribute_mapped_collection
+
+class VersionExtension(SessionExtension):
+ """Apply the new_version() method of objects which are
+ marked as dirty during a flush.
+
+ See http://www.sqlalchemy.org/trac/wiki/UsageRecipes/VersionedRows
+
+ """
+ def before_flush(self, session, flush_context, instances):
+ for instance in session.dirty:
+ if hasattr(instance, 'new_version') and \
+ session.is_modified(instance, passive=True):
+
+ # make it transient
+ instance.new_version(session)
+
+ # re-add
+ session.add(instance)
+
+Base = declarative_base()
+
+class ConfigData(Base):
+ """Represent a series of key/value pairs.
+
+ ConfigData will generate a new version of itself
+ upon change.
+
+ The "data" dictionary provides access via
+ string name mapped to a string/int value.
+
+ """
+ __tablename__ = 'config'
+
+ id = Column(Integer, primary_key=True)
+ """Primary key column of this ConfigData."""
+
+ elements = relationship("ConfigValueAssociation",
+ collection_class=attribute_mapped_collection("name"),
+ backref=backref("config_data"),
+ lazy="subquery"
+ )
+ """Dictionary-backed collection of ConfigValueAssociation objects,
+ keyed to the name of the associated ConfigValue.
+
+ Note there's no "cascade" here. ConfigValueAssociation objects
+ are never deleted or changed.
+ """
+
+ def _new_value(name, value):
+ """Create a new entry for usage in the 'elements' dictionary."""
+ return ConfigValueAssociation(ConfigValue(name, value))
+
+ data = association_proxy("elements", "value", creator=_new_value)
+ """Proxy to the 'value' elements of each related ConfigValue,
+ via the 'elements' dictionary.
+ """
+
+ def __init__(self, data):
+ self.data = data
+
+ @validates('elements')
+ def _associate_with_element(self, key, element):
+ """Associate incoming ConfigValues with this
+ ConfigData, if not already associated.
+
+ This is an optional feature which allows
+ more comprehensive history tracking.
+
+ """
+ if element.config_value.originating_config is None:
+ element.config_value.originating_config = self
+ return element
+
+ def new_version(self, session):
+ # convert to an INSERT
+ make_transient(self)
+ self.id = None
+
+ # history of the 'elements' collection.
+ # this is a tuple of groups: (added, unchanged, deleted)
+ hist = attributes.get_history(self, 'elements')
+
+ # rewrite the 'elements' collection
+ # from scratch, removing all history
+ attributes.set_committed_value(self, 'elements', {})
+
+ # new elements in the "added" group
+ # are moved to our new collection.
+ for elem in hist.added:
+ self.elements[elem.name] = elem
+
+ # copy elements in the 'unchanged' group.
+ # the new ones associate with the new ConfigData,
+ # the old ones stay associated with the old ConfigData
+ for elem in hist.unchanged:
+ self.elements[elem.name] = ConfigValueAssociation(elem.config_value)
+
+ # we also need to expire changes on each ConfigValueAssociation
+ # that is to remain associated with the old ConfigData.
+ # Here, each one takes care of that in its new_version()
+ # method, though we could do that here as well.
+
+
+class ConfigValueAssociation(Base):
+ """Relate ConfigData objects to associated ConfigValue objects."""
+
+ __tablename__ = 'config_value_association'
+
+ config_id = Column(ForeignKey('config.id'), primary_key=True)
+ """Reference the primary key of the ConfigData object."""
+
+
+ config_value_id = Column(ForeignKey('config_value.id'), primary_key=True)
+ """Reference the primary key of the ConfigValue object."""
+
+ config_value = relationship("ConfigValue", lazy="joined", innerjoin=True)
+ """Reference the related ConfigValue object."""
+
+ def __init__(self, config_value):
+ self.config_value = config_value
+
+ def new_version(self, session):
+ """Expire all pending state, as ConfigValueAssociation is immutable."""
+
+ session.expire(self)
+
+ @property
+ def name(self):
+ return self.config_value.name
+
+ @property
+ def value(self):
+ return self.config_value.value
+
+ @value.setter
+ def value(self, value):
+ """Intercept set events.
+
+ Create a new ConfigValueAssociation upon change,
+ replacing this one in the parent ConfigData's dictionary.
+
+ If no net change, do nothing.
+
+ """
+ if value != self.config_value.value:
+ self.config_data.elements[self.name] = \
+ ConfigValueAssociation(
+ ConfigValue(self.config_value.name, value)
+ )
+
+class ConfigValue(Base):
+ """Represent an individual key/value pair at a given point in time.
+
+ ConfigValue is immutable.
+
+ """
+ __tablename__ = 'config_value'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50), nullable=False)
+ originating_config_id = Column(Integer, ForeignKey('config.id'),
+ nullable=False)
+ int_value = Column(Integer)
+ string_value = Column(String(255))
+
+ def __init__(self, name, value):
+ self.name = name
+ self.value = value
+
+ originating_config = relationship("ConfigData")
+ """Reference to the originating ConfigData.
+
+ This is optional, and allows history tracking of
+ individual values.
+
+ """
+
+ def new_version(self, session):
+ raise NotImplementedError("ConfigValue is immutable.")
+
+ @property
+ def value(self):
+ for k in ('int_value', 'string_value'):
+ v = getattr(self, k)
+ if v is not None:
+ return v
+ else:
+ return None
+
+ @value.setter
+ def value(self, value):
+ if isinstance(value, int):
+ self.int_value = value
+ self.string_value = None
+ else:
+ self.string_value = str(value)
+ self.int_value = None
+
+if __name__ == '__main__':
+ engine = create_engine('sqlite://', echo=True)
+ Base.metadata.create_all(engine)
+ Session = sessionmaker(bind=engine, extension=VersionExtension())
+
+ sess = Session()
+
+ config = ConfigData({
+ 'user_name':'twitter',
+ 'hash_id':'4fedffca37eaf',
+ 'x':27,
+ 'y':450
+ })
+
+ sess.add(config)
+ sess.commit()
+ version_one = config.id
+
+ config.data['user_name'] = 'yahoo'
+ sess.commit()
+
+ version_two = config.id
+
+ assert version_one != version_two
+
+ # two versions have been created.
+
+ assert config.data == {
+ 'user_name':'yahoo',
+ 'hash_id':'4fedffca37eaf',
+ 'x':27,
+ 'y':450
+ }
+
+ old_config = sess.query(ConfigData).get(version_one)
+ assert old_config.data == {
+ 'user_name':'twitter',
+ 'hash_id':'4fedffca37eaf',
+ 'x':27,
+ 'y':450
+ }
+
+ # the history of any key can be acquired using
+ # the originating_config_id attribute
+ history = sess.query(ConfigValue).\
+ filter(ConfigValue.name=='user_name').\
+ order_by(ConfigValue.originating_config_id).\
+ all()
+
+ assert [(h.value, h.originating_config_id) for h in history] == \
+ [('twitter', version_one), ('yahoo', version_two)]
diff --git a/examples/versioned_rows/versioned_rows.py b/examples/versioned_rows/versioned_rows.py
new file mode 100644
index 000000000..30acf4e0d
--- /dev/null
+++ b/examples/versioned_rows/versioned_rows.py
@@ -0,0 +1,105 @@
+"""Illustrates a method to intercept changes on objects, turning
+an UPDATE statement on a single row into an INSERT statement, so that a new
+row is inserted with the new data, keeping the old row intact.
+
+"""
+from sqlalchemy.orm import *
+from sqlalchemy import *
+from sqlalchemy.orm.interfaces import SessionExtension
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import attributes
+
+class Versioned(object):
+ def new_version(self, session):
+ # if on SQLA 0.6.1 or earlier,
+ # make sure 'id' isn't expired.
+ # self.id
+
+ # make us transient (removes persistent
+ # identity).
+ make_transient(self)
+
+ # set 'id' to None.
+ # a new PK will be generated on INSERT.
+ self.id = None
+
+class VersionExtension(SessionExtension):
+ def before_flush(self, session, flush_context, instances):
+ for instance in session.dirty:
+ if not isinstance(instance, Versioned):
+ continue
+ if not session.is_modified(instance, passive=True):
+ continue
+
+ if not attributes.instance_state(instance).has_identity:
+ continue
+
+ # make it transient
+ instance.new_version(session)
+ # re-add
+ session.add(instance)
+
+Base = declarative_base()
+
+engine = create_engine('sqlite://', echo=True)
+
+Session = sessionmaker(engine, extension=[VersionExtension()])
+
+# example 1, simple versioning
+
+class Example(Versioned, Base):
+ __tablename__ = 'example'
+ id = Column(Integer, primary_key=True)
+ data = Column(String)
+
+Base.metadata.create_all(engine)
+
+session = Session()
+e1 = Example(data='e1')
+session.add(e1)
+session.commit()
+
+e1.data = 'e2'
+session.commit()
+
+assert session.query(Example.id, Example.data).order_by(Example.id).all() == \
+ [(1, 'e1'), (2, 'e2')]
+
+# example 2, versioning with a parent
+
+class Parent(Base):
+ __tablename__ = 'parent'
+ id = Column(Integer, primary_key=True)
+ child_id = Column(Integer, ForeignKey('child.id'))
+ child = relationship("Child", backref=backref('parent', uselist=False))
+
+class Child(Versioned, Base):
+ __tablename__ = 'child'
+
+ id = Column(Integer, primary_key=True)
+ data = Column(String)
+
+ def new_version(self, session):
+ # expire parent's reference to us
+ session.expire(self.parent, ['child'])
+
+ # create new version
+ Versioned.new_version(self, session)
+
+ # re-add ourselves to the parent
+ self.parent.child = self
+
+Base.metadata.create_all(engine)
+
+session = Session()
+
+p1 = Parent(child=Child(data='c1'))
+session.add(p1)
+session.commit()
+
+p1.child.data = 'c2'
+session.commit()
+
+assert p1.child_id == 2
+assert session.query(Child.id, Child.data).order_by(Child.id).all() == \
+ [(1, 'c1'), (2, 'c2')] \ No newline at end of file
diff --git a/examples/versioning/_lib.py b/examples/versioning/_lib.py
deleted file mode 100644
index 9132f9b35..000000000
--- a/examples/versioning/_lib.py
+++ /dev/null
@@ -1,96 +0,0 @@
-"""copy of ComparableEntity and eq_() from test.lib.
-
-This is just to support running the example outside of
-the SQLA testing environment which is no longer part of
-SQLAlchemy as of 0.7.
-
-"""
-
-import sqlalchemy as sa
-from sqlalchemy import exc as sa_exc
-
-
-def eq_(a, b, msg=None):
- """Assert a == b, with repr messaging on failure."""
- assert a == b, msg or "%r != %r" % (a, b)
-
-_repr_stack = set()
-class BasicEntity(object):
- def __init__(self, **kw):
- for key, value in kw.items():
- setattr(self, key, value)
-
- def __repr__(self):
- if id(self) in _repr_stack:
- return object.__repr__(self)
- _repr_stack.add(id(self))
- try:
- return "%s(%s)" % (
- (self.__class__.__name__),
- ', '.join(["%s=%r" % (key, getattr(self, key))
- for key in sorted(self.__dict__.keys())
- if not key.startswith('_')]))
- finally:
- _repr_stack.remove(id(self))
-
-_recursion_stack = set()
-class ComparableEntity(BasicEntity):
- def __hash__(self):
- return hash(self.__class__)
-
- def __ne__(self, other):
- return not self.__eq__(other)
-
- def __eq__(self, other):
- """'Deep, sparse compare.
-
- Deeply compare two entities, following the non-None attributes of the
- non-persisted object, if possible.
-
- """
- if other is self:
- return True
- elif not self.__class__ == other.__class__:
- return False
-
- if id(self) in _recursion_stack:
- return True
- _recursion_stack.add(id(self))
-
- try:
- # pick the entity thats not SA persisted as the source
- try:
- self_key = sa.orm.attributes.instance_state(self).key
- except sa.orm.exc.NO_STATE:
- self_key = None
-
- if other is None:
- a = self
- b = other
- elif self_key is not None:
- a = other
- b = self
- else:
- a = self
- b = other
-
- for attr in a.__dict__.keys():
- if attr.startswith('_'):
- continue
- value = getattr(a, attr)
-
- try:
- # handle lazy loader errors
- battr = getattr(b, attr)
- except (AttributeError, sa_exc.UnboundExecutionError):
- return False
-
- if hasattr(value, '__iter__'):
- if list(value) != list(battr):
- return False
- else:
- if value is not None and value != battr:
- return False
- return True
- finally:
- _recursion_stack.remove(id(self))
diff --git a/examples/vertical/__init__.py b/examples/vertical/__init__.py
index 6073da91c..0b69f32ea 100644
--- a/examples/vertical/__init__.py
+++ b/examples/vertical/__init__.py
@@ -29,5 +29,6 @@ Example::
AnimalFact.value == True))))
print 'weasel-like animals', q.all()
+.. autosource::
""" \ No newline at end of file
diff --git a/examples/vertical/dictlike-polymorphic.py b/examples/vertical/dictlike-polymorphic.py
index 872a7c52e..e3d5ba578 100644
--- a/examples/vertical/dictlike-polymorphic.py
+++ b/examples/vertical/dictlike-polymorphic.py
@@ -1,10 +1,7 @@
"""Mapping a polymorphic-valued vertical table as a dictionary.
-This example illustrates accessing and modifying a "vertical" (or
-"properties", or pivoted) table via a dict-like interface. The 'dictlike.py'
-example explains the basics of vertical tables and the general approach. This
-example adds a twist- the vertical table holds several "value" columns, one
-for each type of data that can be stored. For example::
+Builds upon the dictlike.py example to also add differently typed
+columns to the "fact" table, e.g.::
Table('properties', metadata
Column('owner_id', Integer, ForeignKey('owner.id'),
@@ -25,80 +22,43 @@ we'll use a @hybrid_property to build a smart '.value' attribute that wraps up
reading and writing those various '_value' columns and keeps the '.type' up to
date.
-Class decorators are used, so Python 2.6 or greater is required.
"""
from sqlalchemy.orm.interfaces import PropComparator
-from sqlalchemy.orm import comparable_property
from sqlalchemy.ext.hybrid import hybrid_property
-
-# Using the VerticalPropertyDictMixin from the base example
-from .dictlike import VerticalPropertyDictMixin
+from sqlalchemy import event
+from sqlalchemy import literal_column
+from .dictlike import ProxiedDictMixin
class PolymorphicVerticalProperty(object):
"""A key/value pair with polymorphic value storage.
- Supplies a smart 'value' attribute that provides convenient read/write
- access to the row's current value without the caller needing to worry
- about the 'type' attribute or multiple columns.
-
- The 'value' attribute can also be used for basic comparisons in queries,
- allowing the row's logical value to be compared without foreknowledge of
- which column it might be in. This is not going to be a very efficient
- operation on the database side, but it is possible. If you're mapping to
- an existing database and you have some rows with a value of str('1') and
- others of int(1), then this could be useful.
-
- Subclasses must provide a 'type_map' class attribute with the following
- form::
-
- type_map = {
- <python type> : ('type column value', 'column name'),
- # ...
- }
-
- For example,::
-
- type_map = {
- int: ('integer', 'integer_value'),
- str: ('varchar', 'varchar_value'),
- }
+ The class which is mapped should indicate typing information
+ within the "info" dictionary of mapped Column objects; see
+ the AnimalFact mapping below for an example.
- Would indicate that a Python int value should be stored in the
- 'integer_value' column and the .type set to 'integer'. Conversely, if the
- value of '.type' is 'integer, then the 'integer_value' column is consulted
- for the current value.
"""
- type_map = {
- type(None): (None, None),
- }
-
def __init__(self, key, value=None):
self.key = key
self.value = value
@hybrid_property
def value(self):
- for discriminator, field in self.type_map.values():
- if self.type == discriminator:
- return getattr(self, field)
- return None
+ fieldname, discriminator = self.type_map[self.type]
+ if fieldname is None:
+ return None
+ else:
+ return getattr(self, fieldname)
@value.setter
def value(self, value):
py_type = type(value)
- if py_type not in self.type_map:
- raise TypeError(py_type)
-
- for field_type in self.type_map:
- discriminator, field = self.type_map[field_type]
- field_value = None
- if py_type == field_type:
- self.type = discriminator
- field_value = value
- if field is not None:
- setattr(self, field, field_value)
+ fieldname, discriminator = self.type_map[py_type]
+
+ self.type = discriminator
+ if fieldname is not None:
+ setattr(self, fieldname, value)
@value.deleter
def value(self):
@@ -113,9 +73,14 @@ class PolymorphicVerticalProperty(object):
self.cls = cls
def _case(self):
- whens = [(text("'%s'" % p[0]), cast(getattr(self.cls, p[1]), String))
- for p in self.cls.type_map.values()
- if p[1] is not None]
+ pairs = set(self.cls.type_map.values())
+ whens = [
+ (
+ literal_column("'%s'" % discriminator),
+ cast(getattr(self.cls, attribute), String)
+ ) for attribute, discriminator in pairs
+ if attribute is not None
+ ]
return case(whens, self.cls.type, null())
def __eq__(self, other):
return self._case() == cast(other, String)
@@ -125,69 +90,78 @@ class PolymorphicVerticalProperty(object):
def __repr__(self):
return '<%s %r=%r>' % (self.__class__.__name__, self.key, self.value)
+@event.listens_for(PolymorphicVerticalProperty, "mapper_configured", propagate=True)
+def on_new_class(mapper, cls_):
+ """Look for Column objects with type info in them, and work up
+ a lookup table."""
+
+ info_dict = {}
+ info_dict[type(None)] = (None, 'none')
+ info_dict['none'] = (None, 'none')
+
+ for k in mapper.c.keys():
+ col = mapper.c[k]
+ if 'type' in col.info:
+ python_type, discriminator = col.info['type']
+ info_dict[python_type] = (k, discriminator)
+ info_dict[discriminator] = (k, discriminator)
+ cls_.type_map = info_dict
if __name__ == '__main__':
- from sqlalchemy import (MetaData, Table, Column, Integer, Unicode,
- ForeignKey, UnicodeText, and_, not_, or_, String, Boolean, cast, text,
+ from sqlalchemy import (Column, Integer, Unicode,
+ ForeignKey, UnicodeText, and_, or_, String, Boolean, cast,
null, case, create_engine)
- from sqlalchemy.orm import mapper, relationship, Session
+ from sqlalchemy.orm import relationship, Session
from sqlalchemy.orm.collections import attribute_mapped_collection
+ from sqlalchemy.ext.declarative import declarative_base
+ from sqlalchemy.ext.associationproxy import association_proxy
+
+ Base = declarative_base()
- metadata = MetaData()
-
- animals = Table('animal', metadata,
- Column('id', Integer, primary_key=True),
- Column('name', Unicode(100)))
-
- chars = Table('facts', metadata,
- Column('animal_id', Integer, ForeignKey('animal.id'),
- primary_key=True),
- Column('key', Unicode(64), primary_key=True),
- Column('type', Unicode(16), default=None),
- Column('int_value', Integer, default=None),
- Column('char_value', UnicodeText, default=None),
- Column('boolean_value', Boolean, default=None))
-
- class AnimalFact(PolymorphicVerticalProperty):
- type_map = {
- int: ('integer', 'int_value'),
- str: ('char', 'char_value'),
- bool: ('boolean', 'boolean_value'),
- type(None): (None, None),
- }
-
- class Animal(VerticalPropertyDictMixin):
- """An animal.
-
- Animal facts are available via the 'facts' property or by using
- dict-like accessors on an Animal instance::
-
- cat['color'] = 'calico'
- # or, equivalently:
- cat.facts['color'] = AnimalFact('color', 'calico')
- """
- _property_type = AnimalFact
- _property_mapping = 'facts'
+ class AnimalFact(PolymorphicVerticalProperty, Base):
+ """A fact about an animal."""
+
+ __tablename__ = 'animal_fact'
+
+ animal_id = Column(ForeignKey('animal.id'), primary_key=True)
+ key = Column(Unicode(64), primary_key=True)
+ type = Column(Unicode(16))
+
+ # add information about storage for different types
+ # in the info dictionary of Columns
+ int_value = Column(Integer, info={'type': (int, 'integer')})
+ char_value = Column(UnicodeText, info={'type': (str, 'string')})
+ boolean_value = Column(Boolean, info={'type': (bool, 'boolean')})
+
+ class Animal(ProxiedDictMixin._base_class(Base)):
+ """an Animal"""
+
+ __tablename__ = 'animal'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(Unicode(100))
+
+ facts = relationship("AnimalFact",
+ collection_class=attribute_mapped_collection('key'))
+
+ _proxied = association_proxy("facts", "value",
+ creator=
+ lambda key, value: AnimalFact(key=key, value=value))
def __init__(self, name):
self.name = name
def __repr__(self):
- return '<%s %r>' % (self.__class__.__name__, self.name)
-
+ return "Animal(%r)" % self.name
- mapper(Animal, animals, properties={
- 'facts': relationship(
- AnimalFact, backref='animal',
- collection_class=attribute_mapped_collection('key')),
- })
-
- mapper(AnimalFact, chars)
+ @classmethod
+ def with_characteristic(self, key, value):
+ return self.facts.any(key=key, value=value)
engine = create_engine('sqlite://', echo=True)
- metadata.create_all(engine)
+ Base.metadata.create_all(engine)
session = Session(engine)
stoat = Animal('stoat')
@@ -227,30 +201,24 @@ if __name__ == '__main__':
AnimalFact.value == True))))
print('weasel-like animals', q.all())
- # Save some typing by wrapping that up in a function:
- with_characteristic = lambda key, value: and_(AnimalFact.key == key,
- AnimalFact.value == value)
-
q = (session.query(Animal).
- filter(Animal.facts.any(
- with_characteristic('weasel-like', True))))
+ filter(Animal.with_characteristic('weasel-like', True)))
print('weasel-like animals again', q.all())
q = (session.query(Animal).
- filter(Animal.facts.any(with_characteristic('poisonous', False))))
+ filter(Animal.with_characteristic('poisonous', False)))
print('animals with poisonous=False', q.all())
q = (session.query(Animal).
- filter(or_(Animal.facts.any(
- with_characteristic('poisonous', False)),
- not_(Animal.facts.any(AnimalFact.key == 'poisonous')))))
+ filter(or_(
+ Animal.with_characteristic('poisonous', False),
+ ~Animal.facts.any(AnimalFact.key == 'poisonous')
+ )
+ )
+ )
print('non-poisonous animals', q.all())
q = (session.query(Animal).
filter(Animal.facts.any(AnimalFact.value == 5)))
print('any animal with a .value of 5', q.all())
- # Facts can be queried as well.
- q = (session.query(AnimalFact).
- filter(with_characteristic('cuteness', 'very cute')))
- print(q.all())
diff --git a/examples/vertical/dictlike.py b/examples/vertical/dictlike.py
index f17d1acc8..08989d8c2 100644
--- a/examples/vertical/dictlike.py
+++ b/examples/vertical/dictlike.py
@@ -30,150 +30,83 @@ accessing them like a Python dict can be very convenient. The example below
can be used with many common vertical schemas as-is or with minor adaptations.
"""
+from __future__ import unicode_literals
-class VerticalProperty(object):
- """A key/value pair.
-
- This class models rows in the vertical table.
- """
-
- def __init__(self, key, value):
- self.key = key
- self.value = value
-
- def __repr__(self):
- return '<%s %r=%r>' % (self.__class__.__name__, self.key, self.value)
-
-
-class VerticalPropertyDictMixin(object):
+class ProxiedDictMixin(object):
"""Adds obj[key] access to a mapped class.
- This is a mixin class. It can be inherited from directly, or included
- with multiple inheritence.
-
- Classes using this mixin must define two class properties::
-
- _property_type:
- The mapped type of the vertical key/value pair instances. Will be
- invoked with two positional arugments: key, value
-
- _property_mapping:
- A string, the name of the Python attribute holding a dict-based
- relationship of _property_type instances.
-
- Using the VerticalProperty class above as an example,::
-
- class MyObj(VerticalPropertyDictMixin):
- _property_type = VerticalProperty
- _property_mapping = 'props'
-
- mapper(MyObj, sometable, properties={
- 'props': relationship(VerticalProperty,
- collection_class=attribute_mapped_collection('key'))})
-
- Dict-like access to MyObj is proxied through to the 'props' relationship::
-
- myobj['key'] = 'value'
- # ...is shorthand for:
- myobj.props['key'] = VerticalProperty('key', 'value')
-
- myobj['key'] = 'updated value']
- # ...is shorthand for:
- myobj.props['key'].value = 'updated value'
-
- print myobj['key']
- # ...is shorthand for:
- print myobj.props['key'].value
+ This class basically proxies dictionary access to an attribute
+ called ``_proxied``. The class which inherits this class
+ should have an attribute called ``_proxied`` which points to a dictionary.
"""
- _property_type = VerticalProperty
- _property_mapping = None
+ def __len__(self):
+ return len(self._proxied)
- __map = property(lambda self: getattr(self, self._property_mapping))
+ def __iter__(self):
+ return iter(self._proxied)
def __getitem__(self, key):
- return self.__map[key].value
-
- def __setitem__(self, key, value):
- property = self.__map.get(key, None)
- if property is None:
- self.__map[key] = self._property_type(key, value)
- else:
- property.value = value
-
- def __delitem__(self, key):
- del self.__map[key]
+ return self._proxied[key]
def __contains__(self, key):
- return key in self.__map
-
- # Implement other dict methods to taste. Here are some examples:
- def keys(self):
- return self.__map.keys()
+ return key in self._proxied
- def values(self):
- return [prop.value for prop in self.__map.values()]
-
- def items(self):
- return [(key, prop.value) for key, prop in self.__map.items()]
+ def __setitem__(self, key, value):
+ self._proxied[key] = value
- def __iter__(self):
- return iter(self.keys())
+ def __delitem__(self, key):
+ del self._proxied[key]
if __name__ == '__main__':
- from sqlalchemy import (MetaData, Table, Column, Integer, Unicode,
- ForeignKey, UnicodeText, and_, not_, create_engine)
- from sqlalchemy.orm import mapper, relationship, Session
+ from sqlalchemy import (Column, Integer, Unicode,
+ ForeignKey, UnicodeText, and_, create_engine)
+ from sqlalchemy.orm import relationship, Session
from sqlalchemy.orm.collections import attribute_mapped_collection
+ from sqlalchemy.ext.declarative import declarative_base
+ from sqlalchemy.ext.associationproxy import association_proxy
- metadata = MetaData()
+ Base = declarative_base()
- # Here we have named animals, and a collection of facts about them.
- animals = Table('animal', metadata,
- Column('id', Integer, primary_key=True),
- Column('name', Unicode(100)))
+ class AnimalFact(Base):
+ """A fact about an animal."""
- facts = Table('facts', metadata,
- Column('animal_id', Integer, ForeignKey('animal.id'),
- primary_key=True),
- Column('key', Unicode(64), primary_key=True),
- Column('value', UnicodeText, default=None),)
+ __tablename__ = 'animal_fact'
- class AnimalFact(VerticalProperty):
- """A fact about an animal."""
+ animal_id = Column(ForeignKey('animal.id'), primary_key=True)
+ key = Column(Unicode(64), primary_key=True)
+ value = Column(UnicodeText)
- class Animal(VerticalPropertyDictMixin):
- """An animal.
+ class Animal(ProxiedDictMixin, Base):
+ """an Animal"""
- Animal facts are available via the 'facts' property or by using
- dict-like accessors on an Animal instance::
+ __tablename__ = 'animal'
- cat['color'] = 'calico'
- # or, equivalently:
- cat.facts['color'] = AnimalFact('color', 'calico')
- """
+ id = Column(Integer, primary_key=True)
+ name = Column(Unicode(100))
- _property_type = AnimalFact
- _property_mapping = 'facts'
+ facts = relationship("AnimalFact",
+ collection_class=attribute_mapped_collection('key'))
+
+ _proxied = association_proxy("facts", "value",
+ creator=
+ lambda key, value: AnimalFact(key=key, value=value))
def __init__(self, name):
self.name = name
def __repr__(self):
- return '<%s %r>' % (self.__class__.__name__, self.name)
-
+ return "Animal(%r)" % self.name
- mapper(Animal, animals, properties={
- 'facts': relationship(
- AnimalFact, backref='animal',
- collection_class=attribute_mapped_collection('key')),
- })
- mapper(AnimalFact, facts)
+ @classmethod
+ def with_characteristic(self, key, value):
+ return self.facts.any(key=key, value=value)
engine = create_engine("sqlite://")
- metadata.create_all(engine)
+ Base.metadata.create_all(engine)
+
session = Session(bind=engine)
stoat = Animal('stoat')
@@ -194,9 +127,6 @@ if __name__ == '__main__':
critter['cuteness'] = 'very'
print('changing cuteness:')
- engine.echo = True
- session.commit()
- engine.echo = False
marten = Animal('marten')
marten['color'] = 'brown'
@@ -212,7 +142,6 @@ if __name__ == '__main__':
loris['cuteness'] = 'fairly'
loris['poisonous-part'] = 'elbows'
session.add(loris)
- session.commit()
q = (session.query(Animal).
filter(Animal.facts.any(
@@ -220,27 +149,17 @@ if __name__ == '__main__':
AnimalFact.value == 'reddish'))))
print('reddish animals', q.all())
- # Save some typing by wrapping that up in a function:
- with_characteristic = lambda key, value: and_(AnimalFact.key == key,
- AnimalFact.value == value)
-
- q = (session.query(Animal).
- filter(Animal.facts.any(
- with_characteristic('color', 'brown'))))
+ q = session.query(Animal).\
+ filter(Animal.with_characteristic("color", 'brown'))
print('brown animals', q.all())
- q = (session.query(Animal).
- filter(not_(Animal.facts.any(
- with_characteristic('poisonous-part', 'elbows')))))
+ q = session.query(Animal).\
+ filter(~Animal.with_characteristic("poisonous-part", 'elbows'))
print('animals without poisonous-part == elbows', q.all())
q = (session.query(Animal).
- filter(Animal.facts.any(AnimalFact.value == 'somewhat')))
+ filter(Animal.facts.any(value='somewhat')))
print('any animal with any .value of "somewhat"', q.all())
- # Facts can be queried as well.
- q = (session.query(AnimalFact).
- filter(with_characteristic('cuteness', 'very')))
- print('just the facts', q.all())
diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py
index 2c805e607..1d1f8c4aa 100644
--- a/lib/sqlalchemy/__init__.py
+++ b/lib/sqlalchemy/__init__.py
@@ -1,11 +1,9 @@
# sqlalchemy/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-import inspect as _inspect
-import sys
from .sql import (
alias,
@@ -23,6 +21,7 @@ from .sql import (
except_all,
exists,
extract,
+ false,
func,
insert,
intersect,
@@ -40,6 +39,7 @@ from .sql import (
select,
subquery,
text,
+ true,
tuple_,
type_coerce,
union,
@@ -97,7 +97,6 @@ from .schema import (
Column,
ColumnDefault,
Constraint,
- DDL,
DefaultClause,
FetchedValue,
ForeignKey,
@@ -110,19 +109,25 @@ from .schema import (
Table,
ThreadLocalMetaData,
UniqueConstraint,
- )
+ DDL,
+)
-from .inspection import inspect
+from .inspection import inspect
from .engine import create_engine, engine_from_config
+__version__ = '0.9.2'
-__all__ = sorted(name for name, obj in locals().items()
- if not (name.startswith('_') or _inspect.ismodule(obj)))
+def __go(lcls):
+ global __all__
-__version__ = '0.9.0'
+ from . import events
+ from . import util as _sa_util
-del _inspect, sys
+ import inspect as _inspect
+
+ __all__ = sorted(name for name, obj in lcls.items()
+ if not (name.startswith('_') or _inspect.ismodule(obj)))
-from . import util as _sa_util
-_sa_util.importlater.resolve_all()
+ _sa_util.dependencies.resolve_all("sqlalchemy")
+__go(locals()) \ No newline at end of file
diff --git a/lib/sqlalchemy/cextension/processors.c b/lib/sqlalchemy/cextension/processors.c
index 4e82ffc6b..d56817763 100644
--- a/lib/sqlalchemy/cextension/processors.c
+++ b/lib/sqlalchemy/cextension/processors.c
@@ -1,7 +1,7 @@
/*
processors.c
-Copyright (C) 2010-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
-Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com
+Copyright (C) 2010-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+Copyright (C) 2010-2011 Gaetan de Menten gdementen@gmail.com
This module is part of SQLAlchemy and is released under
the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -10,13 +10,15 @@ the MIT License: http://www.opensource.org/licenses/mit-license.php
#include <Python.h>
#include <datetime.h>
+#define MODULE_NAME "cprocessors"
+#define MODULE_DOC "Module containing C versions of data processing functions."
+
#if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN)
typedef int Py_ssize_t;
#define PY_SSIZE_T_MAX INT_MAX
#define PY_SSIZE_T_MIN INT_MIN
#endif
-
static PyObject *
int_to_boolean(PyObject *self, PyObject *arg)
{
@@ -26,7 +28,12 @@ int_to_boolean(PyObject *self, PyObject *arg)
if (arg == Py_None)
Py_RETURN_NONE;
+
+#if PY_MAJOR_VERSION >= 3
+ l = PyLong_AsLong(arg);
+#else
l = PyInt_AsLong(arg);
+#endif
if (l == 0) {
res = Py_False;
} else if (l == 1) {
@@ -65,23 +72,48 @@ to_float(PyObject *self, PyObject *arg)
static PyObject *
str_to_datetime(PyObject *self, PyObject *arg)
{
+#if PY_MAJOR_VERSION >= 3
+ PyObject *bytes;
+ PyObject *err_bytes;
+#endif
const char *str;
+ int numparsed;
unsigned int year, month, day, hour, minute, second, microsecond = 0;
PyObject *err_repr;
if (arg == Py_None)
Py_RETURN_NONE;
+#if PY_MAJOR_VERSION >= 3
+ bytes = PyUnicode_AsASCIIString(arg);
+ if (bytes == NULL)
+ str = NULL;
+ else
+ str = PyBytes_AS_STRING(bytes);
+#else
str = PyString_AsString(arg);
+#endif
if (str == NULL) {
err_repr = PyObject_Repr(arg);
if (err_repr == NULL)
return NULL;
+#if PY_MAJOR_VERSION >= 3
+ err_bytes = PyUnicode_AsASCIIString(err_repr);
+ if (err_bytes == NULL)
+ return NULL;
+ PyErr_Format(
+ PyExc_ValueError,
+ "Couldn't parse datetime string '%.200s' "
+ "- value is not a string.",
+ PyBytes_AS_STRING(err_bytes));
+ Py_DECREF(err_bytes);
+#else
PyErr_Format(
PyExc_ValueError,
"Couldn't parse datetime string '%.200s' "
"- value is not a string.",
PyString_AsString(err_repr));
+#endif
Py_DECREF(err_repr);
return NULL;
}
@@ -92,15 +124,30 @@ str_to_datetime(PyObject *self, PyObject *arg)
not accept "2000-01-01 00:00:00.". I don't know which is better, but they
should be coherent.
*/
- if (sscanf(str, "%4u-%2u-%2u %2u:%2u:%2u.%6u", &year, &month, &day,
- &hour, &minute, &second, &microsecond) < 6) {
+ numparsed = sscanf(str, "%4u-%2u-%2u %2u:%2u:%2u.%6u", &year, &month, &day,
+ &hour, &minute, &second, &microsecond);
+#if PY_MAJOR_VERSION >= 3
+ Py_DECREF(bytes);
+#endif
+ if (numparsed < 6) {
err_repr = PyObject_Repr(arg);
if (err_repr == NULL)
return NULL;
+#if PY_MAJOR_VERSION >= 3
+ err_bytes = PyUnicode_AsASCIIString(err_repr);
+ if (err_bytes == NULL)
+ return NULL;
+ PyErr_Format(
+ PyExc_ValueError,
+ "Couldn't parse datetime string: %.200s",
+ PyBytes_AS_STRING(err_bytes));
+ Py_DECREF(err_bytes);
+#else
PyErr_Format(
PyExc_ValueError,
"Couldn't parse datetime string: %.200s",
PyString_AsString(err_repr));
+#endif
Py_DECREF(err_repr);
return NULL;
}
@@ -111,22 +158,47 @@ str_to_datetime(PyObject *self, PyObject *arg)
static PyObject *
str_to_time(PyObject *self, PyObject *arg)
{
+#if PY_MAJOR_VERSION >= 3
+ PyObject *bytes;
+ PyObject *err_bytes;
+#endif
const char *str;
+ int numparsed;
unsigned int hour, minute, second, microsecond = 0;
PyObject *err_repr;
if (arg == Py_None)
Py_RETURN_NONE;
+#if PY_MAJOR_VERSION >= 3
+ bytes = PyUnicode_AsASCIIString(arg);
+ if (bytes == NULL)
+ str = NULL;
+ else
+ str = PyBytes_AS_STRING(bytes);
+#else
str = PyString_AsString(arg);
+#endif
if (str == NULL) {
err_repr = PyObject_Repr(arg);
if (err_repr == NULL)
return NULL;
+
+#if PY_MAJOR_VERSION >= 3
+ err_bytes = PyUnicode_AsASCIIString(err_repr);
+ if (err_bytes == NULL)
+ return NULL;
+ PyErr_Format(
+ PyExc_ValueError,
+ "Couldn't parse time string '%.200s' - value is not a string.",
+ PyBytes_AS_STRING(err_bytes));
+ Py_DECREF(err_bytes);
+#else
PyErr_Format(
PyExc_ValueError,
"Couldn't parse time string '%.200s' - value is not a string.",
PyString_AsString(err_repr));
+#endif
Py_DECREF(err_repr);
return NULL;
}
@@ -137,15 +209,30 @@ str_to_time(PyObject *self, PyObject *arg)
not accept "00:00:00.". I don't know which is better, but they should be
coherent.
*/
- if (sscanf(str, "%2u:%2u:%2u.%6u", &hour, &minute, &second,
- &microsecond) < 3) {
+ numparsed = sscanf(str, "%2u:%2u:%2u.%6u", &hour, &minute, &second,
+ &microsecond);
+#if PY_MAJOR_VERSION >= 3
+ Py_DECREF(bytes);
+#endif
+ if (numparsed < 3) {
err_repr = PyObject_Repr(arg);
if (err_repr == NULL)
return NULL;
+#if PY_MAJOR_VERSION >= 3
+ err_bytes = PyUnicode_AsASCIIString(err_repr);
+ if (err_bytes == NULL)
+ return NULL;
+ PyErr_Format(
+ PyExc_ValueError,
+ "Couldn't parse time string: %.200s",
+ PyBytes_AS_STRING(err_bytes));
+ Py_DECREF(err_bytes);
+#else
PyErr_Format(
PyExc_ValueError,
"Couldn't parse time string: %.200s",
PyString_AsString(err_repr));
+#endif
Py_DECREF(err_repr);
return NULL;
}
@@ -155,34 +242,73 @@ str_to_time(PyObject *self, PyObject *arg)
static PyObject *
str_to_date(PyObject *self, PyObject *arg)
{
+#if PY_MAJOR_VERSION >= 3
+ PyObject *bytes;
+ PyObject *err_bytes;
+#endif
const char *str;
+ int numparsed;
unsigned int year, month, day;
PyObject *err_repr;
if (arg == Py_None)
Py_RETURN_NONE;
+#if PY_MAJOR_VERSION >= 3
+ bytes = PyUnicode_AsASCIIString(arg);
+ if (bytes == NULL)
+ str = NULL;
+ else
+ str = PyBytes_AS_STRING(bytes);
+#else
str = PyString_AsString(arg);
+#endif
if (str == NULL) {
err_repr = PyObject_Repr(arg);
if (err_repr == NULL)
return NULL;
+#if PY_MAJOR_VERSION >= 3
+ err_bytes = PyUnicode_AsASCIIString(err_repr);
+ if (err_bytes == NULL)
+ return NULL;
+ PyErr_Format(
+ PyExc_ValueError,
+ "Couldn't parse date string '%.200s' - value is not a string.",
+ PyBytes_AS_STRING(err_bytes));
+ Py_DECREF(err_bytes);
+#else
PyErr_Format(
PyExc_ValueError,
"Couldn't parse date string '%.200s' - value is not a string.",
PyString_AsString(err_repr));
+#endif
Py_DECREF(err_repr);
return NULL;
}
- if (sscanf(str, "%4u-%2u-%2u", &year, &month, &day) != 3) {
+ numparsed = sscanf(str, "%4u-%2u-%2u", &year, &month, &day);
+#if PY_MAJOR_VERSION >= 3
+ Py_DECREF(bytes);
+#endif
+ if (numparsed != 3) {
err_repr = PyObject_Repr(arg);
if (err_repr == NULL)
return NULL;
+#if PY_MAJOR_VERSION >= 3
+ err_bytes = PyUnicode_AsASCIIString(err_repr);
+ if (err_bytes == NULL)
+ return NULL;
+ PyErr_Format(
+ PyExc_ValueError,
+ "Couldn't parse date string: %.200s",
+ PyBytes_AS_STRING(err_bytes));
+ Py_DECREF(err_bytes);
+#else
PyErr_Format(
PyExc_ValueError,
"Couldn't parse date string: %.200s",
PyString_AsString(err_repr));
+#endif
Py_DECREF(err_repr);
return NULL;
}
@@ -219,17 +345,35 @@ UnicodeResultProcessor_init(UnicodeResultProcessor *self, PyObject *args,
PyObject *encoding, *errors = NULL;
static char *kwlist[] = {"encoding", "errors", NULL};
+#if PY_MAJOR_VERSION >= 3
+ if (!PyArg_ParseTupleAndKeywords(args, kwds, "U|U:__init__", kwlist,
+ &encoding, &errors))
+ return -1;
+#else
if (!PyArg_ParseTupleAndKeywords(args, kwds, "S|S:__init__", kwlist,
&encoding, &errors))
return -1;
+#endif
+#if PY_MAJOR_VERSION >= 3
+ encoding = PyUnicode_AsASCIIString(encoding);
+#else
Py_INCREF(encoding);
+#endif
self->encoding = encoding;
if (errors) {
+#if PY_MAJOR_VERSION >= 3
+ errors = PyUnicode_AsASCIIString(errors);
+#else
Py_INCREF(errors);
+#endif
} else {
+#if PY_MAJOR_VERSION >= 3
+ errors = PyBytes_FromString("strict");
+#else
errors = PyString_FromString("strict");
+#endif
if (errors == NULL)
return -1;
}
@@ -248,11 +392,58 @@ UnicodeResultProcessor_process(UnicodeResultProcessor *self, PyObject *value)
if (value == Py_None)
Py_RETURN_NONE;
+#if PY_MAJOR_VERSION >= 3
+ if (PyBytes_AsStringAndSize(value, &str, &len))
+ return NULL;
+
+ encoding = PyBytes_AS_STRING(self->encoding);
+ errors = PyBytes_AS_STRING(self->errors);
+#else
+ if (PyString_AsStringAndSize(value, &str, &len))
+ return NULL;
+
+ encoding = PyString_AS_STRING(self->encoding);
+ errors = PyString_AS_STRING(self->errors);
+#endif
+
+ return PyUnicode_Decode(str, len, encoding, errors);
+}
+
+static PyObject *
+UnicodeResultProcessor_conditional_process(UnicodeResultProcessor *self, PyObject *value)
+{
+ const char *encoding, *errors;
+ char *str;
+ Py_ssize_t len;
+
+ if (value == Py_None)
+ Py_RETURN_NONE;
+
+#if PY_MAJOR_VERSION >= 3
+ if (PyUnicode_Check(value) == 1) {
+ Py_INCREF(value);
+ return value;
+ }
+
+ if (PyBytes_AsStringAndSize(value, &str, &len))
+ return NULL;
+
+ encoding = PyBytes_AS_STRING(self->encoding);
+ errors = PyBytes_AS_STRING(self->errors);
+#else
+
+ if (PyUnicode_Check(value) == 1) {
+ Py_INCREF(value);
+ return value;
+ }
+
if (PyString_AsStringAndSize(value, &str, &len))
return NULL;
+
encoding = PyString_AS_STRING(self->encoding);
errors = PyString_AS_STRING(self->errors);
+#endif
return PyUnicode_Decode(str, len, encoding, errors);
}
@@ -262,18 +453,23 @@ UnicodeResultProcessor_dealloc(UnicodeResultProcessor *self)
{
Py_XDECREF(self->encoding);
Py_XDECREF(self->errors);
+#if PY_MAJOR_VERSION >= 3
+ Py_TYPE(self)->tp_free((PyObject*)self);
+#else
self->ob_type->tp_free((PyObject*)self);
+#endif
}
static PyMethodDef UnicodeResultProcessor_methods[] = {
{"process", (PyCFunction)UnicodeResultProcessor_process, METH_O,
"The value processor itself."},
+ {"conditional_process", (PyCFunction)UnicodeResultProcessor_conditional_process, METH_O,
+ "Conditional version of the value processor."},
{NULL} /* Sentinel */
};
static PyTypeObject UnicodeResultProcessorType = {
- PyObject_HEAD_INIT(NULL)
- 0, /* ob_size */
+ PyVarObject_HEAD_INIT(NULL, 0)
"sqlalchemy.cprocessors.UnicodeResultProcessor", /* tp_name */
sizeof(UnicodeResultProcessor), /* tp_basicsize */
0, /* tp_itemsize */
@@ -323,7 +519,11 @@ DecimalResultProcessor_init(DecimalResultProcessor *self, PyObject *args,
{
PyObject *type, *format;
+#if PY_MAJOR_VERSION >= 3
+ if (!PyArg_ParseTuple(args, "OU", &type, &format))
+#else
if (!PyArg_ParseTuple(args, "OS", &type, &format))
+#endif
return -1;
Py_INCREF(type);
@@ -343,11 +543,21 @@ DecimalResultProcessor_process(DecimalResultProcessor *self, PyObject *value)
if (value == Py_None)
Py_RETURN_NONE;
+ /* Decimal does not accept float values directly */
+ /* SQLite can also give us an integer here (see [ticket:2432]) */
+ /* XXX: starting with Python 3.1, we could use Decimal.from_float(f),
+ but the result wouldn't be the same */
+
args = PyTuple_Pack(1, value);
if (args == NULL)
return NULL;
+#if PY_MAJOR_VERSION >= 3
+ str = PyUnicode_Format(self->format, args);
+#else
str = PyString_Format(self->format, args);
+#endif
+
Py_DECREF(args);
if (str == NULL)
return NULL;
@@ -362,7 +572,11 @@ DecimalResultProcessor_dealloc(DecimalResultProcessor *self)
{
Py_XDECREF(self->type);
Py_XDECREF(self->format);
+#if PY_MAJOR_VERSION >= 3
+ Py_TYPE(self)->tp_free((PyObject*)self);
+#else
self->ob_type->tp_free((PyObject*)self);
+#endif
}
static PyMethodDef DecimalResultProcessor_methods[] = {
@@ -372,8 +586,7 @@ static PyMethodDef DecimalResultProcessor_methods[] = {
};
static PyTypeObject DecimalResultProcessorType = {
- PyObject_HEAD_INIT(NULL)
- 0, /* ob_size */
+ PyVarObject_HEAD_INIT(NULL, 0)
"sqlalchemy.DecimalResultProcessor", /* tp_name */
sizeof(DecimalResultProcessor), /* tp_basicsize */
0, /* tp_itemsize */
@@ -413,11 +626,6 @@ static PyTypeObject DecimalResultProcessorType = {
0, /* tp_new */
};
-#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */
-#define PyMODINIT_FUNC void
-#endif
-
-
static PyMethodDef module_methods[] = {
{"int_to_boolean", int_to_boolean, METH_O,
"Convert an integer to a boolean."},
@@ -434,23 +642,53 @@ static PyMethodDef module_methods[] = {
{NULL, NULL, 0, NULL} /* Sentinel */
};
+#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */
+#define PyMODINIT_FUNC void
+#endif
+
+
+#if PY_MAJOR_VERSION >= 3
+
+static struct PyModuleDef module_def = {
+ PyModuleDef_HEAD_INIT,
+ MODULE_NAME,
+ MODULE_DOC,
+ -1,
+ module_methods
+};
+
+#define INITERROR return NULL
+
+PyMODINIT_FUNC
+PyInit_cprocessors(void)
+
+#else
+
+#define INITERROR return
+
PyMODINIT_FUNC
initcprocessors(void)
+
+#endif
+
{
PyObject *m;
UnicodeResultProcessorType.tp_new = PyType_GenericNew;
if (PyType_Ready(&UnicodeResultProcessorType) < 0)
- return;
+ INITERROR;
DecimalResultProcessorType.tp_new = PyType_GenericNew;
if (PyType_Ready(&DecimalResultProcessorType) < 0)
- return;
+ INITERROR;
- m = Py_InitModule3("cprocessors", module_methods,
- "Module containing C versions of data processing functions.");
+#if PY_MAJOR_VERSION >= 3
+ m = PyModule_Create(&module_def);
+#else
+ m = Py_InitModule3(MODULE_NAME, module_methods, MODULE_DOC);
+#endif
if (m == NULL)
- return;
+ INITERROR;
PyDateTime_IMPORT;
@@ -461,5 +699,8 @@ initcprocessors(void)
Py_INCREF(&DecimalResultProcessorType);
PyModule_AddObject(m, "DecimalResultProcessor",
(PyObject *)&DecimalResultProcessorType);
-}
+#if PY_MAJOR_VERSION >= 3
+ return m;
+#endif
+}
diff --git a/lib/sqlalchemy/cextension/resultproxy.c b/lib/sqlalchemy/cextension/resultproxy.c
index b70f9c271..218c7b807 100644
--- a/lib/sqlalchemy/cextension/resultproxy.c
+++ b/lib/sqlalchemy/cextension/resultproxy.c
@@ -1,7 +1,7 @@
/*
resultproxy.c
-Copyright (C) 2010-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
-Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com
+Copyright (C) 2010-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+Copyright (C) 2010-2011 Gaetan de Menten gdementen@gmail.com
This module is part of SQLAlchemy and is released under
the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -9,6 +9,9 @@ the MIT License: http://www.opensource.org/licenses/mit-license.php
#include <Python.h>
+#define MODULE_NAME "cresultproxy"
+#define MODULE_DOC "Module containing C versions of core ResultProxy classes."
+
#if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN)
typedef int Py_ssize_t;
#define PY_SSIZE_T_MAX INT_MAX
@@ -150,7 +153,11 @@ BaseRowProxy_dealloc(BaseRowProxy *self)
Py_XDECREF(self->row);
Py_XDECREF(self->processors);
Py_XDECREF(self->keymap);
+#if PY_MAJOR_VERSION >= 3
+ Py_TYPE(self)->tp_free((PyObject *)self);
+#else
self->ob_type->tp_free((PyObject *)self);
+#endif
}
static PyObject *
@@ -245,14 +252,21 @@ BaseRowProxy_subscript(BaseRowProxy *self, PyObject *key)
PyObject *processor, *value, *processed_value;
PyObject *row, *record, *result, *indexobject;
PyObject *exc_module, *exception, *cstr_obj;
+#if PY_MAJOR_VERSION >= 3
+ PyObject *bytes;
+#endif
char *cstr_key;
long index;
int key_fallback = 0;
int tuple_check = 0;
+#if PY_MAJOR_VERSION < 3
if (PyInt_CheckExact(key)) {
index = PyInt_AS_LONG(key);
- } else if (PyLong_CheckExact(key)) {
+ }
+#endif
+
+ if (PyLong_CheckExact(key)) {
index = PyLong_AsLong(key);
if ((index == -1) && PyErr_Occurred())
/* -1 can be either the actual value, or an error flag. */
@@ -305,7 +319,21 @@ BaseRowProxy_subscript(BaseRowProxy *self, PyObject *key)
cstr_obj = PyObject_Str(key);
if (cstr_obj == NULL)
return NULL;
+
+/*
+ FIXME: raise encoding error exception (in both versions below)
+ if the key contains non-ascii chars, instead of an
+ InvalidRequestError without any message like in the
+ python version.
+*/
+#if PY_MAJOR_VERSION >= 3
+ bytes = PyUnicode_AsASCIIString(cstr_obj);
+ if (bytes == NULL)
+ return NULL;
+ cstr_key = PyBytes_AS_STRING(bytes);
+#else
cstr_key = PyString_AsString(cstr_obj);
+#endif
if (cstr_key == NULL) {
Py_DECREF(cstr_obj);
return NULL;
@@ -318,7 +346,11 @@ BaseRowProxy_subscript(BaseRowProxy *self, PyObject *key)
return NULL;
}
+#if PY_MAJOR_VERSION >= 3
+ index = PyLong_AsLong(indexobject);
+#else
index = PyInt_AsLong(indexobject);
+#endif
if ((index == -1) && PyErr_Occurred())
/* -1 can be either the actual value, or an error flag. */
return NULL;
@@ -357,13 +389,23 @@ BaseRowProxy_subscript(BaseRowProxy *self, PyObject *key)
static PyObject *
BaseRowProxy_getitem(PyObject *self, Py_ssize_t i)
{
- return BaseRowProxy_subscript((BaseRowProxy*)self, PyInt_FromSsize_t(i));
+ PyObject *index;
+
+#if PY_MAJOR_VERSION >= 3
+ index = PyLong_FromSsize_t(i);
+#else
+ index = PyInt_FromSsize_t(i);
+#endif
+ return BaseRowProxy_subscript((BaseRowProxy*)self, index);
}
static PyObject *
BaseRowProxy_getattro(BaseRowProxy *self, PyObject *name)
{
PyObject *tmp;
+#if PY_MAJOR_VERSION >= 3
+ PyObject *err_bytes;
+#endif
if (!(tmp = PyObject_GenericGetAttr((PyObject *)self, name))) {
if (!PyErr_ExceptionMatches(PyExc_AttributeError))
@@ -375,11 +417,23 @@ BaseRowProxy_getattro(BaseRowProxy *self, PyObject *name)
tmp = BaseRowProxy_subscript(self, name);
if (tmp == NULL && PyErr_ExceptionMatches(PyExc_KeyError)) {
+
+#if PY_MAJOR_VERSION >= 3
+ err_bytes = PyUnicode_AsASCIIString(name);
+ if (err_bytes == NULL)
+ return NULL;
+ PyErr_Format(
+ PyExc_AttributeError,
+ "Could not locate column in row for column '%.200s'",
+ PyBytes_AS_STRING(err_bytes)
+ );
+#else
PyErr_Format(
PyExc_AttributeError,
"Could not locate column in row for column '%.200s'",
PyString_AsString(name)
);
+#endif
return NULL;
}
return tmp;
@@ -565,8 +619,7 @@ static PyMappingMethods BaseRowProxy_as_mapping = {
};
static PyTypeObject BaseRowProxyType = {
- PyObject_HEAD_INIT(NULL)
- 0, /* ob_size */
+ PyVarObject_HEAD_INIT(NULL, 0)
"sqlalchemy.cresultproxy.BaseRowProxy", /* tp_name */
sizeof(BaseRowProxy), /* tp_basicsize */
0, /* tp_itemsize */
@@ -606,34 +659,60 @@ static PyTypeObject BaseRowProxyType = {
0 /* tp_new */
};
+static PyMethodDef module_methods[] = {
+ {"safe_rowproxy_reconstructor", safe_rowproxy_reconstructor, METH_VARARGS,
+ "reconstruct a RowProxy instance from its pickled form."},
+ {NULL, NULL, 0, NULL} /* Sentinel */
+};
#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */
#define PyMODINIT_FUNC void
#endif
-static PyMethodDef module_methods[] = {
- {"safe_rowproxy_reconstructor", safe_rowproxy_reconstructor, METH_VARARGS,
- "reconstruct a RowProxy instance from its pickled form."},
- {NULL, NULL, 0, NULL} /* Sentinel */
+#if PY_MAJOR_VERSION >= 3
+
+static struct PyModuleDef module_def = {
+ PyModuleDef_HEAD_INIT,
+ MODULE_NAME,
+ MODULE_DOC,
+ -1,
+ module_methods
};
+#define INITERROR return NULL
+
+PyMODINIT_FUNC
+PyInit_cresultproxy(void)
+
+#else
+
+#define INITERROR return
+
PyMODINIT_FUNC
initcresultproxy(void)
+
+#endif
+
{
PyObject *m;
BaseRowProxyType.tp_new = PyType_GenericNew;
if (PyType_Ready(&BaseRowProxyType) < 0)
- return;
+ INITERROR;
- m = Py_InitModule3("cresultproxy", module_methods,
- "Module containing C versions of core ResultProxy classes.");
+#if PY_MAJOR_VERSION >= 3
+ m = PyModule_Create(&module_def);
+#else
+ m = Py_InitModule3(MODULE_NAME, module_methods, MODULE_DOC);
+#endif
if (m == NULL)
- return;
+ INITERROR;
Py_INCREF(&BaseRowProxyType);
PyModule_AddObject(m, "BaseRowProxy", (PyObject *)&BaseRowProxyType);
+#if PY_MAJOR_VERSION >= 3
+ return m;
+#endif
}
-
diff --git a/lib/sqlalchemy/cextension/utils.c b/lib/sqlalchemy/cextension/utils.c
index 5928c4103..377ba8a8d 100644
--- a/lib/sqlalchemy/cextension/utils.c
+++ b/lib/sqlalchemy/cextension/utils.c
@@ -1,6 +1,6 @@
/*
utils.c
-Copyright (C) 2012-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+Copyright (C) 2012-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
This module is part of SQLAlchemy and is released under
the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -8,6 +8,9 @@ the MIT License: http://www.opensource.org/licenses/mit-license.php
#include <Python.h>
+#define MODULE_NAME "cutils"
+#define MODULE_DOC "Module containing C versions of utility functions."
+
/*
Given arguments from the calling form *multiparams, **params,
return a list of bind parameter structures, usually a list of
@@ -172,26 +175,51 @@ distill_params(PyObject *self, PyObject *args)
}
}
-#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */
-#define PyMODINIT_FUNC void
-#endif
-
-
static PyMethodDef module_methods[] = {
{"_distill_params", distill_params, METH_VARARGS,
"Distill an execute() parameter structure."},
{NULL, NULL, 0, NULL} /* Sentinel */
};
+#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */
+#define PyMODINIT_FUNC void
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+
+static struct PyModuleDef module_def = {
+ PyModuleDef_HEAD_INIT,
+ MODULE_NAME,
+ MODULE_DOC,
+ -1,
+ module_methods
+ };
+#endif
+
+
+#if PY_MAJOR_VERSION >= 3
+PyMODINIT_FUNC
+PyInit_cutils(void)
+#else
PyMODINIT_FUNC
initcutils(void)
+#endif
{
PyObject *m;
- m = Py_InitModule3("cutils", module_methods,
- "Internal utility functions.");
- if (m == NULL)
- return;
+#if PY_MAJOR_VERSION >= 3
+ m = PyModule_Create(&module_def);
+#else
+ m = Py_InitModule3(MODULE_NAME, module_methods, MODULE_DOC);
+#endif
+#if PY_MAJOR_VERSION >= 3
+ if (m == NULL)
+ return NULL;
+ return m;
+#else
+ if (m == NULL)
+ return;
+#endif
}
diff --git a/lib/sqlalchemy/connectors/__init__.py b/lib/sqlalchemy/connectors/__init__.py
index 26156a403..761024fe7 100644
--- a/lib/sqlalchemy/connectors/__init__.py
+++ b/lib/sqlalchemy/connectors/__init__.py
@@ -1,5 +1,5 @@
# connectors/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/connectors/mxodbc.py b/lib/sqlalchemy/connectors/mxodbc.py
index ebdcd2758..e5562a25e 100644
--- a/lib/sqlalchemy/connectors/mxodbc.py
+++ b/lib/sqlalchemy/connectors/mxodbc.py
@@ -1,5 +1,5 @@
# connectors/mxodbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/connectors/mysqldb.py b/lib/sqlalchemy/connectors/mysqldb.py
index 5f4b3e4d3..33e59218b 100644
--- a/lib/sqlalchemy/connectors/mysqldb.py
+++ b/lib/sqlalchemy/connectors/mysqldb.py
@@ -1,3 +1,9 @@
+# connectors/mysqldb.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
"""Define behaviors common to MySQLdb dialects.
Currently includes MySQL and Drizzle.
@@ -56,6 +62,7 @@ class MySQLDBConnector(Connector):
# is overridden when pymysql is used
return __import__('MySQLdb')
+
def do_executemany(self, cursor, statement, parameters, context=None):
rowcount = cursor.executemany(statement, parameters)
if context is not None:
diff --git a/lib/sqlalchemy/connectors/pyodbc.py b/lib/sqlalchemy/connectors/pyodbc.py
index 6b4e3036d..284de288a 100644
--- a/lib/sqlalchemy/connectors/pyodbc.py
+++ b/lib/sqlalchemy/connectors/pyodbc.py
@@ -1,5 +1,5 @@
# connectors/pyodbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/connectors/zxJDBC.py b/lib/sqlalchemy/connectors/zxJDBC.py
index 593449151..e0bbc5734 100644
--- a/lib/sqlalchemy/connectors/zxJDBC.py
+++ b/lib/sqlalchemy/connectors/zxJDBC.py
@@ -1,5 +1,5 @@
# connectors/zxJDBC.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/databases/__init__.py b/lib/sqlalchemy/databases/__init__.py
index e6821b009..915eefa4a 100644
--- a/lib/sqlalchemy/databases/__init__.py
+++ b/lib/sqlalchemy/databases/__init__.py
@@ -1,5 +1,5 @@
# databases/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -15,7 +15,6 @@ from ..dialects.mysql import base as mysql
from ..dialects.drizzle import base as drizzle
from ..dialects.oracle import base as oracle
from ..dialects.firebird import base as firebird
-from ..dialects.informix import base as informix
from ..dialects.mssql import base as mssql
from ..dialects.sybase import base as sybase
@@ -23,7 +22,6 @@ from ..dialects.sybase import base as sybase
__all__ = (
'drizzle',
'firebird',
- 'informix',
'mssql',
'mysql',
'postgresql',
diff --git a/lib/sqlalchemy/dialects/__init__.py b/lib/sqlalchemy/dialects/__init__.py
index 7f5d34707..974d4f787 100644
--- a/lib/sqlalchemy/dialects/__init__.py
+++ b/lib/sqlalchemy/dialects/__init__.py
@@ -1,5 +1,5 @@
# dialects/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -7,7 +7,6 @@
__all__ = (
'drizzle',
'firebird',
-# 'informix',
'mssql',
'mysql',
'oracle',
diff --git a/lib/sqlalchemy/dialects/drizzle/base.py b/lib/sqlalchemy/dialects/drizzle/base.py
index efad13549..b5addb422 100644
--- a/lib/sqlalchemy/dialects/drizzle/base.py
+++ b/lib/sqlalchemy/dialects/drizzle/base.py
@@ -1,5 +1,5 @@
# drizzle/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
# Copyright (C) 2010-2011 Monty Taylor <mordred@inaugust.com>
#
# This module is part of SQLAlchemy and is released under
@@ -417,6 +417,7 @@ class DrizzleIdentifierPreparer(mysql_dialect.MySQLIdentifierPreparer):
pass
+@log.class_logger
class DrizzleDialect(mysql_dialect.MySQLDialect):
"""Details of the Drizzle dialect.
@@ -495,4 +496,3 @@ class DrizzleDialect(mysql_dialect.MySQLDialect):
self._backslash_escapes = False
-log.class_logger(DrizzleDialect)
diff --git a/lib/sqlalchemy/dialects/firebird/__init__.py b/lib/sqlalchemy/dialects/firebird/__init__.py
index e57457a39..094ac3e83 100644
--- a/lib/sqlalchemy/dialects/firebird/__init__.py
+++ b/lib/sqlalchemy/dialects/firebird/__init__.py
@@ -1,5 +1,5 @@
# firebird/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/firebird/base.py b/lib/sqlalchemy/dialects/firebird/base.py
index dcaa68f4e..21db57b68 100644
--- a/lib/sqlalchemy/dialects/firebird/base.py
+++ b/lib/sqlalchemy/dialects/firebird/base.py
@@ -1,5 +1,5 @@
# firebird/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -359,6 +359,7 @@ class FBIdentifierPreparer(sql.compiler.IdentifierPreparer):
"""Install Firebird specific reserved words."""
reserved_words = RESERVED_WORDS
+ illegal_initial_characters = compiler.ILLEGAL_INITIAL_CHARACTERS.union(['_'])
def __init__(self, dialect):
super(FBIdentifierPreparer, self).__init__(dialect, omit_schema=True)
@@ -401,6 +402,8 @@ class FBDialect(default.DefaultDialect):
colspecs = colspecs
ischema_names = ischema_names
+ construct_arguments = []
+
# defaults to dialect ver. 3,
# will be autodetected off upon
# first connect
@@ -474,18 +477,34 @@ class FBDialect(default.DefaultDialect):
@reflection.cache
def get_table_names(self, connection, schema=None, **kw):
+ # there are two queries commonly mentioned for this.
+ # this one, using view_blr, is at the Firebird FAQ among other places:
+ # http://www.firebirdfaq.org/faq174/
s = """
- SELECT DISTINCT rdb$relation_name
- FROM rdb$relation_fields
- WHERE rdb$system_flag=0 AND rdb$view_context IS NULL
+ select rdb$relation_name
+ from rdb$relations
+ where rdb$view_blr is null
+ and (rdb$system_flag is null or rdb$system_flag = 0);
"""
+
+ # the other query is this one. It's not clear if there's really
+ # any difference between these two. This link:
+ # http://www.alberton.info/firebird_sql_meta_info.html#.Ur3vXfZGni8
+ # states them as interchangeable. Some discussion at [ticket:2898]
+ # SELECT DISTINCT rdb$relation_name
+ # FROM rdb$relation_fields
+ # WHERE rdb$system_flag=0 AND rdb$view_context IS NULL
+
return [self.normalize_name(row[0]) for row in connection.execute(s)]
@reflection.cache
def get_view_names(self, connection, schema=None, **kw):
+ # see http://www.firebirdfaq.org/faq174/
s = """
- SELECT distinct rdb$view_name
- FROM rdb$view_relations
+ select rdb$relation_name
+ from rdb$relations
+ where rdb$view_blr is not null
+ and (rdb$system_flag is null or rdb$system_flag = 0);
"""
return [self.normalize_name(row[0]) for row in connection.execute(s)]
@@ -700,7 +719,7 @@ class FBDialect(default.DefaultDialect):
ic.rdb$index_name
WHERE ix.rdb$relation_name=? AND ix.rdb$foreign_key IS NULL
AND rdb$relation_constraints.rdb$constraint_type IS NULL
- ORDER BY index_name, field_name
+ ORDER BY index_name, ic.rdb$field_position
"""
c = connection.execute(qry, [self.denormalize_name(table_name)])
diff --git a/lib/sqlalchemy/dialects/firebird/fdb.py b/lib/sqlalchemy/dialects/firebird/fdb.py
index 36b424d49..4d94ef0d5 100644
--- a/lib/sqlalchemy/dialects/firebird/fdb.py
+++ b/lib/sqlalchemy/dialects/firebird/fdb.py
@@ -1,5 +1,5 @@
# firebird/fdb.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/firebird/kinterbasdb.py b/lib/sqlalchemy/dialects/firebird/kinterbasdb.py
index c8d8e986f..b8a83a07b 100644
--- a/lib/sqlalchemy/dialects/firebird/kinterbasdb.py
+++ b/lib/sqlalchemy/dialects/firebird/kinterbasdb.py
@@ -1,5 +1,5 @@
# firebird/kinterbasdb.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -42,7 +42,7 @@ from re import match
import decimal
-class _FBNumeric_kinterbasdb(sqltypes.Numeric):
+class _kinterbasdb_numeric(object):
def bind_processor(self, dialect):
def process(value):
if isinstance(value, decimal.Decimal):
@@ -51,6 +51,12 @@ class _FBNumeric_kinterbasdb(sqltypes.Numeric):
return value
return process
+class _FBNumeric_kinterbasdb(_kinterbasdb_numeric, sqltypes.Numeric):
+ pass
+
+class _FBFloat_kinterbasdb(_kinterbasdb_numeric, sqltypes.Float):
+ pass
+
class FBExecutionContext_kinterbasdb(FBExecutionContext):
@property
@@ -74,6 +80,7 @@ class FBDialect_kinterbasdb(FBDialect):
FBDialect.colspecs,
{
sqltypes.Numeric: _FBNumeric_kinterbasdb,
+ sqltypes.Float: _FBFloat_kinterbasdb,
}
)
diff --git a/lib/sqlalchemy/dialects/informix/__init__.py b/lib/sqlalchemy/dialects/informix/__init__.py
deleted file mode 100644
index a55277c9f..000000000
--- a/lib/sqlalchemy/dialects/informix/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# informix/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-from sqlalchemy.dialects.informix import base, informixdb
-
-base.dialect = informixdb.dialect
diff --git a/lib/sqlalchemy/dialects/informix/base.py b/lib/sqlalchemy/dialects/informix/base.py
deleted file mode 100644
index e13ea8819..000000000
--- a/lib/sqlalchemy/dialects/informix/base.py
+++ /dev/null
@@ -1,590 +0,0 @@
-# informix/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
-# coding: gbk
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""
-.. dialect:: informix
- :name: Informix
-
-.. note::
-
- The Informix dialect functions on current SQLAlchemy versions
- but is not regularly tested, and may have many issues and
- caveats not currently handled.
-
-"""
-
-
-import datetime
-
-from sqlalchemy import sql, schema, exc, pool, util
-from sqlalchemy.sql import compiler, text
-from sqlalchemy.engine import default, reflection
-from sqlalchemy import types as sqltypes
-from functools import reduce
-
-RESERVED_WORDS = set(
- ["abs", "absolute", "access", "access_method", "acos", "active", "add",
- "address", "add_months", "admin", "after", "aggregate", "alignment",
- "all", "allocate", "all_rows", "alter", "and", "ansi", "any", "append",
- "array", "as", "asc", "ascii", "asin", "at", "atan", "atan2", "attach",
- "attributes", "audit", "authentication", "authid", "authorization",
- "authorized", "auto", "autofree", "auto_reprepare", "auto_stat_mode",
- "avg", "avoid_execute", "avoid_fact", "avoid_full", "avoid_hash",
- "avoid_index", "avoid_index_sj", "avoid_multi_index", "avoid_nl",
- "avoid_star_join", "avoid_subqf", "based", "before", "begin",
- "between", "bigint", "bigserial", "binary", "bitand", "bitandnot",
- "bitnot", "bitor", "bitxor", "blob", "blobdir", "boolean", "both",
- "bound_impl_pdq", "buffered", "builtin", "by", "byte", "cache", "call",
- "cannothash", "cardinality", "cascade", "case", "cast", "ceil", "char",
- "character", "character_length", "char_length", "check", "class",
- "class_origin", "client", "clob", "clobdir", "close", "cluster",
- "clustersize", "cobol", "codeset", "collation", "collection",
- "column", "columns", "commit", "committed", "commutator", "component",
- "components", "concat", "concurrent", "connect", "connection",
- "connection_name", "connect_by_iscycle", "connect_by_isleaf",
- "connect_by_rootconst", "constraint", "constraints", "constructor",
- "context", "continue", "copy", "cos", "costfunc", "count", "crcols",
- "create", "cross", "current", "current_role", "currval", "cursor",
- "cycle", "database", "datafiles", "dataskip", "date", "datetime",
- "day", "dba", "dbdate", "dbinfo", "dbpassword", "dbsecadm",
- "dbservername", "deallocate", "debug", "debugmode", "debug_env", "dec",
- "decimal", "declare", "decode", "decrypt_binary", "decrypt_char",
- "dec_t", "default", "default_role", "deferred", "deferred_prepare",
- "define", "delay", "delete", "deleting", "delimited", "delimiter",
- "deluxe", "desc", "describe", "descriptor", "detach", "diagnostics",
- "directives", "dirty", "disable", "disabled", "disconnect", "disk",
- "distinct", "distributebinary", "distributesreferences",
- "distributions", "document", "domain", "donotdistribute", "dormant",
- "double", "drop", "dtime_t", "each", "elif", "else", "enabled",
- "encryption", "encrypt_aes", "encrypt_tdes", "end", "enum",
- "environment", "error", "escape", "exception", "exclusive", "exec",
- "execute", "executeanywhere", "exemption", "exists", "exit", "exp",
- "explain", "explicit", "express", "expression", "extdirectives",
- "extend", "extent", "external", "fact", "false", "far", "fetch",
- "file", "filetoblob", "filetoclob", "fillfactor", "filtering", "first",
- "first_rows", "fixchar", "fixed", "float", "floor", "flush", "for",
- "force", "forced", "force_ddl_exec", "foreach", "foreign", "format",
- "format_units", "fortran", "found", "fraction", "fragment",
- "fragments", "free", "from", "full", "function", "general", "get",
- "gethint", "global", "go", "goto", "grant", "greaterthan",
- "greaterthanorequal", "group", "handlesnulls", "hash", "having", "hdr",
- "hex", "high", "hint", "hold", "home", "hour", "idslbacreadarray",
- "idslbacreadset", "idslbacreadtree", "idslbacrules",
- "idslbacwritearray", "idslbacwriteset", "idslbacwritetree",
- "idssecuritylabel", "if", "ifx_auto_reprepare", "ifx_batchedread_table",
- "ifx_int8_t", "ifx_lo_create_spec_t", "ifx_lo_stat_t", "immediate",
- "implicit", "implicit_pdq", "in", "inactive", "increment", "index",
- "indexes", "index_all", "index_sj", "indicator", "informix", "init",
- "initcap", "inline", "inner", "inout", "insert", "inserting", "instead",
- "int", "int8", "integ", "integer", "internal", "internallength",
- "interval", "into", "intrvl_t", "is", "iscanonical", "isolation",
- "item", "iterator", "java", "join", "keep", "key", "label", "labeleq",
- "labelge", "labelglb", "labelgt", "labelle", "labellt", "labellub",
- "labeltostring", "language", "last", "last_day", "leading", "left",
- "length", "lessthan", "lessthanorequal", "let", "level", "like",
- "limit", "list", "listing", "load", "local", "locator", "lock", "locks",
- "locopy", "loc_t", "log", "log10", "logn", "long", "loop", "lotofile",
- "low", "lower", "lpad", "ltrim", "lvarchar", "matched", "matches",
- "max", "maxerrors", "maxlen", "maxvalue", "mdy", "median", "medium",
- "memory", "memory_resident", "merge", "message_length", "message_text",
- "middle", "min", "minute", "minvalue", "mod", "mode", "moderate",
- "modify", "module", "money", "month", "months_between", "mounting",
- "multiset", "multi_index", "name", "nchar", "negator", "new", "next",
- "nextval", "next_day", "no", "nocache", "nocycle", "nomaxvalue",
- "nomigrate", "nominvalue", "none", "non_dim", "non_resident", "noorder",
- "normal", "not", "notemplatearg", "notequal", "null", "nullif",
- "numeric", "numrows", "numtodsinterval", "numtoyminterval", "nvarchar",
- "nvl", "octet_length", "of", "off", "old", "on", "online", "only",
- "opaque", "opclass", "open", "optcompind", "optical", "optimization",
- "option", "or", "order", "ordered", "out", "outer", "output",
- "override", "page", "parallelizable", "parameter", "partition",
- "pascal", "passedbyvalue", "password", "pdqpriority", "percaltl_cos",
- "pipe", "pli", "pload", "policy", "pow", "power", "precision",
- "prepare", "previous", "primary", "prior", "private", "privileges",
- "procedure", "properties", "public", "put", "raise", "range", "raw",
- "read", "real", "recordend", "references", "referencing", "register",
- "rejectfile", "relative", "release", "remainder", "rename",
- "reoptimization", "repeatable", "replace", "replication", "reserve",
- "resolution", "resource", "restart", "restrict", "resume", "retain",
- "retainupdatelocks", "return", "returned_sqlstate", "returning",
- "returns", "reuse", "revoke", "right", "robin", "role", "rollback",
- "rollforward", "root", "round", "routine", "row", "rowid", "rowids",
- "rows", "row_count", "rpad", "rtrim", "rule", "sameas", "samples",
- "sampling", "save", "savepoint", "schema", "scroll", "seclabel_by_comp",
- "seclabel_by_name", "seclabel_to_char", "second", "secondary",
- "section", "secured", "security", "selconst", "select", "selecting",
- "selfunc", "selfuncargs", "sequence", "serial", "serial8",
- "serializable", "serveruuid", "server_name", "session", "set",
- "setsessionauth", "share", "short", "siblings", "signed", "sin",
- "sitename", "size", "skall", "skinhibit", "skip", "skshow",
- "smallfloat", "smallint", "some", "specific", "sql", "sqlcode",
- "sqlcontext", "sqlerror", "sqlstate", "sqlwarning", "sqrt",
- "stability", "stack", "standard", "start", "star_join", "statchange",
- "statement", "static", "statistics", "statlevel", "status", "stdev",
- "step", "stop", "storage", "store", "strategies", "string",
- "stringtolabel", "struct", "style", "subclass_origin", "substr",
- "substring", "sum", "support", "sync", "synonym", "sysdate",
- "sysdbclose", "sysdbopen", "system", "sys_connect_by_path", "table",
- "tables", "tan", "task", "temp", "template", "test", "text", "then",
- "time", "timeout", "to", "today", "to_char", "to_date",
- "to_dsinterval", "to_number", "to_yminterval", "trace", "trailing",
- "transaction", "transition", "tree", "trigger", "triggers", "trim",
- "true", "trunc", "truncate", "trusted", "type", "typedef", "typeid",
- "typename", "typeof", "uid", "uncommitted", "under", "union",
- "unique", "units", "unknown", "unload", "unlock", "unsigned",
- "update", "updating", "upon", "upper", "usage", "use",
- "uselastcommitted", "user", "use_hash", "use_nl", "use_subqf",
- "using", "value", "values", "var", "varchar", "variable", "variance",
- "variant", "varying", "vercols", "view", "violations", "void",
- "volatile", "wait", "warning", "weekday", "when", "whenever", "where",
- "while", "with", "without", "work", "write", "writedown", "writeup",
- "xadatasource", "xid", "xload", "xunload", "year"
- ])
-
-
-class InfoDateTime(sqltypes.DateTime):
-
- def bind_processor(self, dialect):
- def process(value):
- if value is not None:
- if value.microsecond:
- value = value.replace(microsecond=0)
- return value
- return process
-
-
-class InfoTime(sqltypes.Time):
-
- def bind_processor(self, dialect):
- def process(value):
- if value is not None:
- if value.microsecond:
- value = value.replace(microsecond=0)
- return value
- return process
-
- def result_processor(self, dialect, coltype):
- def process(value):
- if isinstance(value, datetime.datetime):
- return value.time()
- else:
- return value
- return process
-
-colspecs = {
- sqltypes.DateTime: InfoDateTime,
- sqltypes.TIMESTAMP: InfoDateTime,
- sqltypes.Time: InfoTime,
-}
-
-
-ischema_names = {
- 0: sqltypes.CHAR, # CHAR
- 1: sqltypes.SMALLINT, # SMALLINT
- 2: sqltypes.INTEGER, # INT
- 3: sqltypes.FLOAT, # Float
- 3: sqltypes.Float, # SmallFloat
- 5: sqltypes.DECIMAL, # DECIMAL
- 6: sqltypes.Integer, # Serial
- 7: sqltypes.DATE, # DATE
- 8: sqltypes.Numeric, # MONEY
- 10: sqltypes.DATETIME, # DATETIME
- 11: sqltypes.LargeBinary, # BYTE
- 12: sqltypes.TEXT, # TEXT
- 13: sqltypes.VARCHAR, # VARCHAR
- 15: sqltypes.NCHAR, # NCHAR
- 16: sqltypes.NVARCHAR, # NVARCHAR
- 17: sqltypes.Integer, # INT8
- 18: sqltypes.Integer, # Serial8
- 43: sqltypes.String, # LVARCHAR
- -1: sqltypes.BLOB, # BLOB
- -1: sqltypes.CLOB, # CLOB
-}
-
-
-class InfoTypeCompiler(compiler.GenericTypeCompiler):
- def visit_DATETIME(self, type_):
- return "DATETIME YEAR TO SECOND"
-
- def visit_TIME(self, type_):
- return "DATETIME HOUR TO SECOND"
-
- def visit_TIMESTAMP(self, type_):
- return "DATETIME YEAR TO SECOND"
-
- def visit_large_binary(self, type_):
- return "BYTE"
-
- def visit_boolean(self, type_):
- return "SMALLINT"
-
-
-class InfoSQLCompiler(compiler.SQLCompiler):
-
- def default_from(self):
- return " from systables where tabname = 'systables' "
-
- def get_select_precolumns(self, select):
- s = ""
- if select._offset:
- s += "SKIP %s " % select._offset
- if select._limit:
- s += "FIRST %s " % select._limit
- s += select._distinct and "DISTINCT " or ""
- return s
-
- def visit_select(self, select, asfrom=False, parens=True, **kw):
- text = compiler.SQLCompiler.visit_select(self, select, asfrom, parens, **kw)
- if asfrom and parens and self.dialect.server_version_info < (11,):
- #assuming that 11 version doesn't need this, not tested
- return "table(multiset" + text + ")"
- else:
- return text
-
- def limit_clause(self, select):
- return ""
-
- def visit_function(self, func, **kw):
- if func.name.lower() == 'current_date':
- return "today"
- elif func.name.lower() == 'current_time':
- return "CURRENT HOUR TO SECOND"
- elif func.name.lower() in ('current_timestamp', 'now'):
- return "CURRENT YEAR TO SECOND"
- else:
- return compiler.SQLCompiler.visit_function(self, func, **kw)
-
- def visit_mod_binary(self, binary, operator, **kw):
- return "MOD(%s, %s)" % (self.process(binary.left, **kw),
- self.process(binary.right, **kw))
-
-
-class InfoDDLCompiler(compiler.DDLCompiler):
-
- def visit_add_constraint(self, create):
- preparer = self.preparer
- return "ALTER TABLE %s ADD CONSTRAINT %s" % (
- self.preparer.format_table(create.element.table),
- self.process(create.element)
- )
-
- def get_column_specification(self, column, **kw):
- colspec = self.preparer.format_column(column)
- first = None
- if column.primary_key and column.autoincrement:
- try:
- first = [c for c in column.table.primary_key.columns
- if (c.autoincrement and
- isinstance(c.type, sqltypes.Integer) and
- not c.foreign_keys)].pop(0)
- except IndexError:
- pass
-
- if column is first:
- colspec += " SERIAL"
- else:
- colspec += " " + self.dialect.type_compiler.process(column.type)
- default = self.get_column_default_string(column)
- if default is not None:
- colspec += " DEFAULT " + default
-
- if not column.nullable:
- colspec += " NOT NULL"
-
- return colspec
-
- def get_column_default_string(self, column):
- if (isinstance(column.server_default, schema.DefaultClause) and
- isinstance(column.server_default.arg, util.string_types)):
- if isinstance(column.type, (sqltypes.Integer, sqltypes.Numeric)):
- return self.sql_compiler.process(text(column.server_default.arg))
-
- return super(InfoDDLCompiler, self).get_column_default_string(column)
-
- ### Informix wants the constraint name at the end, hence this ist c&p from sql/compiler.py
- def visit_primary_key_constraint(self, constraint):
- if len(constraint) == 0:
- return ''
- text = "PRIMARY KEY "
- text += "(%s)" % ', '.join(self.preparer.quote(c.name, c.quote)
- for c in constraint)
- text += self.define_constraint_deferrability(constraint)
-
- if constraint.name is not None:
- text += " CONSTRAINT %s" % self.preparer.format_constraint(constraint)
- return text
-
- def visit_foreign_key_constraint(self, constraint):
- preparer = self.dialect.identifier_preparer
- remote_table = list(constraint._elements.values())[0].column.table
- text = "FOREIGN KEY (%s) REFERENCES %s (%s)" % (
- ', '.join(preparer.quote(f.parent.name, f.parent.quote)
- for f in constraint._elements.values()),
- preparer.format_table(remote_table),
- ', '.join(preparer.quote(f.column.name, f.column.quote)
- for f in constraint._elements.values())
- )
- text += self.define_constraint_cascades(constraint)
- text += self.define_constraint_deferrability(constraint)
-
- if constraint.name is not None:
- text += " CONSTRAINT %s " % \
- preparer.format_constraint(constraint)
- return text
-
- def visit_unique_constraint(self, constraint):
- text = "UNIQUE (%s)" % (', '.join(self.preparer.quote(c.name, c.quote) for c in constraint))
- text += self.define_constraint_deferrability(constraint)
-
- if constraint.name is not None:
- text += "CONSTRAINT %s " % self.preparer.format_constraint(constraint)
- return text
-
-
-class InformixIdentifierPreparer(compiler.IdentifierPreparer):
-
- reserved_words = RESERVED_WORDS
-
-
-class InformixDialect(default.DefaultDialect):
- name = 'informix'
-
- max_identifier_length = 128 # adjusts at runtime based on server version
-
- type_compiler = InfoTypeCompiler
- statement_compiler = InfoSQLCompiler
- ddl_compiler = InfoDDLCompiler
- colspecs = colspecs
- ischema_names = ischema_names
- preparer = InformixIdentifierPreparer
- default_paramstyle = 'qmark'
-
- def initialize(self, connection):
- super(InformixDialect, self).initialize(connection)
-
- # http://www.querix.com/support/knowledge-base/error_number_message/error_200
- if self.server_version_info < (9, 2):
- self.max_identifier_length = 18
- else:
- self.max_identifier_length = 128
-
- def _get_table_names(self, connection, schema, type, **kw):
- schema = schema or self.default_schema_name
- s = "select tabname, owner from systables where owner=? and tabtype=?"
- return [row[0] for row in connection.execute(s, schema, type)]
-
- @reflection.cache
- def get_table_names(self, connection, schema=None, **kw):
- return self._get_table_names(connection, schema, 'T', **kw)
-
- @reflection.cache
- def get_view_names(self, connection, schema=None, **kw):
- return self._get_table_names(connection, schema, 'V', **kw)
-
- @reflection.cache
- def get_schema_names(self, connection, **kw):
- s = "select owner from systables"
- return [row[0] for row in connection.execute(s)]
-
- def has_table(self, connection, table_name, schema=None):
- schema = schema or self.default_schema_name
- cursor = connection.execute(
- """select tabname from systables where tabname=? and owner=?""",
- table_name, schema)
- return cursor.first() is not None
-
- @reflection.cache
- def get_columns(self, connection, table_name, schema=None, **kw):
- schema = schema or self.default_schema_name
- c = connection.execute(
- """select colname, coltype, collength, t3.default, t1.colno from
- syscolumns as t1 , systables as t2 , OUTER sysdefaults as t3
- where t1.tabid = t2.tabid and t2.tabname=? and t2.owner=?
- and t3.tabid = t2.tabid and t3.colno = t1.colno
- order by t1.colno""", table_name, schema)
-
- pk_constraint = self.get_pk_constraint(connection, table_name, schema, **kw)
- primary_cols = pk_constraint['constrained_columns']
-
- columns = []
- rows = c.fetchall()
- for name, colattr, collength, default, colno in rows:
- name = name.lower()
-
- autoincrement = False
- primary_key = False
-
- if name in primary_cols:
- primary_key = True
-
- # in 7.31, coltype = 0x000
- # ^^-- column type
- # ^-- 1 not null, 0 null
- not_nullable, coltype = divmod(colattr, 256)
- if coltype not in (0, 13) and default:
- default = default.split()[-1]
-
- if coltype == 6: # Serial, mark as autoincrement
- autoincrement = True
-
- if coltype == 0 or coltype == 13: # char, varchar
- coltype = ischema_names[coltype](collength)
- if default:
- default = "'%s'" % default
- elif coltype == 5: # decimal
- precision, scale = (collength & 0xFF00) >> 8, collength & 0xFF
- if scale == 255:
- scale = 0
- coltype = sqltypes.Numeric(precision, scale)
- else:
- try:
- coltype = ischema_names[coltype]
- except KeyError:
- util.warn("Did not recognize type '%s' of column '%s'" %
- (coltype, name))
- coltype = sqltypes.NULLTYPE
-
- column_info = dict(name=name, type=coltype, nullable=not not_nullable,
- default=default, autoincrement=autoincrement,
- primary_key=primary_key)
- columns.append(column_info)
- return columns
-
- @reflection.cache
- def get_foreign_keys(self, connection, table_name, schema=None, **kw):
- schema_sel = schema or self.default_schema_name
- c = connection.execute(
- """select t1.constrname as cons_name,
- t4.colname as local_column, t7.tabname as remote_table,
- t6.colname as remote_column, t7.owner as remote_owner
- from sysconstraints as t1 , systables as t2 ,
- sysindexes as t3 , syscolumns as t4 ,
- sysreferences as t5 , syscolumns as t6 , systables as t7 ,
- sysconstraints as t8 , sysindexes as t9
- where t1.tabid = t2.tabid and t2.tabname=? and t2.owner=? and t1.constrtype = 'R'
- and t3.tabid = t2.tabid and t3.idxname = t1.idxname
- and t4.tabid = t2.tabid and t4.colno in (t3.part1, t3.part2, t3.part3,
- t3.part4, t3.part5, t3.part6, t3.part7, t3.part8, t3.part9, t3.part10,
- t3.part11, t3.part11, t3.part12, t3.part13, t3.part4, t3.part15, t3.part16)
- and t5.constrid = t1.constrid and t8.constrid = t5.primary
- and t6.tabid = t5.ptabid and t6.colno in (t9.part1, t9.part2, t9.part3,
- t9.part4, t9.part5, t9.part6, t9.part7, t9.part8, t9.part9, t9.part10,
- t9.part11, t9.part11, t9.part12, t9.part13, t9.part4, t9.part15, t9.part16) and t9.idxname =
- t8.idxname
- and t7.tabid = t5.ptabid""", table_name, schema_sel)
-
- def fkey_rec():
- return {
- 'name': None,
- 'constrained_columns': [],
- 'referred_schema': None,
- 'referred_table': None,
- 'referred_columns': []
- }
-
- fkeys = util.defaultdict(fkey_rec)
-
- rows = c.fetchall()
- for cons_name, local_column, \
- remote_table, remote_column, remote_owner in rows:
-
- rec = fkeys[cons_name]
- rec['name'] = cons_name
- local_cols, remote_cols = \
- rec['constrained_columns'], rec['referred_columns']
-
- if not rec['referred_table']:
- rec['referred_table'] = remote_table
- if schema is not None:
- rec['referred_schema'] = remote_owner
-
- if local_column not in local_cols:
- local_cols.append(local_column)
- if remote_column not in remote_cols:
- remote_cols.append(remote_column)
-
- return list(fkeys.values())
-
- @reflection.cache
- def get_pk_constraint(self, connection, table_name, schema=None, **kw):
- schema = schema or self.default_schema_name
-
- # Select the column positions from sysindexes for sysconstraints
- data = connection.execute(
- """select t2.*
- from systables as t1, sysindexes as t2, sysconstraints as t3
- where t1.tabid=t2.tabid and t1.tabname=? and t1.owner=?
- and t2.idxname=t3.idxname and t3.constrtype='P'""",
- table_name, schema
- ).fetchall()
-
- colpositions = set()
-
- for row in data:
- colpos = set([getattr(row, 'part%d' % x) for x in range(1, 16)])
- colpositions |= colpos
-
- if not len(colpositions):
- return {'constrained_columns': [], 'name': None}
-
- # Select the column names using the columnpositions
- # TODO: Maybe cache a bit of those col infos (eg select all colnames for one table)
- place_holder = ','.join('?' * len(colpositions))
- c = connection.execute(
- """select t1.colname
- from syscolumns as t1, systables as t2
- where t2.tabname=? and t1.tabid = t2.tabid and
- t1.colno in (%s)""" % place_holder,
- table_name, *colpositions
- ).fetchall()
-
- cols = reduce(lambda x, y: list(x) + list(y), c, [])
- return {'constrained_columns': cols, 'name': None}
-
- @reflection.cache
- def get_indexes(self, connection, table_name, schema, **kw):
- # TODO: schema...
- c = connection.execute(
- """select t1.*
- from sysindexes as t1 , systables as t2
- where t1.tabid = t2.tabid and t2.tabname=?""",
- table_name)
-
- indexes = []
- for row in c.fetchall():
- colnames = [getattr(row, 'part%d' % x) for x in range(1, 16)]
- colnames = [x for x in colnames if x]
- place_holder = ','.join('?' * len(colnames))
- c = connection.execute(
- """select t1.colname
- from syscolumns as t1, systables as t2
- where t2.tabname=? and t1.tabid = t2.tabid and
- t1.colno in (%s)""" % place_holder,
- table_name, *colnames
- ).fetchall()
- c = reduce(lambda x, y: list(x) + list(y), c, [])
- indexes.append({
- 'name': row.idxname,
- 'unique': row.idxtype.lower() == 'u',
- 'column_names': c
- })
- return indexes
-
- @reflection.cache
- def get_view_definition(self, connection, view_name, schema=None, **kw):
- schema = schema or self.default_schema_name
- c = connection.execute(
- """select t1.viewtext
- from sysviews as t1 , systables as t2
- where t1.tabid=t2.tabid and t2.tabname=?
- and t2.owner=? order by seqno""",
- view_name, schema).fetchall()
-
- return ''.join([row[0] for row in c])
-
- def _get_default_schema_name(self, connection):
- return connection.execute('select CURRENT_ROLE from systables').scalar()
diff --git a/lib/sqlalchemy/dialects/informix/informixdb.py b/lib/sqlalchemy/dialects/informix/informixdb.py
deleted file mode 100644
index f2f0d3e80..000000000
--- a/lib/sqlalchemy/dialects/informix/informixdb.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# informix/informixdb.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""
-
-.. dialect:: informix+informixdb
- :name: informixdb
- :dbapi: informixdb
- :connectstring: informix+informixdb://user:password@host/dbname
- :url: http://informixdb.sourceforge.net/
-
-"""
-
-import re
-
-from sqlalchemy.dialects.informix.base import InformixDialect
-from sqlalchemy.engine import default
-
-VERSION_RE = re.compile(r'(\d+)\.(\d+)(.+\d+)')
-
-
-class InformixExecutionContext_informixdb(default.DefaultExecutionContext):
-
- def post_exec(self):
- if self.isinsert:
- self._lastrowid = self.cursor.sqlerrd[1]
-
- def get_lastrowid(self):
- return self._lastrowid
-
-
-class InformixDialect_informixdb(InformixDialect):
- driver = 'informixdb'
- execution_ctx_cls = InformixExecutionContext_informixdb
-
- @classmethod
- def dbapi(cls):
- return __import__('informixdb')
-
- def create_connect_args(self, url):
- if url.host:
- dsn = '%s@%s' % (url.database, url.host)
- else:
- dsn = url.database
-
- if url.username:
- opt = {'user': url.username, 'password': url.password}
- else:
- opt = {}
-
- return ([dsn], opt)
-
- def _get_server_version_info(self, connection):
- # http://informixdb.sourceforge.net/manual.html#inspecting-version-numbers
- v = VERSION_RE.split(connection.connection.dbms_version)
- return (int(v[1]), int(v[2]), v[3])
-
- def is_disconnect(self, e, connection, cursor):
- if isinstance(e, self.dbapi.OperationalError):
- return 'closed the connection' in str(e) \
- or 'connection not open' in str(e)
- else:
- return False
-
-
-dialect = InformixDialect_informixdb
diff --git a/lib/sqlalchemy/dialects/mssql/__init__.py b/lib/sqlalchemy/dialects/mssql/__init__.py
index 0b81d6df9..7a2dfa60b 100644
--- a/lib/sqlalchemy/dialects/mssql/__init__.py
+++ b/lib/sqlalchemy/dialects/mssql/__init__.py
@@ -1,5 +1,5 @@
# mssql/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mssql/adodbapi.py b/lib/sqlalchemy/dialects/mssql/adodbapi.py
index 167b4e807..95cf42423 100644
--- a/lib/sqlalchemy/dialects/mssql/adodbapi.py
+++ b/lib/sqlalchemy/dialects/mssql/adodbapi.py
@@ -1,5 +1,5 @@
# mssql/adodbapi.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py
index 7621f4aab..522cb5ce3 100644
--- a/lib/sqlalchemy/dialects/mssql/base.py
+++ b/lib/sqlalchemy/dialects/mssql/base.py
@@ -1,5 +1,5 @@
# mssql/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -103,22 +103,49 @@ for these types will be issued as DATETIME.
.. _mssql_indexes:
-MSSQL-Specific Index Options
------------------------------
-
-The MSSQL dialect supports special options for :class:`.Index`.
+Clustered Index Support
+-----------------------
-CLUSTERED
-^^^^^^^^^^
+The MSSQL dialect supports clustered indexes (and primary keys) via the
+``mssql_clustered`` option. This option is available to :class:`.Index`,
+:class:`.UniqueConstraint`. and :class:`.PrimaryKeyConstraint`.
-The ``mssql_clustered`` option adds the CLUSTERED keyword to the index::
+To generate a clustered index::
Index("my_index", table.c.x, mssql_clustered=True)
-would render the index as ``CREATE CLUSTERED INDEX my_index ON table (x)``
+which renders the index as ``CREATE CLUSTERED INDEX my_index ON table (x)``.
.. versionadded:: 0.8
+To generate a clustered primary key use::
+
+ Table('my_table', metadata,
+ Column('x', ...),
+ Column('y', ...),
+ PrimaryKeyConstraint("x", "y", mssql_clustered=True))
+
+which will render the table, for example, as::
+
+ CREATE TABLE my_table (x INTEGER NOT NULL, y INTEGER NOT NULL, PRIMARY KEY CLUSTERED (x, y))
+
+Similarly, we can generate a clustered unique constraint using::
+
+ Table('my_table', metadata,
+ Column('x', ...),
+ Column('y', ...),
+ PrimaryKeyConstraint("x"),
+ UniqueConstraint("y", mssql_clustered=True),
+ )
+
+ .. versionadded:: 0.9.2
+
+MSSQL-Specific Index Options
+-----------------------------
+
+In addition to clustering, the MSSQL dialect supports other special options
+for :class:`.Index`.
+
INCLUDE
^^^^^^^
@@ -991,7 +1018,7 @@ class MSDDLCompiler(compiler.DDLCompiler):
text += "UNIQUE "
# handle clustering option
- if index.kwargs.get("mssql_clustered"):
+ if index.dialect_options['mssql']['clustered']:
text += "CLUSTERED "
text += "INDEX %s ON %s (%s)" \
@@ -1001,29 +1028,61 @@ class MSDDLCompiler(compiler.DDLCompiler):
preparer.format_table(index.table),
', '.join(
self.sql_compiler.process(expr,
- include_table=False) for
+ include_table=False, literal_binds=True) for
expr in index.expressions)
)
# handle other included columns
- if index.kwargs.get("mssql_include"):
+ if index.dialect_options['mssql']['include']:
inclusions = [index.table.c[col]
if isinstance(col, util.string_types) else col
- for col in index.kwargs["mssql_include"]]
+ for col in index.dialect_options['mssql']['include']]
text += " INCLUDE (%s)" \
- % ', '.join([preparer.quote(c.name, c.quote)
+ % ', '.join([preparer.quote(c.name)
for c in inclusions])
return text
def visit_drop_index(self, drop):
- return "\nDROP INDEX %s.%s" % (
- self.preparer.quote_identifier(drop.element.table.name),
- self._prepared_index_name(drop.element,
- include_schema=True)
+ return "\nDROP INDEX %s ON %s" % (
+ self._prepared_index_name(drop.element, include_schema=False),
+ self.preparer.format_table(drop.element.table)
)
+ def visit_primary_key_constraint(self, constraint):
+ if len(constraint) == 0:
+ return ''
+ text = ""
+ if constraint.name is not None:
+ text += "CONSTRAINT %s " % \
+ self.preparer.format_constraint(constraint)
+ text += "PRIMARY KEY "
+
+ if constraint.dialect_options['mssql']['clustered']:
+ text += "CLUSTERED "
+
+ text += "(%s)" % ', '.join(self.preparer.quote(c.name)
+ for c in constraint)
+ text += self.define_constraint_deferrability(constraint)
+ return text
+
+ def visit_unique_constraint(self, constraint):
+ if len(constraint) == 0:
+ return ''
+ text = ""
+ if constraint.name is not None:
+ text += "CONSTRAINT %s " % \
+ self.preparer.format_constraint(constraint)
+ text += "UNIQUE "
+
+ if constraint.dialect_options['mssql']['clustered']:
+ text += "CLUSTERED "
+
+ text += "(%s)" % ', '.join(self.preparer.quote(c.name)
+ for c in constraint)
+ text += self.define_constraint_deferrability(constraint)
+ return text
class MSIdentifierPreparer(compiler.IdentifierPreparer):
reserved_words = RESERVED_WORDS
@@ -1035,7 +1094,7 @@ class MSIdentifierPreparer(compiler.IdentifierPreparer):
def _escape_identifier(self, value):
return value
- def quote_schema(self, schema, force=True):
+ def quote_schema(self, schema, force=None):
"""Prepare a quoted table and schema name."""
result = '.'.join([self.quote(x, force) for x in schema.split('.')])
return result
@@ -1105,6 +1164,19 @@ class MSDialect(default.DefaultDialect):
type_compiler = MSTypeCompiler
preparer = MSIdentifierPreparer
+ construct_arguments = [
+ (sa_schema.PrimaryKeyConstraint, {
+ "clustered": False
+ }),
+ (sa_schema.UniqueConstraint, {
+ "clustered": False
+ }),
+ (sa_schema.Index, {
+ "clustered": False,
+ "include": None
+ })
+ ]
+
def __init__(self,
query_timeout=None,
use_scope_identity=True,
diff --git a/lib/sqlalchemy/dialects/mssql/information_schema.py b/lib/sqlalchemy/dialects/mssql/information_schema.py
index 7ac1b703e..26e70f7f0 100644
--- a/lib/sqlalchemy/dialects/mssql/information_schema.py
+++ b/lib/sqlalchemy/dialects/mssql/information_schema.py
@@ -1,5 +1,5 @@
# mssql/information_schema.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mssql/mxodbc.py b/lib/sqlalchemy/dialects/mssql/mxodbc.py
index ddf31100c..5b686c47a 100644
--- a/lib/sqlalchemy/dialects/mssql/mxodbc.py
+++ b/lib/sqlalchemy/dialects/mssql/mxodbc.py
@@ -1,5 +1,5 @@
# mssql/mxodbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mssql/pymssql.py b/lib/sqlalchemy/dialects/mssql/pymssql.py
index b916612fb..021219cb9 100644
--- a/lib/sqlalchemy/dialects/mssql/pymssql.py
+++ b/lib/sqlalchemy/dialects/mssql/pymssql.py
@@ -1,5 +1,5 @@
# mssql/pymssql.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -86,6 +86,7 @@ class MSDialect_pymssql(MSDialect):
def is_disconnect(self, e, connection, cursor):
for msg in (
"Adaptive Server connection timed out",
+ "Net-Lib error during Connection reset by peer",
"message 20003", # connection timeout
"Error 10054",
"Not connected to any MS SQL server",
diff --git a/lib/sqlalchemy/dialects/mssql/pyodbc.py b/lib/sqlalchemy/dialects/mssql/pyodbc.py
index 5a359d179..8c43eb8a1 100644
--- a/lib/sqlalchemy/dialects/mssql/pyodbc.py
+++ b/lib/sqlalchemy/dialects/mssql/pyodbc.py
@@ -1,5 +1,5 @@
# mssql/pyodbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -116,8 +116,8 @@ from ...connectors.pyodbc import PyODBCConnector
from ... import types as sqltypes, util
import decimal
+class _ms_numeric_pyodbc(object):
-class _MSNumeric_pyodbc(sqltypes.Numeric):
"""Turns Decimals with adjusted() < 0 or > 7 into strings.
The routines here are needed for older pyodbc versions
@@ -127,7 +127,7 @@ class _MSNumeric_pyodbc(sqltypes.Numeric):
def bind_processor(self, dialect):
- super_process = super(_MSNumeric_pyodbc, self).\
+ super_process = super(_ms_numeric_pyodbc, self).\
bind_processor(dialect)
if not dialect._need_decimal_fix:
@@ -180,6 +180,11 @@ class _MSNumeric_pyodbc(sqltypes.Numeric):
[str(s) for s in _int][0:value.adjusted() + 1]))
return result
+class _MSNumeric_pyodbc(_ms_numeric_pyodbc, sqltypes.Numeric):
+ pass
+
+class _MSFloat_pyodbc(_ms_numeric_pyodbc, sqltypes.Float):
+ pass
class MSExecutionContext_pyodbc(MSExecutionContext):
_embedded_scope_identity = False
@@ -238,7 +243,8 @@ class MSDialect_pyodbc(PyODBCConnector, MSDialect):
colspecs = util.update_copy(
MSDialect.colspecs,
{
- sqltypes.Numeric: _MSNumeric_pyodbc
+ sqltypes.Numeric: _MSNumeric_pyodbc,
+ sqltypes.Float: _MSFloat_pyodbc
}
)
diff --git a/lib/sqlalchemy/dialects/mssql/zxjdbc.py b/lib/sqlalchemy/dialects/mssql/zxjdbc.py
index 9bf31e20c..706eef3a4 100644
--- a/lib/sqlalchemy/dialects/mssql/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/mssql/zxjdbc.py
@@ -1,5 +1,5 @@
# mssql/zxjdbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mysql/__init__.py b/lib/sqlalchemy/dialects/mysql/__init__.py
index 2bb636ff3..4eb8cc6d2 100644
--- a/lib/sqlalchemy/dialects/mysql/__init__.py
+++ b/lib/sqlalchemy/dialects/mysql/__init__.py
@@ -1,5 +1,5 @@
# mysql/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py
index d5e33c802..e45f6ecd8 100644
--- a/lib/sqlalchemy/dialects/mysql/base.py
+++ b/lib/sqlalchemy/dialects/mysql/base.py
@@ -1,5 +1,5 @@
# mysql/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -20,6 +20,8 @@ example, they won't work in SQLAlchemy either.
See the official MySQL documentation for detailed information about features
supported in any given server release.
+.. _mysql_connection_timeouts:
+
Connection Timeouts
-------------------
@@ -263,6 +265,41 @@ http://dev.mysql.com/doc/refman/5.0/en/create-index.html
http://dev.mysql.com/doc/refman/5.0/en/create-table.html
+.. _mysql_foreign_keys:
+
+MySQL Foreign Key Options
+-------------------------
+
+MySQL does not support the foreign key arguments "DEFERRABLE", "INITIALLY",
+or "MATCH". Using the ``deferrable`` or ``initially`` keyword argument with
+:class:`.ForeignKeyConstraint` or :class:`.ForeignKey` will have the effect of these keywords being
+rendered in a DDL expression, which will then raise an error on MySQL.
+In order to use these keywords on a foreign key while having them ignored
+on a MySQL backend, use a custom compile rule::
+
+ from sqlalchemy.ext.compiler import compiles
+ from sqlalchemy.schema import ForeignKeyConstraint
+
+ @compiles(ForeignKeyConstraint, "mysql")
+ def process(element, compiler, **kw):
+ element.deferrable = element.initially = None
+ return compiler.visit_foreign_key_constraint(element, **kw)
+
+.. versionchanged:: 0.9.0 - the MySQL backend no longer silently ignores
+ the ``deferrable`` or ``initially`` keyword arguments of :class:`.ForeignKeyConstraint`
+ and :class:`.ForeignKey`.
+
+The "MATCH" keyword is in fact more insidious, and is explicitly disallowed
+by SQLAlchemy in conjunction with the MySQL backend. This argument is silently
+ignored by MySQL, but in addition has the effect of ON UPDATE and ON DELETE options
+also being ignored by the backend. Therefore MATCH should never be used with the
+MySQL backend; as is the case with DEFERRABLE and INITIALLY, custom compilation
+rules can be used to correct a MySQL ForeignKeyConstraint at DDL definition time.
+
+.. versionadded:: 0.9.0 - the MySQL backend will raise a :class:`.CompileError`
+ when the ``match`` keyword is used with :class:`.ForeignKeyConstraint`
+ or :class:`.ForeignKey`.
+
"""
import datetime
@@ -318,10 +355,20 @@ RESERVED_WORDS = set(
'union', 'unique', 'unlock', 'unsigned', 'update', 'usage', 'use',
'using', 'utc_date', 'utc_time', 'utc_timestamp', 'values', 'varbinary',
'varchar', 'varcharacter', 'varying', 'when', 'where', 'while', 'with',
+
'write', 'x509', 'xor', 'year_month', 'zerofill', # 5.0
+
'columns', 'fields', 'privileges', 'soname', 'tables', # 4.1
+
'accessible', 'linear', 'master_ssl_verify_server_cert', 'range',
'read_only', 'read_write', # 5.1
+
+ 'general', 'ignore_server_ids', 'master_heartbeat_period', 'maxvalue',
+ 'resignal', 'signal', 'slow', # 5.5
+
+ 'get', 'io_after_gtids', 'io_before_gtids', 'master_bind', 'one_shot',
+ 'partition', 'sql_after_gtids', 'sql_before_gtids', # 5.6
+
])
AUTOCOMMIT_RE = re.compile(
@@ -333,13 +380,21 @@ SET_RE = re.compile(
class _NumericType(object):
- """Base for MySQL numeric types."""
+ """Base for MySQL numeric types.
+
+ This is the base both for NUMERIC as well as INTEGER, hence
+ it's a mixin.
+
+ """
def __init__(self, unsigned=False, zerofill=False, **kw):
self.unsigned = unsigned
self.zerofill = zerofill
super(_NumericType, self).__init__(**kw)
+ def __repr__(self):
+ return util.generic_repr(self,
+ to_inspect=[_NumericType, sqltypes.Numeric])
class _FloatType(_NumericType, sqltypes.Float):
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
@@ -351,22 +406,27 @@ class _FloatType(_NumericType, sqltypes.Float):
raise exc.ArgumentError(
"You must specify both precision and scale or omit "
"both altogether.")
-
super(_FloatType, self).__init__(precision=precision, asdecimal=asdecimal, **kw)
self.scale = scale
+ def __repr__(self):
+ return util.generic_repr(self,
+ to_inspect=[_FloatType, _NumericType, sqltypes.Float])
class _IntegerType(_NumericType, sqltypes.Integer):
def __init__(self, display_width=None, **kw):
self.display_width = display_width
super(_IntegerType, self).__init__(**kw)
+ def __repr__(self):
+ return util.generic_repr(self,
+ to_inspect=[_IntegerType, _NumericType, sqltypes.Integer])
class _StringType(sqltypes.String):
"""Base for MySQL string types."""
def __init__(self, charset=None, collation=None,
- ascii=False, binary=False,
+ ascii=False, binary=False, unicode=False,
national=False, **kw):
self.charset = charset
@@ -374,16 +434,14 @@ class _StringType(sqltypes.String):
kw.setdefault('collation', kw.pop('collate', collation))
self.ascii = ascii
- # We have to munge the 'unicode' param strictly as a dict
- # otherwise 2to3 will turn it into str.
- self.__dict__['unicode'] = kw.get('unicode', False)
- # sqltypes.String does not accept the 'unicode' arg at all.
- if 'unicode' in kw:
- del kw['unicode']
+ self.unicode = unicode
self.binary = binary
self.national = national
super(_StringType, self).__init__(**kw)
+ def __repr__(self):
+ return util.generic_repr(self,
+ to_inspect=[_StringType, sqltypes.String])
class NUMERIC(_NumericType, sqltypes.NUMERIC):
"""MySQL NUMERIC type."""
@@ -443,6 +501,14 @@ class DOUBLE(_FloatType):
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
"""Construct a DOUBLE.
+ .. note::
+
+ The :class:`.DOUBLE` type by default converts from float
+ to Decimal, using a truncation that defaults to 10 digits. Specify
+ either ``scale=n`` or ``decimal_return_scale=n`` in order to change
+ this scale, or ``asdecimal=False`` to return values directly as
+ Python floating points.
+
:param precision: Total digits in this number. If scale and precision
are both None, values are stored to limits allowed by the server.
@@ -468,6 +534,14 @@ class REAL(_FloatType, sqltypes.REAL):
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
"""Construct a REAL.
+ .. note::
+
+ The :class:`.REAL` type by default converts from float
+ to Decimal, using a truncation that defaults to 10 digits. Specify
+ either ``scale=n`` or ``decimal_return_scale=n`` in order to change
+ this scale, or ``asdecimal=False`` to return values directly as
+ Python floating points.
+
:param precision: Total digits in this number. If scale and precision
are both None, values are stored to limits allowed by the server.
@@ -902,6 +976,25 @@ class CHAR(_StringType, sqltypes.CHAR):
"""
super(CHAR, self).__init__(length=length, **kwargs)
+ @classmethod
+ def _adapt_string_for_cast(self, type_):
+ # copy the given string type into a CHAR
+ # for the purposes of rendering a CAST expression
+ type_ = sqltypes.to_instance(type_)
+ if isinstance(type_, sqltypes.CHAR):
+ return type_
+ elif isinstance(type_, _StringType):
+ return CHAR(
+ length=type_.length,
+ charset=type_.charset,
+ collation=type_.collation,
+ ascii=type_.ascii,
+ binary=type_.binary,
+ unicode=type_.unicode,
+ national=False # not supported in CAST
+ )
+ else:
+ return CHAR(length=type_.length)
class NVARCHAR(_StringType, sqltypes.NVARCHAR):
"""MySQL NVARCHAR type.
@@ -972,8 +1065,49 @@ class LONGBLOB(sqltypes._Binary):
__visit_name__ = 'LONGBLOB'
+class _EnumeratedValues(_StringType):
+ def _init_values(self, values, kw):
+ self.quoting = kw.pop('quoting', 'auto')
-class ENUM(sqltypes.Enum, _StringType):
+ if self.quoting == 'auto' and len(values):
+ # What quoting character are we using?
+ q = None
+ for e in values:
+ if len(e) == 0:
+ self.quoting = 'unquoted'
+ break
+ elif q is None:
+ q = e[0]
+
+ if len(e) == 1 or e[0] != q or e[-1] != q:
+ self.quoting = 'unquoted'
+ break
+ else:
+ self.quoting = 'quoted'
+
+ if self.quoting == 'quoted':
+ util.warn_deprecated(
+ 'Manually quoting %s value literals is deprecated. Supply '
+ 'unquoted values and use the quoting= option in cases of '
+ 'ambiguity.' % self.__class__.__name__)
+
+ values = self._strip_values(values)
+
+ self._enumerated_values = values
+ length = max([len(v) for v in values] + [0])
+ return values, length
+
+ @classmethod
+ def _strip_values(cls, values):
+ strip_values = []
+ for a in values:
+ if a[0:1] == '"' or a[0:1] == "'":
+ # strip enclosing quotes and unquote interior
+ a = a[1:-1].replace(a[0] * 2, a[0])
+ strip_values.append(a)
+ return strip_values
+
+class ENUM(sqltypes.Enum, _EnumeratedValues):
"""MySQL ENUM type."""
__visit_name__ = 'ENUM'
@@ -981,9 +1115,9 @@ class ENUM(sqltypes.Enum, _StringType):
def __init__(self, *enums, **kw):
"""Construct an ENUM.
- Example:
+ E.g.::
- Column('myenum', MSEnum("foo", "bar", "baz"))
+ Column('myenum', ENUM("foo", "bar", "baz"))
:param enums: The range of valid values for this ENUM. Values will be
quoted when generating the schema according to the quoting flag (see
@@ -1027,33 +1161,8 @@ class ENUM(sqltypes.Enum, _StringType):
literals for you. This is a transitional option.
"""
- self.quoting = kw.pop('quoting', 'auto')
-
- if self.quoting == 'auto' and len(enums):
- # What quoting character are we using?
- q = None
- for e in enums:
- if len(e) == 0:
- self.quoting = 'unquoted'
- break
- elif q is None:
- q = e[0]
-
- if e[0] != q or e[-1] != q:
- self.quoting = 'unquoted'
- break
- else:
- self.quoting = 'quoted'
-
- if self.quoting == 'quoted':
- util.warn_deprecated(
- 'Manually quoting ENUM value literals is deprecated. Supply '
- 'unquoted values and use the quoting= option in cases of '
- 'ambiguity.')
- enums = self._strip_enums(enums)
-
+ values, length = self._init_values(enums, kw)
self.strict = kw.pop('strict', False)
- length = max([len(v) for v in enums] + [0])
kw.pop('metadata', None)
kw.pop('schema', None)
kw.pop('name', None)
@@ -1061,17 +1170,11 @@ class ENUM(sqltypes.Enum, _StringType):
kw.pop('native_enum', None)
kw.pop('inherit_schema', None)
_StringType.__init__(self, length=length, **kw)
- sqltypes.Enum.__init__(self, *enums)
+ sqltypes.Enum.__init__(self, *values)
- @classmethod
- def _strip_enums(cls, enums):
- strip_enums = []
- for a in enums:
- if a[0:1] == '"' or a[0:1] == "'":
- # strip enclosing quotes and unquote interior
- a = a[1:-1].replace(a[0] * 2, a[0])
- strip_enums.append(a)
- return strip_enums
+ def __repr__(self):
+ return util.generic_repr(self,
+ to_inspect=[ENUM, _StringType, sqltypes.Enum])
def bind_processor(self, dialect):
super_convert = super(ENUM, self).bind_processor(dialect)
@@ -1091,7 +1194,7 @@ class ENUM(sqltypes.Enum, _StringType):
return sqltypes.Enum.adapt(self, impltype, **kw)
-class SET(_StringType):
+class SET(_EnumeratedValues):
"""MySQL SET type."""
__visit_name__ = 'SET'
@@ -1099,15 +1202,16 @@ class SET(_StringType):
def __init__(self, *values, **kw):
"""Construct a SET.
- Example::
+ E.g.::
- Column('myset', MSSet("'foo'", "'bar'", "'baz'"))
+ Column('myset', SET("foo", "bar", "baz"))
:param values: The range of valid values for this SET. Values will be
- used exactly as they appear when generating schemas. Strings must
- be quoted, as in the example above. Single-quotes are suggested for
- ANSI compatibility and are required for portability to servers with
- ANSI_QUOTES enabled.
+ quoted when generating the schema according to the quoting flag (see
+ below).
+
+ .. versionchanged:: 0.9.0 quoting is applied automatically to
+ :class:`.mysql.SET` in the same way as for :class:`.mysql.ENUM`.
:param charset: Optional, a column-level character set for this string
value. Takes precedence to 'ascii' or 'unicode' short-hand.
@@ -1126,18 +1230,27 @@ class SET(_StringType):
BINARY in schema. This does not affect the type of data stored,
only the collation of character data.
- """
- self._ddl_values = values
+ :param quoting: Defaults to 'auto': automatically determine enum value
+ quoting. If all enum values are surrounded by the same quoting
+ character, then use 'quoted' mode. Otherwise, use 'unquoted' mode.
- strip_values = []
- for a in values:
- if a[0:1] == '"' or a[0:1] == "'":
- # strip enclosing quotes and unquote interior
- a = a[1:-1].replace(a[0] * 2, a[0])
- strip_values.append(a)
+ 'quoted': values in enums are already quoted, they will be used
+ directly when generating the schema - this usage is deprecated.
+
+ 'unquoted': values in enums are not quoted, they will be escaped and
+ surrounded by single quotes when generating the schema.
+
+ Previous versions of this type always required manually quoted
+ values to be supplied; future versions will always quote the string
+ literals for you. This is a transitional option.
+
+ .. versionadded:: 0.9.0
+
+ """
+ values, length = self._init_values(values, kw)
+ self.values = tuple(values)
- self.values = strip_values
- kw.setdefault('length', max([len(v) for v in strip_values] + [0]))
+ kw.setdefault('length', length)
super(SET, self).__init__(**kw)
def result_processor(self, dialect, coltype):
@@ -1209,6 +1322,9 @@ MSFloat = FLOAT
MSInteger = INTEGER
colspecs = {
+ _IntegerType: _IntegerType,
+ _NumericType: _NumericType,
+ _FloatType: _FloatType,
sqltypes.Numeric: NUMERIC,
sqltypes.Float: FLOAT,
sqltypes.Time: TIME,
@@ -1300,14 +1416,9 @@ class MySQLCompiler(compiler.SQLCompiler):
elif isinstance(type_, (sqltypes.DECIMAL, sqltypes.DateTime,
sqltypes.Date, sqltypes.Time)):
return self.dialect.type_compiler.process(type_)
- elif isinstance(type_, sqltypes.Text):
- return 'CHAR'
- elif (isinstance(type_, sqltypes.String) and not
- isinstance(type_, (ENUM, SET))):
- if getattr(type_, 'length'):
- return 'CHAR(%s)' % type_.length
- else:
- return 'CHAR'
+ elif isinstance(type_, sqltypes.String) and not isinstance(type_, (ENUM, SET)):
+ adapted = CHAR._adapt_string_for_cast(type_)
+ return self.dialect.type_compiler.process(adapted)
elif isinstance(type_, sqltypes._Binary):
return 'BINARY'
elif isinstance(type_, sqltypes.NUMERIC):
@@ -1359,10 +1470,10 @@ class MySQLCompiler(compiler.SQLCompiler):
self.process(join.onclause, **kwargs)))
def for_update_clause(self, select):
- if select.for_update == 'read':
- return ' LOCK IN SHARE MODE'
+ if select._for_update_arg.read:
+ return " LOCK IN SHARE MODE"
else:
- return super(MySQLCompiler, self).for_update_clause(select)
+ return " FOR UPDATE"
def limit_clause(self, select):
# MySQL supports:
@@ -1426,9 +1537,9 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
constraint_string = super(
MySQLDDLCompiler, self).create_table_constraints(table)
- engine_key = '%s_engine' % self.dialect.name
- is_innodb = engine_key in table.kwargs and \
- table.kwargs[engine_key].lower() == 'innodb'
+ # why self.dialect.name and not 'mysql'? because of drizzle
+ is_innodb = 'engine' in table.dialect_options[self.dialect.name] and \
+ table.dialect_options[self.dialect.name]['engine'].lower() == 'innodb'
auto_inc_column = table._autoincrement_column
@@ -1439,7 +1550,7 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
constraint_string += ", \n\t"
constraint_string += "KEY %s (%s)" % (
self.preparer.quote(
- "idx_autoinc_%s" % auto_inc_column.name, None
+ "idx_autoinc_%s" % auto_inc_column.name
),
self.preparer.format_column(auto_inc_column)
)
@@ -1511,7 +1622,8 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
self._verify_index_table(index)
preparer = self.preparer
table = preparer.format_table(index.table)
- columns = [self.sql_compiler.process(expr, include_table=False)
+ columns = [self.sql_compiler.process(expr, include_table=False,
+ literal_binds=True)
for expr in index.expressions]
name = self._prepared_index_name(index)
@@ -1521,8 +1633,8 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
text += "UNIQUE "
text += "INDEX %s ON %s " % (name, table)
- if 'mysql_length' in index.kwargs:
- length = index.kwargs['mysql_length']
+ length = index.dialect_options['mysql']['length']
+ if length is not None:
if isinstance(length, dict):
# length value can be a (column_name --> integer value) mapping
@@ -1543,19 +1655,18 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
columns = ', '.join(columns)
text += '(%s)' % columns
- if 'mysql_using' in index.kwargs:
- using = index.kwargs['mysql_using']
- text += " USING %s" % (preparer.quote(using, index.quote))
+ using = index.dialect_options['mysql']['using']
+ if using is not None:
+ text += " USING %s" % (preparer.quote(using))
return text
def visit_primary_key_constraint(self, constraint):
text = super(MySQLDDLCompiler, self).\
visit_primary_key_constraint(constraint)
- if "mysql_using" in constraint.kwargs:
- using = constraint.kwargs['mysql_using']
- text += " USING %s" % (
- self.preparer.quote(using, constraint.quote))
+ using = constraint.dialect_options['mysql']['using']
+ if using:
+ text += " USING %s" % (self.preparer.quote(using))
return text
def visit_drop_index(self, drop):
@@ -1584,7 +1695,11 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
(self.preparer.format_table(constraint.table),
qual, const)
- def define_constraint_deferrability(self, constraint):
+ def define_constraint_match(self, constraint):
+ if constraint.match is not None:
+ raise exc.CompileError(
+ "MySQL ignores the 'MATCH' keyword while at the same time "
+ "causes ON UPDATE/ON DELETE clauses to be ignored.")
return ""
class MySQLTypeCompiler(compiler.GenericTypeCompiler):
@@ -1818,7 +1933,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
if not type_.native_enum:
return super(MySQLTypeCompiler, self).visit_enum(type_)
else:
- return self.visit_ENUM(type_)
+ return self._visit_enumerated_values("ENUM", type_, type_.enums)
def visit_BLOB(self, type_):
if type_.length:
@@ -1835,16 +1950,21 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
def visit_LONGBLOB(self, type_):
return "LONGBLOB"
- def visit_ENUM(self, type_):
+ def _visit_enumerated_values(self, name, type_, enumerated_values):
quoted_enums = []
- for e in type_.enums:
+ for e in enumerated_values:
quoted_enums.append("'%s'" % e.replace("'", "''"))
- return self._extend_string(type_, {}, "ENUM(%s)" %
- ",".join(quoted_enums))
+ return self._extend_string(type_, {}, "%s(%s)" % (
+ name, ",".join(quoted_enums))
+ )
+
+ def visit_ENUM(self, type_):
+ return self._visit_enumerated_values("ENUM", type_,
+ type_._enumerated_values)
def visit_SET(self, type_):
- return self._extend_string(type_, {}, "SET(%s)" %
- ",".join(type_._ddl_values))
+ return self._visit_enumerated_values("SET", type_,
+ type_._enumerated_values)
def visit_BOOLEAN(self, type):
return "BOOL"
@@ -1871,6 +1991,7 @@ class MySQLIdentifierPreparer(compiler.IdentifierPreparer):
return tuple([self.quote_identifier(i) for i in ids if i is not None])
+@log.class_logger
class MySQLDialect(default.DefaultDialect):
"""Details of the MySQL dialect. Not used directly in application code."""
@@ -1902,6 +2023,22 @@ class MySQLDialect(default.DefaultDialect):
_backslash_escapes = True
_server_ansiquotes = False
+ construct_arguments = [
+ (sa_schema.Table, {
+ "*": None
+ }),
+ (sql.Update, {
+ "limit": None
+ }),
+ (sa_schema.PrimaryKeyConstraint, {
+ "using": None
+ }),
+ (sa_schema.Index, {
+ "using": None,
+ "length": None,
+ })
+ ]
+
def __init__(self, isolation_level=None, **kwargs):
kwargs.pop('use_ansiquotes', None) # legacy
default.DefaultDialect.__init__(self, **kwargs)
@@ -2058,7 +2195,6 @@ class MySQLDialect(default.DefaultDialect):
rs.close()
def initialize(self, connection):
- default.DefaultDialect.initialize(self, connection)
self._connection_charset = self._detect_charset(connection)
self._detect_ansiquotes(connection)
if self._server_ansiquotes:
@@ -2067,6 +2203,8 @@ class MySQLDialect(default.DefaultDialect):
self.identifier_preparer = self.preparer(self,
server_ansiquotes=self._server_ansiquotes)
+ default.DefaultDialect.initialize(self, connection)
+
@property
def _supports_cast(self):
return self.server_version_info is None or \
@@ -2163,7 +2301,7 @@ class MySQLDialect(default.DefaultDialect):
ref_names = spec['foreign']
con_kw = {}
- for opt in ('name', 'onupdate', 'ondelete'):
+ for opt in ('onupdate', 'ondelete'):
if spec.get(opt, False):
con_kw[opt] = spec[opt]
@@ -2336,6 +2474,7 @@ class MySQLDialect(default.DefaultDialect):
# as of MySQL 5.0.1
self._backslash_escapes = 'NO_BACKSLASH_ESCAPES' not in mode
+
def _show_create_table(self, connection, table, charset=None,
full_name=None):
"""Run SHOW CREATE TABLE for a ``Table``."""
@@ -2394,6 +2533,7 @@ class ReflectedState(object):
self.constraints = []
+@log.class_logger
class MySQLTableDefinitionParser(object):
"""Parses the results of a SHOW CREATE TABLE statement."""
@@ -2558,8 +2698,8 @@ class MySQLTableDefinitionParser(object):
if spec.get(kw, False):
type_kw[kw] = spec[kw]
- if type_ == 'enum':
- type_args = ENUM._strip_enums(type_args)
+ if issubclass(col_type, _EnumeratedValues):
+ type_args = _EnumeratedValues._strip_values(type_args)
type_instance = col_type(*type_args, **type_kw)
@@ -2733,7 +2873,7 @@ class MySQLTableDefinitionParser(object):
#
# unique constraints come back as KEYs
kw = quotes.copy()
- kw['on'] = 'RESTRICT|CASCASDE|SET NULL|NOACTION'
+ kw['on'] = 'RESTRICT|CASCADE|SET NULL|NOACTION'
self._re_constraint = _re_compile(
r' '
r'CONSTRAINT +'
@@ -2796,8 +2936,6 @@ class MySQLTableDefinitionParser(object):
_options_of_type_string = ('COMMENT', 'DATA DIRECTORY', 'INDEX DIRECTORY',
'PASSWORD', 'CONNECTION')
-log.class_logger(MySQLTableDefinitionParser)
-log.class_logger(MySQLDialect)
class _DecodingRowProxy(object):
diff --git a/lib/sqlalchemy/dialects/mysql/cymysql.py b/lib/sqlalchemy/dialects/mysql/cymysql.py
index deb2de449..e81a79b85 100644
--- a/lib/sqlalchemy/dialects/mysql/cymysql.py
+++ b/lib/sqlalchemy/dialects/mysql/cymysql.py
@@ -1,5 +1,5 @@
# mysql/cymysql.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -37,7 +37,9 @@ class MySQLDialect_cymysql(MySQLDialect_mysqldb):
driver = 'cymysql'
description_encoding = None
- supports_sane_rowcount = False
+ supports_sane_rowcount = True
+ supports_sane_multi_rowcount = False
+ supports_unicode_statements = True
colspecs = util.update_copy(
MySQLDialect.colspecs,
diff --git a/lib/sqlalchemy/dialects/mysql/gaerdbms.py b/lib/sqlalchemy/dialects/mysql/gaerdbms.py
index c479e25e0..13203fce3 100644
--- a/lib/sqlalchemy/dialects/mysql/gaerdbms.py
+++ b/lib/sqlalchemy/dialects/mysql/gaerdbms.py
@@ -1,5 +1,5 @@
# mysql/gaerdbms.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
index b1906d3b9..b6e7c75fb 100644
--- a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
+++ b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
@@ -1,5 +1,5 @@
# mysql/mysqlconnector.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -79,12 +79,13 @@ class MySQLDialect_mysqlconnector(MySQLDialect):
def create_connect_args(self, url):
opts = url.translate_connect_args(username='user')
+
opts.update(url.query)
util.coerce_kw_type(opts, 'buffered', bool)
util.coerce_kw_type(opts, 'raise_on_warnings', bool)
- opts['buffered'] = True
- opts['raise_on_warnings'] = True
+ opts.setdefault('buffered', True)
+ opts.setdefault('raise_on_warnings', True)
# FOUND_ROWS must be set in ClientFlag to enable
# supports_sane_rowcount.
diff --git a/lib/sqlalchemy/dialects/mysql/mysqldb.py b/lib/sqlalchemy/dialects/mysql/mysqldb.py
index 60e68e5ee..84e8299d5 100644
--- a/lib/sqlalchemy/dialects/mysql/mysqldb.py
+++ b/lib/sqlalchemy/dialects/mysql/mysqldb.py
@@ -1,5 +1,5 @@
# mysql/mysqldb.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -56,7 +56,8 @@ from ...connectors.mysqldb import (
MySQLDBIdentifierPreparer,
MySQLDBConnector
)
-
+from .base import TEXT
+from ... import sql
class MySQLExecutionContext_mysqldb(MySQLDBExecutionContext, MySQLExecutionContext):
pass
@@ -75,4 +76,27 @@ class MySQLDialect_mysqldb(MySQLDBConnector, MySQLDialect):
statement_compiler = MySQLCompiler_mysqldb
preparer = MySQLIdentifierPreparer_mysqldb
+ def _check_unicode_returns(self, connection):
+ # work around issue fixed in
+ # https://github.com/farcepest/MySQLdb1/commit/cd44524fef63bd3fcb71947392326e9742d520e8
+ # specific issue w/ the utf8_bin collation and unicode returns
+
+ has_utf8_bin = connection.scalar(
+ "show collation where %s = 'utf8' and %s = 'utf8_bin'"
+ % (
+ self.identifier_preparer.quote("Charset"),
+ self.identifier_preparer.quote("Collation")
+ ))
+ if has_utf8_bin:
+ additional_tests = [
+ sql.collate(sql.cast(
+ sql.literal_column(
+ "'test collated returns'"),
+ TEXT(charset='utf8')), "utf8_bin")
+ ]
+ else:
+ additional_tests = []
+ return super(MySQLDBConnector, self)._check_unicode_returns(
+ connection, additional_tests)
+
dialect = MySQLDialect_mysqldb
diff --git a/lib/sqlalchemy/dialects/mysql/oursql.py b/lib/sqlalchemy/dialects/mysql/oursql.py
index 77370f91d..e6b50f335 100644
--- a/lib/sqlalchemy/dialects/mysql/oursql.py
+++ b/lib/sqlalchemy/dialects/mysql/oursql.py
@@ -1,5 +1,5 @@
# mysql/oursql.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mysql/pymysql.py b/lib/sqlalchemy/dialects/mysql/pymysql.py
index ba48017ac..74de09c4d 100644
--- a/lib/sqlalchemy/dialects/mysql/pymysql.py
+++ b/lib/sqlalchemy/dialects/mysql/pymysql.py
@@ -1,5 +1,5 @@
# mysql/pymysql.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mysql/pyodbc.py b/lib/sqlalchemy/dialects/mysql/pyodbc.py
index af3579665..e60e39cea 100644
--- a/lib/sqlalchemy/dialects/mysql/pyodbc.py
+++ b/lib/sqlalchemy/dialects/mysql/pyodbc.py
@@ -1,5 +1,5 @@
# mysql/pyodbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mysql/zxjdbc.py b/lib/sqlalchemy/dialects/mysql/zxjdbc.py
index 20f2e7359..b5fcfbdaf 100644
--- a/lib/sqlalchemy/dialects/mysql/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/mysql/zxjdbc.py
@@ -1,5 +1,5 @@
# mysql/zxjdbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/oracle/__init__.py b/lib/sqlalchemy/dialects/oracle/__init__.py
index 576790707..070e387d0 100644
--- a/lib/sqlalchemy/dialects/oracle/__init__.py
+++ b/lib/sqlalchemy/dialects/oracle/__init__.py
@@ -1,5 +1,5 @@
# oracle/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py
index 272bd1740..218a7ccfc 100644
--- a/lib/sqlalchemy/dialects/oracle/base.py
+++ b/lib/sqlalchemy/dialects/oracle/base.py
@@ -1,5 +1,5 @@
# oracle/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -16,12 +16,12 @@ Connect Arguments
The dialect supports several :func:`~sqlalchemy.create_engine()` arguments which
affect the behavior of the dialect regardless of driver in use.
-* *use_ansi* - Use ANSI JOIN constructs (see the section on Oracle 8). Defaults
+* ``use_ansi`` - Use ANSI JOIN constructs (see the section on Oracle 8). Defaults
to ``True``. If ``False``, Oracle-8 compatible constructs are used for joins.
-* *optimize_limits* - defaults to ``False``. see the section on LIMIT/OFFSET.
+* ``optimize_limits`` - defaults to ``False``. see the section on LIMIT/OFFSET.
-* *use_binds_for_limits* - defaults to ``True``. see the section on LIMIT/OFFSET.
+* ``use_binds_for_limits`` - defaults to ``True``. see the section on LIMIT/OFFSET.
Auto Increment Behavior
-----------------------
@@ -99,6 +99,41 @@ http://www.sqlalchemy.org/trac/wiki/UsageRecipes/WindowFunctionsByDefault
which installs a select compiler that overrides the generation of limit/offset with
a window function.
+RETURNING Support
+-----------------
+
+The Oracle database supports a limited form of RETURNING, in order to retrieve result
+sets of matched rows from INSERT, UPDATE and DELETE statements. Oracle's
+RETURNING..INTO syntax only supports one row being returned, as it relies upon
+OUT parameters in order to function. In addition, supported DBAPIs have further
+limitations (see :ref:`cx_oracle_returning`).
+
+SQLAlchemy's "implicit returning" feature, which employs RETURNING within an INSERT
+and sometimes an UPDATE statement in order to fetch newly generated primary key values
+and other SQL defaults and expressions, is normally enabled on the Oracle
+backend. By default, "implicit returning" typically only fetches the value of a
+single ``nextval(some_seq)`` expression embedded into an INSERT in order to increment
+a sequence within an INSERT statement and get the value back at the same time.
+To disable this feature across the board, specify ``implicit_returning=False`` to
+:func:`.create_engine`::
+
+ engine = create_engine("oracle://scott:tiger@dsn", implicit_returning=False)
+
+Implicit returning can also be disabled on a table-by-table basis as a table option::
+
+ # Core Table
+ my_table = Table("my_table", metadata, ..., implicit_returning=False)
+
+
+ # declarative
+ class MyClass(Base):
+ __tablename__ = 'my_table'
+ __table_args__ = {"implicit_returning": False}
+
+.. seealso::
+
+ :ref:`cx_oracle_returning` - additional cx_oracle-specific restrictions on implicit returning.
+
ON UPDATE CASCADE
-----------------
@@ -133,9 +168,10 @@ Synonym/DBLINK Reflection
-------------------------
When using reflection with Table objects, the dialect can optionally search for tables
-indicated by synonyms that reference DBLINK-ed tables by passing the flag
-oracle_resolve_synonyms=True as a keyword argument to the Table construct. If DBLINK
-is not in use this flag should be left off.
+indicated by synonyms, either in local or remote schemas or accessed over DBLINK,
+by passing the flag oracle_resolve_synonyms=True as a
+keyword argument to the Table construct. If synonyms are not in use
+this flag should be left off.
"""
@@ -145,7 +181,7 @@ from sqlalchemy import util, sql
from sqlalchemy.engine import default, base, reflection
from sqlalchemy.sql import compiler, visitors, expression
from sqlalchemy.sql import operators as sql_operators, functions as sql_functions
-from sqlalchemy import types as sqltypes
+from sqlalchemy import types as sqltypes, schema as sa_schema
from sqlalchemy.types import VARCHAR, NVARCHAR, CHAR, DATE, DATETIME, \
BLOB, CLOB, TIMESTAMP, FLOAT
@@ -362,7 +398,9 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler):
return self._visit_varchar(type_, '', '')
def _visit_varchar(self, type_, n, num):
- if not n and self.dialect._supports_char_length:
+ if not type_.length:
+ return "%(n)sVARCHAR%(two)s" % {'two': num, 'n': n}
+ elif not n and self.dialect._supports_char_length:
varchar = "VARCHAR%(two)s(%(length)s CHAR)"
return varchar % {'length': type_.length, 'two': num}
else:
@@ -521,7 +559,6 @@ class OracleCompiler(compiler.SQLCompiler):
return self.process(alias.original, **kwargs)
def returning_clause(self, stmt, returning_cols):
-
columns = []
binds = []
for i, column in enumerate(expression._select_iterables(returning_cols)):
@@ -595,7 +632,7 @@ class OracleCompiler(compiler.SQLCompiler):
# If needed, add the ora_rn, and wrap again with offset.
if select._offset is None:
- limitselect.for_update = select.for_update
+ limitselect._for_update_arg = select._for_update_arg
select = limitselect
else:
limitselect = limitselect.column(
@@ -614,7 +651,7 @@ class OracleCompiler(compiler.SQLCompiler):
offsetselect.append_whereclause(
sql.literal_column("ora_rn") > offset_value)
- offsetselect.for_update = select.for_update
+ offsetselect._for_update_arg = select._for_update_arg
select = offsetselect
kwargs['iswrapper'] = getattr(select, '_is_wrapper', False)
@@ -626,10 +663,19 @@ class OracleCompiler(compiler.SQLCompiler):
def for_update_clause(self, select):
if self.is_subquery():
return ""
- elif select.for_update == "nowait":
- return " FOR UPDATE NOWAIT"
- else:
- return super(OracleCompiler, self).for_update_clause(select)
+
+ tmp = ' FOR UPDATE'
+
+ if select._for_update_arg.of:
+ tmp += ' OF ' + ', '.join(
+ self.process(elem) for elem in
+ select._for_update_arg.of
+ )
+
+ if select._for_update_arg.nowait:
+ tmp += " NOWAIT"
+
+ return tmp
class OracleDDLCompiler(compiler.DDLCompiler):
@@ -708,6 +754,10 @@ class OracleDialect(default.DefaultDialect):
reflection_options = ('oracle_resolve_synonyms', )
+ construct_arguments = [
+ (sa_schema.Table, {"resolve_synonyms": False})
+ ]
+
def __init__(self,
use_ansi=True,
optimize_limits=False,
@@ -800,14 +850,15 @@ class OracleDialect(default.DefaultDialect):
returns the actual name, owner, dblink name, and synonym name if found.
"""
- q = "SELECT owner, table_owner, table_name, db_link, synonym_name FROM all_synonyms WHERE "
+ q = "SELECT owner, table_owner, table_name, db_link, "\
+ "synonym_name FROM all_synonyms WHERE "
clauses = []
params = {}
if desired_synonym:
clauses.append("synonym_name = :synonym_name")
params['synonym_name'] = desired_synonym
if desired_owner:
- clauses.append("table_owner = :desired_owner")
+ clauses.append("owner = :desired_owner")
params['desired_owner'] = desired_owner
if desired_table:
clauses.append("table_name = :tname")
diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
index e013799db..599eb21a3 100644
--- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py
+++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
@@ -1,5 +1,5 @@
# oracle/cx_oracle.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -17,11 +17,11 @@ Additional Connect Arguments
When connecting with ``dbname`` present, the host, port, and dbname tokens are
converted to a TNS name using
-the cx_oracle :func:`makedsn()` function. Otherwise, the host token is taken
+the cx_oracle ``makedsn()`` function. Otherwise, the host token is taken
directly as a TNS name.
Additional arguments which may be specified either as query string arguments
-on the URL, or as keyword arguments to :func:`~sqlalchemy.create_engine()` are:
+on the URL, or as keyword arguments to :func:`.create_engine()` are:
* allow_twophase - enable two-phase transactions. Defaults to ``True``.
@@ -65,6 +65,27 @@ of the encoding to be used.
Note that this behavior is disabled when Oracle 8 is detected, as it has been
observed that issues remain when passing Python unicodes to cx_oracle with Oracle 8.
+.. _cx_oracle_returning:
+
+RETURNING Support
+-----------------
+
+cx_oracle supports a limited subset of Oracle's already limited RETURNING support.
+Typically, results can only be guaranteed for at most one column being returned;
+this is the typical case when SQLAlchemy uses RETURNING to get just the value of a
+primary-key-associated sequence value. Additional column expressions will
+cause problems in a non-determinative way, due to cx_oracle's lack of support for
+the OCI_DATA_AT_EXEC API which is required for more complex RETURNING scenarios.
+
+.. seealso::
+
+ http://docs.oracle.com/cd/B10501_01/appdev.920/a96584/oci05bnd.htm#420693 - OCI documentation for RETURNING
+
+ http://sourceforge.net/mailarchive/message.php?msg_id=31338136 - cx_oracle developer commentary
+
+
+
+
LOB Objects
-----------
@@ -75,7 +96,7 @@ like result.fetchmany() and result.fetchall(). This means that by default, LOB
objects are fully fetched unconditionally by SQLAlchemy, and the linkage to a live
cursor is broken.
-To disable this processing, pass ``auto_convert_lobs=False`` to :func:`create_engine()`.
+To disable this processing, pass ``auto_convert_lobs=False`` to :func:`.create_engine()`.
Two Phase Transaction Support
-----------------------------
@@ -108,7 +129,7 @@ the application can make one of several choices:
* For ad-hoc two-phase operations without disabling pooling, the DBAPI
connection in use can be evicted from the connection pool using the
- :class:`.Connection.detach` method.
+ :meth:`.Connection.detach` method.
.. versionchanged:: 0.8.0b2,0.7.10
Support for cx_oracle prepared transactions has been implemented
@@ -211,10 +232,7 @@ class _OracleNumeric(sqltypes.Numeric):
if dialect.supports_native_decimal:
if self.asdecimal:
- if self.scale is None:
- fstring = "%.10f"
- else:
- fstring = "%%.%df" % self.scale
+ fstring = "%%.%df" % self._effective_decimal_return_scale
def to_decimal(value):
if value is None:
@@ -362,7 +380,8 @@ class _OracleRowid(oracle.ROWID):
class OracleCompiler_cx_oracle(OracleCompiler):
- def bindparam_string(self, name, quote=None, **kw):
+ def bindparam_string(self, name, **kw):
+ quote = getattr(name, 'quote', None)
if quote is True or quote is not False and \
self.preparer._bindparam_requires_quotes(name):
quoted_name = '"%s"' % name
@@ -506,7 +525,6 @@ class ReturningResultProxy(_result.FullyBufferedResultProxy):
def _cursor_description(self):
returning = self.context.compiled.returning
-
return [
("ret_%d" % i, None)
for i, col in enumerate(returning)
@@ -730,9 +748,6 @@ class OracleDialect_cx_oracle(OracleDialect):
255,
outconverter=self._detect_decimal,
arraysize=cursor.arraysize)
- # allow all strings to come back natively as Unicode
- elif defaultType in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR):
- return cursor.var(util.text_type, size, cursor.arraysize)
def on_connect(conn):
conn.outputtypehandler = output_type_handler
@@ -805,8 +820,9 @@ class OracleDialect_cx_oracle(OracleDialect):
# ORA-03113: end-of-file on communication channel
# ORA-03135: connection lost contact
# ORA-01033: ORACLE initialization or shutdown in progress
+ # ORA-02396: exceeded maximum idle time, please connect again
# TODO: Others ?
- return error.code in (28, 3114, 3113, 3135, 1033)
+ return error.code in (28, 3114, 3113, 3135, 1033, 2396)
else:
return False
diff --git a/lib/sqlalchemy/dialects/oracle/zxjdbc.py b/lib/sqlalchemy/dialects/oracle/zxjdbc.py
index ad53b89a1..710645b23 100644
--- a/lib/sqlalchemy/dialects/oracle/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/oracle/zxjdbc.py
@@ -1,5 +1,5 @@
# oracle/zxjdbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/postgres.py b/lib/sqlalchemy/dialects/postgres.py
index 82d1a39c2..6ed7e18bc 100644
--- a/lib/sqlalchemy/dialects/postgres.py
+++ b/lib/sqlalchemy/dialects/postgres.py
@@ -1,5 +1,5 @@
# dialects/postgres.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/postgresql/__init__.py b/lib/sqlalchemy/dialects/postgresql/__init__.py
index 408b67846..180e9fc7e 100644
--- a/lib/sqlalchemy/dialects/postgresql/__init__.py
+++ b/lib/sqlalchemy/dialects/postgresql/__init__.py
@@ -1,5 +1,5 @@
# postgresql/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -11,9 +11,11 @@ base.dialect = psycopg2.dialect
from .base import \
INTEGER, BIGINT, SMALLINT, VARCHAR, CHAR, TEXT, NUMERIC, FLOAT, REAL, \
INET, CIDR, UUID, BIT, MACADDR, DOUBLE_PRECISION, TIMESTAMP, TIME, \
- DATE, BYTEA, BOOLEAN, INTERVAL, ARRAY, ENUM, dialect, array, Any, All
+ DATE, BYTEA, BOOLEAN, INTERVAL, ARRAY, ENUM, dialect, array, Any, All, \
+ TSVECTOR
from .constraints import ExcludeConstraint
from .hstore import HSTORE, hstore
+from .json import JSON, JSONElement
from .ranges import INT4RANGE, INT8RANGE, NUMRANGE, DATERANGE, TSRANGE, \
TSTZRANGE
@@ -23,5 +25,5 @@ __all__ = (
'DOUBLE_PRECISION', 'TIMESTAMP', 'TIME', 'DATE', 'BYTEA', 'BOOLEAN',
'INTERVAL', 'ARRAY', 'ENUM', 'dialect', 'Any', 'All', 'array', 'HSTORE',
'hstore', 'INT4RANGE', 'INT8RANGE', 'NUMRANGE', 'DATERANGE',
- 'TSRANGE', 'TSTZRANGE'
+ 'TSRANGE', 'TSTZRANGE', 'json', 'JSON', 'JSONElement'
)
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index 6ccf7190e..11bd3830d 100644
--- a/lib/sqlalchemy/dialects/postgresql/base.py
+++ b/lib/sqlalchemy/dialects/postgresql/base.py
@@ -1,5 +1,5 @@
# postgresql/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -131,6 +131,44 @@ use the :meth:`._UpdateBase.returning` method on a per-statement basis::
where(table.c.name=='foo')
print result.fetchall()
+.. _postgresql_match:
+
+Full Text Search
+----------------
+
+SQLAlchemy makes available the Postgresql ``@@`` operator via the
+:meth:`.ColumnElement.match` method on any textual column expression.
+On a Postgresql dialect, an expression like the following::
+
+ select([sometable.c.text.match("search string")])
+
+will emit to the database::
+
+ SELECT text @@ to_tsquery('search string') FROM table
+
+The Postgresql text search functions such as ``to_tsquery()``
+and ``to_tsvector()`` are available
+explicitly using the standard :attr:`.func` construct. For example::
+
+ select([
+ func.to_tsvector('fat cats ate rats').match('cat & rat')
+ ])
+
+Emits the equivalent of::
+
+ SELECT to_tsvector('fat cats ate rats') @@ to_tsquery('cat & rat')
+
+The :class:`.postgresql.TSVECTOR` type can provide for explicit CAST::
+
+ from sqlalchemy.dialects.postgresql import TSVECTOR
+ from sqlalchemy import select, cast
+ select([cast("some text", TSVECTOR)])
+
+produces a statement equivalent to::
+
+ SELECT CAST('some text' AS TSVECTOR) AS anon_1
+
+
FROM ONLY ...
------------------------
@@ -210,7 +248,7 @@ import re
from ... import sql, schema, exc, util
from ...engine import default, reflection
-from ...sql import compiler, expression, util as sql_util, operators
+from ...sql import compiler, expression, operators
from ... import types as sqltypes
try:
@@ -230,7 +268,7 @@ RESERVED_WORDS = set(
"default", "deferrable", "desc", "distinct", "do", "else", "end",
"except", "false", "fetch", "for", "foreign", "from", "grant", "group",
"having", "in", "initially", "intersect", "into", "leading", "limit",
- "localtime", "localtimestamp", "new", "not", "null", "off", "offset",
+ "localtime", "localtimestamp", "new", "not", "null", "of", "off", "offset",
"old", "on", "only", "or", "order", "placing", "primary", "references",
"returning", "select", "session_user", "some", "symmetric", "table",
"then", "to", "trailing", "true", "union", "unique", "user", "using",
@@ -368,6 +406,23 @@ class UUID(sqltypes.TypeEngine):
PGUuid = UUID
+class TSVECTOR(sqltypes.TypeEngine):
+ """The :class:`.postgresql.TSVECTOR` type implements the Postgresql
+ text search type TSVECTOR.
+
+ It can be used to do full text queries on natural language
+ documents.
+
+ .. versionadded:: 0.9.0
+
+ .. seealso::
+
+ :ref:`postgresql_match`
+
+ """
+ __visit_name__ = 'TSVECTOR'
+
+
class _Slice(expression.ColumnElement):
__visit_name__ = 'slice'
@@ -913,6 +968,7 @@ ischema_names = {
'interval': INTERVAL,
'interval year to month': INTERVAL,
'interval day to second': INTERVAL,
+ 'tsvector' : TSVECTOR
}
@@ -954,25 +1010,30 @@ class PGCompiler(compiler.SQLCompiler):
def visit_ilike_op_binary(self, binary, operator, **kw):
escape = binary.modifiers.get("escape", None)
+
return '%s ILIKE %s' % \
(self.process(binary.left, **kw),
self.process(binary.right, **kw)) \
- + (escape and
- (' ESCAPE ' + self.render_literal_value(escape, None))
- or '')
+ + (
+ ' ESCAPE ' +
+ self.render_literal_value(escape, sqltypes.STRINGTYPE)
+ if escape else ''
+ )
def visit_notilike_op_binary(self, binary, operator, **kw):
escape = binary.modifiers.get("escape", None)
return '%s NOT ILIKE %s' % \
(self.process(binary.left, **kw),
self.process(binary.right, **kw)) \
- + (escape and
- (' ESCAPE ' + self.render_literal_value(escape, None))
- or '')
+ + (
+ ' ESCAPE ' +
+ self.render_literal_value(escape, sqltypes.STRINGTYPE)
+ if escape else ''
+ )
def render_literal_value(self, value, type_):
value = super(PGCompiler, self).render_literal_value(value, type_)
- # TODO: need to inspect "standard_conforming_strings"
+
if self.dialect._backslash_escapes:
value = value.replace('\\', '\\\\')
return value
@@ -1009,14 +1070,25 @@ class PGCompiler(compiler.SQLCompiler):
return ""
def for_update_clause(self, select):
- if select.for_update == 'nowait':
- return " FOR UPDATE NOWAIT"
- elif select.for_update == 'read':
- return " FOR SHARE"
- elif select.for_update == 'read_nowait':
- return " FOR SHARE NOWAIT"
+
+ if select._for_update_arg.read:
+ tmp = " FOR SHARE"
else:
- return super(PGCompiler, self).for_update_clause(select)
+ tmp = " FOR UPDATE"
+
+ if select._for_update_arg.of:
+ tables = util.OrderedSet(
+ c.table if isinstance(c, expression.ColumnClause)
+ else c for c in select._for_update_arg.of)
+ tmp += " OF " + ", ".join(
+ self.process(table, ashint=True)
+ for table in tables
+ )
+
+ if select._for_update_arg.nowait:
+ tmp += " NOWAIT"
+
+ return tmp
def returning_clause(self, stmt, returning_cols):
@@ -1039,12 +1111,15 @@ class PGCompiler(compiler.SQLCompiler):
class PGDDLCompiler(compiler.DDLCompiler):
def get_column_specification(self, column, **kwargs):
+
colspec = self.preparer.format_column(column)
impl_type = column.type.dialect_impl(self.dialect)
if column.primary_key and \
column is column.table._autoincrement_column and \
- not isinstance(impl_type, sqltypes.SmallInteger) and \
(
+ self.dialect.supports_smallserial or
+ not isinstance(impl_type, sqltypes.SmallInteger)
+ ) and (
column.default is None or
(
isinstance(column.default, schema.Sequence) and
@@ -1052,6 +1127,8 @@ class PGDDLCompiler(compiler.DDLCompiler):
)):
if isinstance(impl_type, sqltypes.BigInteger):
colspec += " BIGSERIAL"
+ elif isinstance(impl_type, sqltypes.SmallInteger):
+ colspec += " SMALLSERIAL"
else:
colspec += " SERIAL"
else:
@@ -1069,7 +1146,9 @@ class PGDDLCompiler(compiler.DDLCompiler):
return "CREATE TYPE %s AS ENUM (%s)" % (
self.preparer.format_type(type_),
- ",".join("'%s'" % e for e in type_.enums)
+ ", ".join(
+ self.sql_compiler.process(sql.literal(e), literal_binds=True)
+ for e in type_.enums)
)
def visit_drop_enum_type(self, drop):
@@ -1092,31 +1171,29 @@ class PGDDLCompiler(compiler.DDLCompiler):
preparer.format_table(index.table)
)
- if 'postgresql_using' in index.kwargs:
- using = index.kwargs['postgresql_using']
- text += "USING %s " % preparer.quote(using, index.quote)
+ using = index.dialect_options['postgresql']['using']
+ if using:
+ text += "USING %s " % preparer.quote(using)
- ops = index.kwargs.get('postgresql_ops', {})
+ ops = index.dialect_options["postgresql"]["ops"]
text += "(%s)" \
% (
', '.join([
- self.sql_compiler.process(expr, include_table=False) +
-
-
+ self.sql_compiler.process(
+ expr.self_group()
+ if not isinstance(expr, expression.ColumnClause)
+ else expr,
+ include_table=False, literal_binds=True) +
(c.key in ops and (' ' + ops[c.key]) or '')
-
-
for expr, c in zip(index.expressions, index.columns)])
)
- if 'postgresql_where' in index.kwargs:
- whereclause = index.kwargs['postgresql_where']
- else:
- whereclause = None
+ whereclause = index.dialect_options["postgresql"]["where"]
if whereclause is not None:
- whereclause = sql_util.expression_as_ddl(whereclause)
- where_compiled = self.sql_compiler.process(whereclause)
+ where_compiled = self.sql_compiler.process(
+ whereclause, include_table=False,
+ literal_binds=True)
text += " WHERE " + where_compiled
return text
@@ -1128,16 +1205,20 @@ class PGDDLCompiler(compiler.DDLCompiler):
elements = []
for c in constraint.columns:
op = constraint.operators[c.name]
- elements.append(self.preparer.quote(c.name, c.quote)+' WITH '+op)
+ elements.append(self.preparer.quote(c.name) + ' WITH '+op)
text += "EXCLUDE USING %s (%s)" % (constraint.using, ', '.join(elements))
if constraint.where is not None:
- sqltext = sql_util.expression_as_ddl(constraint.where)
- text += ' WHERE (%s)' % self.sql_compiler.process(sqltext)
+ text += ' WHERE (%s)' % self.sql_compiler.process(
+ constraint.where,
+ literal_binds=True)
text += self.define_constraint_deferrability(constraint)
return text
class PGTypeCompiler(compiler.GenericTypeCompiler):
+ def visit_TSVECTOR(self, type):
+ return "TSVECTOR"
+
def visit_INET(self, type_):
return "INET"
@@ -1162,6 +1243,9 @@ class PGTypeCompiler(compiler.GenericTypeCompiler):
def visit_HSTORE(self, type_):
return "HSTORE"
+ def visit_JSON(self, type_):
+ return "JSON"
+
def visit_INT4RANGE(self, type_):
return "INT4RANGE"
@@ -1250,9 +1334,9 @@ class PGIdentifierPreparer(compiler.IdentifierPreparer):
if not type_.name:
raise exc.CompileError("Postgresql ENUM type requires a name.")
- name = self.quote(type_.name, type_.quote)
+ name = self.quote(type_.name)
if not self.omit_schema and use_schema and type_.schema is not None:
- name = self.quote_schema(type_.schema, type_.quote) + "." + name
+ name = self.quote_schema(type_.schema) + "." + name
return name
@@ -1328,6 +1412,7 @@ class PGDialect(default.DefaultDialect):
supports_native_enum = True
supports_native_boolean = True
+ supports_smallserial = True
supports_sequences = True
sequences_optional = True
@@ -1349,12 +1434,22 @@ class PGDialect(default.DefaultDialect):
inspector = PGInspector
isolation_level = None
- # TODO: need to inspect "standard_conforming_strings"
+ construct_arguments = [
+ (schema.Index, {
+ "using": False,
+ "where": None,
+ "ops": {}
+ })
+ ]
+
_backslash_escapes = True
- def __init__(self, isolation_level=None, **kwargs):
+ def __init__(self, isolation_level=None, json_serializer=None,
+ json_deserializer=None, **kwargs):
default.DefaultDialect.__init__(self, **kwargs)
self.isolation_level = isolation_level
+ self._json_deserializer = json_deserializer
+ self._json_serializer = json_serializer
def initialize(self, connection):
super(PGDialect, self).initialize(connection)
@@ -1368,6 +1463,13 @@ class PGDialect(default.DefaultDialect):
# psycopg2, others may have placed ENUM here as well
self.colspecs.pop(ENUM, None)
+ # http://www.postgresql.org/docs/9.3/static/release-9-2.html#AEN116689
+ self.supports_smallserial = self.server_version_info >= (9, 2)
+
+ self._backslash_escapes = connection.scalar(
+ "show standard_conforming_strings"
+ ) == 'off'
+
def on_connect(self):
if self.isolation_level is not None:
def connect(conn):
@@ -1515,12 +1617,6 @@ class PGDialect(default.DefaultDialect):
return bool(cursor.first())
def has_type(self, connection, type_name, schema=None):
- bindparams = [
- sql.bindparam('typname',
- util.text_type(type_name), type_=sqltypes.Unicode),
- sql.bindparam('nspname',
- util.text_type(schema), type_=sqltypes.Unicode),
- ]
if schema is not None:
query = """
SELECT EXISTS (
@@ -1530,6 +1626,7 @@ class PGDialect(default.DefaultDialect):
AND n.nspname = :nspname
)
"""
+ query = sql.text(query)
else:
query = """
SELECT EXISTS (
@@ -1538,13 +1635,23 @@ class PGDialect(default.DefaultDialect):
AND pg_type_is_visible(t.oid)
)
"""
- cursor = connection.execute(sql.text(query, bindparams=bindparams))
+ query = sql.text(query)
+ query = query.bindparams(
+ sql.bindparam('typname',
+ util.text_type(type_name), type_=sqltypes.Unicode),
+ )
+ if schema is not None:
+ query = query.bindparams(
+ sql.bindparam('nspname',
+ util.text_type(schema), type_=sqltypes.Unicode),
+ )
+ cursor = connection.execute(query)
return bool(cursor.scalar())
def _get_server_version_info(self, connection):
v = connection.execute("select version()").scalar()
m = re.match(
- '(?:PostgreSQL|EnterpriseDB) '
+ '.*(?:PostgreSQL|EnterpriseDB) '
'(\d+)\.(\d+)(?:\.(\d+))?(?:\.\d+)?(?:devel)?',
v)
if not m:
@@ -1578,12 +1685,10 @@ class PGDialect(default.DefaultDialect):
table_name = util.text_type(table_name)
if schema is not None:
schema = util.text_type(schema)
- s = sql.text(query, bindparams=[
- sql.bindparam('table_name', type_=sqltypes.Unicode),
- sql.bindparam('schema', type_=sqltypes.Unicode)
- ],
- typemap={'oid': sqltypes.Integer}
- )
+ s = sql.text(query).bindparams(table_name=sqltypes.Unicode)
+ s = s.columns(oid=sqltypes.Integer)
+ if schema:
+ s = s.bindparams(sql.bindparam('schema', type_=sqltypes.Unicode))
c = connection.execute(s, table_name=table_name, schema=schema)
table_oid = c.scalar()
if table_oid is None:
@@ -1675,8 +1780,7 @@ class PGDialect(default.DefaultDialect):
SQL_COLS = """
SELECT a.attname,
pg_catalog.format_type(a.atttypid, a.atttypmod),
- (SELECT substring(pg_catalog.pg_get_expr(d.adbin, d.adrelid)
- for 128)
+ (SELECT pg_catalog.pg_get_expr(d.adbin, d.adrelid)
FROM pg_catalog.pg_attrdef d
WHERE d.adrelid = a.attrelid AND d.adnum = a.attnum
AND a.atthasdef)
@@ -1883,6 +1987,15 @@ class PGDialect(default.DefaultDialect):
n.oid = c.relnamespace
ORDER BY 1
"""
+ # http://www.postgresql.org/docs/9.0/static/sql-createtable.html
+ FK_REGEX = re.compile(
+ r'FOREIGN KEY \((.*?)\) REFERENCES (?:(.*?)\.)?(.*?)\((.*?)\)'
+ r'[\s]?(MATCH (FULL|PARTIAL|SIMPLE)+)?'
+ r'[\s]?(ON UPDATE (CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?'
+ r'[\s]?(ON DELETE (CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?'
+ r'[\s]?(DEFERRABLE|NOT DEFERRABLE)?'
+ r'[\s]?(INITIALLY (DEFERRED|IMMEDIATE)+)?'
+ )
t = sql.text(FK_SQL, typemap={
'conname': sqltypes.Unicode,
@@ -1890,15 +2003,18 @@ class PGDialect(default.DefaultDialect):
c = connection.execute(t, table=table_oid)
fkeys = []
for conname, condef, conschema in c.fetchall():
- m = re.search('FOREIGN KEY \((.*?)\) REFERENCES '
- '(?:(.*?)\.)?(.*?)\((.*?)\)', condef).groups()
+ m = re.search(FK_REGEX, condef).groups()
constrained_columns, referred_schema, \
- referred_table, referred_columns = m
+ referred_table, referred_columns, \
+ _, match, _, onupdate, _, ondelete, \
+ deferrable, _, initially = m
+ if deferrable is not None:
+ deferrable = True if deferrable == 'DEFERRABLE' else False
constrained_columns = [preparer._unquote_identifier(x)
for x in re.split(r'\s*,\s*', constrained_columns)]
if referred_schema:
- referred_schema =\
+ referred_schema = \
preparer._unquote_identifier(referred_schema)
elif schema is not None and schema == conschema:
# no schema was returned by pg_get_constraintdef(). This
@@ -1916,7 +2032,14 @@ class PGDialect(default.DefaultDialect):
'constrained_columns': constrained_columns,
'referred_schema': referred_schema,
'referred_table': referred_table,
- 'referred_columns': referred_columns
+ 'referred_columns': referred_columns,
+ 'options': {
+ 'onupdate': onupdate,
+ 'ondelete': ondelete,
+ 'deferrable': deferrable,
+ 'initially': initially,
+ 'match': match
+ }
}
fkeys.append(fkey_d)
return fkeys
@@ -1926,11 +2049,14 @@ class PGDialect(default.DefaultDialect):
table_oid = self.get_table_oid(connection, table_name, schema,
info_cache=kw.get('info_cache'))
+ # cast indkey as varchar since it's an int2vector,
+ # returned as a list by some drivers such as pypostgresql
+
IDX_SQL = """
SELECT
i.relname as relname,
ix.indisunique, ix.indexprs, ix.indpred,
- a.attname, a.attnum, ix.indkey
+ a.attname, a.attnum, ix.indkey::varchar
FROM
pg_class t
join pg_index ix on t.oid = ix.indrelid
diff --git a/lib/sqlalchemy/dialects/postgresql/constraints.py b/lib/sqlalchemy/dialects/postgresql/constraints.py
index 5b8bbe643..f45cef1a2 100644
--- a/lib/sqlalchemy/dialects/postgresql/constraints.py
+++ b/lib/sqlalchemy/dialects/postgresql/constraints.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2013-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -39,7 +39,7 @@ class ExcludeConstraint(ColumnCollectionConstraint):
:param using:
Optional string. If set, emit USING <index_method> when issuing DDL
for this constraint. Defaults to 'gist'.
-
+
:param where:
Optional string. If set, emit WHERE <predicate> when issuing DDL
for this constraint.
@@ -60,7 +60,7 @@ class ExcludeConstraint(ColumnCollectionConstraint):
where = kw.get('where')
if where:
self.where = expression._literal_as_text(where)
-
+
def copy(self, **kw):
elements = [(col, self.operators[col])
for col in self.columns.keys()]
diff --git a/lib/sqlalchemy/dialects/postgresql/hstore.py b/lib/sqlalchemy/dialects/postgresql/hstore.py
index c645e25d2..76562088d 100644
--- a/lib/sqlalchemy/dialects/postgresql/hstore.py
+++ b/lib/sqlalchemy/dialects/postgresql/hstore.py
@@ -1,5 +1,5 @@
# postgresql/hstore.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -144,8 +144,10 @@ class HSTORE(sqltypes.Concatenable, sqltypes.TypeEngine):
For usage with the SQLAlchemy ORM, it may be desirable to combine
the usage of :class:`.HSTORE` with :class:`.MutableDict` dictionary
now part of the :mod:`sqlalchemy.ext.mutable`
- extension. This extension will allow in-place changes to dictionary
- values to be detected by the unit of work::
+ extension. This extension will allow "in-place" changes to the
+ dictionary, e.g. addition of new keys or replacement/removal of existing
+ keys to/from the current dictionary, to produce events which will be detected
+ by the unit of work::
from sqlalchemy.ext.mutable import MutableDict
@@ -163,6 +165,11 @@ class HSTORE(sqltypes.Concatenable, sqltypes.TypeEngine):
session.commit()
+ When the :mod:`sqlalchemy.ext.mutable` extension is not used, the ORM
+ will not be alerted to any changes to the contents of an existing dictionary,
+ unless that dictionary value is re-assigned to the HSTORE-attribute itself,
+ thus generating a change event.
+
.. versionadded:: 0.8
.. seealso::
diff --git a/lib/sqlalchemy/dialects/postgresql/json.py b/lib/sqlalchemy/dialects/postgresql/json.py
new file mode 100644
index 000000000..2e29185e8
--- /dev/null
+++ b/lib/sqlalchemy/dialects/postgresql/json.py
@@ -0,0 +1,199 @@
+# postgresql/json.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+from __future__ import absolute_import
+
+import json
+
+from .base import ischema_names
+from ... import types as sqltypes
+from ...sql.operators import custom_op
+from ... import sql
+from ...sql import elements
+from ... import util
+
+__all__ = ('JSON', 'JSONElement')
+
+
+class JSONElement(elements.BinaryExpression):
+ """Represents accessing an element of a :class:`.JSON` value.
+
+ The :class:`.JSONElement` is produced whenever using the Python index
+ operator on an expression that has the type :class:`.JSON`::
+
+ expr = mytable.c.json_data['some_key']
+
+ The expression typically compiles to a JSON access such as ``col -> key``.
+ Modifiers are then available for typing behavior, including :meth:`.JSONElement.cast`
+ and :attr:`.JSONElement.astext`.
+
+ """
+ def __init__(self, left, right, astext=False, opstring=None, result_type=None):
+ self._astext = astext
+ if opstring is None:
+ if hasattr(right, '__iter__') and \
+ not isinstance(right, util.string_types):
+ opstring = "#>"
+ right = "{%s}" % (", ".join(util.text_type(elem) for elem in right))
+ else:
+ opstring = "->"
+
+ self._json_opstring = opstring
+ operator = custom_op(opstring, precedence=5)
+ right = left._check_literal(left, operator, right)
+ super(JSONElement, self).__init__(left, right, operator, type_=result_type)
+
+ @property
+ def astext(self):
+ """Convert this :class:`.JSONElement` to use the 'astext' operator
+ when evaluated.
+
+ E.g.::
+
+ select([data_table.c.data['some key'].astext])
+
+ .. seealso::
+
+ :meth:`.JSONElement.cast`
+
+ """
+ if self._astext:
+ return self
+ else:
+ return JSONElement(
+ self.left,
+ self.right,
+ astext=True,
+ opstring=self._json_opstring + ">",
+ result_type=sqltypes.String(convert_unicode=True)
+ )
+
+ def cast(self, type_):
+ """Convert this :class:`.JSONElement` to apply both the 'astext' operator
+ as well as an explicit type cast when evaluated.
+
+ E.g.::
+
+ select([data_table.c.data['some key'].cast(Integer)])
+
+ .. seealso::
+
+ :attr:`.JSONElement.astext`
+
+ """
+ if not self._astext:
+ return self.astext.cast(type_)
+ else:
+ return sql.cast(self, type_)
+
+
+class JSON(sqltypes.TypeEngine):
+ """Represent the Postgresql JSON type.
+
+ The :class:`.JSON` type stores arbitrary JSON format data, e.g.::
+
+ data_table = Table('data_table', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', JSON)
+ )
+
+ with engine.connect() as conn:
+ conn.execute(
+ data_table.insert(),
+ data = {"key1": "value1", "key2": "value2"}
+ )
+
+ :class:`.JSON` provides several operations:
+
+ * Index operations::
+
+ data_table.c.data['some key']
+
+ * Index operations returning text (required for text comparison)::
+
+ data_table.c.data['some key'].astext == 'some value'
+
+ * Index operations with a built-in CAST call::
+
+ data_table.c.data['some key'].cast(Integer) == 5
+
+ * Path index operations::
+
+ data_table.c.data[('key_1', 'key_2', ..., 'key_n')]
+
+ * Path index operations returning text (required for text comparison)::
+
+ data_table.c.data[('key_1', 'key_2', ..., 'key_n')].astext == 'some value'
+
+ Index operations return an instance of :class:`.JSONElement`, which represents
+ an expression such as ``column -> index``. This element then defines
+ methods such as :attr:`.JSONElement.astext` and :meth:`.JSONElement.cast`
+ for setting up type behavior.
+
+ The :class:`.JSON` type, when used with the SQLAlchemy ORM, does not detect
+ in-place mutations to the structure. In order to detect these, the
+ :mod:`sqlalchemy.ext.mutable` extension must be used. This extension will
+ allow "in-place" changes to the datastructure to produce events which
+ will be detected by the unit of work. See the example at :class:`.HSTORE`
+ for a simple example involving a dictionary.
+
+ Custom serializers and deserializers are specified at the dialect level,
+ that is using :func:`.create_engine`. The reason for this is that when
+ using psycopg2, the DBAPI only allows serializers at the per-cursor
+ or per-connection level. E.g.::
+
+ engine = create_engine("postgresql://scott:tiger@localhost/test",
+ json_serializer=my_serialize_fn,
+ json_deserializer=my_deserialize_fn
+ )
+
+ When using the psycopg2 dialect, the json_deserializer is registered
+ against the database using ``psycopg2.extras.register_default_json``.
+
+ .. versionadded:: 0.9
+
+ """
+
+ __visit_name__ = 'JSON'
+
+ class comparator_factory(sqltypes.Concatenable.Comparator):
+ """Define comparison operations for :class:`.JSON`."""
+
+ def __getitem__(self, other):
+ """Get the value at a given key."""
+
+ return JSONElement(self.expr, other)
+
+ def _adapt_expression(self, op, other_comparator):
+ if isinstance(op, custom_op):
+ if op.opstring == '->':
+ return op, sqltypes.Text
+ return sqltypes.Concatenable.Comparator.\
+ _adapt_expression(self, op, other_comparator)
+
+ def bind_processor(self, dialect):
+ json_serializer = dialect._json_serializer or json.dumps
+ if util.py2k:
+ encoding = dialect.encoding
+ def process(value):
+ return json_serializer(value).encode(encoding)
+ else:
+ def process(value):
+ return json_serializer(value)
+ return process
+
+ def result_processor(self, dialect, coltype):
+ json_deserializer = dialect._json_deserializer or json.loads
+ if util.py2k:
+ encoding = dialect.encoding
+ def process(value):
+ return json_deserializer(value.decode(encoding))
+ else:
+ def process(value):
+ return json_deserializer(value)
+ return process
+
+
+ischema_names['json'] = JSON
diff --git a/lib/sqlalchemy/dialects/postgresql/pg8000.py b/lib/sqlalchemy/dialects/postgresql/pg8000.py
index 0e503746c..bc73f9757 100644
--- a/lib/sqlalchemy/dialects/postgresql/pg8000.py
+++ b/lib/sqlalchemy/dialects/postgresql/pg8000.py
@@ -1,5 +1,5 @@
# postgresql/pg8000.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -39,7 +39,9 @@ class _PGNumeric(sqltypes.Numeric):
def result_processor(self, dialect, coltype):
if self.asdecimal:
if coltype in _FLOAT_TYPES:
- return processors.to_decimal_processor_factory(decimal.Decimal)
+ return processors.to_decimal_processor_factory(
+ decimal.Decimal,
+ self._effective_decimal_return_scale)
elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
# pg8000 returns Decimal natively for 1700
return None
diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
index 02eda094e..e9f64f829 100644
--- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py
+++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
@@ -1,5 +1,5 @@
# postgresql/psycopg2.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -179,6 +179,7 @@ from .base import PGDialect, PGCompiler, \
ENUM, ARRAY, _DECIMAL_TYPES, _FLOAT_TYPES,\
_INT_TYPES
from .hstore import HSTORE
+from .json import JSON
logger = logging.getLogger('sqlalchemy.dialects.postgresql')
@@ -191,7 +192,9 @@ class _PGNumeric(sqltypes.Numeric):
def result_processor(self, dialect, coltype):
if self.asdecimal:
if coltype in _FLOAT_TYPES:
- return processors.to_decimal_processor_factory(decimal.Decimal)
+ return processors.to_decimal_processor_factory(
+ decimal.Decimal,
+ self._effective_decimal_return_scale)
elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
# pg8000 returns Decimal natively for 1700
return None
@@ -210,23 +213,13 @@ class _PGNumeric(sqltypes.Numeric):
class _PGEnum(ENUM):
- def __init__(self, *arg, **kw):
- super(_PGEnum, self).__init__(*arg, **kw)
- if util.py2k:
- if self.convert_unicode:
- self.convert_unicode = "force"
-
-
-class _PGArray(ARRAY):
- def __init__(self, *arg, **kw):
- super(_PGArray, self).__init__(*arg, **kw)
- if util.py2k:
- # FIXME: this check won't work for setups that
- # have convert_unicode only on their create_engine().
- if isinstance(self.item_type, sqltypes.String) and \
- self.item_type.convert_unicode:
- self.item_type.convert_unicode = "force"
-
+ def result_processor(self, dialect, coltype):
+ if util.py2k and self.convert_unicode is True:
+ # we can't easily use PG's extensions here because
+ # the OID is on the fly, and we need to give it a python
+ # function anyway - not really worth it.
+ self.convert_unicode = "force_nocheck"
+ return super(_PGEnum, self).result_processor(dialect, coltype)
class _PGHStore(HSTORE):
def bind_processor(self, dialect):
@@ -241,6 +234,15 @@ class _PGHStore(HSTORE):
else:
return super(_PGHStore, self).result_processor(dialect, coltype)
+
+class _PGJSON(JSON):
+
+ def result_processor(self, dialect, coltype):
+ if dialect._has_native_json:
+ return None
+ else:
+ return super(_PGJSON, self).result_processor(dialect, coltype)
+
# When we're handed literal SQL, ensure it's a SELECT-query. Since
# 8.3, combining cursors and "FOR UPDATE" has been fine.
SERVER_SIDE_CURSOR_RE = re.compile(
@@ -325,6 +327,7 @@ class PGDialect_psycopg2(PGDialect):
psycopg2_version = (0, 0)
_has_native_hstore = False
+ _has_native_json = False
colspecs = util.update_copy(
PGDialect.colspecs,
@@ -332,8 +335,8 @@ class PGDialect_psycopg2(PGDialect):
sqltypes.Numeric: _PGNumeric,
ENUM: _PGEnum, # needs force_unicode
sqltypes.Enum: _PGEnum, # needs force_unicode
- ARRAY: _PGArray, # needs force_unicode
HSTORE: _PGHStore,
+ JSON: _PGJSON
}
)
@@ -361,6 +364,7 @@ class PGDialect_psycopg2(PGDialect):
self._has_native_hstore = self.use_native_hstore and \
self._hstore_oids(connection.connection) \
is not None
+ self._has_native_json = self.psycopg2_version >= (2, 5)
@classmethod
def dbapi(cls):
@@ -369,7 +373,7 @@ class PGDialect_psycopg2(PGDialect):
@util.memoized_property
def _isolation_lookup(self):
- extensions = __import__('psycopg2.extensions').extensions
+ from psycopg2 import extensions
return {
'AUTOCOMMIT': extensions.ISOLATION_LEVEL_AUTOCOMMIT,
'READ COMMITTED': extensions.ISOLATION_LEVEL_READ_COMMITTED,
@@ -407,6 +411,7 @@ class PGDialect_psycopg2(PGDialect):
if self.dbapi and self.use_native_unicode:
def on_connect(conn):
extensions.register_type(extensions.UNICODE, conn)
+ extensions.register_type(extensions.UNICODEARRAY, conn)
fns.append(on_connect)
if self.dbapi and self.use_native_hstore:
@@ -423,6 +428,11 @@ class PGDialect_psycopg2(PGDialect):
array_oid=array_oid)
fns.append(on_connect)
+ if self.dbapi and self._json_deserializer:
+ def on_connect(conn):
+ extras.register_default_json(conn, loads=self._json_deserializer)
+ fns.append(on_connect)
+
if fns:
def on_connect(conn):
for fn in fns:
diff --git a/lib/sqlalchemy/dialects/postgresql/pypostgresql.py b/lib/sqlalchemy/dialects/postgresql/pypostgresql.py
index 289bef114..f030d2c1b 100644
--- a/lib/sqlalchemy/dialects/postgresql/pypostgresql.py
+++ b/lib/sqlalchemy/dialects/postgresql/pypostgresql.py
@@ -1,5 +1,5 @@
# postgresql/pypostgresql.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/postgresql/ranges.py b/lib/sqlalchemy/dialects/postgresql/ranges.py
index d03f948a7..57b0c4c30 100644
--- a/lib/sqlalchemy/dialects/postgresql/ranges.py
+++ b/lib/sqlalchemy/dialects/postgresql/ranges.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2013-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/postgresql/zxjdbc.py b/lib/sqlalchemy/dialects/postgresql/zxjdbc.py
index 583afc23f..67e7d53e6 100644
--- a/lib/sqlalchemy/dialects/postgresql/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/postgresql/zxjdbc.py
@@ -1,5 +1,5 @@
# postgresql/zxjdbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/sqlite/__init__.py b/lib/sqlalchemy/dialects/sqlite/__init__.py
index 0d06160ae..a9b23575b 100644
--- a/lib/sqlalchemy/dialects/sqlite/__init__.py
+++ b/lib/sqlalchemy/dialects/sqlite/__init__.py
@@ -1,5 +1,5 @@
# sqlite/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py
index fb7d968be..579a61046 100644
--- a/lib/sqlalchemy/dialects/sqlite/base.py
+++ b/lib/sqlalchemy/dialects/sqlite/base.py
@@ -1,5 +1,5 @@
# sqlite/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -130,14 +130,14 @@ for new connections through the usage of events::
import datetime
import re
-from sqlalchemy import sql, exc
-from sqlalchemy.engine import default, base, reflection
-from sqlalchemy import types as sqltypes
-from sqlalchemy import util
-from sqlalchemy.sql import compiler
-from sqlalchemy import processors
+from ... import sql, exc
+from ...engine import default, reflection
+from ... import types as sqltypes, schema as sa_schema
+from ... import util
+from ...sql import compiler
+from ... import processors
-from sqlalchemy.types import BIGINT, BLOB, BOOLEAN, CHAR,\
+from ...types import BIGINT, BLOB, BOOLEAN, CHAR,\
DECIMAL, FLOAT, REAL, INTEGER, NUMERIC, SMALLINT, TEXT,\
TIMESTAMP, VARCHAR
@@ -160,6 +160,13 @@ class _DateTimeMixin(object):
kw["regexp"] = self._reg
return util.constructor_copy(self, cls, **kw)
+ def literal_processor(self, dialect):
+ bp = self.bind_processor(dialect)
+ def process(value):
+ return "'%s'" % bp(value)
+ return process
+
+
class DATETIME(_DateTimeMixin, sqltypes.DateTime):
"""Represent a Python datetime object in SQLite using a string.
@@ -211,6 +218,7 @@ class DATETIME(_DateTimeMixin, sqltypes.DateTime):
"%(hour)02d:%(minute)02d:%(second)02d"
)
+
def bind_processor(self, dialect):
datetime_datetime = datetime.datetime
datetime_date = datetime.date
@@ -491,7 +499,7 @@ class SQLiteDDLCompiler(compiler.DDLCompiler):
colspec += " NOT NULL"
if (column.primary_key and
- column.table.kwargs.get('sqlite_autoincrement', False) and
+ column.table.dialect_options['sqlite']['autoincrement'] and
len(column.table.primary_key.columns) == 1 and
issubclass(column.type._type_affinity, sqltypes.Integer) and
not column.foreign_keys):
@@ -506,7 +514,7 @@ class SQLiteDDLCompiler(compiler.DDLCompiler):
if len(constraint.columns) == 1:
c = list(constraint)[0]
if c.primary_key and \
- c.table.kwargs.get('sqlite_autoincrement', False) and \
+ c.table.dialect_options['sqlite']['autoincrement'] and \
issubclass(c.type._type_affinity, sqltypes.Integer) and \
not c.foreign_keys:
return None
@@ -615,6 +623,12 @@ class SQLiteDialect(default.DefaultDialect):
supports_cast = True
supports_default_values = True
+ construct_arguments = [
+ (sa_schema.Table, {
+ "autoincrement": False
+ })
+ ]
+
_broken_fk_pragma_quotes = False
def __init__(self, isolation_level=None, native_datetime=False, **kwargs):
diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/sqlalchemy/dialects/sqlite/pysqlite.py
index ad0dd5292..b53f4d4a0 100644
--- a/lib/sqlalchemy/dialects/sqlite/pysqlite.py
+++ b/lib/sqlalchemy/dialects/sqlite/pysqlite.py
@@ -1,5 +1,5 @@
# sqlite/pysqlite.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -97,6 +97,8 @@ or result processing. Execution of "func.current_date()" will return a string.
"func.current_timestamp()" is registered as returning a DATETIME type in
SQLAlchemy, so this function still receives SQLAlchemy-level result processing.
+.. _pysqlite_threading_pooling:
+
Threading/Pooling Behavior
---------------------------
@@ -160,8 +162,8 @@ Using Temporary Tables with SQLite
Due to the way SQLite deals with temporary tables, if you wish to use a
temporary table in a file-based SQLite database across multiple checkouts
from the connection pool, such as when using an ORM :class:`.Session` where
-the temporary table should continue to remain after :meth:`.commit` or
-:meth:`.rollback` is called, a pool which maintains a single connection must
+the temporary table should continue to remain after :meth:`.Session.commit` or
+:meth:`.Session.rollback` is called, a pool which maintains a single connection must
be used. Use :class:`.SingletonThreadPool` if the scope is only needed
within the current thread, or :class:`.StaticPool` if scope is needed within
multiple threads for this case::
diff --git a/lib/sqlalchemy/dialects/sybase/__init__.py b/lib/sqlalchemy/dialects/sybase/__init__.py
index f61352ceb..85f9dd9c9 100644
--- a/lib/sqlalchemy/dialects/sybase/__init__.py
+++ b/lib/sqlalchemy/dialects/sybase/__init__.py
@@ -1,5 +1,5 @@
# sybase/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/sybase/base.py b/lib/sqlalchemy/dialects/sybase/base.py
index 1c42d4846..501270778 100644
--- a/lib/sqlalchemy/dialects/sybase/base.py
+++ b/lib/sqlalchemy/dialects/sybase/base.py
@@ -1,5 +1,5 @@
# sybase/base.py
-# Copyright (C) 2010-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2010-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
# get_select_precolumns(), limit_clause() implementation
# copyright (C) 2007 Fisch Asset Management
# AG http://www.fam.ch, with coding by Alexander Houben
@@ -440,6 +440,8 @@ class SybaseDialect(default.DefaultDialect):
preparer = SybaseIdentifierPreparer
inspector = SybaseInspector
+ construct_arguments = []
+
def _get_default_schema_name(self, connection):
return connection.scalar(
text("SELECT user_name() as user_name",
diff --git a/lib/sqlalchemy/dialects/sybase/mxodbc.py b/lib/sqlalchemy/dialects/sybase/mxodbc.py
index 829132bdc..f14d1c420 100644
--- a/lib/sqlalchemy/dialects/sybase/mxodbc.py
+++ b/lib/sqlalchemy/dialects/sybase/mxodbc.py
@@ -1,5 +1,5 @@
# sybase/mxodbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/sybase/pyodbc.py b/lib/sqlalchemy/dialects/sybase/pyodbc.py
index 283c60da3..f773e5a6d 100644
--- a/lib/sqlalchemy/dialects/sybase/pyodbc.py
+++ b/lib/sqlalchemy/dialects/sybase/pyodbc.py
@@ -1,5 +1,5 @@
# sybase/pyodbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/sybase/pysybase.py b/lib/sqlalchemy/dialects/sybase/pysybase.py
index 8961ce8ef..664bd9ac0 100644
--- a/lib/sqlalchemy/dialects/sybase/pysybase.py
+++ b/lib/sqlalchemy/dialects/sybase/pysybase.py
@@ -1,5 +1,5 @@
# sybase/pysybase.py
-# Copyright (C) 2010-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2010-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py
index 0a5a96784..890c76645 100644
--- a/lib/sqlalchemy/engine/__init__.py
+++ b/lib/sqlalchemy/engine/__init__.py
@@ -1,5 +1,5 @@
# engine/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -50,14 +50,13 @@ url.py
within a URL.
"""
-# not sure what this was used for
-#import sqlalchemy.databases
-
from .interfaces import (
- Compiled,
Connectable,
Dialect,
ExecutionContext,
+
+ # backwards compat
+ Compiled,
TypeCompiler
)
@@ -83,8 +82,12 @@ from .util import (
connection_memoize
)
+
from . import util, strategies
+# backwards compat
+from ..sql import ddl
+
default_strategy = 'plain'
@@ -345,10 +348,13 @@ def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
arguments.
"""
- opts = util._coerce_config(configuration, prefix)
- opts.update(kwargs)
- url = opts.pop('url')
- return create_engine(url, **opts)
+ options = dict((key[len(prefix):], configuration[key])
+ for key in configuration
+ if key.startswith(prefix))
+ options['_coerce_config'] = True
+ options.update(kwargs)
+ url = options.pop('url')
+ return create_engine(url, **options)
__all__ = (
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index f69bd3d4b..1f2b7a3e5 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -1,5 +1,5 @@
# engine/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -11,8 +11,8 @@ from __future__ import with_statement
import sys
-from .. import exc, schema, util, log, interfaces
-from ..sql import expression, util as sql_util
+from .. import exc, util, log, interfaces
+from ..sql import expression, util as sql_util, schema, ddl
from .interfaces import Connectable, Compiled
from .util import _distill_params
import contextlib
@@ -303,20 +303,40 @@ class Connection(Connectable):
def invalidate(self, exception=None):
"""Invalidate the underlying DBAPI connection associated with
- this Connection.
+ this :class:`.Connection`.
- The underlying DB-API connection is literally closed (if
+ The underlying DBAPI connection is literally closed (if
possible), and is discarded. Its source connection pool will
typically lazily create a new connection to replace it.
- Upon the next usage, this Connection will attempt to reconnect
- to the pool with a new connection.
+ Upon the next use (where "use" typically means using the
+ :meth:`.Connection.execute` method or similar),
+ this :class:`.Connection` will attempt to
+ procure a new DBAPI connection using the services of the
+ :class:`.Pool` as a source of connectivity (e.g. a "reconnection").
+
+ If a transaction was in progress (e.g. the
+ :meth:`.Connection.begin` method has been called) when
+ :meth:`.Connection.invalidate` method is called, at the DBAPI
+ level all state associated with this transaction is lost, as
+ the DBAPI connection is closed. The :class:`.Connection`
+ will not allow a reconnection to proceed until the :class:`.Transaction`
+ object is ended, by calling the :meth:`.Transaction.rollback`
+ method; until that point, any attempt at continuing to use the
+ :class:`.Connection` will raise an
+ :class:`~sqlalchemy.exc.InvalidRequestError`.
+ This is to prevent applications from accidentally
+ continuing ongoing transactional operations despite the
+ fact that the transaction has been lost due to an
+ invalidation.
+
+ The :meth:`.Connection.invalidate` method, just like auto-invalidation,
+ will at the connection pool level invoke the :meth:`.PoolEvents.invalidate`
+ event.
- Transactions in progress remain in an "opened" state (even though the
- actual transaction is gone); these must be explicitly rolled back
- before a reconnect on this Connection can proceed. This is to prevent
- applications from accidentally continuing their transactional
- operations in a non-transactional state.
+ .. seealso::
+
+ :ref:`pool_connection_invalidation`
"""
if self.invalidated:
@@ -403,7 +423,6 @@ class Connection(Connectable):
See also :meth:`.Connection.begin`,
:meth:`.Connection.begin_twophase`.
"""
-
if self.__transaction is None:
self.__transaction = RootTransaction(self)
else:
@@ -450,7 +469,7 @@ class Connection(Connectable):
return self.__transaction is not None
- def _begin_impl(self):
+ def _begin_impl(self, transaction):
if self._echo:
self.engine.logger.info("BEGIN (implicit)")
@@ -459,6 +478,8 @@ class Connection(Connectable):
try:
self.engine.dialect.do_begin(self.connection)
+ if self.connection._reset_agent is None:
+ self.connection._reset_agent = transaction
except Exception as e:
self._handle_dbapi_exception(e, None, None, None, None)
@@ -471,9 +492,12 @@ class Connection(Connectable):
self.engine.logger.info("ROLLBACK")
try:
self.engine.dialect.do_rollback(self.connection)
- self.__transaction = None
except Exception as e:
self._handle_dbapi_exception(e, None, None, None, None)
+ finally:
+ if self.connection._reset_agent is self.__transaction:
+ self.connection._reset_agent = None
+ self.__transaction = None
else:
self.__transaction = None
@@ -485,9 +509,12 @@ class Connection(Connectable):
self.engine.logger.info("COMMIT")
try:
self.engine.dialect.do_commit(self.connection)
- self.__transaction = None
except Exception as e:
self._handle_dbapi_exception(e, None, None, None, None)
+ finally:
+ if self.connection._reset_agent is self.__transaction:
+ self.connection._reset_agent = None
+ self.__transaction = None
def _savepoint_impl(self, name=None):
if self._has_events:
@@ -516,14 +543,17 @@ class Connection(Connectable):
self.engine.dialect.do_release_savepoint(self, name)
self.__transaction = context
- def _begin_twophase_impl(self, xid):
+ def _begin_twophase_impl(self, transaction):
if self._echo:
self.engine.logger.info("BEGIN TWOPHASE (implicit)")
if self._has_events:
- self.dispatch.begin_twophase(self, xid)
+ self.dispatch.begin_twophase(self, transaction.xid)
if self._still_open_and_connection_is_valid:
- self.engine.dialect.do_begin_twophase(self, xid)
+ self.engine.dialect.do_begin_twophase(self, transaction.xid)
+
+ if self.connection._reset_agent is None:
+ self.connection._reset_agent = transaction
def _prepare_twophase_impl(self, xid):
if self._has_events:
@@ -539,8 +569,14 @@ class Connection(Connectable):
if self._still_open_and_connection_is_valid:
assert isinstance(self.__transaction, TwoPhaseTransaction)
- self.engine.dialect.do_rollback_twophase(self, xid, is_prepared)
- self.__transaction = None
+ try:
+ self.engine.dialect.do_rollback_twophase(self, xid, is_prepared)
+ finally:
+ if self.connection._reset_agent is self.__transaction:
+ self.connection._reset_agent = None
+ self.__transaction = None
+ else:
+ self.__transaction = None
def _commit_twophase_impl(self, xid, is_prepared):
if self._has_events:
@@ -548,8 +584,14 @@ class Connection(Connectable):
if self._still_open_and_connection_is_valid:
assert isinstance(self.__transaction, TwoPhaseTransaction)
- self.engine.dialect.do_commit_twophase(self, xid, is_prepared)
- self.__transaction = None
+ try:
+ self.engine.dialect.do_commit_twophase(self, xid, is_prepared)
+ finally:
+ if self.connection._reset_agent is self.__transaction:
+ self.connection._reset_agent = None
+ self.__transaction = None
+ else:
+ self.__transaction = None
def _autorollback(self):
if not self.in_transaction():
@@ -581,6 +623,8 @@ class Connection(Connectable):
else:
if not self.__branch:
conn.close()
+ if conn._reset_agent is self.__transaction:
+ conn._reset_agent = None
del self.__connection
self.__can_reconnect = False
self.__transaction = None
@@ -652,17 +696,16 @@ class Connection(Connectable):
DBAPI-agnostic way, use the :func:`~.expression.text` construct.
"""
- for c in type(object).__mro__:
- if c in Connection.executors:
- return Connection.executors[c](
- self,
- object,
- multiparams,
- params)
- else:
+ if isinstance(object, util.string_types[0]):
+ return self._execute_text(object, multiparams, params)
+ try:
+ meth = object._execute_on_connection
+ except AttributeError:
raise exc.InvalidRequestError(
"Unexecutable object type: %s" %
type(object))
+ else:
+ return meth(self, multiparams, params)
def _execute_function(self, func, multiparams, params):
"""Execute a sql.FunctionElement object."""
@@ -825,7 +868,7 @@ class Connection(Connectable):
context = constructor(dialect, self, conn, *args)
except Exception as e:
self._handle_dbapi_exception(e,
- str(statement), parameters,
+ util.text_type(statement), parameters,
None, None)
if context.compiled:
@@ -898,6 +941,11 @@ class Connection(Connectable):
elif not context._is_explicit_returning:
result.close(_autoclose_connection=False)
result._metadata = None
+ elif context.isupdate and context._is_implicit_returning:
+ context._fetch_implicit_update_returning(result)
+ result.close(_autoclose_connection=False)
+ result._metadata = None
+
elif result._metadata is None:
# no results, get rowcount
# (which requires open cursor on some drivers
@@ -1033,16 +1081,6 @@ class Connection(Connectable):
if self.should_close_with_result:
self.close()
- # poor man's multimethod/generic function thingy
- executors = {
- expression.FunctionElement: _execute_function,
- expression.ClauseElement: _execute_clauseelement,
- Compiled: _execute_compiled,
- schema.SchemaItem: _execute_default,
- schema.DDLElement: _execute_ddl,
- util.string_types[0]: _execute_text
- }
-
def default_schema_name(self):
return self.engine.dialect.get_default_schema_name(self)
@@ -1210,7 +1248,7 @@ class Transaction(object):
class RootTransaction(Transaction):
def __init__(self, connection):
super(RootTransaction, self).__init__(connection, None)
- self.connection._begin_impl()
+ self.connection._begin_impl(self)
def _do_rollback(self):
if self.is_active:
@@ -1259,7 +1297,7 @@ class TwoPhaseTransaction(Transaction):
super(TwoPhaseTransaction, self).__init__(connection, None)
self._is_prepared = False
self.xid = xid
- self.connection._begin_twophase_impl(self.xid)
+ self.connection._begin_twophase_impl(self)
def prepare(self):
"""Prepare this :class:`.TwoPhaseTransaction`.
@@ -1423,7 +1461,7 @@ class Engine(Connectable, log.Identified):
echo = log.echo_property()
def __repr__(self):
- return 'Engine(%s)' % str(self.url)
+ return 'Engine(%r)' % self.url
def dispose(self):
"""Dispose of the connection pool used by this :class:`.Engine`.
@@ -1667,6 +1705,17 @@ class Engine(Connectable, log.Identified):
return self.dialect.get_table_names(conn, schema)
def has_table(self, table_name, schema=None):
+ """Return True if the given backend has a table of the given name.
+
+ .. seealso::
+
+ :ref:`metadata_reflection_inspector` - detailed schema inspection using
+ the :class:`.Inspector` interface.
+
+ :class:`.quoted_name` - used to pass quoting information along
+ with a schema identifier.
+
+ """
return self.run_callable(self.dialect.has_table, table_name, schema)
def raw_connection(self):
diff --git a/lib/sqlalchemy/engine/ddl.py b/lib/sqlalchemy/engine/ddl.py
deleted file mode 100644
index 6daa9be6b..000000000
--- a/lib/sqlalchemy/engine/ddl.py
+++ /dev/null
@@ -1,193 +0,0 @@
-# engine/ddl.py
-# Copyright (C) 2009-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Routines to handle CREATE/DROP workflow."""
-
-from .. import schema
-from ..sql import util as sql_util
-
-
-class DDLBase(schema.SchemaVisitor):
- def __init__(self, connection):
- self.connection = connection
-
-
-class SchemaGenerator(DDLBase):
-
- def __init__(self, dialect, connection, checkfirst=False,
- tables=None, **kwargs):
- super(SchemaGenerator, self).__init__(connection, **kwargs)
- self.checkfirst = checkfirst
- self.tables = tables
- self.preparer = dialect.identifier_preparer
- self.dialect = dialect
- self.memo = {}
-
- def _can_create_table(self, table):
- self.dialect.validate_identifier(table.name)
- if table.schema:
- self.dialect.validate_identifier(table.schema)
- return not self.checkfirst or \
- not self.dialect.has_table(self.connection,
- table.name, schema=table.schema)
-
- def _can_create_sequence(self, sequence):
- return self.dialect.supports_sequences and \
- (
- (not self.dialect.sequences_optional or
- not sequence.optional) and
- (
- not self.checkfirst or
- not self.dialect.has_sequence(
- self.connection,
- sequence.name,
- schema=sequence.schema)
- )
- )
-
- def visit_metadata(self, metadata):
- if self.tables is not None:
- tables = self.tables
- else:
- tables = list(metadata.tables.values())
- collection = [t for t in sql_util.sort_tables(tables)
- if self._can_create_table(t)]
- seq_coll = [s for s in metadata._sequences.values()
- if s.column is None and self._can_create_sequence(s)]
-
- metadata.dispatch.before_create(metadata, self.connection,
- tables=collection,
- checkfirst=self.checkfirst,
- _ddl_runner=self)
-
- for seq in seq_coll:
- self.traverse_single(seq, create_ok=True)
-
- for table in collection:
- self.traverse_single(table, create_ok=True)
-
- metadata.dispatch.after_create(metadata, self.connection,
- tables=collection,
- checkfirst=self.checkfirst,
- _ddl_runner=self)
-
- def visit_table(self, table, create_ok=False):
- if not create_ok and not self._can_create_table(table):
- return
-
- table.dispatch.before_create(table, self.connection,
- checkfirst=self.checkfirst,
- _ddl_runner=self)
-
- for column in table.columns:
- if column.default is not None:
- self.traverse_single(column.default)
-
- self.connection.execute(schema.CreateTable(table))
-
- if hasattr(table, 'indexes'):
- for index in table.indexes:
- self.traverse_single(index)
-
- table.dispatch.after_create(table, self.connection,
- checkfirst=self.checkfirst,
- _ddl_runner=self)
-
- def visit_sequence(self, sequence, create_ok=False):
- if not create_ok and not self._can_create_sequence(sequence):
- return
- self.connection.execute(schema.CreateSequence(sequence))
-
- def visit_index(self, index):
- self.connection.execute(schema.CreateIndex(index))
-
-
-class SchemaDropper(DDLBase):
-
- def __init__(self, dialect, connection, checkfirst=False,
- tables=None, **kwargs):
- super(SchemaDropper, self).__init__(connection, **kwargs)
- self.checkfirst = checkfirst
- self.tables = tables
- self.preparer = dialect.identifier_preparer
- self.dialect = dialect
- self.memo = {}
-
- def visit_metadata(self, metadata):
- if self.tables is not None:
- tables = self.tables
- else:
- tables = list(metadata.tables.values())
-
- collection = [
- t
- for t in reversed(sql_util.sort_tables(tables))
- if self._can_drop_table(t)
- ]
-
- seq_coll = [
- s
- for s in metadata._sequences.values()
- if s.column is None and self._can_drop_sequence(s)
- ]
-
- metadata.dispatch.before_drop(
- metadata, self.connection, tables=collection,
- checkfirst=self.checkfirst, _ddl_runner=self)
-
- for table in collection:
- self.traverse_single(table, drop_ok=True)
-
- for seq in seq_coll:
- self.traverse_single(seq, drop_ok=True)
-
- metadata.dispatch.after_drop(
- metadata, self.connection, tables=collection,
- checkfirst=self.checkfirst, _ddl_runner=self)
-
- def _can_drop_table(self, table):
- self.dialect.validate_identifier(table.name)
- if table.schema:
- self.dialect.validate_identifier(table.schema)
- return not self.checkfirst or self.dialect.has_table(self.connection,
- table.name, schema=table.schema)
-
- def _can_drop_sequence(self, sequence):
- return self.dialect.supports_sequences and \
- ((not self.dialect.sequences_optional or
- not sequence.optional) and
- (not self.checkfirst or
- self.dialect.has_sequence(
- self.connection,
- sequence.name,
- schema=sequence.schema))
- )
-
- def visit_index(self, index):
- self.connection.execute(schema.DropIndex(index))
-
- def visit_table(self, table, drop_ok=False):
- if not drop_ok and not self._can_drop_table(table):
- return
-
- table.dispatch.before_drop(table, self.connection,
- checkfirst=self.checkfirst,
- _ddl_runner=self)
-
- for column in table.columns:
- if column.default is not None:
- self.traverse_single(column.default)
-
- self.connection.execute(schema.DropTable(table))
-
- table.dispatch.after_drop(table, self.connection,
- checkfirst=self.checkfirst,
- _ddl_runner=self)
-
- def visit_sequence(self, sequence, drop_ok=False):
- if not drop_ok and not self._can_drop_sequence(sequence):
- return
- self.connection.execute(schema.DropSequence(sequence))
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
index 3e8e96a42..ed975b8cf 100644
--- a/lib/sqlalchemy/engine/default.py
+++ b/lib/sqlalchemy/engine/default.py
@@ -1,5 +1,5 @@
# engine/default.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -16,7 +16,8 @@ import re
import random
from . import reflection, interfaces, result
from ..sql import compiler, expression
-from .. import exc, types as sqltypes, util, pool, processors
+from .. import types as sqltypes
+from .. import exc, util, pool, processors
import codecs
import weakref
from .. import event
@@ -26,6 +27,7 @@ AUTOCOMMIT_REGEXP = re.compile(
re.I | re.UNICODE)
+
class DefaultDialect(interfaces.Dialect):
"""Default implementation of Dialect"""
@@ -57,6 +59,18 @@ class DefaultDialect(interfaces.Dialect):
supports_simple_order_by_label = True
+ engine_config_types = util.immutabledict([
+ ('convert_unicode', util.bool_or_str('force')),
+ ('pool_timeout', int),
+ ('echo', util.bool_or_str('debug')),
+ ('echo_pool', util.bool_or_str('debug')),
+ ('pool_recycle', int),
+ ('pool_size', int),
+ ('max_overflow', int),
+ ('pool_threadlocal', bool),
+ ('use_native_unicode', bool),
+ ])
+
# if the NUMERIC type
# returns decimal.Decimal.
# *not* the FLOAT type however.
@@ -97,6 +111,33 @@ class DefaultDialect(interfaces.Dialect):
server_version_info = None
+ construct_arguments = None
+ """Optional set of argument specifiers for various SQLAlchemy
+ constructs, typically schema items.
+
+ To
+ implement, establish as a series of tuples, as in::
+
+ construct_arguments = [
+ (schema.Index, {
+ "using": False,
+ "where": None,
+ "ops": None
+ })
+ ]
+
+ If the above construct is established on the Postgresql dialect,
+ the ``Index`` construct will now accept additional keyword arguments
+ such as ``postgresql_using``, ``postgresql_where``, etc. Any kind of
+ ``postgresql_XYZ`` argument not corresponding to the above template will
+ be rejected with an ``ArgumentError``, for all those SQLAlchemy constructs
+ which implement the :class:`.DialectKWArgs` class.
+
+ The default is ``None``; older dialects which don't implement the argument
+ will have the old behavior of un-validated kwargs to schema/SQL constructs.
+
+ """
+
# indicates symbol names are
# UPPERCASEd if they are case insensitive
# within the database.
@@ -111,6 +152,7 @@ class DefaultDialect(interfaces.Dialect):
implicit_returning=None,
supports_right_nested_joins=None,
case_sensitive=True,
+ supports_native_boolean=None,
label_length=None, **kwargs):
if not getattr(self, 'ported_sqla_06', True):
@@ -136,7 +178,8 @@ class DefaultDialect(interfaces.Dialect):
self.type_compiler = self.type_compiler(self)
if supports_right_nested_joins is not None:
self.supports_right_nested_joins = supports_right_nested_joins
-
+ if supports_native_boolean is not None:
+ self.supports_native_boolean = supports_native_boolean
self.case_sensitive = case_sensitive
if label_length and label_length > self.max_identifier_length:
@@ -159,6 +202,8 @@ class DefaultDialect(interfaces.Dialect):
self._encoder = codecs.getencoder(self.encoding)
self._decoder = processors.to_unicode_processor_factory(self.encoding)
+
+
@util.memoized_property
def _type_memos(self):
return weakref.WeakKeyDictionary()
@@ -191,6 +236,10 @@ class DefaultDialect(interfaces.Dialect):
self.returns_unicode_strings = self._check_unicode_returns(connection)
+ if self.description_encoding is not None and \
+ self._check_unicode_description(connection):
+ self._description_decoder = self.description_encoding = None
+
self.do_rollback(connection.connection)
def on_connect(self):
@@ -207,46 +256,78 @@ class DefaultDialect(interfaces.Dialect):
"""
return None
- def _check_unicode_returns(self, connection):
+ def _check_unicode_returns(self, connection, additional_tests=None):
if util.py2k and not self.supports_unicode_statements:
cast_to = util.binary_type
else:
cast_to = util.text_type
- def check_unicode(formatstr, type_):
- cursor = connection.connection.cursor()
+ if self.positional:
+ parameters = self.execute_sequence_format()
+ else:
+ parameters = {}
+
+ def check_unicode(test):
+ statement = cast_to(expression.select([test]).compile(dialect=self))
try:
- try:
- cursor.execute(
- cast_to(
- expression.select(
- [expression.cast(
- expression.literal_column(
- "'test %s returns'" % formatstr),
- type_)
- ]).compile(dialect=self)
- )
- )
- row = cursor.fetchone()
-
- return isinstance(row[0], util.text_type)
- except self.dbapi.Error as de:
- util.warn("Exception attempting to "
- "detect unicode returns: %r" % de)
- return False
- finally:
+ cursor = connection.connection.cursor()
+ connection._cursor_execute(cursor, statement, parameters)
+ row = cursor.fetchone()
cursor.close()
+ except exc.DBAPIError as de:
+ # note that _cursor_execute() will have closed the cursor
+ # if an exception is thrown.
+ util.warn("Exception attempting to "
+ "detect unicode returns: %r" % de)
+ return False
+ else:
+ return isinstance(row[0], util.text_type)
+
+ tests = [
+ # detect plain VARCHAR
+ expression.cast(
+ expression.literal_column("'test plain returns'"),
+ sqltypes.VARCHAR(60)
+ ),
+ # detect if there's an NVARCHAR type with different behavior available
+ expression.cast(
+ expression.literal_column("'test unicode returns'"),
+ sqltypes.Unicode(60)
+ ),
+ ]
+
+ if additional_tests:
+ tests += additional_tests
+
+ results = set([check_unicode(test) for test in tests])
+
+ if results.issuperset([True, False]):
+ return "conditional"
+ else:
+ return results == set([True])
- # detect plain VARCHAR
- unicode_for_varchar = check_unicode("plain", sqltypes.VARCHAR(60))
-
- # detect if there's an NVARCHAR type with different behavior available
- unicode_for_unicode = check_unicode("unicode", sqltypes.Unicode(60))
+ def _check_unicode_description(self, connection):
+ # all DBAPIs on Py2K return cursor.description as encoded,
+ # until pypy2.1beta2 with sqlite, so let's just check it -
+ # it's likely others will start doing this too in Py2k.
- if unicode_for_unicode and not unicode_for_varchar:
- return "conditional"
+ if util.py2k and not self.supports_unicode_statements:
+ cast_to = util.binary_type
else:
- return unicode_for_varchar
+ cast_to = util.text_type
+
+ cursor = connection.connection.cursor()
+ try:
+ cursor.execute(
+ cast_to(
+ expression.select([
+ expression.literal_column("'x'").label("some_label")
+ ]).compile(dialect=self)
+ )
+ )
+ return isinstance(cursor.description[0][0], util.text_type)
+ finally:
+ cursor.close()
def type_descriptor(self, typeobj):
"""Provide a database-specific :class:`.TypeEngine` object, given
@@ -259,8 +340,7 @@ class DefaultDialect(interfaces.Dialect):
"""
return sqltypes.adapt_type(typeobj, self.colspecs)
- def reflecttable(self, connection, table, include_columns,
- exclude_columns=None):
+ def reflecttable(self, connection, table, include_columns, exclude_columns):
insp = reflection.Inspector.from_engine(connection)
return insp.reflecttable(table, include_columns, exclude_columns)
@@ -368,6 +448,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
statement = None
postfetch_cols = None
prefetch_cols = None
+ returning_cols = None
_is_implicit_returning = False
_is_explicit_returning = False
@@ -464,6 +545,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
if self.isinsert or self.isupdate:
self.postfetch_cols = self.compiled.postfetch
self.prefetch_cols = self.compiled.prefetch
+ self.returning_cols = self.compiled.returning
self.__process_defaults()
processors = compiled._bind_processors
@@ -722,6 +804,11 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
ipk.append(row[c])
self.inserted_primary_key = ipk
+ self.returned_defaults = row
+
+ def _fetch_implicit_update_returning(self, resultproxy):
+ row = resultproxy.fetchone()
+ self.returned_defaults = row
def lastrow_has_defaults(self):
return (self.isinsert or self.isupdate) and \
@@ -808,6 +895,8 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
and generate inserted_primary_key collection.
"""
+ key_getter = self.compiled._key_getters_for_crud_column[2]
+
if self.executemany:
if len(self.compiled.prefetch):
scalar_defaults = {}
@@ -831,7 +920,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
else:
val = self.get_update_default(c)
if val is not None:
- param[c.key] = val
+ param[key_getter(c)] = val
del self.current_parameters
else:
self.current_parameters = compiled_parameters = \
@@ -844,12 +933,12 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
val = self.get_update_default(c)
if val is not None:
- compiled_parameters[c.key] = val
+ compiled_parameters[key_getter(c)] = val
del self.current_parameters
if self.isinsert:
self.inserted_primary_key = [
- self.compiled_parameters[0].get(c.key, None)
+ self.compiled_parameters[0].get(key_getter(c), None)
for c in self.compiled.\
statement.table.primary_key
]
diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py
index 750aa2fcd..5c44933e8 100644
--- a/lib/sqlalchemy/engine/interfaces.py
+++ b/lib/sqlalchemy/engine/interfaces.py
@@ -1,13 +1,15 @@
# engine/interfaces.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Define core interfaces used by the engine system."""
-from .. import util, event, events
+from .. import util, event
+# backwards compat
+from ..sql.compiler import Compiled, TypeCompiler
class Dialect(object):
"""Define the behavior of a specific database and DB-API combination.
@@ -191,19 +193,21 @@ class Dialect(object):
pass
- def reflecttable(self, connection, table, include_columns=None):
+ def reflecttable(self, connection, table, include_columns, exclude_columns):
"""Load table description from the database.
Given a :class:`.Connection` and a
:class:`~sqlalchemy.schema.Table` object, reflect its columns and
- properties from the database. If include_columns (a list or
- set) is specified, limit the autoload to the given column
- names.
+ properties from the database.
- The default implementation uses the
- :class:`~sqlalchemy.engine.reflection.Inspector` interface to
- provide the output, building upon the granular table/column/
- constraint etc. methods of :class:`.Dialect`.
+ The implementation of this method is provided by
+ :meth:`.DefaultDialect.reflecttable`, which makes use of
+ :class:`.Inspector` to retrieve column information.
+
+ Dialects should **not** seek to implement this method, and should
+ instead implement individual schema inspection operations such as
+ :meth:`.Dialect.get_columns`, :meth:`.Dialect.get_pk_constraint`,
+ etc.
"""
@@ -246,7 +250,7 @@ class Dialect(object):
Deprecated. This method is only called by the default
implementation of :meth:`.Dialect.get_pk_constraint`. Dialects should
- instead implement this method directly.
+ instead implement the :meth:`.Dialect.get_pk_constraint` method directly.
"""
@@ -338,7 +342,7 @@ class Dialect(object):
raise NotImplementedError()
- def get_unique_constraints(self, table_name, schema=None, **kw):
+ def get_unique_constraints(self, connection, table_name, schema=None, **kw):
"""Return information about unique constraints in `table_name`.
Given a string `table_name` and an optional string `schema`, return
@@ -769,110 +773,6 @@ class ExecutionContext(object):
raise NotImplementedError()
-class Compiled(object):
- """Represent a compiled SQL or DDL expression.
-
- The ``__str__`` method of the ``Compiled`` object should produce
- the actual text of the statement. ``Compiled`` objects are
- specific to their underlying database dialect, and also may
- or may not be specific to the columns referenced within a
- particular set of bind parameters. In no case should the
- ``Compiled`` object be dependent on the actual values of those
- bind parameters, even though it may reference those values as
- defaults.
- """
-
- def __init__(self, dialect, statement, bind=None,
- compile_kwargs=util.immutabledict()):
- """Construct a new ``Compiled`` object.
-
- :param dialect: ``Dialect`` to compile against.
-
- :param statement: ``ClauseElement`` to be compiled.
-
- :param bind: Optional Engine or Connection to compile this
- statement against.
-
- :param compile_kwargs: additional kwargs that will be
- passed to the initial call to :meth:`.Compiled.process`.
-
- .. versionadded:: 0.8
-
- """
-
- self.dialect = dialect
- self.bind = bind
- if statement is not None:
- self.statement = statement
- self.can_execute = statement.supports_execution
- self.string = self.process(self.statement, **compile_kwargs)
-
- @util.deprecated("0.7", ":class:`.Compiled` objects now compile "
- "within the constructor.")
- def compile(self):
- """Produce the internal string representation of this element."""
- pass
-
- @property
- def sql_compiler(self):
- """Return a Compiled that is capable of processing SQL expressions.
-
- If this compiler is one, it would likely just return 'self'.
-
- """
-
- raise NotImplementedError()
-
- def process(self, obj, **kwargs):
- return obj._compiler_dispatch(self, **kwargs)
-
- def __str__(self):
- """Return the string text of the generated SQL or DDL."""
-
- return self.string or ''
-
- def construct_params(self, params=None):
- """Return the bind params for this compiled object.
-
- :param params: a dict of string/object pairs whose values will
- override bind values compiled in to the
- statement.
- """
-
- raise NotImplementedError()
-
- @property
- def params(self):
- """Return the bind params for this compiled object."""
- return self.construct_params()
-
- def execute(self, *multiparams, **params):
- """Execute this compiled object."""
-
- e = self.bind
- if e is None:
- raise exc.UnboundExecutionError(
- "This Compiled object is not bound to any Engine "
- "or Connection.")
- return e._execute_compiled(self, multiparams, params)
-
- def scalar(self, *multiparams, **params):
- """Execute this compiled object and return the result's
- scalar value."""
-
- return self.execute(*multiparams, **params).scalar()
-
-
-class TypeCompiler(object):
- """Produces DDL specification for TypeEngine objects."""
-
- def __init__(self, dialect):
- self.dialect = dialect
-
- def process(self, type_):
- return type_._compiler_dispatch(self)
-
-
class Connectable(object):
"""Interface for an object which supports execution of SQL constructs.
@@ -884,8 +784,6 @@ class Connectable(object):
"""
- dispatch = event.dispatcher(events.ConnectionEvents)
-
def connect(self, **kwargs):
"""Return a :class:`.Connection` object.
@@ -914,7 +812,8 @@ class Connectable(object):
"object directly, i.e. :meth:`.Table.create`, "
":meth:`.Index.create`, :meth:`.MetaData.create_all`")
def create(self, entity, **kwargs):
- """Emit CREATE statements for the given schema entity."""
+ """Emit CREATE statements for the given schema entity.
+ """
raise NotImplementedError()
@@ -923,7 +822,8 @@ class Connectable(object):
"object directly, i.e. :meth:`.Table.drop`, "
":meth:`.Index.drop`, :meth:`.MetaData.drop_all`")
def drop(self, entity, **kwargs):
- """Emit DROP statements for the given schema entity."""
+ """Emit DROP statements for the given schema entity.
+ """
raise NotImplementedError()
diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py
index 50b3f774c..45f100518 100644
--- a/lib/sqlalchemy/engine/reflection.py
+++ b/lib/sqlalchemy/engine/reflection.py
@@ -1,5 +1,5 @@
# engine/reflection.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -25,9 +25,9 @@ methods such as get_table_names, get_columns, etc.
"""
from .. import exc, sql
-from .. import schema as sa_schema
+from ..sql import schema as sa_schema
from .. import util
-from ..types import TypeEngine
+from ..sql.type_api import TypeEngine
from ..util import deprecated
from ..util import topological
from .. import inspection
@@ -161,7 +161,7 @@ class Inspector(object):
"""Return all table names in referred to within a particular schema.
The names are expected to be real tables only, not views.
- Views are instead returned using the :meth:`.get_view_names`
+ Views are instead returned using the :meth:`.Inspector.get_view_names`
method.
@@ -169,7 +169,7 @@ class Inspector(object):
database's default schema is
used, else the named schema is searched. If the database does not
support named schemas, behavior is undefined if ``schema`` is not
- passed as ``None``.
+ passed as ``None``. For special quoting, use :class:`.quoted_name`.
:param order_by: Optional, may be the string "foreign_key" to sort
the result on foreign key dependencies.
@@ -206,6 +206,13 @@ class Inspector(object):
This currently includes some options that apply to MySQL tables.
+ :param table_name: string name of the table. For special quoting,
+ use :class:`.quoted_name`.
+
+ :param schema: string schema name; if omitted, uses the default schema
+ of the database connection. For special quoting,
+ use :class:`.quoted_name`.
+
"""
if hasattr(self.dialect, 'get_table_options'):
return self.dialect.get_table_options(
@@ -217,6 +224,8 @@ class Inspector(object):
"""Return all view names in `schema`.
:param schema: Optional, retrieve names from a non-default schema.
+ For special quoting, use :class:`.quoted_name`.
+
"""
return self.dialect.get_view_names(self.bind, schema,
@@ -226,6 +235,8 @@ class Inspector(object):
"""Return definition for `view_name`.
:param schema: Optional, retrieve names from a non-default schema.
+ For special quoting, use :class:`.quoted_name`.
+
"""
return self.dialect.get_view_definition(
@@ -251,6 +262,14 @@ class Inspector(object):
attrs
dict containing optional column attributes
+
+ :param table_name: string name of the table. For special quoting,
+ use :class:`.quoted_name`.
+
+ :param schema: string schema name; if omitted, uses the default schema
+ of the database connection. For special quoting,
+ use :class:`.quoted_name`.
+
"""
col_defs = self.dialect.get_columns(self.bind, table_name, schema,
@@ -288,6 +307,13 @@ class Inspector(object):
name
optional name of the primary key constraint.
+ :param table_name: string name of the table. For special quoting,
+ use :class:`.quoted_name`.
+
+ :param schema: string schema name; if omitted, uses the default schema
+ of the database connection. For special quoting,
+ use :class:`.quoted_name`.
+
"""
return self.dialect.get_pk_constraint(self.bind, table_name, schema,
info_cache=self.info_cache,
@@ -315,6 +341,13 @@ class Inspector(object):
name
optional name of the foreign key constraint.
+ :param table_name: string name of the table. For special quoting,
+ use :class:`.quoted_name`.
+
+ :param schema: string schema name; if omitted, uses the default schema
+ of the database connection. For special quoting,
+ use :class:`.quoted_name`.
+
"""
return self.dialect.get_foreign_keys(self.bind, table_name, schema,
@@ -336,6 +369,13 @@ class Inspector(object):
unique
boolean
+ :param table_name: string name of the table. For special quoting,
+ use :class:`.quoted_name`.
+
+ :param schema: string schema name; if omitted, uses the default schema
+ of the database connection. For special quoting,
+ use :class:`.quoted_name`.
+
"""
return self.dialect.get_indexes(self.bind, table_name,
@@ -354,7 +394,14 @@ class Inspector(object):
column_names
list of column names in order
- .. versionadded:: 0.9.0
+ :param table_name: string name of the table. For special quoting,
+ use :class:`.quoted_name`.
+
+ :param schema: string schema name; if omitted, uses the default schema
+ of the database connection. For special quoting,
+ use :class:`.quoted_name`.
+
+ .. versionadded:: 0.8.4
"""
@@ -384,24 +431,25 @@ class Inspector(object):
"""
dialect = self.bind.dialect
- # table attributes we might need.
- reflection_options = dict(
- (k, table.kwargs.get(k)) for k in dialect.reflection_options if k in table.kwargs)
-
schema = table.schema
table_name = table.name
- # apply table options
- tbl_opts = self.get_table_options(table_name, schema, **table.kwargs)
- if tbl_opts:
- table.kwargs.update(tbl_opts)
+ # get table-level arguments that are specifically
+ # intended for reflection, e.g. oracle_resolve_synonyms.
+ # these are unconditionally passed to related Table
+ # objects
+ reflection_options = dict(
+ (k, table.dialect_kwargs.get(k))
+ for k in dialect.reflection_options
+ if k in table.dialect_kwargs
+ )
- # table.kwargs will need to be passed to each reflection method. Make
- # sure keywords are strings.
- tblkw = table.kwargs.copy()
- for (k, v) in list(tblkw.items()):
- del tblkw[k]
- tblkw[str(k)] = v
+ # reflect table options, like mysql_engine
+ tbl_opts = self.get_table_options(table_name, schema, **table.dialect_kwargs)
+ if tbl_opts:
+ # add additional kwargs to the Table if the dialect
+ # returned them
+ table._validate_dialect_kwargs(tbl_opts)
if util.py2k:
if isinstance(schema, str):
@@ -409,10 +457,13 @@ class Inspector(object):
if isinstance(table_name, str):
table_name = table_name.decode(dialect.encoding)
- # columns
found_table = False
- for col_d in self.get_columns(table_name, schema, **tblkw):
+ cols_by_orig_name = {}
+
+ for col_d in self.get_columns(table_name, schema, **table.dialect_kwargs):
found_table = True
+ orig_name = col_d['name']
+
table.dispatch.column_reflect(self, table, col_d)
name = col_d['name']
@@ -422,12 +473,12 @@ class Inspector(object):
continue
coltype = col_d['type']
- col_kw = {
- 'nullable': col_d['nullable'],
- }
- for k in ('autoincrement', 'quote', 'info', 'key'):
- if k in col_d:
- col_kw[k] = col_d[k]
+
+ col_kw = dict(
+ (k, col_d[k])
+ for k in ['nullable', 'autoincrement', 'quote', 'info', 'key']
+ if k in col_d
+ )
colargs = []
if col_d.get('default') is not None:
@@ -441,7 +492,7 @@ class Inspector(object):
)
if 'sequence' in col_d:
- # TODO: mssql, maxdb and sybase are using this.
+ # TODO: mssql and sybase are using this.
seq = col_d['sequence']
sequence = sa_schema.Sequence(seq['name'], 1, 1)
if 'start' in seq:
@@ -450,37 +501,41 @@ class Inspector(object):
sequence.increment = seq['increment']
colargs.append(sequence)
- col = sa_schema.Column(name, coltype, *colargs, **col_kw)
+ cols_by_orig_name[orig_name] = col = \
+ sa_schema.Column(name, coltype, *colargs, **col_kw)
+
+ if col.key in table.primary_key:
+ col.primary_key = True
table.append_column(col)
if not found_table:
raise exc.NoSuchTableError(table.name)
- # Primary keys
- pk_cons = self.get_pk_constraint(table_name, schema, **tblkw)
+ pk_cons = self.get_pk_constraint(table_name, schema, **table.dialect_kwargs)
if pk_cons:
pk_cols = [
- table.c[pk]
+ cols_by_orig_name[pk]
for pk in pk_cons['constrained_columns']
- if pk in table.c and pk not in exclude_columns
- ]
- pk_cols += [
- pk
- for pk in table.primary_key
- if pk.key in exclude_columns
+ if pk in cols_by_orig_name and pk not in exclude_columns
]
- primary_key_constraint = sa_schema.PrimaryKeyConstraint(
- name=pk_cons.get('name'),
- *pk_cols
- )
- table.append_constraint(primary_key_constraint)
+ # update pk constraint name
+ table.primary_key.name = pk_cons.get('name')
+
+ # tell the PKConstraint to re-initialize
+ # it's column collection
+ table.primary_key._reload(pk_cols)
- # Foreign keys
- fkeys = self.get_foreign_keys(table_name, schema, **tblkw)
+ fkeys = self.get_foreign_keys(table_name, schema, **table.dialect_kwargs)
for fkey_d in fkeys:
conname = fkey_d['name']
- constrained_columns = fkey_d['constrained_columns']
+ # look for columns by orig name in cols_by_orig_name,
+ # but support columns that are in-Python only as fallback
+ constrained_columns = [
+ cols_by_orig_name[c].key
+ if c in cols_by_orig_name else c
+ for c in fkey_d['constrained_columns']
+ ]
if exclude_columns and set(constrained_columns).intersection(
exclude_columns):
continue
@@ -504,9 +559,14 @@ class Inspector(object):
)
for column in referred_columns:
refspec.append(".".join([referred_table, column]))
+ if 'options' in fkey_d:
+ options = fkey_d['options']
+ else:
+ options = {}
table.append_constraint(
sa_schema.ForeignKeyConstraint(constrained_columns, refspec,
- conname, link_to_name=True))
+ conname, link_to_name=True,
+ **options))
# Indexes
indexes = self.get_indexes(table_name, schema)
for index_d in indexes:
@@ -520,5 +580,11 @@ class Inspector(object):
"Omitting %s KEY for (%s), key covers omitted columns." %
(flavor, ', '.join(columns)))
continue
- sa_schema.Index(name, *[table.columns[c] for c in columns],
+ # look for columns by orig name in cols_by_orig_name,
+ # but support columns that are in-Python only as fallback
+ sa_schema.Index(name, *[
+ cols_by_orig_name[c] if c in cols_by_orig_name
+ else table.c[c]
+ for c in columns
+ ],
**dict(unique=unique))
diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py
index 65ce3b742..f9e0ca0d2 100644
--- a/lib/sqlalchemy/engine/result.py
+++ b/lib/sqlalchemy/engine/result.py
@@ -1,5 +1,5 @@
# engine/result.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -9,8 +9,8 @@ and :class:`.RowProxy."""
-from .. import exc, types, util
-from ..sql import expression
+from .. import exc, util
+from ..sql import expression, sqltypes
import collections
# This reconstructor is necessary so that pickles with the C extension or
@@ -125,8 +125,11 @@ class RowProxy(BaseRowProxy):
__hash__ = None
+ def __lt__(self, other):
+ return tuple(self) < tuple(other)
+
def __eq__(self, other):
- return other is self or other == tuple(self)
+ return other is self or tuple(other) == tuple(self)
def __ne__(self, other):
return not self.__eq__(other)
@@ -205,10 +208,10 @@ class ResultMetaData(object):
else colname.lower()]
except KeyError:
name, obj, type_ = \
- colname, None, typemap.get(coltype, types.NULLTYPE)
+ colname, None, typemap.get(coltype, sqltypes.NULLTYPE)
else:
name, obj, type_ = \
- colname, None, typemap.get(coltype, types.NULLTYPE)
+ colname, None, typemap.get(coltype, sqltypes.NULLTYPE)
processor = context.get_result_processor(type_, colname, coltype)
@@ -621,6 +624,24 @@ class ResultProxy(object):
else:
return self.context.compiled_parameters[0]
+ @property
+ def returned_defaults(self):
+ """Return the values of default columns that were fetched using
+ the :meth:`.ValuesBase.return_defaults` feature.
+
+ The value is an instance of :class:`.RowProxy`, or ``None``
+ if :meth:`.ValuesBase.return_defaults` was not used or if the
+ backend does not support RETURNING.
+
+ .. versionadded:: 0.9.0
+
+ .. seealso::
+
+ :meth:`.ValuesBase.return_defaults`
+
+ """
+ return self.context.returned_defaults
+
def lastrow_has_defaults(self):
"""Return ``lastrow_has_defaults()`` from the underlying
:class:`.ExecutionContext`.
diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py
index ab9d370a3..f6c064033 100644
--- a/lib/sqlalchemy/engine/strategies.py
+++ b/lib/sqlalchemy/engine/strategies.py
@@ -1,5 +1,5 @@
# engine/strategies.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -49,18 +49,27 @@ class DefaultEngineStrategy(EngineStrategy):
dialect_cls = u.get_dialect()
+ if kwargs.pop('_coerce_config', False):
+ def pop_kwarg(key, default=None):
+ value = kwargs.pop(key, default)
+ if key in dialect_cls.engine_config_types:
+ value = dialect_cls.engine_config_types[key](value)
+ return value
+ else:
+ pop_kwarg = kwargs.pop
+
dialect_args = {}
# consume dialect arguments from kwargs
for k in util.get_cls_kwargs(dialect_cls):
if k in kwargs:
- dialect_args[k] = kwargs.pop(k)
+ dialect_args[k] = pop_kwarg(k)
dbapi = kwargs.pop('module', None)
if dbapi is None:
dbapi_args = {}
for k in util.get_func_kwargs(dialect_cls.dbapi):
if k in kwargs:
- dbapi_args[k] = kwargs.pop(k)
+ dbapi_args[k] = pop_kwarg(k)
dbapi = dialect_cls.dbapi(**dbapi_args)
dialect_args['dbapi'] = dbapi
@@ -70,15 +79,15 @@ class DefaultEngineStrategy(EngineStrategy):
# assemble connection arguments
(cargs, cparams) = dialect.create_connect_args(u)
- cparams.update(kwargs.pop('connect_args', {}))
+ cparams.update(pop_kwarg('connect_args', {}))
# look for existing pool or create
- pool = kwargs.pop('pool', None)
+ pool = pop_kwarg('pool', None)
if pool is None:
def connect():
try:
return dialect.connect(*cargs, **cparams)
- except Exception as e:
+ except dialect.dbapi.Error as e:
invalidated = dialect.is_disconnect(e, None, None)
util.raise_from_cause(
exc.DBAPIError.instance(None, None,
@@ -87,9 +96,9 @@ class DefaultEngineStrategy(EngineStrategy):
)
)
- creator = kwargs.pop('creator', connect)
+ creator = pop_kwarg('creator', connect)
- poolclass = kwargs.pop('poolclass', None)
+ poolclass = pop_kwarg('poolclass', None)
if poolclass is None:
poolclass = dialect_cls.get_pool_class(u)
pool_args = {}
@@ -106,7 +115,7 @@ class DefaultEngineStrategy(EngineStrategy):
for k in util.get_cls_kwargs(poolclass):
tk = translate.get(k, k)
if tk in kwargs:
- pool_args[k] = kwargs.pop(tk)
+ pool_args[k] = pop_kwarg(tk)
pool = poolclass(creator, **pool_args)
else:
if isinstance(pool, poollib._DBProxy):
@@ -119,7 +128,7 @@ class DefaultEngineStrategy(EngineStrategy):
engine_args = {}
for k in util.get_cls_kwargs(engineclass):
if k in kwargs:
- engine_args[k] = kwargs.pop(k)
+ engine_args[k] = pop_kwarg(k)
_initialize = kwargs.pop('_initialize', True)
diff --git a/lib/sqlalchemy/engine/threadlocal.py b/lib/sqlalchemy/engine/threadlocal.py
index d4aeafd6f..ae647a78e 100644
--- a/lib/sqlalchemy/engine/threadlocal.py
+++ b/lib/sqlalchemy/engine/threadlocal.py
@@ -1,5 +1,5 @@
# engine/threadlocal.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/engine/url.py b/lib/sqlalchemy/engine/url.py
index ed5729eea..78ac06187 100644
--- a/lib/sqlalchemy/engine/url.py
+++ b/lib/sqlalchemy/engine/url.py
@@ -1,5 +1,5 @@
# engine/url.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -16,6 +16,7 @@ be used directly and is also accepted directly by ``create_engine()``.
import re
from .. import exc, util
from . import Dialect
+from ..dialects import registry
class URL(object):
@@ -23,8 +24,8 @@ class URL(object):
Represent the components of a URL used to connect to a database.
This object is suitable to be passed directly to a
- ``create_engine()`` call. The fields of the URL are parsed from a
- string by the ``module-level make_url()`` function. the string
+ :func:`~sqlalchemy.create_engine` call. The fields of the URL are parsed from a
+ string by the :func:`.make_url` function. the string
format of the URL is an RFC-1738-style string.
All initialization parameters are available as public attributes.
@@ -61,15 +62,19 @@ class URL(object):
self.database = database
self.query = query or {}
- def __str__(self):
+ def __to_string__(self, hide_password=True):
s = self.drivername + "://"
if self.username is not None:
- s += self.username
+ s += _rfc_1738_quote(self.username)
if self.password is not None:
- s += ':' + util.quote_plus(self.password)
+ s += ':' + ('***' if hide_password
+ else _rfc_1738_quote(self.password))
s += "@"
if self.host is not None:
- s += self.host
+ if ':' in self.host:
+ s += "[%s]" % self.host
+ else:
+ s += self.host
if self.port is not None:
s += ':' + str(self.port)
if self.database is not None:
@@ -80,6 +85,12 @@ class URL(object):
s += '?' + "&".join("%s=%s" % (k, self.query[k]) for k in keys)
return s
+ def __str__(self):
+ return self.__to_string__(hide_password=False)
+
+ def __repr__(self):
+ return self.__to_string__()
+
def __hash__(self):
return hash(str(self))
@@ -102,7 +113,6 @@ class URL(object):
name = self.drivername
else:
name = self.drivername.replace('+', '.')
- from sqlalchemy.dialects import registry
cls = registry.load(name)
# check for legacy dialects that
# would return a module with 'dialect' as the
@@ -160,10 +170,13 @@ def _parse_rfc1738_args(name):
(?P<name>[\w\+]+)://
(?:
(?P<username>[^:/]*)
- (?::(?P<password>[^/]*))?
+ (?::(?P<password>.*))?
@)?
(?:
- (?P<host>[^/:]*)
+ (?:
+ \[(?P<ipv6host>[^/]+)\] |
+ (?P<ipv4host>[^/:]+)
+ )?
(?::(?P<port>[^/]*))?
)?
(?:/(?P<database>.*))?
@@ -182,10 +195,15 @@ def _parse_rfc1738_args(name):
query = None
components['query'] = query
+ if components['username'] is not None:
+ components['username'] = _rfc_1738_unquote(components['username'])
+
if components['password'] is not None:
- components['password'] = \
- util.unquote_plus(components['password'])
+ components['password'] = _rfc_1738_unquote(components['password'])
+ ipv4host = components.pop('ipv4host')
+ ipv6host = components.pop('ipv6host')
+ components['host'] = ipv4host or ipv6host
name = components.pop('name')
return URL(name, **components)
else:
@@ -193,6 +211,12 @@ def _parse_rfc1738_args(name):
"Could not parse rfc1738 URL from string '%s'" % name)
+def _rfc_1738_quote(text):
+ return re.sub(r'[:@/]', lambda m: "%%%X" % ord(m.group(0)), text)
+
+def _rfc_1738_unquote(text):
+ return util.unquote(text)
+
def _parse_keyvalue_args(name):
m = re.match(r'(\w+)://(.*)', name)
if m is not None:
diff --git a/lib/sqlalchemy/engine/util.py b/lib/sqlalchemy/engine/util.py
index e56452751..6c0644be4 100644
--- a/lib/sqlalchemy/engine/util.py
+++ b/lib/sqlalchemy/engine/util.py
@@ -1,33 +1,11 @@
# engine/util.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from .. import util
-
-def _coerce_config(configuration, prefix):
- """Convert configuration values to expected types."""
-
- options = dict((key[len(prefix):], configuration[key])
- for key in configuration
- if key.startswith(prefix))
- for option, type_ in (
- ('convert_unicode', util.bool_or_str('force')),
- ('pool_timeout', int),
- ('echo', util.bool_or_str('debug')),
- ('echo_pool', util.bool_or_str('debug')),
- ('pool_recycle', int),
- ('pool_size', int),
- ('max_overflow', int),
- ('pool_threadlocal', bool),
- ('use_native_unicode', bool),
- ):
- util.coerce_kw_type(options, option, type_)
- return options
-
-
def connection_memoize(key):
"""Decorator, memoize a function in a connection.info stash.
diff --git a/lib/sqlalchemy/event.py b/lib/sqlalchemy/event.py
deleted file mode 100644
index 64ae49976..000000000
--- a/lib/sqlalchemy/event.py
+++ /dev/null
@@ -1,735 +0,0 @@
-# sqlalchemy/event.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Base event API."""
-
-from __future__ import absolute_import
-
-from . import util, exc
-from itertools import chain
-import weakref
-
-CANCEL = util.symbol('CANCEL')
-NO_RETVAL = util.symbol('NO_RETVAL')
-
-
-def listen(target, identifier, fn, *args, **kw):
- """Register a listener function for the given target.
-
- e.g.::
-
- from sqlalchemy import event
- from sqlalchemy.schema import UniqueConstraint
-
- def unique_constraint_name(const, table):
- const.name = "uq_%s_%s" % (
- table.name,
- list(const.columns)[0].name
- )
- event.listen(
- UniqueConstraint,
- "after_parent_attach",
- unique_constraint_name)
-
- """
-
- for evt_cls in _registrars[identifier]:
- tgt = evt_cls._accept_with(target)
- if tgt is not None:
- tgt.dispatch._listen(tgt, identifier, fn, *args, **kw)
- return
- raise exc.InvalidRequestError("No such event '%s' for target '%s'" %
- (identifier, target))
-
-
-def listens_for(target, identifier, *args, **kw):
- """Decorate a function as a listener for the given target + identifier.
-
- e.g.::
-
- from sqlalchemy import event
- from sqlalchemy.schema import UniqueConstraint
-
- @event.listens_for(UniqueConstraint, "after_parent_attach")
- def unique_constraint_name(const, table):
- const.name = "uq_%s_%s" % (
- table.name,
- list(const.columns)[0].name
- )
- """
- def decorate(fn):
- listen(target, identifier, fn, *args, **kw)
- return fn
- return decorate
-
-
-def remove(target, identifier, fn):
- """Remove an event listener.
-
- Note that some event removals, particularly for those event dispatchers
- which create wrapper functions and secondary even listeners, may not yet
- be supported.
-
- """
- for evt_cls in _registrars[identifier]:
- for tgt in evt_cls._accept_with(target):
- tgt.dispatch._remove(identifier, tgt, fn)
- return
-
-def _legacy_signature(since, argnames, converter=None):
- def leg(fn):
- if not hasattr(fn, '_legacy_signatures'):
- fn._legacy_signatures = []
- fn._legacy_signatures.append((since, argnames, converter))
- return fn
- return leg
-
-
-_registrars = util.defaultdict(list)
-
-
-def _is_event_name(name):
- return not name.startswith('_') and name != 'dispatch'
-
-
-class _UnpickleDispatch(object):
- """Serializable callable that re-generates an instance of
- :class:`_Dispatch` given a particular :class:`.Events` subclass.
-
- """
- def __call__(self, _parent_cls):
- for cls in _parent_cls.__mro__:
- if 'dispatch' in cls.__dict__:
- return cls.__dict__['dispatch'].dispatch_cls(_parent_cls)
- else:
- raise AttributeError("No class with a 'dispatch' member present.")
-
-
-class _Dispatch(object):
- """Mirror the event listening definitions of an Events class with
- listener collections.
-
- Classes which define a "dispatch" member will return a
- non-instantiated :class:`._Dispatch` subclass when the member
- is accessed at the class level. When the "dispatch" member is
- accessed at the instance level of its owner, an instance
- of the :class:`._Dispatch` class is returned.
-
- A :class:`._Dispatch` class is generated for each :class:`.Events`
- class defined, by the :func:`._create_dispatcher_class` function.
- The original :class:`.Events` classes remain untouched.
- This decouples the construction of :class:`.Events` subclasses from
- the implementation used by the event internals, and allows
- inspecting tools like Sphinx to work in an unsurprising
- way against the public API.
-
- """
-
- def __init__(self, _parent_cls):
- self._parent_cls = _parent_cls
-
- def _join(self, other):
- """Create a 'join' of this :class:`._Dispatch` and another.
-
- This new dispatcher will dispatch events to both
- :class:`._Dispatch` objects.
-
- Once constructed, the joined dispatch will respond to new events
- added to this dispatcher, but may not be aware of events
- added to the other dispatcher after creation of the join. This is
- currently for performance reasons so that both dispatchers need
- not be "evaluated" fully on each call.
-
- """
- if '_joined_dispatch_cls' not in self.__class__.__dict__:
- cls = type(
- "Joined%s" % self.__class__.__name__,
- (_JoinedDispatcher, self.__class__), {}
- )
- for ls in _event_descriptors(self):
- setattr(cls, ls.name, _JoinedDispatchDescriptor(ls.name))
-
- self.__class__._joined_dispatch_cls = cls
- return self._joined_dispatch_cls(self, other)
-
- def __reduce__(self):
- return _UnpickleDispatch(), (self._parent_cls, )
-
- def _update(self, other, only_propagate=True):
- """Populate from the listeners in another :class:`_Dispatch`
- object."""
-
- for ls in _event_descriptors(other):
- getattr(self, ls.name).\
- for_modify(self)._update(ls, only_propagate=only_propagate)
-
- @util.hybridmethod
- def _clear(self):
- for attr in dir(self):
- if _is_event_name(attr):
- getattr(self, attr).for_modify(self).clear()
-
-
-def _event_descriptors(target):
- return [getattr(target, k) for k in dir(target) if _is_event_name(k)]
-
-
-class _EventMeta(type):
- """Intercept new Event subclasses and create
- associated _Dispatch classes."""
-
- def __init__(cls, classname, bases, dict_):
- _create_dispatcher_class(cls, classname, bases, dict_)
- return type.__init__(cls, classname, bases, dict_)
-
-
-def _create_dispatcher_class(cls, classname, bases, dict_):
- """Create a :class:`._Dispatch` class corresponding to an
- :class:`.Events` class."""
-
- # there's all kinds of ways to do this,
- # i.e. make a Dispatch class that shares the '_listen' method
- # of the Event class, this is the straight monkeypatch.
- dispatch_base = getattr(cls, 'dispatch', _Dispatch)
- cls.dispatch = dispatch_cls = type("%sDispatch" % classname,
- (dispatch_base, ), {})
- dispatch_cls._listen = cls._listen
-
- for k in dict_:
- if _is_event_name(k):
- setattr(dispatch_cls, k, _DispatchDescriptor(cls, dict_[k]))
- _registrars[k].append(cls)
-
-
-def _remove_dispatcher(cls):
- for k in dir(cls):
- if _is_event_name(k):
- _registrars[k].remove(cls)
- if not _registrars[k]:
- del _registrars[k]
-
-class Events(util.with_metaclass(_EventMeta, object)):
- """Define event listening functions for a particular target type."""
-
- @classmethod
- def _accept_with(cls, target):
- # Mapper, ClassManager, Session override this to
- # also accept classes, scoped_sessions, sessionmakers, etc.
- if hasattr(target, 'dispatch') and (
- isinstance(target.dispatch, cls.dispatch) or \
- isinstance(target.dispatch, type) and \
- issubclass(target.dispatch, cls.dispatch)
- ):
- return target
- else:
- return None
-
- @classmethod
- def _listen(cls, target, identifier, fn, propagate=False, insert=False,
- named=False):
- dispatch_descriptor = getattr(target.dispatch, identifier)
- fn = dispatch_descriptor._adjust_fn_spec(fn, named)
-
- if insert:
- dispatch_descriptor.\
- for_modify(target.dispatch).insert(fn, target, propagate)
- else:
- dispatch_descriptor.\
- for_modify(target.dispatch).append(fn, target, propagate)
-
- @classmethod
- def _remove(cls, target, identifier, fn):
- getattr(target.dispatch, identifier).remove(fn, target)
-
- @classmethod
- def _clear(cls):
- cls.dispatch._clear()
-
-
-class _DispatchDescriptor(object):
- """Class-level attributes on :class:`._Dispatch` classes."""
-
- def __init__(self, parent_dispatch_cls, fn):
- self.__name__ = fn.__name__
- argspec = util.inspect_getargspec(fn)
- self.arg_names = argspec.args[1:]
- self.has_kw = bool(argspec.keywords)
- self.legacy_signatures = list(reversed(
- sorted(
- getattr(fn, '_legacy_signatures', []),
- key=lambda s: s[0]
- )
- ))
- self.__doc__ = fn.__doc__ = self._augment_fn_docs(parent_dispatch_cls, fn)
-
- self._clslevel = weakref.WeakKeyDictionary()
- self._empty_listeners = weakref.WeakKeyDictionary()
-
- def _adjust_fn_spec(self, fn, named):
- argspec = util.get_callable_argspec(fn, no_self=True)
- if named:
- fn = self._wrap_fn_for_kw(fn)
- fn = self._wrap_fn_for_legacy(fn, argspec)
- return fn
-
- def _wrap_fn_for_kw(self, fn):
- def wrap_kw(*args, **kw):
- argdict = dict(zip(self.arg_names, args))
- argdict.update(kw)
- return fn(**argdict)
- return wrap_kw
-
- def _wrap_fn_for_legacy(self, fn, argspec):
- for since, argnames, conv in self.legacy_signatures:
- if argnames[-1] == "**kw":
- has_kw = True
- argnames = argnames[0:-1]
- else:
- has_kw = False
-
- if len(argnames) == len(argspec.args) \
- and has_kw is bool(argspec.keywords):
-
- if conv:
- assert not has_kw
- def wrap_leg(*args):
- return fn(*conv(*args))
- else:
- def wrap_leg(*args, **kw):
- argdict = dict(zip(self.arg_names, args))
- args = [argdict[name] for name in argnames]
- if has_kw:
- return fn(*args, **kw)
- else:
- return fn(*args)
- return wrap_leg
- else:
- return fn
-
- def _indent(self, text, indent):
- return "\n".join(
- indent + line
- for line in text.split("\n")
- )
-
- def _standard_listen_example(self, sample_target, fn):
- example_kw_arg = self._indent(
- "\n".join(
- "%(arg)s = kw['%(arg)s']" % {"arg": arg}
- for arg in self.arg_names[0:2]
- ),
- " ")
- if self.legacy_signatures:
- current_since = max(since for since, args, conv in self.legacy_signatures)
- else:
- current_since = None
- text = (
- "from sqlalchemy import event\n\n"
- "# standard decorator style%(current_since)s\n"
- "@event.listens_for(%(sample_target)s, '%(event_name)s')\n"
- "def receive_%(event_name)s(%(named_event_arguments)s%(has_kw_arguments)s):\n"
- " \"listen for the '%(event_name)s' event\"\n"
- "\n # ... (event handling logic) ...\n"
- )
-
- if len(self.arg_names) > 2:
- text += (
-
- "\n# named argument style (new in 0.9)\n"
- "@event.listens_for(%(sample_target)s, '%(event_name)s', named=True)\n"
- "def receive_%(event_name)s(**kw):\n"
- " \"listen for the '%(event_name)s' event\"\n"
- "%(example_kw_arg)s\n"
- "\n # ... (event handling logic) ...\n"
- )
-
- text %= {
- "current_since": " (arguments as of %s)" %
- current_since if current_since else "",
- "event_name": fn.__name__,
- "has_kw_arguments": " **kw" if self.has_kw else "",
- "named_event_arguments": ", ".join(self.arg_names),
- "example_kw_arg": example_kw_arg,
- "sample_target": sample_target
- }
- return text
-
- def _legacy_listen_examples(self, sample_target, fn):
- text = ""
- for since, args, conv in self.legacy_signatures:
- text += (
- "\n# legacy calling style (pre-%(since)s)\n"
- "@event.listens_for(%(sample_target)s, '%(event_name)s')\n"
- "def receive_%(event_name)s(%(named_event_arguments)s%(has_kw_arguments)s):\n"
- " \"listen for the '%(event_name)s' event\"\n"
- "\n # ... (event handling logic) ...\n" % {
- "since": since,
- "event_name": fn.__name__,
- "has_kw_arguments": " **kw" if self.has_kw else "",
- "named_event_arguments": ", ".join(args),
- "sample_target": sample_target
- }
- )
- return text
-
- def _version_signature_changes(self):
- since, args, conv = self.legacy_signatures[0]
- return (
- "\n.. versionchanged:: %(since)s\n"
- " The ``%(event_name)s`` event now accepts the \n"
- " arguments ``%(named_event_arguments)s%(has_kw_arguments)s``.\n"
- " Listener functions which accept the previous argument \n"
- " signature(s) listed above will be automatically \n"
- " adapted to the new signature." % {
- "since": since,
- "event_name": self.__name__,
- "named_event_arguments": ", ".join(self.arg_names),
- "has_kw_arguments": ", **kw" if self.has_kw else ""
- }
- )
-
- def _augment_fn_docs(self, parent_dispatch_cls, fn):
- header = ".. container:: event_signatures\n\n"\
- " Example argument forms::\n"\
- "\n"
-
- sample_target = getattr(parent_dispatch_cls, "_target_class_doc", "obj")
- text = (
- header +
- self._indent(
- self._standard_listen_example(sample_target, fn),
- " " * 8)
- )
- if self.legacy_signatures:
- text += self._indent(
- self._legacy_listen_examples(sample_target, fn),
- " " * 8)
-
- text += self._version_signature_changes()
-
- return util.inject_docstring_text(fn.__doc__,
- text,
- 1
- )
-
- def _contains(self, cls, evt):
- return cls in self._clslevel and \
- evt in self._clslevel[cls]
-
- def insert(self, obj, target, propagate):
- assert isinstance(target, type), \
- "Class-level Event targets must be classes."
- stack = [target]
- while stack:
- cls = stack.pop(0)
- stack.extend(cls.__subclasses__())
- if cls is not target and cls not in self._clslevel:
- self.update_subclass(cls)
- else:
- if cls not in self._clslevel:
- self._clslevel[cls] = []
- self._clslevel[cls].insert(0, obj)
-
- def append(self, obj, target, propagate):
- assert isinstance(target, type), \
- "Class-level Event targets must be classes."
-
- stack = [target]
- while stack:
- cls = stack.pop(0)
- stack.extend(cls.__subclasses__())
- if cls is not target and cls not in self._clslevel:
- self.update_subclass(cls)
- else:
- if cls not in self._clslevel:
- self._clslevel[cls] = []
- self._clslevel[cls].append(obj)
-
- def update_subclass(self, target):
- if target not in self._clslevel:
- self._clslevel[target] = []
- clslevel = self._clslevel[target]
- for cls in target.__mro__[1:]:
- if cls in self._clslevel:
- clslevel.extend([
- fn for fn
- in self._clslevel[cls]
- if fn not in clslevel
- ])
-
- def remove(self, obj, target):
- stack = [target]
- while stack:
- cls = stack.pop(0)
- stack.extend(cls.__subclasses__())
- if cls in self._clslevel:
- self._clslevel[cls].remove(obj)
-
- def clear(self):
- """Clear all class level listeners"""
-
- for dispatcher in self._clslevel.values():
- dispatcher[:] = []
-
- def for_modify(self, obj):
- """Return an event collection which can be modified.
-
- For _DispatchDescriptor at the class level of
- a dispatcher, this returns self.
-
- """
- return self
-
- def __get__(self, obj, cls):
- if obj is None:
- return self
- elif obj._parent_cls in self._empty_listeners:
- ret = self._empty_listeners[obj._parent_cls]
- else:
- self._empty_listeners[obj._parent_cls] = ret = \
- _EmptyListener(self, obj._parent_cls)
- # assigning it to __dict__ means
- # memoized for fast re-access. but more memory.
- obj.__dict__[self.__name__] = ret
- return ret
-
-class _HasParentDispatchDescriptor(object):
- def _adjust_fn_spec(self, fn, named):
- return self.parent._adjust_fn_spec(fn, named)
-
-class _EmptyListener(_HasParentDispatchDescriptor):
- """Serves as a class-level interface to the events
- served by a _DispatchDescriptor, when there are no
- instance-level events present.
-
- Is replaced by _ListenerCollection when instance-level
- events are added.
-
- """
- def __init__(self, parent, target_cls):
- if target_cls not in parent._clslevel:
- parent.update_subclass(target_cls)
- self.parent = parent # _DispatchDescriptor
- self.parent_listeners = parent._clslevel[target_cls]
- self.name = parent.__name__
- self.propagate = frozenset()
- self.listeners = ()
-
-
- def for_modify(self, obj):
- """Return an event collection which can be modified.
-
- For _EmptyListener at the instance level of
- a dispatcher, this generates a new
- _ListenerCollection, applies it to the instance,
- and returns it.
-
- """
- result = _ListenerCollection(self.parent, obj._parent_cls)
- if obj.__dict__[self.name] is self:
- obj.__dict__[self.name] = result
- return result
-
- def _needs_modify(self, *args, **kw):
- raise NotImplementedError("need to call for_modify()")
-
- exec_once = insert = append = remove = clear = _needs_modify
-
- def __call__(self, *args, **kw):
- """Execute this event."""
-
- for fn in self.parent_listeners:
- fn(*args, **kw)
-
- def __len__(self):
- return len(self.parent_listeners)
-
- def __iter__(self):
- return iter(self.parent_listeners)
-
- def __bool__(self):
- return bool(self.parent_listeners)
-
- __nonzero__ = __bool__
-
-
-class _CompoundListener(_HasParentDispatchDescriptor):
- _exec_once = False
-
- def exec_once(self, *args, **kw):
- """Execute this event, but only if it has not been
- executed already for this collection."""
-
- if not self._exec_once:
- self(*args, **kw)
- self._exec_once = True
-
- # I'm not entirely thrilled about the overhead here,
- # but this allows class-level listeners to be added
- # at any point.
- #
- # In the absense of instance-level listeners,
- # we stay with the _EmptyListener object when called
- # at the instance level.
-
- def __call__(self, *args, **kw):
- """Execute this event."""
-
- for fn in self.parent_listeners:
- fn(*args, **kw)
- for fn in self.listeners:
- fn(*args, **kw)
-
- def __len__(self):
- return len(self.parent_listeners) + len(self.listeners)
-
- def __iter__(self):
- return chain(self.parent_listeners, self.listeners)
-
- def __bool__(self):
- return bool(self.listeners or self.parent_listeners)
-
- __nonzero__ = __bool__
-
-class _ListenerCollection(_CompoundListener):
- """Instance-level attributes on instances of :class:`._Dispatch`.
-
- Represents a collection of listeners.
-
- As of 0.7.9, _ListenerCollection is only first
- created via the _EmptyListener.for_modify() method.
-
- """
-
- def __init__(self, parent, target_cls):
- if target_cls not in parent._clslevel:
- parent.update_subclass(target_cls)
- self.parent_listeners = parent._clslevel[target_cls]
- self.parent = parent
- self.name = parent.__name__
- self.listeners = []
- self.propagate = set()
-
- def for_modify(self, obj):
- """Return an event collection which can be modified.
-
- For _ListenerCollection at the instance level of
- a dispatcher, this returns self.
-
- """
- return self
-
- def _update(self, other, only_propagate=True):
- """Populate from the listeners in another :class:`_Dispatch`
- object."""
-
- existing_listeners = self.listeners
- existing_listener_set = set(existing_listeners)
- self.propagate.update(other.propagate)
- existing_listeners.extend([l for l
- in other.listeners
- if l not in existing_listener_set
- and not only_propagate or l in self.propagate
- ])
-
- def insert(self, obj, target, propagate):
- if obj not in self.listeners:
- self.listeners.insert(0, obj)
- if propagate:
- self.propagate.add(obj)
-
- def append(self, obj, target, propagate):
- if obj not in self.listeners:
- self.listeners.append(obj)
- if propagate:
- self.propagate.add(obj)
-
- def remove(self, obj, target):
- if obj in self.listeners:
- self.listeners.remove(obj)
- self.propagate.discard(obj)
-
- def clear(self):
- self.listeners[:] = []
- self.propagate.clear()
-
-
-class _JoinedDispatcher(object):
- """Represent a connection between two _Dispatch objects."""
-
- def __init__(self, local, parent):
- self.local = local
- self.parent = parent
- self._parent_cls = local._parent_cls
-
-
-class _JoinedDispatchDescriptor(object):
- def __init__(self, name):
- self.name = name
-
- def __get__(self, obj, cls):
- if obj is None:
- return self
- else:
- obj.__dict__[self.name] = ret = _JoinedListener(
- obj.parent, self.name,
- getattr(obj.local, self.name)
- )
- return ret
-
-
-class _JoinedListener(_CompoundListener):
- _exec_once = False
-
- def __init__(self, parent, name, local):
- self.parent = parent
- self.name = name
- self.local = local
- self.parent_listeners = self.local
-
- # fix .listeners for the parent. This means
- # new events added to the parent won't be picked
- # up here. Alternatively, the listeners can
- # be via @property to just return getattr(self.parent, self.name)
- # each time. less performant.
- self.listeners = list(getattr(self.parent, self.name))
-
- def _adjust_fn_spec(self, fn, named):
- return self.local._adjust_fn_spec(fn, named)
-
- def for_modify(self, obj):
- self.local = self.parent_listeners = self.local.for_modify(obj)
- return self
-
- def insert(self, obj, target, propagate):
- self.local.insert(obj, target, propagate)
-
- def append(self, obj, target, propagate):
- self.local.append(obj, target, propagate)
-
- def remove(self, obj, target):
- self.local.remove(obj, target)
-
- def clear(self):
- raise NotImplementedError()
-
-
-class dispatcher(object):
- """Descriptor used by target classes to
- deliver the _Dispatch class at the class level
- and produce new _Dispatch instances for target
- instances.
-
- """
- def __init__(self, events):
- self.dispatch_cls = events.dispatch
- self.events = events
-
- def __get__(self, obj, cls):
- if obj is None:
- return self.dispatch_cls
- obj.__dict__['dispatch'] = disp = self.dispatch_cls(cls)
- return disp
diff --git a/lib/sqlalchemy/event/__init__.py b/lib/sqlalchemy/event/__init__.py
new file mode 100644
index 000000000..b43bf9bfa
--- /dev/null
+++ b/lib/sqlalchemy/event/__init__.py
@@ -0,0 +1,10 @@
+# event/__init__.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from .api import CANCEL, NO_RETVAL, listen, listens_for, remove, contains
+from .base import Events, dispatcher
+from .attr import RefCollection
+from .legacy import _legacy_signature
diff --git a/lib/sqlalchemy/event/api.py b/lib/sqlalchemy/event/api.py
new file mode 100644
index 000000000..20e74d90e
--- /dev/null
+++ b/lib/sqlalchemy/event/api.py
@@ -0,0 +1,107 @@
+# event/api.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Public API functions for the event system.
+
+"""
+from __future__ import absolute_import
+
+from .. import util, exc
+from .base import _registrars
+from .registry import _EventKey
+
+CANCEL = util.symbol('CANCEL')
+NO_RETVAL = util.symbol('NO_RETVAL')
+
+
+def _event_key(target, identifier, fn):
+ for evt_cls in _registrars[identifier]:
+ tgt = evt_cls._accept_with(target)
+ if tgt is not None:
+ return _EventKey(target, identifier, fn, tgt)
+ else:
+ raise exc.InvalidRequestError("No such event '%s' for target '%s'" %
+ (identifier, target))
+
+def listen(target, identifier, fn, *args, **kw):
+ """Register a listener function for the given target.
+
+ e.g.::
+
+ from sqlalchemy import event
+ from sqlalchemy.schema import UniqueConstraint
+
+ def unique_constraint_name(const, table):
+ const.name = "uq_%s_%s" % (
+ table.name,
+ list(const.columns)[0].name
+ )
+ event.listen(
+ UniqueConstraint,
+ "after_parent_attach",
+ unique_constraint_name)
+
+ """
+
+ _event_key(target, identifier, fn).listen(*args, **kw)
+
+
+def listens_for(target, identifier, *args, **kw):
+ """Decorate a function as a listener for the given target + identifier.
+
+ e.g.::
+
+ from sqlalchemy import event
+ from sqlalchemy.schema import UniqueConstraint
+
+ @event.listens_for(UniqueConstraint, "after_parent_attach")
+ def unique_constraint_name(const, table):
+ const.name = "uq_%s_%s" % (
+ table.name,
+ list(const.columns)[0].name
+ )
+ """
+ def decorate(fn):
+ listen(target, identifier, fn, *args, **kw)
+ return fn
+ return decorate
+
+
+def remove(target, identifier, fn):
+ """Remove an event listener.
+
+ The arguments here should match exactly those which were sent to
+ :func:`.listen`; all the event registration which proceeded as a result
+ of this call will be reverted by calling :func:`.remove` with the same
+ arguments.
+
+ e.g.::
+
+ # if a function was registered like this...
+ @event.listens_for(SomeMappedClass, "before_insert", propagate=True)
+ def my_listener_function(*arg):
+ pass
+
+ # ... it's removed like this
+ event.remove(SomeMappedClass, "before_insert", my_listener_function)
+
+ Above, the listener function associated with ``SomeMappedClass`` was also
+ propagated to subclasses of ``SomeMappedClass``; the :func:`.remove` function
+ will revert all of these operations.
+
+ .. versionadded:: 0.9.0
+
+ """
+ _event_key(target, identifier, fn).remove()
+
+def contains(target, identifier, fn):
+ """Return True if the given target/ident/fn is set up to listen.
+
+ .. versionadded:: 0.9.0
+
+ """
+
+ return _event_key(target, identifier, fn).contains()
diff --git a/lib/sqlalchemy/event/attr.py b/lib/sqlalchemy/event/attr.py
new file mode 100644
index 000000000..3f8947546
--- /dev/null
+++ b/lib/sqlalchemy/event/attr.py
@@ -0,0 +1,376 @@
+# event/attr.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Attribute implementation for _Dispatch classes.
+
+The various listener targets for a particular event class are represented
+as attributes, which refer to collections of listeners to be fired off.
+These collections can exist at the class level as well as at the instance
+level. An event is fired off using code like this::
+
+ some_object.dispatch.first_connect(arg1, arg2)
+
+Above, ``some_object.dispatch`` would be an instance of ``_Dispatch`` and
+``first_connect`` is typically an instance of ``_ListenerCollection``
+if event listeners are present, or ``_EmptyListener`` if none are present.
+
+The attribute mechanics here spend effort trying to ensure listener functions
+are available with a minimum of function call overhead, that unnecessary
+objects aren't created (i.e. many empty per-instance listener collections),
+as well as that everything is garbage collectable when owning references are
+lost. Other features such as "propagation" of listener functions across
+many ``_Dispatch`` instances, "joining" of multiple ``_Dispatch`` instances,
+as well as support for subclass propagation (e.g. events assigned to
+``Pool`` vs. ``QueuePool``) are all implemented here.
+
+"""
+
+from __future__ import absolute_import
+
+from .. import util
+from . import registry
+from . import legacy
+from itertools import chain
+import weakref
+
+class RefCollection(object):
+ @util.memoized_property
+ def ref(self):
+ return weakref.ref(self, registry._collection_gced)
+
+class _DispatchDescriptor(RefCollection):
+ """Class-level attributes on :class:`._Dispatch` classes."""
+
+ def __init__(self, parent_dispatch_cls, fn):
+ self.__name__ = fn.__name__
+ argspec = util.inspect_getargspec(fn)
+ self.arg_names = argspec.args[1:]
+ self.has_kw = bool(argspec.keywords)
+ self.legacy_signatures = list(reversed(
+ sorted(
+ getattr(fn, '_legacy_signatures', []),
+ key=lambda s: s[0]
+ )
+ ))
+ self.__doc__ = fn.__doc__ = legacy._augment_fn_docs(
+ self, parent_dispatch_cls, fn)
+
+ self._clslevel = weakref.WeakKeyDictionary()
+ self._empty_listeners = weakref.WeakKeyDictionary()
+
+ def _adjust_fn_spec(self, fn, named):
+ if named:
+ fn = self._wrap_fn_for_kw(fn)
+ if self.legacy_signatures:
+ try:
+ argspec = util.get_callable_argspec(fn, no_self=True)
+ except ValueError:
+ pass
+ else:
+ fn = legacy._wrap_fn_for_legacy(self, fn, argspec)
+ return fn
+
+ def _wrap_fn_for_kw(self, fn):
+ def wrap_kw(*args, **kw):
+ argdict = dict(zip(self.arg_names, args))
+ argdict.update(kw)
+ return fn(**argdict)
+ return wrap_kw
+
+
+ def insert(self, event_key, propagate):
+ target = event_key.dispatch_target
+ assert isinstance(target, type), \
+ "Class-level Event targets must be classes."
+ stack = [target]
+ while stack:
+ cls = stack.pop(0)
+ stack.extend(cls.__subclasses__())
+ if cls is not target and cls not in self._clslevel:
+ self.update_subclass(cls)
+ else:
+ if cls not in self._clslevel:
+ self._clslevel[cls] = []
+ self._clslevel[cls].insert(0, event_key._listen_fn)
+ registry._stored_in_collection(event_key, self)
+
+ def append(self, event_key, propagate):
+ target = event_key.dispatch_target
+ assert isinstance(target, type), \
+ "Class-level Event targets must be classes."
+
+ stack = [target]
+ while stack:
+ cls = stack.pop(0)
+ stack.extend(cls.__subclasses__())
+ if cls is not target and cls not in self._clslevel:
+ self.update_subclass(cls)
+ else:
+ if cls not in self._clslevel:
+ self._clslevel[cls] = []
+ self._clslevel[cls].append(event_key._listen_fn)
+ registry._stored_in_collection(event_key, self)
+
+ def update_subclass(self, target):
+ if target not in self._clslevel:
+ self._clslevel[target] = []
+ clslevel = self._clslevel[target]
+ for cls in target.__mro__[1:]:
+ if cls in self._clslevel:
+ clslevel.extend([
+ fn for fn
+ in self._clslevel[cls]
+ if fn not in clslevel
+ ])
+
+ def remove(self, event_key):
+ target = event_key.dispatch_target
+ stack = [target]
+ while stack:
+ cls = stack.pop(0)
+ stack.extend(cls.__subclasses__())
+ if cls in self._clslevel:
+ self._clslevel[cls].remove(event_key.fn)
+ registry._removed_from_collection(event_key, self)
+
+ def clear(self):
+ """Clear all class level listeners"""
+
+ to_clear = set()
+ for dispatcher in self._clslevel.values():
+ to_clear.update(dispatcher)
+ dispatcher[:] = []
+ registry._clear(self, to_clear)
+
+ def for_modify(self, obj):
+ """Return an event collection which can be modified.
+
+ For _DispatchDescriptor at the class level of
+ a dispatcher, this returns self.
+
+ """
+ return self
+
+ def __get__(self, obj, cls):
+ if obj is None:
+ return self
+ elif obj._parent_cls in self._empty_listeners:
+ ret = self._empty_listeners[obj._parent_cls]
+ else:
+ self._empty_listeners[obj._parent_cls] = ret = \
+ _EmptyListener(self, obj._parent_cls)
+ # assigning it to __dict__ means
+ # memoized for fast re-access. but more memory.
+ obj.__dict__[self.__name__] = ret
+ return ret
+
+class _HasParentDispatchDescriptor(object):
+ def _adjust_fn_spec(self, fn, named):
+ return self.parent._adjust_fn_spec(fn, named)
+
+class _EmptyListener(_HasParentDispatchDescriptor):
+ """Serves as a class-level interface to the events
+ served by a _DispatchDescriptor, when there are no
+ instance-level events present.
+
+ Is replaced by _ListenerCollection when instance-level
+ events are added.
+
+ """
+ def __init__(self, parent, target_cls):
+ if target_cls not in parent._clslevel:
+ parent.update_subclass(target_cls)
+ self.parent = parent # _DispatchDescriptor
+ self.parent_listeners = parent._clslevel[target_cls]
+ self.name = parent.__name__
+ self.propagate = frozenset()
+ self.listeners = ()
+
+
+ def for_modify(self, obj):
+ """Return an event collection which can be modified.
+
+ For _EmptyListener at the instance level of
+ a dispatcher, this generates a new
+ _ListenerCollection, applies it to the instance,
+ and returns it.
+
+ """
+ result = _ListenerCollection(self.parent, obj._parent_cls)
+ if obj.__dict__[self.name] is self:
+ obj.__dict__[self.name] = result
+ return result
+
+ def _needs_modify(self, *args, **kw):
+ raise NotImplementedError("need to call for_modify()")
+
+ exec_once = insert = append = remove = clear = _needs_modify
+
+ def __call__(self, *args, **kw):
+ """Execute this event."""
+
+ for fn in self.parent_listeners:
+ fn(*args, **kw)
+
+ def __len__(self):
+ return len(self.parent_listeners)
+
+ def __iter__(self):
+ return iter(self.parent_listeners)
+
+ def __bool__(self):
+ return bool(self.parent_listeners)
+
+ __nonzero__ = __bool__
+
+
+class _CompoundListener(_HasParentDispatchDescriptor):
+ _exec_once = False
+
+ def exec_once(self, *args, **kw):
+ """Execute this event, but only if it has not been
+ executed already for this collection."""
+
+ if not self._exec_once:
+ self(*args, **kw)
+ self._exec_once = True
+
+ def __call__(self, *args, **kw):
+ """Execute this event."""
+
+ for fn in self.parent_listeners:
+ fn(*args, **kw)
+ for fn in self.listeners:
+ fn(*args, **kw)
+
+ def __len__(self):
+ return len(self.parent_listeners) + len(self.listeners)
+
+ def __iter__(self):
+ return chain(self.parent_listeners, self.listeners)
+
+ def __bool__(self):
+ return bool(self.listeners or self.parent_listeners)
+
+ __nonzero__ = __bool__
+
+class _ListenerCollection(RefCollection, _CompoundListener):
+ """Instance-level attributes on instances of :class:`._Dispatch`.
+
+ Represents a collection of listeners.
+
+ As of 0.7.9, _ListenerCollection is only first
+ created via the _EmptyListener.for_modify() method.
+
+ """
+
+ def __init__(self, parent, target_cls):
+ if target_cls not in parent._clslevel:
+ parent.update_subclass(target_cls)
+ self.parent_listeners = parent._clslevel[target_cls]
+ self.parent = parent
+ self.name = parent.__name__
+ self.listeners = []
+ self.propagate = set()
+
+ def for_modify(self, obj):
+ """Return an event collection which can be modified.
+
+ For _ListenerCollection at the instance level of
+ a dispatcher, this returns self.
+
+ """
+ return self
+
+ def _update(self, other, only_propagate=True):
+ """Populate from the listeners in another :class:`_Dispatch`
+ object."""
+
+ existing_listeners = self.listeners
+ existing_listener_set = set(existing_listeners)
+ self.propagate.update(other.propagate)
+ other_listeners = [l for l
+ in other.listeners
+ if l not in existing_listener_set
+ and not only_propagate or l in self.propagate
+ ]
+
+ existing_listeners.extend(other_listeners)
+
+ to_associate = other.propagate.union(other_listeners)
+ registry._stored_in_collection_multi(self, other, to_associate)
+
+ def insert(self, event_key, propagate):
+ if event_key._listen_fn not in self.listeners:
+ event_key.prepend_to_list(self, self.listeners)
+ if propagate:
+ self.propagate.add(event_key._listen_fn)
+
+ def append(self, event_key, propagate):
+ if event_key._listen_fn not in self.listeners:
+ event_key.append_to_list(self, self.listeners)
+ if propagate:
+ self.propagate.add(event_key._listen_fn)
+
+ def remove(self, event_key):
+ self.listeners.remove(event_key._listen_fn)
+ self.propagate.discard(event_key._listen_fn)
+ registry._removed_from_collection(event_key, self)
+
+ def clear(self):
+ registry._clear(self, self.listeners)
+ self.propagate.clear()
+ self.listeners[:] = []
+
+
+class _JoinedDispatchDescriptor(object):
+ def __init__(self, name):
+ self.name = name
+
+ def __get__(self, obj, cls):
+ if obj is None:
+ return self
+ else:
+ obj.__dict__[self.name] = ret = _JoinedListener(
+ obj.parent, self.name,
+ getattr(obj.local, self.name)
+ )
+ return ret
+
+
+class _JoinedListener(_CompoundListener):
+ _exec_once = False
+
+ def __init__(self, parent, name, local):
+ self.parent = parent
+ self.name = name
+ self.local = local
+ self.parent_listeners = self.local
+
+ @property
+ def listeners(self):
+ return getattr(self.parent, self.name)
+
+ def _adjust_fn_spec(self, fn, named):
+ return self.local._adjust_fn_spec(fn, named)
+
+ def for_modify(self, obj):
+ self.local = self.parent_listeners = self.local.for_modify(obj)
+ return self
+
+ def insert(self, event_key, propagate):
+ self.local.insert(event_key, propagate)
+
+ def append(self, event_key, propagate):
+ self.local.append(event_key, propagate)
+
+ def remove(self, event_key):
+ self.local.remove(event_key)
+
+ def clear(self):
+ raise NotImplementedError()
+
+
diff --git a/lib/sqlalchemy/event/base.py b/lib/sqlalchemy/event/base.py
new file mode 100644
index 000000000..5c8d92cb3
--- /dev/null
+++ b/lib/sqlalchemy/event/base.py
@@ -0,0 +1,217 @@
+# event/base.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Base implementation classes.
+
+The public-facing ``Events`` serves as the base class for an event interface;
+its public attributes represent different kinds of events. These attributes
+are mirrored onto a ``_Dispatch`` class, which serves as a container for
+collections of listener functions. These collections are represented both
+at the class level of a particular ``_Dispatch`` class as well as within
+instances of ``_Dispatch``.
+
+"""
+from __future__ import absolute_import
+
+from .. import util
+from .attr import _JoinedDispatchDescriptor, _EmptyListener, _DispatchDescriptor
+
+_registrars = util.defaultdict(list)
+
+
+def _is_event_name(name):
+ return not name.startswith('_') and name != 'dispatch'
+
+
+class _UnpickleDispatch(object):
+ """Serializable callable that re-generates an instance of
+ :class:`_Dispatch` given a particular :class:`.Events` subclass.
+
+ """
+ def __call__(self, _parent_cls):
+ for cls in _parent_cls.__mro__:
+ if 'dispatch' in cls.__dict__:
+ return cls.__dict__['dispatch'].dispatch_cls(_parent_cls)
+ else:
+ raise AttributeError("No class with a 'dispatch' member present.")
+
+
+class _Dispatch(object):
+ """Mirror the event listening definitions of an Events class with
+ listener collections.
+
+ Classes which define a "dispatch" member will return a
+ non-instantiated :class:`._Dispatch` subclass when the member
+ is accessed at the class level. When the "dispatch" member is
+ accessed at the instance level of its owner, an instance
+ of the :class:`._Dispatch` class is returned.
+
+ A :class:`._Dispatch` class is generated for each :class:`.Events`
+ class defined, by the :func:`._create_dispatcher_class` function.
+ The original :class:`.Events` classes remain untouched.
+ This decouples the construction of :class:`.Events` subclasses from
+ the implementation used by the event internals, and allows
+ inspecting tools like Sphinx to work in an unsurprising
+ way against the public API.
+
+ """
+
+ _events = None
+ """reference the :class:`.Events` class which this
+ :class:`._Dispatch` is created for."""
+
+ def __init__(self, _parent_cls):
+ self._parent_cls = _parent_cls
+
+ @util.classproperty
+ def _listen(cls):
+ return cls._events._listen
+
+ def _join(self, other):
+ """Create a 'join' of this :class:`._Dispatch` and another.
+
+ This new dispatcher will dispatch events to both
+ :class:`._Dispatch` objects.
+
+ """
+ if '_joined_dispatch_cls' not in self.__class__.__dict__:
+ cls = type(
+ "Joined%s" % self.__class__.__name__,
+ (_JoinedDispatcher, self.__class__), {}
+ )
+ for ls in _event_descriptors(self):
+ setattr(cls, ls.name, _JoinedDispatchDescriptor(ls.name))
+
+ self.__class__._joined_dispatch_cls = cls
+ return self._joined_dispatch_cls(self, other)
+
+ def __reduce__(self):
+ return _UnpickleDispatch(), (self._parent_cls, )
+
+ def _update(self, other, only_propagate=True):
+ """Populate from the listeners in another :class:`_Dispatch`
+ object."""
+
+ for ls in _event_descriptors(other):
+ if isinstance(ls, _EmptyListener):
+ continue
+ getattr(self, ls.name).\
+ for_modify(self)._update(ls, only_propagate=only_propagate)
+
+ @util.hybridmethod
+ def _clear(self):
+ for attr in dir(self):
+ if _is_event_name(attr):
+ getattr(self, attr).for_modify(self).clear()
+
+
+def _event_descriptors(target):
+ return [getattr(target, k) for k in dir(target) if _is_event_name(k)]
+
+
+class _EventMeta(type):
+ """Intercept new Event subclasses and create
+ associated _Dispatch classes."""
+
+ def __init__(cls, classname, bases, dict_):
+ _create_dispatcher_class(cls, classname, bases, dict_)
+ return type.__init__(cls, classname, bases, dict_)
+
+
+def _create_dispatcher_class(cls, classname, bases, dict_):
+ """Create a :class:`._Dispatch` class corresponding to an
+ :class:`.Events` class."""
+
+ # there's all kinds of ways to do this,
+ # i.e. make a Dispatch class that shares the '_listen' method
+ # of the Event class, this is the straight monkeypatch.
+ dispatch_base = getattr(cls, 'dispatch', _Dispatch)
+ dispatch_cls = type("%sDispatch" % classname,
+ (dispatch_base, ), {})
+ cls._set_dispatch(cls, dispatch_cls)
+
+ for k in dict_:
+ if _is_event_name(k):
+ setattr(dispatch_cls, k, _DispatchDescriptor(cls, dict_[k]))
+ _registrars[k].append(cls)
+
+ if getattr(cls, '_dispatch_target', None):
+ cls._dispatch_target.dispatch = dispatcher(cls)
+
+
+def _remove_dispatcher(cls):
+ for k in dir(cls):
+ if _is_event_name(k):
+ _registrars[k].remove(cls)
+ if not _registrars[k]:
+ del _registrars[k]
+
+class Events(util.with_metaclass(_EventMeta, object)):
+ """Define event listening functions for a particular target type."""
+
+ @staticmethod
+ def _set_dispatch(cls, dispatch_cls):
+ # this allows an Events subclass to define additional utility
+ # methods made available to the target via
+ # "self.dispatch._events.<utilitymethod>"
+ # @staticmethod to allow easy "super" calls while in a metaclass
+ # constructor.
+ cls.dispatch = dispatch_cls
+ dispatch_cls._events = cls
+
+
+ @classmethod
+ def _accept_with(cls, target):
+ # Mapper, ClassManager, Session override this to
+ # also accept classes, scoped_sessions, sessionmakers, etc.
+ if hasattr(target, 'dispatch') and (
+ isinstance(target.dispatch, cls.dispatch) or \
+ isinstance(target.dispatch, type) and \
+ issubclass(target.dispatch, cls.dispatch)
+ ):
+ return target
+ else:
+ return None
+
+ @classmethod
+ def _listen(cls, event_key, propagate=False, insert=False, named=False):
+ event_key.base_listen(propagate=propagate, insert=insert, named=named)
+
+ @classmethod
+ def _remove(cls, event_key):
+ event_key.remove()
+
+ @classmethod
+ def _clear(cls):
+ cls.dispatch._clear()
+
+
+class _JoinedDispatcher(object):
+ """Represent a connection between two _Dispatch objects."""
+
+ def __init__(self, local, parent):
+ self.local = local
+ self.parent = parent
+ self._parent_cls = local._parent_cls
+
+
+class dispatcher(object):
+ """Descriptor used by target classes to
+ deliver the _Dispatch class at the class level
+ and produce new _Dispatch instances for target
+ instances.
+
+ """
+ def __init__(self, events):
+ self.dispatch_cls = events.dispatch
+ self.events = events
+
+ def __get__(self, obj, cls):
+ if obj is None:
+ return self.dispatch_cls
+ obj.__dict__['dispatch'] = disp = self.dispatch_cls(cls)
+ return disp
+
diff --git a/lib/sqlalchemy/event/legacy.py b/lib/sqlalchemy/event/legacy.py
new file mode 100644
index 000000000..d8a66674d
--- /dev/null
+++ b/lib/sqlalchemy/event/legacy.py
@@ -0,0 +1,156 @@
+# event/legacy.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Routines to handle adaption of legacy call signatures,
+generation of deprecation notes and docstrings.
+
+"""
+
+from .. import util
+
+def _legacy_signature(since, argnames, converter=None):
+ def leg(fn):
+ if not hasattr(fn, '_legacy_signatures'):
+ fn._legacy_signatures = []
+ fn._legacy_signatures.append((since, argnames, converter))
+ return fn
+ return leg
+
+def _wrap_fn_for_legacy(dispatch_descriptor, fn, argspec):
+ for since, argnames, conv in dispatch_descriptor.legacy_signatures:
+ if argnames[-1] == "**kw":
+ has_kw = True
+ argnames = argnames[0:-1]
+ else:
+ has_kw = False
+
+ if len(argnames) == len(argspec.args) \
+ and has_kw is bool(argspec.keywords):
+
+ if conv:
+ assert not has_kw
+ def wrap_leg(*args):
+ return fn(*conv(*args))
+ else:
+ def wrap_leg(*args, **kw):
+ argdict = dict(zip(dispatch_descriptor.arg_names, args))
+ args = [argdict[name] for name in argnames]
+ if has_kw:
+ return fn(*args, **kw)
+ else:
+ return fn(*args)
+ return wrap_leg
+ else:
+ return fn
+
+def _indent(text, indent):
+ return "\n".join(
+ indent + line
+ for line in text.split("\n")
+ )
+
+def _standard_listen_example(dispatch_descriptor, sample_target, fn):
+ example_kw_arg = _indent(
+ "\n".join(
+ "%(arg)s = kw['%(arg)s']" % {"arg": arg}
+ for arg in dispatch_descriptor.arg_names[0:2]
+ ),
+ " ")
+ if dispatch_descriptor.legacy_signatures:
+ current_since = max(since for since, args, conv
+ in dispatch_descriptor.legacy_signatures)
+ else:
+ current_since = None
+ text = (
+ "from sqlalchemy import event\n\n"
+ "# standard decorator style%(current_since)s\n"
+ "@event.listens_for(%(sample_target)s, '%(event_name)s')\n"
+ "def receive_%(event_name)s(%(named_event_arguments)s%(has_kw_arguments)s):\n"
+ " \"listen for the '%(event_name)s' event\"\n"
+ "\n # ... (event handling logic) ...\n"
+ )
+
+ if len(dispatch_descriptor.arg_names) > 3:
+ text += (
+
+ "\n# named argument style (new in 0.9)\n"
+ "@event.listens_for(%(sample_target)s, '%(event_name)s', named=True)\n"
+ "def receive_%(event_name)s(**kw):\n"
+ " \"listen for the '%(event_name)s' event\"\n"
+ "%(example_kw_arg)s\n"
+ "\n # ... (event handling logic) ...\n"
+ )
+
+ text %= {
+ "current_since": " (arguments as of %s)" %
+ current_since if current_since else "",
+ "event_name": fn.__name__,
+ "has_kw_arguments": ", **kw" if dispatch_descriptor.has_kw else "",
+ "named_event_arguments": ", ".join(dispatch_descriptor.arg_names),
+ "example_kw_arg": example_kw_arg,
+ "sample_target": sample_target
+ }
+ return text
+
+def _legacy_listen_examples(dispatch_descriptor, sample_target, fn):
+ text = ""
+ for since, args, conv in dispatch_descriptor.legacy_signatures:
+ text += (
+ "\n# legacy calling style (pre-%(since)s)\n"
+ "@event.listens_for(%(sample_target)s, '%(event_name)s')\n"
+ "def receive_%(event_name)s(%(named_event_arguments)s%(has_kw_arguments)s):\n"
+ " \"listen for the '%(event_name)s' event\"\n"
+ "\n # ... (event handling logic) ...\n" % {
+ "since": since,
+ "event_name": fn.__name__,
+ "has_kw_arguments": " **kw" if dispatch_descriptor.has_kw else "",
+ "named_event_arguments": ", ".join(args),
+ "sample_target": sample_target
+ }
+ )
+ return text
+
+def _version_signature_changes(dispatch_descriptor):
+ since, args, conv = dispatch_descriptor.legacy_signatures[0]
+ return (
+ "\n.. versionchanged:: %(since)s\n"
+ " The ``%(event_name)s`` event now accepts the \n"
+ " arguments ``%(named_event_arguments)s%(has_kw_arguments)s``.\n"
+ " Listener functions which accept the previous argument \n"
+ " signature(s) listed above will be automatically \n"
+ " adapted to the new signature." % {
+ "since": since,
+ "event_name": dispatch_descriptor.__name__,
+ "named_event_arguments": ", ".join(dispatch_descriptor.arg_names),
+ "has_kw_arguments": ", **kw" if dispatch_descriptor.has_kw else ""
+ }
+ )
+
+def _augment_fn_docs(dispatch_descriptor, parent_dispatch_cls, fn):
+ header = ".. container:: event_signatures\n\n"\
+ " Example argument forms::\n"\
+ "\n"
+
+ sample_target = getattr(parent_dispatch_cls, "_target_class_doc", "obj")
+ text = (
+ header +
+ _indent(
+ _standard_listen_example(
+ dispatch_descriptor, sample_target, fn),
+ " " * 8)
+ )
+ if dispatch_descriptor.legacy_signatures:
+ text += _indent(
+ _legacy_listen_examples(
+ dispatch_descriptor, sample_target, fn),
+ " " * 8)
+
+ text += _version_signature_changes(dispatch_descriptor)
+
+ return util.inject_docstring_text(fn.__doc__,
+ text,
+ 1
+ )
diff --git a/lib/sqlalchemy/event/registry.py b/lib/sqlalchemy/event/registry.py
new file mode 100644
index 000000000..7710ff2d2
--- /dev/null
+++ b/lib/sqlalchemy/event/registry.py
@@ -0,0 +1,236 @@
+# event/registry.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Provides managed registration services on behalf of :func:`.listen`
+arguments.
+
+By "managed registration", we mean that event listening functions and
+other objects can be added to various collections in such a way that their
+membership in all those collections can be revoked at once, based on
+an equivalent :class:`._EventKey`.
+
+"""
+
+from __future__ import absolute_import
+
+import weakref
+import collections
+import types
+from .. import exc
+
+
+_key_to_collection = collections.defaultdict(dict)
+"""
+Given an original listen() argument, can locate all
+listener collections and the listener fn contained
+
+(target, identifier, fn) -> {
+ ref(listenercollection) -> ref(listener_fn)
+ ref(listenercollection) -> ref(listener_fn)
+ ref(listenercollection) -> ref(listener_fn)
+ }
+"""
+
+_collection_to_key = collections.defaultdict(dict)
+"""
+Given a _ListenerCollection or _DispatchDescriptor, can locate
+all the original listen() arguments and the listener fn contained
+
+ref(listenercollection) -> {
+ ref(listener_fn) -> (target, identifier, fn),
+ ref(listener_fn) -> (target, identifier, fn),
+ ref(listener_fn) -> (target, identifier, fn),
+ }
+"""
+
+def _collection_gced(ref):
+ # defaultdict, so can't get a KeyError
+ if not _collection_to_key or ref not in _collection_to_key:
+ return
+ listener_to_key = _collection_to_key.pop(ref)
+ for key in listener_to_key.values():
+ if key in _key_to_collection:
+ # defaultdict, so can't get a KeyError
+ dispatch_reg = _key_to_collection[key]
+ dispatch_reg.pop(ref)
+ if not dispatch_reg:
+ _key_to_collection.pop(key)
+
+def _stored_in_collection(event_key, owner):
+ key = event_key._key
+
+ dispatch_reg = _key_to_collection[key]
+
+ owner_ref = owner.ref
+ listen_ref = weakref.ref(event_key._listen_fn)
+
+ if owner_ref in dispatch_reg:
+ assert dispatch_reg[owner_ref] == listen_ref
+ else:
+ dispatch_reg[owner_ref] = listen_ref
+
+ listener_to_key = _collection_to_key[owner_ref]
+ listener_to_key[listen_ref] = key
+
+def _removed_from_collection(event_key, owner):
+ key = event_key._key
+
+ dispatch_reg = _key_to_collection[key]
+
+ listen_ref = weakref.ref(event_key._listen_fn)
+
+ owner_ref = owner.ref
+ dispatch_reg.pop(owner_ref, None)
+ if not dispatch_reg:
+ del _key_to_collection[key]
+
+ if owner_ref in _collection_to_key:
+ listener_to_key = _collection_to_key[owner_ref]
+ listener_to_key.pop(listen_ref)
+
+def _stored_in_collection_multi(newowner, oldowner, elements):
+ if not elements:
+ return
+
+ oldowner = oldowner.ref
+ newowner = newowner.ref
+
+ old_listener_to_key = _collection_to_key[oldowner]
+ new_listener_to_key = _collection_to_key[newowner]
+
+ for listen_fn in elements:
+ listen_ref = weakref.ref(listen_fn)
+ key = old_listener_to_key[listen_ref]
+ dispatch_reg = _key_to_collection[key]
+ if newowner in dispatch_reg:
+ assert dispatch_reg[newowner] == listen_ref
+ else:
+ dispatch_reg[newowner] = listen_ref
+
+ new_listener_to_key[listen_ref] = key
+
+def _clear(owner, elements):
+ if not elements:
+ return
+
+ owner = owner.ref
+ listener_to_key = _collection_to_key[owner]
+ for listen_fn in elements:
+ listen_ref = weakref.ref(listen_fn)
+ key = listener_to_key[listen_ref]
+ dispatch_reg = _key_to_collection[key]
+ dispatch_reg.pop(owner, None)
+
+ if not dispatch_reg:
+ del _key_to_collection[key]
+
+
+class _EventKey(object):
+ """Represent :func:`.listen` arguments.
+ """
+
+
+ def __init__(self, target, identifier, fn, dispatch_target, _fn_wrap=None):
+ self.target = target
+ self.identifier = identifier
+ self.fn = fn
+ if isinstance(fn, types.MethodType):
+ self.fn_key = id(fn.__func__), id(fn.__self__)
+ else:
+ self.fn_key = id(fn)
+ self.fn_wrap = _fn_wrap
+ self.dispatch_target = dispatch_target
+
+ @property
+ def _key(self):
+ return (id(self.target), self.identifier, self.fn_key)
+
+ def with_wrapper(self, fn_wrap):
+ if fn_wrap is self._listen_fn:
+ return self
+ else:
+ return _EventKey(
+ self.target,
+ self.identifier,
+ self.fn,
+ self.dispatch_target,
+ _fn_wrap=fn_wrap
+ )
+
+ def with_dispatch_target(self, dispatch_target):
+ if dispatch_target is self.dispatch_target:
+ return self
+ else:
+ return _EventKey(
+ self.target,
+ self.identifier,
+ self.fn,
+ dispatch_target,
+ _fn_wrap=self.fn_wrap
+ )
+
+ def listen(self, *args, **kw):
+ self.dispatch_target.dispatch._listen(self, *args, **kw)
+
+ def remove(self):
+ key = self._key
+
+ if key not in _key_to_collection:
+ raise exc.InvalidRequestError(
+ "No listeners found for event %s / %r / %s " %
+ (self.target, self.identifier, self.fn)
+ )
+ dispatch_reg = _key_to_collection.pop(key)
+
+ for collection_ref, listener_ref in dispatch_reg.items():
+ collection = collection_ref()
+ listener_fn = listener_ref()
+ if collection is not None and listener_fn is not None:
+ collection.remove(self.with_wrapper(listener_fn))
+
+ def contains(self):
+ """Return True if this event key is registered to listen.
+ """
+ return self._key in _key_to_collection
+
+ def base_listen(self, propagate=False, insert=False,
+ named=False):
+
+ target, identifier, fn = \
+ self.dispatch_target, self.identifier, self._listen_fn
+
+ dispatch_descriptor = getattr(target.dispatch, identifier)
+
+ fn = dispatch_descriptor._adjust_fn_spec(fn, named)
+ self = self.with_wrapper(fn)
+
+ if insert:
+ dispatch_descriptor.\
+ for_modify(target.dispatch).insert(self, propagate)
+ else:
+ dispatch_descriptor.\
+ for_modify(target.dispatch).append(self, propagate)
+
+ @property
+ def _listen_fn(self):
+ return self.fn_wrap or self.fn
+
+ def append_value_to_list(self, owner, list_, value):
+ _stored_in_collection(self, owner)
+ list_.append(value)
+
+ def append_to_list(self, owner, list_):
+ _stored_in_collection(self, owner)
+ list_.append(self._listen_fn)
+
+ def remove_from_list(self, owner, list_):
+ _removed_from_collection(self, owner)
+ list_.remove(self._listen_fn)
+
+ def prepend_to_list(self, owner, list_):
+ _stored_in_collection(self, owner)
+ list_.insert(0, self._listen_fn)
+
diff --git a/lib/sqlalchemy/events.py b/lib/sqlalchemy/events.py
index 4fb997b9c..9f05c8b5b 100644
--- a/lib/sqlalchemy/events.py
+++ b/lib/sqlalchemy/events.py
@@ -1,20 +1,20 @@
# sqlalchemy/events.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Core event interfaces."""
-from . import event, exc, util
-engine = util.importlater('sqlalchemy', 'engine')
-pool = util.importlater('sqlalchemy', 'pool')
-
+from . import event, exc
+from .pool import Pool
+from .engine import Connectable, Engine
+from .sql.base import SchemaEventTarget
class DDLEvents(event.Events):
"""
Define event listeners for schema objects,
- that is, :class:`.SchemaItem` and :class:`.SchemaEvent`
+ that is, :class:`.SchemaItem` and other :class:`.SchemaEventTarget`
subclasses, including :class:`.MetaData`, :class:`.Table`,
:class:`.Column`.
@@ -71,6 +71,7 @@ class DDLEvents(event.Events):
"""
_target_class_doc = "SomeSchemaClassOrObject"
+ _dispatch_target = SchemaEventTarget
def before_create(self, target, connection, **kw):
"""Called before CREATE statments are emitted.
@@ -219,25 +220,6 @@ class DDLEvents(event.Events):
"""
-class SchemaEventTarget(object):
- """Base class for elements that are the targets of :class:`.DDLEvents`
- events.
-
- This includes :class:`.SchemaItem` as well as :class:`.SchemaType`.
-
- """
- dispatch = event.dispatcher(DDLEvents)
-
- def _set_parent(self, parent):
- """Associate with this SchemaEvent's parent object."""
-
- raise NotImplementedError()
-
- def _set_parent_with_dispatch(self, parent):
- self.dispatch.before_parent_attach(self, parent)
- self._set_parent(parent)
- self.dispatch.after_parent_attach(self, parent)
-
class PoolEvents(event.Events):
"""Available events for :class:`.Pool`.
@@ -269,62 +251,74 @@ class PoolEvents(event.Events):
"""
_target_class_doc = "SomeEngineOrPool"
+ _dispatch_target = Pool
@classmethod
def _accept_with(cls, target):
if isinstance(target, type):
- if issubclass(target, engine.Engine):
- return pool.Pool
- elif issubclass(target, pool.Pool):
+ if issubclass(target, Engine):
+ return Pool
+ elif issubclass(target, Pool):
return target
- elif isinstance(target, engine.Engine):
+ elif isinstance(target, Engine):
return target.pool
else:
return target
def connect(self, dbapi_connection, connection_record):
- """Called once for each new DB-API connection or Pool's ``creator()``.
+ """Called at the moment a particular DBAPI connection is first
+ created for a given :class:`.Pool`.
+
+ This event allows one to capture the point directly after which
+ the DBAPI module-level ``.connect()`` method has been used in order
+ to produce a new DBAPI connection.
- :param dbapi_con:
- A newly connected raw DB-API connection (not a SQLAlchemy
- ``Connection`` wrapper).
+ :param dbapi_connection: a DBAPI connection.
- :param con_record:
- The ``_ConnectionRecord`` that persistently manages the connection
+ :param connection_record: the :class:`._ConnectionRecord` managing the
+ DBAPI connection.
"""
def first_connect(self, dbapi_connection, connection_record):
- """Called exactly once for the first DB-API connection.
+ """Called exactly once for the first time a DBAPI connection is
+ checked out from a particular :class:`.Pool`.
- :param dbapi_con:
- A newly connected raw DB-API connection (not a SQLAlchemy
- ``Connection`` wrapper).
+ The rationale for :meth:`.PoolEvents.first_connect` is to determine
+ information about a particular series of database connections based
+ on the settings used for all connections. Since a particular
+ :class:`.Pool` refers to a single "creator" function (which in terms
+ of a :class:`.Engine` refers to the URL and connection options used),
+ it is typically valid to make observations about a single connection
+ that can be safely assumed to be valid about all subsequent connections,
+ such as the database version, the server and client encoding settings,
+ collation settings, and many others.
- :param con_record:
- The ``_ConnectionRecord`` that persistently manages the connection
+ :param dbapi_connection: a DBAPI connection.
+
+ :param connection_record: the :class:`._ConnectionRecord` managing the
+ DBAPI connection.
"""
def checkout(self, dbapi_connection, connection_record, connection_proxy):
"""Called when a connection is retrieved from the Pool.
- :param dbapi_con:
- A raw DB-API connection
+ :param dbapi_connection: a DBAPI connection.
- :param con_record:
- The ``_ConnectionRecord`` that persistently manages the connection
+ :param connection_record: the :class:`._ConnectionRecord` managing the
+ DBAPI connection.
- :param con_proxy:
- The ``_ConnectionFairy`` which manages the connection for the span of
- the current checkout.
+ :param connection_proxy: the :class:`._ConnectionFairy` object which
+ will proxy the public interface of the DBAPI connection for the lifespan
+ of the checkout.
If you raise a :class:`~sqlalchemy.exc.DisconnectionError`, the current
connection will be disposed and a fresh connection retrieved.
Processing of all checkout listeners will abort and restart
using the new connection.
- .. seealso:: :meth:`.ConnectionEvents.connect` - a similar event
+ .. seealso:: :meth:`.ConnectionEvents.engine_connect` - a similar event
which occurs upon creation of a new :class:`.Connection`.
"""
@@ -336,15 +330,14 @@ class PoolEvents(event.Events):
connection has been invalidated. ``checkin`` will not be called
for detached connections. (They do not return to the pool.)
- :param dbapi_con:
- A raw DB-API connection
+ :param dbapi_connection: a DBAPI connection.
- :param con_record:
- The ``_ConnectionRecord`` that persistently manages the connection
+ :param connection_record: the :class:`._ConnectionRecord` managing the
+ DBAPI connection.
"""
- def reset(self, dbapi_con, con_record):
+ def reset(self, dbapi_connection, connection_record):
"""Called before the "reset" action occurs for a pooled connection.
This event represents
@@ -358,11 +351,10 @@ class PoolEvents(event.Events):
the :meth:`.PoolEvents.checkin` event is called, except in those
cases where the connection is discarded immediately after reset.
- :param dbapi_con:
- A raw DB-API connection
+ :param dbapi_connection: a DBAPI connection.
- :param con_record:
- The ``_ConnectionRecord`` that persistently manages the connection
+ :param connection_record: the :class:`._ConnectionRecord` managing the
+ DBAPI connection.
.. versionadded:: 0.8
@@ -374,6 +366,30 @@ class PoolEvents(event.Events):
"""
+ def invalidate(self, dbapi_connection, connection_record, exception):
+ """Called when a DBAPI connection is to be "invalidated".
+
+ This event is called any time the :meth:`._ConnectionRecord.invalidate`
+ method is invoked, either from API usage or via "auto-invalidation".
+ The event occurs before a final attempt to call ``.close()`` on the connection
+ occurs.
+
+ :param dbapi_connection: a DBAPI connection.
+
+ :param connection_record: the :class:`._ConnectionRecord` managing the
+ DBAPI connection.
+
+ :param exception: the exception object corresponding to the reason
+ for this invalidation, if any. May be ``None``.
+
+ .. versionadded:: 0.9.2 Added support for connection invalidation
+ listening.
+
+ .. seealso::
+
+ :ref:`pool_connection_invalidation`
+
+ """
class ConnectionEvents(event.Events):
@@ -448,9 +464,14 @@ class ConnectionEvents(event.Events):
"""
_target_class_doc = "SomeEngine"
+ _dispatch_target = Connectable
+
@classmethod
- def _listen(cls, target, identifier, fn, retval=False):
+ def _listen(cls, event_key, retval=False):
+ target, identifier, fn = \
+ event_key.dispatch_target, event_key.identifier, event_key.fn
+
target._has_events = True
if not retval:
@@ -479,7 +500,7 @@ class ConnectionEvents(event.Events):
"'before_cursor_execute' engine "
"event listeners accept the 'retval=True' "
"argument.")
- event.Events._listen(target, identifier, fn)
+ event_key.with_wrapper(fn).base_listen()
def before_execute(self, conn, clauseelement, multiparams, params):
"""Intercept high level execute() events, receiving uncompiled
@@ -680,7 +701,7 @@ class ConnectionEvents(event.Events):
Note that this method is not called when a new :class:`.Connection`
is produced which is inheriting execution options from its parent
:class:`.Engine`; to intercept this condition, use the
- :meth:`.ConnectionEvents.connect` event.
+ :meth:`.ConnectionEvents.engine_connect` event.
:param conn: The newly copied :class:`.Connection` object
diff --git a/lib/sqlalchemy/exc.py b/lib/sqlalchemy/exc.py
index cfd1e2bc7..68e517e26 100644
--- a/lib/sqlalchemy/exc.py
+++ b/lib/sqlalchemy/exc.py
@@ -1,14 +1,14 @@
# sqlalchemy/exc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Exceptions used with SQLAlchemy.
-The base exception class is :class:`.SQLAlchemyError`. Exceptions which are
+The base exception class is :exc:`.SQLAlchemyError`. Exceptions which are
raised as a result of DBAPI exceptions are all subclasses of
-:class:`.DBAPIError`.
+:exc:`.DBAPIError`.
"""
@@ -26,6 +26,9 @@ class ArgumentError(SQLAlchemyError):
"""
+class NoSuchModuleError(ArgumentError):
+ """Raised when a dynamically-loaded module (usually a database dialect)
+ of a particular name cannot be located."""
class NoForeignKeysError(ArgumentError):
"""Raised when no foreign keys can be located between two selectables
@@ -169,7 +172,7 @@ class UnboundExecutionError(InvalidRequestError):
class DontWrapMixin(object):
"""A mixin class which, when applied to a user-defined Exception class,
- will not be wrapped inside of :class:`.StatementError` if the error is
+ will not be wrapped inside of :exc:`.StatementError` if the error is
emitted within the process of executing a statement.
E.g.::
@@ -187,10 +190,6 @@ class DontWrapMixin(object):
raise MyCustomException("invalid!")
"""
-import sys
-if sys.version_info < (2, 5):
- class DontWrapMixin:
- pass
# Moved to orm.exc; compatibility definition installed by orm import until 0.6
UnmappedColumnError = None
@@ -223,6 +222,10 @@ class StatementError(SQLAlchemyError):
self.statement = statement
self.params = params
self.orig = orig
+ self.detail = []
+
+ def add_detail(self, msg):
+ self.detail.append(msg)
def __reduce__(self):
return self.__class__, (self.args[0], self.statement,
@@ -231,8 +234,13 @@ class StatementError(SQLAlchemyError):
def __str__(self):
from sqlalchemy.sql import util
params_repr = util._repr_params(self.params, 10)
- return ' '.join((SQLAlchemyError.__str__(self),
- repr(self.statement), repr(params_repr)))
+
+ return ' '.join([
+ "(%s)" % det for det in self.detail
+ ] + [
+ SQLAlchemyError.__str__(self),
+ repr(self.statement), repr(params_repr)
+ ])
def __unicode__(self):
return self.__str__()
diff --git a/lib/sqlalchemy/ext/__init__.py b/lib/sqlalchemy/ext/__init__.py
index 0efc37bd5..1d77acaa7 100644
--- a/lib/sqlalchemy/ext/__init__.py
+++ b/lib/sqlalchemy/ext/__init__.py
@@ -1,5 +1,5 @@
# ext/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/ext/associationproxy.py b/lib/sqlalchemy/ext/associationproxy.py
index fca2f0008..e62958b49 100644
--- a/lib/sqlalchemy/ext/associationproxy.py
+++ b/lib/sqlalchemy/ext/associationproxy.py
@@ -1,5 +1,5 @@
# ext/associationproxy.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -242,7 +242,11 @@ class AssociationProxy(interfaces._InspectionAttr):
return self
if self.scalar:
- return self._scalar_get(getattr(obj, self.target_collection))
+ target = getattr(obj, self.target_collection)
+ if target is not None:
+ return self._scalar_get(target)
+ else:
+ return None
else:
try:
# If the owning instance is reborn (orm session resurrect,
diff --git a/lib/sqlalchemy/ext/automap.py b/lib/sqlalchemy/ext/automap.py
new file mode 100644
index 000000000..7a1512f6a
--- /dev/null
+++ b/lib/sqlalchemy/ext/automap.py
@@ -0,0 +1,840 @@
+# ext/automap.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Define an extension to the :mod:`sqlalchemy.ext.declarative` system
+which automatically generates mapped classes and relationships from a database
+schema, typically though not necessarily one which is reflected.
+
+.. versionadded:: 0.9.1 Added :mod:`sqlalchemy.ext.automap`.
+
+.. note::
+
+ The :mod:`sqlalchemy.ext.automap` extension should be considered
+ **experimental** as of 0.9.1. Featureset and API stability is
+ not guaranteed at this time.
+
+It is hoped that the :class:`.AutomapBase` system provides a quick
+and modernized solution to the problem that the very famous
+`SQLSoup <https://sqlsoup.readthedocs.org/en/latest/>`_
+also tries to solve, that of generating a quick and rudimentary object
+model from an existing database on the fly. By addressing the issue strictly
+at the mapper configuration level, and integrating fully with existing
+Declarative class techniques, :class:`.AutomapBase` seeks to provide
+a well-integrated approach to the issue of expediently auto-generating ad-hoc
+mappings.
+
+
+Basic Use
+=========
+
+The simplest usage is to reflect an existing database into a new model.
+We create a new :class:`.AutomapBase` class in a similar manner as to how
+we create a declarative base class, using :func:`.automap_base`.
+We then call :meth:`.AutomapBase.prepare` on the resulting base class,
+asking it to reflect the schema and produce mappings::
+
+ from sqlalchemy.ext.automap import automap_base
+ from sqlalchemy.orm import Session
+ from sqlalchemy import create_engine
+
+ Base = automap_base()
+
+ # engine, suppose it has two tables 'user' and 'address' set up
+ engine = create_engine("sqlite:///mydatabase.db")
+
+ # reflect the tables
+ Base.prepare(engine, reflect=True)
+
+ # mapped classes are now created with names by default
+ # matching that of the table name.
+ User = Base.classes.user
+ Address = Base.classes.address
+
+ session = Session(engine)
+
+ # rudimentary relationships are produced
+ session.add(Address(email_address="foo@bar.com", user=User(name="foo")))
+ session.commit()
+
+ # collection-based relationships are by default named "<classname>_collection"
+ print (u1.address_collection)
+
+Above, calling :meth:`.AutomapBase.prepare` while passing along the
+:paramref:`.AutomapBase.prepare.reflect` parameter indicates that the
+:meth:`.MetaData.reflect` method will be called on this declarative base
+classes' :class:`.MetaData` collection; then, each viable
+:class:`.Table` within the :class:`.MetaData` will get a new mapped class
+generated automatically. The :class:`.ForeignKeyConstraint` objects which
+link the various tables together will be used to produce new, bidirectional
+:func:`.relationship` objects between classes. The classes and relationships
+follow along a default naming scheme that we can customize. At this point,
+our basic mapping consisting of related ``User`` and ``Address`` classes is ready
+to use in the traditional way.
+
+Generating Mappings from an Existing MetaData
+=============================================
+
+We can pass a pre-declared :class:`.MetaData` object to :func:`.automap_base`.
+This object can be constructed in any way, including programmatically, from
+a serialized file, or from itself being reflected using :meth:`.MetaData.reflect`.
+Below we illustrate a combination of reflection and explicit table declaration::
+
+ from sqlalchemy import create_engine, MetaData, Table, Column, ForeignKey
+ engine = create_engine("sqlite:///mydatabase.db")
+
+ # produce our own MetaData object
+ metadata = MetaData()
+
+ # we can reflect it ourselves from a database, using options
+ # such as 'only' to limit what tables we look at...
+ metadata.reflect(engine, only=['user', 'address'])
+
+ # ... or just define our own Table objects with it (or combine both)
+ Table('user_order', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('user_id', ForeignKey('user.id'))
+ )
+
+ # we can then produce a set of mappings from this MetaData.
+ Base = automap_base(metadata=metadata)
+
+ # calling prepare() just sets up mapped classes and relationships.
+ Base.prepare()
+
+ # mapped classes are ready
+ User, Address, Order = Base.classes.user, Base.classes.address, Base.classes.user_order
+
+Specifying Classes Explicitly
+=============================
+
+The :mod:`.sqlalchemy.ext.automap` extension allows classes to be defined
+explicitly, in a way similar to that of the :class:`.DeferredReflection` class.
+Classes that extend from :class:`.AutomapBase` act like regular declarative
+classes, but are not immediately mapped after their construction, and are instead
+mapped when we call :meth:`.AutomapBase.prepare`. The :meth:`.AutomapBase.prepare`
+method will make use of the classes we've established based on the table name
+we use. If our schema contains tables ``user`` and ``address``, we can define
+one or both of the classes to be used::
+
+ from sqlalchemy.ext.automap import automap_base
+ from sqlalchemy import create_engine
+
+ # automap base
+ Base = automap_base()
+
+ # pre-declare User for the 'user' table
+ class User(Base):
+ __tablename__ = 'user'
+
+ # override schema elements like Columns
+ user_name = Column('name', String)
+
+ # override relationships too, if desired.
+ # we must use the same name that automap would use for the relationship,
+ # and also must refer to the class name that automap will generate
+ # for "address"
+ address_collection = relationship("address", collection_class=set)
+
+ # reflect
+ engine = create_engine("sqlite:///mydatabase.db")
+ Base.prepare(engine, reflect=True)
+
+ # we still have Address generated from the tablename "address",
+ # but User is the same as Base.classes.User now
+
+ Address = Base.classes.address
+
+ u1 = session.query(User).first()
+ print (u1.address_collection)
+
+ # the backref is still there:
+ a1 = session.query(Address).first()
+ print (a1.user)
+
+Above, one of the more intricate details is that we illustrated overriding
+one of the :func:`.relationship` objects that automap would have created.
+To do this, we needed to make sure the names match up with what automap
+would normally generate, in that the relationship name would be ``User.address_collection``
+and the name of the class referred to, from automap's perspective, is called
+``address``, even though we are referring to it as ``Address`` within our usage
+of this class.
+
+Overriding Naming Schemes
+=========================
+
+:mod:`.sqlalchemy.ext.automap` is tasked with producing mapped classes and
+relationship names based on a schema, which means it has decision points in how
+these names are determined. These three decision points are provided using
+functions which can be passed to the :meth:`.AutomapBase.prepare` method, and
+are known as :func:`.classname_for_table`,
+:func:`.name_for_scalar_relationship`,
+and :func:`.name_for_collection_relationship`. Any or all of these
+functions are provided as in the example below, where we use a "camel case"
+scheme for class names and a "pluralizer" for collection names using the
+`Inflect <https://pypi.python.org/pypi/inflect>`_ package::
+
+ import re
+ import inflect
+
+ def camelize_classname(base, tablename, table):
+ "Produce a 'camelized' class name, e.g. "
+ "'words_and_underscores' -> 'WordsAndUnderscores'"
+
+ return str(tablename[0].upper() + \\
+ re.sub(r'_(\w)', lambda m: m.group(1).upper(), tablename[1:]))
+
+ _pluralizer = inflect.engine()
+ def pluralize_collection(base, local_cls, referred_cls, constraint):
+ "Produce an 'uncamelized', 'pluralized' class name, e.g. "
+ "'SomeTerm' -> 'some_terms'"
+
+ referred_name = referred_cls.__name__
+ uncamelized = referred_name[0].lower() + \\
+ re.sub(r'\W',
+ lambda m: "_%s" % m.group(0).lower(),
+ referred_name[1:])
+ pluralized = _pluralizer.plural(uncamelized)
+ return pluralized
+
+ from sqlalchemy.ext.automap import automap_base
+
+ Base = automap_base()
+
+ engine = create_engine("sqlite:///mydatabase.db")
+
+ Base.prepare(engine, reflect=True,
+ classname_for_table=camelize_classname,
+ name_for_collection_relationship=pluralize_collection
+ )
+
+From the above mapping, we would now have classes ``User`` and ``Address``,
+where the collection from ``User`` to ``Address`` is called ``User.addresses``::
+
+ User, Address = Base.classes.User, Base.classes.Address
+
+ u1 = User(addresses=[Address(email="foo@bar.com")])
+
+Relationship Detection
+======================
+
+The vast majority of what automap accomplishes is the generation of
+:func:`.relationship` structures based on foreign keys. The mechanism
+by which this works for many-to-one and one-to-many relationships is as follows:
+
+1. A given :class:`.Table`, known to be mapped to a particular class,
+ is examined for :class:`.ForeignKeyConstraint` objects.
+
+2. From each :class:`.ForeignKeyConstraint`, the remote :class:`.Table`
+ object present is matched up to the class to which it is to be mapped,
+ if any, else it is skipped.
+
+3. As the :class:`.ForeignKeyConstraint` we are examining corresponds to a reference
+ from the immediate mapped class,
+ the relationship will be set up as a many-to-one referring to the referred class;
+ a corresponding one-to-many backref will be created on the referred class referring
+ to this class.
+
+4. The names of the relationships are determined using the
+ :paramref:`.AutomapBase.prepare.name_for_scalar_relationship` and
+ :paramref:`.AutomapBase.prepare.name_for_collection_relationship`
+ callable functions. It is important to note that the default relationship
+ naming derives the name from **the actual class name**. If you've
+ given a particular class an explicit name by declaring it, or specified an
+ alternate class naming scheme, that's the name from which the relationship
+ name will be derived.
+
+5. The classes are inspected for an existing mapped property matching these
+ names. If one is detected on one side, but none on the other side, :class:`.AutomapBase`
+ attempts to create a relationship on the missing side, then uses the
+ :paramref:`.relationship.back_populates` parameter in order to point
+ the new relationship to the other side.
+
+6. In the usual case where no relationship is on either side,
+ :meth:`.AutomapBase.prepare` produces a :func:`.relationship` on the "many-to-one"
+ side and matches it to the other using the :paramref:`.relationship.backref`
+ parameter.
+
+7. Production of the :func:`.relationship` and optionally the :func:`.backref`
+ is handed off to the :paramref:`.AutomapBase.prepare.generate_relationship`
+ function, which can be supplied by the end-user in order to augment
+ the arguments passed to :func:`.relationship` or :func:`.backref` or to
+ make use of custom implementations of these functions.
+
+Custom Relationship Arguments
+-----------------------------
+
+The :paramref:`.AutomapBase.prepare.generate_relationship` hook can be used
+to add parameters to relationships. For most cases, we can make use of the
+existing :func:`.automap.generate_relationship` function to return
+the object, after augmenting the given keyword dictionary with our own
+arguments.
+
+Below is an illustration of how to send
+:paramref:`.relationship.cascade` and
+:paramref:`.relationship.passive_deletes`
+options along to all one-to-many relationships::
+
+ from sqlalchemy.ext.automap import generate_relationship
+
+ def _gen_relationship(base, direction, return_fn,
+ attrname, local_cls, referred_cls, **kw):
+ if direction is interfaces.ONETOMANY:
+ kw['cascade'] = 'all, delete-orphan'
+ kw['passive_deletes'] = True
+ # make use of the built-in function to actually return
+ # the result.
+ return generate_relationship(base, direction, return_fn,
+ attrname, local_cls, referred_cls, **kw)
+
+ from sqlalchemy.ext.automap import automap_base
+ from sqlalchemy import create_engine
+
+ # automap base
+ Base = automap_base()
+
+ engine = create_engine("sqlite:///mydatabase.db")
+ Base.prepare(engine, reflect=True,
+ generate_relationship=_gen_relationship)
+
+Many-to-Many relationships
+--------------------------
+
+:mod:`.sqlalchemy.ext.automap` will generate many-to-many relationships, e.g.
+those which contain a ``secondary`` argument. The process for producing these
+is as follows:
+
+1. A given :class:`.Table` is examined for :class:`.ForeignKeyConstraint` objects,
+ before any mapped class has been assigned to it.
+
+2. If the table contains two and exactly two :class:`.ForeignKeyConstraint`
+ objects, and all columns within this table are members of these two
+ :class:`.ForeignKeyConstraint` objects, the table is assumed to be a
+ "secondary" table, and will **not be mapped directly**.
+
+3. The two (or one, for self-referential) external tables to which the :class:`.Table`
+ refers to are matched to the classes to which they will be mapped, if any.
+
+4. If mapped classes for both sides are located, a many-to-many bi-directional
+ :func:`.relationship` / :func:`.backref` pair is created between the two
+ classes.
+
+5. The override logic for many-to-many works the same as that of one-to-many/
+ many-to-one; the :func:`.generate_relationship` function is called upon
+ to generate the structures and existing attributes will be maintained.
+
+Using Automap with Explicit Declarations
+========================================
+
+As noted previously, automap has no dependency on reflection, and can make
+use of any collection of :class:`.Table` objects within a :class:`.MetaData`
+collection. From this, it follows that automap can also be used
+to generate missing relationships given an otherwise complete model that fully defines
+table metadata::
+
+ from sqlalchemy.ext.automap import automap_base
+ from sqlalchemy import Column, Integer, String, ForeignKey
+
+ Base = automap_base()
+
+ class User(Base):
+ __tablename__ = 'user'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String)
+
+ class Address(Base):
+ __tablename__ = 'address'
+
+ id = Column(Integer, primary_key=True)
+ email = Column(String)
+ user_id = Column(ForeignKey('user.id'))
+
+ # produce relationships
+ Base.prepare()
+
+ # mapping is complete, with "address_collection" and
+ # "user" relationships
+ a1 = Address(email='u1')
+ a2 = Address(email='u2')
+ u1 = User(address_collection=[a1, a2])
+ assert a1.user is u1
+
+Above, given mostly complete ``User`` and ``Address`` mappings, the
+:class:`.ForeignKey` which we defined on ``Address.user_id`` allowed a
+bidirectional relationship pair ``Address.user`` and ``User.address_collection``
+to be generated on the mapped classes.
+
+Note that when subclassing :class:`.AutomapBase`, the :meth:`.AutomapBase.prepare`
+method is required; if not called, the classes we've declared are in an
+un-mapped state.
+
+
+"""
+from .declarative import declarative_base as _declarative_base
+from .declarative.base import _DeferredMapperConfig
+from ..sql import and_
+from ..schema import ForeignKeyConstraint
+from ..orm import relationship, backref, interfaces
+from .. import util
+
+
+def classname_for_table(base, tablename, table):
+ """Return the class name that should be used, given the name
+ of a table.
+
+ The default implementation is::
+
+ return str(tablename)
+
+ Alternate implementations can be specified using the
+ :paramref:`.AutomapBase.prepare.classname_for_table`
+ parameter.
+
+ :param base: the :class:`.AutomapBase` class doing the prepare.
+
+ :param tablename: string name of the :class:`.Table`.
+
+ :param table: the :class:`.Table` object itself.
+
+ :return: a string class name.
+
+ .. note::
+
+ In Python 2, the string used for the class name **must** be a non-Unicode
+ object, e.g. a ``str()`` object. The ``.name`` attribute of
+ :class:`.Table` is typically a Python unicode subclass, so the ``str()``
+ function should be applied to this name, after accounting for any non-ASCII
+ characters.
+
+ """
+ return str(tablename)
+
+def name_for_scalar_relationship(base, local_cls, referred_cls, constraint):
+ """Return the attribute name that should be used to refer from one
+ class to another, for a scalar object reference.
+
+ The default implementation is::
+
+ return referred_cls.__name__.lower()
+
+ Alternate implementations can be specified using the
+ :paramref:`.AutomapBase.prepare.name_for_scalar_relationship`
+ parameter.
+
+ :param base: the :class:`.AutomapBase` class doing the prepare.
+
+ :param local_cls: the class to be mapped on the local side.
+
+ :param referred_cls: the class to be mapped on the referring side.
+
+ :param constraint: the :class:`.ForeignKeyConstraint` that is being
+ inspected to produce this relationship.
+
+ """
+ return referred_cls.__name__.lower()
+
+def name_for_collection_relationship(base, local_cls, referred_cls, constraint):
+ """Return the attribute name that should be used to refer from one
+ class to another, for a collection reference.
+
+ The default implementation is::
+
+ return referred_cls.__name__.lower() + "_collection"
+
+ Alternate implementations
+ can be specified using the :paramref:`.AutomapBase.prepare.name_for_collection_relationship`
+ parameter.
+
+ :param base: the :class:`.AutomapBase` class doing the prepare.
+
+ :param local_cls: the class to be mapped on the local side.
+
+ :param referred_cls: the class to be mapped on the referring side.
+
+ :param constraint: the :class:`.ForeignKeyConstraint` that is being
+ inspected to produce this relationship.
+
+ """
+ return referred_cls.__name__.lower() + "_collection"
+
+def generate_relationship(base, direction, return_fn, attrname, local_cls, referred_cls, **kw):
+ """Generate a :func:`.relationship` or :func:`.backref` on behalf of two
+ mapped classes.
+
+ An alternate implementation of this function can be specified using the
+ :paramref:`.AutomapBase.prepare.generate_relationship` parameter.
+
+ The default implementation of this function is as follows::
+
+ if return_fn is backref:
+ return return_fn(attrname, **kw)
+ elif return_fn is relationship:
+ return return_fn(referred_cls, **kw)
+ else:
+ raise TypeError("Unknown relationship function: %s" % return_fn)
+
+ :param base: the :class:`.AutomapBase` class doing the prepare.
+
+ :param direction: indicate the "direction" of the relationship; this will
+ be one of :data:`.ONETOMANY`, :data:`.MANYTOONE`, :data:`.MANYTOMANY`.
+
+ :param return_fn: the function that is used by default to create the
+ relationship. This will be either :func:`.relationship` or :func:`.backref`.
+ The :func:`.backref` function's result will be used to produce a new
+ :func:`.relationship` in a second step, so it is critical that user-defined
+ implementations correctly differentiate between the two functions, if
+ a custom relationship function is being used.
+
+ :param attrname: the attribute name to which this relationship is being assigned.
+ If the value of :paramref:`.generate_relationship.return_fn` is the
+ :func:`.backref` function, then this name is the name that is being
+ assigned to the backref.
+
+ :param local_cls: the "local" class to which this relationship or backref
+ will be locally present.
+
+ :param referred_cls: the "referred" class to which the relationship or backref
+ refers.
+
+ :param \**kw: all additional keyword arguments are passed along to the
+ function.
+
+ :return: a :func:`.relationship` or :func:`.backref` construct, as dictated
+ by the :paramref:`.generate_relationship.return_fn` parameter.
+
+ """
+ if return_fn is backref:
+ return return_fn(attrname, **kw)
+ elif return_fn is relationship:
+ return return_fn(referred_cls, **kw)
+ else:
+ raise TypeError("Unknown relationship function: %s" % return_fn)
+
+class AutomapBase(object):
+ """Base class for an "automap" schema.
+
+ The :class:`.AutomapBase` class can be compared to the "declarative base"
+ class that is produced by the :func:`.declarative.declarative_base`
+ function. In practice, the :class:`.AutomapBase` class is always used
+ as a mixin along with an actual declarative base.
+
+ A new subclassable :class:`.AutomapBase` is typically instantiated
+ using the :func:`.automap_base` function.
+
+ .. seealso::
+
+ :ref:`automap_toplevel`
+
+ """
+ __abstract__ = True
+
+ classes = None
+ """An instance of :class:`.util.Properties` containing classes.
+
+ This object behaves much like the ``.c`` collection on a table. Classes
+ are present under the name they were given, e.g.::
+
+ Base = automap_base()
+ Base.prepare(engine=some_engine, reflect=True)
+
+ User, Address = Base.classes.User, Base.classes.Address
+
+ """
+
+ @classmethod
+ def prepare(cls,
+ engine=None,
+ reflect=False,
+ classname_for_table=classname_for_table,
+ collection_class=list,
+ name_for_scalar_relationship=name_for_scalar_relationship,
+ name_for_collection_relationship=name_for_collection_relationship,
+ generate_relationship=generate_relationship):
+
+ """Extract mapped classes and relationships from the :class:`.MetaData` and
+ perform mappings.
+
+ :param engine: an :class:`.Engine` or :class:`.Connection` with which
+ to perform schema reflection, if specified.
+ If the :paramref:`.AutomapBase.prepare.reflect` argument is False, this
+ object is not used.
+
+ :param reflect: if True, the :meth:`.MetaData.reflect` method is called
+ on the :class:`.MetaData` associated with this :class:`.AutomapBase`.
+ The :class:`.Engine` passed via :paramref:`.AutomapBase.prepare.engine` will
+ be used to perform the reflection if present; else, the :class:`.MetaData`
+ should already be bound to some engine, or else the operation will fail.
+
+ :param classname_for_table: callable function which will be used to
+ produce new class names, given a table name. Defaults to
+ :func:`.classname_for_table`.
+
+ :param name_for_scalar_relationship: callable function which will be used
+ to produce relationship names for scalar relationships. Defaults to
+ :func:`.name_for_scalar_relationship`.
+
+ :param name_for_collection_relationship: callable function which will be used
+ to produce relationship names for collection-oriented relationships. Defaults to
+ :func:`.name_for_collection_relationship`.
+
+ :param generate_relationship: callable function which will be used to
+ actually generate :func:`.relationship` and :func:`.backref` constructs.
+ Defaults to :func:`.generate_relationship`.
+
+ :param collection_class: the Python collection class that will be used
+ when a new :func:`.relationship` object is created that represents a
+ collection. Defaults to ``list``.
+
+ """
+ if reflect:
+ cls.metadata.reflect(
+ engine,
+ extend_existing=True,
+ autoload_replace=False
+ )
+
+ table_to_map_config = dict(
+ (m.local_table, m)
+ for m in _DeferredMapperConfig.classes_for_base(cls)
+ )
+
+ many_to_many = []
+
+ for table in cls.metadata.tables.values():
+ lcl_m2m, rem_m2m, m2m_const = _is_many_to_many(cls, table)
+ if lcl_m2m is not None:
+ many_to_many.append((lcl_m2m, rem_m2m, m2m_const, table))
+ elif not table.primary_key:
+ continue
+ elif table not in table_to_map_config:
+ mapped_cls = type(
+ classname_for_table(cls, table.name, table),
+ (cls, ),
+ {"__table__": table}
+ )
+ map_config = _DeferredMapperConfig.config_for_cls(mapped_cls)
+ cls.classes[map_config.cls.__name__] = mapped_cls
+ table_to_map_config[table] = map_config
+
+ for map_config in table_to_map_config.values():
+ _relationships_for_fks(cls,
+ map_config,
+ table_to_map_config,
+ collection_class,
+ name_for_scalar_relationship,
+ name_for_collection_relationship,
+ generate_relationship)
+
+ for lcl_m2m, rem_m2m, m2m_const, table in many_to_many:
+ _m2m_relationship(cls, lcl_m2m, rem_m2m, m2m_const, table,
+ table_to_map_config,
+ collection_class,
+ name_for_scalar_relationship,
+ name_for_collection_relationship,
+ generate_relationship)
+ for map_config in table_to_map_config.values():
+ map_config.map()
+
+
+ _sa_decl_prepare = True
+ """Indicate that the mapping of classes should be deferred.
+
+ The presence of this attribute name indicates to declarative
+ that the call to mapper() should not occur immediately; instead,
+ information about the table and attributes to be mapped are gathered
+ into an internal structure called _DeferredMapperConfig. These
+ objects can be collected later using classes_for_base(), additional
+ mapping decisions can be made, and then the map() method will actually
+ apply the mapping.
+
+ The only real reason this deferral of the whole
+ thing is needed is to support primary key columns that aren't reflected
+ yet when the class is declared; everything else can theoretically be
+ added to the mapper later. However, the _DeferredMapperConfig is a
+ nice interface in any case which exists at that not usually exposed point
+ at which declarative has the class and the Table but hasn't called
+ mapper() yet.
+
+ """
+
+def automap_base(declarative_base=None, **kw):
+ """Produce a declarative automap base.
+
+ This function produces a new base class that is a product of the
+ :class:`.AutomapBase` class as well a declarative base produced by
+ :func:`.declarative.declarative_base`.
+
+ All parameters other than ``declarative_base`` are keyword arguments
+ that are passed directly to the :func:`.declarative.declarative_base`
+ function.
+
+ :param declarative_base: an existing class produced by
+ :func:`.declarative.declarative_base`. When this is passed, the function
+ no longer invokes :func:`.declarative.declarative_base` itself, and all other
+ keyword arguments are ignored.
+
+ :param \**kw: keyword arguments are passed along to
+ :func:`.declarative.declarative_base`.
+
+ """
+ if declarative_base is None:
+ Base = _declarative_base(**kw)
+ else:
+ Base = declarative_base
+
+ return type(
+ Base.__name__,
+ (AutomapBase, Base,),
+ {"__abstract__": True, "classes": util.Properties({})}
+ )
+
+def _is_many_to_many(automap_base, table):
+ fk_constraints = [const for const in table.constraints
+ if isinstance(const, ForeignKeyConstraint)]
+ if len(fk_constraints) != 2:
+ return None, None, None
+
+ cols = sum(
+ [[fk.parent for fk in fk_constraint.elements]
+ for fk_constraint in fk_constraints], [])
+
+ if set(cols) != set(table.c):
+ return None, None, None
+
+ return (
+ fk_constraints[0].elements[0].column.table,
+ fk_constraints[1].elements[0].column.table,
+ fk_constraints
+ )
+
+def _relationships_for_fks(automap_base, map_config, table_to_map_config,
+ collection_class,
+ name_for_scalar_relationship,
+ name_for_collection_relationship,
+ generate_relationship):
+ local_table = map_config.local_table
+ local_cls = map_config.cls
+
+ for constraint in local_table.constraints:
+ if isinstance(constraint, ForeignKeyConstraint):
+ fks = constraint.elements
+ referred_table = fks[0].column.table
+ referred_cfg = table_to_map_config.get(referred_table, None)
+ if referred_cfg is None:
+ continue
+ referred_cls = referred_cfg.cls
+
+ relationship_name = name_for_scalar_relationship(
+ automap_base,
+ local_cls,
+ referred_cls, constraint)
+ backref_name = name_for_collection_relationship(
+ automap_base,
+ referred_cls,
+ local_cls,
+ constraint
+ )
+
+ create_backref = backref_name not in referred_cfg.properties
+
+ if relationship_name not in map_config.properties:
+ if create_backref:
+ backref_obj = generate_relationship(automap_base,
+ interfaces.ONETOMANY, backref,
+ backref_name, referred_cls, local_cls,
+ collection_class=collection_class)
+ else:
+ backref_obj = None
+ map_config.properties[relationship_name] = \
+ generate_relationship(automap_base,
+ interfaces.MANYTOONE,
+ relationship,
+ relationship_name,
+ local_cls, referred_cls,
+ foreign_keys=[fk.parent for fk in constraint.elements],
+ backref=backref_obj,
+ remote_side=[fk.column for fk in constraint.elements]
+ )
+ if not create_backref:
+ referred_cfg.properties[backref_name].back_populates = relationship_name
+ elif create_backref:
+ referred_cfg.properties[backref_name] = \
+ generate_relationship(automap_base,
+ interfaces.ONETOMANY,
+ relationship,
+ backref_name,
+ referred_cls, local_cls,
+ foreign_keys=[fk.parent for fk in constraint.elements],
+ back_populates=relationship_name,
+ collection_class=collection_class)
+ map_config.properties[relationship_name].back_populates = backref_name
+
+def _m2m_relationship(automap_base, lcl_m2m, rem_m2m, m2m_const, table,
+ table_to_map_config,
+ collection_class,
+ name_for_scalar_relationship,
+ name_for_collection_relationship,
+ generate_relationship):
+
+ map_config = table_to_map_config.get(lcl_m2m, None)
+ referred_cfg = table_to_map_config.get(rem_m2m, None)
+ if map_config is None or referred_cfg is None:
+ return
+
+ local_cls = map_config.cls
+ referred_cls = referred_cfg.cls
+
+ relationship_name = name_for_collection_relationship(
+ automap_base,
+ local_cls,
+ referred_cls, m2m_const[0])
+ backref_name = name_for_collection_relationship(
+ automap_base,
+ referred_cls,
+ local_cls,
+ m2m_const[1]
+ )
+
+ create_backref = backref_name not in referred_cfg.properties
+
+ if relationship_name not in map_config.properties:
+ if create_backref:
+ backref_obj = generate_relationship(automap_base,
+ interfaces.MANYTOMANY,
+ backref,
+ backref_name,
+ referred_cls, local_cls,
+ collection_class=collection_class
+ )
+ else:
+ backref_obj = None
+ map_config.properties[relationship_name] = \
+ generate_relationship(automap_base,
+ interfaces.MANYTOMANY,
+ relationship,
+ relationship_name,
+ local_cls, referred_cls,
+ secondary=table,
+ primaryjoin=and_(fk.column == fk.parent for fk in m2m_const[0].elements),
+ secondaryjoin=and_(fk.column == fk.parent for fk in m2m_const[1].elements),
+ backref=backref_obj,
+ collection_class=collection_class
+ )
+ if not create_backref:
+ referred_cfg.properties[backref_name].back_populates = relationship_name
+ elif create_backref:
+ referred_cfg.properties[backref_name] = \
+ generate_relationship(automap_base,
+ interfaces.MANYTOMANY,
+ relationship,
+ backref_name,
+ referred_cls, local_cls,
+ secondary=table,
+ primaryjoin=and_(fk.column == fk.parent for fk in m2m_const[1].elements),
+ secondaryjoin=and_(fk.column == fk.parent for fk in m2m_const[0].elements),
+ back_populates=relationship_name,
+ collection_class=collection_class)
+ map_config.properties[relationship_name].back_populates = backref_name
diff --git a/lib/sqlalchemy/ext/compiler.py b/lib/sqlalchemy/ext/compiler.py
index 703475de7..5dde74e09 100644
--- a/lib/sqlalchemy/ext/compiler.py
+++ b/lib/sqlalchemy/ext/compiler.py
@@ -1,5 +1,5 @@
# ext/compiler.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -238,7 +238,7 @@ A synopsis is as follows:
class timestamp(ColumnElement):
type = TIMESTAMP()
-* :class:`~sqlalchemy.sql.expression.FunctionElement` - This is a hybrid of a
+* :class:`~sqlalchemy.sql.functions.FunctionElement` - This is a hybrid of a
``ColumnElement`` and a "from clause" like object, and represents a SQL
function or stored procedure type of call. Since most databases support
statements along the line of "SELECT FROM <some function>"
diff --git a/lib/sqlalchemy/ext/declarative/__init__.py b/lib/sqlalchemy/ext/declarative/__init__.py
index f8c685da0..0ee4e33fd 100644
--- a/lib/sqlalchemy/ext/declarative/__init__.py
+++ b/lib/sqlalchemy/ext/declarative/__init__.py
@@ -1,5 +1,5 @@
# ext/declarative/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -897,11 +897,57 @@ reference a common target class via many-to-one::
__tablename__ = 'target'
id = Column(Integer, primary_key=True)
+Using Advanced Relationship Arguments (e.g. ``primaryjoin``, etc.)
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
:func:`~sqlalchemy.orm.relationship` definitions which require explicit
-primaryjoin, order_by etc. expressions should use the string forms
-for these arguments, so that they are evaluated as late as possible.
-To reference the mixin class in these expressions, use the given ``cls``
-to get its name::
+primaryjoin, order_by etc. expressions should in all but the most
+simplistic cases use **late bound** forms
+for these arguments, meaning, using either the string form or a lambda.
+The reason for this is that the related :class:`.Column` objects which are to
+be configured using ``@declared_attr`` are not available to another
+``@declared_attr`` attribute; while the methods will work and return new
+:class:`.Column` objects, those are not the :class:`.Column` objects that
+Declarative will be using as it calls the methods on its own, thus using
+*different* :class:`.Column` objects.
+
+The canonical example is the primaryjoin condition that depends upon
+another mixed-in column::
+
+ class RefTargetMixin(object):
+ @declared_attr
+ def target_id(cls):
+ return Column('target_id', ForeignKey('target.id'))
+
+ @declared_attr
+ def target(cls):
+ return relationship(Target,
+ primaryjoin=Target.id==cls.target_id # this is *incorrect*
+ )
+
+Mapping a class using the above mixin, we will get an error like::
+
+ sqlalchemy.exc.InvalidRequestError: this ForeignKey's parent column is not
+ yet associated with a Table.
+
+This is because the ``target_id`` :class:`.Column` we've called upon in our ``target()``
+method is not the same :class:`.Column` that declarative is actually going to map
+to our table.
+
+The condition above is resolved using a lambda::
+
+ class RefTargetMixin(object):
+ @declared_attr
+ def target_id(cls):
+ return Column('target_id', ForeignKey('target.id'))
+
+ @declared_attr
+ def target(cls):
+ return relationship(Target,
+ primaryjoin=lambda: Target.id==cls.target_id
+ )
+
+or alternatively, the string form (which ultimately generates a lambda)::
class RefTargetMixin(object):
@declared_attr
@@ -1238,7 +1284,7 @@ Sessions
Note that ``declarative`` does nothing special with sessions, and is
only intended as an easier way to configure mappers and
:class:`~sqlalchemy.schema.Table` objects. A typical application
-setup using :class:`~sqlalchemy.orm.scoped_session` might look like::
+setup using :class:`~sqlalchemy.orm.scoping.scoped_session` might look like::
engine = create_engine('postgresql://scott:tiger@localhost/test')
Session = scoped_session(sessionmaker(autocommit=False,
@@ -1254,7 +1300,7 @@ Mapped instances then make usage of
from .api import declarative_base, synonym_for, comparable_using, \
instrument_declarative, ConcreteBase, AbstractConcreteBase, \
DeclarativeMeta, DeferredReflection, has_inherited_table,\
- declared_attr
+ declared_attr, as_declarative
__all__ = ['declarative_base', 'synonym_for', 'has_inherited_table',
diff --git a/lib/sqlalchemy/ext/declarative/api.py b/lib/sqlalchemy/ext/declarative/api.py
index 2f222f682..2418c6e50 100644
--- a/lib/sqlalchemy/ext/declarative/api.py
+++ b/lib/sqlalchemy/ext/declarative/api.py
@@ -1,5 +1,5 @@
# ext/declarative/api.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -9,14 +9,17 @@
from ...schema import Table, MetaData
from ...orm import synonym as _orm_synonym, mapper,\
comparable_property,\
- interfaces
-from ...orm.util import polymorphic_union, _mapper_or_none
+ interfaces, properties
+from ...orm.util import polymorphic_union
+from ...orm.base import _mapper_or_none
+from ...util import compat
from ... import exc
import weakref
from .base import _as_declarative, \
_declarative_constructor,\
- _MapperConfig, _add_attribute
+ _DeferredMapperConfig, _add_attribute
+from .clsregistry import _class_resolver
def instrument_declarative(cls, registry, metadata):
@@ -173,16 +176,16 @@ def declarative_base(bind=None, metadata=None, mapper=None, cls=object,
of the class.
:param bind: An optional
- :class:`~sqlalchemy.engine.base.Connectable`, will be assigned
- the ``bind`` attribute on the :class:`~sqlalchemy.MetaData`
+ :class:`~sqlalchemy.engine.Connectable`, will be assigned
+ the ``bind`` attribute on the :class:`~sqlalchemy.schema.MetaData`
instance.
:param metadata:
- An optional :class:`~sqlalchemy.MetaData` instance. All
+ An optional :class:`~sqlalchemy.schema.MetaData` instance. All
:class:`~sqlalchemy.schema.Table` objects implicitly declared by
subclasses of the base will share this MetaData. A MetaData instance
will be created if none is provided. The
- :class:`~sqlalchemy.MetaData` instance will be available via the
+ :class:`~sqlalchemy.schema.MetaData` instance will be available via the
`metadata` attribute of the generated declarative base class.
:param mapper:
@@ -218,6 +221,10 @@ def declarative_base(bind=None, metadata=None, mapper=None, cls=object,
compatible callable to use as the meta type of the generated
declarative base class.
+ .. seealso::
+
+ :func:`.as_declarative`
+
"""
lcl_metadata = metadata or MetaData()
if bind:
@@ -237,6 +244,42 @@ def declarative_base(bind=None, metadata=None, mapper=None, cls=object,
return metaclass(name, bases, class_dict)
+def as_declarative(**kw):
+ """
+ Class decorator for :func:`.declarative_base`.
+
+ Provides a syntactical shortcut to the ``cls`` argument
+ sent to :func:`.declarative_base`, allowing the base class
+ to be converted in-place to a "declarative" base::
+
+ from sqlalchemy.ext.declarative import as_declarative
+
+ @as_declarative()
+ class Base(object):
+ @declared_attr
+ def __tablename__(cls):
+ return cls.__name__.lower()
+ id = Column(Integer, primary_key=True)
+
+ class MyMappedClass(Base):
+ # ...
+
+ All keyword arguments passed to :func:`.as_declarative` are passed
+ along to :func:`.declarative_base`.
+
+ .. versionadded:: 0.8.3
+
+ .. seealso::
+
+ :func:`.declarative_base`
+
+ """
+ def decorate(cls):
+ kw['cls'] = cls
+ kw['name'] = cls.__name__
+ return declarative_base(**kw)
+
+ return decorate
class ConcreteBase(object):
"""A helper class for 'concrete' declarative mappings.
@@ -245,7 +288,7 @@ class ConcreteBase(object):
function automatically, against all tables mapped as a subclass
to this class. The function is called via the
``__declare_last__()`` function, which is essentially
- a hook for the :func:`.MapperEvents.after_configured` event.
+ a hook for the :meth:`.after_configured` event.
:class:`.ConcreteBase` produces a mapped
table for the class itself. Compare to :class:`.AbstractConcreteBase`,
@@ -300,7 +343,7 @@ class AbstractConcreteBase(ConcreteBase):
function automatically, against all tables mapped as a subclass
to this class. The function is called via the
``__declare_last__()`` function, which is essentially
- a hook for the :func:`.MapperEvents.after_configured` event.
+ a hook for the :meth:`.after_configured` event.
:class:`.AbstractConcreteBase` does not produce a mapped
table for the class itself. Compare to :class:`.ConcreteBase`,
@@ -380,7 +423,7 @@ class DeferredReflection(object):
Above, ``MyClass`` is not yet mapped. After a series of
classes have been defined in the above fashion, all tables
can be reflected and mappings created using
- :meth:`.DeferredReflection.prepare`::
+ :meth:`.prepare`::
engine = create_engine("someengine://...")
DeferredReflection.prepare(engine)
@@ -424,11 +467,30 @@ class DeferredReflection(object):
def prepare(cls, engine):
"""Reflect all :class:`.Table` objects for all current
:class:`.DeferredReflection` subclasses"""
- to_map = [m for m in _MapperConfig.configs.values()
- if issubclass(m.cls, cls)]
+
+ to_map = _DeferredMapperConfig.classes_for_base(cls)
for thingy in to_map:
cls._sa_decl_prepare(thingy.local_table, engine)
thingy.map()
+ mapper = thingy.cls.__mapper__
+ metadata = mapper.class_.metadata
+ for rel in mapper._props.values():
+ if isinstance(rel, properties.RelationshipProperty) and \
+ rel.secondary is not None:
+ if isinstance(rel.secondary, Table):
+ cls._reflect_table(rel.secondary, engine)
+ elif isinstance(rel.secondary, _class_resolver):
+ rel.secondary._resolvers += (
+ cls._sa_deferred_table_resolver(engine, metadata),
+ )
+
+ @classmethod
+ def _sa_deferred_table_resolver(cls, engine, metadata):
+ def _resolve(key):
+ t1 = Table(key, metadata)
+ cls._reflect_table(t1, engine)
+ return t1
+ return _resolve
@classmethod
def _sa_decl_prepare(cls, local_table, engine):
@@ -437,10 +499,14 @@ class DeferredReflection(object):
# will fill in db-loaded columns
# into the existing Table object.
if local_table is not None:
- Table(local_table.name,
- local_table.metadata,
- extend_existing=True,
- autoload_replace=False,
- autoload=True,
- autoload_with=engine,
- schema=local_table.schema)
+ cls._reflect_table(local_table, engine)
+
+ @classmethod
+ def _reflect_table(cls, table, engine):
+ Table(table.name,
+ table.metadata,
+ extend_existing=True,
+ autoload_replace=False,
+ autoload=True,
+ autoload_with=engine,
+ schema=table.schema)
diff --git a/lib/sqlalchemy/ext/declarative/base.py b/lib/sqlalchemy/ext/declarative/base.py
index 5a2b88db4..a764f126b 100644
--- a/lib/sqlalchemy/ext/declarative/base.py
+++ b/lib/sqlalchemy/ext/declarative/base.py
@@ -1,25 +1,27 @@
# ext/declarative/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Internal implementation for declarative."""
from ...schema import Table, Column
-from ...orm import mapper, class_mapper
+from ...orm import mapper, class_mapper, synonym
from ...orm.interfaces import MapperProperty
from ...orm.properties import ColumnProperty, CompositeProperty
-from ...orm.util import _is_mapped_class
+from ...orm.attributes import QueryableAttribute
+from ...orm.base import _is_mapped_class
from ... import util, exc
from ...sql import expression
from ... import event
from . import clsregistry
-
+import collections
+import weakref
def _declared_mapping_info(cls):
# deferred mapping
- if cls in _MapperConfig.configs:
- return _MapperConfig.configs[cls]
+ if _DeferredMapperConfig.has_cls(cls):
+ return _DeferredMapperConfig.config_for_cls(cls)
# regular mapping
elif _is_mapped_class(cls):
return class_mapper(cls, configure=False)
@@ -148,6 +150,15 @@ def _as_declarative(cls, classname, dict_):
if isinstance(value, declarative_props):
value = getattr(cls, k)
+ elif isinstance(value, QueryableAttribute) and \
+ value.class_ is not cls and \
+ value.key != k:
+ # detect a QueryableAttribute that's already mapped being
+ # assigned elsewhere in userland, turn into a synonym()
+ value = synonym(value.key)
+ setattr(cls, k, value)
+
+
if (isinstance(value, tuple) and len(value) == 1 and
isinstance(value[0], (Column, MapperProperty))):
util.warn("Ignoring declarative-like tuple value of attribute "
@@ -173,15 +184,19 @@ def _as_declarative(cls, classname, dict_):
# extract columns from the class dict
declared_columns = set()
+ name_to_prop_key = collections.defaultdict(set)
for key, c in list(our_stuff.items()):
if isinstance(c, (ColumnProperty, CompositeProperty)):
for col in c.columns:
if isinstance(col, Column) and \
col.table is None:
_undefer_column_name(key, col)
+ if not isinstance(c, CompositeProperty):
+ name_to_prop_key[col.name].add(key)
declared_columns.add(col)
elif isinstance(c, Column):
_undefer_column_name(key, c)
+ name_to_prop_key[c.name].add(key)
declared_columns.add(c)
# if the column is the same name as the key,
# remove it from the explicit properties dict.
@@ -190,6 +205,15 @@ def _as_declarative(cls, classname, dict_):
# in multi-column ColumnProperties.
if key == c.key:
del our_stuff[key]
+
+ for name, keys in name_to_prop_key.items():
+ if len(keys) > 1:
+ util.warn(
+ "On class %r, Column object %r named directly multiple times, "
+ "only one will be used: %s" %
+ (classname, name, (", ".join(sorted(keys))))
+ )
+
declared_columns = sorted(
declared_columns, key=lambda c: c._creation_order)
table = None
@@ -281,19 +305,24 @@ def _as_declarative(cls, classname, dict_):
inherited_mapped_table is not inherited_table:
inherited_mapped_table._refresh_for_new_column(c)
- mt = _MapperConfig(mapper_cls,
+ defer_map = hasattr(cls, '_sa_decl_prepare')
+ if defer_map:
+ cfg_cls = _DeferredMapperConfig
+ else:
+ cfg_cls = _MapperConfig
+ mt = cfg_cls(mapper_cls,
cls, table,
inherits,
declared_columns,
column_copies,
our_stuff,
mapper_args_fn)
- if not hasattr(cls, '_sa_decl_prepare'):
+ if not defer_map:
mt.map()
class _MapperConfig(object):
- configs = util.OrderedDict()
+
mapped_table = None
def __init__(self, mapper_cls,
@@ -311,7 +340,7 @@ class _MapperConfig(object):
self.mapper_args_fn = mapper_args_fn
self.declared_columns = declared_columns
self.column_copies = column_copies
- self.configs[cls] = self
+
def _prepare_mapper_arguments(self):
properties = self.properties
@@ -368,7 +397,6 @@ class _MapperConfig(object):
return result_mapper_args
def map(self):
- self.configs.pop(self.cls, None)
mapper_args = self._prepare_mapper_arguments()
self.cls.__mapper__ = self.mapper_cls(
self.cls,
@@ -376,6 +404,42 @@ class _MapperConfig(object):
**mapper_args
)
+class _DeferredMapperConfig(_MapperConfig):
+ _configs = util.OrderedDict()
+
+ @property
+ def cls(self):
+ return self._cls()
+
+ @cls.setter
+ def cls(self, class_):
+ self._cls = weakref.ref(class_, self._remove_config_cls)
+ self._configs[self._cls] = self
+
+ @classmethod
+ def _remove_config_cls(cls, ref):
+ cls._configs.pop(ref, None)
+
+ @classmethod
+ def has_cls(cls, class_):
+ # 2.6 fails on weakref if class_ is an old style class
+ return isinstance(class_, type) and \
+ weakref.ref(class_) in cls._configs
+
+ @classmethod
+ def config_for_cls(cls, class_):
+ return cls._configs[weakref.ref(class_)]
+
+
+ @classmethod
+ def classes_for_base(cls, base_cls):
+ return [m for m in cls._configs.values()
+ if issubclass(m.cls, base_cls)]
+
+ def map(self):
+ self._configs.pop(self._cls, None)
+ super(_DeferredMapperConfig, self).map()
+
def _add_attribute(cls, key, value):
"""add an attribute to an existing declarative class.
@@ -384,6 +448,7 @@ def _add_attribute(cls, key, value):
adds it to the Mapper, adds a column to the mapped Table, etc.
"""
+
if '__mapper__' in cls.__dict__:
if isinstance(value, Column):
_undefer_column_name(key, value)
@@ -400,6 +465,14 @@ def _add_attribute(cls, key, value):
key,
clsregistry._deferred_relationship(cls, value)
)
+ elif isinstance(value, QueryableAttribute) and value.key != key:
+ # detect a QueryableAttribute that's already mapped being
+ # assigned elsewhere in userland, turn into a synonym()
+ value = synonym(value.key)
+ cls.__mapper__.add_property(
+ key,
+ clsregistry._deferred_relationship(cls, value)
+ )
else:
type.__setattr__(cls, key, value)
else:
diff --git a/lib/sqlalchemy/ext/declarative/clsregistry.py b/lib/sqlalchemy/ext/declarative/clsregistry.py
index a669e37f4..fda1cffb5 100644
--- a/lib/sqlalchemy/ext/declarative/clsregistry.py
+++ b/lib/sqlalchemy/ext/declarative/clsregistry.py
@@ -1,5 +1,5 @@
# ext/declarative/clsregistry.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -225,47 +225,62 @@ def _determine_container(key, value):
return _GetColumns(value)
-def _resolver(cls, prop):
- def resolve_arg(arg):
- import sqlalchemy
- from sqlalchemy.orm import foreign, remote
-
- fallback = sqlalchemy.__dict__.copy()
- fallback.update({'foreign': foreign, 'remote': remote})
-
- def access_cls(key):
- if key in cls._decl_class_registry:
- return _determine_container(key, cls._decl_class_registry[key])
- elif key in cls.metadata.tables:
- return cls.metadata.tables[key]
- elif key in cls.metadata._schemas:
- return _GetTable(key, cls.metadata)
- elif '_sa_module_registry' in cls._decl_class_registry and \
- key in cls._decl_class_registry['_sa_module_registry']:
- registry = cls._decl_class_registry['_sa_module_registry']
- return registry.resolve_attr(key)
+class _class_resolver(object):
+ def __init__(self, cls, prop, fallback, arg):
+ self.cls = cls
+ self.prop = prop
+ self.arg = self._declarative_arg = arg
+ self.fallback = fallback
+ self._dict = util.PopulateDict(self._access_cls)
+ self._resolvers = ()
+
+ def _access_cls(self, key):
+ cls = self.cls
+ if key in cls._decl_class_registry:
+ return _determine_container(key, cls._decl_class_registry[key])
+ elif key in cls.metadata.tables:
+ return cls.metadata.tables[key]
+ elif key in cls.metadata._schemas:
+ return _GetTable(key, cls.metadata)
+ elif '_sa_module_registry' in cls._decl_class_registry and \
+ key in cls._decl_class_registry['_sa_module_registry']:
+ registry = cls._decl_class_registry['_sa_module_registry']
+ return registry.resolve_attr(key)
+ elif self._resolvers:
+ for resolv in self._resolvers:
+ value = resolv(key)
+ if value is not None:
+ return value
+
+ return self.fallback[key]
+
+ def __call__(self):
+ try:
+ x = eval(self.arg, globals(), self._dict)
+
+ if isinstance(x, _GetColumns):
+ return x.cls
else:
- return fallback[key]
+ return x
+ except NameError as n:
+ raise exc.InvalidRequestError(
+ "When initializing mapper %s, expression %r failed to "
+ "locate a name (%r). If this is a class name, consider "
+ "adding this relationship() to the %r class after "
+ "both dependent classes have been defined." %
+ (self.prop.parent, self.arg, n.args[0], self.cls)
+ )
- d = util.PopulateDict(access_cls)
- def return_cls():
- try:
- x = eval(arg, globals(), d)
+def _resolver(cls, prop):
+ import sqlalchemy
+ from sqlalchemy.orm import foreign, remote
- if isinstance(x, _GetColumns):
- return x.cls
- else:
- return x
- except NameError as n:
- raise exc.InvalidRequestError(
- "When initializing mapper %s, expression %r failed to "
- "locate a name (%r). If this is a class name, consider "
- "adding this relationship() to the %r class after "
- "both dependent classes have been defined." %
- (prop.parent, arg, n.args[0], cls)
- )
- return return_cls
+ fallback = sqlalchemy.__dict__.copy()
+ fallback.update({'foreign': foreign, 'remote': remote})
+
+ def resolve_arg(arg):
+ return _class_resolver(cls, prop, fallback, arg)
return resolve_arg
@@ -277,7 +292,7 @@ def _deferred_relationship(cls, prop):
for attr in ('argument', 'order_by', 'primaryjoin', 'secondaryjoin',
'secondary', '_user_defined_foreign_keys', 'remote_side'):
v = getattr(prop, attr)
- if isinstance(v, str):
+ if isinstance(v, util.string_types):
setattr(prop, attr, resolve_arg(v))
if prop.backref and isinstance(prop.backref, tuple):
diff --git a/lib/sqlalchemy/ext/horizontal_shard.py b/lib/sqlalchemy/ext/horizontal_shard.py
index 95e264c3b..8b3f968dc 100644
--- a/lib/sqlalchemy/ext/horizontal_shard.py
+++ b/lib/sqlalchemy/ext/horizontal_shard.py
@@ -1,5 +1,5 @@
# ext/horizontal_shard.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/ext/hybrid.py b/lib/sqlalchemy/ext/hybrid.py
index 59e5a74cb..576e0bd4e 100644
--- a/lib/sqlalchemy/ext/hybrid.py
+++ b/lib/sqlalchemy/ext/hybrid.py
@@ -1,5 +1,5 @@
# ext/hybrid.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -229,7 +229,7 @@ mapping which relates a ``User`` to a ``SavingsAccount``::
account = Account(owner=self)
else:
account = self.accounts[0]
- account.balance = balance
+ account.balance = value
@balance.expression
def balance(cls):
@@ -269,7 +269,7 @@ Correlated Subquery Relationship Hybrid
We can, of course, forego being dependent on the enclosing query's usage
of joins in favor of the correlated subquery, which can portably be packed
-into a single colunn expression. A correlated subquery is more portable, but
+into a single column expression. A correlated subquery is more portable, but
often performs more poorly at the SQL level. Using the same technique
illustrated at :ref:`mapper_column_property_sql_expressions`,
we can adjust our ``SavingsAccount`` example to aggregate the balances for
diff --git a/lib/sqlalchemy/ext/instrumentation.py b/lib/sqlalchemy/ext/instrumentation.py
index bb44a492c..2cf36e9bd 100644
--- a/lib/sqlalchemy/ext/instrumentation.py
+++ b/lib/sqlalchemy/ext/instrumentation.py
@@ -22,7 +22,7 @@ see the example :ref:`examples_instrumentation`.
:mod:`sqlalchemy.orm.instrumentation` so that it
takes effect, including recognition of
``__sa_instrumentation_manager__`` on mapped classes, as
- well :attr:`.instrumentation_finders`
+ well :data:`.instrumentation_finders`
being used to determine class instrumentation resolution.
"""
@@ -31,7 +31,7 @@ from ..orm.instrumentation import (
ClassManager, InstrumentationFactory, _default_state_getter,
_default_dict_getter, _default_manager_getter
)
-from ..orm import attributes, collections
+from ..orm import attributes, collections, base as orm_base
from .. import util
from ..orm import exc as orm_exc
import weakref
@@ -399,9 +399,9 @@ def _install_lookups(lookups):
instance_state = lookups['instance_state']
instance_dict = lookups['instance_dict']
manager_of_class = lookups['manager_of_class']
- attributes.instance_state = \
+ orm_base.instance_state = attributes.instance_state = \
orm_instrumentation.instance_state = instance_state
- attributes.instance_dict = \
+ orm_base.instance_dict = attributes.instance_dict = \
orm_instrumentation.instance_dict = instance_dict
- attributes.manager_of_class = \
+ orm_base.manager_of_class = attributes.manager_of_class = \
orm_instrumentation.manager_of_class = manager_of_class
diff --git a/lib/sqlalchemy/ext/mutable.py b/lib/sqlalchemy/ext/mutable.py
index d3133b1f5..82410031d 100644
--- a/lib/sqlalchemy/ext/mutable.py
+++ b/lib/sqlalchemy/ext/mutable.py
@@ -1,5 +1,5 @@
# ext/mutable.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -7,13 +7,9 @@
"""Provide support for tracking of in-place changes to scalar values,
which are propagated into ORM change events on owning parent objects.
-The :mod:`sqlalchemy.ext.mutable` extension replaces SQLAlchemy's legacy
-approach to in-place mutations of scalar values, established by the
-:class:`.types.MutableType` class as well as the ``mutable=True`` type flag,
-with a system that allows change events to be propagated from the value to
-the owning parent, thereby removing the need for the ORM to maintain copies
-of values as well as the very expensive requirement of scanning through all
-"mutable" values on each flush call, looking for changes.
+.. versionadded:: 0.7 :mod:`sqlalchemy.ext.mutable` replaces SQLAlchemy's
+ legacy approach to in-place mutations of scalar values; see
+ :ref:`07_migration_mutation_extension`.
.. _mutable_scalars:
@@ -182,7 +178,7 @@ callbacks. In our case, this is a good thing, since if this dictionary were
picklable, it could lead to an excessively large pickle size for our value
objects that are pickled by themselves outside of the context of the parent.
The developer responsibility here is only to provide a ``__getstate__`` method
-that excludes the :meth:`~.MutableBase._parents` collection from the pickle
+that excludes the :meth:`~MutableBase._parents` collection from the pickle
stream::
class MyMutableType(Mutable):
@@ -332,7 +328,7 @@ Supporting Pickling
As is the case with :class:`.Mutable`, the :class:`.MutableComposite` helper
class uses a ``weakref.WeakKeyDictionary`` available via the
-:meth:`.MutableBase._parents` attribute which isn't picklable. If we need to
+:meth:`MutableBase._parents` attribute which isn't picklable. If we need to
pickle instances of ``Point`` or its owning class ``Vertex``, we at least need
to define a ``__getstate__`` that doesn't include the ``_parents`` dictionary.
Below we define both a ``__getstate__`` and a ``__setstate__`` that package up
@@ -349,7 +345,7 @@ the minimal form of our ``Point`` class::
As with :class:`.Mutable`, the :class:`.MutableComposite` augments the
pickling process of the parent's object-relational state so that the
-:meth:`.MutableBase._parents` collection is restored to all ``Point`` objects.
+:meth:`MutableBase._parents` collection is restored to all ``Point`` objects.
"""
from ..orm.attributes import flag_modified
@@ -542,7 +538,7 @@ class Mutable(MutableBase):
To associate a particular mutable type with all occurrences of a
particular type, use the :meth:`.Mutable.associate_with` classmethod
- of the particular :meth:`.Mutable` subclass to establish a global
+ of the particular :class:`.Mutable` subclass to establish a global
association.
.. warning::
@@ -595,7 +591,7 @@ def _setup_composite_listener():
issubclass(prop.composite_class, MutableComposite)):
prop.composite_class._listen_on_attribute(
getattr(class_, prop.key), False, class_)
- if not Mapper.dispatch.mapper_configured._contains(Mapper, _listen_for_type):
+ if not event.contains(Mapper, "mapper_configured", _listen_for_type):
event.listen(Mapper, 'mapper_configured', _listen_for_type)
_setup_composite_listener()
diff --git a/lib/sqlalchemy/ext/orderinglist.py b/lib/sqlalchemy/ext/orderinglist.py
index 24d405e39..9310c6071 100644
--- a/lib/sqlalchemy/ext/orderinglist.py
+++ b/lib/sqlalchemy/ext/orderinglist.py
@@ -1,5 +1,5 @@
# ext/orderinglist.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/ext/serializer.py b/lib/sqlalchemy/ext/serializer.py
index 8abd1fdf3..388cd4048 100644
--- a/lib/sqlalchemy/ext/serializer.py
+++ b/lib/sqlalchemy/ext/serializer.py
@@ -1,5 +1,5 @@
# ext/serializer.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -58,7 +58,7 @@ from ..orm.interfaces import MapperProperty
from ..orm.attributes import QueryableAttribute
from .. import Table, Column
from ..engine import Engine
-from ..util import pickle, byte_buffer, b64encode, b64decode
+from ..util import pickle, byte_buffer, b64encode, b64decode, text_type
import re
@@ -80,9 +80,9 @@ def Serializer(*args, **kw):
id = "mapperprop:" + b64encode(pickle.dumps(obj.parent.class_)) + \
":" + obj.key
elif isinstance(obj, Table):
- id = "table:" + str(obj)
+ id = "table:" + text_type(obj.key)
elif isinstance(obj, Column) and isinstance(obj.table, Table):
- id = "column:" + str(obj.table) + ":" + obj.key
+ id = "column:" + text_type(obj.table.key) + ":" + text_type(obj.key)
elif isinstance(obj, Session):
id = "session:"
elif isinstance(obj, Engine):
@@ -112,7 +112,7 @@ def Deserializer(file, metadata=None, scoped_session=None, engine=None):
return None
def persistent_load(id):
- m = our_ids.match(str(id))
+ m = our_ids.match(text_type(id))
if not m:
return None
else:
diff --git a/lib/sqlalchemy/inspection.py b/lib/sqlalchemy/inspection.py
index 74218fa91..fe9e40555 100644
--- a/lib/sqlalchemy/inspection.py
+++ b/lib/sqlalchemy/inspection.py
@@ -1,5 +1,5 @@
# sqlalchemy/inspect.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -39,11 +39,11 @@ def inspect(subject, raiseerr=True):
The returned value in some cases may be the
same object as the one given, such as if a
- :class:`.orm.Mapper` object is passed. In other
+ :class:`.Mapper` object is passed. In other
cases, it will be an instance of the registered
inspection type for the given object, such as
- if a :class:`.engine.Engine` is passed, an
- :class:`.engine.Inspector` object is returned.
+ if an :class:`.engine.Engine` is passed, an
+ :class:`.Inspector` object is returned.
:param subject: the subject to be inspected.
:param raiseerr: When ``True``, if the given subject
@@ -87,5 +87,6 @@ def _inspects(*types):
return decorate
-def _self_inspects(*types):
- _inspects(*types)(True)
+def _self_inspects(cls):
+ _inspects(cls)(True)
+ return cls
diff --git a/lib/sqlalchemy/interfaces.py b/lib/sqlalchemy/interfaces.py
index 2968176a7..ed50a6456 100644
--- a/lib/sqlalchemy/interfaces.py
+++ b/lib/sqlalchemy/interfaces.py
@@ -1,5 +1,5 @@
# sqlalchemy/interfaces.py
-# Copyright (C) 2007-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2007-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
# Copyright (C) 2007 Jason Kirtland jek@discorporate.us
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/log.py b/lib/sqlalchemy/log.py
index 1bb5581de..935761d5f 100644
--- a/lib/sqlalchemy/log.py
+++ b/lib/sqlalchemy/log.py
@@ -1,5 +1,5 @@
# sqlalchemy/log.py
-# Copyright (C) 2006-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2006-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
# Includes alterations by Vinay Sajip vinay_sajip@yahoo.co.uk
#
# This module is part of SQLAlchemy and is released under
@@ -38,17 +38,13 @@ def _add_default_handler(logger):
_logged_classes = set()
-def class_logger(cls, enable=False):
+def class_logger(cls):
logger = logging.getLogger(cls.__module__ + "." + cls.__name__)
- if enable == 'debug':
- logger.setLevel(logging.DEBUG)
- elif enable == 'info':
- logger.setLevel(logging.INFO)
cls._should_log_debug = lambda self: logger.isEnabledFor(logging.DEBUG)
cls._should_log_info = lambda self: logger.isEnabledFor(logging.INFO)
cls.logger = logger
_logged_classes.add(cls)
-
+ return cls
class Identified(object):
logging_name = None
diff --git a/lib/sqlalchemy/orm/__init__.py b/lib/sqlalchemy/orm/__init__.py
index 1173d5d09..7825a70ac 100644
--- a/lib/sqlalchemy/orm/__init__.py
+++ b/lib/sqlalchemy/orm/__init__.py
@@ -1,5 +1,5 @@
# orm/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -24,11 +24,13 @@ from .mapper import (
from .interfaces import (
EXT_CONTINUE,
EXT_STOP,
- MapperExtension,
PropComparator,
+ )
+from .deprecated_interfaces import (
+ MapperExtension,
SessionExtension,
AttributeExtension,
- )
+)
from .util import (
aliased,
join,
@@ -39,14 +41,13 @@ from .util import (
with_parent,
with_polymorphic,
)
-from .properties import (
- ColumnProperty,
+from .properties import ColumnProperty
+from .relationships import RelationshipProperty
+from .descriptor_props import (
ComparableProperty,
CompositeProperty,
- RelationshipProperty,
- PropertyLoader,
SynonymProperty,
- )
+ )
from .relationships import (
foreign,
remote,
@@ -61,75 +62,10 @@ from .scoping import (
scoped_session
)
from . import mapper as mapperlib
-from . import strategies
-from .query import AliasOption, Query
-from ..sql import util as sql_util
-from .. import util as sa_util
-
-from . import interfaces
-
-# here, we can establish InstrumentationManager back
-# in sqlalchemy.orm and sqlalchemy.orm.interfaces, which
-# also re-establishes the extended instrumentation system.
-#from ..ext import instrumentation as _ext_instrumentation
-#InstrumentationManager = \
-# interfaces.InstrumentationManager = \
-# _ext_instrumentation.InstrumentationManager
-
-__all__ = (
- 'EXT_CONTINUE',
- 'EXT_STOP',
- 'MapperExtension',
- 'AttributeExtension',
- 'PropComparator',
- 'Query',
- 'Session',
- 'aliased',
- 'backref',
- 'class_mapper',
- 'clear_mappers',
- 'column_property',
- 'comparable_property',
- 'compile_mappers',
- 'configure_mappers',
- 'composite',
- 'contains_alias',
- 'contains_eager',
- 'create_session',
- 'defer',
- 'deferred',
- 'dynamic_loader',
- 'eagerload',
- 'eagerload_all',
- 'foreign',
- 'immediateload',
- 'join',
- 'joinedload',
- 'joinedload_all',
- 'lazyload',
- 'mapper',
- 'make_transient',
- 'noload',
- 'object_mapper',
- 'object_session',
- 'outerjoin',
- 'polymorphic_union',
- 'reconstructor',
- 'relationship',
- 'relation',
- 'remote',
- 'scoped_session',
- 'sessionmaker',
- 'subqueryload',
- 'subqueryload_all',
- 'synonym',
- 'undefer',
- 'undefer_group',
- 'validates',
- 'was_deleted',
- 'with_polymorphic'
- )
-
+from .query import AliasOption, Query, Bundle
+from ..util.langhelpers import public_factory
+from .. import util as _sa_util
+from . import strategies as _strategies
def create_session(bind=None, **kwargs):
"""Create a new :class:`.Session`
@@ -167,501 +103,7 @@ def create_session(bind=None, **kwargs):
kwargs.setdefault('expire_on_commit', False)
return Session(bind=bind, **kwargs)
-
-def relationship(argument, secondary=None, **kwargs):
- """Provide a relationship of a primary Mapper to a secondary Mapper.
-
- This corresponds to a parent-child or associative table relationship. The
- constructed class is an instance of :class:`.RelationshipProperty`.
-
- A typical :func:`.relationship`, used in a classical mapping::
-
- mapper(Parent, properties={
- 'children': relationship(Child)
- })
-
- Some arguments accepted by :func:`.relationship` optionally accept a
- callable function, which when called produces the desired value.
- The callable is invoked by the parent :class:`.Mapper` at "mapper
- initialization" time, which happens only when mappers are first used, and
- is assumed to be after all mappings have been constructed. This can be
- used to resolve order-of-declaration and other dependency issues, such as
- if ``Child`` is declared below ``Parent`` in the same file::
-
- mapper(Parent, properties={
- "children":relationship(lambda: Child,
- order_by=lambda: Child.id)
- })
-
- When using the :ref:`declarative_toplevel` extension, the Declarative
- initializer allows string arguments to be passed to :func:`.relationship`.
- These string arguments are converted into callables that evaluate
- the string as Python code, using the Declarative
- class-registry as a namespace. This allows the lookup of related
- classes to be automatic via their string name, and removes the need to
- import related classes at all into the local module space::
-
- from sqlalchemy.ext.declarative import declarative_base
-
- Base = declarative_base()
-
- class Parent(Base):
- __tablename__ = 'parent'
- id = Column(Integer, primary_key=True)
- children = relationship("Child", order_by="Child.id")
-
- A full array of examples and reference documentation regarding
- :func:`.relationship` is at :ref:`relationship_config_toplevel`.
-
- :param argument:
- a mapped class, or actual :class:`.Mapper` instance, representing the
- target of the relationship.
-
- ``argument`` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
-
- :param secondary:
- for a many-to-many relationship, specifies the intermediary
- table, and is an instance of :class:`.Table`. The ``secondary`` keyword
- argument should generally only
- be used for a table that is not otherwise expressed in any class
- mapping, unless this relationship is declared as view only, otherwise
- conflicting persistence operations can occur.
-
- ``secondary`` may
- also be passed as a callable function which is evaluated at
- mapper initialization time.
-
- :param active_history=False:
- When ``True``, indicates that the "previous" value for a
- many-to-one reference should be loaded when replaced, if
- not already loaded. Normally, history tracking logic for
- simple many-to-ones only needs to be aware of the "new"
- value in order to perform a flush. This flag is available
- for applications that make use of
- :func:`.attributes.get_history` which also need to know
- the "previous" value of the attribute.
-
- :param backref:
- indicates the string name of a property to be placed on the related
- mapper's class that will handle this relationship in the other
- direction. The other property will be created automatically
- when the mappers are configured. Can also be passed as a
- :func:`backref` object to control the configuration of the
- new relationship.
-
- :param back_populates:
- Takes a string name and has the same meaning as ``backref``,
- except the complementing property is **not** created automatically,
- and instead must be configured explicitly on the other mapper. The
- complementing property should also indicate ``back_populates``
- to this relationship to ensure proper functioning.
-
- :param cascade:
- a comma-separated list of cascade rules which determines how
- Session operations should be "cascaded" from parent to child.
- This defaults to ``False``, which means the default cascade
- should be used. The default value is ``"save-update, merge"``.
-
- Available cascades are:
-
- * ``save-update`` - cascade the :meth:`.Session.add`
- operation. This cascade applies both to future and
- past calls to :meth:`~sqlalchemy.orm.session.Session.add`,
- meaning new items added to a collection or scalar relationship
- get placed into the same session as that of the parent, and
- also applies to items which have been removed from this
- relationship but are still part of unflushed history.
-
- * ``merge`` - cascade the :meth:`~sqlalchemy.orm.session.Session.merge`
- operation
-
- * ``expunge`` - cascade the :meth:`.Session.expunge`
- operation
-
- * ``delete`` - cascade the :meth:`.Session.delete`
- operation
-
- * ``delete-orphan`` - if an item of the child's type is
- detached from its parent, mark it for deletion.
-
- .. versionchanged:: 0.7
- This option does not prevent
- a new instance of the child object from being persisted
- without a parent to start with; to constrain against
- that case, ensure the child's foreign key column(s)
- is configured as NOT NULL
-
- * ``refresh-expire`` - cascade the :meth:`.Session.expire`
- and :meth:`~sqlalchemy.orm.session.Session.refresh` operations
-
- * ``all`` - shorthand for "save-update,merge, refresh-expire,
- expunge, delete"
-
- See the section :ref:`unitofwork_cascades` for more background
- on configuring cascades.
-
- :param cascade_backrefs=True:
- a boolean value indicating if the ``save-update`` cascade should
- operate along an assignment event intercepted by a backref.
- When set to ``False``,
- the attribute managed by this relationship will not cascade
- an incoming transient object into the session of a
- persistent parent, if the event is received via backref.
-
- That is::
-
- mapper(A, a_table, properties={
- 'bs':relationship(B, backref="a", cascade_backrefs=False)
- })
-
- If an ``A()`` is present in the session, assigning it to
- the "a" attribute on a transient ``B()`` will not place
- the ``B()`` into the session. To set the flag in the other
- direction, i.e. so that ``A().bs.append(B())`` won't add
- a transient ``A()`` into the session for a persistent ``B()``::
-
- mapper(A, a_table, properties={
- 'bs':relationship(B,
- backref=backref("a", cascade_backrefs=False)
- )
- })
-
- See the section :ref:`unitofwork_cascades` for more background
- on configuring cascades.
-
- :param collection_class:
- a class or callable that returns a new list-holding object. will
- be used in place of a plain list for storing elements.
- Behavior of this attribute is described in detail at
- :ref:`custom_collections`.
-
- :param comparator_factory:
- a class which extends :class:`.RelationshipProperty.Comparator` which
- provides custom SQL clause generation for comparison operations.
-
- :param doc:
- docstring which will be applied to the resulting descriptor.
-
- :param extension:
- an :class:`.AttributeExtension` instance, or list of extensions,
- which will be prepended to the list of attribute listeners for
- the resulting descriptor placed on the class.
- **Deprecated.** Please see :class:`.AttributeEvents`.
-
- :param foreign_keys:
- a list of columns which are to be used as "foreign key" columns,
- or columns which refer to the value in a remote column, within the
- context of this :func:`.relationship` object's ``primaryjoin``
- condition. That is, if the ``primaryjoin`` condition of this
- :func:`.relationship` is ``a.id == b.a_id``, and the values in ``b.a_id``
- are required to be present in ``a.id``, then the "foreign key" column
- of this :func:`.relationship` is ``b.a_id``.
-
- In normal cases, the ``foreign_keys`` parameter is **not required.**
- :func:`.relationship` will **automatically** determine which columns
- in the ``primaryjoin`` conditition are to be considered "foreign key"
- columns based on those :class:`.Column` objects that specify
- :class:`.ForeignKey`, or are otherwise listed as referencing columns
- in a :class:`.ForeignKeyConstraint` construct. ``foreign_keys`` is only
- needed when:
-
- 1. There is more than one way to construct a join from the local
- table to the remote table, as there are multiple foreign key
- references present. Setting ``foreign_keys`` will limit the
- :func:`.relationship` to consider just those columns specified
- here as "foreign".
-
- .. versionchanged:: 0.8
- A multiple-foreign key join ambiguity can be resolved by
- setting the ``foreign_keys`` parameter alone, without the
- need to explicitly set ``primaryjoin`` as well.
-
- 2. The :class:`.Table` being mapped does not actually have
- :class:`.ForeignKey` or :class:`.ForeignKeyConstraint`
- constructs present, often because the table
- was reflected from a database that does not support foreign key
- reflection (MySQL MyISAM).
-
- 3. The ``primaryjoin`` argument is used to construct a non-standard
- join condition, which makes use of columns or expressions that do
- not normally refer to their "parent" column, such as a join condition
- expressed by a complex comparison using a SQL function.
-
- The :func:`.relationship` construct will raise informative error messages
- that suggest the use of the ``foreign_keys`` parameter when presented
- with an ambiguous condition. In typical cases, if :func:`.relationship`
- doesn't raise any exceptions, the ``foreign_keys`` parameter is usually
- not needed.
-
- ``foreign_keys`` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
-
- .. seealso::
-
- :ref:`relationship_foreign_keys`
-
- :ref:`relationship_custom_foreign`
-
- :func:`.foreign` - allows direct annotation of the "foreign" columns
- within a ``primaryjoin`` condition.
-
- .. versionadded:: 0.8
- The :func:`.foreign` annotation can also be applied
- directly to the ``primaryjoin`` expression, which is an alternate,
- more specific system of describing which columns in a particular
- ``primaryjoin`` should be considered "foreign".
-
- :param info: Optional data dictionary which will be populated into the
- :attr:`.MapperProperty.info` attribute of this object.
-
- .. versionadded:: 0.8
-
- :param innerjoin=False:
- when ``True``, joined eager loads will use an inner join to join
- against related tables instead of an outer join. The purpose
- of this option is generally one of performance, as inner joins
- generally perform better than outer joins. Another reason can be
- the use of ``with_lockmode``, which does not support outer joins.
-
- This flag can be set to ``True`` when the relationship references an
- object via many-to-one using local foreign keys that are not nullable,
- or when the reference is one-to-one or a collection that is guaranteed
- to have one or at least one entry.
-
- :param join_depth:
- when non-``None``, an integer value indicating how many levels
- deep "eager" loaders should join on a self-referring or cyclical
- relationship. The number counts how many times the same Mapper
- shall be present in the loading condition along a particular join
- branch. When left at its default of ``None``, eager loaders
- will stop chaining when they encounter a the same target mapper
- which is already higher up in the chain. This option applies
- both to joined- and subquery- eager loaders.
-
- :param lazy='select': specifies
- how the related items should be loaded. Default value is
- ``select``. Values include:
-
- * ``select`` - items should be loaded lazily when the property is first
- accessed, using a separate SELECT statement, or identity map
- fetch for simple many-to-one references.
-
- * ``immediate`` - items should be loaded as the parents are loaded,
- using a separate SELECT statement, or identity map fetch for
- simple many-to-one references.
-
- .. versionadded:: 0.6.5
-
- * ``joined`` - items should be loaded "eagerly" in the same query as
- that of the parent, using a JOIN or LEFT OUTER JOIN. Whether
- the join is "outer" or not is determined by the ``innerjoin``
- parameter.
-
- * ``subquery`` - items should be loaded "eagerly" as the parents are
- loaded, using one additional SQL statement, which issues a JOIN to a
- subquery of the original statement, for each collection requested.
-
- * ``noload`` - no loading should occur at any time. This is to
- support "write-only" attributes, or attributes which are
- populated in some manner specific to the application.
-
- * ``dynamic`` - the attribute will return a pre-configured
- :class:`~sqlalchemy.orm.query.Query` object for all read
- operations, onto which further filtering operations can be
- applied before iterating the results. See
- the section :ref:`dynamic_relationship` for more details.
-
- * True - a synonym for 'select'
-
- * False - a synonym for 'joined'
-
- * None - a synonym for 'noload'
-
- Detailed discussion of loader strategies is at :doc:`/orm/loading`.
-
- :param load_on_pending=False:
- Indicates loading behavior for transient or pending parent objects.
-
- .. versionchanged:: 0.8
- load_on_pending is superseded by
- :meth:`.Session.enable_relationship_loading`.
-
- When set to ``True``, causes the lazy-loader to
- issue a query for a parent object that is not persistent, meaning it has
- never been flushed. This may take effect for a pending object when
- autoflush is disabled, or for a transient object that has been
- "attached" to a :class:`.Session` but is not part of its pending
- collection.
-
- The load_on_pending flag does not improve behavior
- when the ORM is used normally - object references should be constructed
- at the object level, not at the foreign key level, so that they
- are present in an ordinary way before flush() proceeds. This flag
- is not not intended for general use.
-
- .. versionadded:: 0.6.5
-
- :param order_by:
- indicates the ordering that should be applied when loading these
- items. ``order_by`` is expected to refer to one of the :class:`.Column`
- objects to which the target class is mapped, or
- the attribute itself bound to the target class which refers
- to the column.
-
- ``order_by`` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
-
- :param passive_deletes=False:
- Indicates loading behavior during delete operations.
-
- A value of True indicates that unloaded child items should not
- be loaded during a delete operation on the parent. Normally,
- when a parent item is deleted, all child items are loaded so
- that they can either be marked as deleted, or have their
- foreign key to the parent set to NULL. Marking this flag as
- True usually implies an ON DELETE <CASCADE|SET NULL> rule is in
- place which will handle updating/deleting child rows on the
- database side.
-
- Additionally, setting the flag to the string value 'all' will
- disable the "nulling out" of the child foreign keys, when there
- is no delete or delete-orphan cascade enabled. This is
- typically used when a triggering or error raise scenario is in
- place on the database side. Note that the foreign key
- attributes on in-session child objects will not be changed
- after a flush occurs so this is a very special use-case
- setting.
-
- :param passive_updates=True:
- Indicates loading and INSERT/UPDATE/DELETE behavior when the
- source of a foreign key value changes (i.e. an "on update"
- cascade), which are typically the primary key columns of the
- source row.
-
- When True, it is assumed that ON UPDATE CASCADE is configured on
- the foreign key in the database, and that the database will
- handle propagation of an UPDATE from a source column to
- dependent rows. Note that with databases which enforce
- referential integrity (i.e. PostgreSQL, MySQL with InnoDB tables),
- ON UPDATE CASCADE is required for this operation. The
- relationship() will update the value of the attribute on related
- items which are locally present in the session during a flush.
-
- When False, it is assumed that the database does not enforce
- referential integrity and will not be issuing its own CASCADE
- operation for an update. The relationship() will issue the
- appropriate UPDATE statements to the database in response to the
- change of a referenced key, and items locally present in the
- session during a flush will also be refreshed.
-
- This flag should probably be set to False if primary key changes
- are expected and the database in use doesn't support CASCADE
- (i.e. SQLite, MySQL MyISAM tables).
-
- Also see the passive_updates flag on ``mapper()``.
-
- A future SQLAlchemy release will provide a "detect" feature for
- this flag.
-
- :param post_update:
- this indicates that the relationship should be handled by a
- second UPDATE statement after an INSERT or before a
- DELETE. Currently, it also will issue an UPDATE after the
- instance was UPDATEd as well, although this technically should
- be improved. This flag is used to handle saving bi-directional
- dependencies between two individual rows (i.e. each row
- references the other), where it would otherwise be impossible to
- INSERT or DELETE both rows fully since one row exists before the
- other. Use this flag when a particular mapping arrangement will
- incur two rows that are dependent on each other, such as a table
- that has a one-to-many relationship to a set of child rows, and
- also has a column that references a single child row within that
- list (i.e. both tables contain a foreign key to each other). If
- a ``flush()`` operation returns an error that a "cyclical
- dependency" was detected, this is a cue that you might want to
- use ``post_update`` to "break" the cycle.
-
- :param primaryjoin:
- a SQL expression that will be used as the primary
- join of this child object against the parent object, or in a
- many-to-many relationship the join of the primary object to the
- association table. By default, this value is computed based on the
- foreign key relationships of the parent and child tables (or association
- table).
-
- ``primaryjoin`` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
-
- :param remote_side:
- used for self-referential relationships, indicates the column or
- list of columns that form the "remote side" of the relationship.
-
- ``remote_side`` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
-
- .. versionchanged:: 0.8
- The :func:`.remote` annotation can also be applied
- directly to the ``primaryjoin`` expression, which is an alternate,
- more specific system of describing which columns in a particular
- ``primaryjoin`` should be considered "remote".
-
- :param query_class:
- a :class:`.Query` subclass that will be used as the base of the
- "appender query" returned by a "dynamic" relationship, that
- is, a relationship that specifies ``lazy="dynamic"`` or was
- otherwise constructed using the :func:`.orm.dynamic_loader`
- function.
-
- :param secondaryjoin:
- a SQL expression that will be used as the join of
- an association table to the child object. By default, this value is
- computed based on the foreign key relationships of the association and
- child tables.
-
- ``secondaryjoin`` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
-
- :param single_parent=(True|False):
- when True, installs a validator which will prevent objects
- from being associated with more than one parent at a time.
- This is used for many-to-one or many-to-many relationships that
- should be treated either as one-to-one or one-to-many. Its
- usage is optional unless delete-orphan cascade is also
- set on this relationship(), in which case its required.
-
- :param uselist=(True|False):
- a boolean that indicates if this property should be loaded as a
- list or a scalar. In most cases, this value is determined
- automatically by ``relationship()``, based on the type and direction
- of the relationship - one to many forms a list, many to one
- forms a scalar, many to many is a list. If a scalar is desired
- where normally a list would be present, such as a bi-directional
- one-to-one relationship, set uselist to False.
-
- :param viewonly=False:
- when set to True, the relationship is used only for loading objects
- within the relationship, and has no effect on the unit-of-work
- flush process. Relationships with viewonly can specify any kind of
- join conditions to provide additional views of related objects
- onto a parent object. Note that the functionality of a viewonly
- relationship has its limits - complicated join conditions may
- not compile into eager or lazy loaders properly. If this is the
- case, use an alternative method.
-
- .. versionchanged:: 0.6
- :func:`relationship` was renamed from its previous name
- :func:`relation`.
-
- """
- return RelationshipProperty(argument, secondary=secondary, **kwargs)
-
+relationship = public_factory(RelationshipProperty, ".orm.relationship")
def relation(*arg, **kw):
"""A synonym for :func:`relationship`."""
@@ -689,138 +131,8 @@ def dynamic_loader(argument, **kw):
return relationship(argument, **kw)
-def column_property(*cols, **kw):
- """Provide a column-level property for use with a Mapper.
-
- Column-based properties can normally be applied to the mapper's
- ``properties`` dictionary using the :class:`.Column` element directly.
- Use this function when the given column is not directly present within the
- mapper's selectable; examples include SQL expressions, functions, and
- scalar SELECT queries.
-
- Columns that aren't present in the mapper's selectable won't be persisted
- by the mapper and are effectively "read-only" attributes.
-
- :param \*cols:
- list of Column objects to be mapped.
-
- :param active_history=False:
- When ``True``, indicates that the "previous" value for a
- scalar attribute should be loaded when replaced, if not
- already loaded. Normally, history tracking logic for
- simple non-primary-key scalar values only needs to be
- aware of the "new" value in order to perform a flush. This
- flag is available for applications that make use of
- :func:`.attributes.get_history` or :meth:`.Session.is_modified`
- which also need to know
- the "previous" value of the attribute.
-
- .. versionadded:: 0.6.6
-
- :param comparator_factory: a class which extends
- :class:`.ColumnProperty.Comparator` which provides custom SQL clause
- generation for comparison operations.
-
- :param group:
- a group name for this property when marked as deferred.
-
- :param deferred:
- when True, the column property is "deferred", meaning that
- it does not load immediately, and is instead loaded when the
- attribute is first accessed on an instance. See also
- :func:`~sqlalchemy.orm.deferred`.
-
- :param doc:
- optional string that will be applied as the doc on the
- class-bound descriptor.
-
- :param expire_on_flush=True:
- Disable expiry on flush. A column_property() which refers
- to a SQL expression (and not a single table-bound column)
- is considered to be a "read only" property; populating it
- has no effect on the state of data, and it can only return
- database state. For this reason a column_property()'s value
- is expired whenever the parent object is involved in a
- flush, that is, has any kind of "dirty" state within a flush.
- Setting this parameter to ``False`` will have the effect of
- leaving any existing value present after the flush proceeds.
- Note however that the :class:`.Session` with default expiration
- settings still expires
- all attributes after a :meth:`.Session.commit` call, however.
-
- .. versionadded:: 0.7.3
-
- :param info: Optional data dictionary which will be populated into the
- :attr:`.MapperProperty.info` attribute of this object.
-
- .. versionadded:: 0.8
-
- :param extension:
- an
- :class:`.AttributeExtension`
- instance, or list of extensions, which will be prepended
- to the list of attribute listeners for the resulting
- descriptor placed on the class.
- **Deprecated.** Please see :class:`.AttributeEvents`.
-
- """
-
- return ColumnProperty(*cols, **kw)
-
-
-def composite(class_, *cols, **kwargs):
- """Return a composite column-based property for use with a Mapper.
-
- See the mapping documentation section :ref:`mapper_composite` for a full
- usage example.
-
- The :class:`.MapperProperty` returned by :func:`.composite`
- is the :class:`.CompositeProperty`.
-
- :param class\_:
- The "composite type" class.
-
- :param \*cols:
- List of Column objects to be mapped.
-
- :param active_history=False:
- When ``True``, indicates that the "previous" value for a
- scalar attribute should be loaded when replaced, if not
- already loaded. See the same flag on :func:`.column_property`.
-
- .. versionchanged:: 0.7
- This flag specifically becomes meaningful
- - previously it was a placeholder.
-
- :param group:
- A group name for this property when marked as deferred.
-
- :param deferred:
- When True, the column property is "deferred", meaning that it does not
- load immediately, and is instead loaded when the attribute is first
- accessed on an instance. See also :func:`~sqlalchemy.orm.deferred`.
-
- :param comparator_factory: a class which extends
- :class:`.CompositeProperty.Comparator` which provides custom SQL clause
- generation for comparison operations.
-
- :param doc:
- optional string that will be applied as the doc on the
- class-bound descriptor.
-
- :param info: Optional data dictionary which will be populated into the
- :attr:`.MapperProperty.info` attribute of this object.
-
- .. versionadded:: 0.8
-
- :param extension:
- an :class:`.AttributeExtension` instance,
- or list of extensions, which will be prepended to the list of
- attribute listeners for the resulting descriptor placed on the class.
- **Deprecated.** Please see :class:`.AttributeEvents`.
-
- """
- return CompositeProperty(class_, *cols, **kwargs)
+column_property = public_factory(ColumnProperty, ".orm.column_property")
+composite = public_factory(CompositeProperty, ".orm.composite")
def backref(name, **kwargs):
@@ -836,488 +148,33 @@ def backref(name, **kwargs):
return (name, kwargs)
-def deferred(*columns, **kwargs):
- """Return a :class:`.DeferredColumnProperty`, which indicates this
- object attributes should only be loaded from its corresponding
- table column when first accessed.
-
- Used with the "properties" dictionary sent to :func:`mapper`.
-
- See also:
-
- :ref:`deferred`
-
- """
- return ColumnProperty(deferred=True, *columns, **kwargs)
-
-
-def mapper(class_, local_table=None, *args, **params):
- """Return a new :class:`~.Mapper` object.
-
- This function is typically used behind the scenes
- via the Declarative extension. When using Declarative,
- many of the usual :func:`.mapper` arguments are handled
- by the Declarative extension itself, including ``class_``,
- ``local_table``, ``properties``, and ``inherits``.
- Other options are passed to :func:`.mapper` using
- the ``__mapper_args__`` class variable::
+def deferred(*columns, **kw):
+ """Indicate a column-based mapped attribute that by default will
+ not load unless accessed.
- class MyClass(Base):
- __tablename__ = 'my_table'
- id = Column(Integer, primary_key=True)
- type = Column(String(50))
- alt = Column("some_alt", Integer)
+ :param \*columns: columns to be mapped. This is typically a single
+ :class:`.Column` object, however a collection is supported in order
+ to support multiple columns mapped under the same attribute.
- __mapper_args__ = {
- 'polymorphic_on' : type
- }
+ :param \**kw: additional keyword arguments passed to :class:`.ColumnProperty`.
+ .. seealso::
- Explicit use of :func:`.mapper`
- is often referred to as *classical mapping*. The above
- declarative example is equivalent in classical form to::
-
- my_table = Table("my_table", metadata,
- Column('id', Integer, primary_key=True),
- Column('type', String(50)),
- Column("some_alt", Integer)
- )
-
- class MyClass(object):
- pass
-
- mapper(MyClass, my_table,
- polymorphic_on=my_table.c.type,
- properties={
- 'alt':my_table.c.some_alt
- })
-
- See also:
-
- :ref:`classical_mapping` - discussion of direct usage of
- :func:`.mapper`
-
- :param class\_: The class to be mapped. When using Declarative,
- this argument is automatically passed as the declared class
- itself.
-
- :param local_table: The :class:`.Table` or other selectable
- to which the class is mapped. May be ``None`` if
- this mapper inherits from another mapper using single-table
- inheritance. When using Declarative, this argument is
- automatically passed by the extension, based on what
- is configured via the ``__table__`` argument or via the
- :class:`.Table` produced as a result of the ``__tablename__``
- and :class:`.Column` arguments present.
-
- :param always_refresh: If True, all query operations for this mapped
- class will overwrite all data within object instances that already
- exist within the session, erasing any in-memory changes with
- whatever information was loaded from the database. Usage of this
- flag is highly discouraged; as an alternative, see the method
- :meth:`.Query.populate_existing`.
-
- :param allow_partial_pks: Defaults to True. Indicates that a
- composite primary key with some NULL values should be considered as
- possibly existing within the database. This affects whether a
- mapper will assign an incoming row to an existing identity, as well
- as if :meth:`.Session.merge` will check the database first for a
- particular primary key value. A "partial primary key" can occur if
- one has mapped to an OUTER JOIN, for example.
-
- :param batch: Defaults to ``True``, indicating that save operations
- of multiple entities can be batched together for efficiency.
- Setting to False indicates
- that an instance will be fully saved before saving the next
- instance. This is used in the extremely rare case that a
- :class:`.MapperEvents` listener requires being called
- in between individual row persistence operations.
-
- :param column_prefix: A string which will be prepended
- to the mapped attribute name when :class:`.Column`
- objects are automatically assigned as attributes to the
- mapped class. Does not affect explicitly specified
- column-based properties.
-
- See the section :ref:`column_prefix` for an example.
-
- :param concrete: If True, indicates this mapper should use concrete
- table inheritance with its parent mapper.
-
- See the section :ref:`concrete_inheritance` for an example.
-
- :param exclude_properties: A list or set of string column names to
- be excluded from mapping.
-
- See :ref:`include_exclude_cols` for an example.
-
- :param extension: A :class:`.MapperExtension` instance or
- list of :class:`.MapperExtension` instances which will be applied
- to all operations by this :class:`.Mapper`. **Deprecated.**
- Please see :class:`.MapperEvents`.
-
- :param include_properties: An inclusive list or set of string column
- names to map.
-
- See :ref:`include_exclude_cols` for an example.
-
- :param inherits: A mapped class or the corresponding :class:`.Mapper`
- of one indicating a superclass to which this :class:`.Mapper`
- should *inherit* from. The mapped class here must be a subclass
- of the other mapper's class. When using Declarative, this argument
- is passed automatically as a result of the natural class
- hierarchy of the declared classes.
-
- See also:
-
- :ref:`inheritance_toplevel`
-
- :param inherit_condition: For joined table inheritance, a SQL
- expression which will
- define how the two tables are joined; defaults to a natural join
- between the two tables.
-
- :param inherit_foreign_keys: When ``inherit_condition`` is used and the
- columns present are missing a :class:`.ForeignKey` configuration,
- this parameter can be used to specify which columns are "foreign".
- In most cases can be left as ``None``.
-
- :param legacy_is_orphan: Boolean, defaults to ``False``.
- When ``True``, specifies that "legacy" orphan consideration
- is to be applied to objects mapped by this mapper, which means
- that a pending (that is, not persistent) object is auto-expunged
- from an owning :class:`.Session` only when it is de-associated
- from *all* parents that specify a ``delete-orphan`` cascade towards
- this mapper. The new default behavior is that the object is auto-expunged
- when it is de-associated with *any* of its parents that specify
- ``delete-orphan`` cascade. This behavior is more consistent with
- that of a persistent object, and allows behavior to be consistent
- in more scenarios independently of whether or not an orphanable
- object has been flushed yet or not.
-
- See the change note and example at :ref:`legacy_is_orphan_addition`
- for more detail on this change.
-
- .. versionadded:: 0.8 - the consideration of a pending object as
- an "orphan" has been modified to more closely match the
- behavior as that of persistent objects, which is that the object
- is expunged from the :class:`.Session` as soon as it is
- de-associated from any of its orphan-enabled parents. Previously,
- the pending object would be expunged only if de-associated
- from all of its orphan-enabled parents. The new flag ``legacy_is_orphan``
- is added to :func:`.orm.mapper` which re-establishes the
- legacy behavior.
-
- :param non_primary: Specify that this :class:`.Mapper` is in addition
- to the "primary" mapper, that is, the one used for persistence.
- The :class:`.Mapper` created here may be used for ad-hoc
- mapping of the class to an alternate selectable, for loading
- only.
-
- The ``non_primary`` feature is rarely needed with modern
- usage.
-
- :param order_by: A single :class:`.Column` or list of :class:`.Column`
- objects for which selection operations should use as the default
- ordering for entities. By default mappers have no pre-defined
- ordering.
-
- :param passive_updates: Indicates UPDATE behavior of foreign key
- columns when a primary key column changes on a joined-table
- inheritance mapping. Defaults to ``True``.
-
- When True, it is assumed that ON UPDATE CASCADE is configured on
- the foreign key in the database, and that the database will handle
- propagation of an UPDATE from a source column to dependent columns
- on joined-table rows.
-
- When False, it is assumed that the database does not enforce
- referential integrity and will not be issuing its own CASCADE
- operation for an update. The :class:`.Mapper` here will
- emit an UPDATE statement for the dependent columns during a
- primary key change.
-
- See also:
-
- :ref:`passive_updates` - description of a similar feature as
- used with :func:`.relationship`
-
- :param polymorphic_on: Specifies the column, attribute, or
- SQL expression used to determine the target class for an
- incoming row, when inheriting classes are present.
-
- This value is commonly a :class:`.Column` object that's
- present in the mapped :class:`.Table`::
-
- class Employee(Base):
- __tablename__ = 'employee'
-
- id = Column(Integer, primary_key=True)
- discriminator = Column(String(50))
-
- __mapper_args__ = {
- "polymorphic_on":discriminator,
- "polymorphic_identity":"employee"
- }
-
- It may also be specified
- as a SQL expression, as in this example where we
- use the :func:`.case` construct to provide a conditional
- approach::
-
- class Employee(Base):
- __tablename__ = 'employee'
-
- id = Column(Integer, primary_key=True)
- discriminator = Column(String(50))
-
- __mapper_args__ = {
- "polymorphic_on":case([
- (discriminator == "EN", "engineer"),
- (discriminator == "MA", "manager"),
- ], else_="employee"),
- "polymorphic_identity":"employee"
- }
-
- It may also refer to any attribute
- configured with :func:`.column_property`, or to the
- string name of one::
-
- class Employee(Base):
- __tablename__ = 'employee'
-
- id = Column(Integer, primary_key=True)
- discriminator = Column(String(50))
- employee_type = column_property(
- case([
- (discriminator == "EN", "engineer"),
- (discriminator == "MA", "manager"),
- ], else_="employee")
- )
-
- __mapper_args__ = {
- "polymorphic_on":employee_type,
- "polymorphic_identity":"employee"
- }
-
- .. versionchanged:: 0.7.4
- ``polymorphic_on`` may be specified as a SQL expression,
- or refer to any attribute configured with
- :func:`.column_property`, or to the string name of one.
-
- When setting ``polymorphic_on`` to reference an
- attribute or expression that's not present in the
- locally mapped :class:`.Table`, yet the value
- of the discriminator should be persisted to the database,
- the value of the
- discriminator is not automatically set on new
- instances; this must be handled by the user,
- either through manual means or via event listeners.
- A typical approach to establishing such a listener
- looks like::
-
- from sqlalchemy import event
- from sqlalchemy.orm import object_mapper
-
- @event.listens_for(Employee, "init", propagate=True)
- def set_identity(instance, *arg, **kw):
- mapper = object_mapper(instance)
- instance.discriminator = mapper.polymorphic_identity
-
- Where above, we assign the value of ``polymorphic_identity``
- for the mapped class to the ``discriminator`` attribute,
- thus persisting the value to the ``discriminator`` column
- in the database.
-
- See also:
-
- :ref:`inheritance_toplevel`
-
- :param polymorphic_identity: Specifies the value which
- identifies this particular class as returned by the
- column expression referred to by the ``polymorphic_on``
- setting. As rows are received, the value corresponding
- to the ``polymorphic_on`` column expression is compared
- to this value, indicating which subclass should
- be used for the newly reconstructed object.
-
- :param properties: A dictionary mapping the string names of object
- attributes to :class:`.MapperProperty` instances, which define the
- persistence behavior of that attribute. Note that :class:`.Column`
- objects present in
- the mapped :class:`.Table` are automatically placed into
- ``ColumnProperty`` instances upon mapping, unless overridden.
- When using Declarative, this argument is passed automatically,
- based on all those :class:`.MapperProperty` instances declared
- in the declared class body.
-
- :param primary_key: A list of :class:`.Column` objects which define the
- primary key to be used against this mapper's selectable unit.
- This is normally simply the primary key of the ``local_table``, but
- can be overridden here.
-
- :param version_id_col: A :class:`.Column`
- that will be used to keep a running version id of mapped entities
- in the database. This is used during save operations to ensure that
- no other thread or process has updated the instance during the
- lifetime of the entity, else a
- :class:`~sqlalchemy.orm.exc.StaleDataError` exception is
- thrown. By default the column must be of :class:`.Integer` type,
- unless ``version_id_generator`` specifies a new generation
- algorithm.
-
- :param version_id_generator: A callable which defines the algorithm
- used to generate new version ids. Defaults to an integer
- generator. Can be replaced with one that generates timestamps,
- uuids, etc. e.g.::
-
- import uuid
-
- class MyClass(Base):
- __tablename__ = 'mytable'
- id = Column(Integer, primary_key=True)
- version_uuid = Column(String(32))
-
- __mapper_args__ = {
- 'version_id_col':version_uuid,
- 'version_id_generator':lambda version:uuid.uuid4().hex
- }
-
- The callable receives the current version identifier as its
- single argument.
-
- :param with_polymorphic: A tuple in the form ``(<classes>,
- <selectable>)`` indicating the default style of "polymorphic"
- loading, that is, which tables are queried at once. <classes> is
- any single or list of mappers and/or classes indicating the
- inherited classes that should be loaded at once. The special value
- ``'*'`` may be used to indicate all descending classes should be
- loaded immediately. The second tuple argument <selectable>
- indicates a selectable that will be used to query for multiple
- classes.
-
- See also:
-
- :ref:`concrete_inheritance` - typically uses ``with_polymorphic``
- to specify a UNION statement to select from.
-
- :ref:`with_polymorphic` - usage example of the related
- :meth:`.Query.with_polymorphic` method
+ :ref:`deferred`
"""
- return Mapper(class_, local_table, *args, **params)
-
+ return ColumnProperty(deferred=True, *columns, **kw)
-def synonym(name, map_column=False, descriptor=None,
- comparator_factory=None, doc=None):
- """Denote an attribute name as a synonym to a mapped property.
- .. versionchanged:: 0.7
- :func:`.synonym` is superseded by the :mod:`~sqlalchemy.ext.hybrid`
- extension. See the documentation for hybrids
- at :ref:`hybrids_toplevel`.
+mapper = public_factory(Mapper, ".orm.mapper")
- Used with the ``properties`` dictionary sent to
- :func:`~sqlalchemy.orm.mapper`::
+synonym = public_factory(SynonymProperty, ".orm.synonym")
- class MyClass(object):
- def _get_status(self):
- return self._status
- def _set_status(self, value):
- self._status = value
- status = property(_get_status, _set_status)
-
- mapper(MyClass, sometable, properties={
- "status":synonym("_status", map_column=True)
- })
-
- Above, the ``status`` attribute of MyClass will produce
- expression behavior against the table column named ``status``,
- using the Python attribute ``_status`` on the mapped class
- to represent the underlying value.
-
- :param name: the name of the existing mapped property, which can be
- any other ``MapperProperty`` including column-based properties and
- relationships.
-
- :param map_column: if ``True``, an additional ``ColumnProperty`` is created
- on the mapper automatically, using the synonym's name as the keyname of
- the property, and the keyname of this ``synonym()`` as the name of the
- column to map.
-
- """
- return SynonymProperty(name, map_column=map_column,
- descriptor=descriptor,
- comparator_factory=comparator_factory,
- doc=doc)
+comparable_property = public_factory(ComparableProperty,
+ ".orm.comparable_property")
-def comparable_property(comparator_factory, descriptor=None):
- """Provides a method of applying a :class:`.PropComparator`
- to any Python descriptor attribute.
-
- .. versionchanged:: 0.7
- :func:`.comparable_property` is superseded by
- the :mod:`~sqlalchemy.ext.hybrid` extension. See the example
- at :ref:`hybrid_custom_comparators`.
-
- Allows any Python descriptor to behave like a SQL-enabled
- attribute when used at the class level in queries, allowing
- redefinition of expression operator behavior.
-
- In the example below we redefine :meth:`.PropComparator.operate`
- to wrap both sides of an expression in ``func.lower()`` to produce
- case-insensitive comparison::
-
- from sqlalchemy.orm import comparable_property
- from sqlalchemy.orm.interfaces import PropComparator
- from sqlalchemy.sql import func
- from sqlalchemy import Integer, String, Column
- from sqlalchemy.ext.declarative import declarative_base
-
- class CaseInsensitiveComparator(PropComparator):
- def __clause_element__(self):
- return self.prop
-
- def operate(self, op, other):
- return op(
- func.lower(self.__clause_element__()),
- func.lower(other)
- )
-
- Base = declarative_base()
-
- class SearchWord(Base):
- __tablename__ = 'search_word'
- id = Column(Integer, primary_key=True)
- word = Column(String)
- word_insensitive = comparable_property(lambda prop, mapper:
- CaseInsensitiveComparator(mapper.c.word, mapper)
- )
-
-
- A mapping like the above allows the ``word_insensitive`` attribute
- to render an expression like::
-
- >>> print SearchWord.word_insensitive == "Trucks"
- lower(search_word.word) = lower(:lower_1)
-
- :param comparator_factory:
- A PropComparator subclass or factory that defines operator behavior
- for this property.
-
- :param descriptor:
- Optional when used in a ``properties={}`` declaration. The Python
- descriptor or property to layer comparison behavior on top of.
-
- The like-named descriptor will be automatically retrieved from the
- mapped class if left blank in a ``properties`` declaration.
-
- """
- return ComparableProperty(comparator_factory, descriptor)
-
-
-@sa_util.deprecated("0.7", message=":func:`.compile_mappers` "
+@_sa_util.deprecated("0.7", message=":func:`.compile_mappers` "
"is renamed to :func:`.configure_mappers`")
def compile_mappers():
"""Initialize the inter-mapper relationships of all mappers that have
@@ -1359,107 +216,24 @@ def clear_mappers():
finally:
mapperlib._CONFIGURE_MUTEX.release()
-
-def joinedload(*keys, **kw):
- """Return a ``MapperOption`` that will convert the property of the given
- name or series of mapped attributes into an joined eager load.
-
- .. versionchanged:: 0.6beta3
- This function is known as :func:`eagerload` in all versions
- of SQLAlchemy prior to version 0.6beta3, including the 0.5 and 0.4
- series. :func:`eagerload` will remain available for the foreseeable
- future in order to enable cross-compatibility.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- examples::
-
- # joined-load the "orders" collection on "User"
- query(User).options(joinedload(User.orders))
-
- # joined-load the "keywords" collection on each "Item",
- # but not the "items" collection on "Order" - those
- # remain lazily loaded.
- query(Order).options(joinedload(Order.items, Item.keywords))
-
- # to joined-load across both, use joinedload_all()
- query(Order).options(joinedload_all(Order.items, Item.keywords))
-
- # set the default strategy to be 'joined'
- query(Order).options(joinedload('*'))
-
- :func:`joinedload` also accepts a keyword argument `innerjoin=True` which
- indicates using an inner join instead of an outer::
-
- query(Order).options(joinedload(Order.user, innerjoin=True))
-
- .. note::
-
- The join created by :func:`joinedload` is anonymously aliased such that
- it **does not affect the query results**. An :meth:`.Query.order_by`
- or :meth:`.Query.filter` call **cannot** reference these aliased
- tables - so-called "user space" joins are constructed using
- :meth:`.Query.join`. The rationale for this is that
- :func:`joinedload` is only applied in order to affect how related
- objects or collections are loaded as an optimizing detail - it can be
- added or removed with no impact on actual results. See the section
- :ref:`zen_of_eager_loading` for a detailed description of how this is
- used, including how to use a single explicit JOIN for
- filtering/ordering and eager loading simultaneously.
-
- See also: :func:`subqueryload`, :func:`lazyload`
-
- """
- innerjoin = kw.pop('innerjoin', None)
- if innerjoin is not None:
- return (
- strategies.EagerLazyOption(keys, lazy='joined'),
- strategies.EagerJoinOption(keys, innerjoin)
- )
- else:
- return strategies.EagerLazyOption(keys, lazy='joined')
-
-
-def joinedload_all(*keys, **kw):
- """Return a ``MapperOption`` that will convert all properties along the
- given dot-separated path or series of mapped attributes
- into an joined eager load.
-
- .. versionchanged:: 0.6beta3
- This function is known as :func:`eagerload_all` in all versions
- of SQLAlchemy prior to version 0.6beta3, including the 0.5 and 0.4
- series. :func:`eagerload_all` will remain available for the
- foreseeable future in order to enable cross-compatibility.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- For example::
-
- query.options(joinedload_all('orders.items.keywords'))...
-
- will set all of ``orders``, ``orders.items``, and
- ``orders.items.keywords`` to load in one joined eager load.
-
- Individual descriptors are accepted as arguments as well::
-
- query.options(joinedload_all(User.orders, Order.items, Item.keywords))
-
- The keyword arguments accept a flag `innerjoin=True|False` which will
- override the value of the `innerjoin` flag specified on the
- relationship().
-
- See also: :func:`subqueryload_all`, :func:`lazyload`
-
- """
- innerjoin = kw.pop('innerjoin', None)
- if innerjoin is not None:
- return (
- strategies.EagerLazyOption(keys, lazy='joined', chained=True),
- strategies.EagerJoinOption(keys, innerjoin, chained=True)
- )
- else:
- return strategies.EagerLazyOption(keys, lazy='joined', chained=True)
-
+from . import strategy_options
+
+joinedload = strategy_options.joinedload._unbound_fn
+joinedload_all = strategy_options.joinedload._unbound_all_fn
+contains_eager = strategy_options.contains_eager._unbound_fn
+defer = strategy_options.defer._unbound_fn
+undefer = strategy_options.undefer._unbound_fn
+undefer_group = strategy_options.undefer_group._unbound_fn
+load_only = strategy_options.load_only._unbound_fn
+lazyload = strategy_options.lazyload._unbound_fn
+lazyload_all = strategy_options.lazyload_all._unbound_all_fn
+subqueryload = strategy_options.subqueryload._unbound_fn
+subqueryload_all = strategy_options.subqueryload_all._unbound_all_fn
+immediateload = strategy_options.immediateload._unbound_fn
+noload = strategy_options.noload._unbound_fn
+defaultload = strategy_options.defaultload._unbound_fn
+
+from .strategy_options import Load
def eagerload(*args, **kwargs):
"""A synonym for :func:`joinedload()`."""
@@ -1471,316 +245,23 @@ def eagerload_all(*args, **kwargs):
return joinedload_all(*args, **kwargs)
-def subqueryload(*keys):
- """Return a ``MapperOption`` that will convert the property
- of the given name or series of mapped attributes
- into an subquery eager load.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
- examples::
- # subquery-load the "orders" collection on "User"
- query(User).options(subqueryload(User.orders))
+contains_alias = public_factory(AliasOption, ".orm.contains_alias")
- # subquery-load the "keywords" collection on each "Item",
- # but not the "items" collection on "Order" - those
- # remain lazily loaded.
- query(Order).options(subqueryload(Order.items, Item.keywords))
- # to subquery-load across both, use subqueryload_all()
- query(Order).options(subqueryload_all(Order.items, Item.keywords))
- # set the default strategy to be 'subquery'
- query(Order).options(subqueryload('*'))
-
- See also: :func:`joinedload`, :func:`lazyload`
-
- """
- return strategies.EagerLazyOption(keys, lazy="subquery")
+def __go(lcls):
+ global __all__
+ from .. import util as sa_util
+ from . import dynamic
+ from . import events
+ import inspect as _inspect
+ __all__ = sorted(name for name, obj in lcls.items()
+ if not (name.startswith('_') or _inspect.ismodule(obj)))
-def subqueryload_all(*keys):
- """Return a ``MapperOption`` that will convert all properties along the
- given dot-separated path or series of mapped attributes
- into a subquery eager load.
+ _sa_util.dependencies.resolve_all("sqlalchemy.orm")
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- For example::
-
- query.options(subqueryload_all('orders.items.keywords'))...
-
- will set all of ``orders``, ``orders.items``, and
- ``orders.items.keywords`` to load in one subquery eager load.
-
- Individual descriptors are accepted as arguments as well::
-
- query.options(subqueryload_all(User.orders, Order.items,
- Item.keywords))
-
- See also: :func:`joinedload_all`, :func:`lazyload`, :func:`immediateload`
-
- """
- return strategies.EagerLazyOption(keys, lazy="subquery", chained=True)
-
-
-def lazyload(*keys):
- """Return a ``MapperOption`` that will convert the property of the given
- name or series of mapped attributes into a lazy load.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- See also: :func:`eagerload`, :func:`subqueryload`, :func:`immediateload`
-
- """
- return strategies.EagerLazyOption(keys, lazy=True)
-
-
-def lazyload_all(*keys):
- """Return a ``MapperOption`` that will convert all the properties
- along the given dot-separated path or series of mapped attributes
- into a lazy load.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- See also: :func:`eagerload`, :func:`subqueryload`, :func:`immediateload`
-
- """
- return strategies.EagerLazyOption(keys, lazy=True, chained=True)
-
-
-def noload(*keys):
- """Return a ``MapperOption`` that will convert the property of the
- given name or series of mapped attributes into a non-load.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- See also: :func:`lazyload`, :func:`eagerload`,
- :func:`subqueryload`, :func:`immediateload`
-
- """
- return strategies.EagerLazyOption(keys, lazy=None)
-
-
-def immediateload(*keys):
- """Return a ``MapperOption`` that will convert the property of the given
- name or series of mapped attributes into an immediate load.
-
- The "immediate" load means the attribute will be fetched
- with a separate SELECT statement per parent in the
- same way as lazy loading - except the loader is guaranteed
- to be called at load time before the parent object
- is returned in the result.
-
- The normal behavior of lazy loading applies - if
- the relationship is a simple many-to-one, and the child
- object is already present in the :class:`.Session`,
- no SELECT statement will be emitted.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- See also: :func:`lazyload`, :func:`eagerload`, :func:`subqueryload`
-
- .. versionadded:: 0.6.5
-
- """
- return strategies.EagerLazyOption(keys, lazy='immediate')
-
-
-def contains_alias(alias):
- """Return a :class:`.MapperOption` that will indicate to the query that
- the main table has been aliased.
-
- This is used in the very rare case that :func:`.contains_eager`
- is being used in conjunction with a user-defined SELECT
- statement that aliases the parent table. E.g.::
-
- # define an aliased UNION called 'ulist'
- statement = users.select(users.c.user_id==7).\\
- union(users.select(users.c.user_id>7)).\\
- alias('ulist')
-
- # add on an eager load of "addresses"
- statement = statement.outerjoin(addresses).\\
- select().apply_labels()
-
- # create query, indicating "ulist" will be an
- # alias for the main table, "addresses"
- # property should be eager loaded
- query = session.query(User).options(
- contains_alias('ulist'),
- contains_eager('addresses'))
-
- # then get results via the statement
- results = query.from_statement(statement).all()
-
- :param alias: is the string name of an alias, or a
- :class:`~.sql.expression.Alias` object representing
- the alias.
-
- """
- return AliasOption(alias)
-
-
-def contains_eager(*keys, **kwargs):
- """Return a ``MapperOption`` that will indicate to the query that
- the given attribute should be eagerly loaded from columns currently
- in the query.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- The option is used in conjunction with an explicit join that loads
- the desired rows, i.e.::
-
- sess.query(Order).\\
- join(Order.user).\\
- options(contains_eager(Order.user))
-
- The above query would join from the ``Order`` entity to its related
- ``User`` entity, and the returned ``Order`` objects would have the
- ``Order.user`` attribute pre-populated.
-
- :func:`contains_eager` also accepts an `alias` argument, which is the
- string name of an alias, an :func:`~sqlalchemy.sql.expression.alias`
- construct, or an :func:`~sqlalchemy.orm.aliased` construct. Use this when
- the eagerly-loaded rows are to come from an aliased table::
-
- user_alias = aliased(User)
- sess.query(Order).\\
- join((user_alias, Order.user)).\\
- options(contains_eager(Order.user, alias=user_alias))
-
- See also :func:`eagerload` for the "automatic" version of this
- functionality.
-
- For additional examples of :func:`contains_eager` see
- :ref:`contains_eager`.
-
- """
- alias = kwargs.pop('alias', None)
- if kwargs:
- raise exc.ArgumentError(
- 'Invalid kwargs for contains_eager: %r' % list(kwargs.keys()))
- return strategies.EagerLazyOption(keys, lazy='joined',
- propagate_to_loaders=False, chained=True), \
- strategies.LoadEagerFromAliasOption(keys, alias=alias, chained=True)
-
-
-def defer(*key):
- """Return a :class:`.MapperOption` that will convert the column property
- of the given name into a deferred load.
-
- Used with :meth:`.Query.options`.
-
- e.g.::
-
- from sqlalchemy.orm import defer
-
- query(MyClass).options(defer("attribute_one"),
- defer("attribute_two"))
-
- A class bound descriptor is also accepted::
-
- query(MyClass).options(
- defer(MyClass.attribute_one),
- defer(MyClass.attribute_two))
-
- A "path" can be specified onto a related or collection object using a
- dotted name. The :func:`.orm.defer` option will be applied to that object
- when loaded::
-
- query(MyClass).options(
- defer("related.attribute_one"),
- defer("related.attribute_two"))
-
- To specify a path via class, send multiple arguments::
-
- query(MyClass).options(
- defer(MyClass.related, MyOtherClass.attribute_one),
- defer(MyClass.related, MyOtherClass.attribute_two))
-
- See also:
-
- :ref:`deferred`
-
- :param \*key: A key representing an individual path. Multiple entries
- are accepted to allow a multiple-token path for a single target, not
- multiple targets.
-
- """
- return strategies.DeferredOption(key, defer=True)
-
-
-def undefer(*key):
- """Return a :class:`.MapperOption` that will convert the column property
- of the given name into a non-deferred (regular column) load.
-
- Used with :meth:`.Query.options`.
-
- e.g.::
-
- from sqlalchemy.orm import undefer
-
- query(MyClass).options(
- undefer("attribute_one"),
- undefer("attribute_two"))
-
- A class bound descriptor is also accepted::
-
- query(MyClass).options(
- undefer(MyClass.attribute_one),
- undefer(MyClass.attribute_two))
-
- A "path" can be specified onto a related or collection object using a
- dotted name. The :func:`.orm.undefer` option will be applied to that
- object when loaded::
-
- query(MyClass).options(
- undefer("related.attribute_one"),
- undefer("related.attribute_two"))
-
- To specify a path via class, send multiple arguments::
-
- query(MyClass).options(
- undefer(MyClass.related, MyOtherClass.attribute_one),
- undefer(MyClass.related, MyOtherClass.attribute_two))
-
- See also:
-
- :func:`.orm.undefer_group` as a means to "undefer" a group
- of attributes at once.
-
- :ref:`deferred`
-
- :param \*key: A key representing an individual path. Multiple entries
- are accepted to allow a multiple-token path for a single target, not
- multiple targets.
-
- """
- return strategies.DeferredOption(key, defer=False)
-
-
-def undefer_group(name):
- """Return a :class:`.MapperOption` that will convert the given group of
- deferred column properties into a non-deferred (regular column) load.
-
- Used with :meth:`.Query.options`.
-
- e.g.::
-
- query(MyClass).options(undefer("group_one"))
-
- See also:
-
- :ref:`deferred`
-
- :param name: String name of the deferred group. This name is
- established using the "group" name to the :func:`.orm.deferred`
- configurational function.
-
- """
- return strategies.UndeferGroupOption(name)
+__go(locals())
-from sqlalchemy import util as _sa_util
-_sa_util.importlater.resolve_all()
diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py
index 13c2cf256..e5f8550ab 100644
--- a/lib/sqlalchemy/orm/attributes.py
+++ b/lib/sqlalchemy/orm/attributes.py
@@ -1,5 +1,5 @@
# orm/attributes.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -14,109 +14,19 @@ defines a large part of the ORM's interactivity.
"""
import operator
-from operator import itemgetter
-
from .. import util, event, inspection
-from . import interfaces, collections, events, exc as orm_exc
-from .instrumentation import instance_state, instance_dict, manager_of_class
+from . import interfaces, collections, exc as orm_exc
-orm_util = util.importlater("sqlalchemy.orm", "util")
-
-PASSIVE_NO_RESULT = util.symbol('PASSIVE_NO_RESULT',
-"""Symbol returned by a loader callable or other attribute/history
-retrieval operation when a value could not be determined, based
-on loader callable flags.
-"""
-)
-
-ATTR_WAS_SET = util.symbol('ATTR_WAS_SET',
-"""Symbol returned by a loader callable to indicate the
-retrieved value, or values, were assigned to their attributes
-on the target object.
-""")
-
-ATTR_EMPTY = util.symbol('ATTR_EMPTY',
-"""Symbol used internally to indicate an attribute had no callable.
-""")
-
-NO_VALUE = util.symbol('NO_VALUE',
-"""Symbol which may be placed as the 'previous' value of an attribute,
-indicating no value was loaded for an attribute when it was modified,
-and flags indicated we were not to load it.
-"""
-)
-
-NEVER_SET = util.symbol('NEVER_SET',
-"""Symbol which may be placed as the 'previous' value of an attribute
-indicating that the attribute had not been assigned to previously.
-"""
-)
-
-NO_CHANGE = util.symbol("NO_CHANGE",
-"""No callables or SQL should be emitted on attribute access
-and no state should change""", canonical=0
-)
-
-CALLABLES_OK = util.symbol("CALLABLES_OK",
-"""Loader callables can be fired off if a value
-is not present.""", canonical=1
-)
-
-SQL_OK = util.symbol("SQL_OK",
-"""Loader callables can emit SQL at least on scalar value
-attributes.""", canonical=2)
-
-RELATED_OBJECT_OK = util.symbol("RELATED_OBJECT_OK",
-"""callables can use SQL to load related objects as well
-as scalar value attributes.
-""", canonical=4
-)
-
-INIT_OK = util.symbol("INIT_OK",
-"""Attributes should be initialized with a blank
-value (None or an empty collection) upon get, if no other
-value can be obtained.
-""", canonical=8
-)
-
-NON_PERSISTENT_OK = util.symbol("NON_PERSISTENT_OK",
-"""callables can be emitted if the parent is not persistent.""",
-canonical=16
-)
-
-LOAD_AGAINST_COMMITTED = util.symbol("LOAD_AGAINST_COMMITTED",
-"""callables should use committed values as primary/foreign keys during a load
-""", canonical=32
-)
-
-# pre-packaged sets of flags used as inputs
-PASSIVE_OFF = util.symbol("PASSIVE_OFF",
- "Callables can be emitted in all cases.",
- canonical=(RELATED_OBJECT_OK | NON_PERSISTENT_OK |
- INIT_OK | CALLABLES_OK | SQL_OK)
-)
-PASSIVE_RETURN_NEVER_SET = util.symbol("PASSIVE_RETURN_NEVER_SET",
- """PASSIVE_OFF ^ INIT_OK""",
- canonical=PASSIVE_OFF ^ INIT_OK
-)
-PASSIVE_NO_INITIALIZE = util.symbol("PASSIVE_NO_INITIALIZE",
- "PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK",
- canonical=PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK
-)
-PASSIVE_NO_FETCH = util.symbol("PASSIVE_NO_FETCH",
- "PASSIVE_OFF ^ SQL_OK",
- canonical=PASSIVE_OFF ^ SQL_OK
-)
-PASSIVE_NO_FETCH_RELATED = util.symbol("PASSIVE_NO_FETCH_RELATED",
- "PASSIVE_OFF ^ RELATED_OBJECT_OK",
- canonical=PASSIVE_OFF ^ RELATED_OBJECT_OK
-)
-PASSIVE_ONLY_PERSISTENT = util.symbol("PASSIVE_ONLY_PERSISTENT",
- "PASSIVE_OFF ^ NON_PERSISTENT_OK",
- canonical=PASSIVE_OFF ^ NON_PERSISTENT_OK
-)
+from .base import instance_state, instance_dict, manager_of_class
+from .base import PASSIVE_NO_RESULT, ATTR_WAS_SET, ATTR_EMPTY, NO_VALUE,\
+ NEVER_SET, NO_CHANGE, CALLABLES_OK, SQL_OK, RELATED_OBJECT_OK,\
+ INIT_OK, NON_PERSISTENT_OK, LOAD_AGAINST_COMMITTED, PASSIVE_OFF,\
+ PASSIVE_RETURN_NEVER_SET, PASSIVE_NO_INITIALIZE, PASSIVE_NO_FETCH,\
+ PASSIVE_NO_FETCH_RELATED, PASSIVE_ONLY_PERSISTENT
+from .base import state_str, instance_str
+@inspection._self_inspects
class QueryableAttribute(interfaces._MappedAttribute,
interfaces._InspectionAttr,
interfaces.PropComparator):
@@ -159,9 +69,6 @@ class QueryableAttribute(interfaces._MappedAttribute,
if key in base:
self.dispatch._update(base[key].dispatch)
- dispatch = event.dispatcher(events.AttributeEvents)
- dispatch.dispatch_cls._active_history = False
-
@util.memoized_property
def _supports_population(self):
return self.impl.supports_population
@@ -236,6 +143,18 @@ class QueryableAttribute(interfaces._MappedAttribute,
def __clause_element__(self):
return self.comparator.__clause_element__()
+ def _query_clause_element(self):
+ """like __clause_element__(), but called specifically
+ by :class:`.Query` to allow special behavior."""
+
+ return self.comparator._query_clause_element()
+
+ def adapt_to_entity(self, adapt_to_entity):
+ assert not self._of_type
+ return self.__class__(adapt_to_entity.entity, self.key, impl=self.impl,
+ comparator=self.comparator.adapt_to_entity(adapt_to_entity),
+ parententity=adapt_to_entity)
+
def of_type(self, cls):
return QueryableAttribute(
self.class_,
@@ -246,7 +165,7 @@ class QueryableAttribute(interfaces._MappedAttribute,
of_type=cls)
def label(self, name):
- return self.__clause_element__().label(name)
+ return self._query_clause_element().label(name)
def operate(self, op, *other, **kwargs):
return op(self.comparator, *other, **kwargs)
@@ -286,8 +205,6 @@ class QueryableAttribute(interfaces._MappedAttribute,
"""
return self.comparator.property
-inspection._self_inspects(QueryableAttribute)
-
class InstrumentedAttribute(QueryableAttribute):
"""Class bound instrumented attribute which adds basic
@@ -359,7 +276,7 @@ def create_proxied_attribute(descriptor):
return self._comparator
def adapt_to_entity(self, adapt_to_entity):
- return self.__class__(self.class_, self.key, self.descriptor,
+ return self.__class__(adapt_to_entity.entity, self.key, self.descriptor,
self._comparator,
adapt_to_entity)
@@ -398,6 +315,53 @@ def create_proxied_attribute(descriptor):
from_instance=descriptor)
return Proxy
+OP_REMOVE = util.symbol("REMOVE")
+OP_APPEND = util.symbol("APPEND")
+OP_REPLACE = util.symbol("REPLACE")
+
+class Event(object):
+ """A token propagated throughout the course of a chain of attribute
+ events.
+
+ Serves as an indicator of the source of the event and also provides
+ a means of controlling propagation across a chain of attribute
+ operations.
+
+ The :class:`.Event` object is sent as the ``initiator`` argument
+ when dealing with the :meth:`.AttributeEvents.append`,
+ :meth:`.AttributeEvents.set`,
+ and :meth:`.AttributeEvents.remove` events.
+
+ The :class:`.Event` object is currently interpreted by the backref
+ event handlers, and is used to control the propagation of operations
+ across two mutually-dependent attributes.
+
+ .. versionadded:: 0.9.0
+
+ """
+
+ impl = None
+ """The :class:`.AttributeImpl` which is the current event initiator.
+ """
+
+ op = None
+ """The symbol :attr:`.OP_APPEND`, :attr:`.OP_REMOVE` or :attr:`.OP_REPLACE`,
+ indicating the source operation.
+
+ """
+
+ def __init__(self, attribute_impl, op):
+ self.impl = attribute_impl
+ self.op = op
+ self.parent_token = self.impl.parent_token
+
+
+ @property
+ def key(self):
+ return self.impl.key
+
+ def hasparent(self, state):
+ return self.impl.hasparent(state)
class AttributeImpl(object):
"""internal implementation for instrumented attributes."""
@@ -406,6 +370,7 @@ class AttributeImpl(object):
callable_, dispatch, trackparent=False, extension=None,
compare_function=None, active_history=False,
parent_token=None, expire_missing=True,
+ send_modified_events=True,
**kwargs):
"""Construct an AttributeImpl.
@@ -449,6 +414,10 @@ class AttributeImpl(object):
during state.expire_attributes(None), if no value is present
for this key.
+ send_modified_events
+ if False, the InstanceState._modified_event method will have no effect;
+ this means the attribute will never show up as changed in a
+ history entry.
"""
self.class_ = class_
self.key = key
@@ -456,6 +425,7 @@ class AttributeImpl(object):
self.dispatch = dispatch
self.trackparent = trackparent
self.parent_token = parent_token or self
+ self.send_modified_events = send_modified_events
if compare_function is None:
self.is_equal = operator.eq
else:
@@ -534,8 +504,8 @@ class AttributeImpl(object):
"but the parent record "
"has gone stale, can't be sure this "
"is the most recent parent." %
- (orm_util.state_str(state),
- orm_util.state_str(parent_state),
+ (state_str(state),
+ state_str(parent_state),
self.key))
return
@@ -588,7 +558,6 @@ class AttributeImpl(object):
def get(self, state, dict_, passive=PASSIVE_OFF):
"""Retrieve a value from the given object.
-
If a callable is assembled on this object's attribute, and
passive is False, the callable will be executed and the
resulting value will be set as the new value for this attribute.
@@ -683,19 +652,24 @@ class ScalarAttributeImpl(AttributeImpl):
old = dict_.get(self.key, NO_VALUE)
if self.dispatch.remove:
- self.fire_remove_event(state, dict_, old, None)
+ self.fire_remove_event(state, dict_, old, self._remove_token)
state._modified_event(dict_, self, old)
del dict_[self.key]
def get_history(self, state, dict_, passive=PASSIVE_OFF):
- return History.from_scalar_attribute(
- self, state, dict_.get(self.key, NO_VALUE))
+ if self.key in dict_:
+ return History.from_scalar_attribute(self, state, dict_[self.key])
+ else:
+ if passive & INIT_OK:
+ passive ^= INIT_OK
+ current = self.get(state, dict_, passive=passive)
+ if current is PASSIVE_NO_RESULT:
+ return HISTORY_BLANK
+ else:
+ return History.from_scalar_attribute(self, state, current)
def set(self, state, dict_, value, initiator,
passive=PASSIVE_OFF, check_old=None, pop=False):
- if initiator and initiator.parent_token is self.parent_token:
- return
-
if self.dispatch._active_history:
old = self.get(state, dict_, PASSIVE_RETURN_NEVER_SET)
else:
@@ -707,14 +681,26 @@ class ScalarAttributeImpl(AttributeImpl):
state._modified_event(dict_, self, old)
dict_[self.key] = value
+ @util.memoized_property
+ def _replace_token(self):
+ return Event(self, OP_REPLACE)
+
+ @util.memoized_property
+ def _append_token(self):
+ return Event(self, OP_REPLACE)
+
+ @util.memoized_property
+ def _remove_token(self):
+ return Event(self, OP_REMOVE)
+
def fire_replace_event(self, state, dict_, value, previous, initiator):
for fn in self.dispatch.set:
- value = fn(state, value, previous, initiator or self)
+ value = fn(state, value, previous, initiator or self._replace_token)
return value
def fire_remove_event(self, state, dict_, value, initiator):
for fn in self.dispatch.remove:
- fn(state, value, initiator or self)
+ fn(state, value, initiator or self._remove_token)
@property
def type(self):
@@ -736,7 +722,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
def delete(self, state, dict_):
old = self.get(state, dict_)
- self.fire_remove_event(state, dict_, old, self)
+ self.fire_remove_event(state, dict_, old, self._remove_token)
del dict_[self.key]
def get_history(self, state, dict_, passive=PASSIVE_OFF):
@@ -773,14 +759,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
passive=PASSIVE_OFF, check_old=None, pop=False):
"""Set a value on the given InstanceState.
- `initiator` is the ``InstrumentedAttribute`` that initiated the
- ``set()`` operation and is used to control the depth of a circular
- setter operation.
-
"""
- if initiator and initiator.parent_token is self.parent_token:
- return
-
if self.dispatch._active_history:
old = self.get(state, dict_, passive=PASSIVE_ONLY_PERSISTENT)
else:
@@ -794,19 +773,20 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
else:
raise ValueError(
"Object %s not associated with %s on attribute '%s'" % (
- orm_util.instance_str(check_old),
- orm_util.state_str(state),
+ instance_str(check_old),
+ state_str(state),
self.key
))
value = self.fire_replace_event(state, dict_, value, old, initiator)
dict_[self.key] = value
+
def fire_remove_event(self, state, dict_, value, initiator):
if self.trackparent and value is not None:
self.sethasparent(instance_state(value), state, False)
for fn in self.dispatch.remove:
- fn(state, value, initiator or self)
+ fn(state, value, initiator or self._remove_token)
state._modified_event(dict_, self, value)
@@ -818,7 +798,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
self.sethasparent(instance_state(previous), state, False)
for fn in self.dispatch.set:
- value = fn(state, value, previous, initiator or self)
+ value = fn(state, value, previous, initiator or self._replace_token)
state._modified_event(dict_, self, previous)
@@ -902,9 +882,17 @@ class CollectionAttributeImpl(AttributeImpl):
return [(instance_state(o), o) for o in current]
+ @util.memoized_property
+ def _append_token(self):
+ return Event(self, OP_APPEND)
+
+ @util.memoized_property
+ def _remove_token(self):
+ return Event(self, OP_REMOVE)
+
def fire_append_event(self, state, dict_, value, initiator):
for fn in self.dispatch.append:
- value = fn(state, value, initiator or self)
+ value = fn(state, value, initiator or self._append_token)
state._modified_event(dict_, self, NEVER_SET, True)
@@ -921,7 +909,7 @@ class CollectionAttributeImpl(AttributeImpl):
self.sethasparent(instance_state(value), state, False)
for fn in self.dispatch.remove:
- fn(state, value, initiator or self)
+ fn(state, value, initiator or self._remove_token)
state._modified_event(dict_, self, NEVER_SET, True)
@@ -948,8 +936,6 @@ class CollectionAttributeImpl(AttributeImpl):
self.key, state, self.collection_factory)
def append(self, state, dict_, value, initiator, passive=PASSIVE_OFF):
- if initiator and initiator.parent_token is self.parent_token:
- return
collection = self.get_collection(state, dict_, passive=passive)
if collection is PASSIVE_NO_RESULT:
value = self.fire_append_event(state, dict_, value, initiator)
@@ -960,9 +946,6 @@ class CollectionAttributeImpl(AttributeImpl):
collection.append_with_event(value, initiator)
def remove(self, state, dict_, value, initiator, passive=PASSIVE_OFF):
- if initiator and initiator.parent_token is self.parent_token:
- return
-
collection = self.get_collection(state, state.dict, passive=passive)
if collection is PASSIVE_NO_RESULT:
self.fire_remove_event(state, dict_, value, initiator)
@@ -985,14 +968,8 @@ class CollectionAttributeImpl(AttributeImpl):
passive=PASSIVE_OFF, pop=False):
"""Set a value on the given object.
- `initiator` is the ``InstrumentedAttribute`` that initiated the
- ``set()`` operation and is used to control the depth of a circular
- setter operation.
"""
- if initiator and initiator.parent_token is self.parent_token:
- return
-
self._set_iterable(
state, dict_, value,
lambda adapter, i: adapter.adapt_like_to_iterable(i))
@@ -1085,6 +1062,7 @@ def backref_listeners(attribute, key, uselist):
# use easily recognizable names for stack traces
parent_token = attribute.impl.parent_token
+ parent_impl = attribute.impl
def _acceptable_key_err(child_state, initiator, child_impl):
raise ValueError(
@@ -1092,7 +1070,7 @@ def backref_listeners(attribute, key, uselist):
'Passing object %s to attribute "%s" '
'triggers a modify event on attribute "%s" '
'via the backref "%s".' % (
- orm_util.state_str(child_state),
+ state_str(child_state),
initiator.parent_token,
child_impl.parent_token,
attribute.impl.parent_token
@@ -1108,10 +1086,14 @@ def backref_listeners(attribute, key, uselist):
old_state, old_dict = instance_state(oldchild),\
instance_dict(oldchild)
impl = old_state.manager[key].impl
- impl.pop(old_state,
- old_dict,
- state.obj(),
- initiator, passive=PASSIVE_NO_FETCH)
+
+ if initiator.impl is not impl or \
+ initiator.op not in (OP_REPLACE, OP_REMOVE):
+ impl.pop(old_state,
+ old_dict,
+ state.obj(),
+ parent_impl._append_token,
+ passive=PASSIVE_NO_FETCH)
if child is not None:
child_state, child_dict = instance_state(child),\
@@ -1120,12 +1102,14 @@ def backref_listeners(attribute, key, uselist):
if initiator.parent_token is not parent_token and \
initiator.parent_token is not child_impl.parent_token:
_acceptable_key_err(state, initiator, child_impl)
- child_impl.append(
- child_state,
- child_dict,
- state.obj(),
- initiator,
- passive=PASSIVE_NO_FETCH)
+ elif initiator.impl is not child_impl or \
+ initiator.op not in (OP_APPEND, OP_REPLACE):
+ child_impl.append(
+ child_state,
+ child_dict,
+ state.obj(),
+ initiator,
+ passive=PASSIVE_NO_FETCH)
return child
def emit_backref_from_collection_append_event(state, child, initiator):
@@ -1139,7 +1123,9 @@ def backref_listeners(attribute, key, uselist):
if initiator.parent_token is not parent_token and \
initiator.parent_token is not child_impl.parent_token:
_acceptable_key_err(state, initiator, child_impl)
- child_impl.append(
+ elif initiator.impl is not child_impl or \
+ initiator.op not in (OP_APPEND, OP_REPLACE):
+ child_impl.append(
child_state,
child_dict,
state.obj(),
@@ -1152,10 +1138,9 @@ def backref_listeners(attribute, key, uselist):
child_state, child_dict = instance_state(child),\
instance_dict(child)
child_impl = child_state.manager[key].impl
- # can't think of a path that would produce an initiator
- # mismatch here, as it would require an existing collection
- # mismatch.
- child_impl.pop(
+ if initiator.impl is not child_impl or \
+ initiator.op not in (OP_REMOVE, OP_REPLACE):
+ child_impl.pop(
child_state,
child_dict,
state.obj(),
@@ -1268,7 +1253,7 @@ class History(History):
original = state.committed_state.get(attribute.key, _NO_HISTORY)
if original is _NO_HISTORY:
- if current is NO_VALUE:
+ if current is NEVER_SET:
return cls((), (), ())
else:
return cls((), [current], ())
@@ -1285,7 +1270,7 @@ class History(History):
deleted = ()
else:
deleted = [original]
- if current is NO_VALUE:
+ if current is NEVER_SET:
return cls((), (), deleted)
else:
return cls([current], (), deleted)
diff --git a/lib/sqlalchemy/orm/base.py b/lib/sqlalchemy/orm/base.py
new file mode 100644
index 000000000..577f9ff76
--- /dev/null
+++ b/lib/sqlalchemy/orm/base.py
@@ -0,0 +1,453 @@
+# orm/base.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Constants and rudimental functions used throughout the ORM.
+
+"""
+
+from .. import util, inspection, exc as sa_exc
+from ..sql import expression
+from . import exc
+import operator
+
+PASSIVE_NO_RESULT = util.symbol('PASSIVE_NO_RESULT',
+"""Symbol returned by a loader callable or other attribute/history
+retrieval operation when a value could not be determined, based
+on loader callable flags.
+"""
+)
+
+ATTR_WAS_SET = util.symbol('ATTR_WAS_SET',
+"""Symbol returned by a loader callable to indicate the
+retrieved value, or values, were assigned to their attributes
+on the target object.
+""")
+
+ATTR_EMPTY = util.symbol('ATTR_EMPTY',
+"""Symbol used internally to indicate an attribute had no callable.
+""")
+
+NO_VALUE = util.symbol('NO_VALUE',
+"""Symbol which may be placed as the 'previous' value of an attribute,
+indicating no value was loaded for an attribute when it was modified,
+and flags indicated we were not to load it.
+"""
+)
+
+NEVER_SET = util.symbol('NEVER_SET',
+"""Symbol which may be placed as the 'previous' value of an attribute
+indicating that the attribute had not been assigned to previously.
+"""
+)
+
+NO_CHANGE = util.symbol("NO_CHANGE",
+"""No callables or SQL should be emitted on attribute access
+and no state should change""", canonical=0
+)
+
+CALLABLES_OK = util.symbol("CALLABLES_OK",
+"""Loader callables can be fired off if a value
+is not present.""", canonical=1
+)
+
+SQL_OK = util.symbol("SQL_OK",
+"""Loader callables can emit SQL at least on scalar value
+attributes.""", canonical=2)
+
+RELATED_OBJECT_OK = util.symbol("RELATED_OBJECT_OK",
+"""callables can use SQL to load related objects as well
+as scalar value attributes.
+""", canonical=4
+)
+
+INIT_OK = util.symbol("INIT_OK",
+"""Attributes should be initialized with a blank
+value (None or an empty collection) upon get, if no other
+value can be obtained.
+""", canonical=8
+)
+
+NON_PERSISTENT_OK = util.symbol("NON_PERSISTENT_OK",
+"""callables can be emitted if the parent is not persistent.""",
+canonical=16
+)
+
+LOAD_AGAINST_COMMITTED = util.symbol("LOAD_AGAINST_COMMITTED",
+"""callables should use committed values as primary/foreign keys during a load
+""", canonical=32
+)
+
+# pre-packaged sets of flags used as inputs
+PASSIVE_OFF = util.symbol("PASSIVE_OFF",
+ "Callables can be emitted in all cases.",
+ canonical=(RELATED_OBJECT_OK | NON_PERSISTENT_OK |
+ INIT_OK | CALLABLES_OK | SQL_OK)
+)
+PASSIVE_RETURN_NEVER_SET = util.symbol("PASSIVE_RETURN_NEVER_SET",
+ """PASSIVE_OFF ^ INIT_OK""",
+ canonical=PASSIVE_OFF ^ INIT_OK
+)
+PASSIVE_NO_INITIALIZE = util.symbol("PASSIVE_NO_INITIALIZE",
+ "PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK",
+ canonical=PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK
+)
+PASSIVE_NO_FETCH = util.symbol("PASSIVE_NO_FETCH",
+ "PASSIVE_OFF ^ SQL_OK",
+ canonical=PASSIVE_OFF ^ SQL_OK
+)
+PASSIVE_NO_FETCH_RELATED = util.symbol("PASSIVE_NO_FETCH_RELATED",
+ "PASSIVE_OFF ^ RELATED_OBJECT_OK",
+ canonical=PASSIVE_OFF ^ RELATED_OBJECT_OK
+)
+PASSIVE_ONLY_PERSISTENT = util.symbol("PASSIVE_ONLY_PERSISTENT",
+ "PASSIVE_OFF ^ NON_PERSISTENT_OK",
+ canonical=PASSIVE_OFF ^ NON_PERSISTENT_OK
+)
+
+DEFAULT_MANAGER_ATTR = '_sa_class_manager'
+DEFAULT_STATE_ATTR = '_sa_instance_state'
+_INSTRUMENTOR = ('mapper', 'instrumentor')
+
+EXT_CONTINUE = util.symbol('EXT_CONTINUE')
+EXT_STOP = util.symbol('EXT_STOP')
+
+ONETOMANY = util.symbol('ONETOMANY',
+"""Indicates the one-to-many direction for a :func:`.relationship`.
+
+This symbol is typically used by the internals but may be exposed within
+certain API features.
+
+""")
+
+MANYTOONE = util.symbol('MANYTOONE',
+"""Indicates the many-to-one direction for a :func:`.relationship`.
+
+This symbol is typically used by the internals but may be exposed within
+certain API features.
+
+""")
+
+MANYTOMANY = util.symbol('MANYTOMANY',
+"""Indicates the many-to-many direction for a :func:`.relationship`.
+
+This symbol is typically used by the internals but may be exposed within
+certain API features.
+
+""")
+
+NOT_EXTENSION = util.symbol('NOT_EXTENSION',
+"""Symbol indicating an :class:`_InspectionAttr` that's
+ not part of sqlalchemy.ext.
+
+ Is assigned to the :attr:`._InspectionAttr.extension_type`
+ attibute.
+
+""")
+
+_none_set = frozenset([None])
+
+
+def _generative(*assertions):
+ """Mark a method as generative, e.g. method-chained."""
+
+ @util.decorator
+ def generate(fn, *args, **kw):
+ self = args[0]._clone()
+ for assertion in assertions:
+ assertion(self, fn.__name__)
+ fn(self, *args[1:], **kw)
+ return self
+ return generate
+
+
+# these can be replaced by sqlalchemy.ext.instrumentation
+# if augmented class instrumentation is enabled.
+def manager_of_class(cls):
+ return cls.__dict__.get(DEFAULT_MANAGER_ATTR, None)
+
+instance_state = operator.attrgetter(DEFAULT_STATE_ATTR)
+
+instance_dict = operator.attrgetter('__dict__')
+
+def instance_str(instance):
+ """Return a string describing an instance."""
+
+ return state_str(instance_state(instance))
+
+def state_str(state):
+ """Return a string describing an instance via its InstanceState."""
+
+ if state is None:
+ return "None"
+ else:
+ return '<%s at 0x%x>' % (state.class_.__name__, id(state.obj()))
+
+def state_class_str(state):
+ """Return a string describing an instance's class via its InstanceState."""
+
+ if state is None:
+ return "None"
+ else:
+ return '<%s>' % (state.class_.__name__, )
+
+
+def attribute_str(instance, attribute):
+ return instance_str(instance) + "." + attribute
+
+
+def state_attribute_str(state, attribute):
+ return state_str(state) + "." + attribute
+
+def object_mapper(instance):
+ """Given an object, return the primary Mapper associated with the object
+ instance.
+
+ Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError`
+ if no mapping is configured.
+
+ This function is available via the inspection system as::
+
+ inspect(instance).mapper
+
+ Using the inspection system will raise
+ :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is
+ not part of a mapping.
+
+ """
+ return object_state(instance).mapper
+
+
+def object_state(instance):
+ """Given an object, return the :class:`.InstanceState`
+ associated with the object.
+
+ Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError`
+ if no mapping is configured.
+
+ Equivalent functionality is available via the :func:`.inspect`
+ function as::
+
+ inspect(instance)
+
+ Using the inspection system will raise
+ :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is
+ not part of a mapping.
+
+ """
+ state = _inspect_mapped_object(instance)
+ if state is None:
+ raise exc.UnmappedInstanceError(instance)
+ else:
+ return state
+
+
+@inspection._inspects(object)
+def _inspect_mapped_object(instance):
+ try:
+ return instance_state(instance)
+ # TODO: whats the py-2/3 syntax to catch two
+ # different kinds of exceptions at once ?
+ except exc.UnmappedClassError:
+ return None
+ except exc.NO_STATE:
+ return None
+
+
+
+def _class_to_mapper(class_or_mapper):
+ insp = inspection.inspect(class_or_mapper, False)
+ if insp is not None:
+ return insp.mapper
+ else:
+ raise exc.UnmappedClassError(class_or_mapper)
+
+
+def _mapper_or_none(entity):
+ """Return the :class:`.Mapper` for the given class or None if the
+ class is not mapped."""
+
+ insp = inspection.inspect(entity, False)
+ if insp is not None:
+ return insp.mapper
+ else:
+ return None
+
+
+def _is_mapped_class(entity):
+ """Return True if the given object is a mapped class,
+ :class:`.Mapper`, or :class:`.AliasedClass`."""
+
+ insp = inspection.inspect(entity, False)
+ return insp is not None and \
+ hasattr(insp, "mapper") and \
+ (
+ insp.is_mapper
+ or insp.is_aliased_class
+ )
+
+def _attr_as_key(attr):
+ if hasattr(attr, 'key'):
+ return attr.key
+ else:
+ return expression._column_as_key(attr)
+
+
+
+def _orm_columns(entity):
+ insp = inspection.inspect(entity, False)
+ if hasattr(insp, 'selectable'):
+ return [c for c in insp.selectable.c]
+ else:
+ return [entity]
+
+
+
+def _is_aliased_class(entity):
+ insp = inspection.inspect(entity, False)
+ return insp is not None and \
+ getattr(insp, "is_aliased_class", False)
+
+
+def _entity_descriptor(entity, key):
+ """Return a class attribute given an entity and string name.
+
+ May return :class:`.InstrumentedAttribute` or user-defined
+ attribute.
+
+ """
+ insp = inspection.inspect(entity)
+ if insp.is_selectable:
+ description = entity
+ entity = insp.c
+ elif insp.is_aliased_class:
+ entity = insp.entity
+ description = entity
+ elif hasattr(insp, "mapper"):
+ description = entity = insp.mapper.class_
+ else:
+ description = entity
+
+ try:
+ return getattr(entity, key)
+ except AttributeError:
+ raise sa_exc.InvalidRequestError(
+ "Entity '%s' has no property '%s'" %
+ (description, key)
+ )
+
+_state_mapper = util.dottedgetter('manager.mapper')
+
+@inspection._inspects(type)
+def _inspect_mapped_class(class_, configure=False):
+ try:
+ class_manager = manager_of_class(class_)
+ if not class_manager.is_mapped:
+ return None
+ mapper = class_manager.mapper
+ if configure and mapper._new_mappers:
+ mapper._configure_all()
+ return mapper
+
+ except exc.NO_STATE:
+ return None
+
+def class_mapper(class_, configure=True):
+ """Given a class, return the primary :class:`.Mapper` associated
+ with the key.
+
+ Raises :exc:`.UnmappedClassError` if no mapping is configured
+ on the given class, or :exc:`.ArgumentError` if a non-class
+ object is passed.
+
+ Equivalent functionality is available via the :func:`.inspect`
+ function as::
+
+ inspect(some_mapped_class)
+
+ Using the inspection system will raise
+ :class:`sqlalchemy.exc.NoInspectionAvailable` if the class is not mapped.
+
+ """
+ mapper = _inspect_mapped_class(class_, configure=configure)
+ if mapper is None:
+ if not isinstance(class_, type):
+ raise sa_exc.ArgumentError(
+ "Class object expected, got '%r'." % (class_, ))
+ raise exc.UnmappedClassError(class_)
+ else:
+ return mapper
+
+
+class _InspectionAttr(object):
+ """A base class applied to all ORM objects that can be returned
+ by the :func:`.inspect` function.
+
+ The attributes defined here allow the usage of simple boolean
+ checks to test basic facts about the object returned.
+
+ While the boolean checks here are basically the same as using
+ the Python isinstance() function, the flags here can be used without
+ the need to import all of these classes, and also such that
+ the SQLAlchemy class system can change while leaving the flags
+ here intact for forwards-compatibility.
+
+ """
+
+ is_selectable = False
+ """Return True if this object is an instance of :class:`.Selectable`."""
+
+ is_aliased_class = False
+ """True if this object is an instance of :class:`.AliasedClass`."""
+
+ is_instance = False
+ """True if this object is an instance of :class:`.InstanceState`."""
+
+ is_mapper = False
+ """True if this object is an instance of :class:`.Mapper`."""
+
+ is_property = False
+ """True if this object is an instance of :class:`.MapperProperty`."""
+
+ is_attribute = False
+ """True if this object is a Python :term:`descriptor`.
+
+ This can refer to one of many types. Usually a
+ :class:`.QueryableAttribute` which handles attributes events on behalf
+ of a :class:`.MapperProperty`. But can also be an extension type
+ such as :class:`.AssociationProxy` or :class:`.hybrid_property`.
+ The :attr:`._InspectionAttr.extension_type` will refer to a constant
+ identifying the specific subtype.
+
+ .. seealso::
+
+ :attr:`.Mapper.all_orm_descriptors`
+
+ """
+
+ is_clause_element = False
+ """True if this object is an instance of :class:`.ClauseElement`."""
+
+ extension_type = NOT_EXTENSION
+ """The extension type, if any.
+ Defaults to :data:`.interfaces.NOT_EXTENSION`
+
+ .. versionadded:: 0.8.0
+
+ .. seealso::
+
+ :data:`.HYBRID_METHOD`
+
+ :data:`.HYBRID_PROPERTY`
+
+ :data:`.ASSOCIATION_PROXY`
+
+ """
+
+class _MappedAttribute(object):
+ """Mixin for attributes which should be replaced by mapper-assigned
+ attributes.
+
+ """
diff --git a/lib/sqlalchemy/orm/collections.py b/lib/sqlalchemy/orm/collections.py
index 03917d112..87e351b6c 100644
--- a/lib/sqlalchemy/orm/collections.py
+++ b/lib/sqlalchemy/orm/collections.py
@@ -1,5 +1,5 @@
# orm/collections.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -108,8 +108,7 @@ import weakref
from ..sql import expression
from .. import util, exc as sa_exc
-orm_util = util.importlater("sqlalchemy.orm", "util")
-attributes = util.importlater("sqlalchemy.orm", "attributes")
+from . import base
__all__ = ['collection', 'collection_adapter',
@@ -139,8 +138,8 @@ class _PlainColumnGetter(object):
return self.cols
def __call__(self, value):
- state = attributes.instance_state(value)
- m = orm_util._state_mapper(state)
+ state = base.instance_state(value)
+ m = base._state_mapper(state)
key = [
m._get_state_attr_by_column(state, state.dict, col)
@@ -167,8 +166,8 @@ class _SerializableColumnGetter(object):
return _SerializableColumnGetter, (self.colkeys,)
def __call__(self, value):
- state = attributes.instance_state(value)
- m = orm_util._state_mapper(state)
+ state = base.instance_state(value)
+ m = base._state_mapper(state)
key = [m._get_state_attr_by_column(
state, state.dict,
m.mapped_table.columns[k])
@@ -352,7 +351,7 @@ class collection(object):
promulgation to collection events.
"""
- setattr(fn, '_sa_instrument_role', 'appender')
+ fn._sa_instrument_role = 'appender'
return fn
@staticmethod
@@ -379,7 +378,7 @@ class collection(object):
promulgation to collection events.
"""
- setattr(fn, '_sa_instrument_role', 'remover')
+ fn._sa_instrument_role = 'remover'
return fn
@staticmethod
@@ -393,7 +392,7 @@ class collection(object):
def __iter__(self): ...
"""
- setattr(fn, '_sa_instrument_role', 'iterator')
+ fn._sa_instrument_role = 'iterator'
return fn
@staticmethod
@@ -414,7 +413,7 @@ class collection(object):
def extend(self, items): ...
"""
- setattr(fn, '_sa_instrumented', True)
+ fn._sa_instrumented = True
return fn
@staticmethod
@@ -428,7 +427,7 @@ class collection(object):
that has been linked, or None if unlinking.
"""
- setattr(fn, '_sa_instrument_role', 'linker')
+ fn._sa_instrument_role = 'linker'
return fn
link = linker
@@ -464,7 +463,7 @@ class collection(object):
validation on the values about to be assigned.
"""
- setattr(fn, '_sa_instrument_role', 'converter')
+ fn._sa_instrument_role = 'converter'
return fn
@staticmethod
@@ -484,7 +483,7 @@ class collection(object):
"""
def decorator(fn):
- setattr(fn, '_sa_instrument_before', ('fire_append_event', arg))
+ fn._sa_instrument_before = ('fire_append_event', arg)
return fn
return decorator
@@ -504,8 +503,8 @@ class collection(object):
"""
def decorator(fn):
- setattr(fn, '_sa_instrument_before', ('fire_append_event', arg))
- setattr(fn, '_sa_instrument_after', 'fire_remove_event')
+ fn._sa_instrument_before = ('fire_append_event', arg)
+ fn._sa_instrument_after = 'fire_remove_event'
return fn
return decorator
@@ -526,7 +525,7 @@ class collection(object):
"""
def decorator(fn):
- setattr(fn, '_sa_instrument_before', ('fire_remove_event', arg))
+ fn._sa_instrument_before = ('fire_remove_event', arg)
return fn
return decorator
@@ -546,32 +545,13 @@ class collection(object):
"""
def decorator(fn):
- setattr(fn, '_sa_instrument_after', 'fire_remove_event')
+ fn._sa_instrument_after = 'fire_remove_event'
return fn
return decorator
-# public instrumentation interface for 'internally instrumented'
-# implementations
-def collection_adapter(collection):
- """Fetch the :class:`.CollectionAdapter` for a collection."""
-
- return getattr(collection, '_sa_adapter', None)
-
-
-def collection_iter(collection):
- """Iterate over an object supporting the @iterator or __iter__ protocols.
-
- If the collection is an ORM collection, it need not be attached to an
- object to be iterable.
-
- """
- try:
- return getattr(collection, '_sa_iterator',
- getattr(collection, '__iter__'))()
- except AttributeError:
- raise TypeError("'%s' object is not iterable" %
- type(collection).__name__)
+collection_adapter = operator.attrgetter('_sa_adapter')
+"""Fetch the :class:`.CollectionAdapter` for a collection."""
class CollectionAdapter(object):
@@ -584,8 +564,6 @@ class CollectionAdapter(object):
The ORM uses :class:`.CollectionAdapter` exclusively for interaction with
entity collections.
- The usage of getattr()/setattr() is currently to allow injection
- of custom methods, such as to unwrap Zope security proxies.
"""
invalidated = False
@@ -609,16 +587,19 @@ class CollectionAdapter(object):
return self.owner_state.manager[self._key].impl
def link_to_self(self, data):
- """Link a collection to this adapter, and fire a link event."""
- setattr(data, '_sa_adapter', self)
- if hasattr(data, '_sa_linker'):
- getattr(data, '_sa_linker')(self)
+ """Link a collection to this adapter"""
+
+ data._sa_adapter = self
+ if data._sa_linker:
+ data._sa_linker(self)
+
def unlink(self, data):
- """Unlink a collection from any adapter, and fire a link event."""
- setattr(data, '_sa_adapter', None)
- if hasattr(data, '_sa_linker'):
- getattr(data, '_sa_linker')(None)
+ """Unlink a collection from any adapter"""
+
+ del data._sa_adapter
+ if data._sa_linker:
+ data._sa_linker(None)
def adapt_like_to_iterable(self, obj):
"""Converts collection-compatible objects to an iterable of values.
@@ -634,7 +615,7 @@ class CollectionAdapter(object):
a default duck-typing-based implementation is used.
"""
- converter = getattr(self._data(), '_sa_converter', None)
+ converter = self._data()._sa_converter
if converter is not None:
return converter(obj)
@@ -655,60 +636,60 @@ class CollectionAdapter(object):
# If the object is an adapted collection, return the (iterable)
# adapter.
if getattr(obj, '_sa_adapter', None) is not None:
- return getattr(obj, '_sa_adapter')
+ return obj._sa_adapter
elif setting_type == dict:
if util.py3k:
return obj.values()
else:
- return getattr(obj, 'itervalues', getattr(obj, 'values'))()
+ return getattr(obj, 'itervalues', obj.values)()
else:
return iter(obj)
def append_with_event(self, item, initiator=None):
"""Add an entity to the collection, firing mutation events."""
- getattr(self._data(), '_sa_appender')(item, _sa_initiator=initiator)
+ self._data()._sa_appender(item, _sa_initiator=initiator)
def append_without_event(self, item):
"""Add or restore an entity to the collection, firing no events."""
- getattr(self._data(), '_sa_appender')(item, _sa_initiator=False)
+ self._data()._sa_appender(item, _sa_initiator=False)
def append_multiple_without_event(self, items):
"""Add or restore an entity to the collection, firing no events."""
- appender = getattr(self._data(), '_sa_appender')
+ appender = self._data()._sa_appender
for item in items:
appender(item, _sa_initiator=False)
def remove_with_event(self, item, initiator=None):
"""Remove an entity from the collection, firing mutation events."""
- getattr(self._data(), '_sa_remover')(item, _sa_initiator=initiator)
+ self._data()._sa_remover(item, _sa_initiator=initiator)
def remove_without_event(self, item):
"""Remove an entity from the collection, firing no events."""
- getattr(self._data(), '_sa_remover')(item, _sa_initiator=False)
+ self._data()._sa_remover(item, _sa_initiator=False)
def clear_with_event(self, initiator=None):
"""Empty the collection, firing a mutation event for each entity."""
- remover = getattr(self._data(), '_sa_remover')
+ remover = self._data()._sa_remover
for item in list(self):
remover(item, _sa_initiator=initiator)
def clear_without_event(self):
"""Empty the collection, firing no events."""
- remover = getattr(self._data(), '_sa_remover')
+ remover = self._data()._sa_remover
for item in list(self):
remover(item, _sa_initiator=False)
def __iter__(self):
"""Iterate over entities in the collection."""
- return iter(getattr(self._data(), '_sa_iterator')())
+ return iter(self._data()._sa_iterator())
def __len__(self):
"""Count entities in the collection."""
- return len(list(getattr(self._data(), '_sa_iterator')()))
+ return len(list(self._data()._sa_iterator()))
def __bool__(self):
return True
@@ -960,7 +941,12 @@ def _instrument_class(cls):
for role, method_name in roles.items():
setattr(cls, '_sa_%s' % role, getattr(cls, method_name))
- setattr(cls, '_sa_instrumented', id(cls))
+ cls._sa_adapter = None
+ if not hasattr(cls, '_sa_linker'):
+ cls._sa_linker = None
+ if not hasattr(cls, '_sa_converter'):
+ cls._sa_converter = None
+ cls._sa_instrumented = id(cls)
def _instrument_membership_mutator(method, before, argument, after):
@@ -999,7 +985,7 @@ def _instrument_membership_mutator(method, before, argument, after):
if initiator is False:
executor = None
else:
- executor = getattr(args[0], '_sa_adapter', None)
+ executor = args[0]._sa_adapter
if before and executor:
getattr(executor, before)(value, initiator)
@@ -1024,33 +1010,33 @@ def __set(collection, item, _sa_initiator=None):
"""Run set events, may eventually be inlined into decorators."""
if _sa_initiator is not False:
- executor = getattr(collection, '_sa_adapter', None)
+ executor = collection._sa_adapter
if executor:
- item = getattr(executor, 'fire_append_event')(item, _sa_initiator)
+ item = executor.fire_append_event(item, _sa_initiator)
return item
def __del(collection, item, _sa_initiator=None):
"""Run del events, may eventually be inlined into decorators."""
if _sa_initiator is not False:
- executor = getattr(collection, '_sa_adapter', None)
+ executor = collection._sa_adapter
if executor:
- getattr(executor, 'fire_remove_event')(item, _sa_initiator)
+ executor.fire_remove_event(item, _sa_initiator)
def __before_delete(collection, _sa_initiator=None):
"""Special method to run 'commit existing value' methods"""
- executor = getattr(collection, '_sa_adapter', None)
+ executor = collection._sa_adapter
if executor:
- getattr(executor, 'fire_pre_remove_event')(_sa_initiator)
+ executor.fire_pre_remove_event(_sa_initiator)
def _list_decorators():
"""Tailored instrumentation wrappers for any list-like class."""
def _tidy(fn):
- setattr(fn, '_sa_instrumented', True)
- fn.__doc__ = getattr(getattr(list, fn.__name__), '__doc__')
+ fn._sa_instrumented = True
+ fn.__doc__ = getattr(list, fn.__name__).__doc__
def append(fn):
def append(self, item, _sa_initiator=None):
@@ -1089,7 +1075,10 @@ def _list_decorators():
start = index.start or 0
if start < 0:
start += len(self)
- stop = index.stop or len(self)
+ if index.stop is not None:
+ stop = index.stop
+ else:
+ stop = len(self)
if stop < 0:
stop += len(self)
@@ -1172,6 +1161,15 @@ def _list_decorators():
_tidy(pop)
return pop
+ if not util.py2k:
+ def clear(fn):
+ def clear(self, index=-1):
+ for item in self:
+ __del(self, item)
+ fn(self)
+ _tidy(clear)
+ return clear
+
# __imul__ : not wrapping this. all members of the collection are already
# present, so no need to fire appends... wrapping it with an explicit
# decorator is still possible, so events on *= can be had if they're
@@ -1186,8 +1184,8 @@ def _dict_decorators():
"""Tailored instrumentation wrappers for any dict-like mapping class."""
def _tidy(fn):
- setattr(fn, '_sa_instrumented', True)
- fn.__doc__ = getattr(getattr(dict, fn.__name__), '__doc__')
+ fn._sa_instrumented = True
+ fn.__doc__ = getattr(dict, fn.__name__).__doc__
Unspecified = util.symbol('Unspecified')
@@ -1288,8 +1286,8 @@ def _set_decorators():
"""Tailored instrumentation wrappers for any set-like class."""
def _tidy(fn):
- setattr(fn, '_sa_instrumented', True)
- fn.__doc__ = getattr(getattr(set, fn.__name__), '__doc__')
+ fn._sa_instrumented = True
+ fn.__doc__ = getattr(set, fn.__name__).__doc__
Unspecified = util.symbol('Unspecified')
diff --git a/lib/sqlalchemy/orm/dependency.py b/lib/sqlalchemy/orm/dependency.py
index 9f1e497af..4709a1821 100644
--- a/lib/sqlalchemy/orm/dependency.py
+++ b/lib/sqlalchemy/orm/dependency.py
@@ -1,5 +1,5 @@
# orm/dependency.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/orm/deprecated_interfaces.py b/lib/sqlalchemy/orm/deprecated_interfaces.py
index e50967253..020b7c718 100644
--- a/lib/sqlalchemy/orm/deprecated_interfaces.py
+++ b/lib/sqlalchemy/orm/deprecated_interfaces.py
@@ -1,5 +1,5 @@
# orm/deprecated_interfaces.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -7,7 +7,7 @@
from .. import event, util
from .interfaces import EXT_CONTINUE
-
+@util.langhelpers.dependency_for("sqlalchemy.orm.interfaces")
class MapperExtension(object):
"""Base implementation for :class:`.Mapper` event hooks.
@@ -374,6 +374,7 @@ class MapperExtension(object):
return EXT_CONTINUE
+@util.langhelpers.dependency_for("sqlalchemy.orm.interfaces")
class SessionExtension(object):
"""Base implementation for :class:`.Session` event hooks.
@@ -494,6 +495,7 @@ class SessionExtension(object):
"""
+@util.langhelpers.dependency_for("sqlalchemy.orm.interfaces")
class AttributeExtension(object):
"""Base implementation for :class:`.AttributeImpl` event hooks, events
that fire upon attribute mutations in user code.
diff --git a/lib/sqlalchemy/orm/descriptor_props.py b/lib/sqlalchemy/orm/descriptor_props.py
index c58951339..24b0a15e6 100644
--- a/lib/sqlalchemy/orm/descriptor_props.py
+++ b/lib/sqlalchemy/orm/descriptor_props.py
@@ -1,5 +1,5 @@
# orm/descriptor_props.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -12,10 +12,11 @@ as actively in the load/persist ORM loop.
from .interfaces import MapperProperty, PropComparator
from .util import _none_set
-from . import attributes, strategies
+from . import attributes
from .. import util, sql, exc as sa_exc, event, schema
from ..sql import expression
-properties = util.importlater('sqlalchemy.orm', 'properties')
+from . import properties
+from . import query
class DescriptorProperty(MapperProperty):
@@ -75,6 +76,7 @@ class DescriptorProperty(MapperProperty):
mapper.class_manager.instrument_attribute(self.key, proxy_attr)
+@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
class CompositeProperty(DescriptorProperty):
"""Defines a "composite" mapped attribute, representing a collection
of columns as one attribute.
@@ -82,12 +84,64 @@ class CompositeProperty(DescriptorProperty):
:class:`.CompositeProperty` is constructed using the :func:`.composite`
function.
- See also:
+ .. seealso::
- :ref:`mapper_composite`
+ :ref:`mapper_composite`
"""
def __init__(self, class_, *attrs, **kwargs):
+ """Return a composite column-based property for use with a Mapper.
+
+ See the mapping documentation section :ref:`mapper_composite` for a full
+ usage example.
+
+ The :class:`.MapperProperty` returned by :func:`.composite`
+ is the :class:`.CompositeProperty`.
+
+ :param class\_:
+ The "composite type" class.
+
+ :param \*cols:
+ List of Column objects to be mapped.
+
+ :param active_history=False:
+ When ``True``, indicates that the "previous" value for a
+ scalar attribute should be loaded when replaced, if not
+ already loaded. See the same flag on :func:`.column_property`.
+
+ .. versionchanged:: 0.7
+ This flag specifically becomes meaningful
+ - previously it was a placeholder.
+
+ :param group:
+ A group name for this property when marked as deferred.
+
+ :param deferred:
+ When True, the column property is "deferred", meaning that it does not
+ load immediately, and is instead loaded when the attribute is first
+ accessed on an instance. See also :func:`~sqlalchemy.orm.deferred`.
+
+ :param comparator_factory: a class which extends
+ :class:`.CompositeProperty.Comparator` which provides custom SQL clause
+ generation for comparison operations.
+
+ :param doc:
+ optional string that will be applied as the doc on the
+ class-bound descriptor.
+
+ :param info: Optional data dictionary which will be populated into the
+ :attr:`.MapperProperty.info` attribute of this object.
+
+ .. versionadded:: 0.8
+
+ :param extension:
+ an :class:`.AttributeExtension` instance,
+ or list of extensions, which will be prepended to the list of
+ attribute listeners for the resulting descriptor placed on the class.
+ **Deprecated.** Please see :class:`.AttributeEvents`.
+
+ """
+
self.attrs = attrs
self.composite_class = class_
self.active_history = kwargs.get('active_history', False)
@@ -101,6 +155,7 @@ class CompositeProperty(DescriptorProperty):
util.set_creation_order(self)
self._create_descriptor()
+
def instrument_class(self, mapper):
super(CompositeProperty, self).instrument_class(mapper)
self._setup_event_handlers()
@@ -190,6 +245,11 @@ class CompositeProperty(DescriptorProperty):
prop = self.parent._columntoproperty[attr]
elif isinstance(attr, attributes.InstrumentedAttribute):
prop = attr.property
+ else:
+ raise sa_exc.ArgumentError(
+ "Composite expects Column objects or mapped "
+ "attributes/attribute names as arguments, got: %r"
+ % (attr,))
props.append(prop)
@property
@@ -205,7 +265,9 @@ class CompositeProperty(DescriptorProperty):
prop.active_history = self.active_history
if self.deferred:
prop.deferred = self.deferred
- prop.strategy_class = strategies.DeferredColumnLoader
+ prop.strategy_class = prop._strategy_lookup(
+ ("deferred", True),
+ ("instrument", True))
prop.group = self.group
def _setup_event_handlers(self):
@@ -300,6 +362,18 @@ class CompositeProperty(DescriptorProperty):
def _comparator_factory(self, mapper):
return self.comparator_factory(self, mapper)
+ class CompositeBundle(query.Bundle):
+ def __init__(self, property, expr):
+ self.property = property
+ super(CompositeProperty.CompositeBundle, self).__init__(
+ property.key, *expr)
+
+ def create_row_processor(self, query, procs, labels):
+ def proc(row, result):
+ return self.property.composite_class(*[proc(row, result) for proc in procs])
+ return proc
+
+
class Comparator(PropComparator):
"""Produce boolean, comparison, and other operators for
:class:`.CompositeProperty` attributes.
@@ -319,10 +393,18 @@ class CompositeProperty(DescriptorProperty):
"""
+
+ __hash__ = None
+
+ @property
+ def clauses(self):
+ return self.__clause_element__()
+
def __clause_element__(self):
return expression.ClauseList(group=False, *self._comparable_elements)
- __hash__ = None
+ def _query_clause_element(self):
+ return CompositeProperty.CompositeBundle(self.prop, self.__clause_element__())
@util.memoized_property
def _comparable_elements(self):
@@ -356,6 +438,7 @@ class CompositeProperty(DescriptorProperty):
return str(self.parent.class_.__name__) + "." + self.key
+@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
class ConcreteInheritedProperty(DescriptorProperty):
"""A 'do nothing' :class:`.MapperProperty` that disables
an attribute on a concrete subclass that is only present
@@ -404,11 +487,66 @@ class ConcreteInheritedProperty(DescriptorProperty):
self.descriptor = NoninheritedConcreteProp()
+@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
class SynonymProperty(DescriptorProperty):
def __init__(self, name, map_column=None,
descriptor=None, comparator_factory=None,
doc=None):
+ """Denote an attribute name as a synonym to a mapped property,
+ in that the attribute will mirror the value and expression behavior
+ of another attribute.
+
+ :param name: the name of the existing mapped property. This
+ can refer to the string name of any :class:`.MapperProperty`
+ configured on the class, including column-bound attributes
+ and relationships.
+
+ :param descriptor: a Python :term:`descriptor` that will be used
+ as a getter (and potentially a setter) when this attribute is
+ accessed at the instance level.
+
+ :param map_column: if ``True``, the :func:`.synonym` construct will
+ locate the existing named :class:`.MapperProperty` based on the
+ attribute name of this :func:`.synonym`, and assign it to a new
+ attribute linked to the name of this :func:`.synonym`.
+ That is, given a mapping like::
+
+ class MyClass(Base):
+ __tablename__ = 'my_table'
+
+ id = Column(Integer, primary_key=True)
+ job_status = Column(String(50))
+
+ job_status = synonym("_job_status", map_column=True)
+
+ The above class ``MyClass`` will now have the ``job_status``
+ :class:`.Column` object mapped to the attribute named ``_job_status``,
+ and the attribute named ``job_status`` will refer to the synonym
+ itself. This feature is typically used in conjunction with the
+ ``descriptor`` argument in order to link a user-defined descriptor
+ as a "wrapper" for an existing column.
+
+ :param comparator_factory: A subclass of :class:`.PropComparator`
+ that will provide custom comparison behavior at the SQL expression
+ level.
+
+ .. note::
+
+ For the use case of providing an attribute which redefines both
+ Python-level and SQL-expression level behavior of an attribute,
+ please refer to the Hybrid attribute introduced at
+ :ref:`mapper_hybrids` for a more effective technique.
+
+ .. seealso::
+
+ :ref:`synonyms` - examples of functionality.
+
+ :ref:`mapper_hybrids` - Hybrids provide a better approach for
+ more complicated attribute-wrapping schemes than synonyms.
+
+ """
+
self.name = name
self.map_column = map_column
self.descriptor = descriptor
@@ -462,10 +600,72 @@ class SynonymProperty(DescriptorProperty):
self.parent = parent
+@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
class ComparableProperty(DescriptorProperty):
"""Instruments a Python property for use in query expressions."""
def __init__(self, comparator_factory, descriptor=None, doc=None):
+ """Provides a method of applying a :class:`.PropComparator`
+ to any Python descriptor attribute.
+
+ .. versionchanged:: 0.7
+ :func:`.comparable_property` is superseded by
+ the :mod:`~sqlalchemy.ext.hybrid` extension. See the example
+ at :ref:`hybrid_custom_comparators`.
+
+ Allows any Python descriptor to behave like a SQL-enabled
+ attribute when used at the class level in queries, allowing
+ redefinition of expression operator behavior.
+
+ In the example below we redefine :meth:`.PropComparator.operate`
+ to wrap both sides of an expression in ``func.lower()`` to produce
+ case-insensitive comparison::
+
+ from sqlalchemy.orm import comparable_property
+ from sqlalchemy.orm.interfaces import PropComparator
+ from sqlalchemy.sql import func
+ from sqlalchemy import Integer, String, Column
+ from sqlalchemy.ext.declarative import declarative_base
+
+ class CaseInsensitiveComparator(PropComparator):
+ def __clause_element__(self):
+ return self.prop
+
+ def operate(self, op, other):
+ return op(
+ func.lower(self.__clause_element__()),
+ func.lower(other)
+ )
+
+ Base = declarative_base()
+
+ class SearchWord(Base):
+ __tablename__ = 'search_word'
+ id = Column(Integer, primary_key=True)
+ word = Column(String)
+ word_insensitive = comparable_property(lambda prop, mapper:
+ CaseInsensitiveComparator(mapper.c.word, mapper)
+ )
+
+
+ A mapping like the above allows the ``word_insensitive`` attribute
+ to render an expression like::
+
+ >>> print SearchWord.word_insensitive == "Trucks"
+ lower(search_word.word) = lower(:lower_1)
+
+ :param comparator_factory:
+ A PropComparator subclass or factory that defines operator behavior
+ for this property.
+
+ :param descriptor:
+ Optional when used in a ``properties={}`` declaration. The Python
+ descriptor or property to layer comparison behavior on top of.
+
+ The like-named descriptor will be automatically retrieved from the
+ mapped class if left blank in a ``properties`` declaration.
+
+ """
self.descriptor = descriptor
self.comparator_factory = comparator_factory
self.doc = doc or (descriptor and descriptor.__doc__) or None
@@ -473,3 +673,5 @@ class ComparableProperty(DescriptorProperty):
def _comparator_factory(self, mapper):
return self.comparator_factory(self, mapper)
+
+
diff --git a/lib/sqlalchemy/orm/dynamic.py b/lib/sqlalchemy/orm/dynamic.py
index 5814b47ca..bae09d32d 100644
--- a/lib/sqlalchemy/orm/dynamic.py
+++ b/lib/sqlalchemy/orm/dynamic.py
@@ -1,5 +1,5 @@
# orm/dynamic.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -15,11 +15,12 @@ from .. import log, util, exc
from ..sql import operators
from . import (
attributes, object_session, util as orm_util, strategies,
- object_mapper, exc as orm_exc
+ object_mapper, exc as orm_exc, properties
)
from .query import Query
-
+@log.class_logger
+@properties.RelationshipProperty.strategy_for(lazy="dynamic")
class DynaLoader(strategies.AbstractRelationshipLoader):
def init_class_attribute(self, mapper):
self.is_class_level = True
@@ -39,9 +40,6 @@ class DynaLoader(strategies.AbstractRelationshipLoader):
backref=self.parent_property.back_populates,
)
-log.class_logger(DynaLoader)
-
-
class DynamicAttributeImpl(attributes.AttributeImpl):
uses_objects = True
accepts_scalar_loader = False
@@ -78,6 +76,14 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
history = self._get_collection_history(state, passive)
return history.added_plus_unchanged
+ @util.memoized_property
+ def _append_token(self):
+ return attributes.Event(self, attributes.OP_APPEND)
+
+ @util.memoized_property
+ def _remove_token(self):
+ return attributes.Event(self, attributes.OP_REMOVE)
+
def fire_append_event(self, state, dict_, value, initiator,
collection_history=None):
if collection_history is None:
@@ -86,7 +92,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
collection_history.add_added(value)
for fn in self.dispatch.append:
- value = fn(state, value, initiator or self)
+ value = fn(state, value, initiator or self._append_token)
if self.trackparent and value is not None:
self.sethasparent(attributes.instance_state(value), state, True)
@@ -102,7 +108,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
self.sethasparent(attributes.instance_state(value), state, False)
for fn in self.dispatch.remove:
- fn(state, value, initiator or self)
+ fn(state, value, initiator or self._remove_token)
def _modified_event(self, state, dict_):
diff --git a/lib/sqlalchemy/orm/evaluator.py b/lib/sqlalchemy/orm/evaluator.py
index 7a11cd450..e1dd96068 100644
--- a/lib/sqlalchemy/orm/evaluator.py
+++ b/lib/sqlalchemy/orm/evaluator.py
@@ -1,5 +1,5 @@
# orm/evaluator.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py
index 97019bb4e..a09154dd0 100644
--- a/lib/sqlalchemy/orm/events.py
+++ b/lib/sqlalchemy/orm/events.py
@@ -1,5 +1,5 @@
# orm/events.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -8,10 +8,14 @@
"""
from .. import event, exc, util
-orm = util.importlater("sqlalchemy", "orm")
+from .base import _mapper_or_none
import inspect
import weakref
-
+from . import interfaces
+from . import mapperlib, instrumentation
+from .session import Session, sessionmaker
+from .scoping import scoped_session
+from .attributes import QueryableAttribute
class InstrumentationEvents(event.Events):
"""Events related to class instrumentation events.
@@ -43,17 +47,20 @@ class InstrumentationEvents(event.Events):
"""
_target_class_doc = "SomeBaseClass"
+ _dispatch_target = instrumentation.InstrumentationFactory
+
@classmethod
def _accept_with(cls, target):
- # TODO: there's no coverage for this
if isinstance(target, type):
return _InstrumentationEventsHold(target)
else:
return None
@classmethod
- def _listen(cls, target, identifier, fn, propagate=True):
+ def _listen(cls, event_key, propagate=True):
+ target, identifier, fn = \
+ event_key.dispatch_target, event_key.identifier, event_key.fn
def listen(target_cls, *arg):
listen_cls = target()
@@ -63,22 +70,21 @@ class InstrumentationEvents(event.Events):
return fn(target_cls, *arg)
def remove(ref):
- event.Events._remove(orm.instrumentation._instrumentation_factory,
- identifier, listen)
+ key = event.registry._EventKey(None, identifier, listen,
+ instrumentation._instrumentation_factory)
+ getattr(instrumentation._instrumentation_factory.dispatch,
+ identifier).remove(key)
target = weakref.ref(target.class_, remove)
- event.Events._listen(orm.instrumentation._instrumentation_factory,
- identifier, listen)
- @classmethod
- def _remove(cls, identifier, target, fn):
- raise NotImplementedError("Removal of instrumentation events "
- "not yet implemented")
+ event_key.\
+ with_dispatch_target(instrumentation._instrumentation_factory).\
+ with_wrapper(listen).base_listen()
@classmethod
def _clear(cls):
super(InstrumentationEvents, cls)._clear()
- orm.instrumentation._instrumentation_factory.dispatch._clear()
+ instrumentation._instrumentation_factory.dispatch._clear()
def class_instrument(self, cls):
"""Called after the given class is instrumented.
@@ -100,6 +106,7 @@ class InstrumentationEvents(event.Events):
"""Called when an attribute is instrumented."""
+
class _InstrumentationEventsHold(object):
"""temporary marker object used to transfer from _accept_with() to
_listen() on the InstrumentationEvents class.
@@ -110,7 +117,6 @@ class _InstrumentationEventsHold(object):
dispatch = event.dispatcher(InstrumentationEvents)
-
class InstanceEvents(event.Events):
"""Define events specific to object lifecycle.
@@ -121,21 +127,19 @@ class InstanceEvents(event.Events):
def my_load_listener(target, context):
print "on load!"
- event.listen(SomeMappedClass, 'load', my_load_listener)
-
- Available targets include mapped classes, instances of
- :class:`.Mapper` (i.e. returned by :func:`.mapper`,
- :func:`.class_mapper` and similar), as well as the
- :class:`.Mapper` class and :func:`.mapper` function itself
- for global event reception::
+ event.listen(SomeClass, 'load', my_load_listener)
- from sqlalchemy.orm import mapper
+ Available targets include:
- def some_listener(target, context):
- log.debug("Instance %s being loaded" % target)
+ * mapped classes
+ * unmapped superclasses of mapped or to-be-mapped classes
+ (using the ``propagate=True`` flag)
+ * :class:`.Mapper` objects
+ * the :class:`.Mapper` class itself and the :func:`.mapper`
+ function indicate listening for all mappers.
- # attach to all mappers
- event.listen(mapper, 'load', some_listener)
+ .. versionchanged:: 0.8.0 instance events can be associated with
+ unmapped superclasses of mapped classes.
Instance events are closely related to mapper events, but
are more specific to the instance and its instrumentation,
@@ -154,21 +158,28 @@ class InstanceEvents(event.Events):
"""
- _target_class_doc = "SomeMappedClass"
+ _target_class_doc = "SomeClass"
+
+ _dispatch_target = instrumentation.ClassManager
@classmethod
- def _accept_with(cls, target):
- if isinstance(target, orm.instrumentation.ClassManager):
+ def _new_classmanager_instance(cls, class_, classmanager):
+ _InstanceEventsHold.populate(class_, classmanager)
+
+ @classmethod
+ @util.dependencies("sqlalchemy.orm")
+ def _accept_with(cls, orm, target):
+ if isinstance(target, instrumentation.ClassManager):
return target
- elif isinstance(target, orm.Mapper):
+ elif isinstance(target, mapperlib.Mapper):
return target.class_manager
elif target is orm.mapper:
- return orm.instrumentation.ClassManager
+ return instrumentation.ClassManager
elif isinstance(target, type):
- if issubclass(target, orm.Mapper):
- return orm.instrumentation.ClassManager
+ if issubclass(target, mapperlib.Mapper):
+ return instrumentation.ClassManager
else:
- manager = orm.instrumentation.manager_of_class(target)
+ manager = instrumentation.manager_of_class(target)
if manager:
return manager
else:
@@ -176,23 +187,23 @@ class InstanceEvents(event.Events):
return None
@classmethod
- def _listen(cls, target, identifier, fn, raw=False, propagate=False):
+ def _listen(cls, event_key, raw=False, propagate=False):
+ target, identifier, fn = \
+ event_key.dispatch_target, event_key.identifier, event_key.fn
+
if not raw:
orig_fn = fn
def wrap(state, *arg, **kw):
return orig_fn(state.obj(), *arg, **kw)
fn = wrap
+ event_key = event_key.with_wrapper(fn)
+
+ event_key.base_listen(propagate=propagate)
- event.Events._listen(target, identifier, fn, propagate=propagate)
if propagate:
for mgr in target.subclass_managers(True):
- event.Events._listen(mgr, identifier, fn, True)
-
- @classmethod
- def _remove(cls, identifier, target, fn):
- msg = "Removal of instance events not yet implemented"
- raise NotImplementedError(msg)
+ event_key.with_dispatch_target(mgr).base_listen(propagate=True)
@classmethod
def _clear(cls):
@@ -321,8 +332,7 @@ class InstanceEvents(event.Events):
"""
-
-class _EventsHold(object):
+class _EventsHold(event.RefCollection):
"""Hold onto listeners against unmapped, uninstrumented classes.
Establish _listen() for that class' mapper/instrumentation when
@@ -337,14 +347,20 @@ class _EventsHold(object):
cls.all_holds.clear()
class HoldEvents(object):
+ _dispatch_target = None
+
@classmethod
- def _listen(cls, target, identifier, fn, raw=False, propagate=False):
+ def _listen(cls, event_key, raw=False, propagate=False):
+ target, identifier, fn = \
+ event_key.dispatch_target, event_key.identifier, event_key.fn
+
if target.class_ in target.all_holds:
collection = target.all_holds[target.class_]
else:
- collection = target.all_holds[target.class_] = []
+ collection = target.all_holds[target.class_] = {}
- collection.append((identifier, fn, raw, propagate))
+ event.registry._stored_in_collection(event_key, target)
+ collection[event_key._key] = (event_key, raw, propagate)
if propagate:
stack = list(target.class_.__subclasses__())
@@ -353,28 +369,37 @@ class _EventsHold(object):
stack.extend(subclass.__subclasses__())
subject = target.resolve(subclass)
if subject is not None:
- subject.dispatch._listen(subject, identifier, fn,
- raw=raw, propagate=propagate)
+ event_key.with_dispatch_target(subject).\
+ listen(raw=raw, propagate=propagate)
+
+ def remove(self, event_key):
+ target, identifier, fn = \
+ event_key.dispatch_target, event_key.identifier, event_key.fn
+
+ collection = target.all_holds[target.class_]
+ del collection[event_key._key]
@classmethod
def populate(cls, class_, subject):
for subclass in class_.__mro__:
if subclass in cls.all_holds:
- if subclass is class_:
- collection = cls.all_holds.pop(subclass)
- else:
- collection = cls.all_holds[subclass]
- for ident, fn, raw, propagate in collection:
+ collection = cls.all_holds[subclass]
+ for event_key, raw, propagate in collection.values():
if propagate or subclass is class_:
- subject.dispatch._listen(subject, ident,
- fn, raw, propagate)
+ # since we can't be sure in what order different classes
+ # in a hierarchy are triggered with populate(),
+ # we rely upon _EventsHold for all event
+ # assignment, instead of using the generic propagate
+ # flag.
+ event_key.with_dispatch_target(subject).\
+ listen(raw=raw, propagate=False)
class _InstanceEventsHold(_EventsHold):
all_holds = weakref.WeakKeyDictionary()
def resolve(self, class_):
- return orm.instrumentation.manager_of_class(class_)
+ return instrumentation.manager_of_class(class_)
class HoldInstanceEvents(_EventsHold.HoldEvents, InstanceEvents):
pass
@@ -396,24 +421,22 @@ class MapperEvents(event.Events):
"select my_special_function(%d)"
% target.special_number)
- # associate the listener function with SomeMappedClass,
+ # associate the listener function with SomeClass,
# to execute during the "before_insert" hook
event.listen(
- SomeMappedClass, 'before_insert', my_before_insert_listener)
-
- Available targets include mapped classes, instances of
- :class:`.Mapper` (i.e. returned by :func:`.mapper`,
- :func:`.class_mapper` and similar), as well as the
- :class:`.Mapper` class and :func:`.mapper` function itself
- for global event reception::
+ SomeClass, 'before_insert', my_before_insert_listener)
- from sqlalchemy.orm import mapper
+ Available targets include:
- def some_listener(mapper, connection, target):
- log.debug("Instance %s being inserted" % target)
+ * mapped classes
+ * unmapped superclasses of mapped or to-be-mapped classes
+ (using the ``propagate=True`` flag)
+ * :class:`.Mapper` objects
+ * the :class:`.Mapper` class itself and the :func:`.mapper`
+ function indicate listening for all mappers.
- # attach to all mappers
- event.listen(mapper, 'before_insert', some_listener)
+ .. versionchanged:: 0.8.0 mapper events can be associated with
+ unmapped superclasses of mapped classes.
Mapper events provide hooks into critical sections of the
mapper, including those related to object instrumentation,
@@ -455,17 +478,23 @@ class MapperEvents(event.Events):
"""
- _target_class_doc = "SomeMappedClass"
+ _target_class_doc = "SomeClass"
+ _dispatch_target = mapperlib.Mapper
@classmethod
- def _accept_with(cls, target):
+ def _new_mapper_instance(cls, class_, mapper):
+ _MapperEventsHold.populate(class_, mapper)
+
+ @classmethod
+ @util.dependencies("sqlalchemy.orm")
+ def _accept_with(cls, orm, target):
if target is orm.mapper:
- return orm.Mapper
+ return mapperlib.Mapper
elif isinstance(target, type):
- if issubclass(target, orm.Mapper):
+ if issubclass(target, mapperlib.Mapper):
return target
else:
- mapper = orm.util._mapper_or_none(target)
+ mapper = _mapper_or_none(target)
if mapper is not None:
return mapper
else:
@@ -474,8 +503,10 @@ class MapperEvents(event.Events):
return target
@classmethod
- def _listen(cls, target, identifier, fn,
+ def _listen(cls, event_key,
raw=False, retval=False, propagate=False):
+ target, identifier, fn = \
+ event_key.dispatch_target, event_key.identifier, event_key.fn
if not raw or not retval:
if not raw:
@@ -494,16 +525,17 @@ class MapperEvents(event.Events):
arg[target_index] = arg[target_index].obj()
if not retval:
wrapped_fn(*arg, **kw)
- return orm.interfaces.EXT_CONTINUE
+ return interfaces.EXT_CONTINUE
else:
return wrapped_fn(*arg, **kw)
fn = wrap
+ event_key = event_key.with_wrapper(wrap)
if propagate:
for mapper in target.self_and_descendants:
- event.Events._listen(mapper, identifier, fn, propagate=True)
+ event_key.with_dispatch_target(mapper).base_listen(propagate=True)
else:
- event.Events._listen(target, identifier, fn)
+ event_key.base_listen()
@classmethod
def _clear(cls):
@@ -517,8 +549,15 @@ class MapperEvents(event.Events):
This event is the earliest phase of mapper construction.
Most attributes of the mapper are not yet initialized.
- This listener can generally only be applied to the :class:`.Mapper`
- class overall.
+ This listener can either be applied to the :class:`.Mapper`
+ class overall, or to any un-mapped class which serves as a base
+ for classes that will be mapped (using the ``propagate=True`` flag)::
+
+ Base = declarative_base()
+
+ @event.listens_for(Base, "instrument_class", propagate=True)
+ def on_new_class(mapper, cls_):
+ " ... "
:param mapper: the :class:`.Mapper` which is the target
of this event.
@@ -1048,17 +1087,11 @@ class MapperEvents(event.Events):
"""
- @classmethod
- def _remove(cls, identifier, target, fn):
- "Removal of mapper events not yet implemented"
- raise NotImplementedError(msg)
-
-
class _MapperEventsHold(_EventsHold):
all_holds = weakref.WeakKeyDictionary()
def resolve(self, class_):
- return orm.util._mapper_or_none(class_)
+ return _mapper_or_none(class_)
class HoldMapperEvents(_EventsHold.HoldEvents, MapperEvents):
pass
@@ -1083,7 +1116,7 @@ class SessionEvents(event.Events):
The :func:`~.event.listen` function will accept
:class:`.Session` objects as well as the return result
- of :func:`.sessionmaker` and :func:`.scoped_session`.
+ of :class:`~.sessionmaker()` and :class:`~.scoped_session()`.
Additionally, it accepts the :class:`.Session` class which
will apply listeners to all :class:`.Session` instances
@@ -1093,38 +1126,35 @@ class SessionEvents(event.Events):
_target_class_doc = "SomeSessionOrFactory"
+ _dispatch_target = Session
+
@classmethod
def _accept_with(cls, target):
- if isinstance(target, orm.scoped_session):
+ if isinstance(target, scoped_session):
target = target.session_factory
- if not isinstance(target, orm.sessionmaker) and \
+ if not isinstance(target, sessionmaker) and \
(
not isinstance(target, type) or
- not issubclass(target, orm.Session)
+ not issubclass(target, Session)
):
raise exc.ArgumentError(
"Session event listen on a scoped_session "
"requires that its creation callable "
"is associated with the Session class.")
- if isinstance(target, orm.sessionmaker):
+ if isinstance(target, sessionmaker):
return target.class_
elif isinstance(target, type):
- if issubclass(target, orm.scoped_session):
- return orm.Session
- elif issubclass(target, orm.Session):
+ if issubclass(target, scoped_session):
+ return Session
+ elif issubclass(target, Session):
return target
- elif isinstance(target, orm.Session):
+ elif isinstance(target, Session):
return target
else:
return None
- @classmethod
- def _remove(cls, identifier, target, fn):
- msg = "Removal of session events not yet implemented"
- raise NotImplementedError(msg)
-
def after_transaction_create(self, session, transaction):
"""Execute when a new :class:`.SessionTransaction` is created.
@@ -1173,7 +1203,7 @@ class SessionEvents(event.Events):
.. note::
- The :meth:`.before_commit` hook is *not* per-flush,
+ The :meth:`~.SessionEvents.before_commit` hook is *not* per-flush,
that is, the :class:`.Session` can emit SQL to the database
many times within the scope of a transaction.
For interception of these events, use the :meth:`~.SessionEvents.before_flush`,
@@ -1265,9 +1295,9 @@ class SessionEvents(event.Events):
:param session: The target :class:`.Session`.
:param previous_transaction: The :class:`.SessionTransaction`
- transactional marker object which was just closed. The current
- :class:`.SessionTransaction` for the given :class:`.Session` is
- available via the :attr:`.Session.transaction` attribute.
+ transactional marker object which was just closed. The current
+ :class:`.SessionTransaction` for the given :class:`.Session` is
+ available via the :attr:`.Session.transaction` attribute.
.. versionadded:: 0.7.3
@@ -1359,7 +1389,7 @@ class SessionEvents(event.Events):
This is called before an add, delete or merge causes
the object to be part of the session.
- .. versionadded:: 0.8. Note that :meth:`.after_attach` now
+ .. versionadded:: 0.8. Note that :meth:`~.SessionEvents.after_attach` now
fires off after the item is part of the session.
:meth:`.before_attach` is provided for those cases where
the item should not yet be part of the session state.
@@ -1474,7 +1504,7 @@ class AttributeEvents(event.Events):
listen(UserContact.phone, 'set', validate_phone, retval=True)
A validation function like the above can also raise an exception
- such as :class:`.ValueError` to halt the operation.
+ such as :exc:`ValueError` to halt the operation.
Several modifiers are available to the :func:`~.event.listen` function.
@@ -1503,25 +1533,32 @@ class AttributeEvents(event.Events):
"""
_target_class_doc = "SomeClass.some_attribute"
+ _dispatch_target = QueryableAttribute
+
+ @staticmethod
+ def _set_dispatch(cls, dispatch_cls):
+ event.Events._set_dispatch(cls, dispatch_cls)
+ dispatch_cls._active_history = False
@classmethod
def _accept_with(cls, target):
# TODO: coverage
- if isinstance(target, orm.interfaces.MapperProperty):
+ if isinstance(target, interfaces.MapperProperty):
return getattr(target.parent.class_, target.key)
else:
return target
@classmethod
- def _listen(cls, target, identifier, fn, active_history=False,
+ def _listen(cls, event_key, active_history=False,
raw=False, retval=False,
propagate=False):
+
+ target, identifier, fn = \
+ event_key.dispatch_target, event_key.identifier, event_key.fn
+
if active_history:
target.dispatch._active_history = True
- # TODO: for removal, need to package the identity
- # of the wrapper with the original function.
-
if not raw or not retval:
orig_fn = fn
@@ -1534,19 +1571,15 @@ class AttributeEvents(event.Events):
else:
return orig_fn(target, value, *arg)
fn = wrap
+ event_key = event_key.with_wrapper(wrap)
- event.Events._listen(target, identifier, fn, propagate)
+ event_key.base_listen(propagate=propagate)
if propagate:
- manager = orm.instrumentation.manager_of_class(target.class_)
+ manager = instrumentation.manager_of_class(target.class_)
for mgr in manager.subclass_managers(True):
- event.Events._listen(mgr[target.key], identifier, fn, True)
-
- @classmethod
- def _remove(cls, identifier, target, fn):
- msg = "Removal of attribute events not yet implemented"
- raise NotImplementedError(msg)
+ event_key.with_dispatch_target(mgr[target.key]).base_listen(propagate=True)
def append(self, target, value, initiator):
"""Receive a collection append event.
@@ -1558,8 +1591,15 @@ class AttributeEvents(event.Events):
is registered with ``retval=True``, the listener
function must return this value, or a new value which
replaces it.
- :param initiator: the attribute implementation object
- which initiated this event.
+ :param initiator: An instance of :class:`.attributes.Event`
+ representing the initiation of the event. May be modified
+ from its original value by backref handlers in order to control
+ chained event propagation.
+
+ .. versionchanged:: 0.9.0 the ``initiator`` argument is now
+ passed as a :class:`.attributes.Event` object, and may be modified
+ by backref handlers within a chain of backref-linked events.
+
:return: if the event was registered with ``retval=True``,
the given value, or a new effective value, should be returned.
@@ -1572,8 +1612,15 @@ class AttributeEvents(event.Events):
If the listener is registered with ``raw=True``, this will
be the :class:`.InstanceState` object.
:param value: the value being removed.
- :param initiator: the attribute implementation object
- which initiated this event.
+ :param initiator: An instance of :class:`.attributes.Event`
+ representing the initiation of the event. May be modified
+ from its original value by backref handlers in order to control
+ chained event propagation.
+
+ .. versionchanged:: 0.9.0 the ``initiator`` argument is now
+ passed as a :class:`.attributes.Event` object, and may be modified
+ by backref handlers within a chain of backref-linked events.
+
:return: No return value is defined for this event.
"""
@@ -1593,9 +1640,17 @@ class AttributeEvents(event.Events):
the previous value of the attribute will be loaded from
the database if the existing value is currently unloaded
or expired.
- :param initiator: the attribute implementation object
- which initiated this event.
+ :param initiator: An instance of :class:`.attributes.Event`
+ representing the initiation of the event. May be modified
+ from its original value by backref handlers in order to control
+ chained event propagation.
+
+ .. versionchanged:: 0.9.0 the ``initiator`` argument is now
+ passed as a :class:`.attributes.Event` object, and may be modified
+ by backref handlers within a chain of backref-linked events.
+
:return: if the event was registered with ``retval=True``,
the given value, or a new effective value, should be returned.
"""
+
diff --git a/lib/sqlalchemy/orm/exc.py b/lib/sqlalchemy/orm/exc.py
index 0faa7bd29..d1ef1ded9 100644
--- a/lib/sqlalchemy/orm/exc.py
+++ b/lib/sqlalchemy/orm/exc.py
@@ -1,13 +1,11 @@
# orm/exc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""SQLAlchemy ORM exceptions."""
from .. import exc as sa_exc, util
-orm_util = util.importlater('sqlalchemy.orm', 'util')
-attributes = util.importlater('sqlalchemy.orm', 'attributes')
NO_STATE = (AttributeError, KeyError)
"""Exception types that may be raised by instrumentation implementations."""
@@ -65,10 +63,11 @@ class DetachedInstanceError(sa_exc.SQLAlchemyError):
class UnmappedInstanceError(UnmappedError):
"""An mapping operation was requested for an unknown instance."""
- def __init__(self, obj, msg=None):
+ @util.dependencies("sqlalchemy.orm.base")
+ def __init__(self, base, obj, msg=None):
if not msg:
try:
- mapper = orm_util.class_mapper(type(obj))
+ base.class_mapper(type(obj))
name = _safe_cls_name(type(obj))
msg = ("Class %r is mapped, but this instance lacks "
"instrumentation. This occurs when the instance"
@@ -117,10 +116,11 @@ class ObjectDeletedError(sa_exc.InvalidRequestError):
object.
"""
- def __init__(self, state, msg=None):
+ @util.dependencies("sqlalchemy.orm.base")
+ def __init__(self, base, state, msg=None):
if not msg:
msg = "Instance '%s' has been deleted, or its "\
- "row is otherwise not present." % orm_util.state_str(state)
+ "row is otherwise not present." % base.state_str(state)
sa_exc.InvalidRequestError.__init__(self, msg)
@@ -149,10 +149,10 @@ def _safe_cls_name(cls):
cls_name = repr(cls)
return cls_name
-
-def _default_unmapped(cls):
+@util.dependencies("sqlalchemy.orm.base")
+def _default_unmapped(base, cls):
try:
- mappers = attributes.manager_of_class(cls).mappers
+ mappers = base.manager_of_class(cls).mappers
except NO_STATE:
mappers = {}
except TypeError:
diff --git a/lib/sqlalchemy/orm/identity.py b/lib/sqlalchemy/orm/identity.py
index d0234a1d3..a91085d28 100644
--- a/lib/sqlalchemy/orm/identity.py
+++ b/lib/sqlalchemy/orm/identity.py
@@ -1,5 +1,5 @@
# orm/identity.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -172,7 +172,7 @@ class WeakInstanceDict(IdentityMap):
if util.py2k:
return dict.values(self)
else:
- return list(dict.values(self))
+ return list(dict.values(self))
def discard(self, state):
st = dict.get(self, state.key, None)
diff --git a/lib/sqlalchemy/orm/instrumentation.py b/lib/sqlalchemy/orm/instrumentation.py
index 368a6a0b1..68b4f0611 100644
--- a/lib/sqlalchemy/orm/instrumentation.py
+++ b/lib/sqlalchemy/orm/instrumentation.py
@@ -1,5 +1,5 @@
# orm/instrumentation.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -29,17 +29,15 @@ alternate instrumentation forms.
"""
-from . import exc, collections, events, interfaces
-from operator import attrgetter
-from .. import event, util
-state = util.importlater("sqlalchemy.orm", "state")
-
+from . import exc, collections, interfaces, state
+from .. import util
+from . import base
class ClassManager(dict):
"""tracks state information at the class level."""
- MANAGER_ATTR = '_sa_class_manager'
- STATE_ATTR = '_sa_instance_state'
+ MANAGER_ATTR = base.DEFAULT_MANAGER_ATTR
+ STATE_ATTR = base.DEFAULT_STATE_ATTR
deferred_scalar_loader = None
@@ -63,7 +61,8 @@ class ClassManager(dict):
for base in self._bases:
self.update(base)
- events._InstanceEventsHold.populate(class_, self)
+ self.dispatch._events._new_classmanager_instance(class_, self)
+ #events._InstanceEventsHold.populate(class_, self)
for basecls in class_.__mro__:
mgr = manager_of_class(basecls)
@@ -79,7 +78,11 @@ class ClassManager(dict):
"reference cycles. Please remove this method." %
class_)
- dispatch = event.dispatcher(events.InstanceEvents)
+ def __hash__(self):
+ return id(self)
+
+ def __eq__(self, other):
+ return other is self
@property
def is_mapped(self):
@@ -164,9 +167,7 @@ class ClassManager(dict):
@util.hybridmethod
def manager_getter(self):
- def manager_of_class(cls):
- return cls.__dict__.get(ClassManager.MANAGER_ATTR, None)
- return manager_of_class
+ return _default_manager_getter
@util.hybridmethod
def state_getter(self):
@@ -177,11 +178,12 @@ class ClassManager(dict):
instance.
"""
- return attrgetter(self.STATE_ATTR)
+ return _default_state_getter
@util.hybridmethod
def dict_getter(self):
- return attrgetter('__dict__')
+ return _default_dict_getter
+
def instrument_attribute(self, key, inst, propagated=False):
if propagated:
@@ -296,6 +298,9 @@ class ClassManager(dict):
def teardown_instance(self, instance):
delattr(instance, self.STATE_ATTR)
+ def _serialize(self, state, state_dict):
+ return _SerializeManager(state, state_dict)
+
def _new_state_if_none(self, instance):
"""Install a default InstanceState if none is present.
@@ -335,12 +340,41 @@ class ClassManager(dict):
return '<%s of %r at %x>' % (
self.__class__.__name__, self.class_, id(self))
+class _SerializeManager(object):
+ """Provide serialization of a :class:`.ClassManager`.
+
+ The :class:`.InstanceState` uses ``__init__()`` on serialize
+ and ``__call__()`` on deserialize.
+
+ """
+ def __init__(self, state, d):
+ self.class_ = state.class_
+ manager = state.manager
+ manager.dispatch.pickle(state, d)
+
+ def __call__(self, state, inst, state_dict):
+ state.manager = manager = manager_of_class(self.class_)
+ if manager is None:
+ raise exc.UnmappedInstanceError(
+ inst,
+ "Cannot deserialize object of type %r - "
+ "no mapper() has "
+ "been configured for this class within the current "
+ "Python process!" %
+ self.class_)
+ elif manager.is_mapped and not manager.mapper.configured:
+ manager.mapper._configure_all()
+
+ # setup _sa_instance_state ahead of time so that
+ # unpickle events can access the object normally.
+ # see [ticket:2362]
+ if inst is not None:
+ manager.setup_instance(inst, state)
+ manager.dispatch.unpickle(state, state_dict)
class InstrumentationFactory(object):
"""Factory for new ClassManager instances."""
- dispatch = event.dispatcher(events.InstrumentationEvents)
-
def create_manager_for_cls(self, class_):
assert class_ is not None
assert manager_of_class(class_) is None
@@ -380,6 +414,14 @@ class InstrumentationFactory(object):
# when importred.
_instrumentation_factory = InstrumentationFactory()
+# these attributes are replaced by sqlalchemy.ext.instrumentation
+# when a non-standard InstrumentationManager class is first
+# used to instrument a class.
+instance_state = _default_state_getter = base.instance_state
+
+instance_dict = _default_dict_getter = base.instance_dict
+
+manager_of_class = _default_manager_getter = base.manager_of_class
def register_class(class_):
"""Register class instrumentation.
@@ -411,15 +453,6 @@ def is_instrumented(instance, key):
return manager_of_class(instance.__class__).\
is_instrumented(key, search=True)
-# these attributes are replaced by sqlalchemy.ext.instrumentation
-# when a non-standard InstrumentationManager class is first
-# used to instrument a class.
-instance_state = _default_state_getter = ClassManager.state_getter()
-
-instance_dict = _default_dict_getter = ClassManager.dict_getter()
-
-manager_of_class = _default_manager_getter = ClassManager.manager_getter()
-
def _generate_init(class_, class_manager):
"""Build an __init__ decorator that triggers ClassManager events."""
diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py
index 150277be2..3d5559be9 100644
--- a/lib/sqlalchemy/orm/interfaces.py
+++ b/lib/sqlalchemy/orm/interfaces.py
@@ -1,5 +1,5 @@
# orm/interfaces.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -21,9 +21,11 @@ from __future__ import absolute_import
from .. import exc as sa_exc, util, inspect
from ..sql import operators
from collections import deque
+from .base import ONETOMANY, MANYTOONE, MANYTOMANY, EXT_CONTINUE, EXT_STOP, NOT_EXTENSION
+from .base import _InspectionAttr, _MappedAttribute
+from .path_registry import PathRegistry
+import collections
-orm_util = util.importlater('sqlalchemy.orm', 'util')
-collections = util.importlater('sqlalchemy.orm', 'collections')
__all__ = (
'AttributeExtension',
@@ -42,97 +44,6 @@ __all__ = (
'StrategizedProperty',
)
-EXT_CONTINUE = util.symbol('EXT_CONTINUE')
-EXT_STOP = util.symbol('EXT_STOP')
-
-ONETOMANY = util.symbol('ONETOMANY')
-MANYTOONE = util.symbol('MANYTOONE')
-MANYTOMANY = util.symbol('MANYTOMANY')
-
-from .deprecated_interfaces import AttributeExtension, \
- SessionExtension, \
- MapperExtension
-
-
-NOT_EXTENSION = util.symbol('NOT_EXTENSION')
-"""Symbol indicating an :class:`_InspectionAttr` that's
- not part of sqlalchemy.ext.
-
- Is assigned to the :attr:`._InspectionAttr.extension_type`
- attibute.
-
-"""
-
-class _InspectionAttr(object):
- """A base class applied to all ORM objects that can be returned
- by the :func:`.inspect` function.
-
- The attributes defined here allow the usage of simple boolean
- checks to test basic facts about the object returned.
-
- While the boolean checks here are basically the same as using
- the Python isinstance() function, the flags here can be used without
- the need to import all of these classes, and also such that
- the SQLAlchemy class system can change while leaving the flags
- here intact for forwards-compatibility.
-
- """
-
- is_selectable = False
- """Return True if this object is an instance of :class:`.Selectable`."""
-
- is_aliased_class = False
- """True if this object is an instance of :class:`.AliasedClass`."""
-
- is_instance = False
- """True if this object is an instance of :class:`.InstanceState`."""
-
- is_mapper = False
- """True if this object is an instance of :class:`.Mapper`."""
-
- is_property = False
- """True if this object is an instance of :class:`.MapperProperty`."""
-
- is_attribute = False
- """True if this object is a Python :term:`descriptor`.
-
- This can refer to one of many types. Usually a
- :class:`.QueryableAttribute` which handles attributes events on behalf
- of a :class:`.MapperProperty`. But can also be an extension type
- such as :class:`.AssociationProxy` or :class:`.hybrid_property`.
- The :attr:`._InspectionAttr.extension_type` will refer to a constant
- identifying the specific subtype.
-
- .. seealso::
-
- :attr:`.Mapper.all_orm_descriptors`
-
- """
-
- is_clause_element = False
- """True if this object is an instance of :class:`.ClauseElement`."""
-
- extension_type = NOT_EXTENSION
- """The extension type, if any.
- Defaults to :data:`.interfaces.NOT_EXTENSION`
-
- .. versionadded:: 0.8.0
-
- .. seealso::
-
- :data:`.HYBRID_METHOD`
-
- :data:`.HYBRID_PROPERTY`
-
- :data:`.ASSOCIATION_PROXY`
-
- """
-
-class _MappedAttribute(object):
- """Mixin for attributes which should be replaced by mapper-assigned
- attributes.
-
- """
class MapperProperty(_MappedAttribute, _InspectionAttr):
@@ -235,7 +146,26 @@ class MapperProperty(_MappedAttribute, _InspectionAttr):
@property
def class_attribute(self):
"""Return the class-bound descriptor corresponding to this
- MapperProperty."""
+ :class:`.MapperProperty`.
+
+ This is basically a ``getattr()`` call::
+
+ return getattr(self.parent.class_, self.key)
+
+ I.e. if this :class:`.MapperProperty` were named ``addresses``,
+ and the class to which it is mapped is ``User``, this sequence
+ is possible::
+
+ >>> from sqlalchemy import inspect
+ >>> mapper = inspect(User)
+ >>> addresses_property = mapper.attrs.addresses
+ >>> addresses_property.class_attribute is User.addresses
+ True
+ >>> User.addresses.property is addresses_property
+ True
+
+
+ """
return getattr(self.parent.class_, self.key)
@@ -389,6 +319,9 @@ class PropComparator(operators.ColumnOperators):
def __clause_element__(self):
raise NotImplementedError("%r" % self)
+ def _query_clause_element(self):
+ return self.__clause_element__()
+
def adapt_to_entity(self, adapt_to_entity):
"""Return a copy of this PropComparator which will use the given
:class:`.AliasedInsp` to produce corresponding expressions.
@@ -490,51 +423,57 @@ class StrategizedProperty(MapperProperty):
strategy_wildcard_key = None
- @util.memoized_property
- def _wildcard_path(self):
- if self.strategy_wildcard_key:
- return ('loaderstrategy', (self.strategy_wildcard_key,))
- else:
- return None
+ def _get_context_loader(self, context, path):
+ load = None
- def _get_context_strategy(self, context, path):
- strategy_cls = path._inlined_get_for(self, context, 'loaderstrategy')
+ # use EntityRegistry.__getitem__()->PropRegistry here so
+ # that the path is stated in terms of our base
+ search_path = dict.__getitem__(path, self)
- if not strategy_cls:
- wc_key = self._wildcard_path
- if wc_key and wc_key in context.attributes:
- strategy_cls = context.attributes[wc_key]
+ # search among: exact match, "attr.*", "default" strategy
+ # if any.
+ for path_key in (
+ search_path._loader_key,
+ search_path._wildcard_path_loader_key,
+ search_path._default_path_loader_key
+ ):
+ if path_key in context.attributes:
+ load = context.attributes[path_key]
+ break
- if strategy_cls:
- try:
- return self._strategies[strategy_cls]
- except KeyError:
- return self.__init_strategy(strategy_cls)
- return self.strategy
+ return load
- def _get_strategy(self, cls):
+ def _get_strategy(self, key):
try:
- return self._strategies[cls]
+ return self._strategies[key]
except KeyError:
- return self.__init_strategy(cls)
+ cls = self._strategy_lookup(*key)
+ self._strategies[key] = self._strategies[cls] = strategy = cls(self)
+ return strategy
- def __init_strategy(self, cls):
- self._strategies[cls] = strategy = cls(self)
- return strategy
+ def _get_strategy_by_cls(self, cls):
+ return self._get_strategy(cls._strategy_keys[0])
def setup(self, context, entity, path, adapter, **kwargs):
- self._get_context_strategy(context, path).\
- setup_query(context, entity, path,
- adapter, **kwargs)
+ loader = self._get_context_loader(context, path)
+ if loader and loader.strategy:
+ strat = self._get_strategy(loader.strategy)
+ else:
+ strat = self.strategy
+ strat.setup_query(context, entity, path, loader, adapter, **kwargs)
def create_row_processor(self, context, path, mapper, row, adapter):
- return self._get_context_strategy(context, path).\
- create_row_processor(context, path,
+ loader = self._get_context_loader(context, path)
+ if loader and loader.strategy:
+ strat = self._get_strategy(loader.strategy)
+ else:
+ strat = self.strategy
+ return strat.create_row_processor(context, path, loader,
mapper, row, adapter)
def do_init(self):
self._strategies = {}
- self.strategy = self.__init_strategy(self.strategy_class)
+ self.strategy = self._get_strategy_by_cls(self.strategy_class)
def post_instrument_class(self, mapper):
if self.is_primary() and \
@@ -542,6 +481,30 @@ class StrategizedProperty(MapperProperty):
self.strategy.init_class_attribute(mapper)
+ _strategies = collections.defaultdict(dict)
+
+ @classmethod
+ def strategy_for(cls, **kw):
+ def decorate(dec_cls):
+ dec_cls._strategy_keys = []
+ key = tuple(sorted(kw.items()))
+ cls._strategies[cls][key] = dec_cls
+ dec_cls._strategy_keys.append(key)
+ return dec_cls
+ return decorate
+
+ @classmethod
+ def _strategy_lookup(cls, *key):
+ for prop_cls in cls.__mro__:
+ if prop_cls in cls._strategies:
+ strategies = cls._strategies[prop_cls]
+ try:
+ return strategies[key]
+ except KeyError:
+ pass
+ raise Exception("can't locate strategy for %s %s" % (cls, key))
+
+
class MapperOption(object):
"""Describe a modification to a Query."""
@@ -563,241 +526,6 @@ class MapperOption(object):
self.process_query(query)
-class PropertyOption(MapperOption):
- """A MapperOption that is applied to a property off the mapper or
- one of its child mappers, identified by a dot-separated key
- or list of class-bound attributes. """
-
- def __init__(self, key, mapper=None):
- self.key = key
- self.mapper = mapper
-
- def process_query(self, query):
- self._process(query, True)
-
- def process_query_conditionally(self, query):
- self._process(query, False)
-
- def _process(self, query, raiseerr):
- paths = self._process_paths(query, raiseerr)
- if paths:
- self.process_query_property(query, paths)
-
- def process_query_property(self, query, paths):
- pass
-
- def __getstate__(self):
- d = self.__dict__.copy()
- d['key'] = ret = []
- for token in util.to_list(self.key):
- if isinstance(token, PropComparator):
- ret.append((token._parentmapper.class_, token.key))
- else:
- ret.append(token)
- return d
-
- def __setstate__(self, state):
- ret = []
- for key in state['key']:
- if isinstance(key, tuple):
- cls, propkey = key
- ret.append(getattr(cls, propkey))
- else:
- ret.append(key)
- state['key'] = tuple(ret)
- self.__dict__ = state
-
- def _find_entity_prop_comparator(self, query, token, mapper, raiseerr):
- if orm_util._is_aliased_class(mapper):
- searchfor = mapper
- else:
- searchfor = orm_util._class_to_mapper(mapper)
- for ent in query._mapper_entities:
- if ent.corresponds_to(searchfor):
- return ent
- else:
- if raiseerr:
- if not list(query._mapper_entities):
- raise sa_exc.ArgumentError(
- "Query has only expression-based entities - "
- "can't find property named '%s'."
- % (token, )
- )
- else:
- raise sa_exc.ArgumentError(
- "Can't find property '%s' on any entity "
- "specified in this Query. Note the full path "
- "from root (%s) to target entity must be specified."
- % (token, ",".join(str(x) for
- x in query._mapper_entities))
- )
- else:
- return None
-
- def _find_entity_basestring(self, query, token, raiseerr):
- for ent in query._mapper_entities:
- # return only the first _MapperEntity when searching
- # based on string prop name. Ideally object
- # attributes are used to specify more exactly.
- return ent
- else:
- if raiseerr:
- raise sa_exc.ArgumentError(
- "Query has only expression-based entities - "
- "can't find property named '%s'."
- % (token, )
- )
- else:
- return None
-
- def _process_paths(self, query, raiseerr):
- """reconcile the 'key' for this PropertyOption with
- the current path and entities of the query.
-
- Return a list of affected paths.
-
- """
- path = orm_util.PathRegistry.root
- entity = None
- paths = []
- no_result = []
-
- # _current_path implies we're in a
- # secondary load with an existing path
- current_path = list(query._current_path.path)
-
- tokens = deque(self.key)
- while tokens:
- token = tokens.popleft()
- if isinstance(token, str):
- # wildcard token
- if token.endswith(':*'):
- return [path.token(token)]
- sub_tokens = token.split(".", 1)
- token = sub_tokens[0]
- tokens.extendleft(sub_tokens[1:])
-
- # exhaust current_path before
- # matching tokens to entities
- if current_path:
- if current_path[1].key == token:
- current_path = current_path[2:]
- continue
- else:
- return no_result
-
- if not entity:
- entity = self._find_entity_basestring(
- query,
- token,
- raiseerr)
- if entity is None:
- return no_result
- path_element = entity.entity_zero
- mapper = entity.mapper
-
- if hasattr(mapper.class_, token):
- prop = getattr(mapper.class_, token).property
- else:
- if raiseerr:
- raise sa_exc.ArgumentError(
- "Can't find property named '%s' on the "
- "mapped entity %s in this Query. " % (
- token, mapper)
- )
- else:
- return no_result
- elif isinstance(token, PropComparator):
- prop = token.property
-
- # exhaust current_path before
- # matching tokens to entities
- if current_path:
- if current_path[0:2] == \
- [token._parententity, prop]:
- current_path = current_path[2:]
- continue
- else:
- return no_result
-
- if not entity:
- entity = self._find_entity_prop_comparator(
- query,
- prop.key,
- token._parententity,
- raiseerr)
- if not entity:
- return no_result
-
- path_element = entity.entity_zero
- mapper = entity.mapper
- else:
- raise sa_exc.ArgumentError(
- "mapper option expects "
- "string key or list of attributes")
- assert prop is not None
- if raiseerr and not prop.parent.common_parent(mapper):
- raise sa_exc.ArgumentError("Attribute '%s' does not "
- "link from element '%s'" % (token, path_element))
-
- path = path[path_element][prop]
-
- paths.append(path)
-
- if getattr(token, '_of_type', None):
- ac = token._of_type
- ext_info = inspect(ac)
- path_element = mapper = ext_info.mapper
- if not ext_info.is_aliased_class:
- ac = orm_util.with_polymorphic(
- ext_info.mapper.base_mapper,
- ext_info.mapper, aliased=True,
- _use_mapper_path=True)
- ext_info = inspect(ac)
- path.set(query._attributes, "path_with_polymorphic", ext_info)
- else:
- path_element = mapper = getattr(prop, 'mapper', None)
- if mapper is None and tokens:
- raise sa_exc.ArgumentError(
- "Attribute '%s' of entity '%s' does not "
- "refer to a mapped entity" %
- (token, entity)
- )
-
- if current_path:
- # ran out of tokens before
- # current_path was exhausted.
- assert not tokens
- return no_result
-
- return paths
-
-
-class StrategizedOption(PropertyOption):
- """A MapperOption that affects which LoaderStrategy will be used
- for an operation by a StrategizedProperty.
- """
-
- chained = False
-
- def process_query_property(self, query, paths):
- strategy = self.get_strategy_class()
- if self.chained:
- for path in paths:
- path.set(
- query._attributes,
- "loaderstrategy",
- strategy
- )
- else:
- paths[-1].set(
- query._attributes,
- "loaderstrategy",
- strategy
- )
-
- def get_strategy_class(self):
- raise NotImplementedError()
class LoaderStrategy(object):
@@ -832,10 +560,10 @@ class LoaderStrategy(object):
def init_class_attribute(self, mapper):
pass
- def setup_query(self, context, entity, path, adapter, **kwargs):
+ def setup_query(self, context, entity, path, loadopt, adapter, **kwargs):
pass
- def create_row_processor(self, context, path, mapper,
+ def create_row_processor(self, context, path, loadopt, mapper,
row, adapter):
"""Return row processing functions which fulfill the contract
specified by MapperProperty.create_row_processor.
diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py
index 1641f509e..af77fe3e0 100644
--- a/lib/sqlalchemy/orm/loading.py
+++ b/lib/sqlalchemy/orm/loading.py
@@ -1,5 +1,5 @@
# orm/loading.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -19,7 +19,6 @@ from .interfaces import EXT_CONTINUE
from ..sql import util as sql_util
from .util import _none_set, state_str
from .. import exc as sa_exc
-sessionlib = util.importlater("sqlalchemy.orm", "session")
_new_runid = util.counter()
@@ -34,7 +33,8 @@ def instances(query, cursor, context):
for ent in query._entities]
filtered = id in filter_fns
- single_entity = filtered and len(query._entities) == 1
+ single_entity = len(query._entities) == 1 and \
+ query._entities[0].supports_single_entity
if filtered:
if single_entity:
@@ -44,7 +44,7 @@ def instances(query, cursor, context):
return tuple(fn(x) for x, fn in zip(row, filter_fns))
custom_rows = single_entity and \
- query._entities[0].mapper.dispatch.append_result
+ query._entities[0].custom_rows
(process, labels) = \
list(zip(*[
@@ -98,11 +98,10 @@ def instances(query, cursor, context):
break
-def merge_result(query, iterator, load=True):
+@util.dependencies("sqlalchemy.orm.query")
+def merge_result(querylib, query, iterator, load=True):
"""Merge a result into this :class:`.Query` object's Session."""
- from . import query as querylib
-
session = query.session
if load:
# flush current contents if we expect to load data
@@ -175,8 +174,6 @@ def load_on_ident(query, key,
only_load_props=None):
"""Load the given identity key from the database."""
- lockmode = lockmode or query._lockmode
-
if key is not None:
ident = key[1]
else:
@@ -214,10 +211,17 @@ def load_on_ident(query, key,
q._params = params
if lockmode is not None:
- q._lockmode = lockmode
+ version_check = True
+ q = q.with_lockmode(lockmode)
+ elif query._for_update_arg is not None:
+ version_check = True
+ q._for_update_arg = query._for_update_arg
+ else:
+ version_check = False
+
q._get_options(
populate_existing=bool(refresh_state),
- version_check=(lockmode is not None),
+ version_check=version_check,
only_load_props=only_load_props,
refresh_state=refresh_state)
q._order_by = None
@@ -547,7 +551,7 @@ def load_scalar_attributes(mapper, state, attribute_names):
"""initiate a column-based attribute refresh operation."""
#assert mapper is _state_mapper(state)
- session = sessionlib._state_session(state)
+ session = state.session
if not session:
raise orm_exc.DetachedInstanceError(
"Instance %s is not bound to a Session; "
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index 5929aea6c..fc75a0cb5 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -1,5 +1,5 @@
# orm/mapper.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -22,26 +22,18 @@ from collections import deque
from .. import sql, util, log, exc as sa_exc, event, schema, inspection
from ..sql import expression, visitors, operators, util as sql_util
-from . import instrumentation, attributes, \
- exc as orm_exc, events, loading, dependency
+from . import instrumentation, attributes, exc as orm_exc, loading, dependency
+from . import properties
from .interfaces import MapperProperty, _InspectionAttr, _MappedAttribute
-from .util import _INSTRUMENTOR, _class_to_mapper, \
- _state_mapper, class_mapper, \
- PathRegistry, state_str
+from .base import _class_to_mapper, _state_mapper, class_mapper, \
+ state_str, _INSTRUMENTOR
+from .path_registry import PathRegistry
+
import sys
-properties = util.importlater("sqlalchemy.orm", "properties")
-descriptor_props = util.importlater("sqlalchemy.orm", "descriptor_props")
-__all__ = (
- 'Mapper',
- '_mapper_registry',
- 'class_mapper',
- 'object_mapper',
- )
_mapper_registry = weakref.WeakKeyDictionary()
-_new_mappers = False
_already_compiling = False
_memoized_configured_property = util.group_expirable_memoized_property()
@@ -56,6 +48,8 @@ NO_ATTRIBUTE = util.symbol('NO_ATTRIBUTE')
_CONFIGURE_MUTEX = util.threading.RLock()
+@inspection._self_inspects
+@log.class_logger
class Mapper(_InspectionAttr):
"""Define the correlation of class attributes to database table
columns.
@@ -88,9 +82,12 @@ class Mapper(_InspectionAttr):
"""
+
+ _new_mappers = False
+
def __init__(self,
class_,
- local_table,
+ local_table=None,
properties=None,
primary_key=None,
non_primary=False,
@@ -118,10 +115,380 @@ class Mapper(_InspectionAttr):
legacy_is_orphan=False,
_compiled_cache_size=100,
):
- """Construct a new mapper.
+ """Return a new :class:`~.Mapper` object.
+
+ This function is typically used behind the scenes
+ via the Declarative extension. When using Declarative,
+ many of the usual :func:`.mapper` arguments are handled
+ by the Declarative extension itself, including ``class_``,
+ ``local_table``, ``properties``, and ``inherits``.
+ Other options are passed to :func:`.mapper` using
+ the ``__mapper_args__`` class variable::
+
+ class MyClass(Base):
+ __tablename__ = 'my_table'
+ id = Column(Integer, primary_key=True)
+ type = Column(String(50))
+ alt = Column("some_alt", Integer)
+
+ __mapper_args__ = {
+ 'polymorphic_on' : type
+ }
+
+
+ Explicit use of :func:`.mapper`
+ is often referred to as *classical mapping*. The above
+ declarative example is equivalent in classical form to::
+
+ my_table = Table("my_table", metadata,
+ Column('id', Integer, primary_key=True),
+ Column('type', String(50)),
+ Column("some_alt", Integer)
+ )
+
+ class MyClass(object):
+ pass
+
+ mapper(MyClass, my_table,
+ polymorphic_on=my_table.c.type,
+ properties={
+ 'alt':my_table.c.some_alt
+ })
+
+ .. seealso::
+
+ :ref:`classical_mapping` - discussion of direct usage of
+ :func:`.mapper`
+
+ :param class\_: The class to be mapped. When using Declarative,
+ this argument is automatically passed as the declared class
+ itself.
+
+ :param local_table: The :class:`.Table` or other selectable
+ to which the class is mapped. May be ``None`` if
+ this mapper inherits from another mapper using single-table
+ inheritance. When using Declarative, this argument is
+ automatically passed by the extension, based on what
+ is configured via the ``__table__`` argument or via the
+ :class:`.Table` produced as a result of the ``__tablename__``
+ and :class:`.Column` arguments present.
+
+ :param always_refresh: If True, all query operations for this mapped
+ class will overwrite all data within object instances that already
+ exist within the session, erasing any in-memory changes with
+ whatever information was loaded from the database. Usage of this
+ flag is highly discouraged; as an alternative, see the method
+ :meth:`.Query.populate_existing`.
+
+ :param allow_partial_pks: Defaults to True. Indicates that a
+ composite primary key with some NULL values should be considered as
+ possibly existing within the database. This affects whether a
+ mapper will assign an incoming row to an existing identity, as well
+ as if :meth:`.Session.merge` will check the database first for a
+ particular primary key value. A "partial primary key" can occur if
+ one has mapped to an OUTER JOIN, for example.
+
+ :param batch: Defaults to ``True``, indicating that save operations
+ of multiple entities can be batched together for efficiency.
+ Setting to False indicates
+ that an instance will be fully saved before saving the next
+ instance. This is used in the extremely rare case that a
+ :class:`.MapperEvents` listener requires being called
+ in between individual row persistence operations.
+
+ :param column_prefix: A string which will be prepended
+ to the mapped attribute name when :class:`.Column`
+ objects are automatically assigned as attributes to the
+ mapped class. Does not affect explicitly specified
+ column-based properties.
+
+ See the section :ref:`column_prefix` for an example.
+
+ :param concrete: If True, indicates this mapper should use concrete
+ table inheritance with its parent mapper.
+
+ See the section :ref:`concrete_inheritance` for an example.
+
+ :param eager_defaults: if True, the ORM will immediately fetch the
+ value of server-generated default values after an INSERT or UPDATE,
+ rather than leaving them as expired to be fetched on next access.
+ This can be used for event schemes where the server-generated values
+ are needed immediately before the flush completes. By default,
+ this scheme will emit an individual ``SELECT`` statement per row
+        inserted or updated, which can add significant performance
+ overhead. However, if the
+ target database supports :term:`RETURNING`, the default values will be
+ returned inline with the INSERT or UPDATE statement, which can
+ greatly enhance performance for an application that needs frequent
+ access to just-generated server defaults.
+
+ .. versionchanged:: 0.9.0 The ``eager_defaults`` option can now
+ make use of :term:`RETURNING` for backends which support it.
+
+ :param exclude_properties: A list or set of string column names to
+ be excluded from mapping.
+
+ See :ref:`include_exclude_cols` for an example.
+
+ :param extension: A :class:`.MapperExtension` instance or
+ list of :class:`.MapperExtension` instances which will be applied
+ to all operations by this :class:`.Mapper`. **Deprecated.**
+ Please see :class:`.MapperEvents`.
+
+ :param include_properties: An inclusive list or set of string column
+ names to map.
+
+ See :ref:`include_exclude_cols` for an example.
+
+ :param inherits: A mapped class or the corresponding :class:`.Mapper`
+ of one indicating a superclass to which this :class:`.Mapper`
+ should *inherit* from. The mapped class here must be a subclass
+ of the other mapper's class. When using Declarative, this argument
+ is passed automatically as a result of the natural class
+ hierarchy of the declared classes.
+
+ .. seealso::
+
+ :ref:`inheritance_toplevel`
+
+ :param inherit_condition: For joined table inheritance, a SQL
+ expression which will
+ define how the two tables are joined; defaults to a natural join
+ between the two tables.
+
+ :param inherit_foreign_keys: When ``inherit_condition`` is used and the
+ columns present are missing a :class:`.ForeignKey` configuration,
+ this parameter can be used to specify which columns are "foreign".
+ In most cases can be left as ``None``.
+
+ :param legacy_is_orphan: Boolean, defaults to ``False``.
+ When ``True``, specifies that "legacy" orphan consideration
+ is to be applied to objects mapped by this mapper, which means
+ that a pending (that is, not persistent) object is auto-expunged
+ from an owning :class:`.Session` only when it is de-associated
+ from *all* parents that specify a ``delete-orphan`` cascade towards
+ this mapper. The new default behavior is that the object is auto-expunged
+ when it is de-associated with *any* of its parents that specify
+ ``delete-orphan`` cascade. This behavior is more consistent with
+ that of a persistent object, and allows behavior to be consistent
+ in more scenarios independently of whether or not an orphanable
+ object has been flushed yet or not.
+
+ See the change note and example at :ref:`legacy_is_orphan_addition`
+ for more detail on this change.
+
+ .. versionadded:: 0.8 - the consideration of a pending object as
+ an "orphan" has been modified to more closely match the
+ behavior as that of persistent objects, which is that the object
+ is expunged from the :class:`.Session` as soon as it is
+ de-associated from any of its orphan-enabled parents. Previously,
+ the pending object would be expunged only if de-associated
+ from all of its orphan-enabled parents. The new flag ``legacy_is_orphan``
+ is added to :func:`.orm.mapper` which re-establishes the
+ legacy behavior.
+
+ :param non_primary: Specify that this :class:`.Mapper` is in addition
+ to the "primary" mapper, that is, the one used for persistence.
+ The :class:`.Mapper` created here may be used for ad-hoc
+ mapping of the class to an alternate selectable, for loading
+ only.
+
+ The ``non_primary`` feature is rarely needed with modern
+ usage.
+
+ :param order_by: A single :class:`.Column` or list of :class:`.Column`
+ objects for which selection operations should use as the default
+ ordering for entities. By default mappers have no pre-defined
+ ordering.
+
+ :param passive_updates: Indicates UPDATE behavior of foreign key
+ columns when a primary key column changes on a joined-table
+ inheritance mapping. Defaults to ``True``.
+
+ When True, it is assumed that ON UPDATE CASCADE is configured on
+ the foreign key in the database, and that the database will handle
+ propagation of an UPDATE from a source column to dependent columns
+ on joined-table rows.
+
+ When False, it is assumed that the database does not enforce
+ referential integrity and will not be issuing its own CASCADE
+ operation for an update. The :class:`.Mapper` here will
+ emit an UPDATE statement for the dependent columns during a
+ primary key change.
+
+        .. seealso::
+
+ :ref:`passive_updates` - description of a similar feature as
+ used with :func:`.relationship`
+
+ :param polymorphic_on: Specifies the column, attribute, or
+ SQL expression used to determine the target class for an
+ incoming row, when inheriting classes are present.
+
+ This value is commonly a :class:`.Column` object that's
+ present in the mapped :class:`.Table`::
+
+ class Employee(Base):
+ __tablename__ = 'employee'
+
+ id = Column(Integer, primary_key=True)
+ discriminator = Column(String(50))
+
+ __mapper_args__ = {
+ "polymorphic_on":discriminator,
+ "polymorphic_identity":"employee"
+ }
+
+ It may also be specified
+ as a SQL expression, as in this example where we
+ use the :func:`.case` construct to provide a conditional
+ approach::
+
+ class Employee(Base):
+ __tablename__ = 'employee'
+
+ id = Column(Integer, primary_key=True)
+ discriminator = Column(String(50))
+
+ __mapper_args__ = {
+ "polymorphic_on":case([
+ (discriminator == "EN", "engineer"),
+ (discriminator == "MA", "manager"),
+ ], else_="employee"),
+ "polymorphic_identity":"employee"
+ }
+
+ It may also refer to any attribute
+ configured with :func:`.column_property`, or to the
+ string name of one::
+
+ class Employee(Base):
+ __tablename__ = 'employee'
+
+ id = Column(Integer, primary_key=True)
+ discriminator = Column(String(50))
+ employee_type = column_property(
+ case([
+ (discriminator == "EN", "engineer"),
+ (discriminator == "MA", "manager"),
+ ], else_="employee")
+ )
- Mappers are normally constructed via the
- :func:`~sqlalchemy.orm.mapper` function. See for details.
+ __mapper_args__ = {
+ "polymorphic_on":employee_type,
+ "polymorphic_identity":"employee"
+ }
+
+ .. versionchanged:: 0.7.4
+ ``polymorphic_on`` may be specified as a SQL expression,
+ or refer to any attribute configured with
+ :func:`.column_property`, or to the string name of one.
+
+ When setting ``polymorphic_on`` to reference an
+ attribute or expression that's not present in the
+ locally mapped :class:`.Table`, yet the value
+ of the discriminator should be persisted to the database,
+ the value of the
+ discriminator is not automatically set on new
+ instances; this must be handled by the user,
+ either through manual means or via event listeners.
+ A typical approach to establishing such a listener
+ looks like::
+
+ from sqlalchemy import event
+ from sqlalchemy.orm import object_mapper
+
+ @event.listens_for(Employee, "init", propagate=True)
+ def set_identity(instance, *arg, **kw):
+ mapper = object_mapper(instance)
+ instance.discriminator = mapper.polymorphic_identity
+
+ Where above, we assign the value of ``polymorphic_identity``
+ for the mapped class to the ``discriminator`` attribute,
+ thus persisting the value to the ``discriminator`` column
+ in the database.
+
+ .. seealso::
+
+ :ref:`inheritance_toplevel`
+
+ :param polymorphic_identity: Specifies the value which
+ identifies this particular class as returned by the
+ column expression referred to by the ``polymorphic_on``
+ setting. As rows are received, the value corresponding
+ to the ``polymorphic_on`` column expression is compared
+ to this value, indicating which subclass should
+ be used for the newly reconstructed object.
+
+ :param properties: A dictionary mapping the string names of object
+ attributes to :class:`.MapperProperty` instances, which define the
+ persistence behavior of that attribute. Note that :class:`.Column`
+ objects present in
+ the mapped :class:`.Table` are automatically placed into
+ ``ColumnProperty`` instances upon mapping, unless overridden.
+ When using Declarative, this argument is passed automatically,
+ based on all those :class:`.MapperProperty` instances declared
+ in the declared class body.
+
+ :param primary_key: A list of :class:`.Column` objects which define the
+ primary key to be used against this mapper's selectable unit.
+ This is normally simply the primary key of the ``local_table``, but
+ can be overridden here.
+
+ :param version_id_col: A :class:`.Column`
+ that will be used to keep a running version id of rows
+ in the table. This is used to detect concurrent updates or
+ the presence of stale data in a flush. The methodology is to
+ detect if an UPDATE statement does not match the last known
+ version id, a
+ :class:`~sqlalchemy.orm.exc.StaleDataError` exception is
+ thrown.
+ By default, the column must be of :class:`.Integer` type,
+ unless ``version_id_generator`` specifies an alternative version
+ generator.
+
+ .. seealso::
+
+ :ref:`mapper_version_counter` - discussion of version counting
+ and rationale.
+
+ :param version_id_generator: Define how new version ids should
+ be generated. Defaults to ``None``, which indicates that
+ a simple integer counting scheme be employed. To provide a custom
+ versioning scheme, provide a callable function of the form::
+
+ def generate_version(version):
+ return next_version
+
+ Alternatively, server-side versioning functions such as triggers,
+ or programmatic versioning schemes outside of the version id generator
+ may be used, by specifying the value ``False``.
+ Please see :ref:`server_side_version_counter` for a discussion
+ of important points when using this option.
+
+ .. versionadded:: 0.9.0 ``version_id_generator`` supports server-side
+ version number generation.
+
+ .. seealso::
+
+ :ref:`custom_version_counter`
+
+ :ref:`server_side_version_counter`
+
+
+ :param with_polymorphic: A tuple in the form ``(<classes>,
+ <selectable>)`` indicating the default style of "polymorphic"
+ loading, that is, which tables are queried at once. <classes> is
+ any single or list of mappers and/or classes indicating the
+ inherited classes that should be loaded at once. The special value
+ ``'*'`` may be used to indicate all descending classes should be
+ loaded immediately. The second tuple argument <selectable>
+ indicates a selectable that will be used to query for multiple
+ classes.
+
+ .. seealso::
+
+ :ref:`with_polymorphic` - discussion of polymorphic querying techniques.
"""
@@ -138,9 +505,19 @@ class Mapper(_InspectionAttr):
self.order_by = order_by
self.always_refresh = always_refresh
- self.version_id_col = version_id_col
- self.version_id_generator = version_id_generator or \
- (lambda x: (x or 0) + 1)
+
+ if isinstance(version_id_col, MapperProperty):
+ self.version_id_prop = version_id_col
+ self.version_id_col = None
+ else:
+ self.version_id_col = version_id_col
+ if version_id_generator is False:
+ self.version_id_generator = False
+ elif version_id_generator is None:
+ self.version_id_generator = lambda x: (x or 0) + 1
+ else:
+ self.version_id_generator = version_id_generator
+
self.concrete = concrete
self.single = False
self.inherits = inherits
@@ -218,7 +595,7 @@ class Mapper(_InspectionAttr):
# configure_mappers() until construction succeeds)
_CONFIGURE_MUTEX.acquire()
try:
- events._MapperEventsHold.populate(class_, self)
+ self.dispatch._events._new_mapper_instance(class_, self)
self._configure_inheritance()
self._configure_legacy_instrument_class()
self._configure_class_instrumentation()
@@ -226,8 +603,7 @@ class Mapper(_InspectionAttr):
self._configure_properties()
self._configure_polymorphic_setter()
self._configure_pks()
- global _new_mappers
- _new_mappers = True
+ Mapper._new_mappers = True
self._log("constructed")
self._expire_memoizations()
finally:
@@ -252,7 +628,7 @@ class Mapper(_InspectionAttr):
def entity(self):
"""Part of the inspection API.
- Returns self.class_.
+ Returns self.class\_.
"""
return self.class_
@@ -272,7 +648,9 @@ class Mapper(_InspectionAttr):
this :class:`.Mapper` represents. If this mapper is a
single-table inheriting mapper, local_table will be ``None``.
- See also :attr:`~.Mapper.mapped_table`.
+ .. seealso::
+
+ :attr:`~.Mapper.mapped_table`.
"""
@@ -290,7 +668,9 @@ class Mapper(_InspectionAttr):
subclass. For single-table inheritance mappers, mapped_table
references the base table.
- See also :attr:`~.Mapper.local_table`.
+ .. seealso::
+
+ :attr:`~.Mapper.local_table`.
"""
@@ -309,7 +689,9 @@ class Mapper(_InspectionAttr):
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
- See also :func:`.configure_mappers`.
+ .. seealso::
+
+ :func:`.configure_mappers`.
"""
@@ -478,8 +860,6 @@ class Mapper(_InspectionAttr):
c = None
"""A synonym for :attr:`~.Mapper.columns`."""
- dispatch = event.dispatcher(events.MapperEvents)
-
@util.memoized_property
def _path_registry(self):
return PathRegistry.per_mapper(self)
@@ -489,7 +869,7 @@ class Mapper(_InspectionAttr):
being present."""
# a set of all mappers which inherit from this one.
- self._inheriting_mappers = util.WeakSet()
+ self._inheriting_mappers = util.WeakSequence()
if self.inherits:
if isinstance(self.inherits, type):
@@ -563,7 +943,7 @@ class Mapper(_InspectionAttr):
self.polymorphic_map = self.inherits.polymorphic_map
self.batch = self.inherits.batch
- self.inherits._inheriting_mappers.add(self)
+ self.inherits._inheriting_mappers.append(self)
self.base_mapper = self.inherits.base_mapper
self.passive_updates = self.inherits.passive_updates
self._all_tables = self.inherits._all_tables
@@ -630,7 +1010,7 @@ class Mapper(_InspectionAttr):
self.batch = self.inherits.batch
for mp in self.self_and_descendants:
mp.base_mapper = self.inherits.base_mapper
- self.inherits._inheriting_mappers.add(self)
+ self.inherits._inheriting_mappers.append(self)
self.passive_updates = self.inherits.passive_updates
self._all_tables = self.inherits._all_tables
for key, prop in mapper._props.items():
@@ -735,30 +1115,20 @@ class Mapper(_InspectionAttr):
self._reconstructor = method
event.listen(manager, 'load', _event_on_load, raw=True)
elif hasattr(method, '__sa_validators__'):
- include_removes = getattr(method,
- "__sa_include_removes__", False)
+ validation_opts = method.__sa_validation_opts__
for name in method.__sa_validators__:
self.validators = self.validators.union(
- {name: (method, include_removes)}
+ {name: (method, validation_opts)}
)
manager.info[_INSTRUMENTOR] = self
- @util.deprecated("0.7", message=":meth:`.Mapper.compile` "
- "is replaced by :func:`.configure_mappers`")
- def compile(self):
- """Initialize the inter-mapper relationships of all mappers that
- have been constructed thus far.
+ @classmethod
+ def _configure_all(cls):
+ """Class-level path to the :func:`.configure_mappers` call.
"""
configure_mappers()
- return self
-
- @property
- @util.deprecated("0.7", message=":attr:`.Mapper.compiled` "
- "is replaced by :attr:`.Mapper.configured`")
- def compiled(self):
- return self.configured
def dispose(self):
# Disable any attribute-based compilation.
@@ -956,7 +1326,7 @@ class Mapper(_InspectionAttr):
prop = self.polymorphic_on
self.polymorphic_on = prop.columns[0]
polymorphic_key = prop.key
- elif not expression.is_column(self.polymorphic_on):
+ elif not expression._is_column(self.polymorphic_on):
# polymorphic_on is not a Column and not a ColumnProperty;
# not supported right now.
raise sa_exc.ArgumentError(
@@ -1080,6 +1450,13 @@ class Mapper(_InspectionAttr):
_validate_polymorphic_identity = None
@_memoized_configured_property
+ def _version_id_prop(self):
+ if self.version_id_col is not None:
+ return self._columntoproperty[self.version_id_col]
+ else:
+ return None
+
+ @_memoized_configured_property
def _acceptable_polymorphic_identities(self):
identities = set()
@@ -1205,7 +1582,7 @@ class Mapper(_InspectionAttr):
# generate a properties.ColumnProperty
columns = util.to_list(prop)
column = columns[0]
- if not expression.is_column(column):
+ if not expression._is_column(column):
raise sa_exc.ArgumentError(
"%s=%r is not an instance of MapperProperty or Column"
% (key, prop))
@@ -1369,7 +1746,7 @@ class Mapper(_InspectionAttr):
"""return a MapperProperty associated with the given key.
"""
- if _configure_mappers and _new_mappers:
+ if _configure_mappers and Mapper._new_mappers:
configure_mappers()
try:
@@ -1387,7 +1764,7 @@ class Mapper(_InspectionAttr):
@property
def iterate_properties(self):
"""return an iterator of all MapperProperty objects."""
- if _new_mappers:
+ if Mapper._new_mappers:
configure_mappers()
return iter(self._props.values())
@@ -1461,7 +1838,7 @@ class Mapper(_InspectionAttr):
@_memoized_configured_property
def _with_polymorphic_mappers(self):
- if _new_mappers:
+ if Mapper._new_mappers:
configure_mappers()
if not self.with_polymorphic:
return []
@@ -1493,7 +1870,7 @@ class Mapper(_InspectionAttr):
Normally, this is equivalent to :attr:`.mapped_table`, unless
the ``with_polymorphic`` feature is in use, in which case the
- full "polymoprhic" selectable is returned.
+ full "polymorphic" selectable is returned.
"""
return self._with_polymorphic_selectable
@@ -1568,7 +1945,7 @@ class Mapper(_InspectionAttr):
:attr:`.Mapper.all_orm_descriptors`
"""
- if _new_mappers:
+ if Mapper._new_mappers:
configure_mappers()
return util.ImmutableProperties(self._props)
@@ -1611,23 +1988,23 @@ class Mapper(_InspectionAttr):
"""Return a namespace of all :class:`.SynonymProperty`
properties maintained by this :class:`.Mapper`.
- See also:
+ .. seealso::
- :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
- objects.
+ :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
+ objects.
"""
- return self._filter_properties(descriptor_props.SynonymProperty)
+ return self._filter_properties(properties.SynonymProperty)
@_memoized_configured_property
def column_attrs(self):
"""Return a namespace of all :class:`.ColumnProperty`
properties maintained by this :class:`.Mapper`.
- See also:
+ .. seealso::
- :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
- objects.
+ :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
+ objects.
"""
return self._filter_properties(properties.ColumnProperty)
@@ -1637,10 +2014,10 @@ class Mapper(_InspectionAttr):
"""Return a namespace of all :class:`.RelationshipProperty`
properties maintained by this :class:`.Mapper`.
- See also:
+ .. seealso::
- :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
- objects.
+ :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
+ objects.
"""
return self._filter_properties(properties.RelationshipProperty)
@@ -1650,16 +2027,16 @@ class Mapper(_InspectionAttr):
"""Return a namespace of all :class:`.CompositeProperty`
properties maintained by this :class:`.Mapper`.
- See also:
+ .. seealso::
- :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
- objects.
+ :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
+ objects.
"""
- return self._filter_properties(descriptor_props.CompositeProperty)
+ return self._filter_properties(properties.CompositeProperty)
def _filter_properties(self, type_):
- if _new_mappers:
+ if Mapper._new_mappers:
configure_mappers()
return util.ImmutableProperties(util.OrderedDict(
(k, v) for k, v in self._props.items()
@@ -1805,7 +2182,7 @@ class Mapper(_InspectionAttr):
while stack:
item = stack.popleft()
descendants.append(item)
- stack.extend(sorted(item._inheriting_mappers, key=lambda m: m.class_.__name__))
+ stack.extend(item._inheriting_mappers)
return util.WeakSequence(descendants)
def polymorphic_iterator(self):
@@ -1835,10 +2212,11 @@ class Mapper(_InspectionAttr):
"""Return an identity-map key for use in storing/retrieving an
item from the identity map.
- row
- A ``sqlalchemy.engine.RowProxy`` instance or a
- dictionary corresponding result-set ``ColumnElement``
- instances to their values within a row.
+ :param row: A :class:`.RowProxy` instance. The columns which are mapped
+ by this :class:`.Mapper` should be locatable in the row, preferably
+ via the :class:`.Column` object directly (as is the case when a
+ :func:`.select` construct is executed), or via string names of the form
+ ``<tablename>_<colname>``.
"""
pk_cols = self.primary_key
@@ -1852,8 +2230,7 @@ class Mapper(_InspectionAttr):
"""Return an identity-map key for use in storing/retrieving an
item from an identity map.
- primary_key
- A list of values indicating the identifier.
+ :param primary_key: A list of values indicating the identifier.
"""
return self._identity_class, tuple(primary_key)
@@ -1862,6 +2239,11 @@ class Mapper(_InspectionAttr):
"""Return the identity key for the given instance, based on
its primary key attributes.
+ If the instance's state is expired, calling this method
+ will result in a database check to see if the object has been deleted.
+ If the row no longer exists,
+ :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised.
+
This value is typically also found on the instance state under the
attribute name `key`.
@@ -1882,6 +2264,11 @@ class Mapper(_InspectionAttr):
"""Return the list of primary key values for the given
instance.
+ If the instance's state is expired, calling this method
+ will result in a database check to see if the object has been deleted.
+ If the row no longer exists,
+ :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised.
+
"""
state = attributes.instance_state(instance)
return self._primary_key_from_state(state)
@@ -2070,9 +2457,9 @@ class Mapper(_InspectionAttr):
dep is not None and \
dep is not parent and \
dep.inherit_condition is not None:
- cols = set(sql_util.find_columns(dep.inherit_condition))
+ cols = set(sql_util._find_columns(dep.inherit_condition))
if parent.inherit_condition is not None:
- cols = cols.union(sql_util.find_columns(
+ cols = cols.union(sql_util._find_columns(
parent.inherit_condition))
return fk.parent not in cols and fk.column not in cols
else:
@@ -2107,14 +2494,13 @@ class Mapper(_InspectionAttr):
for m in self.iterate_to_root():
if m._inherits_equated_pairs and \
cols.intersection(
- [l for l, r in m._inherits_equated_pairs]):
+ util.reduce(set.union,
+ [l.proxy_set for l, r in m._inherits_equated_pairs])
+ ):
result[table].append((m, m._inherits_equated_pairs))
return result
-inspection._self_inspects(Mapper)
-log.class_logger(Mapper)
-
def configure_mappers():
"""Initialize the inter-mapper relationships of all mappers that
@@ -2125,8 +2511,7 @@ def configure_mappers():
"""
- global _new_mappers
- if not _new_mappers:
+ if not Mapper._new_mappers:
return
_call_configured = None
@@ -2139,7 +2524,7 @@ def configure_mappers():
try:
# double-check inside mutex
- if not _new_mappers:
+ if not Mapper._new_mappers:
return
# initialize properties on all mappers
@@ -2168,7 +2553,7 @@ def configure_mappers():
mapper._configure_failed = exc
raise
- _new_mappers = False
+ Mapper._new_mappers = False
finally:
_already_compiling = False
finally:
@@ -2220,13 +2605,28 @@ def validates(*names, **kw):
argument "is_remove" which will be a boolean.
.. versionadded:: 0.7.7
+ :param include_backrefs: defaults to ``True``; if ``False``, the
+ validation function will not emit if the originator is an attribute
+ event related via a backref. This can be used for bi-directional
+ :func:`.validates` usage where only one validator should emit per
+ attribute operation.
+
+ .. versionadded:: 0.9.0
+
+ .. seealso::
+
+ :ref:`simple_validators` - usage examples for :func:`.validates`
"""
include_removes = kw.pop('include_removes', False)
+ include_backrefs = kw.pop('include_backrefs', True)
def wrap(fn):
fn.__sa_validators__ = names
- fn.__sa_include_removes__ = include_removes
+ fn.__sa_validation_opts__ = {
+ "include_removes": include_removes,
+ "include_backrefs": include_backrefs
+ }
return fn
return wrap
@@ -2247,7 +2647,7 @@ def _event_on_first_init(manager, cls):
instrumenting_mapper = manager.info.get(_INSTRUMENTOR)
if instrumenting_mapper:
- if _new_mappers:
+ if Mapper._new_mappers:
configure_mappers()
@@ -2262,7 +2662,7 @@ def _event_on_init(state, args, kwargs):
instrumenting_mapper = state.manager.info.get(_INSTRUMENTOR)
if instrumenting_mapper:
- if _new_mappers:
+ if Mapper._new_mappers:
configure_mappers()
if instrumenting_mapper._set_polymorphic_identity:
instrumenting_mapper._set_polymorphic_identity(state)
diff --git a/lib/sqlalchemy/orm/path_registry.py b/lib/sqlalchemy/orm/path_registry.py
new file mode 100644
index 000000000..3397626b8
--- /dev/null
+++ b/lib/sqlalchemy/orm/path_registry.py
@@ -0,0 +1,261 @@
+# orm/path_registry.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+"""Path tracking utilities, representing mapper graph traversals.
+
+"""
+
+from .. import inspection
+from .. import util
+from .. import exc
+from itertools import chain
+from .base import class_mapper
+
+def _unreduce_path(path):
+ return PathRegistry.deserialize(path)
+
+
+_WILDCARD_TOKEN = "*"
+_DEFAULT_TOKEN = "_sa_default"
+
+class PathRegistry(object):
+ """Represent query load paths and registry functions.
+
+ Basically represents structures like:
+
+ (<User mapper>, "orders", <Order mapper>, "items", <Item mapper>)
+
+ These structures are generated by things like
+ query options (joinedload(), subqueryload(), etc.) and are
+ used to compose keys stored in the query._attributes dictionary
+ for various options.
+
+ They are then re-composed at query compile/result row time as
+ the query is formed and as rows are fetched, where they again
+ serve to compose keys to look up options in the context.attributes
+ dictionary, which is copied from query._attributes.
+
+ The path structure has a limited amount of caching, where each
+ "root" ultimately pulls from a fixed registry associated with
+ the first mapper, that also contains elements for each of its
+ property keys. However paths longer than two elements, which
+ are the exception rather than the rule, are generated on an
+ as-needed basis.
+
+ """
+
+ def __eq__(self, other):
+ return other is not None and \
+ self.path == other.path
+
+ def set(self, attributes, key, value):
+ attributes[(key, self.path)] = value
+
+ def setdefault(self, attributes, key, value):
+ attributes.setdefault((key, self.path), value)
+
+ def get(self, attributes, key, value=None):
+ key = (key, self.path)
+ if key in attributes:
+ return attributes[key]
+ else:
+ return value
+
+ def __len__(self):
+ return len(self.path)
+
+ @property
+ def length(self):
+ return len(self.path)
+
+ def pairs(self):
+ path = self.path
+ for i in range(0, len(path), 2):
+ yield path[i], path[i + 1]
+
+ def contains_mapper(self, mapper):
+ for path_mapper in [
+ self.path[i] for i in range(0, len(self.path), 2)
+ ]:
+ if path_mapper.is_mapper and \
+ path_mapper.isa(mapper):
+ return True
+ else:
+ return False
+
+ def contains(self, attributes, key):
+ return (key, self.path) in attributes
+
+ def __reduce__(self):
+ return _unreduce_path, (self.serialize(), )
+
+ def serialize(self):
+ path = self.path
+ return list(zip(
+ [m.class_ for m in [path[i] for i in range(0, len(path), 2)]],
+ [path[i].key for i in range(1, len(path), 2)] + [None]
+ ))
+
+ @classmethod
+ def deserialize(cls, path):
+ if path is None:
+ return None
+
+ p = tuple(chain(*[(class_mapper(mcls),
+ class_mapper(mcls).attrs[key]
+ if key is not None else None)
+ for mcls, key in path]))
+ if p and p[-1] is None:
+ p = p[0:-1]
+ return cls.coerce(p)
+
+ @classmethod
+ def per_mapper(cls, mapper):
+ return EntityRegistry(
+ cls.root, mapper
+ )
+
+ @classmethod
+ def coerce(cls, raw):
+ return util.reduce(lambda prev, next: prev[next], raw, cls.root)
+
+ def token(self, token):
+ if token.endswith(':' + _WILDCARD_TOKEN):
+ return TokenRegistry(self, token)
+ elif token.endswith(":" + _DEFAULT_TOKEN):
+ return TokenRegistry(self.root, token)
+ else:
+ raise exc.ArgumentError("invalid token: %s" % token)
+
+ def __add__(self, other):
+ return util.reduce(
+ lambda prev, next: prev[next],
+ other.path, self)
+
+ def __repr__(self):
+ return "%s(%r)" % (self.__class__.__name__, self.path, )
+
+
+class RootRegistry(PathRegistry):
+ """Root registry, defers to mappers so that
+ paths are maintained per-root-mapper.
+
+ """
+ path = ()
+ has_entity = False
+ def __getitem__(self, entity):
+ return entity._path_registry
+
+PathRegistry.root = RootRegistry()
+
+class TokenRegistry(PathRegistry):
+ def __init__(self, parent, token):
+ self.token = token
+ self.parent = parent
+ self.path = parent.path + (token,)
+
+ has_entity = False
+
+ def __getitem__(self, entity):
+ raise NotImplementedError()
+
+class PropRegistry(PathRegistry):
+ def __init__(self, parent, prop):
+ # restate this path in terms of the
+ # given MapperProperty's parent.
+ insp = inspection.inspect(parent[-1])
+ if not insp.is_aliased_class or insp._use_mapper_path:
+ parent = parent.parent[prop.parent]
+ elif insp.is_aliased_class and insp.with_polymorphic_mappers:
+ if prop.parent is not insp.mapper and \
+ prop.parent in insp.with_polymorphic_mappers:
+ subclass_entity = parent[-1]._entity_for_mapper(prop.parent)
+ parent = parent.parent[subclass_entity]
+
+ self.prop = prop
+ self.parent = parent
+ self.path = parent.path + (prop,)
+
+ @util.memoized_property
+ def has_entity(self):
+ return hasattr(self.prop, "mapper")
+
+ @util.memoized_property
+ def entity(self):
+ return self.prop.mapper
+
+ @util.memoized_property
+ def _wildcard_path_loader_key(self):
+ """Given a path (mapper A, prop X), replace the prop with the wildcard,
+ e.g. (mapper A, 'relationship:*') or (mapper A, 'column:*'), then
+ return within the ("loader", path) structure.
+
+ """
+ return ("loader",
+ self.parent.token(
+ "%s:%s" % (self.prop.strategy_wildcard_key, _WILDCARD_TOKEN)
+ ).path
+ )
+
+ @util.memoized_property
+ def _default_path_loader_key(self):
+ return ("loader",
+ self.parent.token(
+ "%s:%s" % (self.prop.strategy_wildcard_key, _DEFAULT_TOKEN)
+ ).path
+ )
+
+ @util.memoized_property
+ def _loader_key(self):
+ return ("loader", self.path)
+
+ @property
+ def mapper(self):
+ return self.entity
+
+ @property
+ def entity_path(self):
+ return self[self.entity]
+
+ def __getitem__(self, entity):
+ if isinstance(entity, (int, slice)):
+ return self.path[entity]
+ else:
+ return EntityRegistry(
+ self, entity
+ )
+
+class EntityRegistry(PathRegistry, dict):
+ is_aliased_class = False
+ has_entity = True
+
+ def __init__(self, parent, entity):
+ self.key = entity
+ self.parent = parent
+ self.is_aliased_class = entity.is_aliased_class
+ self.entity = entity
+ self.path = parent.path + (entity,)
+ self.entity_path = self
+
+ @property
+ def mapper(self):
+ return inspection.inspect(self.entity).mapper
+
+ def __bool__(self):
+ return True
+ __nonzero__ = __bool__
+
+ def __getitem__(self, entity):
+ if isinstance(entity, (int, slice)):
+ return self.path[entity]
+ else:
+ return dict.__getitem__(self, entity)
+
+ def __missing__(self, key):
+ self[key] = item = PropRegistry(self, key)
+ return item
+
+
+
diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py
index 1f5507edf..b0fa620e3 100644
--- a/lib/sqlalchemy/orm/persistence.py
+++ b/lib/sqlalchemy/orm/persistence.py
@@ -1,5 +1,5 @@
# orm/persistence.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -17,7 +17,7 @@ import operator
from itertools import groupby
from .. import sql, util, exc as sa_exc, schema
from . import attributes, sync, exc as orm_exc, evaluator
-from .util import _state_mapper, state_str, _attr_as_key
+from .base import _state_mapper, state_str, _attr_as_key
from ..sql import expression
from . import loading
@@ -61,7 +61,7 @@ def save_obj(base_mapper, states, uowtransaction, single=False):
if insert:
_emit_insert_statements(base_mapper, uowtransaction,
cached_connections,
- table, insert)
+ mapper, table, insert)
_finalize_insert_update_commands(base_mapper, uowtransaction,
states_to_insert, states_to_update)
@@ -246,9 +246,12 @@ def _collect_insert_commands(base_mapper, uowtransaction, table,
value_params = {}
has_all_pks = True
+ has_all_defaults = True
for col in mapper._cols_by_table[table]:
- if col is mapper.version_id_col:
- params[col.key] = mapper.version_id_generator(None)
+ if col is mapper.version_id_col and \
+ mapper.version_id_generator is not False:
+ val = mapper.version_id_generator(None)
+ params[col.key] = val
else:
# pull straight from the dict for
# pending objects
@@ -261,6 +264,9 @@ def _collect_insert_commands(base_mapper, uowtransaction, table,
elif col.default is None and \
col.server_default is None:
params[col.key] = value
+ elif col.server_default is not None and \
+ mapper.base_mapper.eager_defaults:
+ has_all_defaults = False
elif isinstance(value, sql.ClauseElement):
value_params[col] = value
@@ -268,7 +274,8 @@ def _collect_insert_commands(base_mapper, uowtransaction, table,
params[col.key] = value
insert.append((state, state_dict, params, mapper,
- connection, value_params, has_all_pks))
+ connection, value_params, has_all_pks,
+ has_all_defaults))
return insert
@@ -315,19 +322,20 @@ def _collect_update_commands(base_mapper, uowtransaction,
params[col.key] = history.added[0]
hasdata = True
else:
- params[col.key] = mapper.version_id_generator(
- params[col._label])
-
- # HACK: check for history, in case the
- # history is only
- # in a different table than the one
- # where the version_id_col is.
- for prop in mapper._columntoproperty.values():
- history = attributes.get_state_history(
- state, prop.key,
- attributes.PASSIVE_NO_INITIALIZE)
- if history.added:
- hasdata = True
+ if mapper.version_id_generator is not False:
+ val = mapper.version_id_generator(params[col._label])
+ params[col.key] = val
+
+ # HACK: check for history, in case the
+ # history is only
+ # in a different table than the one
+ # where the version_id_col is.
+ for prop in mapper._columntoproperty.values():
+ history = attributes.get_state_history(
+ state, prop.key,
+ attributes.PASSIVE_NO_INITIALIZE)
+ if history.added:
+ hasdata = True
else:
prop = mapper._columntoproperty[col]
history = attributes.get_state_history(
@@ -409,6 +417,7 @@ def _collect_post_update_commands(base_mapper, uowtransaction, table,
mapper._get_state_attr_by_column(
state,
state_dict, col)
+
elif col in post_update_cols:
prop = mapper._columntoproperty[col]
history = attributes.get_state_history(
@@ -478,7 +487,13 @@ def _emit_update_statements(base_mapper, uowtransaction,
sql.bindparam(mapper.version_id_col._label,
type_=mapper.version_id_col.type))
- return table.update(clause)
+ stmt = table.update(clause)
+ if mapper.base_mapper.eager_defaults:
+ stmt = stmt.return_defaults()
+ elif mapper.version_id_col is not None:
+ stmt = stmt.return_defaults(mapper.version_id_col)
+
+ return stmt
statement = base_mapper._memo(('update', table), update_stmt)
@@ -500,8 +515,7 @@ def _emit_update_statements(base_mapper, uowtransaction,
table,
state,
state_dict,
- c.context.prefetch_cols,
- c.context.postfetch_cols,
+ c,
c.context.compiled_parameters[0],
value_params)
rows += c.rowcount
@@ -521,44 +535,55 @@ def _emit_update_statements(base_mapper, uowtransaction,
def _emit_insert_statements(base_mapper, uowtransaction,
- cached_connections, table, insert):
+ cached_connections, mapper, table, insert):
"""Emit INSERT statements corresponding to value lists collected
by _collect_insert_commands()."""
statement = base_mapper._memo(('insert', table), table.insert)
- for (connection, pkeys, hasvalue, has_all_pks), \
+ for (connection, pkeys, hasvalue, has_all_pks, has_all_defaults), \
records in groupby(insert,
lambda rec: (rec[4],
list(rec[2].keys()),
bool(rec[5]),
- rec[6])
+ rec[6], rec[7])
):
- if has_all_pks and not hasvalue:
+ if \
+ (
+ has_all_defaults
+ or not base_mapper.eager_defaults
+ or not connection.dialect.implicit_returning
+ ) and has_all_pks and not hasvalue:
+
records = list(records)
multiparams = [rec[2] for rec in records]
+
c = cached_connections[connection].\
execute(statement, multiparams)
- for (state, state_dict, params, mapper,
- conn, value_params, has_all_pks), \
+ for (state, state_dict, params, mapper_rec,
+ conn, value_params, has_all_pks, has_all_defaults), \
last_inserted_params in \
zip(records, c.context.compiled_parameters):
_postfetch(
- mapper,
+ mapper_rec,
uowtransaction,
table,
state,
state_dict,
- c.context.prefetch_cols,
- c.context.postfetch_cols,
+ c,
last_inserted_params,
value_params)
else:
- for state, state_dict, params, mapper, \
+ if not has_all_defaults and base_mapper.eager_defaults:
+ statement = statement.return_defaults()
+ elif mapper.version_id_col is not None:
+ statement = statement.return_defaults(mapper.version_id_col)
+
+ for state, state_dict, params, mapper_rec, \
connection, value_params, \
- has_all_pks in records:
+ has_all_pks, has_all_defaults in records:
if value_params:
result = connection.execute(
@@ -574,23 +599,22 @@ def _emit_insert_statements(base_mapper, uowtransaction,
# set primary key attributes
for pk, col in zip(primary_key,
mapper._pks_by_table[table]):
- prop = mapper._columntoproperty[col]
+ prop = mapper_rec._columntoproperty[col]
if state_dict.get(prop.key) is None:
# TODO: would rather say:
#state_dict[prop.key] = pk
- mapper._set_state_attr_by_column(
+ mapper_rec._set_state_attr_by_column(
state,
state_dict,
col, pk)
_postfetch(
- mapper,
+ mapper_rec,
uowtransaction,
table,
state,
state_dict,
- result.context.prefetch_cols,
- result.context.postfetch_cols,
+ result,
result.context.compiled_parameters[0],
value_params)
@@ -699,14 +723,25 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction,
if readonly:
state._expire_attributes(state.dict, readonly)
- # if eager_defaults option is enabled,
- # refresh whatever has been expired.
- if base_mapper.eager_defaults and state.unloaded:
+ # if eager_defaults option is enabled, load
+ # all expired cols. Else if we have a version_id_col, make sure
+ # it isn't expired.
+ toload_now = []
+
+ if base_mapper.eager_defaults:
+ toload_now.extend(state._unloaded_non_object)
+ elif mapper.version_id_col is not None and \
+ mapper.version_id_generator is False:
+ prop = mapper._columntoproperty[mapper.version_id_col]
+ if prop.key in state.unloaded:
+ toload_now.extend([prop.key])
+
+ if toload_now:
state.key = base_mapper._identity_key_from_state(state)
loading.load_on_ident(
uowtransaction.session.query(base_mapper),
state.key, refresh_state=state,
- only_load_props=state.unloaded)
+ only_load_props=toload_now)
# call after_XXX extensions
if not has_identity:
@@ -716,15 +751,26 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction,
def _postfetch(mapper, uowtransaction, table,
- state, dict_, prefetch_cols, postfetch_cols,
- params, value_params):
+ state, dict_, result, params, value_params):
"""Expire attributes in need of newly persisted database state,
after an INSERT or UPDATE statement has proceeded for that
state."""
+ prefetch_cols = result.context.prefetch_cols
+ postfetch_cols = result.context.postfetch_cols
+ returning_cols = result.context.returning_cols
+
if mapper.version_id_col is not None:
prefetch_cols = list(prefetch_cols) + [mapper.version_id_col]
+ if returning_cols:
+ row = result.context.returned_defaults
+ if row is not None:
+ for col in returning_cols:
+ if col.primary_key:
+ continue
+ mapper._set_state_attr_by_column(state, dict_, col, row[col])
+
for c in prefetch_cols:
if c.key in params and c in mapper._columntoproperty:
mapper._set_state_attr_by_column(state, dict_, c, params[c.key])
diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py
index 5986556db..a0def7d31 100644
--- a/lib/sqlalchemy/orm/properties.py
+++ b/lib/sqlalchemy/orm/properties.py
@@ -1,5 +1,5 @@
# orm/properties.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -10,30 +10,20 @@ This is a private module which defines the behavior of invidual ORM-
mapped attributes.
"""
+from __future__ import absolute_import
-from .. import sql, util, log, exc as sa_exc, inspect
-from ..sql import operators, expression
-from . import (
- attributes, mapper,
- strategies, configure_mappers, relationships,
- dependency
- )
-from .util import CascadeOptions, \
- _orm_annotate, _orm_deannotate, _orm_full_deannotate
+from .. import util, log
+from ..sql import expression
+from . import attributes
+from .util import _orm_full_deannotate
-from .interfaces import MANYTOMANY, MANYTOONE, ONETOMANY,\
- PropComparator, StrategizedProperty
-
-mapperlib = util.importlater("sqlalchemy.orm", "mapperlib")
-NoneType = type(None)
-
-from .descriptor_props import CompositeProperty, SynonymProperty, \
- ComparableProperty, ConcreteInheritedProperty
+from .interfaces import PropComparator, StrategizedProperty
__all__ = ['ColumnProperty', 'CompositeProperty', 'SynonymProperty',
- 'ComparableProperty', 'RelationshipProperty', 'RelationProperty']
+ 'ComparableProperty', 'RelationshipProperty']
+@log.class_logger
class ColumnProperty(StrategizedProperty):
"""Describes an object attribute that corresponds to a table column.
@@ -41,31 +31,81 @@ class ColumnProperty(StrategizedProperty):
"""
+ strategy_wildcard_key = 'column'
+
def __init__(self, *columns, **kwargs):
- """Construct a ColumnProperty.
+ """Provide a column-level property for use with a Mapper.
- Note the public constructor is the :func:`.orm.column_property`
- function.
+ Column-based properties can normally be applied to the mapper's
+ ``properties`` dictionary using the :class:`.Column` element directly.
+ Use this function when the given column is not directly present within the
+ mapper's selectable; examples include SQL expressions, functions, and
+ scalar SELECT queries.
- :param \*columns: The list of `columns` describes a single
- object property. If there are multiple tables joined
- together for the mapper, this list represents the equivalent
- column as it appears across each table.
+ Columns that aren't present in the mapper's selectable won't be persisted
+ by the mapper and are effectively "read-only" attributes.
- :param group:
+ :param \*columns:
+ list of Column objects to be mapped.
- :param deferred:
+ :param active_history=False:
+ When ``True``, indicates that the "previous" value for a
+ scalar attribute should be loaded when replaced, if not
+ already loaded. Normally, history tracking logic for
+ simple non-primary-key scalar values only needs to be
+ aware of the "new" value in order to perform a flush. This
+ flag is available for applications that make use of
+ :func:`.attributes.get_history` or :meth:`.Session.is_modified`
+ which also need to know
+ the "previous" value of the attribute.
- :param comparator_factory:
+ .. versionadded:: 0.6.6
- :param descriptor:
+ :param comparator_factory: a class which extends
+ :class:`.ColumnProperty.Comparator` which provides custom SQL clause
+ generation for comparison operations.
- :param expire_on_flush:
+ :param group:
+ a group name for this property when marked as deferred.
- :param extension:
+ :param deferred:
+ when True, the column property is "deferred", meaning that
+ it does not load immediately, and is instead loaded when the
+ attribute is first accessed on an instance. See also
+ :func:`~sqlalchemy.orm.deferred`.
+
+ :param doc:
+ optional string that will be applied as the doc on the
+ class-bound descriptor.
+
+ :param expire_on_flush=True:
+ Disable expiry on flush. A column_property() which refers
+ to a SQL expression (and not a single table-bound column)
+ is considered to be a "read only" property; populating it
+ has no effect on the state of data, and it can only return
+ database state. For this reason a column_property()'s value
+ is expired whenever the parent object is involved in a
+ flush, that is, has any kind of "dirty" state within a flush.
+ Setting this parameter to ``False`` will have the effect of
+ leaving any existing value present after the flush proceeds.
+ Note however that the :class:`.Session` with default expiration
+ settings still expires
+ all attributes after a :meth:`.Session.commit` call.
+
+ .. versionadded:: 0.7.3
:param info: Optional data dictionary which will be populated into the
- :attr:`.info` attribute of this object.
+ :attr:`.MapperProperty.info` attribute of this object.
+
+ .. versionadded:: 0.8
+
+ :param extension:
+ an
+ :class:`.AttributeExtension`
+ instance, or list of extensions, which will be prepended
+ to the list of attribute listeners for the resulting
+ descriptor placed on the class.
+ **Deprecated.** Please see :class:`.AttributeEvents`.
"""
self._orig_columns = [expression._labeled(c) for c in columns]
@@ -102,12 +142,11 @@ class ColumnProperty(StrategizedProperty):
', '.join(sorted(kwargs.keys()))))
util.set_creation_order(self)
- if not self.instrument:
- self.strategy_class = strategies.UninstrumentedColumnLoader
- elif self.deferred:
- self.strategy_class = strategies.DeferredColumnLoader
- else:
- self.strategy_class = strategies.ColumnLoader
+
+ self.strategy_class = self._strategy_lookup(
+ ("deferred", self.deferred),
+ ("instrument", self.instrument)
+ )
@property
def expression(self):
@@ -215,1101 +254,6 @@ class ColumnProperty(StrategizedProperty):
col = self.__clause_element__()
return op(col._bind_param(op, other), col, **kwargs)
- # TODO: legacy..do we need this ? (0.5)
- ColumnComparator = Comparator
-
def __str__(self):
return str(self.parent.class_.__name__) + "." + self.key
-log.class_logger(ColumnProperty)
-
-
-class RelationshipProperty(StrategizedProperty):
- """Describes an object property that holds a single item or list
- of items that correspond to a related database table.
-
- Public constructor is the :func:`.orm.relationship` function.
-
- See also:
-
- :ref:`relationship_config_toplevel`
-
- """
-
- strategy_wildcard_key = 'relationship:*'
-
- _dependency_processor = None
-
- def __init__(self, argument,
- secondary=None, primaryjoin=None,
- secondaryjoin=None,
- foreign_keys=None,
- uselist=None,
- order_by=False,
- backref=None,
- back_populates=None,
- post_update=False,
- cascade=False, extension=None,
- viewonly=False, lazy=True,
- collection_class=None, passive_deletes=False,
- passive_updates=True, remote_side=None,
- enable_typechecks=True, join_depth=None,
- comparator_factory=None,
- single_parent=False, innerjoin=False,
- doc=None,
- active_history=False,
- cascade_backrefs=True,
- load_on_pending=False,
- strategy_class=None, _local_remote_pairs=None,
- query_class=None,
- info=None):
-
- self.uselist = uselist
- self.argument = argument
- self.secondary = secondary
- self.primaryjoin = primaryjoin
- self.secondaryjoin = secondaryjoin
- self.post_update = post_update
- self.direction = None
- self.viewonly = viewonly
- self.lazy = lazy
- self.single_parent = single_parent
- self._user_defined_foreign_keys = foreign_keys
- self.collection_class = collection_class
- self.passive_deletes = passive_deletes
- self.cascade_backrefs = cascade_backrefs
- self.passive_updates = passive_updates
- self.remote_side = remote_side
- self.enable_typechecks = enable_typechecks
- self.query_class = query_class
- self.innerjoin = innerjoin
- self.doc = doc
- self.active_history = active_history
- self.join_depth = join_depth
- self.local_remote_pairs = _local_remote_pairs
- self.extension = extension
- self.load_on_pending = load_on_pending
- self.comparator_factory = comparator_factory or \
- RelationshipProperty.Comparator
- self.comparator = self.comparator_factory(self, None)
- util.set_creation_order(self)
-
- if info is not None:
- self.info = info
-
- if strategy_class:
- self.strategy_class = strategy_class
- elif self.lazy == 'dynamic':
- from sqlalchemy.orm import dynamic
- self.strategy_class = dynamic.DynaLoader
- else:
- self.strategy_class = strategies.factory(self.lazy)
-
- self._reverse_property = set()
-
- self.cascade = cascade if cascade is not False \
- else "save-update, merge"
-
- self.order_by = order_by
-
- self.back_populates = back_populates
-
- if self.back_populates:
- if backref:
- raise sa_exc.ArgumentError(
- "backref and back_populates keyword arguments "
- "are mutually exclusive")
- self.backref = None
- else:
- self.backref = backref
-
- def instrument_class(self, mapper):
- attributes.register_descriptor(
- mapper.class_,
- self.key,
- comparator=self.comparator_factory(self, mapper),
- parententity=mapper,
- doc=self.doc,
- )
-
- class Comparator(PropComparator):
- """Produce boolean, comparison, and other operators for
- :class:`.RelationshipProperty` attributes.
-
- See the documentation for :class:`.PropComparator` for a brief overview
- of ORM level operator definition.
-
- See also:
-
- :class:`.PropComparator`
-
- :class:`.ColumnProperty.Comparator`
-
- :class:`.ColumnOperators`
-
- :ref:`types_operators`
-
- :attr:`.TypeEngine.comparator_factory`
-
- """
-
- _of_type = None
-
- def __init__(self, prop, parentmapper, adapt_to_entity=None, of_type=None):
- """Construction of :class:`.RelationshipProperty.Comparator`
- is internal to the ORM's attribute mechanics.
-
- """
- self.prop = prop
- self._parentmapper = parentmapper
- self._adapt_to_entity = adapt_to_entity
- if of_type:
- self._of_type = of_type
-
- def adapt_to_entity(self, adapt_to_entity):
- return self.__class__(self.property, self._parentmapper,
- adapt_to_entity=adapt_to_entity,
- of_type=self._of_type)
-
- @util.memoized_property
- def mapper(self):
- """The target :class:`.Mapper` referred to by this
- :class:`.RelationshipProperty.Comparator.
-
- This is the "target" or "remote" side of the
- :func:`.relationship`.
-
- """
- return self.property.mapper
-
- @util.memoized_property
- def _parententity(self):
- return self.property.parent
-
- def _source_selectable(self):
- elem = self.property.parent._with_polymorphic_selectable
- if self.adapter:
- return self.adapter(elem)
- else:
- return elem
-
- def __clause_element__(self):
- adapt_from = self._source_selectable()
- if self._of_type:
- of_type = inspect(self._of_type).mapper
- else:
- of_type = None
-
- pj, sj, source, dest, \
- secondary, target_adapter = self.property._create_joins(
- source_selectable=adapt_from,
- source_polymorphic=True,
- of_type=of_type)
- if sj is not None:
- return pj & sj
- else:
- return pj
-
- def of_type(self, cls):
- """Produce a construct that represents a particular 'subtype' of
- attribute for the parent class.
-
- Currently this is usable in conjunction with :meth:`.Query.join`
- and :meth:`.Query.outerjoin`.
-
- """
- return RelationshipProperty.Comparator(
- self.property,
- self._parentmapper,
- adapt_to_entity=self._adapt_to_entity,
- of_type=cls)
-
- def in_(self, other):
- """Produce an IN clause - this is not implemented
- for :func:`~.orm.relationship`-based attributes at this time.
-
- """
- raise NotImplementedError('in_() not yet supported for '
- 'relationships. For a simple many-to-one, use '
- 'in_() against the set of foreign key values.')
-
- __hash__ = None
-
- def __eq__(self, other):
- """Implement the ``==`` operator.
-
- In a many-to-one context, such as::
-
- MyClass.some_prop == <some object>
-
- this will typically produce a
- clause such as::
-
- mytable.related_id == <some id>
-
- Where ``<some id>`` is the primary key of the given
- object.
-
- The ``==`` operator provides partial functionality for non-
- many-to-one comparisons:
-
- * Comparisons against collections are not supported.
- Use :meth:`~.RelationshipProperty.Comparator.contains`.
- * Compared to a scalar one-to-many, will produce a
- clause that compares the target columns in the parent to
- the given target.
- * Compared to a scalar many-to-many, an alias
- of the association table will be rendered as
- well, forming a natural join that is part of the
- main body of the query. This will not work for
- queries that go beyond simple AND conjunctions of
- comparisons, such as those which use OR. Use
- explicit joins, outerjoins, or
- :meth:`~.RelationshipProperty.Comparator.has` for
- more comprehensive non-many-to-one scalar
- membership tests.
- * Comparisons against ``None`` given in a one-to-many
- or many-to-many context produce a NOT EXISTS clause.
-
- """
- if isinstance(other, (NoneType, expression.Null)):
- if self.property.direction in [ONETOMANY, MANYTOMANY]:
- return ~self._criterion_exists()
- else:
- return _orm_annotate(self.property._optimized_compare(
- None, adapt_source=self.adapter))
- elif self.property.uselist:
- raise sa_exc.InvalidRequestError("Can't compare a colle"
- "ction to an object or collection; use "
- "contains() to test for membership.")
- else:
- return _orm_annotate(self.property._optimized_compare(other,
- adapt_source=self.adapter))
-
- def _criterion_exists(self, criterion=None, **kwargs):
- if getattr(self, '_of_type', None):
- info = inspect(self._of_type)
- target_mapper, to_selectable, is_aliased_class = \
- info.mapper, info.selectable, info.is_aliased_class
- if self.property._is_self_referential and not is_aliased_class:
- to_selectable = to_selectable.alias()
-
- single_crit = target_mapper._single_table_criterion
- if single_crit is not None:
- if criterion is not None:
- criterion = single_crit & criterion
- else:
- criterion = single_crit
- else:
- is_aliased_class = False
- to_selectable = None
-
- if self.adapter:
- source_selectable = self._source_selectable()
- else:
- source_selectable = None
-
- pj, sj, source, dest, secondary, target_adapter = \
- self.property._create_joins(dest_polymorphic=True,
- dest_selectable=to_selectable,
- source_selectable=source_selectable)
-
- for k in kwargs:
- crit = getattr(self.property.mapper.class_, k) == kwargs[k]
- if criterion is None:
- criterion = crit
- else:
- criterion = criterion & crit
-
- # annotate the *local* side of the join condition, in the case
- # of pj + sj this is the full primaryjoin, in the case of just
- # pj its the local side of the primaryjoin.
- if sj is not None:
- j = _orm_annotate(pj) & sj
- else:
- j = _orm_annotate(pj, exclude=self.property.remote_side)
-
- if criterion is not None and target_adapter and not is_aliased_class:
- # limit this adapter to annotated only?
- criterion = target_adapter.traverse(criterion)
-
- # only have the "joined left side" of what we
- # return be subject to Query adaption. The right
- # side of it is used for an exists() subquery and
- # should not correlate or otherwise reach out
- # to anything in the enclosing query.
- if criterion is not None:
- criterion = criterion._annotate(
- {'no_replacement_traverse': True})
-
- crit = j & criterion
-
- ex = sql.exists([1], crit, from_obj=dest).correlate_except(dest)
- if secondary is not None:
- ex = ex.correlate_except(secondary)
- return ex
-
- def any(self, criterion=None, **kwargs):
- """Produce an expression that tests a collection against
- particular criterion, using EXISTS.
-
- An expression like::
-
- session.query(MyClass).filter(
- MyClass.somereference.any(SomeRelated.x==2)
- )
-
-
- Will produce a query like::
-
- SELECT * FROM my_table WHERE
- EXISTS (SELECT 1 FROM related WHERE related.my_id=my_table.id
- AND related.x=2)
-
- Because :meth:`~.RelationshipProperty.Comparator.any` uses
- a correlated subquery, its performance is not nearly as
- good when compared against large target tables as that of
- using a join.
-
- :meth:`~.RelationshipProperty.Comparator.any` is particularly
- useful for testing for empty collections::
-
- session.query(MyClass).filter(
- ~MyClass.somereference.any()
- )
-
- will produce::
-
- SELECT * FROM my_table WHERE
- NOT EXISTS (SELECT 1 FROM related WHERE
- related.my_id=my_table.id)
-
- :meth:`~.RelationshipProperty.Comparator.any` is only
- valid for collections, i.e. a :func:`.relationship`
- that has ``uselist=True``. For scalar references,
- use :meth:`~.RelationshipProperty.Comparator.has`.
-
- """
- if not self.property.uselist:
- raise sa_exc.InvalidRequestError(
- "'any()' not implemented for scalar "
- "attributes. Use has()."
- )
-
- return self._criterion_exists(criterion, **kwargs)
-
- def has(self, criterion=None, **kwargs):
- """Produce an expression that tests a scalar reference against
- particular criterion, using EXISTS.
-
- An expression like::
-
- session.query(MyClass).filter(
- MyClass.somereference.has(SomeRelated.x==2)
- )
-
-
- Will produce a query like::
-
- SELECT * FROM my_table WHERE
- EXISTS (SELECT 1 FROM related WHERE
- related.id==my_table.related_id AND related.x=2)
-
- Because :meth:`~.RelationshipProperty.Comparator.has` uses
- a correlated subquery, its performance is not nearly as
- good when compared against large target tables as that of
- using a join.
-
- :meth:`~.RelationshipProperty.Comparator.has` is only
- valid for scalar references, i.e. a :func:`.relationship`
- that has ``uselist=False``. For collection references,
- use :meth:`~.RelationshipProperty.Comparator.any`.
-
- """
- if self.property.uselist:
- raise sa_exc.InvalidRequestError(
- "'has()' not implemented for collections. "
- "Use any().")
- return self._criterion_exists(criterion, **kwargs)
-
- def contains(self, other, **kwargs):
- """Return a simple expression that tests a collection for
- containment of a particular item.
-
- :meth:`~.RelationshipProperty.Comparator.contains` is
- only valid for a collection, i.e. a
- :func:`~.orm.relationship` that implements
- one-to-many or many-to-many with ``uselist=True``.
-
- When used in a simple one-to-many context, an
- expression like::
-
- MyClass.contains(other)
-
- Produces a clause like::
-
- mytable.id == <some id>
-
- Where ``<some id>`` is the value of the foreign key
- attribute on ``other`` which refers to the primary
- key of its parent object. From this it follows that
- :meth:`~.RelationshipProperty.Comparator.contains` is
- very useful when used with simple one-to-many
- operations.
-
- For many-to-many operations, the behavior of
- :meth:`~.RelationshipProperty.Comparator.contains`
- has more caveats. The association table will be
- rendered in the statement, producing an "implicit"
- join, that is, includes multiple tables in the FROM
- clause which are equated in the WHERE clause::
-
- query(MyClass).filter(MyClass.contains(other))
-
- Produces a query like::
-
- SELECT * FROM my_table, my_association_table AS
- my_association_table_1 WHERE
- my_table.id = my_association_table_1.parent_id
- AND my_association_table_1.child_id = <some id>
-
- Where ``<some id>`` would be the primary key of
- ``other``. From the above, it is clear that
- :meth:`~.RelationshipProperty.Comparator.contains`
- will **not** work with many-to-many collections when
- used in queries that move beyond simple AND
- conjunctions, such as multiple
- :meth:`~.RelationshipProperty.Comparator.contains`
- expressions joined by OR. In such cases subqueries or
- explicit "outer joins" will need to be used instead.
- See :meth:`~.RelationshipProperty.Comparator.any` for
- a less-performant alternative using EXISTS, or refer
- to :meth:`.Query.outerjoin` as well as :ref:`ormtutorial_joins`
- for more details on constructing outer joins.
-
- """
- if not self.property.uselist:
- raise sa_exc.InvalidRequestError(
- "'contains' not implemented for scalar "
- "attributes. Use ==")
- clause = self.property._optimized_compare(other,
- adapt_source=self.adapter)
-
- if self.property.secondaryjoin is not None:
- clause.negation_clause = \
- self.__negated_contains_or_equals(other)
-
- return clause
-
- def __negated_contains_or_equals(self, other):
- if self.property.direction == MANYTOONE:
- state = attributes.instance_state(other)
-
- def state_bindparam(x, state, col):
- o = state.obj() # strong ref
- return sql.bindparam(x, unique=True, callable_=lambda: \
- self.property.mapper._get_committed_attr_by_column(o, col))
-
- def adapt(col):
- if self.adapter:
- return self.adapter(col)
- else:
- return col
-
- if self.property._use_get:
- return sql.and_(*[
- sql.or_(
- adapt(x) != state_bindparam(adapt(x), state, y),
- adapt(x) == None)
- for (x, y) in self.property.local_remote_pairs])
-
- criterion = sql.and_(*[x == y for (x, y) in
- zip(
- self.property.mapper.primary_key,
- self.property.\
- mapper.\
- primary_key_from_instance(other))
- ])
- return ~self._criterion_exists(criterion)
-
- def __ne__(self, other):
- """Implement the ``!=`` operator.
-
- In a many-to-one context, such as::
-
- MyClass.some_prop != <some object>
-
- This will typically produce a clause such as::
-
- mytable.related_id != <some id>
-
- Where ``<some id>`` is the primary key of the
- given object.
-
- The ``!=`` operator provides partial functionality for non-
- many-to-one comparisons:
-
- * Comparisons against collections are not supported.
- Use
- :meth:`~.RelationshipProperty.Comparator.contains`
- in conjunction with :func:`~.expression.not_`.
- * Compared to a scalar one-to-many, will produce a
- clause that compares the target columns in the parent to
- the given target.
- * Compared to a scalar many-to-many, an alias
- of the association table will be rendered as
- well, forming a natural join that is part of the
- main body of the query. This will not work for
- queries that go beyond simple AND conjunctions of
- comparisons, such as those which use OR. Use
- explicit joins, outerjoins, or
- :meth:`~.RelationshipProperty.Comparator.has` in
- conjunction with :func:`~.expression.not_` for
- more comprehensive non-many-to-one scalar
- membership tests.
- * Comparisons against ``None`` given in a one-to-many
- or many-to-many context produce an EXISTS clause.
-
- """
- if isinstance(other, (NoneType, expression.Null)):
- if self.property.direction == MANYTOONE:
- return sql.or_(*[x != None for x in
- self.property._calculated_foreign_keys])
- else:
- return self._criterion_exists()
- elif self.property.uselist:
- raise sa_exc.InvalidRequestError("Can't compare a collection"
- " to an object or collection; use "
- "contains() to test for membership.")
- else:
- return self.__negated_contains_or_equals(other)
-
- @util.memoized_property
- def property(self):
- if mapperlib.module._new_mappers:
- configure_mappers()
- return self.prop
-
- def compare(self, op, value,
- value_is_parent=False,
- alias_secondary=True):
- if op == operators.eq:
- if value is None:
- if self.uselist:
- return ~sql.exists([1], self.primaryjoin)
- else:
- return self._optimized_compare(None,
- value_is_parent=value_is_parent,
- alias_secondary=alias_secondary)
- else:
- return self._optimized_compare(value,
- value_is_parent=value_is_parent,
- alias_secondary=alias_secondary)
- else:
- return op(self.comparator, value)
-
- def _optimized_compare(self, value, value_is_parent=False,
- adapt_source=None,
- alias_secondary=True):
- if value is not None:
- value = attributes.instance_state(value)
- return self._get_strategy(strategies.LazyLoader).lazy_clause(value,
- reverse_direction=not value_is_parent,
- alias_secondary=alias_secondary,
- adapt_source=adapt_source)
-
- def __str__(self):
- return str(self.parent.class_.__name__) + "." + self.key
-
- def merge(self,
- session,
- source_state,
- source_dict,
- dest_state,
- dest_dict,
- load, _recursive):
-
- if load:
- for r in self._reverse_property:
- if (source_state, r) in _recursive:
- return
-
- if not "merge" in self._cascade:
- return
-
- if self.key not in source_dict:
- return
-
- if self.uselist:
- instances = source_state.get_impl(self.key).\
- get(source_state, source_dict)
- if hasattr(instances, '_sa_adapter'):
- # convert collections to adapters to get a true iterator
- instances = instances._sa_adapter
-
- if load:
- # for a full merge, pre-load the destination collection,
- # so that individual _merge of each item pulls from identity
- # map for those already present.
- # also assumes CollectionAttrbiuteImpl behavior of loading
- # "old" list in any case
- dest_state.get_impl(self.key).get(dest_state, dest_dict)
-
- dest_list = []
- for current in instances:
- current_state = attributes.instance_state(current)
- current_dict = attributes.instance_dict(current)
- _recursive[(current_state, self)] = True
- obj = session._merge(current_state, current_dict,
- load=load, _recursive=_recursive)
- if obj is not None:
- dest_list.append(obj)
-
- if not load:
- coll = attributes.init_state_collection(dest_state,
- dest_dict, self.key)
- for c in dest_list:
- coll.append_without_event(c)
- else:
- dest_state.get_impl(self.key)._set_iterable(dest_state,
- dest_dict, dest_list)
- else:
- current = source_dict[self.key]
- if current is not None:
- current_state = attributes.instance_state(current)
- current_dict = attributes.instance_dict(current)
- _recursive[(current_state, self)] = True
- obj = session._merge(current_state, current_dict,
- load=load, _recursive=_recursive)
- else:
- obj = None
-
- if not load:
- dest_dict[self.key] = obj
- else:
- dest_state.get_impl(self.key).set(dest_state,
- dest_dict, obj, None)
-
- def _value_as_iterable(self, state, dict_, key,
- passive=attributes.PASSIVE_OFF):
- """Return a list of tuples (state, obj) for the given
- key.
-
- returns an empty list if the value is None/empty/PASSIVE_NO_RESULT
- """
-
- impl = state.manager[key].impl
- x = impl.get(state, dict_, passive=passive)
- if x is attributes.PASSIVE_NO_RESULT or x is None:
- return []
- elif hasattr(impl, 'get_collection'):
- return [
- (attributes.instance_state(o), o) for o in
- impl.get_collection(state, dict_, x, passive=passive)
- ]
- else:
- return [(attributes.instance_state(x), x)]
-
- def cascade_iterator(self, type_, state, dict_,
- visited_states, halt_on=None):
- #assert type_ in self._cascade
-
- # only actively lazy load on the 'delete' cascade
- if type_ != 'delete' or self.passive_deletes:
- passive = attributes.PASSIVE_NO_INITIALIZE
- else:
- passive = attributes.PASSIVE_OFF
-
- if type_ == 'save-update':
- tuples = state.manager[self.key].impl.\
- get_all_pending(state, dict_)
-
- else:
- tuples = self._value_as_iterable(state, dict_, self.key,
- passive=passive)
-
- skip_pending = type_ == 'refresh-expire' and 'delete-orphan' \
- not in self._cascade
-
- for instance_state, c in tuples:
- if instance_state in visited_states:
- continue
-
- if c is None:
- # would like to emit a warning here, but
- # would not be consistent with collection.append(None)
- # current behavior of silently skipping.
- # see [ticket:2229]
- continue
-
- instance_dict = attributes.instance_dict(c)
-
- if halt_on and halt_on(instance_state):
- continue
-
- if skip_pending and not instance_state.key:
- continue
-
- instance_mapper = instance_state.manager.mapper
-
- if not instance_mapper.isa(self.mapper.class_manager.mapper):
- raise AssertionError("Attribute '%s' on class '%s' "
- "doesn't handle objects "
- "of type '%s'" % (
- self.key,
- self.parent.class_,
- c.__class__
- ))
-
- visited_states.add(instance_state)
-
- yield c, instance_mapper, instance_state, instance_dict
-
- def _add_reverse_property(self, key):
- other = self.mapper.get_property(key, _configure_mappers=False)
- self._reverse_property.add(other)
- other._reverse_property.add(self)
-
- if not other.mapper.common_parent(self.parent):
- raise sa_exc.ArgumentError('reverse_property %r on '
- 'relationship %s references relationship %s, which '
- 'does not reference mapper %s' % (key, self, other,
- self.parent))
- if self.direction in (ONETOMANY, MANYTOONE) and self.direction \
- == other.direction:
- raise sa_exc.ArgumentError('%s and back-reference %s are '
- 'both of the same direction %r. Did you mean to '
- 'set remote_side on the many-to-one side ?'
- % (other, self, self.direction))
-
- @util.memoized_property
- def mapper(self):
- """Return the targeted :class:`.Mapper` for this
- :class:`.RelationshipProperty`.
-
- This is a lazy-initializing static attribute.
-
- """
- if isinstance(self.argument, type):
- mapper_ = mapper.class_mapper(self.argument,
- configure=False)
- elif isinstance(self.argument, mapper.Mapper):
- mapper_ = self.argument
- elif util.callable(self.argument):
-
- # accept a callable to suit various deferred-
- # configurational schemes
-
- mapper_ = mapper.class_mapper(self.argument(),
- configure=False)
- else:
- raise sa_exc.ArgumentError("relationship '%s' expects "
- "a class or a mapper argument (received: %s)"
- % (self.key, type(self.argument)))
- assert isinstance(mapper_, mapper.Mapper), mapper_
- return mapper_
-
- @util.memoized_property
- @util.deprecated("0.7", "Use .target")
- def table(self):
- """Return the selectable linked to this
- :class:`.RelationshipProperty` object's target
- :class:`.Mapper`."""
- return self.target
-
- def do_init(self):
- self._check_conflicts()
- self._process_dependent_arguments()
- self._setup_join_conditions()
- self._check_cascade_settings(self._cascade)
- self._post_init()
- self._generate_backref()
- super(RelationshipProperty, self).do_init()
-
- def _process_dependent_arguments(self):
- """Convert incoming configuration arguments to their
- proper form.
-
- Callables are resolved, ORM annotations removed.
-
- """
- # accept callables for other attributes which may require
- # deferred initialization. This technique is used
- # by declarative "string configs" and some recipes.
- for attr in (
- 'order_by', 'primaryjoin', 'secondaryjoin',
- 'secondary', '_user_defined_foreign_keys', 'remote_side',
- ):
- attr_value = getattr(self, attr)
- if util.callable(attr_value):
- setattr(self, attr, attr_value())
-
- # remove "annotations" which are present if mapped class
- # descriptors are used to create the join expression.
- for attr in 'primaryjoin', 'secondaryjoin':
- val = getattr(self, attr)
- if val is not None:
- setattr(self, attr, _orm_deannotate(
- expression._only_column_elements(val, attr))
- )
-
- # ensure expressions in self.order_by, foreign_keys,
- # remote_side are all columns, not strings.
- if self.order_by is not False and self.order_by is not None:
- self.order_by = [
- expression._only_column_elements(x, "order_by")
- for x in
- util.to_list(self.order_by)]
-
- self._user_defined_foreign_keys = \
- util.column_set(
- expression._only_column_elements(x, "foreign_keys")
- for x in util.to_column_set(
- self._user_defined_foreign_keys
- ))
-
- self.remote_side = \
- util.column_set(
- expression._only_column_elements(x, "remote_side")
- for x in
- util.to_column_set(self.remote_side))
-
- self.target = self.mapper.mapped_table
-
-
- def _setup_join_conditions(self):
- self._join_condition = jc = relationships.JoinCondition(
- parent_selectable=self.parent.mapped_table,
- child_selectable=self.mapper.mapped_table,
- parent_local_selectable=self.parent.local_table,
- child_local_selectable=self.mapper.local_table,
- primaryjoin=self.primaryjoin,
- secondary=self.secondary,
- secondaryjoin=self.secondaryjoin,
- parent_equivalents=self.parent._equivalent_columns,
- child_equivalents=self.mapper._equivalent_columns,
- consider_as_foreign_keys=self._user_defined_foreign_keys,
- local_remote_pairs=self.local_remote_pairs,
- remote_side=self.remote_side,
- self_referential=self._is_self_referential,
- prop=self,
- support_sync=not self.viewonly,
- can_be_synced_fn=self._columns_are_mapped
- )
- self.primaryjoin = jc.deannotated_primaryjoin
- self.secondaryjoin = jc.deannotated_secondaryjoin
- self.direction = jc.direction
- self.local_remote_pairs = jc.local_remote_pairs
- self.remote_side = jc.remote_columns
- self.local_columns = jc.local_columns
- self.synchronize_pairs = jc.synchronize_pairs
- self._calculated_foreign_keys = jc.foreign_key_columns
- self.secondary_synchronize_pairs = jc.secondary_synchronize_pairs
-
- def _check_conflicts(self):
- """Test that this relationship is legal, warn about
- inheritance conflicts."""
-
- if not self.is_primary() \
- and not mapper.class_mapper(
- self.parent.class_,
- configure=False).has_property(self.key):
- raise sa_exc.ArgumentError("Attempting to assign a new "
- "relationship '%s' to a non-primary mapper on "
- "class '%s'. New relationships can only be added "
- "to the primary mapper, i.e. the very first mapper "
- "created for class '%s' " % (self.key,
- self.parent.class_.__name__,
- self.parent.class_.__name__))
-
- # check for conflicting relationship() on superclass
- if not self.parent.concrete:
- for inheriting in self.parent.iterate_to_root():
- if inheriting is not self.parent \
- and inheriting.has_property(self.key):
- util.warn("Warning: relationship '%s' on mapper "
- "'%s' supersedes the same relationship "
- "on inherited mapper '%s'; this can "
- "cause dependency issues during flush"
- % (self.key, self.parent, inheriting))
-
- def _get_cascade(self):
- """Return the current cascade setting for this
- :class:`.RelationshipProperty`.
- """
- return self._cascade
-
- def _set_cascade(self, cascade):
- cascade = CascadeOptions(cascade)
- if 'mapper' in self.__dict__:
- self._check_cascade_settings(cascade)
- self._cascade = cascade
-
- if self._dependency_processor:
- self._dependency_processor.cascade = cascade
-
- cascade = property(_get_cascade, _set_cascade)
-
- def _check_cascade_settings(self, cascade):
- if cascade.delete_orphan and not self.single_parent \
- and (self.direction is MANYTOMANY or self.direction
- is MANYTOONE):
- raise sa_exc.ArgumentError(
- 'On %s, delete-orphan cascade is not supported '
- 'on a many-to-many or many-to-one relationship '
- 'when single_parent is not set. Set '
- 'single_parent=True on the relationship().'
- % self)
- if self.direction is MANYTOONE and self.passive_deletes:
- util.warn("On %s, 'passive_deletes' is normally configured "
- "on one-to-many, one-to-one, many-to-many "
- "relationships only."
- % self)
-
- if self.passive_deletes == 'all' and \
- ("delete" in cascade or
- "delete-orphan" in cascade):
- raise sa_exc.ArgumentError(
- "On %s, can't set passive_deletes='all' in conjunction "
- "with 'delete' or 'delete-orphan' cascade" % self)
-
- if cascade.delete_orphan:
- self.mapper.primary_mapper()._delete_orphans.append(
- (self.key, self.parent.class_)
- )
-
- def _columns_are_mapped(self, *cols):
- """Return True if all columns in the given collection are
- mapped by the tables referenced by this :class:`.Relationship`.
-
- """
- for c in cols:
- if self.secondary is not None \
- and self.secondary.c.contains_column(c):
- continue
- if not self.parent.mapped_table.c.contains_column(c) and \
- not self.target.c.contains_column(c):
- return False
- return True
-
- def _generate_backref(self):
- """Interpret the 'backref' instruction to create a
- :func:`.relationship` complementary to this one."""
-
- if not self.is_primary():
- return
- if self.backref is not None and not self.back_populates:
- if isinstance(self.backref, str):
- backref_key, kwargs = self.backref, {}
- else:
- backref_key, kwargs = self.backref
- mapper = self.mapper.primary_mapper()
-
- check = set(mapper.iterate_to_root()).\
- union(mapper.self_and_descendants)
- for m in check:
- if m.has_property(backref_key):
- raise sa_exc.ArgumentError("Error creating backref "
- "'%s' on relationship '%s': property of that "
- "name exists on mapper '%s'" % (backref_key,
- self, m))
-
- # determine primaryjoin/secondaryjoin for the
- # backref. Use the one we had, so that
- # a custom join doesn't have to be specified in
- # both directions.
- if self.secondary is not None:
- # for many to many, just switch primaryjoin/
- # secondaryjoin. use the annotated
- # pj/sj on the _join_condition.
- pj = kwargs.pop('primaryjoin',
- self._join_condition.secondaryjoin_minus_local)
- sj = kwargs.pop('secondaryjoin',
- self._join_condition.primaryjoin_minus_local)
- else:
- pj = kwargs.pop('primaryjoin',
- self._join_condition.primaryjoin_reverse_remote)
- sj = kwargs.pop('secondaryjoin', None)
- if sj:
- raise sa_exc.InvalidRequestError(
- "Can't assign 'secondaryjoin' on a backref "
- "against a non-secondary relationship."
- )
-
- foreign_keys = kwargs.pop('foreign_keys',
- self._user_defined_foreign_keys)
- parent = self.parent.primary_mapper()
- kwargs.setdefault('viewonly', self.viewonly)
- kwargs.setdefault('post_update', self.post_update)
- kwargs.setdefault('passive_updates', self.passive_updates)
- self.back_populates = backref_key
- relationship = RelationshipProperty(
- parent, self.secondary,
- pj, sj,
- foreign_keys=foreign_keys,
- back_populates=self.key,
- **kwargs)
- mapper._configure_property(backref_key, relationship)
-
- if self.back_populates:
- self._add_reverse_property(self.back_populates)
-
- def _post_init(self):
- if self.uselist is None:
- self.uselist = self.direction is not MANYTOONE
- if not self.viewonly:
- self._dependency_processor = \
- dependency.DependencyProcessor.from_relationship(self)
-
- @util.memoized_property
- def _use_get(self):
- """memoize the 'use_get' attribute of this RelationshipLoader's
- lazyloader."""
-
- strategy = self._get_strategy(strategies.LazyLoader)
- return strategy.use_get
-
- @util.memoized_property
- def _is_self_referential(self):
- return self.mapper.common_parent(self.parent)
-
- def _create_joins(self, source_polymorphic=False,
- source_selectable=None, dest_polymorphic=False,
- dest_selectable=None, of_type=None):
- if source_selectable is None:
- if source_polymorphic and self.parent.with_polymorphic:
- source_selectable = self.parent._with_polymorphic_selectable
-
- aliased = False
- if dest_selectable is None:
- if dest_polymorphic and self.mapper.with_polymorphic:
- dest_selectable = self.mapper._with_polymorphic_selectable
- aliased = True
- else:
- dest_selectable = self.mapper.mapped_table
-
- if self._is_self_referential and source_selectable is None:
- dest_selectable = dest_selectable.alias()
- aliased = True
- else:
- aliased = True
-
- dest_mapper = of_type or self.mapper
-
- single_crit = dest_mapper._single_table_criterion
- aliased = aliased or (source_selectable is not None)
-
- primaryjoin, secondaryjoin, secondary, target_adapter, dest_selectable = \
- self._join_condition.join_targets(
- source_selectable, dest_selectable, aliased, single_crit
- )
- if source_selectable is None:
- source_selectable = self.parent.local_table
- if dest_selectable is None:
- dest_selectable = self.mapper.local_table
- return (primaryjoin, secondaryjoin, source_selectable,
- dest_selectable, secondary, target_adapter)
-
-
-PropertyLoader = RelationProperty = RelationshipProperty
-log.class_logger(RelationshipProperty)
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index f6fd07e61..6bd465e9c 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -1,5 +1,5 @@
# orm/query.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -24,37 +24,28 @@ from . import (
attributes, interfaces, object_mapper, persistence,
exc as orm_exc, loading
)
+from .base import _entity_descriptor, _is_aliased_class, \
+ _is_mapped_class, _orm_columns, _generative
+from .path_registry import PathRegistry
from .util import (
- AliasedClass, ORMAdapter, _entity_descriptor, PathRegistry,
- _is_aliased_class, _is_mapped_class, _orm_columns,
- join as orm_join, with_parent, aliased
+ AliasedClass, ORMAdapter, join as orm_join, with_parent, aliased
)
-from .. import sql, util, log, exc as sa_exc, inspect, inspection, \
- types as sqltypes
+from .. import sql, util, log, exc as sa_exc, inspect, inspection
from ..sql.expression import _interpret_as_from
from ..sql import (
util as sql_util,
expression, visitors
)
+from ..sql.base import ColumnCollection
+from . import properties
__all__ = ['Query', 'QueryContext', 'aliased']
-def _generative(*assertions):
- """Mark a method as generative."""
-
- @util.decorator
- def generate(fn, *args, **kw):
- self = args[0]._clone()
- for assertion in assertions:
- assertion(self, fn.__name__)
- fn(self, *args[1:], **kw)
- return self
- return generate
-
_path_registry = PathRegistry.root
-
+@inspection._self_inspects
+@log.class_logger
class Query(object):
"""ORM-level SQL construction object.
@@ -77,7 +68,6 @@ class Query(object):
_with_labels = False
_criterion = None
_yield_per = None
- _lockmode = None
_order_by = False
_group_by = False
_having = None
@@ -85,6 +75,7 @@ class Query(object):
_prefixes = None
_offset = None
_limit = None
+ _for_update_arg = None
_statement = None
_correlate = frozenset()
_populate_existing = False
@@ -118,6 +109,7 @@ class Query(object):
if entity_wrapper is None:
entity_wrapper = _QueryEntity
self._entities = []
+ self._primary_entity = None
for ent in util.to_list(entities):
entity_wrapper(self, ent)
@@ -299,11 +291,8 @@ class Query(object):
@property
def _mapper_entities(self):
- # TODO: this is wrong, its hardcoded to "primary entity" when
- # for the case of __all_equivs() it should not be
- # the name of this accessor is wrong too
for ent in self._entities:
- if hasattr(ent, 'primary_entity'):
+ if isinstance(ent, _MapperEntity):
yield ent
def _joinpoint_zero(self):
@@ -313,9 +302,10 @@ class Query(object):
)
def _mapper_zero_or_none(self):
- if not getattr(self._entities[0], 'primary_entity', False):
+ if self._primary_entity:
+ return self._primary_entity.mapper
+ else:
return None
- return self._entities[0].mapper
def _only_mapper_zero(self, rationale=None):
if len(self._entities) > 1:
@@ -327,16 +317,11 @@ class Query(object):
return self._mapper_zero()
def _only_full_mapper_zero(self, methname):
- if len(self._entities) != 1:
+ if self._entities != [self._primary_entity]:
raise sa_exc.InvalidRequestError(
"%s() can only be used against "
"a single mapped class." % methname)
- entity = self._entity_zero()
- if not hasattr(entity, 'primary_entity'):
- raise sa_exc.InvalidRequestError(
- "%s() can only be used against "
- "a single mapped class." % methname)
- return entity.entity_zero
+ return self._primary_entity.entity_zero
def _only_entity_zero(self, rationale=None):
if len(self._entities) > 1:
@@ -555,7 +540,7 @@ class Query(object):
:class:`.Query`, converted
to a scalar subquery with a label of the given name.
- Analogous to :meth:`sqlalchemy.sql.SelectBaseMixin.label`.
+ Analogous to :meth:`sqlalchemy.sql.expression.SelectBase.label`.
.. versionadded:: 0.6.5
@@ -567,7 +552,7 @@ class Query(object):
"""Return the full SELECT statement represented by this
:class:`.Query`, converted to a scalar subquery.
- Analogous to :meth:`sqlalchemy.sql.SelectBaseMixin.as_scalar`.
+ Analogous to :meth:`sqlalchemy.sql.expression.SelectBase.as_scalar`.
.. versionadded:: 0.6.5
@@ -698,7 +683,7 @@ class Query(object):
"""
- if not getattr(self._entities[0], 'primary_entity', False):
+ if not self._primary_entity:
raise sa_exc.InvalidRequestError(
"No primary mapper set up for this Query.")
entity = self._entities[0]._clone()
@@ -811,7 +796,7 @@ class Query(object):
if not self._populate_existing and \
not mapper.always_refresh and \
- self._lockmode is None:
+ self._for_update_arg is None:
instance = loading.get_from_identity(
self.session, key, attributes.PASSIVE_OFF)
@@ -903,11 +888,10 @@ class Query(object):
"""
if property is None:
- from sqlalchemy.orm import properties
mapper = object_mapper(instance)
for prop in mapper.iterate_properties:
- if isinstance(prop, properties.PropertyLoader) and \
+ if isinstance(prop, properties.RelationshipProperty) and \
prop.mapper is self._mapper_zero():
property = prop
break
@@ -936,7 +920,7 @@ class Query(object):
@_generative()
def with_session(self, session):
- """Return a :class:`Query` that will use the given :class:`.Session`.
+ """Return a :class:`.Query` that will use the given :class:`.Session`.
"""
@@ -1140,32 +1124,63 @@ class Query(object):
@_generative()
def with_lockmode(self, mode):
- """Return a new Query object with the specified locking mode.
+ """Return a new :class:`.Query` object with the specified "locking mode",
+ which essentially refers to the ``FOR UPDATE`` clause.
- :param mode: a string representing the desired locking mode. A
- corresponding value is passed to the ``for_update`` parameter of
- :meth:`~sqlalchemy.sql.expression.select` when the query is
- executed. Valid values are:
+ .. deprecated:: 0.9.0 superseded by :meth:`.Query.with_for_update`.
- ``'update'`` - passes ``for_update=True``, which translates to
- ``FOR UPDATE`` (standard SQL, supported by most dialects)
+ :param mode: a string representing the desired locking mode.
+ Valid values are:
- ``'update_nowait'`` - passes ``for_update='nowait'``, which
- translates to ``FOR UPDATE NOWAIT`` (supported by Oracle,
- PostgreSQL 8.1 upwards)
+ * ``None`` - translates to no lockmode
- ``'read'`` - passes ``for_update='read'``, which translates to
- ``LOCK IN SHARE MODE`` (for MySQL), and ``FOR SHARE`` (for
- PostgreSQL)
+ * ``'update'`` - translates to ``FOR UPDATE``
+ (standard SQL, supported by most dialects)
- ``'read_nowait'`` - passes ``for_update='read_nowait'``, which
- translates to ``FOR SHARE NOWAIT`` (supported by PostgreSQL).
+ * ``'update_nowait'`` - translates to ``FOR UPDATE NOWAIT``
+ (supported by Oracle, PostgreSQL 8.1 upwards)
+
+ * ``'read'`` - translates to ``LOCK IN SHARE MODE`` (for MySQL),
+ and ``FOR SHARE`` (for PostgreSQL)
+
+ .. seealso::
+
+ :meth:`.Query.with_for_update` - improved API for
+ specifying the ``FOR UPDATE`` clause.
- .. versionadded:: 0.7.7
- ``FOR SHARE`` and ``FOR SHARE NOWAIT`` (PostgreSQL).
"""
+ self._for_update_arg = LockmodeArg.parse_legacy_query(mode)
+
+ @_generative()
+ def with_for_update(self, read=False, nowait=False, of=None):
+ """return a new :class:`.Query` with the specified options for the
+ ``FOR UPDATE`` clause.
+
+ The behavior of this method is identical to that of
+ :meth:`.SelectBase.with_for_update`. When called with no arguments,
+ the resulting ``SELECT`` statement will have a ``FOR UPDATE`` clause
+ appended. When additional arguments are specified, backend-specific
+ options such as ``FOR UPDATE NOWAIT`` or ``LOCK IN SHARE MODE``
+ can take effect.
+
+ E.g.::
+
+ q = sess.query(User).with_for_update(nowait=True, of=User)
+
+ The above query on a Postgresql backend will render like::
- self._lockmode = mode
+ SELECT users.id AS users_id FROM users FOR UPDATE OF users NOWAIT
+
+ .. versionadded:: 0.9.0 :meth:`.Query.with_for_update` supersedes
+ the :meth:`.Query.with_lockmode` method.
+
+ .. seealso::
+
+ :meth:`.GenerativeSelect.with_for_update` - Core level method with
+ full argument and behavioral description.
+
+ """
+ self._for_update_arg = LockmodeArg(read=read, nowait=nowait, of=of)
@_generative()
def params(self, *args, **kwargs):
@@ -1300,7 +1315,7 @@ class Query(object):
"""apply a HAVING criterion to the query and return the
newly resulting :class:`.Query`.
- :meth:`having` is used in conjunction with :meth:`group_by`.
+ :meth:`~.Query.having` is used in conjunction with :meth:`~.Query.group_by`.
HAVING criterion makes it possible to use filters on aggregate
functions like COUNT, SUM, AVG, MAX, and MIN, eg.::
@@ -1478,7 +1493,7 @@ class Query(object):
q = session.query(User).join(Address)
- The above calling form of :meth:`.join` will raise an error if
+ The above calling form of :meth:`~.Query.join` will raise an error if
either there are no foreign keys between the two entities, or if
there are multiple foreign key linkages between them. In the
above calling form, :meth:`~.Query.join` is called upon to
@@ -1640,14 +1655,14 @@ class Query(object):
example :ref:`examples_xmlpersistence` which illustrates
an XPath-like query system using algorithmic joins.
- :param *props: A collection of one or more join conditions,
+ :param \*props: A collection of one or more join conditions,
each consisting of a relationship-bound attribute or string
relationship name representing an "on clause", or a single
target entity, or a tuple in the form of ``(target, onclause)``.
A special two-argument calling form of the form ``target, onclause``
is also accepted.
:param aliased=False: If True, indicate that the JOIN target should be
- anonymously aliased. Subsequent calls to :class:`~.Query.filter`
+ anonymously aliased. Subsequent calls to :meth:`~.Query.filter`
and similar will adapt the incoming criterion to the target
alias, until :meth:`~.Query.reset_joinpoint` is called.
:param from_joinpoint=False: When using ``aliased=True``, a setting
@@ -1827,14 +1842,30 @@ class Query(object):
raise sa_exc.InvalidRequestError(
"Can't construct a join from %s to %s, they "
"are the same entity" %
- (left, right))
+ (left, right))
l_info = inspect(left)
r_info = inspect(right)
- overlap = not create_aliases and \
- sql_util.selectables_overlap(l_info.selectable,
- r_info.selectable)
+
+ overlap = False
+ if not create_aliases:
+ right_mapper = getattr(r_info, "mapper", None)
+ # if the target is a joined inheritance mapping,
+ # be more liberal about auto-aliasing.
+ if right_mapper and (
+ right_mapper.with_polymorphic or
+ isinstance(right_mapper.mapped_table, expression.Join)
+ ):
+ for from_obj in self._from_obj or [l_info.selectable]:
+ if sql_util.selectables_overlap(l_info.selectable, from_obj) and \
+ sql_util.selectables_overlap(from_obj, r_info.selectable):
+ overlap = True
+ break
+ elif sql_util.selectables_overlap(l_info.selectable, r_info.selectable):
+ overlap = True
+
+
if overlap and l_info.selectable is r_info.selectable:
raise sa_exc.InvalidRequestError(
"Can't join table/selectable '%s' to itself" %
@@ -2219,7 +2250,7 @@ class Query(object):
``Query``.
:param \*prefixes: optional prefixes, typically strings,
- not using any commas. In particular is useful for MySQL keywords.
+ not using any commas. It is particularly useful for MySQL keywords,
e.g.::
@@ -2414,10 +2445,10 @@ class Query(object):
"""
return [
{
- 'name':ent._label_name,
- 'type':ent.type,
- 'aliased':getattr(ent, 'is_aliased_class', False),
- 'expr':ent.expr
+ 'name': ent._label_name,
+ 'type': ent.type,
+ 'aliased': getattr(ent, 'is_aliased_class', False),
+ 'expr': ent.expr
}
for ent in self._entities
]
@@ -2500,7 +2531,7 @@ class Query(object):
.. versionadded:: 0.8.1
"""
- return sql.exists(self.with_entities('1').statement)
+ return sql.exists(self.with_labels().statement.with_only_columns(['1']))
def count(self):
"""Return a count of rows this Query would return.
@@ -2571,19 +2602,37 @@ class Query(object):
The expression evaluator currently doesn't account for differing
string collations between the database and Python.
- Returns the number of rows deleted, excluding any cascades.
+ :return: the count of rows matched as returned by the database's
+ "row count" feature.
- The method does *not* offer in-Python cascading of relationships - it
- is assumed that ON DELETE CASCADE is configured for any foreign key
- references which require it. The Session needs to be expired (occurs
- automatically after commit(), or call expire_all()) in order for the
- state of dependent objects subject to delete or delete-orphan cascade
- to be correctly represented.
+ This method has several key caveats:
- Note that the :meth:`.MapperEvents.before_delete` and
- :meth:`.MapperEvents.after_delete`
- events are **not** invoked from this method. It instead
- invokes :meth:`.SessionEvents.after_bulk_delete`.
+ * The method does **not** offer in-Python cascading of relationships - it
+ is assumed that ON DELETE CASCADE/SET NULL/etc. is configured for any foreign key
+ references which require it, otherwise the database may emit an
+ integrity violation if foreign key references are being enforced.
+
+ After the DELETE, dependent objects in the :class:`.Session` which
+ were impacted by an ON DELETE may not contain the current
+ state, or may have been deleted. This issue is resolved once the
+ :class:`.Session` is expired,
+ which normally occurs upon :meth:`.Session.commit` or can be forced
+ by using :meth:`.Session.expire_all`. Accessing an expired object
+ whose row has been deleted will invoke a SELECT to locate the
+ row; when the row is not found, an :class:`~sqlalchemy.orm.exc.ObjectDeletedError`
+ is raised.
+
+ * The :meth:`.MapperEvents.before_delete` and
+ :meth:`.MapperEvents.after_delete`
+ events are **not** invoked from this method. Instead, the
+ :meth:`.SessionEvents.after_bulk_delete` method is provided to act
+ upon a mass DELETE of entity rows.
+
+ .. seealso::
+
+ :meth:`.Query.update`
+
+ :ref:`inserts_and_updates` - Core SQL tutorial
"""
#TODO: cascades need handling.
@@ -2622,20 +2671,50 @@ class Query(object):
The expression evaluator currently doesn't account for differing
string collations between the database and Python.
- Returns the number of rows matched by the update.
+ :return: the count of rows matched as returned by the database's
+ "row count" feature.
+
+ This method has several key caveats:
+
+ * The method does **not** offer in-Python cascading of relationships - it
+ is assumed that ON UPDATE CASCADE is configured for any foreign key
+ references which require it, otherwise the database may emit an
+ integrity violation if foreign key references are being enforced.
+
+ After the UPDATE, dependent objects in the :class:`.Session` which
+ were impacted by an ON UPDATE CASCADE may not contain the current
+ state; this issue is resolved once the :class:`.Session` is expired,
+ which normally occurs upon :meth:`.Session.commit` or can be forced
+ by using :meth:`.Session.expire_all`.
+
+ * As of 0.8, this method will support multiple table updates, as detailed
+ in :ref:`multi_table_updates`, and this behavior does extend to support
+ updates of joined-inheritance and other multiple table mappings. However,
+ the **join condition of an inheritance mapper is currently not
+ automatically rendered**.
+ Care must be taken in any multiple-table update to explicitly include
+ the joining condition between those tables, even in mappings where
+ this is normally automatic.
+ E.g. if a class ``Engineer`` subclasses ``Employee``, an UPDATE of the
+ ``Engineer`` local table using criteria against the ``Employee``
+ local table might look like::
+
+ session.query(Engineer).\\
+ filter(Engineer.id == Employee.id).\\
+ filter(Employee.name == 'dilbert').\\
+ update({"engineer_type": "programmer"})
+
+ * The :meth:`.MapperEvents.before_update` and
+ :meth:`.MapperEvents.after_update`
+ events are **not** invoked from this method. Instead, the
+ :meth:`.SessionEvents.after_bulk_update` method is provided to act
+ upon a mass UPDATE of entity rows.
- The method does *not* offer in-Python cascading of relationships - it
- is assumed that ON UPDATE CASCADE is configured for any foreign key
- references which require it.
+ .. seealso::
- The Session needs to be expired (occurs automatically after commit(),
- or call expire_all()) in order for the state of dependent objects
- subject foreign key cascade to be correctly represented.
+ :meth:`.Query.delete`
- Note that the :meth:`.MapperEvents.before_update` and
- :meth:`.MapperEvents.after_update`
- events are **not** invoked from this method. It instead
- invokes :meth:`.SessionEvents.after_bulk_update`.
+ :ref:`inserts_and_updates` - Core SQL tutorial
"""
@@ -2650,13 +2729,6 @@ class Query(object):
update_op.exec_()
return update_op.rowcount
- _lockmode_lookup = {
- 'read': 'read',
- 'read_nowait': 'read_nowait',
- 'update': True,
- 'update_nowait': 'nowait',
- None: False
- }
def _compile_context(self, labels=True):
context = QueryContext(self)
@@ -2666,12 +2738,8 @@ class Query(object):
context.labels = labels
- if self._lockmode:
- try:
- context.for_update = self._lockmode_lookup[self._lockmode]
- except KeyError:
- raise sa_exc.ArgumentError(
- "Unknown lockmode %r" % self._lockmode)
+ context._for_update_arg = self._for_update_arg
+
for entity in self._entities:
entity.setup_context(self, context)
@@ -2755,9 +2823,10 @@ class Query(object):
statement = sql.select(
[inner] + context.secondary_columns,
- for_update=context.for_update,
use_labels=context.labels)
+ statement._for_update_arg = context._for_update_arg
+
from_clause = inner
for eager_join in context.eager_joins.values():
# EagerLoader places a 'stop_on' attribute on the join,
@@ -2800,11 +2869,12 @@ class Query(object):
context.whereclause,
from_obj=context.froms,
use_labels=context.labels,
- for_update=context.for_update,
order_by=context.order_by,
**self._select_args
)
+ statement._for_update_arg = context._for_update_arg
+
for hint in self._with_hints:
statement = statement.with_hint(*hint)
@@ -2832,14 +2902,34 @@ class Query(object):
if adapter:
single_crit = adapter.traverse(single_crit)
single_crit = self._adapt_clause(single_crit, False, False)
- context.whereclause = sql.and_(context.whereclause,
- single_crit)
+ context.whereclause = sql.and_(
+ sql.True_._ifnone(context.whereclause),
+ single_crit)
def __str__(self):
return str(self._compile_context().statement)
-inspection._self_inspects(Query)
+from ..sql.selectable import ForUpdateArg
+class LockmodeArg(ForUpdateArg):
+ @classmethod
+ def parse_legacy_query(self, mode):
+ if mode in (None, False):
+ return None
+
+ if mode == "read":
+ read = True
+ nowait = False
+ elif mode == "update":
+ read = nowait = False
+ elif mode == "update_nowait":
+ nowait = True
+ read = False
+ else:
+ raise sa_exc.ArgumentError(
+ "Unknown with_lockmode argument: %r" % mode)
+
+ return LockmodeArg(read=read, nowait=nowait)
class _QueryEntity(object):
"""represent an entity column returned within a Query result."""
@@ -2850,6 +2940,8 @@ class _QueryEntity(object):
if not isinstance(entity, util.string_types) and \
_is_mapped_class(entity):
cls = _MapperEntity
+ elif isinstance(entity, Bundle):
+ cls = _BundleEntity
else:
cls = _ColumnEntity
return object.__new__(cls)
@@ -2864,12 +2956,15 @@ class _MapperEntity(_QueryEntity):
"""mapper/class/AliasedClass entity"""
def __init__(self, query, entity):
- self.primary_entity = not query._entities
+ if not query._primary_entity:
+ query._primary_entity = self
query._entities.append(self)
self.entities = [entity]
self.expr = entity
+ supports_single_entity = True
+
def setup_entity(self, ext_info, aliased_adapter):
self.mapper = ext_info.mapper
self.aliased_adapter = aliased_adapter
@@ -2884,6 +2979,7 @@ class _MapperEntity(_QueryEntity):
else:
self._label_name = self.mapper.class_.__name__
self.path = self.entity_zero._path_registry
+ self.custom_rows = bool(self.mapper.dispatch.append_result)
def set_with_polymorphic(self, query, cls_or_mappers,
selectable, polymorphic_on):
@@ -2939,10 +3035,8 @@ class _MapperEntity(_QueryEntity):
return entity.common_parent(self.entity_zero)
- #_adapted_selectable = None
def adapt_to_selectable(self, query, sel):
query._entities.append(self)
- # self._adapted_selectable = sel
def _get_entity_clauses(self, query, context):
@@ -2980,7 +3074,7 @@ class _MapperEntity(_QueryEntity):
self.selectable,
self.mapper._equivalent_columns)
- if self.primary_entity:
+ if query._primary_entity is self:
_instance = loading.instance_processor(
self.mapper,
context,
@@ -3050,6 +3144,187 @@ class _MapperEntity(_QueryEntity):
def __str__(self):
return str(self.mapper)
+@inspection._self_inspects
+class Bundle(object):
+ """A grouping of SQL expressions that are returned by a :class:`.Query`
+ under one namespace.
+
+ The :class:`.Bundle` essentially allows nesting of the tuple-based
+ results returned by a column-oriented :class:`.Query` object. It also
+ is extensible via simple subclassing, where the primary capability
+ to override is that of how the set of expressions should be returned,
+ allowing post-processing as well as custom return types, without
+ involving ORM identity-mapped classes.
+
+ .. versionadded:: 0.9.0
+
+ .. seealso::
+
+ :ref:`bundles`
+
+ """
+
+ single_entity = False
+ """If True, queries for a single Bundle will be returned as a single
+ entity, rather than an element within a keyed tuple."""
+
+ def __init__(self, name, *exprs, **kw):
+ """Construct a new :class:`.Bundle`.
+
+ e.g.::
+
+ bn = Bundle("mybundle", MyClass.x, MyClass.y)
+
+ for row in session.query(bn).filter(bn.c.x == 5).filter(bn.c.y == 4):
+ print(row.mybundle.x, row.mybundle.y)
+
+ :param name: name of the bundle.
+ :param \*exprs: columns or SQL expressions comprising the bundle.
+ :param single_entity=False: if True, rows for this :class:`.Bundle`
+ can be returned as a "single entity" outside of any enclosing tuple
+ in the same manner as a mapped entity.
+
+ """
+ self.name = self._label = name
+ self.exprs = exprs
+ self.c = self.columns = ColumnCollection()
+ self.columns.update((getattr(col, "key", col._label), col)
+ for col in exprs)
+ self.single_entity = kw.pop('single_entity', self.single_entity)
+
+ columns = None
+ """A namespace of SQL expressions referred to by this :class:`.Bundle`.
+
+ e.g.::
+
+ bn = Bundle("mybundle", MyClass.x, MyClass.y)
+
+ q = sess.query(bn).filter(bn.c.x == 5)
+
+ Nesting of bundles is also supported::
+
+ b1 = Bundle("b1",
+ Bundle('b2', MyClass.a, MyClass.b),
+ Bundle('b3', MyClass.x, MyClass.y)
+ )
+
+ q = sess.query(b1).filter(b1.c.b2.c.a == 5).filter(b1.c.b3.c.y == 9)
+
+ .. seealso::
+
+ :attr:`.Bundle.c`
+
+ """
+
+ c = None
+ """An alias for :attr:`.Bundle.columns`."""
+
+ def _clone(self):
+ cloned = self.__class__.__new__(self.__class__)
+ cloned.__dict__.update(self.__dict__)
+ return cloned
+
+ def __clause_element__(self):
+ return expression.ClauseList(group=False, *self.c)
+
+ @property
+ def clauses(self):
+ return self.__clause_element__().clauses
+
+ def label(self, name):
+ """Provide a copy of this :class:`.Bundle` passing a new label."""
+
+ cloned = self._clone()
+ cloned.name = name
+ return cloned
+
+ def create_row_processor(self, query, procs, labels):
+ """Produce the "row processing" function for this :class:`.Bundle`.
+
+ May be overridden by subclasses.
+
+ .. seealso::
+
+ :ref:`bundles` - includes an example of subclassing.
+
+ """
+ def proc(row, result):
+ return util.KeyedTuple([proc(row, None) for proc in procs], labels)
+ return proc
+
+
+class _BundleEntity(_QueryEntity):
+ def __init__(self, query, bundle, setup_entities=True):
+ query._entities.append(self)
+ self.bundle = self.expr = bundle
+ self.type = type(bundle)
+ self._label_name = bundle.name
+ self._entities = []
+
+ if setup_entities:
+ for expr in bundle.exprs:
+ if isinstance(expr, Bundle):
+ _BundleEntity(self, expr)
+ else:
+ _ColumnEntity(self, expr, namespace=self)
+
+ self.entities = ()
+
+ self.filter_fn = lambda item: item
+
+ self.supports_single_entity = self.bundle.single_entity
+
+ custom_rows = False
+
+ @property
+ def entity_zero(self):
+ for ent in self._entities:
+ ezero = ent.entity_zero
+ if ezero is not None:
+ return ezero
+ else:
+ return None
+
+ def corresponds_to(self, entity):
+ # TODO: this seems to have no effect for
+ # _ColumnEntity either
+ return False
+
+ @property
+ def entity_zero_or_selectable(self):
+ for ent in self._entities:
+ ezero = ent.entity_zero_or_selectable
+ if ezero is not None:
+ return ezero
+ else:
+ return None
+
+ def adapt_to_selectable(self, query, sel):
+ c = _BundleEntity(query, self.bundle, setup_entities=False)
+ #c._label_name = self._label_name
+ #c.entity_zero = self.entity_zero
+ #c.entities = self.entities
+
+ for ent in self._entities:
+ ent.adapt_to_selectable(c, sel)
+
+ def setup_entity(self, ext_info, aliased_adapter):
+ for ent in self._entities:
+ ent.setup_entity(ext_info, aliased_adapter)
+
+ def setup_context(self, query, context):
+ for ent in self._entities:
+ ent.setup_context(query, context)
+
+ def row_processor(self, query, context, custom_rows):
+ procs, labels = zip(
+ *[ent.row_processor(query, context, custom_rows)
+ for ent in self._entities]
+ )
+
+ proc = self.bundle.create_row_processor(query, procs, labels)
+
+ return proc, self._label_name
class _ColumnEntity(_QueryEntity):
"""Column/expression based entity."""
@@ -3066,7 +3341,7 @@ class _ColumnEntity(_QueryEntity):
interfaces.PropComparator
)):
self._label_name = column.key
- column = column.__clause_element__()
+ column = column._query_clause_element()
else:
self._label_name = getattr(column, 'key', None)
@@ -3079,6 +3354,9 @@ class _ColumnEntity(_QueryEntity):
if c is not column:
return
+ elif isinstance(column, Bundle):
+ _BundleEntity(query, column)
+ return
if not isinstance(column, sql.ColumnElement):
raise sa_exc.InvalidRequestError(
@@ -3086,7 +3364,7 @@ class _ColumnEntity(_QueryEntity):
"expected - got '%r'" % (column, )
)
- type_ = column.type
+ self.type = type_ = column.type
if type_.hashable:
self.filter_fn = lambda item: item
else:
@@ -3129,6 +3407,9 @@ class _ColumnEntity(_QueryEntity):
else:
self.entity_zero = None
+ supports_single_entity = False
+ custom_rows = False
+
@property
def entity_zero_or_selectable(self):
if self.entity_zero is not None:
@@ -3138,10 +3419,6 @@ class _ColumnEntity(_QueryEntity):
else:
return None
- @property
- def type(self):
- return self.column.type
-
def adapt_to_selectable(self, query, sel):
c = _ColumnEntity(query, sel.corresponding_column(self.column))
c._label_name = self._label_name
@@ -3154,6 +3431,8 @@ class _ColumnEntity(_QueryEntity):
self.froms.add(ext_info.selectable)
def corresponds_to(self, entity):
+ # TODO: just returning False here,
+ # no tests fail
if self.entity_zero is None:
return False
elif _is_aliased_class(entity):
@@ -3188,14 +3467,11 @@ class _ColumnEntity(_QueryEntity):
return str(self.column)
-log.class_logger(Query)
-
-
class QueryContext(object):
multi_row_eager_loaders = False
adapter = None
froms = ()
- for_update = False
+ for_update = None
def __init__(self, query):
@@ -3230,6 +3506,38 @@ class QueryContext(object):
class AliasOption(interfaces.MapperOption):
def __init__(self, alias):
+ """Return a :class:`.MapperOption` that will indicate to the :class:`.Query`
+ that the main table has been aliased.
+
+ This is a seldom-used option to suit the
+ very rare case that :func:`.contains_eager`
+ is being used in conjunction with a user-defined SELECT
+ statement that aliases the parent table. E.g.::
+
+ # define an aliased UNION called 'ulist'
+ ulist = users.select(users.c.user_id==7).\\
+ union(users.select(users.c.user_id>7)).\\
+ alias('ulist')
+
+ # add on an eager load of "addresses"
+ statement = ulist.outerjoin(addresses).\\
+ select().apply_labels()
+
+ # create query, indicating "ulist" will be an
+ # alias for the main table, "addresses"
+ # property should be eager loaded
+ query = session.query(User).options(
+ contains_alias(ulist),
+ contains_eager(User.addresses))
+
+ # then get results via the statement
+ results = query.from_statement(statement).all()
+
+ :param alias: is the string name of an alias, or a
+ :class:`~.sql.expression.Alias` object representing
+ the alias.
+
+ """
self.alias = alias
def process_query(self, query):
@@ -3238,3 +3546,5 @@ class AliasOption(interfaces.MapperOption):
else:
alias = self.alias
query._from_obj_alias = sql_util.ColumnAdapter(alias)
+
+
diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py
index 33377d3ec..6fdedd382 100644
--- a/lib/sqlalchemy/orm/relationships.py
+++ b/lib/sqlalchemy/orm/relationships.py
@@ -1,5 +1,5 @@
# orm/relationships.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -13,15 +13,20 @@ and `secondaryjoin` aspects of :func:`.relationship`.
"""
-from .. import sql, util, exc as sa_exc, schema
+from .. import sql, util, exc as sa_exc, schema, log
+
+from .util import CascadeOptions, _orm_annotate, _orm_deannotate
+from . import dependency
+from . import attributes
from ..sql.util import (
ClauseAdapter,
join_condition, _shallow_annotate, visit_binary_product,
- _deep_deannotate, find_tables, selectables_overlap
+ _deep_deannotate, selectables_overlap
)
from ..sql import operators, expression, visitors
-from .interfaces import MANYTOMANY, MANYTOONE, ONETOMANY
-
+from .interfaces import MANYTOMANY, MANYTOONE, ONETOMANY, StrategizedProperty, PropComparator
+from ..inspection import inspect
+from . import mapper as mapperlib
def remote(expr):
"""Annotate a portion of a primaryjoin expression
@@ -64,6 +69,1607 @@ def foreign(expr):
{"foreign": True})
+@log.class_logger
+@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
+class RelationshipProperty(StrategizedProperty):
+ """Describes an object property that holds a single item or list
+ of items that correspond to a related database table.
+
+ Public constructor is the :func:`.orm.relationship` function.
+
+ See also:
+
+ :ref:`relationship_config_toplevel`
+
+ """
+
+ strategy_wildcard_key = 'relationship'
+
+ _dependency_processor = None
+
+ def __init__(self, argument,
+ secondary=None, primaryjoin=None,
+ secondaryjoin=None,
+ foreign_keys=None,
+ uselist=None,
+ order_by=False,
+ backref=None,
+ back_populates=None,
+ post_update=False,
+ cascade=False, extension=None,
+ viewonly=False, lazy=True,
+ collection_class=None, passive_deletes=False,
+ passive_updates=True, remote_side=None,
+ enable_typechecks=True, join_depth=None,
+ comparator_factory=None,
+ single_parent=False, innerjoin=False,
+ distinct_target_key=None,
+ doc=None,
+ active_history=False,
+ cascade_backrefs=True,
+ load_on_pending=False,
+ strategy_class=None, _local_remote_pairs=None,
+ query_class=None,
+ info=None):
+ """Provide a relationship of a primary Mapper to a secondary Mapper.
+
+ This corresponds to a parent-child or associative table relationship. The
+ constructed class is an instance of :class:`.RelationshipProperty`.
+
+ A typical :func:`.relationship`, used in a classical mapping::
+
+ mapper(Parent, properties={
+ 'children': relationship(Child)
+ })
+
+ Some arguments accepted by :func:`.relationship` optionally accept a
+ callable function, which when called produces the desired value.
+ The callable is invoked by the parent :class:`.Mapper` at "mapper
+ initialization" time, which happens only when mappers are first used, and
+ is assumed to be after all mappings have been constructed. This can be
+ used to resolve order-of-declaration and other dependency issues, such as
+ if ``Child`` is declared below ``Parent`` in the same file::
+
+ mapper(Parent, properties={
+ "children":relationship(lambda: Child,
+ order_by=lambda: Child.id)
+ })
+
+ When using the :ref:`declarative_toplevel` extension, the Declarative
+ initializer allows string arguments to be passed to :func:`.relationship`.
+ These string arguments are converted into callables that evaluate
+ the string as Python code, using the Declarative
+ class-registry as a namespace. This allows the lookup of related
+ classes to be automatic via their string name, and removes the need to
+ import related classes at all into the local module space::
+
+ from sqlalchemy.ext.declarative import declarative_base
+
+ Base = declarative_base()
+
+ class Parent(Base):
+ __tablename__ = 'parent'
+ id = Column(Integer, primary_key=True)
+ children = relationship("Child", order_by="Child.id")
+
+ A full array of examples and reference documentation regarding
+ :func:`.relationship` is at :ref:`relationship_config_toplevel`.
+
+ :param argument:
+ a mapped class, or actual :class:`.Mapper` instance, representing the
+ target of the relationship.
+
+ ``argument`` may also be passed as a callable function
+ which is evaluated at mapper initialization time, and may be passed as a
+ Python-evaluable string when using Declarative.
+
+ :param secondary:
+ for a many-to-many relationship, specifies the intermediary
+ table, and is an instance of :class:`.Table`. The ``secondary`` keyword
+ argument should generally only
+ be used for a table that is not otherwise expressed in any class
+ mapping, unless this relationship is declared as view only, otherwise
+ conflicting persistence operations can occur.
+
+ ``secondary`` may
+ also be passed as a callable function which is evaluated at
+ mapper initialization time.
+
+ :param active_history=False:
+ When ``True``, indicates that the "previous" value for a
+ many-to-one reference should be loaded when replaced, if
+ not already loaded. Normally, history tracking logic for
+ simple many-to-ones only needs to be aware of the "new"
+ value in order to perform a flush. This flag is available
+ for applications that make use of
+ :func:`.attributes.get_history` which also need to know
+ the "previous" value of the attribute.
+
+ :param backref:
+ indicates the string name of a property to be placed on the related
+ mapper's class that will handle this relationship in the other
+ direction. The other property will be created automatically
+ when the mappers are configured. Can also be passed as a
+ :func:`backref` object to control the configuration of the
+ new relationship.
+
+ :param back_populates:
+ Takes a string name and has the same meaning as ``backref``,
+ except the complementing property is **not** created automatically,
+ and instead must be configured explicitly on the other mapper. The
+ complementing property should also indicate ``back_populates``
+ to this relationship to ensure proper functioning.
+
+ :param cascade:
+ a comma-separated list of cascade rules which determines how
+ Session operations should be "cascaded" from parent to child.
+ This defaults to ``False``, which means the default cascade
+ should be used. The default value is ``"save-update, merge"``.
+
+ Available cascades are:
+
+ * ``save-update`` - cascade the :meth:`.Session.add`
+ operation. This cascade applies both to future and
+ past calls to :meth:`~sqlalchemy.orm.session.Session.add`,
+ meaning new items added to a collection or scalar relationship
+ get placed into the same session as that of the parent, and
+ also applies to items which have been removed from this
+ relationship but are still part of unflushed history.
+
+ * ``merge`` - cascade the :meth:`~sqlalchemy.orm.session.Session.merge`
+ operation
+
+ * ``expunge`` - cascade the :meth:`.Session.expunge`
+ operation
+
+ * ``delete`` - cascade the :meth:`.Session.delete`
+ operation
+
+ * ``delete-orphan`` - if an item of the child's type is
+ detached from its parent, mark it for deletion.
+
+ .. versionchanged:: 0.7
+ This option does not prevent
+ a new instance of the child object from being persisted
+ without a parent to start with; to constrain against
+ that case, ensure the child's foreign key column(s)
+ is configured as NOT NULL
+
+ * ``refresh-expire`` - cascade the :meth:`.Session.expire`
+ and :meth:`~sqlalchemy.orm.session.Session.refresh` operations
+
+ * ``all`` - shorthand for "save-update,merge, refresh-expire,
+ expunge, delete"
+
+ See the section :ref:`unitofwork_cascades` for more background
+ on configuring cascades.
+
+ :param cascade_backrefs=True:
+ a boolean value indicating if the ``save-update`` cascade should
+ operate along an assignment event intercepted by a backref.
+ When set to ``False``,
+ the attribute managed by this relationship will not cascade
+ an incoming transient object into the session of a
+ persistent parent, if the event is received via backref.
+
+ That is::
+
+ mapper(A, a_table, properties={
+ 'bs':relationship(B, backref="a", cascade_backrefs=False)
+ })
+
+ If an ``A()`` is present in the session, assigning it to
+ the "a" attribute on a transient ``B()`` will not place
+ the ``B()`` into the session. To set the flag in the other
+ direction, i.e. so that ``A().bs.append(B())`` won't add
+ a transient ``A()`` into the session for a persistent ``B()``::
+
+ mapper(A, a_table, properties={
+ 'bs':relationship(B,
+ backref=backref("a", cascade_backrefs=False)
+ )
+ })
+
+ See the section :ref:`unitofwork_cascades` for more background
+ on configuring cascades.
+
+ :param collection_class:
+ a class or callable that returns a new list-holding object. will
+ be used in place of a plain list for storing elements.
+ Behavior of this attribute is described in detail at
+ :ref:`custom_collections`.
+
+ :param comparator_factory:
+ a class which extends :class:`.RelationshipProperty.Comparator` which
+ provides custom SQL clause generation for comparison operations.
+
+ :param distinct_target_key=None:
+ Indicate if a "subquery" eager load should apply the DISTINCT
+ keyword to the innermost SELECT statement. When left as ``None``,
+ the DISTINCT keyword will be applied in those cases when the target
+ columns do not comprise the full primary key of the target table.
+ When set to ``True``, the DISTINCT keyword is applied to the innermost
+ SELECT unconditionally.
+
+ It may be desirable to set this flag to False when the DISTINCT is
+ reducing performance of the innermost subquery beyond that of what
+ duplicate innermost rows may be causing.
+
+ .. versionadded:: 0.8.3 - distinct_target_key allows the
+ subquery eager loader to apply a DISTINCT modifier to the
+ innermost SELECT.
+
+ .. versionchanged:: 0.9.0 - distinct_target_key now defaults to
+ ``None``, so that the feature enables itself automatically for
+ those cases where the innermost query targets a non-unique
+ key.
+
+ :param doc:
+ docstring which will be applied to the resulting descriptor.
+
+ :param extension:
+ an :class:`.AttributeExtension` instance, or list of extensions,
+ which will be prepended to the list of attribute listeners for
+ the resulting descriptor placed on the class.
+ **Deprecated.** Please see :class:`.AttributeEvents`.
+
+ :param foreign_keys:
+ a list of columns which are to be used as "foreign key" columns,
+ or columns which refer to the value in a remote column, within the
+ context of this :func:`.relationship` object's ``primaryjoin``
+ condition. That is, if the ``primaryjoin`` condition of this
+ :func:`.relationship` is ``a.id == b.a_id``, and the values in ``b.a_id``
+ are required to be present in ``a.id``, then the "foreign key" column
+ of this :func:`.relationship` is ``b.a_id``.
+
+ In normal cases, the ``foreign_keys`` parameter is **not required.**
+ :func:`.relationship` will **automatically** determine which columns
+ in the ``primaryjoin`` condition are to be considered "foreign key"
+ columns based on those :class:`.Column` objects that specify
+ :class:`.ForeignKey`, or are otherwise listed as referencing columns
+ in a :class:`.ForeignKeyConstraint` construct. ``foreign_keys`` is only
+ needed when:
+
+ 1. There is more than one way to construct a join from the local
+ table to the remote table, as there are multiple foreign key
+ references present. Setting ``foreign_keys`` will limit the
+ :func:`.relationship` to consider just those columns specified
+ here as "foreign".
+
+ .. versionchanged:: 0.8
+ A multiple-foreign key join ambiguity can be resolved by
+ setting the ``foreign_keys`` parameter alone, without the
+ need to explicitly set ``primaryjoin`` as well.
+
+ 2. The :class:`.Table` being mapped does not actually have
+ :class:`.ForeignKey` or :class:`.ForeignKeyConstraint`
+ constructs present, often because the table
+ was reflected from a database that does not support foreign key
+ reflection (MySQL MyISAM).
+
+ 3. The ``primaryjoin`` argument is used to construct a non-standard
+ join condition, which makes use of columns or expressions that do
+ not normally refer to their "parent" column, such as a join condition
+ expressed by a complex comparison using a SQL function.
+
+ The :func:`.relationship` construct will raise informative error messages
+ that suggest the use of the ``foreign_keys`` parameter when presented
+ with an ambiguous condition. In typical cases, if :func:`.relationship`
+ doesn't raise any exceptions, the ``foreign_keys`` parameter is usually
+ not needed.
+
+ ``foreign_keys`` may also be passed as a callable function
+ which is evaluated at mapper initialization time, and may be passed as a
+ Python-evaluable string when using Declarative.
+
+ .. seealso::
+
+ :ref:`relationship_foreign_keys`
+
+ :ref:`relationship_custom_foreign`
+
+ :func:`.foreign` - allows direct annotation of the "foreign" columns
+ within a ``primaryjoin`` condition.
+
+ .. versionadded:: 0.8
+ The :func:`.foreign` annotation can also be applied
+ directly to the ``primaryjoin`` expression, which is an alternate,
+ more specific system of describing which columns in a particular
+ ``primaryjoin`` should be considered "foreign".
+
+ :param info: Optional data dictionary which will be populated into the
+ :attr:`.MapperProperty.info` attribute of this object.
+
+ .. versionadded:: 0.8
+
+ :param innerjoin=False:
+ when ``True``, joined eager loads will use an inner join to join
+ against related tables instead of an outer join. The purpose
+ of this option is generally one of performance, as inner joins
+ generally perform better than outer joins. Another reason can be
+ the use of ``with_lockmode``, which does not support outer joins.
+
+ This flag can be set to ``True`` when the relationship references an
+ object via many-to-one using local foreign keys that are not nullable,
+ or when the reference is one-to-one or a collection that is guaranteed
+ to have one or at least one entry.
+
+ :param join_depth:
+ when non-``None``, an integer value indicating how many levels
+ deep "eager" loaders should join on a self-referring or cyclical
+ relationship. The number counts how many times the same Mapper
+ shall be present in the loading condition along a particular join
+ branch. When left at its default of ``None``, eager loaders
+ will stop chaining when they encounter the same target mapper
+ which is already higher up in the chain. This option applies
+ both to joined- and subquery- eager loaders.
+
+ :param lazy='select': specifies
+ how the related items should be loaded. Default value is
+ ``select``. Values include:
+
+ * ``select`` - items should be loaded lazily when the property is first
+ accessed, using a separate SELECT statement, or identity map
+ fetch for simple many-to-one references.
+
+ * ``immediate`` - items should be loaded as the parents are loaded,
+ using a separate SELECT statement, or identity map fetch for
+ simple many-to-one references.
+
+ .. versionadded:: 0.6.5
+
+ * ``joined`` - items should be loaded "eagerly" in the same query as
+ that of the parent, using a JOIN or LEFT OUTER JOIN. Whether
+ the join is "outer" or not is determined by the ``innerjoin``
+ parameter.
+
+ * ``subquery`` - items should be loaded "eagerly" as the parents are
+ loaded, using one additional SQL statement, which issues a JOIN to a
+ subquery of the original statement, for each collection requested.
+
+ * ``noload`` - no loading should occur at any time. This is to
+ support "write-only" attributes, or attributes which are
+ populated in some manner specific to the application.
+
+ * ``dynamic`` - the attribute will return a pre-configured
+ :class:`~sqlalchemy.orm.query.Query` object for all read
+ operations, onto which further filtering operations can be
+ applied before iterating the results. See
+ the section :ref:`dynamic_relationship` for more details.
+
+ * True - a synonym for 'select'
+
+ * False - a synonym for 'joined'
+
+ * None - a synonym for 'noload'
+
+ Detailed discussion of loader strategies is at :doc:`/orm/loading`.
+
+ :param load_on_pending=False:
+ Indicates loading behavior for transient or pending parent objects.
+
+ When set to ``True``, causes the lazy-loader to
+ issue a query for a parent object that is not persistent, meaning it has
+ never been flushed. This may take effect for a pending object when
+ autoflush is disabled, or for a transient object that has been
+ "attached" to a :class:`.Session` but is not part of its pending
+ collection.
+
+ The load_on_pending flag does not improve behavior
+ when the ORM is used normally - object references should be constructed
+ at the object level, not at the foreign key level, so that they
+ are present in an ordinary way before flush() proceeds. This flag
+ is not intended for general use.
+
+ .. versionadded:: 0.6.5
+
+ .. seealso::
+
+ :meth:`.Session.enable_relationship_loading` - this method establishes
+ "load on pending" behavior for the whole object, and also allows
+ loading on objects that remain transient or detached.
+
+ :param order_by:
+ indicates the ordering that should be applied when loading these
+ items. ``order_by`` is expected to refer to one of the :class:`.Column`
+ objects to which the target class is mapped, or
+ the attribute itself bound to the target class which refers
+ to the column.
+
+ ``order_by`` may also be passed as a callable function
+ which is evaluated at mapper initialization time, and may be passed as a
+ Python-evaluable string when using Declarative.
+
+ :param passive_deletes=False:
+ Indicates loading behavior during delete operations.
+
+ A value of True indicates that unloaded child items should not
+ be loaded during a delete operation on the parent. Normally,
+ when a parent item is deleted, all child items are loaded so
+ that they can either be marked as deleted, or have their
+ foreign key to the parent set to NULL. Marking this flag as
+ True usually implies an ON DELETE <CASCADE|SET NULL> rule is in
+ place which will handle updating/deleting child rows on the
+ database side.
+
+ Additionally, setting the flag to the string value 'all' will
+ disable the "nulling out" of the child foreign keys, when there
+ is no delete or delete-orphan cascade enabled. This is
+ typically used when a triggering or error raise scenario is in
+ place on the database side. Note that the foreign key
+ attributes on in-session child objects will not be changed
+ after a flush occurs so this is a very special use-case
+ setting.
+
+ :param passive_updates=True:
+ Indicates loading and INSERT/UPDATE/DELETE behavior when the
+ source of a foreign key value changes (i.e. an "on update"
+ cascade), which are typically the primary key columns of the
+ source row.
+
+ When True, it is assumed that ON UPDATE CASCADE is configured on
+ the foreign key in the database, and that the database will
+ handle propagation of an UPDATE from a source column to
+ dependent rows. Note that with databases which enforce
+ referential integrity (i.e. PostgreSQL, MySQL with InnoDB tables),
+ ON UPDATE CASCADE is required for this operation. The
+ relationship() will update the value of the attribute on related
+ items which are locally present in the session during a flush.
+
+ When False, it is assumed that the database does not enforce
+ referential integrity and will not be issuing its own CASCADE
+ operation for an update. The relationship() will issue the
+ appropriate UPDATE statements to the database in response to the
+ change of a referenced key, and items locally present in the
+ session during a flush will also be refreshed.
+
+ This flag should probably be set to False if primary key changes
+ are expected and the database in use doesn't support CASCADE
+ (i.e. SQLite, MySQL MyISAM tables).
+
+ Also see the passive_updates flag on ``mapper()``.
+
+ A future SQLAlchemy release will provide a "detect" feature for
+ this flag.
+
+ :param post_update:
+ this indicates that the relationship should be handled by a
+ second UPDATE statement after an INSERT or before a
+ DELETE. Currently, it also will issue an UPDATE after the
+ instance was UPDATEd as well, although this technically should
+ be improved. This flag is used to handle saving bi-directional
+ dependencies between two individual rows (i.e. each row
+ references the other), where it would otherwise be impossible to
+ INSERT or DELETE both rows fully since one row exists before the
+ other. Use this flag when a particular mapping arrangement will
+ incur two rows that are dependent on each other, such as a table
+ that has a one-to-many relationship to a set of child rows, and
+ also has a column that references a single child row within that
+ list (i.e. both tables contain a foreign key to each other). If
+ a ``flush()`` operation returns an error that a "cyclical
+ dependency" was detected, this is a cue that you might want to
+ use ``post_update`` to "break" the cycle.
+
+ :param primaryjoin:
+ a SQL expression that will be used as the primary
+ join of this child object against the parent object, or in a
+ many-to-many relationship the join of the primary object to the
+ association table. By default, this value is computed based on the
+ foreign key relationships of the parent and child tables (or association
+ table).
+
+ ``primaryjoin`` may also be passed as a callable function
+ which is evaluated at mapper initialization time, and may be passed as a
+ Python-evaluable string when using Declarative.
+
+ :param remote_side:
+ used for self-referential relationships, indicates the column or
+ list of columns that form the "remote side" of the relationship.
+
+ ``remote_side`` may also be passed as a callable function
+ which is evaluated at mapper initialization time, and may be passed as a
+ Python-evaluable string when using Declarative.
+
+ .. versionchanged:: 0.8
+ The :func:`.remote` annotation can also be applied
+ directly to the ``primaryjoin`` expression, which is an alternate,
+ more specific system of describing which columns in a particular
+ ``primaryjoin`` should be considered "remote".
+
+ :param query_class:
+ a :class:`.Query` subclass that will be used as the base of the
+ "appender query" returned by a "dynamic" relationship, that
+ is, a relationship that specifies ``lazy="dynamic"`` or was
+ otherwise constructed using the :func:`.orm.dynamic_loader`
+ function.
+
+ :param secondaryjoin:
+ a SQL expression that will be used as the join of
+ an association table to the child object. By default, this value is
+ computed based on the foreign key relationships of the association and
+ child tables.
+
+ ``secondaryjoin`` may also be passed as a callable function
+ which is evaluated at mapper initialization time, and may be passed as a
+ Python-evaluable string when using Declarative.
+
+ :param single_parent=(True|False):
+ when True, installs a validator which will prevent objects
+ from being associated with more than one parent at a time.
+ This is used for many-to-one or many-to-many relationships that
+ should be treated either as one-to-one or one-to-many. Its
+ usage is optional unless delete-orphan cascade is also
+ set on this relationship(), in which case it's required.
+
+ :param uselist=(True|False):
+ a boolean that indicates if this property should be loaded as a
+ list or a scalar. In most cases, this value is determined
+ automatically by ``relationship()``, based on the type and direction
+ of the relationship - one to many forms a list, many to one
+ forms a scalar, many to many is a list. If a scalar is desired
+ where normally a list would be present, such as a bi-directional
+ one-to-one relationship, set uselist to False.
+
+ :param viewonly=False:
+ when set to True, the relationship is used only for loading objects
+ within the relationship, and has no effect on the unit-of-work
+ flush process. Relationships with viewonly can specify any kind of
+ join conditions to provide additional views of related objects
+ onto a parent object. Note that the functionality of a viewonly
+ relationship has its limits - complicated join conditions may
+ not compile into eager or lazy loaders properly. If this is the
+ case, use an alternative method.
+
+ .. versionchanged:: 0.6
+ :func:`relationship` was renamed from its previous name
+ :func:`relation`.
+
+ """
+
+ self.uselist = uselist
+ self.argument = argument
+ self.secondary = secondary
+ self.primaryjoin = primaryjoin
+ self.secondaryjoin = secondaryjoin
+ self.post_update = post_update
+ self.direction = None
+ self.viewonly = viewonly
+ self.lazy = lazy
+ self.single_parent = single_parent
+ self._user_defined_foreign_keys = foreign_keys
+ self.collection_class = collection_class
+ self.passive_deletes = passive_deletes
+ self.cascade_backrefs = cascade_backrefs
+ self.passive_updates = passive_updates
+ self.remote_side = remote_side
+ self.enable_typechecks = enable_typechecks
+ self.query_class = query_class
+ self.innerjoin = innerjoin
+ self.distinct_target_key = distinct_target_key
+ self.doc = doc
+ self.active_history = active_history
+ self.join_depth = join_depth
+ self.local_remote_pairs = _local_remote_pairs
+ self.extension = extension
+ self.load_on_pending = load_on_pending
+ self.comparator_factory = comparator_factory or \
+ RelationshipProperty.Comparator
+ self.comparator = self.comparator_factory(self, None)
+ util.set_creation_order(self)
+
+ if info is not None:
+ self.info = info
+
+ if strategy_class:
+ self.strategy_class = strategy_class
+ else:
+ self.strategy_class = self._strategy_lookup(("lazy", self.lazy))
+
+ self._reverse_property = set()
+
+ self.cascade = cascade if cascade is not False \
+ else "save-update, merge"
+
+ self.order_by = order_by
+
+ self.back_populates = back_populates
+
+ if self.back_populates:
+ if backref:
+ raise sa_exc.ArgumentError(
+ "backref and back_populates keyword arguments "
+ "are mutually exclusive")
+ self.backref = None
+ else:
+ self.backref = backref
+
+ def instrument_class(self, mapper):
+ attributes.register_descriptor(
+ mapper.class_,
+ self.key,
+ comparator=self.comparator_factory(self, mapper),
+ parententity=mapper,
+ doc=self.doc,
+ )
+
+ class Comparator(PropComparator):
+ """Produce boolean, comparison, and other operators for
+ :class:`.RelationshipProperty` attributes.
+
+ See the documentation for :class:`.PropComparator` for a brief overview
+ of ORM level operator definition.
+
+ See also:
+
+ :class:`.PropComparator`
+
+ :class:`.ColumnProperty.Comparator`
+
+ :class:`.ColumnOperators`
+
+ :ref:`types_operators`
+
+ :attr:`.TypeEngine.comparator_factory`
+
+ """
+
+ _of_type = None
+
+ def __init__(self, prop, parentmapper, adapt_to_entity=None, of_type=None):
+ """Construction of :class:`.RelationshipProperty.Comparator`
+ is internal to the ORM's attribute mechanics.
+
+ """
+ self.prop = prop
+ self._parentmapper = parentmapper
+ self._adapt_to_entity = adapt_to_entity
+ if of_type:
+ self._of_type = of_type
+
+ def adapt_to_entity(self, adapt_to_entity):
+ return self.__class__(self.property, self._parentmapper,
+ adapt_to_entity=adapt_to_entity,
+ of_type=self._of_type)
+
+ @util.memoized_property
+ def mapper(self):
+ """The target :class:`.Mapper` referred to by this
+ :class:`.RelationshipProperty.Comparator`.
+
+ This is the "target" or "remote" side of the
+ :func:`.relationship`.
+
+ """
+ return self.property.mapper
+
+ @util.memoized_property
+ def _parententity(self):
+ return self.property.parent
+
+ def _source_selectable(self):
+ if self._adapt_to_entity:
+ return self._adapt_to_entity.selectable
+ else:
+ return self.property.parent._with_polymorphic_selectable
+
+ def __clause_element__(self):
+ adapt_from = self._source_selectable()
+ if self._of_type:
+ of_type = inspect(self._of_type).mapper
+ else:
+ of_type = None
+
+ pj, sj, source, dest, \
+ secondary, target_adapter = self.property._create_joins(
+ source_selectable=adapt_from,
+ source_polymorphic=True,
+ of_type=of_type)
+ if sj is not None:
+ return pj & sj
+ else:
+ return pj
+
+ def of_type(self, cls):
+ """Produce a construct that represents a particular 'subtype' of
+ attribute for the parent class.
+
+ Currently this is usable in conjunction with :meth:`.Query.join`
+ and :meth:`.Query.outerjoin`.
+
+ """
+ return RelationshipProperty.Comparator(
+ self.property,
+ self._parentmapper,
+ adapt_to_entity=self._adapt_to_entity,
+ of_type=cls)
+
+ def in_(self, other):
+ """Produce an IN clause - this is not implemented
+ for :func:`~.orm.relationship`-based attributes at this time.
+
+ """
+ raise NotImplementedError('in_() not yet supported for '
+ 'relationships. For a simple many-to-one, use '
+ 'in_() against the set of foreign key values.')
+
+ __hash__ = None
+
+ def __eq__(self, other):
+ """Implement the ``==`` operator.
+
+ In a many-to-one context, such as::
+
+ MyClass.some_prop == <some object>
+
+ this will typically produce a
+ clause such as::
+
+ mytable.related_id == <some id>
+
+ Where ``<some id>`` is the primary key of the given
+ object.
+
+ The ``==`` operator provides partial functionality for non-
+ many-to-one comparisons:
+
+ * Comparisons against collections are not supported.
+ Use :meth:`~.RelationshipProperty.Comparator.contains`.
+ * Compared to a scalar one-to-many, will produce a
+ clause that compares the target columns in the parent to
+ the given target.
+ * Compared to a scalar many-to-many, an alias
+ of the association table will be rendered as
+ well, forming a natural join that is part of the
+ main body of the query. This will not work for
+ queries that go beyond simple AND conjunctions of
+ comparisons, such as those which use OR. Use
+ explicit joins, outerjoins, or
+ :meth:`~.RelationshipProperty.Comparator.has` for
+ more comprehensive non-many-to-one scalar
+ membership tests.
+ * Comparisons against ``None`` given in a one-to-many
+ or many-to-many context produce a NOT EXISTS clause.
+
+ """
+ if isinstance(other, (util.NoneType, expression.Null)):
+ if self.property.direction in [ONETOMANY, MANYTOMANY]:
+ return ~self._criterion_exists()
+ else:
+ return _orm_annotate(self.property._optimized_compare(
+ None, adapt_source=self.adapter))
+ elif self.property.uselist:
+ raise sa_exc.InvalidRequestError("Can't compare a colle"
+ "ction to an object or collection; use "
+ "contains() to test for membership.")
+ else:
+ return _orm_annotate(self.property._optimized_compare(other,
+ adapt_source=self.adapter))
+
+ def _criterion_exists(self, criterion=None, **kwargs):
+ if getattr(self, '_of_type', None):
+ info = inspect(self._of_type)
+ target_mapper, to_selectable, is_aliased_class = \
+ info.mapper, info.selectable, info.is_aliased_class
+ if self.property._is_self_referential and not is_aliased_class:
+ to_selectable = to_selectable.alias()
+
+ single_crit = target_mapper._single_table_criterion
+ if single_crit is not None:
+ if criterion is not None:
+ criterion = single_crit & criterion
+ else:
+ criterion = single_crit
+ else:
+ is_aliased_class = False
+ to_selectable = None
+
+ if self.adapter:
+ source_selectable = self._source_selectable()
+ else:
+ source_selectable = None
+
+ pj, sj, source, dest, secondary, target_adapter = \
+ self.property._create_joins(dest_polymorphic=True,
+ dest_selectable=to_selectable,
+ source_selectable=source_selectable)
+
+ for k in kwargs:
+ crit = getattr(self.property.mapper.class_, k) == kwargs[k]
+ if criterion is None:
+ criterion = crit
+ else:
+ criterion = criterion & crit
+
+ # annotate the *local* side of the join condition, in the case
+ # of pj + sj this is the full primaryjoin, in the case of just
+ # pj it's the local side of the primaryjoin.
+ if sj is not None:
+ j = _orm_annotate(pj) & sj
+ else:
+ j = _orm_annotate(pj, exclude=self.property.remote_side)
+
+ if criterion is not None and target_adapter and not is_aliased_class:
+ # limit this adapter to annotated only?
+ criterion = target_adapter.traverse(criterion)
+
+ # only have the "joined left side" of what we
+ # return be subject to Query adaption. The right
+ # side of it is used for an exists() subquery and
+ # should not correlate or otherwise reach out
+ # to anything in the enclosing query.
+ if criterion is not None:
+ criterion = criterion._annotate(
+ {'no_replacement_traverse': True})
+
+ crit = j & sql.True_._ifnone(criterion)
+
+ ex = sql.exists([1], crit, from_obj=dest).correlate_except(dest)
+ if secondary is not None:
+ ex = ex.correlate_except(secondary)
+ return ex
+
+ def any(self, criterion=None, **kwargs):
+ """Produce an expression that tests a collection against
+ particular criterion, using EXISTS.
+
+ An expression like::
+
+ session.query(MyClass).filter(
+ MyClass.somereference.any(SomeRelated.x==2)
+ )
+
+
+ Will produce a query like::
+
+ SELECT * FROM my_table WHERE
+ EXISTS (SELECT 1 FROM related WHERE related.my_id=my_table.id
+ AND related.x=2)
+
+ Because :meth:`~.RelationshipProperty.Comparator.any` uses
+ a correlated subquery, its performance is not nearly as
+ good when compared against large target tables as that of
+ using a join.
+
+ :meth:`~.RelationshipProperty.Comparator.any` is particularly
+ useful for testing for empty collections::
+
+ session.query(MyClass).filter(
+ ~MyClass.somereference.any()
+ )
+
+ will produce::
+
+ SELECT * FROM my_table WHERE
+ NOT EXISTS (SELECT 1 FROM related WHERE
+ related.my_id=my_table.id)
+
+ :meth:`~.RelationshipProperty.Comparator.any` is only
+ valid for collections, i.e. a :func:`.relationship`
+ that has ``uselist=True``. For scalar references,
+ use :meth:`~.RelationshipProperty.Comparator.has`.
+
+ """
+ if not self.property.uselist:
+ raise sa_exc.InvalidRequestError(
+ "'any()' not implemented for scalar "
+ "attributes. Use has()."
+ )
+
+ return self._criterion_exists(criterion, **kwargs)
+
+ def has(self, criterion=None, **kwargs):
+ """Produce an expression that tests a scalar reference against
+ particular criterion, using EXISTS.
+
+ An expression like::
+
+ session.query(MyClass).filter(
+ MyClass.somereference.has(SomeRelated.x==2)
+ )
+
+
+ Will produce a query like::
+
+ SELECT * FROM my_table WHERE
+ EXISTS (SELECT 1 FROM related WHERE
+ related.id==my_table.related_id AND related.x=2)
+
+ Because :meth:`~.RelationshipProperty.Comparator.has` uses
+ a correlated subquery, its performance is not nearly as
+ good when compared against large target tables as that of
+ using a join.
+
+ :meth:`~.RelationshipProperty.Comparator.has` is only
+ valid for scalar references, i.e. a :func:`.relationship`
+ that has ``uselist=False``. For collection references,
+ use :meth:`~.RelationshipProperty.Comparator.any`.
+
+ """
+ if self.property.uselist:
+ raise sa_exc.InvalidRequestError(
+ "'has()' not implemented for collections. "
+ "Use any().")
+ return self._criterion_exists(criterion, **kwargs)
+
+ def contains(self, other, **kwargs):
+ """Return a simple expression that tests a collection for
+ containment of a particular item.
+
+ :meth:`~.RelationshipProperty.Comparator.contains` is
+ only valid for a collection, i.e. a
+ :func:`~.orm.relationship` that implements
+ one-to-many or many-to-many with ``uselist=True``.
+
+ When used in a simple one-to-many context, an
+ expression like::
+
+ MyClass.contains(other)
+
+ Produces a clause like::
+
+ mytable.id == <some id>
+
+ Where ``<some id>`` is the value of the foreign key
+ attribute on ``other`` which refers to the primary
+ key of its parent object. From this it follows that
+ :meth:`~.RelationshipProperty.Comparator.contains` is
+ very useful when used with simple one-to-many
+ operations.
+
+ For many-to-many operations, the behavior of
+ :meth:`~.RelationshipProperty.Comparator.contains`
+ has more caveats. The association table will be
+ rendered in the statement, producing an "implicit"
+ join, that is, includes multiple tables in the FROM
+ clause which are equated in the WHERE clause::
+
+ query(MyClass).filter(MyClass.contains(other))
+
+ Produces a query like::
+
+ SELECT * FROM my_table, my_association_table AS
+ my_association_table_1 WHERE
+ my_table.id = my_association_table_1.parent_id
+ AND my_association_table_1.child_id = <some id>
+
+ Where ``<some id>`` would be the primary key of
+ ``other``. From the above, it is clear that
+ :meth:`~.RelationshipProperty.Comparator.contains`
+ will **not** work with many-to-many collections when
+ used in queries that move beyond simple AND
+ conjunctions, such as multiple
+ :meth:`~.RelationshipProperty.Comparator.contains`
+ expressions joined by OR. In such cases subqueries or
+ explicit "outer joins" will need to be used instead.
+ See :meth:`~.RelationshipProperty.Comparator.any` for
+ a less-performant alternative using EXISTS, or refer
+ to :meth:`.Query.outerjoin` as well as :ref:`ormtutorial_joins`
+ for more details on constructing outer joins.
+
+ """
+ if not self.property.uselist:
+ raise sa_exc.InvalidRequestError(
+ "'contains' not implemented for scalar "
+ "attributes. Use ==")
+ clause = self.property._optimized_compare(other,
+ adapt_source=self.adapter)
+
+ if self.property.secondaryjoin is not None:
+ clause.negation_clause = \
+ self.__negated_contains_or_equals(other)
+
+ return clause
+
+ def __negated_contains_or_equals(self, other):
+ if self.property.direction == MANYTOONE:
+ state = attributes.instance_state(other)
+
+ def state_bindparam(x, state, col):
+ o = state.obj() # strong ref
+ return sql.bindparam(x, unique=True, callable_=lambda: \
+ self.property.mapper._get_committed_attr_by_column(o, col))
+
+ def adapt(col):
+ if self.adapter:
+ return self.adapter(col)
+ else:
+ return col
+
+ if self.property._use_get:
+ return sql.and_(*[
+ sql.or_(
+ adapt(x) != state_bindparam(adapt(x), state, y),
+ adapt(x) == None)
+ for (x, y) in self.property.local_remote_pairs])
+
+ criterion = sql.and_(*[x == y for (x, y) in
+ zip(
+ self.property.mapper.primary_key,
+ self.property.\
+ mapper.\
+ primary_key_from_instance(other))
+ ])
+ return ~self._criterion_exists(criterion)
+
+ def __ne__(self, other):
+ """Implement the ``!=`` operator.
+
+ In a many-to-one context, such as::
+
+ MyClass.some_prop != <some object>
+
+ This will typically produce a clause such as::
+
+ mytable.related_id != <some id>
+
+ Where ``<some id>`` is the primary key of the
+ given object.
+
+ The ``!=`` operator provides partial functionality for non-
+ many-to-one comparisons:
+
+ * Comparisons against collections are not supported.
+ Use
+ :meth:`~.RelationshipProperty.Comparator.contains`
+ in conjunction with :func:`~.expression.not_`.
+ * Compared to a scalar one-to-many, will produce a
+ clause that compares the target columns in the parent to
+ the given target.
+ * Compared to a scalar many-to-many, an alias
+ of the association table will be rendered as
+ well, forming a natural join that is part of the
+ main body of the query. This will not work for
+ queries that go beyond simple AND conjunctions of
+ comparisons, such as those which use OR. Use
+ explicit joins, outerjoins, or
+ :meth:`~.RelationshipProperty.Comparator.has` in
+ conjunction with :func:`~.expression.not_` for
+ more comprehensive non-many-to-one scalar
+ membership tests.
+ * Comparisons against ``None`` given in a one-to-many
+ or many-to-many context produce an EXISTS clause.
+
+ """
+ if isinstance(other, (util.NoneType, expression.Null)):
+ if self.property.direction == MANYTOONE:
+ return sql.or_(*[x != None for x in
+ self.property._calculated_foreign_keys])
+ else:
+ return self._criterion_exists()
+ elif self.property.uselist:
+ raise sa_exc.InvalidRequestError("Can't compare a collection"
+ " to an object or collection; use "
+ "contains() to test for membership.")
+ else:
+ return self.__negated_contains_or_equals(other)
+
+ @util.memoized_property
+ def property(self):
+ if mapperlib.Mapper._new_mappers:
+ mapperlib.Mapper._configure_all()
+ return self.prop
+
+ def compare(self, op, value,
+ value_is_parent=False,
+ alias_secondary=True):
+ if op == operators.eq:
+ if value is None:
+ if self.uselist:
+ return ~sql.exists([1], self.primaryjoin)
+ else:
+ return self._optimized_compare(None,
+ value_is_parent=value_is_parent,
+ alias_secondary=alias_secondary)
+ else:
+ return self._optimized_compare(value,
+ value_is_parent=value_is_parent,
+ alias_secondary=alias_secondary)
+ else:
+ return op(self.comparator, value)
+
+ def _optimized_compare(self, value, value_is_parent=False,
+ adapt_source=None,
+ alias_secondary=True):
+ if value is not None:
+ value = attributes.instance_state(value)
+ return self._lazy_strategy.lazy_clause(value,
+ reverse_direction=not value_is_parent,
+ alias_secondary=alias_secondary,
+ adapt_source=adapt_source)
+
+ def __str__(self):
+ return str(self.parent.class_.__name__) + "." + self.key
+
+ def merge(self,
+ session,
+ source_state,
+ source_dict,
+ dest_state,
+ dest_dict,
+ load, _recursive):
+
+ if load:
+ for r in self._reverse_property:
+ if (source_state, r) in _recursive:
+ return
+
+ if not "merge" in self._cascade:
+ return
+
+ if self.key not in source_dict:
+ return
+
+ if self.uselist:
+ instances = source_state.get_impl(self.key).\
+ get(source_state, source_dict)
+ if hasattr(instances, '_sa_adapter'):
+ # convert collections to adapters to get a true iterator
+ instances = instances._sa_adapter
+
+ if load:
+ # for a full merge, pre-load the destination collection,
+ # so that individual _merge of each item pulls from identity
+ # map for those already present.
+ # also assumes CollectionAttributeImpl behavior of loading
+ # "old" list in any case
+ dest_state.get_impl(self.key).get(dest_state, dest_dict)
+
+ dest_list = []
+ for current in instances:
+ current_state = attributes.instance_state(current)
+ current_dict = attributes.instance_dict(current)
+ _recursive[(current_state, self)] = True
+ obj = session._merge(current_state, current_dict,
+ load=load, _recursive=_recursive)
+ if obj is not None:
+ dest_list.append(obj)
+
+ if not load:
+ coll = attributes.init_state_collection(dest_state,
+ dest_dict, self.key)
+ for c in dest_list:
+ coll.append_without_event(c)
+ else:
+ dest_state.get_impl(self.key)._set_iterable(dest_state,
+ dest_dict, dest_list)
+ else:
+ current = source_dict[self.key]
+ if current is not None:
+ current_state = attributes.instance_state(current)
+ current_dict = attributes.instance_dict(current)
+ _recursive[(current_state, self)] = True
+ obj = session._merge(current_state, current_dict,
+ load=load, _recursive=_recursive)
+ else:
+ obj = None
+
+ if not load:
+ dest_dict[self.key] = obj
+ else:
+ dest_state.get_impl(self.key).set(dest_state,
+ dest_dict, obj, None)
+
+ def _value_as_iterable(self, state, dict_, key,
+ passive=attributes.PASSIVE_OFF):
+ """Return a list of tuples (state, obj) for the given
+ key.
+
+ returns an empty list if the value is None/empty/PASSIVE_NO_RESULT
+ """
+
+ impl = state.manager[key].impl
+ x = impl.get(state, dict_, passive=passive)
+ if x is attributes.PASSIVE_NO_RESULT or x is None:
+ return []
+ elif hasattr(impl, 'get_collection'):
+ return [
+ (attributes.instance_state(o), o) for o in
+ impl.get_collection(state, dict_, x, passive=passive)
+ ]
+ else:
+ return [(attributes.instance_state(x), x)]
+
+ def cascade_iterator(self, type_, state, dict_,
+ visited_states, halt_on=None):
+ #assert type_ in self._cascade
+
+ # only actively lazy load on the 'delete' cascade
+ if type_ != 'delete' or self.passive_deletes:
+ passive = attributes.PASSIVE_NO_INITIALIZE
+ else:
+ passive = attributes.PASSIVE_OFF
+
+ if type_ == 'save-update':
+ tuples = state.manager[self.key].impl.\
+ get_all_pending(state, dict_)
+
+ else:
+ tuples = self._value_as_iterable(state, dict_, self.key,
+ passive=passive)
+
+ skip_pending = type_ == 'refresh-expire' and 'delete-orphan' \
+ not in self._cascade
+
+ for instance_state, c in tuples:
+ if instance_state in visited_states:
+ continue
+
+ if c is None:
+ # would like to emit a warning here, but
+ # would not be consistent with collection.append(None)
+ # current behavior of silently skipping.
+ # see [ticket:2229]
+ continue
+
+ instance_dict = attributes.instance_dict(c)
+
+ if halt_on and halt_on(instance_state):
+ continue
+
+ if skip_pending and not instance_state.key:
+ continue
+
+ instance_mapper = instance_state.manager.mapper
+
+ if not instance_mapper.isa(self.mapper.class_manager.mapper):
+ raise AssertionError("Attribute '%s' on class '%s' "
+ "doesn't handle objects "
+ "of type '%s'" % (
+ self.key,
+ self.parent.class_,
+ c.__class__
+ ))
+
+ visited_states.add(instance_state)
+
+ yield c, instance_mapper, instance_state, instance_dict
+
+ def _add_reverse_property(self, key):
+ other = self.mapper.get_property(key, _configure_mappers=False)
+ self._reverse_property.add(other)
+ other._reverse_property.add(self)
+
+ if not other.mapper.common_parent(self.parent):
+ raise sa_exc.ArgumentError('reverse_property %r on '
+ 'relationship %s references relationship %s, which '
+ 'does not reference mapper %s' % (key, self, other,
+ self.parent))
+ if self.direction in (ONETOMANY, MANYTOONE) and self.direction \
+ == other.direction:
+ raise sa_exc.ArgumentError('%s and back-reference %s are '
+ 'both of the same direction %r. Did you mean to '
+ 'set remote_side on the many-to-one side ?'
+ % (other, self, self.direction))
+
+ @util.memoized_property
+ def mapper(self):
+ """Return the targeted :class:`.Mapper` for this
+ :class:`.RelationshipProperty`.
+
+ This is a lazy-initializing static attribute.
+
+ """
+ if util.callable(self.argument) and \
+ not isinstance(self.argument, (type, mapperlib.Mapper)):
+ argument = self.argument()
+ else:
+ argument = self.argument
+
+ if isinstance(argument, type):
+ mapper_ = mapperlib.class_mapper(argument,
+ configure=False)
+ elif isinstance(self.argument, mapperlib.Mapper):
+ mapper_ = argument
+ else:
+ raise sa_exc.ArgumentError("relationship '%s' expects "
+ "a class or a mapper argument (received: %s)"
+ % (self.key, type(argument)))
+ return mapper_
+
+ @util.memoized_property
+ @util.deprecated("0.7", "Use .target")
+ def table(self):
+ """Return the selectable linked to this
+ :class:`.RelationshipProperty` object's target
+ :class:`.Mapper`.
+ """
+ return self.target
+
+ def do_init(self):
+ self._check_conflicts()
+ self._process_dependent_arguments()
+ self._setup_join_conditions()
+ self._check_cascade_settings(self._cascade)
+ self._post_init()
+ self._generate_backref()
+ super(RelationshipProperty, self).do_init()
+ self._lazy_strategy = self._get_strategy((("lazy", "select"),))
+
+
+ def _process_dependent_arguments(self):
+ """Convert incoming configuration arguments to their
+ proper form.
+
+ Callables are resolved, ORM annotations removed.
+
+ """
+ # accept callables for other attributes which may require
+ # deferred initialization. This technique is used
+ # by declarative "string configs" and some recipes.
+ for attr in (
+ 'order_by', 'primaryjoin', 'secondaryjoin',
+ 'secondary', '_user_defined_foreign_keys', 'remote_side',
+ ):
+ attr_value = getattr(self, attr)
+ if util.callable(attr_value):
+ setattr(self, attr, attr_value())
+
+ # remove "annotations" which are present if mapped class
+ # descriptors are used to create the join expression.
+ for attr in 'primaryjoin', 'secondaryjoin':
+ val = getattr(self, attr)
+ if val is not None:
+ setattr(self, attr, _orm_deannotate(
+ expression._only_column_elements(val, attr))
+ )
+
+ # ensure expressions in self.order_by, foreign_keys,
+ # remote_side are all columns, not strings.
+ if self.order_by is not False and self.order_by is not None:
+ self.order_by = [
+ expression._only_column_elements(x, "order_by")
+ for x in
+ util.to_list(self.order_by)]
+
+ self._user_defined_foreign_keys = \
+ util.column_set(
+ expression._only_column_elements(x, "foreign_keys")
+ for x in util.to_column_set(
+ self._user_defined_foreign_keys
+ ))
+
+ self.remote_side = \
+ util.column_set(
+ expression._only_column_elements(x, "remote_side")
+ for x in
+ util.to_column_set(self.remote_side))
+
+ self.target = self.mapper.mapped_table
+
+
+ def _setup_join_conditions(self):
+ self._join_condition = jc = JoinCondition(
+ parent_selectable=self.parent.mapped_table,
+ child_selectable=self.mapper.mapped_table,
+ parent_local_selectable=self.parent.local_table,
+ child_local_selectable=self.mapper.local_table,
+ primaryjoin=self.primaryjoin,
+ secondary=self.secondary,
+ secondaryjoin=self.secondaryjoin,
+ parent_equivalents=self.parent._equivalent_columns,
+ child_equivalents=self.mapper._equivalent_columns,
+ consider_as_foreign_keys=self._user_defined_foreign_keys,
+ local_remote_pairs=self.local_remote_pairs,
+ remote_side=self.remote_side,
+ self_referential=self._is_self_referential,
+ prop=self,
+ support_sync=not self.viewonly,
+ can_be_synced_fn=self._columns_are_mapped
+ )
+ self.primaryjoin = jc.deannotated_primaryjoin
+ self.secondaryjoin = jc.deannotated_secondaryjoin
+ self.direction = jc.direction
+ self.local_remote_pairs = jc.local_remote_pairs
+ self.remote_side = jc.remote_columns
+ self.local_columns = jc.local_columns
+ self.synchronize_pairs = jc.synchronize_pairs
+ self._calculated_foreign_keys = jc.foreign_key_columns
+ self.secondary_synchronize_pairs = jc.secondary_synchronize_pairs
+
+ def _check_conflicts(self):
+ """Test that this relationship is legal, warn about
+ inheritance conflicts."""
+
+ if not self.is_primary() \
+ and not mapperlib.class_mapper(
+ self.parent.class_,
+ configure=False).has_property(self.key):
+ raise sa_exc.ArgumentError("Attempting to assign a new "
+ "relationship '%s' to a non-primary mapper on "
+ "class '%s'. New relationships can only be added "
+ "to the primary mapper, i.e. the very first mapper "
+ "created for class '%s' " % (self.key,
+ self.parent.class_.__name__,
+ self.parent.class_.__name__))
+
+ # check for conflicting relationship() on superclass
+ if not self.parent.concrete:
+ for inheriting in self.parent.iterate_to_root():
+ if inheriting is not self.parent \
+ and inheriting.has_property(self.key):
+ util.warn("Warning: relationship '%s' on mapper "
+ "'%s' supersedes the same relationship "
+ "on inherited mapper '%s'; this can "
+ "cause dependency issues during flush"
+ % (self.key, self.parent, inheriting))
+
+ def _get_cascade(self):
+ """Return the current cascade setting for this
+ :class:`.RelationshipProperty`.
+ """
+ return self._cascade
+
+ def _set_cascade(self, cascade):
+ cascade = CascadeOptions(cascade)
+ if 'mapper' in self.__dict__:
+ self._check_cascade_settings(cascade)
+ self._cascade = cascade
+
+ if self._dependency_processor:
+ self._dependency_processor.cascade = cascade
+
+ cascade = property(_get_cascade, _set_cascade)
+
+ def _check_cascade_settings(self, cascade):
+ if cascade.delete_orphan and not self.single_parent \
+ and (self.direction is MANYTOMANY or self.direction
+ is MANYTOONE):
+ raise sa_exc.ArgumentError(
+ 'On %s, delete-orphan cascade is not supported '
+ 'on a many-to-many or many-to-one relationship '
+ 'when single_parent is not set. Set '
+ 'single_parent=True on the relationship().'
+ % self)
+ if self.direction is MANYTOONE and self.passive_deletes:
+ util.warn("On %s, 'passive_deletes' is normally configured "
+ "on one-to-many, one-to-one, many-to-many "
+ "relationships only."
+ % self)
+
+ if self.passive_deletes == 'all' and \
+ ("delete" in cascade or
+ "delete-orphan" in cascade):
+ raise sa_exc.ArgumentError(
+ "On %s, can't set passive_deletes='all' in conjunction "
+ "with 'delete' or 'delete-orphan' cascade" % self)
+
+ if cascade.delete_orphan:
+ self.mapper.primary_mapper()._delete_orphans.append(
+ (self.key, self.parent.class_)
+ )
+
+ def _columns_are_mapped(self, *cols):
+ """Return True if all columns in the given collection are
+ mapped by the tables referenced by this :class:`.Relationship`.
+
+ """
+ for c in cols:
+ if self.secondary is not None \
+ and self.secondary.c.contains_column(c):
+ continue
+ if not self.parent.mapped_table.c.contains_column(c) and \
+ not self.target.c.contains_column(c):
+ return False
+ return True
+
+ def _generate_backref(self):
+ """Interpret the 'backref' instruction to create a
+ :func:`.relationship` complementary to this one."""
+
+ if not self.is_primary():
+ return
+ if self.backref is not None and not self.back_populates:
+ if isinstance(self.backref, util.string_types):
+ backref_key, kwargs = self.backref, {}
+ else:
+ backref_key, kwargs = self.backref
+ mapper = self.mapper.primary_mapper()
+
+ check = set(mapper.iterate_to_root()).\
+ union(mapper.self_and_descendants)
+ for m in check:
+ if m.has_property(backref_key):
+ raise sa_exc.ArgumentError("Error creating backref "
+ "'%s' on relationship '%s': property of that "
+ "name exists on mapper '%s'" % (backref_key,
+ self, m))
+
+ # determine primaryjoin/secondaryjoin for the
+ # backref. Use the one we had, so that
+ # a custom join doesn't have to be specified in
+ # both directions.
+ if self.secondary is not None:
+ # for many to many, just switch primaryjoin/
+ # secondaryjoin. use the annotated
+ # pj/sj on the _join_condition.
+ pj = kwargs.pop('primaryjoin',
+ self._join_condition.secondaryjoin_minus_local)
+ sj = kwargs.pop('secondaryjoin',
+ self._join_condition.primaryjoin_minus_local)
+ else:
+ pj = kwargs.pop('primaryjoin',
+ self._join_condition.primaryjoin_reverse_remote)
+ sj = kwargs.pop('secondaryjoin', None)
+ if sj:
+ raise sa_exc.InvalidRequestError(
+ "Can't assign 'secondaryjoin' on a backref "
+ "against a non-secondary relationship."
+ )
+
+ foreign_keys = kwargs.pop('foreign_keys',
+ self._user_defined_foreign_keys)
+ parent = self.parent.primary_mapper()
+ kwargs.setdefault('viewonly', self.viewonly)
+ kwargs.setdefault('post_update', self.post_update)
+ kwargs.setdefault('passive_updates', self.passive_updates)
+ self.back_populates = backref_key
+ relationship = RelationshipProperty(
+ parent, self.secondary,
+ pj, sj,
+ foreign_keys=foreign_keys,
+ back_populates=self.key,
+ **kwargs)
+ mapper._configure_property(backref_key, relationship)
+
+ if self.back_populates:
+ self._add_reverse_property(self.back_populates)
+
+ def _post_init(self):
+ if self.uselist is None:
+ self.uselist = self.direction is not MANYTOONE
+ if not self.viewonly:
+ self._dependency_processor = \
+ dependency.DependencyProcessor.from_relationship(self)
+
+ @util.memoized_property
+ def _use_get(self):
+ """memoize the 'use_get' attribute of this RelationshipLoader's
+ lazyloader."""
+
+ strategy = self._lazy_strategy
+ return strategy.use_get
+
+ @util.memoized_property
+ def _is_self_referential(self):
+ return self.mapper.common_parent(self.parent)
+
+ def _create_joins(self, source_polymorphic=False,
+ source_selectable=None, dest_polymorphic=False,
+ dest_selectable=None, of_type=None):
+ if source_selectable is None:
+ if source_polymorphic and self.parent.with_polymorphic:
+ source_selectable = self.parent._with_polymorphic_selectable
+
+ aliased = False
+ if dest_selectable is None:
+ if dest_polymorphic and self.mapper.with_polymorphic:
+ dest_selectable = self.mapper._with_polymorphic_selectable
+ aliased = True
+ else:
+ dest_selectable = self.mapper.mapped_table
+
+ if self._is_self_referential and source_selectable is None:
+ dest_selectable = dest_selectable.alias()
+ aliased = True
+ else:
+ aliased = True
+
+ dest_mapper = of_type or self.mapper
+
+ single_crit = dest_mapper._single_table_criterion
+ aliased = aliased or (source_selectable is not None)
+
+ primaryjoin, secondaryjoin, secondary, target_adapter, dest_selectable = \
+ self._join_condition.join_targets(
+ source_selectable, dest_selectable, aliased, single_crit
+ )
+ if source_selectable is None:
+ source_selectable = self.parent.local_table
+ if dest_selectable is None:
+ dest_selectable = self.mapper.local_table
+ return (primaryjoin, secondaryjoin, source_selectable,
+ dest_selectable, secondary, target_adapter)
+
def _annotate_columns(element, annotations):
def clone(elem):
if isinstance(elem, expression.ColumnClause):
@@ -901,4 +2507,4 @@ class _ColInAnnotations(object):
self.name = name
def __call__(self, c):
- return self.name in c._annotations \ No newline at end of file
+ return self.name in c._annotations
diff --git a/lib/sqlalchemy/orm/scoping.py b/lib/sqlalchemy/orm/scoping.py
index 6393b74a7..c1f8f319f 100644
--- a/lib/sqlalchemy/orm/scoping.py
+++ b/lib/sqlalchemy/orm/scoping.py
@@ -1,5 +1,5 @@
# orm/scoping.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -163,7 +163,7 @@ def makeprop(name):
return property(get, set)
for prop in ('bind', 'dirty', 'deleted', 'new', 'identity_map',
- 'is_active', 'autoflush', 'no_autoflush'):
+ 'is_active', 'autoflush', 'no_autoflush', 'info'):
setattr(scoped_session, prop, makeprop(prop))
diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py
index 5a4486eef..c10a0efc9 100644
--- a/lib/sqlalchemy/orm/session.py
+++ b/lib/sqlalchemy/orm/session.py
@@ -1,5 +1,5 @@
# orm/session.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -8,25 +8,40 @@
import weakref
-from .. import util, sql, engine, exc as sa_exc, event
+from .. import util, sql, engine, exc as sa_exc
from ..sql import util as sql_util, expression
from . import (
- SessionExtension, attributes, exc, query, util as orm_util,
+ SessionExtension, attributes, exc, query,
loading, identity
)
-from .util import (
+from ..inspection import inspect
+from .base import (
object_mapper, class_mapper,
_class_to_mapper, _state_mapper, object_state,
- _none_set
+ _none_set, state_str, instance_str
)
from .unitofwork import UOWTransaction
-from .mapper import Mapper
-from .events import SessionEvents
-statelib = util.importlater("sqlalchemy.orm", "state")
+from . import state as statelib
import sys
__all__ = ['Session', 'SessionTransaction', 'SessionExtension', 'sessionmaker']
+_sessions = weakref.WeakValueDictionary()
+"""Weak-referencing dictionary of :class:`.Session` objects.
+"""
+
+def _state_session(state):
+ """Given an :class:`.InstanceState`, return the :class:`.Session`
+ associated, if any.
+ """
+ if state.session_id:
+ try:
+ return _sessions[state.session_id]
+ except KeyError:
+ pass
+ return None
+
+
class _SessionClassMethods(object):
"""Class-level methods for :class:`.Session`, :class:`.sessionmaker`."""
@@ -39,7 +54,8 @@ class _SessionClassMethods(object):
sess.close()
@classmethod
- def identity_key(cls, *args, **kwargs):
+ @util.dependencies("sqlalchemy.orm.util")
+ def identity_key(cls, orm_util, *args, **kwargs):
"""Return an identity key.
This is an alias of :func:`.util.identity_key`.
@@ -469,6 +485,7 @@ class Session(_SessionClassMethods):
_enable_transaction_accounting=True,
autocommit=False, twophase=False,
weak_identity_map=True, binds=None, extension=None,
+ info=None,
query_cls=query.Query):
"""Construct a new Session.
@@ -557,6 +574,14 @@ class Session(_SessionClassMethods):
flush events, as well as a post-rollback event. **Deprecated.**
Please see :class:`.SessionEvents`.
+ :param info: optional dictionary of arbitrary data to be associated
+ with this :class:`.Session`. Is available via the :attr:`.Session.info`
+ attribute. Note the dictionary is copied at construction time so
+ that modifications to the per-:class:`.Session` dictionary will be local
+ to that :class:`.Session`.
+
+ .. versionadded:: 0.9.0
+
:param query_cls: Class which should be used to create new Query
objects, as returned by the ``query()`` method. Defaults to
:class:`~sqlalchemy.orm.query.Query`.
@@ -599,6 +624,8 @@ class Session(_SessionClassMethods):
self._enable_transaction_accounting = _enable_transaction_accounting
self.twophase = twophase
self._query_cls = query_cls
+ if info:
+ self.info.update(info)
if extension:
for ext in util.to_list(extension):
@@ -606,22 +633,39 @@ class Session(_SessionClassMethods):
if binds is not None:
for mapperortable, bind in binds.items():
- if isinstance(mapperortable, (type, Mapper)):
+ insp = inspect(mapperortable)
+ if insp.is_selectable:
+ self.bind_table(mapperortable, bind)
+ elif insp.is_mapper:
self.bind_mapper(mapperortable, bind)
else:
- self.bind_table(mapperortable, bind)
+ assert False
+
if not self.autocommit:
self.begin()
_sessions[self.hash_key] = self
- dispatch = event.dispatcher(SessionEvents)
-
connection_callable = None
transaction = None
"""The current active or inactive :class:`.SessionTransaction`."""
+ @util.memoized_property
+ def info(self):
+ """A user-modifiable dictionary.
+
+ The initial value of this dictionary can be populated using the
+ ``info`` argument to the :class:`.Session` constructor or
+ :class:`.sessionmaker` constructor or factory methods. The dictionary
+ here is always local to this :class:`.Session` and can be modified
+ independently of all other :class:`.Session` objects.
+
+ .. versionadded:: 0.9.0
+
+ """
+ return {}
+
def begin(self, subtransactions=False, nested=False):
"""Begin a transaction on this Session.
@@ -779,7 +823,7 @@ class Session(_SessionClassMethods):
etc.) which will be used to locate a bind, if a bind
cannot otherwise be identified.
- :param close_with_result: Passed to :meth:`Engine.connect`, indicating
+ :param close_with_result: Passed to :meth:`.Engine.connect`, indicating
the :class:`.Connection` should be considered "single use",
automatically closing when the first result set is closed. This
flag only has an effect if this :class:`.Session` is configured with
@@ -1136,7 +1180,18 @@ class Session(_SessionClassMethods):
def _autoflush(self):
if self.autoflush and not self._flushing:
- self.flush()
+ try:
+ self.flush()
+ except sa_exc.StatementError as e:
+ # note we are reraising StatementError as opposed to
+ # raising FlushError with "chaining" to remain compatible
+ # with code that catches StatementError, IntegrityError,
+ # etc.
+ e.add_detail(
+ "raised as a result of Query-invoked autoflush; "
+ "consider using a session.no_autoflush block if this "
+ "flush is occurring prematurely")
+ util.raise_from_cause(e)
def refresh(self, instance, attribute_names=None, lockmode=None):
"""Expire and refresh the attributes on the given instance.
@@ -1180,7 +1235,7 @@ class Session(_SessionClassMethods):
only_load_props=attribute_names) is None:
raise sa_exc.InvalidRequestError(
"Could not refresh instance '%s'" %
- orm_util.instance_str(instance))
+ instance_str(instance))
def expire_all(self):
"""Expires all persistent instances within this Session.
@@ -1291,7 +1346,7 @@ class Session(_SessionClassMethods):
if state.session_id is not self.hash_key:
raise sa_exc.InvalidRequestError(
"Instance %s is not present in this Session" %
- orm_util.state_str(state))
+ state_str(state))
cascaded = list(state.manager.mapper.cascade_iterator(
'expunge', state))
@@ -1331,7 +1386,7 @@ class Session(_SessionClassMethods):
"expect these generated values. Ensure also that "
"this flush() is not occurring at an inappropriate "
"time, such aswithin a load() event."
- % orm_util.state_str(state)
+ % state_str(state)
)
if state.key is None:
@@ -1434,7 +1489,7 @@ class Session(_SessionClassMethods):
if state.key is None:
raise sa_exc.InvalidRequestError(
"Instance '%s' is not persisted" %
- orm_util.state_str(state))
+ state_str(state))
if state in self._deleted:
return
@@ -1598,7 +1653,7 @@ class Session(_SessionClassMethods):
"merging to update the most recent version."
% (
existing_version,
- orm_util.state_str(merged_state),
+ state_str(merged_state),
merged_version
))
@@ -1622,13 +1677,13 @@ class Session(_SessionClassMethods):
if not self.identity_map.contains_state(state):
raise sa_exc.InvalidRequestError(
"Instance '%s' is not persistent within this Session" %
- orm_util.state_str(state))
+ state_str(state))
def _save_impl(self, state):
if state.key is not None:
raise sa_exc.InvalidRequestError(
"Object '%s' already has an identity - it can't be registered "
- "as pending" % orm_util.state_str(state))
+ "as pending" % state_str(state))
self._before_attach(state)
if state not in self._new:
@@ -1644,13 +1699,13 @@ class Session(_SessionClassMethods):
if state.key is None:
raise sa_exc.InvalidRequestError(
"Instance '%s' is not persisted" %
- orm_util.state_str(state))
+ state_str(state))
if state.deleted:
raise sa_exc.InvalidRequestError(
"Instance '%s' has been deleted. Use the make_transient() "
"function to send this object back to the transient state." %
- orm_util.state_str(state)
+ state_str(state)
)
self._before_attach(state)
self._deleted.pop(state, None)
@@ -1703,8 +1758,8 @@ class Session(_SessionClassMethods):
may not fire off a backref event, if the effective value
is what was already loaded from a foreign-key-holding value.
- The :meth:`.Session.enable_relationship_loading` method supersedes
- the ``load_on_pending`` flag on :func:`.relationship`. Unlike
+ The :meth:`.Session.enable_relationship_loading` method is
+ similar to the ``load_on_pending`` flag on :func:`.relationship`. Unlike
that flag, :meth:`.Session.enable_relationship_loading` allows
an object to remain transient while still being able to load
related items.
@@ -1721,6 +1776,12 @@ class Session(_SessionClassMethods):
.. versionadded:: 0.8
+ .. seealso::
+
+ ``load_on_pending`` at :func:`.relationship` - this flag
+ allows per-relationship loading of many-to-ones on items that
+ are pending.
+
"""
state = attributes.instance_state(obj)
self._attach(state, include_before=True)
@@ -1738,14 +1799,14 @@ class Session(_SessionClassMethods):
raise sa_exc.InvalidRequestError("Can't attach instance "
"%s; another instance with key %s is already "
"present in this session."
- % (orm_util.state_str(state), state.key))
+ % (state_str(state), state.key))
if state.session_id and \
state.session_id is not self.hash_key and \
state.session_id in _sessions:
raise sa_exc.InvalidRequestError(
"Object '%s' is already attached to session '%s' "
- "(this is '%s')" % (orm_util.state_str(state),
+ "(this is '%s')" % (state_str(state),
state.session_id, self.hash_key))
if state.session_id != self.hash_key:
@@ -2090,9 +2151,10 @@ class Session(_SessionClassMethods):
access to the full set of persistent objects (i.e., those
that have row identity) currently in the session.
- See also:
+ .. seealso::
- :func:`.identity_key` - operations involving identity keys.
+ :func:`.identity_key` - helper function to produce the keys used
+ in this dictionary.
"""
@@ -2196,7 +2258,8 @@ class sessionmaker(_SessionClassMethods):
def __init__(self, bind=None, class_=Session, autoflush=True,
autocommit=False,
- expire_on_commit=True, **kw):
+ expire_on_commit=True,
+ info=None, **kw):
"""Construct a new :class:`.sessionmaker`.
All arguments here except for ``class_`` correspond to arguments
@@ -2213,6 +2276,13 @@ class sessionmaker(_SessionClassMethods):
:class:`.Session` objects.
:param expire_on_commit=True: the expire_on_commit setting to use
with newly created :class:`.Session` objects.
+ :param info: optional dictionary of information that will be available
+ via :attr:`.Session.info`. Note this dictionary is *updated*, not
+ replaced, when the ``info`` parameter is specified to the specific
+ :class:`.Session` construction operation.
+
+ .. versionadded:: 0.9.0
+
:param \**kw: all other keyword arguments are passed to the constructor
of newly created :class:`.Session` objects.
@@ -2221,6 +2291,8 @@ class sessionmaker(_SessionClassMethods):
kw['autoflush'] = autoflush
kw['autocommit'] = autocommit
kw['expire_on_commit'] = expire_on_commit
+ if info is not None:
+ kw['info'] = info
self.kw = kw
# make our own subclass of the given class, so that
# events can be associated with it specifically.
@@ -2238,7 +2310,12 @@ class sessionmaker(_SessionClassMethods):
"""
for k, v in self.kw.items():
- local_kw.setdefault(k, v)
+ if k == 'info' and 'info' in local_kw:
+ d = v.copy()
+ d.update(local_kw['info'])
+ local_kw['info'] = d
+ else:
+ local_kw.setdefault(k, v)
return self.class_(**local_kw)
def configure(self, **new_kw):
@@ -2253,13 +2330,12 @@ class sessionmaker(_SessionClassMethods):
self.kw.update(new_kw)
def __repr__(self):
- return "%s(class_=%r%s)" % (
+ return "%s(class_=%r,%s)" % (
self.__class__.__name__,
self.class_.__name__,
", ".join("%s=%r" % (k, v) for k, v in self.kw.items())
)
-_sessions = weakref.WeakValueDictionary()
def make_transient(instance):
@@ -2304,12 +2380,4 @@ def object_session(instance):
raise exc.UnmappedInstanceError(instance)
-def _state_session(state):
- if state.session_id:
- try:
- return _sessions[state.session_id]
- except KeyError:
- pass
- return None
-
_new_sessionid = util.counter()
diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py
index c479d880d..9712dd055 100644
--- a/lib/sqlalchemy/orm/state.py
+++ b/lib/sqlalchemy/orm/state.py
@@ -1,5 +1,5 @@
# orm/state.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -13,16 +13,11 @@ defines a large part of the ORM's interactivity.
import weakref
from .. import util
-from . import exc as orm_exc, attributes, util as orm_util, interfaces
-from .attributes import (
- PASSIVE_NO_RESULT,
- SQL_OK, NEVER_SET, ATTR_WAS_SET, NO_VALUE,\
- PASSIVE_NO_INITIALIZE
- )
-sessionlib = util.importlater("sqlalchemy.orm", "session")
-instrumentation = util.importlater("sqlalchemy.orm", "instrumentation")
-mapperlib = util.importlater("sqlalchemy.orm", "mapperlib")
-
+from . import exc as orm_exc, interfaces
+from .path_registry import PathRegistry
+from .base import PASSIVE_NO_RESULT, SQL_OK, NEVER_SET, ATTR_WAS_SET, \
+ NO_VALUE, PASSIVE_NO_INITIALIZE, INIT_OK, PASSIVE_OFF
+from . import base
class InstanceState(interfaces._InspectionAttr):
"""tracks state information at the instance level."""
@@ -89,15 +84,16 @@ class InstanceState(interfaces._InspectionAttr):
not self._attached
@property
- def _attached(self):
+ @util.dependencies("sqlalchemy.orm.session")
+ def _attached(self, sessionlib):
return self.session_id is not None and \
self.session_id in sessionlib._sessions
@property
- def session(self):
+ @util.dependencies("sqlalchemy.orm.session")
+ def session(self, sessionlib):
"""Return the owning :class:`.Session` for this instance,
or ``None`` if none available."""
-
return sessionlib._state_session(self)
@property
@@ -186,7 +182,7 @@ class InstanceState(interfaces._InspectionAttr):
def dict(self):
o = self.obj()
if o is not None:
- return attributes.instance_dict(o)
+ return base.instance_dict(o)
else:
return {}
@@ -214,8 +210,8 @@ class InstanceState(interfaces._InspectionAttr):
return self._pending_mutations[key]
def __getstate__(self):
- d = {'instance': self.obj()}
- d.update(
+ state_dict = {'instance': self.obj()}
+ state_dict.update(
(k, self.__dict__[k]) for k in (
'committed_state', '_pending_mutations', 'modified', 'expired',
'callables', 'key', 'parents', 'load_options',
@@ -223,14 +219,14 @@ class InstanceState(interfaces._InspectionAttr):
) if k in self.__dict__
)
if self.load_path:
- d['load_path'] = self.load_path.serialize()
+ state_dict['load_path'] = self.load_path.serialize()
- self.manager.dispatch.pickle(self, d)
+ state_dict['manager'] = self.manager._serialize(self, state_dict)
- return d
+ return state_dict
- def __setstate__(self, state):
- inst = state['instance']
+ def __setstate__(self, state_dict):
+ inst = state_dict['instance']
if inst is not None:
self.obj = weakref.ref(inst, self._cleanup)
self.class_ = inst.__class__
@@ -239,42 +235,26 @@ class InstanceState(interfaces._InspectionAttr):
# due to storage of state in "parents". "class_"
# also new.
self.obj = None
- self.class_ = state['class_']
- self.manager = manager = instrumentation.manager_of_class(self.class_)
- if manager is None:
- raise orm_exc.UnmappedInstanceError(
- inst,
- "Cannot deserialize object of type %r - "
- "no mapper() has "
- "been configured for this class within the current "
- "Python process!" %
- self.class_)
- elif manager.is_mapped and not manager.mapper.configured:
- mapperlib.configure_mappers()
-
- self.committed_state = state.get('committed_state', {})
- self._pending_mutations = state.get('_pending_mutations', {})
- self.parents = state.get('parents', {})
- self.modified = state.get('modified', False)
- self.expired = state.get('expired', False)
- self.callables = state.get('callables', {})
+ self.class_ = state_dict['class_']
+
+ self.committed_state = state_dict.get('committed_state', {})
+ self._pending_mutations = state_dict.get('_pending_mutations', {})
+ self.parents = state_dict.get('parents', {})
+ self.modified = state_dict.get('modified', False)
+ self.expired = state_dict.get('expired', False)
+ self.callables = state_dict.get('callables', {})
self.__dict__.update([
- (k, state[k]) for k in (
+ (k, state_dict[k]) for k in (
'key', 'load_options',
- ) if k in state
+ ) if k in state_dict
])
- if 'load_path' in state:
- self.load_path = orm_util.PathRegistry.\
- deserialize(state['load_path'])
+ if 'load_path' in state_dict:
+ self.load_path = PathRegistry.\
+ deserialize(state_dict['load_path'])
- # setup _sa_instance_state ahead of time so that
- # unpickle events can access the object normally.
- # see [ticket:2362]
- if inst is not None:
- manager.setup_instance(inst, self)
- manager.dispatch.unpickle(self, state)
+ state_dict['manager'](self, inst, state_dict)
def _initialize(self, key):
"""Set this attribute to an empty value or collection,
@@ -413,6 +393,13 @@ class InstanceState(interfaces._InspectionAttr):
difference(self.dict)
@property
+ def _unloaded_non_object(self):
+ return self.unloaded.intersection(
+ attr for attr in self.manager
+ if self.manager[attr].impl.accepts_scalar_loader
+ )
+
+ @property
def expired_attributes(self):
"""Return the set of keys which are 'expired' to be loaded by
the manager's deferred scalar loader, assuming no pending
@@ -428,6 +415,8 @@ class InstanceState(interfaces._InspectionAttr):
return None
def _modified_event(self, dict_, attr, previous, collection=False):
+ if not attr.send_modified_events:
+ return
if attr.key not in self.committed_state:
if collection:
if previous is NEVER_SET:
@@ -461,7 +450,7 @@ class InstanceState(interfaces._InspectionAttr):
"collected."
% (
self.manager[attr.key],
- orm_util.state_class_str(self)
+ base.state_class_str(self)
))
self.modified = True
@@ -527,13 +516,13 @@ class AttributeState(object):
to a particular attribute on a particular mapped object.
The :class:`.AttributeState` object is accessed
- via the :attr:`.InstanceState.attr` collection
+ via the :attr:`.InstanceState.attrs` collection
of a particular :class:`.InstanceState`::
from sqlalchemy import inspect
insp = inspect(some_mapped_object)
- attr_state = insp.attr.some_attribute
+ attr_state = insp.attrs.some_attribute
"""
@@ -568,10 +557,40 @@ class AttributeState(object):
"""Return the current pre-flush change history for
this attribute, via the :class:`.History` interface.
+ This method will **not** emit loader callables if the value of the
+ attribute is unloaded.
+
+ .. seealso::
+
+ :meth:`.AttributeState.load_history` - retrieve history
+ using loader callables if the value is not locally present.
+
+ :func:`.attributes.get_history` - underlying function
+
"""
return self.state.get_history(self.key,
PASSIVE_NO_INITIALIZE)
+ def load_history(self):
+ """Return the current pre-flush change history for
+ this attribute, via the :class:`.History` interface.
+
+ This method **will** emit loader callables if the value of the
+ attribute is unloaded.
+
+ .. seealso::
+
+ :attr:`.AttributeState.history`
+
+ :func:`.attributes.get_history` - underlying function
+
+ .. versionadded:: 0.9.0
+
+ """
+ return self.state.get_history(self.key,
+ PASSIVE_OFF ^ INIT_OK)
+
+
class PendingCollection(object):
"""A writable placeholder for an unloaded collection.
diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py
index aa46d06a8..033e3d064 100644
--- a/lib/sqlalchemy/orm/strategies.py
+++ b/lib/sqlalchemy/orm/strategies.py
@@ -1,5 +1,5 @@
# orm/strategies.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -16,14 +16,13 @@ from . import (
)
from .state import InstanceState
from .util import _none_set
+from . import properties
from .interfaces import (
- LoaderStrategy, StrategizedOption, MapperOption, PropertyOption,
- StrategizedProperty
+ LoaderStrategy, StrategizedProperty
)
from .session import _state_session
import itertools
-
def _register_attribute(strategy, mapper, useobject,
compare_function=None,
typecallable=None,
@@ -45,10 +44,10 @@ def _register_attribute(strategy, mapper, useobject,
listen_hooks.append(single_parent_validator)
if prop.key in prop.parent.validators:
- fn, include_removes = prop.parent.validators[prop.key]
+ fn, opts = prop.parent.validators[prop.key]
listen_hooks.append(
lambda desc, prop: orm_util._validator_events(desc,
- prop.key, fn, include_removes)
+ prop.key, fn, **opts)
)
if useobject:
@@ -81,6 +80,7 @@ def _register_attribute(strategy, mapper, useobject,
callable_=callable_,
active_history=active_history,
impl_class=impl_class,
+ send_modified_events=not useobject or not prop.viewonly,
doc=prop.doc,
**kw
)
@@ -88,7 +88,7 @@ def _register_attribute(strategy, mapper, useobject,
for hook in listen_hooks:
hook(desc, prop)
-
+@properties.ColumnProperty.strategy_for(instrument=False, deferred=False)
class UninstrumentedColumnLoader(LoaderStrategy):
"""Represent the a non-instrumented MapperProperty.
@@ -100,17 +100,19 @@ class UninstrumentedColumnLoader(LoaderStrategy):
super(UninstrumentedColumnLoader, self).__init__(parent)
self.columns = self.parent_property.columns
- def setup_query(self, context, entity, path, adapter,
+ def setup_query(self, context, entity, path, loadopt, adapter,
column_collection=None, **kwargs):
for c in self.columns:
if adapter:
c = adapter.columns[c]
column_collection.append(c)
- def create_row_processor(self, context, path, mapper, row, adapter):
+ def create_row_processor(self, context, path, loadopt, mapper, row, adapter):
return None, None, None
+@log.class_logger
+@properties.ColumnProperty.strategy_for(instrument=True, deferred=False)
class ColumnLoader(LoaderStrategy):
"""Provide loading behavior for a :class:`.ColumnProperty`."""
@@ -119,7 +121,7 @@ class ColumnLoader(LoaderStrategy):
self.columns = self.parent_property.columns
self.is_composite = hasattr(self.parent_property, 'composite_class')
- def setup_query(self, context, entity, path,
+ def setup_query(self, context, entity, path, loadopt,
adapter, column_collection, **kwargs):
for c in self.columns:
if adapter:
@@ -131,7 +133,8 @@ class ColumnLoader(LoaderStrategy):
coltype = self.columns[0].type
# TODO: check all columns ? check for foreign key as well?
active_history = self.parent_property.active_history or \
- self.columns[0].primary_key
+ self.columns[0].primary_key or \
+ mapper.version_id_col in set(self.columns)
_register_attribute(self, mapper, useobject=False,
compare_function=coltype.compare_values,
@@ -139,7 +142,7 @@ class ColumnLoader(LoaderStrategy):
)
def create_row_processor(self, context, path,
- mapper, row, adapter):
+ loadopt, mapper, row, adapter):
key = self.key
# look through list of columns represented here
# to see which, if any, is present in the row.
@@ -156,9 +159,9 @@ class ColumnLoader(LoaderStrategy):
return expire_for_non_present_col, None, None
-log.class_logger(ColumnLoader)
-
+@log.class_logger
+@properties.ColumnProperty.strategy_for(deferred=True, instrument=True)
class DeferredColumnLoader(LoaderStrategy):
"""Provide loading behavior for a deferred :class:`.ColumnProperty`."""
@@ -170,16 +173,16 @@ class DeferredColumnLoader(LoaderStrategy):
self.columns = self.parent_property.columns
self.group = self.parent_property.group
- def create_row_processor(self, context, path, mapper, row, adapter):
+ def create_row_processor(self, context, path, loadopt, mapper, row, adapter):
col = self.columns[0]
if adapter:
col = adapter.columns[col]
key = self.key
if col in row:
- return self.parent_property._get_strategy(ColumnLoader).\
+ return self.parent_property._get_strategy_by_cls(ColumnLoader).\
create_row_processor(
- context, path, mapper, row, adapter)
+ context, path, loadopt, mapper, row, adapter)
elif not self.is_class_level:
set_deferred_for_local_state = InstanceState._row_processor(
@@ -202,15 +205,15 @@ class DeferredColumnLoader(LoaderStrategy):
expire_missing=False
)
- def setup_query(self, context, entity, path, adapter,
+ def setup_query(self, context, entity, path, loadopt, adapter,
only_load_props=None, **kwargs):
if (
- self.group is not None and
- context.attributes.get(('undefer', self.group), False)
+ loadopt and self.group and
+ loadopt.local_opts.get('undefer_group', False) == self.group
) or (only_load_props and self.key in only_load_props):
- self.parent_property._get_strategy(ColumnLoader).\
+ self.parent_property._get_strategy_by_cls(ColumnLoader).\
setup_query(context, entity,
- path, adapter, **kwargs)
+ path, loadopt, adapter, **kwargs)
def _load_for_state(self, state, passive):
if not state.key:
@@ -251,8 +254,6 @@ class DeferredColumnLoader(LoaderStrategy):
return attributes.ATTR_WAS_SET
-log.class_logger(DeferredColumnLoader)
-
class LoadDeferredColumns(object):
"""serializable loader object used by DeferredColumnLoader"""
@@ -269,29 +270,6 @@ class LoadDeferredColumns(object):
return strategy._load_for_state(state, passive)
-class DeferredOption(StrategizedOption):
- propagate_to_loaders = True
-
- def __init__(self, key, defer=False):
- super(DeferredOption, self).__init__(key)
- self.defer = defer
-
- def get_strategy_class(self):
- if self.defer:
- return DeferredColumnLoader
- else:
- return ColumnLoader
-
-
-class UndeferGroupOption(MapperOption):
- propagate_to_loaders = True
-
- def __init__(self, group):
- self.group = group
-
- def process_query(self, query):
- query._attributes[("undefer", self.group)] = True
-
class AbstractRelationshipLoader(LoaderStrategy):
"""LoaderStratgies which deal with related objects."""
@@ -304,6 +282,9 @@ class AbstractRelationshipLoader(LoaderStrategy):
+@log.class_logger
+@properties.RelationshipProperty.strategy_for(lazy="noload")
+@properties.RelationshipProperty.strategy_for(lazy=None)
class NoLoader(AbstractRelationshipLoader):
"""Provide loading behavior for a :class:`.RelationshipProperty`
with "lazy=None".
@@ -319,15 +300,16 @@ class NoLoader(AbstractRelationshipLoader):
typecallable=self.parent_property.collection_class,
)
- def create_row_processor(self, context, path, mapper, row, adapter):
+ def create_row_processor(self, context, path, loadopt, mapper, row, adapter):
def invoke_no_load(state, dict_, row):
state._initialize(self.key)
return invoke_no_load, None, None
-log.class_logger(NoLoader)
-
+@log.class_logger
+@properties.RelationshipProperty.strategy_for(lazy=True)
+@properties.RelationshipProperty.strategy_for(lazy="select")
class LazyLoader(AbstractRelationshipLoader):
"""Provide loading behavior for a :class:`.RelationshipProperty`
with "lazy=True", that is loads when first accessed.
@@ -350,7 +332,6 @@ class LazyLoader(AbstractRelationshipLoader):
# determine if our "lazywhere" clause is the same as the mapper's
# get() clause. then we can just use mapper.get()
- #from sqlalchemy.orm import query
self.use_get = not self.uselist and \
self.mapper._get_clause[0].compare(
self._lazywhere,
@@ -542,7 +523,8 @@ class LazyLoader(AbstractRelationshipLoader):
for pk in self.mapper.primary_key
]
- def _emit_lazyload(self, session, state, ident_key, passive):
+ @util.dependencies("sqlalchemy.orm.strategy_options")
+ def _emit_lazyload(self, strategy_options, session, state, ident_key, passive):
q = session.query(self.mapper)._adapt_all_clauses()
q = q._with_invoke_all_eagers(False)
@@ -571,7 +553,7 @@ class LazyLoader(AbstractRelationshipLoader):
if rev.direction is interfaces.MANYTOONE and \
rev._use_get and \
not isinstance(rev.strategy, LazyLoader):
- q = q.options(EagerLazyOption((rev.key,), lazy='select'))
+ q = q.options(strategy_options.Load(rev.parent).lazyload(rev.key))
lazy_clause = self.lazy_clause(state, passive=passive)
@@ -598,7 +580,7 @@ class LazyLoader(AbstractRelationshipLoader):
else:
return None
- def create_row_processor(self, context, path,
+ def create_row_processor(self, context, path, loadopt,
mapper, row, adapter):
key = self.key
if not self.is_class_level:
@@ -630,8 +612,6 @@ class LazyLoader(AbstractRelationshipLoader):
return reset_for_lazy_callable, None, None
-log.class_logger(LazyLoader)
-
class LoadLazyAttribute(object):
"""serializable loader object used by LazyLoader"""
@@ -648,18 +628,19 @@ class LoadLazyAttribute(object):
return strategy._load_for_state(state, passive)
+@properties.RelationshipProperty.strategy_for(lazy="immediate")
class ImmediateLoader(AbstractRelationshipLoader):
def init_class_attribute(self, mapper):
self.parent_property.\
- _get_strategy(LazyLoader).\
+ _get_strategy_by_cls(LazyLoader).\
init_class_attribute(mapper)
def setup_query(self, context, entity,
- path, adapter, column_collection=None,
+ path, loadopt, adapter, column_collection=None,
parentmapper=None, **kwargs):
pass
- def create_row_processor(self, context, path,
+ def create_row_processor(self, context, path, loadopt,
mapper, row, adapter):
def load_immediate(state, dict_, row):
state.get_impl(self.key).get(state, dict_)
@@ -667,6 +648,8 @@ class ImmediateLoader(AbstractRelationshipLoader):
return None, None, load_immediate
+@log.class_logger
+@properties.RelationshipProperty.strategy_for(lazy="subquery")
class SubqueryLoader(AbstractRelationshipLoader):
def __init__(self, parent):
super(SubqueryLoader, self).__init__(parent)
@@ -674,11 +657,11 @@ class SubqueryLoader(AbstractRelationshipLoader):
def init_class_attribute(self, mapper):
self.parent_property.\
- _get_strategy(LazyLoader).\
+ _get_strategy_by_cls(LazyLoader).\
init_class_attribute(mapper)
def setup_query(self, context, entity,
- path, adapter,
+ path, loadopt, adapter,
column_collection=None,
parentmapper=None, **kwargs):
@@ -703,14 +686,14 @@ class SubqueryLoader(AbstractRelationshipLoader):
# if not via query option, check for
# a cycle
- if not path.contains(context.attributes, "loaderstrategy"):
+ if not path.contains(context.attributes, "loader"):
if self.join_depth:
if path.length / 2 > self.join_depth:
return
elif subq_path.contains_mapper(self.mapper):
return
- subq_mapper, leftmost_mapper, leftmost_attr = \
+ subq_mapper, leftmost_mapper, leftmost_attr, leftmost_relationship = \
self._get_leftmost(subq_path)
orig_query = context.attributes.get(
@@ -721,7 +704,8 @@ class SubqueryLoader(AbstractRelationshipLoader):
# produce a subquery from it.
left_alias = self._generate_from_original_query(
orig_query, leftmost_mapper,
- leftmost_attr, entity.mapper
+ leftmost_attr, leftmost_relationship,
+ entity.mapper
)
# generate another Query that will join the
@@ -770,11 +754,12 @@ class SubqueryLoader(AbstractRelationshipLoader):
leftmost_mapper._columntoproperty[c].class_attribute
for c in leftmost_cols
]
- return subq_mapper, leftmost_mapper, leftmost_attr
+ return subq_mapper, leftmost_mapper, leftmost_attr, leftmost_prop
def _generate_from_original_query(self,
orig_query, leftmost_mapper,
- leftmost_attr, entity_mapper
+ leftmost_attr, leftmost_relationship,
+ entity_mapper
):
# reformat the original query
# to look only for significant columns
@@ -785,8 +770,22 @@ class SubqueryLoader(AbstractRelationshipLoader):
if not q._from_obj and entity_mapper.isa(leftmost_mapper):
q._set_select_from([entity_mapper], False)
+ target_cols = q._adapt_col_list(leftmost_attr)
+
# select from the identity columns of the outer
- q._set_entities(q._adapt_col_list(leftmost_attr))
+ q._set_entities(target_cols)
+
+ distinct_target_key = leftmost_relationship.distinct_target_key
+
+ if distinct_target_key is True:
+ q._distinct = True
+ elif distinct_target_key is None:
+ # if target_cols refer to a non-primary key or only
+ # part of a composite primary key, set the q as distinct
+ for t in set(c.table for c in target_cols):
+ if not set(target_cols).issuperset(t.primary_key):
+ q._distinct = True
+ break
if q._order_by is False:
q._order_by = leftmost_mapper.order_by
@@ -916,7 +915,36 @@ class SubqueryLoader(AbstractRelationshipLoader):
q = q.order_by(*eager_order_by)
return q
- def create_row_processor(self, context, path,
+ class _SubqCollections(object):
+ """Given a :class:`.Query` used to emit the "subquery load",
+ provide a load interface that executes the query at the
+ first moment a value is needed.
+
+ """
+ _data = None
+
+ def __init__(self, subq):
+ self.subq = subq
+
+ def get(self, key, default):
+ if self._data is None:
+ self._load()
+ return self._data.get(key, default)
+
+ def _load(self):
+ self._data = dict(
+ (k, [vv[0] for vv in v])
+ for k, v in itertools.groupby(
+ self.subq,
+ lambda x: x[1:]
+ )
+ )
+
+ def loader(self, state, dict_, row):
+ if self._data is None:
+ self._load()
+
+ def create_row_processor(self, context, path, loadopt,
mapper, row, adapter):
if not self.parent.class_manager[self.key].impl.supports_population:
raise sa_exc.InvalidRequestError(
@@ -938,12 +966,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
# call upon create_row_processor again
collections = path.get(context.attributes, "collections")
if collections is None:
- collections = dict(
- (k, [v[0] for v in v])
- for k, v in itertools.groupby(
- subq,
- lambda x: x[1:]
- ))
+ collections = self._SubqCollections(subq)
path.set(context.attributes, 'collections', collections)
if adapter:
@@ -963,7 +986,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
state.get_impl(self.key).\
set_committed_value(state, dict_, collection)
- return load_collection_from_subq, None, None
+ return load_collection_from_subq, None, None, collections.loader
def _create_scalar_loader(self, collections, local_cols):
def load_scalar_from_subq(state, dict_, row):
@@ -981,12 +1004,13 @@ class SubqueryLoader(AbstractRelationshipLoader):
state.get_impl(self.key).\
set_committed_value(state, dict_, scalar)
- return load_scalar_from_subq, None, None
-
+ return load_scalar_from_subq, None, None, collections.loader
-log.class_logger(SubqueryLoader)
+@log.class_logger
+@properties.RelationshipProperty.strategy_for(lazy="joined")
+@properties.RelationshipProperty.strategy_for(lazy=False)
class JoinedLoader(AbstractRelationshipLoader):
"""Provide loading behavior for a :class:`.RelationshipProperty`
using joined eager loading.
@@ -998,9 +1022,9 @@ class JoinedLoader(AbstractRelationshipLoader):
def init_class_attribute(self, mapper):
self.parent_property.\
- _get_strategy(LazyLoader).init_class_attribute(mapper)
+ _get_strategy_by_cls(LazyLoader).init_class_attribute(mapper)
- def setup_query(self, context, entity, path, adapter, \
+ def setup_query(self, context, entity, path, loadopt, adapter, \
column_collection=None, parentmapper=None,
allow_innerjoin=True,
**kwargs):
@@ -1013,19 +1037,19 @@ class JoinedLoader(AbstractRelationshipLoader):
with_polymorphic = None
- user_defined_adapter = path.get(context.attributes,
- "user_defined_eager_row_processor",
- False)
+ user_defined_adapter = self._init_user_defined_eager_proc(
+ loadopt, context) if loadopt else False
+
if user_defined_adapter is not False:
clauses, adapter, add_to_collection = \
- self._get_user_defined_adapter(
+ self._setup_query_on_user_defined_adapter(
context, entity, path, adapter,
user_defined_adapter
)
else:
# if not via query option, check for
# a cycle
- if not path.contains(context.attributes, "loaderstrategy"):
+ if not path.contains(context.attributes, "loader"):
if self.join_depth:
if path.length / 2 > self.join_depth:
return
@@ -1034,7 +1058,7 @@ class JoinedLoader(AbstractRelationshipLoader):
clauses, adapter, add_to_collection, \
allow_innerjoin = self._generate_row_adapter(
- context, entity, path, adapter,
+ context, entity, path, loadopt, adapter,
column_collection, parentmapper, allow_innerjoin
)
@@ -1069,24 +1093,74 @@ class JoinedLoader(AbstractRelationshipLoader):
"when using joined loading with with_polymorphic()."
)
- def _get_user_defined_adapter(self, context, entity,
+ def _init_user_defined_eager_proc(self, loadopt, context):
+
+ # check if the opt applies at all
+ if "eager_from_alias" not in loadopt.local_opts:
+ # nope
+ return False
+
+ path = loadopt.path.parent
+
+ # the option applies. check if the "user_defined_eager_row_processor"
+ # has been built up.
+ adapter = path.get(context.attributes,
+ "user_defined_eager_row_processor", False)
+ if adapter is not False:
+ # just return it
+ return adapter
+
+ # otherwise figure it out.
+ alias = loadopt.local_opts["eager_from_alias"]
+
+ root_mapper, prop = path[-2:]
+
+ #from .mapper import Mapper
+ #from .interfaces import MapperProperty
+ #assert isinstance(root_mapper, Mapper)
+ #assert isinstance(prop, MapperProperty)
+
+ if alias is not None:
+ if isinstance(alias, str):
+ alias = prop.target.alias(alias)
+ adapter = sql_util.ColumnAdapter(alias,
+ equivalents=prop.mapper._equivalent_columns)
+ else:
+ if path.contains(context.attributes, "path_with_polymorphic"):
+ with_poly_info = path.get(context.attributes,
+ "path_with_polymorphic")
+ adapter = orm_util.ORMAdapter(
+ with_poly_info.entity,
+ equivalents=prop.mapper._equivalent_columns)
+ else:
+ adapter = context.query._polymorphic_adapters.get(prop.mapper, None)
+ path.set(context.attributes,
+ "user_defined_eager_row_processor",
+ adapter)
+
+ return adapter
+
+ def _setup_query_on_user_defined_adapter(self, context, entity,
path, adapter, user_defined_adapter):
- adapter = entity._get_entity_clauses(context.query, context)
- if adapter and user_defined_adapter:
- user_defined_adapter = user_defined_adapter.wrap(adapter)
- path.set(context.attributes, "user_defined_eager_row_processor",
- user_defined_adapter)
- elif adapter:
- user_defined_adapter = adapter
- path.set(context.attributes, "user_defined_eager_row_processor",
- user_defined_adapter)
+ # apply some more wrapping to the "user defined adapter"
+ # if we are setting up the query for SQL render.
+ adapter = entity._get_entity_clauses(context.query, context)
+
+ if adapter and user_defined_adapter:
+ user_defined_adapter = user_defined_adapter.wrap(adapter)
+ path.set(context.attributes, "user_defined_eager_row_processor",
+ user_defined_adapter)
+ elif adapter:
+ user_defined_adapter = adapter
+ path.set(context.attributes, "user_defined_eager_row_processor",
+ user_defined_adapter)
- add_to_collection = context.primary_columns
- return user_defined_adapter, adapter, add_to_collection
+ add_to_collection = context.primary_columns
+ return user_defined_adapter, adapter, add_to_collection
def _generate_row_adapter(self,
- context, entity, path, adapter,
+ context, entity, path, loadopt, adapter,
column_collection, parentmapper, allow_innerjoin
):
with_poly_info = path.get(
@@ -1109,9 +1183,12 @@ class JoinedLoader(AbstractRelationshipLoader):
if self.parent_property.direction != interfaces.MANYTOONE:
context.multi_row_eager_loaders = True
- innerjoin = allow_innerjoin and path.get(context.attributes,
- "eager_join_type",
- self.parent_property.innerjoin)
+ innerjoin = allow_innerjoin and (
+ loadopt.local_opts.get(
+ 'innerjoin', self.parent_property.innerjoin)
+ if loadopt is not None
+ else self.parent_property.innerjoin
+ )
if not innerjoin:
# if this is an outer join, all eager joins from
# here must also be outer joins
@@ -1202,7 +1279,7 @@ class JoinedLoader(AbstractRelationshipLoader):
# by the Query propagates those columns outward.
# This has the effect
# of "undefering" those columns.
- for col in sql_util.find_columns(
+ for col in sql_util._find_columns(
self.parent_property.primaryjoin):
if localparent.mapped_table.c.contains_column(col):
if adapter:
@@ -1218,10 +1295,10 @@ class JoinedLoader(AbstractRelationshipLoader):
)
)
- def _create_eager_adapter(self, context, row, adapter, path):
- user_defined_adapter = path.get(context.attributes,
- "user_defined_eager_row_processor",
- False)
+ def _create_eager_adapter(self, context, row, adapter, path, loadopt):
+ user_defined_adapter = self._init_user_defined_eager_proc(
+ loadopt, context) if loadopt else False
+
if user_defined_adapter is not False:
decorator = user_defined_adapter
# user defined eagerloads are part of the "primary"
@@ -1244,7 +1321,7 @@ class JoinedLoader(AbstractRelationshipLoader):
# processor, will cause a degrade to lazy
return False
- def create_row_processor(self, context, path, mapper, row, adapter):
+ def create_row_processor(self, context, path, loadopt, mapper, row, adapter):
if not self.parent.class_manager[self.key].impl.supports_population:
raise sa_exc.InvalidRequestError(
"'%s' does not support object "
@@ -1256,7 +1333,7 @@ class JoinedLoader(AbstractRelationshipLoader):
eager_adapter = self._create_eager_adapter(
context,
row,
- adapter, our_path)
+ adapter, our_path, loadopt)
if eager_adapter is not False:
key = self.key
@@ -1273,9 +1350,9 @@ class JoinedLoader(AbstractRelationshipLoader):
return self._create_collection_loader(context, key, _instance)
else:
return self.parent_property.\
- _get_strategy(LazyLoader).\
+ _get_strategy_by_cls(LazyLoader).\
create_row_processor(
- context, path,
+ context, path, loadopt,
mapper, row, adapter)
def _create_collection_loader(self, context, key, _instance):
@@ -1336,102 +1413,6 @@ class JoinedLoader(AbstractRelationshipLoader):
None, load_scalar_from_joined_exec
-log.class_logger(JoinedLoader)
-
-
-class EagerLazyOption(StrategizedOption):
- def __init__(self, key, lazy=True, chained=False,
- propagate_to_loaders=True
- ):
- if isinstance(key[0], str) and key[0] == '*':
- if len(key) != 1:
- raise sa_exc.ArgumentError(
- "Wildcard identifier '*' must "
- "be specified alone.")
- key = ("relationship:*",)
- propagate_to_loaders = False
- super(EagerLazyOption, self).__init__(key)
- self.lazy = lazy
- self.chained = chained
- self.propagate_to_loaders = propagate_to_loaders
- self.strategy_cls = factory(lazy)
-
- def get_strategy_class(self):
- return self.strategy_cls
-
-_factory = {
- False: JoinedLoader,
- "joined": JoinedLoader,
- None: NoLoader,
- "noload": NoLoader,
- "select": LazyLoader,
- True: LazyLoader,
- "subquery": SubqueryLoader,
- "immediate": ImmediateLoader
-}
-
-
-def factory(identifier):
- return _factory.get(identifier, LazyLoader)
-
-
-class EagerJoinOption(PropertyOption):
-
- def __init__(self, key, innerjoin, chained=False):
- super(EagerJoinOption, self).__init__(key)
- self.innerjoin = innerjoin
- self.chained = chained
-
- def process_query_property(self, query, paths):
- if self.chained:
- for path in paths:
- path.set(query._attributes, "eager_join_type", self.innerjoin)
- else:
- paths[-1].set(query._attributes, "eager_join_type", self.innerjoin)
-
-
-class LoadEagerFromAliasOption(PropertyOption):
-
- def __init__(self, key, alias=None, chained=False):
- super(LoadEagerFromAliasOption, self).__init__(key)
- if alias is not None:
- if not isinstance(alias, str):
- info = inspect(alias)
- alias = info.selectable
- self.alias = alias
- self.chained = chained
-
- def process_query_property(self, query, paths):
- if self.chained:
- for path in paths[0:-1]:
- (root_mapper, prop) = path.path[-2:]
- adapter = query._polymorphic_adapters.get(prop.mapper, None)
- path.setdefault(query._attributes,
- "user_defined_eager_row_processor",
- adapter)
-
- root_mapper, prop = paths[-1].path[-2:]
- if self.alias is not None:
- if isinstance(self.alias, str):
- self.alias = prop.target.alias(self.alias)
- paths[-1].set(query._attributes,
- "user_defined_eager_row_processor",
- sql_util.ColumnAdapter(self.alias,
- equivalents=prop.mapper._equivalent_columns)
- )
- else:
- if paths[-1].contains(query._attributes, "path_with_polymorphic"):
- with_poly_info = paths[-1].get(query._attributes,
- "path_with_polymorphic")
- adapter = orm_util.ORMAdapter(
- with_poly_info.entity,
- equivalents=prop.mapper._equivalent_columns)
- else:
- adapter = query._polymorphic_adapters.get(prop.mapper, None)
- paths[-1].set(query._attributes,
- "user_defined_eager_row_processor",
- adapter)
-
def single_parent_validator(desc, prop):
def _do_check(state, value, oldvalue, initiator):
diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py
new file mode 100644
index 000000000..6e838ccb7
--- /dev/null
+++ b/lib/sqlalchemy/orm/strategy_options.py
@@ -0,0 +1,924 @@
+# orm/strategy_options.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""
+
+"""
+
+from .interfaces import MapperOption, PropComparator
+from .. import util
+from ..sql.base import _generative, Generative
+from .. import exc as sa_exc, inspect
+from .base import _is_aliased_class, _class_to_mapper
+from . import util as orm_util
+from .path_registry import PathRegistry, TokenRegistry, \
+ _WILDCARD_TOKEN, _DEFAULT_TOKEN
+
+class Load(Generative, MapperOption):
+ """Represents loader options which modify the state of a
+ :class:`.Query` in order to affect how various mapped attributes are loaded.
+
+ .. versionadded:: 0.9.0 The :meth:`.Load` system is a new foundation for
+ the existing system of loader options, including options such as
+ :func:`.orm.joinedload`, :func:`.orm.defer`, and others. In particular,
+ it introduces a new method-chained system that replaces the need for
+ dot-separated paths as well as "_all()" options such as :func:`.orm.joinedload_all`.
+
+ A :class:`.Load` object can be used directly or indirectly. To use one
+ directly, instantiate given the parent class. This style of usage is
+ useful when dealing with a :class:`.Query` that has multiple entities,
+ or when producing a loader option that can be applied generically to
+ any style of query::
+
+ myopt = Load(MyClass).joinedload("widgets")
+
+ The above ``myopt`` can now be used with :meth:`.Query.options`::
+
+ session.query(MyClass).options(myopt)
+
+ The :class:`.Load` construct is invoked indirectly whenever one makes use
+ of the various loader options that are present in ``sqlalchemy.orm``, including
+ options such as :func:`.orm.joinedload`, :func:`.orm.defer`, :func:`.orm.subqueryload`,
+ and all the rest. These constructs produce an "anonymous" form of the
+ :class:`.Load` object which tracks attributes and options, but is not linked
+ to a parent class until it is associated with a parent :class:`.Query`::
+
+ # produce "unbound" Load object
+ myopt = joinedload("widgets")
+
+ # when applied using options(), the option is "bound" to the
+ # class observed in the given query, e.g. MyClass
+ session.query(MyClass).options(myopt)
+
+ Whether the direct or indirect style is used, the :class:`.Load` object
+ returned now represents a specific "path" along the entities of a :class:`.Query`.
+ This path can be traversed using a standard method-chaining approach.
+ Supposing a class hierarchy such as ``User``, ``User.addresses -> Address``,
+ ``User.orders -> Order`` and ``Order.items -> Item``, we can specify a variety
+ of loader options along each element in the "path"::
+
+ session.query(User).options(
+ joinedload("addresses"),
+ subqueryload("orders").joinedload("items")
+ )
+
+ Where above, the ``addresses`` collection will be joined-loaded, the
+ ``orders`` collection will be subquery-loaded, and within that subquery load
+ the ``items`` collection will be joined-loaded.
+
+
+ """
+ def __init__(self, entity):
+ insp = inspect(entity)
+ self.path = insp._path_registry
+ self.context = {}
+ self.local_opts = {}
+
+ def _generate(self):
+ cloned = super(Load, self)._generate()
+ cloned.local_opts = {}
+ return cloned
+
+ strategy = None
+ propagate_to_loaders = False
+
+ def process_query(self, query):
+ self._process(query, True)
+
+ def process_query_conditionally(self, query):
+ self._process(query, False)
+
+ def _process(self, query, raiseerr):
+ current_path = query._current_path
+ if current_path:
+ for (token, start_path), loader in self.context.items():
+ chopped_start_path = self._chop_path(start_path, current_path)
+ if chopped_start_path is not None:
+ query._attributes[(token, chopped_start_path)] = loader
+ else:
+ query._attributes.update(self.context)
+
+ def _generate_path(self, path, attr, wildcard_key, raiseerr=True):
+ if raiseerr and not path.has_entity:
+ if isinstance(path, TokenRegistry):
+ raise sa_exc.ArgumentError(
+ "Wildcard token cannot be followed by another entity")
+ else:
+ raise sa_exc.ArgumentError(
+ "Attribute '%s' of entity '%s' does not "
+ "refer to a mapped entity" %
+ (path.prop.key, path.parent.entity)
+ )
+
+ if isinstance(attr, util.string_types):
+ default_token = attr.endswith(_DEFAULT_TOKEN)
+ if attr.endswith(_WILDCARD_TOKEN) or default_token:
+ if default_token:
+ self.propagate_to_loaders = False
+ if wildcard_key:
+ attr = "%s:%s" % (wildcard_key, attr)
+ return path.token(attr)
+
+ try:
+ # use getattr on the class to work around
+ # synonyms, hybrids, etc.
+ attr = getattr(path.entity.class_, attr)
+ except AttributeError:
+ if raiseerr:
+ raise sa_exc.ArgumentError(
+ "Can't find property named '%s' on the "
+ "mapped entity %s in this Query. " % (
+ attr, path.entity)
+ )
+ else:
+ return None
+ else:
+ attr = attr.property
+
+ path = path[attr]
+ else:
+ prop = attr.property
+
+ if not prop.parent.common_parent(path.mapper):
+ if raiseerr:
+ raise sa_exc.ArgumentError("Attribute '%s' does not "
+ "link from element '%s'" % (attr, path.entity))
+ else:
+ return None
+
+ if getattr(attr, '_of_type', None):
+ ac = attr._of_type
+ ext_info = inspect(ac)
+
+ path_element = ext_info.mapper
+ if not ext_info.is_aliased_class:
+ ac = orm_util.with_polymorphic(
+ ext_info.mapper.base_mapper,
+ ext_info.mapper, aliased=True,
+ _use_mapper_path=True)
+ path.entity_path[prop].set(self.context,
+ "path_with_polymorphic", inspect(ac))
+ path = path[prop][path_element]
+ else:
+ path = path[prop]
+
+ if path.has_entity:
+ path = path.entity_path
+ return path
+
+ def _coerce_strat(self, strategy):
+ if strategy is not None:
+ strategy = tuple(sorted(strategy.items()))
+ return strategy
+
+ @_generative
+ def set_relationship_strategy(self, attr, strategy, propagate_to_loaders=True):
+ strategy = self._coerce_strat(strategy)
+
+ self.propagate_to_loaders = propagate_to_loaders
+ # if the path is a wildcard, this will set propagate_to_loaders=False
+ self.path = self._generate_path(self.path, attr, "relationship")
+ self.strategy = strategy
+ if strategy is not None:
+ self._set_path_strategy()
+
+ @_generative
+ def set_column_strategy(self, attrs, strategy, opts=None):
+ strategy = self._coerce_strat(strategy)
+
+ for attr in attrs:
+ path = self._generate_path(self.path, attr, "column")
+ cloned = self._generate()
+ cloned.strategy = strategy
+ cloned.path = path
+ cloned.propagate_to_loaders = True
+ if opts:
+ cloned.local_opts.update(opts)
+ cloned._set_path_strategy()
+
+ def _set_path_strategy(self):
+ if self.path.has_entity:
+ self.path.parent.set(self.context, "loader", self)
+ else:
+ self.path.set(self.context, "loader", self)
+
+ def __getstate__(self):
+ d = self.__dict__.copy()
+ d["path"] = self.path.serialize()
+ return d
+
+ def __setstate__(self, state):
+ self.__dict__.update(state)
+ self.path = PathRegistry.deserialize(self.path)
+
+ def _chop_path(self, to_chop, path):
+ i = -1
+
+ for i, (c_token, p_token) in enumerate(zip(to_chop, path.path)):
+ if isinstance(c_token, util.string_types):
+ # TODO: this is approximated from the _UnboundLoad
+ # version and probably has issues, not fully covered.
+
+ if i == 0 and c_token.endswith(':' + _DEFAULT_TOKEN):
+ return to_chop
+ elif c_token != 'relationship:%s' % (_WILDCARD_TOKEN,) and c_token != p_token.key:
+ return None
+
+ if c_token is p_token:
+ continue
+ else:
+ return None
+ return to_chop[i+1:]
+
+
+class _UnboundLoad(Load):
+ """Represent a loader option that isn't tied to a root entity.
+
+ The loader option will produce an entity-linked :class:`.Load`
+ object when it is passed :meth:`.Query.options`.
+
+ This provides compatibility with the traditional system
+ of freestanding options, e.g. ``joinedload('x.y.z')``.
+
+ """
+ def __init__(self):
+ self.path = ()
+ self._to_bind = set()
+ self.local_opts = {}
+
+ _is_chain_link = False
+
+ def _set_path_strategy(self):
+ self._to_bind.add(self)
+
+ def _generate_path(self, path, attr, wildcard_key):
+ if wildcard_key and isinstance(attr, util.string_types) and \
+ attr in (_WILDCARD_TOKEN, _DEFAULT_TOKEN):
+ if attr == _DEFAULT_TOKEN:
+ self.propagate_to_loaders = False
+ attr = "%s:%s" % (wildcard_key, attr)
+
+ return path + (attr, )
+
+ def __getstate__(self):
+ d = self.__dict__.copy()
+ d['path'] = ret = []
+ for token in util.to_list(self.path):
+ if isinstance(token, PropComparator):
+ ret.append((token._parentmapper.class_, token.key))
+ else:
+ ret.append(token)
+ return d
+
+ def __setstate__(self, state):
+ ret = []
+ for key in state['path']:
+ if isinstance(key, tuple):
+ cls, propkey = key
+ ret.append(getattr(cls, propkey))
+ else:
+ ret.append(key)
+ state['path'] = tuple(ret)
+ self.__dict__ = state
+
+ def _process(self, query, raiseerr):
+ for val in self._to_bind:
+ val._bind_loader(query, query._attributes, raiseerr)
+
+ @classmethod
+ def _from_keys(self, meth, keys, chained, kw):
+ opt = _UnboundLoad()
+
+ def _split_key(key):
+ if isinstance(key, util.string_types):
+ # coerce fooload('*') into "default loader strategy"
+ if key == _WILDCARD_TOKEN:
+ return (_DEFAULT_TOKEN, )
+ # coerce fooload(".*") into "wildcard on default entity"
+ elif key.startswith("." + _WILDCARD_TOKEN):
+ key = key[1:]
+ return key.split(".")
+ else:
+ return (key,)
+ all_tokens = [token for key in keys for token in _split_key(key)]
+
+ for token in all_tokens[0:-1]:
+ if chained:
+ opt = meth(opt, token, **kw)
+ else:
+ opt = opt.defaultload(token)
+ opt._is_chain_link = True
+
+ opt = meth(opt, all_tokens[-1], **kw)
+ opt._is_chain_link = False
+
+ return opt
+
+
+ def _chop_path(self, to_chop, path):
+ i = -1
+ for i, (c_token, (p_mapper, p_prop)) in enumerate(zip(to_chop, path.pairs())):
+ if isinstance(c_token, util.string_types):
+ if i == 0 and c_token.endswith(':' + _DEFAULT_TOKEN):
+ return to_chop
+ elif c_token != 'relationship:%s' % (_WILDCARD_TOKEN,) and c_token != p_prop.key:
+ return None
+ elif isinstance(c_token, PropComparator):
+ if c_token.property is not p_prop:
+ return None
+ else:
+ i += 1
+
+ return to_chop[i:]
+
+
+ def _bind_loader(self, query, context, raiseerr):
+ start_path = self.path
+ # _current_path implies we're in a
+ # secondary load with an existing path
+
+ current_path = query._current_path
+ if current_path:
+ start_path = self._chop_path(start_path, current_path)
+
+ if not start_path:
+ return None
+
+ token = start_path[0]
+ if isinstance(token, util.string_types):
+ entity = self._find_entity_basestring(query, token, raiseerr)
+ elif isinstance(token, PropComparator):
+ prop = token.property
+ entity = self._find_entity_prop_comparator(
+ query,
+ prop.key,
+ token._parententity,
+ raiseerr)
+
+ else:
+ raise sa_exc.ArgumentError(
+ "mapper option expects "
+ "string key or list of attributes")
+
+ if not entity:
+ return
+
+ path_element = entity.entity_zero
+
+ # transfer our entity-less state into a Load() object
+ # with a real entity path.
+ loader = Load(path_element)
+ loader.context = context
+ loader.strategy = self.strategy
+
+ path = loader.path
+ for token in start_path:
+ loader.path = path = loader._generate_path(
+ loader.path, token, None, raiseerr)
+ if path is None:
+ return
+
+ loader.local_opts.update(self.local_opts)
+
+ if loader.path.has_entity:
+ effective_path = loader.path.parent
+ else:
+ effective_path = loader.path
+
+ # prioritize "first class" options over those
+ # that were "links in the chain", e.g. "x" and "y" in someload("x.y.z")
+ # versus someload("x") / someload("x.y")
+ if self._is_chain_link:
+ effective_path.setdefault(context, "loader", loader)
+ else:
+ effective_path.set(context, "loader", loader)
+
+ def _find_entity_prop_comparator(self, query, token, mapper, raiseerr):
+ if _is_aliased_class(mapper):
+ searchfor = mapper
+ else:
+ searchfor = _class_to_mapper(mapper)
+ for ent in query._mapper_entities:
+ if ent.corresponds_to(searchfor):
+ return ent
+ else:
+ if raiseerr:
+ if not list(query._mapper_entities):
+ raise sa_exc.ArgumentError(
+ "Query has only expression-based entities - "
+ "can't find property named '%s'."
+ % (token, )
+ )
+ else:
+ raise sa_exc.ArgumentError(
+ "Can't find property '%s' on any entity "
+ "specified in this Query. Note the full path "
+ "from root (%s) to target entity must be specified."
+ % (token, ",".join(str(x) for
+ x in query._mapper_entities))
+ )
+ else:
+ return None
+
+ def _find_entity_basestring(self, query, token, raiseerr):
+ if token.endswith(':' + _WILDCARD_TOKEN):
+ if len(list(query._mapper_entities)) != 1:
+ if raiseerr:
+ raise sa_exc.ArgumentError(
+ "Wildcard loader can only be used with exactly "
+ "one entity. Use Load(ent) to specify "
+ "specific entities.")
+
+ for ent in query._mapper_entities:
+ # return only the first _MapperEntity when searching
+ # based on string prop name. Ideally object
+ # attributes are used to specify more exactly.
+ return ent
+ else:
+ if raiseerr:
+ raise sa_exc.ArgumentError(
+ "Query has only expression-based entities - "
+ "can't find property named '%s'."
+ % (token, )
+ )
+ else:
+ return None
+
+
+
+class loader_option(object):
+ def __init__(self):
+ pass
+
+ def __call__(self, fn):
+ self.name = name = fn.__name__
+ self.fn = fn
+ if hasattr(Load, name):
+ raise TypeError("Load class already has a %s method." % (name))
+ setattr(Load, name, fn)
+
+ return self
+
+ def _add_unbound_fn(self, fn):
+ self._unbound_fn = fn
+ fn_doc = self.fn.__doc__
+ self.fn.__doc__ = """Produce a new :class:`.Load` object with the
+:func:`.orm.%(name)s` option applied.
+
+See :func:`.orm.%(name)s` for usage examples.
+
+""" % {"name": self.name}
+
+ fn.__doc__ = fn_doc
+ return self
+
+ def _add_unbound_all_fn(self, fn):
+ self._unbound_all_fn = fn
+ fn.__doc__ = """Produce a standalone "all" option for :func:`.orm.%(name)s`.
+
+.. deprecated:: 0.9.0
+
+ The "_all()" style is replaced by method chaining, e.g.::
+
+ session.query(MyClass).options(
+ %(name)s("someattribute").%(name)s("anotherattribute")
+ )
+
+""" % {"name": self.name}
+ return self
+
+@loader_option()
+def contains_eager(loadopt, attr, alias=None):
+ """Indicate that the given attribute should be eagerly loaded from
+ columns stated manually in the query.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
+ The option is used in conjunction with an explicit join that loads
+ the desired rows, i.e.::
+
+ sess.query(Order).\\
+ join(Order.user).\\
+ options(contains_eager(Order.user))
+
+ The above query would join from the ``Order`` entity to its related
+ ``User`` entity, and the returned ``Order`` objects would have the
+ ``Order.user`` attribute pre-populated.
+
+ :func:`contains_eager` also accepts an `alias` argument, which is the
+ string name of an alias, an :func:`~sqlalchemy.sql.expression.alias`
+ construct, or an :func:`~sqlalchemy.orm.aliased` construct. Use this when
+ the eagerly-loaded rows are to come from an aliased table::
+
+ user_alias = aliased(User)
+ sess.query(Order).\\
+ join((user_alias, Order.user)).\\
+ options(contains_eager(Order.user, alias=user_alias))
+
+ .. seealso::
+
+ :ref:`contains_eager`
+
+ """
+ if alias is not None:
+ if not isinstance(alias, str):
+ info = inspect(alias)
+ alias = info.selectable
+
+ cloned = loadopt.set_relationship_strategy(
+ attr,
+ {"lazy": "joined"},
+ propagate_to_loaders=False
+ )
+ cloned.local_opts['eager_from_alias'] = alias
+ return cloned
+
+@contains_eager._add_unbound_fn
+def contains_eager(*keys, **kw):
+ return _UnboundLoad()._from_keys(_UnboundLoad.contains_eager, keys, True, kw)
+
+@loader_option()
+def load_only(loadopt, *attrs):
+ """Indicate that for a particular entity, only the given list
+ of column-based attribute names should be loaded; all others will be
+ deferred.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
+ Example - given a class ``User``, load only the ``name`` and ``fullname``
+ attributes::
+
+ session.query(User).options(load_only("name", "fullname"))
+
+ Example - given a relationship ``User.addresses -> Address``, specify
+ subquery loading for the ``User.addresses`` collection, but on each ``Address``
+ object load only the ``email_address`` attribute::
+
+ session.query(User).options(
+        subqueryload("addresses").load_only("email_address")
+ )
+
+ For a :class:`.Query` that has multiple entities, the lead entity can be
+ specifically referred to using the :class:`.Load` constructor::
+
+ session.query(User, Address).join(User.addresses).options(
+ Load(User).load_only("name", "fullname"),
+                    Load(Address).load_only("email_address")
+ )
+
+
+ .. versionadded:: 0.9.0
+
+ """
+ cloned = loadopt.set_column_strategy(
+ attrs,
+ {"deferred": False, "instrument": True}
+ )
+ cloned.set_column_strategy("*",
+ {"deferred": True, "instrument": True})
+ return cloned
+
+@load_only._add_unbound_fn
+def load_only(*attrs):
+ return _UnboundLoad().load_only(*attrs)
+
+@loader_option()
+def joinedload(loadopt, attr, innerjoin=None):
+ """Indicate that the given attribute should be loaded using joined
+ eager loading.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
+ examples::
+
+ # joined-load the "orders" collection on "User"
+ query(User).options(joinedload(User.orders))
+
+ # joined-load Order.items and then Item.keywords
+ query(Order).options(joinedload(Order.items).joinedload(Item.keywords))
+
+ # lazily load Order.items, but when Items are loaded,
+ # joined-load the keywords collection
+ query(Order).options(lazyload(Order.items).joinedload(Item.keywords))
+
+ :func:`.orm.joinedload` also accepts a keyword argument `innerjoin=True` which
+ indicates using an inner join instead of an outer::
+
+ query(Order).options(joinedload(Order.user, innerjoin=True))
+
+ .. note::
+
+ The joins produced by :func:`.orm.joinedload` are **anonymously aliased**.
+ The criteria by which the join proceeds cannot be modified, nor can the
+ :class:`.Query` refer to these joins in any way, including ordering.
+
+ To produce a specific SQL JOIN which is explicitly available, use
+ :meth:`.Query.join`. To combine explicit JOINs with eager loading
+ of collections, use :func:`.orm.contains_eager`; see :ref:`contains_eager`.
+
+ .. seealso::
+
+ :ref:`loading_toplevel`
+
+ :ref:`contains_eager`
+
+ :func:`.orm.subqueryload`
+
+ :func:`.orm.lazyload`
+
+ """
+ loader = loadopt.set_relationship_strategy(attr, {"lazy": "joined"})
+ if innerjoin is not None:
+ loader.local_opts['innerjoin'] = innerjoin
+ return loader
+
+@joinedload._add_unbound_fn
+def joinedload(*keys, **kw):
+ return _UnboundLoad._from_keys(
+ _UnboundLoad.joinedload, keys, False, kw)
+
+@joinedload._add_unbound_all_fn
+def joinedload_all(*keys, **kw):
+ return _UnboundLoad._from_keys(
+ _UnboundLoad.joinedload, keys, True, kw)
+
+
+@loader_option()
+def subqueryload(loadopt, attr):
+ """Indicate that the given attribute should be loaded using
+ subquery eager loading.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
+ examples::
+
+ # subquery-load the "orders" collection on "User"
+ query(User).options(subqueryload(User.orders))
+
+ # subquery-load Order.items and then Item.keywords
+ query(Order).options(subqueryload(Order.items).subqueryload(Item.keywords))
+
+ # lazily load Order.items, but when Items are loaded,
+ # subquery-load the keywords collection
+ query(Order).options(lazyload(Order.items).subqueryload(Item.keywords))
+
+
+ .. seealso::
+
+ :ref:`loading_toplevel`
+
+ :func:`.orm.joinedload`
+
+ :func:`.orm.lazyload`
+
+ """
+ return loadopt.set_relationship_strategy(attr, {"lazy": "subquery"})
+
+@subqueryload._add_unbound_fn
+def subqueryload(*keys):
+ return _UnboundLoad._from_keys(_UnboundLoad.subqueryload, keys, False, {})
+
+@subqueryload._add_unbound_all_fn
+def subqueryload_all(*keys):
+ return _UnboundLoad._from_keys(_UnboundLoad.subqueryload, keys, True, {})
+
+@loader_option()
+def lazyload(loadopt, attr):
+ """Indicate that the given attribute should be loaded using "lazy"
+ loading.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
+ """
+ return loadopt.set_relationship_strategy(attr, {"lazy": "select"})
+
+@lazyload._add_unbound_fn
+def lazyload(*keys):
+ return _UnboundLoad._from_keys(_UnboundLoad.lazyload, keys, False, {})
+
+@lazyload._add_unbound_all_fn
+def lazyload_all(*keys):
+ return _UnboundLoad._from_keys(_UnboundLoad.lazyload, keys, True, {})
+
+@loader_option()
+def immediateload(loadopt, attr):
+ """Indicate that the given attribute should be loaded using
+ an immediate load with a per-attribute SELECT statement.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
+ .. seealso::
+
+ :ref:`loading_toplevel`
+
+ :func:`.orm.joinedload`
+
+ :func:`.orm.lazyload`
+
+ """
+ loader = loadopt.set_relationship_strategy(attr, {"lazy": "immediate"})
+ return loader
+
+@immediateload._add_unbound_fn
+def immediateload(*keys):
+ return _UnboundLoad._from_keys(_UnboundLoad.immediateload, keys, False, {})
+
+
+@loader_option()
+def noload(loadopt, attr):
+ """Indicate that the given relationship attribute should remain unloaded.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
+ :func:`.orm.noload` applies to :func:`.relationship` attributes; for
+ column-based attributes, see :func:`.orm.defer`.
+
+ """
+
+ return loadopt.set_relationship_strategy(attr, {"lazy": "noload"})
+
+@noload._add_unbound_fn
+def noload(*keys):
+ return _UnboundLoad._from_keys(_UnboundLoad.noload, keys, False, {})
+
+@loader_option()
+def defaultload(loadopt, attr):
+ """Indicate an attribute should load using its default loader style.
+
+ This method is used to link to other loader options, such as
+ to set the :func:`.orm.defer` option on a class that is linked to
+ a relationship of the parent class being loaded, :func:`.orm.defaultload`
+ can be used to navigate this path without changing the loading style
+ of the relationship::
+
+ session.query(MyClass).options(defaultload("someattr").defer("some_column"))
+
+ .. seealso::
+
+ :func:`.orm.defer`
+
+ :func:`.orm.undefer`
+
+ """
+ return loadopt.set_relationship_strategy(
+ attr,
+ None
+ )
+
+@defaultload._add_unbound_fn
+def defaultload(*keys):
+ return _UnboundLoad._from_keys(_UnboundLoad.defaultload, keys, False, {})
+
+@loader_option()
+def defer(loadopt, key):
+ """Indicate that the given column-oriented attribute should be deferred, e.g.
+ not loaded until accessed.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
+ e.g.::
+
+ from sqlalchemy.orm import defer
+
+ session.query(MyClass).options(
+ defer("attribute_one"),
+ defer("attribute_two"))
+
+ session.query(MyClass).options(
+ defer(MyClass.attribute_one),
+ defer(MyClass.attribute_two))
+
+ To specify a deferred load of an attribute on a related class,
+ the path can be specified one token at a time, specifying the loading
+ style for each link along the chain. To leave the loading style
+ for a link unchanged, use :func:`.orm.defaultload`::
+
+ session.query(MyClass).options(defaultload("someattr").defer("some_column"))
+
+ A :class:`.Load` object that is present on a certain path can have
+ :meth:`.Load.defer` called multiple times, each will operate on the same
+ parent entity::
+
+
+ session.query(MyClass).options(
+ defaultload("someattr").
+ defer("some_column").
+ defer("some_other_column").
+ defer("another_column")
+ )
+
+ :param key: Attribute to be deferred.
+
+ :param \*addl_attrs: Deprecated; this option supports the old 0.8 style
+ of specifying a path as a series of attributes, which is now superseded
+ by the method-chained style.
+
+ .. seealso::
+
+ :ref:`deferred`
+
+ :func:`.orm.undefer`
+
+ """
+ return loadopt.set_column_strategy(
+ (key, ),
+ {"deferred": True, "instrument": True}
+ )
+
+
+@defer._add_unbound_fn
+def defer(key, *addl_attrs):
+ return _UnboundLoad._from_keys(_UnboundLoad.defer, (key, ) + addl_attrs, False, {})
+
+@loader_option()
+def undefer(loadopt, key):
+ """Indicate that the given column-oriented attribute should be undeferred, e.g.
+ specified within the SELECT statement of the entity as a whole.
+
+ The column being undeferred is typically set up on the mapping as a
+ :func:`.deferred` attribute.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
+ Examples::
+
+ # undefer two columns
+ session.query(MyClass).options(undefer("col1"), undefer("col2"))
+
+ # undefer all columns specific to a single class using Load + *
+ session.query(MyClass, MyOtherClass).options(Load(MyClass).undefer("*"))
+
+ :param key: Attribute to be undeferred.
+
+ :param \*addl_attrs: Deprecated; this option supports the old 0.8 style
+ of specifying a path as a series of attributes, which is now superseded
+ by the method-chained style.
+
+ .. seealso::
+
+ :ref:`deferred`
+
+ :func:`.orm.defer`
+
+ :func:`.orm.undefer_group`
+
+ """
+ return loadopt.set_column_strategy(
+ (key, ),
+ {"deferred": False, "instrument": True}
+ )
+
+@undefer._add_unbound_fn
+def undefer(key, *addl_attrs):
+ return _UnboundLoad._from_keys(_UnboundLoad.undefer, (key, ) + addl_attrs, False, {})
+
+@loader_option()
+def undefer_group(loadopt, name):
+ """Indicate that columns within the given deferred group name should be undeferred.
+
+ The columns being undeferred are set up on the mapping as
+ :func:`.deferred` attributes and include a "group" name.
+
+ E.g::
+
+ session.query(MyClass).options(undefer_group("large_attrs"))
+
+ To undefer a group of attributes on a related entity, the path can be
+ spelled out using relationship loader options, such as :func:`.orm.defaultload`::
+
+ session.query(MyClass).options(defaultload("someattr").undefer_group("large_attrs"))
+
+ .. versionchanged:: 0.9.0 :func:`.orm.undefer_group` is now specific to a
+       particular entity load path.
+
+ .. seealso::
+
+ :ref:`deferred`
+
+ :func:`.orm.defer`
+
+ :func:`.orm.undefer`
+
+ """
+ return loadopt.set_column_strategy(
+ "*",
+ None,
+ {"undefer_group": name}
+ )
+
+@undefer_group._add_unbound_fn
+def undefer_group(name):
+ return _UnboundLoad().undefer_group(name)
+
diff --git a/lib/sqlalchemy/orm/sync.py b/lib/sqlalchemy/orm/sync.py
index 6524ab27a..cf735fc53 100644
--- a/lib/sqlalchemy/orm/sync.py
+++ b/lib/sqlalchemy/orm/sync.py
@@ -1,5 +1,5 @@
# orm/sync.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/orm/unitofwork.py b/lib/sqlalchemy/orm/unitofwork.py
index 1e8d3e4dc..8c0c0d40e 100644
--- a/lib/sqlalchemy/orm/unitofwork.py
+++ b/lib/sqlalchemy/orm/unitofwork.py
@@ -1,5 +1,5 @@
# orm/unitofwork.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -16,8 +16,6 @@ from .. import util, event, exc
from ..util import topological
from . import attributes, persistence, util as orm_util, exc as orm_exc
-sessionlib = util.importlater("sqlalchemy.orm", "session")
-
def track_cascade_events(descriptor, prop):
"""Establish event listeners on object attributes which handle
@@ -33,7 +31,7 @@ def track_cascade_events(descriptor, prop):
if item is None:
return
- sess = sessionlib._state_session(state)
+ sess = state.session
if sess:
if sess._warn_on_events:
sess._flush_warning("collection append")
@@ -50,7 +48,7 @@ def track_cascade_events(descriptor, prop):
if item is None:
return
- sess = sessionlib._state_session(state)
+ sess = state.session
if sess:
prop = state.manager.mapper._props[key]
@@ -74,7 +72,7 @@ def track_cascade_events(descriptor, prop):
if oldvalue is newvalue:
return newvalue
- sess = sessionlib._state_session(state)
+ sess = state.session
if sess:
if sess._warn_on_events:
diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py
index ae1ca2013..dd85f2ef1 100644
--- a/lib/sqlalchemy/orm/util.py
+++ b/lib/sqlalchemy/orm/util.py
@@ -1,5 +1,5 @@
# orm/util.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -7,21 +7,20 @@
from .. import sql, util, event, exc as sa_exc, inspection
from ..sql import expression, util as sql_util, operators
-from .interfaces import PropComparator, MapperProperty, _InspectionAttr
-from itertools import chain
-from . import attributes, exc
+from .interfaces import PropComparator, MapperProperty
+from . import attributes
import re
-mapperlib = util.importlater("sqlalchemy.orm", "mapperlib")
+from .base import instance_str, state_str, state_class_str, attribute_str, \
+ state_attribute_str, object_mapper, object_state, _none_set
+from .base import class_mapper, _class_to_mapper
+from .base import _InspectionAttr
+from .path_registry import PathRegistry
all_cascades = frozenset(("delete", "delete-orphan", "all", "merge",
"expunge", "save-update", "refresh-expire",
"none"))
-_INSTRUMENTOR = ('mapper', 'instrumentor')
-
-_none_set = frozenset([None])
-
class CascadeOptions(frozenset):
"""Keeps track of the options sent to relationship().cascade"""
@@ -71,24 +70,43 @@ class CascadeOptions(frozenset):
)
-def _validator_events(desc, key, validator, include_removes):
+def _validator_events(desc, key, validator, include_removes, include_backrefs):
"""Runs a validation method on an attribute value to be set or appended."""
+ if not include_backrefs:
+ def detect_is_backref(state, initiator):
+ impl = state.manager[key].impl
+ return initiator.impl is not impl
+
if include_removes:
def append(state, value, initiator):
- return validator(state.obj(), key, value, False)
+ if include_backrefs or not detect_is_backref(state, initiator):
+ return validator(state.obj(), key, value, False)
+ else:
+ return value
def set_(state, value, oldvalue, initiator):
- return validator(state.obj(), key, value, False)
+ if include_backrefs or not detect_is_backref(state, initiator):
+ return validator(state.obj(), key, value, False)
+ else:
+ return value
def remove(state, value, initiator):
- validator(state.obj(), key, value, True)
+ if include_backrefs or not detect_is_backref(state, initiator):
+ validator(state.obj(), key, value, True)
+
else:
def append(state, value, initiator):
- return validator(state.obj(), key, value)
+ if include_backrefs or not detect_is_backref(state, initiator):
+ return validator(state.obj(), key, value)
+ else:
+ return value
def set_(state, value, oldvalue, initiator):
- return validator(state.obj(), key, value)
+ if include_backrefs or not detect_is_backref(state, initiator):
+ return validator(state.obj(), key, value)
+ else:
+ return value
event.listen(desc, 'append', append, raw=True, retval=True)
event.listen(desc, 'set', set_, raw=True, retval=True)
@@ -160,31 +178,59 @@ def polymorphic_union(table_map, typecolname,
def identity_key(*args, **kwargs):
- """Get an identity key.
+ """Generate "identity key" tuples, as are used as keys in the
+ :attr:`.Session.identity_map` dictionary.
- Valid call signatures:
+ This function has several call styles:
* ``identity_key(class, ident)``
- class
- mapped class (must be a positional argument)
+ This form receives a mapped class and a primary key scalar or
+ tuple as an argument.
+
+ E.g.::
+
+ >>> identity_key(MyClass, (1, 2))
+ (<class '__main__.MyClass'>, (1, 2))
- ident
- primary key, if the key is composite this is a tuple
+ :param class: mapped class (must be a positional argument)
+ :param ident: primary key, may be a scalar or tuple argument.
* ``identity_key(instance=instance)``
- instance
- object instance (must be given as a keyword arg)
+ This form will produce the identity key for a given instance. The
+ instance need not be persistent, only that its primary key attributes
+ are populated (else the key will contain ``None`` for those missing
+ values).
+
+ E.g.::
+
+ >>> instance = MyClass(1, 2)
+ >>> identity_key(instance=instance)
+ (<class '__main__.MyClass'>, (1, 2))
+
+ In this form, the given instance is ultimately run though
+ :meth:`.Mapper.identity_key_from_instance`, which will have the
+ effect of performing a database check for the corresponding row
+ if the object is expired.
+
+ :param instance: object instance (must be given as a keyword arg)
* ``identity_key(class, row=row)``
- class
- mapped class (must be a positional argument)
+ This form is similar to the class/tuple form, except is passed a
+ database result row as a :class:`.RowProxy` object.
+
+ E.g.::
- row
- result proxy row (must be given as a keyword arg)
+ >>> row = engine.execute("select * from table where a=1 and b=2").first()
+ >>> identity_key(MyClass, row=row)
+ (<class '__main__.MyClass'>, (1, 2))
+
+ :param class: mapped class (must be a positional argument)
+ :param row: :class:`.RowProxy` row returned by a :class:`.ResultProxy`
+ (must be given as a keyword arg)
"""
if args:
@@ -245,212 +291,6 @@ class ORMAdapter(sql_util.ColumnAdapter):
else:
return None
-def _unreduce_path(path):
- return PathRegistry.deserialize(path)
-
-class PathRegistry(object):
- """Represent query load paths and registry functions.
-
- Basically represents structures like:
-
- (<User mapper>, "orders", <Order mapper>, "items", <Item mapper>)
-
- These structures are generated by things like
- query options (joinedload(), subqueryload(), etc.) and are
- used to compose keys stored in the query._attributes dictionary
- for various options.
-
- They are then re-composed at query compile/result row time as
- the query is formed and as rows are fetched, where they again
- serve to compose keys to look up options in the context.attributes
- dictionary, which is copied from query._attributes.
-
- The path structure has a limited amount of caching, where each
- "root" ultimately pulls from a fixed registry associated with
- the first mapper, that also contains elements for each of its
- property keys. However paths longer than two elements, which
- are the exception rather than the rule, are generated on an
- as-needed basis.
-
- """
-
- def __eq__(self, other):
- return other is not None and \
- self.path == other.path
-
- def set(self, attributes, key, value):
- attributes[(key, self.path)] = value
-
- def setdefault(self, attributes, key, value):
- attributes.setdefault((key, self.path), value)
-
- def get(self, attributes, key, value=None):
- key = (key, self.path)
- if key in attributes:
- return attributes[key]
- else:
- return value
-
- def __len__(self):
- return len(self.path)
-
- @property
- def length(self):
- return len(self.path)
-
- def pairs(self):
- path = self.path
- for i in range(0, len(path), 2):
- yield path[i], path[i + 1]
-
- def contains_mapper(self, mapper):
- for path_mapper in [
- self.path[i] for i in range(0, len(self.path), 2)
- ]:
- if isinstance(path_mapper, mapperlib.Mapper) and \
- path_mapper.isa(mapper):
- return True
- else:
- return False
-
- def contains(self, attributes, key):
- return (key, self.path) in attributes
-
- def __reduce__(self):
- return _unreduce_path, (self.serialize(), )
-
- def serialize(self):
- path = self.path
- return list(zip(
- [m.class_ for m in [path[i] for i in range(0, len(path), 2)]],
- [path[i].key for i in range(1, len(path), 2)] + [None]
- ))
-
- @classmethod
- def deserialize(cls, path):
- if path is None:
- return None
-
- p = tuple(chain(*[(class_mapper(mcls),
- class_mapper(mcls).attrs[key]
- if key is not None else None)
- for mcls, key in path]))
- if p and p[-1] is None:
- p = p[0:-1]
- return cls.coerce(p)
-
- @classmethod
- def per_mapper(cls, mapper):
- return EntityRegistry(
- cls.root, mapper
- )
-
- @classmethod
- def coerce(cls, raw):
- return util.reduce(lambda prev, next: prev[next], raw, cls.root)
-
- @classmethod
- def token(cls, token):
- return TokenRegistry(cls.root, token)
-
- def __add__(self, other):
- return util.reduce(
- lambda prev, next: prev[next],
- other.path, self)
-
- def __repr__(self):
- return "%s(%r)" % (self.__class__.__name__, self.path, )
-
-
-class RootRegistry(PathRegistry):
- """Root registry, defers to mappers so that
- paths are maintained per-root-mapper.
-
- """
- path = ()
-
- def __getitem__(self, entity):
- return entity._path_registry
-PathRegistry.root = RootRegistry()
-
-class TokenRegistry(PathRegistry):
- def __init__(self, parent, token):
- self.token = token
- self.parent = parent
- self.path = parent.path + (token,)
-
- def __getitem__(self, entity):
- raise NotImplementedError()
-
-class PropRegistry(PathRegistry):
- def __init__(self, parent, prop):
- # restate this path in terms of the
- # given MapperProperty's parent.
- insp = inspection.inspect(parent[-1])
- if not insp.is_aliased_class or insp._use_mapper_path:
- parent = parent.parent[prop.parent]
- elif insp.is_aliased_class and insp.with_polymorphic_mappers:
- if prop.parent is not insp.mapper and \
- prop.parent in insp.with_polymorphic_mappers:
- subclass_entity = parent[-1]._entity_for_mapper(prop.parent)
- parent = parent.parent[subclass_entity]
-
- self.prop = prop
- self.parent = parent
- self.path = parent.path + (prop,)
-
- def __getitem__(self, entity):
- if isinstance(entity, (int, slice)):
- return self.path[entity]
- else:
- return EntityRegistry(
- self, entity
- )
-
-
-class EntityRegistry(PathRegistry, dict):
- is_aliased_class = False
-
- def __init__(self, parent, entity):
- self.key = entity
- self.parent = parent
- self.is_aliased_class = entity.is_aliased_class
-
- self.path = parent.path + (entity,)
-
- def __bool__(self):
- return True
- __nonzero__ = __bool__
-
- def __getitem__(self, entity):
- if isinstance(entity, (int, slice)):
- return self.path[entity]
- else:
- return dict.__getitem__(self, entity)
-
- def _inlined_get_for(self, prop, context, key):
- """an inlined version of:
-
- cls = path[mapperproperty].get(context, key)
-
- Skips the isinstance() check in __getitem__
- and the extra method call for get().
- Used by StrategizedProperty for its
- very frequent lookup.
-
- """
- path = dict.__getitem__(self, prop)
- path_key = (key, path.path)
- if path_key in context.attributes:
- return context.attributes[path_key]
- else:
- return None
-
- def __missing__(self, key):
- self[key] = item = PropRegistry(self, key)
- return item
-
-
class AliasedClass(object):
"""Represents an "aliased" form of a mapped class for usage with Query.
@@ -538,8 +378,10 @@ class AliasedClass(object):
else:
raise AttributeError(key)
- if isinstance(attr, attributes.QueryableAttribute):
- return _aliased_insp._adapt_prop(attr, key)
+ if isinstance(attr, PropComparator):
+ ret = attr.adapt_to_entity(_aliased_insp)
+ setattr(self, key, ret)
+ return ret
elif hasattr(attr, 'func_code'):
is_method = getattr(_aliased_insp._target, key, None)
if is_method and is_method.__self__ is not None:
@@ -550,7 +392,8 @@ class AliasedClass(object):
ret = attr.__get__(None, self)
if isinstance(ret, PropComparator):
return ret.adapt_to_entity(_aliased_insp)
- return ret
+ else:
+ return ret
else:
return attr
@@ -672,17 +515,6 @@ class AliasedInsp(_InspectionAttr):
'parentmapper': self.mapper}
)
- def _adapt_prop(self, existing, key):
- comparator = existing.comparator.adapt_to_entity(self)
- queryattr = attributes.QueryableAttribute(
- self.entity, key,
- impl=existing.impl,
- parententity=self,
- comparator=comparator)
- setattr(self.entity, key, queryattr)
- return queryattr
-
-
def _entity_for_mapper(self, mapper):
self_poly = self.with_polymorphic_mappers
if mapper in self_poly:
@@ -1053,186 +885,6 @@ def with_parent(instance, prop):
value_is_parent=True)
-def _attr_as_key(attr):
- if hasattr(attr, 'key'):
- return attr.key
- else:
- return expression._column_as_key(attr)
-
-
-_state_mapper = util.dottedgetter('manager.mapper')
-
-
-@inspection._inspects(object)
-def _inspect_mapped_object(instance):
- try:
- return attributes.instance_state(instance)
- # TODO: whats the py-2/3 syntax to catch two
- # different kinds of exceptions at once ?
- except exc.UnmappedClassError:
- return None
- except exc.NO_STATE:
- return None
-
-
-@inspection._inspects(type)
-def _inspect_mapped_class(class_, configure=False):
- try:
- class_manager = attributes.manager_of_class(class_)
- if not class_manager.is_mapped:
- return None
- mapper = class_manager.mapper
- if configure and mapperlib.module._new_mappers:
- mapperlib.configure_mappers()
- return mapper
-
- except exc.NO_STATE:
- return None
-
-
-def object_mapper(instance):
- """Given an object, return the primary Mapper associated with the object
- instance.
-
- Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError`
- if no mapping is configured.
-
- This function is available via the inspection system as::
-
- inspect(instance).mapper
-
- Using the inspection system will raise
- :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is
- not part of a mapping.
-
- """
- return object_state(instance).mapper
-
-
-def object_state(instance):
- """Given an object, return the :class:`.InstanceState`
- associated with the object.
-
- Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError`
- if no mapping is configured.
-
- Equivalent functionality is available via the :func:`.inspect`
- function as::
-
- inspect(instance)
-
- Using the inspection system will raise
- :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is
- not part of a mapping.
-
- """
- state = _inspect_mapped_object(instance)
- if state is None:
- raise exc.UnmappedInstanceError(instance)
- else:
- return state
-
-
-def class_mapper(class_, configure=True):
- """Given a class, return the primary :class:`.Mapper` associated
- with the key.
-
- Raises :class:`.UnmappedClassError` if no mapping is configured
- on the given class, or :class:`.ArgumentError` if a non-class
- object is passed.
-
- Equivalent functionality is available via the :func:`.inspect`
- function as::
-
- inspect(some_mapped_class)
-
- Using the inspection system will raise
- :class:`sqlalchemy.exc.NoInspectionAvailable` if the class is not mapped.
-
- """
- mapper = _inspect_mapped_class(class_, configure=configure)
- if mapper is None:
- if not isinstance(class_, type):
- raise sa_exc.ArgumentError(
- "Class object expected, got '%r'." % class_)
- raise exc.UnmappedClassError(class_)
- else:
- return mapper
-
-
-def _class_to_mapper(class_or_mapper):
- insp = inspection.inspect(class_or_mapper, False)
- if insp is not None:
- return insp.mapper
- else:
- raise exc.UnmappedClassError(class_or_mapper)
-
-
-def _mapper_or_none(entity):
- """Return the :class:`.Mapper` for the given class or None if the
- class is not mapped."""
-
- insp = inspection.inspect(entity, False)
- if insp is not None:
- return insp.mapper
- else:
- return None
-
-
-def _is_mapped_class(entity):
- """Return True if the given object is a mapped class,
- :class:`.Mapper`, or :class:`.AliasedClass`."""
-
- insp = inspection.inspect(entity, False)
- return insp is not None and \
- hasattr(insp, "mapper") and \
- (
- insp.is_mapper
- or insp.is_aliased_class
- )
-
-
-def _is_aliased_class(entity):
- insp = inspection.inspect(entity, False)
- return insp is not None and \
- getattr(insp, "is_aliased_class", False)
-
-
-def _entity_descriptor(entity, key):
- """Return a class attribute given an entity and string name.
-
- May return :class:`.InstrumentedAttribute` or user-defined
- attribute.
-
- """
- insp = inspection.inspect(entity)
- if insp.is_selectable:
- description = entity
- entity = insp.c
- elif insp.is_aliased_class:
- entity = insp.entity
- description = entity
- elif hasattr(insp, "mapper"):
- description = entity = insp.mapper.class_
- else:
- description = entity
-
- try:
- return getattr(entity, key)
- except AttributeError:
- raise sa_exc.InvalidRequestError(
- "Entity '%s' has no property '%s'" %
- (description, key)
- )
-
-
-def _orm_columns(entity):
- insp = inspection.inspect(entity, False)
- if hasattr(insp, 'selectable'):
- return [c for c in insp.selectable.c]
- else:
- return [entity]
-
def has_identity(object):
"""Return True if the given object has a database
@@ -1260,37 +912,8 @@ def was_deleted(object):
state = attributes.instance_state(object)
return state.deleted
-def instance_str(instance):
- """Return a string describing an instance."""
-
- return state_str(attributes.instance_state(instance))
-
-
-def state_str(state):
- """Return a string describing an instance via its InstanceState."""
-
- if state is None:
- return "None"
- else:
- return '<%s at 0x%x>' % (state.class_.__name__, id(state.obj()))
-
-
-def state_class_str(state):
- """Return a string describing an instance's class via its InstanceState."""
-
- if state is None:
- return "None"
- else:
- return '<%s>' % (state.class_.__name__, )
-def attribute_str(instance, attribute):
- return instance_str(instance) + "." + attribute
-
-
-def state_attribute_str(state, attribute):
- return state_str(state) + "." + attribute
-
def randomize_unitofwork():
"""Use random-ordering sets within the unit of work in order
@@ -1327,3 +950,4 @@ def randomize_unitofwork():
from sqlalchemy.testing.util import RandomSet
topological.set = unitofwork.set = session.set = mapper.set = \
dependency.set = RandomSet
+
diff --git a/lib/sqlalchemy/pool.py b/lib/sqlalchemy/pool.py
index 498b001c1..f84f331d5 100644
--- a/lib/sqlalchemy/pool.py
+++ b/lib/sqlalchemy/pool.py
@@ -1,5 +1,5 @@
# sqlalchemy/pool.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -20,7 +20,7 @@ import time
import traceback
import weakref
-from . import exc, log, event, events, interfaces, util
+from . import exc, log, event, interfaces, util
from .util import queue as sqla_queue
from .util import threading, memoized_property, \
chop_traceback
@@ -130,10 +130,10 @@ class Pool(log.Identified):
:meth:`unique_connection` method is provided to bypass the
threadlocal behavior installed into :meth:`connect`.
- :param reset_on_return: If true, reset the database state of
- connections returned to the pool. This is typically a
- ROLLBACK to release locks and transaction resources.
- Disable at your own peril. Defaults to True.
+ :param reset_on_return: Configures the action to take
+ on connections as they are returned to the pool.
+ See the argument description in :class:`.QueuePool` for
+ more detail.
:param events: a list of 2-tuples, each of the form
``(callable, target)`` which will be passed to event.listen()
@@ -185,8 +185,6 @@ class Pool(log.Identified):
for l in listeners:
self.add_listener(l)
- dispatch = event.dispatcher(events.PoolEvents)
-
def _close_connection(self, connection):
self.logger.debug("Closing connection %r", connection)
try:
@@ -218,7 +216,7 @@ class Pool(log.Identified):
"""
- return _ConnectionFairy.checkout(self)
+ return _ConnectionFairy._checkout(self)
def _create_connection(self):
"""Called by subclasses to create a new ConnectionRecord."""
@@ -270,16 +268,17 @@ class Pool(log.Identified):
"""
if not self._use_threadlocal:
- return _ConnectionFairy.checkout(self)
+ return _ConnectionFairy._checkout(self)
try:
rec = self._threadconns.current()
except AttributeError:
pass
else:
- return rec.checkout_existing()
+ if rec is not None:
+ return rec._checkout_existing()
- return _ConnectionFairy.checkout(self, self._threadconns)
+ return _ConnectionFairy._checkout(self, self._threadconns)
def _return_conn(self, record):
"""Given a _ConnectionRecord, return it to the :class:`.Pool`.
@@ -310,6 +309,34 @@ class Pool(log.Identified):
class _ConnectionRecord(object):
+ """Internal object which maintains an individual DBAPI connection
+ referenced by a :class:`.Pool`.
+
+ The :class:`._ConnectionRecord` object always exists for any particular
+ DBAPI connection whether or not that DBAPI connection has been
+ "checked out". This is in contrast to the :class:`._ConnectionFairy`
+ which is only a public facade to the DBAPI connection while it is checked
+ out.
+
+ A :class:`._ConnectionRecord` may exist for a span longer than that
+ of a single DBAPI connection. For example, if the
+ :meth:`._ConnectionRecord.invalidate`
+ method is called, the DBAPI connection associated with this
+ :class:`._ConnectionRecord`
+ will be discarded, but the :class:`._ConnectionRecord` may be used again,
+ in which case a new DBAPI connection is produced when the :class:`.Pool`
+ next uses this record.
+
+ The :class:`._ConnectionRecord` is delivered along with connection
+ pool events, including :meth:`.PoolEvents.connect` and
+ :meth:`.PoolEvents.checkout`, however :class:`._ConnectionRecord` still
+ remains an internal object whose API and internals may change.
+
+ .. seealso::
+
+ :class:`._ConnectionFairy`
+
+ """
def __init__(self, pool):
self.__pool = pool
@@ -321,8 +348,23 @@ class _ConnectionRecord(object):
exec_once(self.connection, self)
pool.dispatch.connect(self.connection, self)
+ connection = None
+ """A reference to the actual DBAPI connection being tracked.
+
+ May be ``None`` if this :class:`._ConnectionRecord` has been marked
+ as invalidated; a new DBAPI connection may replace it if the owning
+ pool calls upon this :class:`._ConnectionRecord` to reconnect.
+
+ """
+
@util.memoized_property
def info(self):
+ """The ``.info`` dictionary associated with the DBAPI connection.
+
+ This dictionary is shared among the :attr:`._ConnectionFairy.info`
+ and :attr:`.Connection.info` accessors.
+
+ """
return {}
@classmethod
@@ -361,9 +403,22 @@ class _ConnectionRecord(object):
def close(self):
if self.connection is not None:
- self.__pool._close_connection(self.connection)
+ self.__close()
def invalidate(self, e=None):
+ """Invalidate the DBAPI connection held by this :class:`._ConnectionRecord`.
+
+ This method is called for all connection invalidations, including
+ when the :meth:`._ConnectionFairy.invalidate` or :meth:`.Connection.invalidate`
+ methods are called, as well as when any so-called "automatic invalidation"
+ condition occurs.
+
+ .. seealso::
+
+ :ref:`pool_connection_invalidation`
+
+ """
+ self.__pool.dispatch.invalidate(self.connection, self, e)
if e is not None:
self.__pool.logger.info(
"Invalidate connection %r (reason: %s:%s)",
@@ -424,18 +479,8 @@ def _finalize_fairy(connection, connection_record, pool, ref, echo, fairy=None):
try:
fairy = fairy or _ConnectionFairy(connection, connection_record)
- if pool.dispatch.reset:
- pool.dispatch.reset(fairy, connection_record)
- if pool._reset_on_return is reset_rollback:
- if echo:
- pool.logger.debug("Connection %s rollback-on-return",
- connection)
- pool._dialect.do_rollback(fairy)
- elif pool._reset_on_return is reset_commit:
- if echo:
- pool.logger.debug("Connection %s commit-on-return",
- connection)
- pool._dialect.do_commit(fairy)
+ assert fairy.connection is connection
+ fairy._reset(pool, echo)
# Immediately close detached instances
if not connection_record:
@@ -454,15 +499,58 @@ _refs = set()
class _ConnectionFairy(object):
- """Proxies a DB-API connection and provides return-on-dereference
- support."""
+ """Proxies a DBAPI connection and provides return-on-dereference
+ support.
+
+ This is an internal object used by the :class:`.Pool` implementation
+ to provide context management to a DBAPI connection delivered by
+ that :class:`.Pool`.
+
+ The name "fairy" is inspired by the fact that the :class:`._ConnectionFairy`
+ object's lifespan is transitory, as it lasts only for the length of a
+ specific DBAPI connection being checked out from the pool, and additionally
+ that as a transparent proxy, it is mostly invisible.
+
+ .. seealso::
+
+ :class:`._ConnectionRecord`
+
+ """
def __init__(self, dbapi_connection, connection_record):
self.connection = dbapi_connection
self._connection_record = connection_record
+ connection = None
+ """A reference to the actual DBAPI connection being tracked."""
+
+ _connection_record = None
+ """A reference to the :class:`._ConnectionRecord` object associated
+ with the DBAPI connection.
+
+ This is currently an internal accessor which is subject to change.
+
+ """
+
+ _reset_agent = None
+    """Refers to an object with a ``.commit()`` and ``.rollback()`` method;
+ if non-None, the "reset-on-return" feature will call upon this object
+ rather than directly against the dialect-level do_rollback() and do_commit()
+ methods.
+
+ In practice, a :class:`.Connection` assigns a :class:`.Transaction` object
+ to this variable when one is in scope so that the :class:`.Transaction`
+ takes the job of committing or rolling back on return if
+ :meth:`.Connection.close` is called while the :class:`.Transaction`
+ still exists.
+
+ This is essentially an "event handler" of sorts but is simplified as an
+ instance variable both for performance/simplicity as well as that there
+ can only be one "reset agent" at a time.
+ """
+
@classmethod
- def checkout(cls, pool, threadconns=None, fairy=None):
+ def _checkout(cls, pool, threadconns=None, fairy=None):
if not fairy:
fairy = _ConnectionRecord.checkout(pool)
@@ -499,16 +587,40 @@ class _ConnectionFairy(object):
fairy.invalidate()
raise exc.InvalidRequestError("This connection is closed")
- def checkout_existing(self):
- return _ConnectionFairy.checkout(self._pool, fairy=self)
+ def _checkout_existing(self):
+ return _ConnectionFairy._checkout(self._pool, fairy=self)
- def checkin(self):
+ def _checkin(self):
_finalize_fairy(self.connection, self._connection_record,
self._pool, None, self._echo, fairy=self)
self.connection = None
self._connection_record = None
- _close = checkin
+ _close = _checkin
+
+ def _reset(self, pool, echo):
+ if pool.dispatch.reset:
+ pool.dispatch.reset(self, self._connection_record)
+ if pool._reset_on_return is reset_rollback:
+ if echo:
+ pool.logger.debug("Connection %s rollback-on-return%s",
+ self.connection,
+ ", via agent"
+ if self._reset_agent else "")
+ if self._reset_agent:
+ self._reset_agent.rollback()
+ else:
+ pool._dialect.do_rollback(self)
+ elif pool._reset_on_return is reset_commit:
+ if echo:
+ pool.logger.debug("Connection %s commit-on-return%s",
+ self.connection,
+ ", via agent"
+ if self._reset_agent else "")
+ if self._reset_agent:
+ self._reset_agent.commit()
+ else:
+ pool._dialect.do_commit(self)
@property
def _logger(self):
@@ -516,6 +628,9 @@ class _ConnectionFairy(object):
@property
def is_valid(self):
+ """Return True if this :class:`._ConnectionFairy` still refers
+ to an active DBAPI connection."""
+
return self.connection is not None
@util.memoized_property
@@ -526,7 +641,9 @@ class _ConnectionFairy(object):
The data here will follow along with the DBAPI connection including
after it is returned to the connection pool and used again
- in subsequent instances of :class:`.ConnectionFairy`.
+ in subsequent instances of :class:`._ConnectionFairy`. It is shared
+ with the :attr:`._ConnectionRecord.info` and :attr:`.Connection.info`
+ accessors.
"""
return self._connection_record.info
@@ -534,8 +651,16 @@ class _ConnectionFairy(object):
def invalidate(self, e=None):
"""Mark this connection as invalidated.
- The connection will be immediately closed. The containing
- ConnectionRecord will create a new connection when next used.
+ This method can be called directly, and is also called as a result
+ of the :meth:`.Connection.invalidate` method. When invoked,
+ the DBAPI connection is immediately closed and discarded from
+ further use by the pool. The invalidation mechanism proceeds
+ via the :meth:`._ConnectionRecord.invalidate` internal method.
+
+ .. seealso::
+
+ :ref:`pool_connection_invalidation`
+
"""
if self.connection is None:
@@ -543,9 +668,15 @@ class _ConnectionFairy(object):
if self._connection_record:
self._connection_record.invalidate(e=e)
self.connection = None
- self.checkin()
+ self._checkin()
def cursor(self, *args, **kwargs):
+ """Return a new DBAPI cursor for the underlying connection.
+
+ This method is a proxy for the ``connection.cursor()`` DBAPI
+ method.
+
+ """
return self.connection.cursor(*args, **kwargs)
def __getattr__(self, key):
@@ -577,7 +708,7 @@ class _ConnectionFairy(object):
def close(self):
self._counter -= 1
if self._counter == 0:
- self.checkin()
+ self._checkin()
@@ -659,15 +790,6 @@ class SingletonThreadPool(Pool):
return c
-class DummyLock(object):
-
- def acquire(self, wait=True):
- return True
-
- def release(self):
- pass
-
-
class QueuePool(Pool):
"""A :class:`.Pool` that imposes a limit on the number of open connections.
@@ -775,30 +897,27 @@ class QueuePool(Pool):
self._overflow = 0 - pool_size
self._max_overflow = max_overflow
self._timeout = timeout
- self._overflow_lock = threading.Lock() if self._max_overflow > -1 \
- else DummyLock()
+ self._overflow_lock = threading.Lock()
def _do_return_conn(self, conn):
try:
self._pool.put(conn, False)
except sqla_queue.Full:
- conn.close()
- self._overflow_lock.acquire()
try:
- self._overflow -= 1
+ conn.close()
finally:
- self._overflow_lock.release()
+ self._dec_overflow()
def _do_get(self):
+ use_overflow = self._max_overflow > -1
+
try:
- wait = self._max_overflow > -1 and \
- self._overflow >= self._max_overflow
+ wait = use_overflow and self._overflow >= self._max_overflow
return self._pool.get(wait, self._timeout)
except sqla_queue.SAAbort as aborted:
return aborted.context._do_get()
except sqla_queue.Empty:
- if self._max_overflow > -1 and \
- self._overflow >= self._max_overflow:
+ if use_overflow and self._overflow >= self._max_overflow:
if not wait:
return self._do_get()
else:
@@ -807,17 +926,33 @@ class QueuePool(Pool):
"connection timed out, timeout %d" %
(self.size(), self.overflow(), self._timeout))
- self._overflow_lock.acquire()
- try:
- if self._max_overflow > -1 and \
- self._overflow >= self._max_overflow:
- return self._do_get()
- else:
- con = self._create_connection()
- self._overflow += 1
- return con
- finally:
- self._overflow_lock.release()
+ if self._inc_overflow():
+ try:
+ return self._create_connection()
+ except:
+ self._dec_overflow()
+ raise
+ else:
+ return self._do_get()
+
+ def _inc_overflow(self):
+ if self._max_overflow == -1:
+ self._overflow += 1
+ return True
+ with self._overflow_lock:
+ if self._overflow < self._max_overflow:
+ self._overflow += 1
+ return True
+ else:
+ return False
+
+ def _dec_overflow(self):
+ if self._max_overflow == -1:
+ self._overflow -= 1
+ return True
+ with self._overflow_lock:
+ self._overflow -= 1
+ return True
def recreate(self):
self.logger.info("Pool recreating")
diff --git a/lib/sqlalchemy/processors.py b/lib/sqlalchemy/processors.py
index bf95d146b..d0f52e42b 100644
--- a/lib/sqlalchemy/processors.py
+++ b/lib/sqlalchemy/processors.py
@@ -1,5 +1,5 @@
# sqlalchemy/processors.py
-# Copyright (C) 2010-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2010-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
# Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com
#
# This module is part of SQLAlchemy and is released under
@@ -15,6 +15,7 @@ They all share one common characteristic: None is passed through unchanged.
import codecs
import re
import datetime
+from . import util
def str_to_datetime_processor_factory(regexp, type_):
@@ -66,7 +67,22 @@ def py_fallback():
return decoder(value, errors)[0]
return process
- def to_decimal_processor_factory(target_class, scale=10):
+ def to_conditional_unicode_processor_factory(encoding, errors=None):
+ decoder = codecs.getdecoder(encoding)
+
+ def process(value):
+ if value is None:
+ return None
+ elif isinstance(value, util.text_type):
+ return value
+ else:
+ # decoder returns a tuple: (value, len). Simply dropping the
+ # len part is safe: it is done that way in the normal
+ # 'xx'.decode(encoding) code path.
+ return decoder(value, errors)[0]
+ return process
+
+ def to_decimal_processor_factory(target_class, scale):
fstring = "%%.%df" % scale
def process(value):
@@ -113,13 +129,18 @@ try:
str_to_date
def to_unicode_processor_factory(encoding, errors=None):
- # this is cumbersome but it would be even more so on the C side
if errors is not None:
return UnicodeResultProcessor(encoding, errors).process
else:
return UnicodeResultProcessor(encoding).process
- def to_decimal_processor_factory(target_class, scale=10):
+ def to_conditional_unicode_processor_factory(encoding, errors=None):
+ if errors is not None:
+ return UnicodeResultProcessor(encoding, errors).conditional_process
+ else:
+ return UnicodeResultProcessor(encoding).conditional_process
+
+ def to_decimal_processor_factory(target_class, scale):
# Note that the scale argument is not taken into account for integer
# values in the C implementation while it is in the Python one.
# For example, the Python implementation might return
diff --git a/lib/sqlalchemy/schema.py b/lib/sqlalchemy/schema.py
index ebcc9a7ed..9e647e595 100644
--- a/lib/sqlalchemy/schema.py
+++ b/lib/sqlalchemy/schema.py
@@ -1,3638 +1,58 @@
-# sqlalchemy/schema.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# schema.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-"""The schema module provides the building blocks for database metadata.
-
-Each element within this module describes a database entity which can be
-created and dropped, or is otherwise part of such an entity. Examples include
-tables, columns, sequences, and indexes.
-
-All entities are subclasses of :class:`~sqlalchemy.schema.SchemaItem`, and as
-defined in this module they are intended to be agnostic of any vendor-specific
-constructs.
-
-A collection of entities are grouped into a unit called
-:class:`~sqlalchemy.schema.MetaData`. MetaData serves as a logical grouping of
-schema elements, and can also be associated with an actual database connection
-such that operations involving the contained elements can contact the database
-as needed.
-
-Two of the elements here also build upon their "syntactic" counterparts, which
-are defined in :class:`~sqlalchemy.sql.expression.`, specifically
-:class:`~sqlalchemy.schema.Table` and :class:`~sqlalchemy.schema.Column`.
-Since these objects are part of the SQL expression language, they are usable
-as components in SQL expressions.
+"""Compatibility namespace for sqlalchemy.sql.schema and related.
"""
-import re
-import inspect
-from . import exc, util, dialects, event, events, inspection
-from .sql import expression, visitors
-import collections
-
-ddl = util.importlater("sqlalchemy.engine", "ddl")
-sqlutil = util.importlater("sqlalchemy.sql", "util")
-url = util.importlater("sqlalchemy.engine", "url")
-sqltypes = util.importlater("sqlalchemy", "types")
-
-__all__ = ['SchemaItem', 'Table', 'Column', 'ForeignKey', 'Sequence', 'Index',
- 'ForeignKeyConstraint', 'PrimaryKeyConstraint', 'CheckConstraint',
- 'UniqueConstraint', 'DefaultGenerator', 'Constraint', 'MetaData',
- 'ThreadLocalMetaData', 'SchemaVisitor', 'PassiveDefault',
- 'DefaultClause', 'FetchedValue', 'ColumnDefault', 'DDL',
- 'CreateTable', 'DropTable', 'CreateSequence', 'DropSequence',
- 'AddConstraint', 'DropConstraint',
- ]
-__all__.sort()
-
-RETAIN_SCHEMA = util.symbol('retain_schema')
-
-
-class SchemaItem(events.SchemaEventTarget, visitors.Visitable):
- """Base class for items that define a database schema."""
-
- __visit_name__ = 'schema_item'
- quote = None
-
- def _init_items(self, *args):
- """Initialize the list of child items for this SchemaItem."""
-
- for item in args:
- if item is not None:
- item._set_parent_with_dispatch(self)
-
- def get_children(self, **kwargs):
- """used to allow SchemaVisitor access"""
- return []
-
- def __repr__(self):
- return util.generic_repr(self)
-
- @util.memoized_property
- def info(self):
- """Info dictionary associated with the object, allowing user-defined
- data to be associated with this :class:`.SchemaItem`.
-
- The dictionary is automatically generated when first accessed.
- It can also be specified in the constructor of some objects,
- such as :class:`.Table` and :class:`.Column`.
-
- """
- return {}
-
-
-def _get_table_key(name, schema):
- if schema is None:
- return name
- else:
- return schema + "." + name
-
-
-def _validate_dialect_kwargs(kwargs, name):
- # validate remaining kwargs that they all specify DB prefixes
-
- for k in kwargs:
- m = re.match('^(.+?)_.*', k)
- if m is None:
- raise TypeError("Additional arguments should be "
- "named <dialectname>_<argument>, got '%s'" % k)
-
-
-inspection._self_inspects(SchemaItem)
-
-
-class Table(SchemaItem, expression.TableClause):
- """Represent a table in a database.
-
- e.g.::
-
- mytable = Table("mytable", metadata,
- Column('mytable_id', Integer, primary_key=True),
- Column('value', String(50))
- )
-
- The :class:`.Table` object constructs a unique instance of itself based
- on its name and optional schema name within the given
- :class:`.MetaData` object. Calling the :class:`.Table`
- constructor with the same name and same :class:`.MetaData` argument
- a second time will return the *same* :class:`.Table` object - in this way
- the :class:`.Table` constructor acts as a registry function.
-
- See also:
-
- :ref:`metadata_describing` - Introduction to database metadata
-
- Constructor arguments are as follows:
-
- :param name: The name of this table as represented in the database.
-
- This property, along with the *schema*, indicates the *singleton
- identity* of this table in relation to its parent :class:`.MetaData`.
- Additional calls to :class:`.Table` with the same name, metadata,
- and schema name will return the same :class:`.Table` object.
-
- Names which contain no upper case characters
- will be treated as case insensitive names, and will not be quoted
- unless they are a reserved word. Names with any number of upper
- case characters will be quoted and sent exactly. Note that this
- behavior applies even for databases which standardize upper
- case names as case insensitive such as Oracle.
-
- :param metadata: a :class:`.MetaData` object which will contain this
- table. The metadata is used as a point of association of this table
- with other tables which are referenced via foreign key. It also
- may be used to associate this table with a particular
- :class:`.Connectable`.
-
- :param \*args: Additional positional arguments are used primarily
- to add the list of :class:`.Column` objects contained within this
- table. Similar to the style of a CREATE TABLE statement, other
- :class:`.SchemaItem` constructs may be added here, including
- :class:`.PrimaryKeyConstraint`, and :class:`.ForeignKeyConstraint`.
-
- :param autoload: Defaults to False: the Columns for this table should
- be reflected from the database. Usually there will be no Column
- objects in the constructor if this property is set.
-
- :param autoload_replace: If ``True``, when using ``autoload=True``
- and ``extend_existing=True``,
- replace ``Column`` objects already present in the ``Table`` that's
- in the ``MetaData`` registry with
- what's reflected. Otherwise, all existing columns will be
- excluded from the reflection process. Note that this does
- not impact ``Column`` objects specified in the same call to ``Table``
- which includes ``autoload``, those always take precedence.
- Defaults to ``True``.
-
- .. versionadded:: 0.7.5
-
- :param autoload_with: If autoload==True, this is an optional Engine
- or Connection instance to be used for the table reflection. If
- ``None``, the underlying MetaData's bound connectable will be used.
-
- :param extend_existing: When ``True``, indicates that if this
- :class:`.Table` is already present in the given :class:`.MetaData`,
- apply further arguments within the constructor to the existing
- :class:`.Table`.
-
- If ``extend_existing`` or ``keep_existing`` are not set, an error is
- raised if additional table modifiers are specified when
- the given :class:`.Table` is already present in the :class:`.MetaData`.
-
- .. versionchanged:: 0.7.4
- ``extend_existing`` will work in conjunction
- with ``autoload=True`` to run a new reflection operation against
- the database; new :class:`.Column` objects will be produced
- from database metadata to replace those existing with the same
- name, and additional :class:`.Column` objects not present
- in the :class:`.Table` will be added.
-
- As is always the case with ``autoload=True``, :class:`.Column`
- objects can be specified in the same :class:`.Table` constructor,
- which will take precedence. I.e.::
-
- Table("mytable", metadata,
- Column('y', Integer),
- extend_existing=True,
- autoload=True,
- autoload_with=engine
- )
-
- The above will overwrite all columns within ``mytable`` which
- are present in the database, except for ``y`` which will be used as is
- from the above definition. If the ``autoload_replace`` flag
- is set to False, no existing columns will be replaced.
-
- :param implicit_returning: True by default - indicates that
- RETURNING can be used by default to fetch newly inserted primary key
- values, for backends which support this. Note that
- create_engine() also provides an implicit_returning flag.
-
- :param include_columns: A list of strings indicating a subset of
- columns to be loaded via the ``autoload`` operation; table columns who
- aren't present in this list will not be represented on the resulting
- ``Table`` object. Defaults to ``None`` which indicates all columns
- should be reflected.
-
- :param info: Optional data dictionary which will be populated into the
- :attr:`.SchemaItem.info` attribute of this object.
-
- :param keep_existing: When ``True``, indicates that if this Table
- is already present in the given :class:`.MetaData`, ignore
- further arguments within the constructor to the existing
- :class:`.Table`, and return the :class:`.Table` object as
- originally created. This is to allow a function that wishes
- to define a new :class:`.Table` on first call, but on
- subsequent calls will return the same :class:`.Table`,
- without any of the declarations (particularly constraints)
- being applied a second time. Also see extend_existing.
-
- If extend_existing or keep_existing are not set, an error is
- raised if additional table modifiers are specified when
- the given :class:`.Table` is already present in the :class:`.MetaData`.
-
- :param listeners: A list of tuples of the form ``(<eventname>, <fn>)``
- which will be passed to :func:`.event.listen` upon construction.
- This alternate hook to :func:`.event.listen` allows the establishment
- of a listener function specific to this :class:`.Table` before
- the "autoload" process begins. Particularly useful for
- the :meth:`.DDLEvents.column_reflect` event::
-
- def listen_for_reflect(table, column_info):
- "handle the column reflection event"
- # ...
-
- t = Table(
- 'sometable',
- autoload=True,
- listeners=[
- ('column_reflect', listen_for_reflect)
- ])
-
- :param mustexist: When ``True``, indicates that this Table must already
- be present in the given :class:`.MetaData` collection, else
- an exception is raised.
-
- :param prefixes:
- A list of strings to insert after CREATE in the CREATE TABLE
- statement. They will be separated by spaces.
-
- :param quote: Force quoting of this table's name on or off, corresponding
- to ``True`` or ``False``. When left at its default of ``None``,
- the table identifier will be quoted according to whether the name is
- case sensitive (identifiers with at least one upper case character are
- treated as case sensitive), or if it's a reserved word. This flag
- is only needed to force quoting of a reserved word which is not known
- by the SQLAlchemy dialect.
-
- :param quote_schema: same as 'quote' but applies to the schema identifier.
-
- :param schema: The *schema name* for this table, which is required if
- the table resides in a schema other than the default selected schema
- for the engine's database connection. Defaults to ``None``.
-
- :param useexisting: Deprecated. Use extend_existing.
-
- """
-
- __visit_name__ = 'table'
-
- def __new__(cls, *args, **kw):
- if not args:
- # python3k pickle seems to call this
- return object.__new__(cls)
-
- try:
- name, metadata, args = args[0], args[1], args[2:]
- except IndexError:
- raise TypeError("Table() takes at least two arguments")
-
- schema = kw.get('schema', None)
- if schema is None:
- schema = metadata.schema
- keep_existing = kw.pop('keep_existing', False)
- extend_existing = kw.pop('extend_existing', False)
- if 'useexisting' in kw:
- msg = "useexisting is deprecated. Use extend_existing."
- util.warn_deprecated(msg)
- if extend_existing:
- msg = "useexisting is synonymous with extend_existing."
- raise exc.ArgumentError(msg)
- extend_existing = kw.pop('useexisting', False)
-
- if keep_existing and extend_existing:
- msg = "keep_existing and extend_existing are mutually exclusive."
- raise exc.ArgumentError(msg)
-
- mustexist = kw.pop('mustexist', False)
- key = _get_table_key(name, schema)
- if key in metadata.tables:
- if not keep_existing and not extend_existing and bool(args):
- raise exc.InvalidRequestError(
- "Table '%s' is already defined for this MetaData "
- "instance. Specify 'extend_existing=True' "
- "to redefine "
- "options and columns on an "
- "existing Table object." % key)
- table = metadata.tables[key]
- if extend_existing:
- table._init_existing(*args, **kw)
- return table
- else:
- if mustexist:
- raise exc.InvalidRequestError(
- "Table '%s' not defined" % (key))
- table = object.__new__(cls)
- table.dispatch.before_parent_attach(table, metadata)
- metadata._add_table(name, schema, table)
- try:
- table._init(name, metadata, *args, **kw)
- table.dispatch.after_parent_attach(table, metadata)
- return table
- except:
- metadata._remove_table(name, schema)
- raise
-
- def __init__(self, *args, **kw):
- """Constructor for :class:`~.schema.Table`.
-
- This method is a no-op. See the top-level
- documentation for :class:`~.schema.Table`
- for constructor arguments.
-
- """
- # __init__ is overridden to prevent __new__ from
- # calling the superclass constructor.
-
- def _init(self, name, metadata, *args, **kwargs):
- super(Table, self).__init__(name)
- self.metadata = metadata
- self.schema = kwargs.pop('schema', None)
- if self.schema is None:
- self.schema = metadata.schema
- self.quote_schema = kwargs.pop(
- 'quote_schema', metadata.quote_schema)
- else:
- self.quote_schema = kwargs.pop('quote_schema', None)
-
- self.indexes = set()
- self.constraints = set()
- self._columns = expression.ColumnCollection()
- PrimaryKeyConstraint()._set_parent_with_dispatch(self)
- self.foreign_keys = set()
- self._extra_dependencies = set()
- self.kwargs = {}
- if self.schema is not None:
- self.fullname = "%s.%s" % (self.schema, self.name)
- else:
- self.fullname = self.name
-
- autoload = kwargs.pop('autoload', False)
- autoload_with = kwargs.pop('autoload_with', None)
- # this argument is only used with _init_existing()
- kwargs.pop('autoload_replace', True)
- include_columns = kwargs.pop('include_columns', None)
-
- self.implicit_returning = kwargs.pop('implicit_returning', True)
- self.quote = kwargs.pop('quote', None)
- if 'info' in kwargs:
- self.info = kwargs.pop('info')
- if 'listeners' in kwargs:
- listeners = kwargs.pop('listeners')
- for evt, fn in listeners:
- event.listen(self, evt, fn)
-
- self._prefixes = kwargs.pop('prefixes', [])
-
- self._extra_kwargs(**kwargs)
-
- # load column definitions from the database if 'autoload' is defined
- # we do it after the table is in the singleton dictionary to support
- # circular foreign keys
- if autoload:
- self._autoload(metadata, autoload_with, include_columns)
-
- # initialize all the column, etc. objects. done after reflection to
- # allow user-overrides
- self._init_items(*args)
-
- def _autoload(self, metadata, autoload_with, include_columns,
- exclude_columns=()):
- if self.primary_key.columns:
- PrimaryKeyConstraint(*[
- c for c in self.primary_key.columns
- if c.key in exclude_columns
- ])._set_parent_with_dispatch(self)
-
- if autoload_with:
- autoload_with.run_callable(
- autoload_with.dialect.reflecttable,
- self, include_columns, exclude_columns
- )
- else:
- bind = _bind_or_error(metadata,
- msg="No engine is bound to this Table's MetaData. "
- "Pass an engine to the Table via "
- "autoload_with=<someengine>, "
- "or associate the MetaData with an engine via "
- "metadata.bind=<someengine>")
- bind.run_callable(
- bind.dialect.reflecttable,
- self, include_columns, exclude_columns
- )
-
- @property
- def _sorted_constraints(self):
- """Return the set of constraints as a list, sorted by creation
- order.
-
- """
- return sorted(self.constraints, key=lambda c: c._creation_order)
-
- def _init_existing(self, *args, **kwargs):
- autoload = kwargs.pop('autoload', False)
- autoload_with = kwargs.pop('autoload_with', None)
- autoload_replace = kwargs.pop('autoload_replace', True)
- schema = kwargs.pop('schema', None)
- if schema and schema != self.schema:
- raise exc.ArgumentError(
- "Can't change schema of existing table from '%s' to '%s'",
- (self.schema, schema))
-
- include_columns = kwargs.pop('include_columns', None)
-
- if include_columns is not None:
- for c in self.c:
- if c.name not in include_columns:
- self._columns.remove(c)
-
- for key in ('quote', 'quote_schema'):
- if key in kwargs:
- setattr(self, key, kwargs.pop(key))
-
- if 'info' in kwargs:
- self.info = kwargs.pop('info')
-
- if autoload:
- if not autoload_replace:
- exclude_columns = [c.name for c in self.c]
- else:
- exclude_columns = ()
- self._autoload(
- self.metadata, autoload_with, include_columns, exclude_columns)
-
- self._extra_kwargs(**kwargs)
- self._init_items(*args)
-
- def _extra_kwargs(self, **kwargs):
- # validate remaining kwargs that they all specify DB prefixes
- _validate_dialect_kwargs(kwargs, "Table")
- self.kwargs.update(kwargs)
-
- def _init_collections(self):
- pass
-
- @util.memoized_property
- def _autoincrement_column(self):
- for col in self.primary_key:
- if col.autoincrement and \
- col.type._type_affinity is not None and \
- issubclass(col.type._type_affinity, sqltypes.Integer) and \
- (not col.foreign_keys or col.autoincrement == 'ignore_fk') and \
- isinstance(col.default, (type(None), Sequence)) and \
- (col.server_default is None or col.server_default.reflected):
- return col
-
- @property
- def key(self):
- return _get_table_key(self.name, self.schema)
-
- def __repr__(self):
- return "Table(%s)" % ', '.join(
- [repr(self.name)] + [repr(self.metadata)] +
- [repr(x) for x in self.columns] +
- ["%s=%s" % (k, repr(getattr(self, k))) for k in ['schema']])
-
- def __str__(self):
- return _get_table_key(self.description, self.schema)
-
- @property
- def bind(self):
- """Return the connectable associated with this Table."""
-
- return self.metadata and self.metadata.bind or None
-
- def add_is_dependent_on(self, table):
- """Add a 'dependency' for this Table.
-
- This is another Table object which must be created
- first before this one can, or dropped after this one.
-
- Usually, dependencies between tables are determined via
- ForeignKey objects. However, for other situations that
- create dependencies outside of foreign keys (rules, inheriting),
- this method can manually establish such a link.
-
- """
- self._extra_dependencies.add(table)
-
- def append_column(self, column):
- """Append a :class:`~.schema.Column` to this :class:`~.schema.Table`.
-
- The "key" of the newly added :class:`~.schema.Column`, i.e. the
- value of its ``.key`` attribute, will then be available
- in the ``.c`` collection of this :class:`~.schema.Table`, and the
- column definition will be included in any CREATE TABLE, SELECT,
- UPDATE, etc. statements generated from this :class:`~.schema.Table`
- construct.
-
- Note that this does **not** change the definition of the table
- as it exists within any underlying database, assuming that
- table has already been created in the database. Relational
- databases support the addition of columns to existing tables
- using the SQL ALTER command, which would need to be
- emitted for an already-existing table that doesn't contain
- the newly added column.
-
- """
-
- column._set_parent_with_dispatch(self)
-
- def append_constraint(self, constraint):
- """Append a :class:`~.schema.Constraint` to this
- :class:`~.schema.Table`.
-
- This has the effect of the constraint being included in any
- future CREATE TABLE statement, assuming specific DDL creation
- events have not been associated with the given
- :class:`~.schema.Constraint` object.
-
- Note that this does **not** produce the constraint within the
- relational database automatically, for a table that already exists
- in the database. To add a constraint to an
- existing relational database table, the SQL ALTER command must
- be used. SQLAlchemy also provides the
- :class:`.AddConstraint` construct which can produce this SQL when
- invoked as an executable clause.
-
- """
-
- constraint._set_parent_with_dispatch(self)
-
- def append_ddl_listener(self, event_name, listener):
- """Append a DDL event listener to this ``Table``.
-
- .. deprecated:: 0.7
- See :class:`.DDLEvents`.
-
- """
-
- def adapt_listener(target, connection, **kw):
- listener(event_name, target, connection)
-
- event.listen(self, "" + event_name.replace('-', '_'), adapt_listener)
-
- def _set_parent(self, metadata):
- metadata._add_table(self.name, self.schema, self)
- self.metadata = metadata
-
- def get_children(self, column_collections=True,
- schema_visitor=False, **kw):
- if not schema_visitor:
- return expression.TableClause.get_children(
- self, column_collections=column_collections, **kw)
- else:
- if column_collections:
- return list(self.columns)
- else:
- return []
-
- def exists(self, bind=None):
- """Return True if this table exists."""
-
- if bind is None:
- bind = _bind_or_error(self)
-
- return bind.run_callable(bind.dialect.has_table,
- self.name, schema=self.schema)
-
- def create(self, bind=None, checkfirst=False):
- """Issue a ``CREATE`` statement for this
- :class:`.Table`, using the given :class:`.Connectable`
- for connectivity.
-
- See also :meth:`.MetaData.create_all`.
-
- """
-
- if bind is None:
- bind = _bind_or_error(self)
- bind._run_visitor(ddl.SchemaGenerator,
- self,
- checkfirst=checkfirst)
-
- def drop(self, bind=None, checkfirst=False):
- """Issue a ``DROP`` statement for this
- :class:`.Table`, using the given :class:`.Connectable`
- for connectivity.
-
- See also :meth:`.MetaData.drop_all`.
-
- """
- if bind is None:
- bind = _bind_or_error(self)
- bind._run_visitor(ddl.SchemaDropper,
- self,
- checkfirst=checkfirst)
-
- def tometadata(self, metadata, schema=RETAIN_SCHEMA):
- """Return a copy of this :class:`.Table` associated with a different
- :class:`.MetaData`.
-
- E.g.::
-
- some_engine = create_engine("sqlite:///some.db")
-
- # create two metadata
- meta1 = MetaData()
- meta2 = MetaData()
-
- # load 'users' from the sqlite engine
- users_table = Table('users', meta1, autoload=True,
- autoload_with=some_engine)
-
- # create the same Table object for the plain metadata
- users_table_2 = users_table.tometadata(meta2)
-
- :param metadata: Target :class:`.MetaData` object.
- :param schema: Optional string name of a target schema, or
- ``None`` for no schema. The :class:`.Table` object will be
- given this schema name upon copy. Defaults to the special
- symbol :attr:`.RETAIN_SCHEMA` which indicates no change should be
- made to the schema name of the resulting :class:`.Table`.
-
- """
-
- if schema is RETAIN_SCHEMA:
- schema = self.schema
- elif schema is None:
- schema = metadata.schema
- key = _get_table_key(self.name, schema)
- if key in metadata.tables:
- util.warn("Table '%s' already exists within the given "
- "MetaData - not copying." % self.description)
- return metadata.tables[key]
-
- args = []
- for c in self.columns:
- args.append(c.copy(schema=schema))
- table = Table(
- self.name, metadata, schema=schema,
- *args, **self.kwargs
- )
- for c in self.constraints:
- table.append_constraint(c.copy(schema=schema, target_table=table))
-
- for index in self.indexes:
- # skip indexes that would be generated
- # by the 'index' flag on Column
- if len(index.columns) == 1 and \
- list(index.columns)[0].index:
- continue
- Index(index.name,
- unique=index.unique,
- *[table.c[col] for col in index.columns.keys()],
- **index.kwargs)
- table.dispatch._update(self.dispatch)
- return table
-
-
-class Column(SchemaItem, expression.ColumnClause):
- """Represents a column in a database table."""
-
- __visit_name__ = 'column'
-
- def __init__(self, *args, **kwargs):
- """
- Construct a new ``Column`` object.
-
- :param name: The name of this column as represented in the database.
- This argument may be the first positional argument, or specified
- via keyword.
-
- Names which contain no upper case characters
- will be treated as case insensitive names, and will not be quoted
- unless they are a reserved word. Names with any number of upper
- case characters will be quoted and sent exactly. Note that this
- behavior applies even for databases which standardize upper
- case names as case insensitive such as Oracle.
-
- The name field may be omitted at construction time and applied
- later, at any time before the Column is associated with a
- :class:`.Table`. This is to support convenient
- usage within the :mod:`~sqlalchemy.ext.declarative` extension.
-
- :param type\_: The column's type, indicated using an instance which
- subclasses :class:`~sqlalchemy.types.TypeEngine`. If no arguments
- are required for the type, the class of the type can be sent
- as well, e.g.::
-
- # use a type with arguments
- Column('data', String(50))
-
- # use no arguments
- Column('level', Integer)
-
- The ``type`` argument may be the second positional argument
- or specified by keyword.
-
- If the ``type`` is ``None`` or is omitted, it will first default to the special
- type :class:`.NullType`. If and when this :class:`.Column` is
- made to refer to another column using :class:`.ForeignKey`
- and/or :class:`.ForeignKeyConstraint`, the type of the remote-referenced
- column will be copied to this column as well, at the moment that
- the foreign key is resolved against that remote :class:`.Column`
- object.
-
- .. versionchanged:: 0.9.0
- Support for propagation of type to a :class:`.Column` from its
- :class:`.ForeignKey` object has been improved and should be
- more reliable and timely.
-
- :param \*args: Additional positional arguments include various
- :class:`.SchemaItem` derived constructs which will be applied
- as options to the column. These include instances of
- :class:`.Constraint`, :class:`.ForeignKey`, :class:`.ColumnDefault`,
- and :class:`.Sequence`. In some cases an equivalent keyword
- argument is available such as ``server_default``, ``default``
- and ``unique``.
-
- :param autoincrement: This flag may be set to ``False`` to
- indicate an integer primary key column that should not be
- considered to be the "autoincrement" column, that is
- the integer primary key column which generates values
- implicitly upon INSERT and whose value is usually returned
- via the DBAPI cursor.lastrowid attribute. It defaults
- to ``True`` to satisfy the common use case of a table
- with a single integer primary key column. If the table
- has a composite primary key consisting of more than one
- integer column, set this flag to True only on the
- column that should be considered "autoincrement".
-
- The setting *only* has an effect for columns which are:
-
- * Integer derived (i.e. INT, SMALLINT, BIGINT).
-
- * Part of the primary key
-
- * Are not referenced by any foreign keys, unless
- the value is specified as ``'ignore_fk'``
-
- .. versionadded:: 0.7.4
-
- * have no server side or client side defaults (with the exception
- of Postgresql SERIAL).
-
- The setting has these two effects on columns that meet the
- above criteria:
-
- * DDL issued for the column will include database-specific
- keywords intended to signify this column as an
- "autoincrement" column, such as AUTO INCREMENT on MySQL,
- SERIAL on Postgresql, and IDENTITY on MS-SQL. It does
- *not* issue AUTOINCREMENT for SQLite since this is a
- special SQLite flag that is not required for autoincrementing
- behavior. See the SQLite dialect documentation for
- information on SQLite's AUTOINCREMENT.
-
- * The column will be considered to be available as
- cursor.lastrowid or equivalent, for those dialects which
- "post fetch" newly inserted identifiers after a row has
- been inserted (SQLite, MySQL, MS-SQL). It does not have
- any effect in this regard for databases that use sequences
- to generate primary key identifiers (i.e. Firebird, Postgresql,
- Oracle).
-
- .. versionchanged:: 0.7.4
- ``autoincrement`` accepts a special value ``'ignore_fk'``
- to indicate autoincrementing status regardless of foreign
- key references. This applies to certain composite foreign key
- setups, such as the one demonstrated in the ORM documentation
- at :ref:`post_update`.
-
- :param default: A scalar, Python callable, or
- :class:`.ColumnElement` expression representing the
- *default value* for this column, which will be invoked upon insert
- if this column is otherwise not specified in the VALUES clause of
- the insert. This is a shortcut to using :class:`.ColumnDefault` as
- a positional argument; see that class for full detail on the
- structure of the argument.
-
- Contrast this argument to ``server_default`` which creates a
- default generator on the database side.
-
- :param doc: optional String that can be used by the ORM or similar
- to document attributes. This attribute does not render SQL
- comments (a future attribute 'comment' will achieve that).
-
- :param key: An optional string identifier which will identify this
- ``Column`` object on the :class:`.Table`. When a key is provided,
- this is the only identifier referencing the ``Column`` within the
- application, including ORM attribute mapping; the ``name`` field
- is used only when rendering SQL.
-
- :param index: When ``True``, indicates that the column is indexed.
- This is a shortcut for using a :class:`.Index` construct on the
- table. To specify indexes with explicit names or indexes that
- contain multiple columns, use the :class:`.Index` construct
- instead.
-
- :param info: Optional data dictionary which will be populated into the
- :attr:`.SchemaItem.info` attribute of this object.
-
- :param nullable: If set to the default of ``True``, indicates the
- column will be rendered as allowing NULL, else it's rendered as
- NOT NULL. This parameter is only used when issuing CREATE TABLE
- statements.
-
- :param onupdate: A scalar, Python callable, or
- :class:`~sqlalchemy.sql.expression.ClauseElement` representing a
- default value to be applied to the column within UPDATE
- statements, which will be invoked upon update if this column is not
- present in the SET clause of the update. This is a shortcut to
- using :class:`.ColumnDefault` as a positional argument with
- ``for_update=True``.
-
- :param primary_key: If ``True``, marks this column as a primary key
- column. Multiple columns can have this flag set to specify
- composite primary keys. As an alternative, the primary key of a
- :class:`.Table` can be specified via an explicit
- :class:`.PrimaryKeyConstraint` object.
-
- :param server_default: A :class:`.FetchedValue` instance, str, Unicode
- or :func:`~sqlalchemy.sql.expression.text` construct representing
- the DDL DEFAULT value for the column.
-
- String types will be emitted as-is, surrounded by single quotes::
-
- Column('x', Text, server_default="val")
-
- x TEXT DEFAULT 'val'
-
- A :func:`~sqlalchemy.sql.expression.text` expression will be
- rendered as-is, without quotes::
-
- Column('y', DateTime, server_default=text('NOW()'))
-
- y DATETIME DEFAULT NOW()
-
- Strings and text() will be converted into a :class:`.DefaultClause`
- object upon initialization.
-
- Use :class:`.FetchedValue` to indicate that an already-existing
- column will generate a default value on the database side which
- will be available to SQLAlchemy for post-fetch after inserts. This
- construct does not specify any DDL and the implementation is left
- to the database, such as via a trigger.
-
- :param server_onupdate: A :class:`.FetchedValue` instance
- representing a database-side default generation function. This
- indicates to SQLAlchemy that a newly generated value will be
- available after updates. This construct does not specify any DDL
- and the implementation is left to the database, such as via a
- trigger.
-
- :param quote: Force quoting of this column's name on or off,
- corresponding to ``True`` or ``False``. When left at its default
- of ``None``, the column identifier will be quoted according to
- whether the name is case sensitive (identifiers with at least one
- upper case character are treated as case sensitive), or if it's a
- reserved word. This flag is only needed to force quoting of a
- reserved word which is not known by the SQLAlchemy dialect.
-
- :param unique: When ``True``, indicates that this column contains a
- unique constraint, or if ``index`` is ``True`` as well, indicates
- that the :class:`.Index` should be created with the unique flag.
- To specify multiple columns in the constraint/index or to specify
- an explicit name, use the :class:`.UniqueConstraint` or
- :class:`.Index` constructs explicitly.
-
- """
-
- name = kwargs.pop('name', None)
- type_ = kwargs.pop('type_', None)
- args = list(args)
- if args:
- if isinstance(args[0], util.string_types):
- if name is not None:
- raise exc.ArgumentError(
- "May not pass name positionally and as a keyword.")
- name = args.pop(0)
- if args:
- coltype = args[0]
-
- if (isinstance(coltype, sqltypes.TypeEngine) or
- (isinstance(coltype, type) and
- issubclass(coltype, sqltypes.TypeEngine))):
- if type_ is not None:
- raise exc.ArgumentError(
- "May not pass type_ positionally and as a keyword.")
- type_ = args.pop(0)
-
- super(Column, self).__init__(name, None, type_)
- self.key = kwargs.pop('key', name)
- self.primary_key = kwargs.pop('primary_key', False)
- self.nullable = kwargs.pop('nullable', not self.primary_key)
- self.default = kwargs.pop('default', None)
- self.server_default = kwargs.pop('server_default', None)
- self.server_onupdate = kwargs.pop('server_onupdate', None)
- self.index = kwargs.pop('index', None)
- self.unique = kwargs.pop('unique', None)
- self.quote = kwargs.pop('quote', None)
- self.doc = kwargs.pop('doc', None)
- self.onupdate = kwargs.pop('onupdate', None)
- self.autoincrement = kwargs.pop('autoincrement', True)
- self.constraints = set()
- self.foreign_keys = set()
-
- # check if this Column is proxying another column
- if '_proxies' in kwargs:
- self._proxies = kwargs.pop('_proxies')
- # otherwise, add DDL-related events
- elif isinstance(self.type, sqltypes.SchemaType):
- self.type._set_parent_with_dispatch(self)
-
- if self.default is not None:
- if isinstance(self.default, (ColumnDefault, Sequence)):
- args.append(self.default)
- else:
- if getattr(self.type, '_warn_on_bytestring', False):
- if isinstance(self.default, util.binary_type):
- util.warn("Unicode column received non-unicode "
- "default value.")
- args.append(ColumnDefault(self.default))
-
- if self.server_default is not None:
- if isinstance(self.server_default, FetchedValue):
- args.append(self.server_default._as_for_update(False))
- else:
- args.append(DefaultClause(self.server_default))
-
- if self.onupdate is not None:
- if isinstance(self.onupdate, (ColumnDefault, Sequence)):
- args.append(self.onupdate)
- else:
- args.append(ColumnDefault(self.onupdate, for_update=True))
-
- if self.server_onupdate is not None:
- if isinstance(self.server_onupdate, FetchedValue):
- args.append(self.server_onupdate._as_for_update(True))
- else:
- args.append(DefaultClause(self.server_onupdate,
- for_update=True))
- self._init_items(*args)
-
- util.set_creation_order(self)
-
- if 'info' in kwargs:
- self.info = kwargs.pop('info')
-
- if kwargs:
- raise exc.ArgumentError(
- "Unknown arguments passed to Column: " + repr(list(kwargs)))
-
- def __str__(self):
- if self.name is None:
- return "(no name)"
- elif self.table is not None:
- if self.table.named_with_column:
- return (self.table.description + "." + self.description)
- else:
- return self.description
- else:
- return self.description
-
- def references(self, column):
- """Return True if this Column references the given column via foreign
- key."""
-
- for fk in self.foreign_keys:
- if fk.column.proxy_set.intersection(column.proxy_set):
- return True
- else:
- return False
-
- def append_foreign_key(self, fk):
- fk._set_parent_with_dispatch(self)
-
- def __repr__(self):
- kwarg = []
- if self.key != self.name:
- kwarg.append('key')
- if self.primary_key:
- kwarg.append('primary_key')
- if not self.nullable:
- kwarg.append('nullable')
- if self.onupdate:
- kwarg.append('onupdate')
- if self.default:
- kwarg.append('default')
- if self.server_default:
- kwarg.append('server_default')
- return "Column(%s)" % ', '.join(
- [repr(self.name)] + [repr(self.type)] +
- [repr(x) for x in self.foreign_keys if x is not None] +
- [repr(x) for x in self.constraints] +
- [(self.table is not None and "table=<%s>" %
- self.table.description or "table=None")] +
- ["%s=%s" % (k, repr(getattr(self, k))) for k in kwarg])
-
- def _set_parent(self, table):
- if not self.name:
- raise exc.ArgumentError(
- "Column must be constructed with a non-blank name or "
- "assign a non-blank .name before adding to a Table.")
- if self.key is None:
- self.key = self.name
-
- existing = getattr(self, 'table', None)
- if existing is not None and existing is not table:
- raise exc.ArgumentError(
- "Column object already assigned to Table '%s'" %
- existing.description)
-
- if self.key in table._columns:
- col = table._columns.get(self.key)
- if col is not self:
- for fk in col.foreign_keys:
- table.foreign_keys.remove(fk)
- if fk.constraint in table.constraints:
- # this might have been removed
- # already, if it's a composite constraint
- # and more than one col being replaced
- table.constraints.remove(fk.constraint)
-
- table._columns.replace(self)
-
- if self.primary_key:
- table.primary_key._replace(self)
- Table._autoincrement_column._reset(table)
- elif self.key in table.primary_key:
- raise exc.ArgumentError(
- "Trying to redefine primary-key column '%s' as a "
- "non-primary-key column on table '%s'" % (
- self.key, table.fullname))
- self.table = table
-
- if self.index:
- if isinstance(self.index, util.string_types):
- raise exc.ArgumentError(
- "The 'index' keyword argument on Column is boolean only. "
- "To create indexes with a specific name, create an "
- "explicit Index object external to the Table.")
- Index(expression._truncated_label('ix_%s' % self._label),
- self, unique=self.unique)
- elif self.unique:
- if isinstance(self.unique, util.string_types):
- raise exc.ArgumentError(
- "The 'unique' keyword argument on Column is boolean "
- "only. To create unique constraints or indexes with a "
- "specific name, append an explicit UniqueConstraint to "
- "the Table's list of elements, or create an explicit "
- "Index object external to the Table.")
- table.append_constraint(UniqueConstraint(self.key))
-
- fk_key = (table.key, self.key)
- if fk_key in self.table.metadata._fk_memos:
- for fk in self.table.metadata._fk_memos[fk_key]:
- fk._set_remote_table(table)
-
- def _on_table_attach(self, fn):
- if self.table is not None:
- fn(self, self.table)
- event.listen(self, 'after_parent_attach', fn)
-
- def copy(self, **kw):
- """Create a copy of this ``Column``, uninitialized.
-
- This is used in ``Table.tometadata``.
-
- """
-
- # Constraint objects plus non-constraint-bound ForeignKey objects
- args = \
- [c.copy(**kw) for c in self.constraints] + \
- [c.copy(**kw) for c in self.foreign_keys if not c.constraint]
-
- type_ = self.type
- if isinstance(type_, sqltypes.SchemaType):
- type_ = type_.copy(**kw)
-
- c = self._constructor(
- name=self.name,
- type_=type_,
- key=self.key,
- primary_key=self.primary_key,
- nullable=self.nullable,
- unique=self.unique,
- quote=self.quote,
- index=self.index,
- autoincrement=self.autoincrement,
- default=self.default,
- server_default=self.server_default,
- onupdate=self.onupdate,
- server_onupdate=self.server_onupdate,
- info=self.info,
- doc=self.doc,
- *args
- )
- c.dispatch._update(self.dispatch)
- return c
-
- def _make_proxy(self, selectable, name=None, key=None,
- name_is_truncatable=False, **kw):
- """Create a *proxy* for this column.
-
- This is a copy of this ``Column`` referenced by a different parent
- (such as an alias or select statement). The column should
- be used only in select scenarios, as its full DDL/default
- information is not transferred.
-
- """
- fk = [ForeignKey(f.column, _constraint=f.constraint)
- for f in self.foreign_keys]
- if name is None and self.name is None:
- raise exc.InvalidRequestError("Cannot initialize a sub-selectable"
- " with this Column object until it's 'name' has "
- "been assigned.")
- try:
- c = self._constructor(
- expression._as_truncated(name or self.name) if \
- name_is_truncatable else (name or self.name),
- self.type,
- key=key if key else name if name else self.key,
- primary_key=self.primary_key,
- nullable=self.nullable,
- quote=self.quote,
- _proxies=[self], *fk)
- except TypeError:
- util.raise_from_cause(
- TypeError(
- "Could not create a copy of this %r object. "
- "Ensure the class includes a _constructor() "
- "attribute or method which accepts the "
- "standard Column constructor arguments, or "
- "references the Column class itself." % self.__class__)
- )
-
- c.table = selectable
- selectable._columns.add(c)
- if selectable._is_clone_of is not None:
- c._is_clone_of = selectable._is_clone_of.columns[c.key]
- if self.primary_key:
- selectable.primary_key.add(c)
- c.dispatch.after_parent_attach(c, selectable)
- return c
-
- def get_children(self, schema_visitor=False, **kwargs):
- if schema_visitor:
- return [x for x in (self.default, self.onupdate)
- if x is not None] + \
- list(self.foreign_keys) + list(self.constraints)
- else:
- return expression.ColumnClause.get_children(self, **kwargs)
-
-
class ForeignKey(SchemaItem):
    """Defines a dependency between two columns.

    ``ForeignKey`` is specified as an argument to a :class:`.Column` object,
    e.g.::

        t = Table("remote_table", metadata,
            Column("remote_id", ForeignKey("main_table.id"))
        )

    Note that ``ForeignKey`` is only a marker object that defines
    a dependency between two columns. The actual constraint
    is in all cases represented by the :class:`.ForeignKeyConstraint`
    object. This object will be generated automatically when
    a ``ForeignKey`` is associated with a :class:`.Column` which
    in turn is associated with a :class:`.Table`. Conversely,
    when :class:`.ForeignKeyConstraint` is applied to a :class:`.Table`,
    ``ForeignKey`` markers are automatically generated to be
    present on each associated :class:`.Column`, which are also
    associated with the constraint object.

    Note that you cannot define a "composite" foreign key constraint,
    that is a constraint between a grouping of multiple parent/child
    columns, using ``ForeignKey`` objects. To define this grouping,
    the :class:`.ForeignKeyConstraint` object must be used, and applied
    to the :class:`.Table`. The associated ``ForeignKey`` objects
    are created automatically.

    The ``ForeignKey`` objects associated with an individual
    :class:`.Column` object are available in the `foreign_keys` collection
    of that column.

    Further examples of foreign key configuration are in
    :ref:`metadata_foreignkeys`.

    """

    __visit_name__ = 'foreign_key'

    def __init__(self, column, _constraint=None, use_alter=False, name=None,
                 onupdate=None, ondelete=None, deferrable=None,
                 schema=None,
                 initially=None, link_to_name=False, match=None):
        """
        Construct a column-level FOREIGN KEY.

        The :class:`.ForeignKey` object when constructed generates a
        :class:`.ForeignKeyConstraint` which is associated with the parent
        :class:`.Table` object's collection of constraints.

        :param column: A single target column for the key relationship. A
            :class:`.Column` object or a column name as a string:
            ``tablename.columnkey`` or ``schema.tablename.columnkey``.
            ``columnkey`` is the ``key`` which has been assigned to the column
            (defaults to the column name itself), unless ``link_to_name`` is
            ``True`` in which case the rendered name of the column is used.

            .. versionadded:: 0.7.4
                Note that if the schema name is not included, and the
                underlying :class:`.MetaData` has a "schema", that value will
                be used.

        :param name: Optional string. An in-database name for the key if
            `constraint` is not provided.

        :param onupdate: Optional string. If set, emit ON UPDATE <value> when
            issuing DDL for this constraint. Typical values include CASCADE,
            DELETE and RESTRICT.

        :param ondelete: Optional string. If set, emit ON DELETE <value> when
            issuing DDL for this constraint. Typical values include CASCADE,
            DELETE and RESTRICT.

        :param deferrable: Optional bool. If set, emit DEFERRABLE or NOT
            DEFERRABLE when issuing DDL for this constraint.

        :param initially: Optional string. If set, emit INITIALLY <value> when
            issuing DDL for this constraint.

        :param link_to_name: if True, the string name given in ``column`` is
            the rendered name of the referenced column, not its locally
            assigned ``key``.

        :param use_alter: passed to the underlying
            :class:`.ForeignKeyConstraint` to indicate the constraint should
            be generated/dropped externally from the CREATE TABLE/ DROP TABLE
            statement. See that class's constructor for details.

        :param match: Optional string. If set, emit MATCH <value> when issuing
            DDL for this constraint. Typical values include SIMPLE, PARTIAL
            and FULL.

        """
        # NOTE(review): the ``schema`` parameter is accepted by this
        # signature but is never stored or consulted below -- presumably
        # retained for signature compatibility; confirm before relying
        # on it.

        self._colspec = column

        # the linked ForeignKeyConstraint.
        # ForeignKey will create this when parent Column
        # is attached to a Table, *or* ForeignKeyConstraint
        # object passes itself in when creating ForeignKey
        # markers.
        self.constraint = _constraint
        self.parent = None
        self.use_alter = use_alter
        self.name = name
        self.onupdate = onupdate
        self.ondelete = ondelete
        self.deferrable = deferrable
        self.initially = initially
        self.link_to_name = link_to_name
        self.match = match

    def __repr__(self):
        return "ForeignKey(%r)" % self._get_colspec()

    def copy(self, schema=None):
        """Produce a copy of this :class:`.ForeignKey` object.

        The new :class:`.ForeignKey` will not be bound
        to any :class:`.Column`.

        This method is usually used by the internal
        copy procedures of :class:`.Column`, :class:`.Table`,
        and :class:`.MetaData`.

        :param schema: The returned :class:`.ForeignKey` will
            reference the original table and column name, qualified
            by the given string schema name.

        """

        fk = ForeignKey(
            self._get_colspec(schema=schema),
            use_alter=self.use_alter,
            name=self.name,
            onupdate=self.onupdate,
            ondelete=self.ondelete,
            deferrable=self.deferrable,
            initially=self.initially,
            link_to_name=self.link_to_name,
            match=self.match
        )
        # carry event listeners over to the copy
        fk.dispatch._update(self.dispatch)
        return fk

    def _get_colspec(self, schema=None):
        """Return a string based 'column specification' for this
        :class:`.ForeignKey`.

        This is usually the equivalent of the string-based "tablename.colname"
        argument first passed to the object's constructor.

        """
        # NOTE(review): the ``schema`` branch accesses ``self.column``,
        # which requires the target column to already be resolvable at
        # this point -- confirm callers guarantee this.
        if schema:
            return schema + "." + self.column.table.name + \
                "." + self.column.key
        elif isinstance(self._colspec, util.string_types):
            return self._colspec
        elif hasattr(self._colspec, '__clause_element__'):
            _column = self._colspec.__clause_element__()
        else:
            _column = self._colspec

        return "%s.%s" % (_column.table.fullname, _column.key)

    target_fullname = property(_get_colspec)

    def references(self, table):
        """Return True if the given :class:`.Table` is referenced by this
        :class:`.ForeignKey`."""

        return table.corresponding_column(self.column) is not None

    def get_referent(self, table):
        """Return the :class:`.Column` in the given :class:`.Table`
        referenced by this :class:`.ForeignKey`.

        Returns None if this :class:`.ForeignKey` does not reference the given
        :class:`.Table`.

        """

        return table.corresponding_column(self.column)

    @util.memoized_property
    def _column_tokens(self):
        """parse a string-based _colspec into its component parts."""

        m = self._colspec.split('.')
        # NOTE(review): str.split never returns None, so this check is
        # effectively dead code; a malformed spec surfaces downstream
        # instead of raising here.
        if m is None:
            raise exc.ArgumentError(
                "Invalid foreign key column specification: %s" %
                self._colspec)
        if (len(m) == 1):
            tname = m.pop()
            colname = None
        else:
            colname = m.pop()
            tname = m.pop()

        # A FK between column 'bar' and table 'foo' can be
        # specified as 'foo', 'foo.bar', 'dbo.foo.bar',
        # 'otherdb.dbo.foo.bar'. Once we have the column name and
        # the table name, treat everything else as the schema
        # name. Some databases (e.g. Sybase) support
        # inter-database foreign keys. See tickets#1341 and --
        # indirectly related -- Ticket #594. This assumes that '.'
        # will never appear *within* any component of the FK.

        if (len(m) > 0):
            schema = '.'.join(m)
        else:
            schema = None
        return schema, tname, colname

    def _table_key(self):
        # return the metadata-registry key of the referenced table,
        # or None if the target column is not yet bound to a table
        if isinstance(self._colspec, util.string_types):
            schema, tname, colname = self._column_tokens
            return _get_table_key(tname, schema)
        elif hasattr(self._colspec, '__clause_element__'):
            _column = self._colspec.__clause_element__()
        else:
            _column = self._colspec

        if _column.table is None:
            return None
        else:
            return _column.table.key

    def _resolve_col_tokens(self):
        # resolve the string colspec into (parent table, referenced table
        # key, referenced column name); requires this ForeignKey to be
        # fully attached to a parent Column and Table
        if self.parent is None:
            raise exc.InvalidRequestError(
                "this ForeignKey object does not yet have a "
                "parent Column associated with it.")

        elif self.parent.table is None:
            raise exc.InvalidRequestError(
                "this ForeignKey's parent column is not yet associated "
                "with a Table.")

        parenttable = self.parent.table

        # assertion, can be commented out.
        # basically Column._make_proxy() sends the actual
        # target Column to the ForeignKey object, so the
        # string resolution here is never called.
        for c in self.parent.base_columns:
            if isinstance(c, Column):
                assert c.table is parenttable
                break
        else:
            assert False
        ######################

        schema, tname, colname = self._column_tokens

        # an unqualified table name inherits the MetaData-level schema
        if schema is None and parenttable.metadata.schema is not None:
            schema = parenttable.metadata.schema

        tablekey = _get_table_key(tname, schema)
        return parenttable, tablekey, colname

    def _link_to_col_by_colstring(self, parenttable, table, colname):
        # all ForeignKeys of one constraint must point into the same table
        if not hasattr(self.constraint, '_referred_table'):
            self.constraint._referred_table = table
        else:
            assert self.constraint._referred_table is table

        _column = None
        if colname is None:
            # colname is None in the case that ForeignKey argument
            # was specified as table name only, in which case we
            # match the column name to the same column on the
            # parent.
            key = self.parent
            _column = table.c.get(self.parent.key, None)
        elif self.link_to_name:
            key = colname
            # match on the rendered name rather than the collection key;
            # scans the whole collection with no early break --
            # presumably column names within a Table are unique, so at
            # most one match occurs (confirm)
            for c in table.c:
                if c.name == colname:
                    _column = c
        else:
            key = colname
            _column = table.c.get(colname, None)

        if _column is None:
            raise exc.NoReferencedColumnError(
                "Could not initialize target column for ForeignKey '%s' on table '%s': "
                "table '%s' has no column named '%s'" % (
                    self._colspec, parenttable.name, table.name, key),
                table.name, key)

        self._set_target_column(_column)

    def _set_target_column(self, column):
        # propagate TypeEngine to parent if it didn't have one
        if isinstance(self.parent.type, sqltypes.NullType):
            self.parent.type = column.type

        # super-edgy case, if other FKs point to our column,
        # they'd get the type propagated out also.
        if isinstance(self.parent.table, Table):
            fk_key = (self.parent.table.key, self.parent.key)
            if fk_key in self.parent.table.metadata._fk_memos:
                for fk in self.parent.table.metadata._fk_memos[fk_key]:
                    if isinstance(fk.parent.type, sqltypes.NullType):
                        fk.parent.type = column.type

        # overwrites the memoized ``column`` property below
        self.column = column

    @util.memoized_property
    def column(self):
        """Return the target :class:`.Column` referenced by this
        :class:`.ForeignKey`.

        If no target column has been established, an exception
        is raised.

        .. versionchanged:: 0.9.0
            Foreign key target column resolution now occurs as soon as both
            the ForeignKey object and the remote Column to which it refers
            are both associated with the same MetaData object.

        """

        if isinstance(self._colspec, util.string_types):

            parenttable, tablekey, colname = self._resolve_col_tokens()

            # NOTE(review): every branch below raises.  By the time this
            # memoized property fires for a string colspec, _set_table()
            # and the metadata events have already had their chance to
            # resolve the target via _set_target_column(); reaching here
            # means resolution failed, and these branches only pick the
            # most descriptive error.
            if tablekey not in parenttable.metadata:
                raise exc.NoReferencedTableError(
                    "Foreign key associated with column '%s' could not find "
                    "table '%s' with which to generate a "
                    "foreign key to target column '%s'" %
                    (self.parent, tablekey, colname),
                    tablekey)
            elif parenttable.key not in parenttable.metadata:
                raise exc.InvalidRequestError(
                    "Table %s is no longer associated with its "
                    "parent MetaData" % parenttable)
            else:
                raise exc.NoReferencedColumnError(
                    "Could not initialize target column for "
                    "ForeignKey '%s' on table '%s': "
                    "table '%s' has no column named '%s'" % (
                        self._colspec, parenttable.name, tablekey, colname),
                    tablekey, colname)
        elif hasattr(self._colspec, '__clause_element__'):
            _column = self._colspec.__clause_element__()
            return _column
        else:
            _column = self._colspec
            return _column

    def _set_parent(self, column):
        if self.parent is not None and self.parent is not column:
            raise exc.InvalidRequestError(
                "This ForeignKey already has a parent !")
        self.parent = column
        self.parent.foreign_keys.add(self)
        # defer constraint/target setup until the Column joins a Table
        self.parent._on_table_attach(self._set_table)

    def _set_remote_table(self, table):
        # the referenced table has become available; link to it now
        parenttable, tablekey, colname = self._resolve_col_tokens()
        self._link_to_col_by_colstring(parenttable, table, colname)
        self.constraint._validate_dest_table(table)

    def _remove_from_metadata(self, metadata):
        parenttable, table_key, colname = self._resolve_col_tokens()
        fk_key = (table_key, colname)

        if self in metadata._fk_memos[fk_key]:
            # TODO: no test coverage for self not in memos
            metadata._fk_memos[fk_key].remove(self)

    def _set_table(self, column, table):
        # standalone ForeignKey - create ForeignKeyConstraint
        # on the hosting Table when attached to the Table.
        if self.constraint is None and isinstance(table, Table):
            self.constraint = ForeignKeyConstraint(
                [], [], use_alter=self.use_alter, name=self.name,
                onupdate=self.onupdate, ondelete=self.ondelete,
                deferrable=self.deferrable, initially=self.initially,
                match=self.match,
            )
            self.constraint._elements[self.parent] = self
            self.constraint._set_parent_with_dispatch(table)
        table.foreign_keys.add(self)

        # set up remote ".column" attribute, or a note to pick it
        # up when the other Table/Column shows up
        if isinstance(self._colspec, util.string_types):
            parenttable, table_key, colname = self._resolve_col_tokens()
            fk_key = (table_key, colname)
            if table_key in parenttable.metadata.tables:
                table = parenttable.metadata.tables[table_key]
                try:
                    self._link_to_col_by_colstring(parenttable, table, colname)
                except exc.NoReferencedColumnError:
                    # this is OK, we'll try later
                    pass
            parenttable.metadata._fk_memos[fk_key].append(self)
        elif hasattr(self._colspec, '__clause_element__'):
            _column = self._colspec.__clause_element__()
            self._set_target_column(_column)
        else:
            _column = self._colspec
            self._set_target_column(_column)
-
-
-
-class _NotAColumnExpr(object):
- def _not_a_column_expr(self):
- raise exc.InvalidRequestError(
- "This %s cannot be used directly "
- "as a column expression." % self.__class__.__name__)
-
- __clause_element__ = self_group = lambda self: self._not_a_column_expr()
- _from_objects = property(lambda self: self._not_a_column_expr())
-
-
class DefaultGenerator(_NotAColumnExpr, SchemaItem):
    """Base class for column *default* values."""

    __visit_name__ = 'default_generator'

    is_sequence = False
    is_server_default = False
    column = None

    def __init__(self, for_update=False):
        self.for_update = for_update

    def _set_parent(self, column):
        # register on the parent column, as either the ON UPDATE or the
        # INSERT-time default depending on configuration
        self.column = column
        attr = 'onupdate' if self.for_update else 'default'
        setattr(self.column, attr, self)

    def execute(self, bind=None, **kwargs):
        """Execute this default against an engine or connection."""
        connectable = bind if bind is not None else _bind_or_error(self)
        return connectable._execute_default(self, **kwargs)

    @property
    def bind(self):
        """Return the connectable associated with this default."""
        column = getattr(self, 'column', None)
        if column is None:
            return None
        return column.table.bind
-
-
class ColumnDefault(DefaultGenerator):
    """A plain default value on a column.

    This could correspond to a constant, a callable function,
    or a SQL clause.

    :class:`.ColumnDefault` is generated automatically
    whenever the ``default``, ``onupdate`` arguments of
    :class:`.Column` are used. A :class:`.ColumnDefault`
    can be passed positionally as well.

    For example, the following::

        Column('foo', Integer, default=50)

    Is equivalent to::

        Column('foo', Integer, ColumnDefault(50))


    """

    def __init__(self, arg, **kwargs):
        """Construct a new :class:`.ColumnDefault`.


        :param arg: argument representing the default value.
            May be one of the following:

            * a plain non-callable Python value, such as a
              string, integer, boolean, or other simple type.
              The default value will be used as is each time.
            * a SQL expression, that is one which derives from
              :class:`.ColumnElement`. The SQL expression will
              be rendered into the INSERT or UPDATE statement,
              or in the case of a primary key column when
              RETURNING is not used may be
              pre-executed before an INSERT within a SELECT.
            * A Python callable. The function will be invoked for each
              new row subject to an INSERT or UPDATE.
              The callable must accept exactly
              zero or one positional arguments. The one-argument form
              will receive an instance of the :class:`.ExecutionContext`,
              which provides contextual information as to the current
              :class:`.Connection` in use as well as the current
              statement and parameters.

        """
        super(ColumnDefault, self).__init__(**kwargs)
        if isinstance(arg, FetchedValue):
            raise exc.ArgumentError(
                "ColumnDefault may not be a server-side default type.")
        if util.callable(arg):
            # normalize zero-arg callables to the one-arg (context) form
            arg = self._maybe_wrap_callable(arg)
        self.arg = arg

    @util.memoized_property
    def is_callable(self):
        return util.callable(self.arg)

    @util.memoized_property
    def is_clause_element(self):
        return isinstance(self.arg, expression.ClauseElement)

    @util.memoized_property
    def is_scalar(self):
        return not self.is_callable and \
            not self.is_clause_element and \
            not self.is_sequence

    def _maybe_wrap_callable(self, fn):
        """Wrap callables that don't accept a context.

        The alternative here is to require that
        a simple callable passed to "default" would need
        to be of the form "default=lambda ctx: datetime.now".
        That is the more "correct" way to go, but the case
        of using a zero-arg callable for "default" is so
        much more prominent than the context-specific one
        I'm having trouble justifying putting that inconvenience
        on everyone.

        """
        # locate something getargspec() can introspect
        if inspect.isfunction(fn):
            inspectable = fn
        elif inspect.isclass(fn):
            inspectable = fn.__init__
        elif hasattr(fn, '__call__'):
            inspectable = fn.__call__
        else:
            # probably not inspectable, try anyways.
            inspectable = fn
        try:
            argspec = inspect.getargspec(inspectable)
        except TypeError:
            return lambda ctx: fn()

        # argspec[3] is the defaults tuple (None when absent); the
        # and/or idiom yields 0 in that case
        defaulted = argspec[3] is not None and len(argspec[3]) or 0
        positionals = len(argspec[0]) - defaulted

        # Py3K compat - no unbound methods
        if inspect.ismethod(inspectable) or inspect.isclass(fn):
            positionals -= 1

        if positionals == 0:
            return lambda ctx: fn()
        elif positionals == 1:
            return fn
        else:
            raise exc.ArgumentError(
                "ColumnDefault Python function takes zero or one "
                "positional arguments")

    def _visit_name(self):
        # visitor name varies by role so the DDL/SQL compilers can
        # distinguish INSERT defaults from ON UPDATE defaults
        if self.for_update:
            return "column_onupdate"
        else:
            return "column_default"
    __visit_name__ = property(_visit_name)

    def __repr__(self):
        return "ColumnDefault(%r)" % self.arg
-
-
class Sequence(DefaultGenerator):
    """Represents a named database sequence.

    The :class:`.Sequence` object represents the name and configurational
    parameters of a database sequence. It also represents
    a construct that can be "executed" by a SQLAlchemy :class:`.Engine`
    or :class:`.Connection`, rendering the appropriate "next value" function
    for the target database and returning a result.

    The :class:`.Sequence` is typically associated with a primary key column::

        some_table = Table('some_table', metadata,
            Column('id', Integer, Sequence('some_table_seq'), primary_key=True)
        )

    When CREATE TABLE is emitted for the above :class:`.Table`, if the
    target platform supports sequences, a CREATE SEQUENCE statement will
    be emitted as well. For platforms that don't support sequences,
    the :class:`.Sequence` construct is ignored.

    See also: :class:`.CreateSequence` :class:`.DropSequence`

    """

    __visit_name__ = 'sequence'

    is_sequence = True

    def __init__(self, name, start=None, increment=None, schema=None,
                 optional=False, quote=None, metadata=None,
                 quote_schema=None,
                 for_update=False):
        """Construct a :class:`.Sequence` object.

        :param name: The name of the sequence.
        :param start: the starting index of the sequence. This value is
            used when the CREATE SEQUENCE command is emitted to the database
            as the value of the "START WITH" clause. If ``None``, the
            clause is omitted, which on most platforms indicates a starting
            value of 1.
        :param increment: the increment value of the sequence. This
            value is used when the CREATE SEQUENCE command is emitted to
            the database as the value of the "INCREMENT BY" clause. If ``None``,
            the clause is omitted, which on most platforms indicates an
            increment of 1.
        :param schema: Optional schema name for the sequence, if located
            in a schema other than the default.
        :param optional: boolean value, when ``True``, indicates that this
            :class:`.Sequence` object only needs to be explicitly generated
            on backends that don't provide another way to generate primary
            key identifiers. Currently, it essentially means, "don't create
            this sequence on the Postgresql backend, where the SERIAL keyword
            creates a sequence for us automatically".
        :param quote: boolean value, when ``True`` or ``False``, explicitly
            forces quoting of the schema name on or off. When left at its
            default of ``None``, normal quoting rules based on casing and
            reserved words take place.
        :param metadata: optional :class:`.MetaData` object which will be
            associated with this :class:`.Sequence`. A :class:`.Sequence`
            that is associated with a :class:`.MetaData` gains access to the
            ``bind`` of that :class:`.MetaData`, meaning the
            :meth:`.Sequence.create` and :meth:`.Sequence.drop` methods will
            make usage of that engine automatically.

            .. versionchanged:: 0.7
                Additionally, the appropriate CREATE SEQUENCE/
                DROP SEQUENCE DDL commands will be emitted corresponding to
                this :class:`.Sequence` when :meth:`.MetaData.create_all` and
                :meth:`.MetaData.drop_all` are invoked.

            Note that when a :class:`.Sequence` is applied to a
            :class:`.Column`, the :class:`.Sequence` is automatically
            associated with the :class:`.MetaData` object of that column's
            parent :class:`.Table`, when that association is made. The
            :class:`.Sequence` will then be subject to automatic
            CREATE SEQUENCE/DROP SEQUENCE corresponding to when the
            :class:`.Table` object itself is created or dropped, rather than
            that of the :class:`.MetaData` object overall.
        :param for_update: Indicates this :class:`.Sequence`, when associated
            with a :class:`.Column`, should be invoked for UPDATE statements
            on that column's table, rather than for INSERT statements, when
            no value is otherwise present for that column in the statement.

        """
        super(Sequence, self).__init__(for_update=for_update)
        self.name = name
        self.start = start
        self.increment = increment
        self.optional = optional
        self.quote = quote
        if metadata is not None and schema is None and metadata.schema:
            # inherit the MetaData-level schema; note the local ``schema``
            # name is rebound here so that _get_table_key() below sees the
            # inherited value
            self.schema = schema = metadata.schema
            self.quote_schema = metadata.quote_schema
        else:
            self.schema = schema
            self.quote_schema = quote_schema
        self.metadata = metadata
        # registry key used in MetaData._sequences
        self._key = _get_table_key(name, schema)
        if metadata:
            self._set_metadata(metadata)

    @util.memoized_property
    def is_callable(self):
        return False

    @util.memoized_property
    def is_clause_element(self):
        return False

    def next_value(self):
        """Return a :class:`.next_value` function element
        which will render the appropriate increment function
        for this :class:`.Sequence` within any SQL expression.

        """
        return expression.func.next_value(self, bind=self.bind)

    def _set_parent(self, column):
        super(Sequence, self)._set_parent(column)
        # also pick up the MetaData once the column joins a Table
        column._on_table_attach(self._set_table)

    def _set_table(self, column, table):
        self._set_metadata(table.metadata)

    def _set_metadata(self, metadata):
        self.metadata = metadata
        self.metadata._sequences[self._key] = self

    @property
    def bind(self):
        # unlike DefaultGenerator.bind, a Sequence binds through its
        # MetaData rather than through a parent column's table
        if self.metadata:
            return self.metadata.bind
        else:
            return None

    def create(self, bind=None, checkfirst=True):
        """Creates this sequence in the database."""

        if bind is None:
            bind = _bind_or_error(self)
        bind._run_visitor(ddl.SchemaGenerator,
                          self,
                          checkfirst=checkfirst)

    def drop(self, bind=None, checkfirst=True):
        """Drops this sequence from the database."""

        if bind is None:
            bind = _bind_or_error(self)
        bind._run_visitor(ddl.SchemaDropper,
                          self,
                          checkfirst=checkfirst)

    def _not_a_column_expr(self):
        # override the generic _NotAColumnExpr message with a hint
        # pointing at func.next_value()
        raise exc.InvalidRequestError(
            "This %s cannot be used directly "
            "as a column expression. Use func.next_value(sequence) "
            "to produce a 'next value' function that's usable "
            "as a column element."
            % self.__class__.__name__)
-
-
class FetchedValue(_NotAColumnExpr, events.SchemaEventTarget):
    """A marker for a transparent database-side default.

    Use :class:`.FetchedValue` when the database is configured
    to provide some automatic default for a column.

    E.g.::

        Column('foo', Integer, FetchedValue())

    Would indicate that some trigger or default generator
    will create a new value for the ``foo`` column during an
    INSERT.

    .. seealso::

        :ref:`triggered_columns`

    """
    is_server_default = True
    reflected = False
    has_argument = False

    def __init__(self, for_update=False):
        self.for_update = for_update

    def _as_for_update(self, for_update):
        # return self unchanged when the flag already matches;
        # otherwise hand back a re-flagged clone
        if for_update == self.for_update:
            return self
        return self._clone(for_update)

    def _clone(self, for_update):
        # shallow-copy without running __init__; drop any column
        # association so the clone starts unattached
        duplicate = self.__class__.__new__(self.__class__)
        duplicate.__dict__.update(self.__dict__)
        duplicate.__dict__.pop('column', None)
        duplicate.for_update = for_update
        return duplicate

    def _set_parent(self, column):
        # register as the column's server-side default, for either the
        # UPDATE or the INSERT role
        self.column = column
        attr = 'server_onupdate' if self.for_update else 'server_default'
        setattr(self.column, attr, self)

    def __repr__(self):
        return util.generic_repr(self)

inspection._self_inspects(FetchedValue)
-
-
class DefaultClause(FetchedValue):
    """A DDL-specified DEFAULT column value.

    :class:`.DefaultClause` is a :class:`.FetchedValue`
    that also generates a "DEFAULT" clause when
    "CREATE TABLE" is emitted.

    :class:`.DefaultClause` is generated automatically
    whenever the ``server_default``, ``server_onupdate`` arguments of
    :class:`.Column` are used. A :class:`.DefaultClause`
    can be passed positionally as well.

    For example, the following::

        Column('foo', Integer, server_default="50")

    Is equivalent to::

        Column('foo', Integer, DefaultClause("50"))

    """

    has_argument = True

    def __init__(self, arg, for_update=False, _reflected=False):
        # validate up front: only plain strings and SQL expression
        # constructs are acceptable as a server-side default argument
        accepted_types = (util.string_types[0],
                          expression.ClauseElement,
                          expression.TextClause)
        util.assert_arg_type(arg, accepted_types, 'arg')
        super(DefaultClause, self).__init__(for_update)
        self.arg = arg
        self.reflected = _reflected

    def __repr__(self):
        return "DefaultClause(%r, for_update=%r)" % (
            self.arg, self.for_update)
-
-
class PassiveDefault(DefaultClause):
    """A DDL-specified DEFAULT column value.

    .. deprecated:: 0.6
        :class:`.PassiveDefault` is deprecated.
        Use :class:`.DefaultClause`.
    """

    @util.deprecated("0.6",
                     ":class:`.PassiveDefault` is deprecated. "
                     "Use :class:`.DefaultClause`.",
                     False)
    def __init__(self, *arg, **kw):
        # pure legacy alias; delegate construction entirely to
        # DefaultClause
        super(PassiveDefault, self).__init__(*arg, **kw)
-
-
class Constraint(SchemaItem):
    """A table-level SQL constraint."""

    __visit_name__ = 'constraint'

    def __init__(self, name=None, deferrable=None, initially=None,
                 _create_rule=None,
                 **kw):
        """Create a SQL constraint.

        :param name:
          Optional, the in-database name of this ``Constraint``.

        :param deferrable:
          Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
          issuing DDL for this constraint.

        :param initially:
          Optional string. If set, emit INITIALLY <value> when issuing DDL
          for this constraint.

        :param _create_rule:
          a callable which is passed the DDLCompiler object during
          compilation; returns True or False to signal inline generation
          of this Constraint.

          Unlike the AddConstraint/DropConstraint DDL constructs, which
          receive a database connection when DDL is issued, _create_rule
          is invoked during any CREATE TABLE compilation, where there may
          be no transaction or connection in progress. It nonetheless
          permits conditional rendering of the constraint on backends
          that cannot add constraints via ALTER TABLE, which currently
          includes SQLite.

          _create_rule is used by some types to create constraints; its
          call signature is private and subject to change at any time.

        :param \**kw:
          Dialect-specific keyword parameters, see the documentation
          for various dialects and constraints regarding options here.

        """

        self.name = name
        self.deferrable = deferrable
        self.initially = initially
        self._create_rule = _create_rule
        util.set_creation_order(self)
        # reject unknown dialect-prefixed keyword arguments
        _validate_dialect_kwargs(kw, self.__class__.__name__)
        self.kwargs = kw

    @property
    def table(self):
        """The :class:`.Table` this constraint is attached to; raises if
        the constraint is not table-bound."""
        try:
            parent = self.parent
        except AttributeError:
            pass
        else:
            if isinstance(parent, Table):
                return parent
        raise exc.InvalidRequestError(
            "This constraint is not bound to a table. Did you "
            "mean to call table.append_constraint(constraint) ?")

    def _set_parent(self, parent):
        self.parent = parent
        parent.constraints.add(self)

    def copy(self, **kw):
        # concrete subclasses implement their own copy semantics
        raise NotImplementedError()
-
-
class ColumnCollectionMixin(object):
    """Mixin holding a pending list of column arguments which are
    resolved into a :class:`.ColumnCollection` once a parent
    :class:`.Table` is available."""

    def __init__(self, *columns):
        self.columns = expression.ColumnCollection()
        pending = [_to_schema_column_or_string(c) for c in columns]
        self._pending_colargs = pending
        # if the first argument is already a table-bound Column, we can
        # attach to that table immediately
        if pending:
            first = pending[0]
            if isinstance(first, Column) and isinstance(first.table, Table):
                self._set_parent_with_dispatch(first.table)

    def _set_parent(self, table):
        # resolve string names against the table and populate the
        # ColumnCollection
        for item in self._pending_colargs:
            column = table.c[item] \
                if isinstance(item, util.string_types) else item
            self.columns.add(column)
-
-
class ColumnCollectionConstraint(ColumnCollectionMixin, Constraint):
    """A constraint that proxies a ColumnCollection."""

    def __init__(self, *columns, **kw):
        """
        :param \*columns:
          A sequence of column names or Column objects.

        :param name:
          Optional, the in-database name of this constraint.

        :param deferrable:
          Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
          issuing DDL for this constraint.

        :param initially:
          Optional string. If set, emit INITIALLY <value> when issuing DDL
          for this constraint.

        """
        ColumnCollectionMixin.__init__(self, *columns)
        Constraint.__init__(self, **kw)

    def _set_parent(self, table):
        # attach both the column collection and the constraint proper
        ColumnCollectionMixin._set_parent(self, table)
        Constraint._set_parent(self, table)

    def __contains__(self, x):
        return x in self.columns

    def copy(self, **kw):
        """Return an unattached copy of this constraint, referencing the
        same column names."""
        duplicate = self.__class__(
            *self.columns.keys(),
            name=self.name,
            deferrable=self.deferrable,
            initially=self.initially)
        duplicate.dispatch._update(self.dispatch)
        return duplicate

    def contains_column(self, col):
        return self.columns.contains_column(col)

    def __iter__(self):
        # walk the ColumnCollection's underlying OrderedDict directly
        # (ColumnCollection -> OrderedProperties -> OrderedDict) rather
        # than going through its __iter__
        data = self.columns._data
        return (data[key] for key in data._list)

    def __len__(self):
        return len(self.columns._data)
-
-
class CheckConstraint(Constraint):
    """A table- or column-level CHECK constraint.

    Can be included in the definition of a Table or Column.
    """

    def __init__(self, sqltext, name=None, deferrable=None,
                 initially=None, table=None, _create_rule=None,
                 _autoattach=True):
        """Construct a CHECK constraint.

        :param sqltext:
          A string containing the constraint definition, which will be used
          verbatim, or a SQL expression construct.

        :param name:
          Optional, the in-database name of the constraint.

        :param deferrable:
          Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
          issuing DDL for this constraint.

        :param initially:
          Optional string. If set, emit INITIALLY <value> when issuing DDL
          for this constraint.

        """

        super(CheckConstraint, self).\
            __init__(name, deferrable, initially, _create_rule)
        self.sqltext = expression._literal_as_text(sqltext)
        if table is not None:
            self._set_parent_with_dispatch(table)
        elif _autoattach:
            # no explicit table given: auto-attach to the single Table
            # referenced within the expression, when exactly one exists
            cols = sqlutil.find_columns(self.sqltext)
            tables = set([c.table for c in cols
                          if isinstance(c.table, Table)])
            if len(tables) == 1:
                self._set_parent_with_dispatch(
                    tables.pop())

    def __visit_name__(self):
        # the visitor name depends on whether we were attached at the
        # table level or at the column level
        if isinstance(self.parent, Table):
            return "check_constraint"
        else:
            return "column_check_constraint"
    __visit_name__ = property(__visit_name__)

    def copy(self, target_table=None, **kw):
        """Return a copy of this constraint, optionally remapping its
        column references onto ``target_table``."""
        if target_table is not None:
            # returning None from ``replace`` leaves a node unchanged;
            # only columns belonging to our table are swapped out
            def replace(col):
                if self.table.c.contains_column(col):
                    return target_table.c[col.key]
                else:
                    return None
            sqltext = visitors.replacement_traverse(self.sqltext, {}, replace)
        else:
            sqltext = self.sqltext
        c = CheckConstraint(sqltext,
                            name=self.name,
                            initially=self.initially,
                            deferrable=self.deferrable,
                            _create_rule=self._create_rule,
                            table=target_table,
                            _autoattach=False)
        c.dispatch._update(self.dispatch)
        return c
-
-
class ForeignKeyConstraint(Constraint):
    """A table-level FOREIGN KEY constraint.

    Defines a single column or composite FOREIGN KEY ... REFERENCES
    constraint. For a no-frills, single column foreign key, adding a
    :class:`.ForeignKey` to the definition of a :class:`.Column` is a shorthand
    equivalent for an unnamed, single column :class:`.ForeignKeyConstraint`.

    Examples of foreign key configuration are in :ref:`metadata_foreignkeys`.

    """
    __visit_name__ = 'foreign_key_constraint'

    def __init__(self, columns, refcolumns, name=None, onupdate=None,
            ondelete=None, deferrable=None, initially=None, use_alter=False,
            link_to_name=False, match=None, table=None):
        """Construct a composite-capable FOREIGN KEY.

        :param columns: A sequence of local column names. The named columns
          must be defined and present in the parent Table. The names should
          match the ``key`` given to each column (defaults to the name) unless
          ``link_to_name`` is True.

        :param refcolumns: A sequence of foreign column names or Column
          objects. The columns must all be located within the same Table.

        :param name: Optional, the in-database name of the key.

        :param onupdate: Optional string. If set, emit ON UPDATE <value> when
          issuing DDL for this constraint. Typical values include CASCADE,
          DELETE and RESTRICT.

        :param ondelete: Optional string. If set, emit ON DELETE <value> when
          issuing DDL for this constraint. Typical values include CASCADE,
          DELETE and RESTRICT.

        :param deferrable: Optional bool. If set, emit DEFERRABLE or NOT
          DEFERRABLE when issuing DDL for this constraint.

        :param initially: Optional string. If set, emit INITIALLY <value> when
          issuing DDL for this constraint.

        :param link_to_name: if True, the string name given in ``column`` is
          the rendered name of the referenced column, not its locally assigned
          ``key``.

        :param use_alter: If True, do not emit the DDL for this constraint as
          part of the CREATE TABLE definition. Instead, generate it via an
          ALTER TABLE statement issued after the full collection of tables
          have been created, and drop it via an ALTER TABLE statement before
          the full collection of tables are dropped. This is shorthand for the
          usage of :class:`.AddConstraint` and :class:`.DropConstraint` applied
          as "after-create" and "before-drop" events on the MetaData object.
          This is normally used to generate/drop constraints on objects that
          are mutually dependent on each other.

        :param match: Optional string. If set, emit MATCH <value> when issuing
          DDL for this constraint. Typical values include SIMPLE, PARTIAL
          and FULL.

        :raises exc.ArgumentError: if ``use_alter`` is given without a name,
          or if ``columns`` and ``refcolumns`` differ in length.

        """
        super(ForeignKeyConstraint, self).\
            __init__(name, deferrable, initially)

        self.onupdate = onupdate
        self.ondelete = ondelete
        self.link_to_name = link_to_name
        if self.name is None and use_alter:
            raise exc.ArgumentError("Alterable Constraint requires a name")
        self.use_alter = use_alter
        self.match = match

        # A length mismatch used to be silently truncated by zip() below,
        # dropping constraint columns without any error; reject it
        # explicitly instead.
        if len(columns) != len(refcolumns):
            raise exc.ArgumentError(
                "ForeignKeyConstraint number of constrained columns must "
                "match the number of referenced columns.")

        self._elements = util.OrderedDict()

        # standalone ForeignKeyConstraint - create
        # associated ForeignKey objects which will be applied to hosted
        # Column objects (in col.foreign_keys), either now or when attached
        # to the Table for string-specified names
        for col, refcol in zip(columns, refcolumns):
            self._elements[col] = ForeignKey(
                refcol,
                _constraint=self,
                name=self.name,
                onupdate=self.onupdate,
                ondelete=self.ondelete,
                use_alter=self.use_alter,
                link_to_name=self.link_to_name,
                match=self.match
            )

        if table is not None:
            self._set_parent_with_dispatch(table)
        elif columns and \
                isinstance(columns[0], Column) and \
                columns[0].table is not None:
            # Column objects already bound to a Table allow immediate
            # attachment without an explicit ``table`` argument.
            self._set_parent_with_dispatch(columns[0].table)

    def _validate_dest_table(self, table):
        # All referenced columns must live in a single remote table;
        # a key of None means "not yet resolvable" and is tolerated.
        table_keys = set([elem._table_key() for elem in self._elements.values()])
        if None not in table_keys and len(table_keys) > 1:
            elem0, elem1 = sorted(table_keys)[0:2]
            raise exc.ArgumentError(
                'ForeignKeyConstraint on %s(%s) refers to '
                'multiple remote tables: %s and %s' % (
                    table.fullname,
                    self._col_description,
                    elem0,
                    elem1
                ))

    @property
    def _col_description(self):
        # Comma-separated local column names, for error messages.
        return ", ".join(self._elements)

    @property
    def columns(self):
        # Local (constrained) columns or names, in declaration order.
        return list(self._elements)

    @property
    def elements(self):
        # The per-column ForeignKey objects, in declaration order.
        return list(self._elements.values())

    def _set_parent(self, table):
        super(ForeignKeyConstraint, self)._set_parent(table)

        self._validate_dest_table(table)

        for col, fk in self._elements.items():
            # string-specified column names now get
            # resolved to Column objects
            if isinstance(col, util.string_types):
                try:
                    col = table.c[col]
                except KeyError:
                    raise exc.ArgumentError(
                        "Can't create ForeignKeyConstraint "
                        "on table '%s': no column "
                        "named '%s' is present." % (table.description, col))

            if not hasattr(fk, 'parent') or \
                    fk.parent is not col:
                fk._set_parent_with_dispatch(col)

        if self.use_alter:
            # Defer DDL emission to ALTER TABLE statements surrounding the
            # bulk create/drop, but only on dialects that support ALTER.
            def supports_alter(ddl, event, schema_item, bind, **kw):
                return table in set(kw['tables']) and \
                    bind.dialect.supports_alter

            event.listen(table.metadata, "after_create",
                         AddConstraint(self, on=supports_alter))
            event.listen(table.metadata, "before_drop",
                         DropConstraint(self, on=supports_alter))

    def copy(self, schema=None, **kw):
        """Return a copy of this constraint, re-deriving column specs
        (optionally against an alternate *schema*)."""
        fkc = ForeignKeyConstraint(
            [x.parent.key for x in self._elements.values()],
            [x._get_colspec(schema=schema) for x in self._elements.values()],
            name=self.name,
            onupdate=self.onupdate,
            ondelete=self.ondelete,
            use_alter=self.use_alter,
            deferrable=self.deferrable,
            initially=self.initially,
            link_to_name=self.link_to_name,
            match=self.match
        )
        fkc.dispatch._update(self.dispatch)
        return fkc
-
-
class PrimaryKeyConstraint(ColumnCollectionConstraint):
    """A table-level PRIMARY KEY constraint.

    Defines a single column or composite PRIMARY KEY constraint. For a
    no-frills primary key, adding ``primary_key=True`` to one or more
    ``Column`` definitions is a shorthand equivalent for an unnamed single- or
    multiple-column PrimaryKeyConstraint.
    """

    __visit_name__ = 'primary_key_constraint'

    def _set_parent(self, table):
        super(PrimaryKeyConstraint, self)._set_parent(table)

        # Displace any primary key constraint the table already carries,
        # installing this one in its place.
        existing = table.primary_key
        if existing in table.constraints:
            table.constraints.remove(existing)
        table.primary_key = self
        table.constraints.add(self)

        # Individual columns are flagged as primary-key members too.
        for column in self.columns:
            column.primary_key = True

    def _replace(self, col):
        # Swap in a column with the same key, preserving ordering.
        self.columns.replace(col)
-
-
class UniqueConstraint(ColumnCollectionConstraint):
    """A table-level UNIQUE constraint.

    Defines a single column or composite UNIQUE constraint. For a no-frills,
    single column constraint, adding ``unique=True`` to the ``Column``
    definition is a shorthand equivalent for an unnamed, single column
    UniqueConstraint.
    """

    # All column-handling behavior is inherited from
    # ColumnCollectionConstraint; only the compiler visit name differs.
    __visit_name__ = 'unique_constraint'
-
-
class Index(ColumnCollectionMixin, SchemaItem):
    """A table-level INDEX.

    Defines a composite (one or more column) INDEX. For a no-frills, single
    column index, adding ``index=True`` to the ``Column`` definition is
    a shorthand equivalent for an unnamed, single column :class:`.Index`.

    .. seealso::

        :ref:`schema_indexes` - General information on :class:`.Index`.

        :ref:`postgresql_indexes` - PostgreSQL-specific options available for the
        :class:`.Index` construct.

        :ref:`mysql_indexes` - MySQL-specific options available for the
        :class:`.Index` construct.

        :ref:`mssql_indexes` - MSSQL-specific options available for the
        :class:`.Index` construct.

    """

    __visit_name__ = 'index'

    def __init__(self, name, *expressions, **kw):
        """Construct an index object.

        :param name:
          The name of the index

        :param \*expressions:
          Column expressions to include in the index. The expressions
          are normally instances of :class:`.Column`, but may also
          be arbitrary SQL expressions which ultimately refer to a
          :class:`.Column`.

          .. versionadded:: 0.8 :class:`.Index` supports SQL expressions as
             well as plain columns.

        :param unique:
            Defaults to False: create a unique index.

        :param \**kw:
            Other keyword arguments may be interpreted by specific dialects.

        """
        self.table = None

        # For each expression, extract the first Column it references so
        # ColumnCollectionMixin bookkeeping (and auto-attach) can work;
        # non-ClauseElement values (e.g. string names) pass through as-is.
        columns = []
        for expr in expressions:
            if not isinstance(expr, expression.ClauseElement):
                columns.append(expr)
            else:
                cols = []
                visitors.traverse(expr, {}, {'column': cols.append})
                if cols:
                    columns.append(cols[0])
                else:
                    columns.append(expr)

        # The raw expressions are retained in parallel with the extracted
        # columns; _set_parent() later pairs them back up by position.
        self.expressions = expressions

        # will call _set_parent() if table-bound column
        # objects are present
        ColumnCollectionMixin.__init__(self, *columns)

        self.name = name
        self.unique = kw.pop('unique', False)
        # remaining keyword arguments are dialect-specific options
        self.kwargs = kw

    def _set_parent(self, table):
        ColumnCollectionMixin._set_parent(self, table)

        # An Index may be attached to exactly one table.
        if self.table is not None and table is not self.table:
            raise exc.ArgumentError(
                "Index '%s' is against table '%s', and "
                "cannot be associated with table '%s'." % (
                    self.name,
                    self.table.description,
                    table.description
                )
            )
        self.table = table
        for c in self.columns:
            if c.table != self.table:
                raise exc.ArgumentError(
                    "Column '%s' is not part of table '%s'." %
                    (c, self.table.description)
                )
        table.indexes.add(self)

        # Re-pair: keep original SQL expressions where given, otherwise
        # substitute the now-resolved Column object at the same position.
        self.expressions = [
            expr if isinstance(expr, expression.ClauseElement)
            else colexpr
            for expr, colexpr in zip(self.expressions, self.columns)
        ]

    @property
    def bind(self):
        """Return the connectable associated with this Index."""

        return self.table.bind

    def create(self, bind=None):
        """Issue a ``CREATE`` statement for this
        :class:`.Index`, using the given :class:`.Connectable`
        for connectivity.

        See also :meth:`.MetaData.create_all`.

        """
        if bind is None:
            bind = _bind_or_error(self)
        bind._run_visitor(ddl.SchemaGenerator, self)
        return self

    def drop(self, bind=None):
        """Issue a ``DROP`` statement for this
        :class:`.Index`, using the given :class:`.Connectable`
        for connectivity.

        See also :meth:`.MetaData.drop_all`.

        """
        if bind is None:
            bind = _bind_or_error(self)
        bind._run_visitor(ddl.SchemaDropper, self)

    def __repr__(self):
        return 'Index(%s)' % (
            ", ".join(
                [repr(self.name)] +
                [repr(c) for c in self.columns] +
                (self.unique and ["unique=True"] or [])
            ))
-
-
class MetaData(SchemaItem):
    """A collection of :class:`.Table` objects and their associated schema
    constructs.

    Holds a collection of :class:`.Table` objects as well as
    an optional binding to an :class:`.Engine` or
    :class:`.Connection`. If bound, the :class:`.Table` objects
    in the collection and their columns may participate in implicit SQL
    execution.

    The :class:`.Table` objects themselves are stored in the
    ``metadata.tables`` dictionary.

    The ``bind`` property may be assigned to dynamically. A common pattern is
    to start unbound and then bind later when an engine is available::

        metadata = MetaData()
        # define tables
        Table('mytable', metadata, ...)
        # connect to an engine later, perhaps after loading a URL from a
        # configuration file
        metadata.bind = an_engine

    MetaData is a thread-safe object after tables have been explicitly defined
    or loaded via reflection.

    See also:

    :ref:`metadata_describing` - Introduction to database metadata

    .. index::
       single: thread safety; MetaData

    """

    __visit_name__ = 'metadata'

    def __init__(self, bind=None, reflect=False, schema=None,
                 quote_schema=None):
        """Create a new MetaData object.

        :param bind:
          An Engine or Connection to bind to. May also be a string or URL
          instance, these are passed to create_engine() and this MetaData will
          be bound to the resulting engine.

        :param reflect:
          Optional, automatically load all tables from the bound database.
          Defaults to False. ``bind`` is required when this option is set.

          .. deprecated:: 0.8
                Please use the :meth:`.MetaData.reflect` method.

        :param schema:
           The default schema to use for the :class:`.Table`,
           :class:`.Sequence`, and other objects associated with this
           :class:`.MetaData`. Defaults to ``None``.

        :param quote_schema:
            Sets the ``quote_schema`` flag for those :class:`.Table`,
            :class:`.Sequence`, and other objects which make usage of the
            local ``schema`` name.

        .. versionadded:: 0.7.4
            ``schema`` and ``quote_schema`` parameters.

        """
        # ``tables`` is an immutabledict; mutation happens only through
        # _add_table/_remove_table below, via dict.__setitem__/pop.
        self.tables = util.immutabledict()
        self.schema = schema
        self.quote_schema = quote_schema
        self._schemas = set()
        self._sequences = {}
        self._fk_memos = collections.defaultdict(list)

        self.bind = bind
        if reflect:
            # typo fixed: message previously read "is deprecate"
            util.warn("reflect=True is deprecated; please "
                      "use the reflect() method.")
            if not bind:
                raise exc.ArgumentError(
                    "A bind must be supplied in conjunction "
                    "with reflect=True")
            self.reflect()

    def __repr__(self):
        return 'MetaData(bind=%r)' % self.bind

    def __contains__(self, table_or_key):
        if not isinstance(table_or_key, util.string_types):
            table_or_key = table_or_key.key
        return table_or_key in self.tables

    def _add_table(self, name, schema, table):
        # Bypass immutabledict's immutability deliberately; this is the
        # single sanctioned mutation point for ``self.tables``.
        key = _get_table_key(name, schema)
        dict.__setitem__(self.tables, key, table)
        if schema:
            self._schemas.add(schema)

    def _remove_table(self, name, schema):
        key = _get_table_key(name, schema)
        removed = dict.pop(self.tables, key, None)
        if removed is not None:
            for fk in removed.foreign_keys:
                fk._remove_from_metadata(self)
        if self._schemas:
            # Recompute the set of known schemas from scratch, since the
            # removed table may have been the last one in its schema.
            self._schemas = set([t.schema
                                 for t in self.tables.values()
                                 if t.schema is not None])

    def __getstate__(self):
        # The bind is intentionally not pickled; see __setstate__.
        return {'tables': self.tables,
                'schema': self.schema,
                'quote_schema': self.quote_schema,
                'schemas': self._schemas,
                'sequences': self._sequences,
                'fk_memos': self._fk_memos}

    def __setstate__(self, state):
        self.tables = state['tables']
        self.schema = state['schema']
        self.quote_schema = state['quote_schema']
        self._bind = None
        self._sequences = state['sequences']
        self._schemas = state['schemas']
        self._fk_memos = state['fk_memos']

    def is_bound(self):
        """True if this MetaData is bound to an Engine or Connection."""

        return self._bind is not None

    def bind(self):
        """An :class:`.Engine` or :class:`.Connection` to which this
        :class:`.MetaData` is bound.

        Typically, a :class:`.Engine` is assigned to this attribute
        so that "implicit execution" may be used, or alternatively
        as a means of providing engine binding information to an
        ORM :class:`.Session` object::

            engine = create_engine("someurl://")
            metadata.bind = engine

        .. seealso::

           :ref:`dbengine_implicit` - background on "bound metadata"

        """
        return self._bind

    def _bind_to(self, bind):
        """Bind this MetaData to an Engine, Connection, string or URL."""

        if isinstance(bind, util.string_types + (url.URL, )):
            # local import avoids a circular dependency at module load
            from sqlalchemy import create_engine
            self._bind = create_engine(bind)
        else:
            self._bind = bind
    # legacy-style property assembly; getter defined above as ``bind``
    bind = property(bind, _bind_to)

    def clear(self):
        """Clear all Table objects from this MetaData."""

        dict.clear(self.tables)
        self._schemas.clear()
        self._fk_memos.clear()

    def remove(self, table):
        """Remove the given Table object from this MetaData."""

        self._remove_table(table.name, table.schema)

    @property
    def sorted_tables(self):
        """Returns a list of :class:`.Table` objects sorted in order of
        foreign key dependency.

        The sorting will place :class:`.Table` objects that have dependencies
        first, before the dependencies themselves, representing the
        order in which they can be created. To get the order in which
        the tables would be dropped, use the ``reversed()`` Python built-in.

        .. seealso::

            :meth:`.Inspector.sorted_tables`

        """
        return sqlutil.sort_tables(self.tables.values())

    def reflect(self, bind=None, schema=None, views=False, only=None):
        """Load all available table definitions from the database.

        Automatically creates ``Table`` entries in this ``MetaData`` for any
        table available in the database but not yet present in the
        ``MetaData``. May be called multiple times to pick up tables recently
        added to the database, however no special action is taken if a table
        in this ``MetaData`` no longer exists in the database.

        :param bind:
          A :class:`.Connectable` used to access the database; if None, uses
          the existing bind on this ``MetaData``, if any.

        :param schema:
          Optional, query and reflect tables from an alternate schema.
          If None, the schema associated with this :class:`.MetaData`
          is used, if any.

        :param views:
          If True, also reflect views.

        :param only:
          Optional. Load only a sub-set of available named tables. May be
          specified as a sequence of names or a callable.

          If a sequence of names is provided, only those tables will be
          reflected. An error is raised if a table is requested but not
          available. Named tables already present in this ``MetaData`` are
          ignored.

          If a callable is provided, it will be used as a boolean predicate to
          filter the list of potential table names. The callable is called
          with a table name and this ``MetaData`` instance as positional
          arguments and should return a true value for any table to reflect.

        """
        if bind is None:
            bind = _bind_or_error(self)

        with bind.connect() as conn:

            reflect_opts = {
                'autoload': True,
                'autoload_with': conn
            }

            if schema is None:
                schema = self.schema

            if schema is not None:
                reflect_opts['schema'] = schema

            available = util.OrderedSet(bind.engine.table_names(schema,
                                                                connection=conn))
            if views:
                available.update(
                    bind.dialect.get_view_names(conn, schema)
                )

            # ``self.tables`` keys are schema-qualified; build the matching
            # qualified names so the membership test below compares equals.
            if schema is not None:
                available_w_schema = util.OrderedSet(["%s.%s" % (schema, name)
                                                      for name in available])
            else:
                available_w_schema = available

            current = set(self.tables)

            if only is None:
                load = [name for name, schname in
                        zip(available, available_w_schema)
                        if schname not in current]
            elif util.callable(only):
                load = [name for name, schname in
                        zip(available, available_w_schema)
                        if schname not in current and only(name, self)]
            else:
                missing = [name for name in only if name not in available]
                if missing:
                    s = schema and (" schema '%s'" % schema) or ''
                    raise exc.InvalidRequestError(
                        'Could not reflect: requested table(s) not available '
                        'in %s%s: (%s)' %
                        (bind.engine.url, s, ', '.join(missing)))
                load = [name for name in only if name not in current]

            for name in load:
                Table(name, self, **reflect_opts)

    def append_ddl_listener(self, event_name, listener):
        """Append a DDL event listener to this ``MetaData``.

        .. deprecated:: 0.7
            See :class:`.DDLEvents`.

        """
        def adapt_listener(target, connection, **kw):
            tables = kw['tables']
            # Pass the event *name* to the legacy listener.  The previous
            # code passed the ``event`` module here by mistake; legacy
            # listeners expect the hyphenated event name string.
            listener(event_name, target, connection, tables=tables)

        event.listen(self, "" + event_name.replace('-', '_'), adapt_listener)

    def create_all(self, bind=None, tables=None, checkfirst=True):
        """Create all tables stored in this metadata.

        Conditional by default, will not attempt to recreate tables already
        present in the target database.

        :param bind:
          A :class:`.Connectable` used to access the
          database; if None, uses the existing bind on this ``MetaData``, if
          any.

        :param tables:
          Optional list of ``Table`` objects, which is a subset of the total
          tables in the ``MetaData`` (others are ignored).

        :param checkfirst:
          Defaults to True, don't issue CREATEs for tables already present
          in the target database.

        """
        if bind is None:
            bind = _bind_or_error(self)
        bind._run_visitor(ddl.SchemaGenerator,
                          self,
                          checkfirst=checkfirst,
                          tables=tables)

    def drop_all(self, bind=None, tables=None, checkfirst=True):
        """Drop all tables stored in this metadata.

        Conditional by default, will not attempt to drop tables not present in
        the target database.

        :param bind:
          A :class:`.Connectable` used to access the
          database; if None, uses the existing bind on this ``MetaData``, if
          any.

        :param tables:
          Optional list of ``Table`` objects, which is a subset of the
          total tables in the ``MetaData`` (others are ignored).

        :param checkfirst:
          Defaults to True, only issue DROPs for tables confirmed to be
          present in the target database.

        """
        if bind is None:
            bind = _bind_or_error(self)
        bind._run_visitor(ddl.SchemaDropper,
                          self,
                          checkfirst=checkfirst,
                          tables=tables)
-
-
class ThreadLocalMetaData(MetaData):
    """A MetaData variant that presents a different ``bind`` in every thread.

    Makes the ``bind`` property of the MetaData a thread-local value, allowing
    this collection of tables to be bound to different ``Engine``
    implementations or connections in each thread.

    The ThreadLocalMetaData starts off bound to None in each thread. Binds
    must be made explicitly by assigning to the ``bind`` property or using
    ``connect()``. You can also re-bind dynamically multiple times per
    thread, just like a regular ``MetaData``.

    """

    __visit_name__ = 'metadata'

    def __init__(self):
        """Construct a ThreadLocalMetaData."""

        # per-thread storage for the current bind
        self.context = util.threading.local()
        # cache of engines created from string/URL binds, shared across
        # threads (keyed by the bind value)
        self.__engines = {}
        super(ThreadLocalMetaData, self).__init__()

    def bind(self):
        """The bound Engine or Connection for this thread.

        This property may be assigned an Engine or Connection, or assigned a
        string or URL to automatically create a basic Engine for this bind
        with ``create_engine()``."""

        return getattr(self.context, '_engine', None)

    def _bind_to(self, bind):
        """Bind to a Connectable in the caller's thread."""

        if isinstance(bind, util.string_types + (url.URL, )):
            try:
                # reuse an engine previously created for this URL/string
                self.context._engine = self.__engines[bind]
            except KeyError:
                from sqlalchemy import create_engine
                e = create_engine(bind)
                self.__engines[bind] = e
                self.context._engine = e
        else:
            # TODO: this is squirrely.  we shouldnt have to hold onto engines
            # in a case like this
            if bind not in self.__engines:
                self.__engines[bind] = bind
            self.context._engine = bind

    # legacy-style property assembly; getter defined above as ``bind``
    bind = property(bind, _bind_to)

    def is_bound(self):
        """True if there is a bind for this thread."""
        return (hasattr(self.context, '_engine') and
                self.context._engine is not None)

    def dispose(self):
        """Dispose all bound engines, in all thread contexts."""

        # NOTE(review): non-Engine binds cached above lack dispose();
        # hasattr guards against calling it on those.
        for e in self.__engines.values():
            if hasattr(e, 'dispose'):
                e.dispose()
-
-
class SchemaVisitor(visitors.ClauseVisitor):
    """Define the visiting for ``SchemaItem`` objects."""

    # marks traversal as schema-level so it can be distinguished from
    # ordinary SQL expression traversal
    __traverse_options__ = {'schema_visitor': True}
-
-
class _DDLCompiles(expression.ClauseElement):
    """Mixin routing compilation to the dialect's DDL compiler."""

    def _compiler(self, dialect, **kw):
        """Return a compiler appropriate for this ClauseElement, given a
        Dialect."""

        # DDL constructs compile via ddl_compiler rather than the
        # statement (DML/query) compiler used by ordinary ClauseElements.
        return dialect.ddl_compiler(dialect, self, **kw)
-
-
class DDLElement(expression.Executable, _DDLCompiles):
    """Base class for DDL expression constructs.

    This class is the base for the general purpose :class:`.DDL` class,
    as well as the various create/drop clause constructs such as
    :class:`.CreateTable`, :class:`.DropTable`, :class:`.AddConstraint`,
    etc.

    :class:`.DDLElement` integrates closely with SQLAlchemy events,
    introduced in :ref:`event_toplevel`.  An instance of one is
    itself an event receiving callable::

        event.listen(
            users,
            'after_create',
            AddConstraint(constraint).execute_if(dialect='postgresql')
        )

    See also:

        :class:`.DDL`

        :class:`.DDLEvents`

        :ref:`event_toplevel`

        :ref:`schema_ddl_sequences`

    """

    # DDL statements autocommit by default when executed standalone.
    _execution_options = expression.Executable.\
        _execution_options.union({'autocommit': True})

    target = None
    # ``on`` supports the deprecated filtering API; ``dialect``/``callable_``
    # (and ``state``, assigned only by execute_if()) support the current one.
    on = None
    dialect = None
    callable_ = None

    def execute(self, bind=None, target=None):
        """Execute this DDL immediately.

        Executes the DDL statement in isolation using the supplied
        :class:`.Connectable` or
        :class:`.Connectable` assigned to the ``.bind``
        property, if not supplied. If the DDL has a conditional ``on``
        criteria, it will be invoked with None as the event.

        :param bind:
          Optional, an ``Engine`` or ``Connection``. If not supplied, a valid
          :class:`.Connectable` must be present in the
          ``.bind`` property.

        :param target:
          Optional, defaults to None. The target SchemaItem for the
          execute call. Will be passed to the ``on`` callable if any,
          and may also provide string expansion data for the
          statement. See ``execute_at`` for more information.

        """

        if bind is None:
            bind = _bind_or_error(self)

        if self._should_execute(target, bind):
            return bind.execute(self.against(target))
        else:
            bind.engine.logger.info(
                "DDL execution skipped, criteria not met.")

    @util.deprecated("0.7", "See :class:`.DDLEvents`, as well as "
                     ":meth:`.DDLElement.execute_if`.")
    def execute_at(self, event_name, target):
        """Link execution of this DDL to the DDL lifecycle of a SchemaItem.

        Links this ``DDLElement`` to a ``Table`` or ``MetaData`` instance,
        executing it when that schema item is created or dropped. The DDL
        statement will be executed using the same Connection and transactional
        context as the Table create/drop itself. The ``.bind`` property of
        this statement is ignored.

        :param event:
          One of the events defined in the schema item's ``.ddl_events``;
          e.g. 'before-create', 'after-create', 'before-drop' or 'after-drop'

        :param target:
          The Table or MetaData instance for which this DDLElement will
          be associated with.

        A DDLElement instance can be linked to any number of schema items.

        ``execute_at`` builds on the ``append_ddl_listener`` interface of
        :class:`.MetaData` and :class:`.Table` objects.

        Caveat: Creating or dropping a Table in isolation will also trigger
        any DDL set to ``execute_at`` that Table's MetaData. This may change
        in a future release.

        """

        # bridges the deprecated hyphenated-event API onto the modern
        # event system
        def call_event(target, connection, **kw):
            if self._should_execute_deprecated(event_name,
                                               target, connection, **kw):
                return connection.execute(self.against(target))

        event.listen(target, "" + event_name.replace('-', '_'), call_event)

    @expression._generative
    def against(self, target):
        """Return a copy of this DDL against a specific schema item."""

        self.target = target

    @expression._generative
    def execute_if(self, dialect=None, callable_=None, state=None):
        """Return a callable that will execute this
        DDLElement conditionally.

        Used to provide a wrapper for event listening::

            event.listen(
                        metadata,
                        'before_create',
                        DDL("my_ddl").execute_if(dialect='postgresql')
                    )

        :param dialect: May be a string, tuple or a callable
          predicate.  If a string, it will be compared to the name of the
          executing database dialect::

            DDL('something').execute_if(dialect='postgresql')

          If a tuple, specifies multiple dialect names::

            DDL('something').execute_if(dialect=('postgresql', 'mysql'))

        :param callable_: A callable, which will be invoked with
          four positional arguments as well as optional keyword
          arguments:

            :ddl:
              This DDL element.

            :target:
              The :class:`.Table` or :class:`.MetaData` object which is the
              target of this event. May be None if the DDL is executed
              explicitly.

            :bind:
              The :class:`.Connection` being used for DDL execution

            :tables:
              Optional keyword argument - a list of Table objects which are to
              be created/ dropped within a MetaData.create_all() or drop_all()
              method call.

            :state:
              Optional keyword argument - will be the ``state`` argument
              passed to this function.

            :checkfirst:
              Keyword argument, will be True if the 'checkfirst' flag was
              set during the call to ``create()``, ``create_all()``,
              ``drop()``, ``drop_all()``.

          If the callable returns a true value, the DDL statement will be
          executed.

        :param state: any value which will be passed to the callable_
          as the ``state`` keyword argument.

        See also:

            :class:`.DDLEvents`

            :ref:`event_toplevel`

        """
        self.dialect = dialect
        self.callable_ = callable_
        self.state = state

    def _should_execute(self, target, bind, **kw):
        # deprecated ``on`` criteria is honored first, then the
        # execute_if()-style dialect/callable filters
        if self.on is not None and \
                not self._should_execute_deprecated(None, target, bind, **kw):
            return False

        if isinstance(self.dialect, util.string_types):
            if self.dialect != bind.engine.name:
                return False
        elif isinstance(self.dialect, (tuple, list, set)):
            if bind.engine.name not in self.dialect:
                return False
        if self.callable_ is not None and \
                not self.callable_(self, target, bind, state=self.state, **kw):
            return False

        return True

    def _should_execute_deprecated(self, event, target, bind, **kw):
        if self.on is None:
            return True
        elif isinstance(self.on, util.string_types):
            return self.on == bind.engine.name
        elif isinstance(self.on, (tuple, list, set)):
            return bind.engine.name in self.on
        else:
            # ``on`` may also be a user callable; see _check_ddl_on()
            return self.on(self, event, target, bind, **kw)

    def __call__(self, target, bind, **kw):
        """Execute the DDL as a ddl_listener."""

        if self._should_execute(target, bind, **kw):
            return bind.execute(self.against(target))

    def _check_ddl_on(self, on):
        # validate the deprecated ``on`` argument's type up front
        if (on is not None and
            (not isinstance(on, util.string_types + (tuple, list, set)) and
             not util.callable(on))):
            raise exc.ArgumentError(
                "Expected the name of a database dialect, a tuple "
                "of names, or a callable for "
                "'on' criteria, got type '%s'." % type(on).__name__)

    def bind(self):
        # returns None when unbound (falsy ``_bind``)
        if self._bind:
            return self._bind

    def _set_bind(self, bind):
        self._bind = bind
    # legacy-style property assembly; getter defined above as ``bind``
    bind = property(bind, _set_bind)

    def _generate(self):
        # shallow copy used by the @_generative methods above
        s = self.__class__.__new__(self.__class__)
        s.__dict__ = self.__dict__.copy()
        return s
-
-
class DDL(DDLElement):
    """A literal DDL statement.

    Specifies literal SQL DDL to be executed by the database.  DDL objects
    function as DDL event listeners, and can be subscribed to those events
    listed in :class:`.DDLEvents`, using either :class:`.Table` or
    :class:`.MetaData` objects as targets.  Basic templating support allows
    a single DDL instance to handle repetitive tasks for multiple tables.

    Examples::

      from sqlalchemy import event, DDL

      tbl = Table('users', metadata, Column('uid', Integer))
      event.listen(tbl, 'before_create', DDL('DROP TRIGGER users_trigger'))

      spow = DDL('ALTER TABLE %(table)s SET secretpowers TRUE')
      event.listen(tbl, 'after_create', spow.execute_if(dialect='somedb'))

      drop_spow = DDL('ALTER TABLE users SET secretpowers FALSE')
      connection.execute(drop_spow)

    When operating on Table events, the following ``statement``
    string substitions are available::

      %(table)s  - the Table name, with any required quoting applied
      %(schema)s - the schema name, with any required quoting applied
      %(fullname)s - the Table name including schema, quoted if needed

    The DDL's "context", if any, will be combined with the standard
    substutions noted above.  Keys present in the context will override
    the standard substitutions.

    """

    __visit_name__ = "ddl"

    def __init__(self, statement, on=None, context=None, bind=None):
        """Create a DDL statement.

        :param statement:
          A string or unicode string to be executed.  Statements will be
          processed with Python's string formatting operator.  See the
          ``context`` argument and the ``execute_at`` method.

          A literal '%' in a statement must be escaped as '%%'.

          SQL bind parameters are not available in DDL statements.

        :param on:
          .. deprecated:: 0.7
            See :meth:`.DDLElement.execute_if`.

          Optional filtering criteria.  May be a string, tuple or a callable
          predicate.  If a string, it will be compared to the name of the
          executing database dialect::

            DDL('something', on='postgresql')

          If a tuple, specifies multiple dialect names::

            DDL('something', on=('postgresql', 'mysql'))

          If a callable, it will be invoked with four positional arguments
          as well as optional keyword arguments:

            :ddl:
              This DDL element.

            :event:
              The name of the event that has triggered this DDL, such as
              'after-create' Will be None if the DDL is executed explicitly.

            :target:
              The ``Table`` or ``MetaData`` object which is the target of
              this event. May be None if the DDL is executed explicitly.

            :connection:
              The ``Connection`` being used for DDL execution

            :tables:
              Optional keyword argument - a list of Table objects which are to
              be created/ dropped within a MetaData.create_all() or drop_all()
              method call.


          If the callable returns a true value, the DDL statement will be
          executed.

        :param context:
          Optional dictionary, defaults to None. These values will be
          available for use in string substitutions on the DDL statement.

        :param bind:
          Optional. A :class:`.Connectable`, used by
          default when ``execute()`` is invoked without a bind argument.


        See also:

            :class:`.DDLEvents`
            :mod:`sqlalchemy.event`

        """

        if not isinstance(statement, util.string_types):
            raise exc.ArgumentError(
                "Expected a string or unicode SQL statement, got '%r'" %
                statement)

        self.statement = statement
        # ``context`` supplies extra %(key)s substitutions; default to a
        # fresh empty dict (a falsy caller value is replaced too)
        self.context = context or {}

        # validate then store the deprecated ``on`` criteria
        self._check_ddl_on(on)
        self.on = on
        self._bind = bind

    def __repr__(self):
        return '<%s@%s; %s>' % (
            type(self).__name__, id(self),
            ', '.join([repr(self.statement)] +
                      ['%s=%r' % (key, getattr(self, key))
                       for key in ('on', 'context')
                       if getattr(self, key)]))
-
-
-def _to_schema_column(element):
- if hasattr(element, '__clause_element__'):
- element = element.__clause_element__()
- if not isinstance(element, Column):
- raise exc.ArgumentError("schema.Column object expected")
- return element
-
-
-def _to_schema_column_or_string(element):
- if hasattr(element, '__clause_element__'):
- element = element.__clause_element__()
- if not isinstance(element, util.string_types + (expression.ColumnElement, )):
- msg = "Element %r is not a string name or column element"
- raise exc.ArgumentError(msg % element)
- return element
-
-
-class _CreateDropBase(DDLElement):
- """Base class for DDL constucts that represent CREATE and DROP or
- equivalents.
-
- The common theme of _CreateDropBase is a single
- ``element`` attribute which refers to the element
- to be created or dropped.
-
- """
-
- def __init__(self, element, on=None, bind=None):
- self.element = element
- self._check_ddl_on(on)
- self.on = on
- self.bind = bind
-
- def _create_rule_disable(self, compiler):
- """Allow disable of _create_rule using a callable.
-
- Pass to _create_rule using
- util.portable_instancemethod(self._create_rule_disable)
- to retain serializability.
-
- """
- return False
-
-
-class CreateSchema(_CreateDropBase):
- """Represent a CREATE SCHEMA statement.
-
- .. versionadded:: 0.7.4
-
- The argument here is the string name of the schema.
-
- """
-
- __visit_name__ = "create_schema"
-
- def __init__(self, name, quote=None, **kw):
- """Create a new :class:`.CreateSchema` construct."""
-
- self.quote = quote
- super(CreateSchema, self).__init__(name, **kw)
-
-
-class DropSchema(_CreateDropBase):
- """Represent a DROP SCHEMA statement.
-
- The argument here is the string name of the schema.
-
- .. versionadded:: 0.7.4
-
- """
-
- __visit_name__ = "drop_schema"
-
- def __init__(self, name, quote=None, cascade=False, **kw):
- """Create a new :class:`.DropSchema` construct."""
-
- self.quote = quote
- self.cascade = cascade
- super(DropSchema, self).__init__(name, **kw)
-
-
-class CreateTable(_CreateDropBase):
- """Represent a CREATE TABLE statement."""
-
- __visit_name__ = "create_table"
-
- def __init__(self, element, on=None, bind=None):
- """Create a :class:`.CreateTable` construct.
-
- :param element: a :class:`.Table` that's the subject
- of the CREATE
- :param on: See the description for 'on' in :class:`.DDL`.
- :param bind: See the description for 'bind' in :class:`.DDL`.
-
- """
- super(CreateTable, self).__init__(element, on=on, bind=bind)
- self.columns = [CreateColumn(column)
- for column in element.columns
- ]
-
-
-class _DropView(_CreateDropBase):
- """Semi-public 'DROP VIEW' construct.
-
- Used by the test suite for dialect-agnostic drops of views.
- This object will eventually be part of a public "view" API.
-
- """
- __visit_name__ = "drop_view"
-
-
-class CreateColumn(_DDLCompiles):
- """Represent a :class:`.Column` as rendered in a CREATE TABLE statement,
- via the :class:`.CreateTable` construct.
-
- This is provided to support custom column DDL within the generation
- of CREATE TABLE statements, by using the
- compiler extension documented in :ref:`sqlalchemy.ext.compiler_toplevel`
- to extend :class:`.CreateColumn`.
-
- Typical integration is to examine the incoming :class:`.Column`
- object, and to redirect compilation if a particular flag or condition
- is found::
-
- from sqlalchemy import schema
- from sqlalchemy.ext.compiler import compiles
-
- @compiles(schema.CreateColumn)
- def compile(element, compiler, **kw):
- column = element.element
-
- if "special" not in column.info:
- return compiler.visit_create_column(element, **kw)
-
- text = "%s SPECIAL DIRECTIVE %s" % (
- column.name,
- compiler.type_compiler.process(column.type)
- )
- default = compiler.get_column_default_string(column)
- if default is not None:
- text += " DEFAULT " + default
-
- if not column.nullable:
- text += " NOT NULL"
-
- if column.constraints:
- text += " ".join(
- compiler.process(const)
- for const in column.constraints)
- return text
-
- The above construct can be applied to a :class:`.Table` as follows::
-
- from sqlalchemy import Table, Metadata, Column, Integer, String
- from sqlalchemy import schema
-
- metadata = MetaData()
-
- table = Table('mytable', MetaData(),
- Column('x', Integer, info={"special":True}, primary_key=True),
- Column('y', String(50)),
- Column('z', String(20), info={"special":True})
- )
-
- metadata.create_all(conn)
-
- Above, the directives we've added to the :attr:`.Column.info` collection
- will be detected by our custom compilation scheme::
-
- CREATE TABLE mytable (
- x SPECIAL DIRECTIVE INTEGER NOT NULL,
- y VARCHAR(50),
- z SPECIAL DIRECTIVE VARCHAR(20),
- PRIMARY KEY (x)
- )
-
- .. versionadded:: 0.8 The :class:`.CreateColumn` construct was added
- to support custom column creation styles.
-
- """
- __visit_name__ = 'create_column'
-
- def __init__(self, element):
- self.element = element
-
-
-class DropTable(_CreateDropBase):
- """Represent a DROP TABLE statement."""
-
- __visit_name__ = "drop_table"
-
-
-class CreateSequence(_CreateDropBase):
- """Represent a CREATE SEQUENCE statement."""
-
- __visit_name__ = "create_sequence"
-
-
-class DropSequence(_CreateDropBase):
- """Represent a DROP SEQUENCE statement."""
-
- __visit_name__ = "drop_sequence"
-
-
-class CreateIndex(_CreateDropBase):
- """Represent a CREATE INDEX statement."""
-
- __visit_name__ = "create_index"
-
-
-class DropIndex(_CreateDropBase):
- """Represent a DROP INDEX statement."""
-
- __visit_name__ = "drop_index"
-
-
-class AddConstraint(_CreateDropBase):
- """Represent an ALTER TABLE ADD CONSTRAINT statement."""
-
- __visit_name__ = "add_constraint"
-
- def __init__(self, element, *args, **kw):
- super(AddConstraint, self).__init__(element, *args, **kw)
- element._create_rule = util.portable_instancemethod(
- self._create_rule_disable)
-
-
-class DropConstraint(_CreateDropBase):
- """Represent an ALTER TABLE DROP CONSTRAINT statement."""
-
- __visit_name__ = "drop_constraint"
-
- def __init__(self, element, cascade=False, **kw):
- self.cascade = cascade
- super(DropConstraint, self).__init__(element, **kw)
- element._create_rule = util.portable_instancemethod(
- self._create_rule_disable)
-
-
-def _bind_or_error(schemaitem, msg=None):
- bind = schemaitem.bind
- if not bind:
- name = schemaitem.__class__.__name__
- label = getattr(schemaitem, 'fullname',
- getattr(schemaitem, 'name', None))
- if label:
- item = '%s %r' % (name, label)
- else:
- item = name
- if isinstance(schemaitem, (MetaData, DDL)):
- bindable = "the %s's .bind" % name
- else:
- bindable = "this %s's .metadata.bind" % name
-
- if msg is None:
- msg = "The %s is not bound to an Engine or Connection. "\
- "Execution can not proceed without a database to execute "\
- "against. Either execute with an explicit connection or "\
- "assign %s to enable implicit execution." % \
- (item, bindable)
- raise exc.UnboundExecutionError(msg)
- return bind
+from .sql.base import (
+ SchemaVisitor
+ )
+
+
+from .sql.schema import (
+ CheckConstraint,
+ Column,
+ ColumnDefault,
+ Constraint,
+ DefaultClause,
+ DefaultGenerator,
+ FetchedValue,
+ ForeignKey,
+ ForeignKeyConstraint,
+ Index,
+ MetaData,
+ PassiveDefault,
+ PrimaryKeyConstraint,
+ SchemaItem,
+ Sequence,
+ Table,
+ ThreadLocalMetaData,
+ UniqueConstraint,
+ _get_table_key,
+ ColumnCollectionConstraint,
+ )
+
+
+from .sql.ddl import (
+ DDL,
+ CreateTable,
+ DropTable,
+ CreateSequence,
+ DropSequence,
+ CreateIndex,
+ DropIndex,
+ CreateSchema,
+ DropSchema,
+ _DropView,
+ CreateColumn,
+ AddConstraint,
+ DropConstraint,
+ DDLBase,
+ DDLElement,
+ _CreateDropBase,
+ _DDLCompiles
+)
diff --git a/lib/sqlalchemy/sql/__init__.py b/lib/sqlalchemy/sql/__init__.py
index 9700f26a0..9ed6049af 100644
--- a/lib/sqlalchemy/sql/__init__.py
+++ b/lib/sqlalchemy/sql/__init__.py
@@ -1,5 +1,5 @@
# sql/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -35,6 +35,7 @@ from .expression import (
exists,
extract,
false,
+ False_,
func,
insert,
intersect,
@@ -55,6 +56,7 @@ from .expression import (
table,
text,
true,
+ True_,
tuple_,
type_coerce,
union,
@@ -64,5 +66,24 @@ from .expression import (
from .visitors import ClauseVisitor
-__tmp = list(locals().keys())
-__all__ = sorted([i for i in __tmp if not i.startswith('__')])
+
+def __go(lcls):
+ global __all__
+ from .. import util as _sa_util
+
+ import inspect as _inspect
+
+ __all__ = sorted(name for name, obj in lcls.items()
+ if not (name.startswith('_') or _inspect.ismodule(obj)))
+
+ from .annotation import _prepare_annotations, Annotated
+ from .elements import AnnotatedColumnElement, ClauseList
+ from .selectable import AnnotatedFromClause
+ _prepare_annotations(ColumnElement, AnnotatedColumnElement)
+ _prepare_annotations(FromClause, AnnotatedFromClause)
+ _prepare_annotations(ClauseList, Annotated)
+
+ _sa_util.dependencies.resolve_all("sqlalchemy.sql")
+
+__go(locals())
+
diff --git a/lib/sqlalchemy/sql/annotation.py b/lib/sqlalchemy/sql/annotation.py
new file mode 100644
index 000000000..b5b7849d2
--- /dev/null
+++ b/lib/sqlalchemy/sql/annotation.py
@@ -0,0 +1,182 @@
+# sql/annotation.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""The :class:`.Annotated` class and related routines; creates hash-equivalent
+copies of SQL constructs which contain context-specific markers and associations.
+
+"""
+
+from .. import util
+from . import operators
+
+class Annotated(object):
+ """clones a ClauseElement and applies an 'annotations' dictionary.
+
+ Unlike regular clones, this clone also mimics __hash__() and
+ __cmp__() of the original element so that it takes its place
+ in hashed collections.
+
+ A reference to the original element is maintained, for the important
+ reason of keeping its hash value current. When GC'ed, the
+ hash value may be reused, causing conflicts.
+
+ """
+
+ def __new__(cls, *args):
+ if not args:
+ # clone constructor
+ return object.__new__(cls)
+ else:
+ element, values = args
+ # pull appropriate subclass from registry of annotated
+ # classes
+ try:
+ cls = annotated_classes[element.__class__]
+ except KeyError:
+ cls = _new_annotation_type(element.__class__, cls)
+ return object.__new__(cls)
+
+ def __init__(self, element, values):
+ self.__dict__ = element.__dict__.copy()
+ self.__element = element
+ self._annotations = values
+
+ def _annotate(self, values):
+ _values = self._annotations.copy()
+ _values.update(values)
+ return self._with_annotations(_values)
+
+ def _with_annotations(self, values):
+ clone = self.__class__.__new__(self.__class__)
+ clone.__dict__ = self.__dict__.copy()
+ clone._annotations = values
+ return clone
+
+ def _deannotate(self, values=None, clone=True):
+ if values is None:
+ return self.__element
+ else:
+ _values = self._annotations.copy()
+ for v in values:
+ _values.pop(v, None)
+ return self._with_annotations(_values)
+
+ def _compiler_dispatch(self, visitor, **kw):
+ return self.__element.__class__._compiler_dispatch(self, visitor, **kw)
+
+ @property
+ def _constructor(self):
+ return self.__element._constructor
+
+ def _clone(self):
+ clone = self.__element._clone()
+ if clone is self.__element:
+ # detect immutable, don't change anything
+ return self
+ else:
+ # update the clone with any changes that have occurred
+ # to this object's __dict__.
+ clone.__dict__.update(self.__dict__)
+ return self.__class__(clone, self._annotations)
+
+ def __hash__(self):
+ return hash(self.__element)
+
+ def __eq__(self, other):
+ if isinstance(self.__element, operators.ColumnOperators):
+ return self.__element.__class__.__eq__(self, other)
+ else:
+ return hash(other) == hash(self)
+
+
+
+# hard-generate Annotated subclasses. this technique
+# is used instead of on-the-fly types (i.e. type.__new__())
+# so that the resulting objects are pickleable.
+annotated_classes = {}
+
+
+
+def _deep_annotate(element, annotations, exclude=None):
+ """Deep copy the given ClauseElement, annotating each element
+ with the given annotations dictionary.
+
+ Elements within the exclude collection will be cloned but not annotated.
+
+ """
+ def clone(elem):
+ if exclude and \
+ hasattr(elem, 'proxy_set') and \
+ elem.proxy_set.intersection(exclude):
+ newelem = elem._clone()
+ elif annotations != elem._annotations:
+ newelem = elem._annotate(annotations)
+ else:
+ newelem = elem
+ newelem._copy_internals(clone=clone)
+ return newelem
+
+ if element is not None:
+ element = clone(element)
+ return element
+
+
+def _deep_deannotate(element, values=None):
+ """Deep copy the given element, removing annotations."""
+
+ cloned = util.column_dict()
+
+ def clone(elem):
+ # if a values dict is given,
+ # the elem must be cloned each time it appears,
+ # as there may be different annotations in source
+ # elements that are remaining. if totally
+ # removing all annotations, can assume the same
+ # slate...
+ if values or elem not in cloned:
+ newelem = elem._deannotate(values=values, clone=True)
+ newelem._copy_internals(clone=clone)
+ if not values:
+ cloned[elem] = newelem
+ return newelem
+ else:
+ return cloned[elem]
+
+ if element is not None:
+ element = clone(element)
+ return element
+
+
+def _shallow_annotate(element, annotations):
+ """Annotate the given ClauseElement and copy its internals so that
+ internal objects refer to the new annotated object.
+
+ Basically used to apply a "dont traverse" annotation to a
+ selectable, without digging throughout the whole
+ structure wasting time.
+ """
+ element = element._annotate(annotations)
+ element._copy_internals()
+ return element
+
+def _new_annotation_type(cls, base_cls):
+ if issubclass(cls, Annotated):
+ return cls
+ elif cls in annotated_classes:
+ return annotated_classes[cls]
+ annotated_classes[cls] = anno_cls = type(
+ "Annotated%s" % cls.__name__,
+ (base_cls, cls), {})
+ globals()["Annotated%s" % cls.__name__] = anno_cls
+ return anno_cls
+
+def _prepare_annotations(target_hierarchy, base_cls):
+ stack = [target_hierarchy]
+ while stack:
+ cls = stack.pop()
+ stack.extend(cls.__subclasses__())
+
+ _new_annotation_type(cls, base_cls)
diff --git a/lib/sqlalchemy/sql/base.py b/lib/sqlalchemy/sql/base.py
new file mode 100644
index 000000000..4a7dd65d3
--- /dev/null
+++ b/lib/sqlalchemy/sql/base.py
@@ -0,0 +1,460 @@
+# sql/base.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Foundational utilities common to many sql modules.
+
+"""
+
+
+from .. import util, exc
+import itertools
+from .visitors import ClauseVisitor
+import re
+import collections
+
+PARSE_AUTOCOMMIT = util.symbol('PARSE_AUTOCOMMIT')
+NO_ARG = util.symbol('NO_ARG')
+
+class Immutable(object):
+ """mark a ClauseElement as 'immutable' when expressions are cloned."""
+
+ def unique_params(self, *optionaldict, **kwargs):
+ raise NotImplementedError("Immutable objects do not support copying")
+
+ def params(self, *optionaldict, **kwargs):
+ raise NotImplementedError("Immutable objects do not support copying")
+
+ def _clone(self):
+ return self
+
+
+
+def _from_objects(*elements):
+ return itertools.chain(*[element._from_objects for element in elements])
+
+@util.decorator
+def _generative(fn, *args, **kw):
+ """Mark a method as generative."""
+
+ self = args[0]._generate()
+ fn(self, *args[1:], **kw)
+ return self
+
+
+class DialectKWArgs(object):
+ """Establish the ability for a class to have dialect-specific arguments
+ with defaults and validation.
+
+ """
+
+ @util.memoized_property
+ def dialect_kwargs(self):
+ """A collection of keyword arguments specified as dialect-specific
+ options to this construct.
+
+ The arguments are present here in their original ``<dialect>_<kwarg>``
+ format. Only arguments that were actually passed are included;
+ unlike the :attr:`.DialectKWArgs.dialect_options` collection, which
+ contains all options known by this dialect including defaults.
+
+ .. versionadded:: 0.9.2
+
+ .. seealso::
+
+ :attr:`.DialectKWArgs.dialect_options` - nested dictionary form
+
+ """
+
+ return util.immutabledict()
+
+ @property
+ def kwargs(self):
+        """Deprecated; see :attr:`.DialectKWArgs.dialect_kwargs`"""
+ return self.dialect_kwargs
+
+ @util.dependencies("sqlalchemy.dialects")
+ def _kw_reg_for_dialect(dialects, dialect_name):
+ dialect_cls = dialects.registry.load(dialect_name)
+ if dialect_cls.construct_arguments is None:
+ return None
+ return dict(dialect_cls.construct_arguments)
+ _kw_registry = util.PopulateDict(_kw_reg_for_dialect)
+
+ def _kw_reg_for_dialect_cls(self, dialect_name):
+ construct_arg_dictionary = DialectKWArgs._kw_registry[dialect_name]
+ if construct_arg_dictionary is None:
+ return {"*": None}
+ else:
+ d = {}
+ for cls in reversed(self.__class__.__mro__):
+ if cls in construct_arg_dictionary:
+ d.update(construct_arg_dictionary[cls])
+ return d
+
+ @util.memoized_property
+ def dialect_options(self):
+ """A collection of keyword arguments specified as dialect-specific
+ options to this construct.
+
+ This is a two-level nested registry, keyed to ``<dialect_name>``
+ and ``<argument_name>``. For example, the ``postgresql_where`` argument
+ would be locatable as::
+
+ arg = my_object.dialect_options['postgresql']['where']
+
+ .. versionadded:: 0.9.2
+
+ .. seealso::
+
+ :attr:`.DialectKWArgs.dialect_kwargs` - flat dictionary form
+
+ """
+
+ return util.PopulateDict(
+ util.portable_instancemethod(self._kw_reg_for_dialect_cls)
+ )
+
+ def _validate_dialect_kwargs(self, kwargs):
+ # validate remaining kwargs that they all specify DB prefixes
+
+ if not kwargs:
+ return
+
+ self.dialect_kwargs = self.dialect_kwargs.union(kwargs)
+
+ for k in kwargs:
+ m = re.match('^(.+?)_(.+)$', k)
+ if m is None:
+ raise TypeError("Additional arguments should be "
+ "named <dialectname>_<argument>, got '%s'" % k)
+ dialect_name, arg_name = m.group(1, 2)
+
+ try:
+ construct_arg_dictionary = self.dialect_options[dialect_name]
+ except exc.NoSuchModuleError:
+ util.warn(
+ "Can't validate argument %r; can't "
+ "locate any SQLAlchemy dialect named %r" %
+ (k, dialect_name))
+ self.dialect_options[dialect_name] = {
+ "*": None,
+ arg_name: kwargs[k]}
+ else:
+ if "*" not in construct_arg_dictionary and \
+ arg_name not in construct_arg_dictionary:
+ raise exc.ArgumentError(
+ "Argument %r is not accepted by "
+ "dialect %r on behalf of %r" % (
+ k,
+ dialect_name, self.__class__
+ ))
+ else:
+ construct_arg_dictionary[arg_name] = kwargs[k]
+
+
+class Generative(object):
+ """Allow a ClauseElement to generate itself via the
+ @_generative decorator.
+
+ """
+
+ def _generate(self):
+ s = self.__class__.__new__(self.__class__)
+ s.__dict__ = self.__dict__.copy()
+ return s
+
+
+class Executable(Generative):
+ """Mark a ClauseElement as supporting execution.
+
+ :class:`.Executable` is a superclass for all "statement" types
+ of objects, including :func:`select`, :func:`delete`, :func:`update`,
+ :func:`insert`, :func:`text`.
+
+ """
+
+ supports_execution = True
+ _execution_options = util.immutabledict()
+ _bind = None
+
+ @_generative
+ def execution_options(self, **kw):
+ """ Set non-SQL options for the statement which take effect during
+ execution.
+
+ Execution options can be set on a per-statement or
+ per :class:`.Connection` basis. Additionally, the
+ :class:`.Engine` and ORM :class:`~.orm.query.Query` objects provide
+ access to execution options which they in turn configure upon
+ connections.
+
+ The :meth:`execution_options` method is generative. A new
+ instance of this statement is returned that contains the options::
+
+ statement = select([table.c.x, table.c.y])
+ statement = statement.execution_options(autocommit=True)
+
+ Note that only a subset of possible execution options can be applied
+ to a statement - these include "autocommit" and "stream_results",
+ but not "isolation_level" or "compiled_cache".
+ See :meth:`.Connection.execution_options` for a full list of
+ possible options.
+
+ .. seealso::
+
+ :meth:`.Connection.execution_options()`
+
+ :meth:`.Query.execution_options()`
+
+ """
+ if 'isolation_level' in kw:
+ raise exc.ArgumentError(
+ "'isolation_level' execution option may only be specified "
+ "on Connection.execution_options(), or "
+ "per-engine using the isolation_level "
+ "argument to create_engine()."
+ )
+ if 'compiled_cache' in kw:
+ raise exc.ArgumentError(
+ "'compiled_cache' execution option may only be specified "
+ "on Connection.execution_options(), not per statement."
+ )
+ self._execution_options = self._execution_options.union(kw)
+
+ def execute(self, *multiparams, **params):
+ """Compile and execute this :class:`.Executable`."""
+ e = self.bind
+ if e is None:
+ label = getattr(self, 'description', self.__class__.__name__)
+ msg = ('This %s is not directly bound to a Connection or Engine.'
+ 'Use the .execute() method of a Connection or Engine '
+ 'to execute this construct.' % label)
+ raise exc.UnboundExecutionError(msg)
+ return e._execute_clauseelement(self, multiparams, params)
+
+ def scalar(self, *multiparams, **params):
+ """Compile and execute this :class:`.Executable`, returning the
+ result's scalar representation.
+
+ """
+ return self.execute(*multiparams, **params).scalar()
+
+ @property
+ def bind(self):
+ """Returns the :class:`.Engine` or :class:`.Connection` to
+ which this :class:`.Executable` is bound, or None if none found.
+
+ This is a traversal which checks locally, then
+ checks among the "from" clauses of associated objects
+ until a bound engine or connection is found.
+
+ """
+ if self._bind is not None:
+ return self._bind
+
+ for f in _from_objects(self):
+ if f is self:
+ continue
+ engine = f.bind
+ if engine is not None:
+ return engine
+ else:
+ return None
+
+
+class SchemaEventTarget(object):
+ """Base class for elements that are the targets of :class:`.DDLEvents`
+ events.
+
+ This includes :class:`.SchemaItem` as well as :class:`.SchemaType`.
+
+ """
+
+ def _set_parent(self, parent):
+ """Associate with this SchemaEvent's parent object."""
+
+ raise NotImplementedError()
+
+ def _set_parent_with_dispatch(self, parent):
+ self.dispatch.before_parent_attach(self, parent)
+ self._set_parent(parent)
+ self.dispatch.after_parent_attach(self, parent)
+
+class SchemaVisitor(ClauseVisitor):
+ """Define the visiting for ``SchemaItem`` objects."""
+
+ __traverse_options__ = {'schema_visitor': True}
+
+class ColumnCollection(util.OrderedProperties):
+ """An ordered dictionary that stores a list of ColumnElement
+ instances.
+
+ Overrides the ``__eq__()`` method to produce SQL clauses between
+ sets of correlated columns.
+
+ """
+
+ def __init__(self, *cols):
+ super(ColumnCollection, self).__init__()
+ self._data.update((c.key, c) for c in cols)
+ self.__dict__['_all_cols'] = util.column_set(self)
+
+ def __str__(self):
+ return repr([str(c) for c in self])
+
+ def replace(self, column):
+ """add the given column to this collection, removing unaliased
+ versions of this column as well as existing columns with the
+ same key.
+
+ e.g.::
+
+ t = Table('sometable', metadata, Column('col1', Integer))
+ t.columns.replace(Column('col1', Integer, key='columnone'))
+
+ will remove the original 'col1' from the collection, and add
+        the new column under the name 'columnone'.
+
+ Used by schema.Column to override columns during table reflection.
+
+ """
+ if column.name in self and column.key != column.name:
+ other = self[column.name]
+ if other.name == other.key:
+ del self._data[other.name]
+ self._all_cols.remove(other)
+ if column.key in self._data:
+ self._all_cols.remove(self._data[column.key])
+ self._all_cols.add(column)
+ self._data[column.key] = column
+
+ def add(self, column):
+ """Add a column to this collection.
+
+ The key attribute of the column will be used as the hash key
+ for this dictionary.
+
+ """
+ self[column.key] = column
+
+ def __delitem__(self, key):
+ raise NotImplementedError()
+
+ def __setattr__(self, key, object):
+ raise NotImplementedError()
+
+ def __setitem__(self, key, value):
+ if key in self:
+
+ # this warning is primarily to catch select() statements
+ # which have conflicting column names in their exported
+ # columns collection
+
+ existing = self[key]
+ if not existing.shares_lineage(value):
+ util.warn('Column %r on table %r being replaced by '
+ '%r, which has the same key. Consider '
+ 'use_labels for select() statements.' % (key,
+ getattr(existing, 'table', None), value))
+ self._all_cols.remove(existing)
+ # pop out memoized proxy_set as this
+ # operation may very well be occurring
+ # in a _make_proxy operation
+ util.memoized_property.reset(value, "proxy_set")
+ self._all_cols.add(value)
+ self._data[key] = value
+
+ def clear(self):
+ self._data.clear()
+ self._all_cols.clear()
+
+ def remove(self, column):
+ del self._data[column.key]
+ self._all_cols.remove(column)
+
+ def update(self, value):
+ self._data.update(value)
+ self._all_cols.clear()
+ self._all_cols.update(self._data.values())
+
+ def extend(self, iter):
+ self.update((c.key, c) for c in iter)
+
+ __hash__ = None
+
+ @util.dependencies("sqlalchemy.sql.elements")
+ def __eq__(self, elements, other):
+ l = []
+ for c in other:
+ for local in self:
+ if c.shares_lineage(local):
+ l.append(c == local)
+ return elements.and_(*l)
+
+ def __contains__(self, other):
+ if not isinstance(other, util.string_types):
+ raise exc.ArgumentError("__contains__ requires a string argument")
+ return util.OrderedProperties.__contains__(self, other)
+
+ def __setstate__(self, state):
+ self.__dict__['_data'] = state['_data']
+ self.__dict__['_all_cols'] = util.column_set(self._data.values())
+
+ def contains_column(self, col):
+ # this has to be done via set() membership
+ return col in self._all_cols
+
+ def as_immutable(self):
+ return ImmutableColumnCollection(self._data, self._all_cols)
+
+
+class ImmutableColumnCollection(util.ImmutableProperties, ColumnCollection):
+ def __init__(self, data, colset):
+ util.ImmutableProperties.__init__(self, data)
+ self.__dict__['_all_cols'] = colset
+
+ extend = remove = util.ImmutableProperties._immutable
+
+
+class ColumnSet(util.ordered_column_set):
+ def contains_column(self, col):
+ return col in self
+
+ def extend(self, cols):
+ for col in cols:
+ self.add(col)
+
+ def __add__(self, other):
+ return list(self) + list(other)
+
+ @util.dependencies("sqlalchemy.sql.elements")
+ def __eq__(self, elements, other):
+ l = []
+ for c in other:
+ for local in self:
+ if c.shares_lineage(local):
+ l.append(c == local)
+ return elements.and_(*l)
+
+ def __hash__(self):
+ return hash(tuple(x for x in self))
+
+def _bind_or_error(schemaitem, msg=None):
+ bind = schemaitem.bind
+ if not bind:
+ name = schemaitem.__class__.__name__
+ label = getattr(schemaitem, 'fullname',
+ getattr(schemaitem, 'name', None))
+ if label:
+ item = '%s object %r' % (name, label)
+ else:
+ item = '%s object' % name
+ if msg is None:
+ msg = "%s is not bound to an Engine or Connection. "\
+ "Execution can not proceed without a database to execute "\
+ "against." % item
+ raise exc.UnboundExecutionError(msg)
+ return bind
diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py
index a5f545de9..4448f7c7b 100644
--- a/lib/sqlalchemy/sql/compiler.py
+++ b/lib/sqlalchemy/sql/compiler.py
@@ -1,5 +1,5 @@
# sql/compiler.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -23,13 +23,12 @@ To generate user-defined SQL strings, see
"""
import re
-import sys
-from .. import schema, engine, util, exc, types
-from . import (
- operators, functions, util as sql_util, visitors, expression as sql
-)
+from . import schema, sqltypes, operators, functions, \
+ util as sql_util, visitors, elements, selectable, base
+from .. import util, exc
import decimal
import itertools
+import operator
RESERVED_WORDS = set([
'all', 'analyse', 'analyze', 'and', 'any', 'array',
@@ -115,6 +114,7 @@ OPERATORS = {
operators.asc_op: ' ASC',
operators.nullsfirst_op: ' NULLS FIRST',
operators.nullslast_op: ' NULLS LAST',
+
}
FUNCTIONS = {
@@ -150,14 +150,122 @@ EXTRACT_MAP = {
}
COMPOUND_KEYWORDS = {
- sql.CompoundSelect.UNION: 'UNION',
- sql.CompoundSelect.UNION_ALL: 'UNION ALL',
- sql.CompoundSelect.EXCEPT: 'EXCEPT',
- sql.CompoundSelect.EXCEPT_ALL: 'EXCEPT ALL',
- sql.CompoundSelect.INTERSECT: 'INTERSECT',
- sql.CompoundSelect.INTERSECT_ALL: 'INTERSECT ALL'
+ selectable.CompoundSelect.UNION: 'UNION',
+ selectable.CompoundSelect.UNION_ALL: 'UNION ALL',
+ selectable.CompoundSelect.EXCEPT: 'EXCEPT',
+ selectable.CompoundSelect.EXCEPT_ALL: 'EXCEPT ALL',
+ selectable.CompoundSelect.INTERSECT: 'INTERSECT',
+ selectable.CompoundSelect.INTERSECT_ALL: 'INTERSECT ALL'
}
+class Compiled(object):
+ """Represent a compiled SQL or DDL expression.
+
+ The ``__str__`` method of the ``Compiled`` object should produce
+ the actual text of the statement. ``Compiled`` objects are
+ specific to their underlying database dialect, and also may
+ or may not be specific to the columns referenced within a
+ particular set of bind parameters. In no case should the
+ ``Compiled`` object be dependent on the actual values of those
+ bind parameters, even though it may reference those values as
+ defaults.
+ """
+
+ def __init__(self, dialect, statement, bind=None,
+ compile_kwargs=util.immutabledict()):
+ """Construct a new ``Compiled`` object.
+
+ :param dialect: ``Dialect`` to compile against.
+
+ :param statement: ``ClauseElement`` to be compiled.
+
+ :param bind: Optional Engine or Connection to compile this
+ statement against.
+
+ :param compile_kwargs: additional kwargs that will be
+ passed to the initial call to :meth:`.Compiled.process`.
+
+ .. versionadded:: 0.8
+
+ """
+
+ self.dialect = dialect
+ self.bind = bind
+ if statement is not None:
+ self.statement = statement
+ self.can_execute = statement.supports_execution
+ self.string = self.process(self.statement, **compile_kwargs)
+
+ @util.deprecated("0.7", ":class:`.Compiled` objects now compile "
+ "within the constructor.")
+ def compile(self):
+ """Produce the internal string representation of this element.
+ """
+ pass
+
+ def _execute_on_connection(self, connection, multiparams, params):
+ return connection._execute_compiled(self, multiparams, params)
+
+ @property
+ def sql_compiler(self):
+ """Return a Compiled that is capable of processing SQL expressions.
+
+ If this compiler is one, it would likely just return 'self'.
+
+ """
+
+ raise NotImplementedError()
+
+ def process(self, obj, **kwargs):
+ return obj._compiler_dispatch(self, **kwargs)
+
+ def __str__(self):
+ """Return the string text of the generated SQL or DDL."""
+
+ return self.string or ''
+
+ def construct_params(self, params=None):
+ """Return the bind params for this compiled object.
+
+ :param params: a dict of string/object pairs whose values will
+ override bind values compiled in to the
+ statement.
+ """
+
+ raise NotImplementedError()
+
+ @property
+ def params(self):
+ """Return the bind params for this compiled object."""
+ return self.construct_params()
+
+ def execute(self, *multiparams, **params):
+ """Execute this compiled object."""
+
+ e = self.bind
+ if e is None:
+ raise exc.UnboundExecutionError(
+ "This Compiled object is not bound to any Engine "
+ "or Connection.")
+ return e._execute_compiled(self, multiparams, params)
+
+ def scalar(self, *multiparams, **params):
+ """Execute this compiled object and return the result's
+ scalar value."""
+
+ return self.execute(*multiparams, **params).scalar()
+
+
+class TypeCompiler(object):
+ """Produces DDL specification for TypeEngine objects."""
+
+ def __init__(self, dialect):
+ self.dialect = dialect
+
+ def process(self, type_):
+ return type_._compiler_dispatch(self)
+
+
class _CompileLabel(visitors.Visitable):
"""lightweight label object which acts as an expression.Label."""
@@ -178,12 +286,8 @@ class _CompileLabel(visitors.Visitable):
def type(self):
return self.element.type
- @property
- def quote(self):
- return self.element.quote
-
-class SQLCompiler(engine.Compiled):
+class SQLCompiler(Compiled):
"""Default implementation of Compiled.
Compiles ClauseElements into SQL strings. Uses a similar visit
@@ -284,7 +388,7 @@ class SQLCompiler(engine.Compiled):
# a map which tracks "truncated" names based on
# dialect.label_length or dialect.max_identifier_length
self.truncated_names = {}
- engine.Compiled.__init__(self, dialect, statement, **kwargs)
+ Compiled.__init__(self, dialect, statement, **kwargs)
if self.positional and dialect.paramstyle == 'numeric':
self._apply_numbered_params()
@@ -397,7 +501,7 @@ class SQLCompiler(engine.Compiled):
render_label_only = render_label_as_label is label
if render_label_only or render_label_with_as:
- if isinstance(label.name, sql._truncated_label):
+ if isinstance(label.name, elements._truncated_label):
labelname = self._truncated_identifier("colident", label.name)
else:
labelname = label.name
@@ -432,7 +536,7 @@ class SQLCompiler(engine.Compiled):
"its 'name' is assigned.")
is_literal = column.is_literal
- if not is_literal and isinstance(name, sql._truncated_label):
+ if not is_literal and isinstance(name, elements._truncated_label):
name = self._truncated_identifier("colident", name)
if add_to_result_map is not None:
@@ -446,24 +550,22 @@ class SQLCompiler(engine.Compiled):
if is_literal:
name = self.escape_literal_column(name)
else:
- name = self.preparer.quote(name, column.quote)
+ name = self.preparer.quote(name)
table = column.table
if table is None or not include_table or not table.named_with_column:
return name
else:
if table.schema:
- schema_prefix = self.preparer.quote_schema(
- table.schema,
- table.quote_schema) + '.'
+ schema_prefix = self.preparer.quote_schema(table.schema) + '.'
else:
schema_prefix = ''
tablename = table.name
- if isinstance(tablename, sql._truncated_label):
+ if isinstance(tablename, elements._truncated_label):
tablename = self._truncated_identifier("alias", tablename)
return schema_prefix + \
- self.preparer.quote(tablename, table.quote) + \
+ self.preparer.quote(tablename) + \
"." + name
def escape_literal_column(self, text):
@@ -484,20 +586,13 @@ class SQLCompiler(engine.Compiled):
def post_process_text(self, text):
return text
- def visit_textclause(self, textclause, **kwargs):
- if textclause.typemap is not None:
- for colname, type_ in textclause.typemap.items():
- self.result_map[colname
- if self.dialect.case_sensitive
- else colname.lower()] = \
- (colname, None, type_)
-
+ def visit_textclause(self, textclause, **kw):
def do_bindparam(m):
name = m.group(1)
- if name in textclause.bindparams:
- return self.process(textclause.bindparams[name])
+ if name in textclause._bindparams:
+ return self.process(textclause._bindparams[name], **kw)
else:
- return self.bindparam_string(name, **kwargs)
+ return self.bindparam_string(name, **kw)
# un-escape any \:params
return BIND_PARAMS_ESC.sub(lambda m: m.group(1),
@@ -505,14 +600,47 @@ class SQLCompiler(engine.Compiled):
self.post_process_text(textclause.text))
)
+ def visit_text_as_from(self, taf, iswrapper=False,
+ compound_index=0, force_result_map=False,
+ asfrom=False,
+ parens=True, **kw):
+
+ toplevel = not self.stack
+ entry = self._default_stack_entry if toplevel else self.stack[-1]
+
+ populate_result_map = force_result_map or (
+ compound_index == 0 and (
+ toplevel or \
+ entry['iswrapper']
+ )
+ )
+
+ if populate_result_map:
+ for c in taf.c:
+ self._add_to_result_map(
+ c.key, c.key, (c,), c.type
+ )
+
+ text = self.process(taf.element, **kw)
+ if asfrom and parens:
+ text = "(%s)" % text
+ return text
+
+
def visit_null(self, expr, **kw):
return 'NULL'
def visit_true(self, expr, **kw):
- return 'true'
+ if self.dialect.supports_native_boolean:
+ return 'true'
+ else:
+ return "1"
def visit_false(self, expr, **kw):
- return 'false'
+ if self.dialect.supports_native_boolean:
+ return 'false'
+ else:
+ return "0"
def visit_clauselist(self, clauselist, order_by_select=None, **kw):
if order_by_select is not None:
@@ -619,6 +747,7 @@ class SQLCompiler(engine.Compiled):
def function_argspec(self, func, **kwargs):
return func.clause_expr._compiler_dispatch(self, **kwargs)
+
def visit_compound_select(self, cs, asfrom=False,
parens=True, compound_index=0, **kwargs):
toplevel = not self.stack
@@ -684,11 +813,23 @@ class SQLCompiler(engine.Compiled):
raise exc.CompileError(
"Unary expression has no operator or modifier")
+ def visit_istrue_unary_operator(self, element, operator, **kw):
+ if self.dialect.supports_native_boolean:
+ return self.process(element.element, **kw)
+ else:
+ return "%s = 1" % self.process(element.element, **kw)
+
+ def visit_isfalse_unary_operator(self, element, operator, **kw):
+ if self.dialect.supports_native_boolean:
+ return "NOT %s" % self.process(element.element, **kw)
+ else:
+ return "%s = 0" % self.process(element.element, **kw)
+
def visit_binary(self, binary, **kw):
# don't allow "? = ?" to render
if self.ansi_bind_rules and \
- isinstance(binary.left, sql.BindParameter) and \
- isinstance(binary.right, sql.BindParameter):
+ isinstance(binary.left, elements.BindParameter) and \
+ isinstance(binary.right, elements.BindParameter):
kw['literal_binds'] = True
operator = binary.operator
@@ -728,7 +869,7 @@ class SQLCompiler(engine.Compiled):
@util.memoized_property
def _like_percent_literal(self):
- return sql.literal_column("'%'", type_=types.String())
+ return elements.literal_column("'%'", type_=sqltypes.STRINGTYPE)
def visit_contains_op_binary(self, binary, operator, **kw):
binary = binary._clone()
@@ -772,39 +913,49 @@ class SQLCompiler(engine.Compiled):
def visit_like_op_binary(self, binary, operator, **kw):
escape = binary.modifiers.get("escape", None)
+
+ # TODO: use ternary here, not "and"/ "or"
return '%s LIKE %s' % (
binary.left._compiler_dispatch(self, **kw),
binary.right._compiler_dispatch(self, **kw)) \
- + (escape and
- (' ESCAPE ' + self.render_literal_value(escape, None))
- or '')
+ + (
+ ' ESCAPE ' +
+ self.render_literal_value(escape, sqltypes.STRINGTYPE)
+ if escape else ''
+ )
def visit_notlike_op_binary(self, binary, operator, **kw):
escape = binary.modifiers.get("escape", None)
return '%s NOT LIKE %s' % (
binary.left._compiler_dispatch(self, **kw),
binary.right._compiler_dispatch(self, **kw)) \
- + (escape and
- (' ESCAPE ' + self.render_literal_value(escape, None))
- or '')
+ + (
+ ' ESCAPE ' +
+ self.render_literal_value(escape, sqltypes.STRINGTYPE)
+ if escape else ''
+ )
def visit_ilike_op_binary(self, binary, operator, **kw):
escape = binary.modifiers.get("escape", None)
return 'lower(%s) LIKE lower(%s)' % (
binary.left._compiler_dispatch(self, **kw),
binary.right._compiler_dispatch(self, **kw)) \
- + (escape and
- (' ESCAPE ' + self.render_literal_value(escape, None))
- or '')
+ + (
+ ' ESCAPE ' +
+ self.render_literal_value(escape, sqltypes.STRINGTYPE)
+ if escape else ''
+ )
def visit_notilike_op_binary(self, binary, operator, **kw):
escape = binary.modifiers.get("escape", None)
return 'lower(%s) NOT LIKE lower(%s)' % (
binary.left._compiler_dispatch(self, **kw),
binary.right._compiler_dispatch(self, **kw)) \
- + (escape and
- (' ESCAPE ' + self.render_literal_value(escape, None))
- or '')
+ + (
+ ' ESCAPE ' +
+ self.render_literal_value(escape, sqltypes.STRINGTYPE)
+ if escape else ''
+ )
def visit_bindparam(self, bindparam, within_columns_clause=False,
literal_binds=False,
@@ -820,8 +971,9 @@ class SQLCompiler(engine.Compiled):
(within_columns_clause and \
self.ansi_bind_rules):
if bindparam.value is None:
- raise exc.CompileError("Bind parameter without a "
- "renderable value not allowed here.")
+ raise exc.CompileError("Bind parameter '%s' without a "
+ "renderable value not allowed here."
+ % bindparam.key)
return self.render_literal_bindparam(bindparam,
within_columns_clause=True, **kwargs)
@@ -851,13 +1003,10 @@ class SQLCompiler(engine.Compiled):
self.binds[bindparam.key] = self.binds[name] = bindparam
- return self.bindparam_string(name, quote=bindparam.quote, **kwargs)
+ return self.bindparam_string(name, **kwargs)
def render_literal_bindparam(self, bindparam, **kw):
value = bindparam.value
- processor = bindparam.type._cached_bind_processor(self.dialect)
- if processor:
- value = processor(value)
return self.render_literal_value(value, bindparam.type)
def render_literal_value(self, value, type_):
@@ -870,15 +1019,10 @@ class SQLCompiler(engine.Compiled):
of the DBAPI.
"""
- if isinstance(value, util.string_types):
- value = value.replace("'", "''")
- return "'%s'" % value
- elif value is None:
- return "NULL"
- elif isinstance(value, (float, ) + util.int_types):
- return repr(value)
- elif isinstance(value, decimal.Decimal):
- return str(value)
+
+ processor = type_._cached_literal_processor(self.dialect)
+ if processor:
+ return processor(value)
else:
raise NotImplementedError(
"Don't know how to literal-quote value %r" % value)
@@ -888,7 +1032,7 @@ class SQLCompiler(engine.Compiled):
return self.bind_names[bindparam]
bind_name = bindparam.key
- if isinstance(bind_name, sql._truncated_label):
+ if isinstance(bind_name, elements._truncated_label):
bind_name = self._truncated_identifier("bindparam", bind_name)
# add to bind_names for translation
@@ -921,8 +1065,7 @@ class SQLCompiler(engine.Compiled):
self.anon_map[derived] = anonymous_counter + 1
return derived + "_" + str(anonymous_counter)
- def bindparam_string(self, name, quote=None,
- positional_names=None, **kw):
+ def bindparam_string(self, name, positional_names=None, **kw):
if self.positional:
if positional_names is not None:
positional_names.append(name)
@@ -937,7 +1080,7 @@ class SQLCompiler(engine.Compiled):
if self.positional:
kwargs['positional_names'] = self.cte_positional
- if isinstance(cte.name, sql._truncated_label):
+ if isinstance(cte.name, elements._truncated_label):
cte_name = self._truncated_identifier("alias", cte.name)
else:
cte_name = cte.name
@@ -947,7 +1090,7 @@ class SQLCompiler(engine.Compiled):
# we've generated a same-named CTE that we are enclosed in,
# or this is the same CTE. just return the name.
if cte in existing_cte._restates or cte is existing_cte:
- return cte_name
+ return self.preparer.format_alias(cte, cte_name)
elif existing_cte in cte._restates:
# we've generated a same-named CTE that is
# enclosed in us - we take precedence, so
@@ -961,19 +1104,24 @@ class SQLCompiler(engine.Compiled):
self.ctes_by_name[cte_name] = cte
- if cte.cte_alias:
- if isinstance(cte.cte_alias, sql._truncated_label):
- cte_alias = self._truncated_identifier("alias", cte.cte_alias)
- else:
- cte_alias = cte.cte_alias
- if not cte.cte_alias and cte not in self.ctes:
+ if cte._cte_alias is not None:
+ orig_cte = cte._cte_alias
+ if orig_cte not in self.ctes:
+ self.visit_cte(orig_cte)
+ cte_alias_name = cte._cte_alias.name
+ if isinstance(cte_alias_name, elements._truncated_label):
+ cte_alias_name = self._truncated_identifier("alias", cte_alias_name)
+ else:
+ orig_cte = cte
+ cte_alias_name = None
+ if not cte_alias_name and cte not in self.ctes:
if cte.recursive:
self.ctes_recursive = True
text = self.preparer.format_alias(cte, cte_name)
if cte.recursive:
- if isinstance(cte.original, sql.Select):
+ if isinstance(cte.original, selectable.Select):
col_source = cte.original
- elif isinstance(cte.original, sql.CompoundSelect):
+ elif isinstance(cte.original, selectable.CompoundSelect):
col_source = cte.original.selects[0]
else:
assert False
@@ -989,9 +1137,10 @@ class SQLCompiler(engine.Compiled):
self, asfrom=True, **kwargs
)
self.ctes[cte] = text
+
if asfrom:
- if cte.cte_alias:
- text = self.preparer.format_alias(cte, cte_alias)
+ if cte_alias_name:
+ text = self.preparer.format_alias(cte, cte_alias_name)
text += " AS " + cte_name
else:
return self.preparer.format_alias(cte, cte_name)
@@ -1001,7 +1150,7 @@ class SQLCompiler(engine.Compiled):
iscrud=False,
fromhints=None, **kwargs):
if asfrom or ashint:
- if isinstance(alias.name, sql._truncated_label):
+ if isinstance(alias.name, elements._truncated_label):
alias_name = self._truncated_identifier("alias", alias.name)
else:
alias_name = alias.name
@@ -1059,7 +1208,7 @@ class SQLCompiler(engine.Compiled):
if not within_columns_clause:
result_expr = col_expr
- elif isinstance(column, sql.Label):
+ elif isinstance(column, elements.Label):
if col_expr is not column:
result_expr = _CompileLabel(
col_expr,
@@ -1078,23 +1227,23 @@ class SQLCompiler(engine.Compiled):
elif \
asfrom and \
- isinstance(column, sql.ColumnClause) and \
+ isinstance(column, elements.ColumnClause) and \
not column.is_literal and \
column.table is not None and \
- not isinstance(column.table, sql.Select):
+ not isinstance(column.table, selectable.Select):
result_expr = _CompileLabel(col_expr,
- sql._as_truncated(column.name),
+ elements._as_truncated(column.name),
alt_names=(column.key,))
elif not isinstance(column,
- (sql.UnaryExpression, sql.TextClause)) \
+ (elements.UnaryExpression, elements.TextClause)) \
and (not hasattr(column, 'name') or \
- isinstance(column, sql.Function)):
+ isinstance(column, functions.Function)):
result_expr = _CompileLabel(col_expr, column.anon_label)
elif col_expr is not column:
# TODO: are we sure "column" has a .name and .key here ?
- # assert isinstance(column, sql.ColumnClause)
+ # assert isinstance(column, elements.ColumnClause)
result_expr = _CompileLabel(col_expr,
- sql._as_truncated(column.name),
+ elements._as_truncated(column.name),
alt_names=(column.key,))
else:
result_expr = col_expr
@@ -1137,8 +1286,8 @@ class SQLCompiler(engine.Compiled):
# as this whole system won't work for custom Join/Select
# subclasses where compilation routines
# call down to compiler.visit_join(), compiler.visit_select()
- join_name = sql.Join.__visit_name__
- select_name = sql.Select.__visit_name__
+ join_name = selectable.Join.__visit_name__
+ select_name = selectable.Select.__visit_name__
def visit(element, **kw):
if element in column_translate[-1]:
@@ -1150,24 +1299,27 @@ class SQLCompiler(engine.Compiled):
newelem = cloned[element] = element._clone()
if newelem.__visit_name__ is join_name and \
- isinstance(newelem.right, sql.FromGrouping):
+ isinstance(newelem.right, selectable.FromGrouping):
newelem._reset_exported()
newelem.left = visit(newelem.left, **kw)
right = visit(newelem.right, **kw)
- selectable = sql.select(
+ selectable_ = selectable.Select(
[right.element],
use_labels=True).alias()
- for c in selectable.c:
- c._label = c._key_label = c.name
+ for c in selectable_.c:
+ c._key_label = c.key
+ c._label = c.name
+
translate_dict = dict(
- zip(right.element.c, selectable.c)
- )
- translate_dict[right.element.left] = selectable
- translate_dict[right.element.right] = selectable
+ zip(newelem.right.element.c, selectable_.c)
+ )
+
+ translate_dict[right.element.left] = selectable_
+ translate_dict[right.element.right] = selectable_
# propagate translations that we've gained
# from nested visit(newelem.right) outwards
@@ -1183,7 +1335,8 @@ class SQLCompiler(engine.Compiled):
column_translate[-1].update(translate_dict)
- newelem.right = selectable
+ newelem.right = selectable_
+
newelem.onclause = visit(newelem.onclause, **kw)
elif newelem.__visit_name__ is select_name:
column_translate.append({})
@@ -1199,6 +1352,7 @@ class SQLCompiler(engine.Compiled):
def _transform_result_map_for_nested_joins(self, select, transformed_select):
inner_col = dict((c._key_label, c) for
c in transformed_select.inner_columns)
+
d = dict(
(inner_col[c._key_label], c)
for c in select.inner_columns
@@ -1291,7 +1445,7 @@ class SQLCompiler(engine.Compiled):
explicit_correlate_froms=correlate_froms,
implicit_correlate_froms=asfrom_froms)
- new_correlate_froms = set(sql._from_objects(*froms))
+ new_correlate_froms = set(selectable._from_objects(*froms))
all_correlate_froms = new_correlate_froms.union(correlate_froms)
new_entry = {
@@ -1382,9 +1536,11 @@ class SQLCompiler(engine.Compiled):
text += self.order_by_clause(select,
order_by_select=order_by_select, **kwargs)
+
if select._limit is not None or select._offset is not None:
text += self.limit_clause(select)
- if select.for_update:
+
+ if select._for_update_arg is not None:
text += self.for_update_clause(select)
if self.ctes and \
@@ -1440,10 +1596,7 @@ class SQLCompiler(engine.Compiled):
return ""
def for_update_clause(self, select):
- if select.for_update:
- return " FOR UPDATE"
- else:
- return ""
+ return " FOR UPDATE"
def returning_clause(self, stmt, returning_cols):
raise exc.CompileError(
@@ -1453,23 +1606,21 @@ class SQLCompiler(engine.Compiled):
def limit_clause(self, select):
text = ""
if select._limit is not None:
- text += "\n LIMIT " + self.process(sql.literal(select._limit))
+ text += "\n LIMIT " + self.process(elements.literal(select._limit))
if select._offset is not None:
if select._limit is None:
text += "\n LIMIT -1"
- text += " OFFSET " + self.process(sql.literal(select._offset))
+ text += " OFFSET " + self.process(elements.literal(select._offset))
return text
def visit_table(self, table, asfrom=False, iscrud=False, ashint=False,
fromhints=None, **kwargs):
if asfrom or ashint:
if getattr(table, "schema", None):
- ret = self.preparer.quote_schema(table.schema,
- table.quote_schema) + \
- "." + self.preparer.quote(table.name,
- table.quote)
+ ret = self.preparer.quote_schema(table.schema) + \
+ "." + self.preparer.quote(table.name)
else:
- ret = self.preparer.quote(table.name, table.quote)
+ ret = self.preparer.quote(table.name)
if fromhints and table in fromhints:
ret = self.format_from_hint_text(ret, table,
fromhints[table], iscrud)
@@ -1488,7 +1639,7 @@ class SQLCompiler(engine.Compiled):
def visit_insert(self, insert_stmt, **kw):
self.isinsert = True
- colparams = self._get_colparams(insert_stmt)
+ colparams = self._get_colparams(insert_stmt, **kw)
if not colparams and \
not self.dialect.supports_default_values and \
@@ -1621,7 +1772,7 @@ class SQLCompiler(engine.Compiled):
table_text = self.update_tables_clause(update_stmt, update_stmt.table,
extra_froms, **kw)
- colparams = self._get_colparams(update_stmt, extra_froms)
+ colparams = self._get_colparams(update_stmt, **kw)
if update_stmt._hints:
dialect_hints = dict([
@@ -1651,11 +1802,12 @@ class SQLCompiler(engine.Compiled):
'=' + c[1] for c in colparams
)
- if update_stmt._returning:
- self.returning = update_stmt._returning
+ if self.returning or update_stmt._returning:
+ if not self.returning:
+ self.returning = update_stmt._returning
if self.returning_precedes_values:
text += " " + self.returning_clause(
- update_stmt, update_stmt._returning)
+ update_stmt, self.returning)
if extra_froms:
extra_from_text = self.update_from_clause(
@@ -1675,7 +1827,7 @@ class SQLCompiler(engine.Compiled):
if self.returning and not self.returning_precedes_values:
text += " " + self.returning_clause(
- update_stmt, update_stmt._returning)
+ update_stmt, self.returning)
self.stack.pop(-1)
@@ -1684,13 +1836,45 @@ class SQLCompiler(engine.Compiled):
def _create_crud_bind_param(self, col, value, required=False, name=None):
if name is None:
name = col.key
- bindparam = sql.bindparam(name, value,
- type_=col.type, required=required,
- quote=col.quote)
+ bindparam = elements.BindParameter(name, value,
+ type_=col.type, required=required)
bindparam._is_crud = True
return bindparam._compiler_dispatch(self)
- def _get_colparams(self, stmt, extra_tables=None):
+ @util.memoized_property
+ def _key_getters_for_crud_column(self):
+ if self.isupdate and self.statement._extra_froms:
+ # when extra tables are present, refer to the columns
+ # in those extra tables as table-qualified, including in
+ # dictionaries and when rendering bind param names.
+ # the "main" table of the statement remains unqualified,
+ # allowing the most compatibility with a non-multi-table
+ # statement.
+ _et = set(self.statement._extra_froms)
+ def _column_as_key(key):
+ str_key = elements._column_as_key(key)
+ if hasattr(key, 'table') and key.table in _et:
+ return (key.table.name, str_key)
+ else:
+ return str_key
+ def _getattr_col_key(col):
+ if col.table in _et:
+ return (col.table.name, col.key)
+ else:
+ return col.key
+ def _col_bind_name(col):
+ if col.table in _et:
+ return "%s_%s" % (col.table.name, col.key)
+ else:
+ return col.key
+
+ else:
+ _column_as_key = elements._column_as_key
+ _getattr_col_key = _col_bind_name = operator.attrgetter("key")
+
+ return _column_as_key, _getattr_col_key, _col_bind_name
+
+ def _get_colparams(self, stmt, **kw):
"""create a set of tuples representing column/string pairs for use
in an INSERT or UPDATE statement.
@@ -1719,12 +1903,18 @@ class SQLCompiler(engine.Compiled):
else:
stmt_parameters = stmt.parameters
+ # getters - these are normally just column.key,
+ # but in the case of mysql multi-table update, the rules for
+ # .key must conditionally take tablename into account
+ _column_as_key, _getattr_col_key, _col_bind_name = \
+ self._key_getters_for_crud_column
+
# if we have statement parameters - set defaults in the
# compiled params
if self.column_keys is None:
parameters = {}
else:
- parameters = dict((sql._column_as_key(key), REQUIRED)
+ parameters = dict((_column_as_key(key), REQUIRED)
for key in self.column_keys
if not stmt_parameters or
key not in stmt_parameters)
@@ -1734,17 +1924,19 @@ class SQLCompiler(engine.Compiled):
if stmt_parameters is not None:
for k, v in stmt_parameters.items():
- colkey = sql._column_as_key(k)
+ colkey = _column_as_key(k)
if colkey is not None:
parameters.setdefault(colkey, v)
else:
# a non-Column expression on the left side;
# add it to values() in an "as-is" state,
# coercing right side to bound param
- if sql._is_literal(v):
- v = self.process(sql.bindparam(None, v, type_=k.type))
+ if elements._is_literal(v):
+ v = self.process(
+ elements.BindParameter(None, v, type_=k.type),
+ **kw)
else:
- v = self.process(v.self_group())
+ v = self.process(v.self_group(), **kw)
values.append((k, v))
@@ -1756,30 +1948,44 @@ class SQLCompiler(engine.Compiled):
self.dialect.implicit_returning and \
stmt.table.implicit_returning
+ if self.isinsert:
+ implicit_return_defaults = implicit_returning and stmt._return_defaults
+ elif self.isupdate:
+ implicit_return_defaults = self.dialect.implicit_returning and \
+ stmt.table.implicit_returning and \
+ stmt._return_defaults
+
+ if implicit_return_defaults:
+ if stmt._return_defaults is True:
+ implicit_return_defaults = set(stmt.table.c)
+ else:
+ implicit_return_defaults = set(stmt._return_defaults)
+
postfetch_lastrowid = need_pks and self.dialect.postfetch_lastrowid
check_columns = {}
+
# special logic that only occurs for multi-table UPDATE
# statements
- if extra_tables and stmt_parameters:
+ if self.isupdate and stmt._extra_froms and stmt_parameters:
normalized_params = dict(
- (sql._clause_element_as_expr(c), param)
+ (elements._clause_element_as_expr(c), param)
for c, param in stmt_parameters.items()
)
- assert self.isupdate
affected_tables = set()
- for t in extra_tables:
+ for t in stmt._extra_froms:
for c in t.c:
if c in normalized_params:
affected_tables.add(t)
- check_columns[c.key] = c
+ check_columns[_getattr_col_key(c)] = c
value = normalized_params[c]
- if sql._is_literal(value):
+ if elements._is_literal(value):
value = self._create_crud_bind_param(
- c, value, required=value is REQUIRED)
+ c, value, required=value is REQUIRED,
+ name=_col_bind_name(c))
else:
self.postfetch.append(c)
- value = self.process(value.self_group())
+ value = self.process(value.self_group(), **kw)
values.append((c, value))
# determine tables which are actually
# to be updated - process onupdate and
@@ -1791,36 +1997,60 @@ class SQLCompiler(engine.Compiled):
elif c.onupdate is not None and not c.onupdate.is_sequence:
if c.onupdate.is_clause_element:
values.append(
- (c, self.process(c.onupdate.arg.self_group()))
+ (c, self.process(
+ c.onupdate.arg.self_group(),
+ **kw)
+ )
)
self.postfetch.append(c)
else:
values.append(
- (c, self._create_crud_bind_param(c, None))
+ (c, self._create_crud_bind_param(
+ c, None, name=_col_bind_name(c)
+ )
+ )
)
self.prefetch.append(c)
elif c.server_onupdate is not None:
self.postfetch.append(c)
- # iterating through columns at the top to maintain ordering.
- # otherwise we might iterate through individual sets of
- # "defaults", "primary key cols", etc.
- for c in stmt.table.columns:
- if c.key in parameters and c.key not in check_columns:
- value = parameters.pop(c.key)
- if sql._is_literal(value):
+ if self.isinsert and stmt.select_names:
+ # for an insert from select, we can only use names that
+ # are given, so only select for those names.
+ cols = (stmt.table.c[_column_as_key(name)]
+ for name in stmt.select_names)
+ else:
+ # iterate through all table columns to maintain
+ # ordering, even for those cols that aren't included
+ cols = stmt.table.columns
+
+ for c in cols:
+ col_key = _getattr_col_key(c)
+ if col_key in parameters and col_key not in check_columns:
+ value = parameters.pop(col_key)
+ if elements._is_literal(value):
value = self._create_crud_bind_param(
c, value, required=value is REQUIRED,
- name=c.key
+ name=_col_bind_name(c)
if not stmt._has_multi_parameters
- else "%s_0" % c.key
+ else "%s_0" % _col_bind_name(c)
)
- elif c.primary_key and implicit_returning:
- self.returning.append(c)
- value = self.process(value.self_group())
else:
- self.postfetch.append(c)
- value = self.process(value.self_group())
+ if isinstance(value, elements.BindParameter) and \
+ value.type._isnull:
+ value = value._clone()
+ value.type = c.type
+
+ if c.primary_key and implicit_returning:
+ self.returning.append(c)
+ value = self.process(value.self_group(), **kw)
+ elif implicit_return_defaults and \
+ c in implicit_return_defaults:
+ self.returning.append(c)
+ value = self.process(value.self_group(), **kw)
+ else:
+ self.postfetch.append(c)
+ value = self.process(value.self_group(), **kw)
values.append((c, value))
elif self.isinsert:
@@ -1838,13 +2068,13 @@ class SQLCompiler(engine.Compiled):
if self.dialect.supports_sequences and \
(not c.default.optional or \
not self.dialect.sequences_optional):
- proc = self.process(c.default)
+ proc = self.process(c.default, **kw)
values.append((c, proc))
self.returning.append(c)
elif c.default.is_clause_element:
values.append(
(c,
- self.process(c.default.arg.self_group()))
+ self.process(c.default.arg.self_group(), **kw))
)
self.returning.append(c)
else:
@@ -1855,7 +2085,13 @@ class SQLCompiler(engine.Compiled):
else:
self.returning.append(c)
else:
- if c.default is not None or \
+ if (
+ c.default is not None and
+ (
+ not c.default.is_sequence or
+ self.dialect.supports_sequences
+ )
+ ) or \
c is stmt.table._autoincrement_column and (
self.dialect.supports_sequences or
self.dialect.preexecute_autoincrement_sequences
@@ -1872,16 +2108,22 @@ class SQLCompiler(engine.Compiled):
if self.dialect.supports_sequences and \
(not c.default.optional or \
not self.dialect.sequences_optional):
- proc = self.process(c.default)
+ proc = self.process(c.default, **kw)
values.append((c, proc))
- if not c.primary_key:
+ if implicit_return_defaults and \
+ c in implicit_return_defaults:
+ self.returning.append(c)
+ elif not c.primary_key:
self.postfetch.append(c)
elif c.default.is_clause_element:
values.append(
- (c, self.process(c.default.arg.self_group()))
+ (c, self.process(c.default.arg.self_group(), **kw))
)
- if not c.primary_key:
+ if implicit_return_defaults and \
+ c in implicit_return_defaults:
+ self.returning.append(c)
+ elif not c.primary_key:
# dont add primary key column to postfetch
self.postfetch.append(c)
else:
@@ -1890,32 +2132,49 @@ class SQLCompiler(engine.Compiled):
)
self.prefetch.append(c)
elif c.server_default is not None:
- if not c.primary_key:
+ if implicit_return_defaults and \
+ c in implicit_return_defaults:
+ self.returning.append(c)
+ elif not c.primary_key:
self.postfetch.append(c)
+ elif implicit_return_defaults and \
+ c in implicit_return_defaults:
+ self.returning.append(c)
elif self.isupdate:
if c.onupdate is not None and not c.onupdate.is_sequence:
if c.onupdate.is_clause_element:
values.append(
- (c, self.process(c.onupdate.arg.self_group()))
+ (c, self.process(c.onupdate.arg.self_group(), **kw))
)
- self.postfetch.append(c)
+ if implicit_return_defaults and \
+ c in implicit_return_defaults:
+ self.returning.append(c)
+ else:
+ self.postfetch.append(c)
else:
values.append(
(c, self._create_crud_bind_param(c, None))
)
self.prefetch.append(c)
elif c.server_onupdate is not None:
- self.postfetch.append(c)
+ if implicit_return_defaults and \
+ c in implicit_return_defaults:
+ self.returning.append(c)
+ else:
+ self.postfetch.append(c)
+ elif implicit_return_defaults and \
+ c in implicit_return_defaults:
+ self.returning.append(c)
if parameters and stmt_parameters:
check = set(parameters).intersection(
- sql._column_as_key(k) for k in stmt.parameters
+ _column_as_key(k) for k in stmt.parameters
).difference(check_columns)
if check:
raise exc.CompileError(
"Unconsumed column names: %s" %
- (", ".join(check))
+ (", ".join("%s" % c for c in check))
)
if stmt._has_multi_parameters:
@@ -1924,17 +2183,17 @@ class SQLCompiler(engine.Compiled):
values.extend(
[
- (
- c,
- self._create_crud_bind_param(
- c, row[c.key],
- name="%s_%d" % (c.key, i + 1)
- )
- if c.key in row else param
- )
- for (c, param) in values_0
- ]
- for i, row in enumerate(stmt.parameters[1:])
+ (
+ c,
+ self._create_crud_bind_param(
+ c, row[c.key],
+ name="%s_%d" % (c.key, i + 1)
+ )
+ if c.key in row else param
+ )
+ for (c, param) in values_0
+ ]
+ for i, row in enumerate(stmt.parameters[1:])
)
return values
@@ -2005,7 +2264,7 @@ class SQLCompiler(engine.Compiled):
self.preparer.format_savepoint(savepoint_stmt)
-class DDLCompiler(engine.Compiled):
+class DDLCompiler(Compiled):
@util.memoized_property
def sql_compiler(self):
@@ -2042,11 +2301,11 @@ class DDLCompiler(engine.Compiled):
return self.sql_compiler.post_process_text(ddl.statement % context)
def visit_create_schema(self, create):
- schema = self.preparer.format_schema(create.element, create.quote)
+ schema = self.preparer.format_schema(create.element)
return "CREATE SCHEMA " + schema
def visit_drop_schema(self, drop):
- schema = self.preparer.format_schema(drop.element, drop.quote)
+ schema = self.preparer.format_schema(drop.element)
text = "DROP SCHEMA " + schema
if drop.cascade:
text += " CASCADE"
@@ -2068,11 +2327,13 @@ class DDLCompiler(engine.Compiled):
for create_column in create.columns:
column = create_column.element
try:
- text += separator
- separator = ", \n"
- text += "\t" + self.process(create_column,
+ processed = self.process(create_column,
first_pk=column.primary_key
and not first_pk)
+ if processed is not None:
+ text += separator
+ separator = ", \n"
+ text += "\t" + processed
if column.primary_key:
first_pk = True
except exc.CompileError as ce:
@@ -2093,6 +2354,9 @@ class DDLCompiler(engine.Compiled):
def visit_create_column(self, create, first_pk=False):
column = create.element
+ if column.system:
+ return None
+
text = self.get_column_specification(
column,
first_pk=first_pk
@@ -2156,7 +2420,7 @@ class DDLCompiler(engine.Compiled):
use_schema=include_table_schema),
', '.join(
self.sql_compiler.process(expr,
- include_table=False) for
+ include_table=False, literal_binds=True) for
expr in index.expressions)
)
return text
@@ -2169,13 +2433,12 @@ class DDLCompiler(engine.Compiled):
def _prepared_index_name(self, index, include_schema=False):
if include_schema and index.table is not None and index.table.schema:
schema = index.table.schema
- schema_name = self.preparer.quote_schema(schema,
- index.table.quote_schema)
+ schema_name = self.preparer.quote_schema(schema)
else:
schema_name = None
ident = index.name
- if isinstance(ident, sql._truncated_label):
+ if isinstance(ident, elements._truncated_label):
max_ = self.dialect.max_index_name_length or \
self.dialect.max_identifier_length
if len(ident) > max_:
@@ -2184,9 +2447,7 @@ class DDLCompiler(engine.Compiled):
else:
self.dialect.validate_identifier(ident)
- index_name = self.preparer.quote(
- ident,
- index.quote)
+ index_name = self.preparer.quote(ident)
if schema_name:
index_name = schema_name + "." + index_name
@@ -2246,8 +2507,9 @@ class DDLCompiler(engine.Compiled):
if constraint.name is not None:
text += "CONSTRAINT %s " % \
self.preparer.format_constraint(constraint)
- sqltext = sql_util.expression_as_ddl(constraint.sqltext)
- text += "CHECK (%s)" % self.sql_compiler.process(sqltext)
+ text += "CHECK (%s)" % self.sql_compiler.process(constraint.sqltext,
+ include_table=False,
+ literal_binds=True)
text += self.define_constraint_deferrability(constraint)
return text
@@ -2268,7 +2530,7 @@ class DDLCompiler(engine.Compiled):
text += "CONSTRAINT %s " % \
self.preparer.format_constraint(constraint)
text += "PRIMARY KEY "
- text += "(%s)" % ', '.join(self.preparer.quote(c.name, c.quote)
+ text += "(%s)" % ', '.join(self.preparer.quote(c.name)
for c in constraint)
text += self.define_constraint_deferrability(constraint)
return text
@@ -2281,11 +2543,11 @@ class DDLCompiler(engine.Compiled):
preparer.format_constraint(constraint)
remote_table = list(constraint._elements.values())[0].column.table
text += "FOREIGN KEY(%s) REFERENCES %s (%s)" % (
- ', '.join(preparer.quote(f.parent.name, f.parent.quote)
+ ', '.join(preparer.quote(f.parent.name)
for f in constraint._elements.values()),
self.define_constraint_remote_table(
constraint, remote_table, preparer),
- ', '.join(preparer.quote(f.column.name, f.column.quote)
+ ', '.join(preparer.quote(f.column.name)
for f in constraint._elements.values())
)
text += self.define_constraint_match(constraint)
@@ -2299,12 +2561,14 @@ class DDLCompiler(engine.Compiled):
return preparer.format_table(table)
def visit_unique_constraint(self, constraint):
+ if len(constraint) == 0:
+ return ''
text = ""
if constraint.name is not None:
text += "CONSTRAINT %s " % \
self.preparer.format_constraint(constraint)
text += "UNIQUE (%s)" % (
- ', '.join(self.preparer.quote(c.name, c.quote)
+ ', '.join(self.preparer.quote(c.name)
for c in constraint))
text += self.define_constraint_deferrability(constraint)
return text
@@ -2335,7 +2599,7 @@ class DDLCompiler(engine.Compiled):
return text
-class GenericTypeCompiler(engine.TypeCompiler):
+class GenericTypeCompiler(TypeCompiler):
def visit_FLOAT(self, type_):
return "FLOAT"
@@ -2558,15 +2822,25 @@ class IdentifierPreparer(object):
or not self.legal_characters.match(util.text_type(value))
or (lc_value != value))
- def quote_schema(self, schema, force):
- """Quote a schema.
+ def quote_schema(self, schema, force=None):
+ """Conditionally quote a schema.
+
+ Subclasses can override this to provide database-dependent
+ quoting behavior for schema names.
+
+ the 'force' flag should be considered deprecated.
- Subclasses should override this to provide database-dependent
- quoting behavior.
"""
return self.quote(schema, force)
- def quote(self, ident, force):
+ def quote(self, ident, force=None):
+ """Conditionally quote an identifier.
+
+ the 'force' flag should be considered deprecated.
+ """
+
+ force = getattr(ident, "quote", None)
+
if force is None:
if ident in self._strings:
return self._strings[ident]
@@ -2582,38 +2856,35 @@ class IdentifierPreparer(object):
return ident
def format_sequence(self, sequence, use_schema=True):
- name = self.quote(sequence.name, sequence.quote)
- if not self.omit_schema and use_schema and \
- sequence.schema is not None:
- name = self.quote_schema(sequence.schema, sequence.quote) + \
- "." + name
+ name = self.quote(sequence.name)
+ if not self.omit_schema and use_schema and sequence.schema is not None:
+ name = self.quote_schema(sequence.schema) + "." + name
return name
def format_label(self, label, name=None):
- return self.quote(name or label.name, label.quote)
+ return self.quote(name or label.name)
def format_alias(self, alias, name=None):
- return self.quote(name or alias.name, alias.quote)
+ return self.quote(name or alias.name)
def format_savepoint(self, savepoint, name=None):
- return self.quote(name or savepoint.ident, savepoint.quote)
+ return self.quote(name or savepoint.ident)
def format_constraint(self, constraint):
- return self.quote(constraint.name, constraint.quote)
+ return self.quote(constraint.name)
def format_table(self, table, use_schema=True, name=None):
"""Prepare a quoted table and schema name."""
if name is None:
name = table.name
- result = self.quote(name, table.quote)
+ result = self.quote(name)
if not self.omit_schema and use_schema \
and getattr(table, "schema", None):
- result = self.quote_schema(table.schema, table.quote_schema) + \
- "." + result
+ result = self.quote_schema(table.schema) + "." + result
return result
- def format_schema(self, name, quote):
+ def format_schema(self, name, quote=None):
"""Prepare a quoted schema name."""
return self.quote(name, quote)
@@ -2628,10 +2899,9 @@ class IdentifierPreparer(object):
if use_table:
return self.format_table(
column.table, use_schema=False,
- name=table_name) + "." + \
- self.quote(name, column.quote)
+ name=table_name) + "." + self.quote(name)
else:
- return self.quote(name, column.quote)
+ return self.quote(name)
else:
# literal textual elements get stuck into ColumnClause a lot,
# which shouldn't get quoted
@@ -2651,7 +2921,7 @@ class IdentifierPreparer(object):
if not self.omit_schema and use_schema and \
getattr(table, 'schema', None):
- return (self.quote_schema(table.schema, table.quote_schema),
+ return (self.quote_schema(table.schema),
self.format_table(table, use_schema=False))
else:
return (self.format_table(table, use_schema=False), )
diff --git a/lib/sqlalchemy/sql/ddl.py b/lib/sqlalchemy/sql/ddl.py
new file mode 100644
index 000000000..bda876502
--- /dev/null
+++ b/lib/sqlalchemy/sql/ddl.py
@@ -0,0 +1,864 @@
+# sql/ddl.py
+# Copyright (C) 2009-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+"""
+Provides the hierarchy of DDL-defining schema items as well as routines
+to invoke them for a create/drop call.
+
+"""
+
+from .. import util
+from .elements import ClauseElement
+from .visitors import traverse
+from .base import Executable, _generative, SchemaVisitor, _bind_or_error
+from ..util import topological
+from .. import event
+from .. import exc
+
+class _DDLCompiles(ClauseElement):
+ def _compiler(self, dialect, **kw):
+ """Return a compiler appropriate for this ClauseElement, given a
+ Dialect."""
+
+ return dialect.ddl_compiler(dialect, self, **kw)
+
+
+class DDLElement(Executable, _DDLCompiles):
+ """Base class for DDL expression constructs.
+
+ This class is the base for the general purpose :class:`.DDL` class,
+ as well as the various create/drop clause constructs such as
+ :class:`.CreateTable`, :class:`.DropTable`, :class:`.AddConstraint`,
+ etc.
+
+ :class:`.DDLElement` integrates closely with SQLAlchemy events,
+ introduced in :ref:`event_toplevel`. An instance of one is
+ itself an event receiving callable::
+
+ event.listen(
+ users,
+ 'after_create',
+ AddConstraint(constraint).execute_if(dialect='postgresql')
+ )
+
+ .. seealso::
+
+ :class:`.DDL`
+
+ :class:`.DDLEvents`
+
+ :ref:`event_toplevel`
+
+ :ref:`schema_ddl_sequences`
+
+ """
+
+ _execution_options = Executable.\
+ _execution_options.union({'autocommit': True})
+
+ target = None
+ on = None
+ dialect = None
+ callable_ = None
+
+ def _execute_on_connection(self, connection, multiparams, params):
+ return connection._execute_ddl(self, multiparams, params)
+
+ def execute(self, bind=None, target=None):
+ """Execute this DDL immediately.
+
+ Executes the DDL statement in isolation using the supplied
+ :class:`.Connectable` or
+ :class:`.Connectable` assigned to the ``.bind``
+ property, if not supplied. If the DDL has a conditional ``on``
+ criteria, it will be invoked with None as the event.
+
+ :param bind:
+ Optional, an ``Engine`` or ``Connection``. If not supplied, a valid
+ :class:`.Connectable` must be present in the
+ ``.bind`` property.
+
+ :param target:
+ Optional, defaults to None. The target SchemaItem for the
+ execute call. Will be passed to the ``on`` callable if any,
+ and may also provide string expansion data for the
+ statement. See ``execute_at`` for more information.
+
+ """
+
+ if bind is None:
+ bind = _bind_or_error(self)
+
+ if self._should_execute(target, bind):
+ return bind.execute(self.against(target))
+ else:
+ bind.engine.logger.info(
+ "DDL execution skipped, criteria not met.")
+
+ @util.deprecated("0.7", "See :class:`.DDLEvents`, as well as "
+ ":meth:`.DDLElement.execute_if`.")
+ def execute_at(self, event_name, target):
+ """Link execution of this DDL to the DDL lifecycle of a SchemaItem.
+
+ Links this ``DDLElement`` to a ``Table`` or ``MetaData`` instance,
+ executing it when that schema item is created or dropped. The DDL
+ statement will be executed using the same Connection and transactional
+ context as the Table create/drop itself. The ``.bind`` property of
+ this statement is ignored.
+
+ :param event:
+ One of the events defined in the schema item's ``.ddl_events``;
+ e.g. 'before-create', 'after-create', 'before-drop' or 'after-drop'
+
+ :param target:
+ The Table or MetaData instance for which this DDLElement will
+ be associated with.
+
+ A DDLElement instance can be linked to any number of schema items.
+
+ ``execute_at`` builds on the ``append_ddl_listener`` interface of
+ :class:`.MetaData` and :class:`.Table` objects.
+
+ Caveat: Creating or dropping a Table in isolation will also trigger
+ any DDL set to ``execute_at`` that Table's MetaData. This may change
+ in a future release.
+
+ """
+
+ def call_event(target, connection, **kw):
+ if self._should_execute_deprecated(event_name,
+ target, connection, **kw):
+ return connection.execute(self.against(target))
+
+ event.listen(target, "" + event_name.replace('-', '_'), call_event)
+
+ @_generative
+ def against(self, target):
+ """Return a copy of this DDL against a specific schema item."""
+
+ self.target = target
+
+ @_generative
+ def execute_if(self, dialect=None, callable_=None, state=None):
+ """Return a callable that will execute this
+ DDLElement conditionally.
+
+ Used to provide a wrapper for event listening::
+
+ event.listen(
+ metadata,
+ 'before_create',
+ DDL("my_ddl").execute_if(dialect='postgresql')
+ )
+
+ :param dialect: May be a string, tuple or a callable
+ predicate. If a string, it will be compared to the name of the
+ executing database dialect::
+
+ DDL('something').execute_if(dialect='postgresql')
+
+ If a tuple, specifies multiple dialect names::
+
+ DDL('something').execute_if(dialect=('postgresql', 'mysql'))
+
+ :param callable_: A callable, which will be invoked with
+ four positional arguments as well as optional keyword
+ arguments:
+
+ :ddl:
+ This DDL element.
+
+ :target:
+ The :class:`.Table` or :class:`.MetaData` object which is the
+ target of this event. May be None if the DDL is executed
+ explicitly.
+
+ :bind:
+ The :class:`.Connection` being used for DDL execution
+
+ :tables:
+ Optional keyword argument - a list of Table objects which are to
+ be created/ dropped within a MetaData.create_all() or drop_all()
+ method call.
+
+ :state:
+ Optional keyword argument - will be the ``state`` argument
+ passed to this function.
+
+ :checkfirst:
+ Keyword argument, will be True if the 'checkfirst' flag was
+ set during the call to ``create()``, ``create_all()``,
+ ``drop()``, ``drop_all()``.
+
+ If the callable returns a true value, the DDL statement will be
+ executed.
+
+ :param state: any value which will be passed to the callable\_
+ as the ``state`` keyword argument.
+
+ .. seealso::
+
+ :class:`.DDLEvents`
+
+ :ref:`event_toplevel`
+
+ """
+ self.dialect = dialect
+ self.callable_ = callable_
+ self.state = state
+
+ def _should_execute(self, target, bind, **kw):
+ if self.on is not None and \
+ not self._should_execute_deprecated(None, target, bind, **kw):
+ return False
+
+ if isinstance(self.dialect, util.string_types):
+ if self.dialect != bind.engine.name:
+ return False
+ elif isinstance(self.dialect, (tuple, list, set)):
+ if bind.engine.name not in self.dialect:
+ return False
+ if self.callable_ is not None and \
+ not self.callable_(self, target, bind, state=self.state, **kw):
+ return False
+
+ return True
+
+ def _should_execute_deprecated(self, event, target, bind, **kw):
+ if self.on is None:
+ return True
+ elif isinstance(self.on, util.string_types):
+ return self.on == bind.engine.name
+ elif isinstance(self.on, (tuple, list, set)):
+ return bind.engine.name in self.on
+ else:
+ return self.on(self, event, target, bind, **kw)
+
+ def __call__(self, target, bind, **kw):
+ """Execute the DDL as a ddl_listener."""
+
+ if self._should_execute(target, bind, **kw):
+ return bind.execute(self.against(target))
+
+ def _check_ddl_on(self, on):
+ if (on is not None and
+ (not isinstance(on, util.string_types + (tuple, list, set)) and
+ not util.callable(on))):
+ raise exc.ArgumentError(
+ "Expected the name of a database dialect, a tuple "
+ "of names, or a callable for "
+ "'on' criteria, got type '%s'." % type(on).__name__)
+
+ def bind(self):
+ if self._bind:
+ return self._bind
+
+ def _set_bind(self, bind):
+ self._bind = bind
+ bind = property(bind, _set_bind)
+
+ def _generate(self):
+ s = self.__class__.__new__(self.__class__)
+ s.__dict__ = self.__dict__.copy()
+ return s
+
+
+class DDL(DDLElement):
+ """A literal DDL statement.
+
+ Specifies literal SQL DDL to be executed by the database. DDL objects
+ function as DDL event listeners, and can be subscribed to those events
+ listed in :class:`.DDLEvents`, using either :class:`.Table` or
+ :class:`.MetaData` objects as targets. Basic templating support allows
+ a single DDL instance to handle repetitive tasks for multiple tables.
+
+ Examples::
+
+ from sqlalchemy import event, DDL
+
+ tbl = Table('users', metadata, Column('uid', Integer))
+ event.listen(tbl, 'before_create', DDL('DROP TRIGGER users_trigger'))
+
+ spow = DDL('ALTER TABLE %(table)s SET secretpowers TRUE')
+ event.listen(tbl, 'after_create', spow.execute_if(dialect='somedb'))
+
+ drop_spow = DDL('ALTER TABLE users SET secretpowers FALSE')
+ connection.execute(drop_spow)
+
+ When operating on Table events, the following ``statement``
+ string substitutions are available::
+
+ %(table)s - the Table name, with any required quoting applied
+ %(schema)s - the schema name, with any required quoting applied
+ %(fullname)s - the Table name including schema, quoted if needed
+
+ The DDL's "context", if any, will be combined with the standard
+ substitutions noted above. Keys present in the context will override
+ the standard substitutions.
+
+ """
+
+ __visit_name__ = "ddl"
+
+ def __init__(self, statement, on=None, context=None, bind=None):
+ """Create a DDL statement.
+
+ :param statement:
+ A string or unicode string to be executed. Statements will be
+ processed with Python's string formatting operator. See the
+ ``context`` argument and the ``execute_at`` method.
+
+ A literal '%' in a statement must be escaped as '%%'.
+
+ SQL bind parameters are not available in DDL statements.
+
+ :param on:
+ .. deprecated:: 0.7
+ See :meth:`.DDLElement.execute_if`.
+
+ Optional filtering criteria. May be a string, tuple or a callable
+ predicate. If a string, it will be compared to the name of the
+ executing database dialect::
+
+ DDL('something', on='postgresql')
+
+ If a tuple, specifies multiple dialect names::
+
+ DDL('something', on=('postgresql', 'mysql'))
+
+ If a callable, it will be invoked with four positional arguments
+ as well as optional keyword arguments:
+
+ :ddl:
+ This DDL element.
+
+ :event:
+ The name of the event that has triggered this DDL, such as
+ 'after-create'. Will be None if the DDL is executed explicitly.
+
+ :target:
+ The ``Table`` or ``MetaData`` object which is the target of
+ this event. May be None if the DDL is executed explicitly.
+
+ :connection:
+ The ``Connection`` being used for DDL execution
+
+ :tables:
+ Optional keyword argument - a list of Table objects which are to
+ be created/ dropped within a MetaData.create_all() or drop_all()
+ method call.
+
+
+ If the callable returns a true value, the DDL statement will be
+ executed.
+
+ :param context:
+ Optional dictionary, defaults to None. These values will be
+ available for use in string substitutions on the DDL statement.
+
+ :param bind:
+ Optional. A :class:`.Connectable`, used by
+ default when ``execute()`` is invoked without a bind argument.
+
+
+ .. seealso::
+
+ :class:`.DDLEvents`
+
+ :mod:`sqlalchemy.event`
+
+ """
+
+ if not isinstance(statement, util.string_types):
+ raise exc.ArgumentError(
+ "Expected a string or unicode SQL statement, got '%r'" %
+ statement)
+
+ self.statement = statement
+ self.context = context or {}
+
+ self._check_ddl_on(on)
+ self.on = on
+ self._bind = bind
+
+ def __repr__(self):
+ return '<%s@%s; %s>' % (
+ type(self).__name__, id(self),
+ ', '.join([repr(self.statement)] +
+ ['%s=%r' % (key, getattr(self, key))
+ for key in ('on', 'context')
+ if getattr(self, key)]))
+
+
+
+class _CreateDropBase(DDLElement):
+ """Base class for DDL constructs that represent CREATE and DROP or
+ equivalents.
+
+ The common theme of _CreateDropBase is a single
+ ``element`` attribute which refers to the element
+ to be created or dropped.
+
+ """
+
+ def __init__(self, element, on=None, bind=None):
+ self.element = element
+ self._check_ddl_on(on)
+ self.on = on
+ self.bind = bind
+
+ def _create_rule_disable(self, compiler):
+ """Allow disable of _create_rule using a callable.
+
+ Pass to _create_rule using
+ util.portable_instancemethod(self._create_rule_disable)
+ to retain serializability.
+
+ """
+ return False
+
+
+class CreateSchema(_CreateDropBase):
+ """Represent a CREATE SCHEMA statement.
+
+ .. versionadded:: 0.7.4
+
+ The argument here is the string name of the schema.
+
+ """
+
+ __visit_name__ = "create_schema"
+
+ def __init__(self, name, quote=None, **kw):
+ """Create a new :class:`.CreateSchema` construct."""
+
+ self.quote = quote
+ super(CreateSchema, self).__init__(name, **kw)
+
+
+class DropSchema(_CreateDropBase):
+ """Represent a DROP SCHEMA statement.
+
+ The argument here is the string name of the schema.
+
+ .. versionadded:: 0.7.4
+
+ """
+
+ __visit_name__ = "drop_schema"
+
+ def __init__(self, name, quote=None, cascade=False, **kw):
+ """Create a new :class:`.DropSchema` construct."""
+
+ self.quote = quote
+ self.cascade = cascade
+ super(DropSchema, self).__init__(name, **kw)
+
+
+class CreateTable(_CreateDropBase):
+ """Represent a CREATE TABLE statement."""
+
+ __visit_name__ = "create_table"
+
+ def __init__(self, element, on=None, bind=None):
+ """Create a :class:`.CreateTable` construct.
+
+ :param element: a :class:`.Table` that's the subject
+ of the CREATE
+ :param on: See the description for 'on' in :class:`.DDL`.
+ :param bind: See the description for 'bind' in :class:`.DDL`.
+
+ """
+ super(CreateTable, self).__init__(element, on=on, bind=bind)
+ self.columns = [CreateColumn(column)
+ for column in element.columns
+ ]
+
+
+class _DropView(_CreateDropBase):
+ """Semi-public 'DROP VIEW' construct.
+
+ Used by the test suite for dialect-agnostic drops of views.
+ This object will eventually be part of a public "view" API.
+
+ """
+ __visit_name__ = "drop_view"
+
+
+class CreateColumn(_DDLCompiles):
+ """Represent a :class:`.Column` as rendered in a CREATE TABLE statement,
+ via the :class:`.CreateTable` construct.
+
+ This is provided to support custom column DDL within the generation
+ of CREATE TABLE statements, by using the
+ compiler extension documented in :ref:`sqlalchemy.ext.compiler_toplevel`
+ to extend :class:`.CreateColumn`.
+
+ Typical integration is to examine the incoming :class:`.Column`
+ object, and to redirect compilation if a particular flag or condition
+ is found::
+
+ from sqlalchemy import schema
+ from sqlalchemy.ext.compiler import compiles
+
+ @compiles(schema.CreateColumn)
+ def compile(element, compiler, **kw):
+ column = element.element
+
+ if "special" not in column.info:
+ return compiler.visit_create_column(element, **kw)
+
+ text = "%s SPECIAL DIRECTIVE %s" % (
+ column.name,
+ compiler.type_compiler.process(column.type)
+ )
+ default = compiler.get_column_default_string(column)
+ if default is not None:
+ text += " DEFAULT " + default
+
+ if not column.nullable:
+ text += " NOT NULL"
+
+ if column.constraints:
+ text += " ".join(
+ compiler.process(const)
+ for const in column.constraints)
+ return text
+
+ The above construct can be applied to a :class:`.Table` as follows::
+
+ from sqlalchemy import Table, Metadata, Column, Integer, String
+ from sqlalchemy import schema
+
+ metadata = MetaData()
+
+ table = Table('mytable', MetaData(),
+ Column('x', Integer, info={"special":True}, primary_key=True),
+ Column('y', String(50)),
+ Column('z', String(20), info={"special":True})
+ )
+
+ metadata.create_all(conn)
+
+ Above, the directives we've added to the :attr:`.Column.info` collection
+ will be detected by our custom compilation scheme::
+
+ CREATE TABLE mytable (
+ x SPECIAL DIRECTIVE INTEGER NOT NULL,
+ y VARCHAR(50),
+ z SPECIAL DIRECTIVE VARCHAR(20),
+ PRIMARY KEY (x)
+ )
+
+ The :class:`.CreateColumn` construct can also be used to skip certain
+ columns when producing a ``CREATE TABLE``. This is accomplished by
+ creating a compilation rule that conditionally returns ``None``.
+ This is essentially how to produce the same effect as using the
+ ``system=True`` argument on :class:`.Column`, which marks a column
+ as an implicitly-present "system" column.
+
+ For example, suppose we wish to produce a :class:`.Table` which skips
+ rendering of the Postgresql ``xmin`` column against the Postgresql backend,
+ but on other backends does render it, in anticipation of a triggered rule.
+ A conditional compilation rule could skip this name only on Postgresql::
+
+ from sqlalchemy.schema import CreateColumn
+
+ @compiles(CreateColumn, "postgresql")
+ def skip_xmin(element, compiler, **kw):
+ if element.element.name == 'xmin':
+ return None
+ else:
+ return compiler.visit_create_column(element, **kw)
+
+
+ my_table = Table('mytable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('xmin', Integer)
+ )
+
+ Above, a :class:`.CreateTable` construct will generate a ``CREATE TABLE``
+ which only includes the ``id`` column in the string; the ``xmin`` column
+ will be omitted, but only against the Postgresql backend.
+
+ .. versionadded:: 0.8.3 The :class:`.CreateColumn` construct supports
+ skipping of columns by returning ``None`` from a custom compilation rule.
+
+ .. versionadded:: 0.8 The :class:`.CreateColumn` construct was added
+ to support custom column creation styles.
+
+ """
+ __visit_name__ = 'create_column'
+
+ def __init__(self, element):
+ self.element = element
+
+
+class DropTable(_CreateDropBase):
+ """Represent a DROP TABLE statement."""
+
+ __visit_name__ = "drop_table"
+
+
+class CreateSequence(_CreateDropBase):
+ """Represent a CREATE SEQUENCE statement."""
+
+ __visit_name__ = "create_sequence"
+
+
+class DropSequence(_CreateDropBase):
+ """Represent a DROP SEQUENCE statement."""
+
+ __visit_name__ = "drop_sequence"
+
+
+class CreateIndex(_CreateDropBase):
+ """Represent a CREATE INDEX statement."""
+
+ __visit_name__ = "create_index"
+
+
+class DropIndex(_CreateDropBase):
+ """Represent a DROP INDEX statement."""
+
+ __visit_name__ = "drop_index"
+
+
+class AddConstraint(_CreateDropBase):
+ """Represent an ALTER TABLE ADD CONSTRAINT statement."""
+
+ __visit_name__ = "add_constraint"
+
+ def __init__(self, element, *args, **kw):
+ super(AddConstraint, self).__init__(element, *args, **kw)
+ element._create_rule = util.portable_instancemethod(
+ self._create_rule_disable)
+
+
+class DropConstraint(_CreateDropBase):
+ """Represent an ALTER TABLE DROP CONSTRAINT statement."""
+
+ __visit_name__ = "drop_constraint"
+
+ def __init__(self, element, cascade=False, **kw):
+ self.cascade = cascade
+ super(DropConstraint, self).__init__(element, **kw)
+ element._create_rule = util.portable_instancemethod(
+ self._create_rule_disable)
+
+
+class DDLBase(SchemaVisitor):
+ def __init__(self, connection):
+ self.connection = connection
+
+
+class SchemaGenerator(DDLBase):
+
+ def __init__(self, dialect, connection, checkfirst=False,
+ tables=None, **kwargs):
+ super(SchemaGenerator, self).__init__(connection, **kwargs)
+ self.checkfirst = checkfirst
+ self.tables = tables
+ self.preparer = dialect.identifier_preparer
+ self.dialect = dialect
+ self.memo = {}
+
+ def _can_create_table(self, table):
+ self.dialect.validate_identifier(table.name)
+ if table.schema:
+ self.dialect.validate_identifier(table.schema)
+ return not self.checkfirst or \
+ not self.dialect.has_table(self.connection,
+ table.name, schema=table.schema)
+
+ def _can_create_sequence(self, sequence):
+ return self.dialect.supports_sequences and \
+ (
+ (not self.dialect.sequences_optional or
+ not sequence.optional) and
+ (
+ not self.checkfirst or
+ not self.dialect.has_sequence(
+ self.connection,
+ sequence.name,
+ schema=sequence.schema)
+ )
+ )
+
+ def visit_metadata(self, metadata):
+ if self.tables is not None:
+ tables = self.tables
+ else:
+ tables = list(metadata.tables.values())
+ collection = [t for t in sort_tables(tables)
+ if self._can_create_table(t)]
+ seq_coll = [s for s in metadata._sequences.values()
+ if s.column is None and self._can_create_sequence(s)]
+
+ metadata.dispatch.before_create(metadata, self.connection,
+ tables=collection,
+ checkfirst=self.checkfirst,
+ _ddl_runner=self)
+
+ for seq in seq_coll:
+ self.traverse_single(seq, create_ok=True)
+
+ for table in collection:
+ self.traverse_single(table, create_ok=True)
+
+ metadata.dispatch.after_create(metadata, self.connection,
+ tables=collection,
+ checkfirst=self.checkfirst,
+ _ddl_runner=self)
+
+ def visit_table(self, table, create_ok=False):
+ if not create_ok and not self._can_create_table(table):
+ return
+
+ table.dispatch.before_create(table, self.connection,
+ checkfirst=self.checkfirst,
+ _ddl_runner=self)
+
+ for column in table.columns:
+ if column.default is not None:
+ self.traverse_single(column.default)
+
+ self.connection.execute(CreateTable(table))
+
+ if hasattr(table, 'indexes'):
+ for index in table.indexes:
+ self.traverse_single(index)
+
+ table.dispatch.after_create(table, self.connection,
+ checkfirst=self.checkfirst,
+ _ddl_runner=self)
+
+ def visit_sequence(self, sequence, create_ok=False):
+ if not create_ok and not self._can_create_sequence(sequence):
+ return
+ self.connection.execute(CreateSequence(sequence))
+
+ def visit_index(self, index):
+ self.connection.execute(CreateIndex(index))
+
+
+class SchemaDropper(DDLBase):
+
+ def __init__(self, dialect, connection, checkfirst=False,
+ tables=None, **kwargs):
+ super(SchemaDropper, self).__init__(connection, **kwargs)
+ self.checkfirst = checkfirst
+ self.tables = tables
+ self.preparer = dialect.identifier_preparer
+ self.dialect = dialect
+ self.memo = {}
+
+ def visit_metadata(self, metadata):
+ if self.tables is not None:
+ tables = self.tables
+ else:
+ tables = list(metadata.tables.values())
+
+ collection = [
+ t
+ for t in reversed(sort_tables(tables))
+ if self._can_drop_table(t)
+ ]
+
+ seq_coll = [
+ s
+ for s in metadata._sequences.values()
+ if s.column is None and self._can_drop_sequence(s)
+ ]
+
+ metadata.dispatch.before_drop(
+ metadata, self.connection, tables=collection,
+ checkfirst=self.checkfirst, _ddl_runner=self)
+
+ for table in collection:
+ self.traverse_single(table, drop_ok=True)
+
+ for seq in seq_coll:
+ self.traverse_single(seq, drop_ok=True)
+
+ metadata.dispatch.after_drop(
+ metadata, self.connection, tables=collection,
+ checkfirst=self.checkfirst, _ddl_runner=self)
+
+ def _can_drop_table(self, table):
+ self.dialect.validate_identifier(table.name)
+ if table.schema:
+ self.dialect.validate_identifier(table.schema)
+ return not self.checkfirst or self.dialect.has_table(self.connection,
+ table.name, schema=table.schema)
+
+ def _can_drop_sequence(self, sequence):
+ return self.dialect.supports_sequences and \
+ ((not self.dialect.sequences_optional or
+ not sequence.optional) and
+ (not self.checkfirst or
+ self.dialect.has_sequence(
+ self.connection,
+ sequence.name,
+ schema=sequence.schema))
+ )
+
+ def visit_index(self, index):
+ self.connection.execute(DropIndex(index))
+
+ def visit_table(self, table, drop_ok=False):
+ if not drop_ok and not self._can_drop_table(table):
+ return
+
+ table.dispatch.before_drop(table, self.connection,
+ checkfirst=self.checkfirst,
+ _ddl_runner=self)
+
+ for column in table.columns:
+ if column.default is not None:
+ self.traverse_single(column.default)
+
+ self.connection.execute(DropTable(table))
+
+ table.dispatch.after_drop(table, self.connection,
+ checkfirst=self.checkfirst,
+ _ddl_runner=self)
+
+ def visit_sequence(self, sequence, drop_ok=False):
+ if not drop_ok and not self._can_drop_sequence(sequence):
+ return
+ self.connection.execute(DropSequence(sequence))
+
+def sort_tables(tables, skip_fn=None, extra_dependencies=None):
+ """sort a collection of Table objects in order of
+ their foreign-key dependency."""
+
+ tables = list(tables)
+ tuples = []
+ if extra_dependencies is not None:
+ tuples.extend(extra_dependencies)
+
+ def visit_foreign_key(fkey):
+ if fkey.use_alter:
+ return
+ elif skip_fn and skip_fn(fkey):
+ return
+ parent_table = fkey.column.table
+ if parent_table in tables:
+ child_table = fkey.parent.table
+ if parent_table is not child_table:
+ tuples.append((parent_table, child_table))
+
+ for table in tables:
+ traverse(table,
+ {'schema_visitor': True},
+ {'foreign_key': visit_foreign_key})
+
+ tuples.extend(
+ [parent, table] for parent in table._extra_dependencies
+ )
+
+ return list(topological.sort(tuples, tables))
+
diff --git a/lib/sqlalchemy/sql/default_comparator.py b/lib/sqlalchemy/sql/default_comparator.py
new file mode 100644
index 000000000..c39dce9c6
--- /dev/null
+++ b/lib/sqlalchemy/sql/default_comparator.py
@@ -0,0 +1,278 @@
+# sql/default_comparator.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Default implementation of SQL comparison operations.
+"""
+
+from .. import exc, util
+from . import operators
+from . import type_api
+from .elements import BindParameter, True_, False_, BinaryExpression, \
+ Null, _const_expr, _clause_element_as_expr, \
+ ClauseList, ColumnElement, TextClause, UnaryExpression, \
+ collate, _is_literal, _literal_as_text
+from .selectable import SelectBase, Alias, Selectable, ScalarSelect
+
+class _DefaultColumnComparator(operators.ColumnOperators):
+ """Defines comparison and math operations.
+
+ See :class:`.ColumnOperators` and :class:`.Operators` for descriptions
+ of all operations.
+
+ """
+
+ @util.memoized_property
+ def type(self):
+ return self.expr.type
+
+ def operate(self, op, *other, **kwargs):
+ o = self.operators[op.__name__]
+ return o[0](self, self.expr, op, *(other + o[1:]), **kwargs)
+
+ def reverse_operate(self, op, other, **kwargs):
+ o = self.operators[op.__name__]
+ return o[0](self, self.expr, op, other, reverse=True, *o[1:], **kwargs)
+
+ def _adapt_expression(self, op, other_comparator):
+ """evaluate the return type of <self> <op> <othertype>,
+ and apply any adaptations to the given operator.
+
+ This method determines the type of a resulting binary expression
+ given two source types and an operator. For example, two
+ :class:`.Column` objects, both of the type :class:`.Integer`, will
+ produce a :class:`.BinaryExpression` that also has the type
+ :class:`.Integer` when compared via the addition (``+``) operator.
+ However, using the addition operator with an :class:`.Integer`
+ and a :class:`.Date` object will produce a :class:`.Date`, assuming
+ "days delta" behavior by the database (in reality, most databases
+ other than Postgresql don't accept this particular operation).
+
+ The method returns a tuple of the form <operator>, <type>.
+ The resulting operator and type will be those applied to the
+ resulting :class:`.BinaryExpression` as the final operator and the
+ right-hand side of the expression.
+
+ Note that only a subset of operators make usage of
+ :meth:`._adapt_expression`,
+ including math operators and user-defined operators, but not
+ boolean comparison or special SQL keywords like MATCH or BETWEEN.
+
+ """
+ return op, other_comparator.type
+
+ def _boolean_compare(self, expr, op, obj, negate=None, reverse=False,
+ _python_is_types=(util.NoneType, bool),
+ **kwargs):
+
+ if isinstance(obj, _python_is_types + (Null, True_, False_)):
+
+ # allow x ==/!= True/False to be treated as a literal.
+ # this comes out to "== / != true/false" or "1/0" if those
+ # constants aren't supported and works on all platforms
+ if op in (operators.eq, operators.ne) and \
+ isinstance(obj, (bool, True_, False_)):
+ return BinaryExpression(expr,
+ _literal_as_text(obj),
+ op,
+ type_=type_api.BOOLEANTYPE,
+ negate=negate, modifiers=kwargs)
+ else:
+ # all other None/True/False uses IS, IS NOT
+ if op in (operators.eq, operators.is_):
+ return BinaryExpression(expr, _const_expr(obj),
+ operators.is_,
+ negate=operators.isnot)
+ elif op in (operators.ne, operators.isnot):
+ return BinaryExpression(expr, _const_expr(obj),
+ operators.isnot,
+ negate=operators.is_)
+ else:
+ raise exc.ArgumentError(
+ "Only '=', '!=', 'is_()', 'isnot()' operators can "
+ "be used with None/True/False")
+ else:
+ obj = self._check_literal(expr, op, obj)
+
+ if reverse:
+ return BinaryExpression(obj,
+ expr,
+ op,
+ type_=type_api.BOOLEANTYPE,
+ negate=negate, modifiers=kwargs)
+ else:
+ return BinaryExpression(expr,
+ obj,
+ op,
+ type_=type_api.BOOLEANTYPE,
+ negate=negate, modifiers=kwargs)
+
+ def _binary_operate(self, expr, op, obj, reverse=False, result_type=None,
+ **kw):
+ obj = self._check_literal(expr, op, obj)
+
+ if reverse:
+ left, right = obj, expr
+ else:
+ left, right = expr, obj
+
+ if result_type is None:
+ op, result_type = left.comparator._adapt_expression(
+ op, right.comparator)
+
+ return BinaryExpression(left, right, op, type_=result_type)
+
+ def _scalar(self, expr, op, fn, **kw):
+ return fn(expr)
+
+ def _in_impl(self, expr, op, seq_or_selectable, negate_op, **kw):
+ seq_or_selectable = _clause_element_as_expr(seq_or_selectable)
+
+ if isinstance(seq_or_selectable, ScalarSelect):
+ return self._boolean_compare(expr, op, seq_or_selectable,
+ negate=negate_op)
+ elif isinstance(seq_or_selectable, SelectBase):
+
+ # TODO: if we ever want to support (x, y, z) IN (select x,
+ # y, z from table), we would need a multi-column version of
+            # as_scalar() to produce a multi-column selectable that
+ # does not export itself as a FROM clause
+
+ return self._boolean_compare(
+ expr, op, seq_or_selectable.as_scalar(),
+ negate=negate_op, **kw)
+ elif isinstance(seq_or_selectable, (Selectable, TextClause)):
+ return self._boolean_compare(expr, op, seq_or_selectable,
+ negate=negate_op, **kw)
+
+ # Handle non selectable arguments as sequences
+ args = []
+ for o in seq_or_selectable:
+ if not _is_literal(o):
+ if not isinstance(o, operators.ColumnOperators):
+ raise exc.InvalidRequestError('in() function accept'
+ 's either a list of non-selectable values, '
+ 'or a selectable: %r' % o)
+ elif o is None:
+ o = Null()
+ else:
+ o = expr._bind_param(op, o)
+ args.append(o)
+ if len(args) == 0:
+
+ # Special case handling for empty IN's, behave like
+ # comparison against zero row selectable. We use != to
+ # build the contradiction as it handles NULL values
+ # appropriately, i.e. "not (x IN ())" should not return NULL
+ # values for x.
+
+ util.warn('The IN-predicate on "%s" was invoked with an '
+ 'empty sequence. This results in a '
+ 'contradiction, which nonetheless can be '
+ 'expensive to evaluate. Consider alternative '
+ 'strategies for improved performance.' % expr)
+ if op is operators.in_op:
+ return expr != expr
+ else:
+ return expr == expr
+
+ return self._boolean_compare(expr, op,
+ ClauseList(*args).self_group(against=op),
+ negate=negate_op)
+
+ def _unsupported_impl(self, expr, op, *arg, **kw):
+ raise NotImplementedError("Operator '%s' is not supported on "
+ "this expression" % op.__name__)
+
+ def _neg_impl(self, expr, op, **kw):
+ """See :meth:`.ColumnOperators.__neg__`."""
+ return UnaryExpression(expr, operator=operators.neg)
+
+ def _match_impl(self, expr, op, other, **kw):
+ """See :meth:`.ColumnOperators.match`."""
+ return self._boolean_compare(expr, operators.match_op,
+ self._check_literal(expr, operators.match_op,
+ other))
+
+ def _distinct_impl(self, expr, op, **kw):
+ """See :meth:`.ColumnOperators.distinct`."""
+ return UnaryExpression(expr, operator=operators.distinct_op,
+ type_=expr.type)
+
+ def _between_impl(self, expr, op, cleft, cright, **kw):
+ """See :meth:`.ColumnOperators.between`."""
+ return BinaryExpression(
+ expr,
+ ClauseList(
+ self._check_literal(expr, operators.and_, cleft),
+ self._check_literal(expr, operators.and_, cright),
+ operator=operators.and_,
+ group=False, group_contents=False),
+ operators.between_op)
+
+ def _collate_impl(self, expr, op, other, **kw):
+ return collate(expr, other)
+
+ # a mapping of operators with the method they use, along with
+ # their negated operator for comparison operators
+ operators = {
+ "add": (_binary_operate,),
+ "mul": (_binary_operate,),
+ "sub": (_binary_operate,),
+ "div": (_binary_operate,),
+ "mod": (_binary_operate,),
+ "truediv": (_binary_operate,),
+ "custom_op": (_binary_operate,),
+ "concat_op": (_binary_operate,),
+ "lt": (_boolean_compare, operators.ge),
+ "le": (_boolean_compare, operators.gt),
+ "ne": (_boolean_compare, operators.eq),
+ "gt": (_boolean_compare, operators.le),
+ "ge": (_boolean_compare, operators.lt),
+ "eq": (_boolean_compare, operators.ne),
+ "like_op": (_boolean_compare, operators.notlike_op),
+ "ilike_op": (_boolean_compare, operators.notilike_op),
+ "notlike_op": (_boolean_compare, operators.like_op),
+ "notilike_op": (_boolean_compare, operators.ilike_op),
+ "contains_op": (_boolean_compare, operators.notcontains_op),
+ "startswith_op": (_boolean_compare, operators.notstartswith_op),
+ "endswith_op": (_boolean_compare, operators.notendswith_op),
+ "desc_op": (_scalar, UnaryExpression._create_desc),
+ "asc_op": (_scalar, UnaryExpression._create_asc),
+ "nullsfirst_op": (_scalar, UnaryExpression._create_nullsfirst),
+ "nullslast_op": (_scalar, UnaryExpression._create_nullslast),
+ "in_op": (_in_impl, operators.notin_op),
+ "notin_op": (_in_impl, operators.in_op),
+ "is_": (_boolean_compare, operators.is_),
+ "isnot": (_boolean_compare, operators.isnot),
+ "collate": (_collate_impl,),
+ "match_op": (_match_impl,),
+ "distinct_op": (_distinct_impl,),
+ "between_op": (_between_impl, ),
+ "neg": (_neg_impl,),
+ "getitem": (_unsupported_impl,),
+ "lshift": (_unsupported_impl,),
+ "rshift": (_unsupported_impl,),
+ }
+
+ def _check_literal(self, expr, operator, other):
+ if isinstance(other, (ColumnElement, TextClause)):
+ if isinstance(other, BindParameter) and \
+ other.type._isnull:
+ other = other._clone()
+ other.type = expr.type
+ return other
+ elif hasattr(other, '__clause_element__'):
+ other = other.__clause_element__()
+ elif isinstance(other, type_api.TypeEngine.Comparator):
+ other = other.expr
+
+ if isinstance(other, (SelectBase, Alias)):
+ return other.as_scalar()
+ elif not isinstance(other, (ColumnElement, TextClause)):
+ return expr._bind_param(operator, other)
+ else:
+ return other
+
diff --git a/lib/sqlalchemy/sql/dml.py b/lib/sqlalchemy/sql/dml.py
new file mode 100644
index 000000000..854b894ee
--- /dev/null
+++ b/lib/sqlalchemy/sql/dml.py
@@ -0,0 +1,769 @@
+# sql/dml.py
+# Copyright (C) 2009-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+"""
+Provide :class:`.Insert`, :class:`.Update` and :class:`.Delete`.
+
+"""
+
+from .base import Executable, _generative, _from_objects, DialectKWArgs
+from .elements import ClauseElement, _literal_as_text, Null, and_, _clone
+from .selectable import _interpret_as_from, _interpret_as_select, HasPrefixes
+from .. import util
+from .. import exc
+
+class UpdateBase(DialectKWArgs, HasPrefixes, Executable, ClauseElement):
+ """Form the base for ``INSERT``, ``UPDATE``, and ``DELETE`` statements.
+
+ """
+
+ __visit_name__ = 'update_base'
+
+ _execution_options = \
+ Executable._execution_options.union({'autocommit': True})
+ _hints = util.immutabledict()
+ _prefixes = ()
+
+ def _process_colparams(self, parameters):
+ def process_single(p):
+ if isinstance(p, (list, tuple)):
+ return dict(
+ (c.key, pval)
+ for c, pval in zip(self.table.c, p)
+ )
+ else:
+ return p
+
+ if isinstance(parameters, (list, tuple)) and \
+ isinstance(parameters[0], (list, tuple, dict)):
+
+ if not self._supports_multi_parameters:
+ raise exc.InvalidRequestError(
+ "This construct does not support "
+ "multiple parameter sets.")
+
+ return [process_single(p) for p in parameters], True
+ else:
+ return process_single(parameters), False
+
+ def params(self, *arg, **kw):
+ """Set the parameters for the statement.
+
+ This method raises ``NotImplementedError`` on the base class,
+ and is overridden by :class:`.ValuesBase` to provide the
+ SET/VALUES clause of UPDATE and INSERT.
+
+ """
+ raise NotImplementedError(
+ "params() is not supported for INSERT/UPDATE/DELETE statements."
+ " To set the values for an INSERT or UPDATE statement, use"
+ " stmt.values(**parameters).")
+
+ def bind(self):
+ """Return a 'bind' linked to this :class:`.UpdateBase`
+ or a :class:`.Table` associated with it.
+
+ """
+ return self._bind or self.table.bind
+
+ def _set_bind(self, bind):
+ self._bind = bind
+ bind = property(bind, _set_bind)
+
+ @_generative
+ def returning(self, *cols):
+ """Add a :term:`RETURNING` or equivalent clause to this statement.
+
+ e.g.::
+
+ stmt = table.update().\\
+ where(table.c.data == 'value').\\
+ values(status='X').\\
+ returning(table.c.server_flag, table.c.updated_timestamp)
+
+ for server_flag, updated_timestamp in connection.execute(stmt):
+ print(server_flag, updated_timestamp)
+
+ The given collection of column expressions should be derived from
+ the table that is
+ the target of the INSERT, UPDATE, or DELETE. While :class:`.Column`
+ objects are typical, the elements can also be expressions::
+
+ stmt = table.insert().returning(
+ (table.c.first_name + " " + table.c.last_name).label('fullname')
+ )
+
+ Upon compilation, a RETURNING clause, or database equivalent,
+ will be rendered within the statement. For INSERT and UPDATE,
+ the values are the newly inserted/updated values. For DELETE,
+ the values are those of the rows which were deleted.
+
+ Upon execution, the values of the columns to be returned
+ are made available via the result set and can be iterated
+ using :meth:`.ResultProxy.fetchone` and similar. For DBAPIs which do not
+ natively support returning values (i.e. cx_oracle),
+ SQLAlchemy will approximate this behavior at the result level
+ so that a reasonable amount of behavioral neutrality is
+ provided.
+
+ Note that not all databases/DBAPIs
+ support RETURNING. For those backends with no support,
+ an exception is raised upon compilation and/or execution.
+ For those who do support it, the functionality across backends
+ varies greatly, including restrictions on executemany()
+ and other statements which return multiple rows. Please
+ read the documentation notes for the database in use in
+ order to determine the availability of RETURNING.
+
+ .. seealso::
+
+ :meth:`.ValuesBase.return_defaults` - an alternative method tailored
+ towards efficient fetching of server-side defaults and triggers
+ for single-row INSERTs or UPDATEs.
+
+
+ """
+ self._returning = cols
+
+
+ @_generative
+ def with_hint(self, text, selectable=None, dialect_name="*"):
+ """Add a table hint for a single table to this
+ INSERT/UPDATE/DELETE statement.
+
+ .. note::
+
+ :meth:`.UpdateBase.with_hint` currently applies only to
+ Microsoft SQL Server. For MySQL INSERT/UPDATE/DELETE hints, use
+ :meth:`.UpdateBase.prefix_with`.
+
+ The text of the hint is rendered in the appropriate
+ location for the database backend in use, relative
+ to the :class:`.Table` that is the subject of this
+ statement, or optionally to that of the given
+ :class:`.Table` passed as the ``selectable`` argument.
+
+ The ``dialect_name`` option will limit the rendering of a particular
+ hint to a particular backend. Such as, to add a hint
+ that only takes effect for SQL Server::
+
+ mytable.insert().with_hint("WITH (PAGLOCK)", dialect_name="mssql")
+
+ .. versionadded:: 0.7.6
+
+ :param text: Text of the hint.
+ :param selectable: optional :class:`.Table` that specifies
+ an element of the FROM clause within an UPDATE or DELETE
+ to be the subject of the hint - applies only to certain backends.
+ :param dialect_name: defaults to ``*``, if specified as the name
+ of a particular dialect, will apply these hints only when
+ that dialect is in use.
+ """
+ if selectable is None:
+ selectable = self.table
+
+ self._hints = self._hints.union(
+ {(selectable, dialect_name): text})
+
+
+class ValuesBase(UpdateBase):
+ """Supplies support for :meth:`.ValuesBase.values` to
+ INSERT and UPDATE constructs."""
+
+ __visit_name__ = 'values_base'
+
+ _supports_multi_parameters = False
+ _has_multi_parameters = False
+ select = None
+
+ def __init__(self, table, values, prefixes):
+ self.table = _interpret_as_from(table)
+ self.parameters, self._has_multi_parameters = \
+ self._process_colparams(values)
+ if prefixes:
+ self._setup_prefixes(prefixes)
+
+ @_generative
+ def values(self, *args, **kwargs):
+ """specify a fixed VALUES clause for an INSERT statement, or the SET
+ clause for an UPDATE.
+
+ Note that the :class:`.Insert` and :class:`.Update` constructs support
+ per-execution time formatting of the VALUES and/or SET clauses,
+ based on the arguments passed to :meth:`.Connection.execute`. However,
+ the :meth:`.ValuesBase.values` method can be used to "fix" a particular
+ set of parameters into the statement.
+
+ Multiple calls to :meth:`.ValuesBase.values` will produce a new
+ construct, each one with the parameter list modified to include
+ the new parameters sent. In the typical case of a single
+ dictionary of parameters, the newly passed keys will replace
+ the same keys in the previous construct. In the case of a list-based
+ "multiple values" construct, each new list of values is extended
+ onto the existing list of values.
+
+ :param \**kwargs: key value pairs representing the string key
+ of a :class:`.Column` mapped to the value to be rendered into the
+ VALUES or SET clause::
+
+ users.insert().values(name="some name")
+
+ users.update().where(users.c.id==5).values(name="some name")
+
+ :param \*args: Alternatively, a dictionary, tuple or list
+ of dictionaries or tuples can be passed as a single positional
+ argument in order to form the VALUES or
+ SET clause of the statement. The single dictionary form
+ works the same as the kwargs form::
+
+ users.insert().values({"name": "some name"})
+
+ If a tuple is passed, the tuple should contain the same number
+ of columns as the target :class:`.Table`::
+
+ users.insert().values((5, "some name"))
+
+ The :class:`.Insert` construct also supports multiply-rendered VALUES
+ construct, for those backends which support this SQL syntax
+ (SQLite, Postgresql, MySQL). This mode is indicated by passing a list
+ of one or more dictionaries/tuples::
+
+ users.insert().values([
+ {"name": "some name"},
+ {"name": "some other name"},
+ {"name": "yet another name"},
+ ])
+
+ In the case of an :class:`.Update`
+ construct, only the single dictionary/tuple form is accepted,
+ else an exception is raised. It is also an exception case to
+ attempt to mix the single-/multiple- value styles together,
+ either through multiple :meth:`.ValuesBase.values` calls
+ or by sending a list + kwargs at the same time.
+
+ .. note::
+
+ Passing a multiple values list is *not* the same
+ as passing a multiple values list to the :meth:`.Connection.execute`
+ method. Passing a list of parameter sets to :meth:`.ValuesBase.values`
+ produces a construct of this form::
+
+ INSERT INTO table (col1, col2, col3) VALUES
+ (col1_0, col2_0, col3_0),
+ (col1_1, col2_1, col3_1),
+ ...
+
+ whereas a multiple list passed to :meth:`.Connection.execute`
+ has the effect of using the DBAPI
+ `executemany() <http://www.python.org/dev/peps/pep-0249/#id18>`_
+ method, which provides a high-performance system of invoking
+ a single-row INSERT statement many times against a series
+ of parameter sets. The "executemany" style is supported by
+ all database backends, as it does not depend on a special SQL
+ syntax.
+
+ .. versionadded:: 0.8
+ Support for multiple-VALUES INSERT statements.
+
+
+ .. seealso::
+
+ :ref:`inserts_and_updates` - SQL Expression
+ Language Tutorial
+
+ :func:`~.expression.insert` - produce an ``INSERT`` statement
+
+ :func:`~.expression.update` - produce an ``UPDATE`` statement
+
+ """
+ if self.select is not None:
+ raise exc.InvalidRequestError(
+ "This construct already inserts from a SELECT")
+ if self._has_multi_parameters and kwargs:
+ raise exc.InvalidRequestError(
+ "This construct already has multiple parameter sets.")
+
+ if args:
+ if len(args) > 1:
+ raise exc.ArgumentError(
+ "Only a single dictionary/tuple or list of "
+ "dictionaries/tuples is accepted positionally.")
+ v = args[0]
+ else:
+ v = {}
+
+ if self.parameters is None:
+ self.parameters, self._has_multi_parameters = \
+ self._process_colparams(v)
+ else:
+ if self._has_multi_parameters:
+ self.parameters = list(self.parameters)
+ p, self._has_multi_parameters = self._process_colparams(v)
+ if not self._has_multi_parameters:
+ raise exc.ArgumentError(
+ "Can't mix single-values and multiple values "
+ "formats in one statement")
+
+ self.parameters.extend(p)
+ else:
+ self.parameters = self.parameters.copy()
+ p, self._has_multi_parameters = self._process_colparams(v)
+ if self._has_multi_parameters:
+ raise exc.ArgumentError(
+ "Can't mix single-values and multiple values "
+ "formats in one statement")
+ self.parameters.update(p)
+
+ if kwargs:
+ if self._has_multi_parameters:
+ raise exc.ArgumentError(
+ "Can't pass kwargs and multiple parameter sets "
+                    "simultaneously")
+ else:
+ self.parameters.update(kwargs)
+
+ @_generative
+ def return_defaults(self, *cols):
+ """Make use of a :term:`RETURNING` clause for the purpose
+ of fetching server-side expressions and defaults.
+
+ E.g.::
+
+ stmt = table.insert().values(data='newdata').return_defaults()
+
+ result = connection.execute(stmt)
+
+ server_created_at = result.returned_defaults['created_at']
+
+ When used against a backend that supports RETURNING, all column
+ values generated by SQL expression or server-side-default will be added
+ to any existing RETURNING clause, provided that
+ :meth:`.UpdateBase.returning` is not used simultaneously. The column values
+ will then be available on the result using the
+ :attr:`.ResultProxy.returned_defaults` accessor as a
+ dictionary, referring to values keyed to the :class:`.Column` object
+ as well as its ``.key``.
+
+ This method differs from :meth:`.UpdateBase.returning` in these ways:
+
+ 1. :meth:`.ValuesBase.return_defaults` is only intended for use with
+ an INSERT or an UPDATE statement that matches exactly one row.
+ While the RETURNING construct in the general sense supports multiple
+ rows for a multi-row UPDATE or DELETE statement, or for special
+ cases of INSERT that return multiple rows (e.g. INSERT from SELECT,
+ multi-valued VALUES clause), :meth:`.ValuesBase.return_defaults`
+ is intended only
+ for an "ORM-style" single-row INSERT/UPDATE statement. The row
+          returned by the statement is also consumed implicitly when
+ :meth:`.ValuesBase.return_defaults` is used. By contrast,
+ :meth:`.UpdateBase.returning` leaves the RETURNING result-set intact
+ with a collection of any number of rows.
+
+ 2. It is compatible with the existing logic to fetch auto-generated
+ primary key values, also known as "implicit returning". Backends that
+ support RETURNING will automatically make use of RETURNING in order
+ to fetch the value of newly generated primary keys; while the
+ :meth:`.UpdateBase.returning` method circumvents this behavior,
+ :meth:`.ValuesBase.return_defaults` leaves it intact.
+
+ 3. It can be called against any backend. Backends that don't support
+ RETURNING will skip the usage of the feature, rather than raising
+ an exception. The return value of :attr:`.ResultProxy.returned_defaults`
+           will be ``None``.
+
+ :meth:`.ValuesBase.return_defaults` is used by the ORM to provide
+ an efficient implementation for the ``eager_defaults`` feature of
+ :func:`.mapper`.
+
+ :param cols: optional list of column key names or :class:`.Column`
+         objects. If omitted, all column expressions evaluated on the server
+ are added to the returning list.
+
+ .. versionadded:: 0.9.0
+
+ .. seealso::
+
+ :meth:`.UpdateBase.returning`
+
+ :attr:`.ResultProxy.returned_defaults`
+
+ """
+ self._return_defaults = cols or True
+
+
+class Insert(ValuesBase):
+ """Represent an INSERT construct.
+
+ The :class:`.Insert` object is created using the
+ :func:`~.expression.insert()` function.
+
+ .. seealso::
+
+ :ref:`coretutorial_insert_expressions`
+
+ """
+ __visit_name__ = 'insert'
+
+ _supports_multi_parameters = True
+
+ def __init__(self,
+ table,
+ values=None,
+ inline=False,
+ bind=None,
+ prefixes=None,
+ returning=None,
+ return_defaults=False,
+ **dialect_kw):
+ """Construct an :class:`.Insert` object.
+
+ Similar functionality is available via the
+ :meth:`~.TableClause.insert` method on
+ :class:`~.schema.Table`.
+
+ :param table: :class:`.TableClause` which is the subject of the insert.
+
+ :param values: collection of values to be inserted; see
+ :meth:`.Insert.values` for a description of allowed formats here.
+ Can be omitted entirely; a :class:`.Insert` construct will also
+ dynamically render the VALUES clause at execution time based on
+ the parameters passed to :meth:`.Connection.execute`.
+
+ :param inline: if True, SQL defaults will be compiled 'inline' into the
+ statement and not pre-executed.
+
+ If both `values` and compile-time bind parameters are present, the
+ compile-time bind parameters override the information specified
+ within `values` on a per-key basis.
+
+ The keys within `values` can be either :class:`~sqlalchemy.schema.Column`
+ objects or their string identifiers. Each key may reference one of:
+
+ * a literal data value (i.e. string, number, etc.);
+ * a Column object;
+ * a SELECT statement.
+
+ If a ``SELECT`` statement is specified which references this
+ ``INSERT`` statement's table, the statement will be correlated
+ against the ``INSERT`` statement.
+
+ .. seealso::
+
+ :ref:`coretutorial_insert_expressions` - SQL Expression Tutorial
+
+ :ref:`inserts_and_updates` - SQL Expression Tutorial
+
+ """
+ ValuesBase.__init__(self, table, values, prefixes)
+ self._bind = bind
+ self.select = self.select_names = None
+ self.inline = inline
+ self._returning = returning
+ self._validate_dialect_kwargs(dialect_kw)
+ self._return_defaults = return_defaults
+
+ def get_children(self, **kwargs):
+ if self.select is not None:
+ return self.select,
+ else:
+ return ()
+
+ @_generative
+ def from_select(self, names, select):
+ """Return a new :class:`.Insert` construct which represents
+ an ``INSERT...FROM SELECT`` statement.
+
+ e.g.::
+
+ sel = select([table1.c.a, table1.c.b]).where(table1.c.c > 5)
+ ins = table2.insert().from_select(['a', 'b'], sel)
+
+ :param names: a sequence of string column names or :class:`.Column`
+ objects representing the target columns.
+ :param select: a :func:`.select` construct, :class:`.FromClause`
+ or other construct which resolves into a :class:`.FromClause`,
+ such as an ORM :class:`.Query` object, etc. The order of
+ columns returned from this FROM clause should correspond to the
+ order of columns sent as the ``names`` parameter; while this
+ is not checked before passing along to the database, the database
+ would normally raise an exception if these column lists don't
+ correspond.
+
+ .. note::
+
+ Depending on backend, it may be necessary for the :class:`.Insert`
+ statement to be constructed using the ``inline=True`` flag; this
+ flag will prevent the implicit usage of ``RETURNING`` when the
+ ``INSERT`` statement is rendered, which isn't supported on a backend
+ such as Oracle in conjunction with an ``INSERT..SELECT`` combination::
+
+ sel = select([table1.c.a, table1.c.b]).where(table1.c.c > 5)
+ ins = table2.insert(inline=True).from_select(['a', 'b'], sel)
+
+ .. note::
+
+           An INSERT...FROM SELECT construct in SQL has no VALUES clause. Therefore
+ :class:`.Column` objects which utilize Python-side defaults
+ (e.g. as described at :ref:`metadata_defaults_toplevel`)
+ will **not** take effect when using :meth:`.Insert.from_select`.
+
+ .. versionadded:: 0.8.3
+
+ """
+ if self.parameters:
+ raise exc.InvalidRequestError(
+ "This construct already inserts value expressions")
+
+ self.parameters, self._has_multi_parameters = \
+ self._process_colparams(dict((n, Null()) for n in names))
+
+ self.select_names = names
+ self.select = _interpret_as_select(select)
+
+ def _copy_internals(self, clone=_clone, **kw):
+ # TODO: coverage
+ self.parameters = self.parameters.copy()
+ if self.select is not None:
+ self.select = _clone(self.select)
+
+
+class Update(ValuesBase):
+ """Represent an Update construct.
+
+ The :class:`.Update` object is created using the :func:`update()` function.
+
+ """
+ __visit_name__ = 'update'
+
+ def __init__(self,
+ table,
+ whereclause=None,
+ values=None,
+ inline=False,
+ bind=None,
+ prefixes=None,
+ returning=None,
+ return_defaults=False,
+ **dialect_kw):
+ """Construct an :class:`.Update` object.
+
+ E.g.::
+
+ from sqlalchemy import update
+
+ stmt = update(users).where(users.c.id==5).\\
+ values(name='user #5')
+
+ Similar functionality is available via the
+ :meth:`~.TableClause.update` method on
+ :class:`.Table`::
+
+ stmt = users.update().\\
+ where(users.c.id==5).\\
+ values(name='user #5')
+
+ :param table: A :class:`.Table` object representing the database
+ table to be updated.
+
+ :param whereclause: Optional SQL expression describing the ``WHERE``
+ condition of the ``UPDATE`` statement. Modern applications
+ may prefer to use the generative :meth:`~Update.where()`
+ method to specify the ``WHERE`` clause.
+
+ The WHERE clause can refer to multiple tables.
+ For databases which support this, an ``UPDATE FROM`` clause will
+ be generated, or on MySQL, a multi-table update. The statement
+ will fail on databases that don't have support for multi-table
+ update statements. A SQL-standard method of referring to
+ additional tables in the WHERE clause is to use a correlated
+ subquery::
+
+ users.update().values(name='ed').where(
+ users.c.name==select([addresses.c.email_address]).\\
+ where(addresses.c.user_id==users.c.id).\\
+ as_scalar()
+ )
+
+ .. versionchanged:: 0.7.4
+ The WHERE clause can refer to multiple tables.
+
+ :param values:
+ Optional dictionary which specifies the ``SET`` conditions of the
+ ``UPDATE``. If left as ``None``, the ``SET``
+ conditions are determined from those parameters passed to the
+ statement during the execution and/or compilation of the
+ statement. When compiled standalone without any parameters,
+ the ``SET`` clause generates for all columns.
+
+ Modern applications may prefer to use the generative
+ :meth:`.Update.values` method to set the values of the
+ UPDATE statement.
+
+ :param inline:
+ if True, SQL defaults present on :class:`.Column` objects via
+ the ``default`` keyword will be compiled 'inline' into the statement
+ and not pre-executed. This means that their values will not
+ be available in the dictionary returned from
+ :meth:`.ResultProxy.last_updated_params`.
+
+ If both ``values`` and compile-time bind parameters are present, the
+ compile-time bind parameters override the information specified
+ within ``values`` on a per-key basis.
+
+ The keys within ``values`` can be either :class:`.Column`
+ objects or their string identifiers (specifically the "key" of the
+ :class:`.Column`, normally but not necessarily equivalent to
+ its "name"). Normally, the
+ :class:`.Column` objects used here are expected to be
+ part of the target :class:`.Table` that is the table
+ to be updated. However when using MySQL, a multiple-table
+ UPDATE statement can refer to columns from any of
+ the tables referred to in the WHERE clause.
+
+ The values referred to in ``values`` are typically:
+
+ * a literal data value (i.e. string, number, etc.)
+ * a SQL expression, such as a related :class:`.Column`,
+ a scalar-returning :func:`.select` construct,
+ etc.
+
+ When combining :func:`.select` constructs within the values
+ clause of an :func:`.update` construct,
+ the subquery represented by the :func:`.select` should be
+ *correlated* to the parent table, that is, providing criterion
+ which links the table inside the subquery to the outer table
+ being updated::
+
+ users.update().values(
+ name=select([addresses.c.email_address]).\\
+ where(addresses.c.user_id==users.c.id).\\
+ as_scalar()
+ )
+
+ .. seealso::
+
+ :ref:`inserts_and_updates` - SQL Expression
+ Language Tutorial
+
+
+ """
+ ValuesBase.__init__(self, table, values, prefixes)
+ self._bind = bind
+ self._returning = returning
+ if whereclause is not None:
+ self._whereclause = _literal_as_text(whereclause)
+ else:
+ self._whereclause = None
+ self.inline = inline
+ self._validate_dialect_kwargs(dialect_kw)
+ self._return_defaults = return_defaults
+
+
+ def get_children(self, **kwargs):
+ if self._whereclause is not None:
+ return self._whereclause,
+ else:
+ return ()
+
+ def _copy_internals(self, clone=_clone, **kw):
+ # TODO: coverage
+ self._whereclause = clone(self._whereclause, **kw)
+ self.parameters = self.parameters.copy()
+
+ @_generative
+ def where(self, whereclause):
+ """return a new update() construct with the given expression added to
+ its WHERE clause, joined to the existing clause via AND, if any.
+
+ """
+ if self._whereclause is not None:
+ self._whereclause = and_(self._whereclause,
+ _literal_as_text(whereclause))
+ else:
+ self._whereclause = _literal_as_text(whereclause)
+
+ @property
+ def _extra_froms(self):
+ # TODO: this could be made memoized
+ # if the memoization is reset on each generative call.
+ froms = []
+ seen = set([self.table])
+
+ if self._whereclause is not None:
+ for item in _from_objects(self._whereclause):
+ if not seen.intersection(item._cloned_set):
+ froms.append(item)
+ seen.update(item._cloned_set)
+
+ return froms
+
+
+class Delete(UpdateBase):
+ """Represent a DELETE construct.
+
+ The :class:`.Delete` object is created using the :func:`delete()` function.
+
+ """
+
+ __visit_name__ = 'delete'
+
+ def __init__(self,
+ table,
+ whereclause=None,
+ bind=None,
+ returning=None,
+ prefixes=None,
+ **dialect_kw):
+        """Construct a :class:`.Delete` object.
+
+ Similar functionality is available via the
+ :meth:`~.TableClause.delete` method on
+ :class:`~.schema.Table`.
+
+        :param table: The table to be deleted from.
+
+        :param whereclause: A :class:`.ClauseElement` describing the ``WHERE``
+          condition of the ``DELETE`` statement. Note that the
+ :meth:`~Delete.where()` generative method may be used instead.
+
+ .. seealso::
+
+ :ref:`deletes` - SQL Expression Tutorial
+
+ """
+ self._bind = bind
+ self.table = _interpret_as_from(table)
+ self._returning = returning
+
+ if prefixes:
+ self._setup_prefixes(prefixes)
+
+ if whereclause is not None:
+ self._whereclause = _literal_as_text(whereclause)
+ else:
+ self._whereclause = None
+
+ self._validate_dialect_kwargs(dialect_kw)
+
+ def get_children(self, **kwargs):
+ if self._whereclause is not None:
+ return self._whereclause,
+ else:
+ return ()
+
+ @_generative
+ def where(self, whereclause):
+ """Add the given WHERE clause to a newly returned delete construct."""
+
+ if self._whereclause is not None:
+ self._whereclause = and_(self._whereclause,
+ _literal_as_text(whereclause))
+ else:
+ self._whereclause = _literal_as_text(whereclause)
+
+ def _copy_internals(self, clone=_clone, **kw):
+ # TODO: coverage
+ self._whereclause = clone(self._whereclause, **kw)
+
diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py
new file mode 100644
index 000000000..0e888fcf7
--- /dev/null
+++ b/lib/sqlalchemy/sql/elements.py
@@ -0,0 +1,2880 @@
+# sql/elements.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Core SQL expression elements, including :class:`.ClauseElement`,
+:class:`.ColumnElement`, and derived classes.
+
+"""
+
+from __future__ import unicode_literals
+
+from .. import util, exc, inspection
+from . import type_api
+from . import operators
+from .visitors import Visitable, cloned_traverse, traverse
+from .annotation import Annotated
+import itertools
+from .base import Executable, PARSE_AUTOCOMMIT, Immutable, NO_ARG
+from .base import _generative, Generative
+
+import re
+import operator
+
+def _clone(element, **kw):
+ return element._clone()
+
def collate(expression, collation):
    """Return the clause ``expression COLLATE collation``.

    e.g.::

        collate(mycolumn, 'utf8_bin')

    produces::

        mycolumn COLLATE utf8_bin

    """
    coerced = _literal_as_binds(expression)
    collation_clause = _literal_as_text(collation)
    return BinaryExpression(
        coerced, collation_clause,
        operators.collate, type_=coerced.type)
+
def between(ctest, cleft, cright):
    """Return a ``BETWEEN`` predicate clause.

    Equivalent of SQL ``clausetest BETWEEN clauseleft AND clauseright``.

    The :func:`between()` method on all
    :class:`.ColumnElement` subclasses provides
    similar functionality.

    """
    # coerce the test expression, then delegate to its between() operator
    return _literal_as_binds(ctest).between(cleft, cright)
+
def literal(value, type_=None):
    """Return a literal clause, bound to a bind parameter.

    Literal clauses are created automatically when non- :class:`.ClauseElement`
    objects (such as strings, ints, dates, etc.) are used in a comparison
    operation with a :class:`.ColumnElement`
    subclass, such as a :class:`~sqlalchemy.schema.Column` object.
    Use this function to force the
    generation of a literal clause, which will be created as a
    :class:`BindParameter` with a bound value.

    :param value: the value to be bound. Can be any Python object supported by
        the underlying DB-API, or is translatable via the given type argument.

    :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine` which
        will provide bind-parameter translation for this literal.

    """
    # an anonymous, unique bind carries the literal value
    return BindParameter(
        None, value,
        type_=type_,
        unique=True)
+
+
+
def type_coerce(expression, type_):
    """Coerce the given expression into the given type,
    on the Python side only.

    :func:`.type_coerce` is roughly similar to :func:`.cast`, except no
    "CAST" expression is rendered - the given type is only applied towards
    expression typing and against received result values.

    e.g.::

        from sqlalchemy.types import TypeDecorator
        import uuid

        class AsGuid(TypeDecorator):
            impl = String

            def process_bind_param(self, value, dialect):
                if value is not None:
                    return str(value)
                else:
                    return None

            def process_result_value(self, value, dialect):
                if value is not None:
                    return uuid.UUID(value)
                else:
                    return None

        conn.execute(
            select([type_coerce(mytable.c.ident, AsGuid)]).\\
                    where(
                        type_coerce(mytable.c.ident, AsGuid) ==
                        uuid.uuid3(uuid.NAMESPACE_URL, 'bar')
                    )
        )

    :param expression: Column-oriented expression.
    :param type_: A :class:`.TypeEngine` class or instance indicating
     the type to which the CAST should apply.

    .. seealso::

        :func:`.cast`

    """
    type_ = type_api.to_instance(type_)

    # ORM-style objects: unwrap and retry with the core element
    if hasattr(expression, '__clause_element__'):
        return type_coerce(expression.__clause_element__(), type_)

    # binds: copy and retype in place
    if isinstance(expression, BindParameter):
        bound = expression._clone()
        bound.type = type_
        return bound

    # plain Python values: turn into NULL or a typed literal
    if not isinstance(expression, Visitable):
        if expression is None:
            return Null()
        return literal(expression, type_=type_)

    # any other SQL expression: wrap in an anonymous, typed Label
    return Label(None, expression, type_=type_)
+
+
+
+
+
def outparam(key, type_=None):
    """Create an 'OUT' parameter for usage in functions (stored procedures),
    for databases which support them.

    The ``outparam`` can be used like a regular function parameter.
    The "output" value will be available from the
    :class:`~sqlalchemy.engine.ResultProxy` object via its ``out_parameters``
    attribute, which returns a dictionary containing the values.

    """
    # a named, non-unique bind flagged as an OUT parameter
    return BindParameter(key, None,
                         type_=type_,
                         unique=False,
                         isoutparam=True)
+
+
+
+
def not_(clause):
    """Return a negation of the given clause, i.e. ``NOT(clause)``.

    The ``~`` operator is also overloaded on all
    :class:`.ColumnElement` subclasses to produce the
    same result.

    """
    coerced = _literal_as_binds(clause)
    return operators.inv(coerced)
+
+
+
@inspection._self_inspects
class ClauseElement(Visitable):
    """Base class for elements of a programmatically constructed SQL
    expression.

    """
    __visit_name__ = 'clause'

    # annotation dictionary; the class-level default is a shared,
    # effectively-immutable empty dict
    _annotations = {}
    # True for constructs that may be executed directly (statements)
    supports_execution = False
    # FROM objects this element contributes to an enclosing statement
    _from_objects = []
    # engine or connection this element is bound to, if any
    bind = None
    # set by _clone() to refer to the element this one was copied from
    _is_clone_of = None
    is_selectable = False
    is_clause_element = True

    # element to match when resolving an ORDER BY entry against labels;
    # overridden by ColumnElement subclasses
    _order_by_label_element = None

    def _clone(self):
        """Create a shallow copy of this ClauseElement.

        This method may be used by a generative API.  It's also used as
        part of the "deep" copy afforded by a traversal that combines
        the _copy_internals() method.

        """
        c = self.__class__.__new__(self.__class__)
        c.__dict__ = self.__dict__.copy()
        # reset memoized state so the copy recomputes it lazily
        ClauseElement._cloned_set._reset(c)
        ColumnElement.comparator._reset(c)

        # this is a marker that helps to "equate" clauses to each other
        # when a Select returns its list of FROM clauses.  the cloning
        # process leaves around a lot of remnants of the previous clause
        # typically in the form of column expressions still attached to the
        # old table.
        c._is_clone_of = self

        return c

    @property
    def _constructor(self):
        """return the 'constructor' for this ClauseElement.

        This is for the purposes of creating a new object of
        this type.   Usually, it's just the element's __class__.
        However, the "Annotated" version of the object overrides
        to return the class of its proxied element.

        """
        return self.__class__

    @util.memoized_property
    def _cloned_set(self):
        """Return the set consisting of all cloned ancestors of this
        ClauseElement.

        Includes this ClauseElement.  This accessor tends to be used for
        FromClause objects to identify 'equivalent' FROM clauses, regardless
        of transformative operations.

        """
        s = util.column_set()
        f = self
        # walk the _is_clone_of chain back to the original element
        while f is not None:
            s.add(f)
            f = f._is_clone_of
        return s

    def __getstate__(self):
        d = self.__dict__.copy()
        # clone lineage is transient state; don't pickle it
        d.pop('_is_clone_of', None)
        return d

    def _annotate(self, values):
        """return a copy of this ClauseElement with annotations
        updated by the given dictionary.

        """
        return Annotated(self, values)

    def _with_annotations(self, values):
        """return a copy of this ClauseElement with annotations
        replaced by the given dictionary.

        """
        return Annotated(self, values)

    def _deannotate(self, values=None, clone=False):
        """return a copy of this :class:`.ClauseElement` with annotations
        removed.

        :param values: optional tuple of individual values
         to remove.

        """
        if clone:
            # clone is used when we are also copying
            # the expression for a deep deannotation
            return self._clone()
        else:
            # if no clone, since we have no annotations we return
            # self
            return self

    def _execute_on_connection(self, connection, multiparams, params):
        # execution dispatch hook used by Connection.execute()
        return connection._execute_clauseelement(self, multiparams, params)

    def unique_params(self, *optionaldict, **kwargs):
        """Return a copy with :func:`bindparam()` elements replaced.

        Same functionality as ``params()``, except adds `unique=True`
        to affected bind parameters so that multiple statements can be
        used.

        """
        return self._params(True, optionaldict, kwargs)

    def params(self, *optionaldict, **kwargs):
        """Return a copy with :func:`bindparam()` elements replaced.

        Returns a copy of this ClauseElement with :func:`bindparam()`
        elements replaced with values taken from the given dictionary::

          >>> clause = column('x') + bindparam('foo')
          >>> print clause.compile().params
          {'foo':None}
          >>> print clause.params({'foo':7}).compile().params
          {'foo':7}

        """
        return self._params(False, optionaldict, kwargs)

    def _params(self, unique, optionaldict, kwargs):
        # accept at most one positional dictionary, merged into kwargs
        if len(optionaldict) == 1:
            kwargs.update(optionaldict[0])
        elif len(optionaldict) > 1:
            raise exc.ArgumentError(
                "params() takes zero or one positional dictionary argument")

        def visit_bindparam(bind):
            if bind.key in kwargs:
                bind.value = kwargs[bind.key]
                # a bind given a value is no longer "required"
                bind.required = False
            if unique:
                bind._convert_to_unique()
        return cloned_traverse(self, {}, {'bindparam': visit_bindparam})

    def compare(self, other, **kw):
        """Compare this ClauseElement to the given ClauseElement.

        Subclasses should override the default behavior, which is a
        straight identity comparison.

        \**kw are arguments consumed by subclass compare() methods and
        may be used to modify the criteria for comparison.
        (see :class:`.ColumnElement`)

        """
        return self is other

    def _copy_internals(self, clone=_clone, **kw):
        """Reassign internal elements to be clones of themselves.

        Called during a copy-and-traverse operation on newly
        shallow-copied elements to create a deep copy.

        The given clone function should be used, which may be applying
        additional transformations to the element (i.e. replacement
        traversal, cloned traversal, annotations).

        """
        pass

    def get_children(self, **kwargs):
        """Return immediate child elements of this :class:`.ClauseElement`.

        This is used for visit traversal.

        \**kwargs may contain flags that change the collection that is
        returned, for example to return a subset of items in order to
        cut down on larger traversals, or to return child items from a
        different context (such as schema-level collections instead of
        clause-level).

        """
        return []

    def self_group(self, against=None):
        """Apply a 'grouping' to this :class:`.ClauseElement`.

        This method is overridden by subclasses to return a
        "grouping" construct, i.e. parenthesis.   In particular
        it's used by "binary" expressions to provide a grouping
        around themselves when placed into a larger expression,
        as well as by :func:`.select` constructs when placed into
        the FROM clause of another :func:`.select`.  (Note that
        subqueries should be normally created using the
        :meth:`.Select.alias` method, as many platforms require
        nested SELECT statements to be named).

        As expressions are composed together, the application of
        :meth:`self_group` is automatic - end-user code should never
        need to use this method directly.  Note that SQLAlchemy's
        clause constructs take operator precedence into account -
        so parenthesis might not be needed, for example, in
        an expression like ``x OR (y AND z)`` - AND takes precedence
        over OR.

        The base :meth:`self_group` method of :class:`.ClauseElement`
        just returns self.
        """
        return self

    @util.dependencies("sqlalchemy.engine.default")
    def compile(self, default, bind=None, dialect=None, **kw):
        """Compile this SQL expression.

        The return value is a :class:`~.Compiled` object.
        Calling ``str()`` or ``unicode()`` on the returned value will yield a
        string representation of the result.   The
        :class:`~.Compiled` object also can return a
        dictionary of bind parameter names and values
        using the ``params`` accessor.

        :param bind: An ``Engine`` or ``Connection`` from which a
           ``Compiled`` will be acquired.  This argument takes precedence over
           this :class:`.ClauseElement`'s bound engine, if any.

        :param column_keys: Used for INSERT and UPDATE statements, a list of
            column names which should be present in the VALUES clause of the
            compiled statement.  If ``None``, all columns from the target table
            object are rendered.

        :param dialect: A ``Dialect`` instance from which a ``Compiled``
           will be acquired.  This argument takes precedence over the `bind`
           argument as well as this :class:`.ClauseElement`'s bound engine, if
           any.

        :param inline: Used for INSERT statements, for a dialect which does
           not support inline retrieval of newly generated primary key
           columns, will force the expression used to create the new primary
           key value to be rendered inline within the INSERT statement's
           VALUES clause.  This typically refers to Sequence execution but may
           also refer to any server-side default generation function
           associated with a primary key `Column`.

        """
        # note: the ``default`` argument is injected by the
        # @util.dependencies decorator; callers never pass it.
        if not dialect:
            if bind:
                dialect = bind.dialect
            elif self.bind:
                dialect = self.bind.dialect
                bind = self.bind
            else:
                # no engine available; fall back to a generic dialect
                dialect = default.DefaultDialect()
        return self._compiler(dialect, bind=bind, **kw)

    def _compiler(self, dialect, **kw):
        """Return a compiler appropriate for this ClauseElement, given a
        Dialect."""

        return dialect.statement_compiler(dialect, self, **kw)

    def __str__(self):
        if util.py3k:
            return str(self.compile())
        else:
            # Python 2: render as a bytestring, escaping non-ascii
            return unicode(self.compile()).encode('ascii', 'backslashreplace')

    def __and__(self, other):
        return and_(self, other)

    def __or__(self, other):
        return or_(self, other)

    def __invert__(self):
        # prefer a pre-built negation if the construct carries one
        if hasattr(self, 'negation_clause'):
            return self.negation_clause
        else:
            return self._negate()

    def __bool__(self):
        # SQL expressions have no truth value; comparisons like x == y
        # construct new expressions rather than evaluating anything
        raise TypeError("Boolean value of this clause is not defined")

    __nonzero__ = __bool__

    def _negate(self):
        return UnaryExpression(
            self.self_group(against=operators.inv),
            operator=operators.inv,
            negate=None)

    def __repr__(self):
        friendly = getattr(self, 'description', None)
        if friendly is None:
            return object.__repr__(self)
        else:
            return '<%s.%s at 0x%x; %s>' % (
                self.__module__, self.__class__.__name__, id(self), friendly)
+
+
+
class ColumnElement(ClauseElement, operators.ColumnOperators):
    """Represent a column-oriented SQL expression suitable for usage in the
    "columns" clause, WHERE clause etc. of a statement.

    While the most familiar kind of :class:`.ColumnElement` is the
    :class:`.Column` object, :class:`.ColumnElement` serves as the basis
    for any unit that may be present in a SQL expression, including
    the expressions themselves, SQL functions, bound parameters,
    literal expressions, keywords such as ``NULL``, etc.
    :class:`.ColumnElement` is the ultimate base class for all such elements.

    A :class:`.ColumnElement` provides the ability to generate new
    :class:`.ColumnElement`
    objects using Python expressions.  This means that Python operators
    such as ``==``, ``!=`` and ``<`` are overloaded to mimic SQL operations,
    and allow the instantiation of further :class:`.ColumnElement` instances
    which are composed from other, more fundamental :class:`.ColumnElement`
    objects.  For example, two :class:`.ColumnClause` objects can be added
    together with the addition operator ``+`` to produce
    a :class:`.BinaryExpression`.
    Both :class:`.ColumnClause` and :class:`.BinaryExpression` are subclasses
    of :class:`.ColumnElement`::

        >>> from sqlalchemy.sql import column
        >>> column('a') + column('b')
        <sqlalchemy.sql.expression.BinaryExpression object at 0x101029dd0>
        >>> print column('a') + column('b')
        a + b

    :class:`.ColumnElement` supports the ability to be a *proxy* element,
    which indicates that the :class:`.ColumnElement` may be associated with
    a :class:`.Selectable` which was derived from another :class:`.Selectable`.
    An example of a "derived" :class:`.Selectable` is an :class:`.Alias` of a
    :class:`~sqlalchemy.schema.Table`.  For the ambitious, an in-depth
    discussion of this concept can be found at
    `Expression Transformations <http://techspot.zzzeek.org/2008/01/23/expression-transformations/>`_.

    """

    __visit_name__ = 'column'
    primary_key = False
    foreign_keys = []
    # compile-time label; set by named constructs such as Column
    _label = None
    # label used for result-row targeting by key
    _key_label = None
    # alternate names under which this column may be matched
    _alt_names = ()

    def self_group(self, against=None):
        # within AND/OR (or an explicit boolean coercion), a boolean-typed
        # column is wrapped so it renders as a proper predicate
        if against in (operators.and_, operators.or_, operators._asbool) and \
            self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity:
            return AsBoolean(self, operators.istrue, operators.isfalse)
        else:
            return self

    def _negate(self):
        # negating a boolean-typed column renders "col = false" style
        # rather than "NOT col"
        if self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity:
            return AsBoolean(self, operators.isfalse, operators.istrue)
        else:
            return super(ColumnElement, self)._negate()

    @util.memoized_property
    def type(self):
        # default SQL type when none was established
        return type_api.NULLTYPE

    @util.memoized_property
    def comparator(self):
        # per-type comparison/operator implementation object
        return self.type.comparator_factory(self)

    def __getattr__(self, key):
        # fall through to the type-specific comparator for unknown
        # attributes (e.g. dialect-specific operators)
        try:
            return getattr(self.comparator, key)
        except AttributeError:
            raise AttributeError(
                'Neither %r object nor %r object has an attribute %r' % (
                    type(self).__name__,
                    type(self.comparator).__name__,
                    key)
            )

    def operate(self, op, *other, **kwargs):
        # operator protocol: delegate to the comparator
        return op(self.comparator, *other, **kwargs)

    def reverse_operate(self, op, other, **kwargs):
        # reversed-operand form of operate()
        return op(other, self.comparator, **kwargs)

    def _bind_param(self, operator, obj):
        # wrap a plain Python value in an anonymous bind, typed
        # relative to this column and the operator in use
        return BindParameter(None, obj,
                             _compared_to_operator=operator,
                             _compared_to_type=self.type, unique=True)

    @property
    def expression(self):
        """Return a column expression.

        Part of the inspection interface; returns self.

        """
        return self

    @property
    def _select_iterable(self):
        return (self, )

    @util.memoized_property
    def base_columns(self):
        # the "roots" of the proxy graph: ancestors with no _proxies
        return util.column_set(c for c in self.proxy_set
                               if not hasattr(c, '_proxies'))

    @util.memoized_property
    def proxy_set(self):
        # this column plus, transitively, everything it proxies for
        s = util.column_set([self])
        if hasattr(self, '_proxies'):
            for c in self._proxies:
                s.update(c.proxy_set)
        return s

    def shares_lineage(self, othercolumn):
        """Return True if the given :class:`.ColumnElement`
        has a common ancestor to this :class:`.ColumnElement`."""

        return bool(self.proxy_set.intersection(othercolumn.proxy_set))

    def _compare_name_for_result(self, other):
        """Return True if the given column element compares to this one
        when targeting within a result row."""

        return hasattr(other, 'name') and hasattr(self, 'name') and \
            other.name == self.name

    def _make_proxy(self, selectable, name=None, name_is_truncatable=False, **kw):
        """Create a new :class:`.ColumnElement` representing this
        :class:`.ColumnElement` as it appears in the select list of a
        descending selectable.

        """
        if name is None:
            name = self.anon_label
            # the collection key defaults to the string form of this
            # expression; fall back to the anonymous label if this
            # element can't be compiled standalone
            try:
                key = str(self)
            except exc.UnsupportedCompilationError:
                key = self.anon_label
        else:
            key = name
        co = ColumnClause(
            _as_truncated(name) if name_is_truncatable else name,
            type_=getattr(self, 'type', None),
            _selectable=selectable
        )
        co._proxies = [self]
        # carry clone lineage through to the proxy where available
        if selectable._is_clone_of is not None:
            co._is_clone_of = \
                selectable._is_clone_of.columns.get(key)
        selectable._columns[key] = co
        return co

    def compare(self, other, use_proxies=False, equivalents=None, **kw):
        """Compare this ColumnElement to another.

        Special arguments understood:

        :param use_proxies: when True, consider two columns that
          share a common base column as equivalent (i.e. shares_lineage())

        :param equivalents: a dictionary of columns as keys mapped to sets
          of columns. If the given "other" column is present in this
          dictionary, if any of the columns in the corresponding set() pass the
          comparison test, the result is True. This is used to expand the
          comparison to other columns that may be known to be equivalent to
          this one via foreign key or other criterion.

        """
        to_compare = (other, )
        if equivalents and other in equivalents:
            to_compare = equivalents[other].union(to_compare)

        for oth in to_compare:
            if use_proxies and self.shares_lineage(oth):
                return True
            elif hash(oth) == hash(self):
                return True
        # loop ``else``: no ``break`` occurs above, so this runs whenever
        # the loop completes without finding a match
        else:
            return False

    def label(self, name):
        """Produce a column label, i.e. ``<columnname> AS <name>``.

        This is a shortcut to the :func:`~.expression.label` function.

        if 'name' is None, an anonymous label name will be generated.

        """
        return Label(name, self, self.type)

    @util.memoized_property
    def anon_label(self):
        """provides a constant 'anonymous label' for this ColumnElement.

        This is a label() expression which will be named at compile time.
        The same label() is returned each time anon_label is called so
        that expressions can reference anon_label multiple times, producing
        the same label name at compile time.

        the compiler uses this function automatically at compile time
        for expressions that are known to be 'unnamed' like binary
        expressions and function calls.

        """
        return _anonymous_label('%%(%d %s)s' % (id(self), getattr(self,
            'name', 'anon')))
+
+
+
class BindParameter(ColumnElement):
    """Represent a bound parameter value.

    """

    __visit_name__ = 'bindparam'

    # True for binds generated by INSERT/UPDATE "crud" processing
    _is_crud = False

    def __init__(self, key, value=NO_ARG, type_=None,
                 unique=False, required=NO_ARG,
                 quote=None, callable_=None,
                 isoutparam=False,
                 _compared_to_operator=None,
                 _compared_to_type=None):
        """Construct a new :class:`.BindParameter`.

        :param key:
          the key for this bind param.  Will be used in the generated
          SQL statement for dialects that use named parameters.  This
          value may be modified when part of a compilation operation,
          if other :class:`BindParameter` objects exist with the same
          key, or if its length is too long and truncation is
          required.

        :param value:
          Initial value for this bind param.  This value may be
          overridden by the dictionary of parameters sent to statement
          compilation/execution.

          Defaults to ``None``, however if neither ``value`` nor
          ``callable`` are passed explicitly, the ``required`` flag will be
          set to ``True`` which has the effect of requiring a value be present
          when the statement is actually executed.

          .. versionchanged:: 0.8 The ``required`` flag is set to ``True``
             automatically if ``value`` or ``callable`` is not passed.

        :param callable\_:
          A callable function that takes the place of "value".  The function
          will be called at statement execution time to determine the
          ultimate value.   Used for scenarios where the actual bind
          value cannot be determined at the point at which the clause
          construct is created, but embedded bind values are still desirable.

        :param type\_:
          A ``TypeEngine`` object that will be used to pre-process the
          value corresponding to this :class:`BindParameter` at
          execution time.

        :param unique:
          if True, the key name of this BindParamClause will be
          modified if another :class:`BindParameter` of the same name
          already has been located within the containing
          :class:`.ClauseElement`.

        :param required:
          If ``True``, a value is required at execution time.  If not passed,
          is set to ``True`` or ``False`` based on whether or not
          one of ``value`` or ``callable`` were passed.

          .. versionchanged:: 0.8 If the ``required`` flag is not specified,
             it will be set automatically to ``True`` or ``False`` depending
             on whether or not the ``value`` or ``callable`` parameters
             were specified.

        :param quote:
          True if this parameter name requires quoting and is not
          currently known as a SQLAlchemy reserved word; this currently
          only applies to the Oracle backend.

        :param isoutparam:
          if True, the parameter should be treated like a stored procedure
          "OUT" parameter.

          .. seealso::

            :func:`.outparam`

        """
        # allow a Column to stand in for the key; adopt its name/type
        if isinstance(key, ColumnClause):
            type_ = key.type
            key = key.name
        if required is NO_ARG:
            # required defaults to True only when no value source was given
            required = (value is NO_ARG and callable_ is None)
        if value is NO_ARG:
            value = None

        if quote is not None:
            key = quoted_name(key, quote)

        if unique:
            # anonymize the key so it can't collide; resolved at compile time
            self.key = _anonymous_label('%%(%d %s)s' % (id(self), key
                                                        or 'param'))
        else:
            self.key = key or _anonymous_label('%%(%d param)s'
                                               % id(self))

        # identifying key that won't change across
        # clones, used to identify the bind's logical
        # identity
        self._identifying_key = self.key

        # key that was passed in the first place, used to
        # generate new keys
        self._orig_key = key or 'param'

        self.unique = unique
        self.value = value
        self.callable = callable_
        self.isoutparam = isoutparam
        self.required = required
        if type_ is None:
            if _compared_to_type is not None:
                # derive the type from the expression being compared against
                self.type = \
                    _compared_to_type.coerce_compared_value(
                        _compared_to_operator, value)
            else:
                # infer from the Python type of the value
                self.type = type_api._type_map.get(type(value),
                                                   type_api.NULLTYPE)
        elif isinstance(type_, type):
            # a type class was passed; instantiate it
            self.type = type_()
        else:
            self.type = type_

    def _with_value(self, value):
        """Return a copy of this :class:`.BindParameter` with the given value set."""
        cloned = self._clone()
        cloned.value = value
        cloned.callable = None
        cloned.required = False
        # re-infer the type if none was ever established
        if cloned.type is type_api.NULLTYPE:
            cloned.type = type_api._type_map.get(type(value),
                                                 type_api.NULLTYPE)
        return cloned

    @property
    def effective_value(self):
        """Return the value of this bound parameter,
        taking into account if the ``callable`` parameter
        was set.

        The ``callable`` value will be evaluated
        and returned if present, else ``value``.

        """
        if self.callable:
            return self.callable()
        else:
            return self.value

    def _clone(self):
        c = ClauseElement._clone(self)
        # a "unique" bind gets a fresh anonymized key on each clone
        if self.unique:
            c.key = _anonymous_label('%%(%d %s)s' % (id(c), c._orig_key
                                                     or 'param'))
        return c

    def _convert_to_unique(self):
        # switch a non-unique bind to unique mode, anonymizing its key
        if not self.unique:
            self.unique = True
            self.key = _anonymous_label('%%(%d %s)s' % (id(self),
                                                        self._orig_key or 'param'))

    def compare(self, other, **kw):
        """Compare this :class:`BindParameter` to the given
        clause."""

        return isinstance(other, BindParameter) \
            and self.type._compare_type_affinity(other.type) \
            and self.value == other.value

    def __getstate__(self):
        """execute a deferred value for serialization purposes."""

        d = self.__dict__.copy()
        v = self.value
        if self.callable:
            # callables can't be pickled reliably; capture their value now
            v = self.callable()
            d['callable'] = None
        d['value'] = v
        return d

    def __repr__(self):
        return 'BindParameter(%r, %r, type_=%r)' % (self.key,
                                                    self.value, self.type)
+
+
class TypeClause(ClauseElement):
    """Handle a type keyword in a SQL statement.

    Used by the ``Case`` statement.

    """

    __visit_name__ = 'typeclause'

    def __init__(self, type):
        # NOTE: the parameter deliberately mirrors the attribute name;
        # it shadows the ``type`` builtin within this method only.
        self.type = type
+
+
+class TextClause(Executable, ClauseElement):
+ """Represent a literal SQL text fragment.
+
+ Public constructor is the :func:`text()` function.
+
+ """
+
    __visit_name__ = 'textclause'

    # matches ":name" bind targets while skipping "::"-style casts,
    # backslash-escaped colons, and colons preceded by a word character
    _bind_params_regex = re.compile(r'(?<![:\w\x5c]):(\w+)(?!:)', re.UNICODE)
    # text() is executable; additionally honor the 'autocommit' option
    _execution_options = \
        Executable._execution_options.union(
            {'autocommit': PARSE_AUTOCOMMIT})
+
    @property
    def _select_iterable(self):
        # a text construct stands in for itself in a SELECT's column list
        return (self,)
+
    @property
    def selectable(self):
        # inspection hook: the text construct acts as its own selectable
        return self

    # a text construct contributes no FROM elements to be hidden
    _hide_froms = []
+
    def __init__(
        self,
        text,
        bind=None):
        """Construct a :class:`.TextClause` from a plain SQL string.

        :param text: the SQL text; ``:name`` tokens are collected
         as bind parameters.
        :param bind: optional engine or connection for execution.

        """
        self._bind = bind
        self._bindparams = {}

        def repl(m):
            # register an (un-typed, un-valued) bind for each ":name"
            # token found, and emit the token back unchanged
            self._bindparams[m.group(1)] = BindParameter(m.group(1))
            return ':%s' % m.group(1)

        # scan the string and search for bind parameter names, add them
        # to the list of bindparams
        self.text = self._bind_params_regex.sub(repl, text)
+
+ @classmethod
+ def _create_text(self, text, bind=None, bindparams=None,
+ typemap=None, autocommit=None):
+ """Construct a new :class:`.TextClause` clause, representing
+ a textual SQL string directly.
+
+ E.g.::
+
+ fom sqlalchemy import text
+
+ t = text("SELECT * FROM users")
+ result = connection.execute(t)
+
+ The advantages :func:`.text` provides over a plain string are
+ backend-neutral support for bind parameters, per-statement
+ execution options, as well as
+ bind parameter and result-column typing behavior, allowing
+ SQLAlchemy type constructs to play a role when executing
+ a statement that is specified literally. The construct can also
+ be provided with a ``.c`` collection of column elements, allowing
+ it to be embedded in other SQL expression constructs as a subquery.
+
+ Bind parameters are specified by name, using the format ``:name``.
+ E.g.::
+
+ t = text("SELECT * FROM users WHERE id=:user_id")
+ result = connection.execute(t, user_id=12)
+
+ For SQL statements where a colon is required verbatim, as within
+ an inline string, use a backslash to escape::
+
+ t = text("SELECT * FROM users WHERE name='\\:username'")
+
+ The :class:`.TextClause` construct includes methods which can
+ provide information about the bound parameters as well as the column
+ values which would be returned from the textual statement, assuming
+ it's an executable SELECT type of statement. The :meth:`.TextClause.bindparams`
+ method is used to provide bound parameter detail, and
+ :meth:`.TextClause.columns` method allows specification of
+ return columns including names and types::
+
+ t = text("SELECT * FROM users WHERE id=:user_id").\\
+ bindparams(user_id=7).\\
+ columns(id=Integer, name=String)
+
+ for id, name in connection.execute(t):
+ print(id, name)
+
+ The :func:`.text` construct is used internally in cases when
+ a literal string is specified for part of a larger query, such as
+ when a string is specified to the :meth:`.Select.where` method of
+ :class:`.Select`. In those cases, the same
+ bind parameter syntax is applied::
+
+ s = select([users.c.id, users.c.name]).where("id=:user_id")
+ result = connection.execute(s, user_id=12)
+
+ Using :func:`.text` explicitly usually implies the construction
+ of a full, standalone statement. As such, SQLAlchemy refers
+ to it as an :class:`.Executable` object, and it supports
+ the :meth:`Executable.execution_options` method. For example,
+ a :func:`.text` construct that should be subject to "autocommit"
+ can be set explicitly so using the ``autocommit`` option::
+
+ t = text("EXEC my_procedural_thing()").\\
+ execution_options(autocommit=True)
+
+ Note that SQLAlchemy's usual "autocommit" behavior applies to
+ :func:`.text` constructs implicitly - that is, statements which begin
+ with a phrase such as ``INSERT``, ``UPDATE``, ``DELETE``,
+ or a variety of other phrases specific to certain backends, will
+ be eligible for autocommit if no transaction is in progress.
+
+ :param text:
+ the text of the SQL statement to be created. use ``:<param>``
+ to specify bind parameters; they will be compiled to their
+ engine-specific format.
+
+ :param autocommit:
+ Deprecated. Use .execution_options(autocommit=<True|False>)
+ to set the autocommit option.
+
+ :param bind:
+ an optional connection or engine to be used for this text query.
+
+ :param bindparams:
+ Deprecated. A list of :func:`.bindparam` instances used to
+ provide information about parameters embedded in the statement.
+ This argument now invokes the :meth:`.TextClause.bindparams`
+ method on the construct before returning it. E.g.::
+
+ stmt = text("SELECT * FROM table WHERE id=:id",
+ bindparams=[bindparam('id', value=5, type_=Integer)])
+
+ Is equivalent to::
+
+ stmt = text("SELECT * FROM table WHERE id=:id").\\
+ bindparams(bindparam('id', value=5, type_=Integer))
+
+ .. deprecated:: 0.9.0 the :meth:`.TextClause.bindparams` method
+ supersedes the ``bindparams`` argument to :func:`.text`.
+
+ :param typemap:
+ Deprecated. A dictionary mapping the names of columns
+ represented in the columns clause of a ``SELECT`` statement
+ to type objects,
+ which will be used to perform post-processing on columns within
+ the result set. This parameter now invokes the :meth:`.TextClause.columns`
+ method, which returns a :class:`.TextAsFrom` construct that gains
+ a ``.c`` collection and can be embedded in other expressions. E.g.::
+
+ stmt = text("SELECT * FROM table",
+ typemap={'id': Integer, 'name': String},
+ )
+
+ Is equivalent to::
+
+ stmt = text("SELECT * FROM table").columns(id=Integer, name=String)
+
+ Or alternatively::
+
+ from sqlalchemy.sql import column
+ stmt = text("SELECT * FROM table").columns(
+ column('id', Integer),
+ column('name', String)
+ )
+
+ .. deprecated:: 0.9.0 the :meth:`.TextClause.columns` method
+ supersedes the ``typemap`` argument to :func:`.text`.
+
+ """
+ stmt = TextClause(text, bind=bind)
+ if bindparams:
+ stmt = stmt.bindparams(*bindparams)
+ if typemap:
+ stmt = stmt.columns(**typemap)
+ if autocommit is not None:
+ util.warn_deprecated('autocommit on text() is deprecated. '
+ 'Use .execution_options(autocommit=True)')
+ stmt = stmt.execution_options(autocommit=autocommit)
+
+ return stmt
+
+ @_generative
+ def bindparams(self, *binds, **names_to_values):
+ """Establish the values and/or types of bound parameters within
+ this :class:`.TextClause` construct.
+
+ Given a text construct such as::
+
+ from sqlalchemy import text
+ stmt = text("SELECT id, name FROM user WHERE name=:name "
+ "AND timestamp=:timestamp")
+
+ the :meth:`.TextClause.bindparams` method can be used to establish
+ the initial value of ``:name`` and ``:timestamp``,
+ using simple keyword arguments::
+
+ stmt = stmt.bindparams(name='jack',
+ timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5))
+
+ Where above, new :class:`.BindParameter` objects
+ will be generated with the names ``name`` and ``timestamp``, and
+ values of ``jack`` and ``datetime.datetime(2012, 10, 8, 15, 12, 5)``,
+ respectively. The types will be
+ inferred from the values given, in this case :class:`.String` and
+ :class:`.DateTime`.
+
+ When specific typing behavior is needed, the positional ``*binds``
+ argument can be used in which to specify :func:`.bindparam` constructs
+ directly. These constructs must include at least the ``key`` argument,
+ then an optional value and type::
+
+ from sqlalchemy import bindparam
+ stmt = stmt.bindparams(
+ bindparam('name', value='jack', type_=String),
+ bindparam('timestamp', type_=DateTime)
+ )
+
+ Above, we specified the type of :class:`.DateTime` for the ``timestamp``
+ bind, and the type of :class:`.String` for the ``name`` bind. In
+ the case of ``name`` we also set the default value of ``"jack"``.
+
+ Additional bound parameters can be supplied at statement execution
+ time, e.g.::
+
+ result = connection.execute(stmt,
+ timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5))
+
+ The :meth:`.TextClause.bindparams` method can be called repeatedly, where
+ it will re-use existing :class:`.BindParameter` objects to add new information.
+ For example, we can call :meth:`.TextClause.bindparams` first with
+ typing information, and a second time with value information, and it
+ will be combined::
+
+ stmt = text("SELECT id, name FROM user WHERE name=:name "
+ "AND timestamp=:timestamp")
+ stmt = stmt.bindparams(
+ bindparam('name', type_=String),
+ bindparam('timestamp', type_=DateTime)
+ )
+ stmt = stmt.bindparams(
+ name='jack',
+ timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5)
+ )
+
+
+ .. versionadded:: 0.9.0 The :meth:`.TextClause.bindparams` method supersedes
+ the argument ``bindparams`` passed to :func:`~.expression.text`.
+
+
+ """
+ self._bindparams = new_params = self._bindparams.copy()
+
+ for bind in binds:
+ try:
+ existing = new_params[bind.key]
+ except KeyError:
+ raise exc.ArgumentError(
+ "This text() construct doesn't define a "
+ "bound parameter named %r" % bind.key)
+ else:
+ new_params[existing.key] = bind
+
+ for key, value in names_to_values.items():
+ try:
+ existing = new_params[key]
+ except KeyError:
+ raise exc.ArgumentError(
+ "This text() construct doesn't define a "
+ "bound parameter named %r" % key)
+ else:
+ new_params[key] = existing._with_value(value)
+
+
+
+ @util.dependencies('sqlalchemy.sql.selectable')
+ def columns(self, selectable, *cols, **types):
+ """Turn this :class:`.TextClause` object into a :class:`.TextAsFrom`
+ object that can be embedded into another statement.
+
+ This function essentially bridges the gap between an entirely
+ textual SELECT statement and the SQL expression language concept
+ of a "selectable"::
+
+ from sqlalchemy.sql import column, text
+
+ stmt = text("SELECT id, name FROM some_table")
+ stmt = stmt.columns(column('id'), column('name')).alias('st')
+
+ stmt = select([mytable]).\\
+ select_from(
+ mytable.join(stmt, mytable.c.name == stmt.c.name)
+ ).where(stmt.c.id > 5)
+
+ Above, we used untyped :func:`.column` elements. These can also have
+ types specified, which will impact how the column behaves in expressions
+ as well as determining result set behavior::
+
+ stmt = text("SELECT id, name, timestamp FROM some_table")
+ stmt = stmt.columns(
+ column('id', Integer),
+ column('name', Unicode),
+ column('timestamp', DateTime)
+ )
+
+ for id, name, timestamp in connection.execute(stmt):
+ print(id, name, timestamp)
+
+ Keyword arguments allow just the names and types of columns to be specified,
+ where the :func:`.column` elements will be generated automatically::
+
+ stmt = text("SELECT id, name, timestamp FROM some_table")
+ stmt = stmt.columns(
+ id=Integer,
+ name=Unicode,
+ timestamp=DateTime
+ )
+
+ for id, name, timestamp in connection.execute(stmt):
+ print(id, name, timestamp)
+
+ The :meth:`.TextClause.columns` method provides a direct
+ route to calling :meth:`.FromClause.alias` as well as :meth:`.SelectBase.cte`
+ against a textual SELECT statement::
+
+ stmt = stmt.columns(id=Integer, name=String).cte('st')
+
+ stmt = select([sometable]).where(sometable.c.id == stmt.c.id)
+
+ .. versionadded:: 0.9.0 :func:`.text` can now be converted into a fully
+ featured "selectable" construct using the :meth:`.TextClause.columns`
+ method. This method supersedes the ``typemap`` argument to
+ :func:`.text`.
+
+ """
+
+ col_by_name = dict(
+ (col.key, col) for col in cols
+ )
+ for key, type_ in types.items():
+ col_by_name[key] = ColumnClause(key, type_)
+
+ return selectable.TextAsFrom(self, list(col_by_name.values()))
+
    @property
    def type(self):
        # A raw text construct has no inherent type; expressions built
        # against it are typed NULL unless .columns() / cast() is used.
        return type_api.NULLTYPE
+
    @property
    def comparator(self):
        # Delegate comparison-operator behavior to the comparator
        # factory of self.type (NULLTYPE here, per the property above).
        return self.type.comparator_factory(self)
+
+ def self_group(self, against=None):
+ if against is operators.in_op:
+ return Grouping(self)
+ else:
+ return self
+
    def _copy_internals(self, clone=_clone, **kw):
        # Rebuild the bind-parameter dict with cloned BindParameter
        # objects so a copied TextClause does not share mutable binds.
        self._bindparams = dict((b.key, clone(b, **kw))
                                for b in self._bindparams.values())
+
    def get_children(self, **kwargs):
        # The only child elements of a text construct are its binds.
        return list(self._bindparams.values())
+
+
class Null(ColumnElement):
    """Represent the SQL ``NULL`` keyword.

    The constant instance is obtained via the :func:`.null` function.

    """

    __visit_name__ = 'null'

    @util.memoized_property
    def type(self):
        # NULL carries no type information of its own.
        return type_api.NULLTYPE

    @classmethod
    def _singleton(cls):
        """Return the module-level :class:`.Null` constant."""

        return NULL

    def compare(self, other):
        # All Null constructs are interchangeable.
        return isinstance(other, Null)
+
+
class False_(ColumnElement):
    """Represent the ``false`` keyword, or equivalent, in a SQL statement.

    The constant instance is obtained via the :func:`.false` function.

    """

    __visit_name__ = 'false'

    @util.memoized_property
    def type(self):
        return type_api.BOOLEANTYPE

    def _negate(self):
        # NOT false -> true
        return TRUE

    @classmethod
    def _singleton(cls):
        """Return a constant :class:`.False_` construct.

        E.g.::

            >>> from sqlalchemy import false
            >>> print select([t.c.x]).where(false())
            SELECT x FROM t WHERE false

        A backend which does not support true/false constants renders
        an expression against 1 or 0 instead::

            >>> print select([t.c.x]).where(false())
            SELECT x FROM t WHERE 0 = 1

        The :func:`.true` and :func:`.false` constants also feature
        "short circuit" behavior within :func:`.and_` / :func:`.or_`
        conjunctions::

            >>> print select([t.c.x]).where(or_(t.c.x > 5, true()))
            SELECT x FROM t WHERE true

            >>> print select([t.c.x]).where(and_(t.c.x > 5, false()))
            SELECT x FROM t WHERE false

        .. versionchanged:: 0.9 :func:`.true` and :func:`.false` feature
           better integrated behavior within conjunctions and on dialects
           that don't support true/false constants.

        .. seealso::

            :func:`.true`

        """

        return FALSE

    def compare(self, other):
        # All False_ constructs are interchangeable.
        return isinstance(other, False_)
+
class True_(ColumnElement):
    """Represent the ``true`` keyword, or equivalent, in a SQL statement.

    The constant instance is obtained via the :func:`.true` function.

    """

    __visit_name__ = 'true'

    @util.memoized_property
    def type(self):
        return type_api.BOOLEANTYPE

    def _negate(self):
        # NOT true -> false
        return FALSE

    @classmethod
    def _ifnone(cls, other):
        """Coerce ``None`` to the TRUE constant, passing anything else
        through unchanged."""
        return cls._singleton() if other is None else other

    @classmethod
    def _singleton(cls):
        """Return a constant :class:`.True_` construct.

        E.g.::

            >>> from sqlalchemy import true
            >>> print select([t.c.x]).where(true())
            SELECT x FROM t WHERE true

        A backend which does not support true/false constants renders
        an expression against 1 or 0 instead::

            >>> print select([t.c.x]).where(true())
            SELECT x FROM t WHERE 1 = 1

        The :func:`.true` and :func:`.false` constants also feature
        "short circuit" behavior within :func:`.and_` / :func:`.or_`
        conjunctions::

            >>> print select([t.c.x]).where(or_(t.c.x > 5, true()))
            SELECT x FROM t WHERE true

            >>> print select([t.c.x]).where(and_(t.c.x > 5, false()))
            SELECT x FROM t WHERE false

        .. versionchanged:: 0.9 :func:`.true` and :func:`.false` feature
           better integrated behavior within conjunctions and on dialects
           that don't support true/false constants.

        .. seealso::

            :func:`.false`

        """

        return TRUE

    def compare(self, other):
        # All True_ constructs are interchangeable.
        return isinstance(other, True_)
+
# Module-level singletons returned by the _singleton() classmethods above
# and referenced by the corresponding _negate() implementations.
NULL = Null()
FALSE = False_()
TRUE = True_()
+
class ClauseList(ClauseElement):
    """Describe a list of clauses, separated by an operator.

    By default the separator is a comma, as in a column listing.

    """
    __visit_name__ = 'clauselist'

    def __init__(self, *clauses, **kwargs):
        # operator: the separator rendered between elements.
        # group: whether this list may parenthesize itself via self_group().
        # group_contents: whether each member is grouped against operator.
        self.operator = kwargs.pop('operator', operators.comma_op)
        self.group = kwargs.pop('group', True)
        self.group_contents = kwargs.pop('group_contents', True)
        coerced = (_literal_as_text(clause) for clause in clauses)
        if self.group_contents:
            self.clauses = [
                clause.self_group(against=self.operator)
                for clause in coerced]
        else:
            self.clauses = list(coerced)

    def __iter__(self):
        return iter(self.clauses)

    def __len__(self):
        return len(self.clauses)

    @property
    def _select_iterable(self):
        return iter(self)

    def append(self, clause):
        """Coerce *clause* and add it to the end of the list."""
        clause = _literal_as_text(clause)
        if self.group_contents:
            clause = clause.self_group(against=self.operator)
        self.clauses.append(clause)

    def _copy_internals(self, clone=_clone, **kw):
        self.clauses = [clone(clause, **kw) for clause in self.clauses]

    def get_children(self, **kwargs):
        return self.clauses

    @property
    def _from_objects(self):
        # Union of the FROM objects of every member clause.
        froms = []
        for clause in self.clauses:
            froms.extend(clause._from_objects)
        return froms

    def self_group(self, against=None):
        if self.group and operators.is_precedent(self.operator, against):
            return Grouping(self)
        return self

    def compare(self, other, **kw):
        """Compare this :class:`.ClauseList` to the given :class:`.ClauseList`,
        including a comparison of all the clause items.

        """
        if not isinstance(other, ClauseList):
            # a single-element list may compare equal to a bare element
            return len(self.clauses) == 1 and \
                self.clauses[0].compare(other, **kw)
        if len(self.clauses) != len(other.clauses):
            return False
        for ours, theirs in zip(self.clauses, other.clauses):
            if not ours.compare(theirs, **kw):
                return False
        return self.operator == other.operator
+
+
+
class BooleanClauseList(ClauseList, ColumnElement):
    # Renders like a ClauseList, but is additionally a boolean-typed
    # column expression; used for AND / OR conjunctions.
    __visit_name__ = 'clauselist'

    def __init__(self, *arg, **kw):
        # Instances are created only via _construct(), which may return
        # an entirely different element (short-circuit / flattening).
        raise NotImplementedError(
            "BooleanClauseList has a private constructor")

    @classmethod
    def _construct(cls, operator, continue_on, skip_on, *clauses, **kw):
        """Build a conjunction of *clauses* joined by *operator*.

        ``continue_on`` is the element type that is the identity for the
        operator (``True_`` for AND, ``False_`` for OR) -- such elements
        are dropped from the list.  ``skip_on`` is the element type that
        short-circuits the whole conjunction (``False_`` for AND,
        ``True_`` for OR) -- encountering one returns it immediately.
        See :meth:`.and_` / :meth:`.or_` below for the pairings.
        """
        convert_clauses = []

        # allow a single generator argument in place of *clauses
        clauses = util.coerce_generator_arg(clauses)
        for clause in clauses:
            clause = _literal_as_text(clause)

            if isinstance(clause, continue_on):
                # identity element: contributes nothing, drop it
                continue
            elif isinstance(clause, skip_on):
                # absorbing element: the whole conjunction reduces to it
                return clause.self_group(against=operators._asbool)

            convert_clauses.append(clause)

        if len(convert_clauses) == 1:
            # single remaining element: no conjunction needed
            return convert_clauses[0].self_group(against=operators._asbool)
        elif not convert_clauses and clauses:
            # everything was an identity element; return one of them
            return clauses[0].self_group(against=operators._asbool)

        convert_clauses = [c.self_group(against=operator)
                            for c in convert_clauses]

        # bypass __init__ (which raises) and assemble the instance directly
        self = cls.__new__(cls)
        self.clauses = convert_clauses
        self.group = True
        self.operator = operator
        self.group_contents = True
        self.type = type_api.BOOLEANTYPE
        return self

    @classmethod
    def and_(cls, *clauses):
        """Join a list of clauses together using the ``AND`` operator.

        The ``&`` operator is also overloaded on all :class:`.ColumnElement`
        subclasses to produce the
        same result.

        """
        return cls._construct(operators.and_, True_, False_, *clauses)

    @classmethod
    def or_(cls, *clauses):
        """Join a list of clauses together using the ``OR`` operator.

        The ``|`` operator is also overloaded on all
        :class:`.ColumnElement` subclasses to produce the
        same result.

        """
        return cls._construct(operators.or_, False_, True_, *clauses)

    @property
    def _select_iterable(self):
        return (self, )

    def self_group(self, against=None):
        # an empty conjunction renders as nothing; never parenthesize it
        if not self.clauses:
            return self
        else:
            return super(BooleanClauseList, self).self_group(against=against)

    def _negate(self):
        # skip ColumnElement._negate in the MRO; use the ClauseList form
        return ClauseList._negate(self)
+
+
# Public module-level conjunction functions.
and_ = BooleanClauseList.and_
or_ = BooleanClauseList.or_
+
class Tuple(ClauseList, ColumnElement):
    """Represent a SQL tuple."""

    def __init__(self, *clauses, **kw):
        """Return a :class:`.Tuple`.

        The main usage is to produce a composite IN construct::

            from sqlalchemy import tuple_

            tuple_(table.c.col1, table.c.col2).in_(
                [(1, 2), (5, 12), (10, 19)]
            )

        .. warning::

            Not all backends support the composite IN construct; it is
            currently known to work on Postgresql and MySQL, but not
            SQLite.  Unsupported backends raise a subclass of
            :class:`~sqlalchemy.exc.DBAPIError` when such an expression
            is invoked.

        """

        coerced = [_literal_as_binds(c) for c in clauses]
        # If no explicit type_ was supplied, infer one from the elements.
        self.type = kw.pop('type_', None)
        if self.type is None:
            self.type = _type_from_args(coerced)
        super(Tuple, self).__init__(*coerced, **kw)

    @property
    def _select_iterable(self):
        return (self, )

    def _bind_param(self, operator, obj):
        # Expand a Python tuple of values into a grouped tuple of binds.
        binds = [
            BindParameter(None, value, _compared_to_operator=operator,
                          _compared_to_type=self.type, unique=True)
            for value in obj
        ]
        return Tuple(*binds).self_group()
+
+
class Case(ColumnElement):
    """Represent a SQL ``CASE`` construct.


    """
    __visit_name__ = 'case'

    def __init__(self, whens, value=None, else_=None):
        """Produce a :class:`.Case` object.

        :param whens: A sequence of pairs, or alternatively a dict,
          to be translated into "WHEN / THEN" clauses.

        :param value: Optional for simple case statements, produces
          a column expression as in "CASE <expr> WHEN ..."

        :param else\_: Optional as well, for case defaults produces
          the "ELSE" portion of the "CASE" statement.

        The expressions used for THEN and ELSE,
        when specified as strings, will be interpreted
        as bound values. To specify textual SQL expressions
        for these, use the :func:`literal_column`
        construct.

        The expressions used for the WHEN criterion
        may only be literal strings when "value" is
        present, i.e. CASE table.somecol WHEN "x" THEN "y".
        Otherwise, literal strings are not accepted
        in this position, and either the text(<string>)
        or literal(<string>) constructs must be used to
        interpret raw string values.

        Usage examples::

          case([(orderline.c.qty > 100, item.c.specialprice),
                (orderline.c.qty > 10, item.c.bulkprice)
              ], else_=item.c.regularprice)

          case(value=emp.c.type, whens={
                  'engineer': emp.c.salary * 1.1,
                  'manager': emp.c.salary * 3,
              })

        Using :func:`.literal_column()`, to allow for databases that
        do not support bind parameters in the ``then`` clause. The type
        can be specified which determines the type of the :func:`case()` construct
        overall::

            case([(orderline.c.qty > 100,
                    literal_column("'greaterthan100'", String)),
                  (orderline.c.qty > 10, literal_column("'greaterthan10'",
                    String))
                ], else_=literal_column("'lessthan10'", String))

        """

        # accept a dict of whens as well as a sequence of pairs
        try:
            whens = util.dictlike_iteritems(whens)
        except TypeError:
            pass

        if value is not None:
            # "simple" CASE: WHEN criteria compare against `value`,
            # so plain literals are allowed and coerced to binds
            whenlist = [
                (_literal_as_binds(c).self_group(),
                _literal_as_binds(r)) for (c, r) in whens
            ]
        else:
            # "searched" CASE: criteria must be full boolean expressions;
            # bare literal strings are rejected
            whenlist = [
                (_no_literals(c).self_group(),
                _literal_as_binds(r)) for (c, r) in whens
            ]

        # overall type is taken from the last THEN result expression
        if whenlist:
            type_ = list(whenlist[-1])[-1].type
        else:
            type_ = None

        if value is None:
            self.value = None
        else:
            self.value = _literal_as_binds(value)

        self.type = type_
        self.whens = whenlist
        if else_ is not None:
            self.else_ = _literal_as_binds(else_)
        else:
            self.else_ = None

    def _copy_internals(self, clone=_clone, **kw):
        if self.value is not None:
            self.value = clone(self.value, **kw)
        self.whens = [(clone(x, **kw), clone(y, **kw))
                            for x, y in self.whens]
        if self.else_ is not None:
            self.else_ = clone(self.else_, **kw)

    def get_children(self, **kwargs):
        # yield value, then each (criterion, result) pair, then ELSE
        if self.value is not None:
            yield self.value
        for x, y in self.whens:
            yield x
            yield y
        if self.else_ is not None:
            yield self.else_

    @property
    def _from_objects(self):
        return list(itertools.chain(*[x._from_objects for x in
                    self.get_children()]))
+
+
def literal_column(text, type_=None):
    """Produce a textual column expression rendered exactly as given.

    The returned object supports further expressions like any other
    column object -- comparison, math and string operations.  The
    type\_ parameter determines expression behavior (e.g. whether
    ``+`` means string concatenation or numerical addition).

    Unlike :func:`column`, no quoting rules are applied to *text*;
    use :func:`column` for a column-name expression that should be
    subject to quoting.

    :param text: the text of the expression; can be any SQL expression.

    :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine`
      object providing result-set translation and additional expression
      semantics for this column; defaults to NullType when omitted.

    """
    return ColumnClause(text, type_=type_, is_literal=True)
+
+
+
class Cast(ColumnElement):
    """Represent the SQL ``CAST`` construct."""

    __visit_name__ = 'cast'

    def __init__(self, expression, type_):
        """Return a :class:`.Cast` object.

        Equivalent of SQL ``CAST(clause AS totype)``, e.g.::

            cast(table.c.unit_price * table.c.qty, Numeric(10,4))

        or::

            cast(table.c.timestamp, DATE)

        :param expression: Column-oriented expression.
        :param type_: A :class:`.TypeEngine` class or instance indicating
          the type to which the CAST should apply.

        .. seealso::

            :func:`.type_coerce` - Python-side type coercion without emitting
            CAST.

        """
        # resolve the type first; the coerced clause is typed against it
        self.type = type_api.to_instance(type_)
        self.clause = _literal_as_binds(expression, type_=self.type)
        self.typeclause = TypeClause(self.type)

    def _copy_internals(self, clone=_clone, **kw):
        self.clause = clone(self.clause, **kw)
        self.typeclause = clone(self.typeclause, **kw)

    def get_children(self, **kwargs):
        return (self.clause, self.typeclause)

    @property
    def _from_objects(self):
        return self.clause._from_objects
+
+
class Extract(ColumnElement):
    """Represent a SQL EXTRACT clause, ``extract(field FROM expr)``."""

    __visit_name__ = 'extract'

    def __init__(self, field, expr, **kwargs):
        """Return a :class:`.Extract` construct.

        Typically available as :func:`.extract` as well as
        ``func.extract`` from the :data:`.func` namespace.

        """
        # EXTRACT always yields an integer result
        self.type = type_api.INTEGERTYPE
        self.field = field
        self.expr = _literal_as_binds(expr, None)

    def _copy_internals(self, clone=_clone, **kw):
        self.expr = clone(self.expr, **kw)

    def get_children(self, **kwargs):
        return (self.expr, )

    @property
    def _from_objects(self):
        return self.expr._from_objects
+
+
class UnaryExpression(ColumnElement):
    """Define a 'unary' expression.

    A unary expression has a single column expression
    and an operator.  The operator can be placed on the left
    (where it is called the 'operator') or right (where it is called the
    'modifier') of the column expression.

    """
    __visit_name__ = 'unary'

    def __init__(self, element, operator=None, modifier=None,
                            type_=None, negate=None):
        # exactly one of operator (prefix) / modifier (suffix) is
        # normally supplied; `negate` is the operator that produces the
        # logical inverse, used by _negate().
        self.operator = operator
        self.modifier = modifier
        self.element = element.self_group(against=self.operator or self.modifier)
        self.type = type_api.to_instance(type_)
        self.negate = negate

    @classmethod
    def _create_nullsfirst(cls, column):
        """Return a NULLS FIRST ``ORDER BY`` clause element.

        e.g.::

            someselect.order_by(desc(table1.mycol).nullsfirst())

        produces::

            ORDER BY mycol DESC NULLS FIRST

        """
        return UnaryExpression(
                _literal_as_text(column), modifier=operators.nullsfirst_op)


    @classmethod
    def _create_nullslast(cls, column):
        """Return a NULLS LAST ``ORDER BY`` clause element.

        e.g.::

            someselect.order_by(desc(table1.mycol).nullslast())

        produces::

            ORDER BY mycol DESC NULLS LAST

        """
        return UnaryExpression(
                _literal_as_text(column), modifier=operators.nullslast_op)


    @classmethod
    def _create_desc(cls, column):
        """Return a descending ``ORDER BY`` clause element.

        e.g.::

            someselect.order_by(desc(table1.mycol))

        produces::

            ORDER BY mycol DESC

        """
        return UnaryExpression(
                    _literal_as_text(column), modifier=operators.desc_op)

    @classmethod
    def _create_asc(cls, column):
        """Return an ascending ``ORDER BY`` clause element.

        e.g.::

            someselect.order_by(asc(table1.mycol))

        produces::

            ORDER BY mycol ASC

        """
        return UnaryExpression(
                _literal_as_text(column), modifier=operators.asc_op)

    @classmethod
    def _create_distinct(cls, expr):
        """Return a ``DISTINCT`` clause.

        e.g.::

            distinct(a)

        renders::

            DISTINCT a

        """
        expr = _literal_as_binds(expr)
        # DISTINCT preserves the type of the wrapped expression
        return UnaryExpression(expr,
                    operator=operators.distinct_op, type_=expr.type)

    @util.memoized_property
    def _order_by_label_element(self):
        # only ASC/DESC wrappers are transparent for ORDER BY label lookup
        if self.modifier in (operators.desc_op, operators.asc_op):
            return self.element._order_by_label_element
        else:
            return None

    @property
    def _from_objects(self):
        return self.element._from_objects

    def _copy_internals(self, clone=_clone, **kw):
        self.element = clone(self.element, **kw)

    def get_children(self, **kwargs):
        return self.element,

    def compare(self, other, **kw):
        """Compare this :class:`UnaryExpression` against the given
        :class:`.ClauseElement`."""

        return (
            isinstance(other, UnaryExpression) and
            self.operator == other.operator and
            self.modifier == other.modifier and
            self.element.compare(other.element, **kw)
        )

    def _negate(self):
        if self.negate is not None:
            # swap operator/negate so double negation round-trips
            return UnaryExpression(
                self.element,
                operator=self.negate,
                negate=self.operator,
                modifier=self.modifier,
                type_=self.type)
        else:
            # no known inverse operator; fall back to NOT-wrapping
            return ClauseElement._negate(self)

    def self_group(self, against=None):
        if self.operator and operators.is_precedent(self.operator, against):
            return Grouping(self)
        else:
            return self
+
+
class AsBoolean(UnaryExpression):
    """A unary wrapper marking an element as used in a boolean context.

    Unlike a generic :class:`.UnaryExpression`, it never parenthesizes
    itself, and negation is delegated to the wrapped element.

    """

    def __init__(self, element, operator, negate):
        self.modifier = None
        self.operator = operator
        self.negate = negate
        self.element = element
        self.type = type_api.BOOLEANTYPE

    def self_group(self, against=None):
        # never requires grouping parenthesis
        return self

    def _negate(self):
        # negate the wrapped element directly
        return self.element._negate()
+
+
class BinaryExpression(ColumnElement):
    """Represent an expression that is ``LEFT <operator> RIGHT``.

    A :class:`.BinaryExpression` is generated automatically
    whenever two column expressions are used in a Python binary expresion::

        >>> from sqlalchemy.sql import column
        >>> column('a') + column('b')
        <sqlalchemy.sql.expression.BinaryExpression object at 0x101029dd0>
        >>> print column('a') + column('b')
        a + b

    """

    __visit_name__ = 'binary'

    def __init__(self, left, right, operator, type_=None,
                    negate=None, modifiers=None):
        # allow compatibility with libraries that
        # refer to BinaryExpression directly and pass strings
        if isinstance(operator, util.string_types):
            operator = operators.custom_op(operator)
        # keep the un-grouped originals for the __bool__ hash comparison
        self._orig = (left, right)
        self.left = left.self_group(against=operator)
        self.right = right.self_group(against=operator)
        self.operator = operator
        self.type = type_api.to_instance(type_)
        self.negate = negate

        if modifiers is None:
            self.modifiers = {}
        else:
            self.modifiers = modifiers

    def __bool__(self):
        # for == / !=, truth-testing compares the hash identity of the
        # two original operands (stdlib `operator` module, not `operators`)
        if self.operator in (operator.eq, operator.ne):
            return self.operator(hash(self._orig[0]), hash(self._orig[1]))
        else:
            raise TypeError("Boolean value of this clause is not defined")

    # Python 2 spelling of __bool__
    __nonzero__ = __bool__

    @property
    def is_comparison(self):
        return operators.is_comparison(self.operator)

    @property
    def _from_objects(self):
        return self.left._from_objects + self.right._from_objects

    def _copy_internals(self, clone=_clone, **kw):
        self.left = clone(self.left, **kw)
        self.right = clone(self.right, **kw)

    def get_children(self, **kwargs):
        return self.left, self.right

    def compare(self, other, **kw):
        """Compare this :class:`BinaryExpression` against the
        given :class:`BinaryExpression`."""

        # operands may match directly, or swapped when the operator
        # is commutative (e.g. a + b vs. b + a)
        return (
            isinstance(other, BinaryExpression) and
            self.operator == other.operator and
            (
                self.left.compare(other.left, **kw) and
                self.right.compare(other.right, **kw) or
                (
                    operators.is_commutative(self.operator) and
                    self.left.compare(other.right, **kw) and
                    self.right.compare(other.left, **kw)
                )
            )
        )

    def self_group(self, against=None):
        if operators.is_precedent(self.operator, against):
            return Grouping(self)
        else:
            return self

    def _negate(self):
        if self.negate is not None:
            # swap operator/negate so double negation round-trips;
            # result is always boolean-typed
            return BinaryExpression(
                self.left,
                self.right,
                self.negate,
                negate=self.operator,
                type_=type_api.BOOLEANTYPE,
                modifiers=self.modifiers)
        else:
            return super(BinaryExpression, self)._negate()
+
+
+
+
class Grouping(ColumnElement):
    """Represent a grouping within a column expression"""

    __visit_name__ = 'grouping'

    def __init__(self, element):
        self.element = element
        # not all groupable elements carry a type
        self.type = getattr(element, 'type', type_api.NULLTYPE)

    def self_group(self, against=None):
        # already grouped; never wrap again
        return self

    @property
    def _label(self):
        inner_label = getattr(self.element, '_label', None)
        return inner_label or self.anon_label

    def _copy_internals(self, clone=_clone, **kw):
        self.element = clone(self.element, **kw)

    def get_children(self, **kwargs):
        return (self.element, )

    @property
    def _from_objects(self):
        return self.element._from_objects

    def __getattr__(self, attr):
        # proxy any other attribute access to the wrapped element
        return getattr(self.element, attr)

    def __getstate__(self):
        # pickle only the wrapped element and type, bypassing __getattr__
        return {'element': self.element, 'type': self.type}

    def __setstate__(self, state):
        self.element = state['element']
        self.type = state['type']

    def compare(self, other, **kw):
        if not isinstance(other, Grouping):
            return False
        return self.element.compare(other.element)
+
+
class Over(ColumnElement):
    """Represent an OVER clause.

    This is a special operator against a so-called
    "window" function, as well as any aggregate function,
    which produces results relative to the result set
    itself.  It's supported only by certain database
    backends.

    """
    __visit_name__ = 'over'

    # both clauses are optional; None means "not rendered"
    order_by = None
    partition_by = None

    def __init__(self, func, partition_by=None, order_by=None):
        """Produce an :class:`.Over` object against a function.

        Used against aggregate or so-called "window" functions,
        for database backends that support window functions.

        E.g.::

            from sqlalchemy import over
            over(func.row_number(), order_by='x')

        would produce ``ROW_NUMBER() OVER(ORDER BY x)``.

        :param func: a :class:`.FunctionElement` construct, typically
         generated by :data:`~.expression.func`.
        :param partition_by: a column element or string, or a list
         of such, used as the PARTITION BY clause of the OVER construct.
        :param order_by: a column element or string, or a list
         of such, used as the ORDER BY clause of the OVER construct.

        Also available from the :data:`~.expression.func` construct
        itself via the :meth:`.FunctionElement.over` method.

        .. versionadded:: 0.7

        """
        self.func = func
        if partition_by is not None:
            self.partition_by = ClauseList(*util.to_list(partition_by))
        if order_by is not None:
            self.order_by = ClauseList(*util.to_list(order_by))

    @util.memoized_property
    def type(self):
        # the OVER expression is typed like the underlying function
        return self.func.type

    def get_children(self, **kwargs):
        children = (self.func, self.partition_by, self.order_by)
        return [c for c in children if c is not None]

    def _copy_internals(self, clone=_clone, **kw):
        self.func = clone(self.func, **kw)
        if self.partition_by is not None:
            self.partition_by = clone(self.partition_by, **kw)
        if self.order_by is not None:
            self.order_by = clone(self.order_by, **kw)

    @property
    def _from_objects(self):
        froms = []
        for c in (self.func, self.partition_by, self.order_by):
            if c is not None:
                froms.extend(c._from_objects)
        return froms
+
+
class Label(ColumnElement):
    """Represents a column label (AS).

    Represent a label, as typically applied to any column-level
    element using the ``AS`` sql keyword.

    """

    __visit_name__ = 'label'

    def __init__(self, name, element, type_=None):
        """Return a :class:`Label` object for the
        given :class:`.ColumnElement`.

        A label changes the name of an element in the columns clause of a
        ``SELECT`` statement, typically via the ``AS`` SQL keyword.

        This functionality is more conveniently available via the
        :meth:`.ColumnElement.label` method on :class:`.ColumnElement`.

        :param name: label name

        :param obj: a :class:`.ColumnElement`.

        """
        # unwrap nested labels so we label the innermost element directly
        while isinstance(element, Label):
            element = element.element
        if name:
            self.name = name
        else:
            # generate a deterministic anonymous label keyed on object id;
            # the '%%(%d %s)s' template is resolved at compile time
            self.name = _anonymous_label('%%(%d %s)s' % (id(self),
                                getattr(element, 'name', 'anon')))
        self.key = self._label = self._key_label = self.name
        self._element = element
        self._type = type_
        self._proxies = [element]

    def __reduce__(self):
        # pickle support: reconstruct from constructor arguments
        return self.__class__, (self.name, self._element, self._type)

    @util.memoized_property
    def _order_by_label_element(self):
        # a label is its own ORDER BY label target
        return self

    @util.memoized_property
    def type(self):
        # explicit type_ wins; otherwise inherit from the element
        return type_api.to_instance(
                    self._type or getattr(self._element, 'type', None)
                )

    @util.memoized_property
    def element(self):
        # group the element as the left side of "AS"; memoized
        return self._element.self_group(against=operators.as_)

    def self_group(self, against=None):
        # if grouping changed the inner element, re-wrap in a new Label
        sub_element = self._element.self_group(against=against)
        if sub_element is not self._element:
            return Label(self.name,
                        sub_element,
                        type_=self._type)
        else:
            return self

    @property
    def primary_key(self):
        return self.element.primary_key

    @property
    def foreign_keys(self):
        return self.element.foreign_keys

    def get_children(self, **kwargs):
        return self.element,

    def _copy_internals(self, clone=_clone, **kw):
        # NOTE(review): assigns to `element`, overwriting the memoized
        # property value on this instance -- confirm intended
        self.element = clone(self.element, **kw)

    @property
    def _from_objects(self):
        return self.element._from_objects

    def _make_proxy(self, selectable, name=None, **kw):
        # proxy the underlying element under this label's name, and
        # register this Label in the proxy chain
        e = self.element._make_proxy(selectable,
                                name=name if name else self.name)
        e._proxies.append(self)
        if self._type is not None:
            e.type = self._type
        return e
+
+
class ColumnClause(Immutable, ColumnElement):
    """Represents a generic column expression from any textual string.

    This includes columns associated with tables, aliases and select
    statements, but also any arbitrary text.  May or may not be bound
    to an underlying :class:`.Selectable`.

    :class:`.ColumnClause` is constructed by itself typically via
    the :func:`~.expression.column` function.  It may be placed directly
    into constructs such as :func:`.select` constructs::

        from sqlalchemy.sql import column, select

        c1, c2 = column("c1"), column("c2")
        s = select([c1, c2]).where(c1==5)

    There is also a variant on :func:`~.expression.column` known
    as :func:`~.expression.literal_column` - the difference is that
    in the latter case, the string value is assumed to be an exact
    expression, rather than a column name, so that no quoting rules
    or similar are applied::

        from sqlalchemy.sql import literal_column, select

        s = select([literal_column("5 + 7")])

    :class:`.ColumnClause` can also be used in a table-like
    fashion by combining the :func:`~.expression.column` function
    with the :func:`~.expression.table` function, to produce
    a "lightweight" form of table metadata::

        from sqlalchemy.sql import table, column

        user = table("user",
                column("id"),
                column("name"),
                column("description"),
        )

    The above construct can be created in an ad-hoc fashion and is
    not associated with any :class:`.schema.MetaData`, unlike its
    more full fledged :class:`.schema.Table` counterpart.

    """
    __visit_name__ = 'column'

    # a plain ColumnClause carries no DDL-level default/onupdate behavior
    onupdate = default = server_default = server_onupdate = None

    # group of memoized attributes (_from_objects, _key_label, _label)
    # which are expired together whenever ``.table`` is re-assigned;
    # see _set_table() below
    _memoized_property = util.group_expirable_memoized_property()

    def __init__(self, text, type_=None, is_literal=False, _selectable=None):
        """Construct a :class:`.ColumnClause` object.

        :param text: the name of the column.  Quoting rules will be
         applied to the clause like any other column name, unless
         ``is_literal`` is set; for textual column constructs that are
         not to be quoted, use the :func:`literal_column` function.

        :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine`
         object which will provide result-set translation for this column.

        :param is_literal: if True, the :class:`.ColumnClause` is assumed to
         be an exact expression that will be delivered to the output with no
         quoting rules applied regardless of case sensitive settings.  The
         :func:`literal_column()` function is usually used to create such a
         :class:`.ColumnClause`.

        :param _selectable: internal use only; the parent
         :class:`.Selectable` this column is bound to, if any.

        """

        self.key = self.name = text
        self.table = _selectable
        self.type = type_api.to_instance(type_)
        self.is_literal = is_literal

    def _compare_name_for_result(self, other):
        # for literal columns, table-less columns, or non-column targets,
        # fall back to the base class name comparison; otherwise compare
        # via overlap of proxy sets
        if self.is_literal or \
                self.table is None or \
                not hasattr(other, 'proxy_set') or (
                    isinstance(other, ColumnClause) and other.is_literal
                ):
            return super(ColumnClause, self).\
                _compare_name_for_result(other)
        else:
            return other.proxy_set.intersection(self.proxy_set)

    def _get_table(self):
        return self.__dict__['table']

    def _set_table(self, table):
        # re-assigning the table invalidates the group-memoized
        # attributes (_from_objects, _key_label, _label) computed
        # against the previous table
        self._memoized_property.expire_instance(self)
        self.__dict__['table'] = table
    table = property(_get_table, _set_table)

    @_memoized_property
    def _from_objects(self):
        t = self.table
        if t is not None:
            return [t]
        else:
            return []

    @util.memoized_property
    def description(self):
        # on py2k, render an ascii-safe byte string for error messages
        if util.py3k:
            return self.name
        else:
            return self.name.encode('ascii', 'backslashreplace')

    @_memoized_property
    def _key_label(self):
        # label based on ``.key`` when it differs from ``.name``
        if self.key != self.name:
            return self._gen_label(self.key)
        else:
            return self._label

    @_memoized_property
    def _label(self):
        return self._gen_label(self.name)

    def _gen_label(self, name):
        # generate a "<table>_<name>"-style label for result targeting;
        # literal columns are never labeled
        t = self.table

        if self.is_literal:
            return None

        elif t is not None and t.named_with_column:
            if getattr(t, 'schema', None):
                label = t.schema.replace('.', '_') + "_" + \
                    t.name + "_" + name
            else:
                label = t.name + "_" + name

            # propagate name quoting rules for labels.
            if getattr(name, "quote", None) is not None:
                if isinstance(label, quoted_name):
                    label.quote = name.quote
                else:
                    label = quoted_name(label, name.quote)
            elif getattr(t.name, "quote", None) is not None:
                # can't get this situation to occur, so let's
                # assert false on it for now
                assert not isinstance(label, quoted_name)
                label = quoted_name(label, t.name.quote)

            # ensure the label name doesn't conflict with that
            # of an existing column
            if label in t.c:
                _label = label
                counter = 1
                while _label in t.c:
                    _label = label + "_" + str(counter)
                    counter += 1
                label = _label

            return _as_truncated(label)

        else:
            return name

    def _bind_param(self, operator, obj):
        # comparisons against this column produce a unique bind
        # parameter carrying this column's name and type
        return BindParameter(self.name, obj,
                             _compared_to_operator=operator,
                             _compared_to_type=self.type,
                             unique=True)

    def _make_proxy(self, selectable, name=None, attach=True,
                    name_is_truncatable=False, **kw):
        # propagate the "is_literal" flag only if we are keeping our name,
        # otherwise its considered to be a label
        is_literal = self.is_literal and (name is None or name == self.name)
        c = self._constructor(
            _as_truncated(name or self.name) if \
            name_is_truncatable else \
            (name or self.name),
            type_=self.type,
            _selectable=selectable,
            is_literal=is_literal
        )
        if name is None:
            c.key = self.key
        c._proxies = [self]
        if selectable._is_clone_of is not None:
            c._is_clone_of = \
                selectable._is_clone_of.columns.get(c.key)

        if attach:
            selectable._columns[c.key] = c
        return c
+
+
class _IdentifiedClause(Executable, ClauseElement):
    """Base for clauses identified by a simple name, such as savepoints."""

    __visit_name__ = 'identified'

    # these statements run within an explicit transaction scope;
    # disable the autocommit execution option for them
    _execution_options = Executable._execution_options.union(
        {'autocommit': False})

    def __init__(self, ident):
        self.ident = ident
+
+
class SavepointClause(_IdentifiedClause):
    """Represent a SAVEPOINT clause, identified by name."""

    __visit_name__ = 'savepoint'
+
+
class RollbackToSavepointClause(_IdentifiedClause):
    """Represent a ROLLBACK TO SAVEPOINT clause, identified by name."""

    __visit_name__ = 'rollback_to_savepoint'
+
+
class ReleaseSavepointClause(_IdentifiedClause):
    """Represent a RELEASE SAVEPOINT clause, identified by name."""

    __visit_name__ = 'release_savepoint'
+
+
class quoted_name(util.text_type):
    """Represent a SQL identifier combined with quoting preferences.

    :class:`.quoted_name` is a Python unicode/str subclass which
    represents a particular identifier name along with a
    ``quote`` flag.  This ``quote`` flag, when set to
    ``True`` or ``False``, overrides automatic quoting behavior
    for this identifier in order to either unconditionally quote
    or to not quote the name.  If left at its default of ``None``,
    quoting behavior is applied to the identifier on a per-backend basis
    based on an examination of the token itself.

    A :class:`.quoted_name` object with ``quote=True`` is also
    prevented from being modified in the case of a so-called
    "name normalize" option.  Certain database backends, such as
    Oracle, Firebird, and DB2 "normalize" case-insensitive names
    as uppercase.  The SQLAlchemy dialects for these backends
    convert from SQLAlchemy's lower-case-means-insensitive convention
    to the upper-case-means-insensitive conventions of those backends.
    The ``quote=True`` flag here will prevent this conversion from occurring
    to support an identifier that's quoted as all lower case against
    such a backend.

    The :class:`.quoted_name` object is normally created automatically
    when specifying the name for key schema constructs such as :class:`.Table`,
    :class:`.Column`, and others.  The class can also be passed explicitly
    as the name to any function that receives a name which can be quoted.
    Such as to use the :meth:`.Engine.has_table` method with an unconditionally
    quoted name::

        from sqlalchemy import create_engine
        from sqlalchemy.sql.elements import quoted_name

        engine = create_engine("oracle+cx_oracle://some_dsn")
        engine.has_table(quoted_name("some_table", True))

    The above logic will run the "has table" logic against the Oracle backend,
    passing the name exactly as ``"some_table"`` without converting to
    upper case.

    .. versionadded:: 0.9.0

    """

    def __new__(cls, value, quote):
        # None passes through unchanged rather than producing a
        # quoted_name wrapping None
        if value is None:
            return None
        # experimental - don't bother with quoted_name
        # if quote flag is None.  doesn't seem to make any dent
        # in performance however
        # elif not sprcls and quote is None:
        #     return value
        elif isinstance(value, cls) and (
            quote is None or value.quote == quote
        ):
            # already a quoted_name with a compatible quote flag;
            # reuse the existing object
            return value
        self = super(quoted_name, cls).__new__(cls, value)
        self.quote = quote
        return self

    def __reduce__(self):
        # pickle as the plain string value plus the quote flag
        return quoted_name, (util.text_type(self), self.quote)

    @util.memoized_instancemethod
    def lower(self):
        # an unconditionally-quoted name is case-significant;
        # leave its case unchanged
        if self.quote:
            return self
        else:
            return util.text_type(self).lower()

    @util.memoized_instancemethod
    def upper(self):
        # an unconditionally-quoted name is case-significant;
        # leave its case unchanged
        if self.quote:
            return self
        else:
            return util.text_type(self).upper()

    def __repr__(self):
        # produce an ascii-safe representation on both py2k and py3k
        backslashed = self.encode('ascii', 'backslashreplace')
        if not util.py2k:
            backslashed = backslashed.decode('ascii')
        return "'%s'" % backslashed
+
class _truncated_label(quoted_name):
    """A unicode subclass used to identify symbolic names that may
    require truncation."""

    def __new__(cls, value, quote=None):
        # inherit the quote flag from the incoming value when it has one
        quote = getattr(value, "quote", quote)
        #return super(_truncated_label, cls).__new__(cls, value, quote, True)
        return super(_truncated_label, cls).__new__(cls, value, quote)

    def __reduce__(self):
        return self.__class__, (util.text_type(self), self.quote)

    def apply_map(self, map_):
        # plain truncated labels are not anonymized; nothing to substitute
        return self
+
# Backwards-compatibility alias, in case someone is re-implementing
# the _truncated_identifier() sequence in a custom compiler and
# refers to the older name.
_generated_label = _truncated_label
+
+
class _anonymous_label(_truncated_label):
    """A unicode subclass used to identify anonymously
    generated names."""

    def __add__(self, other):
        joined = util.text_type.__add__(self, util.text_type(other))
        return _anonymous_label(quoted_name(joined, self.quote))

    def __radd__(self, other):
        joined = util.text_type.__add__(util.text_type(other), self)
        return _anonymous_label(quoted_name(joined, self.quote))

    def apply_map(self, map_):
        if self.quote is None:
            # no quoting preference - skip the quoted_name constructor
            return self % map_
        # carry the quoting flag through the substitution
        return quoted_name(self % map_, self.quote)
+
+
def _as_truncated(value):
    """coerce the given value to :class:`._truncated_label`.

    Existing :class:`._truncated_label` and
    :class:`._anonymous_label` objects are passed
    unchanged.
    """

    if not isinstance(value, _truncated_label):
        value = _truncated_label(value)
    return value
+
+
def _string_or_unprintable(element):
    """Return ``element`` as a string, or a ``repr``-based placeholder.

    Strings pass through unchanged; other objects are coerced via
    ``str()``, falling back to a placeholder when coercion fails.
    """
    if isinstance(element, util.string_types):
        return element
    else:
        try:
            return str(element)
        # a bare except here would also swallow KeyboardInterrupt /
        # SystemExit; only guard against ordinary __str__ failures
        except Exception:
            return "unprintable element %r" % element
+
+
+def _expand_cloned(elements):
+ """expand the given set of ClauseElements to be the set of all 'cloned'
+ predecessors.
+
+ """
+ return itertools.chain(*[x._cloned_set for x in elements])
+
+
+def _select_iterables(elements):
+ """expand tables into individual columns in the
+ given list of column expressions.
+
+ """
+ return itertools.chain(*[c._select_iterable for c in elements])
+
+
def _cloned_intersection(a, b):
    """return the intersection of sets a and b, counting
    any overlap between 'cloned' predecessors.

    The returned set is in terms of the entities present within 'a'.

    """
    all_overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b))
    # set comprehension rather than set(generator) - same result,
    # idiomatic form (ruff C401)
    return {elem for elem in a
            if all_overlap.intersection(elem._cloned_set)}
+
def _cloned_difference(a, b):
    """return the subset of 'a' whose 'cloned' predecessors do not
    overlap with those of 'b'.

    The returned set is in terms of the entities present within 'a'.

    """
    all_overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b))
    return {elem for elem in a
            if not all_overlap.intersection(elem._cloned_set)}
+
+
+def _labeled(element):
+ if not hasattr(element, 'name'):
+ return element.label(None)
+ else:
+ return element
+
+
def _is_column(col):
    """Return True when ``col`` is an instance of :class:`.ColumnElement`."""
    return isinstance(col, ColumnElement)
+
+
def _find_columns(clause):
    """locate Column objects within the given expression."""

    found = util.column_set()
    traverse(clause, {}, {'column': found.add})
    return found
+
+
+# there is some inconsistency here between the usage of
+# inspect() vs. checking for Visitable and __clause_element__.
+# Ideally all functions here would derive from inspect(),
+# however the inspect() versions add significant callcount
+# overhead for critical functions like _interpret_as_column_or_from().
+# Generally, the column-based functions are more performance critical
# and are fine just checking for __clause_element__().  It's only
+# _interpret_as_from() where we'd like to be able to receive ORM entities
+# that have no defined namespace, hence inspect() is needed there.
+
+
def _column_as_key(element):
    """Return the string key for a column-oriented argument.

    Plain strings pass through; objects providing
    ``__clause_element__`` are unwrapped first.  Returns None when no
    ``.key`` attribute is available.
    """
    if isinstance(element, util.string_types):
        return element
    if hasattr(element, '__clause_element__'):
        element = element.__clause_element__()
    return getattr(element, 'key', None)
+
+
+def _clause_element_as_expr(element):
+ if hasattr(element, '__clause_element__'):
+ return element.__clause_element__()
+ else:
+ return element
+
+
def _literal_as_text(element):
    """Coerce a value to a textual clause element.

    Visitables pass through; ``__clause_element__`` objects are
    unwrapped; strings become :class:`.TextClause`; None and booleans
    become constant expressions.  Anything else raises ArgumentError.
    """
    if isinstance(element, Visitable):
        return element
    if hasattr(element, '__clause_element__'):
        return element.__clause_element__()
    if isinstance(element, util.string_types):
        return TextClause(util.text_type(element))
    if isinstance(element, (util.NoneType, bool)):
        return _const_expr(element)
    raise exc.ArgumentError(
        "SQL expression object or string expected."
    )
+
+
def _no_literals(element):
    """Unwrap ``__clause_element__()`` objects; reject plain literals."""
    if hasattr(element, '__clause_element__'):
        return element.__clause_element__()
    if isinstance(element, Visitable):
        return element
    raise exc.ArgumentError("Ambiguous literal: %r.  Use the 'text()' "
                            "function to indicate a SQL expression "
                            "literal, or 'literal()' to indicate a "
                            "bound value." % element)
+
+
def _is_literal(element):
    """True for plain values: neither Visitable nor providing
    ``__clause_element__``."""
    if isinstance(element, Visitable):
        return False
    return not hasattr(element, '__clause_element__')
+
+
+def _only_column_elements_or_none(element, name):
+ if element is None:
+ return None
+ else:
+ return _only_column_elements(element, name)
+
+
def _only_column_elements(element, name):
    """Coerce ``element`` to a :class:`.ColumnElement`, unwrapping
    ``__clause_element__`` first; raise ArgumentError otherwise,
    naming the offending argument.
    """
    if hasattr(element, '__clause_element__'):
        element = element.__clause_element__()
    if isinstance(element, ColumnElement):
        return element
    raise exc.ArgumentError(
        "Column-based expression object expected for argument "
        "'%s'; got: '%s', type %s" % (name, element, type(element)))
+
def _literal_as_binds(element, name=None, type_=None):
    """Coerce a literal value to a bound parameter.

    ``__clause_element__`` objects are unwrapped; Visitables pass
    through; None becomes :class:`.Null`; other plain values become
    unique :class:`.BindParameter` objects.
    """
    if hasattr(element, '__clause_element__'):
        return element.__clause_element__()
    if isinstance(element, Visitable):
        return element
    if element is None:
        return Null()
    return BindParameter(name, element, type_=type_, unique=True)
+
+
def _interpret_as_column_or_from(element):
    """Coerce an arbitrary object into a column- or FROM-oriented
    clause element, using inspect() only as a last resort for
    performance reasons (see module comment above)."""
    if isinstance(element, Visitable):
        return element
    elif hasattr(element, '__clause_element__'):
        return element.__clause_element__()

    insp = inspection.inspect(element, raiseerr=False)
    if insp is None:
        # not inspectable: None and booleans become constant
        # expressions; anything else falls through to the literal
        # column case below
        if isinstance(element, (util.NoneType, bool)):
            return _const_expr(element)
    elif hasattr(insp, "selectable"):
        # inspectable with a selectable (e.g. an ORM entity)
        return insp.selectable

    # fallback: render the stringified object as a literal column
    return ColumnClause(str(element), is_literal=True)
+
+
def _const_expr(element):
    """Convert None/False/True to the corresponding SQL constant
    clause; existing Null/False_/True_ instances pass through."""
    if isinstance(element, (Null, False_, True_)):
        return element
    if element is None:
        return Null()
    if element is False:
        return False_()
    if element is True:
        return True_()
    raise exc.ArgumentError(
        "Expected None, False, or True"
    )
+
+
+def _type_from_args(args):
+ for a in args:
+ if not a.type._isnull:
+ return a.type
+ else:
+ return type_api.NULLTYPE
+
+
+def _corresponding_column_or_error(fromclause, column,
+ require_embedded=False):
+ c = fromclause.corresponding_column(column,
+ require_embedded=require_embedded)
+ if c is None:
+ raise exc.InvalidRequestError(
+ "Given column '%s', attached to table '%s', "
+ "failed to locate a corresponding column from table '%s'"
+ %
+ (column,
+ getattr(column, 'table', None),
+ fromclause.description)
+ )
+ return c
+
+
class AnnotatedColumnElement(Annotated):
    """Annotated form of a column element; resets the memoized
    comparator and re-derives name/key/table from the parent
    element when needed."""

    def __init__(self, element, values):
        Annotated.__init__(self, element, values)
        # the comparator was memoized against the original element;
        # reset it so it is re-created against this annotated wrapper
        ColumnElement.comparator._reset(self)
        for attr in ('name', 'key', 'table'):
            # if the attribute was carried over with an explicit None
            # value, discard it so the memoized properties below pull
            # the value from the parent element instead
            if self.__dict__.get(attr, False) is None:
                self.__dict__.pop(attr)

    def _with_annotations(self, values):
        clone = super(AnnotatedColumnElement, self)._with_annotations(values)
        # the clone needs its own comparator, re-memoized on access
        ColumnElement.comparator._reset(clone)
        return clone

    @util.memoized_property
    def name(self):
        """pull 'name' from parent, if not present"""
        return self._Annotated__element.name

    @util.memoized_property
    def table(self):
        """pull 'table' from parent, if not present"""
        return self._Annotated__element.table

    @util.memoized_property
    def key(self):
        """pull 'key' from parent, if not present"""
        return self._Annotated__element.key

    @util.memoized_property
    def info(self):
        # proxy the parent element's info dictionary
        return self._Annotated__element.info
+
diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py
index 08ef20a89..c99665b42 100644
--- a/lib/sqlalchemy/sql/expression.py
+++ b/lib/sqlalchemy/sql/expression.py
@@ -1,47 +1,18 @@
# sql/expression.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-"""Defines the base components of SQL expression trees.
+"""Defines the public namespace for SQL expression constructs.
-All components are derived from a common base class
-:class:`.ClauseElement`. Common behaviors are organized
-based on class hierarchies, in some cases via mixins.
-
-All object construction from this package occurs via functions which
-in some cases will construct composite :class:`.ClauseElement` structures
-together, and in other cases simply return a single :class:`.ClauseElement`
-constructed directly. The function interface affords a more "DSL-ish"
-feel to constructing SQL expressions and also allows future class
-reorganizations.
-
-Even though classes are not constructed directly from the outside,
-most classes which have additional public methods are considered to be
-public (i.e. have no leading underscore). Other classes which are
-"semi-public" are marked with a single leading underscore; these
-classes usually have few or no public methods and are less guaranteed
-to stay the same in future releases.
+Prior to version 0.9, this module contained all of "elements", "dml",
+"default_comparator" and "selectable". The module was broken up
+and most "factory" functions were moved to be grouped with their associated
+class.
"""
-from __future__ import unicode_literals
-import itertools
-import re
-from operator import attrgetter
-
-from .. import util, exc, inspection
-from . import operators
-from .operators import ColumnOperators
-from .visitors import Visitable, cloned_traverse
-import operator
-
-functions = util.importlater("sqlalchemy.sql", "functions")
-sqlutil = util.importlater("sqlalchemy.sql", "util")
-sqltypes = util.importlater("sqlalchemy", "types")
-default = util.importlater("sqlalchemy.engine", "default")
-
__all__ = [
'Alias', 'ClauseElement', 'ColumnCollection', 'ColumnElement',
'CompoundSelect', 'Delete', 'FromClause', 'Insert', 'Join', 'Select',
@@ -52,6579 +23,88 @@ __all__ = [
'literal', 'literal_column', 'not_', 'null', 'nullsfirst', 'nullslast',
'or_', 'outparam', 'outerjoin', 'over', 'select', 'subquery',
'table', 'text',
- 'tuple_', 'type_coerce', 'union', 'union_all', 'update', ]
-
-PARSE_AUTOCOMMIT = util.symbol('PARSE_AUTOCOMMIT')
-NO_ARG = util.symbol('NO_ARG')
-
-
-def nullsfirst(column):
- """Return a NULLS FIRST ``ORDER BY`` clause element.
-
- e.g.::
-
- someselect.order_by(desc(table1.mycol).nullsfirst())
-
- produces::
-
- ORDER BY mycol DESC NULLS FIRST
-
- """
- return UnaryExpression(column, modifier=operators.nullsfirst_op)
-
-
-def nullslast(column):
- """Return a NULLS LAST ``ORDER BY`` clause element.
-
- e.g.::
-
- someselect.order_by(desc(table1.mycol).nullslast())
-
- produces::
-
- ORDER BY mycol DESC NULLS LAST
-
- """
- return UnaryExpression(column, modifier=operators.nullslast_op)
-
-
-def desc(column):
- """Return a descending ``ORDER BY`` clause element.
-
- e.g.::
-
- someselect.order_by(desc(table1.mycol))
-
- produces::
-
- ORDER BY mycol DESC
-
- """
- return UnaryExpression(column, modifier=operators.desc_op)
-
-
-def asc(column):
- """Return an ascending ``ORDER BY`` clause element.
-
- e.g.::
-
- someselect.order_by(asc(table1.mycol))
-
- produces::
-
- ORDER BY mycol ASC
-
- """
- return UnaryExpression(column, modifier=operators.asc_op)
-
-
-def outerjoin(left, right, onclause=None):
- """Return an ``OUTER JOIN`` clause element.
-
- The returned object is an instance of :class:`.Join`.
-
- Similar functionality is also available via the
- :meth:`~.FromClause.outerjoin()` method on any
- :class:`.FromClause`.
-
- :param left: The left side of the join.
-
- :param right: The right side of the join.
-
- :param onclause: Optional criterion for the ``ON`` clause, is
- derived from foreign key relationships established between
- left and right otherwise.
-
- To chain joins together, use the :meth:`.FromClause.join` or
- :meth:`.FromClause.outerjoin` methods on the resulting
- :class:`.Join` object.
-
- """
- return Join(left, right, onclause, isouter=True)
-
-
-def join(left, right, onclause=None, isouter=False):
- """Return a ``JOIN`` clause element (regular inner join).
-
- The returned object is an instance of :class:`.Join`.
-
- Similar functionality is also available via the
- :meth:`~.FromClause.join()` method on any
- :class:`.FromClause`.
-
- :param left: The left side of the join.
-
- :param right: The right side of the join.
-
- :param onclause: Optional criterion for the ``ON`` clause, is
- derived from foreign key relationships established between
- left and right otherwise.
-
- To chain joins together, use the :meth:`.FromClause.join` or
- :meth:`.FromClause.outerjoin` methods on the resulting
- :class:`.Join` object.
-
-
- """
- return Join(left, right, onclause, isouter)
-
-
-def select(columns=None, whereclause=None, from_obj=[], **kwargs):
- """Returns a ``SELECT`` clause element.
-
- Similar functionality is also available via the :func:`select()`
- method on any :class:`.FromClause`.
-
- The returned object is an instance of :class:`.Select`.
-
- All arguments which accept :class:`.ClauseElement` arguments also accept
- string arguments, which will be converted as appropriate into
- either :func:`text()` or :func:`literal_column()` constructs.
-
- .. seealso::
-
- :ref:`coretutorial_selecting` - Core Tutorial description of
- :func:`.select`.
-
- :param columns:
- A list of :class:`.ClauseElement` objects, typically
- :class:`.ColumnElement` objects or subclasses, which will form the
- columns clause of the resulting statement. For all members which are
- instances of :class:`.Selectable`, the individual :class:`.ColumnElement`
- members of the :class:`.Selectable` will be added individually to the
- columns clause. For example, specifying a
- :class:`~sqlalchemy.schema.Table` instance will result in all the
- contained :class:`~sqlalchemy.schema.Column` objects within to be added
- to the columns clause.
-
- This argument is not present on the form of :func:`select()`
- available on :class:`~sqlalchemy.schema.Table`.
-
- :param whereclause:
- A :class:`.ClauseElement` expression which will be used to form the
- ``WHERE`` clause.
-
- :param from_obj:
- A list of :class:`.ClauseElement` objects which will be added to the
- ``FROM`` clause of the resulting statement. Note that "from" objects are
- automatically located within the columns and whereclause ClauseElements.
- Use this parameter to explicitly specify "from" objects which are not
- automatically locatable. This could include
- :class:`~sqlalchemy.schema.Table` objects that aren't otherwise present,
- or :class:`.Join` objects whose presence will supercede that of the
- :class:`~sqlalchemy.schema.Table` objects already located in the other
- clauses.
-
- :param autocommit:
- Deprecated. Use .execution_options(autocommit=<True|False>)
- to set the autocommit option.
-
- :param bind=None:
- an :class:`~.base.Engine` or :class:`~.base.Connection` instance
- to which the
- resulting :class:`.Select` object will be bound. The :class:`.Select`
- object will otherwise automatically bind to whatever
- :class:`~.base.Connectable` instances can be located within its contained
- :class:`.ClauseElement` members.
-
- :param correlate=True:
- indicates that this :class:`.Select` object should have its
- contained :class:`.FromClause` elements "correlated" to an enclosing
- :class:`.Select` object. This means that any :class:`.ClauseElement`
- instance within the "froms" collection of this :class:`.Select`
- which is also present in the "froms" collection of an
- enclosing select will not be rendered in the ``FROM`` clause
- of this select statement.
-
- :param distinct=False:
- when ``True``, applies a ``DISTINCT`` qualifier to the columns
- clause of the resulting statement.
-
- The boolean argument may also be a column expression or list
- of column expressions - this is a special calling form which
- is understood by the Postgresql dialect to render the
- ``DISTINCT ON (<columns>)`` syntax.
-
- ``distinct`` is also available via the :meth:`~.Select.distinct`
- generative method.
-
- :param for_update=False:
- when ``True``, applies ``FOR UPDATE`` to the end of the
- resulting statement.
-
- Certain database dialects also support
- alternate values for this parameter:
-
- * With the MySQL dialect, the value ``"read"`` translates to
- ``LOCK IN SHARE MODE``.
- * With the Oracle and Postgresql dialects, the value ``"nowait"``
- translates to ``FOR UPDATE NOWAIT``.
- * With the Postgresql dialect, the values "read" and ``"read_nowait"``
- translate to ``FOR SHARE`` and ``FOR SHARE NOWAIT``, respectively.
-
- .. versionadded:: 0.7.7
-
- :param group_by:
- a list of :class:`.ClauseElement` objects which will comprise the
- ``GROUP BY`` clause of the resulting select.
-
- :param having:
- a :class:`.ClauseElement` that will comprise the ``HAVING`` clause
- of the resulting select when ``GROUP BY`` is used.
-
- :param limit=None:
- a numerical value which usually compiles to a ``LIMIT``
- expression in the resulting select. Databases that don't
- support ``LIMIT`` will attempt to provide similar
- functionality.
-
- :param offset=None:
- a numeric value which usually compiles to an ``OFFSET``
- expression in the resulting select. Databases that don't
- support ``OFFSET`` will attempt to provide similar
- functionality.
-
- :param order_by:
- a scalar or list of :class:`.ClauseElement` objects which will
- comprise the ``ORDER BY`` clause of the resulting select.
-
- :param use_labels=False:
- when ``True``, the statement will be generated using labels
- for each column in the columns clause, which qualify each
- column with its parent table's (or aliases) name so that name
- conflicts between columns in different tables don't occur.
- The format of the label is <tablename>_<column>. The "c"
- collection of the resulting :class:`.Select` object will use these
- names as well for targeting column members.
-
- use_labels is also available via the :meth:`~.SelectBase.apply_labels`
- generative method.
-
- """
- return Select(columns, whereclause=whereclause, from_obj=from_obj,
- **kwargs)
-
-
-def subquery(alias, *args, **kwargs):
- """Return an :class:`.Alias` object derived
- from a :class:`.Select`.
-
- name
- alias name
-
- \*args, \**kwargs
-
- all other arguments are delivered to the
- :func:`select` function.
-
- """
- return Select(*args, **kwargs).alias(alias)
-
-
-def insert(table, values=None, inline=False, **kwargs):
- """Represent an ``INSERT`` statement via the :class:`.Insert` SQL
- construct.
-
- Similar functionality is available via the
- :meth:`~.TableClause.insert` method on
- :class:`~.schema.Table`.
-
-
- :param table: :class:`.TableClause` which is the subject of the insert.
-
- :param values: collection of values to be inserted; see
- :meth:`.Insert.values` for a description of allowed formats here.
- Can be omitted entirely; a :class:`.Insert` construct will also
- dynamically render the VALUES clause at execution time based on
- the parameters passed to :meth:`.Connection.execute`.
-
- :param inline: if True, SQL defaults will be compiled 'inline' into the
- statement and not pre-executed.
-
- If both `values` and compile-time bind parameters are present, the
- compile-time bind parameters override the information specified
- within `values` on a per-key basis.
-
- The keys within `values` can be either :class:`~sqlalchemy.schema.Column`
- objects or their string identifiers. Each key may reference one of:
-
- * a literal data value (i.e. string, number, etc.);
- * a Column object;
- * a SELECT statement.
-
- If a ``SELECT`` statement is specified which references this
- ``INSERT`` statement's table, the statement will be correlated
- against the ``INSERT`` statement.
-
- .. seealso::
-
- :ref:`coretutorial_insert_expressions` - SQL Expression Tutorial
-
- :ref:`inserts_and_updates` - SQL Expression Tutorial
-
- """
- return Insert(table, values, inline=inline, **kwargs)
-
-
-def update(table, whereclause=None, values=None, inline=False, **kwargs):
- """Represent an ``UPDATE`` statement via the :class:`.Update` SQL
- construct.
-
- E.g.::
-
- from sqlalchemy import update
-
- stmt = update(users).where(users.c.id==5).\\
- values(name='user #5')
-
- Similar functionality is available via the
- :meth:`~.TableClause.update` method on
- :class:`.Table`::
-
-
- stmt = users.update().\\
- where(users.c.id==5).\\
- values(name='user #5')
-
- :param table: A :class:`.Table` object representing the database
- table to be updated.
-
- :param whereclause: Optional SQL expression describing the ``WHERE``
- condition of the ``UPDATE`` statement. Modern applications
- may prefer to use the generative :meth:`~Update.where()`
- method to specify the ``WHERE`` clause.
-
- The WHERE clause can refer to multiple tables.
- For databases which support this, an ``UPDATE FROM`` clause will
- be generated, or on MySQL, a multi-table update. The statement
- will fail on databases that don't have support for multi-table
- update statements. A SQL-standard method of referring to
- additional tables in the WHERE clause is to use a correlated
- subquery::
-
- users.update().values(name='ed').where(
- users.c.name==select([addresses.c.email_address]).\\
- where(addresses.c.user_id==users.c.id).\\
- as_scalar()
- )
-
- .. versionchanged:: 0.7.4
- The WHERE clause can refer to multiple tables.
-
- :param values:
- Optional dictionary which specifies the ``SET`` conditions of the
- ``UPDATE``. If left as ``None``, the ``SET``
- conditions are determined from those parameters passed to the
- statement during the execution and/or compilation of the
- statement. When compiled standalone without any parameters,
- the ``SET`` clause generates for all columns.
-
- Modern applications may prefer to use the generative
- :meth:`.Update.values` method to set the values of the
- UPDATE statement.
-
- :param inline:
- if True, SQL defaults present on :class:`.Column` objects via
- the ``default`` keyword will be compiled 'inline' into the statement
- and not pre-executed. This means that their values will not
- be available in the dictionary returned from
- :meth:`.ResultProxy.last_updated_params`.
-
- If both ``values`` and compile-time bind parameters are present, the
- compile-time bind parameters override the information specified
- within ``values`` on a per-key basis.
-
- The keys within ``values`` can be either :class:`.Column`
- objects or their string identifiers (specifically the "key" of the
- :class:`.Column`, normally but not necessarily equivalent to
- its "name"). Normally, the
- :class:`.Column` objects used here are expected to be
- part of the target :class:`.Table` that is the table
- to be updated. However when using MySQL, a multiple-table
- UPDATE statement can refer to columns from any of
- the tables referred to in the WHERE clause.
-
- The values referred to in ``values`` are typically:
-
- * a literal data value (i.e. string, number, etc.)
- * a SQL expression, such as a related :class:`.Column`,
- a scalar-returning :func:`.select` construct,
- etc.
-
- When combining :func:`.select` constructs within the values
- clause of an :func:`.update` construct,
- the subquery represented by the :func:`.select` should be
- *correlated* to the parent table, that is, providing criterion
- which links the table inside the subquery to the outer table
- being updated::
-
- users.update().values(
- name=select([addresses.c.email_address]).\\
- where(addresses.c.user_id==users.c.id).\\
- as_scalar()
- )
-
- .. seealso::
-
- :ref:`inserts_and_updates` - SQL Expression
- Language Tutorial
-
-
- """
- return Update(
- table,
- whereclause=whereclause,
- values=values,
- inline=inline,
- **kwargs)
-
-
-def delete(table, whereclause=None, **kwargs):
- """Represent a ``DELETE`` statement via the :class:`.Delete` SQL
- construct.
-
- Similar functionality is available via the
- :meth:`~.TableClause.delete` method on
- :class:`~.schema.Table`.
-
- :param table: The table to be updated.
-
- :param whereclause: A :class:`.ClauseElement` describing the ``WHERE``
- condition of the ``UPDATE`` statement. Note that the
- :meth:`~Delete.where()` generative method may be used instead.
-
- .. seealso::
-
- :ref:`deletes` - SQL Expression Tutorial
-
- """
- return Delete(table, whereclause, **kwargs)
-
-
-def and_(*clauses):
- """Join a list of clauses together using the ``AND`` operator.
-
- The ``&`` operator is also overloaded on all :class:`.ColumnElement`
- subclasses to produce the
- same result.
-
- """
- if len(clauses) == 1:
- return clauses[0]
- return BooleanClauseList(operator=operators.and_, *clauses)
-
-
-def or_(*clauses):
- """Join a list of clauses together using the ``OR`` operator.
-
- The ``|`` operator is also overloaded on all
- :class:`.ColumnElement` subclasses to produce the
- same result.
-
- """
- if len(clauses) == 1:
- return clauses[0]
- return BooleanClauseList(operator=operators.or_, *clauses)
-
-
-def not_(clause):
- """Return a negation of the given clause, i.e. ``NOT(clause)``.
-
- The ``~`` operator is also overloaded on all
- :class:`.ColumnElement` subclasses to produce the
- same result.
-
- """
- return operators.inv(_literal_as_binds(clause))
-
-
-def distinct(expr):
- """Return a ``DISTINCT`` clause.
-
- e.g.::
-
- distinct(a)
-
- renders::
-
- DISTINCT a
-
- """
- expr = _literal_as_binds(expr)
- return UnaryExpression(expr,
- operator=operators.distinct_op, type_=expr.type)
-
-
-def between(ctest, cleft, cright):
- """Return a ``BETWEEN`` predicate clause.
-
- Equivalent of SQL ``clausetest BETWEEN clauseleft AND clauseright``.
-
- The :func:`between()` method on all
- :class:`.ColumnElement` subclasses provides
- similar functionality.
-
- """
- ctest = _literal_as_binds(ctest)
- return ctest.between(cleft, cright)
-
-
-def case(whens, value=None, else_=None):
- """Produce a ``CASE`` statement.
-
- whens
- A sequence of pairs, or alternatively a dict,
- to be translated into "WHEN / THEN" clauses.
-
- value
- Optional for simple case statements, produces
- a column expression as in "CASE <expr> WHEN ..."
-
- else\_
- Optional as well, for case defaults produces
- the "ELSE" portion of the "CASE" statement.
-
- The expressions used for THEN and ELSE,
- when specified as strings, will be interpreted
- as bound values. To specify textual SQL expressions
- for these, use the :func:`literal_column`
- construct.
-
- The expressions used for the WHEN criterion
- may only be literal strings when "value" is
- present, i.e. CASE table.somecol WHEN "x" THEN "y".
- Otherwise, literal strings are not accepted
- in this position, and either the text(<string>)
- or literal(<string>) constructs must be used to
- interpret raw string values.
-
- Usage examples::
-
- case([(orderline.c.qty > 100, item.c.specialprice),
- (orderline.c.qty > 10, item.c.bulkprice)
- ], else_=item.c.regularprice)
- case(value=emp.c.type, whens={
- 'engineer': emp.c.salary * 1.1,
- 'manager': emp.c.salary * 3,
- })
-
- Using :func:`literal_column()`, to allow for databases that
- do not support bind parameters in the ``then`` clause. The type
- can be specified which determines the type of the :func:`case()` construct
- overall::
-
- case([(orderline.c.qty > 100,
- literal_column("'greaterthan100'", String)),
- (orderline.c.qty > 10, literal_column("'greaterthan10'",
- String))
- ], else_=literal_column("'lethan10'", String))
-
- """
-
- return Case(whens, value=value, else_=else_)
-
-
-def cast(clause, totype, **kwargs):
- """Return a ``CAST`` function.
-
- Equivalent of SQL ``CAST(clause AS totype)``.
-
- Use with a :class:`~sqlalchemy.types.TypeEngine` subclass, i.e::
-
- cast(table.c.unit_price * table.c.qty, Numeric(10,4))
-
- or::
-
- cast(table.c.timestamp, DATE)
-
- """
- return Cast(clause, totype, **kwargs)
-
-
-def extract(field, expr):
- """Return the clause ``extract(field FROM expr)``."""
-
- return Extract(field, expr)
-
-
-def collate(expression, collation):
- """Return the clause ``expression COLLATE collation``.
-
- e.g.::
-
- collate(mycolumn, 'utf8_bin')
-
- produces::
-
- mycolumn COLLATE utf8_bin
-
- """
-
- expr = _literal_as_binds(expression)
- return BinaryExpression(
- expr,
- _literal_as_text(collation),
- operators.collate, type_=expr.type)
-
-
-def exists(*args, **kwargs):
- """Return an ``EXISTS`` clause as applied to a :class:`.Select` object.
-
- Calling styles are of the following forms::
-
- # use on an existing select()
- s = select([table.c.col1]).where(table.c.col2==5)
- s = exists(s)
-
- # construct a select() at once
- exists(['*'], **select_arguments).where(criterion)
-
- # columns argument is optional, generates "EXISTS (SELECT *)"
- # by default.
- exists().where(table.c.col2==5)
-
- """
- return Exists(*args, **kwargs)
-
-
-def union(*selects, **kwargs):
- """Return a ``UNION`` of multiple selectables.
-
- The returned object is an instance of
- :class:`.CompoundSelect`.
-
- A similar :func:`union()` method is available on all
- :class:`.FromClause` subclasses.
-
- \*selects
- a list of :class:`.Select` instances.
-
- \**kwargs
- available keyword arguments are the same as those of
- :func:`select`.
-
- """
- return CompoundSelect(CompoundSelect.UNION, *selects, **kwargs)
-
-
-def union_all(*selects, **kwargs):
- """Return a ``UNION ALL`` of multiple selectables.
-
- The returned object is an instance of
- :class:`.CompoundSelect`.
-
- A similar :func:`union_all()` method is available on all
- :class:`.FromClause` subclasses.
-
- \*selects
- a list of :class:`.Select` instances.
-
- \**kwargs
- available keyword arguments are the same as those of
- :func:`select`.
-
- """
- return CompoundSelect(CompoundSelect.UNION_ALL, *selects, **kwargs)
-
-
-def except_(*selects, **kwargs):
- """Return an ``EXCEPT`` of multiple selectables.
-
- The returned object is an instance of
- :class:`.CompoundSelect`.
-
- \*selects
- a list of :class:`.Select` instances.
-
- \**kwargs
- available keyword arguments are the same as those of
- :func:`select`.
-
- """
- return CompoundSelect(CompoundSelect.EXCEPT, *selects, **kwargs)
-
-
-def except_all(*selects, **kwargs):
- """Return an ``EXCEPT ALL`` of multiple selectables.
-
- The returned object is an instance of
- :class:`.CompoundSelect`.
-
- \*selects
- a list of :class:`.Select` instances.
-
- \**kwargs
- available keyword arguments are the same as those of
- :func:`select`.
-
- """
- return CompoundSelect(CompoundSelect.EXCEPT_ALL, *selects, **kwargs)
-
-
-def intersect(*selects, **kwargs):
- """Return an ``INTERSECT`` of multiple selectables.
-
- The returned object is an instance of
- :class:`.CompoundSelect`.
-
- \*selects
- a list of :class:`.Select` instances.
-
- \**kwargs
- available keyword arguments are the same as those of
- :func:`select`.
-
- """
- return CompoundSelect(CompoundSelect.INTERSECT, *selects, **kwargs)
-
-
-def intersect_all(*selects, **kwargs):
- """Return an ``INTERSECT ALL`` of multiple selectables.
-
- The returned object is an instance of
- :class:`.CompoundSelect`.
-
- \*selects
- a list of :class:`.Select` instances.
-
- \**kwargs
- available keyword arguments are the same as those of
- :func:`select`.
-
- """
- return CompoundSelect(CompoundSelect.INTERSECT_ALL, *selects, **kwargs)
-
-
-def alias(selectable, name=None, flat=False):
- """Return an :class:`.Alias` object.
-
- An :class:`.Alias` represents any :class:`.FromClause`
- with an alternate name assigned within SQL, typically using the ``AS``
- clause when generated, e.g. ``SELECT * FROM table AS aliasname``.
-
- Similar functionality is available via the
- :meth:`~.FromClause.alias` method
- available on all :class:`.FromClause` subclasses.
-
- When an :class:`.Alias` is created from a :class:`.Table` object,
- this has the effect of the table being rendered
- as ``tablename AS aliasname`` in a SELECT statement.
-
- For :func:`.select` objects, the effect is that of creating a named
- subquery, i.e. ``(select ...) AS aliasname``.
-
- The ``name`` parameter is optional, and provides the name
- to use in the rendered SQL. If blank, an "anonymous" name
- will be deterministically generated at compile time.
- Deterministic means the name is guaranteed to be unique against
- other constructs used in the same statement, and will also be the
- same name for each successive compilation of the same statement
- object.
-
- :param selectable: any :class:`.FromClause` subclass,
- such as a table, select statement, etc.
-
- :param name: string name to be assigned as the alias.
- If ``None``, a name will be deterministically generated
- at compile time.
-
- :param flat: Will be passed through to if the given selectable
- is an instance of :class:`.Join` - see :meth:`.Join.alias`
- for details.
-
- .. versionadded:: 0.9.0
-
- """
- return selectable.alias(name=name, flat=flat)
-
-
-def literal(value, type_=None):
- """Return a literal clause, bound to a bind parameter.
-
- Literal clauses are created automatically when non- :class:`.ClauseElement`
- objects (such as strings, ints, dates, etc.) are used in a comparison
- operation with a :class:`.ColumnElement`
- subclass, such as a :class:`~sqlalchemy.schema.Column` object.
- Use this function to force the
- generation of a literal clause, which will be created as a
- :class:`BindParameter` with a bound value.
-
- :param value: the value to be bound. Can be any Python object supported by
- the underlying DB-API, or is translatable via the given type argument.
-
- :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine` which
- will provide bind-parameter translation for this literal.
-
- """
- return BindParameter(None, value, type_=type_, unique=True)
-
-
-def tuple_(*expr):
- """Return a SQL tuple.
-
- Main usage is to produce a composite IN construct::
-
- tuple_(table.c.col1, table.c.col2).in_(
- [(1, 2), (5, 12), (10, 19)]
- )
-
- .. warning::
-
- The composite IN construct is not supported by all backends,
- and is currently known to work on Postgresql and MySQL,
- but not SQLite. Unsupported backends will raise
- a subclass of :class:`~sqlalchemy.exc.DBAPIError` when such
- an expression is invoked.
-
- """
- return Tuple(*expr)
-
-
-def type_coerce(expr, type_):
- """Coerce the given expression into the given type,
- on the Python side only.
-
- :func:`.type_coerce` is roughly similar to :func:`.cast`, except no
- "CAST" expression is rendered - the given type is only applied towards
- expression typing and against received result values.
-
- e.g.::
-
- from sqlalchemy.types import TypeDecorator
- import uuid
-
- class AsGuid(TypeDecorator):
- impl = String
-
- def process_bind_param(self, value, dialect):
- if value is not None:
- return str(value)
- else:
- return None
-
- def process_result_value(self, value, dialect):
- if value is not None:
- return uuid.UUID(value)
- else:
- return None
-
- conn.execute(
- select([type_coerce(mytable.c.ident, AsGuid)]).\\
- where(
- type_coerce(mytable.c.ident, AsGuid) ==
- uuid.uuid3(uuid.NAMESPACE_URL, 'bar')
- )
- )
-
- """
- type_ = sqltypes.to_instance(type_)
-
- if hasattr(expr, '__clause_expr__'):
- return type_coerce(expr.__clause_expr__())
- elif isinstance(expr, BindParameter):
- bp = expr._clone()
- bp.type = type_
- return bp
- elif not isinstance(expr, Visitable):
- if expr is None:
- return null()
- else:
- return literal(expr, type_=type_)
- else:
- return Label(None, expr, type_=type_)
-
-
-def label(name, obj):
- """Return a :class:`Label` object for the
- given :class:`.ColumnElement`.
-
- A label changes the name of an element in the columns clause of a
- ``SELECT`` statement, typically via the ``AS`` SQL keyword.
-
- This functionality is more conveniently available via the
- :func:`label()` method on :class:`.ColumnElement`.
-
- name
- label name
-
- obj
- a :class:`.ColumnElement`.
-
- """
- return Label(name, obj)
-
-
-def column(text, type_=None):
- """Return a textual column clause, as would be in the columns clause of a
- ``SELECT`` statement.
-
- The object returned is an instance of :class:`.ColumnClause`, which
- represents the "syntactical" portion of the schema-level
- :class:`~sqlalchemy.schema.Column` object. It is often used directly
- within :func:`~.expression.select` constructs or with lightweight
- :func:`~.expression.table` constructs.
-
- Note that the :func:`~.expression.column` function is not part of
- the ``sqlalchemy`` namespace. It must be imported from the
- ``sql`` package::
-
- from sqlalchemy.sql import table, column
-
- :param text: the name of the column. Quoting rules will be applied
- to the clause like any other column name. For textual column constructs
- that are not to be quoted, use the :func:`literal_column` function.
-
- :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine` object
- which will provide result-set translation for this column.
-
- See :class:`.ColumnClause` for further examples.
-
- """
- return ColumnClause(text, type_=type_)
-
-
-def literal_column(text, type_=None):
- """Return a textual column expression, as would be in the columns
- clause of a ``SELECT`` statement.
-
- The object returned supports further expressions in the same way as any
- other column object, including comparison, math and string operations.
- The type\_ parameter is important to determine proper expression behavior
- (such as, '+' means string concatenation or numerical addition based on
- the type).
-
- :param text: the text of the expression; can be any SQL expression.
- Quoting rules will not be applied. To specify a column-name expression
- which should be subject to quoting rules, use the :func:`column`
- function.
-
- :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine`
- object which will
- provide result-set translation and additional expression semantics for
- this column. If left as None the type will be NullType.
-
- """
- return ColumnClause(text, type_=type_, is_literal=True)
-
-
-def table(name, *columns):
- """Represent a textual table clause.
-
- The object returned is an instance of :class:`.TableClause`, which
- represents the "syntactical" portion of the schema-level
- :class:`~.schema.Table` object.
- It may be used to construct lightweight table constructs.
-
- Note that the :func:`~.expression.table` function is not part of
- the ``sqlalchemy`` namespace. It must be imported from the
- ``sql`` package::
-
- from sqlalchemy.sql import table, column
-
- :param name: Name of the table.
-
- :param columns: A collection of :func:`~.expression.column` constructs.
-
- See :class:`.TableClause` for further examples.
-
- """
- return TableClause(name, *columns)
-
-
-def bindparam(key, value=NO_ARG, type_=None, unique=False, required=NO_ARG,
- quote=None, callable_=None):
- """Create a bind parameter clause with the given key.
-
- :param key:
- the key for this bind param. Will be used in the generated
- SQL statement for dialects that use named parameters. This
- value may be modified when part of a compilation operation,
- if other :class:`BindParameter` objects exist with the same
- key, or if its length is too long and truncation is
- required.
-
- :param value:
- Initial value for this bind param. This value may be
- overridden by the dictionary of parameters sent to statement
- compilation/execution.
-
- Defaults to ``None``, however if neither ``value`` nor
- ``callable`` are passed explicitly, the ``required`` flag will be
- set to ``True`` which has the effect of requiring a value be present
- when the statement is actually executed.
-
- .. versionchanged:: 0.8 The ``required`` flag is set to ``True``
- automatically if ``value`` or ``callable`` is not passed.
-
- :param callable\_:
- A callable function that takes the place of "value". The function
- will be called at statement execution time to determine the
- ultimate value. Used for scenarios where the actual bind
- value cannot be determined at the point at which the clause
- construct is created, but embedded bind values are still desirable.
-
- :param type\_:
- A ``TypeEngine`` object that will be used to pre-process the
- value corresponding to this :class:`BindParameter` at
- execution time.
-
- :param unique:
- if True, the key name of this BindParamClause will be
- modified if another :class:`BindParameter` of the same name
- already has been located within the containing
- :class:`.ClauseElement`.
-
- :param required:
- If ``True``, a value is required at execution time. If not passed,
- is set to ``True`` or ``False`` based on whether or not
- one of ``value`` or ``callable`` were passed..
-
- .. versionchanged:: 0.8 If the ``required`` flag is not specified,
- it will be set automatically to ``True`` or ``False`` depending
- on whether or not the ``value`` or ``callable`` parameters
- were specified.
-
- :param quote:
- True if this parameter name requires quoting and is not
- currently known as a SQLAlchemy reserved word; this currently
- only applies to the Oracle backend.
-
- """
- if isinstance(key, ColumnClause):
- type_ = key.type
- key = key.name
- if required is NO_ARG:
- required = (value is NO_ARG and callable_ is None)
- if value is NO_ARG:
- value = None
- return BindParameter(key, value, type_=type_,
- callable_=callable_,
- unique=unique, required=required,
- quote=quote)
-
-
-def outparam(key, type_=None):
- """Create an 'OUT' parameter for usage in functions (stored procedures),
- for databases which support them.
-
- The ``outparam`` can be used like a regular function parameter.
- The "output" value will be available from the
- :class:`~sqlalchemy.engine.ResultProxy` object via its ``out_parameters``
- attribute, which returns a dictionary containing the values.
-
- """
- return BindParameter(
- key, None, type_=type_, unique=False, isoutparam=True)
-
-
-def text(text, bind=None, *args, **kwargs):
- """Create a SQL construct that is represented by a literal string.
-
- E.g.::
-
- t = text("SELECT * FROM users")
- result = connection.execute(t)
-
- The advantages :func:`text` provides over a plain string are
- backend-neutral support for bind parameters, per-statement
- execution options, as well as
- bind parameter and result-column typing behavior, allowing
- SQLAlchemy type constructs to play a role when executing
- a statement that is specified literally.
-
- Bind parameters are specified by name, using the format ``:name``.
- E.g.::
-
- t = text("SELECT * FROM users WHERE id=:user_id")
- result = connection.execute(t, user_id=12)
-
- To invoke SQLAlchemy typing logic for bind parameters, the
- ``bindparams`` list allows specification of :func:`bindparam`
- constructs which specify the type for a given name::
-
- t = text("SELECT id FROM users WHERE updated_at>:updated",
- bindparams=[bindparam('updated', DateTime())]
- )
-
- Typing during result row processing is also an important concern.
- Result column types
- are specified using the ``typemap`` dictionary, where the keys
- match the names of columns. These names are taken from what
- the DBAPI returns as ``cursor.description``::
-
- t = text("SELECT id, name FROM users",
- typemap={
- 'id':Integer,
- 'name':Unicode
- }
- )
-
- The :func:`text` construct is used internally for most cases when
- a literal string is specified for part of a larger query, such as
- within :func:`select()`, :func:`update()`,
- :func:`insert()` or :func:`delete()`. In those cases, the same
- bind parameter syntax is applied::
-
- s = select([users.c.id, users.c.name]).where("id=:user_id")
- result = connection.execute(s, user_id=12)
-
- Using :func:`text` explicitly usually implies the construction
- of a full, standalone statement. As such, SQLAlchemy refers
- to it as an :class:`.Executable` object, and it supports
- the :meth:`Executable.execution_options` method. For example,
- a :func:`text` construct that should be subject to "autocommit"
- can be set explicitly so using the ``autocommit`` option::
-
- t = text("EXEC my_procedural_thing()").\\
- execution_options(autocommit=True)
-
- Note that SQLAlchemy's usual "autocommit" behavior applies to
- :func:`text` constructs - that is, statements which begin
- with a phrase such as ``INSERT``, ``UPDATE``, ``DELETE``,
- or a variety of other phrases specific to certain backends, will
- be eligible for autocommit if no transaction is in progress.
-
- :param text:
- the text of the SQL statement to be created. use ``:<param>``
- to specify bind parameters; they will be compiled to their
- engine-specific format.
-
- :param autocommit:
- Deprecated. Use .execution_options(autocommit=<True|False>)
- to set the autocommit option.
-
- :param bind:
- an optional connection or engine to be used for this text query.
-
- :param bindparams:
- a list of :func:`bindparam()` instances which can be used to define
- the types and/or initial values for the bind parameters within
- the textual statement; the keynames of the bindparams must match
- those within the text of the statement. The types will be used
- for pre-processing on bind values.
-
- :param typemap:
- a dictionary mapping the names of columns represented in the
- columns clause of a ``SELECT`` statement to type objects,
- which will be used to perform post-processing on columns within
- the result set. This argument applies to any expression
- that returns result sets.
-
- """
- return TextClause(text, bind=bind, *args, **kwargs)
-
-
-def over(func, partition_by=None, order_by=None):
- """Produce an OVER clause against a function.
-
- Used against aggregate or so-called "window" functions,
- for database backends that support window functions.
-
- E.g.::
-
- from sqlalchemy import over
- over(func.row_number(), order_by='x')
-
- Would produce "ROW_NUMBER() OVER(ORDER BY x)".
-
- :param func: a :class:`.FunctionElement` construct, typically
- generated by :data:`~.expression.func`.
- :param partition_by: a column element or string, or a list
- of such, that will be used as the PARTITION BY clause
- of the OVER construct.
- :param order_by: a column element or string, or a list
- of such, that will be used as the ORDER BY clause
- of the OVER construct.
-
- This function is also available from the :data:`~.expression.func`
- construct itself via the :meth:`.FunctionElement.over` method.
-
- .. versionadded:: 0.7
-
- """
- return Over(func, partition_by=partition_by, order_by=order_by)
-
-
-def null():
- """Return a :class:`Null` object, which compiles to ``NULL``.
-
- """
- return Null()
-
-
-def true():
- """Return a :class:`True_` object, which compiles to ``true``, or the
- boolean equivalent for the target dialect.
-
- """
- return True_()
-
-
-def false():
- """Return a :class:`False_` object, which compiles to ``false``, or the
- boolean equivalent for the target dialect.
-
- """
- return False_()
-
-
-class _FunctionGenerator(object):
- """Generate :class:`.Function` objects based on getattr calls."""
-
- def __init__(self, **opts):
- self.__names = []
- self.opts = opts
-
- def __getattr__(self, name):
- # passthru __ attributes; fixes pydoc
- if name.startswith('__'):
- try:
- return self.__dict__[name]
- except KeyError:
- raise AttributeError(name)
-
- elif name.endswith('_'):
- name = name[0:-1]
- f = _FunctionGenerator(**self.opts)
- f.__names = list(self.__names) + [name]
- return f
-
- def __call__(self, *c, **kwargs):
- o = self.opts.copy()
- o.update(kwargs)
-
- tokens = len(self.__names)
-
- if tokens == 2:
- package, fname = self.__names
- elif tokens == 1:
- package, fname = "_default", self.__names[0]
- else:
- package = None
-
- if package is not None and \
- package in functions._registry and \
- fname in functions._registry[package]:
- func = functions._registry[package][fname]
- return func(*c, **o)
-
- return Function(self.__names[-1],
- packagenames=self.__names[0:-1], *c, **o)
-
-# "func" global - i.e. func.count()
-func = _FunctionGenerator()
-"""Generate SQL function expressions.
-
- :data:`.func` is a special object instance which generates SQL
- functions based on name-based attributes, e.g.::
-
- >>> print func.count(1)
- count(:param_1)
-
- The element is a column-oriented SQL element like any other, and is
- used in that way::
-
- >>> print select([func.count(table.c.id)])
- SELECT count(sometable.id) FROM sometable
-
- Any name can be given to :data:`.func`. If the function name is unknown to
- SQLAlchemy, it will be rendered exactly as is. For common SQL functions
- which SQLAlchemy is aware of, the name may be interpreted as a *generic
- function* which will be compiled appropriately to the target database::
-
- >>> print func.current_timestamp()
- CURRENT_TIMESTAMP
-
- To call functions which are present in dot-separated packages,
- specify them in the same manner::
-
- >>> print func.stats.yield_curve(5, 10)
- stats.yield_curve(:yield_curve_1, :yield_curve_2)
-
- SQLAlchemy can be made aware of the return type of functions to enable
- type-specific lexical and result-based behavior. For example, to ensure
- that a string-based function returns a Unicode value and is similarly
- treated as a string in expressions, specify
- :class:`~sqlalchemy.types.Unicode` as the type:
-
- >>> print func.my_string(u'hi', type_=Unicode) + ' ' + \
- ... func.my_string(u'there', type_=Unicode)
- my_string(:my_string_1) || :my_string_2 || my_string(:my_string_3)
-
- The object returned by a :data:`.func` call is usually an instance of
- :class:`.Function`.
- This object meets the "column" interface, including comparison and labeling
- functions. The object can also be passed the :meth:`~.Connectable.execute`
- method of a :class:`.Connection` or :class:`.Engine`, where it will be
- wrapped inside of a SELECT statement first::
-
- print connection.execute(func.current_timestamp()).scalar()
-
- In a few exception cases, the :data:`.func` accessor
- will redirect a name to a built-in expression such as :func:`.cast`
- or :func:`.extract`, as these names have well-known meaning
- but are not exactly the same as "functions" from a SQLAlchemy
- perspective.
-
- .. versionadded:: 0.8 :data:`.func` can return non-function expression
- constructs for common quasi-functional names like :func:`.cast`
- and :func:`.extract`.
-
- Functions which are interpreted as "generic" functions know how to
- calculate their return type automatically. For a listing of known generic
- functions, see :ref:`generic_functions`.
-
-"""
-
-# "modifier" global - i.e. modifier.distinct
-# TODO: use UnaryExpression for this instead ?
-modifier = _FunctionGenerator(group=False)
-
-
-class _truncated_label(util.text_type):
- """A unicode subclass used to identify symbolic "
- "names that may require truncation."""
-
- def apply_map(self, map_):
- return self
-
-# for backwards compatibility in case
-# someone is re-implementing the
-# _truncated_identifier() sequence in a custom
-# compiler
-_generated_label = _truncated_label
-
-
-class _anonymous_label(_truncated_label):
- """A unicode subclass used to identify anonymously
- generated names."""
-
- def __add__(self, other):
- return _anonymous_label(
- util.text_type(self) +
- util.text_type(other))
-
- def __radd__(self, other):
- return _anonymous_label(
- util.text_type(other) +
- util.text_type(self))
-
- def apply_map(self, map_):
- return self % map_
-
-
-def _as_truncated(value):
- """coerce the given value to :class:`._truncated_label`.
-
- Existing :class:`._truncated_label` and
- :class:`._anonymous_label` objects are passed
- unchanged.
- """
-
- if isinstance(value, _truncated_label):
- return value
- else:
- return _truncated_label(value)
-
-
-def _string_or_unprintable(element):
- if isinstance(element, util.string_types):
- return element
- else:
- try:
- return str(element)
- except:
- return "unprintable element %r" % element
-
-
-def _clone(element, **kw):
- return element._clone()
-
-
-def _expand_cloned(elements):
- """expand the given set of ClauseElements to be the set of all 'cloned'
- predecessors.
-
- """
- return itertools.chain(*[x._cloned_set for x in elements])
-
-
-def _select_iterables(elements):
- """expand tables into individual columns in the
- given list of column expressions.
-
- """
- return itertools.chain(*[c._select_iterable for c in elements])
-
-
-def _cloned_intersection(a, b):
- """return the intersection of sets a and b, counting
- any overlap between 'cloned' predecessors.
-
- The returned set is in terms of the entities present within 'a'.
-
- """
- all_overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b))
- return set(elem for elem in a
- if all_overlap.intersection(elem._cloned_set))
-
-def _cloned_difference(a, b):
- all_overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b))
- return set(elem for elem in a
- if not all_overlap.intersection(elem._cloned_set))
-
-def _from_objects(*elements):
- return itertools.chain(*[element._from_objects for element in elements])
-
-
-def _labeled(element):
- if not hasattr(element, 'name'):
- return element.label(None)
- else:
- return element
-
-
-# there is some inconsistency here between the usage of
-# inspect() vs. checking for Visitable and __clause_element__.
-# Ideally all functions here would derive from inspect(),
-# however the inspect() versions add significant callcount
-# overhead for critical functions like _interpret_as_column_or_from().
-# Generally, the column-based functions are more performance critical
-# and are fine just checking for __clause_element__(). it's only
-# _interpret_as_from() where we'd like to be able to receive ORM entities
-# that have no defined namespace, hence inspect() is needed there.
-
-
-def _column_as_key(element):
- if isinstance(element, util.string_types):
- return element
- if hasattr(element, '__clause_element__'):
- element = element.__clause_element__()
- try:
- return element.key
- except AttributeError:
- return None
-
-
-def _clause_element_as_expr(element):
- if hasattr(element, '__clause_element__'):
- return element.__clause_element__()
- else:
- return element
-
-
-def _literal_as_text(element):
- if isinstance(element, Visitable):
- return element
- elif hasattr(element, '__clause_element__'):
- return element.__clause_element__()
- elif isinstance(element, util.string_types):
- return TextClause(util.text_type(element))
- elif isinstance(element, (util.NoneType, bool)):
- return _const_expr(element)
- else:
- raise exc.ArgumentError(
- "SQL expression object or string expected."
- )
-
-
-def _no_literals(element):
- if hasattr(element, '__clause_element__'):
- return element.__clause_element__()
- elif not isinstance(element, Visitable):
- raise exc.ArgumentError("Ambiguous literal: %r. Use the 'text()' "
- "function to indicate a SQL expression "
- "literal, or 'literal()' to indicate a "
- "bound value." % element)
- else:
- return element
-
-
def _is_literal(element):
    """True when *element* is a plain Python value, i.e. neither a
    clause construct nor something that can resolve to one."""
    if isinstance(element, Visitable):
        return False
    return not hasattr(element, '__clause_element__')
-
-
def _only_column_elements_or_none(element, name):
    """Variant of :func:`._only_column_elements` that lets ``None``
    pass through untouched."""
    return None if element is None else _only_column_elements(element, name)
-
-
def _only_column_elements(element, name):
    """Coerce *element* to a :class:`.ColumnElement`, unwrapping
    ``__clause_element__()`` first; raise :class:`.ArgumentError`
    naming the offending argument otherwise.
    """
    if hasattr(element, '__clause_element__'):
        element = element.__clause_element__()
    if isinstance(element, ColumnElement):
        return element
    raise exc.ArgumentError(
        "Column-based expression object expected for argument "
        "'%s'; got: '%s', type %s" % (name, element, type(element)))
-
-
def _literal_as_binds(element, name=None, type_=None):
    """Coerce *element* to a clause construct, wrapping plain values
    as unique bound parameters.

    :param element: value or clause construct to coerce.
    :param name: optional key for the generated bind parameter.
    :param type\_: optional type to associate with the bind parameter.
    """
    if hasattr(element, '__clause_element__'):
        return element.__clause_element__()
    elif not isinstance(element, Visitable):
        if element is None:
            # None becomes the NULL construct, not a bound parameter
            return null()
        else:
            # use the canonical BindParameter name, consistent with
            # _check_literal() and ColumnElement._bind_param() in this
            # module (previously the legacy _BindParamClause alias)
            return BindParameter(name, element, type_=type_, unique=True)
    else:
        return element
-
-
def _interpret_as_column_or_from(element):
    """Coerce *element* for use in a column list or FROM context.

    Kept deliberately cheap for the common cases (already a clause
    construct, or provides ``__clause_element__()``); only falls back
    to the inspection system afterwards.  Anything unrecognized is
    stringified into a :func:`.literal_column`.
    """
    if isinstance(element, Visitable):
        return element
    if hasattr(element, '__clause_element__'):
        return element.__clause_element__()

    insp = inspection.inspect(element, raiseerr=False)
    if insp is None:
        if isinstance(element, (util.NoneType, bool)):
            return _const_expr(element)
    elif hasattr(insp, "selectable"):
        return insp.selectable

    return literal_column(str(element))
-
-
def _interpret_as_from(element):
    """Coerce *element* into a FROM clause via the inspection system.

    Uses inspect() (rather than the cheaper hasattr checks used by the
    column-oriented coercions) so that ORM entities are accepted.
    Plain strings become :class:`.TextClause`.
    """
    inspected = inspection.inspect(element, raiseerr=False)
    if inspected is None:
        if isinstance(element, util.string_types):
            return TextClause(util.text_type(element))
    elif hasattr(inspected, "selectable"):
        return inspected.selectable
    raise exc.ArgumentError("FROM expression expected")
-
def _interpret_as_select(element):
    """Coerce *element* into a :class:`.Select` construct.

    Aliases are unwrapped to their underlying element; any non-Select
    FROM object is wrapped via its ``select()`` method.
    """
    stmt = _interpret_as_from(element)
    if isinstance(stmt, Alias):
        stmt = stmt.original
    return stmt if isinstance(stmt, Select) else stmt.select()
-
-
def _const_expr(element):
    """Map the Python constants ``None``/``False``/``True`` to their
    SQL constructs; existing constant constructs pass through."""
    if isinstance(element, (Null, False_, True_)):
        return element
    # identity checks: only the actual singletons qualify
    if element is None:
        return null()
    if element is False:
        return false()
    if element is True:
        return true()
    raise exc.ArgumentError(
        "Expected None, False, or True"
    )
-
-
def _type_from_args(args):
    """Return the type of the first argument that has a concrete type.

    :param args: iterable of column expressions, each with a ``.type``.

    Falls back to the ``NULLTYPE`` singleton when every argument is
    untyped.  Previously this returned the ``NullType`` class object
    itself; callers expect a TypeEngine *instance* here, matching the
    ``sqltypes.NULLTYPE`` default used by ``ColumnElement.type``.
    """
    for a in args:
        if not isinstance(a.type, sqltypes.NullType):
            return a.type
    return sqltypes.NULLTYPE
-
-
def _corresponding_column_or_error(fromclause, column,
                                   require_embedded=False):
    """Locate the column in *fromclause* corresponding to *column*,
    raising :class:`.InvalidRequestError` when no correspondence
    exists instead of returning ``None``.
    """
    corresponding = fromclause.corresponding_column(
        column, require_embedded=require_embedded)
    if corresponding is None:
        raise exc.InvalidRequestError(
            "Given column '%s', attached to table '%s', "
            "failed to locate a corresponding column from table '%s'"
            % (column,
               getattr(column, 'table', None),
               fromclause.description))
    return corresponding
-
-
@util.decorator
def _generative(fn, *args, **kw):
    """Mark a method as generative: the decorated method runs against
    a copy of its instance, which is then returned."""
    gen = args[0]._generate()
    fn(gen, *args[1:], **kw)
    return gen
-
-
def is_column(col):
    """True if ``col`` is an instance of :class:`.ColumnElement`.

    Public predicate distinguishing column expressions from other
    clause constructs.
    """

    return isinstance(col, ColumnElement)
-
-
class ClauseElement(Visitable):
    """Base class for elements of a programmatically constructed SQL
    expression.

    """
    __visit_name__ = 'clause'

    # extension-supplied annotations (see _annotate()); the empty dict
    # here is a shared class-level default, never mutated in place
    _annotations = {}
    # True for constructs which may be passed directly to execute()
    supports_execution = False
    # FROM objects contributed by this element; subclasses override
    _from_objects = []
    # Engine or Connection this element is bound to, if any
    bind = None
    # set by _clone() to point at the element this one was copied from
    _is_clone_of = None
    is_selectable = False
    is_clause_element = True

    _order_by_label_element = None

    def _clone(self):
        """Create a shallow copy of this ClauseElement.

        This method may be used by a generative API.  It's also used as
        part of the "deep" copy afforded by a traversal that combines
        the _copy_internals() method.

        """
        c = self.__class__.__new__(self.__class__)
        c.__dict__ = self.__dict__.copy()
        # expire memoizations that are not valid on the copy
        ClauseElement._cloned_set._reset(c)
        ColumnElement.comparator._reset(c)

        # this is a marker that helps to "equate" clauses to each other
        # when a Select returns its list of FROM clauses.  the cloning
        # process leaves around a lot of remnants of the previous clause
        # typically in the form of column expressions still attached to the
        # old table.
        c._is_clone_of = self

        return c

    @property
    def _constructor(self):
        """return the 'constructor' for this ClauseElement.

        This is for the purposes of creating a new object of
        this type.  Usually, it's just the element's __class__.
        However, the "Annotated" version of the object overrides
        to return the class of its proxied element.

        """
        return self.__class__

    @util.memoized_property
    def _cloned_set(self):
        """Return the set consisting of all cloned ancestors of this
        ClauseElement.

        Includes this ClauseElement.  This accessor tends to be used for
        FromClause objects to identify 'equivalent' FROM clauses, regardless
        of transformative operations.

        """
        s = util.column_set()
        f = self
        # walk the _is_clone_of chain back to the original element
        while f is not None:
            s.add(f)
            f = f._is_clone_of
        return s

    def __getstate__(self):
        # the clone-ancestry marker is not meaningful across
        # serialization; drop it from the pickled state
        d = self.__dict__.copy()
        d.pop('_is_clone_of', None)
        return d

    def _annotate(self, values):
        """return a copy of this ClauseElement with annotations
        updated by the given dictionary.

        """
        return sqlutil.Annotated(self, values)

    def _with_annotations(self, values):
        """return a copy of this ClauseElement with annotations
        replaced by the given dictionary.

        """
        return sqlutil.Annotated(self, values)

    def _deannotate(self, values=None, clone=False):
        """return a copy of this :class:`.ClauseElement` with annotations
        removed.

        :param values: optional tuple of individual values
         to remove.

        """
        if clone:
            # clone is used when we are also copying
            # the expression for a deep deannotation
            return self._clone()
        else:
            # if no clone, since we have no annotations we return
            # self
            return self

    def unique_params(self, *optionaldict, **kwargs):
        """Return a copy with :func:`bindparam()` elements replaced.

        Same functionality as ``params()``, except adds `unique=True`
        to affected bind parameters so that multiple statements can be
        used.

        """
        return self._params(True, optionaldict, kwargs)

    def params(self, *optionaldict, **kwargs):
        """Return a copy with :func:`bindparam()` elements replaced.

        Returns a copy of this ClauseElement with :func:`bindparam()`
        elements replaced with values taken from the given dictionary::

          >>> clause = column('x') + bindparam('foo')
          >>> print clause.compile().params
          {'foo':None}
          >>> print clause.params({'foo':7}).compile().params
          {'foo':7}

        """
        return self._params(False, optionaldict, kwargs)

    def _params(self, unique, optionaldict, kwargs):
        # shared implementation of params() / unique_params(); the
        # single optional positional dict is merged into kwargs
        if len(optionaldict) == 1:
            kwargs.update(optionaldict[0])
        elif len(optionaldict) > 1:
            raise exc.ArgumentError(
                "params() takes zero or one positional dictionary argument")

        def visit_bindparam(bind):
            if bind.key in kwargs:
                bind.value = kwargs[bind.key]
                # a parameter that has received a value no longer
                # needs one supplied at execution time
                bind.required = False
            if unique:
                bind._convert_to_unique()
        return cloned_traverse(self, {}, {'bindparam': visit_bindparam})

    def compare(self, other, **kw):
        """Compare this ClauseElement to the given ClauseElement.

        Subclasses should override the default behavior, which is a
        straight identity comparison.

        \**kw are arguments consumed by subclass compare() methods and
        may be used to modify the criteria for comparison.
        (see :class:`.ColumnElement`)

        """
        return self is other

    def _copy_internals(self, clone=_clone, **kw):
        """Reassign internal elements to be clones of themselves.

        Called during a copy-and-traverse operation on newly
        shallow-copied elements to create a deep copy.

        The given clone function should be used, which may be applying
        additional transformations to the element (i.e. replacement
        traversal, cloned traversal, annotations).

        """
        pass

    def get_children(self, **kwargs):
        """Return immediate child elements of this :class:`.ClauseElement`.

        This is used for visit traversal.

        \**kwargs may contain flags that change the collection that is
        returned, for example to return a subset of items in order to
        cut down on larger traversals, or to return child items from a
        different context (such as schema-level collections instead of
        clause-level).

        """
        return []

    def self_group(self, against=None):
        """Apply a 'grouping' to this :class:`.ClauseElement`.

        This method is overridden by subclasses to return a
        "grouping" construct, i.e. parenthesis.   In particular
        it's used by "binary" expressions to provide a grouping
        around themselves when placed into a larger expression,
        as well as by :func:`.select` constructs when placed into
        the FROM clause of another :func:`.select`.  (Note that
        subqueries should be normally created using the
        :func:`.Select.alias` method, as many platforms require
        nested SELECT statements to be named).

        As expressions are composed together, the application of
        :meth:`self_group` is automatic - end-user code should never
        need to use this method directly.  Note that SQLAlchemy's
        clause constructs take operator precedence into account -
        so parenthesis might not be needed, for example, in
        an expression like ``x OR (y AND z)`` - AND takes precedence
        over OR.

        The base :meth:`self_group` method of :class:`.ClauseElement`
        just returns self.
        """
        return self

    def compile(self, bind=None, dialect=None, **kw):
        """Compile this SQL expression.

        The return value is a :class:`~.Compiled` object.
        Calling ``str()`` or ``unicode()`` on the returned value will yield a
        string representation of the result.  The
        :class:`~.Compiled` object also can return a
        dictionary of bind parameter names and values
        using the ``params`` accessor.

        :param bind: An ``Engine`` or ``Connection`` from which a
            ``Compiled`` will be acquired.  This argument takes precedence over
            this :class:`.ClauseElement`'s bound engine, if any.

        :param column_keys: Used for INSERT and UPDATE statements, a list of
            column names which should be present in the VALUES clause of the
            compiled statement.  If ``None``, all columns from the target table
            object are rendered.

        :param dialect: A ``Dialect`` instance from which a ``Compiled``
            will be acquired.  This argument takes precedence over the `bind`
            argument as well as this :class:`.ClauseElement`'s bound engine,
            if any.

        :param inline: Used for INSERT statements, for a dialect which does
            not support inline retrieval of newly generated primary key
            columns, will force the expression used to create the new primary
            key value to be rendered inline within the INSERT statement's
            VALUES clause.  This typically refers to Sequence execution but
            may also refer to any server-side default generation function
            associated with a primary key `Column`.

        """

        if not dialect:
            # precedence: explicit dialect, then the bind's dialect,
            # then this element's own bind, then the generic default
            if bind:
                dialect = bind.dialect
            elif self.bind:
                dialect = self.bind.dialect
                bind = self.bind
            else:
                dialect = default.DefaultDialect()
        return self._compiler(dialect, bind=bind, **kw)

    def _compiler(self, dialect, **kw):
        """Return a compiler appropriate for this ClauseElement, given a
        Dialect."""

        return dialect.statement_compiler(dialect, self, **kw)

    def __str__(self):
        # on py2k, compile to unicode then encode to ascii with
        # backslash escapes so that str() never raises UnicodeError
        if util.py3k:
            return str(self.compile())
        else:
            return unicode(self.compile()).encode('ascii', 'backslashreplace')

    def __and__(self, other):
        # "clause & clause" produces AND
        return and_(self, other)

    def __or__(self, other):
        # "clause | clause" produces OR
        return or_(self, other)

    def __invert__(self):
        # "~clause" produces the negation
        return self._negate()

    def __bool__(self):
        # evaluating a SQL expression for truth (e.g. "if col == 5:")
        # is nearly always an error; fail loudly
        raise TypeError("Boolean value of this clause is not defined")

    __nonzero__ = __bool__

    def _negate(self):
        # constructs may supply a pre-computed negation; otherwise
        # wrap in NOT via a UnaryExpression
        if hasattr(self, 'negation_clause'):
            return self.negation_clause
        else:
            return UnaryExpression(
                self.self_group(against=operators.inv),
                operator=operators.inv,
                negate=None)

    def __repr__(self):
        # prefer the construct's 'description' when available
        friendly = getattr(self, 'description', None)
        if friendly is None:
            return object.__repr__(self)
        else:
            return '<%s.%s at 0x%x; %s>' % (
                self.__module__, self.__class__.__name__, id(self), friendly)
-
-inspection._self_inspects(ClauseElement)
-
-
class Immutable(object):
    """Mixin marking a ClauseElement as 'immutable' when expressions
    are cloned: cloning hands back the same object, and bind-parameter
    replacement is disallowed.
    """

    def _clone(self):
        # immutable constructs are shared rather than copied
        return self

    def params(self, *optionaldict, **kwargs):
        raise NotImplementedError("Immutable objects do not support copying")

    def unique_params(self, *optionaldict, **kwargs):
        raise NotImplementedError("Immutable objects do not support copying")
-
-
class _DefaultColumnComparator(operators.ColumnOperators):
    """Defines comparison and math operations.

    See :class:`.ColumnOperators` and :class:`.Operators` for descriptions
    of all operations.

    """

    @util.memoized_property
    def type(self):
        # the comparator's type mirrors that of its expression
        return self.expr.type

    def operate(self, op, *other, **kwargs):
        # dispatch through the "operators" table below; o[0] is the
        # implementation method, o[1:] are fixed extra arguments
        # (typically the negated operator)
        o = self.operators[op.__name__]
        return o[0](self, self.expr, op, *(other + o[1:]), **kwargs)

    def reverse_operate(self, op, other, **kwargs):
        # same dispatch as operate(), with operand order reversed
        o = self.operators[op.__name__]
        return o[0](self, self.expr, op, other, reverse=True, *o[1:], **kwargs)

    def _adapt_expression(self, op, other_comparator):
        """evaluate the return type of <self> <op> <othertype>,
        and apply any adaptations to the given operator.

        This method determines the type of a resulting binary expression
        given two source types and an operator.  For example, two
        :class:`.Column` objects, both of the type :class:`.Integer`, will
        produce a :class:`.BinaryExpression` that also has the type
        :class:`.Integer` when compared via the addition (``+``) operator.
        However, using the addition operator with an :class:`.Integer`
        and a :class:`.Date` object will produce a :class:`.Date`, assuming
        "days delta" behavior by the database (in reality, most databases
        other than Postgresql don't accept this particular operation).

        The method returns a tuple of the form <operator>, <type>.
        The resulting operator and type will be those applied to the
        resulting :class:`.BinaryExpression` as the final operator and the
        right-hand side of the expression.

        Note that only a subset of operators make usage of
        :meth:`._adapt_expression`,
        including math operators and user-defined operators, but not
        boolean comparison or special SQL keywords like MATCH or BETWEEN.

        """
        return op, other_comparator.type

    def _boolean_compare(self, expr, op, obj, negate=None, reverse=False,
                         _python_is_types=(util.NoneType, bool),
                         **kwargs):
        # produce a boolean-typed BinaryExpression; None/True/False on
        # the right-hand side receive special IS / IS NOT treatment

        if isinstance(obj, _python_is_types + (Null, True_, False_)):

            # allow x ==/!= True/False to be treated as a literal.
            # this comes out to "== / != true/false" or "1/0" if those
            # constants aren't supported and works on all platforms
            if op in (operators.eq, operators.ne) and \
                    isinstance(obj, (bool, True_, False_)):
                return BinaryExpression(expr,
                                        obj,
                                        op,
                                        type_=sqltypes.BOOLEANTYPE,
                                        negate=negate, modifiers=kwargs)
            else:
                # all other None/True/False uses IS, IS NOT
                if op in (operators.eq, operators.is_):
                    return BinaryExpression(expr, _const_expr(obj),
                                            operators.is_,
                                            negate=operators.isnot)
                elif op in (operators.ne, operators.isnot):
                    return BinaryExpression(expr, _const_expr(obj),
                                            operators.isnot,
                                            negate=operators.is_)
                else:
                    raise exc.ArgumentError(
                        "Only '=', '!=', 'is_()', 'isnot()' operators can "
                        "be used with None/True/False")
        else:
            # coerce plain values to bound parameters
            obj = self._check_literal(expr, op, obj)

        if reverse:
            return BinaryExpression(obj,
                                    expr,
                                    op,
                                    type_=sqltypes.BOOLEANTYPE,
                                    negate=negate, modifiers=kwargs)
        else:
            return BinaryExpression(expr,
                                    obj,
                                    op,
                                    type_=sqltypes.BOOLEANTYPE,
                                    negate=negate, modifiers=kwargs)

    def _binary_operate(self, expr, op, obj, reverse=False, result_type=None,
                        **kw):
        # math/string operators: result type is derived from the two
        # operand types via _adapt_expression() unless given explicitly
        obj = self._check_literal(expr, op, obj)

        if reverse:
            left, right = obj, expr
        else:
            left, right = expr, obj

        if result_type is None:
            op, result_type = left.comparator._adapt_expression(
                op, right.comparator)

        return BinaryExpression(left, right, op, type_=result_type)

    def _scalar(self, expr, op, fn, **kw):
        # modifiers like desc()/asc(): apply the given function directly
        return fn(expr)

    def _in_impl(self, expr, op, seq_or_selectable, negate_op, **kw):
        # implementation for in_() / notin_(); accepts a plain sequence
        # of values or a selectable construct
        seq_or_selectable = _clause_element_as_expr(seq_or_selectable)

        if isinstance(seq_or_selectable, ScalarSelect):
            return self._boolean_compare(expr, op, seq_or_selectable,
                                         negate=negate_op)
        elif isinstance(seq_or_selectable, SelectBase):

            # TODO: if we ever want to support (x, y, z) IN (select x,
            # y, z from table), we would need a multi-column version of
            # as_scalar() to produce a multi- column selectable that
            # does not export itself as a FROM clause

            return self._boolean_compare(
                expr, op, seq_or_selectable.as_scalar(),
                negate=negate_op, **kw)
        elif isinstance(seq_or_selectable, (Selectable, TextClause)):
            return self._boolean_compare(expr, op, seq_or_selectable,
                                         negate=negate_op, **kw)

        # Handle non selectable arguments as sequences
        args = []
        for o in seq_or_selectable:
            if not _is_literal(o):
                if not isinstance(o, ColumnOperators):
                    raise exc.InvalidRequestError('in() function accept'
                            's either a list of non-selectable values, '
                            'or a selectable: %r' % o)
            elif o is None:
                o = null()
            else:
                o = expr._bind_param(op, o)
            args.append(o)
        if len(args) == 0:

            # Special case handling for empty IN's, behave like
            # comparison against zero row selectable.  We use != to
            # build the contradiction as it handles NULL values
            # appropriately, i.e. "not (x IN ())" should not return NULL
            # values for x.

            util.warn('The IN-predicate on "%s" was invoked with an '
                      'empty sequence. This results in a '
                      'contradiction, which nonetheless can be '
                      'expensive to evaluate.  Consider alternative '
                      'strategies for improved performance.' % expr)
            return expr != expr

        return self._boolean_compare(expr, op,
                                     ClauseList(*args).self_group(against=op),
                                     negate=negate_op)

    def _unsupported_impl(self, expr, op, *arg, **kw):
        # placeholder for operators with no generic SQL rendering
        raise NotImplementedError("Operator '%s' is not supported on "
                                  "this expression" % op.__name__)

    def _neg_impl(self, expr, op, **kw):
        """See :meth:`.ColumnOperators.__neg__`."""
        return UnaryExpression(expr, operator=operators.neg)

    def _match_impl(self, expr, op, other, **kw):
        """See :meth:`.ColumnOperators.match`."""
        return self._boolean_compare(expr, operators.match_op,
                                     self._check_literal(expr,
                                                         operators.match_op,
                                                         other))

    def _distinct_impl(self, expr, op, **kw):
        """See :meth:`.ColumnOperators.distinct`."""
        return UnaryExpression(expr, operator=operators.distinct_op,
                               type_=expr.type)

    def _between_impl(self, expr, op, cleft, cright, **kw):
        """See :meth:`.ColumnOperators.between`."""
        return BinaryExpression(
            expr,
            ClauseList(
                self._check_literal(expr, operators.and_, cleft),
                self._check_literal(expr, operators.and_, cright),
                operator=operators.and_,
                group=False),
            operators.between_op)

    def _collate_impl(self, expr, op, other, **kw):
        # See :meth:`.ColumnOperators.collate`
        return collate(expr, other)

    # a mapping of operators with the method they use, along with
    # their negated operator for comparison operators
    operators = {
        "add": (_binary_operate,),
        "mul": (_binary_operate,),
        "sub": (_binary_operate,),
        "div": (_binary_operate,),
        "mod": (_binary_operate,),
        "truediv": (_binary_operate,),
        "custom_op": (_binary_operate,),
        "concat_op": (_binary_operate,),
        "lt": (_boolean_compare, operators.ge),
        "le": (_boolean_compare, operators.gt),
        "ne": (_boolean_compare, operators.eq),
        "gt": (_boolean_compare, operators.le),
        "ge": (_boolean_compare, operators.lt),
        "eq": (_boolean_compare, operators.ne),
        "like_op": (_boolean_compare, operators.notlike_op),
        "ilike_op": (_boolean_compare, operators.notilike_op),
        "notlike_op": (_boolean_compare, operators.like_op),
        "notilike_op": (_boolean_compare, operators.ilike_op),
        "contains_op": (_boolean_compare, operators.notcontains_op),
        "startswith_op": (_boolean_compare, operators.notstartswith_op),
        "endswith_op": (_boolean_compare, operators.notendswith_op),
        "desc_op": (_scalar, desc),
        "asc_op": (_scalar, asc),
        "nullsfirst_op": (_scalar, nullsfirst),
        "nullslast_op": (_scalar, nullslast),
        "in_op": (_in_impl, operators.notin_op),
        "notin_op": (_in_impl, operators.in_op),
        "is_": (_boolean_compare, operators.is_),
        "isnot": (_boolean_compare, operators.isnot),
        "collate": (_collate_impl,),
        "match_op": (_match_impl,),
        "distinct_op": (_distinct_impl,),
        "between_op": (_between_impl, ),
        "neg": (_neg_impl,),
        "getitem": (_unsupported_impl,),
        "lshift": (_unsupported_impl,),
        "rshift": (_unsupported_impl,),
    }

    def _check_literal(self, expr, operator, other):
        # coerce the right-hand operand: clause constructs pass
        # through (untyped binds inherit the expression's type),
        # selectables become scalar subqueries, plain values become
        # bound parameters
        if isinstance(other, (ColumnElement, TextClause)):
            if isinstance(other, BindParameter) and \
                    isinstance(other.type, sqltypes.NullType):
                # TODO: perhaps we should not mutate the incoming
                # bindparam() here and instead make a copy of it.
                # this might be the only place that we're mutating
                # an incoming construct.
                other.type = expr.type
            return other
        elif hasattr(other, '__clause_element__'):
            other = other.__clause_element__()
        elif isinstance(other, sqltypes.TypeEngine.Comparator):
            other = other.expr

        if isinstance(other, (SelectBase, Alias)):
            return other.as_scalar()
        elif not isinstance(other, (ColumnElement, TextClause)):
            return expr._bind_param(operator, other)
        else:
            return other
-
-
class ColumnElement(ClauseElement, ColumnOperators):
    """Represent a column-oriented SQL expression suitable for usage in the
    "columns" clause, WHERE clause etc. of a statement.

    While the most familiar kind of :class:`.ColumnElement` is the
    :class:`.Column` object, :class:`.ColumnElement` serves as the basis
    for any unit that may be present in a SQL expression, including
    the expressions themselves, SQL functions, bound parameters,
    literal expressions, keywords such as ``NULL``, etc.
    :class:`.ColumnElement` is the ultimate base class for all such elements.

    A :class:`.ColumnElement` provides the ability to generate new
    :class:`.ColumnElement`
    objects using Python expressions.  This means that Python operators
    such as ``==``, ``!=`` and ``<`` are overloaded to mimic SQL operations,
    and allow the instantiation of further :class:`.ColumnElement` instances
    which are composed from other, more fundamental :class:`.ColumnElement`
    objects.  For example, two :class:`.ColumnClause` objects can be added
    together with the addition operator ``+`` to produce
    a :class:`.BinaryExpression`.
    Both :class:`.ColumnClause` and :class:`.BinaryExpression` are subclasses
    of :class:`.ColumnElement`::

        >>> from sqlalchemy.sql import column
        >>> column('a') + column('b')
        <sqlalchemy.sql.expression.BinaryExpression object at 0x101029dd0>
        >>> print column('a') + column('b')
        a + b

    :class:`.ColumnElement` supports the ability to be a *proxy* element,
    which indicates that the :class:`.ColumnElement` may be associated with
    a :class:`.Selectable` which was derived from another
    :class:`.Selectable`.  An example of a "derived" :class:`.Selectable` is
    an :class:`.Alias` of a :class:`~sqlalchemy.schema.Table`.  For the
    ambitious, an in-depth discussion of this concept can be found at
    `Expression Transformations <http://techspot.zzzeek.org/2008/01/23/expression-transformations/>`_.

    """

    __visit_name__ = 'column'
    primary_key = False
    foreign_keys = []
    quote = None
    # memoized compile-time label / key-label; populated by subclasses
    _label = None
    _key_label = None
    # alternate names under which this column may be targeted in a row
    _alt_names = ()

    @util.memoized_property
    def type(self):
        # untyped by default; expressions supply a real type
        return sqltypes.NULLTYPE

    @util.memoized_property
    def comparator(self):
        # comparison/math behavior is delegated to the type's
        # comparator_factory (see _DefaultColumnComparator)
        return self.type.comparator_factory(self)

    def __getattr__(self, key):
        # unknown attributes fall through to the comparator, allowing
        # type-specific operators to appear on the column itself
        try:
            return getattr(self.comparator, key)
        except AttributeError:
            raise AttributeError(
                'Neither %r object nor %r object has an attribute %r' % (
                    type(self).__name__,
                    type(self.comparator).__name__,
                    key)
            )

    def operate(self, op, *other, **kwargs):
        # operator protocol: delegate to the comparator
        return op(self.comparator, *other, **kwargs)

    def reverse_operate(self, op, other, **kwargs):
        return op(other, self.comparator, **kwargs)

    def _bind_param(self, operator, obj):
        # wrap a plain value as an anonymous bound parameter, typed
        # relative to this column and operator
        return BindParameter(None, obj,
                             _compared_to_operator=operator,
                             _compared_to_type=self.type, unique=True)

    @property
    def expression(self):
        """Return a column expression.

        Part of the inspection interface; returns self.

        """
        return self

    @property
    def _select_iterable(self):
        return (self, )

    @util.memoized_property
    def base_columns(self):
        # the "roots" of the proxy chain: ancestors with no _proxies
        return util.column_set(c for c in self.proxy_set
                               if not hasattr(c, '_proxies'))

    @util.memoized_property
    def proxy_set(self):
        # this column plus, transitively, every column it proxies for
        s = util.column_set([self])
        if hasattr(self, '_proxies'):
            for c in self._proxies:
                s.update(c.proxy_set)
        return s

    def shares_lineage(self, othercolumn):
        """Return True if the given :class:`.ColumnElement`
        has a common ancestor to this :class:`.ColumnElement`."""

        return bool(self.proxy_set.intersection(othercolumn.proxy_set))

    def _compare_name_for_result(self, other):
        """Return True if the given column element compares to this one
        when targeting within a result row."""

        return hasattr(other, 'name') and hasattr(self, 'name') and \
            other.name == self.name

    def _make_proxy(self, selectable, name=None, name_is_truncatable=False,
                    **kw):
        """Create a new :class:`.ColumnElement` representing this
        :class:`.ColumnElement` as it appears in the select list of a
        descending selectable.

        """
        if name is None:
            name = self.anon_label
            # the key is this element's string form, unless it cannot
            # be compiled against the default dialect
            try:
                key = str(self)
            except exc.UnsupportedCompilationError:
                key = self.anon_label
        else:
            key = name
        co = ColumnClause(_as_truncated(name)
                          if name_is_truncatable else name,
                          selectable,
                          type_=getattr(self,
                                        'type', None))
        co._proxies = [self]
        if selectable._is_clone_of is not None:
            # carry the clone lineage through to the proxy
            co._is_clone_of = \
                selectable._is_clone_of.columns.get(key)
        selectable._columns[key] = co
        return co

    def compare(self, other, use_proxies=False, equivalents=None, **kw):
        """Compare this ColumnElement to another.

        Special arguments understood:

        :param use_proxies: when True, consider two columns that
          share a common base column as equivalent (i.e. shares_lineage())

        :param equivalents: a dictionary of columns as keys mapped to sets
          of columns.  If the given "other" column is present in this
          dictionary, if any of the columns in the corresponding set() pass
          the comparison test, the result is True.  This is used to expand
          the comparison to other columns that may be known to be equivalent
          to this one via foreign key or other criterion.

        """
        to_compare = (other, )
        if equivalents and other in equivalents:
            to_compare = equivalents[other].union(to_compare)

        # for/else: the else clause runs only when the loop finishes
        # without any candidate having matched (each match returns)
        for oth in to_compare:
            if use_proxies and self.shares_lineage(oth):
                return True
            elif hash(oth) == hash(self):
                return True
        else:
            return False

    def label(self, name):
        """Produce a column label, i.e. ``<columnname> AS <name>``.

        This is a shortcut to the :func:`~.expression.label` function.

        if 'name' is None, an anonymous label name will be generated.

        """
        return Label(name, self, self.type)

    @util.memoized_property
    def anon_label(self):
        """provides a constant 'anonymous label' for this ColumnElement.

        This is a label() expression which will be named at compile time.
        The same label() is returned each time anon_label is called so
        that expressions can reference anon_label multiple times, producing
        the same label name at compile time.

        the compiler uses this function automatically at compile time
        for expressions that are known to be 'unnamed' like binary
        expressions and function calls.

        """
        return _anonymous_label('%%(%d %s)s' % (id(self), getattr(self,
                                                'name', 'anon')))
-
-
class ColumnCollection(util.OrderedProperties):
    """An ordered dictionary that stores a list of ColumnElement
    instances.

    Overrides the ``__eq__()`` method to produce SQL clauses between
    sets of correlated columns.

    """

    def __init__(self, *cols):
        super(ColumnCollection, self).__init__()
        # _data maps column .key -> column, preserving insertion order;
        # _all_cols is a set view maintained in parallel for fast
        # identity-based membership tests (see contains_column())
        self._data.update((c.key, c) for c in cols)
        self.__dict__['_all_cols'] = util.column_set(self)

    def __str__(self):
        return repr([str(c) for c in self])

    def replace(self, column):
        """add the given column to this collection, removing unaliased
        versions of this column  as well as existing columns with the
        same key.

        e.g.::

            t = Table('sometable', metadata, Column('col1', Integer))
            t.columns.replace(Column('col1', Integer, key='columnone'))

        will remove the original 'col1' from the collection, and add
        the new column under the key 'columnone'.

        Used by schema.Column to override columns during table reflection.

        """
        # remove an unaliased column of the same name
        if column.name in self and column.key != column.name:
            other = self[column.name]
            if other.name == other.key:
                del self._data[other.name]
                self._all_cols.remove(other)
        # remove any existing column under the same key
        if column.key in self._data:
            self._all_cols.remove(self._data[column.key])
        self._all_cols.add(column)
        self._data[column.key] = column

    def add(self, column):
        """Add a column to this collection.

        The key attribute of the column will be used as the hash key
        for this dictionary.

        """
        self[column.key] = column

    def __delitem__(self, key):
        raise NotImplementedError()

    def __setattr__(self, key, object):
        raise NotImplementedError()

    def __setitem__(self, key, value):
        if key in self:

            # this warning is primarily to catch select() statements
            # which have conflicting column names in their exported
            # columns collection

            existing = self[key]
            if not existing.shares_lineage(value):
                util.warn('Column %r on table %r being replaced by '
                          '%r, which has the same key.  Consider '
                          'use_labels for select() statements.' % (key,
                          getattr(existing, 'table', None), value))
            self._all_cols.remove(existing)
            # pop out memoized proxy_set as this
            # operation may very well be occurring
            # in a _make_proxy operation
            ColumnElement.proxy_set._reset(value)
        self._all_cols.add(value)
        self._data[key] = value

    def clear(self):
        # keep _data and _all_cols synchronized
        self._data.clear()
        self._all_cols.clear()

    def remove(self, column):
        del self._data[column.key]
        self._all_cols.remove(column)

    def update(self, value):
        # bulk-replace from (key, column) pairs; rebuild the set view
        self._data.update(value)
        self._all_cols.clear()
        self._all_cols.update(self._data.values())

    def extend(self, iter):
        self.update((c.key, c) for c in iter)

    # collections compare via __eq__ producing SQL, so they are not
    # hashable
    __hash__ = None

    def __eq__(self, other):
        # produce an AND of column == column for every lineage-sharing
        # pair between the two collections
        l = []
        for c in other:
            for local in self:
                if c.shares_lineage(local):
                    l.append(c == local)
        return and_(*l)

    def __contains__(self, other):
        # membership is by string key only; column objects must use
        # contains_column()
        if not isinstance(other, util.string_types):
            raise exc.ArgumentError("__contains__ requires a string argument")
        return util.OrderedProperties.__contains__(self, other)

    def __setstate__(self, state):
        # rebuild the set view from the pickled _data mapping
        self.__dict__['_data'] = state['_data']
        self.__dict__['_all_cols'] = util.column_set(self._data.values())

    def contains_column(self, col):
        # this has to be done via set() membership
        return col in self._all_cols

    def as_immutable(self):
        return ImmutableColumnCollection(self._data, self._all_cols)
-
-
class ImmutableColumnCollection(util.ImmutableProperties, ColumnCollection):
    """A :class:`.ColumnCollection` frozen against mutation, sharing
    its data and set view with the originating collection."""

    def __init__(self, data, colset):
        util.ImmutableProperties.__init__(self, data)
        self.__dict__.update(_all_cols=colset)

    # mutating operations are disabled
    extend = util.ImmutableProperties._immutable
    remove = util.ImmutableProperties._immutable
-
-
class ColumnSet(util.ordered_column_set):
    """An ordered set of columns whose ``==`` operator produces a SQL
    conjunction of pairwise column comparisons."""

    def contains_column(self, col):
        return col in self

    def extend(self, cols):
        for c in cols:
            self.add(c)

    def __add__(self, other):
        return list(self) + list(other)

    def __eq__(self, other):
        # AND together col == col for every lineage-sharing pair
        crit = [
            c == local
            for c in other
            for local in self
            if c.shares_lineage(local)
        ]
        return and_(*crit)

    def __hash__(self):
        return hash(tuple(self))
-
-
class Selectable(ClauseElement):
    """Mark a class as being selectable."""

    __visit_name__ = 'selectable'

    is_selectable = True

    @property
    def selectable(self):
        # inspection interface: a Selectable is its own selectable
        return self
-
-
-class FromClause(Selectable):
- """Represent an element that can be used within the ``FROM``
- clause of a ``SELECT`` statement.
-
- The most common forms of :class:`.FromClause` are the
- :class:`.Table` and the :func:`.select` constructs. Key
- features common to all :class:`.FromClause` objects include:
-
- * a :attr:`.c` collection, which provides per-name access to a collection
- of :class:`.ColumnElement` objects.
- * a :attr:`.primary_key` attribute, which is a collection of all those
- :class:`.ColumnElement` objects that indicate the ``primary_key`` flag.
- * Methods to generate various derivations of a "from" clause, including
- :meth:`.FromClause.alias`, :meth:`.FromClause.join`,
- :meth:`.FromClause.select`.
-
-
- """
- __visit_name__ = 'fromclause'
- named_with_column = False
- _hide_froms = []
- quote = None
- schema = None
- _memoized_property = util.group_expirable_memoized_property(["_columns"])
-
- def count(self, whereclause=None, **params):
- """return a SELECT COUNT generated against this
- :class:`.FromClause`."""
-
- if self.primary_key:
- col = list(self.primary_key)[0]
- else:
- col = list(self.columns)[0]
- return select(
- [func.count(col).label('tbl_row_count')],
- whereclause,
- from_obj=[self],
- **params)
-
- def select(self, whereclause=None, **params):
- """return a SELECT of this :class:`.FromClause`.
-
- .. seealso::
-
- :func:`~.sql.expression.select` - general purpose
- method which allows for arbitrary column lists.
-
- """
-
- return select([self], whereclause, **params)
-
- def join(self, right, onclause=None, isouter=False):
- """return a join of this :class:`.FromClause` against another
- :class:`.FromClause`."""
-
- return Join(self, right, onclause, isouter)
-
- def outerjoin(self, right, onclause=None):
- """return an outer join of this :class:`.FromClause` against another
- :class:`.FromClause`."""
-
- return Join(self, right, onclause, True)
-
- def alias(self, name=None, flat=False):
- """return an alias of this :class:`.FromClause`.
-
- This is shorthand for calling::
-
- from sqlalchemy import alias
- a = alias(self, name=name)
-
- See :func:`~.expression.alias` for details.
-
- """
-
- return Alias(self, name)
-
- def is_derived_from(self, fromclause):
- """Return True if this FromClause is 'derived' from the given
- FromClause.
-
- An example would be an Alias of a Table is derived from that Table.
-
- """
- # this is essentially an "identity" check in the base class.
- # Other constructs override this to traverse through
- # contained elements.
- return fromclause in self._cloned_set
-
- def _is_lexical_equivalent(self, other):
- """Return True if this FromClause and the other represent
- the same lexical identity.
-
- This tests if either one is a copy of the other, or
- if they are the same via annotation identity.
-
- """
- return self._cloned_set.intersection(other._cloned_set)
-
- def replace_selectable(self, old, alias):
- """replace all occurrences of FromClause 'old' with the given Alias
- object, returning a copy of this :class:`.FromClause`.
-
- """
-
- return sqlutil.ClauseAdapter(alias).traverse(self)
-
- def correspond_on_equivalents(self, column, equivalents):
- """Return corresponding_column for the given column, or if None
- search for a match in the given dictionary.
-
- """
- col = self.corresponding_column(column, require_embedded=True)
- if col is None and col in equivalents:
- for equiv in equivalents[col]:
- nc = self.corresponding_column(equiv, require_embedded=True)
- if nc:
- return nc
- return col
-
- def corresponding_column(self, column, require_embedded=False):
- """Given a :class:`.ColumnElement`, return the exported
- :class:`.ColumnElement` object from this :class:`.Selectable`
- which corresponds to that original
- :class:`~sqlalchemy.schema.Column` via a common ancestor
- column.
-
- :param column: the target :class:`.ColumnElement` to be matched
-
- :param require_embedded: only return corresponding columns for
- the given :class:`.ColumnElement`, if the given
- :class:`.ColumnElement` is actually present within a sub-element
- of this :class:`.FromClause`. Normally the column will match if
- it merely shares a common ancestor with one of the exported
- columns of this :class:`.FromClause`.
-
- """
-
- def embedded(expanded_proxy_set, target_set):
- for t in target_set.difference(expanded_proxy_set):
- if not set(_expand_cloned([t])
- ).intersection(expanded_proxy_set):
- return False
- return True
-
- # don't dig around if the column is locally present
- if self.c.contains_column(column):
- return column
- col, intersect = None, None
- target_set = column.proxy_set
- cols = self.c
- for c in cols:
- expanded_proxy_set = set(_expand_cloned(c.proxy_set))
- i = target_set.intersection(expanded_proxy_set)
- if i and (not require_embedded
- or embedded(expanded_proxy_set, target_set)):
- if col is None:
-
- # no corresponding column yet, pick this one.
-
- col, intersect = c, i
- elif len(i) > len(intersect):
-
- # 'c' has a larger field of correspondence than
- # 'col'. i.e. selectable.c.a1_x->a1.c.x->table.c.x
- # matches a1.c.x->table.c.x better than
- # selectable.c.x->table.c.x does.
-
- col, intersect = c, i
- elif i == intersect:
-
- # they have the same field of correspondence. see
- # which proxy_set has fewer columns in it, which
- # indicates a closer relationship with the root
- # column. Also take into account the "weight"
- # attribute which CompoundSelect() uses to give
- # higher precedence to columns based on vertical
- # position in the compound statement, and discard
- # columns that have no reference to the target
- # column (also occurs with CompoundSelect)
-
- col_distance = util.reduce(operator.add,
- [sc._annotations.get('weight', 1) for sc in
- col.proxy_set if sc.shares_lineage(column)])
- c_distance = util.reduce(operator.add,
- [sc._annotations.get('weight', 1) for sc in
- c.proxy_set if sc.shares_lineage(column)])
- if c_distance < col_distance:
- col, intersect = c, i
- return col
-
- @property
- def description(self):
- """a brief description of this FromClause.
-
- Used primarily for error message formatting.
-
- """
- return getattr(self, 'name', self.__class__.__name__ + " object")
-
- def _reset_exported(self):
- """delete memoized collections when a FromClause is cloned."""
-
- self._memoized_property.expire_instance(self)
-
- @_memoized_property
- def columns(self):
- """A named-based collection of :class:`.ColumnElement` objects
- maintained by this :class:`.FromClause`.
-
- The :attr:`.columns`, or :attr:`.c` collection, is the gateway
- to the construction of SQL expressions using table-bound or
- other selectable-bound columns::
-
- select([mytable]).where(mytable.c.somecolumn == 5)
-
- """
-
- if '_columns' not in self.__dict__:
- self._init_collections()
- self._populate_column_collection()
- return self._columns.as_immutable()
-
- @_memoized_property
- def primary_key(self):
- """Return the collection of Column objects which comprise the
- primary key of this FromClause."""
-
- self._init_collections()
- self._populate_column_collection()
- return self.primary_key
-
- @_memoized_property
- def foreign_keys(self):
- """Return the collection of ForeignKey objects which this
- FromClause references."""
-
- self._init_collections()
- self._populate_column_collection()
- return self.foreign_keys
-
- c = property(attrgetter('columns'),
- doc="An alias for the :attr:`.columns` attribute.")
- _select_iterable = property(attrgetter('columns'))
-
- def _init_collections(self):
- assert '_columns' not in self.__dict__
- assert 'primary_key' not in self.__dict__
- assert 'foreign_keys' not in self.__dict__
-
- self._columns = ColumnCollection()
- self.primary_key = ColumnSet()
- self.foreign_keys = set()
-
- @property
- def _cols_populated(self):
- return '_columns' in self.__dict__
-
- def _populate_column_collection(self):
- """Called on subclasses to establish the .c collection.
-
- Each implementation has a different way of establishing
- this collection.
-
- """
-
- def _refresh_for_new_column(self, column):
- """Given a column added to the .c collection of an underlying
- selectable, produce the local version of that column, assuming this
- selectable ultimately should proxy this column.
-
- this is used to "ping" a derived selectable to add a new column
- to its .c. collection when a Column has been added to one of the
- Table objects it ultimately derives from.
-
- If the given selectable hasn't populated its .c collection yet,
- it should at least pass on the message to the contained selectables,
- but it will return None.
-
- This method is currently used by Declarative to allow Table
- columns to be added to a partially constructed inheritance
- mapping that may have already produced joins. The method
- isn't public right now, as the full span of implications
- and/or caveats aren't yet clear.
-
- It's also possible that this functionality could be invoked by
- default via an event, which would require that
- selectables maintain a weak referencing collection of all
- derivations.
-
- """
- if not self._cols_populated:
- return None
- elif column.key in self.columns and self.columns[column.key] is column:
- return column
- else:
- return None
-
-
-class BindParameter(ColumnElement):
- """Represent a bind parameter.
-
- Public constructor is the :func:`bindparam()` function.
-
- """
-
- __visit_name__ = 'bindparam'
- quote = None
-
- _is_crud = False
-
- def __init__(self, key, value, type_=None, unique=False,
- callable_=None,
- isoutparam=False, required=False,
- quote=None,
- _compared_to_operator=None,
- _compared_to_type=None):
- """Construct a BindParameter.
-
- :param key:
- the key for this bind param. Will be used in the generated
- SQL statement for dialects that use named parameters. This
- value may be modified when part of a compilation operation,
- if other :class:`BindParameter` objects exist with the same
- key, or if its length is too long and truncation is
- required.
-
- :param value:
- Initial value for this bind param. This value may be
- overridden by the dictionary of parameters sent to statement
- compilation/execution.
-
- :param callable\_:
- A callable function that takes the place of "value". The function
- will be called at statement execution time to determine the
- ultimate value. Used for scenarios where the actual bind
- value cannot be determined at the point at which the clause
- construct is created, but embedded bind values are still desirable.
-
- :param type\_:
- A ``TypeEngine`` object that will be used to pre-process the
- value corresponding to this :class:`BindParameter` at
- execution time.
-
- :param unique:
- if True, the key name of this BindParamClause will be
- modified if another :class:`BindParameter` of the same name
- already has been located within the containing
- :class:`.ClauseElement`.
-
- :param quote:
- True if this parameter name requires quoting and is not
- currently known as a SQLAlchemy reserved word; this currently
- only applies to the Oracle backend.
-
- :param required:
- a value is required at execution time.
-
- :param isoutparam:
- if True, the parameter should be treated like a stored procedure
- "OUT" parameter.
-
- """
- if unique:
- self.key = _anonymous_label('%%(%d %s)s' % (id(self), key
- or 'param'))
- else:
- self.key = key or _anonymous_label('%%(%d param)s'
- % id(self))
-
- # identifying key that won't change across
- # clones, used to identify the bind's logical
- # identity
- self._identifying_key = self.key
-
- # key that was passed in the first place, used to
- # generate new keys
- self._orig_key = key or 'param'
-
- self.unique = unique
- self.value = value
- self.callable = callable_
- self.isoutparam = isoutparam
- self.required = required
- self.quote = quote
- if type_ is None:
- if _compared_to_type is not None:
- self.type = \
- _compared_to_type.coerce_compared_value(
- _compared_to_operator, value)
- else:
- self.type = sqltypes._type_map.get(type(value),
- sqltypes.NULLTYPE)
- elif isinstance(type_, type):
- self.type = type_()
- else:
- self.type = type_
-
- @property
- def effective_value(self):
- """Return the value of this bound parameter,
- taking into account if the ``callable`` parameter
- was set.
-
- The ``callable`` value will be evaluated
- and returned if present, else ``value``.
-
- """
- if self.callable:
- return self.callable()
- else:
- return self.value
-
- def _clone(self):
- c = ClauseElement._clone(self)
- if self.unique:
- c.key = _anonymous_label('%%(%d %s)s' % (id(c), c._orig_key
- or 'param'))
- return c
-
- def _convert_to_unique(self):
- if not self.unique:
- self.unique = True
- self.key = _anonymous_label('%%(%d %s)s' % (id(self),
- self._orig_key or 'param'))
-
- def compare(self, other, **kw):
- """Compare this :class:`BindParameter` to the given
- clause."""
-
- return isinstance(other, BindParameter) \
- and self.type._compare_type_affinity(other.type) \
- and self.value == other.value
-
- def __getstate__(self):
- """execute a deferred value for serialization purposes."""
-
- d = self.__dict__.copy()
- v = self.value
- if self.callable:
- v = self.callable()
- d['callable'] = None
- d['value'] = v
- return d
-
- def __repr__(self):
- return 'BindParameter(%r, %r, type_=%r)' % (self.key,
- self.value, self.type)
-
-
-class TypeClause(ClauseElement):
- """Handle a type keyword in a SQL statement.
-
- Used by the ``Case`` statement.
-
- """
-
- __visit_name__ = 'typeclause'
-
- def __init__(self, type):
- self.type = type
-
-
-class Generative(object):
- """Allow a ClauseElement to generate itself via the
- @_generative decorator.
-
- """
-
- def _generate(self):
- s = self.__class__.__new__(self.__class__)
- s.__dict__ = self.__dict__.copy()
- return s
-
-
-class Executable(Generative):
- """Mark a ClauseElement as supporting execution.
-
- :class:`.Executable` is a superclass for all "statement" types
- of objects, including :func:`select`, :func:`delete`, :func:`update`,
- :func:`insert`, :func:`text`.
-
- """
-
- supports_execution = True
- _execution_options = util.immutabledict()
- _bind = None
-
- @_generative
- def execution_options(self, **kw):
- """ Set non-SQL options for the statement which take effect during
- execution.
-
- Execution options can be set on a per-statement or
- per :class:`.Connection` basis. Additionally, the
- :class:`.Engine` and ORM :class:`~.orm.query.Query` objects provide
- access to execution options which they in turn configure upon
- connections.
-
- The :meth:`execution_options` method is generative. A new
- instance of this statement is returned that contains the options::
-
- statement = select([table.c.x, table.c.y])
- statement = statement.execution_options(autocommit=True)
-
- Note that only a subset of possible execution options can be applied
- to a statement - these include "autocommit" and "stream_results",
- but not "isolation_level" or "compiled_cache".
- See :meth:`.Connection.execution_options` for a full list of
- possible options.
-
- .. seealso::
-
- :meth:`.Connection.execution_options()`
-
- :meth:`.Query.execution_options()`
-
- """
- if 'isolation_level' in kw:
- raise exc.ArgumentError(
- "'isolation_level' execution option may only be specified "
- "on Connection.execution_options(), or "
- "per-engine using the isolation_level "
- "argument to create_engine()."
- )
- if 'compiled_cache' in kw:
- raise exc.ArgumentError(
- "'compiled_cache' execution option may only be specified "
- "on Connection.execution_options(), not per statement."
- )
- self._execution_options = self._execution_options.union(kw)
-
- def execute(self, *multiparams, **params):
- """Compile and execute this :class:`.Executable`."""
- e = self.bind
- if e is None:
- label = getattr(self, 'description', self.__class__.__name__)
- msg = ('This %s is not directly bound to a Connection or Engine.'
- 'Use the .execute() method of a Connection or Engine '
- 'to execute this construct.' % label)
- raise exc.UnboundExecutionError(msg)
- return e._execute_clauseelement(self, multiparams, params)
-
- def scalar(self, *multiparams, **params):
- """Compile and execute this :class:`.Executable`, returning the
- result's scalar representation.
-
- """
- return self.execute(*multiparams, **params).scalar()
-
- @property
- def bind(self):
- """Returns the :class:`.Engine` or :class:`.Connection` to
- which this :class:`.Executable` is bound, or None if none found.
-
- This is a traversal which checks locally, then
- checks among the "from" clauses of associated objects
- until a bound engine or connection is found.
-
- """
- if self._bind is not None:
- return self._bind
-
- for f in _from_objects(self):
- if f is self:
- continue
- engine = f.bind
- if engine is not None:
- return engine
- else:
- return None
-
-
-# legacy, some outside users may be calling this
-_Executable = Executable
-
-
-class TextClause(Executable, ClauseElement):
- """Represent a literal SQL text fragment.
-
- Public constructor is the :func:`text()` function.
-
- """
+ 'tuple_', 'type_coerce', 'union', 'union_all', 'update']
+
+
+from .visitors import Visitable
+from .functions import func, modifier, FunctionElement
+from ..util.langhelpers import public_factory
+from .elements import ClauseElement, ColumnElement,\
+ BindParameter, UnaryExpression, BooleanClauseList, \
+ Label, Cast, Case, ColumnClause, TextClause, Over, Null, \
+ True_, False_, BinaryExpression, Tuple, TypeClause, Extract, \
+ Grouping, not_, \
+ collate, literal_column, between,\
+ literal, outparam, type_coerce, ClauseList
+
+from .elements import SavepointClause, RollbackToSavepointClause, \
+ ReleaseSavepointClause
+
+from .base import ColumnCollection, Generative, Executable, \
+ PARSE_AUTOCOMMIT
+
+from .selectable import Alias, Join, Select, Selectable, TableClause, \
+ CompoundSelect, CTE, FromClause, FromGrouping, SelectBase, \
+ alias, GenerativeSelect, \
+ subquery, HasPrefixes, Exists, ScalarSelect, TextAsFrom
+
+
+from .dml import Insert, Update, Delete, UpdateBase, ValuesBase
+
+# factory functions - these pull class-bound constructors and classmethods
+# from SQL elements and selectables into public functions. This allows
+# the functions to be available in the sqlalchemy.sql.* namespace and
+# to be auto-cross-documenting from the function to the class itself.
+
+and_ = public_factory(BooleanClauseList.and_, ".expression.and_")
+or_ = public_factory(BooleanClauseList.or_, ".expression.or_")
+bindparam = public_factory(BindParameter, ".expression.bindparam")
+select = public_factory(Select, ".expression.select")
+text = public_factory(TextClause._create_text, ".expression.text")
+table = public_factory(TableClause, ".expression.table")
+column = public_factory(ColumnClause, ".expression.column")
+over = public_factory(Over, ".expression.over")
+label = public_factory(Label, ".expression.label")
+case = public_factory(Case, ".expression.case")
+cast = public_factory(Cast, ".expression.cast")
+extract = public_factory(Extract, ".expression.extract")
+tuple_ = public_factory(Tuple, ".expression.tuple_")
+except_ = public_factory(CompoundSelect._create_except, ".expression.except_")
+except_all = public_factory(CompoundSelect._create_except_all, ".expression.except_all")
+intersect = public_factory(CompoundSelect._create_intersect, ".expression.intersect")
+intersect_all = public_factory(CompoundSelect._create_intersect_all, ".expression.intersect_all")
+union = public_factory(CompoundSelect._create_union, ".expression.union")
+union_all = public_factory(CompoundSelect._create_union_all, ".expression.union_all")
+exists = public_factory(Exists, ".expression.exists")
+nullsfirst = public_factory(UnaryExpression._create_nullsfirst, ".expression.nullsfirst")
+nullslast = public_factory(UnaryExpression._create_nullslast, ".expression.nullslast")
+asc = public_factory(UnaryExpression._create_asc, ".expression.asc")
+desc = public_factory(UnaryExpression._create_desc, ".expression.desc")
+distinct = public_factory(UnaryExpression._create_distinct, ".expression.distinct")
+true = public_factory(True_._singleton, ".expression.true")
+false = public_factory(False_._singleton, ".expression.false")
+null = public_factory(Null._singleton, ".expression.null")
+join = public_factory(Join._create_join, ".expression.join")
+outerjoin = public_factory(Join._create_outerjoin, ".expression.outerjoin")
+insert = public_factory(Insert, ".expression.insert")
+update = public_factory(Update, ".expression.update")
+delete = public_factory(Delete, ".expression.delete")
+
+
+# internal functions still being called from tests and the ORM,
+# these might be better off in some other namespace
+from .base import _from_objects
+from .elements import _literal_as_text, _clause_element_as_expr,\
+ _is_column, _labeled, _only_column_elements, _string_or_unprintable, \
+ _truncated_label, _clone, _cloned_difference, _cloned_intersection,\
+ _column_as_key, _literal_as_binds, _select_iterables, \
+ _corresponding_column_or_error
+from .selectable import _interpret_as_from
- __visit_name__ = 'textclause'
- _bind_params_regex = re.compile(r'(?<![:\w\x5c]):(\w+)(?!:)', re.UNICODE)
- _execution_options = \
- Executable._execution_options.union(
- {'autocommit': PARSE_AUTOCOMMIT})
-
- @property
- def _select_iterable(self):
- return (self,)
-
- @property
- def selectable(self):
- return self
-
- _hide_froms = []
-
- def __init__(
- self,
- text='',
- bind=None,
- bindparams=None,
- typemap=None,
- autocommit=None):
-
- self._bind = bind
- self.bindparams = {}
- self.typemap = typemap
- if autocommit is not None:
- util.warn_deprecated('autocommit on text() is deprecated. '
- 'Use .execution_options(autocommit=Tru'
- 'e)')
- self._execution_options = \
- self._execution_options.union(
- {'autocommit': autocommit})
- if typemap is not None:
- for key in typemap:
- typemap[key] = sqltypes.to_instance(typemap[key])
-
- def repl(m):
- self.bindparams[m.group(1)] = bindparam(m.group(1))
- return ':%s' % m.group(1)
-
- # scan the string and search for bind parameter names, add them
- # to the list of bindparams
-
- self.text = self._bind_params_regex.sub(repl, text)
- if bindparams is not None:
- for b in bindparams:
- self.bindparams[b.key] = b
-
- @property
- def type(self):
- if self.typemap is not None and len(self.typemap) == 1:
- return list(self.typemap)[0]
- else:
- return sqltypes.NULLTYPE
-
- @property
- def comparator(self):
- return self.type.comparator_factory(self)
-
- def self_group(self, against=None):
- if against is operators.in_op:
- return Grouping(self)
- else:
- return self
-
- def _copy_internals(self, clone=_clone, **kw):
- self.bindparams = dict((b.key, clone(b, **kw))
- for b in self.bindparams.values())
-
- def get_children(self, **kwargs):
- return list(self.bindparams.values())
-
-
-class Null(ColumnElement):
- """Represent the NULL keyword in a SQL statement.
-
- Public constructor is the :func:`null()` function.
-
- """
-
- __visit_name__ = 'null'
-
- def __init__(self):
- self.type = sqltypes.NULLTYPE
-
- def compare(self, other):
- return isinstance(other, Null)
-
-
-class False_(ColumnElement):
- """Represent the ``false`` keyword in a SQL statement.
-
- Public constructor is the :func:`false()` function.
-
- """
-
- __visit_name__ = 'false'
-
- def __init__(self):
- self.type = sqltypes.BOOLEANTYPE
-
- def compare(self, other):
- return isinstance(other, False_)
-
-class True_(ColumnElement):
- """Represent the ``true`` keyword in a SQL statement.
-
- Public constructor is the :func:`true()` function.
-
- """
-
- __visit_name__ = 'true'
-
- def __init__(self):
- self.type = sqltypes.BOOLEANTYPE
-
- def compare(self, other):
- return isinstance(other, True_)
-
-
-class ClauseList(ClauseElement):
- """Describe a list of clauses, separated by an operator.
-
- By default, is comma-separated, such as a column listing.
-
- """
- __visit_name__ = 'clauselist'
-
- def __init__(self, *clauses, **kwargs):
- self.operator = kwargs.pop('operator', operators.comma_op)
- self.group = kwargs.pop('group', True)
- self.group_contents = kwargs.pop('group_contents', True)
- if self.group_contents:
- self.clauses = [
- _literal_as_text(clause).self_group(against=self.operator)
- for clause in clauses if clause is not None]
- else:
- self.clauses = [
- _literal_as_text(clause)
- for clause in clauses if clause is not None]
-
- def __iter__(self):
- return iter(self.clauses)
-
- def __len__(self):
- return len(self.clauses)
-
- @property
- def _select_iterable(self):
- return iter(self)
-
- def append(self, clause):
- # TODO: not sure if i like the 'group_contents' flag. need to
- # define the difference between a ClauseList of ClauseLists,
- # and a "flattened" ClauseList of ClauseLists. flatten()
- # method ?
- if self.group_contents:
- self.clauses.append(_literal_as_text(clause).\
- self_group(against=self.operator))
- else:
- self.clauses.append(_literal_as_text(clause))
-
- def _copy_internals(self, clone=_clone, **kw):
- self.clauses = [clone(clause, **kw) for clause in self.clauses]
-
- def get_children(self, **kwargs):
- return self.clauses
-
- @property
- def _from_objects(self):
- return list(itertools.chain(*[c._from_objects for c in self.clauses]))
-
- def self_group(self, against=None):
- if self.group and operators.is_precedent(self.operator, against):
- return Grouping(self)
- else:
- return self
-
- def compare(self, other, **kw):
- """Compare this :class:`.ClauseList` to the given :class:`.ClauseList`,
- including a comparison of all the clause items.
-
- """
- if not isinstance(other, ClauseList) and len(self.clauses) == 1:
- return self.clauses[0].compare(other, **kw)
- elif isinstance(other, ClauseList) and \
- len(self.clauses) == len(other.clauses):
- for i in range(0, len(self.clauses)):
- if not self.clauses[i].compare(other.clauses[i], **kw):
- return False
- else:
- return self.operator == other.operator
- else:
- return False
-
-
-class BooleanClauseList(ClauseList, ColumnElement):
- __visit_name__ = 'clauselist'
-
- def __init__(self, *clauses, **kwargs):
- super(BooleanClauseList, self).__init__(*clauses, **kwargs)
- self.type = sqltypes.to_instance(kwargs.get('type_',
- sqltypes.Boolean))
-
- @property
- def _select_iterable(self):
- return (self, )
-
- def self_group(self, against=None):
- if not self.clauses:
- return self
- else:
- return super(BooleanClauseList, self).self_group(against=against)
-
-
-class Tuple(ClauseList, ColumnElement):
-
- def __init__(self, *clauses, **kw):
- clauses = [_literal_as_binds(c) for c in clauses]
- self.type = kw.pop('type_', None)
- if self.type is None:
- self.type = _type_from_args(clauses)
- super(Tuple, self).__init__(*clauses, **kw)
-
- @property
- def _select_iterable(self):
- return (self, )
-
- def _bind_param(self, operator, obj):
- return Tuple(*[
- BindParameter(None, o, _compared_to_operator=operator,
- _compared_to_type=self.type, unique=True)
- for o in obj
- ]).self_group()
-
-
-class Case(ColumnElement):
- __visit_name__ = 'case'
-
- def __init__(self, whens, value=None, else_=None):
- try:
- whens = util.dictlike_iteritems(whens)
- except TypeError:
- pass
-
- if value is not None:
- whenlist = [
- (_literal_as_binds(c).self_group(),
- _literal_as_binds(r)) for (c, r) in whens
- ]
- else:
- whenlist = [
- (_no_literals(c).self_group(),
- _literal_as_binds(r)) for (c, r) in whens
- ]
-
- if whenlist:
- type_ = list(whenlist[-1])[-1].type
- else:
- type_ = None
-
- if value is None:
- self.value = None
- else:
- self.value = _literal_as_binds(value)
-
- self.type = type_
- self.whens = whenlist
- if else_ is not None:
- self.else_ = _literal_as_binds(else_)
- else:
- self.else_ = None
-
- def _copy_internals(self, clone=_clone, **kw):
- if self.value is not None:
- self.value = clone(self.value, **kw)
- self.whens = [(clone(x, **kw), clone(y, **kw))
- for x, y in self.whens]
- if self.else_ is not None:
- self.else_ = clone(self.else_, **kw)
-
- def get_children(self, **kwargs):
- if self.value is not None:
- yield self.value
- for x, y in self.whens:
- yield x
- yield y
- if self.else_ is not None:
- yield self.else_
-
- @property
- def _from_objects(self):
- return list(itertools.chain(*[x._from_objects for x in
- self.get_children()]))
-
-
-class FunctionElement(Executable, ColumnElement, FromClause):
- """Base for SQL function-oriented constructs.
-
- .. seealso::
-
- :class:`.Function` - named SQL function.
-
- :data:`.func` - namespace which produces registered or ad-hoc
- :class:`.Function` instances.
-
- :class:`.GenericFunction` - allows creation of registered function
- types.
-
- """
-
- packagenames = ()
-
- def __init__(self, *clauses, **kwargs):
- """Construct a :class:`.FunctionElement`.
- """
- args = [_literal_as_binds(c, self.name) for c in clauses]
- self.clause_expr = ClauseList(
- operator=operators.comma_op,
- group_contents=True, *args).\
- self_group()
-
- @property
- def columns(self):
- """Fulfill the 'columns' contract of :class:`.ColumnElement`.
-
- Returns a single-element list consisting of this object.
-
- """
- return [self]
-
- @util.memoized_property
- def clauses(self):
- """Return the underlying :class:`.ClauseList` which contains
- the arguments for this :class:`.FunctionElement`.
-
- """
- return self.clause_expr.element
-
- def over(self, partition_by=None, order_by=None):
- """Produce an OVER clause against this function.
-
- Used against aggregate or so-called "window" functions,
- for database backends that support window functions.
-
- The expression::
-
- func.row_number().over(order_by='x')
-
- is shorthand for::
-
- from sqlalchemy import over
- over(func.row_number(), order_by='x')
-
- See :func:`~.expression.over` for a full description.
-
- .. versionadded:: 0.7
-
- """
- return over(self, partition_by=partition_by, order_by=order_by)
-
- @property
- def _from_objects(self):
- return self.clauses._from_objects
-
- def get_children(self, **kwargs):
- return self.clause_expr,
-
- def _copy_internals(self, clone=_clone, **kw):
- self.clause_expr = clone(self.clause_expr, **kw)
- self._reset_exported()
- FunctionElement.clauses._reset(self)
-
- def select(self):
- """Produce a :func:`~.expression.select` construct
- against this :class:`.FunctionElement`.
-
- This is shorthand for::
-
- s = select([function_element])
-
- """
- s = select([self])
- if self._execution_options:
- s = s.execution_options(**self._execution_options)
- return s
-
- def scalar(self):
- """Execute this :class:`.FunctionElement` against an embedded
- 'bind' and return a scalar value.
-
- This first calls :meth:`~.FunctionElement.select` to
- produce a SELECT construct.
-
- Note that :class:`.FunctionElement` can be passed to
- the :meth:`.Connectable.scalar` method of :class:`.Connection`
- or :class:`.Engine`.
-
- """
- return self.select().execute().scalar()
-
- def execute(self):
- """Execute this :class:`.FunctionElement` against an embedded
- 'bind'.
-
- This first calls :meth:`~.FunctionElement.select` to
- produce a SELECT construct.
-
- Note that :class:`.FunctionElement` can be passed to
- the :meth:`.Connectable.execute` method of :class:`.Connection`
- or :class:`.Engine`.
-
- """
- return self.select().execute()
-
- def _bind_param(self, operator, obj):
- return BindParameter(None, obj, _compared_to_operator=operator,
- _compared_to_type=self.type, unique=True)
-
-
class Function(FunctionElement):
    """Describe a named SQL function.

    See the superclass :class:`.FunctionElement` for a description
    of public methods.

    .. seealso::

        :data:`.func` - namespace which produces registered or ad-hoc
        :class:`.Function` instances.

        :class:`.GenericFunction` - allows creation of registered function
        types.

    """

    __visit_name__ = 'function'

    def __init__(self, name, *clauses, **kw):
        """Construct a :class:`.Function`.

        The :data:`.func` construct is normally used to construct
        new :class:`.Function` instances.

        Recognized keywords are ``packagenames``, ``bind`` and ``type_``;
        remaining keywords are passed to ``FunctionElement.__init__``.

        """
        # 'packagenames' is popped (consumed here); 'bind' and 'type_'
        # are read with .get() and so also remain visible to the
        # superclass constructor below.
        self.packagenames = kw.pop('packagenames', None) or []
        self.name = name
        self._bind = kw.get('bind', None)
        self.type = sqltypes.to_instance(kw.get('type_', None))

        FunctionElement.__init__(self, *clauses, **kw)

    def _bind_param(self, operator, obj):
        # unlike the base FunctionElement version, name the bound
        # parameter after the function itself.
        return BindParameter(self.name, obj,
                             _compared_to_operator=operator,
                             _compared_to_type=self.type,
                             unique=True)
-
-
class Cast(ColumnElement):
    """Represent a SQL CAST construct pairing an expression with a
    target type."""

    __visit_name__ = 'cast'

    def __init__(self, clause, totype, **kwargs):
        # resolve the target type once and share it between .type and
        # the rendered type clause
        target_type = sqltypes.to_instance(totype)
        self.type = target_type
        self.typeclause = TypeClause(target_type)
        self.clause = _literal_as_binds(clause, None)

    def _copy_internals(self, clone=_clone, **kw):
        self.clause = clone(self.clause, **kw)
        self.typeclause = clone(self.typeclause, **kw)

    def get_children(self, **kwargs):
        return (self.clause, self.typeclause)

    @property
    def _from_objects(self):
        # only the inner expression contributes FROM objects
        return self.clause._from_objects
-
-
class Extract(ColumnElement):
    """Represent a SQL EXTRACT construct; always typed as Integer."""

    __visit_name__ = 'extract'

    def __init__(self, field, expr, **kwargs):
        self.field = field
        self.expr = _literal_as_binds(expr, None)
        self.type = sqltypes.Integer()

    def _copy_internals(self, clone=_clone, **kw):
        self.expr = clone(self.expr, **kw)

    def get_children(self, **kwargs):
        return (self.expr,)

    @property
    def _from_objects(self):
        # only the inner expression contributes FROM objects
        return self.expr._from_objects
-
-
class UnaryExpression(ColumnElement):
    """Define a 'unary' expression.

    A unary expression has a single column expression
    and an operator.  The operator can be placed on the left
    (where it is called the 'operator') or right (where it is called the
    'modifier') of the column expression.

    """
    __visit_name__ = 'unary'

    def __init__(self, element, operator=None, modifier=None,
                 type_=None, negate=None):
        self.operator = operator
        self.modifier = modifier

        # self-group the operand against whichever of operator/modifier
        # is present, so precedence-based parenthesization is applied
        self.element = _literal_as_text(element).\
            self_group(against=self.operator or self.modifier)
        self.type = sqltypes.to_instance(type_)
        self.negate = negate

    @util.memoized_property
    def _order_by_label_element(self):
        # ASC/DESC modifiers are transparent for ORDER BY label matching;
        # delegate to the wrapped element in that case only
        if self.modifier in (operators.desc_op, operators.asc_op):
            return self.element._order_by_label_element
        else:
            return None

    @property
    def _from_objects(self):
        return self.element._from_objects

    def _copy_internals(self, clone=_clone, **kw):
        self.element = clone(self.element, **kw)

    def get_children(self, **kwargs):
        return self.element,

    def compare(self, other, **kw):
        """Compare this :class:`UnaryExpression` against the given
        :class:`.ClauseElement`."""

        return (
            isinstance(other, UnaryExpression) and
            self.operator == other.operator and
            self.modifier == other.modifier and
            self.element.compare(other.element, **kw)
        )

    def _negate(self):
        # when an explicit negation operator was supplied, build the
        # negated form directly, swapping operator and negate so the
        # result can be negated back; otherwise fall back to the
        # generic NOT (...) wrapping of the superclass
        if self.negate is not None:
            return UnaryExpression(
                self.element,
                operator=self.negate,
                negate=self.operator,
                modifier=self.modifier,
                type_=self.type)
        else:
            return super(UnaryExpression, self)._negate()

    def self_group(self, against=None):
        # parenthesize only when this expression's operator binds less
        # tightly than the operator it is placed against
        if self.operator and operators.is_precedent(self.operator,
                                                    against):
            return Grouping(self)
        else:
            return self
-
-
class BinaryExpression(ColumnElement):
    """Represent an expression that is ``LEFT <operator> RIGHT``.

    A :class:`.BinaryExpression` is generated automatically
    whenever two column expressions are used in a Python binary expression::

        >>> from sqlalchemy.sql import column
        >>> column('a') + column('b')
        <sqlalchemy.sql.expression.BinaryExpression object at 0x101029dd0>
        >>> print column('a') + column('b')
        a + b

    """

    __visit_name__ = 'binary'

    def __init__(self, left, right, operator, type_=None,
                 negate=None, modifiers=None):
        # allow compatibility with libraries that
        # refer to BinaryExpression directly and pass strings
        if isinstance(operator, util.string_types):
            operator = operators.custom_op(operator)
        # keep the un-coerced operands; __bool__ compares their hashes
        self._orig = (left, right)
        self.left = _literal_as_text(left).self_group(against=operator)
        self.right = _literal_as_text(right).self_group(against=operator)
        self.operator = operator
        self.type = sqltypes.to_instance(type_)
        self.negate = negate

        if modifiers is None:
            self.modifiers = {}
        else:
            self.modifiers = modifiers

    def __bool__(self):
        # only == / != comparisons of the original operands have a
        # defined truth value, computed via hash equality of the
        # original (pre-coercion) operands
        if self.operator in (operator.eq, operator.ne):
            return self.operator(hash(self._orig[0]), hash(self._orig[1]))
        else:
            raise TypeError("Boolean value of this clause is not defined")

    # Python 2 truth-test protocol
    __nonzero__ = __bool__

    @property
    def is_comparison(self):
        return operators.is_comparison(self.operator)

    @property
    def _from_objects(self):
        return self.left._from_objects + self.right._from_objects

    def _copy_internals(self, clone=_clone, **kw):
        self.left = clone(self.left, **kw)
        self.right = clone(self.right, **kw)

    def get_children(self, **kwargs):
        return self.left, self.right

    def compare(self, other, **kw):
        """Compare this :class:`BinaryExpression` against the
        given :class:`BinaryExpression`."""

        # for commutative operators, also accept a match with the
        # operands swapped
        return (
            isinstance(other, BinaryExpression) and
            self.operator == other.operator and
            (
                self.left.compare(other.left, **kw) and
                self.right.compare(other.right, **kw) or
                (
                    operators.is_commutative(self.operator) and
                    self.left.compare(other.right, **kw) and
                    self.right.compare(other.left, **kw)
                )
            )
        )

    def self_group(self, against=None):
        if operators.is_precedent(self.operator, against):
            return Grouping(self)
        else:
            return self

    def _negate(self):
        # when an explicit negation operator was supplied, build the
        # negated expression directly with a Boolean type; otherwise
        # fall back to the generic NOT (...) of the superclass
        if self.negate is not None:
            return BinaryExpression(
                self.left,
                self.right,
                self.negate,
                negate=self.operator,
                type_=sqltypes.BOOLEANTYPE,
                modifiers=self.modifiers)
        else:
            return super(BinaryExpression, self)._negate()
-
-
class Exists(UnaryExpression):
    """Represent an EXISTS clause wrapping a scalar subquery."""

    __visit_name__ = UnaryExpression.__visit_name__
    # an EXISTS contributes no FROM objects of its own
    _from_objects = []

    def __init__(self, *args, **kwargs):
        # accept either an existing select/scalar-select as the sole
        # positional argument, or select() arguments; default to
        # SELECT * when no arguments are given
        if args and isinstance(args[0], (SelectBase, ScalarSelect)):
            s = args[0]
        else:
            if not args:
                args = ([literal_column('*')],)
            s = select(*args, **kwargs).as_scalar().self_group()

        UnaryExpression.__init__(self, s, operator=operators.exists,
                                 type_=sqltypes.Boolean)

    def select(self, whereclause=None, **params):
        return select([self], whereclause, **params)

    def correlate(self, *fromclause):
        # generative: clone self and replace the inner element
        e = self._clone()
        e.element = self.element.correlate(*fromclause).self_group()
        return e

    def correlate_except(self, *fromclause):
        # generative: clone self and replace the inner element
        e = self._clone()
        e.element = self.element.correlate_except(*fromclause).self_group()
        return e

    def select_from(self, clause):
        """return a new :class:`.Exists` construct, applying the given
        expression to the :meth:`.Select.select_from` method of the select
        statement contained.

        """
        e = self._clone()
        e.element = self.element.select_from(clause).self_group()
        return e

    def where(self, clause):
        """return a new exists() construct with the given expression added to
        its WHERE clause, joined to the existing clause via AND, if any.

        """
        e = self._clone()
        e.element = self.element.where(clause).self_group()
        return e
-
-
class Join(FromClause):
    """represent a ``JOIN`` construct between two :class:`.FromClause`
    elements.

    The public constructor function for :class:`.Join` is the module-level
    :func:`join()` function, as well as the :func:`join()` method available
    off all :class:`.FromClause` subclasses.

    """
    __visit_name__ = 'join'

    def __init__(self, left, right, onclause=None, isouter=False):
        """Construct a new :class:`.Join`.

        The usual entrypoint here is the :func:`~.expression.join`
        function or the :meth:`.FromClause.join` method of any
        :class:`.FromClause` object.

        """
        self.left = _interpret_as_from(left)
        self.right = _interpret_as_from(right).self_group()

        # derive the ON clause from foreign key relationships when one
        # is not given explicitly
        if onclause is None:
            self.onclause = self._match_primaries(self.left, self.right)
        else:
            self.onclause = onclause

        self.isouter = isouter

    @property
    def description(self):
        return "Join object on %s(%d) and %s(%d)" % (
            self.left.description,
            id(self.left),
            self.right.description,
            id(self.right))

    def is_derived_from(self, fromclause):
        return fromclause is self or \
            self.left.is_derived_from(fromclause) or \
            self.right.is_derived_from(fromclause)

    def self_group(self, against=None):
        return FromGrouping(self)

    def _populate_column_collection(self):
        columns = [c for c in self.left.columns] + \
            [c for c in self.right.columns]

        # the join's primary key is the reduced set of PK columns from
        # both sides, de-duplicated along the ON clause
        self.primary_key.extend(sqlutil.reduce_columns(
            (c for c in columns if c.primary_key), self.onclause))
        self._columns.update((col._label, col) for col in columns)
        self.foreign_keys.update(itertools.chain(
            *[col.foreign_keys for col in columns]))

    def _refresh_for_new_column(self, column):
        # try the left side first, then the right; propagate the new
        # column into our own collections only if already populated
        col = self.left._refresh_for_new_column(column)
        if col is None:
            col = self.right._refresh_for_new_column(column)
        if col is not None:
            if self._cols_populated:
                self._columns[col._label] = col
                self.foreign_keys.add(col)
                if col.primary_key:
                    self.primary_key.add(col)
                return col
        return None

    def _copy_internals(self, clone=_clone, **kw):
        self._reset_exported()
        self.left = clone(self.left, **kw)
        self.right = clone(self.right, **kw)
        self.onclause = clone(self.onclause, **kw)

    def get_children(self, **kwargs):
        return self.left, self.right, self.onclause

    def _match_primaries(self, left, right):
        # when the left side is itself a Join, restrict join-condition
        # discovery to its right-most element
        if isinstance(left, Join):
            left_right = left.right
        else:
            left_right = None
        return sqlutil.join_condition(left, right, a_subset=left_right)

    def select(self, whereclause=None, **kwargs):
        """Create a :class:`.Select` from this :class:`.Join`.

        The equivalent long-hand form, given a :class:`.Join` object
        ``j``, is::

            from sqlalchemy import select
            j = select([j.left, j.right], **kw).\\
                where(whereclause).\\
                select_from(j)

        :param whereclause: the WHERE criterion that will be sent to
          the :func:`select()` function

        :param \**kwargs: all other kwargs are sent to the
          underlying :func:`select()` function.

        """
        collist = [self.left, self.right]

        return select(collist, whereclause, from_obj=[self], **kwargs)

    @property
    def bind(self):
        return self.left.bind or self.right.bind

    def alias(self, name=None, flat=False):
        """return an alias of this :class:`.Join`.

        The default behavior here is to first produce a SELECT
        construct from this :class:`.Join`, then to produce a
        :class:`.Alias` from that.  So given a join of the form::

            j = table_a.join(table_b, table_a.c.id == table_b.c.a_id)

        The JOIN by itself would look like::

            table_a JOIN table_b ON table_a.id = table_b.a_id

        Whereas the alias of the above, ``j.alias()``, would in a
        SELECT context look like::

            (SELECT table_a.id AS table_a_id, table_b.id AS table_b_id,
                table_b.a_id AS table_b_a_id
                FROM table_a
                JOIN table_b ON table_a.id = table_b.a_id) AS anon_1

        The equivalent long-hand form, given a :class:`.Join` object
        ``j``, is::

            from sqlalchemy import select, alias
            j = alias(
                select([j.left, j.right]).\\
                    select_from(j).\\
                    with_labels(True).\\
                    correlate(False),
                name=name
            )

        The selectable produced by :meth:`.Join.alias` features the same
        columns as that of the two individual selectables presented under
        a single name - the individual columns are "auto-labeled", meaning
        the ``.c.`` collection of the resulting :class:`.Alias` represents
        the names of the individual columns using a
        ``<tablename>_<columnname>`` scheme::

            j.c.table_a_id
            j.c.table_b_a_id

        :meth:`.Join.alias` also features an alternate
        option for aliasing joins which produces no enclosing SELECT and
        does not normally apply labels to the column names.  The
        ``flat=True`` option will call :meth:`.FromClause.alias`
        against the left and right sides individually.
        Using this option, no new ``SELECT`` is produced;
        we instead, from a construct as below::

            j = table_a.join(table_b, table_a.c.id == table_b.c.a_id)
            j = j.alias(flat=True)

        we get a result like this::

            table_a AS table_a_1 JOIN table_b AS table_b_1 ON
            table_a_1.id = table_b_1.a_id

        The ``flat=True`` argument is also propagated to the contained
        selectables, so that a composite join such as::

            j = table_a.join(
                    table_b.join(table_c,
                            table_b.c.id == table_c.c.b_id),
                    table_b.c.a_id == table_a.c.id
                ).alias(flat=True)

        Will produce an expression like::

            table_a AS table_a_1 JOIN (
                    table_b AS table_b_1 JOIN table_c AS table_c_1
                    ON table_b_1.id = table_c_1.b_id
            ) ON table_a_1.id = table_b_1.a_id

        The standalone :func:`expression.alias` function as well as the
        base :meth:`.FromClause.alias` method also support the ``flat=True``
        argument as a no-op, so that the argument can be passed to the
        ``alias()`` method of any selectable.

        .. versionadded:: 0.9.0 Added the ``flat=True`` option to create
          "aliases" of joins without enclosing inside of a SELECT
          subquery.

        :param name: name given to the alias.

        :param flat: if True, produce an alias of the left and right
         sides of this :class:`.Join` and return the join of those
         two selectables.   This produces join expression that does not
         include an enclosing SELECT.

         .. versionadded:: 0.9.0

        .. seealso::

            :func:`~.expression.alias`

        """
        if flat:
            assert name is None, "Can't send name argument with flat"
            # alias each side individually, then re-join with the ON
            # clause rewritten to reference the new aliases
            left_a, right_a = self.left.alias(flat=True), \
                self.right.alias(flat=True)
            adapter = sqlutil.ClauseAdapter(left_a).\
                chain(sqlutil.ClauseAdapter(right_a))

            return left_a.join(right_a,
                               adapter.traverse(self.onclause),
                               isouter=self.isouter)
        else:
            return self.select(use_labels=True, correlate=False).alias(name)

    @property
    def _hide_froms(self):
        return itertools.chain(*[_from_objects(x.left, x.right)
                                 for x in self._cloned_set])

    @property
    def _from_objects(self):
        return [self] + \
            self.onclause._from_objects + \
            self.left._from_objects + \
            self.right._from_objects
-
-
class Alias(FromClause):
    """Represents a table or selectable alias (AS).

    Represents an alias, as typically applied to any table or
    sub-select within a SQL statement using the ``AS`` keyword (or
    without the keyword on certain databases such as Oracle).

    This object is constructed from the :func:`~.expression.alias` module
    level function as well as the :meth:`.FromClause.alias` method available
    on all :class:`.FromClause` subclasses.

    """

    __visit_name__ = 'alias'
    named_with_column = True

    def __init__(self, selectable, name=None):
        # 'original' is the innermost non-Alias selectable, unwrapping
        # any chain of nested aliases
        baseselectable = selectable
        while isinstance(baseselectable, Alias):
            baseselectable = baseselectable.element
        self.original = baseselectable
        self.supports_execution = baseselectable.supports_execution
        if self.supports_execution:
            self._execution_options = baseselectable._execution_options
        self.element = selectable
        if name is None:
            # generate an anonymous name, seeded with the original's
            # name when it has one
            if self.original.named_with_column:
                name = getattr(self.original, 'name', None)
            name = _anonymous_label('%%(%d %s)s' % (id(self), name
                                                    or 'anon'))
        self.name = name

    @property
    def description(self):
        if util.py3k:
            return self.name
        else:
            return self.name.encode('ascii', 'backslashreplace')

    def as_scalar(self):
        try:
            return self.element.as_scalar()
        except AttributeError:
            raise AttributeError("Element %s does not support "
                                 "'as_scalar()'" % self.element)

    def is_derived_from(self, fromclause):
        if fromclause in self._cloned_set:
            return True
        return self.element.is_derived_from(fromclause)

    def _populate_column_collection(self):
        for col in self.element.columns:
            col._make_proxy(self)

    def _refresh_for_new_column(self, column):
        col = self.element._refresh_for_new_column(column)
        if col is not None:
            if not self._cols_populated:
                return None
            else:
                return col._make_proxy(self)
        else:
            return None

    def _copy_internals(self, clone=_clone, **kw):
        # don't apply anything to an aliased Table
        # for now.   May want to drive this from
        # the given **kw.
        if isinstance(self.element, TableClause):
            return
        self._reset_exported()
        self.element = clone(self.element, **kw)
        # re-derive 'original' from the freshly cloned element
        baseselectable = self.element
        while isinstance(baseselectable, Alias):
            baseselectable = baseselectable.element
        self.original = baseselectable

    def get_children(self, column_collections=True, **kw):
        if column_collections:
            for c in self.c:
                yield c
        yield self.element

    @property
    def _from_objects(self):
        return [self]

    @property
    def bind(self):
        return self.element.bind
-
-
class CTE(Alias):
    """Represent a Common Table Expression.

    The :class:`.CTE` object is obtained using the
    :meth:`.SelectBase.cte` method from any selectable.
    See that method for complete examples.

    .. versionadded:: 0.7.6

    """
    __visit_name__ = 'cte'

    def __init__(self, selectable,
                 name=None,
                 recursive=False,
                 cte_alias=False,
                 _restates=frozenset()):
        self.recursive = recursive
        # cte_alias holds the name of the CTE this one aliases, or False
        self.cte_alias = cte_alias
        # _restates tracks CTEs already rendered, so that unions of a
        # recursive CTE against itself are not re-emitted
        self._restates = _restates
        super(CTE, self).__init__(selectable, name=name)

    def alias(self, name=None, flat=False):
        # NOTE: the 'flat' argument is accepted for signature
        # compatibility with Alias.alias but is not used here
        return CTE(
            self.original,
            name=name,
            recursive=self.recursive,
            cte_alias=self.name
        )

    def union(self, other):
        return CTE(
            self.original.union(other),
            name=self.name,
            recursive=self.recursive,
            _restates=self._restates.union([self])
        )

    def union_all(self, other):
        return CTE(
            self.original.union_all(other),
            name=self.name,
            recursive=self.recursive,
            _restates=self._restates.union([self])
        )
-
-
class Grouping(ColumnElement):
    """Represent a parenthesized grouping within a column expression."""

    __visit_name__ = 'grouping'

    def __init__(self, element):
        self.element = element
        self.type = getattr(element, 'type', sqltypes.NULLTYPE)

    @property
    def _label(self):
        inner = getattr(self.element, '_label', None)
        return inner or self.anon_label

    def _copy_internals(self, clone=_clone, **kw):
        self.element = clone(self.element, **kw)

    def get_children(self, **kwargs):
        return (self.element,)

    @property
    def _from_objects(self):
        return self.element._from_objects

    def __getattr__(self, attr):
        # delegate any other attribute access to the wrapped element
        return getattr(self.element, attr)

    def __getstate__(self):
        # only element/type participate in pickling
        return {'element': self.element, 'type': self.type}

    def __setstate__(self, state):
        self.element = state['element']
        self.type = state['type']

    def compare(self, other, **kw):
        if not isinstance(other, Grouping):
            return False
        return self.element.compare(other.element)
-
-
class FromGrouping(FromClause):
    """Represent a grouping of a FROM clause"""
    __visit_name__ = 'grouping'

    def __init__(self, element):
        self.element = element

    def _init_collections(self):
        # collections are delegated to the wrapped element; nothing to
        # initialize locally
        pass

    @property
    def columns(self):
        return self.element.columns

    @property
    def primary_key(self):
        return self.element.primary_key

    @property
    def foreign_keys(self):
        return self.element.foreign_keys

    def is_derived_from(self, element):
        return self.element.is_derived_from(element)

    def alias(self, **kw):
        # alias the inner element, keeping the grouping wrapper
        return FromGrouping(self.element.alias(**kw))

    @property
    def _hide_froms(self):
        return self.element._hide_froms

    def get_children(self, **kwargs):
        return self.element,

    def _copy_internals(self, clone=_clone, **kw):
        self.element = clone(self.element, **kw)

    @property
    def _from_objects(self):
        return self.element._from_objects

    def __getattr__(self, attr):
        # delegate any other attribute access to the wrapped element
        return getattr(self.element, attr)

    def __getstate__(self):
        # only the wrapped element participates in pickling
        return {'element': self.element}

    def __setstate__(self, state):
        self.element = state['element']
-
-
class Over(ColumnElement):
    """Represent an OVER clause.

    This is a special operator against a so-called
    "window" function, as well as any aggregate function,
    which produces results relative to the result set
    itself.  It's supported only by certain database
    backends.

    """
    __visit_name__ = 'over'

    # class-level defaults when no ORDER BY / PARTITION BY is given
    order_by = None
    partition_by = None

    def __init__(self, func, partition_by=None, order_by=None):
        self.func = func
        if partition_by is not None:
            self.partition_by = ClauseList(*util.to_list(partition_by))
        if order_by is not None:
            self.order_by = ClauseList(*util.to_list(order_by))

    @util.memoized_property
    def type(self):
        # the OVER expression takes on the type of the wrapped function
        return self.func.type

    def get_children(self, **kwargs):
        children = []
        for clause in (self.func, self.partition_by, self.order_by):
            if clause is not None:
                children.append(clause)
        return children

    def _copy_internals(self, clone=_clone, **kw):
        self.func = clone(self.func, **kw)
        if self.partition_by is not None:
            self.partition_by = clone(self.partition_by, **kw)
        if self.order_by is not None:
            self.order_by = clone(self.order_by, **kw)

    @property
    def _from_objects(self):
        froms = []
        for clause in (self.func, self.partition_by, self.order_by):
            if clause is not None:
                froms.extend(clause._from_objects)
        return froms
-
-
class Label(ColumnElement):
    """Represents a column label (AS).

    Represent a label, as typically applied to any column-level
    element using the ``AS`` sql keyword.

    This object is constructed from the :func:`label()` module level
    function as well as the :func:`label()` method available on all
    :class:`.ColumnElement` subclasses.

    """

    __visit_name__ = 'label'

    def __init__(self, name, element, type_=None):
        # unwrap nested labels so we label the innermost element directly
        while isinstance(element, Label):
            element = element.element
        if name:
            self.name = name
        else:
            # generate an anonymous label, uniqued by id()
            self.name = _anonymous_label('%%(%d %s)s' % (id(self),
                                         getattr(element, 'name', 'anon')))
        self.key = self._label = self._key_label = self.name
        self._element = element
        self._type = type_
        self.quote = element.quote
        self._proxies = [element]

    @util.memoized_property
    def _order_by_label_element(self):
        # a Label is itself the element ORDER BY matches against
        return self

    @util.memoized_property
    def type(self):
        # explicit type_ takes precedence over the element's own type
        return sqltypes.to_instance(
            self._type or getattr(self._element, 'type', None)
        )

    @util.memoized_property
    def element(self):
        return self._element.self_group(against=operators.as_)

    def self_group(self, against=None):
        # re-label the grouped element only if grouping actually
        # produced a new object
        sub_element = self._element.self_group(against=against)
        if sub_element is not self._element:
            return Label(self.name,
                         sub_element,
                         type_=self._type)
        else:
            return self

    @property
    def primary_key(self):
        return self.element.primary_key

    @property
    def foreign_keys(self):
        return self.element.foreign_keys

    def get_children(self, **kwargs):
        return self.element,

    def _copy_internals(self, clone=_clone, **kw):
        self.element = clone(self.element, **kw)

    @property
    def _from_objects(self):
        return self.element._from_objects

    def _make_proxy(self, selectable, name=None, **kw):
        # proxy the wrapped element under this label's name, then record
        # this Label in the proxy chain and force our explicit type, if any
        e = self.element._make_proxy(selectable,
                                     name=name if name else self.name)
        e._proxies.append(self)
        if self._type is not None:
            e.type = self._type
        return e
-
-
class ColumnClause(Immutable, ColumnElement):
    """Represents a generic column expression from any textual string.

    This includes columns associated with tables, aliases and select
    statements, but also any arbitrary text.  May or may not be bound
    to an underlying :class:`.Selectable`.

    :class:`.ColumnClause` is constructed by itself typically via
    the :func:`~.expression.column` function.  It may be placed directly
    into constructs such as :func:`.select` constructs::

        from sqlalchemy.sql import column, select

        c1, c2 = column("c1"), column("c2")
        s = select([c1, c2]).where(c1==5)

    There is also a variant on :func:`~.expression.column` known
    as :func:`~.expression.literal_column` - the difference is that
    in the latter case, the string value is assumed to be an exact
    expression, rather than a column name, so that no quoting rules
    or similar are applied::

        from sqlalchemy.sql import literal_column, select

        s = select([literal_column("5 + 7")])

    :class:`.ColumnClause` can also be used in a table-like
    fashion by combining the :func:`~.expression.column` function
    with the :func:`~.expression.table` function, to produce
    a "lightweight" form of table metadata::

        from sqlalchemy.sql import table, column

        user = table("user",
                column("id"),
                column("name"),
                column("description"),
        )

    The above construct can be created in an ad-hoc fashion and is
    not associated with any :class:`.schema.MetaData`, unlike its
    more full fledged :class:`.schema.Table` counterpart.

    :param text: the text of the element.

    :param selectable: parent selectable.

    :param type: :class:`.types.TypeEngine` object which can associate
      this :class:`.ColumnClause` with a type.

    :param is_literal: if True, the :class:`.ColumnClause` is assumed to
      be an exact expression that will be delivered to the output with no
      quoting rules applied regardless of case sensitive settings.  the
      :func:`literal_column()` function is usually used to create such a
      :class:`.ColumnClause`.


    """
    __visit_name__ = 'column'

    # ColumnClause has no schema-level default machinery
    onupdate = default = server_default = server_onupdate = None

    # group of memoized properties which are expired together whenever
    # .table is re-assigned (see _set_table)
    _memoized_property = util.group_expirable_memoized_property()

    def __init__(self, text, selectable=None, type_=None, is_literal=False):
        self.key = self.name = text
        self.table = selectable
        self.type = sqltypes.to_instance(type_)
        self.is_literal = is_literal

    def _compare_name_for_result(self, other):
        # literal or table-less columns fall back to plain name
        # comparison; otherwise match via proxy-set intersection
        if self.is_literal or \
                self.table is None or \
                not hasattr(other, 'proxy_set') or (
                    isinstance(other, ColumnClause) and other.is_literal
                ):
            return super(ColumnClause, self).\
                _compare_name_for_result(other)
        else:
            return other.proxy_set.intersection(self.proxy_set)

    def _get_table(self):
        return self.__dict__['table']

    def _set_table(self, table):
        # expire memoized _from_objects/_label/_key_label, which all
        # derive from the current table
        self._memoized_property.expire_instance(self)
        self.__dict__['table'] = table
    table = property(_get_table, _set_table)

    @_memoized_property
    def _from_objects(self):
        t = self.table
        if t is not None:
            return [t]
        else:
            return []

    @util.memoized_property
    def description(self):
        if util.py3k:
            return self.name
        else:
            return self.name.encode('ascii', 'backslashreplace')

    @_memoized_property
    def _key_label(self):
        # label keyed on .key when it differs from .name
        if self.key != self.name:
            return self._gen_label(self.key)
        else:
            return self._label

    @_memoized_property
    def _label(self):
        return self._gen_label(self.name)

    def _gen_label(self, name):
        """Generate a "<table>_<name>" style label, uniquified against
        existing columns of the parent table; literal columns get no
        label and table-less columns label as their bare name."""
        t = self.table
        if self.is_literal:
            return None

        elif t is not None and t.named_with_column:
            if getattr(t, 'schema', None):
                label = t.schema.replace('.', '_') + "_" + \
                    t.name + "_" + name
            else:
                label = t.name + "_" + name

            # ensure the label name doesn't conflict with that
            # of an existing column
            if label in t.c:
                _label = label
                counter = 1
                while _label in t.c:
                    _label = label + "_" + str(counter)
                    counter += 1
                label = _label

            return _as_truncated(label)

        else:
            return name

    def _bind_param(self, operator, obj):
        # literals compared to this column become bound parameters named
        # after the column and typed relative to it
        return BindParameter(self.name, obj,
                             _compared_to_operator=operator,
                             _compared_to_type=self.type,
                             unique=True)

    def _make_proxy(self, selectable, name=None, attach=True,
                    name_is_truncatable=False, **kw):
        # propagate the "is_literal" flag only if we are keeping our name,
        # otherwise its considered to be a label
        is_literal = self.is_literal and (name is None or name == self.name)
        c = self._constructor(
            _as_truncated(name or self.name) if \
                name_is_truncatable else \
                (name or self.name),
            selectable=selectable,
            type_=self.type,
            is_literal=is_literal
        )
        if name is None:
            c.key = self.key
        c._proxies = [self]
        if selectable._is_clone_of is not None:
            c._is_clone_of = \
                selectable._is_clone_of.columns.get(c.key)

        if attach:
            selectable._columns[c.key] = c
        return c
-
-
class TableClause(Immutable, FromClause):
    """Represents a minimal "table" construct.

    The constructor for :class:`.TableClause` is the
    :func:`~.expression.table` function.   This produces
    a lightweight table object that has only a name and a
    collection of columns, which are typically produced
    by the :func:`~.expression.column` function::

        from sqlalchemy.sql import table, column

        user = table("user",
                column("id"),
                column("name"),
                column("description"),
        )

    The :class:`.TableClause` construct serves as the base for
    the more commonly used :class:`~.schema.Table` object, providing
    the usual set of :class:`~.expression.FromClause` services including
    the ``.c.`` collection and statement generation methods.

    It does **not** provide all the additional schema-level services
    of :class:`~.schema.Table`, including constraints, references to other
    tables, or support for :class:`.MetaData`-level services.  It's useful
    on its own as an ad-hoc construct used to generate quick SQL
    statements when a more fully fledged :class:`~.schema.Table`
    is not on hand.

    """

    __visit_name__ = 'table'

    named_with_column = True

    implicit_returning = False
    """:class:`.TableClause` doesn't support having a primary key or column
    -level defaults, so implicit returning doesn't apply."""

    _autoincrement_column = None
    """No PK or default support so no autoincrement column."""

    def __init__(self, name, *columns):
        super(TableClause, self).__init__()
        self.name = self.fullname = name
        self._columns = ColumnCollection()
        self.primary_key = ColumnSet()
        self.foreign_keys = set()
        for c in columns:
            self.append_column(c)

    def _init_collections(self):
        # collections are created directly in __init__; nothing to do
        pass

    @util.memoized_property
    def description(self):
        if util.py3k:
            return self.name
        else:
            return self.name.encode('ascii', 'backslashreplace')

    def append_column(self, c):
        # add the column to our collection and claim parentage of it
        self._columns[c.key] = c
        c.table = self

    def get_children(self, column_collections=True, **kwargs):
        if column_collections:
            return [c for c in self.c]
        else:
            return []

    def count(self, whereclause=None, **params):
        """return a SELECT COUNT generated against this
        :class:`.TableClause`."""

        # count against the first primary key column when one exists,
        # otherwise the first column of the table
        if self.primary_key:
            col = list(self.primary_key)[0]
        else:
            col = list(self.columns)[0]
        return select(
            [func.count(col).label('tbl_row_count')],
            whereclause,
            from_obj=[self],
            **params)

    def insert(self, values=None, inline=False, **kwargs):
        """Generate an :func:`.insert` construct against this
        :class:`.TableClause`.

        E.g.::

            table.insert().values(name='foo')

        See :func:`.insert` for argument and usage information.

        """

        return insert(self, values=values, inline=inline, **kwargs)

    def update(self, whereclause=None, values=None, inline=False, **kwargs):
        """Generate an :func:`.update` construct against this
        :class:`.TableClause`.

        E.g.::

            table.update().where(table.c.id==7).values(name='foo')

        See :func:`.update` for argument and usage information.

        """

        return update(self, whereclause=whereclause,
                      values=values, inline=inline, **kwargs)

    def delete(self, whereclause=None, **kwargs):
        """Generate a :func:`.delete` construct against this
        :class:`.TableClause`.

        E.g.::

            table.delete().where(table.c.id==7)

        See :func:`.delete` for argument and usage information.

        """

        return delete(self, whereclause, **kwargs)

    @property
    def _from_objects(self):
        return [self]
-
-
class SelectBase(Executable, FromClause):
    """Base class for :class:`.Select` and ``CompoundSelects``."""

    # class-level defaults; instance-level values are assigned in
    # __init__() only when the corresponding argument is supplied.
    _order_by_clause = ClauseList()
    _group_by_clause = ClauseList()
    _limit = None
    _offset = None

    def __init__(self,
                 use_labels=False,
                 for_update=False,
                 limit=None,
                 offset=None,
                 order_by=None,
                 group_by=None,
                 bind=None,
                 autocommit=None):
        self.use_labels = use_labels
        self.for_update = for_update
        if autocommit is not None:
            # 'autocommit' as a constructor argument is deprecated in
            # favor of execution_options(autocommit=True)
            util.warn_deprecated('autocommit on select() is '
                                 'deprecated. Use .execution_options(a'
                                 'utocommit=True)')
            self._execution_options = \
                self._execution_options.union(
                    {'autocommit': autocommit})
        if limit is not None:
            self._limit = util.asint(limit)
        if offset is not None:
            self._offset = util.asint(offset)
        self._bind = bind

        if order_by is not None:
            self._order_by_clause = ClauseList(*util.to_list(order_by))
        if group_by is not None:
            self._group_by_clause = ClauseList(*util.to_list(group_by))

    def as_scalar(self):
        """return a 'scalar' representation of this selectable, which can be
        used as a column expression.

        Typically, a select statement which has only one column in its
        columns clause is eligible to be used as a scalar expression.

        The returned object is an instance of
        :class:`ScalarSelect`.

        """
        return ScalarSelect(self)

    @_generative
    def apply_labels(self):
        """return a new selectable with the 'use_labels' flag set to True.

        This will result in column expressions being generated using labels
        against their table name, such as "SELECT somecolumn AS
        tablename_somecolumn". This allows selectables which contain multiple
        FROM clauses to produce a unique set of column names regardless of
        name conflicts among the individual FROM clauses.

        """
        self.use_labels = True

    def label(self, name):
        """return a 'scalar' representation of this selectable, embedded as a
        subquery with a label.

        .. seealso::

            :meth:`~.SelectBase.as_scalar`.

        """
        return self.as_scalar().label(name)

    def cte(self, name=None, recursive=False):
        """Return a new :class:`.CTE`, or Common Table Expression instance.

        Common table expressions are a SQL standard whereby SELECT
        statements can draw upon secondary statements specified along
        with the primary statement, using a clause called "WITH".
        Special semantics regarding UNION can also be employed to
        allow "recursive" queries, where a SELECT statement can draw
        upon the set of rows that have previously been selected.

        SQLAlchemy detects :class:`.CTE` objects, which are treated
        similarly to :class:`.Alias` objects, as special elements
        to be delivered to the FROM clause of the statement as well
        as to a WITH clause at the top of the statement.

        .. versionadded:: 0.7.6

        :param name: name given to the common table expression.  Like
         :meth:`.FromClause.alias`, the name can be left as ``None``
         in which case an anonymous symbol will be used at query
         compile time.
        :param recursive: if ``True``, will render ``WITH RECURSIVE``.
         A recursive common table expression is intended to be used in
         conjunction with UNION ALL in order to derive rows
         from those already selected.

        The following examples illustrate two examples from
        Postgresql's documentation at
        http://www.postgresql.org/docs/8.4/static/queries-with.html.

        Example 1, non recursive::

            from sqlalchemy import Table, Column, String, Integer, MetaData, \\
                select, func

            metadata = MetaData()

            orders = Table('orders', metadata,
                Column('region', String),
                Column('amount', Integer),
                Column('product', String),
                Column('quantity', Integer)
            )

            regional_sales = select([
                        orders.c.region,
                        func.sum(orders.c.amount).label('total_sales')
                ]).group_by(orders.c.region).cte("regional_sales")


            top_regions = select([regional_sales.c.region]).\\
                    where(
                        regional_sales.c.total_sales >
                        select([
                            func.sum(regional_sales.c.total_sales)/10
                        ])
                    ).cte("top_regions")

            statement = select([
                        orders.c.region,
                        orders.c.product,
                        func.sum(orders.c.quantity).label("product_units"),
                        func.sum(orders.c.amount).label("product_sales")
                ]).where(orders.c.region.in_(
                    select([top_regions.c.region])
                )).group_by(orders.c.region, orders.c.product)

            result = conn.execute(statement).fetchall()

        Example 2, WITH RECURSIVE::

            from sqlalchemy import Table, Column, String, Integer, MetaData, \\
                select, func

            metadata = MetaData()

            parts = Table('parts', metadata,
                Column('part', String),
                Column('sub_part', String),
                Column('quantity', Integer),
            )

            included_parts = select([
                                parts.c.sub_part,
                                parts.c.part,
                                parts.c.quantity]).\\
                                where(parts.c.part=='our part').\\
                                cte(recursive=True)


            incl_alias = included_parts.alias()
            parts_alias = parts.alias()
            included_parts = included_parts.union_all(
                select([
                    parts_alias.c.part,
                    parts_alias.c.sub_part,
                    parts_alias.c.quantity
                ]).
                    where(parts_alias.c.part==incl_alias.c.sub_part)
            )

            statement = select([
                        included_parts.c.sub_part,
                        func.sum(included_parts.c.quantity).
                            label('total_quantity')
                    ]).\\
                    select_from(included_parts.join(parts,
                                included_parts.c.part==parts.c.part)).\\
                    group_by(included_parts.c.sub_part)

            result = conn.execute(statement).fetchall()


        .. seealso::

            :meth:`.orm.query.Query.cte` - ORM version of
            :meth:`.SelectBase.cte`.

        """
        return CTE(self, name=name, recursive=recursive)

    @_generative
    @util.deprecated('0.6',
                     message=":func:`.autocommit` is deprecated. Use "
                             ":func:`.Executable.execution_options` with the "
                             "'autocommit' flag.")
    def autocommit(self):
        """return a new selectable with the 'autocommit' flag set to
        True."""

        self._execution_options = \
            self._execution_options.union({'autocommit': True})

    def _generate(self):
        """Override the default _generate() method to also clear out
        exported collections."""

        # shallow-copy the instance without invoking __init__
        s = self.__class__.__new__(self.__class__)
        s.__dict__ = self.__dict__.copy()
        s._reset_exported()
        return s

    @_generative
    def limit(self, limit):
        """return a new selectable with the given LIMIT criterion
        applied."""

        self._limit = util.asint(limit)

    @_generative
    def offset(self, offset):
        """return a new selectable with the given OFFSET criterion
        applied."""

        self._offset = util.asint(offset)

    @_generative
    def order_by(self, *clauses):
        """return a new selectable with the given list of ORDER BY
        criterion applied.

        The criterion will be appended to any pre-existing ORDER BY
        criterion.

        """

        self.append_order_by(*clauses)

    @_generative
    def group_by(self, *clauses):
        """return a new selectable with the given list of GROUP BY
        criterion applied.

        The criterion will be appended to any pre-existing GROUP BY
        criterion.

        """

        self.append_group_by(*clauses)

    def append_order_by(self, *clauses):
        """Append the given ORDER BY criterion applied to this selectable.

        The criterion will be appended to any pre-existing ORDER BY criterion.

        This is an **in-place** mutation method; the
        :meth:`~.SelectBase.order_by` method is preferred, as it provides
        standard :term:`method chaining`.

        """
        # a single None argument resets any existing ORDER BY
        if len(clauses) == 1 and clauses[0] is None:
            self._order_by_clause = ClauseList()
        else:
            if getattr(self, '_order_by_clause', None) is not None:
                clauses = list(self._order_by_clause) + list(clauses)
            self._order_by_clause = ClauseList(*clauses)

    def append_group_by(self, *clauses):
        """Append the given GROUP BY criterion applied to this selectable.

        The criterion will be appended to any pre-existing GROUP BY criterion.

        This is an **in-place** mutation method; the
        :meth:`~.SelectBase.group_by` method is preferred, as it provides
        standard :term:`method chaining`.

        """
        # a single None argument resets any existing GROUP BY
        if len(clauses) == 1 and clauses[0] is None:
            self._group_by_clause = ClauseList()
        else:
            if getattr(self, '_group_by_clause', None) is not None:
                clauses = list(self._group_by_clause) + list(clauses)
            self._group_by_clause = ClauseList(*clauses)

    @property
    def _from_objects(self):
        # a SELECT acts as its own FROM element
        return [self]
-
-
class ScalarSelect(Generative, Grouping):
    """Represent a SELECT used as a 'scalar' column expression.

    Produced by :meth:`.SelectBase.as_scalar`; the wrapped selectable's
    scalar type (via ``_scalar_type()``) becomes this expression's type.
    """

    # contributes no FROM elements to an enclosing statement
    _from_objects = []

    def __init__(self, element):
        self.element = element
        self.type = element._scalar_type()

    @property
    def columns(self):
        # a scalar select is itself a column expression; it does not
        # export a column collection
        raise exc.InvalidRequestError('Scalar Select expression has no '
                'columns; use this object directly within a '
                'column-level expression.')
    c = columns

    @_generative
    def where(self, crit):
        """Apply a WHERE clause to the SELECT statement referred to
        by this :class:`.ScalarSelect`.

        """
        self.element = self.element.where(crit)

    def self_group(self, **kwargs):
        # already rendered with grouping parenthesis (Grouping subclass)
        return self
-
-
class CompoundSelect(SelectBase):
    """Forms the basis of ``UNION``, ``UNION ALL``, and other
    SELECT-based set operations."""

    __visit_name__ = 'compound_select'

    # keyword symbols rendered between the component SELECTs
    UNION = util.symbol('UNION')
    UNION_ALL = util.symbol('UNION ALL')
    EXCEPT = util.symbol('EXCEPT')
    EXCEPT_ALL = util.symbol('EXCEPT ALL')
    INTERSECT = util.symbol('INTERSECT')
    INTERSECT_ALL = util.symbol('INTERSECT ALL')

    def __init__(self, keyword, *selects, **kwargs):
        self._auto_correlate = kwargs.pop('correlate', False)
        self.keyword = keyword
        self.selects = []

        # column count of the first select; all others must match
        numcols = None

        # some DBs do not like ORDER BY in the inner queries of a UNION, etc.
        for n, s in enumerate(selects):
            s = _clause_element_as_expr(s)

            # NOTE(review): a first select with zero columns leaves
            # numcols falsy, skipping the count check -- presumably
            # rejected elsewhere; confirm.
            if not numcols:
                numcols = len(s.c)
            elif len(s.c) != numcols:
                raise exc.ArgumentError('All selectables passed to '
                        'CompoundSelect must have identical numbers of '
                        'columns; select #%d has %d columns, select '
                        '#%d has %d' % (1, len(self.selects[0].c), n
                        + 1, len(s.c)))

            self.selects.append(s.self_group(self))

        SelectBase.__init__(self, **kwargs)

    def _scalar_type(self):
        # type is taken from the first component select
        return self.selects[0]._scalar_type()

    def self_group(self, against=None):
        return FromGrouping(self)

    def is_derived_from(self, fromclause):
        # derived if any component select is derived from the clause
        for s in self.selects:
            if s.is_derived_from(fromclause):
                return True
        return False

    def _populate_column_collection(self):
        # walk the component selects' columns in parallel, one proxy
        # column per position
        for cols in zip(*[s.c for s in self.selects]):

            # this is a slightly hacky thing - the union exports a
            # column that resembles just that of the *first* selectable.
            # to get at a "composite" column, particularly foreign keys,
            # you have to dig through the proxies collection which we
            # generate below. We may want to improve upon this, such as
            # perhaps _make_proxy can accept a list of other columns
            # that are "shared" - schema.column can then copy all the
            # ForeignKeys in. this would allow the union() to have all
            # those fks too.

            proxy = cols[0]._make_proxy(self,
                    name=cols[0]._label if self.use_labels else None,
                    key=cols[0]._key_label if self.use_labels else None)

            # hand-construct the "_proxies" collection to include all
            # derived columns place a 'weight' annotation corresponding
            # to how low in the list of select()s the column occurs, so
            # that the corresponding_column() operation can resolve
            # conflicts
            proxy._proxies = [c._annotate({'weight': i + 1}) for (i,
                    c) in enumerate(cols)]

    def _refresh_for_new_column(self, column):
        for s in self.selects:
            s._refresh_for_new_column(column)

        # nothing more to do until the column collection is populated
        if not self._cols_populated:
            return None

        raise NotImplementedError("CompoundSelect constructs don't support "
                "addition of columns to underlying selectables")

    def _copy_internals(self, clone=_clone, **kw):
        # apply the clone function to component selects and
        # ORDER BY / GROUP BY clauses
        self._reset_exported()
        self.selects = [clone(s, **kw) for s in self.selects]
        if hasattr(self, '_col_map'):
            del self._col_map
        for attr in ('_order_by_clause', '_group_by_clause'):
            if getattr(self, attr) is not None:
                setattr(self, attr, clone(getattr(self, attr), **kw))

    def get_children(self, column_collections=True, **kwargs):
        return (column_collections and list(self.c) or []) \
            + [self._order_by_clause, self._group_by_clause] \
            + list(self.selects)

    def bind(self):
        # explicit bind wins; otherwise the first component select
        # with a bind supplies it (for/else: None when none found)
        if self._bind:
            return self._bind
        for s in self.selects:
            e = s.bind
            if e:
                return e
        else:
            return None

    def _set_bind(self, bind):
        self._bind = bind
    bind = property(bind, _set_bind)
-
-
class HasPrefixes(object):
    """Mixin providing the generative :meth:`.prefix_with` method and
    the ``_prefixes`` storage it maintains."""

    # tuple of (clause, dialect-name-or-None) pairs
    _prefixes = ()

    @_generative
    def prefix_with(self, *expr, **kw):
        """Add one or more expressions following the statement keyword, i.e.
        SELECT, INSERT, UPDATE, or DELETE.  Generative.

        Used to support backend-specific prefix keywords such as those
        provided by MySQL, e.g.::

            stmt = table.insert().prefix_with("LOW_PRIORITY", dialect="mysql")

        Prefixes accumulate across multiple calls to
        :meth:`.prefix_with`.

        :param \*expr: textual or :class:`.ClauseElement` construct which
         will be rendered following the INSERT, UPDATE, or DELETE
         keyword.
        :param \**kw: A single keyword 'dialect' is accepted.  This is an
         optional string dialect name which will
         limit rendering of this prefix to only that dialect.

        """
        dialect = kw.pop('dialect', None)
        if kw:
            raise exc.ArgumentError("Unsupported argument(s): %s" %
                                    ",".join(kw))
        self._setup_prefixes(expr, dialect)

    def _setup_prefixes(self, prefixes, dialect=None):
        # coerce each prefix to a text/clause element, paired with the
        # dialect restriction, and accumulate onto the existing tuple
        additions = tuple(
            (_literal_as_text(p), dialect) for p in prefixes
        )
        self._prefixes = self._prefixes + additions
-
-
-class Select(HasPrefixes, SelectBase):
- """Represents a ``SELECT`` statement.
-
- .. seealso::
-
- :func:`~.expression.select` - the function which creates
- a :class:`.Select` object.
-
- :ref:`coretutorial_selecting` - Core Tutorial description
- of :func:`.select`.
-
- """
-
- __visit_name__ = 'select'
-
- _prefixes = ()
- _hints = util.immutabledict()
- _distinct = False
- _from_cloned = None
- _correlate = ()
- _correlate_except = None
- _memoized_property = SelectBase._memoized_property
-
    def __init__(self,
                 columns,
                 whereclause=None,
                 from_obj=None,
                 distinct=False,
                 having=None,
                 correlate=True,
                 prefixes=None,
                 **kwargs):
        """Construct a Select object.

        The public constructor for Select is the
        :func:`select` function; see that function for
        argument descriptions.

        Additional generative and mutator methods are available on the
        :class:`SelectBase` superclass.

        """
        self._auto_correlate = correlate
        if distinct is not False:
            if distinct is True:
                # plain DISTINCT
                self._distinct = True
            else:
                # a list of expressions (DISTINCT ON, on Postgresql)
                self._distinct = [
                    _literal_as_text(e)
                    for e in util.to_list(distinct)
                ]

        if from_obj is not None:
            self._from_obj = util.OrderedSet(
                _interpret_as_from(f)
                for f in util.to_list(from_obj))
        else:
            self._from_obj = util.OrderedSet()

        # columns must be an iterable (truth-testing a non-iterable
        # such as a Table raises TypeError here)
        try:
            cols_present = bool(columns)
        except TypeError:
            raise exc.ArgumentError("columns argument to select() must "
                                    "be a Python list or other iterable")

        if cols_present:
            self._raw_columns = []
            for c in columns:
                c = _interpret_as_column_or_from(c)
                if isinstance(c, ScalarSelect):
                    # scalar subqueries are grouped when in a column list
                    c = c.self_group(against=operators.comma_op)
                self._raw_columns.append(c)
        else:
            self._raw_columns = []

        if whereclause is not None:
            self._whereclause = _literal_as_text(whereclause)
        else:
            self._whereclause = None

        if having is not None:
            self._having = _literal_as_text(having)
        else:
            self._having = None

        if prefixes:
            self._setup_prefixes(prefixes)

        SelectBase.__init__(self, **kwargs)
-
    @property
    def _froms(self):
        """The raw list of FROM elements derived from the columns clause,
        the WHERE clause and explicit ``_from_obj`` entries,
        de-duplicated across each element's clone lineage."""
        # would love to cache this,
        # but there's just enough edge cases, particularly now that
        # declarative encourages construction of SQL expressions
        # without tables present, to just regen this each time.
        froms = []
        seen = set()
        translate = self._from_cloned

        def add(items):
            # append each item not already represented via its
            # _cloned_set, mapping through _from_cloned when present
            for item in items:
                if translate and item in translate:
                    item = translate[item]
                if not seen.intersection(item._cloned_set):
                    froms.append(item)
                    seen.update(item._cloned_set)

        add(_from_objects(*self._raw_columns))
        if self._whereclause is not None:
            add(_from_objects(self._whereclause))
        add(self._from_obj)

        return froms
-
- def _get_display_froms(self, explicit_correlate_froms=None,
- implicit_correlate_froms=None):
- """Return the full list of 'from' clauses to be displayed.
-
- Takes into account a set of existing froms which may be
- rendered in the FROM clause of enclosing selects; this Select
- may want to leave those absent if it is automatically
- correlating.
-
- """
- froms = self._froms
-
- toremove = set(itertools.chain(*[
- _expand_cloned(f._hide_froms)
- for f in froms]))
- if toremove:
- # if we're maintaining clones of froms,
- # add the copies out to the toremove list. only include
- # clones that are lexical equivalents.
- if self._from_cloned:
- toremove.update(
- self._from_cloned[f] for f in
- toremove.intersection(self._from_cloned)
- if self._from_cloned[f]._is_lexical_equivalent(f)
- )
- # filter out to FROM clauses not in the list,
- # using a list to maintain ordering
- froms = [f for f in froms if f not in toremove]
-
- if self._correlate:
- to_correlate = self._correlate
- if to_correlate:
- froms = [
- f for f in froms if f not in
- _cloned_intersection(
- _cloned_intersection(froms, explicit_correlate_froms or ()),
- to_correlate
- )
- ]
-
- if self._correlate_except is not None:
-
- froms = [
- f for f in froms if f not in
- _cloned_difference(
- _cloned_intersection(froms, explicit_correlate_froms or ()),
- self._correlate_except
- )
- ]
-
- if self._auto_correlate and \
- implicit_correlate_froms and \
- len(froms) > 1:
-
- froms = [
- f for f in froms if f not in
- _cloned_intersection(froms, implicit_correlate_froms)
- ]
-
- if not len(froms):
- raise exc.InvalidRequestError("Select statement '%s"
- "' returned no FROM clauses due to "
- "auto-correlation; specify "
- "correlate(<tables>) to control "
- "correlation manually." % self)
-
- return froms
-
- def _scalar_type(self):
- elem = self._raw_columns[0]
- cols = list(elem._select_iterable)
- return cols[0].type
-
    @property
    def froms(self):
        """Return the displayed list of FromClause elements."""

        # no enclosing context: plain display list
        return self._get_display_froms()
-
    @_generative
    def with_hint(self, selectable, text, dialect_name='*'):
        """Add an indexing hint for the given selectable to this
        :class:`.Select`.

        The text of the hint is rendered in the appropriate
        location for the database backend in use, relative
        to the given :class:`.Table` or :class:`.Alias` passed as the
        ``selectable`` argument. The dialect implementation
        typically uses Python string substitution syntax
        with the token ``%(name)s`` to render the name of
        the table or alias. E.g. when using Oracle, the
        following::

            select([mytable]).\\
                with_hint(mytable, "+ index(%(name)s ix_mytable)")

        Would render SQL as::

            select /*+ index(mytable ix_mytable) */ ... from mytable

        The ``dialect_name`` option will limit the rendering of a particular
        hint to a particular backend. Such as, to add hints for both Oracle
        and Sybase simultaneously::

            select([mytable]).\\
                with_hint(mytable, "+ index(%(name)s ix_mytable)", 'oracle').\\
                with_hint(mytable, "WITH INDEX ix_mytable", 'sybase')

        """
        # hints are keyed by (selectable, dialect_name); the default
        # '*' presumably applies to all backends -- confirm in compiler
        self._hints = self._hints.union(
            {(selectable, dialect_name): text})
-
    @property
    def type(self):
        # a SELECT has no single datatype; as_scalar() provides one
        raise exc.InvalidRequestError("Select objects don't have a type. "
                    "Call as_scalar() on this Select object "
                    "to return a 'scalar' version of this Select.")
-
    @_memoized_property.method
    def locate_all_froms(self):
        """return a Set of all FromClause elements referenced by this Select.

        This set is a superset of that returned by the ``froms`` property,
        which is specifically for those FromClause elements that would
        actually be rendered.

        """
        # NOTE: memoized per instance via _memoized_property.method --
        # presumably invalidated by _reset_exported(); confirm.
        froms = self._froms
        return froms + list(_from_objects(*froms))
-
    @property
    def inner_columns(self):
        """an iterator of all ColumnElement expressions which would
        be rendered into the columns clause of the resulting SELECT
        statement.

        """
        return _select_iterables(self._raw_columns)
-
- def is_derived_from(self, fromclause):
- if self in fromclause._cloned_set:
- return True
-
- for f in self.locate_all_froms():
- if f.is_derived_from(fromclause):
- return True
- return False
-
    def _copy_internals(self, clone=_clone, **kw):
        """Apply the given clone function to this Select's internal
        clause elements, used by visitors and statement adaption."""

        # Select() object has been cloned and probably adapted by the
        # given clone function. Apply the cloning function to internal
        # objects

        # 1. keep a dictionary of the froms we've cloned, and what
        # they've become.  This is consulted later when we derive
        # additional froms from "whereclause" and the columns clause,
        # which may still reference the uncloned parent table.
        # as of 0.7.4 we also put the current version of _froms, which
        # gets cleared on each generation.  previously we were "baking"
        # _froms into self._from_obj.
        self._from_cloned = from_cloned = dict((f, clone(f, **kw))
                for f in self._from_obj.union(self._froms))

        # 2. update persistent _from_obj with the cloned versions.
        self._from_obj = util.OrderedSet(from_cloned[f] for f in
                self._from_obj)

        # the _correlate collection is done separately, what can happen
        # here is the same item is _correlate as in _from_obj but the
        # _correlate version has an annotation on it - (specifically
        # RelationshipProperty.Comparator._criterion_exists() does
        # this). Also keep _correlate liberally open with its previous
        # contents, as this set is used for matching, not rendering.
        self._correlate = set(clone(f) for f in
                              self._correlate).union(self._correlate)

        # 3. clone other things.  The difficulty here is that Column
        # objects are not actually cloned, and refer to their original
        # .table, resulting in the wrong "from" parent after a clone
        # operation.  Hence _from_cloned and _from_obj supersede what is
        # present here.
        self._raw_columns = [clone(c, **kw) for c in self._raw_columns]
        for attr in '_whereclause', '_having', '_order_by_clause', \
                '_group_by_clause':
            if getattr(self, attr) is not None:
                setattr(self, attr, clone(getattr(self, attr), **kw))

        # erase exported column list, _froms collection,
        # etc.
        self._reset_exported()
-
- def get_children(self, column_collections=True, **kwargs):
- """return child elements as per the ClauseElement specification."""
-
- return (column_collections and list(self.columns) or []) + \
- self._raw_columns + list(self._froms) + \
- [x for x in
- (self._whereclause, self._having,
- self._order_by_clause, self._group_by_clause)
- if x is not None]
-
    @_generative
    def column(self, column):
        """return a new select() construct with the given column expression
        added to its columns clause.

        """
        # in-place append on the generative copy
        self.append_column(column)
-
    def reduce_columns(self, only_synonyms=True):
        """Return a new :func:`.select` construct with redundantly
        named, equivalently-valued columns removed from the columns clause.

        "Redundant" here means two columns where one refers to the
        other either based on foreign key, or via a simple equality
        comparison in the WHERE clause of the statement.   The primary purpose
        of this method is to automatically construct a select statement
        with all uniquely-named columns, without the need to use
        table-qualified labels as :meth:`.apply_labels` does.

        When columns are omitted based on foreign key, the referred-to
        column is the one that's kept.  When columns are omitted based on
        WHERE equivalence, the first column in the columns clause is the
        one that's kept.

        :param only_synonyms: when True, limit the removal of columns
         to those which have the same name as the equivalent.   Otherwise,
         all columns that are equivalent to another are removed.

        .. versionadded:: 0.8

        """
        # delegate the equivalence analysis to sqlutil.reduce_columns(),
        # passing the WHERE clause and explicit FROMs as extra criteria
        return self.with_only_columns(
            sqlutil.reduce_columns(
                self.inner_columns,
                only_synonyms=only_synonyms,
                *(self._whereclause, ) + tuple(self._from_obj)
            )
        )
-
    @_generative
    def with_only_columns(self, columns):
        """Return a new :func:`.select` construct with its columns
        clause replaced with the given columns.

        .. versionchanged:: 0.7.3
            Due to a bug fix, this method has a slight
            behavioral change as of version 0.7.3.
            Prior to version 0.7.3, the FROM clause of
            a :func:`.select` was calculated upfront and as new columns
            were added; in 0.7.3 and later it's calculated
            at compile time, fixing an issue regarding late binding
            of columns to parent tables.  This changes the behavior of
            :meth:`.Select.with_only_columns` in that FROM clauses no
            longer represented in the new list are dropped,
            but this behavior is more consistent in
            that the FROM clauses are consistently derived from the
            current columns clause.  The original intent of this method
            is to allow trimming of the existing columns list to be fewer
            columns than originally present; the use case of replacing
            the columns list with an entirely different one hadn't
            been anticipated until 0.7.3 was released; the usage
            guidelines below illustrate how this should be done.

        This method is exactly equivalent to as if the original
        :func:`.select` had been called with the given columns
        clause.   I.e. a statement::

            s = select([table1.c.a, table1.c.b])
            s = s.with_only_columns([table1.c.b])

        should be exactly equivalent to::

            s = select([table1.c.b])

        This means that FROM clauses which are only derived
        from the column list will be discarded if the new column
        list no longer contains that FROM::

            >>> table1 = table('t1', column('a'), column('b'))
            >>> table2 = table('t2', column('a'), column('b'))
            >>> s1 = select([table1.c.a, table2.c.b])
            >>> print s1
            SELECT t1.a, t2.b FROM t1, t2
            >>> s2 = s1.with_only_columns([table2.c.b])
            >>> print s2
            SELECT t2.b FROM t1

        The preferred way to maintain a specific FROM clause
        in the construct, assuming it won't be represented anywhere
        else (i.e. not in the WHERE clause, etc.) is to set it using
        :meth:`.Select.select_from`::

            >>> s1 = select([table1.c.a, table2.c.b]).\\
            ...         select_from(table1.join(table2,
            ...                 table1.c.a==table2.c.a))
            >>> s2 = s1.with_only_columns([table2.c.b])
            >>> print s2
            SELECT t2.b FROM t1 JOIN t2 ON t1.a=t2.a

        Care should also be taken to use the correct
        set of column objects passed to :meth:`.Select.with_only_columns`.
        Since the method is essentially equivalent to calling the
        :func:`.select` construct in the first place with the given
        columns, the columns passed to :meth:`.Select.with_only_columns`
        should usually be a subset of those which were passed
        to the :func:`.select` construct, not those which are available
        from the ``.c`` collection of that :func:`.select`.  That
        is::

            s = select([table1.c.a, table1.c.b]).select_from(table1)
            s = s.with_only_columns([table1.c.b])

        and **not**::

            # usually incorrect
            s = s.with_only_columns([s.c.b])

        The latter would produce the SQL::

            SELECT b
            FROM (SELECT t1.a AS a, t1.b AS b
            FROM t1), t1

        Since the :func:`.select` construct is essentially being
        asked to select both from ``table1`` as well as itself.

        """
        self._reset_exported()
        # interpret/group the incoming expressions exactly as
        # Select.__init__() does for its columns argument
        rc = []
        for c in columns:
            c = _interpret_as_column_or_from(c)
            if isinstance(c, ScalarSelect):
                c = c.self_group(against=operators.comma_op)
            rc.append(c)
        self._raw_columns = rc
-
    @_generative
    def where(self, whereclause):
        """return a new select() construct with the given expression added to
        its WHERE clause, joined to the existing clause via AND, if any.

        """

        # in-place append on the generative copy
        self.append_whereclause(whereclause)
-
    @_generative
    def having(self, having):
        """return a new select() construct with the given expression added to
        its HAVING clause, joined to the existing clause via AND, if any.

        """
        # in-place append on the generative copy
        self.append_having(having)
-
- @_generative
- def distinct(self, *expr):
- """Return a new select() construct which will apply DISTINCT to its
- columns clause.
-
- :param \*expr: optional column expressions. When present,
- the Postgresql dialect will render a ``DISTINCT ON (<expressions>>)``
- construct.
-
- """
- if expr:
- expr = [_literal_as_text(e) for e in expr]
- if isinstance(self._distinct, list):
- self._distinct = self._distinct + expr
- else:
- self._distinct = expr
- else:
- self._distinct = True
-
    @_generative
    def select_from(self, fromclause):
        """return a new :func:`.select` construct with the
        given FROM expression
        merged into its list of FROM objects.

        E.g.::

            table1 = table('t1', column('a'))
            table2 = table('t2', column('b'))
            s = select([table1.c.a]).\\
                select_from(
                    table1.join(table2, table1.c.a==table2.c.b)
                )

        The "from" list is a unique set on the identity of each element,
        so adding an already present :class:`.Table` or other selectable
        will have no effect.   Passing a :class:`.Join` that refers
        to an already present :class:`.Table` or other selectable will have
        the effect of concealing the presence of that selectable as
        an individual element in the rendered FROM list, instead
        rendering it into a JOIN clause.

        While the typical purpose of :meth:`.Select.select_from` is to
        replace the default, derived FROM clause with a join, it can
        also be called with individual table elements, multiple times
        if desired, in the case that the FROM clause cannot be fully
        derived from the columns clause::

            select([func.count('*')]).select_from(table1)

        """
        # in-place append on the generative copy
        self.append_from(fromclause)
-
    @_generative
    def correlate(self, *fromclauses):
        """return a new :class:`.Select` which will correlate the given FROM
        clauses to that of an enclosing :class:`.Select`.

        Calling this method turns off the :class:`.Select` object's
        default behavior of "auto-correlation".  Normally, FROM elements
        which appear in a :class:`.Select` that encloses this one via
        its :term:`WHERE clause`, ORDER BY, HAVING or
        :term:`columns clause` will be omitted from this :class:`.Select`
        object's :term:`FROM clause`.
        Setting an explicit correlation collection using the
        :meth:`.Select.correlate` method provides a fixed list of FROM
        objects that can potentially take place in this process.

        When :meth:`.Select.correlate` is used to apply specific FROM clauses
        for correlation, the FROM elements become candidates for
        correlation regardless of how deeply nested this :class:`.Select`
        object is, relative to an enclosing :class:`.Select` which refers to
        the same FROM object.  This is in contrast to the behavior of
        "auto-correlation" which only correlates to an immediate enclosing
        :class:`.Select`.   Multi-level correlation ensures that the link
        between enclosed and enclosing :class:`.Select` is always via
        at least one WHERE/ORDER BY/HAVING/columns clause in order for
        correlation to take place.

        If ``None`` is passed, the :class:`.Select` object will correlate
        none of its FROM entries, and all will render unconditionally
        in the local FROM clause.

        :param \*fromclauses: a list of one or more :class:`.FromClause`
         constructs, or other compatible constructs (i.e. ORM-mapped
         classes) to become part of the correlate collection.

        .. versionchanged:: 0.8.0 ORM-mapped classes are accepted by
           :meth:`.Select.correlate`.

        .. versionchanged:: 0.8.0 The :meth:`.Select.correlate` method no
           longer unconditionally removes entries from the FROM clause;
           instead, the candidate FROM entries must also be matched by a
           FROM entry located in an enclosing :class:`.Select`, which
           ultimately encloses this one as present in the WHERE clause,
           ORDER BY clause, HAVING clause, or columns clause of an
           enclosing :meth:`.Select`.

        .. versionchanged:: 0.8.2 explicit correlation takes place
           via any level of nesting of :class:`.Select` objects; in previous
           0.8 versions, correlation would only occur relative to the
           immediate enclosing :class:`.Select` construct.

        .. seealso::

            :meth:`.Select.correlate_except`

            :ref:`correlated_subqueries`

        """
        self._auto_correlate = False
        # correlate(None) empties the collection; otherwise the given
        # clauses are merged into the existing set
        if fromclauses and fromclauses[0] is None:
            self._correlate = ()
        else:
            self._correlate = set(self._correlate).union(
                _interpret_as_from(f) for f in fromclauses)
-
    @_generative
    def correlate_except(self, *fromclauses):
        """return a new :class:`.Select` which will omit the given FROM
        clauses from the auto-correlation process.

        Calling :meth:`.Select.correlate_except` turns off the
        :class:`.Select` object's default behavior of
        "auto-correlation" for the given FROM elements.  An element
        specified here will unconditionally appear in the FROM list, while
        all other FROM elements remain subject to normal auto-correlation
        behaviors.

        .. versionchanged:: 0.8.2 The :meth:`.Select.correlate_except`
           method was improved to fully prevent FROM clauses specified
           here from being omitted from the immediate FROM clause of this
           :class:`.Select`.

        If ``None`` is passed, the :class:`.Select` object will correlate
        all of its FROM entries.

        .. versionchanged:: 0.8.2 calling ``correlate_except(None)`` will
           correctly auto-correlate all FROM clauses.

        :param \*fromclauses: a list of one or more :class:`.FromClause`
         constructs, or other compatible constructs (i.e. ORM-mapped
         classes) to become part of the correlate-exception collection.

        .. seealso::

            :meth:`.Select.correlate`

            :ref:`correlated_subqueries`

        """

        self._auto_correlate = False
        # correlate_except(None) resets to the empty collection;
        # otherwise merge into the existing set (which may be None)
        if fromclauses and fromclauses[0] is None:
            self._correlate_except = ()
        else:
            self._correlate_except = set(self._correlate_except or ()).union(
                _interpret_as_from(f) for f in fromclauses)
-
- def append_correlation(self, fromclause):
- """append the given correlation expression to this select()
- construct.
-
- This is an **in-place** mutation method; the
- :meth:`~.Select.correlate` method is preferred, as it provides standard
- :term:`method chaining`.
-
- """
-
- self._auto_correlate = False
- self._correlate = set(self._correlate).union(
- _interpret_as_from(f) for f in fromclause)
-
- def append_column(self, column):
- """append the given column expression to the columns clause of this
- select() construct.
-
- This is an **in-place** mutation method; the
- :meth:`~.Select.column` method is preferred, as it provides standard
- :term:`method chaining`.
-
- """
- self._reset_exported()
- column = _interpret_as_column_or_from(column)
-
- if isinstance(column, ScalarSelect):
- column = column.self_group(against=operators.comma_op)
-
- self._raw_columns = self._raw_columns + [column]
-
- def append_prefix(self, clause):
- """append the given columns clause prefix expression to this select()
- construct.
-
- This is an **in-place** mutation method; the
- :meth:`~.Select.prefix_with` method is preferred, as it provides standard
- :term:`method chaining`.
-
- """
- clause = _literal_as_text(clause)
- self._prefixes = self._prefixes + (clause,)
-
- def append_whereclause(self, whereclause):
- """append the given expression to this select() construct's WHERE
- criterion.
-
- The expression will be joined to existing WHERE criterion via AND.
-
- This is an **in-place** mutation method; the
- :meth:`~.Select.where` method is preferred, as it provides standard
- :term:`method chaining`.
-
- """
- self._reset_exported()
- whereclause = _literal_as_text(whereclause)
-
- if self._whereclause is not None:
- self._whereclause = and_(self._whereclause, whereclause)
- else:
- self._whereclause = whereclause
-
- def append_having(self, having):
- """append the given expression to this select() construct's HAVING
- criterion.
-
- The expression will be joined to existing HAVING criterion via AND.
-
- This is an **in-place** mutation method; the
- :meth:`~.Select.having` method is preferred, as it provides standard
- :term:`method chaining`.
-
- """
- if self._having is not None:
- self._having = and_(self._having, _literal_as_text(having))
- else:
- self._having = _literal_as_text(having)
-
- def append_from(self, fromclause):
- """append the given FromClause expression to this select() construct's
- FROM clause.
-
- This is an **in-place** mutation method; the
- :meth:`~.Select.select_from` method is preferred, as it provides standard
- :term:`method chaining`.
-
- """
- self._reset_exported()
- fromclause = _interpret_as_from(fromclause)
- self._from_obj = self._from_obj.union([fromclause])
-
-
- @_memoized_property
- def _columns_plus_names(self):
- if self.use_labels:
- names = set()
- def name_for_col(c):
- if c._label is None:
- return (None, c)
- name = c._label
- if name in names:
- name = c.anon_label
- else:
- names.add(name)
- return name, c
-
- return [
- name_for_col(c)
- for c in util.unique_list(_select_iterables(self._raw_columns))
- ]
- else:
- return [
- (None, c)
- for c in util.unique_list(_select_iterables(self._raw_columns))
- ]
-
- def _populate_column_collection(self):
- for name, c in self._columns_plus_names:
- if not hasattr(c, '_make_proxy'):
- continue
- if name is None:
- key = None
- elif self.use_labels:
- key = c._key_label
- if key is not None and key in self.c:
- key = c.anon_label
- else:
- key = None
-
- c._make_proxy(self, key=key,
- name=name,
- name_is_truncatable=True)
-
- def _refresh_for_new_column(self, column):
- for fromclause in self._froms:
- col = fromclause._refresh_for_new_column(column)
- if col is not None:
- if col in self.inner_columns and self._cols_populated:
- our_label = col._key_label if self.use_labels else col.key
- if our_label not in self.c:
- return col._make_proxy(self,
- name=col._label if self.use_labels else None,
- key=col._key_label if self.use_labels else None,
- name_is_truncatable=True)
- return None
- return None
-
- def self_group(self, against=None):
- """return a 'grouping' construct as per the ClauseElement
- specification.
-
- This produces an element that can be embedded in an expression. Note
- that this method is called automatically as needed when constructing
- expressions and should not require explicit use.
-
- """
- if isinstance(against, CompoundSelect):
- return self
- return FromGrouping(self)
-
- def union(self, other, **kwargs):
- """return a SQL UNION of this select() construct against the given
- selectable."""
-
- return union(self, other, **kwargs)
-
- def union_all(self, other, **kwargs):
- """return a SQL UNION ALL of this select() construct against the given
- selectable.
-
- """
- return union_all(self, other, **kwargs)
-
- def except_(self, other, **kwargs):
- """return a SQL EXCEPT of this select() construct against the given
- selectable."""
-
- return except_(self, other, **kwargs)
-
- def except_all(self, other, **kwargs):
- """return a SQL EXCEPT ALL of this select() construct against the
- given selectable.
-
- """
- return except_all(self, other, **kwargs)
-
- def intersect(self, other, **kwargs):
- """return a SQL INTERSECT of this select() construct against the given
- selectable.
-
- """
- return intersect(self, other, **kwargs)
-
- def intersect_all(self, other, **kwargs):
- """return a SQL INTERSECT ALL of this select() construct against the
- given selectable.
-
- """
- return intersect_all(self, other, **kwargs)
-
- def bind(self):
- if self._bind:
- return self._bind
- froms = self._froms
- if not froms:
- for c in self._raw_columns:
- e = c.bind
- if e:
- self._bind = e
- return e
- else:
- e = list(froms)[0].bind
- if e:
- self._bind = e
- return e
-
- return None
-
- def _set_bind(self, bind):
- self._bind = bind
- bind = property(bind, _set_bind)
-
-
-class UpdateBase(HasPrefixes, Executable, ClauseElement):
- """Form the base for ``INSERT``, ``UPDATE``, and ``DELETE`` statements.
-
- """
-
- __visit_name__ = 'update_base'
-
- _execution_options = \
- Executable._execution_options.union({'autocommit': True})
- kwargs = util.immutabledict()
- _hints = util.immutabledict()
- _prefixes = ()
-
- def _process_colparams(self, parameters):
- def process_single(p):
- if isinstance(p, (list, tuple)):
- return dict(
- (c.key, pval)
- for c, pval in zip(self.table.c, p)
- )
- else:
- return p
-
- if isinstance(parameters, (list, tuple)) and \
- isinstance(parameters[0], (list, tuple, dict)):
-
- if not self._supports_multi_parameters:
- raise exc.InvalidRequestError(
- "This construct does not support "
- "multiple parameter sets.")
-
- return [process_single(p) for p in parameters], True
- else:
- return process_single(parameters), False
-
- def params(self, *arg, **kw):
- """Set the parameters for the statement.
-
- This method raises ``NotImplementedError`` on the base class,
- and is overridden by :class:`.ValuesBase` to provide the
- SET/VALUES clause of UPDATE and INSERT.
-
- """
- raise NotImplementedError(
- "params() is not supported for INSERT/UPDATE/DELETE statements."
- " To set the values for an INSERT or UPDATE statement, use"
- " stmt.values(**parameters).")
-
- def bind(self):
- """Return a 'bind' linked to this :class:`.UpdateBase`
- or a :class:`.Table` associated with it.
-
- """
- return self._bind or self.table.bind
-
- def _set_bind(self, bind):
- self._bind = bind
- bind = property(bind, _set_bind)
-
- @_generative
- def returning(self, *cols):
- """Add a RETURNING or equivalent clause to this statement.
-
- The given list of columns represent columns within the table that is
- the target of the INSERT, UPDATE, or DELETE. Each element can be any
- column expression. :class:`~sqlalchemy.schema.Table` objects will be
- expanded into their individual columns.
-
- Upon compilation, a RETURNING clause, or database equivalent,
- will be rendered within the statement. For INSERT and UPDATE,
- the values are the newly inserted/updated values. For DELETE,
- the values are those of the rows which were deleted.
-
- Upon execution, the values of the columns to be returned
- are made available via the result set and can be iterated
- using ``fetchone()`` and similar. For DBAPIs which do not
- natively support returning values (i.e. cx_oracle),
- SQLAlchemy will approximate this behavior at the result level
- so that a reasonable amount of behavioral neutrality is
- provided.
-
- Note that not all databases/DBAPIs
- support RETURNING. For those backends with no support,
- an exception is raised upon compilation and/or execution.
- For those who do support it, the functionality across backends
- varies greatly, including restrictions on executemany()
- and other statements which return multiple rows. Please
- read the documentation notes for the database in use in
- order to determine the availability of RETURNING.
-
- """
- self._returning = cols
-
- @_generative
- def with_hint(self, text, selectable=None, dialect_name="*"):
- """Add a table hint for a single table to this
- INSERT/UPDATE/DELETE statement.
-
- .. note::
-
- :meth:`.UpdateBase.with_hint` currently applies only to
- Microsoft SQL Server. For MySQL INSERT/UPDATE/DELETE hints, use
- :meth:`.UpdateBase.prefix_with`.
-
- The text of the hint is rendered in the appropriate
- location for the database backend in use, relative
- to the :class:`.Table` that is the subject of this
- statement, or optionally to that of the given
- :class:`.Table` passed as the ``selectable`` argument.
-
- The ``dialect_name`` option will limit the rendering of a particular
- hint to a particular backend. Such as, to add a hint
- that only takes effect for SQL Server::
-
- mytable.insert().with_hint("WITH (PAGLOCK)", dialect_name="mssql")
-
- .. versionadded:: 0.7.6
-
- :param text: Text of the hint.
- :param selectable: optional :class:`.Table` that specifies
- an element of the FROM clause within an UPDATE or DELETE
- to be the subject of the hint - applies only to certain backends.
- :param dialect_name: defaults to ``*``, if specified as the name
- of a particular dialect, will apply these hints only when
- that dialect is in use.
- """
- if selectable is None:
- selectable = self.table
-
- self._hints = self._hints.union(
- {(selectable, dialect_name): text})
-
-
-class ValuesBase(UpdateBase):
- """Supplies support for :meth:`.ValuesBase.values` to
- INSERT and UPDATE constructs."""
-
- __visit_name__ = 'values_base'
-
- _supports_multi_parameters = False
- _has_multi_parameters = False
- select = None
-
- def __init__(self, table, values, prefixes):
- self.table = _interpret_as_from(table)
- self.parameters, self._has_multi_parameters = \
- self._process_colparams(values)
- if prefixes:
- self._setup_prefixes(prefixes)
-
- @_generative
- def values(self, *args, **kwargs):
- """specify a fixed VALUES clause for an INSERT statement, or the SET
- clause for an UPDATE.
-
- Note that the :class:`.Insert` and :class:`.Update` constructs support
- per-execution time formatting of the VALUES and/or SET clauses,
- based on the arguments passed to :meth:`.Connection.execute`. However,
- the :meth:`.ValuesBase.values` method can be used to "fix" a particular
- set of parameters into the statement.
-
- Multiple calls to :meth:`.ValuesBase.values` will produce a new
- construct, each one with the parameter list modified to include
- the new parameters sent. In the typical case of a single
- dictionary of parameters, the newly passed keys will replace
- the same keys in the previous construct. In the case of a list-based
- "multiple values" construct, each new list of values is extended
- onto the existing list of values.
-
- :param \**kwargs: key value pairs representing the string key
- of a :class:`.Column` mapped to the value to be rendered into the
- VALUES or SET clause::
-
- users.insert().values(name="some name")
-
- users.update().where(users.c.id==5).values(name="some name")
-
- :param \*args: Alternatively, a dictionary, tuple or list
- of dictionaries or tuples can be passed as a single positional
- argument in order to form the VALUES or
- SET clause of the statement. The single dictionary form
- works the same as the kwargs form::
-
- users.insert().values({"name": "some name"})
-
- If a tuple is passed, the tuple should contain the same number
- of columns as the target :class:`.Table`::
-
- users.insert().values((5, "some name"))
-
- The :class:`.Insert` construct also supports multiply-rendered VALUES
- construct, for those backends which support this SQL syntax
- (SQLite, Postgresql, MySQL). This mode is indicated by passing a list
- of one or more dictionaries/tuples::
-
- users.insert().values([
- {"name": "some name"},
- {"name": "some other name"},
- {"name": "yet another name"},
- ])
-
- In the case of an :class:`.Update`
- construct, only the single dictionary/tuple form is accepted,
- else an exception is raised. It is also an exception case to
- attempt to mix the single-/multiple- value styles together,
- either through multiple :meth:`.ValuesBase.values` calls
- or by sending a list + kwargs at the same time.
-
- .. note::
-
- Passing a multiple values list is *not* the same
- as passing a multiple values list to the :meth:`.Connection.execute`
- method. Passing a list of parameter sets to :meth:`.ValuesBase.values`
- produces a construct of this form::
-
- INSERT INTO table (col1, col2, col3) VALUES
- (col1_0, col2_0, col3_0),
- (col1_1, col2_1, col3_1),
- ...
-
- whereas a multiple list passed to :meth:`.Connection.execute`
- has the effect of using the DBAPI
- `executemany() <http://www.python.org/dev/peps/pep-0249/#id18>`_
- method, which provides a high-performance system of invoking
- a single-row INSERT statement many times against a series
- of parameter sets. The "executemany" style is supported by
- all database backends, as it does not depend on a special SQL
- syntax.
-
- .. versionadded:: 0.8
- Support for multiple-VALUES INSERT statements.
-
-
- .. seealso::
-
- :ref:`inserts_and_updates` - SQL Expression
- Language Tutorial
-
- :func:`~.expression.insert` - produce an ``INSERT`` statement
-
- :func:`~.expression.update` - produce an ``UPDATE`` statement
-
- """
- if self.select is not None:
- raise exc.InvalidRequestError(
- "This construct already inserts from a SELECT")
- if self._has_multi_parameters and kwargs:
- raise exc.InvalidRequestError(
- "This construct already has multiple parameter sets.")
-
- if args:
- if len(args) > 1:
- raise exc.ArgumentError(
- "Only a single dictionary/tuple or list of "
- "dictionaries/tuples is accepted positionally.")
- v = args[0]
- else:
- v = {}
-
- if self.parameters is None:
- self.parameters, self._has_multi_parameters = \
- self._process_colparams(v)
- else:
- if self._has_multi_parameters:
- self.parameters = list(self.parameters)
- p, self._has_multi_parameters = self._process_colparams(v)
- if not self._has_multi_parameters:
- raise exc.ArgumentError(
- "Can't mix single-values and multiple values "
- "formats in one statement")
-
- self.parameters.extend(p)
- else:
- self.parameters = self.parameters.copy()
- p, self._has_multi_parameters = self._process_colparams(v)
- if self._has_multi_parameters:
- raise exc.ArgumentError(
- "Can't mix single-values and multiple values "
- "formats in one statement")
- self.parameters.update(p)
-
- if kwargs:
- if self._has_multi_parameters:
- raise exc.ArgumentError(
- "Can't pass kwargs and multiple parameter sets "
- "simultaenously")
- else:
- self.parameters.update(kwargs)
-
-
-class Insert(ValuesBase):
- """Represent an INSERT construct.
-
- The :class:`.Insert` object is created using the
- :func:`~.expression.insert()` function.
-
- .. seealso::
-
- :ref:`coretutorial_insert_expressions`
-
- """
- __visit_name__ = 'insert'
-
- _supports_multi_parameters = True
-
- def __init__(self,
- table,
- values=None,
- inline=False,
- bind=None,
- prefixes=None,
- returning=None,
- **kwargs):
- ValuesBase.__init__(self, table, values, prefixes)
- self._bind = bind
- self.select = None
- self.inline = inline
- self._returning = returning
- self.kwargs = kwargs
-
- def get_children(self, **kwargs):
- if self.select is not None:
- return self.select,
- else:
- return ()
-
- @_generative
- def from_select(self, names, select):
- """Return a new :class:`.Insert` construct which represents
- an ``INSERT...FROM SELECT`` statement.
-
- e.g.::
-
- sel = select([table1.c.a, table1.c.b]).where(table1.c.c > 5)
- ins = table2.insert().from_select(['a', 'b'], sel)
-
- :param names: a sequence of string column names or :class:`.Column`
- objects representing the target columns.
- :param select: a :func:`.select` construct, :class:`.FromClause`
- or other construct which resolves into a :class:`.FromClause`,
- such as an ORM :class:`.Query` object, etc. The order of
- columns returned from this FROM clause should correspond to the
- order of columns sent as the ``names`` parameter; while this
- is not checked before passing along to the database, the database
- would normally raise an exception if these column lists don't
- correspond.
-
- .. note::
-
- Depending on backend, it may be necessary for the :class:`.Insert`
- statement to be constructed using the ``inline=True`` flag; this
- flag will prevent the implicit usage of ``RETURNING`` when the
- ``INSERT`` statement is rendered, which isn't supported on a backend
- such as Oracle in conjunction with an ``INSERT..SELECT`` combination::
-
- sel = select([table1.c.a, table1.c.b]).where(table1.c.c > 5)
- ins = table2.insert(inline=True).from_select(['a', 'b'], sel)
-
- .. versionadded:: 0.8.3
-
- """
- if self.parameters:
- raise exc.InvalidRequestError(
- "This construct already inserts value expressions")
-
- self.parameters, self._has_multi_parameters = \
- self._process_colparams(dict((n, null()) for n in names))
-
- self.select = _interpret_as_select(select)
-
- def _copy_internals(self, clone=_clone, **kw):
- # TODO: coverage
- self.parameters = self.parameters.copy()
- if self.select is not None:
- self.select = _clone(self.select)
-
-
-class Update(ValuesBase):
- """Represent an Update construct.
-
- The :class:`.Update` object is created using the :func:`update()` function.
-
- """
- __visit_name__ = 'update'
-
- def __init__(self,
- table,
- whereclause,
- values=None,
- inline=False,
- bind=None,
- prefixes=None,
- returning=None,
- **kwargs):
- ValuesBase.__init__(self, table, values, prefixes)
- self._bind = bind
- self._returning = returning
- if whereclause is not None:
- self._whereclause = _literal_as_text(whereclause)
- else:
- self._whereclause = None
- self.inline = inline
- self.kwargs = kwargs
-
-
- def get_children(self, **kwargs):
- if self._whereclause is not None:
- return self._whereclause,
- else:
- return ()
-
- def _copy_internals(self, clone=_clone, **kw):
- # TODO: coverage
- self._whereclause = clone(self._whereclause, **kw)
- self.parameters = self.parameters.copy()
-
- @_generative
- def where(self, whereclause):
- """return a new update() construct with the given expression added to
- its WHERE clause, joined to the existing clause via AND, if any.
-
- """
- if self._whereclause is not None:
- self._whereclause = and_(self._whereclause,
- _literal_as_text(whereclause))
- else:
- self._whereclause = _literal_as_text(whereclause)
-
- @property
- def _extra_froms(self):
- # TODO: this could be made memoized
- # if the memoization is reset on each generative call.
- froms = []
- seen = set([self.table])
-
- if self._whereclause is not None:
- for item in _from_objects(self._whereclause):
- if not seen.intersection(item._cloned_set):
- froms.append(item)
- seen.update(item._cloned_set)
-
- return froms
-
-
-class Delete(UpdateBase):
- """Represent a DELETE construct.
-
- The :class:`.Delete` object is created using the :func:`delete()` function.
-
- """
-
- __visit_name__ = 'delete'
-
- def __init__(self,
- table,
- whereclause,
- bind=None,
- returning=None,
- prefixes=None,
- **kwargs):
- self._bind = bind
- self.table = _interpret_as_from(table)
- self._returning = returning
-
- if prefixes:
- self._setup_prefixes(prefixes)
-
- if whereclause is not None:
- self._whereclause = _literal_as_text(whereclause)
- else:
- self._whereclause = None
-
- self.kwargs = kwargs
-
- def get_children(self, **kwargs):
- if self._whereclause is not None:
- return self._whereclause,
- else:
- return ()
-
- @_generative
- def where(self, whereclause):
- """Add the given WHERE clause to a newly returned delete construct."""
-
- if self._whereclause is not None:
- self._whereclause = and_(self._whereclause,
- _literal_as_text(whereclause))
- else:
- self._whereclause = _literal_as_text(whereclause)
-
- def _copy_internals(self, clone=_clone, **kw):
- # TODO: coverage
- self._whereclause = clone(self._whereclause, **kw)
-
-
-class _IdentifiedClause(Executable, ClauseElement):
-
- __visit_name__ = 'identified'
- _execution_options = \
- Executable._execution_options.union({'autocommit': False})
- quote = None
-
- def __init__(self, ident):
- self.ident = ident
-
-
-class SavepointClause(_IdentifiedClause):
- __visit_name__ = 'savepoint'
-
-
-class RollbackToSavepointClause(_IdentifiedClause):
- __visit_name__ = 'rollback_to_savepoint'
-
-
-class ReleaseSavepointClause(_IdentifiedClause):
- __visit_name__ = 'release_savepoint'
# old names for compatibility
+_Executable = Executable
_BindParamClause = BindParameter
_Label = Label
_SelectBase = SelectBase
diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py
index 5e2d0792c..f300e2416 100644
--- a/lib/sqlalchemy/sql/functions.py
+++ b/lib/sqlalchemy/sql/functions.py
@@ -1,14 +1,19 @@
# sql/functions.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-from .. import types as sqltypes, schema
-from .expression import (
- ClauseList, Function, _literal_as_binds, literal_column, _type_from_args,
- cast, extract
- )
+"""SQL function API, factories, and built-in functions.
+
+"""
+from . import sqltypes, schema
+from .base import Executable
+from .elements import ClauseList, Cast, Extract, _literal_as_binds, \
+ literal_column, _type_from_args, ColumnElement, _clone,\
+ Over, BindParameter
+from .selectable import FromClause, Select
+
from . import operators
from .visitors import VisitableType
from .. import util
@@ -29,6 +34,281 @@ def register_function(identifier, fn, package="_default"):
reg[identifier] = fn
+class FunctionElement(Executable, ColumnElement, FromClause):
+ """Base for SQL function-oriented constructs.
+
+ .. seealso::
+
+ :class:`.Function` - named SQL function.
+
+ :data:`.func` - namespace which produces registered or ad-hoc
+ :class:`.Function` instances.
+
+ :class:`.GenericFunction` - allows creation of registered function
+ types.
+
+ """
+
+ packagenames = ()
+
+ def __init__(self, *clauses, **kwargs):
+ """Construct a :class:`.FunctionElement`.
+ """
+ args = [_literal_as_binds(c, self.name) for c in clauses]
+ self.clause_expr = ClauseList(
+ operator=operators.comma_op,
+ group_contents=True, *args).\
+ self_group()
+
+ def _execute_on_connection(self, connection, multiparams, params):
+ return connection._execute_function(self, multiparams, params)
+
+ @property
+ def columns(self):
+ """Fulfill the 'columns' contract of :class:`.ColumnElement`.
+
+ Returns a single-element list consisting of this object.
+
+ """
+ return [self]
+
+ @util.memoized_property
+ def clauses(self):
+ """Return the underlying :class:`.ClauseList` which contains
+ the arguments for this :class:`.FunctionElement`.
+
+ """
+ return self.clause_expr.element
+
+ def over(self, partition_by=None, order_by=None):
+ """Produce an OVER clause against this function.
+
+ Used against aggregate or so-called "window" functions,
+ for database backends that support window functions.
+
+ The expression::
+
+ func.row_number().over(order_by='x')
+
+ is shorthand for::
+
+ from sqlalchemy import over
+ over(func.row_number(), order_by='x')
+
+ See :func:`~.expression.over` for a full description.
+
+ .. versionadded:: 0.7
+
+ """
+ return Over(self, partition_by=partition_by, order_by=order_by)
+
+ @property
+ def _from_objects(self):
+ return self.clauses._from_objects
+
+ def get_children(self, **kwargs):
+ return self.clause_expr,
+
+ def _copy_internals(self, clone=_clone, **kw):
+ self.clause_expr = clone(self.clause_expr, **kw)
+ self._reset_exported()
+ FunctionElement.clauses._reset(self)
+
+ def select(self):
+ """Produce a :func:`~.expression.select` construct
+ against this :class:`.FunctionElement`.
+
+ This is shorthand for::
+
+ s = select([function_element])
+
+ """
+ s = Select([self])
+ if self._execution_options:
+ s = s.execution_options(**self._execution_options)
+ return s
+
+ def scalar(self):
+ """Execute this :class:`.FunctionElement` against an embedded
+ 'bind' and return a scalar value.
+
+ This first calls :meth:`~.FunctionElement.select` to
+ produce a SELECT construct.
+
+ Note that :class:`.FunctionElement` can be passed to
+ the :meth:`.Connectable.scalar` method of :class:`.Connection`
+ or :class:`.Engine`.
+
+ """
+ return self.select().execute().scalar()
+
+ def execute(self):
+ """Execute this :class:`.FunctionElement` against an embedded
+ 'bind'.
+
+ This first calls :meth:`~.FunctionElement.select` to
+ produce a SELECT construct.
+
+ Note that :class:`.FunctionElement` can be passed to
+ the :meth:`.Connectable.execute` method of :class:`.Connection`
+ or :class:`.Engine`.
+
+ """
+ return self.select().execute()
+
+ def _bind_param(self, operator, obj):
+ return BindParameter(None, obj, _compared_to_operator=operator,
+ _compared_to_type=self.type, unique=True)
+
+
+class _FunctionGenerator(object):
+ """Generate :class:`.Function` objects based on getattr calls."""
+
+ def __init__(self, **opts):
+ self.__names = []
+ self.opts = opts
+
+ def __getattr__(self, name):
+ # passthru __ attributes; fixes pydoc
+ if name.startswith('__'):
+ try:
+ return self.__dict__[name]
+ except KeyError:
+ raise AttributeError(name)
+
+ elif name.endswith('_'):
+ name = name[0:-1]
+ f = _FunctionGenerator(**self.opts)
+ f.__names = list(self.__names) + [name]
+ return f
+
+ def __call__(self, *c, **kwargs):
+ o = self.opts.copy()
+ o.update(kwargs)
+
+ tokens = len(self.__names)
+
+ if tokens == 2:
+ package, fname = self.__names
+ elif tokens == 1:
+ package, fname = "_default", self.__names[0]
+ else:
+ package = None
+
+ if package is not None:
+ func = _registry[package].get(fname)
+ if func is not None:
+ return func(*c, **o)
+
+ return Function(self.__names[-1],
+ packagenames=self.__names[0:-1], *c, **o)
+
+
+func = _FunctionGenerator()
+"""Generate SQL function expressions.
+
+ :data:`.func` is a special object instance which generates SQL
+ functions based on name-based attributes, e.g.::
+
+ >>> print func.count(1)
+ count(:param_1)
+
+ The element is a column-oriented SQL element like any other, and is
+ used in that way::
+
+ >>> print select([func.count(table.c.id)])
+ SELECT count(sometable.id) FROM sometable
+
+ Any name can be given to :data:`.func`. If the function name is unknown to
+ SQLAlchemy, it will be rendered exactly as is. For common SQL functions
+ which SQLAlchemy is aware of, the name may be interpreted as a *generic
+ function* which will be compiled appropriately to the target database::
+
+ >>> print func.current_timestamp()
+ CURRENT_TIMESTAMP
+
+ To call functions which are present in dot-separated packages,
+ specify them in the same manner::
+
+ >>> print func.stats.yield_curve(5, 10)
+ stats.yield_curve(:yield_curve_1, :yield_curve_2)
+
+ SQLAlchemy can be made aware of the return type of functions to enable
+ type-specific lexical and result-based behavior. For example, to ensure
+ that a string-based function returns a Unicode value and is similarly
+ treated as a string in expressions, specify
+ :class:`~sqlalchemy.types.Unicode` as the type:
+
+ >>> print func.my_string(u'hi', type_=Unicode) + ' ' + \
+ ... func.my_string(u'there', type_=Unicode)
+ my_string(:my_string_1) || :my_string_2 || my_string(:my_string_3)
+
+ The object returned by a :data:`.func` call is usually an instance of
+ :class:`.Function`.
+ This object meets the "column" interface, including comparison and labeling
+ functions. The object can also be passed the :meth:`~.Connectable.execute`
+ method of a :class:`.Connection` or :class:`.Engine`, where it will be
+ wrapped inside of a SELECT statement first::
+
+ print connection.execute(func.current_timestamp()).scalar()
+
+ In a few exception cases, the :data:`.func` accessor
+ will redirect a name to a built-in expression such as :func:`.cast`
+ or :func:`.extract`, as these names have well-known meaning
+ but are not exactly the same as "functions" from a SQLAlchemy
+ perspective.
+
+ .. versionadded:: 0.8 :data:`.func` can return non-function expression
+ constructs for common quasi-functional names like :func:`.cast`
+ and :func:`.extract`.
+
+ Functions which are interpreted as "generic" functions know how to
+ calculate their return type automatically. For a listing of known generic
+ functions, see :ref:`generic_functions`.
+
+"""
+
+modifier = _FunctionGenerator(group=False)
+
+class Function(FunctionElement):
+ """Describe a named SQL function.
+
+ See the superclass :class:`.FunctionElement` for a description
+ of public methods.
+
+ .. seealso::
+
+ :data:`.func` - namespace which produces registered or ad-hoc
+ :class:`.Function` instances.
+
+ :class:`.GenericFunction` - allows creation of registered function
+ types.
+
+ """
+
+ __visit_name__ = 'function'
+
+ def __init__(self, name, *clauses, **kw):
+ """Construct a :class:`.Function`.
+
+ The :data:`.func` construct is normally used to construct
+ new :class:`.Function` instances.
+
+ """
+ self.packagenames = kw.pop('packagenames', None) or []
+ self.name = name
+ self._bind = kw.get('bind', None)
+ self.type = sqltypes.to_instance(kw.get('type_', None))
+
+ FunctionElement.__init__(self, *clauses, **kw)
+
+ def _bind_param(self, operator, obj):
+ return BindParameter(self.name, obj,
+ _compared_to_operator=operator,
+ _compared_to_type=self.type,
+ unique=True)
+
+
class _GenericMeta(VisitableType):
def __init__(cls, clsname, bases, clsdict):
cls.name = name = clsdict.get('name', clsname)
@@ -128,8 +408,8 @@ class GenericFunction(util.with_metaclass(_GenericMeta, Function)):
kwargs.pop("type_", None) or getattr(self, 'type', None))
-register_function("cast", cast)
-register_function("extract", extract)
+register_function("cast", Cast)
+register_function("extract", Extract)
class next_value(GenericFunction):
diff --git a/lib/sqlalchemy/sql/operators.py b/lib/sqlalchemy/sql/operators.py
index 128442158..d7ec977aa 100644
--- a/lib/sqlalchemy/sql/operators.py
+++ b/lib/sqlalchemy/sql/operators.py
@@ -1,5 +1,5 @@
# sql/operators.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -27,8 +27,11 @@ else:
class Operators(object):
"""Base of comparison and logical operators.
- Implements base methods :meth:`operate` and :meth:`reverse_operate`,
- as well as :meth:`__and__`, :meth:`__or__`, :meth:`__invert__`.
+ Implements base methods :meth:`~sqlalchemy.sql.operators.Operators.operate` and
+ :meth:`~sqlalchemy.sql.operators.Operators.reverse_operate`, as well as
+ :meth:`~sqlalchemy.sql.operators.Operators.__and__`,
+ :meth:`~sqlalchemy.sql.operators.Operators.__or__`,
+ :meth:`~sqlalchemy.sql.operators.Operators.__invert__`.
Usually is used via its most common subclass
:class:`.ColumnOperators`.
@@ -654,6 +657,12 @@ def exists():
raise NotImplementedError()
+def istrue(a):
+ raise NotImplementedError()
+
+def isfalse(a):
+ raise NotImplementedError()
+
def is_(a, b):
return a.is_(b)
@@ -779,6 +788,7 @@ parenthesize (a op b).
"""
+_asbool = util.symbol('_asbool', canonical=-10)
_smallest = util.symbol('_smallest', canonical=-100)
_largest = util.symbol('_largest', canonical=100)
@@ -816,12 +826,19 @@ _PRECEDENCE = {
between_op: 5,
distinct_op: 5,
inv: 5,
+ istrue: 5,
+ isfalse: 5,
and_: 3,
or_: 2,
comma_op: -1,
- collate: 7,
+
+ desc_op: 3,
+ asc_op: 3,
+ collate: 4,
+
as_: -1,
exists: 0,
+ _asbool: -10,
_smallest: _smallest,
_largest: _largest
}
diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py
new file mode 100644
index 000000000..ba38b5070
--- /dev/null
+++ b/lib/sqlalchemy/sql/schema.py
@@ -0,0 +1,3273 @@
+# sql/schema.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""The schema module provides the building blocks for database metadata.
+
+Each element within this module describes a database entity which can be
+created and dropped, or is otherwise part of such an entity. Examples include
+tables, columns, sequences, and indexes.
+
+All entities are subclasses of :class:`~sqlalchemy.schema.SchemaItem`, and as
+defined in this module they are intended to be agnostic of any vendor-specific
+constructs.
+
+A collection of entities are grouped into a unit called
+:class:`~sqlalchemy.schema.MetaData`. MetaData serves as a logical grouping of
+schema elements, and can also be associated with an actual database connection
+such that operations involving the contained elements can contact the database
+as needed.
+
+Two of the elements here also build upon their "syntactic" counterparts, which
+are defined in :mod:`~sqlalchemy.sql.expression`, specifically
+:class:`~sqlalchemy.schema.Table` and :class:`~sqlalchemy.schema.Column`.
+Since these objects are part of the SQL expression language, they are usable
+as components in SQL expressions.
+
+"""
+
+import inspect
+from .. import exc, util, event, inspection
+from .base import SchemaEventTarget, DialectKWArgs
+from . import visitors
+from . import type_api
+from .base import _bind_or_error, ColumnCollection
+from .elements import ClauseElement, ColumnClause, _truncated_label, \
+ _as_truncated, TextClause, _literal_as_text,\
+ ColumnElement, _find_columns, quoted_name
+from .selectable import TableClause
+import collections
+import sqlalchemy
+from . import ddl
+
+RETAIN_SCHEMA = util.symbol('retain_schema')
+
+
+def _get_table_key(name, schema):
+ if schema is None:
+ return name
+ else:
+ return schema + "." + name
+
+
+
+@inspection._self_inspects
+class SchemaItem(SchemaEventTarget, visitors.Visitable):
+ """Base class for items that define a database schema."""
+
+ __visit_name__ = 'schema_item'
+
+ def _execute_on_connection(self, connection, multiparams, params):
+ return connection._execute_default(self, multiparams, params)
+
+ def _init_items(self, *args):
+ """Initialize the list of child items for this SchemaItem."""
+
+ for item in args:
+ if item is not None:
+ item._set_parent_with_dispatch(self)
+
+ def get_children(self, **kwargs):
+ """used to allow SchemaVisitor access"""
+ return []
+
+ def __repr__(self):
+ return util.generic_repr(self)
+
+ @property
+ @util.deprecated('0.9', 'Use ``<obj>.name.quote``')
+ def quote(self):
+ """Return the value of the ``quote`` flag passed
+ to this schema object, for those schema items which
+ have a ``name`` field.
+
+ """
+
+ return self.name.quote
+
+ @util.memoized_property
+ def info(self):
+ """Info dictionary associated with the object, allowing user-defined
+ data to be associated with this :class:`.SchemaItem`.
+
+ The dictionary is automatically generated when first accessed.
+ It can also be specified in the constructor of some objects,
+ such as :class:`.Table` and :class:`.Column`.
+
+ """
+ return {}
+
+ def _schema_item_copy(self, schema_item):
+ if 'info' in self.__dict__:
+ schema_item.info = self.info.copy()
+ schema_item.dispatch._update(self.dispatch)
+ return schema_item
+
+
+class Table(DialectKWArgs, SchemaItem, TableClause):
+ """Represent a table in a database.
+
+ e.g.::
+
+ mytable = Table("mytable", metadata,
+ Column('mytable_id', Integer, primary_key=True),
+ Column('value', String(50))
+ )
+
+ The :class:`.Table` object constructs a unique instance of itself based
+ on its name and optional schema name within the given
+ :class:`.MetaData` object. Calling the :class:`.Table`
+ constructor with the same name and same :class:`.MetaData` argument
+ a second time will return the *same* :class:`.Table` object - in this way
+ the :class:`.Table` constructor acts as a registry function.
+
+ .. seealso::
+
+ :ref:`metadata_describing` - Introduction to database metadata
+
+ Constructor arguments are as follows:
+
+ :param name: The name of this table as represented in the database.
+
+ The table name, along with the value of the ``schema`` parameter,
+ forms a key which uniquely identifies this :class:`.Table` within
+ the owning :class:`.MetaData` collection.
+ Additional calls to :class:`.Table` with the same name, metadata,
+ and schema name will return the same :class:`.Table` object.
+
+ Names which contain no upper case characters
+ will be treated as case insensitive names, and will not be quoted
+ unless they are a reserved word or contain special characters.
+ A name with any number of upper case characters is considered
+ to be case sensitive, and will be sent as quoted.
+
+ To enable unconditional quoting for the table name, specify the flag
+ ``quote=True`` to the constructor, or use the :class:`.quoted_name`
+ construct to specify the name.
+
+ :param metadata: a :class:`.MetaData` object which will contain this
+ table. The metadata is used as a point of association of this table
+ with other tables which are referenced via foreign key. It also
+ may be used to associate this table with a particular
+ :class:`.Connectable`.
+
+ :param \*args: Additional positional arguments are used primarily
+ to add the list of :class:`.Column` objects contained within this
+ table. Similar to the style of a CREATE TABLE statement, other
+ :class:`.SchemaItem` constructs may be added here, including
+ :class:`.PrimaryKeyConstraint`, and :class:`.ForeignKeyConstraint`.
+
+ :param autoload: Defaults to False: the Columns for this table should
+ be reflected from the database. Usually there will be no Column
+ objects in the constructor if this property is set.
+
+ :param autoload_replace: If ``True``, when using ``autoload=True``
+ and ``extend_existing=True``,
+ replace ``Column`` objects already present in the ``Table`` that's
+ in the ``MetaData`` registry with
+ what's reflected. Otherwise, all existing columns will be
+ excluded from the reflection process. Note that this does
+ not impact ``Column`` objects specified in the same call to ``Table``
+ which includes ``autoload``, those always take precedence.
+ Defaults to ``True``.
+
+ .. versionadded:: 0.7.5
+
+ :param autoload_with: If autoload==True, this is an optional Engine
+ or Connection instance to be used for the table reflection. If
+ ``None``, the underlying MetaData's bound connectable will be used.
+
+ :param extend_existing: When ``True``, indicates that if this
+ :class:`.Table` is already present in the given :class:`.MetaData`,
+ apply further arguments within the constructor to the existing
+ :class:`.Table`.
+
+ If ``extend_existing`` or ``keep_existing`` are not set, an error is
+ raised if additional table modifiers are specified when
+ the given :class:`.Table` is already present in the :class:`.MetaData`.
+
+ .. versionchanged:: 0.7.4
+ ``extend_existing`` will work in conjunction
+ with ``autoload=True`` to run a new reflection operation against
+ the database; new :class:`.Column` objects will be produced
+ from database metadata to replace those existing with the same
+ name, and additional :class:`.Column` objects not present
+ in the :class:`.Table` will be added.
+
+ As is always the case with ``autoload=True``, :class:`.Column`
+ objects can be specified in the same :class:`.Table` constructor,
+ which will take precedence. I.e.::
+
+ Table("mytable", metadata,
+ Column('y', Integer),
+ extend_existing=True,
+ autoload=True,
+ autoload_with=engine
+ )
+
+ The above will overwrite all columns within ``mytable`` which
+ are present in the database, except for ``y`` which will be used as is
+ from the above definition. If the ``autoload_replace`` flag
+ is set to False, no existing columns will be replaced.
+
+ :param implicit_returning: True by default - indicates that
+ RETURNING can be used by default to fetch newly inserted primary key
+ values, for backends which support this. Note that
+ create_engine() also provides an implicit_returning flag.
+
+ :param include_columns: A list of strings indicating a subset of
+ columns to be loaded via the ``autoload`` operation; table columns who
+ aren't present in this list will not be represented on the resulting
+ ``Table`` object. Defaults to ``None`` which indicates all columns
+ should be reflected.
+
+ :param info: Optional data dictionary which will be populated into the
+ :attr:`.SchemaItem.info` attribute of this object.
+
+ :param keep_existing: When ``True``, indicates that if this Table
+ is already present in the given :class:`.MetaData`, ignore
+ further arguments within the constructor to the existing
+ :class:`.Table`, and return the :class:`.Table` object as
+ originally created. This is to allow a function that wishes
+ to define a new :class:`.Table` on first call, but on
+ subsequent calls will return the same :class:`.Table`,
+ without any of the declarations (particularly constraints)
+ being applied a second time. Also see extend_existing.
+
+ If extend_existing or keep_existing are not set, an error is
+ raised if additional table modifiers are specified when
+ the given :class:`.Table` is already present in the :class:`.MetaData`.
+
+ :param listeners: A list of tuples of the form ``(<eventname>, <fn>)``
+ which will be passed to :func:`.event.listen` upon construction.
+ This alternate hook to :func:`.event.listen` allows the establishment
+ of a listener function specific to this :class:`.Table` before
+ the "autoload" process begins. Particularly useful for
+ the :meth:`.DDLEvents.column_reflect` event::
+
+ def listen_for_reflect(table, column_info):
+ "handle the column reflection event"
+ # ...
+
+ t = Table(
+ 'sometable',
+ autoload=True,
+ listeners=[
+ ('column_reflect', listen_for_reflect)
+ ])
+
+ :param mustexist: When ``True``, indicates that this Table must already
+ be present in the given :class:`.MetaData` collection, else
+ an exception is raised.
+
+ :param prefixes:
+ A list of strings to insert after CREATE in the CREATE TABLE
+ statement. They will be separated by spaces.
+
+ :param quote: Force quoting of this table's name on or off, corresponding
+ to ``True`` or ``False``. When left at its default of ``None``,
+ the table identifier will be quoted according to whether the name is
+ case sensitive (identifiers with at least one upper case character are
+ treated as case sensitive), or if it's a reserved word. This flag
+ is only needed to force quoting of a reserved word which is not known
+ by the SQLAlchemy dialect.
+
+ :param quote_schema: same as 'quote' but applies to the schema identifier.
+
+ :param schema: The schema name for this table, which is required if
+ the table resides in a schema other than the default selected schema
+ for the engine's database connection. Defaults to ``None``.
+
+ The quoting rules for the schema name are the same as those for the
+ ``name`` parameter, in that quoting is applied for reserved words or
+ case-sensitive names; to enable unconditional quoting for the
+ schema name, specify the flag
+ ``quote_schema=True`` to the constructor, or use the :class:`.quoted_name`
+ construct to specify the name.
+
+ :param useexisting: Deprecated. Use extend_existing.
+
+ :param \**kw: Additional keyword arguments not mentioned above are
+ dialect specific, and passed in the form ``<dialectname>_<argname>``.
+ See the documentation regarding an individual dialect at
+ :ref:`dialect_toplevel` for detail on documented arguments.
+
+ """
+
+ __visit_name__ = 'table'
+
+ def __new__(cls, *args, **kw):
+ if not args:
+ # python3k pickle seems to call this
+ return object.__new__(cls)
+
+ try:
+ name, metadata, args = args[0], args[1], args[2:]
+ except IndexError:
+ raise TypeError("Table() takes at least two arguments")
+
+ schema = kw.get('schema', None)
+ if schema is None:
+ schema = metadata.schema
+ keep_existing = kw.pop('keep_existing', False)
+ extend_existing = kw.pop('extend_existing', False)
+ if 'useexisting' in kw:
+ msg = "useexisting is deprecated. Use extend_existing."
+ util.warn_deprecated(msg)
+ if extend_existing:
+ msg = "useexisting is synonymous with extend_existing."
+ raise exc.ArgumentError(msg)
+ extend_existing = kw.pop('useexisting', False)
+
+ if keep_existing and extend_existing:
+ msg = "keep_existing and extend_existing are mutually exclusive."
+ raise exc.ArgumentError(msg)
+
+ mustexist = kw.pop('mustexist', False)
+ key = _get_table_key(name, schema)
+ if key in metadata.tables:
+ if not keep_existing and not extend_existing and bool(args):
+ raise exc.InvalidRequestError(
+ "Table '%s' is already defined for this MetaData "
+ "instance. Specify 'extend_existing=True' "
+ "to redefine "
+ "options and columns on an "
+ "existing Table object." % key)
+ table = metadata.tables[key]
+ if extend_existing:
+ table._init_existing(*args, **kw)
+ return table
+ else:
+ if mustexist:
+ raise exc.InvalidRequestError(
+ "Table '%s' not defined" % (key))
+ table = object.__new__(cls)
+ table.dispatch.before_parent_attach(table, metadata)
+ metadata._add_table(name, schema, table)
+ try:
+ table._init(name, metadata, *args, **kw)
+ table.dispatch.after_parent_attach(table, metadata)
+ return table
+ except:
+ #metadata._remove_table(name, schema)
+ raise
+
+
+ @property
+ @util.deprecated('0.9', 'Use ``table.schema.quote``')
+ def quote_schema(self):
+ """Return the value of the ``quote_schema`` flag passed
+ to this :class:`.Table`.
+ """
+
+ return self.schema.quote
+
+ def __init__(self, *args, **kw):
+ """Constructor for :class:`~.schema.Table`.
+
+ This method is a no-op. See the top-level
+ documentation for :class:`~.schema.Table`
+ for constructor arguments.
+
+ """
+ # __init__ is overridden to prevent __new__ from
+ # calling the superclass constructor.
+
+ def _init(self, name, metadata, *args, **kwargs):
+ super(Table, self).__init__(quoted_name(name, kwargs.pop('quote', None)))
+ self.metadata = metadata
+
+ self.schema = kwargs.pop('schema', None)
+ if self.schema is None:
+ self.schema = metadata.schema
+ else:
+ quote_schema = kwargs.pop('quote_schema', None)
+ self.schema = quoted_name(self.schema, quote_schema)
+
+ self.indexes = set()
+ self.constraints = set()
+ self._columns = ColumnCollection()
+ PrimaryKeyConstraint()._set_parent_with_dispatch(self)
+ self.foreign_keys = set()
+ self._extra_dependencies = set()
+ if self.schema is not None:
+ self.fullname = "%s.%s" % (self.schema, self.name)
+ else:
+ self.fullname = self.name
+
+ autoload = kwargs.pop('autoload', False)
+ autoload_with = kwargs.pop('autoload_with', None)
+ # this argument is only used with _init_existing()
+ kwargs.pop('autoload_replace', True)
+ include_columns = kwargs.pop('include_columns', None)
+
+ self.implicit_returning = kwargs.pop('implicit_returning', True)
+
+ if 'info' in kwargs:
+ self.info = kwargs.pop('info')
+ if 'listeners' in kwargs:
+ listeners = kwargs.pop('listeners')
+ for evt, fn in listeners:
+ event.listen(self, evt, fn)
+
+ self._prefixes = kwargs.pop('prefixes', [])
+
+ self._extra_kwargs(**kwargs)
+
+ # load column definitions from the database if 'autoload' is defined
+ # we do it after the table is in the singleton dictionary to support
+ # circular foreign keys
+ if autoload:
+ self._autoload(metadata, autoload_with, include_columns)
+
+ # initialize all the column, etc. objects. done after reflection to
+ # allow user-overrides
+ self._init_items(*args)
+
+ def _autoload(self, metadata, autoload_with, include_columns,
+ exclude_columns=()):
+
+ if autoload_with:
+ autoload_with.run_callable(
+ autoload_with.dialect.reflecttable,
+ self, include_columns, exclude_columns
+ )
+ else:
+ bind = _bind_or_error(metadata,
+ msg="No engine is bound to this Table's MetaData. "
+ "Pass an engine to the Table via "
+ "autoload_with=<someengine>, "
+ "or associate the MetaData with an engine via "
+ "metadata.bind=<someengine>")
+ bind.run_callable(
+ bind.dialect.reflecttable,
+ self, include_columns, exclude_columns
+ )
+
+ @property
+ def _sorted_constraints(self):
+ """Return the set of constraints as a list, sorted by creation
+ order.
+
+ """
+ return sorted(self.constraints, key=lambda c: c._creation_order)
+
+ def _init_existing(self, *args, **kwargs):
+ autoload = kwargs.pop('autoload', False)
+ autoload_with = kwargs.pop('autoload_with', None)
+ autoload_replace = kwargs.pop('autoload_replace', True)
+ schema = kwargs.pop('schema', None)
+ if schema and schema != self.schema:
+ raise exc.ArgumentError(
+ "Can't change schema of existing table from '%s' to '%s'",
+ (self.schema, schema))
+
+ include_columns = kwargs.pop('include_columns', None)
+
+ if include_columns is not None:
+ for c in self.c:
+ if c.name not in include_columns:
+ self._columns.remove(c)
+
+ for key in ('quote', 'quote_schema'):
+ if key in kwargs:
+ raise exc.ArgumentError(
+ "Can't redefine 'quote' or 'quote_schema' arguments")
+
+ if 'info' in kwargs:
+ self.info = kwargs.pop('info')
+
+ if autoload:
+ if not autoload_replace:
+ exclude_columns = [c.name for c in self.c]
+ else:
+ exclude_columns = ()
+ self._autoload(
+ self.metadata, autoload_with, include_columns, exclude_columns)
+
+ self._extra_kwargs(**kwargs)
+ self._init_items(*args)
+
+ def _extra_kwargs(self, **kwargs):
+ self._validate_dialect_kwargs(kwargs)
+
+ def _init_collections(self):
+ pass
+
+ @util.memoized_property
+ def _autoincrement_column(self):
+ for col in self.primary_key:
+ if col.autoincrement and \
+ col.type._type_affinity is not None and \
+ issubclass(col.type._type_affinity, type_api.INTEGERTYPE._type_affinity) and \
+ (not col.foreign_keys or col.autoincrement == 'ignore_fk') and \
+ isinstance(col.default, (type(None), Sequence)) and \
+ (col.server_default is None or col.server_default.reflected):
+ return col
+
+ @property
+ def key(self):
+ """Return the 'key' for this :class:`.Table`.
+
+ This value is used as the dictionary key within the
+ :attr:`.MetaData.tables` collection. It is typically the same
+ as that of :attr:`.Table.name` for a table with no :attr:`.Table.schema`
+ set; otherwise it is typically of the form ``schemaname.tablename``.
+
+ """
+ return _get_table_key(self.name, self.schema)
+
+ def __repr__(self):
+ return "Table(%s)" % ', '.join(
+ [repr(self.name)] + [repr(self.metadata)] +
+ [repr(x) for x in self.columns] +
+ ["%s=%s" % (k, repr(getattr(self, k))) for k in ['schema']])
+
+ def __str__(self):
+ return _get_table_key(self.description, self.schema)
+
+ @property
+ def bind(self):
+ """Return the connectable associated with this Table."""
+
+ return self.metadata and self.metadata.bind or None
+
+ def add_is_dependent_on(self, table):
+ """Add a 'dependency' for this Table.
+
+ This is another Table object which must be created
+ first before this one can, or dropped after this one.
+
+ Usually, dependencies between tables are determined via
+ ForeignKey objects. However, for other situations that
+ create dependencies outside of foreign keys (rules, inheriting),
+ this method can manually establish such a link.
+
+ """
+ self._extra_dependencies.add(table)
+
+ def append_column(self, column):
+ """Append a :class:`~.schema.Column` to this :class:`~.schema.Table`.
+
+ The "key" of the newly added :class:`~.schema.Column`, i.e. the
+ value of its ``.key`` attribute, will then be available
+ in the ``.c`` collection of this :class:`~.schema.Table`, and the
+ column definition will be included in any CREATE TABLE, SELECT,
+ UPDATE, etc. statements generated from this :class:`~.schema.Table`
+ construct.
+
+ Note that this does **not** change the definition of the table
+ as it exists within any underlying database, assuming that
+ table has already been created in the database. Relational
+ databases support the addition of columns to existing tables
+ using the SQL ALTER command, which would need to be
+ emitted for an already-existing table that doesn't contain
+ the newly added column.
+
+ """
+
+ column._set_parent_with_dispatch(self)
+
+ def append_constraint(self, constraint):
+ """Append a :class:`~.schema.Constraint` to this
+ :class:`~.schema.Table`.
+
+ This has the effect of the constraint being included in any
+ future CREATE TABLE statement, assuming specific DDL creation
+ events have not been associated with the given
+ :class:`~.schema.Constraint` object.
+
+ Note that this does **not** produce the constraint within the
+ relational database automatically, for a table that already exists
+ in the database. To add a constraint to an
+ existing relational database table, the SQL ALTER command must
+ be used. SQLAlchemy also provides the
+ :class:`.AddConstraint` construct which can produce this SQL when
+ invoked as an executable clause.
+
+ """
+
+ constraint._set_parent_with_dispatch(self)
+
+ def append_ddl_listener(self, event_name, listener):
+ """Append a DDL event listener to this ``Table``.
+
+ .. deprecated:: 0.7
+ See :class:`.DDLEvents`.
+
+ """
+
+ def adapt_listener(target, connection, **kw):
+ listener(event_name, target, connection)
+
+ event.listen(self, "" + event_name.replace('-', '_'), adapt_listener)
+
+ def _set_parent(self, metadata):
+ metadata._add_table(self.name, self.schema, self)
+ self.metadata = metadata
+
+ def get_children(self, column_collections=True,
+ schema_visitor=False, **kw):
+ if not schema_visitor:
+ return TableClause.get_children(
+ self, column_collections=column_collections, **kw)
+ else:
+ if column_collections:
+ return list(self.columns)
+ else:
+ return []
+
+ def exists(self, bind=None):
+ """Return True if this table exists."""
+
+ if bind is None:
+ bind = _bind_or_error(self)
+
+ return bind.run_callable(bind.dialect.has_table,
+ self.name, schema=self.schema)
+
+ def create(self, bind=None, checkfirst=False):
+ """Issue a ``CREATE`` statement for this
+ :class:`.Table`, using the given :class:`.Connectable`
+ for connectivity.
+
+ .. seealso::
+
+ :meth:`.MetaData.create_all`.
+
+ """
+
+ if bind is None:
+ bind = _bind_or_error(self)
+ bind._run_visitor(ddl.SchemaGenerator,
+ self,
+ checkfirst=checkfirst)
+
+ def drop(self, bind=None, checkfirst=False):
+ """Issue a ``DROP`` statement for this
+ :class:`.Table`, using the given :class:`.Connectable`
+ for connectivity.
+
+ .. seealso::
+
+ :meth:`.MetaData.drop_all`.
+
+ """
+ if bind is None:
+ bind = _bind_or_error(self)
+ bind._run_visitor(ddl.SchemaDropper,
+ self,
+ checkfirst=checkfirst)
+
+ def tometadata(self, metadata, schema=RETAIN_SCHEMA):
+ """Return a copy of this :class:`.Table` associated with a different
+ :class:`.MetaData`.
+
+ E.g.::
+
+ some_engine = create_engine("sqlite:///some.db")
+
+ # create two metadata
+ meta1 = MetaData()
+ meta2 = MetaData()
+
+ # load 'users' from the sqlite engine
+ users_table = Table('users', meta1, autoload=True,
+ autoload_with=some_engine)
+
+ # create the same Table object for the plain metadata
+ users_table_2 = users_table.tometadata(meta2)
+
+ :param metadata: Target :class:`.MetaData` object.
+ :param schema: Optional string name of a target schema, or
+ ``None`` for no schema. The :class:`.Table` object will be
+ given this schema name upon copy. Defaults to the special
+ symbol :attr:`.RETAIN_SCHEMA` which indicates no change should be
+ made to the schema name of the resulting :class:`.Table`.
+
+ """
+
+ if schema is RETAIN_SCHEMA:
+ schema = self.schema
+ elif schema is None:
+ schema = metadata.schema
+ key = _get_table_key(self.name, schema)
+ if key in metadata.tables:
+ util.warn("Table '%s' already exists within the given "
+ "MetaData - not copying." % self.description)
+ return metadata.tables[key]
+
+ args = []
+ for c in self.columns:
+ args.append(c.copy(schema=schema))
+ table = Table(
+ self.name, metadata, schema=schema,
+ *args, **self.kwargs
+ )
+ for c in self.constraints:
+ table.append_constraint(c.copy(schema=schema, target_table=table))
+
+ for index in self.indexes:
+ # skip indexes that would be generated
+ # by the 'index' flag on Column
+ if len(index.columns) == 1 and \
+ list(index.columns)[0].index:
+ continue
+ Index(index.name,
+ unique=index.unique,
+ *[table.c[col] for col in index.columns.keys()],
+ **index.kwargs)
+ return self._schema_item_copy(table)
+
+
+class Column(SchemaItem, ColumnClause):
+ """Represents a column in a database table."""
+
+ __visit_name__ = 'column'
+
+ def __init__(self, *args, **kwargs):
+ """
+ Construct a new ``Column`` object.
+
+ :param name: The name of this column as represented in the database.
+ This argument may be the first positional argument, or specified
+ via keyword.
+
+ Names which contain no upper case characters
+ will be treated as case insensitive names, and will not be quoted
+ unless they are a reserved word. Names with any number of upper
+ case characters will be quoted and sent exactly. Note that this
+ behavior applies even for databases which standardize upper
+ case names as case insensitive such as Oracle.
+
+ The name field may be omitted at construction time and applied
+ later, at any time before the Column is associated with a
+ :class:`.Table`. This is to support convenient
+ usage within the :mod:`~sqlalchemy.ext.declarative` extension.
+
+ :param type\_: The column's type, indicated using an instance which
+ subclasses :class:`~sqlalchemy.types.TypeEngine`. If no arguments
+ are required for the type, the class of the type can be sent
+ as well, e.g.::
+
+ # use a type with arguments
+ Column('data', String(50))
+
+ # use no arguments
+ Column('level', Integer)
+
+ The ``type`` argument may be the second positional argument
+ or specified by keyword.
+
+ If the ``type`` is ``None`` or is omitted, it will first default to the special
+ type :class:`.NullType`. If and when this :class:`.Column` is
+ made to refer to another column using :class:`.ForeignKey`
+ and/or :class:`.ForeignKeyConstraint`, the type of the remote-referenced
+ column will be copied to this column as well, at the moment that
+ the foreign key is resolved against that remote :class:`.Column`
+ object.
+
+ .. versionchanged:: 0.9.0
+ Support for propagation of type to a :class:`.Column` from its
+ :class:`.ForeignKey` object has been improved and should be
+ more reliable and timely.
+
+ :param \*args: Additional positional arguments include various
+ :class:`.SchemaItem` derived constructs which will be applied
+ as options to the column. These include instances of
+ :class:`.Constraint`, :class:`.ForeignKey`, :class:`.ColumnDefault`,
+ and :class:`.Sequence`. In some cases an equivalent keyword
+ argument is available such as ``server_default``, ``default``
+ and ``unique``.
+
+ :param autoincrement: This flag may be set to ``False`` to
+ indicate an integer primary key column that should not be
+ considered to be the "autoincrement" column, that is
+ the integer primary key column which generates values
+ implicitly upon INSERT and whose value is usually returned
+ via the DBAPI cursor.lastrowid attribute. It defaults
+ to ``True`` to satisfy the common use case of a table
+ with a single integer primary key column. If the table
+ has a composite primary key consisting of more than one
+ integer column, set this flag to True only on the
+ column that should be considered "autoincrement".
+
+ The setting *only* has an effect for columns which are:
+
+ * Integer derived (i.e. INT, SMALLINT, BIGINT).
+
+ * Part of the primary key
+
+ * Are not referenced by any foreign keys, unless
+ the value is specified as ``'ignore_fk'``
+
+ .. versionadded:: 0.7.4
+
+ * have no server side or client side defaults (with the exception
+ of Postgresql SERIAL).
+
+ The setting has these two effects on columns that meet the
+ above criteria:
+
+ * DDL issued for the column will include database-specific
+ keywords intended to signify this column as an
+ "autoincrement" column, such as AUTO INCREMENT on MySQL,
+ SERIAL on Postgresql, and IDENTITY on MS-SQL. It does
+ *not* issue AUTOINCREMENT for SQLite since this is a
+ special SQLite flag that is not required for autoincrementing
+ behavior. See the SQLite dialect documentation for
+ information on SQLite's AUTOINCREMENT.
+
+ * The column will be considered to be available as
+ cursor.lastrowid or equivalent, for those dialects which
+ "post fetch" newly inserted identifiers after a row has
+ been inserted (SQLite, MySQL, MS-SQL). It does not have
+ any effect in this regard for databases that use sequences
+ to generate primary key identifiers (i.e. Firebird, Postgresql,
+ Oracle).
+
+ .. versionchanged:: 0.7.4
+ ``autoincrement`` accepts a special value ``'ignore_fk'``
+ to indicate autoincrementing status regardless of foreign
+ key references. This applies to certain composite foreign key
+ setups, such as the one demonstrated in the ORM documentation
+ at :ref:`post_update`.
+
+ :param default: A scalar, Python callable, or
+ :class:`.ColumnElement` expression representing the
+ *default value* for this column, which will be invoked upon insert
+ if this column is otherwise not specified in the VALUES clause of
+ the insert. This is a shortcut to using :class:`.ColumnDefault` as
+ a positional argument; see that class for full detail on the
+ structure of the argument.
+
+ Contrast this argument to ``server_default`` which creates a
+ default generator on the database side.
+
+ :param doc: optional String that can be used by the ORM or similar
+ to document attributes. This attribute does not render SQL
+ comments (a future attribute 'comment' will achieve that).
+
+ :param key: An optional string identifier which will identify this
+ ``Column`` object on the :class:`.Table`. When a key is provided,
+ this is the only identifier referencing the ``Column`` within the
+ application, including ORM attribute mapping; the ``name`` field
+ is used only when rendering SQL.
+
+ :param index: When ``True``, indicates that the column is indexed.
+ This is a shortcut for using a :class:`.Index` construct on the
+ table. To specify indexes with explicit names or indexes that
+ contain multiple columns, use the :class:`.Index` construct
+ instead.
+
+ :param info: Optional data dictionary which will be populated into the
+ :attr:`.SchemaItem.info` attribute of this object.
+
+ :param nullable: If set to the default of ``True``, indicates the
+ column will be rendered as allowing NULL, else it's rendered as
+ NOT NULL. This parameter is only used when issuing CREATE TABLE
+ statements.
+
+ :param onupdate: A scalar, Python callable, or
+ :class:`~sqlalchemy.sql.expression.ClauseElement` representing a
+ default value to be applied to the column within UPDATE
+ statements, which will be invoked upon update if this column is not
+ present in the SET clause of the update. This is a shortcut to
+ using :class:`.ColumnDefault` as a positional argument with
+ ``for_update=True``.
+
+ :param primary_key: If ``True``, marks this column as a primary key
+ column. Multiple columns can have this flag set to specify
+ composite primary keys. As an alternative, the primary key of a
+ :class:`.Table` can be specified via an explicit
+ :class:`.PrimaryKeyConstraint` object.
+
+ :param server_default: A :class:`.FetchedValue` instance, str, Unicode
+ or :func:`~sqlalchemy.sql.expression.text` construct representing
+ the DDL DEFAULT value for the column.
+
+ String types will be emitted as-is, surrounded by single quotes::
+
+ Column('x', Text, server_default="val")
+
+ x TEXT DEFAULT 'val'
+
+ A :func:`~sqlalchemy.sql.expression.text` expression will be
+ rendered as-is, without quotes::
+
+ Column('y', DateTime, server_default=text('NOW()'))
+
+ y DATETIME DEFAULT NOW()
+
+ Strings and text() will be converted into a :class:`.DefaultClause`
+ object upon initialization.
+
+ Use :class:`.FetchedValue` to indicate that an already-existing
+ column will generate a default value on the database side which
+ will be available to SQLAlchemy for post-fetch after inserts. This
+ construct does not specify any DDL and the implementation is left
+ to the database, such as via a trigger.
+
+ :param server_onupdate: A :class:`.FetchedValue` instance
+ representing a database-side default generation function. This
+ indicates to SQLAlchemy that a newly generated value will be
+ available after updates. This construct does not specify any DDL
+ and the implementation is left to the database, such as via a
+ trigger.
+
+ :param quote: Force quoting of this column's name on or off,
+ corresponding to ``True`` or ``False``. When left at its default
+ of ``None``, the column identifier will be quoted according to
+ whether the name is case sensitive (identifiers with at least one
+ upper case character are treated as case sensitive), or if it's a
+ reserved word. This flag is only needed to force quoting of a
+ reserved word which is not known by the SQLAlchemy dialect.
+
+ :param unique: When ``True``, indicates that this column contains a
+ unique constraint, or if ``index`` is ``True`` as well, indicates
+ that the :class:`.Index` should be created with the unique flag.
+ To specify multiple columns in the constraint/index or to specify
+ an explicit name, use the :class:`.UniqueConstraint` or
+ :class:`.Index` constructs explicitly.
+
+ :param system: When ``True``, indicates this is a "system" column,
+ that is a column which is automatically made available by the
+ database, and should not be included in the columns list for a
+ ``CREATE TABLE`` statement.
+
+ For more elaborate scenarios where columns should be conditionally
+ rendered differently on different backends, consider custom
+ compilation rules for :class:`.CreateColumn`.
+
+ ..versionadded:: 0.8.3 Added the ``system=True`` parameter to
+ :class:`.Column`.
+
+ """
+
+ name = kwargs.pop('name', None)
+ type_ = kwargs.pop('type_', None)
+ args = list(args)
+ if args:
+ if isinstance(args[0], util.string_types):
+ if name is not None:
+ raise exc.ArgumentError(
+ "May not pass name positionally and as a keyword.")
+ name = args.pop(0)
+ if args:
+ coltype = args[0]
+
+ if hasattr(coltype, "_sqla_type"):
+ if type_ is not None:
+ raise exc.ArgumentError(
+ "May not pass type_ positionally and as a keyword.")
+ type_ = args.pop(0)
+
+ if name is not None:
+ name = quoted_name(name, kwargs.pop('quote', None))
+ elif "quote" in kwargs:
+ raise exc.ArgumentError("Explicit 'name' is required when "
+ "sending 'quote' argument")
+
+ super(Column, self).__init__(name, type_)
+ self.key = kwargs.pop('key', name)
+ self.primary_key = kwargs.pop('primary_key', False)
+ self.nullable = kwargs.pop('nullable', not self.primary_key)
+ self.default = kwargs.pop('default', None)
+ self.server_default = kwargs.pop('server_default', None)
+ self.server_onupdate = kwargs.pop('server_onupdate', None)
+
+ # these default to None because .index and .unique is *not*
+ # an informational flag about Column - there can still be an
+ # Index or UniqueConstraint referring to this Column.
+ self.index = kwargs.pop('index', None)
+ self.unique = kwargs.pop('unique', None)
+
+ self.system = kwargs.pop('system', False)
+ self.doc = kwargs.pop('doc', None)
+ self.onupdate = kwargs.pop('onupdate', None)
+ self.autoincrement = kwargs.pop('autoincrement', True)
+ self.constraints = set()
+ self.foreign_keys = set()
+
+ # check if this Column is proxying another column
+ if '_proxies' in kwargs:
+ self._proxies = kwargs.pop('_proxies')
+ # otherwise, add DDL-related events
+ elif isinstance(self.type, SchemaEventTarget):
+ self.type._set_parent_with_dispatch(self)
+
+ if self.default is not None:
+ if isinstance(self.default, (ColumnDefault, Sequence)):
+ args.append(self.default)
+ else:
+ if getattr(self.type, '_warn_on_bytestring', False):
+ if isinstance(self.default, util.binary_type):
+ util.warn("Unicode column received non-unicode "
+ "default value.")
+ args.append(ColumnDefault(self.default))
+
+ if self.server_default is not None:
+ if isinstance(self.server_default, FetchedValue):
+ args.append(self.server_default._as_for_update(False))
+ else:
+ args.append(DefaultClause(self.server_default))
+
+ if self.onupdate is not None:
+ if isinstance(self.onupdate, (ColumnDefault, Sequence)):
+ args.append(self.onupdate)
+ else:
+ args.append(ColumnDefault(self.onupdate, for_update=True))
+
+ if self.server_onupdate is not None:
+ if isinstance(self.server_onupdate, FetchedValue):
+ args.append(self.server_onupdate._as_for_update(True))
+ else:
+ args.append(DefaultClause(self.server_onupdate,
+ for_update=True))
+ self._init_items(*args)
+
+ util.set_creation_order(self)
+
+ if 'info' in kwargs:
+ self.info = kwargs.pop('info')
+
+ if kwargs:
+ raise exc.ArgumentError(
+ "Unknown arguments passed to Column: " + repr(list(kwargs)))
+
+# @property
+# def quote(self):
+# return getattr(self.name, "quote", None)
+
+ def __str__(self):
+ if self.name is None:
+ return "(no name)"
+ elif self.table is not None:
+ if self.table.named_with_column:
+ return (self.table.description + "." + self.description)
+ else:
+ return self.description
+ else:
+ return self.description
+
+ def references(self, column):
+ """Return True if this Column references the given column via foreign
+ key."""
+
+ for fk in self.foreign_keys:
+ if fk.column.proxy_set.intersection(column.proxy_set):
+ return True
+ else:
+ return False
+
    def append_foreign_key(self, fk):
        """Attach a :class:`.ForeignKey` to this :class:`.Column`,
        firing attachment events so the FK is wired into any parent
        :class:`.Table`."""
        fk._set_parent_with_dispatch(self)
+
+ def __repr__(self):
+ kwarg = []
+ if self.key != self.name:
+ kwarg.append('key')
+ if self.primary_key:
+ kwarg.append('primary_key')
+ if not self.nullable:
+ kwarg.append('nullable')
+ if self.onupdate:
+ kwarg.append('onupdate')
+ if self.default:
+ kwarg.append('default')
+ if self.server_default:
+ kwarg.append('server_default')
+ return "Column(%s)" % ', '.join(
+ [repr(self.name)] + [repr(self.type)] +
+ [repr(x) for x in self.foreign_keys if x is not None] +
+ [repr(x) for x in self.constraints] +
+ [(self.table is not None and "table=<%s>" %
+ self.table.description or "table=None")] +
+ ["%s=%s" % (k, repr(getattr(self, k))) for k in kwarg])
+
    def _set_parent(self, table):
        # Called when this Column is appended to a Table: validates
        # name/key, replaces any prior column with the same key, and wires
        # up index/unique shortcuts plus any pending string-based FKs
        # that were waiting for this (table, column) to exist.
        if not self.name:
            raise exc.ArgumentError(
                "Column must be constructed with a non-blank name or "
                "assign a non-blank .name before adding to a Table.")
        if self.key is None:
            # .key defaults to the name when not given explicitly
            self.key = self.name

        existing = getattr(self, 'table', None)
        if existing is not None and existing is not table:
            raise exc.ArgumentError(
                "Column object already assigned to Table '%s'" %
                existing.description)

        if self.key in table._columns:
            col = table._columns.get(self.key)
            if col is not self:
                # a column with the same key is being replaced; detach the
                # old column's FK markers and their constraints from the
                # table before swapping in the new column
                for fk in col.foreign_keys:
                    table.foreign_keys.remove(fk)
                    if fk.constraint in table.constraints:
                        # this might have been removed
                        # already, if it's a composite constraint
                        # and more than one col being replaced
                        table.constraints.remove(fk.constraint)

        table._columns.replace(self)

        if self.primary_key:
            table.primary_key._replace(self)
            # the autoincrement column may have changed; drop the memo
            Table._autoincrement_column._reset(table)
        elif self.key in table.primary_key:
            raise exc.ArgumentError(
                "Trying to redefine primary-key column '%s' as a "
                "non-primary-key column on table '%s'" % (
                    self.key, table.fullname))
        self.table = table

        if self.index:
            # index=True is a shortcut; a named or multi-column index must
            # be an explicit Index construct
            if isinstance(self.index, util.string_types):
                raise exc.ArgumentError(
                    "The 'index' keyword argument on Column is boolean only. "
                    "To create indexes with a specific name, create an "
                    "explicit Index object external to the Table.")
            Index(_truncated_label('ix_%s' % self._label),
                  self, unique=bool(self.unique))
        elif self.unique:
            if isinstance(self.unique, util.string_types):
                raise exc.ArgumentError(
                    "The 'unique' keyword argument on Column is boolean "
                    "only. To create unique constraints or indexes with a "
                    "specific name, append an explicit UniqueConstraint to "
                    "the Table's list of elements, or create an explicit "
                    "Index object external to the Table.")
            table.append_constraint(UniqueConstraint(self.key))

        # string-spec ForeignKeys pointing at this (table, column) were
        # memoized on the MetaData; resolve them now that the target exists
        fk_key = (table.key, self.key)
        if fk_key in self.table.metadata._fk_memos:
            for fk in self.table.metadata._fk_memos[fk_key]:
                fk._set_remote_table(table)
+
    def _on_table_attach(self, fn):
        # Arrange for ``fn(column, table)`` to run when this column gains
        # a parent Table; if a table is already present, run it now as
        # well (the event still fires for any future re-attachment).
        if self.table is not None:
            fn(self, self.table)
        event.listen(self, 'after_parent_attach', fn)
+
    def copy(self, **kw):
        """Create a copy of this ``Column``, uninitialized.

        This is used in ``Table.tometadata``.

        """

        # Constraint objects plus non-constraint-bound ForeignKey objects
        args = \
            [c.copy(**kw) for c in self.constraints] + \
            [c.copy(**kw) for c in self.foreign_keys if not c.constraint]

        # types that participate in schema events (e.g. those emitting
        # their own DDL) must be copied so the new column gets its own
        # event wiring
        type_ = self.type
        if isinstance(type_, SchemaEventTarget):
            type_ = type_.copy(**kw)

        c = self._constructor(
            name=self.name,
            type_=type_,
            key=self.key,
            primary_key=self.primary_key,
            nullable=self.nullable,
            unique=self.unique,
            system=self.system,
            #quote=self.quote,
            index=self.index,
            autoincrement=self.autoincrement,
            default=self.default,
            server_default=self.server_default,
            onupdate=self.onupdate,
            server_onupdate=self.server_onupdate,
            doc=self.doc,
            *args
        )
        return self._schema_item_copy(c)
+
    def _make_proxy(self, selectable, name=None, key=None,
                    name_is_truncatable=False, **kw):
        """Create a *proxy* for this column.

        This is a copy of this ``Column`` referenced by a different parent
        (such as an alias or select statement). The column should
        be used only in select scenarios, as its full DDL/default
        information is not transferred.

        :param selectable: the parent selectable the proxy will belong to.
        :param name: optional new name for the proxied column.
        :param key: optional new ``.key``; falls back to ``name``, then to
         ``self.key``.
        :param name_is_truncatable: when True, mark the name so dialects
         may truncate it to fit identifier-length limits.

        """
        # carry the FK markers over, still bound to their original
        # constraints
        fk = [ForeignKey(f.column, _constraint=f.constraint)
              for f in self.foreign_keys]
        if name is None and self.name is None:
            raise exc.InvalidRequestError("Cannot initialize a sub-selectable"
                    " with this Column object until it's 'name' has "
                    "been assigned.")
        try:
            c = self._constructor(
                _as_truncated(name or self.name) if \
                    name_is_truncatable else (name or self.name),
                self.type,
                key=key if key else name if name else self.key,
                primary_key=self.primary_key,
                nullable=self.nullable,
                _proxies=[self], *fk)
        except TypeError:
            # re-raise with a hint: Column subclasses must provide a
            # compatible _constructor for proxying to work
            util.raise_from_cause(
                TypeError(
                    "Could not create a copy of this %r object. "
                    "Ensure the class includes a _constructor() "
                    "attribute or method which accepts the "
                    "standard Column constructor arguments, or "
                    "references the Column class itself." % self.__class__)
            )

        c.table = selectable
        selectable._columns.add(c)
        if selectable._is_clone_of is not None:
            # link the proxy to the corresponding column on the
            # original (pre-clone) selectable
            c._is_clone_of = selectable._is_clone_of.columns[c.key]
        if self.primary_key:
            selectable.primary_key.add(c)
        c.dispatch.after_parent_attach(c, selectable)
        return c
+
+ def get_children(self, schema_visitor=False, **kwargs):
+ if schema_visitor:
+ return [x for x in (self.default, self.onupdate)
+ if x is not None] + \
+ list(self.foreign_keys) + list(self.constraints)
+ else:
+ return ColumnClause.get_children(self, **kwargs)
+
+
class ForeignKey(DialectKWArgs, SchemaItem):
    """Defines a dependency between two columns.

    ``ForeignKey`` is specified as an argument to a :class:`.Column` object,
    e.g.::

        t = Table("remote_table", metadata,
            Column("remote_id", ForeignKey("main_table.id"))
        )

    Note that ``ForeignKey`` is only a marker object that defines
    a dependency between two columns. The actual constraint
    is in all cases represented by the :class:`.ForeignKeyConstraint`
    object. This object will be generated automatically when
    a ``ForeignKey`` is associated with a :class:`.Column` which
    in turn is associated with a :class:`.Table`. Conversely,
    when :class:`.ForeignKeyConstraint` is applied to a :class:`.Table`,
    ``ForeignKey`` markers are automatically generated to be
    present on each associated :class:`.Column`, which are also
    associated with the constraint object.

    Note that you cannot define a "composite" foreign key constraint,
    that is a constraint between a grouping of multiple parent/child
    columns, using ``ForeignKey`` objects. To define this grouping,
    the :class:`.ForeignKeyConstraint` object must be used, and applied
    to the :class:`.Table`. The associated ``ForeignKey`` objects
    are created automatically.

    The ``ForeignKey`` objects associated with an individual
    :class:`.Column` object are available in the `foreign_keys` collection
    of that column.

    Further examples of foreign key configuration are in
    :ref:`metadata_foreignkeys`.

    """

    __visit_name__ = 'foreign_key'

    def __init__(self, column, _constraint=None, use_alter=False, name=None,
                 onupdate=None, ondelete=None, deferrable=None,
                 initially=None, link_to_name=False, match=None,
                 **dialect_kw):
        """
        Construct a column-level FOREIGN KEY.

        The :class:`.ForeignKey` object when constructed generates a
        :class:`.ForeignKeyConstraint` which is associated with the parent
        :class:`.Table` object's collection of constraints.

        :param column: A single target column for the key relationship. A
            :class:`.Column` object or a column name as a string:
            ``tablename.columnkey`` or ``schema.tablename.columnkey``.
            ``columnkey`` is the ``key`` which has been assigned to the column
            (defaults to the column name itself), unless ``link_to_name`` is
            ``True`` in which case the rendered name of the column is used.

            .. versionadded:: 0.7.4
                Note that if the schema name is not included, and the
                underlying :class:`.MetaData` has a "schema", that value will
                be used.

        :param name: Optional string. An in-database name for the key if
            `constraint` is not provided.

        :param onupdate: Optional string. If set, emit ON UPDATE <value> when
            issuing DDL for this constraint. Typical values include CASCADE,
            DELETE and RESTRICT.

        :param ondelete: Optional string. If set, emit ON DELETE <value> when
            issuing DDL for this constraint. Typical values include CASCADE,
            DELETE and RESTRICT.

        :param deferrable: Optional bool. If set, emit DEFERRABLE or NOT
            DEFERRABLE when issuing DDL for this constraint.

        :param initially: Optional string. If set, emit INITIALLY <value> when
            issuing DDL for this constraint.

        :param link_to_name: if True, the string name given in ``column`` is
            the rendered name of the referenced column, not its locally
            assigned ``key``.

        :param use_alter: passed to the underlying
            :class:`.ForeignKeyConstraint` to indicate the constraint should be
            generated/dropped externally from the CREATE TABLE/ DROP TABLE
            statement. See that class's constructor for details.

        :param match: Optional string. If set, emit MATCH <value> when issuing
            DDL for this constraint. Typical values include SIMPLE, PARTIAL
            and FULL.

        :param \**dialect_kw: Additional keyword arguments are dialect specific,
            and passed in the form ``<dialectname>_<argname>``. The arguments
            are ultimately handled by a corresponding :class:`.ForeignKeyConstraint`.
            See the documentation regarding an individual dialect at
            :ref:`dialect_toplevel` for detail on documented arguments.

            .. versionadded:: 0.9.2

        """

        self._colspec = column
        if isinstance(self._colspec, util.string_types):
            # a string spec ("table.col"); resolution of the target is
            # deferred until both the parent Column and the target Table
            # are available on a shared MetaData
            self._table_column = None
        else:
            if hasattr(self._colspec, '__clause_element__'):
                self._table_column = self._colspec.__clause_element__()
            else:
                self._table_column = self._colspec

            if not isinstance(self._table_column, ColumnClause):
                raise exc.ArgumentError(
                    "String, Column, or Column-bound argument "
                    "expected, got %r" % self._table_column)
            elif not isinstance(self._table_column.table, (util.NoneType, TableClause)):
                raise exc.ArgumentError(
                    "ForeignKey received Column not bound "
                    "to a Table, got: %r" % self._table_column.table
                )

        # the linked ForeignKeyConstraint.
        # ForeignKey will create this when parent Column
        # is attached to a Table, *or* ForeignKeyConstraint
        # object passes itself in when creating ForeignKey
        # markers.
        self.constraint = _constraint
        self.parent = None
        self.use_alter = use_alter
        self.name = name
        self.onupdate = onupdate
        self.ondelete = ondelete
        self.deferrable = deferrable
        self.initially = initially
        self.link_to_name = link_to_name
        self.match = match
        # dialect-specific kwargs are validated later by the
        # ForeignKeyConstraint that hosts this marker
        self._unvalidated_dialect_kw = dialect_kw

    def __repr__(self):
        return "ForeignKey(%r)" % self._get_colspec()

    def copy(self, schema=None):
        """Produce a copy of this :class:`.ForeignKey` object.

        The new :class:`.ForeignKey` will not be bound
        to any :class:`.Column`.

        This method is usually used by the internal
        copy procedures of :class:`.Column`, :class:`.Table`,
        and :class:`.MetaData`.

        :param schema: The returned :class:`.ForeignKey` will
            reference the original table and column name, qualified
            by the given string schema name.

        """

        fk = ForeignKey(
            self._get_colspec(schema=schema),
            use_alter=self.use_alter,
            name=self.name,
            onupdate=self.onupdate,
            ondelete=self.ondelete,
            deferrable=self.deferrable,
            initially=self.initially,
            link_to_name=self.link_to_name,
            match=self.match,
            **self._unvalidated_dialect_kw
        )
        return self._schema_item_copy(fk)


    def _get_colspec(self, schema=None):
        """Return a string based 'column specification' for this
        :class:`.ForeignKey`.

        This is usually the equivalent of the string-based "tablename.colname"
        argument first passed to the object's constructor.

        """
        if schema:
            # requalify the spec with the given schema name
            _schema, tname, colname = self._column_tokens
            return "%s.%s.%s" % (schema, tname, colname)
        elif self._table_column is not None:
            # a Column object was given; derive the spec from it
            return "%s.%s" % (
                self._table_column.table.fullname, self._table_column.key)
        else:
            return self._colspec


    def _table_key(self):
        # the MetaData dictionary key of the referenced table, or None if
        # a Column target that isn't yet attached to a table was given
        if self._table_column is not None:
            if self._table_column.table is None:
                return None
            else:
                return self._table_column.table.key
        else:
            schema, tname, colname = self._column_tokens
            return _get_table_key(tname, schema)



    # the "tablename.colname" (optionally schema-qualified) string form
    target_fullname = property(_get_colspec)

    def references(self, table):
        """Return True if the given :class:`.Table` is referenced by this
        :class:`.ForeignKey`."""

        return table.corresponding_column(self.column) is not None

    def get_referent(self, table):
        """Return the :class:`.Column` in the given :class:`.Table`
        referenced by this :class:`.ForeignKey`.

        Returns None if this :class:`.ForeignKey` does not reference the given
        :class:`.Table`.

        """

        return table.corresponding_column(self.column)

    @util.memoized_property
    def _column_tokens(self):
        """parse a string-based _colspec into its component parts."""

        m = self._colspec.split('.')
        # NOTE(review): str.split never returns None, so this guard
        # appears unreachable; kept as-is.
        if m is None:
            raise exc.ArgumentError(
                "Invalid foreign key column specification: %s" %
                self._colspec)
        if (len(m) == 1):
            # table name only; column name is matched to the parent later
            tname = m.pop()
            colname = None
        else:
            colname = m.pop()
            tname = m.pop()

        # A FK between column 'bar' and table 'foo' can be
        # specified as 'foo', 'foo.bar', 'dbo.foo.bar',
        # 'otherdb.dbo.foo.bar'. Once we have the column name and
        # the table name, treat everything else as the schema
        # name. Some databases (e.g. Sybase) support
        # inter-database foreign keys. See tickets#1341 and --
        # indirectly related -- Ticket #594. This assumes that '.'
        # will never appear *within* any component of the FK.

        if (len(m) > 0):
            schema = '.'.join(m)
        else:
            schema = None
        return schema, tname, colname

    def _resolve_col_tokens(self):
        # Return (parent table, target table key, target column name) for
        # a string-based colspec; requires a fully-attached parent Column.
        if self.parent is None:
            raise exc.InvalidRequestError(
                "this ForeignKey object does not yet have a "
                "parent Column associated with it.")

        elif self.parent.table is None:
            raise exc.InvalidRequestError(
                "this ForeignKey's parent column is not yet associated "
                "with a Table.")

        parenttable = self.parent.table

        # assertion, can be commented out.
        # basically Column._make_proxy() sends the actual
        # target Column to the ForeignKey object, so the
        # string resolution here is never called.
        for c in self.parent.base_columns:
            if isinstance(c, Column):
                assert c.table is parenttable
                break
        else:
            assert False
        ######################

        schema, tname, colname = self._column_tokens

        # an unqualified spec inherits the MetaData's default schema
        if schema is None and parenttable.metadata.schema is not None:
            schema = parenttable.metadata.schema

        tablekey = _get_table_key(tname, schema)
        return parenttable, tablekey, colname


    def _link_to_col_by_colstring(self, parenttable, table, colname):
        # Locate the target Column on ``table`` per the string colspec and
        # establish it as this FK's target.
        if not hasattr(self.constraint, '_referred_table'):
            self.constraint._referred_table = table
        else:
            assert self.constraint._referred_table is table

        _column = None
        if colname is None:
            # colname is None in the case that ForeignKey argument
            # was specified as table name only, in which case we
            # match the column name to the same column on the
            # parent.
            key = self.parent
            _column = table.c.get(self.parent.key, None)
        elif self.link_to_name:
            # match on rendered column name rather than .key
            # NOTE(review): this scan has no break, so the last matching
            # column wins; presumably names are unique within a table.
            key = colname
            for c in table.c:
                if c.name == colname:
                    _column = c
        else:
            key = colname
            _column = table.c.get(colname, None)

        if _column is None:
            raise exc.NoReferencedColumnError(
                "Could not initialize target column for ForeignKey '%s' on table '%s': "
                "table '%s' has no column named '%s'" % (
                    self._colspec, parenttable.name, table.name, key),
                table.name, key)

        self._set_target_column(_column)

    def _set_target_column(self, column):
        # Establish ``column`` as the resolved target of this FK.
        # propagate TypeEngine to parent if it didn't have one
        if self.parent.type._isnull:
            self.parent.type = column.type

        # super-edgy case, if other FKs point to our column,
        # they'd get the type propagated out also.
        if isinstance(self.parent.table, Table):
            fk_key = (self.parent.table.key, self.parent.key)
            if fk_key in self.parent.table.metadata._fk_memos:
                for fk in self.parent.table.metadata._fk_memos[fk_key]:
                    if fk.parent.type._isnull:
                        fk.parent.type = column.type

        # overwrites the ``column`` memoized_property below
        self.column = column

    @util.memoized_property
    def column(self):
        """Return the target :class:`.Column` referenced by this
        :class:`.ForeignKey`.

        If no target column has been established, an exception
        is raised.

        .. versionchanged:: 0.9.0
            Foreign key target column resolution now occurs as soon as both
            the ForeignKey object and the remote Column to which it refers
            are both associated with the same MetaData object.

        """

        if isinstance(self._colspec, util.string_types):

            parenttable, tablekey, colname = self._resolve_col_tokens()

            # reaching this memoized property with a string colspec means
            # resolution never succeeded (success replaces this attribute
            # via _set_target_column); diagnose why and raise.
            if tablekey not in parenttable.metadata:
                raise exc.NoReferencedTableError(
                    "Foreign key associated with column '%s' could not find "
                    "table '%s' with which to generate a "
                    "foreign key to target column '%s'" %
                    (self.parent, tablekey, colname),
                    tablekey)
            elif parenttable.key not in parenttable.metadata:
                raise exc.InvalidRequestError(
                    "Table %s is no longer associated with its "
                    "parent MetaData" % parenttable)
            else:
                raise exc.NoReferencedColumnError(
                    "Could not initialize target column for "
                    "ForeignKey '%s' on table '%s': "
                    "table '%s' has no column named '%s'" % (
                        self._colspec, parenttable.name, tablekey, colname),
                    tablekey, colname)
        elif hasattr(self._colspec, '__clause_element__'):
            _column = self._colspec.__clause_element__()
            return _column
        else:
            _column = self._colspec
            return _column

    def _set_parent(self, column):
        # Attach this FK marker to its owning (parent) Column.
        if self.parent is not None and self.parent is not column:
            raise exc.InvalidRequestError(
                "This ForeignKey already has a parent !")
        self.parent = column
        self.parent.foreign_keys.add(self)
        # finish wiring (constraint creation, target resolution) once the
        # parent column joins a Table
        self.parent._on_table_attach(self._set_table)

    def _set_remote_table(self, table):
        # Called when the referenced Table becomes available; resolve the
        # target column and validate the hosting constraint.
        parenttable, tablekey, colname = self._resolve_col_tokens()
        self._link_to_col_by_colstring(parenttable, table, colname)
        self.constraint._validate_dest_table(table)

    def _remove_from_metadata(self, metadata):
        # Drop this FK from the MetaData's pending-resolution memos.
        parenttable, table_key, colname = self._resolve_col_tokens()
        fk_key = (table_key, colname)

        if self in metadata._fk_memos[fk_key]:
            # TODO: no test coverage for self not in memos
            metadata._fk_memos[fk_key].remove(self)

    def _set_table(self, column, table):
        # standalone ForeignKey - create ForeignKeyConstraint
        # on the hosting Table when attached to the Table.
        if self.constraint is None and isinstance(table, Table):
            self.constraint = ForeignKeyConstraint(
                [], [], use_alter=self.use_alter, name=self.name,
                onupdate=self.onupdate, ondelete=self.ondelete,
                deferrable=self.deferrable, initially=self.initially,
                match=self.match,
                **self._unvalidated_dialect_kw
            )
            self.constraint._elements[self.parent] = self
            self.constraint._set_parent_with_dispatch(table)
        table.foreign_keys.add(self)

        # set up remote ".column" attribute, or a note to pick it
        # up when the other Table/Column shows up
        if isinstance(self._colspec, util.string_types):
            parenttable, table_key, colname = self._resolve_col_tokens()
            fk_key = (table_key, colname)
            if table_key in parenttable.metadata.tables:
                table = parenttable.metadata.tables[table_key]
                try:
                    self._link_to_col_by_colstring(parenttable, table, colname)
                except exc.NoReferencedColumnError:
                    # this is OK, we'll try later
                    pass
            parenttable.metadata._fk_memos[fk_key].append(self)
        elif hasattr(self._colspec, '__clause_element__'):
            _column = self._colspec.__clause_element__()
            self._set_target_column(_column)
        else:
            _column = self._colspec
            self._set_target_column(_column)
+
+
+class _NotAColumnExpr(object):
+ def _not_a_column_expr(self):
+ raise exc.InvalidRequestError(
+ "This %s cannot be used directly "
+ "as a column expression." % self.__class__.__name__)
+
+ __clause_element__ = self_group = lambda self: self._not_a_column_expr()
+ _from_objects = property(lambda self: self._not_a_column_expr())
+
+
class DefaultGenerator(_NotAColumnExpr, SchemaItem):
    """Base class for column *default* values."""

    __visit_name__ = 'default_generator'

    # class-level flags distinguishing the flavors of default; the
    # owning column is filled in by _set_parent()
    is_sequence = False
    is_server_default = False
    column = None

    def __init__(self, for_update=False):
        self.for_update = for_update

    def _set_parent(self, column):
        # register ourselves on the column as either its UPDATE-time or
        # INSERT-time default generator
        self.column = column
        attr = 'onupdate' if self.for_update else 'default'
        setattr(self.column, attr, self)

    def execute(self, bind=None, **kwargs):
        """Execute this default, locating a bound connectable if ``bind``
        is not supplied."""
        if bind is None:
            bind = _bind_or_error(self)
        return bind._execute_default(self, **kwargs)

    @property
    def bind(self):
        """Return the connectable associated with this default."""
        column = getattr(self, 'column', None)
        return column.table.bind if column is not None else None
+
+
class ColumnDefault(DefaultGenerator):
    """A plain default value on a column.

    This could correspond to a constant, a callable function,
    or a SQL clause.

    :class:`.ColumnDefault` is generated automatically
    whenever the ``default``, ``onupdate`` arguments of
    :class:`.Column` are used. A :class:`.ColumnDefault`
    can be passed positionally as well.

    For example, the following::

        Column('foo', Integer, default=50)

    Is equivalent to::

        Column('foo', Integer, ColumnDefault(50))


    """

    def __init__(self, arg, **kwargs):
        """Construct a new :class:`.ColumnDefault`.


        :param arg: argument representing the default value.
            May be one of the following:

            * a plain non-callable Python value, such as a
              string, integer, boolean, or other simple type.
              The default value will be used as is each time.
            * a SQL expression, that is one which derives from
              :class:`.ColumnElement`. The SQL expression will
              be rendered into the INSERT or UPDATE statement,
              or in the case of a primary key column when
              RETURNING is not used may be
              pre-executed before an INSERT within a SELECT.
            * A Python callable. The function will be invoked for each
              new row subject to an INSERT or UPDATE.
              The callable must accept exactly
              zero or one positional arguments. The one-argument form
              will receive an instance of the :class:`.ExecutionContext`,
              which provides contextual information as to the current
              :class:`.Connection` in use as well as the current
              statement and parameters.

        """
        super(ColumnDefault, self).__init__(**kwargs)
        if isinstance(arg, FetchedValue):
            raise exc.ArgumentError(
                "ColumnDefault may not be a server-side default type.")
        if util.callable(arg):
            # normalize to a one-argument (context) callable
            arg = self._maybe_wrap_callable(arg)
        self.arg = arg

    # whether .arg is a Python callable
    @util.memoized_property
    def is_callable(self):
        return util.callable(self.arg)

    # whether .arg is a SQL expression
    @util.memoized_property
    def is_clause_element(self):
        return isinstance(self.arg, ClauseElement)

    # whether .arg is a plain scalar value
    @util.memoized_property
    def is_scalar(self):
        return not self.is_callable and \
            not self.is_clause_element and \
            not self.is_sequence

    def _maybe_wrap_callable(self, fn):
        """Wrap callables that don't accept a context.

        The alternative here is to require that
        a simple callable passed to "default" would need
        to be of the form "default=lambda ctx: datetime.now".
        That is the more "correct" way to go, but the case
        of using a zero-arg callable for "default" is so
        much more prominent than the context-specific one
        I'm having trouble justifying putting that inconvenience
        on everyone.

        """
        # locate the object whose signature inspect can examine
        if inspect.isfunction(fn) or inspect.ismethod(fn):
            inspectable = fn
        elif inspect.isclass(fn):
            inspectable = fn.__init__
        elif hasattr(fn, '__call__'):
            inspectable = fn.__call__
        else:
            # probably not inspectable, try anyways.
            inspectable = fn
        try:
            argspec = inspect.getargspec(inspectable)
        except TypeError:
            # uninspectable (e.g. builtin); assume zero-arg
            return lambda ctx: fn()

        # count required positional args: total positionals minus
        # those that carry defaults (argspec[3] is the defaults tuple)
        defaulted = argspec[3] is not None and len(argspec[3]) or 0
        positionals = len(argspec[0]) - defaulted

        # Py3K compat - no unbound methods
        if inspect.ismethod(inspectable) or inspect.isclass(fn):
            # discount the implicit self/cls argument
            positionals -= 1

        if positionals == 0:
            return lambda ctx: fn()
        elif positionals == 1:
            return fn
        else:
            raise exc.ArgumentError(
                "ColumnDefault Python function takes zero or one "
                "positional arguments")

    def _visit_name(self):
        # visitor name varies with INSERT vs UPDATE role
        if self.for_update:
            return "column_onupdate"
        else:
            return "column_default"
    __visit_name__ = property(_visit_name)

    def __repr__(self):
        return "ColumnDefault(%r)" % self.arg
+
+
class Sequence(DefaultGenerator):
    """Represents a named database sequence.

    The :class:`.Sequence` object represents the name and configurational
    parameters of a database sequence. It also represents
    a construct that can be "executed" by a SQLAlchemy :class:`.Engine`
    or :class:`.Connection`, rendering the appropriate "next value" function
    for the target database and returning a result.

    The :class:`.Sequence` is typically associated with a primary key column::

        some_table = Table('some_table', metadata,
            Column('id', Integer, Sequence('some_table_seq'), primary_key=True)
        )

    When CREATE TABLE is emitted for the above :class:`.Table`, if the
    target platform supports sequences, a CREATE SEQUENCE statement will
    be emitted as well. For platforms that don't support sequences,
    the :class:`.Sequence` construct is ignored.

    .. seealso::

        :class:`.CreateSequence`

        :class:`.DropSequence`

    """

    __visit_name__ = 'sequence'

    # flag checked by DDL/compilation machinery to distinguish sequences
    # from other DefaultGenerator subclasses
    is_sequence = True

    def __init__(self, name, start=None, increment=None, schema=None,
                 optional=False, quote=None, metadata=None,
                 quote_schema=None,
                 for_update=False):
        """Construct a :class:`.Sequence` object.

        :param name: The name of the sequence.
        :param start: the starting index of the sequence. This value is
         used when the CREATE SEQUENCE command is emitted to the database
         as the value of the "START WITH" clause. If ``None``, the
         clause is omitted, which on most platforms indicates a starting
         value of 1.
        :param increment: the increment value of the sequence. This
         value is used when the CREATE SEQUENCE command is emitted to
         the database as the value of the "INCREMENT BY" clause. If ``None``,
         the clause is omitted, which on most platforms indicates an
         increment of 1.
        :param schema: Optional schema name for the sequence, if located
         in a schema other than the default.
        :param optional: boolean value, when ``True``, indicates that this
         :class:`.Sequence` object only needs to be explicitly generated
         on backends that don't provide another way to generate primary
         key identifiers. Currently, it essentially means, "don't create
         this sequence on the Postgresql backend, where the SERIAL keyword
         creates a sequence for us automatically".
        :param quote: boolean value, when ``True`` or ``False``, explicitly
         forces quoting of the schema name on or off. When left at its
         default of ``None``, normal quoting rules based on casing and reserved
         words take place.
        :param quote_schema: set the quoting preferences for the ``schema``
         name.
        :param metadata: optional :class:`.MetaData` object which will be
         associated with this :class:`.Sequence`. A :class:`.Sequence`
         that is associated with a :class:`.MetaData` gains access to the
         ``bind`` of that :class:`.MetaData`, meaning the
         :meth:`.Sequence.create` and :meth:`.Sequence.drop` methods will
         make usage of that engine automatically.

         .. versionchanged:: 0.7
             Additionally, the appropriate CREATE SEQUENCE/
             DROP SEQUENCE DDL commands will be emitted corresponding to this
             :class:`.Sequence` when :meth:`.MetaData.create_all` and
             :meth:`.MetaData.drop_all` are invoked.

         Note that when a :class:`.Sequence` is applied to a :class:`.Column`,
         the :class:`.Sequence` is automatically associated with the
         :class:`.MetaData` object of that column's parent :class:`.Table`,
         when that association is made. The :class:`.Sequence` will then
         be subject to automatic CREATE SEQUENCE/DROP SEQUENCE corresponding
         to when the :class:`.Table` object itself is created or dropped,
         rather than that of the :class:`.MetaData` object overall.
        :param for_update: Indicates this :class:`.Sequence`, when associated
         with a :class:`.Column`, should be invoked for UPDATE statements
         on that column's table, rather than for INSERT statements, when
         no value is otherwise present for that column in the statement.

        """
        super(Sequence, self).__init__(for_update=for_update)
        self.name = quoted_name(name, quote)
        self.start = start
        self.increment = increment
        self.optional = optional
        # when no explicit schema is given but the owning MetaData has a
        # default schema, inherit it (already a quoted_name on MetaData);
        # otherwise apply quote_schema to the given schema name
        if metadata is not None and schema is None and metadata.schema:
            self.schema = schema = metadata.schema
        else:
            self.schema = quoted_name(schema, quote_schema)
        self.metadata = metadata
        # key used to register this sequence in MetaData._sequences
        self._key = _get_table_key(name, schema)
        if metadata:
            self._set_metadata(metadata)

    @util.memoized_property
    def is_callable(self):
        # a Sequence is not invoked as a plain Python callable
        return False

    @util.memoized_property
    def is_clause_element(self):
        # nor is it embedded directly as a SQL expression; see next_value()
        return False

    @util.dependencies("sqlalchemy.sql.functions.func")
    def next_value(self, func):
        """Return a :class:`.next_value` function element
        which will render the appropriate increment function
        for this :class:`.Sequence` within any SQL expression.

        """
        # 'func' is injected lazily by the decorator, presumably to avoid
        # a circular import with sqlalchemy.sql.functions — TODO confirm
        return func.next_value(self, bind=self.bind)

    def _set_parent(self, column):
        # when attached to a Column, defer MetaData association until the
        # column itself is attached to a Table
        super(Sequence, self)._set_parent(column)
        column._on_table_attach(self._set_table)

    def _set_table(self, column, table):
        # column joined a Table; adopt that table's MetaData
        self._set_metadata(table.metadata)

    def _set_metadata(self, metadata):
        # register ourselves in the MetaData's sequence registry
        self.metadata = metadata
        self.metadata._sequences[self._key] = self

    @property
    def bind(self):
        """The engine/connection of the associated :class:`.MetaData`,
        or ``None`` if no MetaData is associated."""
        if self.metadata:
            return self.metadata.bind
        else:
            return None

    def create(self, bind=None, checkfirst=True):
        """Creates this sequence in the database."""

        if bind is None:
            bind = _bind_or_error(self)
        bind._run_visitor(ddl.SchemaGenerator,
                          self,
                          checkfirst=checkfirst)

    def drop(self, bind=None, checkfirst=True):
        """Drops this sequence from the database."""

        if bind is None:
            bind = _bind_or_error(self)
        bind._run_visitor(ddl.SchemaDropper,
                         self,
                         checkfirst=checkfirst)

    def _not_a_column_expr(self):
        # raised when a Sequence is used directly in a SQL expression
        # context; direct users should go through func.next_value()
        raise exc.InvalidRequestError(
            "This %s cannot be used directly "
            "as a column expression.  Use func.next_value(sequence) "
            "to produce a 'next value' function that's usable "
            "as a column element."
            % self.__class__.__name__)
+
+
@inspection._self_inspects
class FetchedValue(_NotAColumnExpr, SchemaEventTarget):
    """A marker for a transparent database-side default.

    Use :class:`.FetchedValue` when the database is configured
    to provide some automatic default for a column.

    E.g.::

        Column('foo', Integer, FetchedValue())

    Would indicate that some trigger or default generator
    will create a new value for the ``foo`` column during an
    INSERT.

    .. seealso::

        :ref:`triggered_columns`

    """
    is_server_default = True
    reflected = False
    has_argument = False

    def __init__(self, for_update=False):
        self.for_update = for_update

    def _as_for_update(self, for_update):
        # reuse this object when the flag already matches; otherwise hand
        # back a copy carrying the requested flag
        if for_update == self.for_update:
            return self
        return self._clone(for_update)

    def _clone(self, for_update):
        duplicate = self.__class__.__new__(self.__class__)
        duplicate.__dict__.update(self.__dict__)
        # the copy starts out unattached to any column
        duplicate.__dict__.pop('column', None)
        duplicate.for_update = for_update
        return duplicate

    def _set_parent(self, column):
        # install ourselves on the column as either the UPDATE-time or
        # INSERT-time server-side default
        self.column = column
        slot = 'server_onupdate' if self.for_update else 'server_default'
        setattr(self.column, slot, self)

    def __repr__(self):
        return util.generic_repr(self)
+
+
class DefaultClause(FetchedValue):
    """A DDL-specified DEFAULT column value.

    :class:`.DefaultClause` is a :class:`.FetchedValue`
    that also generates a "DEFAULT" clause when
    "CREATE TABLE" is emitted.

    :class:`.DefaultClause` is generated automatically
    whenever the ``server_default``, ``server_onupdate`` arguments of
    :class:`.Column` are used. A :class:`.DefaultClause`
    can be passed positionally as well.

    For example, the following::

        Column('foo', Integer, server_default="50")

    Is equivalent to::

        Column('foo', Integer, DefaultClause("50"))

    """

    has_argument = True

    def __init__(self, arg, for_update=False, _reflected=False):
        # the DEFAULT argument may be a plain string, a SQL expression,
        # or a textual clause
        accepted = (util.string_types[0], ClauseElement, TextClause)
        util.assert_arg_type(arg, accepted, 'arg')
        super(DefaultClause, self).__init__(for_update)
        self.arg = arg
        # _reflected marks defaults that came back from table reflection
        self.reflected = _reflected

    def __repr__(self):
        return "DefaultClause(%r, for_update=%r)" % \
            (self.arg, self.for_update)
+
+
class PassiveDefault(DefaultClause):
    """A DDL-specified DEFAULT column value.

    .. deprecated:: 0.6
        :class:`.PassiveDefault` is deprecated.
        Use :class:`.DefaultClause`.
    """
    # Backwards-compatibility shim: emits a deprecation warning via the
    # decorator and otherwise delegates construction unchanged.  The final
    # ``False`` flag presumably suppresses docstring augmentation by
    # util.deprecated — confirm against its signature.
    @util.deprecated("0.6",
                ":class:`.PassiveDefault` is deprecated. "
                "Use :class:`.DefaultClause`.",
                False)
    def __init__(self, *arg, **kw):
        DefaultClause.__init__(self, *arg, **kw)
+
+
class Constraint(DialectKWArgs, SchemaItem):
    """A table-level SQL constraint."""

    __visit_name__ = 'constraint'

    def __init__(self, name=None, deferrable=None, initially=None,
                 _create_rule=None,
                 **dialect_kw):
        """Create a SQL constraint.

        :param name:
          Optional, the in-database name of this ``Constraint``.

        :param deferrable:
          Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
          issuing DDL for this constraint.

        :param initially:
          Optional string. If set, emit INITIALLY <value> when issuing DDL
          for this constraint.

        :param _create_rule:
          a callable which is passed the DDLCompiler object during
          compilation. Returns True or False to signal inline generation of
          this Constraint.

          The AddConstraint and DropConstraint DDL constructs provide
          DDLElement's more comprehensive "conditional DDL" approach that is
          passed a database connection when DDL is being issued. _create_rule
          is instead called during any CREATE TABLE compilation, where there
          may not be any transaction/connection in progress. However, it
          allows conditional compilation of the constraint even for backends
          which do not support addition of constraints through ALTER TABLE,
          which currently includes SQLite.

          _create_rule is used by some types to create constraints.
          Currently, its call signature is subject to change at any time.

        :param \**dialect_kw: Additional keyword arguments are dialect specific,
          and passed in the form ``<dialectname>_<argname>``. See the
          documentation regarding an individual dialect at :ref:`dialect_toplevel`
          for detail on documented arguments.

        """
        self.name = name
        self.deferrable = deferrable
        self.initially = initially
        self._create_rule = _create_rule
        # record construction order for deterministic DDL sorting
        util.set_creation_order(self)
        self._validate_dialect_kwargs(dialect_kw)

    @property
    def table(self):
        """The :class:`.Table` this constraint is attached to.

        Raises if the constraint has no parent, or its parent is not a
        Table (e.g. a column-level constraint).
        """
        parent = getattr(self, 'parent', None)
        if isinstance(parent, Table):
            return parent
        raise exc.InvalidRequestError(
            "This constraint is not bound to a table. Did you "
            "mean to call table.append_constraint(constraint) ?")

    def _set_parent(self, parent):
        self.parent = parent
        parent.constraints.add(self)

    def copy(self, **kw):
        # concrete constraint classes implement their own copy()
        raise NotImplementedError()
+
+
def _to_schema_column(element):
    """Coerce *element* to a :class:`.Column`, unwrapping any
    ``__clause_element__`` adapter; raise otherwise."""
    if hasattr(element, '__clause_element__'):
        element = element.__clause_element__()
    if isinstance(element, Column):
        return element
    raise exc.ArgumentError("schema.Column object expected")
+
+
def _to_schema_column_or_string(element):
    """Coerce *element* to a string column name or a
    :class:`.ColumnElement`, unwrapping any ``__clause_element__``
    adapter; raise otherwise."""
    if hasattr(element, '__clause_element__'):
        element = element.__clause_element__()
    accepted = util.string_types + (ColumnElement, )
    if isinstance(element, accepted):
        return element
    msg = "Element %r is not a string name or column element"
    raise exc.ArgumentError(msg % element)
+
+
class ColumnCollectionMixin(object):
    """Maintains a :class:`.ColumnCollection` of columns given either as
    string names (resolved once a parent table is known) or as live
    :class:`.Column` objects."""

    def __init__(self, *columns):
        self.columns = ColumnCollection()
        self._pending_colargs = [
            _to_schema_column_or_string(c) for c in columns]

        # if we were handed Column objects already bound to a Table,
        # attach to that table immediately rather than waiting for an
        # explicit append
        if self._pending_colargs:
            first = self._pending_colargs[0]
            if isinstance(first, Column) and isinstance(first.table, Table):
                self._set_parent_with_dispatch(first.table)

    def _set_parent(self, table):
        # resolve any string names against the parent table's columns
        for col in self._pending_colargs:
            if isinstance(col, util.string_types):
                col = table.c[col]
            self.columns.add(col)
+
+
class ColumnCollectionConstraint(ColumnCollectionMixin, Constraint):
    """A constraint that proxies a ColumnCollection."""

    def __init__(self, *columns, **kw):
        """
        :param \*columns:
          A sequence of column names or Column objects.

        :param name:
          Optional, the in-database name of this constraint.

        :param deferrable:
          Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
          issuing DDL for this constraint.

        :param initially:
          Optional string. If set, emit INITIALLY <value> when issuing DDL
          for this constraint.

        :param \**kw: other keyword arguments including dialect-specific
          arguments are propagated to the :class:`.Constraint` superclass.

        """
        ColumnCollectionMixin.__init__(self, *columns)
        Constraint.__init__(self, **kw)

    def _set_parent(self, table):
        # both bases need to see the parent table
        ColumnCollectionMixin._set_parent(self, table)
        Constraint._set_parent(self, table)

    def __contains__(self, x):
        return x in self.columns

    def copy(self, **kw):
        # rebuild using the same column names; dialect options and events
        # are carried over by _schema_item_copy()
        duplicate = self.__class__(name=self.name, deferrable=self.deferrable,
                                   initially=self.initially,
                                   *self.columns.keys())
        return self._schema_item_copy(duplicate)

    def contains_column(self, col):
        return self.columns.contains_column(col)

    def __iter__(self):
        # hand-inlined form of iter(self.columns)
        # (ColumnCollection -> OrderedProperties -> OrderedDict)
        data = self.columns._data
        return iter(data[key] for key in data._list)

    def __len__(self):
        return len(self.columns._data)
+
+
class CheckConstraint(Constraint):
    """A table- or column-level CHECK constraint.

    Can be included in the definition of a Table or Column.
    """

    def __init__(self, sqltext, name=None, deferrable=None,
                 initially=None, table=None, _create_rule=None,
                 _autoattach=True):
        """Construct a CHECK constraint.

        :param sqltext:
          A string containing the constraint definition, which will be used
          verbatim, or a SQL expression construct. If given as a string,
          the object is converted to a :class:`.Text` object. If the textual
          string includes a colon character, escape this using a backslash::

            CheckConstraint(r"foo ~ E'a(?\:b|c)d'")

        :param name:
          Optional, the in-database name of the constraint.

        :param deferrable:
          Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
          issuing DDL for this constraint.

        :param initially:
          Optional string. If set, emit INITIALLY <value> when issuing DDL
          for this constraint.

        """

        super(CheckConstraint, self).\
            __init__(name, deferrable, initially, _create_rule)
        self.sqltext = _literal_as_text(sqltext)
        if table is not None:
            self._set_parent_with_dispatch(table)
        elif _autoattach:
            # no explicit table given: if the expression's columns all come
            # from exactly one Table, attach to that table automatically
            cols = _find_columns(self.sqltext)
            tables = set([c.table for c in cols
                          if isinstance(c.table, Table)])
            if len(tables) == 1:
                self._set_parent_with_dispatch(
                    tables.pop())

    # visitor key depends on whether this is a table-level or a
    # column-level CHECK constraint
    def __visit_name__(self):
        if isinstance(self.parent, Table):
            return "check_constraint"
        else:
            return "column_check_constraint"
    __visit_name__ = property(__visit_name__)

    def copy(self, target_table=None, **kw):
        if target_table is not None:
            # remap column references in the expression onto the columns
            # of the target table, matched by key
            def replace(col):
                if self.table.c.contains_column(col):
                    return target_table.c[col.key]
                else:
                    return None
            sqltext = visitors.replacement_traverse(self.sqltext, {}, replace)
        else:
            sqltext = self.sqltext
        c = CheckConstraint(sqltext,
                            name=self.name,
                            initially=self.initially,
                            deferrable=self.deferrable,
                            _create_rule=self._create_rule,
                            table=target_table,
                            _autoattach=False)
        return self._schema_item_copy(c)
+
+
class ForeignKeyConstraint(Constraint):
    """A table-level FOREIGN KEY constraint.

    Defines a single column or composite FOREIGN KEY ... REFERENCES
    constraint. For a no-frills, single column foreign key, adding a
    :class:`.ForeignKey` to the definition of a :class:`.Column` is a shorthand
    equivalent for an unnamed, single column :class:`.ForeignKeyConstraint`.

    Examples of foreign key configuration are in :ref:`metadata_foreignkeys`.

    """
    __visit_name__ = 'foreign_key_constraint'

    def __init__(self, columns, refcolumns, name=None, onupdate=None,
                 ondelete=None, deferrable=None, initially=None, use_alter=False,
                 link_to_name=False, match=None, table=None, **dialect_kw):
        """Construct a composite-capable FOREIGN KEY.

        :param columns: A sequence of local column names. The named columns
          must be defined and present in the parent Table. The names should
          match the ``key`` given to each column (defaults to the name) unless
          ``link_to_name`` is True.

        :param refcolumns: A sequence of foreign column names or Column
          objects. The columns must all be located within the same Table.

        :param name: Optional, the in-database name of the key.

        :param onupdate: Optional string. If set, emit ON UPDATE <value> when
          issuing DDL for this constraint. Typical values include CASCADE,
          DELETE and RESTRICT.

        :param ondelete: Optional string. If set, emit ON DELETE <value> when
          issuing DDL for this constraint. Typical values include CASCADE,
          DELETE and RESTRICT.

        :param deferrable: Optional bool. If set, emit DEFERRABLE or NOT
          DEFERRABLE when issuing DDL for this constraint.

        :param initially: Optional string. If set, emit INITIALLY <value> when
          issuing DDL for this constraint.

        :param link_to_name: if True, the string name given in ``column`` is
          the rendered name of the referenced column, not its locally assigned
          ``key``.

        :param use_alter: If True, do not emit the DDL for this constraint as
          part of the CREATE TABLE definition. Instead, generate it via an
          ALTER TABLE statement issued after the full collection of tables
          have been created, and drop it via an ALTER TABLE statement before
          the full collection of tables are dropped. This is shorthand for the
          usage of :class:`.AddConstraint` and :class:`.DropConstraint` applied
          as "after-create" and "before-drop" events on the MetaData object.
          This is normally used to generate/drop constraints on objects that
          are mutually dependent on each other.

        :param match: Optional string. If set, emit MATCH <value> when issuing
          DDL for this constraint. Typical values include SIMPLE, PARTIAL
          and FULL.

        :param \**dialect_kw: Additional keyword arguments are dialect specific,
          and passed in the form ``<dialectname>_<argname>``. See the
          documentation regarding an individual dialect at :ref:`dialect_toplevel`
          for detail on documented arguments.

          .. versionadded:: 0.9.2

        """
        super(ForeignKeyConstraint, self).\
            __init__(name, deferrable, initially, **dialect_kw)

        self.onupdate = onupdate
        self.ondelete = ondelete
        self.link_to_name = link_to_name
        if self.name is None and use_alter:
            raise exc.ArgumentError("Alterable Constraint requires a name")
        self.use_alter = use_alter
        self.match = match

        # maps local column (name or Column) -> ForeignKey element
        self._elements = util.OrderedDict()

        # standalone ForeignKeyConstraint - create
        # associated ForeignKey objects which will be applied to hosted
        # Column objects (in col.foreign_keys), either now or when attached
        # to the Table for string-specified names
        for col, refcol in zip(columns, refcolumns):
            self._elements[col] = ForeignKey(
                refcol,
                _constraint=self,
                name=self.name,
                onupdate=self.onupdate,
                ondelete=self.ondelete,
                use_alter=self.use_alter,
                link_to_name=self.link_to_name,
                match=self.match,
                deferrable=self.deferrable,
                initially=self.initially,
                **self.dialect_kwargs
            )

        # attach now if a table was given, or if constructed against
        # live table-bound Column objects
        if table is not None:
            self._set_parent_with_dispatch(table)
        elif columns and \
                isinstance(columns[0], Column) and \
                columns[0].table is not None:
            self._set_parent_with_dispatch(columns[0].table)

    def _validate_dest_table(self, table):
        # all ForeignKey elements must refer to the same remote table;
        # a None key means the remote table isn't resolvable yet and is
        # not validated here
        table_keys = set([elem._table_key() for elem in self._elements.values()])
        if None not in table_keys and len(table_keys) > 1:
            elem0, elem1 = sorted(table_keys)[0:2]
            raise exc.ArgumentError(
                'ForeignKeyConstraint on %s(%s) refers to '
                'multiple remote tables: %s and %s' % (
                    table.fullname,
                    self._col_description,
                    elem0,
                    elem1
                ))

    @property
    def _col_description(self):
        # comma-separated local column names, for error messages
        return ", ".join(self._elements)

    @property
    def columns(self):
        """The local columns (names or Column objects) of this constraint."""
        return list(self._elements)

    @property
    def elements(self):
        """The individual :class:`.ForeignKey` elements of this constraint."""
        return list(self._elements.values())

    def _set_parent(self, table):
        super(ForeignKeyConstraint, self)._set_parent(table)

        self._validate_dest_table(table)

        for col, fk in self._elements.items():
            # string-specified column names now get
            # resolved to Column objects
            if isinstance(col, util.string_types):
                try:
                    col = table.c[col]
                except KeyError:
                    raise exc.ArgumentError(
                        "Can't create ForeignKeyConstraint "
                        "on table '%s': no column "
                        "named '%s' is present." % (table.description, col))

            if not hasattr(fk, 'parent') or \
                    fk.parent is not col:
                fk._set_parent_with_dispatch(col)

        if self.use_alter:
            # defer DDL for this constraint to ALTER TABLE, emitted via
            # metadata-level create/drop events, on dialects supporting ALTER
            def supports_alter(ddl, event, schema_item, bind, **kw):
                return table in set(kw['tables']) and \
                    bind.dialect.supports_alter

            event.listen(table.metadata, "after_create",
                         ddl.AddConstraint(self, on=supports_alter))
            event.listen(table.metadata, "before_drop",
                         ddl.DropConstraint(self, on=supports_alter))

    def copy(self, schema=None, **kw):
        fkc = ForeignKeyConstraint(
            [x.parent.key for x in self._elements.values()],
            [x._get_colspec(schema=schema) for x in self._elements.values()],
            name=self.name,
            onupdate=self.onupdate,
            ondelete=self.ondelete,
            use_alter=self.use_alter,
            deferrable=self.deferrable,
            initially=self.initially,
            link_to_name=self.link_to_name,
            match=self.match
        )
        # copy per-element state (e.g. dialect options/events) pairwise
        for self_fk, other_fk in zip(
                self._elements.values(),
                fkc._elements.values()):
            self_fk._schema_item_copy(other_fk)
        return self._schema_item_copy(fkc)
+
+
class PrimaryKeyConstraint(ColumnCollectionConstraint):
    """A table-level PRIMARY KEY constraint.

    The :class:`.PrimaryKeyConstraint` object is present automatically
    on any :class:`.Table` object; it is assigned a set of
    :class:`.Column` objects corresponding to those marked with
    the :paramref:`.Column.primary_key` flag::

        >>> my_table = Table('mytable', metadata,
        ...     Column('id', Integer, primary_key=True),
        ...     Column('version_id', Integer, primary_key=True),
        ...     Column('data', String(50))
        ... )
        >>> my_table.primary_key
        PrimaryKeyConstraint(
            Column('id', Integer(), table=<mytable>, primary_key=True, nullable=False),
            Column('version_id', Integer(), table=<mytable>, primary_key=True, nullable=False)
        )

    The primary key of a :class:`.Table` can also be specified by using
    a :class:`.PrimaryKeyConstraint` object explicitly; in this mode of usage,
    the "name" of the constraint can also be specified, as well as other
    options which may be recognized by dialects::

        my_table = Table('mytable', metadata,
            Column('id', Integer),
            Column('version_id', Integer),
            Column('data', String(50)),
            PrimaryKeyConstraint('id', 'version_id', name='mytable_pk')
        )

    The two styles of column-specification should generally not be mixed.
    An warning is emitted if the columns present in the
    :class:`.PrimaryKeyConstraint`
    don't match the columns that were marked as ``primary_key=True``, if both
    are present; in this case, the columns are taken strictly from the
    :class:`.PrimaryKeyConstraint` declaration, and those columns otherwise marked
    as ``primary_key=True`` are ignored. This behavior is intended to be
    backwards compatible with previous behavior.

    .. versionchanged:: 0.9.2  Using a mixture of columns within a
       :class:`.PrimaryKeyConstraint` in addition to columns marked as
       ``primary_key=True`` now emits a warning if the lists don't match.
       The ultimate behavior of ignoring those columns marked with the flag
       only is currently maintained for backwards compatibility; this warning
       may raise an exception in a future release.

    For the use case where specific options are to be specified on the
    :class:`.PrimaryKeyConstraint`, but the usual style of using ``primary_key=True``
    flags is still desirable, an empty :class:`.PrimaryKeyConstraint` may be
    specified, which will take on the primary key column collection from
    the :class:`.Table` based on the flags::

        my_table = Table('mytable', metadata,
            Column('id', Integer, primary_key=True),
            Column('version_id', Integer, primary_key=True),
            Column('data', String(50)),
            PrimaryKeyConstraint(name='mytable_pk', mssql_clustered=True)
        )

    .. versionadded:: 0.9.2  an empty :class:`.PrimaryKeyConstraint` may now
       be specified for the purposes of establishing keyword arguments with the
       constraint, independently of the specification of "primary key" columns
       within the :class:`.Table` itself; columns marked as ``primary_key=True``
       will be gathered into the empty constraint's column collection.

    """

    __visit_name__ = 'primary_key_constraint'

    def _set_parent(self, table):
        super(PrimaryKeyConstraint, self)._set_parent(table)

        # install this object as the table's single primary key,
        # replacing any previous one in table.constraints
        if table.primary_key is not self:
            table.constraints.discard(table.primary_key)
            table.primary_key = self
            table.constraints.add(self)

        # reconcile explicitly listed columns against those marked
        # primary_key=True on the table
        table_pks = [c for c in table.c if c.primary_key]
        if self.columns and table_pks and \
                set(table_pks) != set(self.columns.values()):
            # mismatch: the explicit constraint columns win; warn and
            # discard the flag-derived list
            util.warn(
                "Table '%s' specifies columns %s as primary_key=True, "
                "not matching locally specified columns %s; setting the "
                "current primary key columns to %s. This warning "
                "may become an exception in a future release" %
                (
                    table.name,
                    ", ".join("'%s'" % c.name for c in table_pks),
                    ", ".join("'%s'" % c.name for c in self.columns),
                    ", ".join("'%s'" % c.name for c in self.columns)
                )
            )
            table_pks[:] = []

        # ensure the flag is set on all constraint columns, then append
        # any remaining flag-derived columns
        for c in self.columns:
            c.primary_key = True
        self.columns.extend(table_pks)

    def _reload(self, columns):
        """repopulate this :class:`.PrimaryKeyConstraint` given
        a set of columns.

        Existing columns in the table that are marked as primary_key=True
        are maintained.

        Also fires a new event.

        This is basically like putting a whole new
        :class:`.PrimaryKeyConstraint` object on the parent
        :class:`.Table` object without actually replacing the object.

        The ordering of the given list of columns is also maintained; these
        columns will be appended to the list of columns after any which
        are already present.

        """

        # set the primary key flag on new columns.
        # note any existing PK cols on the table also have their
        # flag still set.
        for col in columns:
            col.primary_key = True

        self.columns.extend(columns)

        # re-run _set_parent against the existing table, firing events
        self._set_parent_with_dispatch(self.table)

    def _replace(self, col):
        # swap in a replacement for a column of the same key
        self.columns.replace(col)
+
+
class UniqueConstraint(ColumnCollectionConstraint):
    """A table-level UNIQUE constraint.

    Defines a single column or composite UNIQUE constraint. For a no-frills,
    single column constraint, adding ``unique=True`` to the ``Column``
    definition is a shorthand equivalent for an unnamed, single column
    UniqueConstraint.
    """

    # all behavior is inherited; only the compiler visitor key differs
    __visit_name__ = 'unique_constraint'
+
+
class Index(DialectKWArgs, ColumnCollectionMixin, SchemaItem):
    """A table-level INDEX.

    Defines a composite (one or more column) INDEX.

    E.g.::

        sometable = Table("sometable", metadata,
                        Column("name", String(50)),
                        Column("address", String(100))
                    )

        Index("some_index", sometable.c.name)

    For a no-frills, single column index, adding
    :class:`.Column` also supports ``index=True``::

        sometable = Table("sometable", metadata,
                        Column("name", String(50), index=True)
                    )

    For a composite index, multiple columns can be specified::

        Index("some_index", sometable.c.name, sometable.c.address)

    Functional indexes are supported as well, keeping in mind that at least
    one :class:`.Column` must be present::

        Index("some_index", func.lower(sometable.c.name))

    .. versionadded:: 0.8 support for functional and expression-based indexes.

    .. seealso::

        :ref:`schema_indexes` - General information on :class:`.Index`.

        :ref:`postgresql_indexes` - PostgreSQL-specific options available for the
        :class:`.Index` construct.

        :ref:`mysql_indexes` - MySQL-specific options available for the
        :class:`.Index` construct.

        :ref:`mssql_indexes` - MSSQL-specific options available for the
        :class:`.Index` construct.

    """

    __visit_name__ = 'index'

    def __init__(self, name, *expressions, **kw):
        """Construct an index object.

        :param name:
          The name of the index

        :param \*expressions:
          Column expressions to include in the index. The expressions
          are normally instances of :class:`.Column`, but may also
          be arbitrary SQL expressions which ultmately refer to a
          :class:`.Column`.

        :param unique=False:
            Keyword only argument; if True, create a unique index.

        :param quote=None:
            Keyword only argument; whether to apply quoting to the name of
            the index. Works in the same manner as that of
            :paramref:`.Column.quote`.

        :param \**kw: Additional keyword arguments not mentioned above are
            dialect specific, and passed in the form ``<dialectname>_<argname>``.
            See the documentation regarding an individual dialect at
            :ref:`dialect_toplevel` for detail on documented arguments.

        """
        self.table = None

        # derive one column entry per expression: for SQL expressions,
        # use the first Column found inside; otherwise pass the item
        # through unchanged (e.g. a string name, or an expression with
        # no Column, handled downstream)
        columns = []
        for expr in expressions:
            if not isinstance(expr, ClauseElement):
                columns.append(expr)
            else:
                cols = []
                visitors.traverse(expr, {}, {'column': cols.append})
                if cols:
                    columns.append(cols[0])
                else:
                    columns.append(expr)

        self.expressions = expressions
        self.name = quoted_name(name, kw.pop("quote", None))
        self.unique = kw.pop('unique', False)
        self._validate_dialect_kwargs(kw)

        # will call _set_parent() if table-bound column
        # objects are present
        ColumnCollectionMixin.__init__(self, *columns)



    def _set_parent(self, table):
        ColumnCollectionMixin._set_parent(self, table)

        # an Index may be associated with only one Table
        if self.table is not None and table is not self.table:
            raise exc.ArgumentError(
                "Index '%s' is against table '%s', and "
                "cannot be associated with table '%s'." % (
                    self.name,
                    self.table.description,
                    table.description
                )
            )
        self.table = table
        for c in self.columns:
            if c.table != self.table:
                raise exc.ArgumentError(
                    "Column '%s' is not part of table '%s'." %
                    (c, self.table.description)
                )
        table.indexes.add(self)

        # normalize self.expressions: keep genuine SQL expressions,
        # replace plain entries with their now-resolved Column
        # (zip pairs each original expression with its derived column)
        self.expressions = [
            expr if isinstance(expr, ClauseElement)
            else colexpr
            for expr, colexpr in zip(self.expressions, self.columns)
        ]

    @property
    def bind(self):
        """Return the connectable associated with this Index."""

        return self.table.bind

    def create(self, bind=None):
        """Issue a ``CREATE`` statement for this
        :class:`.Index`, using the given :class:`.Connectable`
        for connectivity.

        .. seealso::

            :meth:`.MetaData.create_all`.

        """
        if bind is None:
            bind = _bind_or_error(self)
        bind._run_visitor(ddl.SchemaGenerator, self)
        return self

    def drop(self, bind=None):
        """Issue a ``DROP`` statement for this
        :class:`.Index`, using the given :class:`.Connectable`
        for connectivity.

        .. seealso::

            :meth:`.MetaData.drop_all`.

        """
        if bind is None:
            bind = _bind_or_error(self)
        bind._run_visitor(ddl.SchemaDropper, self)

    def __repr__(self):
        return 'Index(%s)' % (
            ", ".join(
                [repr(self.name)] +
                [repr(c) for c in self.columns] +
                (self.unique and ["unique=True"] or [])
            ))
+
+
+class MetaData(SchemaItem):
+ """A collection of :class:`.Table` objects and their associated schema
+ constructs.
+
+ Holds a collection of :class:`.Table` objects as well as
+ an optional binding to an :class:`.Engine` or
+ :class:`.Connection`. If bound, the :class:`.Table` objects
+ in the collection and their columns may participate in implicit SQL
+ execution.
+
+ The :class:`.Table` objects themselves are stored in the
+ :attr:`.MetaData.tables` dictionary.
+
+ :class:`.MetaData` is a thread-safe object for read operations. Construction
+ of new tables within a single :class:`.MetaData` object, either explicitly
+ or via reflection, may not be completely thread-safe.
+
+ .. seealso::
+
+ :ref:`metadata_describing` - Introduction to database metadata
+
+ """
+
+ __visit_name__ = 'metadata'
+
+ def __init__(self, bind=None, reflect=False, schema=None,
+ quote_schema=None):
+ """Create a new MetaData object.
+
+ :param bind:
+ An Engine or Connection to bind to. May also be a string or URL
+ instance, these are passed to create_engine() and this MetaData will
+ be bound to the resulting engine.
+
+ :param reflect:
+ Optional, automatically load all tables from the bound database.
+ Defaults to False. ``bind`` is required when this option is set.
+
+ .. deprecated:: 0.8
+ Please use the :meth:`.MetaData.reflect` method.
+
+ :param schema:
+ The default schema to use for the :class:`.Table`,
+ :class:`.Sequence`, and other objects associated with this
+ :class:`.MetaData`. Defaults to ``None``.
+
+ :param quote_schema:
+ Sets the ``quote_schema`` flag for those :class:`.Table`,
+ :class:`.Sequence`, and other objects which make usage of the
+ local ``schema`` name.
+
+ .. versionadded:: 0.7.4
+ ``schema`` and ``quote_schema`` parameters.
+
+ """
+ self.tables = util.immutabledict()
+ self.schema = quoted_name(schema, quote_schema)
+ self._schemas = set()
+ self._sequences = {}
+ self._fk_memos = collections.defaultdict(list)
+
+ self.bind = bind
+ if reflect:
+ util.warn("reflect=True is deprecate; please "
+ "use the reflect() method.")
+ if not bind:
+ raise exc.ArgumentError(
+ "A bind must be supplied in conjunction "
+ "with reflect=True")
+ self.reflect()
+
+    # populated via _add_table(); replaced with an immutabledict in __init__
+    # and never mutated directly by callers.
+    tables = None
+    """A dictionary of :class:`.Table` objects keyed to their name or "table key".
+
+    The exact key is that determined by the :attr:`.Table.key` attribute;
+    for a table with no :attr:`.Table.schema` attribute, this is the same
+    as :attr:`.Table.name`.  For a table with a schema, it is typically of the
+    form ``schemaname.tablename``.
+
+    .. seealso::
+
+        :attr:`.MetaData.sorted_tables`
+
+    """
+
+    def __repr__(self):
+        # shows only the bind; table contents can be large and are omitted
+        return 'MetaData(bind=%r)' % self.bind
+
+ def __contains__(self, table_or_key):
+ if not isinstance(table_or_key, util.string_types):
+ table_or_key = table_or_key.key
+ return table_or_key in self.tables
+
+    def _add_table(self, name, schema, table):
+        """Register ``table`` under its computed key and record its schema."""
+        key = _get_table_key(name, schema)
+        # bypass immutabledict's read-only interface for internal bookkeeping
+        dict.__setitem__(self.tables, key, table)
+        if schema:
+            self._schemas.add(schema)
+
+
+ def _remove_table(self, name, schema):
+ key = _get_table_key(name, schema)
+ removed = dict.pop(self.tables, key, None)
+ if removed is not None:
+ for fk in removed.foreign_keys:
+ fk._remove_from_metadata(self)
+ if self._schemas:
+ self._schemas = set([t.schema
+ for t in self.tables.values()
+ if t.schema is not None])
+
+
+ def __getstate__(self):
+ return {'tables': self.tables,
+ 'schema': self.schema,
+ 'schemas': self._schemas,
+ 'sequences': self._sequences,
+ 'fk_memos': self._fk_memos}
+
+ def __setstate__(self, state):
+ self.tables = state['tables']
+ self.schema = state['schema']
+ self._bind = None
+ self._sequences = state['sequences']
+ self._schemas = state['schemas']
+ self._fk_memos = state['fk_memos']
+
+    def is_bound(self):
+        """True if this MetaData is bound to an Engine or Connection."""
+
+        return self._bind is not None
+
+    # getter half of the ``bind`` property; assembled into a property
+    # together with _bind_to() further below.
+    def bind(self):
+        """An :class:`.Engine` or :class:`.Connection` to which this
+        :class:`.MetaData` is bound.
+
+        Typically, a :class:`.Engine` is assigned to this attribute
+        so that "implicit execution" may be used, or alternatively
+        as a means of providing engine binding information to an
+        ORM :class:`.Session` object::
+
+            engine = create_engine("someurl://")
+            metadata.bind = engine
+
+        .. seealso::
+
+           :ref:`dbengine_implicit` - background on "bound metadata"
+
+        """
+        return self._bind
+
+    # ``url`` is injected lazily by util.dependencies to avoid an import
+    # cycle with the engine package.
+    @util.dependencies("sqlalchemy.engine.url")
+    def _bind_to(self, url, bind):
+        """Bind this MetaData to an Engine, Connection, string or URL."""
+
+        # NOTE(review): relies on a module-level ``sqlalchemy`` name for
+        # create_engine(); confirm that name is in scope in this module.
+        if isinstance(bind, util.string_types + (url.URL, )):
+            self._bind = sqlalchemy.create_engine(bind)
+        else:
+            self._bind = bind
+    bind = property(bind, _bind_to)
+
+    def clear(self):
+        """Clear all Table objects from this MetaData."""
+
+        # dict.clear bypasses immutabledict; sequences are intentionally kept
+        dict.clear(self.tables)
+        self._schemas.clear()
+        self._fk_memos.clear()
+
+    def remove(self, table):
+        """Remove the given Table object from this MetaData."""
+
+        self._remove_table(table.name, table.schema)
+
+    @property
+    def sorted_tables(self):
+        """Returns a list of :class:`.Table` objects sorted in order of
+        foreign key dependency.
+
+        The sorting will place :class:`.Table` objects that have dependencies
+        first, before the dependencies themselves, representing the
+        order in which they can be created.   To get the order in which
+        the tables would be dropped, use the ``reversed()`` Python built-in.
+
+        .. seealso::
+
+            :attr:`.MetaData.tables`
+
+            :meth:`.Inspector.get_table_names`
+
+        """
+        # topological sort; recomputed on each access, not cached
+        return ddl.sort_tables(self.tables.values())
+
+    def reflect(self, bind=None, schema=None, views=False, only=None,
+                extend_existing=False,
+                autoload_replace=True):
+        """Load all available table definitions from the database.
+
+        Automatically creates ``Table`` entries in this ``MetaData`` for any
+        table available in the database but not yet present in the
+        ``MetaData``.  May be called multiple times to pick up tables recently
+        added to the database, however no special action is taken if a table
+        in this ``MetaData`` no longer exists in the database.
+
+        :param bind:
+          A :class:`.Connectable` used to access the database; if None, uses
+          the existing bind on this ``MetaData``, if any.
+
+        :param schema:
+          Optional, query and reflect tables from an alternate schema.
+          If None, the schema associated with this :class:`.MetaData`
+          is used, if any.
+
+        :param views:
+          If True, also reflect views.
+
+        :param only:
+          Optional.  Load only a sub-set of available named tables.  May be
+          specified as a sequence of names or a callable.
+
+          If a sequence of names is provided, only those tables will be
+          reflected.  An error is raised if a table is requested but not
+          available.  Named tables already present in this ``MetaData`` are
+          ignored.
+
+          If a callable is provided, it will be used as a boolean predicate to
+          filter the list of potential table names.  The callable is called
+          with a table name and this ``MetaData`` instance as positional
+          arguments and should return a true value for any table to reflect.
+
+        :param extend_existing: Passed along to each :class:`.Table` as
+          :paramref:`.Table.extend_existing`.
+
+          .. versionadded:: 0.9.1
+
+        :param autoload_replace: Passed along to each :class:`.Table` as
+          :paramref:`.Table.autoload_replace`.
+
+          .. versionadded:: 0.9.1
+
+
+        """
+        if bind is None:
+            bind = _bind_or_error(self)
+
+        with bind.connect() as conn:
+
+            # options applied uniformly to every reflected Table below
+            reflect_opts = {
+                'autoload': True,
+                'autoload_with': conn,
+                'extend_existing': extend_existing,
+                'autoload_replace': autoload_replace
+            }
+
+            if schema is None:
+                schema = self.schema
+
+            if schema is not None:
+                reflect_opts['schema'] = schema
+
+            available = util.OrderedSet(bind.engine.table_names(schema,
+                                                        connection=conn))
+            if views:
+                available.update(
+                    bind.dialect.get_view_names(conn, schema)
+                )
+
+            # .tables keys are schema-qualified, so duplicate detection must
+            # compare against "schema.name" keys when a schema is in play
+            if schema is not None:
+                available_w_schema = util.OrderedSet(["%s.%s" % (schema, name)
+                                        for name in available])
+            else:
+                available_w_schema = available
+
+            current = set(self.tables)
+
+            if only is None:
+                load = [name for name, schname in
+                        zip(available, available_w_schema)
+                        if extend_existing or schname not in current]
+            elif util.callable(only):
+                load = [name for name, schname in
+                        zip(available, available_w_schema)
+                        if (extend_existing or schname not in current)
+                        and only(name, self)]
+            else:
+                # ``only`` is a sequence of names; fail loudly on unknown ones
+                missing = [name for name in only if name not in available]
+                if missing:
+                    s = schema and (" schema '%s'" % schema) or ''
+                    raise exc.InvalidRequestError(
+                        'Could not reflect: requested table(s) not available '
+                        'in %s%s: (%s)' %
+                        (bind.engine.url, s, ', '.join(missing)))
+                load = [name for name in only if extend_existing or
+                        name not in current]
+
+            for name in load:
+                # Table self-registers into this MetaData on construction
+                Table(name, self, **reflect_opts)
+
+ def append_ddl_listener(self, event_name, listener):
+ """Append a DDL event listener to this ``MetaData``.
+
+ .. deprecated:: 0.7
+ See :class:`.DDLEvents`.
+
+ """
+ def adapt_listener(target, connection, **kw):
+ tables = kw['tables']
+ listener(event, target, connection, tables=tables)
+
+ event.listen(self, "" + event_name.replace('-', '_'), adapt_listener)
+
+ def create_all(self, bind=None, tables=None, checkfirst=True):
+ """Create all tables stored in this metadata.
+
+ Conditional by default, will not attempt to recreate tables already
+ present in the target database.
+
+ :param bind:
+ A :class:`.Connectable` used to access the
+ database; if None, uses the existing bind on this ``MetaData``, if
+ any.
+
+ :param tables:
+ Optional list of ``Table`` objects, which is a subset of the total
+ tables in the ``MetaData`` (others are ignored).
+
+ :param checkfirst:
+ Defaults to True, don't issue CREATEs for tables already present
+ in the target database.
+
+ """
+ if bind is None:
+ bind = _bind_or_error(self)
+ bind._run_visitor(ddl.SchemaGenerator,
+ self,
+ checkfirst=checkfirst,
+ tables=tables)
+
+ def drop_all(self, bind=None, tables=None, checkfirst=True):
+ """Drop all tables stored in this metadata.
+
+ Conditional by default, will not attempt to drop tables not present in
+ the target database.
+
+ :param bind:
+ A :class:`.Connectable` used to access the
+ database; if None, uses the existing bind on this ``MetaData``, if
+ any.
+
+ :param tables:
+ Optional list of ``Table`` objects, which is a subset of the
+ total tables in the ``MetaData`` (others are ignored).
+
+ :param checkfirst:
+ Defaults to True, only issue DROPs for tables confirmed to be
+ present in the target database.
+
+ """
+ if bind is None:
+ bind = _bind_or_error(self)
+ bind._run_visitor(ddl.SchemaDropper,
+ self,
+ checkfirst=checkfirst,
+ tables=tables)
+
+
+class ThreadLocalMetaData(MetaData):
+    """A MetaData variant that presents a different ``bind`` in every thread.
+
+    Makes the ``bind`` property of the MetaData a thread-local value, allowing
+    this collection of tables to be bound to different ``Engine``
+    implementations or connections in each thread.
+
+    The ThreadLocalMetaData starts off bound to None in each thread.  Binds
+    must be made explicitly by assigning to the ``bind`` property or using
+    ``connect()``.  You can also re-bind dynamically multiple times per
+    thread, just like a regular ``MetaData``.
+
+    """
+
+    __visit_name__ = 'metadata'
+
+    def __init__(self):
+        """Construct a ThreadLocalMetaData."""
+
+        # per-thread storage for the active engine; shared engine cache is
+        # process-wide (name-mangled to _ThreadLocalMetaData__engines)
+        self.context = util.threading.local()
+        self.__engines = {}
+        super(ThreadLocalMetaData, self).__init__()
+
+    def bind(self):
+        """The bound Engine or Connection for this thread.
+
+        This property may be assigned an Engine or Connection, or assigned a
+        string or URL to automatically create a basic Engine for this bind
+        with ``create_engine()``."""
+
+        return getattr(self.context, '_engine', None)
+
+    @util.dependencies("sqlalchemy.engine.url")
+    def _bind_to(self, url, bind):
+        """Bind to a Connectable in the caller's thread."""
+
+        if isinstance(bind, util.string_types + (url.URL, )):
+            # string/URL binds share one Engine per URL across all threads
+            try:
+                self.context._engine = self.__engines[bind]
+            except KeyError:
+                e = sqlalchemy.create_engine(bind)
+                self.__engines[bind] = e
+                self.context._engine = e
+        else:
+            # TODO: this is squirrely.  we shouldnt have to hold onto engines
+            # in a case like this
+            if bind not in self.__engines:
+                self.__engines[bind] = bind
+            self.context._engine = bind
+
+    bind = property(bind, _bind_to)
+
+    def is_bound(self):
+        """True if there is a bind for this thread."""
+        return (hasattr(self.context, '_engine') and
+                self.context._engine is not None)
+
+    def dispose(self):
+        """Dispose all bound engines, in all thread contexts."""
+
+        # raw Connectables stored in __engines may lack dispose(); skip them
+        for e in self.__engines.values():
+            if hasattr(e, 'dispose'):
+                e.dispose()
+
+
+
diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py
new file mode 100644
index 000000000..951268b22
--- /dev/null
+++ b/lib/sqlalchemy/sql/selectable.py
@@ -0,0 +1,3001 @@
+# sql/selectable.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""The :class:`.FromClause` class of SQL expression elements, representing
+SQL tables and derived rowsets.
+
+"""
+
+from .elements import ClauseElement, TextClause, ClauseList, \
+ and_, Grouping, UnaryExpression, literal_column
+from .elements import _clone, \
+ _literal_as_text, _interpret_as_column_or_from, _expand_cloned,\
+ _select_iterables, _anonymous_label, _clause_element_as_expr,\
+ _cloned_intersection, _cloned_difference, True_, _only_column_elements
+from .base import Immutable, Executable, _generative, \
+ ColumnCollection, ColumnSet, _from_objects, Generative
+from . import type_api
+from .. import inspection
+from .. import util
+from .. import exc
+from operator import attrgetter
+from . import operators
+import operator
+from .annotation import Annotated
+import itertools
+
+def _interpret_as_from(element):
+    """Coerce ``element`` into a FROM-capable construct.
+
+    Plain strings become :class:`.TextClause`; inspectable objects exposing
+    ``.selectable`` resolve through it; anything else raises ArgumentError.
+    """
+    insp = inspection.inspect(element, raiseerr=False)
+    if insp is None:
+        if isinstance(element, util.string_types):
+            return TextClause(util.text_type(element))
+    elif hasattr(insp, "selectable"):
+        return insp.selectable
+    # non-string / non-inspectable input falls through to here
+    raise exc.ArgumentError("FROM expression expected")
+
+def _interpret_as_select(element):
+ element = _interpret_as_from(element)
+ if isinstance(element, Alias):
+ element = element.original
+ if not isinstance(element, Select):
+ element = element.select()
+ return element
+
+def subquery(alias, *args, **kwargs):
+    """Return an :class:`.Alias` object derived
+    from a :class:`.Select`.
+
+    :param alias: the alias (name) assigned to the generated subquery.
+
+    :param \*args, \**kwargs: all other arguments are delivered to the
+      :func:`select` function.
+
+    """
+    return Select(*args, **kwargs).alias(alias)
+
+
+
+def alias(selectable, name=None, flat=False):
+    """Return an :class:`.Alias` object.
+
+    An :class:`.Alias` represents any :class:`.FromClause`
+    with an alternate name assigned within SQL, typically using the ``AS``
+    clause when generated, e.g. ``SELECT * FROM table AS aliasname``.
+
+    Similar functionality is available via the
+    :meth:`~.FromClause.alias` method
+    available on all :class:`.FromClause` subclasses.
+
+    When an :class:`.Alias` is created from a :class:`.Table` object,
+    this has the effect of the table being rendered
+    as ``tablename AS aliasname`` in a SELECT statement.
+
+    For :func:`.select` objects, the effect is that of creating a named
+    subquery, i.e. ``(select ...) AS aliasname``.
+
+    The ``name`` parameter is optional, and provides the name
+    to use in the rendered SQL.  If blank, an "anonymous" name
+    will be deterministically generated at compile time.
+    Deterministic means the name is guaranteed to be unique against
+    other constructs used in the same statement, and will also be the
+    same name for each successive compilation of the same statement
+    object.
+
+    :param selectable: any :class:`.FromClause` subclass,
+        such as a table, select statement, etc.
+
+    :param name: string name to be assigned as the alias.
+        If ``None``, a name will be deterministically generated
+        at compile time.
+
+    :param flat: Will be passed through to the aliasing operation if the
+     given selectable is an instance of :class:`.Join` - see
+     :meth:`.Join.alias` for details.
+
+     .. versionadded:: 0.9.0
+
+    """
+    return selectable.alias(name=name, flat=flat)
+
+
+class Selectable(ClauseElement):
+    """mark a class as being selectable"""
+    __visit_name__ = 'selectable'
+
+    # class-level flag consulted by coercion helpers such as
+    # _interpret_as_from()
+    is_selectable = True
+
+    @property
+    def selectable(self):
+        # a Selectable is its own "selectable" for the inspection protocol
+        return self
+
+
+class FromClause(Selectable):
+    """Represent an element that can be used within the ``FROM``
+    clause of a ``SELECT`` statement.
+
+    The most common forms of :class:`.FromClause` are the
+    :class:`.Table` and the :func:`.select` constructs.  Key
+    features common to all :class:`.FromClause` objects include:
+
+    * a :attr:`.c` collection, which provides per-name access to a collection
+      of :class:`.ColumnElement` objects.
+    * a :attr:`.primary_key` attribute, which is a collection of all those
+      :class:`.ColumnElement` objects that indicate the ``primary_key`` flag.
+    * Methods to generate various derivations of a "from" clause, including
+      :meth:`.FromClause.alias`, :meth:`.FromClause.join`,
+      :meth:`.FromClause.select`.
+
+
+    """
+    __visit_name__ = 'fromclause'
+    named_with_column = False
+    _hide_froms = []
+
+    schema = None
+    """Define the 'schema' attribute for this :class:`.FromClause`.
+
+    This is typically ``None`` for most objects except that of :class:`.Table`,
+    where it is taken as the value of the :paramref:`.Table.schema` argument.
+
+    """
+
+    # decorator/expirer pair; _reset_exported() expires anything memoized
+    # with this when the clause is cloned
+    _memoized_property = util.group_expirable_memoized_property(["_columns"])
+
+    @util.dependencies("sqlalchemy.sql.functions")
+    def count(self, functions, whereclause=None, **params):
+        """return a SELECT COUNT generated against this
+        :class:`.FromClause`."""
+
+        # count against the first primary key column when one exists,
+        # else the first column overall
+        if self.primary_key:
+            col = list(self.primary_key)[0]
+        else:
+            col = list(self.columns)[0]
+        return Select(
+            [functions.func.count(col).label('tbl_row_count')],
+            whereclause,
+            from_obj=[self],
+            **params)
+
+    def select(self, whereclause=None, **params):
+        """return a SELECT of this :class:`.FromClause`.
+
+        .. seealso::
+
+            :func:`~.sql.expression.select` - general purpose
+            method which allows for arbitrary column lists.
+
+        """
+
+        return Select([self], whereclause, **params)
+
+    def join(self, right, onclause=None, isouter=False):
+        """return a join of this :class:`.FromClause` against another
+        :class:`.FromClause`."""
+
+        return Join(self, right, onclause, isouter)
+
+    def outerjoin(self, right, onclause=None):
+        """return an outer join of this :class:`.FromClause` against another
+        :class:`.FromClause`."""
+
+        return Join(self, right, onclause, True)
+
+    def alias(self, name=None, flat=False):
+        """return an alias of this :class:`.FromClause`.
+
+        This is shorthand for calling::
+
+            from sqlalchemy import alias
+            a = alias(self, name=name)
+
+        See :func:`~.expression.alias` for details.
+
+        """
+
+        return Alias(self, name)
+
+    def is_derived_from(self, fromclause):
+        """Return True if this FromClause is 'derived' from the given
+        FromClause.
+
+        An example would be an Alias of a Table is derived from that Table.
+
+        """
+        # this is essentially an "identity" check in the base class.
+        # Other constructs override this to traverse through
+        # contained elements.
+        return fromclause in self._cloned_set
+
+    def _is_lexical_equivalent(self, other):
+        """Return True if this FromClause and the other represent
+        the same lexical identity.
+
+        This tests if either one is a copy of the other, or
+        if they are the same via annotation identity.
+
+        """
+        return self._cloned_set.intersection(other._cloned_set)
+
+    @util.dependencies("sqlalchemy.sql.util")
+    def replace_selectable(self, sqlutil, old, alias):
+        """replace all occurrences of FromClause 'old' with the given Alias
+        object, returning a copy of this :class:`.FromClause`.
+
+        """
+
+        return sqlutil.ClauseAdapter(alias).traverse(self)
+
+    def correspond_on_equivalents(self, column, equivalents):
+        """Return corresponding_column for the given column, or if None
+        search for a match in the given dictionary.
+
+        """
+        col = self.corresponding_column(column, require_embedded=True)
+        # NOTE(review): with ``col is None`` the second test is effectively
+        # ``None in equivalents`` -- confirm whether ``column in equivalents``
+        # was intended here.
+        if col is None and col in equivalents:
+            for equiv in equivalents[col]:
+                nc = self.corresponding_column(equiv, require_embedded=True)
+                if nc:
+                    return nc
+        return col
+
+    def corresponding_column(self, column, require_embedded=False):
+        """Given a :class:`.ColumnElement`, return the exported
+        :class:`.ColumnElement` object from this :class:`.Selectable`
+        which corresponds to that original
+        :class:`~sqlalchemy.schema.Column` via a common ancestor
+        column.
+
+        :param column: the target :class:`.ColumnElement` to be matched
+
+        :param require_embedded: only return corresponding columns for
+         the given :class:`.ColumnElement`, if the given :class:`.ColumnElement`
+         is actually present within a sub-element
+         of this :class:`.FromClause`.  Normally the column will match if
+         it merely shares a common ancestor with one of the exported
+         columns of this :class:`.FromClause`.
+
+        """
+
+        def embedded(expanded_proxy_set, target_set):
+            for t in target_set.difference(expanded_proxy_set):
+                if not set(_expand_cloned([t])
+                        ).intersection(expanded_proxy_set):
+                    return False
+            return True
+
+        # don't dig around if the column is locally present
+        if self.c.contains_column(column):
+            return column
+        col, intersect = None, None
+        target_set = column.proxy_set
+        cols = self.c
+        for c in cols:
+            expanded_proxy_set = set(_expand_cloned(c.proxy_set))
+            i = target_set.intersection(expanded_proxy_set)
+            if i and (not require_embedded
+                      or embedded(expanded_proxy_set, target_set)):
+                if col is None:
+
+                    # no corresponding column yet, pick this one.
+
+                    col, intersect = c, i
+                elif len(i) > len(intersect):
+
+                    # 'c' has a larger field of correspondence than
+                    # 'col'. i.e. selectable.c.a1_x->a1.c.x->table.c.x
+                    # matches a1.c.x->table.c.x better than
+                    # selectable.c.x->table.c.x does.
+
+                    col, intersect = c, i
+                elif i == intersect:
+
+                    # they have the same field of correspondence. see
+                    # which proxy_set has fewer columns in it, which
+                    # indicates a closer relationship with the root
+                    # column. Also take into account the "weight"
+                    # attribute which CompoundSelect() uses to give
+                    # higher precedence to columns based on vertical
+                    # position in the compound statement, and discard
+                    # columns that have no reference to the target
+                    # column (also occurs with CompoundSelect)
+
+                    col_distance = util.reduce(operator.add,
+                            [sc._annotations.get('weight', 1) for sc in
+                                col.proxy_set if sc.shares_lineage(column)])
+                    c_distance = util.reduce(operator.add,
+                            [sc._annotations.get('weight', 1) for sc in
+                                c.proxy_set if sc.shares_lineage(column)])
+                    if c_distance < col_distance:
+                        col, intersect = c, i
+        return col
+
+    @property
+    def description(self):
+        """a brief description of this FromClause.
+
+        Used primarily for error message formatting.
+
+        """
+        return getattr(self, 'name', self.__class__.__name__ + " object")
+
+    def _reset_exported(self):
+        """delete memoized collections when a FromClause is cloned."""
+
+        self._memoized_property.expire_instance(self)
+
+    @_memoized_property
+    def columns(self):
+        """A named-based collection of :class:`.ColumnElement` objects
+        maintained by this :class:`.FromClause`.
+
+        The :attr:`.columns`, or :attr:`.c` collection, is the gateway
+        to the construction of SQL expressions using table-bound or
+        other selectable-bound columns::
+
+            select([mytable]).where(mytable.c.somecolumn == 5)
+
+        """
+
+        if '_columns' not in self.__dict__:
+            self._init_collections()
+            self._populate_column_collection()
+        return self._columns.as_immutable()
+
+    @_memoized_property
+    def primary_key(self):
+        """Return the collection of Column objects which comprise the
+        primary key of this FromClause."""
+
+        # _init_collections() assigns instance attributes that shadow these
+        # memoized properties, so the re-read below returns the populated set
+        self._init_collections()
+        self._populate_column_collection()
+        return self.primary_key
+
+    @_memoized_property
+    def foreign_keys(self):
+        """Return the collection of ForeignKey objects which this
+        FromClause references."""
+
+        # same shadowing pattern as .primary_key above
+        self._init_collections()
+        self._populate_column_collection()
+        return self.foreign_keys
+
+    c = property(attrgetter('columns'),
+            doc="An alias for the :attr:`.columns` attribute.")
+    _select_iterable = property(attrgetter('columns'))
+
+    def _init_collections(self):
+        assert '_columns' not in self.__dict__
+        assert 'primary_key' not in self.__dict__
+        assert 'foreign_keys' not in self.__dict__
+
+        self._columns = ColumnCollection()
+        self.primary_key = ColumnSet()
+        self.foreign_keys = set()
+
+    @property
+    def _cols_populated(self):
+        return '_columns' in self.__dict__
+
+    def _populate_column_collection(self):
+        """Called on subclasses to establish the .c collection.
+
+        Each implementation has a different way of establishing
+        this collection.
+
+        """
+
+    def _refresh_for_new_column(self, column):
+        """Given a column added to the .c collection of an underlying
+        selectable, produce the local version of that column, assuming this
+        selectable ultimately should proxy this column.
+
+        this is used to "ping" a derived selectable to add a new column
+        to its .c. collection when a Column has been added to one of the
+        Table objects it ultimately derives from.
+
+        If the given selectable hasn't populated its .c. collection yet,
+        it should at least pass on the message to the contained selectables,
+        but it will return None.
+
+        This method is currently used by Declarative to allow Table
+        columns to be added to a partially constructed inheritance
+        mapping that may have already produced joins.  The method
+        isn't public right now, as the full span of implications
+        and/or caveats aren't yet clear.
+
+        It's also possible that this functionality could be invoked by
+        default via an event, which would require that
+        selectables maintain a weak referencing collection of all
+        derivations.
+
+        """
+        if not self._cols_populated:
+            return None
+        elif column.key in self.columns and self.columns[column.key] is column:
+            return column
+        else:
+            return None
+
+
+class Join(FromClause):
+ """represent a ``JOIN`` construct between two :class:`.FromClause`
+ elements.
+
+ The public constructor function for :class:`.Join` is the module-level
+ :func:`join()` function, as well as the :func:`join()` method available
+ off all :class:`.FromClause` subclasses.
+
+ """
+ __visit_name__ = 'join'
+
+    def __init__(self, left, right, onclause=None, isouter=False):
+        """Construct a new :class:`.Join`.
+
+        The usual entrypoint here is the :func:`~.expression.join`
+        function or the :meth:`.FromClause.join` method of any
+        :class:`.FromClause` object.
+
+        """
+        self.left = _interpret_as_from(left)
+        # self_group() parenthesizes the right side when needed
+        self.right = _interpret_as_from(right).self_group()
+
+        if onclause is None:
+            # derive the ON clause from foreign key relationships
+            self.onclause = self._match_primaries(self.left, self.right)
+        else:
+            self.onclause = onclause
+
+        self.isouter = isouter
+
+    @classmethod
+    def _create_outerjoin(cls, left, right, onclause=None):
+        """Return an ``OUTER JOIN`` clause element.
+
+        The returned object is an instance of :class:`.Join`.
+
+        Similar functionality is also available via the
+        :meth:`~.FromClause.outerjoin()` method on any
+        :class:`.FromClause`.
+
+        :param left: The left side of the join.
+
+        :param right: The right side of the join.
+
+        :param onclause: Optional criterion for the ``ON`` clause, is
+          derived from foreign key relationships established between
+          left and right otherwise.
+
+        To chain joins together, use the :meth:`.FromClause.join` or
+        :meth:`.FromClause.outerjoin` methods on the resulting
+        :class:`.Join` object.
+
+        """
+        return cls(left, right, onclause, isouter=True)
+
+
+    @classmethod
+    def _create_join(cls, left, right, onclause=None, isouter=False):
+        """Return a ``JOIN`` clause element (regular inner join).
+
+        The returned object is an instance of :class:`.Join`.
+
+        Similar functionality is also available via the
+        :meth:`~.FromClause.join()` method on any
+        :class:`.FromClause`.
+
+        :param left: The left side of the join.
+
+        :param right: The right side of the join.
+
+        :param onclause: Optional criterion for the ``ON`` clause, is
+          derived from foreign key relationships established between
+          left and right otherwise.
+
+        :param isouter: if True, produce an outer join; synonymous
+         with :func:`.outerjoin`.
+
+        To chain joins together, use the :meth:`.FromClause.join` or
+        :meth:`.FromClause.outerjoin` methods on the resulting
+        :class:`.Join` object.
+
+
+        """
+        return cls(left, right, onclause, isouter)
+
+
+    @property
+    def description(self):
+        # id() is included to disambiguate anonymous joins in error messages
+        return "Join object on %s(%d) and %s(%d)" % (
+            self.left.description,
+            id(self.left),
+            self.right.description,
+            id(self.right))
+
+ def is_derived_from(self, fromclause):
+ return fromclause is self or \
+ self.left.is_derived_from(fromclause) or \
+ self.right.is_derived_from(fromclause)
+
+    def self_group(self, against=None):
+        # a JOIN used inside another FROM context must be parenthesized
+        return FromGrouping(self)
+
+    @util.dependencies("sqlalchemy.sql.util")
+    def _populate_column_collection(self, sqlutil):
+        """Build .c, .primary_key, .foreign_keys from both sides of the join."""
+        columns = [c for c in self.left.columns] + \
+                        [c for c in self.right.columns]
+
+        # reduce_columns collapses PK columns made equivalent by the ON clause
+        self.primary_key.extend(sqlutil.reduce_columns(
+                (c for c in columns if c.primary_key), self.onclause))
+        # keyed by ._label ("<table>_<column>") to disambiguate both sides
+        self._columns.update((col._label, col) for col in columns)
+        self.foreign_keys.update(itertools.chain(
+                        *[col.foreign_keys for col in columns]))
+
+    def _refresh_for_new_column(self, column):
+        """Propagate a newly-added Column from either side into this join's
+        collections, if they have been populated."""
+        col = self.left._refresh_for_new_column(column)
+        if col is None:
+            col = self.right._refresh_for_new_column(column)
+        if col is not None:
+            if self._cols_populated:
+                self._columns[col._label] = col
+                # NOTE(review): this adds the Column itself to .foreign_keys,
+                # whereas _populate_column_collection stores ForeignKey
+                # objects there -- confirm this asymmetry is intended.
+                self.foreign_keys.add(col)
+                if col.primary_key:
+                    self.primary_key.add(col)
+            return col
+        return None
+
+    def _copy_internals(self, clone=_clone, **kw):
+        # memoized column collections must be rebuilt against the clones
+        self._reset_exported()
+        self.left = clone(self.left, **kw)
+        self.right = clone(self.right, **kw)
+        self.onclause = clone(self.onclause, **kw)
+
+    def get_children(self, **kwargs):
+        # traversal order: left side, right side, then the ON clause
+        return self.left, self.right, self.onclause
+
+ def _match_primaries(self, left, right):
+ if isinstance(left, Join):
+ left_right = left.right
+ else:
+ left_right = None
+ return self._join_condition(left, right, a_subset=left_right)
+
+    @classmethod
+    def _join_condition(cls, a, b, ignore_nonexistent_tables=False,
+                                a_subset=None,
+                                consider_as_foreign_keys=None):
+        """create a join condition between two tables or selectables.
+
+        e.g.::
+
+            join_condition(tablea, tableb)
+
+        would produce an expression along the lines of::
+
+            tablea.c.id==tableb.c.tablea_id
+
+        The join is determined based on the foreign key relationships
+        between the two selectables.   If there are multiple ways
+        to join, or no way to join, an error is raised.
+
+        :param ignore_nonexistent_tables:  Deprecated - this
+        flag is no longer used.  Only resolution errors regarding
+        the two given tables are propagated.
+
+        :param a_subset: An optional expression that is a sub-component
+        of ``a``.  An attempt will be made to join to just this sub-component
+        first before looking at the full ``a`` construct, and if found
+        will be successful even if there are other ways to join to ``a``.
+        This allows the "right side" of a join to be passed thereby
+        providing a "natural join".
+
+        """
+        crit = []
+        constraints = set()
+
+        # try the narrower a_subset first; only fall back to the full
+        # ``a`` if no criteria were found against the subset
+        for left in (a_subset, a):
+            if left is None:
+                continue
+            # FKs on ``b`` that point into ``left``; sorted for determinism
+            for fk in sorted(
+                        b.foreign_keys,
+                        key=lambda fk: fk.parent._creation_order):
+                if consider_as_foreign_keys is not None and \
+                        fk.parent not in consider_as_foreign_keys:
+                    continue
+                try:
+                    col = fk.get_referent(left)
+                except exc.NoReferenceError as nrte:
+                    if nrte.table_name == left.name:
+                        raise
+                    else:
+                        continue
+
+                if col is not None:
+                    crit.append(col == fk.parent)
+                    constraints.add(fk.constraint)
+            if left is not b:
+                # and FKs on ``left`` that point into ``b``
+                for fk in sorted(
+                            left.foreign_keys,
+                            key=lambda fk: fk.parent._creation_order):
+                    if consider_as_foreign_keys is not None and \
+                            fk.parent not in consider_as_foreign_keys:
+                        continue
+                    try:
+                        col = fk.get_referent(b)
+                    except exc.NoReferenceError as nrte:
+                        if nrte.table_name == b.name:
+                            raise
+                        else:
+                            # this is totally covered.  can't get
+                            # coverage to mark it.
+                            continue
+
+                    if col is not None:
+                        crit.append(col == fk.parent)
+                        constraints.add(fk.constraint)
+            if crit:
+                break
+
+        if len(crit) == 0:
+            if isinstance(b, FromGrouping):
+                hint = " Perhaps you meant to convert the right side to a "\
+                                    "subquery using alias()?"
+            else:
+                hint = ""
+            raise exc.NoForeignKeysError(
+                "Can't find any foreign key relationships "
+                "between '%s' and '%s'.%s" % (a.description, b.description, hint))
+        elif len(constraints) > 1:
+            # multiple distinct FK constraints -> ambiguous; require an
+            # explicit onclause from the caller
+            raise exc.AmbiguousForeignKeysError(
+                "Can't determine join between '%s' and '%s'; "
+                "tables have more than one foreign key "
+                "constraint relationship between them. "
+                "Please specify the 'onclause' of this "
+                "join explicitly." % (a.description, b.description))
+        elif len(crit) == 1:
+            return (crit[0])
+        else:
+            return and_(*crit)
+
+
+    def select(self, whereclause=None, **kwargs):
+        """Create a :class:`.Select` from this :class:`.Join`.
+
+        The equivalent long-hand form, given a :class:`.Join` object
+        ``j``, is::
+
+            from sqlalchemy import select
+            j = select([j.left, j.right], **kw).\\
+                where(whereclause).\\
+                select_from(j)
+
+        :param whereclause: the WHERE criterion that will be sent to
+          the :func:`select()` function
+
+        :param \**kwargs: all other kwargs are sent to the
+          underlying :func:`select()` function.
+
+        """
+        # both sides are listed as columns while the join itself supplies
+        # the FROM clause
+        collist = [self.left, self.right]
+
+        return Select(collist, whereclause, from_obj=[self], **kwargs)
+
+    @property
+    def bind(self):
+        """Return the bind of the left side, falling back to the right."""
+        return self.left.bind or self.right.bind
+
    @util.dependencies("sqlalchemy.sql.util")
    def alias(self, sqlutil, name=None, flat=False):
        """return an alias of this :class:`.Join`.

        The default behavior here is to first produce a SELECT
        construct from this :class:`.Join`, then to produce a
        :class:`.Alias` from that.  So given a join of the form::

            j = table_a.join(table_b, table_a.c.id == table_b.c.a_id)

        The JOIN by itself would look like::

            table_a JOIN table_b ON table_a.id = table_b.a_id

        Whereas the alias of the above, ``j.alias()``, would in a
        SELECT context look like::

            (SELECT table_a.id AS table_a_id, table_b.id AS table_b_id,
                table_b.a_id AS table_b_a_id
                FROM table_a
                JOIN table_b ON table_a.id = table_b.a_id) AS anon_1

        The equivalent long-hand form, given a :class:`.Join` object
        ``j``, is::

            from sqlalchemy import select, alias
            j = alias(
                select([j.left, j.right]).\\
                    select_from(j).\\
                    with_labels(True).\\
                    correlate(False),
                name=name
            )

        The selectable produced by :meth:`.Join.alias` features the same
        columns as that of the two individual selectables presented under
        a single name - the individual columns are "auto-labeled", meaning
        the ``.c.`` collection of the resulting :class:`.Alias` represents
        the names of the individual columns using a ``<tablename>_<columname>``
        scheme::

            j.c.table_a_id
            j.c.table_b_a_id

        :meth:`.Join.alias` also features an alternate
        option for aliasing joins which produces no enclosing SELECT and
        does not normally apply labels to the column names.  The
        ``flat=True`` option will call :meth:`.FromClause.alias`
        against the left and right sides individually.
        Using this option, no new ``SELECT`` is produced;
        we instead, from a construct as below::

            j = table_a.join(table_b, table_a.c.id == table_b.c.a_id)
            j = j.alias(flat=True)

        we get a result like this::

            table_a AS table_a_1 JOIN table_b AS table_b_1 ON
            table_a_1.id = table_b_1.a_id

        The ``flat=True`` argument is also propagated to the contained
        selectables, so that a composite join such as::

            j = table_a.join(
                    table_b.join(table_c,
                            table_b.c.id == table_c.c.b_id),
                    table_b.c.a_id == table_a.c.id
                ).alias(flat=True)

        Will produce an expression like::

            table_a AS table_a_1 JOIN (
                    table_b AS table_b_1 JOIN table_c AS table_c_1
                    ON table_b_1.id = table_c_1.b_id
            ) ON table_a_1.id = table_b_1.a_id

        The standalone :func:`~.expression.alias` function as well as the
        base :meth:`.FromClause.alias` method also support the ``flat=True``
        argument as a no-op, so that the argument can be passed to the
        ``alias()`` method of any selectable.

        .. versionadded:: 0.9.0 Added the ``flat=True`` option to create
          "aliases" of joins without enclosing inside of a SELECT
          subquery.

        :param name: name given to the alias.

        :param flat: if True, produce an alias of the left and right
         sides of this :class:`.Join` and return the join of those
         two selectables.   This produces join expression that does not
         include an enclosing SELECT.

        .. versionadded:: 0.9.0

        .. seealso::

            :func:`~.expression.alias`

        """
        if flat:
            # flat aliasing produces two distinct aliases (left and right),
            # so a single explicit name cannot apply
            assert name is None, "Can't send name argument with flat"
            # flat=True propagates into the two sides, so nested joins
            # are themselves flat-aliased recursively
            left_a, right_a = self.left.alias(flat=True), \
                self.right.alias(flat=True)
            # rewrite the ON clause so that it refers to the aliased
            # left/right sides rather than the originals
            adapter = sqlutil.ClauseAdapter(left_a).\
                chain(sqlutil.ClauseAdapter(right_a))

            return left_a.join(right_a,
                    adapter.traverse(self.onclause), isouter=self.isouter)
        else:
            # default: wrap this join in a labeled, non-correlating SELECT
            # subquery, then alias that subquery
            return self.select(use_labels=True, correlate=False).alias(name)
+
+ @property
+ def _hide_froms(self):
+ return itertools.chain(*[_from_objects(x.left, x.right)
+ for x in self._cloned_set])
+
+ @property
+ def _from_objects(self):
+ return [self] + \
+ self.onclause._from_objects + \
+ self.left._from_objects + \
+ self.right._from_objects
+
+
class Alias(FromClause):
    """Represent a table or selectable alias (``AS``).

    An :class:`.Alias` stands in for a table or sub-select within a SQL
    statement, typically rendered using the ``AS`` keyword (or without the
    keyword on certain databases such as Oracle).

    Instances are constructed via the :func:`~.expression.alias` module
    level function, or the :meth:`.FromClause.alias` method available on
    all :class:`.FromClause` subclasses.

    """

    __visit_name__ = 'alias'
    named_with_column = True

    def __init__(self, selectable, name=None):
        # unwrap nested Alias objects to find the ultimate selectable
        base = selectable
        while isinstance(base, Alias):
            base = base.element
        self.original = base
        self.supports_execution = base.supports_execution
        if self.supports_execution:
            self._execution_options = base._execution_options
        self.element = selectable
        if name is None:
            # generate an anonymous label, seeded with the original's
            # name when it has one
            if base.named_with_column:
                name = getattr(base, 'name', None)
            name = _anonymous_label(
                '%%(%d %s)s' % (id(self), name or 'anon'))
        self.name = name

    @property
    def description(self):
        # Python 2 expects a bytestring for the description
        if util.py3k:
            return self.name
        return self.name.encode('ascii', 'backslashreplace')

    def as_scalar(self):
        try:
            return self.element.as_scalar()
        except AttributeError:
            raise AttributeError("Element %s does not support "
                                 "'as_scalar()'" % self.element)

    def is_derived_from(self, fromclause):
        return fromclause in self._cloned_set or \
            self.element.is_derived_from(fromclause)

    def _populate_column_collection(self):
        # proxy every column of the wrapped element onto this alias
        for column in self.element.columns:
            column._make_proxy(self)

    def _refresh_for_new_column(self, column):
        col = self.element._refresh_for_new_column(column)
        if col is None or not self._cols_populated:
            return None
        return col._make_proxy(self)

    def _copy_internals(self, clone=_clone, **kw):
        # don't apply anything to an aliased Table
        # for now.   May want to drive this from
        # the given **kw.
        if isinstance(self.element, TableClause):
            return
        self._reset_exported()
        self.element = clone(self.element, **kw)
        # re-derive "original" from the newly cloned element
        base = self.element
        while isinstance(base, Alias):
            base = base.element
        self.original = base

    def get_children(self, column_collections=True, **kw):
        if column_collections:
            for col in self.c:
                yield col
        yield self.element

    @property
    def _from_objects(self):
        return [self]

    @property
    def bind(self):
        return self.element.bind
+
+
class CTE(Alias):
    """Represent a Common Table Expression.

    A :class:`.CTE` is obtained using the :meth:`.SelectBase.cte`
    method from any selectable; see that method for complete examples.

    .. versionadded:: 0.7.6

    """

    __visit_name__ = 'cte'

    def __init__(self, selectable,
                 name=None,
                 recursive=False,
                 _cte_alias=None,
                 _restates=frozenset()):
        self.recursive = recursive
        self._cte_alias = _cte_alias
        self._restates = _restates
        super(CTE, self).__init__(selectable, name=name)

    def alias(self, name=None, flat=False):
        # ``flat`` is accepted for interface compatibility with
        # FromClause.alias() but has no effect here
        return CTE(
            self.original,
            name=name,
            recursive=self.recursive,
            _cte_alias=self,
        )

    def union(self, other):
        return self._restate_with(self.original.union(other))

    def union_all(self, other):
        return self._restate_with(self.original.union_all(other))

    def _restate_with(self, element):
        # build a new CTE over ``element`` that restates this one,
        # keeping the same name and recursive flag
        return CTE(
            element,
            name=self.name,
            recursive=self.recursive,
            _restates=self._restates.union([self])
        )
+
+
+
+
class FromGrouping(FromClause):
    """Represent a parenthesized grouping of a FROM clause element."""

    __visit_name__ = 'grouping'

    def __init__(self, element):
        self.element = element

    def _init_collections(self):
        # column collections are delegated to the contained element
        pass

    @property
    def columns(self):
        return self.element.columns

    @property
    def primary_key(self):
        return self.element.primary_key

    @property
    def foreign_keys(self):
        return self.element.foreign_keys

    def is_derived_from(self, element):
        return self.element.is_derived_from(element)

    def alias(self, **kw):
        # alias the inner element, re-wrapping it in a grouping
        return FromGrouping(self.element.alias(**kw))

    @property
    def _hide_froms(self):
        return self.element._hide_froms

    def get_children(self, **kwargs):
        return (self.element, )

    def _copy_internals(self, clone=_clone, **kw):
        self.element = clone(self.element, **kw)

    @property
    def _from_objects(self):
        return self.element._from_objects

    def __getattr__(self, attr):
        # any attribute not found here is proxied to the wrapped element
        return getattr(self.element, attr)

    def __getstate__(self):
        # only the wrapped element needs to survive pickling
        return {'element': self.element}

    def __setstate__(self, state):
        self.element = state['element']
+
class TableClause(Immutable, FromClause):
    """Represents a minimal "table" construct.

    This is a lightweight table object that has only a name and a
    collection of columns, which are typically produced
    by the :func:`.expression.column` function::

        from sqlalchemy.sql import table, column

        user = table("user",
                column("id"),
                column("name"),
                column("description"),
        )

    The :class:`.TableClause` construct serves as the base for
    the more commonly used :class:`~.schema.Table` object, providing
    the usual set of :class:`~.expression.FromClause` services including
    the ``.c.`` collection and statement generation methods.

    It does **not** provide all the additional schema-level services
    of :class:`~.schema.Table`, including constraints, references to other
    tables, or support for :class:`.MetaData`-level services.  It's useful
    on its own as an ad-hoc construct used to generate quick SQL
    statements when a more fully fledged :class:`~.schema.Table`
    is not on hand.

    """

    __visit_name__ = 'table'

    named_with_column = True

    implicit_returning = False
    """:class:`.TableClause` doesn't support having a primary key or column
    -level defaults, so implicit returning doesn't apply."""

    _autoincrement_column = None
    """No PK or default support so no autoincrement column."""

    def __init__(self, name, *columns):
        """Produce a new :class:`.TableClause`.

        The object returned is an instance of :class:`.TableClause`, which
        represents the "syntactical" portion of the schema-level
        :class:`~.schema.Table` object.
        It may be used to construct lightweight table constructs.

        Note that the :func:`.expression.table` function is not part of
        the ``sqlalchemy`` namespace.  It must be imported from the
        ``sql`` package::

            from sqlalchemy.sql import table, column

        :param name: Name of the table.

        :param columns: A collection of :func:`.expression.column` constructs.

        """

        super(TableClause, self).__init__()
        self.name = self.fullname = name
        self._columns = ColumnCollection()
        self.primary_key = ColumnSet()
        self.foreign_keys = set()
        for c in columns:
            self.append_column(c)

    def _init_collections(self):
        # collections are created eagerly in __init__; nothing to do here
        pass

    @util.memoized_property
    def description(self):
        # Python 2 expects a bytestring for the description
        if util.py3k:
            return self.name
        else:
            return self.name.encode('ascii', 'backslashreplace')

    def append_column(self, c):
        """Append the given column to this :class:`.TableClause`'s
        column collection, associating it with this table."""
        self._columns[c.key] = c
        c.table = self

    def get_children(self, column_collections=True, **kwargs):
        if column_collections:
            return [c for c in self.c]
        else:
            return []

    @util.dependencies("sqlalchemy.sql.functions")
    def count(self, functions, whereclause=None, **params):
        """return a SELECT COUNT generated against this
        :class:`.TableClause`."""

        # count against the first primary key column when one is present,
        # else the first column overall
        # NOTE(review): assumes the table has at least one column — an
        # empty TableClause would raise IndexError here; confirm callers
        if self.primary_key:
            col = list(self.primary_key)[0]
        else:
            col = list(self.columns)[0]
        return Select(
                    [functions.func.count(col).label('tbl_row_count')],
                    whereclause,
                    from_obj=[self],
                    **params)

    @util.dependencies("sqlalchemy.sql.dml")
    def insert(self, dml, values=None, inline=False, **kwargs):
        """Generate an :func:`.insert` construct against this
        :class:`.TableClause`.

        E.g.::

            table.insert().values(name='foo')

        See :func:`.insert` for argument and usage information.

        """

        return dml.Insert(self, values=values, inline=inline, **kwargs)

    @util.dependencies("sqlalchemy.sql.dml")
    def update(self, dml, whereclause=None, values=None, inline=False, **kwargs):
        """Generate an :func:`.update` construct against this
        :class:`.TableClause`.

        E.g.::

            table.update().where(table.c.id==7).values(name='foo')

        See :func:`.update` for argument and usage information.

        """

        return dml.Update(self, whereclause=whereclause,
                            values=values, inline=inline, **kwargs)

    @util.dependencies("sqlalchemy.sql.dml")
    def delete(self, dml, whereclause=None, **kwargs):
        """Generate a :func:`.delete` construct against this
        :class:`.TableClause`.

        E.g.::

            table.delete().where(table.c.id==7)

        See :func:`.delete` for argument and usage information.

        """

        return dml.Delete(self, whereclause, **kwargs)

    @property
    def _from_objects(self):
        return [self]
+
+
class ForUpdateArg(ClauseElement):
    """Represents arguments specified to :meth:`.Select.for_update`.

    .. versionadded:: 0.9.0

    """

    def __init__(self, nowait=False, read=False, of=None):
        """Represents arguments specified to :meth:`.Select.for_update`.

        .. versionadded:: 0.9.0
        """

        self.nowait = nowait
        self.read = read
        if of is not None:
            # normalize "of" into a list of column/table expressions
            self.of = [_interpret_as_column_or_from(elem)
                       for elem in util.to_list(of)]
        else:
            self.of = None

    @classmethod
    def parse_legacy_select(cls, arg):
        """Parse the for_update argument of :func:`.select`.

        :param arg: Defines the lockmode to use.

            ``None`` - translates to no lockmode

            ``'update'`` - translates to ``FOR UPDATE``
            (standard SQL, supported by most dialects)

            ``'nowait'`` - translates to ``FOR UPDATE NOWAIT``
            (supported by Oracle, PostgreSQL 8.1 upwards)

            ``'read'`` - translates to ``LOCK IN SHARE MODE`` (for MySQL),
            and ``FOR SHARE`` (for PostgreSQL)

            ``'read_nowait'`` - translates to ``FOR SHARE NOWAIT``
            (supported by PostgreSQL). ``FOR SHARE`` and
            ``FOR SHARE NOWAIT`` (PostgreSQL).

        :return: a new :class:`.ForUpdateArg`, or ``None`` when no
         lockmode was requested.
        :raises sqlalchemy.exc.ArgumentError: if ``arg`` is not one of the
         recognized values.
        """
        if arg in (None, False):
            return None

        nowait = read = False
        if arg == 'nowait':
            nowait = True
        elif arg == 'read':
            read = True
        elif arg == 'read_nowait':
            read = nowait = True
        elif arg == 'update':
            # documented legacy spelling for a plain FOR UPDATE;
            # equivalent to passing True
            pass
        elif arg is not True:
            raise exc.ArgumentError("Unknown for_update argument: %r" % arg)

        return ForUpdateArg(read=read, nowait=nowait)

    @property
    def legacy_for_update_value(self):
        """Render this argument back into the legacy string/boolean form
        accepted by :meth:`parse_legacy_select`."""
        if self.read and not self.nowait:
            return "read"
        elif self.read and self.nowait:
            return "read_nowait"
        elif self.nowait:
            return "nowait"
        else:
            return True

    def _copy_internals(self, clone=_clone, **kw):
        # only the "of" expressions hold clauses that need cloning
        if self.of is not None:
            self.of = [clone(col, **kw) for col in self.of]
+
+
class SelectBase(Executable, FromClause):
    """Base class for SELECT statements.


    This includes :class:`.Select`, :class:`.CompoundSelect` and
    :class:`.TextAsFrom`.


    """

    def as_scalar(self):
        """return a 'scalar' representation of this selectable, which can be
        used as a column expression.

        Typically, a select statement which has only one column in its columns
        clause is eligible to be used as a scalar expression.

        The returned object is an instance of
        :class:`ScalarSelect`.

        """
        return ScalarSelect(self)

    def label(self, name):
        """return a 'scalar' representation of this selectable, embedded as a
        subquery with a label.

        .. seealso::

            :meth:`~.SelectBase.as_scalar`.

        """
        return self.as_scalar().label(name)

    def cte(self, name=None, recursive=False):
        """Return a new :class:`.CTE`, or Common Table Expression instance.

        Common table expressions are a SQL standard whereby SELECT
        statements can draw upon secondary statements specified along
        with the primary statement, using a clause called "WITH".
        Special semantics regarding UNION can also be employed to
        allow "recursive" queries, where a SELECT statement can draw
        upon the set of rows that have previously been selected.

        SQLAlchemy detects :class:`.CTE` objects, which are treated
        similarly to :class:`.Alias` objects, as special elements
        to be delivered to the FROM clause of the statement as well
        as to a WITH clause at the top of the statement.

        .. versionadded:: 0.7.6

        :param name: name given to the common table expression.  Like
         :meth:`._FromClause.alias`, the name can be left as ``None``
         in which case an anonymous symbol will be used at query
         compile time.
        :param recursive: if ``True``, will render ``WITH RECURSIVE``.
         A recursive common table expression is intended to be used in
         conjunction with UNION ALL in order to derive rows
         from those already selected.

        The following examples illustrate two examples from
        Postgresql's documentation at
        http://www.postgresql.org/docs/8.4/static/queries-with.html.

        Example 1, non recursive::

            from sqlalchemy import Table, Column, String, Integer, MetaData, \\
                select, func

            metadata = MetaData()

            orders = Table('orders', metadata,
                Column('region', String),
                Column('amount', Integer),
                Column('product', String),
                Column('quantity', Integer)
            )

            regional_sales = select([
                                orders.c.region,
                                func.sum(orders.c.amount).label('total_sales')
                            ]).group_by(orders.c.region).cte("regional_sales")


            top_regions = select([regional_sales.c.region]).\\
                    where(
                        regional_sales.c.total_sales >
                        select([
                            func.sum(regional_sales.c.total_sales)/10
                        ])
                    ).cte("top_regions")

            statement = select([
                        orders.c.region,
                        orders.c.product,
                        func.sum(orders.c.quantity).label("product_units"),
                        func.sum(orders.c.amount).label("product_sales")
                ]).where(orders.c.region.in_(
                    select([top_regions.c.region])
                )).group_by(orders.c.region, orders.c.product)

            result = conn.execute(statement).fetchall()

        Example 2, WITH RECURSIVE::

            from sqlalchemy import Table, Column, String, Integer, MetaData, \\
                select, func

            metadata = MetaData()

            parts = Table('parts', metadata,
                Column('part', String),
                Column('sub_part', String),
                Column('quantity', Integer),
            )

            included_parts = select([
                                parts.c.sub_part,
                                parts.c.part,
                                parts.c.quantity]).\\
                                where(parts.c.part=='our part').\\
                                cte(recursive=True)


            incl_alias = included_parts.alias()
            parts_alias = parts.alias()
            included_parts = included_parts.union_all(
                select([
                    parts_alias.c.part,
                    parts_alias.c.sub_part,
                    parts_alias.c.quantity
                ]).
                    where(parts_alias.c.part==incl_alias.c.sub_part)
            )

            statement = select([
                        included_parts.c.sub_part,
                        func.sum(included_parts.c.quantity).
                          label('total_quantity')
                    ]).\\
                    select_from(included_parts.join(parts,
                                included_parts.c.part==parts.c.part)).\\
                    group_by(included_parts.c.sub_part)

            result = conn.execute(statement).fetchall()


        .. seealso::

            :meth:`.orm.query.Query.cte` - ORM version of :meth:`.SelectBase.cte`.

        """
        return CTE(self, name=name, recursive=recursive)

    @_generative
    @util.deprecated('0.6',
                     message="``autocommit()`` is deprecated. Use "
                     ":meth:`.Executable.execution_options` with the "
                     "'autocommit' flag.")
    def autocommit(self):
        """return a new selectable with the 'autocommit' flag set to
        True.
        """

        self._execution_options = \
            self._execution_options.union({'autocommit': True})

    def _generate(self):
        """Override the default _generate() method to also clear out
        exported collections."""

        # shallow-copy the instance, then reset the column collections so
        # that the generative copy re-exports them on next access
        s = self.__class__.__new__(self.__class__)
        s.__dict__ = self.__dict__.copy()
        s._reset_exported()
        return s

    @property
    def _from_objects(self):
        return [self]
+
class GenerativeSelect(SelectBase):
    """Base class for SELECT statements where additional elements can be
    added.

    This serves as the base for :class:`.Select` and :class:`.CompoundSelect`
    where elements such as ORDER BY, GROUP BY can be added and column rendering
    can be controlled.  Compare to :class:`.TextAsFrom`, which, while it
    subclasses :class:`.SelectBase` and is also a SELECT construct, represents
    a fixed textual string which cannot be altered at this level, only
    wrapped as a subquery.

    .. versionadded:: 0.9.0 :class:`.GenerativeSelect` was added to
       provide functionality specific to :class:`.Select` and :class:`.CompoundSelect`
       while allowing :class:`.SelectBase` to be used for other SELECT-like
       objects, e.g. :class:`.TextAsFrom`.

    """
    # class-level defaults; instances assign their own values only when
    # the corresponding clause is actually present
    _order_by_clause = ClauseList()
    _group_by_clause = ClauseList()
    _limit = None
    _offset = None
    _for_update_arg = None

    def __init__(self,
                 use_labels=False,
                 for_update=False,
                 limit=None,
                 offset=None,
                 order_by=None,
                 group_by=None,
                 bind=None,
                 autocommit=None):
        self.use_labels = use_labels

        # legacy for_update values (True/'nowait'/'read'/'read_nowait')
        # are converted into a ForUpdateArg
        if for_update is not False:
            self._for_update_arg = ForUpdateArg.parse_legacy_select(for_update)

        if autocommit is not None:
            util.warn_deprecated('autocommit on select() is '
                                 'deprecated.  Use .execution_options(a'
                                 'utocommit=True)')
            self._execution_options = \
                self._execution_options.union(
                    {'autocommit': autocommit})
        if limit is not None:
            self._limit = util.asint(limit)
        if offset is not None:
            self._offset = util.asint(offset)
        self._bind = bind

        if order_by is not None:
            self._order_by_clause = ClauseList(*util.to_list(order_by))
        if group_by is not None:
            self._group_by_clause = ClauseList(*util.to_list(group_by))

    @property
    def for_update(self):
        """Provide legacy dialect support for the ``for_update`` attribute.
        """
        if self._for_update_arg is not None:
            return self._for_update_arg.legacy_for_update_value
        else:
            return None

    @for_update.setter
    def for_update(self, value):
        self._for_update_arg = ForUpdateArg.parse_legacy_select(value)

    @_generative
    def with_for_update(self, nowait=False, read=False, of=None):
        """Specify a ``FOR UPDATE`` clause for this :class:`.GenerativeSelect`.

        E.g.::

            stmt = select([table]).with_for_update(nowait=True)

        On a database like Postgresql or Oracle, the above would render a
        statement like::

            SELECT table.a, table.b FROM table FOR UPDATE NOWAIT

        on other backends, the ``nowait`` option is ignored and instead
        would produce::

            SELECT table.a, table.b FROM table FOR UPDATE

        When called with no arguments, the statement will render with
        the suffix ``FOR UPDATE``.   Additional arguments can then be
        provided which allow for common database-specific
        variants.

        :param nowait: boolean; will render ``FOR UPDATE NOWAIT`` on Oracle and
         Postgresql dialects.

        :param read: boolean; will render ``LOCK IN SHARE MODE`` on MySQL,
         ``FOR SHARE`` on Postgresql.  On Postgresql, when combined with
         ``nowait``, will render ``FOR SHARE NOWAIT``.

        :param of: SQL expression or list of SQL expression elements
         (typically :class:`.Column` objects or a compatible expression) which
         will render into a ``FOR UPDATE OF`` clause; supported by PostgreSQL
         and Oracle.  May render as a table or as a column depending on
         backend.

        .. versionadded:: 0.9.0

        """
        self._for_update_arg = ForUpdateArg(nowait=nowait, read=read, of=of)

    @_generative
    def apply_labels(self):
        """return a new selectable with the 'use_labels' flag set to True.

        This will result in column expressions being generated using labels
        against their table name, such as "SELECT somecolumn AS
        tablename_somecolumn". This allows selectables which contain multiple
        FROM clauses to produce a unique set of column names regardless of
        name conflicts among the individual FROM clauses.

        """
        self.use_labels = True

    @_generative
    def limit(self, limit):
        """return a new selectable with the given LIMIT criterion
        applied."""

        self._limit = util.asint(limit)

    @_generative
    def offset(self, offset):
        """return a new selectable with the given OFFSET criterion
        applied."""

        self._offset = util.asint(offset)

    @_generative
    def order_by(self, *clauses):
        """return a new selectable with the given list of ORDER BY
        criterion applied.

        The criterion will be appended to any pre-existing ORDER BY
        criterion.

        """

        self.append_order_by(*clauses)

    @_generative
    def group_by(self, *clauses):
        """return a new selectable with the given list of GROUP BY
        criterion applied.

        The criterion will be appended to any pre-existing GROUP BY
        criterion.

        """

        self.append_group_by(*clauses)

    def append_order_by(self, *clauses):
        """Append the given ORDER BY criterion applied to this selectable.

        The criterion will be appended to any pre-existing ORDER BY criterion.

        This is an **in-place** mutation method; the
        :meth:`~.GenerativeSelect.order_by` method is preferred, as it provides standard
        :term:`method chaining`.

        """
        # a single None argument resets any existing ORDER BY
        if len(clauses) == 1 and clauses[0] is None:
            self._order_by_clause = ClauseList()
        else:
            if getattr(self, '_order_by_clause', None) is not None:
                clauses = list(self._order_by_clause) + list(clauses)
            self._order_by_clause = ClauseList(*clauses)

    def append_group_by(self, *clauses):
        """Append the given GROUP BY criterion applied to this selectable.

        The criterion will be appended to any pre-existing GROUP BY criterion.

        This is an **in-place** mutation method; the
        :meth:`~.GenerativeSelect.group_by` method is preferred, as it provides standard
        :term:`method chaining`.

        """
        # a single None argument resets any existing GROUP BY
        if len(clauses) == 1 and clauses[0] is None:
            self._group_by_clause = ClauseList()
        else:
            if getattr(self, '_group_by_clause', None) is not None:
                clauses = list(self._group_by_clause) + list(clauses)
            self._group_by_clause = ClauseList(*clauses)
+
+
class CompoundSelect(GenerativeSelect):
    """Forms the basis of ``UNION``, ``UNION ALL``, and other
    SELECT-based set operations.


    .. seealso::

        :func:`.union`

        :func:`.union_all`

        :func:`.intersect`

        :func:`.intersect_all`

        :func:`.except`

        :func:`.except_all`

    """

    __visit_name__ = 'compound_select'

    UNION = util.symbol('UNION')
    UNION_ALL = util.symbol('UNION ALL')
    EXCEPT = util.symbol('EXCEPT')
    EXCEPT_ALL = util.symbol('EXCEPT ALL')
    INTERSECT = util.symbol('INTERSECT')
    INTERSECT_ALL = util.symbol('INTERSECT ALL')

    def __init__(self, keyword, *selects, **kwargs):
        self._auto_correlate = kwargs.pop('correlate', False)
        self.keyword = keyword
        self.selects = []

        numcols = None

        # some DBs do not like ORDER BY in the inner queries of a UNION, etc.
        for n, s in enumerate(selects):
            s = _clause_element_as_expr(s)

            if not numcols:
                numcols = len(s.c)
            elif len(s.c) != numcols:
                # NOTE(review): the message always reports "select #1" as
                # the reference point, though numcols may have come from a
                # later select if earlier ones had zero columns — confirm
                raise exc.ArgumentError('All selectables passed to '
                        'CompoundSelect must have identical numbers of '
                        'columns; select #%d has %d columns, select '
                        '#%d has %d' % (1, len(self.selects[0].c), n
                        + 1, len(s.c)))

            self.selects.append(s.self_group(self))

        GenerativeSelect.__init__(self, **kwargs)

    @classmethod
    def _create_union(cls, *selects, **kwargs):
        """Return a ``UNION`` of multiple selectables.

        The returned object is an instance of
        :class:`.CompoundSelect`.

        A similar :func:`union()` method is available on all
        :class:`.FromClause` subclasses.

        \*selects
          a list of :class:`.Select` instances.

        \**kwargs
           available keyword arguments are the same as those of
           :func:`select`.

        """
        return CompoundSelect(CompoundSelect.UNION, *selects, **kwargs)

    @classmethod
    def _create_union_all(cls, *selects, **kwargs):
        """Return a ``UNION ALL`` of multiple selectables.

        The returned object is an instance of
        :class:`.CompoundSelect`.

        A similar :func:`union_all()` method is available on all
        :class:`.FromClause` subclasses.

        \*selects
          a list of :class:`.Select` instances.

        \**kwargs
          available keyword arguments are the same as those of
          :func:`select`.

        """
        return CompoundSelect(CompoundSelect.UNION_ALL, *selects, **kwargs)

    @classmethod
    def _create_except(cls, *selects, **kwargs):
        """Return an ``EXCEPT`` of multiple selectables.

        The returned object is an instance of
        :class:`.CompoundSelect`.

        \*selects
          a list of :class:`.Select` instances.

        \**kwargs
          available keyword arguments are the same as those of
          :func:`select`.

        """
        return CompoundSelect(CompoundSelect.EXCEPT, *selects, **kwargs)

    @classmethod
    def _create_except_all(cls, *selects, **kwargs):
        """Return an ``EXCEPT ALL`` of multiple selectables.

        The returned object is an instance of
        :class:`.CompoundSelect`.

        \*selects
          a list of :class:`.Select` instances.

        \**kwargs
          available keyword arguments are the same as those of
          :func:`select`.

        """
        return CompoundSelect(CompoundSelect.EXCEPT_ALL, *selects, **kwargs)

    @classmethod
    def _create_intersect(cls, *selects, **kwargs):
        """Return an ``INTERSECT`` of multiple selectables.

        The returned object is an instance of
        :class:`.CompoundSelect`.

        \*selects
          a list of :class:`.Select` instances.

        \**kwargs
          available keyword arguments are the same as those of
          :func:`select`.

        """
        return CompoundSelect(CompoundSelect.INTERSECT, *selects, **kwargs)

    @classmethod
    def _create_intersect_all(cls, *selects, **kwargs):
        """Return an ``INTERSECT ALL`` of multiple selectables.

        The returned object is an instance of
        :class:`.CompoundSelect`.

        \*selects
          a list of :class:`.Select` instances.

        \**kwargs
          available keyword arguments are the same as those of
          :func:`select`.

        """
        return CompoundSelect(CompoundSelect.INTERSECT_ALL, *selects, **kwargs)

    def _scalar_type(self):
        # the scalar type is taken from the first contained select
        return self.selects[0]._scalar_type()

    def self_group(self, against=None):
        return FromGrouping(self)

    def is_derived_from(self, fromclause):
        for s in self.selects:
            if s.is_derived_from(fromclause):
                return True
        return False

    def _populate_column_collection(self):
        for cols in zip(*[s.c for s in self.selects]):

            # this is a slightly hacky thing - the union exports a
            # column that resembles just that of the *first* selectable.
            # to get at a "composite" column, particularly foreign keys,
            # you have to dig through the proxies collection which we
            # generate below.  We may want to improve upon this, such as
            # perhaps _make_proxy can accept a list of other columns
            # that are "shared" - schema.column can then copy all the
            # ForeignKeys in. this would allow the union() to have all
            # those fks too.

            proxy = cols[0]._make_proxy(self,
                    name=cols[0]._label if self.use_labels else None,
                    key=cols[0]._key_label if self.use_labels else None)

            # hand-construct the "_proxies" collection to include all
            # derived columns place a 'weight' annotation corresponding
            # to how low in the list of select()s the column occurs, so
            # that the corresponding_column() operation can resolve
            # conflicts

            proxy._proxies = [c._annotate({'weight': i + 1}) for (i,
                              c) in enumerate(cols)]

    def _refresh_for_new_column(self, column):
        for s in self.selects:
            s._refresh_for_new_column(column)

        # if our columns haven't been exported yet, nothing further
        # needs to happen; once they have, a new column can't be
        # reconciled across the contained selects
        if not self._cols_populated:
            return None

        raise NotImplementedError("CompoundSelect constructs don't support "
                "addition of columns to underlying selectables")

    def _copy_internals(self, clone=_clone, **kw):
        self._reset_exported()
        self.selects = [clone(s, **kw) for s in self.selects]
        if hasattr(self, '_col_map'):
            del self._col_map
        for attr in ('_order_by_clause', '_group_by_clause', '_for_update_arg'):
            if getattr(self, attr) is not None:
                setattr(self, attr, clone(getattr(self, attr), **kw))

    def get_children(self, column_collections=True, **kwargs):
        return (column_collections and list(self.c) or []) \
            + [self._order_by_clause, self._group_by_clause] \
            + list(self.selects)

    def bind(self):
        # prefer an explicitly assigned bind, else the first bound
        # engine found among the contained selects (for/else: the else
        # runs only when the loop completes without returning)
        if self._bind:
            return self._bind
        for s in self.selects:
            e = s.bind
            if e:
                return e
        else:
            return None

    def _set_bind(self, bind):
        self._bind = bind
    # legacy read/write property built from the two functions above
    bind = property(bind, _set_bind)
+
+
class HasPrefixes(object):
    """Mixin providing the generative :meth:`.prefix_with` method."""

    _prefixes = ()

    @_generative
    def prefix_with(self, *expr, **kw):
        """Add one or more expressions following the statement keyword, i.e.
        SELECT, INSERT, UPDATE, or DELETE. Generative.

        This supports backend-specific prefix keywords such as those
        provided by MySQL.

        E.g.::

            stmt = table.insert().prefix_with("LOW_PRIORITY", dialect="mysql")

        Multiple prefixes can be specified by calling
        :meth:`.prefix_with` multiple times.

        :param \*expr: textual or :class:`.ClauseElement` construct which
         will be rendered following the INSERT, UPDATE, or DELETE
         keyword.
        :param \**kw: A single keyword 'dialect' is accepted.  This is an
         optional string dialect name which will
         limit rendering of this prefix to only that dialect.

        """
        dialect = kw.pop('dialect', None)
        # 'dialect' is the only keyword argument understood here
        if kw:
            raise exc.ArgumentError("Unsupported argument(s): %s" %
                            ",".join(kw))
        self._setup_prefixes(expr, dialect)

    def _setup_prefixes(self, prefixes, dialect=None):
        # coerce each prefix to a text construct, pairing it with the
        # dialect restriction, and extend the existing tuple
        coerced = tuple(
            (_literal_as_text(p), dialect) for p in prefixes)
        self._prefixes = self._prefixes + coerced
+
+
+
class Select(HasPrefixes, GenerativeSelect):
    """Represents a ``SELECT`` statement.

    """

    __visit_name__ = 'select'

    _prefixes = ()
    # per-dialect hints: {(selectable, dialect_name): hint_text}
    _hints = util.immutabledict()
    # False, True, or a list of expressions for DISTINCT ON
    _distinct = False
    # mapping of original FROM elements to clones; see _copy_internals
    _from_cloned = None
    # explicit correlate() / correlate_except() collections
    _correlate = ()
    _correlate_except = None
    _memoized_property = SelectBase._memoized_property
+
    def __init__(self,
                 columns=None,
                 whereclause=None,
                 from_obj=None,
                 distinct=False,
                 having=None,
                 correlate=True,
                 prefixes=None,
                 **kwargs):
        """Construct a new :class:`.Select`.

        Similar functionality is also available via the :meth:`.FromClause.select`
        method on any :class:`.FromClause`.

        All arguments which accept :class:`.ClauseElement` arguments also accept
        string arguments, which will be converted as appropriate into
        either :func:`text()` or :func:`literal_column()` constructs.

        .. seealso::

            :ref:`coretutorial_selecting` - Core Tutorial description of
            :func:`.select`.

        :param columns:
          A list of :class:`.ClauseElement` objects, typically
          :class:`.ColumnElement` objects or subclasses, which will form the
          columns clause of the resulting statement. For all members which are
          instances of :class:`.Selectable`, the individual :class:`.ColumnElement`
          members of the :class:`.Selectable` will be added individually to the
          columns clause. For example, specifying a
          :class:`~sqlalchemy.schema.Table` instance will result in all the
          contained :class:`~sqlalchemy.schema.Column` objects within to be added
          to the columns clause.

          This argument is not present on the form of :func:`select()`
          available on :class:`~sqlalchemy.schema.Table`.

        :param whereclause:
          A :class:`.ClauseElement` expression which will be used to form the
          ``WHERE`` clause.

        :param from_obj:
          A list of :class:`.ClauseElement` objects which will be added to the
          ``FROM`` clause of the resulting statement. Note that "from" objects are
          automatically located within the columns and whereclause ClauseElements.
          Use this parameter to explicitly specify "from" objects which are not
          automatically locatable. This could include
          :class:`~sqlalchemy.schema.Table` objects that aren't otherwise present,
          or :class:`.Join` objects whose presence will supersede that of the
          :class:`~sqlalchemy.schema.Table` objects already located in the other
          clauses.

        :param autocommit:
          Deprecated.  Use .execution_options(autocommit=<True|False>)
          to set the autocommit option.

        :param bind=None:
          an :class:`~.Engine` or :class:`~.Connection` instance
          to which the
          resulting :class:`.Select` object will be bound.  The :class:`.Select`
          object will otherwise automatically bind to whatever
          :class:`~.base.Connectable` instances can be located within its contained
          :class:`.ClauseElement` members.

        :param correlate=True:
          indicates that this :class:`.Select` object should have its
          contained :class:`.FromClause` elements "correlated" to an enclosing
          :class:`.Select` object.  This means that any :class:`.ClauseElement`
          instance within the "froms" collection of this :class:`.Select`
          which is also present in the "froms" collection of an
          enclosing select will not be rendered in the ``FROM`` clause
          of this select statement.

        :param distinct=False:
          when ``True``, applies a ``DISTINCT`` qualifier to the columns
          clause of the resulting statement.

          The boolean argument may also be a column expression or list
          of column expressions - this is a special calling form which
          is understood by the Postgresql dialect to render the
          ``DISTINCT ON (<columns>)`` syntax.

          ``distinct`` is also available via the :meth:`~.Select.distinct`
          generative method.

        :param for_update=False:
          when ``True``, applies ``FOR UPDATE`` to the end of the
          resulting statement.

          .. deprecated:: 0.9.0 - use :meth:`.GenerativeSelect.with_for_update`
             to specify the structure of the ``FOR UPDATE`` clause.

          ``for_update`` accepts various string values interpreted by
          specific backends, including:

          * ``"read"`` - on MySQL, translates to ``LOCK IN SHARE MODE``;
            on Postgresql, translates to ``FOR SHARE``.
          * ``"nowait"`` - on Postgresql and Oracle, translates to
            ``FOR UPDATE NOWAIT``.
          * ``"read_nowait"`` - on Postgresql, translates to
            ``FOR SHARE NOWAIT``.

          .. seealso::

              :meth:`.GenerativeSelect.with_for_update` - improved API for
              specifying the ``FOR UPDATE`` clause.

        :param group_by:
          a list of :class:`.ClauseElement` objects which will comprise the
          ``GROUP BY`` clause of the resulting select.

        :param having:
          a :class:`.ClauseElement` that will comprise the ``HAVING`` clause
          of the resulting select when ``GROUP BY`` is used.

        :param limit=None:
          a numerical value which usually compiles to a ``LIMIT``
          expression in the resulting select.  Databases that don't
          support ``LIMIT`` will attempt to provide similar
          functionality.

        :param offset=None:
          a numeric value which usually compiles to an ``OFFSET``
          expression in the resulting select.  Databases that don't
          support ``OFFSET`` will attempt to provide similar
          functionality.

        :param order_by:
          a scalar or list of :class:`.ClauseElement` objects which will
          comprise the ``ORDER BY`` clause of the resulting select.

        :param use_labels=False:
          when ``True``, the statement will be generated using labels
          for each column in the columns clause, which qualify each
          column with its parent table's (or aliases) name so that name
          conflicts between columns in different tables don't occur.
          The format of the label is <tablename>_<column>.  The "c"
          collection of the resulting :class:`.Select` object will use these
          names as well for targeting column members.

          use_labels is also available via the :meth:`~.GenerativeSelect.apply_labels`
          generative method.

        """
        self._auto_correlate = correlate
        if distinct is not False:
            # distinct may be True, or a column expression / list of
            # expressions (Postgresql DISTINCT ON)
            if distinct is True:
                self._distinct = True
            else:
                self._distinct = [
                    _literal_as_text(e)
                    for e in util.to_list(distinct)
                ]

        if from_obj is not None:
            # OrderedSet: FROM entries are unique but order-preserving
            self._from_obj = util.OrderedSet(
                _interpret_as_from(f)
                for f in util.to_list(from_obj))
        else:
            self._from_obj = util.OrderedSet()

        try:
            cols_present = bool(columns)
        except TypeError:
            # bool() raised: columns was not an iterable/sized object
            raise exc.ArgumentError("columns argument to select() must "
                                    "be a Python list or other iterable")

        if cols_present:
            self._raw_columns = []
            for c in columns:
                c = _interpret_as_column_or_from(c)
                if isinstance(c, ScalarSelect):
                    # parenthesize scalar subqueries in the columns clause
                    c = c.self_group(against=operators.comma_op)
                self._raw_columns.append(c)
        else:
            self._raw_columns = []

        if whereclause is not None:
            self._whereclause = _literal_as_text(whereclause)
        else:
            self._whereclause = None

        if having is not None:
            self._having = _literal_as_text(having)
        else:
            self._having = None

        if prefixes:
            self._setup_prefixes(prefixes)

        # limit/offset/order_by/group_by/for_update etc. handled by the base
        GenerativeSelect.__init__(self, **kwargs)
+
+ @property
+ def _froms(self):
+ # would love to cache this,
+ # but there's just enough edge cases, particularly now that
+ # declarative encourages construction of SQL expressions
+ # without tables present, to just regen this each time.
+ froms = []
+ seen = set()
+ translate = self._from_cloned
+
+ def add(items):
+ for item in items:
+ if item is self:
+ raise exc.InvalidRequestError(
+ "select() construct refers to itself as a FROM")
+ if translate and item in translate:
+ item = translate[item]
+ if not seen.intersection(item._cloned_set):
+ froms.append(item)
+ seen.update(item._cloned_set)
+
+ add(_from_objects(*self._raw_columns))
+ if self._whereclause is not None:
+ add(_from_objects(self._whereclause))
+ add(self._from_obj)
+
+ return froms
+
+ def _get_display_froms(self, explicit_correlate_froms=None,
+ implicit_correlate_froms=None):
+ """Return the full list of 'from' clauses to be displayed.
+
+ Takes into account a set of existing froms which may be
+ rendered in the FROM clause of enclosing selects; this Select
+ may want to leave those absent if it is automatically
+ correlating.
+
+ """
+ froms = self._froms
+
+ toremove = set(itertools.chain(*[
+ _expand_cloned(f._hide_froms)
+ for f in froms]))
+ if toremove:
+ # if we're maintaining clones of froms,
+ # add the copies out to the toremove list. only include
+ # clones that are lexical equivalents.
+ if self._from_cloned:
+ toremove.update(
+ self._from_cloned[f] for f in
+ toremove.intersection(self._from_cloned)
+ if self._from_cloned[f]._is_lexical_equivalent(f)
+ )
+ # filter out to FROM clauses not in the list,
+ # using a list to maintain ordering
+ froms = [f for f in froms if f not in toremove]
+
+ if self._correlate:
+ to_correlate = self._correlate
+ if to_correlate:
+ froms = [
+ f for f in froms if f not in
+ _cloned_intersection(
+ _cloned_intersection(froms, explicit_correlate_froms or ()),
+ to_correlate
+ )
+ ]
+
+ if self._correlate_except is not None:
+
+ froms = [
+ f for f in froms if f not in
+ _cloned_difference(
+ _cloned_intersection(froms, explicit_correlate_froms or ()),
+ self._correlate_except
+ )
+ ]
+
+ if self._auto_correlate and \
+ implicit_correlate_froms and \
+ len(froms) > 1:
+
+ froms = [
+ f for f in froms if f not in
+ _cloned_intersection(froms, implicit_correlate_froms)
+ ]
+
+ if not len(froms):
+ raise exc.InvalidRequestError("Select statement '%s"
+ "' returned no FROM clauses due to "
+ "auto-correlation; specify "
+ "correlate(<tables>) to control "
+ "correlation manually." % self)
+
+ return froms
+
+ def _scalar_type(self):
+ elem = self._raw_columns[0]
+ cols = list(elem._select_iterable)
+ return cols[0].type
+
    @property
    def froms(self):
        """Return the displayed list of FromClause elements.

        This is the post-correlation list; ``_froms`` holds the raw
        collection before correlation is applied.
        """

        return self._get_display_froms()
+
    @_generative
    def with_hint(self, selectable, text, dialect_name='*'):
        """Add an indexing hint for the given selectable to this
        :class:`.Select`.

        The text of the hint is rendered in the appropriate
        location for the database backend in use, relative
        to the given :class:`.Table` or :class:`.Alias` passed as the
        ``selectable`` argument. The dialect implementation
        typically  uses Python string substitution syntax
        with the token ``%(name)s`` to render the name of
        the table or alias. E.g. when using Oracle, the
        following::

            select([mytable]).\\
                with_hint(mytable, "+ index(%(name)s ix_mytable)")

        Would render SQL as::

            select /*+ index(mytable ix_mytable) */ ... from mytable

        The ``dialect_name`` option will limit the rendering of a particular
        hint to a particular backend. Such as, to add hints for both Oracle
        and Sybase simultaneously::

            select([mytable]).\\
                with_hint(mytable, "+ index(%(name)s ix_mytable)", 'oracle').\\
                with_hint(mytable, "WITH INDEX ix_mytable", 'sybase')

        """
        # _hints is an immutabledict; union() produces a new mapping so
        # the generative copy does not share state with the original
        self._hints = self._hints.union(
            {(selectable, dialect_name): text})
+
    @property
    def type(self):
        # a Select is not a column expression; direct the user to
        # as_scalar(), which wraps it into a typed scalar subquery
        raise exc.InvalidRequestError("Select objects don't have a type.  "
                    "Call as_scalar() on this Select object "
                    "to return a 'scalar' version of this Select.")
+
    @_memoized_property.method
    def locate_all_froms(self):
        """return a Set of all FromClause elements referenced by this Select.

        This set is a superset of that returned by the ``froms`` property,
        which is specifically for those FromClause elements that would
        actually be rendered.

        """
        froms = self._froms
        # despite the docstring's "Set", a list is returned; memoized
        # per instance via _memoized_property.method
        return froms + list(_from_objects(*froms))
+
    @property
    def inner_columns(self):
        """an iterator of all ColumnElement expressions which would
        be rendered into the columns clause of the resulting SELECT statement.

        Unlike ``.c``, these are the raw (non-proxied) expressions.
        """
        return _select_iterables(self._raw_columns)
+
+ def is_derived_from(self, fromclause):
+ if self in fromclause._cloned_set:
+ return True
+
+ for f in self.locate_all_froms():
+ if f.is_derived_from(fromclause):
+ return True
+ return False
+
    def _copy_internals(self, clone=_clone, **kw):
        """Replace this Select's internal elements with cloned copies,
        keeping the FROM-element clone mapping consistent.

        The statement ordering here is significant; see the numbered
        comments (the numbering skips "2." in the original).
        """

        # Select() object has been cloned and probably adapted by the
        # given clone function.  Apply the cloning function to internal
        # objects

        # 1. keep a dictionary of the froms we've cloned, and what
        # they've become.  This is consulted later when we derive
        # additional froms from "whereclause" and the columns clause,
        # which may still reference the uncloned parent table.
        # as of 0.7.4 we also put the current version of _froms, which
        # gets cleared on each generation.  previously we were "baking"
        # _froms into self._from_obj.
        self._from_cloned = from_cloned = dict((f, clone(f, **kw))
                for f in self._from_obj.union(self._froms))

        # 3. update persistent _from_obj with the cloned versions.
        self._from_obj = util.OrderedSet(from_cloned[f] for f in
                self._from_obj)

        # the _correlate collection is done separately, what can happen
        # here is the same item is _correlate as in _from_obj but the
        # _correlate version has an annotation on it - (specifically
        # RelationshipProperty.Comparator._criterion_exists() does
        # this). Also keep _correlate liberally open with it's previous
        # contents, as this set is used for matching, not rendering.
        self._correlate = set(clone(f) for f in
                              self._correlate).union(self._correlate)

        # 4. clone other things.   The difficulty here is that Column
        # objects are not actually cloned, and refer to their original
        # .table, resulting in the wrong "from" parent after a clone
        # operation.  Hence _from_cloned and _from_obj supersede what is
        # present here.
        self._raw_columns = [clone(c, **kw) for c in self._raw_columns]
        for attr in '_whereclause', '_having', '_order_by_clause', \
                '_group_by_clause', '_for_update_arg':
            if getattr(self, attr) is not None:
                setattr(self, attr, clone(getattr(self, attr), **kw))

        # erase exported column list, _froms collection,
        # etc.
        self._reset_exported()
+
+ def get_children(self, column_collections=True, **kwargs):
+ """return child elements as per the ClauseElement specification."""
+
+ return (column_collections and list(self.columns) or []) + \
+ self._raw_columns + list(self._froms) + \
+ [x for x in
+ (self._whereclause, self._having,
+ self._order_by_clause, self._group_by_clause)
+ if x is not None]
+
    @_generative
    def column(self, column):
        """return a new select() construct with the given column expression
        added to its columns clause.

        Delegates to :meth:`.append_column` on the generative copy.
        """
        self.append_column(column)
+
    @util.dependencies("sqlalchemy.sql.util")
    def reduce_columns(self, sqlutil, only_synonyms=True):
        """Return a new :func:`.select` construct with redundantly
        named, equivalently-valued columns removed from the columns clause.

        "Redundant" here means two columns where one refers to the
        other either based on foreign key, or via a simple equality
        comparison in the WHERE clause of the statement.   The primary purpose
        of this method is to automatically construct a select statement
        with all uniquely-named columns, without the need to use
        table-qualified labels as :meth:`.apply_labels` does.

        When columns are omitted based on foreign key, the referred-to
        column is the one that's kept.  When columns are omitted based on
        WHERE equivalence, the first column in the columns clause is the
        one that's kept.

        :param only_synonyms: when True, limit the removal of columns
         to those which have the same name as the equivalent.   Otherwise,
         all columns that are equivalent to another are removed.

        .. versionadded:: 0.8

        """
        return self.with_only_columns(
            sqlutil.reduce_columns(
                self.inner_columns,
                only_synonyms=only_synonyms,
                # _whereclause may be None here; reduce_columns is
                # expected to skip non-clause entries
                *(self._whereclause, ) + tuple(self._from_obj)
            )
        )
+
    @_generative
    def with_only_columns(self, columns):
        """Return a new :func:`.select` construct with its columns
        clause replaced with the given columns.

        .. versionchanged:: 0.7.3
            Due to a bug fix, this method has a slight
            behavioral change as of version 0.7.3.
            Prior to version 0.7.3, the FROM clause of
            a :func:`.select` was calculated upfront and as new columns
            were added; in 0.7.3 and later it's calculated
            at compile time, fixing an issue regarding late binding
            of columns to parent tables.  This changes the behavior of
            :meth:`.Select.with_only_columns` in that FROM clauses no
            longer represented in the new list are dropped,
            but this behavior is more consistent in
            that the FROM clauses are consistently derived from the
            current columns clause.  The original intent of this method
            is to allow trimming of the existing columns list to be fewer
            columns than originally present; the use case of replacing
            the columns list with an entirely different one hadn't
            been anticipated until 0.7.3 was released; the usage
            guidelines below illustrate how this should be done.

        This method is exactly equivalent to as if the original
        :func:`.select` had been called with the given columns
        clause.   I.e. a statement::

            s = select([table1.c.a, table1.c.b])
            s = s.with_only_columns([table1.c.b])

        should be exactly equivalent to::

            s = select([table1.c.b])

        This means that FROM clauses which are only derived
        from the column list will be discarded if the new column
        list no longer contains that FROM::

            >>> table1 = table('t1', column('a'), column('b'))
            >>> table2 = table('t2', column('a'), column('b'))
            >>> s1 = select([table1.c.a, table2.c.b])
            >>> print s1
            SELECT t1.a, t2.b FROM t1, t2
            >>> s2 = s1.with_only_columns([table2.c.b])
            >>> print s2
            SELECT t2.b FROM t1

        The preferred way to maintain a specific FROM clause
        in the construct, assuming it won't be represented anywhere
        else (i.e. not in the WHERE clause, etc.) is to set it using
        :meth:`.Select.select_from`::

            >>> s1 = select([table1.c.a, table2.c.b]).\\
            ...         select_from(table1.join(table2,
            ...                 table1.c.a==table2.c.a))
            >>> s2 = s1.with_only_columns([table2.c.b])
            >>> print s2
            SELECT t2.b FROM t1 JOIN t2 ON t1.a=t2.a

        Care should also be taken to use the correct
        set of column objects passed to :meth:`.Select.with_only_columns`.
        Since the method is essentially equivalent to calling the
        :func:`.select` construct in the first place with the given
        columns, the columns passed to :meth:`.Select.with_only_columns`
        should usually be a subset of those which were passed
        to the :func:`.select` construct, not those which are available
        from the ``.c`` collection of that :func:`.select`.  That
        is::

            s = select([table1.c.a, table1.c.b]).select_from(table1)
            s = s.with_only_columns([table1.c.b])

        and **not**::

            # usually incorrect
            s = s.with_only_columns([s.c.b])

        The latter would produce the SQL::

            SELECT b
            FROM (SELECT t1.a AS a, t1.b AS b
            FROM t1), t1

        Since the :func:`.select` construct is essentially being
        asked to select both from ``table1`` as well as itself.

        """
        # discard memoized .c collection before swapping the columns
        self._reset_exported()
        rc = []
        for c in columns:
            c = _interpret_as_column_or_from(c)
            if isinstance(c, ScalarSelect):
                # parenthesize scalar subqueries, as __init__ does
                c = c.self_group(against=operators.comma_op)
            rc.append(c)
        self._raw_columns = rc
+
    @_generative
    def where(self, whereclause):
        """return a new select() construct with the given expression added to
        its WHERE clause, joined to the existing clause via AND, if any.

        Delegates to :meth:`.append_whereclause` on the generative copy.
        """

        self.append_whereclause(whereclause)
+
    @_generative
    def having(self, having):
        """return a new select() construct with the given expression added to
        its HAVING clause, joined to the existing clause via AND, if any.

        Delegates to :meth:`.append_having` on the generative copy.
        """
        self.append_having(having)
+
+ @_generative
+ def distinct(self, *expr):
+ """Return a new select() construct which will apply DISTINCT to its
+ columns clause.
+
+ :param \*expr: optional column expressions. When present,
+ the Postgresql dialect will render a ``DISTINCT ON (<expressions>>)``
+ construct.
+
+ """
+ if expr:
+ expr = [_literal_as_text(e) for e in expr]
+ if isinstance(self._distinct, list):
+ self._distinct = self._distinct + expr
+ else:
+ self._distinct = expr
+ else:
+ self._distinct = True
+
    @_generative
    def select_from(self, fromclause):
        """return a new :func:`.select` construct with the
        given FROM expression
        merged into its list of FROM objects.

        E.g.::

            table1 = table('t1', column('a'))
            table2 = table('t2', column('b'))
            s = select([table1.c.a]).\\
                select_from(
                    table1.join(table2, table1.c.a==table2.c.b)
                )

        The "from" list is a unique set on the identity of each element,
        so adding an already present :class:`.Table` or other selectable
        will have no effect.   Passing a :class:`.Join` that refers
        to an already present :class:`.Table` or other selectable will have
        the effect of concealing the presence of that selectable as
        an individual element in the rendered FROM list, instead
        rendering it into a JOIN clause.

        While the typical purpose of :meth:`.Select.select_from` is to
        replace the default, derived FROM clause with a join, it can
        also be called with individual table elements, multiple times
        if desired, in the case that the FROM clause cannot be fully
        derived from the columns clause::

            select([func.count('*')]).select_from(table1)

        """
        # delegates to the in-place mutator on the generative copy
        self.append_from(fromclause)
+
+ @_generative
+ def correlate(self, *fromclauses):
+ """return a new :class:`.Select` which will correlate the given FROM
+ clauses to that of an enclosing :class:`.Select`.
+
+ Calling this method turns off the :class:`.Select` object's
+ default behavior of "auto-correlation". Normally, FROM elements
+ which appear in a :class:`.Select` that encloses this one via
+ its :term:`WHERE clause`, ORDER BY, HAVING or
+ :term:`columns clause` will be omitted from this :class:`.Select`
+ object's :term:`FROM clause`.
+ Setting an explicit correlation collection using the
+ :meth:`.Select.correlate` method provides a fixed list of FROM objects
+ that can potentially take place in this process.
+
+ When :meth:`.Select.correlate` is used to apply specific FROM clauses
+ for correlation, the FROM elements become candidates for
+ correlation regardless of how deeply nested this :class:`.Select`
+ object is, relative to an enclosing :class:`.Select` which refers to
+ the same FROM object. This is in contrast to the behavior of
+ "auto-correlation" which only correlates to an immediate enclosing
+ :class:`.Select`. Multi-level correlation ensures that the link
+ between enclosed and enclosing :class:`.Select` is always via
+ at least one WHERE/ORDER BY/HAVING/columns clause in order for
+ correlation to take place.
+
+ If ``None`` is passed, the :class:`.Select` object will correlate
+ none of its FROM entries, and all will render unconditionally
+ in the local FROM clause.
+
+ :param \*fromclauses: a list of one or more :class:`.FromClause`
+ constructs, or other compatible constructs (i.e. ORM-mapped
+ classes) to become part of the correlate collection.
+
+ .. versionchanged:: 0.8.0 ORM-mapped classes are accepted by
+ :meth:`.Select.correlate`.
+
+ .. versionchanged:: 0.8.0 The :meth:`.Select.correlate` method no
+ longer unconditionally removes entries from the FROM clause; instead,
+ the candidate FROM entries must also be matched by a FROM entry
+ located in an enclosing :class:`.Select`, which ultimately encloses
+ this one as present in the WHERE clause, ORDER BY clause, HAVING
+ clause, or columns clause of an enclosing :meth:`.Select`.
+
+ .. versionchanged:: 0.8.2 explicit correlation takes place
+ via any level of nesting of :class:`.Select` objects; in previous
+ 0.8 versions, correlation would only occur relative to the immediate
+ enclosing :class:`.Select` construct.
+
+ .. seealso::
+
+ :meth:`.Select.correlate_except`
+
+ :ref:`correlated_subqueries`
+
+ """
+ self._auto_correlate = False
+ if fromclauses and fromclauses[0] is None:
+ self._correlate = ()
+ else:
+ self._correlate = set(self._correlate).union(
+ _interpret_as_from(f) for f in fromclauses)
+
+ @_generative
+ def correlate_except(self, *fromclauses):
+ """return a new :class:`.Select` which will omit the given FROM
+ clauses from the auto-correlation process.
+
+ Calling :meth:`.Select.correlate_except` turns off the
+ :class:`.Select` object's default behavior of
+ "auto-correlation" for the given FROM elements. An element
+ specified here will unconditionally appear in the FROM list, while
+ all other FROM elements remain subject to normal auto-correlation
+ behaviors.
+
+ .. versionchanged:: 0.8.2 The :meth:`.Select.correlate_except`
+ method was improved to fully prevent FROM clauses specified here
+ from being omitted from the immediate FROM clause of this
+ :class:`.Select`.
+
+ If ``None`` is passed, the :class:`.Select` object will correlate
+ all of its FROM entries.
+
+ .. versionchanged:: 0.8.2 calling ``correlate_except(None)`` will
+ correctly auto-correlate all FROM clauses.
+
+ :param \*fromclauses: a list of one or more :class:`.FromClause`
+ constructs, or other compatible constructs (i.e. ORM-mapped
+ classes) to become part of the correlate-exception collection.
+
+ .. seealso::
+
+ :meth:`.Select.correlate`
+
+ :ref:`correlated_subqueries`
+
+ """
+
+ self._auto_correlate = False
+ if fromclauses and fromclauses[0] is None:
+ self._correlate_except = ()
+ else:
+ self._correlate_except = set(self._correlate_except or ()).union(
+ _interpret_as_from(f) for f in fromclauses)
+
    def append_correlation(self, fromclause):
        """append the given correlation expression to this select()
        construct.

        This is an **in-place** mutation method; the
        :meth:`~.Select.correlate` method is preferred, as it provides standard
        :term:`method chaining`.

        """

        self._auto_correlate = False
        # NOTE(review): the singular-named ``fromclause`` argument is
        # iterated here, so callers appear to pass a collection of FROM
        # elements - confirm against call sites
        self._correlate = set(self._correlate).union(
            _interpret_as_from(f) for f in fromclause)

    def append_column(self, column):
        """append the given column expression to the columns clause of this
        select() construct.

        This is an **in-place** mutation method; the
        :meth:`~.Select.column` method is preferred, as it provides standard
        :term:`method chaining`.

        """
        # memoized .c / exported collections become stale
        self._reset_exported()
        column = _interpret_as_column_or_from(column)

        if isinstance(column, ScalarSelect):
            # parenthesize scalar subqueries in the columns clause
            column = column.self_group(against=operators.comma_op)

        self._raw_columns = self._raw_columns + [column]
+
+ def append_prefix(self, clause):
+ """append the given columns clause prefix expression to this select()
+ construct.
+
+ This is an **in-place** mutation method; the
+ :meth:`~.Select.prefix_with` method is preferred, as it provides standard
+ :term:`method chaining`.
+
+ """
+ clause = _literal_as_text(clause)
+ self._prefixes = self._prefixes + (clause,)
+
    def append_whereclause(self, whereclause):
        """append the given expression to this select() construct's WHERE
        criterion.

        The expression will be joined to existing WHERE criterion via AND.

        This is an **in-place** mutation method; the
        :meth:`~.Select.where` method is preferred, as it provides standard
        :term:`method chaining`.

        """

        self._reset_exported()
        # True_._ifnone() turns a None WHERE clause into a no-op operand
        # so and_() always receives two clauses
        self._whereclause = and_(True_._ifnone(self._whereclause), whereclause)

    def append_having(self, having):
        """append the given expression to this select() construct's HAVING
        criterion.

        The expression will be joined to existing HAVING criterion via AND.

        This is an **in-place** mutation method; the
        :meth:`~.Select.having` method is preferred, as it provides standard
        :term:`method chaining`.

        """
        self._reset_exported()
        # same None-coalescing AND composition as append_whereclause()
        self._having = and_(True_._ifnone(self._having), having)

    def append_from(self, fromclause):
        """append the given FromClause expression to this select() construct's
        FROM clause.

        This is an **in-place** mutation method; the
        :meth:`~.Select.select_from` method is preferred, as it provides standard
        :term:`method chaining`.

        """
        self._reset_exported()
        fromclause = _interpret_as_from(fromclause)
        # _from_obj is an OrderedSet; duplicates are silently ignored
        self._from_obj = self._from_obj.union([fromclause])
+
+
+ @_memoized_property
+ def _columns_plus_names(self):
+ if self.use_labels:
+ names = set()
+ def name_for_col(c):
+ if c._label is None:
+ return (None, c)
+ name = c._label
+ if name in names:
+ name = c.anon_label
+ else:
+ names.add(name)
+ return name, c
+
+ return [
+ name_for_col(c)
+ for c in util.unique_list(_select_iterables(self._raw_columns))
+ ]
+ else:
+ return [
+ (None, c)
+ for c in util.unique_list(_select_iterables(self._raw_columns))
+ ]
+
    def _populate_column_collection(self):
        """Build the exported ``.c`` collection by proxying each raw
        column expression onto this Select."""
        for name, c in self._columns_plus_names:
            # non-column entries (e.g. textual fragments) have no
            # _make_proxy and are skipped
            if not hasattr(c, '_make_proxy'):
                continue
            if name is None:
                key = None
            elif self.use_labels:
                key = c._key_label
                if key is not None and key in self.c:
                    # key collision in the collection; fall back to the
                    # column's anonymous label
                    key = c.anon_label
            else:
                key = None

            c._make_proxy(self, key=key,
                          name=name,
                          name_is_truncatable=True)
+
    def _refresh_for_new_column(self, column):
        """React to a column added to one of our FROM elements after
        this Select was constructed.

        If the new column belongs to our columns clause and the ``.c``
        collection has already been populated, a proxy for it is added;
        otherwise returns None.
        """
        for fromclause in self._froms:
            col = fromclause._refresh_for_new_column(column)
            if col is not None:
                if col in self.inner_columns and self._cols_populated:
                    our_label = col._key_label if self.use_labels else col.key
                    if our_label not in self.c:
                        return col._make_proxy(self,
                            name=col._label if self.use_labels else None,
                            key=col._key_label if self.use_labels else None,
                            name_is_truncatable=True)
                # found the owning fromclause; no proxy needed/possible
                return None
        return None
+
    def self_group(self, against=None):
        """return a 'grouping' construct as per the ClauseElement
        specification.

        This produces an element that can be embedded in an expression. Note
        that this method is called automatically as needed when constructing
        expressions and should not require explicit use.

        """
        # inside a CompoundSelect (UNION etc.) the compound construct
        # supplies any needed parenthesization
        if isinstance(against, CompoundSelect):
            return self
        return FromGrouping(self)
+
    # the following six methods are thin wrappers around the
    # CompoundSelect factory classmethods.

    def union(self, other, **kwargs):
        """return a SQL ``UNION`` of this select() construct against the given
        selectable."""

        return CompoundSelect._create_union(self, other, **kwargs)

    def union_all(self, other, **kwargs):
        """return a SQL ``UNION ALL`` of this select() construct against the
        given selectable.

        """
        return CompoundSelect._create_union_all(self, other, **kwargs)

    def except_(self, other, **kwargs):
        """return a SQL ``EXCEPT`` of this select() construct against the given
        selectable."""

        return CompoundSelect._create_except(self, other, **kwargs)

    def except_all(self, other, **kwargs):
        """return a SQL ``EXCEPT ALL`` of this select() construct against the
        given selectable.

        """
        return CompoundSelect._create_except_all(self, other, **kwargs)

    def intersect(self, other, **kwargs):
        """return a SQL ``INTERSECT`` of this select() construct against the
        given selectable.

        """
        return CompoundSelect._create_intersect(self, other, **kwargs)

    def intersect_all(self, other, **kwargs):
        """return a SQL ``INTERSECT ALL`` of this select() construct against
        the given selectable.

        """
        return CompoundSelect._create_intersect_all(self, other, **kwargs)
+
+ def bind(self):
+ if self._bind:
+ return self._bind
+ froms = self._froms
+ if not froms:
+ for c in self._raw_columns:
+ e = c.bind
+ if e:
+ self._bind = e
+ return e
+ else:
+ e = list(froms)[0].bind
+ if e:
+ self._bind = e
+ return e
+
+ return None
+
    def _set_bind(self, bind):
        # setter half of the read/write ``bind`` property assembled below
        self._bind = bind
    # expose bind() above as the getter, _set_bind as the setter
    bind = property(bind, _set_bind)
+
+
class ScalarSelect(Generative, Grouping):
    """Represent a scalar subquery: a SELECT usable as a column
    expression, typed from its leftmost column."""

    # a scalar subquery contributes no FROM elements to an enclosing
    # statement
    _from_objects = []

    def __init__(self, element):
        self.element = element
        # adopt the type of the wrapped statement's leftmost column
        self.type = element._scalar_type()

    @property
    def columns(self):
        raise exc.InvalidRequestError('Scalar Select expression has no '
                'columns; use this object directly within a '
                'column-level expression.')
    c = columns

    @_generative
    def where(self, crit):
        """Apply a WHERE clause to the SELECT statement referred to
        by this :class:`.ScalarSelect`.

        """
        self.element = self.element.where(crit)

    def self_group(self, **kwargs):
        # already a Grouping; no additional parenthesization needed
        return self
+
+
class Exists(UnaryExpression):
    """Represent an ``EXISTS`` clause.

    """
    __visit_name__ = UnaryExpression.__visit_name__
    # EXISTS contributes no FROM elements to an enclosing statement
    _from_objects = []

    def __init__(self, *args, **kwargs):
        """Construct a new :class:`.Exists` against an existing
        :class:`.Select` object.

        Calling styles are of the following forms::

            # use on an existing select()
            s = select([table.c.col1]).where(table.c.col2==5)
            s = exists(s)

            # construct a select() at once
            exists(['*'], **select_arguments).where(criterion)

            # columns argument is optional, generates "EXISTS (SELECT *)"
            # by default.
            exists().where(table.c.col2==5)

        """
        if args and isinstance(args[0], (SelectBase, ScalarSelect)):
            # wrap an existing statement directly
            s = args[0]
        else:
            if not args:
                # default to EXISTS (SELECT *)
                args = ([literal_column('*')],)
            s = Select(*args, **kwargs).as_scalar().self_group()

        UnaryExpression.__init__(self, s, operator=operators.exists,
                                  type_=type_api.BOOLEANTYPE)

    def select(self, whereclause=None, **params):
        """Return a new :class:`.Select` with this EXISTS in its
        columns clause."""
        return Select([self], whereclause, **params)

    def correlate(self, *fromclause):
        """Apply :meth:`.Select.correlate` to the contained statement,
        returning a new :class:`.Exists`."""
        e = self._clone()
        e.element = self.element.correlate(*fromclause).self_group()
        return e

    def correlate_except(self, *fromclause):
        """Apply :meth:`.Select.correlate_except` to the contained
        statement, returning a new :class:`.Exists`."""
        e = self._clone()
        e.element = self.element.correlate_except(*fromclause).self_group()
        return e

    def select_from(self, clause):
        """return a new :class:`.Exists` construct, applying the given
        expression to the :meth:`.Select.select_from` method of the select
        statement contained.

        """
        e = self._clone()
        e.element = self.element.select_from(clause).self_group()
        return e

    def where(self, clause):
        """return a new exists() construct with the given expression added to
        its WHERE clause, joined to the existing clause via AND, if any.

        """
        e = self._clone()
        e.element = self.element.where(clause).self_group()
        return e
+
+
class TextAsFrom(SelectBase):
    """Wrap a :class:`.TextClause` construct within a :class:`.SelectBase`
    interface.

    This allows the :class:`.TextClause` object to gain a ``.c`` collection and
    other FROM-like capabilities such as :meth:`.FromClause.alias`,
    :meth:`.SelectBase.cte`, etc.

    The :class:`.TextAsFrom` construct is produced via the
    :meth:`.TextClause.columns` method - see that method for details.

    .. versionadded:: 0.9.0

    .. seealso::

        :func:`.text`

        :meth:`.TextClause.columns`

    """
    __visit_name__ = "text_as_from"

    def __init__(self, text, columns):
        # the wrapped TextClause and the column expressions that will
        # populate the exported .c collection
        self.element = text
        self.column_args = columns

    @property
    def _bind(self):
        # delegate bind resolution to the wrapped text element
        return self.element._bind

    @_generative
    def bindparams(self, *binds, **bind_as_values):
        """Apply :meth:`.TextClause.bindparams` to the contained text,
        returning a new :class:`.TextAsFrom`."""
        self.element = self.element.bindparams(*binds, **bind_as_values)

    def _populate_column_collection(self):
        # proxy each declared column onto this construct to form .c
        for c in self.column_args:
            c._make_proxy(self)

    def _copy_internals(self, clone=_clone, **kw):
        self._reset_exported()
        self.element = clone(self.element, **kw)

    def _scalar_type(self):
        # leftmost declared column determines the scalar type
        return self.column_args[0].type
+
class AnnotatedFromClause(Annotated):
    """An :class:`.Annotated` subclass specific to FromClause objects."""

    def __init__(self, element, values):
        # force FromClause to generate their internal
        # collections into __dict__
        element.c
        Annotated.__init__(self, element, values)
+
+
diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py
new file mode 100644
index 000000000..d779caaea
--- /dev/null
+++ b/lib/sqlalchemy/sql/sqltypes.py
@@ -0,0 +1,1628 @@
+# sql/sqltypes.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""SQL specific types.
+
+"""
+
+import datetime as dt
+import codecs
+
+from .type_api import TypeEngine, TypeDecorator, to_instance
+from .elements import quoted_name, type_coerce
+from .default_comparator import _DefaultColumnComparator
+from .. import exc, util, processors
+from .base import _bind_or_error, SchemaEventTarget
+from . import operators
+from .. import event
+from ..util import pickle
+import decimal
+
+if util.jython:
+ import array
+
class _DateAffinity(object):
    """Mixin date/time specific expression adaptations.

    Rules are implemented within Date,Time,Interval,DateTime, Numeric,
    Integer. Based on http://www.postgresql.org/docs/current/static
    /functions-datetime.html.

    """

    @property
    def _expression_adaptations(self):
        # concrete types supply an {operator: {other-type: result-type}}
        # mapping; see Integer / Numeric / Date / Time / DateTime.
        raise NotImplementedError()

    class Comparator(TypeEngine.Comparator):
        # shared empty fallback mapping for operators with no adaptation
        _blank_dict = util.immutabledict()

        def _adapt_expression(self, op, other_comparator):
            # resolve the result type from (operator, other operand's
            # type affinity); unknown combinations yield NULLTYPE.
            othertype = other_comparator.type._type_affinity
            return op, \
                to_instance(self.type._expression_adaptations.get(op, self._blank_dict).\
                get(othertype, NULLTYPE))
    comparator_factory = Comparator
+
class Concatenable(object):
    """A mixin marking a type as supporting 'concatenation',
    typically string types."""

    class Comparator(TypeEngine.Comparator):
        def _adapt_expression(self, op, other_comparator):
            # the ``+`` operator becomes SQL concatenation when both
            # sides are concatenation-capable (or the other side is NULL)
            concat_capable = isinstance(
                other_comparator,
                (Concatenable.Comparator, NullType.Comparator))
            if op is not operators.add or not concat_capable:
                return op, self.expr.type
            return operators.concat_op, self.expr.type

    comparator_factory = Comparator
+
+
class String(Concatenable, TypeEngine):
    """The base for all string and character types.

    In SQL, corresponds to VARCHAR.  Can also take Python unicode objects
    and encode to the database's encoding in bind params (and the reverse for
    result sets.)

    The `length` field is usually required when the `String` type is
    used within a CREATE TABLE statement, as VARCHAR requires a length
    on most databases.

    """

    __visit_name__ = 'string'

    def __init__(self, length=None, collation=None,
                 convert_unicode=False,
                 unicode_error=None,
                 _warn_on_bytestring=False
                 ):
        """
        Create a string-holding type.

        :param length: optional, a length for the column for use in
          DDL and CAST expressions.  May be safely omitted if no ``CREATE
          TABLE`` will be issued.  Certain databases may require a
          ``length`` for use in DDL, and will raise an exception when
          the ``CREATE TABLE`` DDL is issued if a ``VARCHAR``
          with no length is included.  Whether the value is
          interpreted as bytes or characters is database specific.

        :param collation: Optional, a column-level collation for
          use in DDL and CAST expressions.  Renders using the
          COLLATE keyword supported by SQLite, MySQL, and Postgresql.
          E.g.::

            >>> from sqlalchemy import cast, select, String
            >>> print select([cast('some string', String(collation='utf8'))])
            SELECT CAST(:param_1 AS VARCHAR COLLATE utf8) AS anon_1

          .. versionadded:: 0.8 Added support for COLLATE to all
             string types.

        :param convert_unicode: When set to ``True``, the
          :class:`.String` type will assume that
          input is to be passed as Python ``unicode`` objects,
          and results returned as Python ``unicode`` objects.
          If the DBAPI in use does not support Python unicode
          (which is fewer and fewer these days), SQLAlchemy
          will encode/decode the value, using the
          value of the ``encoding`` parameter passed to
          :func:`.create_engine` as the encoding.

          When using a DBAPI that natively supports Python
          unicode objects, this flag generally does not
          need to be set.  For columns that are explicitly
          intended to store non-ASCII data, the :class:`.Unicode`
          or :class:`.UnicodeText`
          types should be used regardless, which feature
          the same behavior of ``convert_unicode`` but
          also indicate an underlying column type that
          directly supports unicode, such as ``NVARCHAR``.

          For the extremely rare case that Python ``unicode``
          is to be encoded/decoded by SQLAlchemy on a backend
          that does natively support Python ``unicode``,
          the value ``force`` can be passed here which will
          cause SQLAlchemy's encode/decode services to be
          used unconditionally.

        :param unicode_error: Optional, a method to use to handle Unicode
          conversion errors. Behaves like the ``errors`` keyword argument to
          the standard library's ``string.decode()`` functions.   This flag
          requires that ``convert_unicode`` is set to ``force`` - otherwise,
          SQLAlchemy is not guaranteed to handle the task of unicode
          conversion.   Note that this flag adds significant performance
          overhead to row-fetching operations for backends that already
          return unicode objects natively (which most DBAPIs do).  This
          flag should only be used as a last resort for reading
          strings from a column with varied or corrupted encodings.

        """
        if unicode_error is not None and convert_unicode != 'force':
            raise exc.ArgumentError("convert_unicode must be 'force' "
                                    "when unicode_error is set.")

        self.length = length
        self.collation = collation
        self.convert_unicode = convert_unicode
        self.unicode_error = unicode_error
        self._warn_on_bytestring = _warn_on_bytestring

    def literal_processor(self, dialect):
        # render an inline SQL string literal; only embedded single
        # quotes are escaped here.
        def process(value):
            value = value.replace("'", "''")
            return "'%s'" % value
        return process

    def bind_processor(self, dialect):
        """Return a bind-parameter conversion function, or None when the
        value can be passed to the DBAPI as-is."""
        if self.convert_unicode or dialect.convert_unicode:
            if dialect.supports_unicode_binds and \
                    self.convert_unicode != 'force':
                # DBAPI accepts unicode directly; at most warn about
                # bytestrings slipping through.
                if self._warn_on_bytestring:
                    def process(value):
                        if isinstance(value, util.binary_type):
                            util.warn("Unicode type received non-unicode bind "
                                      "param value.")
                        return value
                    return process
                else:
                    return None
            else:
                # SQLAlchemy performs the encode itself using the
                # engine-wide encoding.
                encoder = codecs.getencoder(dialect.encoding)
                warn_on_bytestring = self._warn_on_bytestring

                def process(value):
                    if isinstance(value, util.text_type):
                        return encoder(value, self.unicode_error)[0]
                    elif warn_on_bytestring and value is not None:
                        util.warn("Unicode type received non-unicode bind "
                                  "param value")
                    return value
                return process
        else:
            return None

    def result_processor(self, dialect, coltype):
        """Return a row-value conversion function producing unicode,
        or None when the DBAPI already returns the desired type."""
        wants_unicode = self.convert_unicode or dialect.convert_unicode
        needs_convert = wants_unicode and \
            (dialect.returns_unicode_strings is not True or
             self.convert_unicode in ('force', 'force_nocheck'))
        needs_isinstance = (
            needs_convert and
            dialect.returns_unicode_strings and
            self.convert_unicode != 'force_nocheck'
        )

        # fix: a dead local (``to_unicode = processors.
        # to_unicode_processor_factory(...)``) was previously computed
        # here and never used; removed.
        if needs_convert:
            if needs_isinstance:
                # dialect usually returns unicode already - use the
                # conditional variant which isinstance-checks each value
                return processors.to_conditional_unicode_processor_factory(
                    dialect.encoding, self.unicode_error)
            else:
                return processors.to_unicode_processor_factory(
                    dialect.encoding, self.unicode_error)
        else:
            return None

    @property
    def python_type(self):
        if self.convert_unicode:
            return util.text_type
        else:
            return str

    def get_dbapi_type(self, dbapi):
        return dbapi.STRING
+
+
class Text(String):
    """A variably sized string type.

    Corresponds to CLOB or TEXT in SQL.  Python unicode objects may be
    passed and are encoded to the database's encoding in bind
    parameters (and decoded back for result sets).  TEXT objects
    generally carry no length; some databases accept a length argument
    here, while others will reject it.

    """
    __visit_name__ = 'text'
+
+
class Unicode(String):
    """A variable length Unicode string type.

    :class:`.Unicode` is a :class:`.String` subclass that assumes input
    and output as Python ``unicode`` data, equivalent to using the
    ``convert_unicode`` flag with :class:`.String`.  Unlike plain
    :class:`.String`, however, it also implies an underlying column
    type that explicitly supports non-ASCII data, such as ``NVARCHAR``
    on Oracle and SQL Server.  This can impact the output of ``CREATE
    TABLE`` statements and ``CAST`` functions at the dialect level, and
    can also affect the handling of bound parameters in some specific
    DBAPI scenarios.

    The encoding used by the :class:`.Unicode` type is usually
    determined by the DBAPI itself; most modern DBAPIs feature support
    for Python ``unicode`` objects as bound values and result set
    values, and the encoding should be configured as detailed in the
    notes for the target DBAPI in the :ref:`dialect_toplevel` section.

    For those DBAPIs which do not support, or are not configured to
    accommodate Python ``unicode`` objects directly, SQLAlchemy does
    the encoding and decoding outside of the DBAPI, using the
    ``encoding`` flag passed to :func:`.create_engine`.

    Only Python ``unicode`` objects are appropriate to pass to this
    type, not plain ``str``; a plain ``str`` under Python 2 emits a
    warning.  The Python ``warnings`` filter, documented at
    http://docs.python.org/library/warnings.html, can turn these
    warnings into exceptions to illustrate a stack trace::

      import warnings
      warnings.simplefilter('error')

    To pass plain bytestrings and ``unicode`` objects equally, decode
    the bytestrings to unicode first; the recipe at
    :ref:`coerce_to_unicode` illustrates how this is done.

    See also:

        :class:`.UnicodeText` - unlengthed textual counterpart
        to :class:`.Unicode`.

    """

    __visit_name__ = 'unicode'

    def __init__(self, length=None, **kwargs):
        """
        Create a :class:`.Unicode` object.

        Parameters are the same as that of :class:`.String`,
        with the exception that ``convert_unicode``
        defaults to ``True``.

        """
        for key, default in (('convert_unicode', True),
                             ('_warn_on_bytestring', True)):
            kwargs.setdefault(key, default)
        super(Unicode, self).__init__(length=length, **kwargs)
+
+
class UnicodeText(Text):
    """An unbounded-length Unicode string type.

    See :class:`.Unicode` for details on the unicode behavior of this
    object.  As with :class:`.Unicode`, using :class:`.UnicodeText`
    implies a unicode-capable type on the backend, such as ``NCLOB``
    or ``NTEXT``.

    """

    __visit_name__ = 'unicode_text'

    def __init__(self, length=None, **kwargs):
        """
        Create a Unicode-converting Text type.

        Parameters are the same as that of :class:`.Text`,
        with the exception that ``convert_unicode``
        defaults to ``True``.

        """
        for key, default in (('convert_unicode', True),
                             ('_warn_on_bytestring', True)):
            kwargs.setdefault(key, default)
        super(UnicodeText, self).__init__(length=length, **kwargs)
+
+
class Integer(_DateAffinity, TypeEngine):
    """A type for ``int`` integers."""

    __visit_name__ = 'integer'

    def get_dbapi_type(self, dbapi):
        return dbapi.NUMBER

    @property
    def python_type(self):
        return int

    def literal_processor(self, dialect):
        # integers render inline via plain str()
        def process(value):
            return str(value)
        return process

    @util.memoized_property
    def _expression_adaptations(self):
        # TODO: need a dictionary object that will
        # handle operators generically here, this is incomplete
        #
        # memoized_property defers evaluation, so the forward references
        # to Date / Numeric / Interval resolve after this module has
        # been fully imported.
        return {
            operators.add: {
                Date: Date,
                Integer: self.__class__,
                Numeric: Numeric,
            },
            operators.mul: {
                Interval: Interval,
                Integer: self.__class__,
                Numeric: Numeric,
            },
            operators.div: {
                Integer: self.__class__,
                Numeric: Numeric,
            },
            operators.truediv: {
                Integer: self.__class__,
                Numeric: Numeric,
            },
            operators.sub: {
                Integer: self.__class__,
                Numeric: Numeric,
            },
        }
+
+
+
class SmallInteger(Integer):
    """A type for smaller ``int`` integers.

    Generates ``SMALLINT`` in DDL; otherwise acts the same as a normal
    :class:`.Integer` on the Python side.

    """

    __visit_name__ = 'small_integer'
+
+
class BigInteger(Integer):
    """A type for bigger ``int`` integers.

    Generates ``BIGINT`` in DDL; otherwise acts the same as a normal
    :class:`.Integer` on the Python side.

    """

    __visit_name__ = 'big_integer'
+
+
+
class Numeric(_DateAffinity, TypeEngine):
    """A type for fixed precision numbers, such as ``NUMERIC`` or ``DECIMAL``.

    This type returns Python ``decimal.Decimal`` objects by default, unless the
    :paramref:`.Numeric.asdecimal` flag is set to False, in which case they
    are coerced to Python ``float`` objects.

    .. note::

        The :class:`.Numeric` type is designed to receive data from a database
        type that is explicitly known to be a decimal type
        (e.g. ``DECIMAL``, ``NUMERIC``, others) and not a floating point
        type (e.g. ``FLOAT``, ``REAL``, others).
        If the database column on the server is in fact a floating-point
        type, such as ``FLOAT`` or ``REAL``, use the :class:`.Float`
        type or a subclass, otherwise numeric coercion between ``float``/``Decimal``
        may or may not function as expected.

    .. note::

       The Python ``decimal.Decimal`` class is generally slow
       performing; cPython 3.3 has now switched to use the `cdecimal
       <http://pypi.python.org/pypi/cdecimal/>`_ library natively. For
       older Python versions, the ``cdecimal`` library can be patched
       into any application where it will replace the ``decimal``
       library fully, however this needs to be applied globally and
       before any other modules have been imported, as follows::

           import sys
           import cdecimal
           sys.modules["decimal"] = cdecimal

       Note that the ``cdecimal`` and ``decimal`` libraries are **not
       compatible with each other**, so patching ``cdecimal`` at the
       global level is the only way it can be used effectively with
       various DBAPIs that hardcode to import the ``decimal`` library.

    """

    __visit_name__ = 'numeric'

    # fallback scale for float->Decimal conversion when neither
    # decimal_return_scale nor scale is supplied
    _default_decimal_return_scale = 10

    def __init__(self, precision=None, scale=None,
                 decimal_return_scale=None, asdecimal=True):
        """
        Construct a Numeric.

        :param precision: the numeric precision for use in DDL ``CREATE
          TABLE``.

        :param scale: the numeric scale for use in DDL ``CREATE TABLE``.

        :param asdecimal: default True.  Return whether or not
          values should be sent as Python Decimal objects, or
          as floats.   Different DBAPIs send one or the other based on
          datatypes - the Numeric type will ensure that return values
          are one or the other across DBAPIs consistently.

        :param decimal_return_scale: Default scale to use when converting
         from floats to Python decimals.  Floating point values will typically
         be much longer due to decimal inaccuracy, and most floating point
         database types don't have a notion of "scale", so by default the
         float type looks for the first ten decimal places when converting.
         Specifying this value will override that length.  Types which
         do include an explicit ".scale" value, such as the base :class:`.Numeric`
         as well as the MySQL float types, will use the value of ".scale"
         as the default for decimal_return_scale, if not otherwise specified.

         .. versionadded:: 0.9.0

        When using the ``Numeric`` type, care should be taken to ensure
        that the asdecimal setting is appropriate for the DBAPI in use -
        when Numeric applies a conversion from Decimal->float or float->
        Decimal, this conversion incurs an additional performance overhead
        for all result columns received.

        DBAPIs that return Decimal natively (e.g. psycopg2) will have
        better accuracy and higher performance with a setting of ``True``,
        as the native translation to Decimal reduces the amount of floating-
        point issues at play, and the Numeric type itself doesn't need
        to apply any further conversions.  However, another DBAPI which
        returns floats natively *will* incur an additional conversion
        overhead, and is still subject to floating point data loss - in
        which case ``asdecimal=False`` will at least remove the extra
        conversion overhead.

        """
        self.precision = precision
        self.scale = scale
        self.decimal_return_scale = decimal_return_scale
        self.asdecimal = asdecimal

    @property
    def _effective_decimal_return_scale(self):
        # precedence: explicit decimal_return_scale, then the type's
        # scale (if it has one), then the module-level default of 10
        if self.decimal_return_scale is not None:
            return self.decimal_return_scale
        elif getattr(self, "scale", None) is not None:
            return self.scale
        else:
            return self._default_decimal_return_scale

    def get_dbapi_type(self, dbapi):
        return dbapi.NUMBER

    def literal_processor(self, dialect):
        # numeric literals render inline via plain str()
        def process(value):
            return str(value)
        return process

    @property
    def python_type(self):
        if self.asdecimal:
            return decimal.Decimal
        else:
            return float

    def bind_processor(self, dialect):
        if dialect.supports_native_decimal:
            return None
        else:
            return processors.to_float

    def result_processor(self, dialect, coltype):
        """Return a row-value conversion function per the asdecimal
        setting and the dialect's native-decimal support."""
        if self.asdecimal:
            if dialect.supports_native_decimal:
                # we're a "numeric", DBAPI will give us Decimal directly
                return None
            else:
                util.warn('Dialect %s+%s does *not* support Decimal '
                          'objects natively, and SQLAlchemy must '
                          'convert from floating point - rounding '
                          'errors and other issues may occur. Please '
                          'consider storing Decimal numbers as strings '
                          'or integers on this platform for lossless '
                          'storage.' % (dialect.name, dialect.driver))

                # we're a "numeric", DBAPI returns floats, convert.
                # fix: use _effective_decimal_return_scale so that an
                # explicit ``decimal_return_scale`` is honored, matching
                # Float.result_processor; previously the parameter was
                # silently ignored here.
                return processors.to_decimal_processor_factory(
                    decimal.Decimal,
                    self._effective_decimal_return_scale)
        else:
            if dialect.supports_native_decimal:
                return processors.to_float
            else:
                return None

    @util.memoized_property
    def _expression_adaptations(self):
        return {
            operators.mul: {
                Interval: Interval,
                Numeric: self.__class__,
                Integer: self.__class__,
            },
            operators.div: {
                Numeric: self.__class__,
                Integer: self.__class__,
            },
            operators.truediv: {
                Numeric: self.__class__,
                Integer: self.__class__,
            },
            operators.add: {
                Numeric: self.__class__,
                Integer: self.__class__,
            },
            operators.sub: {
                Numeric: self.__class__,
                Integer: self.__class__,
            }
        }
+
+
class Float(Numeric):
    """Type representing floating point types, such as ``FLOAT`` or ``REAL``.

    This type returns Python ``float`` objects by default, unless the
    :paramref:`.Float.asdecimal` flag is set to True, in which case they
    are coerced to ``decimal.Decimal`` objects.

    .. note::

        The :class:`.Float` type is designed to receive data from a database
        type that is explicitly known to be a floating point type
        (e.g. ``FLOAT``, ``REAL``, others)
        and not a decimal type (e.g. ``DECIMAL``, ``NUMERIC``, others).
        If the database column on the server is in fact a Numeric
        type, such as ``DECIMAL`` or ``NUMERIC``, use the :class:`.Numeric`
        type or a subclass, otherwise numeric coercion between ``float``/``Decimal``
        may or may not function as expected.

    """

    __visit_name__ = 'float'

    # Float has no DDL scale; _effective_decimal_return_scale therefore
    # skips to decimal_return_scale or the default.
    scale = None

    def __init__(self, precision=None, asdecimal=False,
                 decimal_return_scale=None, **kwargs):
        """
        Construct a Float.

        :param precision: the numeric precision for use in DDL ``CREATE
           TABLE``.

        :param asdecimal: the same flag as that of :class:`.Numeric`, but
          defaults to ``False``.   Note that setting this flag to ``True``
          results in floating point conversion.

        :param decimal_return_scale: Default scale to use when converting
         from floats to Python decimals.  Floating point values will typically
         be much longer due to decimal inaccuracy, and most floating point
         database types don't have a notion of "scale", so by default the
         float type looks for the first ten decimal places when converting.
         Specifying this value will override that length.  Note that the
         MySQL float types, which do include "scale", will use "scale"
         as the default for decimal_return_scale, if not otherwise specified.

         .. versionadded:: 0.9.0

        :param \**kwargs: deprecated.  Additional arguments here are ignored
         by the default :class:`.Float` type.  For database specific
         floats that support additional arguments, see that dialect's
         documentation for details, such as
         :class:`sqlalchemy.dialects.mysql.FLOAT`.

        """
        self.precision = precision
        self.asdecimal = asdecimal
        self.decimal_return_scale = decimal_return_scale
        if kwargs:
            util.warn_deprecated("Additional keyword arguments "
                                 "passed to Float ignored.")

    def result_processor(self, dialect, coltype):
        # unlike Numeric, no native-decimal branch: DBAPI floats are
        # either converted to Decimal or returned as-is.
        if self.asdecimal:
            return processors.to_decimal_processor_factory(
                decimal.Decimal,
                self._effective_decimal_return_scale)
        else:
            return None

    @util.memoized_property
    def _expression_adaptations(self):
        return {
            operators.mul: {
                Interval: Interval,
                Numeric: self.__class__,
            },
            operators.div: {
                Numeric: self.__class__,
            },
            operators.truediv: {
                Numeric: self.__class__,
            },
            operators.add: {
                Numeric: self.__class__,
            },
            operators.sub: {
                Numeric: self.__class__,
            }
        }
+
+
class DateTime(_DateAffinity, TypeEngine):
    """A type for ``datetime.datetime()`` objects.

    Date and time types return objects from the Python ``datetime``
    module.  Most DBAPIs have built in support for the datetime
    module, with the noted exception of SQLite.  In the case of
    SQLite, date and time types are stored as strings which are then
    converted back to datetime objects when rows are returned.

    """

    __visit_name__ = 'datetime'

    def __init__(self, timezone=False):
        """Construct a new :class:`.DateTime`.

        :param timezone: boolean.  If True, and supported by the
         backend, will produce 'TIMESTAMP WITH TIMEZONE'. For backends
         that don't support timezone aware timestamps, has no
         effect.

        """
        self.timezone = timezone

    def get_dbapi_type(self, dbapi):
        return dbapi.DATETIME

    @property
    def python_type(self):
        return dt.datetime

    @util.memoized_property
    def _expression_adaptations(self):
        # datetime + interval -> datetime;
        # datetime - interval -> datetime; datetime - datetime -> interval
        return {
            operators.add: {
                Interval: self.__class__,
            },
            operators.sub: {
                Interval: self.__class__,
                DateTime: Interval,
            },
        }
+
+
class Date(_DateAffinity, TypeEngine):
    """A type for ``datetime.date()`` objects."""

    __visit_name__ = 'date'

    def get_dbapi_type(self, dbapi):
        # DBAPIs expose a single DATETIME type object covering dates
        return dbapi.DATETIME

    @property
    def python_type(self):
        return dt.date

    @util.memoized_property
    def _expression_adaptations(self):
        return {
            operators.add: {
                Integer: self.__class__,
                Interval: DateTime,
                Time: DateTime,
            },
            operators.sub: {
                # date - integer = date
                Integer: self.__class__,

                # date - date = integer.
                Date: Integer,

                Interval: DateTime,

                # date - datetime = interval,
                # this one is not in the PG docs
                # but works
                DateTime: Interval,
            },
        }
+
+
class Time(_DateAffinity, TypeEngine):
    """A type for ``datetime.time()`` objects."""

    __visit_name__ = 'time'

    def __init__(self, timezone=False):
        """Construct a new :class:`.Time`.

        :param timezone: boolean.  Analogous to
         :paramref:`.DateTime.timezone`; stored on the type for use by
         backends that support timezone-aware times.

        """
        self.timezone = timezone

    def get_dbapi_type(self, dbapi):
        return dbapi.DATETIME

    @property
    def python_type(self):
        return dt.time

    @util.memoized_property
    def _expression_adaptations(self):
        # time + date -> datetime; time +/- interval -> time;
        # time - time -> interval
        return {
            operators.add: {
                Date: DateTime,
                Interval: self.__class__
            },
            operators.sub: {
                Time: Interval,
                Interval: self.__class__,
            },
        }
+
+
class _Binary(TypeEngine):
    """Define base behavior for binary types."""

    def __init__(self, length=None):
        # optional length for DDL, for binary types that accept one
        self.length = length

    def literal_processor(self, dialect):
        # render an inline SQL string literal from binary data.
        # fix: use the ``dialect`` argument here - type objects have no
        # ``.dialect`` attribute, so the previous ``self.dialect.encoding``
        # raised AttributeError whenever a binary literal was rendered.
        def process(value):
            value = value.decode(dialect.encoding).replace("'", "''")
            return "'%s'" % value
        return process

    @property
    def python_type(self):
        return util.binary_type

    # Python 3 - sqlite3 doesn't need the `Binary` conversion
    # here, though pg8000 does to indicate "bytea"
    def bind_processor(self, dialect):
        DBAPIBinary = dialect.dbapi.Binary

        def process(value):
            if value is not None:
                return DBAPIBinary(value)
            else:
                return None
        return process

    # Python 3 has native bytes() type
    # both sqlite3 and pg8000 seem to return it,
    # psycopg2 as of 2.5 returns 'memoryview'
    if util.py2k:
        def result_processor(self, dialect, coltype):
            if util.jython:
                # jython returns array.array; convert to str
                def process(value):
                    if value is not None:
                        if isinstance(value, array.array):
                            return value.tostring()
                        return str(value)
                    else:
                        return None
            else:
                process = processors.to_str
            return process
    else:
        def result_processor(self, dialect, coltype):
            # normalize buffer/memoryview results to bytes()
            def process(value):
                if value is not None:
                    value = bytes(value)
                return value
            return process

    def coerce_compared_value(self, op, value):
        """See :meth:`.TypeEngine.coerce_compared_value` for a description."""

        # comparing against a plain string: keep this binary type rather
        # than coercing the comparison to a string type
        if isinstance(value, util.string_types):
            return self
        else:
            return super(_Binary, self).coerce_compared_value(op, value)

    def get_dbapi_type(self, dbapi):
        return dbapi.BINARY
+
+
class LargeBinary(_Binary):
    """A type for large binary byte data.

    Generates BLOB or BYTEA when tables are created, and converts
    incoming values using the ``Binary`` callable provided by each
    DB-API.

    """

    __visit_name__ = 'large_binary'

    def __init__(self, length=None):
        """
        Construct a LargeBinary type.

        :param length: optional, a length for the column for use in
          DDL statements, for those BLOB types that accept a length
          (i.e. MySQL).  It does *not* produce a small BINARY/VARBINARY
          type - use the BINARY/VARBINARY types specifically for those.
          May be safely omitted if no ``CREATE TABLE`` will be issued.
          Certain databases may require a *length* for use in DDL, and
          will raise an exception when the ``CREATE TABLE`` DDL is
          issued.

        """
        super(LargeBinary, self).__init__(length=length)
+
+
class Binary(LargeBinary):
    """Deprecated. Renamed to LargeBinary."""

    def __init__(self, *arg, **kw):
        # warn once per construction, then behave exactly as LargeBinary
        util.warn_deprecated(
            'The Binary type has been renamed to LargeBinary.')
        super(Binary, self).__init__(*arg, **kw)
+
+
+
class SchemaType(SchemaEventTarget):
    """Mark a type as possibly requiring schema-level DDL for usage.

    Supports types that must be explicitly created/dropped (i.e. PG ENUM type)
    as well as types that are complimented by table or schema level
    constraints, triggers, and other rules.

    :class:`.SchemaType` classes can also be targets for the
    :meth:`.DDLEvents.before_parent_attach` and
    :meth:`.DDLEvents.after_parent_attach` events, where the events fire off
    surrounding the association of the type object with a parent
    :class:`.Column`.

    .. seealso::

        :class:`.Enum`

        :class:`.Boolean`


    """

    def __init__(self, name=None, schema=None, metadata=None,
                 inherit_schema=False, quote=None):
        # wrap the name with an explicit quoting preference, if given
        if name is not None:
            self.name = quoted_name(name, quote)
        else:
            self.name = None
        self.schema = schema
        self.metadata = metadata
        self.inherit_schema = inherit_schema
        # when bound directly to a MetaData, create/drop with that
        # MetaData's create_all()/drop_all() events
        if self.metadata:
            event.listen(
                self.metadata,
                "before_create",
                util.portable_instancemethod(self._on_metadata_create)
            )
            event.listen(
                self.metadata,
                "after_drop",
                util.portable_instancemethod(self._on_metadata_drop)
            )

    def _set_parent(self, column):
        # defer table-level setup until the owning column is actually
        # attached to a Table
        column._on_table_attach(util.portable_instancemethod(self._set_table))

    def _set_table(self, column, table):
        # optionally adopt the owning table's schema
        if self.inherit_schema:
            self.schema = table.schema

        # create/drop alongside the owning table
        event.listen(
            table,
            "before_create",
            util.portable_instancemethod(
                self._on_table_create)
        )
        event.listen(
            table,
            "after_drop",
            util.portable_instancemethod(self._on_table_drop)
        )
        if self.metadata is None:
            # TODO: what's the difference between self.metadata
            # and table.metadata here ?
            event.listen(
                table.metadata,
                "before_create",
                util.portable_instancemethod(self._on_metadata_create)
            )
            event.listen(
                table.metadata,
                "after_drop",
                util.portable_instancemethod(self._on_metadata_drop)
            )

    def copy(self, **kw):
        return self.adapt(self.__class__)

    def adapt(self, impltype, **kw):
        # build a new type instance, carrying schema/metadata across
        # unless overridden in **kw
        schema = kw.pop('schema', self.schema)
        metadata = kw.pop('metadata', self.metadata)
        return impltype(name=self.name,
                        schema=schema,
                        metadata=metadata,
                        inherit_schema=self.inherit_schema,
                        **kw
                        )

    @property
    def bind(self):
        # bound engine/connection comes from the associated MetaData,
        # if any
        return self.metadata and self.metadata.bind or None

    def create(self, bind=None, checkfirst=False):
        """Issue CREATE ddl for this type, if applicable."""

        if bind is None:
            bind = _bind_or_error(self)
        # delegate to the dialect-level implementation when it is a
        # distinct SchemaType (e.g. a native ENUM impl)
        t = self.dialect_impl(bind.dialect)
        if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
            t.create(bind=bind, checkfirst=checkfirst)

    def drop(self, bind=None, checkfirst=False):
        """Issue DROP ddl for this type, if applicable."""

        if bind is None:
            bind = _bind_or_error(self)
        # same dialect-level delegation as create()
        t = self.dialect_impl(bind.dialect)
        if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
            t.drop(bind=bind, checkfirst=checkfirst)

    def _on_table_create(self, target, bind, **kw):
        # propagate the DDL event to the dialect-level type, if distinct
        t = self.dialect_impl(bind.dialect)
        if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
            t._on_table_create(target, bind, **kw)

    def _on_table_drop(self, target, bind, **kw):
        t = self.dialect_impl(bind.dialect)
        if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
            t._on_table_drop(target, bind, **kw)

    def _on_metadata_create(self, target, bind, **kw):
        t = self.dialect_impl(bind.dialect)
        if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
            t._on_metadata_create(target, bind, **kw)

    def _on_metadata_drop(self, target, bind, **kw):
        t = self.dialect_impl(bind.dialect)
        if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
            t._on_metadata_drop(target, bind, **kw)
+
+class Enum(String, SchemaType):
+ """Generic Enum Type.
+
+ The Enum type provides a set of possible string values which the
+ column is constrained towards.
+
+ By default, uses the backend's native ENUM type if available,
+ else uses VARCHAR + a CHECK constraint.
+
+ .. seealso::
+
+ :class:`~.postgresql.ENUM` - PostgreSQL-specific type,
+ which has additional functionality.
+
+ """
+
+ __visit_name__ = 'enum'
+
+ def __init__(self, *enums, **kw):
+ """Construct an enum.
+
+ Keyword arguments which don't apply to a specific backend are ignored
+ by that backend.
+
+ :param \*enums: string or unicode enumeration labels. If unicode
+ labels are present, the `convert_unicode` flag is auto-enabled.
+
+ :param convert_unicode: Enable unicode-aware bind parameter and
+ result-set processing for this Enum's data. This is set
+ automatically based on the presence of unicode label strings.
+
+ :param metadata: Associate this type directly with a ``MetaData``
+ object. For types that exist on the target database as an
+ independent schema construct (Postgresql), this type will be
+ created and dropped within ``create_all()`` and ``drop_all()``
+ operations. If the type is not associated with any ``MetaData``
+ object, it will associate itself with each ``Table`` in which it is
+ used, and will be created when any of those individual tables are
+ created, after a check is performed for it's existence. The type is
+ only dropped when ``drop_all()`` is called for that ``Table``
+ object's metadata, however.
+
+ :param name: The name of this type. This is required for Postgresql
+ and any future supported database which requires an explicitly
+ named type, or an explicitly named constraint in order to generate
+ the type and/or a table that uses it.
+
+ :param native_enum: Use the database's native ENUM type when
+ available. Defaults to True. When False, uses VARCHAR + check
+ constraint for all backends.
+
+ :param schema: Schema name of this type. For types that exist on the
+ target database as an independent schema construct (Postgresql),
+ this parameter specifies the named schema in which the type is
+ present.
+
+ .. note::
+
+ The ``schema`` of the :class:`.Enum` type does not
+ by default make use of the ``schema`` established on the
+ owning :class:`.Table`. If this behavior is desired,
+ set the ``inherit_schema`` flag to ``True``.
+
+ :param quote: Set explicit quoting preferences for the type's name.
+
+ :param inherit_schema: When ``True``, the "schema" from the owning
+ :class:`.Table` will be copied to the "schema" attribute of this
+ :class:`.Enum`, replacing whatever value was passed for the
+ ``schema`` attribute. This also takes effect when using the
+ :meth:`.Table.tometadata` operation.
+
+ .. versionadded:: 0.8
+
+ """
+ self.enums = enums
+ self.native_enum = kw.pop('native_enum', True)
+ convert_unicode = kw.pop('convert_unicode', None)
+ if convert_unicode is None:
+ for e in enums:
+ if isinstance(e, util.text_type):
+ convert_unicode = True
+ break
+ else:
+ convert_unicode = False
+
+ if self.enums:
+ length = max(len(x) for x in self.enums)
+ else:
+ length = 0
+ String.__init__(self,
+ length=length,
+ convert_unicode=convert_unicode,
+ )
+ SchemaType.__init__(self, **kw)
+
+ def __repr__(self):
+ return util.generic_repr(self,
+ to_inspect=[Enum, SchemaType],
+ )
+
+ def _should_create_constraint(self, compiler):
+ return not self.native_enum or \
+ not compiler.dialect.supports_native_enum
+
+ @util.dependencies("sqlalchemy.sql.schema")
+ def _set_table(self, schema, column, table):
+ if self.native_enum:
+ SchemaType._set_table(self, column, table)
+
+ e = schema.CheckConstraint(
+ type_coerce(column, self).in_(self.enums),
+ name=self.name,
+ _create_rule=util.portable_instancemethod(
+ self._should_create_constraint)
+ )
+ table.append_constraint(e)
+
+ def adapt(self, impltype, **kw):
+ schema = kw.pop('schema', self.schema)
+ metadata = kw.pop('metadata', self.metadata)
+ if issubclass(impltype, Enum):
+ return impltype(name=self.name,
+ schema=schema,
+ metadata=metadata,
+ convert_unicode=self.convert_unicode,
+ native_enum=self.native_enum,
+ inherit_schema=self.inherit_schema,
+ *self.enums,
+ **kw
+ )
+ else:
+ return super(Enum, self).adapt(impltype, **kw)
+
+
+class PickleType(TypeDecorator):
+ """Holds Python objects, which are serialized using pickle.
+
+ PickleType builds upon the Binary type to apply Python's
+ ``pickle.dumps()`` to incoming objects, and ``pickle.loads()`` on
+ the way out, allowing any pickleable Python object to be stored as
+ a serialized binary field.
+
+ To allow ORM change events to propagate for elements associated
+ with :class:`.PickleType`, see :ref:`mutable_toplevel`.
+
+ """
+
+ impl = LargeBinary
+
+ def __init__(self, protocol=pickle.HIGHEST_PROTOCOL,
+ pickler=None, comparator=None):
+ """
+ Construct a PickleType.
+
+ :param protocol: defaults to ``pickle.HIGHEST_PROTOCOL``.
+
+ :param pickler: defaults to cPickle.pickle or pickle.pickle if
+ cPickle is not available. May be any object with
+ pickle-compatible ``dumps`` and ``loads`` methods.
+
+ :param comparator: a 2-arg callable predicate used
+ to compare values of this type. If left as ``None``,
+ the Python "equals" operator is used to compare values.
+
+ """
+ self.protocol = protocol
+ self.pickler = pickler or pickle
+ self.comparator = comparator
+ super(PickleType, self).__init__()
+
+ def __reduce__(self):
+ return PickleType, (self.protocol,
+ None,
+ self.comparator)
+
+ def bind_processor(self, dialect):
+ impl_processor = self.impl.bind_processor(dialect)
+ dumps = self.pickler.dumps
+ protocol = self.protocol
+ if impl_processor:
+ def process(value):
+ if value is not None:
+ value = dumps(value, protocol)
+ return impl_processor(value)
+ else:
+ def process(value):
+ if value is not None:
+ value = dumps(value, protocol)
+ return value
+ return process
+
+ def result_processor(self, dialect, coltype):
+ impl_processor = self.impl.result_processor(dialect, coltype)
+ loads = self.pickler.loads
+ if impl_processor:
+ def process(value):
+ value = impl_processor(value)
+ if value is None:
+ return None
+ return loads(value)
+ else:
+ def process(value):
+ if value is None:
+ return None
+ return loads(value)
+ return process
+
+ def compare_values(self, x, y):
+ if self.comparator:
+ return self.comparator(x, y)
+ else:
+ return x == y
+
+
+class Boolean(TypeEngine, SchemaType):
+ """A bool datatype.
+
+ Boolean typically uses BOOLEAN or SMALLINT on the DDL side, and on
+ the Python side deals in ``True`` or ``False``.
+
+ """
+
+ __visit_name__ = 'boolean'
+
+ def __init__(self, create_constraint=True, name=None):
+ """Construct a Boolean.
+
+ :param create_constraint: defaults to True. If the boolean
+ is generated as an int/smallint, also create a CHECK constraint
+ on the table that ensures 1 or 0 as a value.
+
+ :param name: if a CHECK constraint is generated, specify
+ the name of the constraint.
+
+ """
+ self.create_constraint = create_constraint
+ self.name = name
+
+ def _should_create_constraint(self, compiler):
+ return not compiler.dialect.supports_native_boolean
+
+ @util.dependencies("sqlalchemy.sql.schema")
+ def _set_table(self, schema, column, table):
+ if not self.create_constraint:
+ return
+
+ e = schema.CheckConstraint(
+ type_coerce(column, self).in_([0, 1]),
+ name=self.name,
+ _create_rule=util.portable_instancemethod(
+ self._should_create_constraint)
+ )
+ table.append_constraint(e)
+
+ @property
+ def python_type(self):
+ return bool
+
+ def bind_processor(self, dialect):
+ if dialect.supports_native_boolean:
+ return None
+ else:
+ return processors.boolean_to_int
+
+ def result_processor(self, dialect, coltype):
+ if dialect.supports_native_boolean:
+ return None
+ else:
+ return processors.int_to_boolean
+
+
+class Interval(_DateAffinity, TypeDecorator):
+ """A type for ``datetime.timedelta()`` objects.
+
+ The Interval type deals with ``datetime.timedelta`` objects. In
+ PostgreSQL, the native ``INTERVAL`` type is used; for others, the
+ value is stored as a date which is relative to the "epoch"
+ (Jan. 1, 1970).
+
+ Note that the ``Interval`` type does not currently provide date arithmetic
+ operations on platforms which do not support interval types natively. Such
+ operations usually require transformation of both sides of the expression
+ (such as, conversion of both sides into integer epoch values first) which
+ currently is a manual procedure (such as via
+ :attr:`~sqlalchemy.sql.expression.func`).
+
+ """
+
+ impl = DateTime
+ epoch = dt.datetime.utcfromtimestamp(0)
+
+ def __init__(self, native=True,
+ second_precision=None,
+ day_precision=None):
+ """Construct an Interval object.
+
+ :param native: when True, use the actual
+ INTERVAL type provided by the database, if
+ supported (currently Postgresql, Oracle).
+ Otherwise, represent the interval data as
+ an epoch value regardless.
+
+ :param second_precision: For native interval types
+ which support a "fractional seconds precision" parameter,
+ i.e. Oracle and Postgresql
+
+ :param day_precision: for native interval types which
+ support a "day precision" parameter, i.e. Oracle.
+
+ """
+ super(Interval, self).__init__()
+ self.native = native
+ self.second_precision = second_precision
+ self.day_precision = day_precision
+
+ def adapt(self, cls, **kw):
+ if self.native and hasattr(cls, '_adapt_from_generic_interval'):
+ return cls._adapt_from_generic_interval(self, **kw)
+ else:
+ return self.__class__(
+ native=self.native,
+ second_precision=self.second_precision,
+ day_precision=self.day_precision,
+ **kw)
+
+ @property
+ def python_type(self):
+ return dt.timedelta
+
+ def bind_processor(self, dialect):
+ impl_processor = self.impl.bind_processor(dialect)
+ epoch = self.epoch
+ if impl_processor:
+ def process(value):
+ if value is not None:
+ value = epoch + value
+ return impl_processor(value)
+ else:
+ def process(value):
+ if value is not None:
+ value = epoch + value
+ return value
+ return process
+
+ def result_processor(self, dialect, coltype):
+ impl_processor = self.impl.result_processor(dialect, coltype)
+ epoch = self.epoch
+ if impl_processor:
+ def process(value):
+ value = impl_processor(value)
+ if value is None:
+ return None
+ return value - epoch
+ else:
+ def process(value):
+ if value is None:
+ return None
+ return value - epoch
+ return process
+
+ @util.memoized_property
+ def _expression_adaptations(self):
+ return {
+ operators.add: {
+ Date: DateTime,
+ Interval: self.__class__,
+ DateTime: DateTime,
+ Time: Time,
+ },
+ operators.sub: {
+ Interval: self.__class__
+ },
+ operators.mul: {
+ Numeric: self.__class__
+ },
+ operators.truediv: {
+ Numeric: self.__class__
+ },
+ operators.div: {
+ Numeric: self.__class__
+ }
+ }
+
+ @property
+ def _type_affinity(self):
+ return Interval
+
+ def coerce_compared_value(self, op, value):
+ """See :meth:`.TypeEngine.coerce_compared_value` for a description."""
+
+ return self.impl.coerce_compared_value(op, value)
+
+
+class REAL(Float):
+ """The SQL REAL type."""
+
+ __visit_name__ = 'REAL'
+
+
+class FLOAT(Float):
+ """The SQL FLOAT type."""
+
+ __visit_name__ = 'FLOAT'
+
+
+class NUMERIC(Numeric):
+ """The SQL NUMERIC type."""
+
+ __visit_name__ = 'NUMERIC'
+
+
+class DECIMAL(Numeric):
+ """The SQL DECIMAL type."""
+
+ __visit_name__ = 'DECIMAL'
+
+
+class INTEGER(Integer):
+ """The SQL INT or INTEGER type."""
+
+ __visit_name__ = 'INTEGER'
+INT = INTEGER
+
+
+class SMALLINT(SmallInteger):
+ """The SQL SMALLINT type."""
+
+ __visit_name__ = 'SMALLINT'
+
+
+class BIGINT(BigInteger):
+ """The SQL BIGINT type."""
+
+ __visit_name__ = 'BIGINT'
+
+
+class TIMESTAMP(DateTime):
+ """The SQL TIMESTAMP type."""
+
+ __visit_name__ = 'TIMESTAMP'
+
+ def get_dbapi_type(self, dbapi):
+ return dbapi.TIMESTAMP
+
+
+class DATETIME(DateTime):
+ """The SQL DATETIME type."""
+
+ __visit_name__ = 'DATETIME'
+
+
+class DATE(Date):
+ """The SQL DATE type."""
+
+ __visit_name__ = 'DATE'
+
+
+class TIME(Time):
+ """The SQL TIME type."""
+
+ __visit_name__ = 'TIME'
+
+
+class TEXT(Text):
+ """The SQL TEXT type."""
+
+ __visit_name__ = 'TEXT'
+
+
+class CLOB(Text):
+ """The CLOB type.
+
+ This type is found in Oracle and Informix.
+ """
+
+ __visit_name__ = 'CLOB'
+
+
+class VARCHAR(String):
+ """The SQL VARCHAR type."""
+
+ __visit_name__ = 'VARCHAR'
+
+
+class NVARCHAR(Unicode):
+ """The SQL NVARCHAR type."""
+
+ __visit_name__ = 'NVARCHAR'
+
+
+class CHAR(String):
+ """The SQL CHAR type."""
+
+ __visit_name__ = 'CHAR'
+
+
+class NCHAR(Unicode):
+ """The SQL NCHAR type."""
+
+ __visit_name__ = 'NCHAR'
+
+
+class BLOB(LargeBinary):
+ """The SQL BLOB type."""
+
+ __visit_name__ = 'BLOB'
+
+
+class BINARY(_Binary):
+ """The SQL BINARY type."""
+
+ __visit_name__ = 'BINARY'
+
+
+class VARBINARY(_Binary):
+ """The SQL VARBINARY type."""
+
+ __visit_name__ = 'VARBINARY'
+
+
+class BOOLEAN(Boolean):
+ """The SQL BOOLEAN type."""
+
+ __visit_name__ = 'BOOLEAN'
+
+class NullType(TypeEngine):
+ """An unknown type.
+
+ :class:`.NullType` is used as a default type for those cases where
+ a type cannot be determined, including:
+
+ * During table reflection, when the type of a column is not recognized
+ by the :class:`.Dialect`
+ * When constructing SQL expressions using plain Python objects of
+ unknown types (e.g. ``somecolumn == my_special_object``)
+ * When a new :class:`.Column` is created, and the given type is passed
+ as ``None`` or is not passed at all.
+
+ The :class:`.NullType` can be used within SQL expression invocation
+ without issue, it just has no behavior either at the expression construction
+ level or at the bind-parameter/result processing level. :class:`.NullType`
+ will result in a :exc:`.CompileError` if the compiler is asked to render
+ the type itself, such as if it is used in a :func:`.cast` operation
+ or within a schema creation operation such as that invoked by
+ :meth:`.MetaData.create_all` or the :class:`.CreateTable` construct.
+
+ """
+ __visit_name__ = 'null'
+
+ _isnull = True
+
+ def literal_processor(self, dialect):
+ def process(value):
+ return "NULL"
+ return process
+
+ class Comparator(TypeEngine.Comparator):
+ def _adapt_expression(self, op, other_comparator):
+ if isinstance(other_comparator, NullType.Comparator) or \
+ not operators.is_commutative(op):
+ return op, self.expr.type
+ else:
+ return other_comparator._adapt_expression(op, self)
+ comparator_factory = Comparator
+
+
+NULLTYPE = NullType()
+BOOLEANTYPE = Boolean()
+STRINGTYPE = String()
+INTEGERTYPE = Integer()
+
+_type_map = {
+ int: Integer(),
+ float: Numeric(),
+ bool: BOOLEANTYPE,
+ decimal.Decimal: Numeric(),
+ dt.date: Date(),
+ dt.datetime: DateTime(),
+ dt.time: Time(),
+ dt.timedelta: Interval(),
+ util.NoneType: NULLTYPE
+}
+
+if util.py3k:
+ _type_map[bytes] = LargeBinary()
+ _type_map[str] = Unicode()
+else:
+ _type_map[unicode] = Unicode()
+ _type_map[str] = String()
+
+
+# back-assign to type_api
+from . import type_api
+type_api.BOOLEANTYPE = BOOLEANTYPE
+type_api.STRINGTYPE = STRINGTYPE
+type_api.INTEGERTYPE = INTEGERTYPE
+type_api.NULLTYPE = NULLTYPE
+type_api._type_map = _type_map
+
+# this one, there's all kinds of ways to play it, but at the EOD
+# there's just a giant dependency cycle between the typing system and
+# the expression element system, as you might expect. We can use
+# importlaters or whatnot, but the typing system just necessarily has
+# to have some kind of connection like this. right now we're injecting the
+# _DefaultColumnComparator implementation into the TypeEngine.Comparator interface.
+# Alternatively TypeEngine.Comparator could have an "impl" injected, though
+# just injecting the base is simpler, error free, and more performant.
+class Comparator(_DefaultColumnComparator):
+ BOOLEANTYPE = BOOLEANTYPE
+
+TypeEngine.Comparator.__bases__ = (Comparator, ) + TypeEngine.Comparator.__bases__
+
diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py
new file mode 100644
index 000000000..c6aad92ba
--- /dev/null
+++ b/lib/sqlalchemy/sql/type_api.py
@@ -0,0 +1,1053 @@
+# sql/type_api.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Base types API.
+
+"""
+
+
+from .. import exc, util
+from . import operators
+from .visitors import Visitable
+
+# these are back-assigned by sqltypes.
+BOOLEANTYPE = None
+INTEGERTYPE = None
+NULLTYPE = None
+STRINGTYPE = None
+
+class TypeEngine(Visitable):
+ """Base for built-in types."""
+
+ _sqla_type = True
+ _isnull = False
+
+ class Comparator(operators.ColumnOperators):
+ """Base class for custom comparison operations defined at the
+ type level. See :attr:`.TypeEngine.comparator_factory`.
+
+
+ """
+
+ def __init__(self, expr):
+ self.expr = expr
+
+ def __reduce__(self):
+ return _reconstitute_comparator, (self.expr, )
+
+
+ hashable = True
+ """Flag, if False, means values from this type aren't hashable.
+
+ Used by the ORM when uniquing result lists.
+
+ """
+
+ comparator_factory = Comparator
+ """A :class:`.TypeEngine.Comparator` class which will apply
+ to operations performed by owning :class:`.ColumnElement` objects.
+
+ The :attr:`.comparator_factory` attribute is a hook consulted by
+ the core expression system when column and SQL expression operations
+ are performed. When a :class:`.TypeEngine.Comparator` class is
+ associated with this attribute, it allows custom re-definition of
+ all existing operators, as well as definition of new operators.
+ Existing operators include those provided by Python operator overloading
+ such as :meth:`.operators.ColumnOperators.__add__` and
+ :meth:`.operators.ColumnOperators.__eq__`,
+ those provided as standard
+ attributes of :class:`.operators.ColumnOperators` such as
+ :meth:`.operators.ColumnOperators.like`
+ and :meth:`.operators.ColumnOperators.in_`.
+
+ Rudimentary usage of this hook is allowed through simple subclassing
+ of existing types, or alternatively by using :class:`.TypeDecorator`.
+ See the documentation section :ref:`types_operators` for examples.
+
+ .. versionadded:: 0.8 The expression system was enhanced to support
+ customization of operators on a per-type level.
+
+ """
+
+ def copy_value(self, value):
+ return value
+
+ def literal_processor(self, dialect):
+ """Return a conversion function for processing literal values that are
+ to be rendered directly without using binds.
+
+ This function is used when the compiler makes use of the
+ "literal_binds" flag, typically used in DDL generation as well
+ as in certain scenarios where backends don't accept bound parameters.
+
+ .. versionadded:: 0.9.0
+
+ """
+ return None
+
+ def bind_processor(self, dialect):
+ """Return a conversion function for processing bind values.
+
+ Returns a callable which will receive a bind parameter value
+ as the sole positional argument and will return a value to
+ send to the DB-API.
+
+ If processing is not necessary, the method should return ``None``.
+
+ :param dialect: Dialect instance in use.
+
+ """
+ return None
+
+ def result_processor(self, dialect, coltype):
+ """Return a conversion function for processing result row values.
+
+ Returns a callable which will receive a result row column
+ value as the sole positional argument and will return a value
+ to return to the user.
+
+ If processing is not necessary, the method should return ``None``.
+
+ :param dialect: Dialect instance in use.
+
+ :param coltype: DBAPI coltype argument received in cursor.description.
+
+ """
+ return None
+
+ def column_expression(self, colexpr):
+ """Given a SELECT column expression, return a wrapping SQL expression.
+
+ This is typically a SQL function that wraps a column expression
+ as rendered in the columns clause of a SELECT statement.
+ It is used for special data types that require
+ columns to be wrapped in some special database function in order
+ to coerce the value before being sent back to the application.
+ It is the SQL analogue of the :meth:`.TypeEngine.result_processor`
+ method.
+
+ The method is evaluated at statement compile time, as opposed
+ to statement construction time.
+
+ See also:
+
+ :ref:`types_sql_value_processing`
+
+ """
+
+ return None
+
+ @util.memoized_property
+ def _has_column_expression(self):
+ """memoized boolean, check if column_expression is implemented.
+
+ Allows the method to be skipped for the vast majority of expression
+ types that don't use this feature.
+
+ """
+
+ return self.__class__.column_expression.__code__ \
+ is not TypeEngine.column_expression.__code__
+
+ def bind_expression(self, bindvalue):
+ """"Given a bind value (i.e. a :class:`.BindParameter` instance),
+ return a SQL expression in its place.
+
+ This is typically a SQL function that wraps the existing bound
+ parameter within the statement. It is used for special data types
+ that require literals being wrapped in some special database function
+ in order to coerce an application-level value into a database-specific
+ format. It is the SQL analogue of the
+ :meth:`.TypeEngine.bind_processor` method.
+
+ The method is evaluated at statement compile time, as opposed
+ to statement construction time.
+
+ Note that this method, when implemented, should always return
+ the exact same structure, without any conditional logic, as it
+ may be used in an executemany() call against an arbitrary number
+ of bound parameter sets.
+
+ See also:
+
+ :ref:`types_sql_value_processing`
+
+ """
+ return None
+
+ @util.memoized_property
+ def _has_bind_expression(self):
+ """memoized boolean, check if bind_expression is implemented.
+
+ Allows the method to be skipped for the vast majority of expression
+ types that don't use this feature.
+
+ """
+
+ return self.__class__.bind_expression.__code__ \
+ is not TypeEngine.bind_expression.__code__
+
+ def compare_values(self, x, y):
+ """Compare two values for equality."""
+
+ return x == y
+
+ def get_dbapi_type(self, dbapi):
+ """Return the corresponding type object from the underlying DB-API, if
+ any.
+
+ This can be useful for calling ``setinputsizes()``, for example.
+
+ """
+ return None
+
+ @property
+ def python_type(self):
+ """Return the Python type object expected to be returned
+ by instances of this type, if known.
+
+ Basically, for those types which enforce a return type,
+ or are known across the board to do such for all common
+ DBAPIs (like ``int`` for example), will return that type.
+
+ If a return type is not defined, raises
+ ``NotImplementedError``.
+
+ Note that any type also accommodates NULL in SQL which
+ means you can also get back ``None`` from any type
+ in practice.
+
+ """
+ raise NotImplementedError()
+
+ def with_variant(self, type_, dialect_name):
+ """Produce a new type object that will utilize the given
+ type when applied to the dialect of the given name.
+
+ e.g.::
+
+ from sqlalchemy.types import String
+ from sqlalchemy.dialects import mysql
+
+ s = String()
+
+ s = s.with_variant(mysql.VARCHAR(collation='foo'), 'mysql')
+
+ The construction of :meth:`.TypeEngine.with_variant` is always
+ from the "fallback" type to that which is dialect specific.
+ The returned type is an instance of :class:`.Variant`, which
+ itself provides a :meth:`~sqlalchemy.types.Variant.with_variant`
+ that can be called repeatedly.
+
+ :param type_: a :class:`.TypeEngine` that will be selected
+ as a variant from the originating type, when a dialect
+ of the given name is in use.
+ :param dialect_name: base name of the dialect which uses
+ this type. (i.e. ``'postgresql'``, ``'mysql'``, etc.)
+
+ .. versionadded:: 0.7.2
+
+ """
+ return Variant(self, {dialect_name: type_})
+
+
+ @util.memoized_property
+ def _type_affinity(self):
+ """Return a rudimental 'affinity' value expressing the general class
+ of type."""
+
+ typ = None
+ for t in self.__class__.__mro__:
+ if t in (TypeEngine, UserDefinedType):
+ return typ
+ elif issubclass(t, (TypeEngine, UserDefinedType)):
+ typ = t
+ else:
+ return self.__class__
+
+ def dialect_impl(self, dialect):
+ """Return a dialect-specific implementation for this
+ :class:`.TypeEngine`.
+
+ """
+ try:
+ return dialect._type_memos[self]['impl']
+ except KeyError:
+ return self._dialect_info(dialect)['impl']
+
+
+ def _cached_literal_processor(self, dialect):
+ """Return a dialect-specific literal processor for this type."""
+ try:
+ return dialect._type_memos[self]['literal']
+ except KeyError:
+ d = self._dialect_info(dialect)
+ d['literal'] = lp = d['impl'].literal_processor(dialect)
+ return lp
+
+ def _cached_bind_processor(self, dialect):
+ """Return a dialect-specific bind processor for this type."""
+
+ try:
+ return dialect._type_memos[self]['bind']
+ except KeyError:
+ d = self._dialect_info(dialect)
+ d['bind'] = bp = d['impl'].bind_processor(dialect)
+ return bp
+
+ def _cached_result_processor(self, dialect, coltype):
+ """Return a dialect-specific result processor for this type."""
+
+ try:
+ return dialect._type_memos[self][coltype]
+ except KeyError:
+ d = self._dialect_info(dialect)
+ # key assumption: DBAPI type codes are
+ # constants. Else this dictionary would
+ # grow unbounded.
+ d[coltype] = rp = d['impl'].result_processor(dialect, coltype)
+ return rp
+
+ def _dialect_info(self, dialect):
+ """Return a dialect-specific registry which
+ caches a dialect-specific implementation, bind processing
+ function, and one or more result processing functions."""
+
+ if self in dialect._type_memos:
+ return dialect._type_memos[self]
+ else:
+ impl = self._gen_dialect_impl(dialect)
+ if impl is self:
+ impl = self.adapt(type(self))
+ # this can't be self, else we create a cycle
+ assert impl is not self
+ dialect._type_memos[self] = d = {'impl': impl}
+ return d
+
+ def _gen_dialect_impl(self, dialect):
+ return dialect.type_descriptor(self)
+
+ def adapt(self, cls, **kw):
+ """Produce an "adapted" form of this type, given an "impl" class
+ to work with.
+
+ This method is used internally to associate generic
+ types with "implementation" types that are specific to a particular
+ dialect.
+ """
+ return util.constructor_copy(self, cls, **kw)
+
+
+ def coerce_compared_value(self, op, value):
+ """Suggest a type for a 'coerced' Python value in an expression.
+
+ Given an operator and value, gives the type a chance
+ to return a type which the value should be coerced into.
+
+ The default behavior here is conservative; if the right-hand
+ side is already coerced into a SQL type based on its
+ Python type, it is usually left alone.
+
+ End-user functionality extension here should generally be via
+ :class:`.TypeDecorator`, which provides more liberal behavior in that
+ it defaults to coercing the other side of the expression into this
+ type, thus applying special Python conversions above and beyond those
+ needed by the DBAPI to both sides. It also provides the public method
+ :meth:`.TypeDecorator.coerce_compared_value` which is intended for
+ end-user customization of this behavior.
+
+ """
+ _coerced_type = _type_map.get(type(value), NULLTYPE)
+ if _coerced_type is NULLTYPE or _coerced_type._type_affinity \
+ is self._type_affinity:
+ return self
+ else:
+ return _coerced_type
+
+ def _compare_type_affinity(self, other):
+ return self._type_affinity is other._type_affinity
+
+ def compile(self, dialect=None):
+ """Produce a string-compiled form of this :class:`.TypeEngine`.
+
+ When called with no arguments, uses a "default" dialect
+ to produce a string result.
+
+ :param dialect: a :class:`.Dialect` instance.
+
+ """
+ # arg, return value is inconsistent with
+ # ClauseElement.compile()....this is a mistake.
+
+ if not dialect:
+ dialect = self._default_dialect()
+
+ return dialect.type_compiler.process(self)
+
+ @util.dependencies("sqlalchemy.engine.default")
+ def _default_dialect(self, default):
+ if self.__class__.__module__.startswith("sqlalchemy.dialects"):
+ tokens = self.__class__.__module__.split(".")[0:3]
+ mod = ".".join(tokens)
+ return getattr(__import__(mod).dialects, tokens[-1]).dialect()
+ else:
+ return default.DefaultDialect()
+
+ def __str__(self):
+ if util.py2k:
+ return unicode(self.compile()).\
+ encode('ascii', 'backslashreplace')
+ else:
+ return str(self.compile())
+
+ def __repr__(self):
+ return util.generic_repr(self)
+
+class UserDefinedType(TypeEngine):
+ """Base for user defined types.
+
+ This should be the base of new types. Note that
+ for most cases, :class:`.TypeDecorator` is probably
+ more appropriate::
+
+ import sqlalchemy.types as types
+
+ class MyType(types.UserDefinedType):
+ def __init__(self, precision = 8):
+ self.precision = precision
+
+ def get_col_spec(self):
+ return "MYTYPE(%s)" % self.precision
+
+ def bind_processor(self, dialect):
+ def process(value):
+ return value
+ return process
+
+ def result_processor(self, dialect, coltype):
+ def process(value):
+ return value
+ return process
+
+ Once the type is made, it's immediately usable::
+
+ table = Table('foo', meta,
+ Column('id', Integer, primary_key=True),
+ Column('data', MyType(16))
+ )
+
+ """
+ __visit_name__ = "user_defined"
+
+
+ class Comparator(TypeEngine.Comparator):
+ def _adapt_expression(self, op, other_comparator):
+ if hasattr(self.type, 'adapt_operator'):
+ util.warn_deprecated(
+ "UserDefinedType.adapt_operator is deprecated. Create "
+ "a UserDefinedType.Comparator subclass instead which "
+ "generates the desired expression constructs, given a "
+ "particular operator."
+ )
+ return self.type.adapt_operator(op), self.type
+ else:
+ return op, self.type
+
+ comparator_factory = Comparator
+
+ def coerce_compared_value(self, op, value):
+ """Suggest a type for a 'coerced' Python value in an expression.
+
+ Default behavior for :class:`.UserDefinedType` is the
+ same as that of :class:`.TypeDecorator`; by default it returns
+ ``self``, assuming the compared value should be coerced into
+ the same type as this one. See
+ :meth:`.TypeDecorator.coerce_compared_value` for more detail.
+
+ .. versionchanged:: 0.8 :meth:`.UserDefinedType.coerce_compared_value`
+ now returns ``self`` by default, rather than falling onto the
+ more fundamental behavior of
+ :meth:`.TypeEngine.coerce_compared_value`.
+
+ """
+
+ return self
+
+
+class TypeDecorator(TypeEngine):
+ """Allows the creation of types which add additional functionality
+ to an existing type.
+
+ This method is preferred to direct subclassing of SQLAlchemy's
+ built-in types as it ensures that all required functionality of
+ the underlying type is kept in place.
+
+ Typical usage::
+
+ import sqlalchemy.types as types
+
+ class MyType(types.TypeDecorator):
+ '''Prefixes Unicode values with "PREFIX:" on the way in and
+ strips it off on the way out.
+ '''
+
+ impl = types.Unicode
+
+ def process_bind_param(self, value, dialect):
+ return "PREFIX:" + value
+
+ def process_result_value(self, value, dialect):
+ return value[7:]
+
+ def copy(self):
+ return MyType(self.impl.length)
+
+ The class-level "impl" attribute is required, and can reference any
+ TypeEngine class. Alternatively, the load_dialect_impl() method
+ can be used to provide different type classes based on the dialect
+ given; in this case, the "impl" variable can reference
+ ``TypeEngine`` as a placeholder.
+
+ Types that receive a Python type that isn't similar to the ultimate type
+ used may want to define the :meth:`TypeDecorator.coerce_compared_value`
+ method. This is used to give the expression system a hint when coercing
+ Python objects into bind parameters within expressions. Consider this
+ expression::
+
+ mytable.c.somecol + datetime.date(2009, 5, 15)
+
+ Above, if "somecol" is an ``Integer`` variant, it makes sense that
+ we're doing date arithmetic, where above is usually interpreted
+ by databases as adding a number of days to the given date.
+ The expression system does the right thing by not attempting to
+ coerce the "date()" value into an integer-oriented bind parameter.
+
+ However, in the case of ``TypeDecorator``, we are usually changing an
+ incoming Python type to something new - ``TypeDecorator`` by default will
+ "coerce" the non-typed side to be the same type as itself. Such as below,
+ we define an "epoch" type that stores a date value as an integer::
+
+ class MyEpochType(types.TypeDecorator):
+ impl = types.Integer
+
+ epoch = datetime.date(1970, 1, 1)
+
+ def process_bind_param(self, value, dialect):
+ return (value - self.epoch).days
+
+ def process_result_value(self, value, dialect):
+ return self.epoch + timedelta(days=value)
+
+ Our expression of ``somecol + date`` with the above type will coerce the
+ "date" on the right side to also be treated as ``MyEpochType``.
+
+ This behavior can be overridden via the
+ :meth:`~TypeDecorator.coerce_compared_value` method, which returns a type
+ that should be used for the value of the expression. Below we set it such
+ that an integer value will be treated as an ``Integer``, and any other
+ value is assumed to be a date and will be treated as a ``MyEpochType``::
+
+ def coerce_compared_value(self, op, value):
+ if isinstance(value, int):
+ return Integer()
+ else:
+ return self
+
+ """
+
+ __visit_name__ = "type_decorator"
+
+ def __init__(self, *args, **kwargs):
+ """Construct a :class:`.TypeDecorator`.
+
+ Arguments sent here are passed to the constructor
+ of the class assigned to the ``impl`` class level attribute,
+ assuming the ``impl`` is a callable, and the resulting
+ object is assigned to the ``self.impl`` instance attribute
+ (thus overriding the class attribute of the same name).
+
+ If the class level ``impl`` is not a callable (the unusual case),
+ it will be assigned to the same instance attribute 'as-is',
+ ignoring those arguments passed to the constructor.
+
+ Subclasses can override this to customize the generation
+ of ``self.impl`` entirely.
+
+ """
+
+ if not hasattr(self.__class__, 'impl'):
+ raise AssertionError("TypeDecorator implementations "
+ "require a class-level variable "
+ "'impl' which refers to the class of "
+ "type being decorated")
+ self.impl = to_instance(self.__class__.impl, *args, **kwargs)
+
    coerce_to_is_types = (util.NoneType, )
    """Specify those Python types which should be coerced at the expression
    level to "IS <constant>" when compared using ``==`` (and same for
    ``IS NOT`` in conjunction with ``!=``).

    For most SQLAlchemy types, this includes ``NoneType``, as well as ``bool``.

    :class:`.TypeDecorator` modifies this list to only include ``NoneType``,
    as typedecorator implementations that deal with boolean types are common.

    Custom :class:`.TypeDecorator` classes can override this attribute to
    return an empty tuple, in which case no values will be coerced to
    constants.

    .. versionadded:: 0.8.2
        Added :attr:`.TypeDecorator.coerce_to_is_types` to allow for easier
        control of ``__eq__()`` ``__ne__()`` operations.

    """
+
    class Comparator(TypeEngine.Comparator):
        """Comparator for :class:`.TypeDecorator`.

        Injects the owning type's :attr:`.TypeDecorator.coerce_to_is_types`
        into every operation via the ``_python_is_types`` keyword, so that
        comparisons against those Python constants can be rendered as
        IS / IS NOT by the operator layer.

        """

        def operate(self, op, *other, **kwargs):
            # expr.type is the TypeDecorator instance owning this comparator
            kwargs['_python_is_types'] = self.expr.type.coerce_to_is_types
            return super(TypeDecorator.Comparator, self).operate(
                op, *other, **kwargs)

        def reverse_operate(self, op, other, **kwargs):
            kwargs['_python_is_types'] = self.expr.type.coerce_to_is_types
            return super(TypeDecorator.Comparator, self).reverse_operate(
                op, other, **kwargs)
+
    @property
    def comparator_factory(self):
        # Build a comparator class on the fly that layers
        # TypeDecorator.Comparator (which supplies _python_is_types)
        # on top of the comparator_factory of the underlying impl type.
        return type("TDComparator",
                    (TypeDecorator.Comparator, self.impl.comparator_factory),
                    {})
+
    def _gen_dialect_impl(self, dialect):
        """Produce a dialect-specific version of this :class:`.TypeDecorator`.

        First consults ``dialect.type_descriptor()``; if the dialect
        returns a different object, that replacement is used as-is.
        Otherwise, the impl type is adapted for the dialect and attached
        to a shallow copy of this :class:`.TypeDecorator`, which is then
        returned.
        """
        adapted = dialect.type_descriptor(self)
        if adapted is not self:
            return adapted

        # otherwise adapt the impl type, link
        # to a copy of this TypeDecorator and return
        # that.
        typedesc = self.load_dialect_impl(dialect).dialect_impl(dialect)
        tt = self.copy()
        # a user-defined copy() that returns the wrong type would
        # silently produce a broken hybrid; guard against it here.
        if not isinstance(tt, self.__class__):
            raise AssertionError('Type object %s does not properly '
                                 'implement the copy() method, it must '
                                 'return an object of type %s' % (self,
                                 self.__class__))
        tt.impl = typedesc
        return tt
+
    @property
    def _type_affinity(self):
        """Delegate "type affinity" to the wrapped ``impl`` type, so that
        coercion rules treat this decorator like its underlying type."""
        return self.impl._type_affinity
+
+ def type_engine(self, dialect):
+ """Return a dialect-specific :class:`.TypeEngine` instance
+ for this :class:`.TypeDecorator`.
+
+ In most cases this returns a dialect-adapted form of
+ the :class:`.TypeEngine` type represented by ``self.impl``.
+ Makes usage of :meth:`dialect_impl` but also traverses
+ into wrapped :class:`.TypeDecorator` instances.
+ Behavior can be customized here by overriding
+ :meth:`load_dialect_impl`.
+
+ """
+ adapted = dialect.type_descriptor(self)
+ if type(adapted) is not type(self):
+ return adapted
+ elif isinstance(self.impl, TypeDecorator):
+ return self.impl.type_engine(dialect)
+ else:
+ return self.load_dialect_impl(dialect)
+
    def load_dialect_impl(self, dialect):
        """Return a :class:`.TypeEngine` object corresponding to a dialect.

        This is an end-user override hook that can be used to provide
        differing types depending on the given dialect.  It is used
        by the :class:`.TypeDecorator` implementation of :meth:`type_engine`
        to help determine what type should ultimately be returned
        for a given :class:`.TypeDecorator`.

        By default returns ``self.impl`` unchanged; the dialect argument
        is ignored in the base implementation.

        """
        return self.impl
+
    def __getattr__(self, key):
        """Proxy all other undefined accessors to the underlying
        implementation.

        Only invoked for names not found on this instance or its class,
        so locally-defined attributes and methods always take precedence.
        """
        return getattr(self.impl, key)
+
    def process_literal_param(self, value, dialect):
        """Receive a literal parameter value to be rendered inline within
        a statement.

        This method is used when the compiler renders a
        literal value without using binds, typically within DDL
        such as in the "server default" of a column or an expression
        within a CHECK constraint.

        The returned string will be rendered into the output string.

        The base implementation raises ``NotImplementedError``; subclasses
        override it, and :attr:`._has_literal_processor` detects whether an
        override is present without triggering the exception.

        .. versionadded:: 0.9.0

        """
        raise NotImplementedError()
+
    def process_bind_param(self, value, dialect):
        """Receive a bound parameter value to be converted.

        Subclasses override this method to return the
        value that should be passed along to the underlying
        :class:`.TypeEngine` object, and from there to the
        DBAPI ``execute()`` method.

        The operation could be anything desired to perform custom
        behavior, such as transforming or serializing data.
        This could also be used as a hook for validating logic.

        This operation should be designed with the reverse operation
        in mind, which would be the process_result_value method of
        this class.

        The base implementation raises ``NotImplementedError``; an
        override is detected via :attr:`._has_bind_processor` so the
        exception is never raised in normal operation.

        :param value: Data to operate upon, of any type expected by
         this method in the subclass.  Can be ``None``.
        :param dialect: the :class:`.Dialect` in use.

        """

        raise NotImplementedError()
+
    def process_result_value(self, value, dialect):
        """Receive a result-row column value to be converted.

        Subclasses should implement this method to operate on data
        fetched from the database.

        Subclasses override this method to return the
        value that should be passed back to the application,
        given a value that is already processed by
        the underlying :class:`.TypeEngine` object, originally
        from the DBAPI cursor method ``fetchone()`` or similar.

        The operation could be anything desired to perform custom
        behavior, such as transforming or serializing data.
        This could also be used as a hook for validating logic.

        This operation should be designed to be reversible by
        the "process_bind_param" method of this class.

        :param value: Data to operate upon, of any type expected by
         this method in the subclass.  Can be ``None``.
        :param dialect: the :class:`.Dialect` in use.

        """

        raise NotImplementedError()
+
+ @util.memoized_property
+ def _has_bind_processor(self):
+ """memoized boolean, check if process_bind_param is implemented.
+
+ Allows the base process_bind_param to raise
+ NotImplementedError without needing to test an expensive
+ exception throw.
+
+ """
+
+ return self.__class__.process_bind_param.__code__ \
+ is not TypeDecorator.process_bind_param.__code__
+
+ @util.memoized_property
+ def _has_literal_processor(self):
+ """memoized boolean, check if process_literal_param is implemented.
+
+
+ """
+
+ return self.__class__.process_literal_param.__code__ \
+ is not TypeDecorator.process_literal_param.__code__
+
+ def literal_processor(self, dialect):
+ """Provide a literal processing function for the given
+ :class:`.Dialect`.
+
+ Subclasses here will typically override :meth:`.TypeDecorator.process_literal_param`
+ instead of this method directly.
+
+ By default, this method makes use of :meth:`.TypeDecorator.process_bind_param`
+ if that method is implemented, where :meth:`.TypeDecorator.process_literal_param`
+ is not. The rationale here is that :class:`.TypeDecorator` typically deals
+ with Python conversions of data that are above the layer of database
+ presentation. With the value converted by :meth:`.TypeDecorator.process_bind_param`,
+ the underlying type will then handle whether it needs to be presented to the
+ DBAPI as a bound parameter or to the database as an inline SQL value.
+
+ .. versionadded:: 0.9.0
+
+ """
+ if self._has_literal_processor:
+ process_param = self.process_literal_param
+ elif self._has_bind_processor:
+ # the bind processor should normally be OK
+ # for TypeDecorator since it isn't doing DB-level
+ # handling, the handling here won't be different for bound vs.
+ # literals.
+ process_param = self.process_bind_param
+ else:
+ process_param = None
+
+ if process_param:
+ impl_processor = self.impl.literal_processor(dialect)
+ if impl_processor:
+ def process(value):
+ return impl_processor(process_param(value, dialect))
+ else:
+ def process(value):
+ return process_param(value, dialect)
+
+ return process
+ else:
+ return self.impl.literal_processor(dialect)
+
+ def bind_processor(self, dialect):
+ """Provide a bound value processing function for the
+ given :class:`.Dialect`.
+
+ This is the method that fulfills the :class:`.TypeEngine`
+ contract for bound value conversion. :class:`.TypeDecorator`
+ will wrap a user-defined implementation of
+ :meth:`process_bind_param` here.
+
+ User-defined code can override this method directly,
+ though its likely best to use :meth:`process_bind_param` so that
+ the processing provided by ``self.impl`` is maintained.
+
+ :param dialect: Dialect instance in use.
+
+ This method is the reverse counterpart to the
+ :meth:`result_processor` method of this class.
+
+ """
+ if self._has_bind_processor:
+ process_param = self.process_bind_param
+ impl_processor = self.impl.bind_processor(dialect)
+ if impl_processor:
+ def process(value):
+ return impl_processor(process_param(value, dialect))
+
+ else:
+ def process(value):
+ return process_param(value, dialect)
+
+ return process
+ else:
+ return self.impl.bind_processor(dialect)
+
+ @util.memoized_property
+ def _has_result_processor(self):
+ """memoized boolean, check if process_result_value is implemented.
+
+ Allows the base process_result_value to raise
+ NotImplementedError without needing to test an expensive
+ exception throw.
+
+ """
+ return self.__class__.process_result_value.__code__ \
+ is not TypeDecorator.process_result_value.__code__
+
+ def result_processor(self, dialect, coltype):
+ """Provide a result value processing function for the given
+ :class:`.Dialect`.
+
+ This is the method that fulfills the :class:`.TypeEngine`
+ contract for result value conversion. :class:`.TypeDecorator`
+ will wrap a user-defined implementation of
+ :meth:`process_result_value` here.
+
+ User-defined code can override this method directly,
+ though its likely best to use :meth:`process_result_value` so that
+ the processing provided by ``self.impl`` is maintained.
+
+ :param dialect: Dialect instance in use.
+ :param coltype: An SQLAlchemy data type
+
+ This method is the reverse counterpart to the
+ :meth:`bind_processor` method of this class.
+
+ """
+ if self._has_result_processor:
+ process_value = self.process_result_value
+ impl_processor = self.impl.result_processor(dialect,
+ coltype)
+ if impl_processor:
+ def process(value):
+ return process_value(impl_processor(value), dialect)
+
+ else:
+ def process(value):
+ return process_value(value, dialect)
+
+ return process
+ else:
+ return self.impl.result_processor(dialect, coltype)
+
    def coerce_compared_value(self, op, value):
        """Suggest a type for a 'coerced' Python value in an expression.

        By default, returns self.   This method is called by
        the expression system when an object using this type is
        on the left or right side of an expression against a plain Python
        object which does not yet have a SQLAlchemy type assigned::

            expr = table.c.somecolumn + 35

        Where above, if ``somecolumn`` uses this type, this method will
        be called with the value ``operator.add``
        and ``35``.  The return value is whatever SQLAlchemy type should
        be used for ``35`` for this particular operation.

        :param op: the operator in use (e.g. ``operator.add``).
        :param value: the plain Python value on the other side.

        """
        return self
+
+ def copy(self):
+ """Produce a copy of this :class:`.TypeDecorator` instance.
+
+ This is a shallow copy and is provided to fulfill part of
+ the :class:`.TypeEngine` contract. It usually does not
+ need to be overridden unless the user-defined :class:`.TypeDecorator`
+ has local state that should be deep-copied.
+
+ """
+
+ instance = self.__class__.__new__(self.__class__)
+ instance.__dict__.update(self.__dict__)
+ return instance
+
    def get_dbapi_type(self, dbapi):
        """Return the DBAPI type object represented by this
        :class:`.TypeDecorator`.

        By default this calls upon :meth:`.TypeEngine.get_dbapi_type` of the
        underlying "impl".

        :param dbapi: the DBAPI module in use.
        """
        return self.impl.get_dbapi_type(dbapi)
+
    def compare_values(self, x, y):
        """Given two values, compare them for equality.

        By default this calls upon :meth:`.TypeEngine.compare_values`
        of the underlying "impl", which in turn usually
        uses the Python equals operator ``==``.

        This function is used by the ORM to compare
        an original-loaded value with an intercepted
        "changed" value, to determine if a net change
        has occurred.

        :param x: the first value, typically the original-loaded one.
        :param y: the second value, typically the current one.

        """
        return self.impl.compare_values(x, y)
+
    def __repr__(self):
        # inspect the wrapped impl so the repr shows the decorated
        # type's constructor arguments rather than this wrapper's.
        return util.generic_repr(self, to_inspect=self.impl)
+
+
class Variant(TypeDecorator):
    """A wrapping type that selects among a variety of
    implementations based on dialect in use.

    The :class:`.Variant` type is typically constructed
    using the :meth:`.TypeEngine.with_variant` method.

    .. versionadded:: 0.7.2

    .. seealso:: :meth:`.TypeEngine.with_variant` for an example of use.

    """

    def __init__(self, base, mapping):
        """Construct a new :class:`.Variant`.

        :param base: the base 'fallback' type
        :param mapping: dictionary of string dialect names to
          :class:`.TypeEngine` instances.

        """
        # note: intentionally does not call TypeDecorator.__init__;
        # the impl is assigned directly from the given base type.
        self.impl = base
        self.mapping = mapping

    def load_dialect_impl(self, dialect):
        # select the per-dialect type if registered, else the fallback
        return self.mapping.get(dialect.name, self.impl)

    def with_variant(self, type_, dialect_name):
        """Return a new :class:`.Variant` which adds the given
        type + dialect name to the mapping, in addition to the
        mapping present in this :class:`.Variant`.

        :param type_: a :class:`.TypeEngine` that will be selected
         as a variant from the originating type, when a dialect
         of the given name is in use.
        :param dialect_name: base name of the dialect which uses
         this type. (i.e. ``'postgresql'``, ``'mysql'``, etc.)

        """
        if dialect_name in self.mapping:
            raise exc.ArgumentError(
                "Dialect '%s' is already present in the "
                "mapping for this Variant" % dialect_name)
        # build a fresh Variant rather than mutating this one
        extended = dict(self.mapping)
        extended[dialect_name] = type_
        return Variant(self.impl, extended)
+
def _reconstitute_comparator(expression):
    """Return the ``comparator`` attribute of the given expression.

    NOTE(review): appears to be a module-level reconstitution hook,
    presumably for pickling support -- confirm against callers.
    """
    return expression.comparator
+
+
def to_instance(typeobj, *arg, **kw):
    """Coerce the given type object into a type instance.

    ``None`` maps to ``NULLTYPE``; a callable (typically a type class)
    is invoked with the given arguments; anything else is assumed to be
    an instance already and is returned unchanged.
    """
    if typeobj is None:
        return NULLTYPE
    return typeobj(*arg, **kw) if util.callable(typeobj) else typeobj
+
+
def adapt_type(typeobj, colspecs):
    """Adapt a type instance to an implementation found in ``colspecs``.

    Walks the type's MRO (excluding ``object``) looking for an entry in
    the ``colspecs`` mapping; a type class given as ``typeobj`` is
    instantiated with no arguments first.

    :param typeobj: a type instance, or a type class to instantiate.
    :param colspecs: mapping of generic type classes to
      implementation classes.
    """
    if isinstance(typeobj, type):
        typeobj = typeobj()
    for candidate in typeobj.__class__.__mro__[0:-1]:
        if candidate in colspecs:
            impltype = colspecs[candidate]
            break
    else:
        # couldn't adapt - so just return the type itself
        # (it may be a user-defined type)
        return typeobj
    # if we adapted the given generic type to a database-specific type,
    # but it turns out the originally given "generic" type
    # is actually a subclass of our resulting type, then we were already
    # given a more specific type than that required; so use that.
    if issubclass(typeobj.__class__, impltype):
        return typeobj
    return typeobj.adapt(impltype)
+
+
diff --git a/lib/sqlalchemy/sql/util.py b/lib/sqlalchemy/sql/util.py
index 6796d7edb..50ce30aaf 100644
--- a/lib/sqlalchemy/sql/util.py
+++ b/lib/sqlalchemy/sql/util.py
@@ -1,48 +1,32 @@
# sql/util.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-from .. import exc, schema, util, sql
-from ..util import topological
-from . import expression, operators, visitors
+"""High level utilities which build upon other modules here.
+
+"""
+
+from .. import exc, util
+from .base import _from_objects, ColumnSet
+from . import operators, visitors
from itertools import chain
from collections import deque
-"""Utility functions that build upon SQL and Schema constructs."""
+from .elements import BindParameter, ColumnClause, ColumnElement, \
+ Null, UnaryExpression, literal_column, Label
+from .selectable import ScalarSelect, Join, FromClause, FromGrouping
+from .schema import Column
+join_condition = util.langhelpers.public_factory(
+ Join._join_condition,
+ ".sql.util.join_condition")
-def sort_tables(tables, skip_fn=None, extra_dependencies=None):
- """sort a collection of Table objects in order of
- their foreign-key dependency."""
-
- tables = list(tables)
- tuples = []
- if extra_dependencies is not None:
- tuples.extend(extra_dependencies)
-
- def visit_foreign_key(fkey):
- if fkey.use_alter:
- return
- elif skip_fn and skip_fn(fkey):
- return
- parent_table = fkey.column.table
- if parent_table in tables:
- child_table = fkey.parent.table
- if parent_table is not child_table:
- tuples.append((parent_table, child_table))
-
- for table in tables:
- visitors.traverse(table,
- {'schema_visitor': True},
- {'foreign_key': visit_foreign_key})
-
- tuples.extend(
- [parent, table] for parent in table._extra_dependencies
- )
-
- return list(topological.sort(tuples, tables))
+# names that are still being imported from the outside
+from .annotation import _shallow_annotate, _deep_annotate, _deep_deannotate
+from .elements import _find_columns
+from .ddl import sort_tables
def find_join_source(clauses, join_to):
@@ -62,7 +46,7 @@ def find_join_source(clauses, join_to):
"""
- selectables = list(expression._from_objects(join_to))
+ selectables = list(_from_objects(join_to))
for i, f in enumerate(clauses):
for s in selectables:
if f.is_derived_from(s):
@@ -109,7 +93,7 @@ def visit_binary_product(fn, expr):
stack = []
def visit(element):
- if isinstance(element, (expression.ScalarSelect)):
+ if isinstance(element, ScalarSelect):
# we dont want to dig into correlated subqueries,
# those are just column elements by themselves
yield element
@@ -123,7 +107,7 @@ def visit_binary_product(fn, expr):
for elem in element.get_children():
visit(elem)
else:
- if isinstance(element, expression.ColumnClause):
+ if isinstance(element, ColumnClause):
yield element
for elem in element.get_children():
for e in visit(elem):
@@ -163,13 +147,6 @@ def find_tables(clause, check_columns=False,
return tables
-def find_columns(clause):
- """locate Column objects within the given expression."""
-
- cols = util.column_set()
- visitors.traverse(clause, {}, {'column': cols.add})
- return cols
-
def unwrap_order_by(clause):
"""Break up an 'order by' expression into individual column-expressions,
@@ -179,9 +156,9 @@ def unwrap_order_by(clause):
stack = deque([clause])
while stack:
t = stack.popleft()
- if isinstance(t, expression.ColumnElement) and \
+ if isinstance(t, ColumnElement) and \
(
- not isinstance(t, expression.UnaryExpression) or \
+ not isinstance(t, UnaryExpression) or \
not operators.is_ordering_modifier(t.modifier)
):
cols.add(t)
@@ -211,9 +188,9 @@ def surface_selectables(clause):
while stack:
elem = stack.pop()
yield elem
- if isinstance(elem, expression.Join):
+ if isinstance(elem, Join):
stack.extend((elem.left, elem.right))
- elif isinstance(elem, expression.FromGrouping):
+ elif isinstance(elem, FromGrouping):
stack.append(elem.element)
def selectables_overlap(left, right):
@@ -277,27 +254,6 @@ class _repr_params(object):
return repr(self.params)
-def expression_as_ddl(clause):
- """Given a SQL expression, convert for usage in DDL, such as
- CREATE INDEX and CHECK CONSTRAINT.
-
- Converts bind params into quoted literals, column identifiers
- into detached column constructs so that the parent table
- identifier is not included.
-
- """
- def repl(element):
- if isinstance(element, expression.BindParameter):
- return expression.literal_column(_quote_ddl_expr(element.value))
- elif isinstance(element, expression.ColumnClause) and \
- element.table is not None:
- col = expression.column(element.name)
- col.quote = element.quote
- return col
- else:
- return None
-
- return visitors.replacement_traverse(clause, {}, repl)
def adapt_criterion_to_null(crit, nulls):
@@ -307,308 +263,22 @@ def adapt_criterion_to_null(crit, nulls):
"""
def visit_binary(binary):
- if isinstance(binary.left, expression.BindParameter) \
+ if isinstance(binary.left, BindParameter) \
and binary.left._identifying_key in nulls:
# reverse order if the NULL is on the left side
binary.left = binary.right
- binary.right = expression.null()
+ binary.right = Null()
binary.operator = operators.is_
binary.negate = operators.isnot
- elif isinstance(binary.right, expression.BindParameter) \
+ elif isinstance(binary.right, BindParameter) \
and binary.right._identifying_key in nulls:
- binary.right = expression.null()
+ binary.right = Null()
binary.operator = operators.is_
binary.negate = operators.isnot
return visitors.cloned_traverse(crit, {}, {'binary': visit_binary})
-def join_condition(a, b, ignore_nonexistent_tables=False,
- a_subset=None,
- consider_as_foreign_keys=None):
- """create a join condition between two tables or selectables.
-
- e.g.::
-
- join_condition(tablea, tableb)
-
- would produce an expression along the lines of::
-
- tablea.c.id==tableb.c.tablea_id
-
- The join is determined based on the foreign key relationships
- between the two selectables. If there are multiple ways
- to join, or no way to join, an error is raised.
-
- :param ignore_nonexistent_tables: Deprecated - this
- flag is no longer used. Only resolution errors regarding
- the two given tables are propagated.
-
- :param a_subset: An optional expression that is a sub-component
- of ``a``. An attempt will be made to join to just this sub-component
- first before looking at the full ``a`` construct, and if found
- will be successful even if there are other ways to join to ``a``.
- This allows the "right side" of a join to be passed thereby
- providing a "natural join".
-
- """
- crit = []
- constraints = set()
-
- for left in (a_subset, a):
- if left is None:
- continue
- for fk in sorted(
- b.foreign_keys,
- key=lambda fk: fk.parent._creation_order):
- if consider_as_foreign_keys is not None and \
- fk.parent not in consider_as_foreign_keys:
- continue
- try:
- col = fk.get_referent(left)
- except exc.NoReferenceError as nrte:
- if nrte.table_name == left.name:
- raise
- else:
- continue
-
- if col is not None:
- crit.append(col == fk.parent)
- constraints.add(fk.constraint)
- if left is not b:
- for fk in sorted(
- left.foreign_keys,
- key=lambda fk: fk.parent._creation_order):
- if consider_as_foreign_keys is not None and \
- fk.parent not in consider_as_foreign_keys:
- continue
- try:
- col = fk.get_referent(b)
- except exc.NoReferenceError as nrte:
- if nrte.table_name == b.name:
- raise
- else:
- # this is totally covered. can't get
- # coverage to mark it.
- continue
-
- if col is not None:
- crit.append(col == fk.parent)
- constraints.add(fk.constraint)
- if crit:
- break
-
- if len(crit) == 0:
- if isinstance(b, expression.FromGrouping):
- hint = " Perhaps you meant to convert the right side to a "\
- "subquery using alias()?"
- else:
- hint = ""
- raise exc.NoForeignKeysError(
- "Can't find any foreign key relationships "
- "between '%s' and '%s'.%s" % (a.description, b.description, hint))
- elif len(constraints) > 1:
- raise exc.AmbiguousForeignKeysError(
- "Can't determine join between '%s' and '%s'; "
- "tables have more than one foreign key "
- "constraint relationship between them. "
- "Please specify the 'onclause' of this "
- "join explicitly." % (a.description, b.description))
- elif len(crit) == 1:
- return (crit[0])
- else:
- return sql.and_(*crit)
-
-
-class Annotated(object):
- """clones a ClauseElement and applies an 'annotations' dictionary.
-
- Unlike regular clones, this clone also mimics __hash__() and
- __cmp__() of the original element so that it takes its place
- in hashed collections.
-
- A reference to the original element is maintained, for the important
- reason of keeping its hash value current. When GC'ed, the
- hash value may be reused, causing conflicts.
-
- """
-
- def __new__(cls, *args):
- if not args:
- # clone constructor
- return object.__new__(cls)
- else:
- element, values = args
- # pull appropriate subclass from registry of annotated
- # classes
- try:
- cls = annotated_classes[element.__class__]
- except KeyError:
- cls = annotated_classes[element.__class__] = type.__new__(type,
- "Annotated%s" % element.__class__.__name__,
- (cls, element.__class__), {})
- return object.__new__(cls)
-
- def __init__(self, element, values):
- # force FromClause to generate their internal
- # collections into __dict__
- if isinstance(element, expression.FromClause):
- element.c
-
- self.__dict__ = element.__dict__.copy()
- expression.ColumnElement.comparator._reset(self)
- self.__element = element
- self._annotations = values
-
- def _annotate(self, values):
- _values = self._annotations.copy()
- _values.update(values)
- return self._with_annotations(_values)
-
- def _with_annotations(self, values):
- clone = self.__class__.__new__(self.__class__)
- clone.__dict__ = self.__dict__.copy()
- expression.ColumnElement.comparator._reset(clone)
- clone._annotations = values
- return clone
-
- def _deannotate(self, values=None, clone=True):
- if values is None:
- return self.__element
- else:
- _values = self._annotations.copy()
- for v in values:
- _values.pop(v, None)
- return self._with_annotations(_values)
-
- def _compiler_dispatch(self, visitor, **kw):
- return self.__element.__class__._compiler_dispatch(self, visitor, **kw)
-
- @property
- def _constructor(self):
- return self.__element._constructor
-
- def _clone(self):
- clone = self.__element._clone()
- if clone is self.__element:
- # detect immutable, don't change anything
- return self
- else:
- # update the clone with any changes that have occurred
- # to this object's __dict__.
- clone.__dict__.update(self.__dict__)
- return self.__class__(clone, self._annotations)
-
- def __hash__(self):
- return hash(self.__element)
-
- def __eq__(self, other):
- if isinstance(self.__element, expression.ColumnOperators):
- return self.__element.__class__.__eq__(self, other)
- else:
- return hash(other) == hash(self)
-
-
-class AnnotatedColumnElement(Annotated):
- def __init__(self, element, values):
- Annotated.__init__(self, element, values)
- for attr in ('name', 'key'):
- if self.__dict__.get(attr, False) is None:
- self.__dict__.pop(attr)
-
- @util.memoized_property
- def name(self):
- """pull 'name' from parent, if not present"""
- return self._Annotated__element.name
-
- @util.memoized_property
- def key(self):
- """pull 'key' from parent, if not present"""
- return self._Annotated__element.key
-
- @util.memoized_property
- def info(self):
- return self._Annotated__element.info
-
-# hard-generate Annotated subclasses. this technique
-# is used instead of on-the-fly types (i.e. type.__new__())
-# so that the resulting objects are pickleable.
-annotated_classes = {}
-
-for cls in list(expression.__dict__.values()) + [schema.Column, schema.Table]:
- if isinstance(cls, type) and issubclass(cls, expression.ClauseElement):
- if issubclass(cls, expression.ColumnElement):
- annotation_cls = "AnnotatedColumnElement"
- else:
- annotation_cls = "Annotated"
- exec("class Annotated%s(%s, cls):\n" \
- " pass" % (cls.__name__, annotation_cls), locals())
- exec("annotated_classes[cls] = Annotated%s" % (cls.__name__,))
-
-
-def _deep_annotate(element, annotations, exclude=None):
- """Deep copy the given ClauseElement, annotating each element
- with the given annotations dictionary.
-
- Elements within the exclude collection will be cloned but not annotated.
-
- """
- def clone(elem):
- if exclude and \
- hasattr(elem, 'proxy_set') and \
- elem.proxy_set.intersection(exclude):
- newelem = elem._clone()
- elif annotations != elem._annotations:
- newelem = elem._annotate(annotations)
- else:
- newelem = elem
- newelem._copy_internals(clone=clone)
- return newelem
-
- if element is not None:
- element = clone(element)
- return element
-
-
-def _deep_deannotate(element, values=None):
- """Deep copy the given element, removing annotations."""
-
- cloned = util.column_dict()
-
- def clone(elem):
- # if a values dict is given,
- # the elem must be cloned each time it appears,
- # as there may be different annotations in source
- # elements that are remaining. if totally
- # removing all annotations, can assume the same
- # slate...
- if values or elem not in cloned:
- newelem = elem._deannotate(values=values, clone=True)
- newelem._copy_internals(clone=clone)
- if not values:
- cloned[elem] = newelem
- return newelem
- else:
- return cloned[elem]
-
- if element is not None:
- element = clone(element)
- return element
-
-
-def _shallow_annotate(element, annotations):
- """Annotate the given ClauseElement and copy its internals so that
- internal objects refer to the new annotated object.
-
- Basically used to apply a "dont traverse" annotation to a
- selectable, without digging throughout the whole
- structure wasting time.
- """
- element = element._annotate(annotations)
- element._copy_internals()
- return element
-
-
def splice_joins(left, right, stop_on=None):
if left is None:
return right
@@ -619,7 +289,7 @@ def splice_joins(left, right, stop_on=None):
ret = None
while stack:
(right, prevright) = stack.pop()
- if isinstance(right, expression.Join) and right is not stop_on:
+ if isinstance(right, Join) and right is not stop_on:
right = right._clone()
right._reset_exported()
right.onclause = adapter.traverse(right.onclause)
@@ -703,7 +373,7 @@ def reduce_columns(columns, *clauses, **kw):
if clause is not None:
visitors.traverse(clause, {}, {'binary': visit_binary})
- return expression.ColumnSet(columns.difference(omit))
+ return ColumnSet(columns.difference(omit))
def criterion_as_pairs(expression, consider_as_foreign_keys=None,
@@ -722,8 +392,8 @@ def criterion_as_pairs(expression, consider_as_foreign_keys=None,
def visit_binary(binary):
if not any_operator and binary.operator is not operators.eq:
return
- if not isinstance(binary.left, sql.ColumnElement) or \
- not isinstance(binary.right, sql.ColumnElement):
+ if not isinstance(binary.left, ColumnElement) or \
+ not isinstance(binary.right, ColumnElement):
return
if consider_as_foreign_keys:
@@ -745,8 +415,8 @@ def criterion_as_pairs(expression, consider_as_foreign_keys=None,
binary.left not in consider_as_referenced_keys):
pairs.append((binary.right, binary.left))
else:
- if isinstance(binary.left, schema.Column) and \
- isinstance(binary.right, schema.Column):
+ if isinstance(binary.left, Column) and \
+ isinstance(binary.right, Column):
if binary.left.references(binary.right):
pairs.append((binary.right, binary.left))
elif binary.right.references(binary.left):
@@ -756,6 +426,7 @@ def criterion_as_pairs(expression, consider_as_foreign_keys=None,
return pairs
+
class AliasedRow(object):
"""Wrap a RowProxy with a translation map.
@@ -848,10 +519,10 @@ class ClauseAdapter(visitors.ReplacingCloningVisitor):
magic_flag = False
def replace(self, col):
- if not self.magic_flag and isinstance(col, expression.FromClause) and \
+ if not self.magic_flag and isinstance(col, FromClause) and \
self.selectable.is_derived_from(col):
return self.selectable
- elif not isinstance(col, expression.ColumnElement):
+ elif not isinstance(col, ColumnElement):
return None
elif self.include_fn and not self.include_fn(col):
return None
@@ -903,7 +574,7 @@ class ColumnAdapter(ClauseAdapter):
c = self.adapt_clause(col)
# anonymize labels in case they have a hardcoded name
- if isinstance(c, expression.Label):
+ if isinstance(c, Label):
c = c.label(None)
# adapt_required used by eager loading to indicate that
@@ -927,3 +598,4 @@ class ColumnAdapter(ClauseAdapter):
def __setstate__(self, state):
self.__dict__.update(state)
self.columns = util.PopulateDict(self._locate_col)
+
diff --git a/lib/sqlalchemy/sql/visitors.py b/lib/sqlalchemy/sql/visitors.py
index 7b729bf7f..d9ad04fc0 100644
--- a/lib/sqlalchemy/sql/visitors.py
+++ b/lib/sqlalchemy/sql/visitors.py
@@ -1,5 +1,5 @@
# sql/visitors.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/testing/__init__.py b/lib/sqlalchemy/testing/__init__.py
index a87829499..8ad856e2b 100644
--- a/lib/sqlalchemy/testing/__init__.py
+++ b/lib/sqlalchemy/testing/__init__.py
@@ -1,3 +1,8 @@
+# testing/__init__.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
from .warnings import testing_warn, assert_warnings, resetwarnings
@@ -11,7 +16,7 @@ from .exclusions import db_spec, _is_excluded, fails_if, skip_if, future,\
from .assertions import emits_warning, emits_warning_on, uses_deprecated, \
eq_, ne_, is_, is_not_, startswith_, assert_raises, \
assert_raises_message, AssertsCompiledSQL, ComparesTables, \
- AssertsExecutionResults
+ AssertsExecutionResults, expect_deprecated
from .util import run_as_contextmanager, rowset, fail, provide_metadata, adict
diff --git a/lib/sqlalchemy/testing/assertions.py b/lib/sqlalchemy/testing/assertions.py
index 96a8bc023..61649e5e3 100644
--- a/lib/sqlalchemy/testing/assertions.py
+++ b/lib/sqlalchemy/testing/assertions.py
@@ -1,8 +1,14 @@
+# testing/assertions.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
from __future__ import absolute_import
from . import util as testutil
from sqlalchemy import pool, orm, util
-from sqlalchemy.engine import default, create_engine
+from sqlalchemy.engine import default, create_engine, url
from sqlalchemy import exc as sa_exc
from sqlalchemy.util import decorator
from sqlalchemy import types as sqltypes, schema
@@ -92,30 +98,36 @@ def uses_deprecated(*messages):
@decorator
def decorate(fn, *args, **kw):
- # todo: should probably be strict about this, too
- filters = [dict(action='ignore',
- category=sa_exc.SAPendingDeprecationWarning)]
- if not messages:
- filters.append(dict(action='ignore',
- category=sa_exc.SADeprecationWarning))
- else:
- filters.extend(
- [dict(action='ignore',
- message=message,
- category=sa_exc.SADeprecationWarning)
- for message in
- [(m.startswith('//') and
- ('Call to deprecated function ' + m[2:]) or m)
- for m in messages]])
-
- for f in filters:
- warnings.filterwarnings(**f)
- try:
+ with expect_deprecated(*messages):
return fn(*args, **kw)
- finally:
- resetwarnings()
return decorate
+@contextlib.contextmanager
+def expect_deprecated(*messages):
+ # todo: should probably be strict about this, too
+ filters = [dict(action='ignore',
+ category=sa_exc.SAPendingDeprecationWarning)]
+ if not messages:
+ filters.append(dict(action='ignore',
+ category=sa_exc.SADeprecationWarning))
+ else:
+ filters.extend(
+ [dict(action='ignore',
+ message=message,
+ category=sa_exc.SADeprecationWarning)
+ for message in
+ [(m.startswith('//') and
+ ('Call to deprecated function ' + m[2:]) or m)
+ for m in messages]])
+
+ for f in filters:
+ warnings.filterwarnings(**f)
+ try:
+ yield
+ finally:
+ resetwarnings()
+
+
def global_cleanup_assertions():
"""Check things that have to be finalized at the end of a test suite.
@@ -181,7 +193,8 @@ class AssertsCompiledSQL(object):
checkparams=None, dialect=None,
checkpositional=None,
use_default_dialect=False,
- allow_dialect_select=False):
+ allow_dialect_select=False,
+ literal_binds=False):
if use_default_dialect:
dialect = default.DefaultDialect()
elif allow_dialect_select:
@@ -195,26 +208,36 @@ class AssertsCompiledSQL(object):
elif dialect == 'default':
dialect = default.DefaultDialect()
elif isinstance(dialect, util.string_types):
- dialect = create_engine("%s://" % dialect).dialect
+ dialect = url.URL(dialect).get_dialect()()
kw = {}
+ compile_kwargs = {}
+
if params is not None:
kw['column_keys'] = list(params)
+ if literal_binds:
+ compile_kwargs['literal_binds'] = True
+
if isinstance(clause, orm.Query):
context = clause._compile_context()
context.statement.use_labels = True
clause = context.statement
+ if compile_kwargs:
+ kw['compile_kwargs'] = compile_kwargs
+
c = clause.compile(dialect=dialect, **kw)
param_str = repr(getattr(c, 'params', {}))
if util.py3k:
param_str = param_str.encode('utf-8').decode('ascii', 'ignore')
+ print(("\nSQL String:\n" + util.text_type(c) + param_str).encode('utf-8'))
+ else:
+ print("\nSQL String:\n" + util.text_type(c).encode('utf-8') + param_str)
- print("\nSQL String:\n" + util.text_type(c) + param_str)
cc = re.sub(r'[\n\t]', '', util.text_type(c))
diff --git a/lib/sqlalchemy/testing/assertsql.py b/lib/sqlalchemy/testing/assertsql.py
index a6b63b2c3..3e0d4c9d3 100644
--- a/lib/sqlalchemy/testing/assertsql.py
+++ b/lib/sqlalchemy/testing/assertsql.py
@@ -1,3 +1,8 @@
+# testing/assertsql.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
from ..engine.default import DefaultDialect
from .. import util
diff --git a/lib/sqlalchemy/testing/config.py b/lib/sqlalchemy/testing/config.py
index ae4f585e1..64f578dab 100644
--- a/lib/sqlalchemy/testing/config.py
+++ b/lib/sqlalchemy/testing/config.py
@@ -1,2 +1,8 @@
+# testing/config.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
requirements = None
db = None
diff --git a/lib/sqlalchemy/testing/engines.py b/lib/sqlalchemy/testing/engines.py
index 29c8b6a03..d85771f8a 100644
--- a/lib/sqlalchemy/testing/engines.py
+++ b/lib/sqlalchemy/testing/engines.py
@@ -1,3 +1,9 @@
+# testing/engines.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
from __future__ import absolute_import
import types
@@ -26,6 +32,9 @@ class ConnectionKiller(object):
def checkout(self, dbapi_con, con_record, con_proxy):
self.proxy_refs[con_proxy] = True
+ def invalidate(self, dbapi_con, con_record, exception):
+ self.conns.discard((dbapi_con, con_record))
+
def _safe(self, fn):
try:
fn()
@@ -43,7 +52,7 @@ class ConnectionKiller(object):
def close_all(self):
for rec in list(self.proxy_refs):
- if rec is not None:
+ if rec is not None and rec.is_valid:
self._safe(rec._close)
def _after_test_ctx(self):
@@ -52,7 +61,7 @@ class ConnectionKiller(object):
# is collecting in finalize_fairy, deadlock.
# not sure if this should be if pypy/jython only.
# note that firebird/fdb definitely needs this though
- for conn, rec in self.conns:
+ for conn, rec in list(self.conns):
self._safe(conn.rollback)
def _stop_test_ctx(self):
@@ -72,10 +81,10 @@ class ConnectionKiller(object):
def _stop_test_ctx_aggressive(self):
self.close_all()
- for conn, rec in self.conns:
+ for conn, rec in list(self.conns):
self._safe(conn.close)
rec.connection = None
-
+
self.conns = set()
for rec in list(self.testing_engines):
rec.dispose()
@@ -220,6 +229,7 @@ def testing_engine(url=None, options=None):
if use_reaper:
event.listen(engine.pool, 'connect', testing_reaper.connect)
event.listen(engine.pool, 'checkout', testing_reaper.checkout)
+ event.listen(engine.pool, 'invalidate', testing_reaper.invalidate)
testing_reaper.add_engine(engine)
return engine
diff --git a/lib/sqlalchemy/testing/entities.py b/lib/sqlalchemy/testing/entities.py
index c0dd58650..0553e0e22 100644
--- a/lib/sqlalchemy/testing/entities.py
+++ b/lib/sqlalchemy/testing/entities.py
@@ -1,3 +1,9 @@
+# testing/entities.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
import sqlalchemy as sa
from sqlalchemy import exc as sa_exc
diff --git a/lib/sqlalchemy/testing/exclusions.py b/lib/sqlalchemy/testing/exclusions.py
index f580f3fde..f868f6396 100644
--- a/lib/sqlalchemy/testing/exclusions.py
+++ b/lib/sqlalchemy/testing/exclusions.py
@@ -1,3 +1,8 @@
+# testing/exclusions.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
import operator
@@ -19,6 +24,11 @@ class skip_if(object):
def enabled(self):
return not self.predicate()
+ def __add__(self, other):
+ def decorate(fn):
+ return other(self(fn))
+ return decorate
+
@contextlib.contextmanager
def fail_if(self, name='block'):
try:
@@ -93,7 +103,14 @@ class Predicate(object):
elif isinstance(predicate, tuple):
return SpecPredicate(*predicate)
elif isinstance(predicate, util.string_types):
- return SpecPredicate(predicate, None, None)
+ tokens = predicate.split(" ", 2)
+ op = spec = None
+ db = tokens.pop(0)
+ if tokens:
+ op = tokens.pop(0)
+ if tokens:
+ spec = tuple(int(d) for d in tokens.pop(0).split("."))
+ return SpecPredicate(db, op, spec)
elif util.callable(predicate):
return LambdaPredicate(predicate)
else:
diff --git a/lib/sqlalchemy/testing/fixtures.py b/lib/sqlalchemy/testing/fixtures.py
index daa779ae3..464a723d2 100644
--- a/lib/sqlalchemy/testing/fixtures.py
+++ b/lib/sqlalchemy/testing/fixtures.py
@@ -1,3 +1,9 @@
+# testing/fixtures.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
from . import config
from . import assertions, schema
from .util import adict
diff --git a/lib/sqlalchemy/testing/mock.py b/lib/sqlalchemy/testing/mock.py
index 650962384..18ba053ea 100644
--- a/lib/sqlalchemy/testing/mock.py
+++ b/lib/sqlalchemy/testing/mock.py
@@ -1,13 +1,19 @@
+# testing/mock.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
"""Import stub for mock library.
"""
from __future__ import absolute_import
from ..util import py33
if py33:
- from unittest.mock import MagicMock, Mock, call
+ from unittest.mock import MagicMock, Mock, call, patch
else:
try:
- from mock import MagicMock, Mock, call
+ from mock import MagicMock, Mock, call, patch
except ImportError:
raise ImportError(
"SQLAlchemy's test suite requires the "
diff --git a/lib/sqlalchemy/testing/pickleable.py b/lib/sqlalchemy/testing/pickleable.py
index 09d51b5fa..9a41034bf 100644
--- a/lib/sqlalchemy/testing/pickleable.py
+++ b/lib/sqlalchemy/testing/pickleable.py
@@ -1,3 +1,9 @@
+# testing/pickleable.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
"""Classes used in pickling tests, need to be at the module level for
unpickling.
"""
diff --git a/lib/sqlalchemy/testing/plugin/noseplugin.py b/lib/sqlalchemy/testing/plugin/noseplugin.py
index b3cd3a4e3..27a028cd4 100644
--- a/lib/sqlalchemy/testing/plugin/noseplugin.py
+++ b/lib/sqlalchemy/testing/plugin/noseplugin.py
@@ -1,3 +1,9 @@
+# plugin/noseplugin.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
"""Enhance nose with extra options and behaviors for running SQLAlchemy tests.
When running ./sqla_nose.py, this module is imported relative to the
@@ -351,6 +357,8 @@ class NoseSQLAlchemy(Plugin):
return ""
def wantFunction(self, fn):
+ if fn.__module__ is None:
+ return False
if fn.__module__.startswith('sqlalchemy.testing'):
return False
@@ -385,8 +393,9 @@ class NoseSQLAlchemy(Plugin):
check.reason if check.reason
else
(
- "'%s' unsupported on DB implementation '%s'" % (
- cls.__name__, config.db.name
+ "'%s' unsupported on DB implementation '%s' == %s" % (
+ cls.__name__, config.db.name,
+ config.db.dialect.server_version_info
)
)
)
@@ -395,16 +404,18 @@ class NoseSQLAlchemy(Plugin):
spec = exclusions.db_spec(*cls.__unsupported_on__)
if spec(config.db):
raise SkipTest(
- "'%s' unsupported on DB implementation '%s'" % (
- cls.__name__, config.db.name)
+ "'%s' unsupported on DB implementation '%s' == %s" % (
+ cls.__name__, config.db.name,
+ config.db.dialect.server_version_info)
)
if getattr(cls, '__only_on__', None):
spec = exclusions.db_spec(*util.to_list(cls.__only_on__))
if not spec(config.db):
raise SkipTest(
- "'%s' unsupported on DB implementation '%s'" % (
- cls.__name__, config.db.name)
+ "'%s' unsupported on DB implementation '%s' == %s" % (
+ cls.__name__, config.db.name,
+ config.db.dialect.server_version_info)
)
if getattr(cls, '__skip_if__', False):
diff --git a/lib/sqlalchemy/testing/profiling.py b/lib/sqlalchemy/testing/profiling.py
index bda44d80c..fa2490649 100644
--- a/lib/sqlalchemy/testing/profiling.py
+++ b/lib/sqlalchemy/testing/profiling.py
@@ -1,3 +1,9 @@
+# testing/profiling.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
"""Profiling support for unit and performance tests.
These are special purpose profiling methods which operate
@@ -45,12 +51,10 @@ def profiled(target=None, **target_opts):
if target is None:
target = 'anonymous_target'
- filename = "%s.prof" % target
-
@decorator
def decorate(fn, *args, **kw):
elapsed, load_stats, result = _profile(
- filename, fn, *args, **kw)
+ fn, *args, **kw)
graphic = target_opts.get('graphic', profile_config['graphic'])
if graphic:
@@ -60,8 +64,8 @@ def profiled(target=None, **target_opts):
if report:
sort_ = target_opts.get('sort', profile_config['sort'])
limit = target_opts.get('limit', profile_config['limit'])
- print(("Profile report for target '%s' (%s)" % (
- target, filename)
+ print(("Profile report for target '%s'" % (
+ target, )
))
stats = load_stats()
@@ -81,7 +85,6 @@ def profiled(target=None, **target_opts):
if print_callees:
stats.print_callees()
- os.unlink(filename)
return result
return decorate
@@ -162,6 +165,15 @@ class ProfileStatsFile(object):
per_platform['current_count'] += 1
return result
+ def replace(self, callcount):
+ test_key = _current_test
+ per_fn = self.data[test_key]
+ per_platform = per_fn[self.platform_key]
+ counts = per_platform['counts']
+ counts[-1] = callcount
+ if self.write:
+ self._write()
+
def _header(self):
return \
"# %s\n"\
@@ -263,16 +275,19 @@ def function_call_count(variance=0.05):
if expected_count:
deviance = int(callcount * variance)
- if abs(callcount - expected_count) > deviance:
- raise AssertionError(
- "Adjusted function call count %s not within %s%% "
- "of expected %s. (Delete line %d of file %s to "
- "regenerate this callcount, when tests are run "
- "with --write-profiles.)"
- % (
- callcount, (variance * 100),
- expected_count, line_no,
- _profile_stats.fname))
+ failed = abs(callcount - expected_count) > deviance
+
+ if failed:
+ if _profile_stats.write:
+ _profile_stats.replace(callcount)
+ else:
+ raise AssertionError(
+ "Adjusted function call count %s not within %s%% "
+ "of expected %s. Rerun with --write-profiles to "
+ "regenerate this callcount."
+ % (
+ callcount, (variance * 100),
+ expected_count))
return fn_result
return update_wrapper(wrap, fn)
return decorate
diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py
index d301dc69f..706d6d060 100644
--- a/lib/sqlalchemy/testing/requirements.py
+++ b/lib/sqlalchemy/testing/requirements.py
@@ -1,3 +1,9 @@
+# testing/requirements.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
"""Global database feature support policy.
Provides decorators to mark tests requiring specific feature support from the
@@ -133,6 +139,20 @@ class SuiteRequirements(Requirements):
return exclusions.open()
@property
+ def fetch_rows_post_commit(self):
+ """target platform will allow cursor.fetchone() to proceed after a
+ COMMIT.
+
+ Typically this refers to an INSERT statement with RETURNING which
+ is invoked within "autocommit". If the row can be returned
+ after the autocommit, then this rule can be open.
+
+ """
+
+ return exclusions.open()
+
+
+ @property
def empty_inserts(self):
"""target platform supports INSERT with no values, i.e.
INSERT DEFAULT VALUES or equivalent."""
@@ -296,6 +316,15 @@ class SuiteRequirements(Requirements):
return exclusions.closed()
@property
+ def datetime_literals(self):
+ """target dialect supports rendering of a date, time, or datetime as a
+ literal string, e.g. via the TypeEngine.literal_processor() method.
+
+ """
+
+ return exclusions.closed()
+
+ @property
def datetime(self):
"""target dialect supports representation of Python
datetime.datetime() objects."""
@@ -379,6 +408,14 @@ class SuiteRequirements(Requirements):
return exclusions.closed()
@property
+ def precision_generic_float_type(self):
+ """target backend will return native floating point numbers with at
+ least seven decimal places when using the generic Float type.
+
+ """
+ return exclusions.open()
+
+ @property
def floats_to_four_decimals(self):
"""target backend can return a floating-point number with four
significant digits (such as 15.7563) accurately
@@ -388,6 +425,16 @@ class SuiteRequirements(Requirements):
return exclusions.open()
@property
+ def fetch_null_from_numeric(self):
+ """target backend doesn't crash when you try to select a NUMERIC
+ value that has a value of NULL.
+
+ Added to support Pyodbc bug #351.
+ """
+
+ return exclusions.open()
+
+ @property
def text_type(self):
"""Target database must support an unbounded Text() "
"type such as TEXT or CLOB"""
diff --git a/lib/sqlalchemy/testing/runner.py b/lib/sqlalchemy/testing/runner.py
index 2bdbaebd1..19aba53df 100644
--- a/lib/sqlalchemy/testing/runner.py
+++ b/lib/sqlalchemy/testing/runner.py
@@ -1,4 +1,9 @@
#!/usr/bin/env python
+# testing/runner.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
Nose test runner module.
diff --git a/lib/sqlalchemy/testing/schema.py b/lib/sqlalchemy/testing/schema.py
index 025bbaabe..ec0085219 100644
--- a/lib/sqlalchemy/testing/schema.py
+++ b/lib/sqlalchemy/testing/schema.py
@@ -1,3 +1,8 @@
+# testing/schema.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
from . import exclusions
from .. import schema, event
diff --git a/lib/sqlalchemy/testing/suite/test_insert.py b/lib/sqlalchemy/testing/suite/test_insert.py
index e671eeb7a..5732e37ec 100644
--- a/lib/sqlalchemy/testing/suite/test_insert.py
+++ b/lib/sqlalchemy/testing/suite/test_insert.py
@@ -56,8 +56,9 @@ class LastrowidTest(fixtures.TablesTest):
[pk]
)
- @exclusions.fails_if(lambda: util.pypy, "lastrowid not maintained after "
- "connection close")
+ # failed on pypy1.9 but seems to be OK on pypy 2.1
+ #@exclusions.fails_if(lambda: util.pypy, "lastrowid not maintained after "
+ # "connection close")
@requirements.dbapi_lastrowid
def test_native_lastrowid_autoinc(self):
r = config.db.execute(
@@ -81,6 +82,10 @@ class InsertBehaviorTest(fixtures.TablesTest):
test_needs_autoincrement=True),
Column('data', String(50))
)
+ Table('manual_pk', metadata,
+ Column('id', Integer, primary_key=True, autoincrement=False),
+ Column('data', String(50))
+ )
def test_autoclose_on_insert(self):
if requirements.returning.enabled:
@@ -123,13 +128,13 @@ class InsertBehaviorTest(fixtures.TablesTest):
@requirements.insert_from_select
def test_insert_from_select(self):
- table = self.tables.autoinc_pk
+ table = self.tables.manual_pk
config.db.execute(
table.insert(),
[
- dict(data="data1"),
- dict(data="data2"),
- dict(data="data3"),
+ dict(id=1, data="data1"),
+ dict(id=2, data="data2"),
+ dict(id=3, data="data3"),
]
)
@@ -171,7 +176,8 @@ class ReturningTest(fixtures.TablesTest):
Column('data', String(50))
)
- def test_explicit_returning_pk(self):
+ @requirements.fetch_rows_post_commit
+ def test_explicit_returning_pk_autocommit(self):
engine = config.db
table = self.tables.autoinc_pk
r = engine.execute(
@@ -183,6 +189,19 @@ class ReturningTest(fixtures.TablesTest):
fetched_pk = config.db.scalar(select([table.c.id]))
eq_(fetched_pk, pk)
+ def test_explicit_returning_pk_no_autocommit(self):
+ engine = config.db
+ table = self.tables.autoinc_pk
+ with engine.begin() as conn:
+ r = conn.execute(
+ table.insert().returning(
+ table.c.id),
+ data="some data"
+ )
+ pk = r.first()[0]
+ fetched_pk = config.db.scalar(select([table.c.id]))
+ eq_(fetched_pk, pk)
+
def test_autoincrement_on_insert_implcit_returning(self):
config.db.execute(
diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py
index 5a8a54c46..9f737bc64 100644
--- a/lib/sqlalchemy/testing/suite/test_reflection.py
+++ b/lib/sqlalchemy/testing/suite/test_reflection.py
@@ -147,6 +147,7 @@ class ComponentReflectionTest(fixtures.TablesTest):
table_names = insp.get_view_names(schema)
table_names.sort()
answer = ['email_addresses_v', 'users_v']
+ eq_(sorted(table_names), answer)
else:
table_names = insp.get_table_names(schema,
order_by=order_by)
@@ -180,6 +181,12 @@ class ComponentReflectionTest(fixtures.TablesTest):
def test_get_view_names_with_schema(self):
self._test_get_table_names('test_schema', table_type='view')
+ @testing.requires.table_reflection
+ @testing.requires.view_reflection
+ def test_get_tables_and_views(self):
+ self._test_get_table_names()
+ self._test_get_table_names(table_type='view')
+
def _test_get_columns(self, schema=None, table_type='table'):
meta = MetaData(testing.db)
users, addresses, dingalings = self.tables.users, \
@@ -448,6 +455,7 @@ class ComponentReflectionTest(fixtures.TablesTest):
def test_get_table_oid_with_schema(self):
self._test_get_table_oid('users', schema='test_schema')
+ @testing.requires.table_reflection
@testing.provide_metadata
def test_autoincrement_col(self):
"""test that 'autoincrement' is reflected according to sqla's policy.
diff --git a/lib/sqlalchemy/testing/suite/test_types.py b/lib/sqlalchemy/testing/suite/test_types.py
index 0de462eb7..a6e937e8e 100644
--- a/lib/sqlalchemy/testing/suite/test_types.py
+++ b/lib/sqlalchemy/testing/suite/test_types.py
@@ -5,7 +5,7 @@ from ..assertions import eq_
from ..config import requirements
from sqlalchemy import Integer, Unicode, UnicodeText, select
from sqlalchemy import Date, DateTime, Time, MetaData, String, \
- Text, Numeric, Float
+ Text, Numeric, Float, literal
from ..schema import Table, Column
from ... import testing
import decimal
@@ -13,7 +13,34 @@ import datetime
from ...util import u
from ... import util
-class _UnicodeFixture(object):
+
+class _LiteralRoundTripFixture(object):
+ @testing.provide_metadata
+ def _literal_round_trip(self, type_, input_, output, filter_=None):
+ """Test literal rendering."""
+
+ # for literal, we test the literal render in an INSERT
+ # into a typed column. we can then SELECT it back as it's
+ # official type; ideally we'd be able to use CAST here
+ # but MySQL in particular can't CAST fully
+ t = Table('t', self.metadata, Column('x', type_))
+ t.create()
+
+ for value in input_:
+ ins = t.insert().values(x=literal(value)).compile(
+ dialect=testing.db.dialect,
+ compile_kwargs=dict(literal_binds=True)
+ )
+ testing.db.execute(ins)
+
+ for row in t.select().execute():
+ value = row[0]
+ if filter_ is not None:
+ value = filter_(value)
+ assert value in output
+
+
+class _UnicodeFixture(_LiteralRoundTripFixture):
__requires__ = 'unicode_data',
data = u("Alors vous imaginez ma surprise, au lever du jour, "\
@@ -87,6 +114,9 @@ class _UnicodeFixture(object):
).first()
eq_(row, (u(''),))
+ def test_literal(self):
+ self._literal_round_trip(self.datatype, [self.data], [self.data])
+
class UnicodeVarcharTest(_UnicodeFixture, fixtures.TablesTest):
__requires__ = 'unicode_data',
@@ -107,7 +137,7 @@ class UnicodeTextTest(_UnicodeFixture, fixtures.TablesTest):
def test_empty_strings_text(self):
self._test_empty_strings()
-class TextTest(fixtures.TablesTest):
+class TextTest(_LiteralRoundTripFixture, fixtures.TablesTest):
@classmethod
def define_tables(cls, metadata):
Table('text_table', metadata,
@@ -140,8 +170,18 @@ class TextTest(fixtures.TablesTest):
).first()
eq_(row, ('',))
+ def test_literal(self):
+ self._literal_round_trip(Text, ["some text"], ["some text"])
-class StringTest(fixtures.TestBase):
+ def test_literal_quoting(self):
+ data = '''some 'text' hey "hi there" that's text'''
+ self._literal_round_trip(Text, [data], [data])
+
+ def test_literal_backslashes(self):
+ data = r'backslash one \ backslash two \\ end'
+ self._literal_round_trip(Text, [data], [data])
+
+class StringTest(_LiteralRoundTripFixture, fixtures.TestBase):
@requirements.unbounded_varchar
def test_nolength_string(self):
metadata = MetaData()
@@ -152,8 +192,19 @@ class StringTest(fixtures.TestBase):
foo.create(config.db)
foo.drop(config.db)
+ def test_literal(self):
+ self._literal_round_trip(String(40), ["some text"], ["some text"])
+
+ def test_literal_quoting(self):
+ data = '''some 'text' hey "hi there" that's text'''
+ self._literal_round_trip(String(40), [data], [data])
+
+ def test_literal_backslashes(self):
+ data = r'backslash one \ backslash two \\ end'
+ self._literal_round_trip(Text, [data], [data])
-class _DateFixture(object):
+
+class _DateFixture(_LiteralRoundTripFixture):
compare = None
@classmethod
@@ -198,6 +249,12 @@ class _DateFixture(object):
).first()
eq_(row, (None,))
+ @testing.requires.datetime_literals
+ def test_literal(self):
+ compare = self.compare or self.data
+ self._literal_round_trip(self.datatype, [self.data], [compare])
+
+
class DateTimeTest(_DateFixture, fixtures.TablesTest):
__requires__ = 'datetime',
@@ -247,7 +304,12 @@ class DateHistoricTest(_DateFixture, fixtures.TablesTest):
datatype = Date
data = datetime.date(1727, 4, 1)
-class NumericTest(fixtures.TestBase):
+
+class IntegerTest(_LiteralRoundTripFixture, fixtures.TestBase):
+ def test_literal(self):
+ self._literal_round_trip(Integer, [5], [5])
+
+class NumericTest(_LiteralRoundTripFixture, fixtures.TestBase):
@testing.emits_warning(r".*does \*not\* support Decimal objects natively")
@testing.provide_metadata
@@ -269,18 +331,69 @@ class NumericTest(fixtures.TestBase):
[str(x) for x in output],
)
+
+ @testing.emits_warning(r".*does \*not\* support Decimal objects natively")
+ def test_render_literal_numeric(self):
+ self._literal_round_trip(
+ Numeric(precision=8, scale=4),
+ [15.7563, decimal.Decimal("15.7563")],
+ [decimal.Decimal("15.7563")],
+ )
+
+ @testing.emits_warning(r".*does \*not\* support Decimal objects natively")
+ def test_render_literal_numeric_asfloat(self):
+ self._literal_round_trip(
+ Numeric(precision=8, scale=4, asdecimal=False),
+ [15.7563, decimal.Decimal("15.7563")],
+ [15.7563],
+ )
+
+ def test_render_literal_float(self):
+ self._literal_round_trip(
+ Float(4),
+ [15.7563, decimal.Decimal("15.7563")],
+ [15.7563,],
+ filter_=lambda n: n is not None and round(n, 5) or None
+ )
+
+
+ @testing.requires.precision_generic_float_type
+ def test_float_custom_scale(self):
+ self._do_test(
+ Float(None, decimal_return_scale=7, asdecimal=True),
+ [15.7563827, decimal.Decimal("15.7563827")],
+ [decimal.Decimal("15.7563827"),],
+ check_scale=True
+ )
+
def test_numeric_as_decimal(self):
self._do_test(
Numeric(precision=8, scale=4),
- [15.7563, decimal.Decimal("15.7563"), None],
- [decimal.Decimal("15.7563"), None],
+ [15.7563, decimal.Decimal("15.7563")],
+ [decimal.Decimal("15.7563")],
)
def test_numeric_as_float(self):
self._do_test(
Numeric(precision=8, scale=4, asdecimal=False),
- [15.7563, decimal.Decimal("15.7563"), None],
- [15.7563, None],
+ [15.7563, decimal.Decimal("15.7563")],
+ [15.7563],
+ )
+
+ @testing.requires.fetch_null_from_numeric
+ def test_numeric_null_as_decimal(self):
+ self._do_test(
+ Numeric(precision=8, scale=4),
+ [None],
+ [None],
+ )
+
+ @testing.requires.fetch_null_from_numeric
+ def test_numeric_null_as_float(self):
+ self._do_test(
+ Numeric(precision=8, scale=4, asdecimal=False),
+ [None],
+ [None],
)
@testing.requires.floats_to_four_decimals
@@ -291,6 +404,7 @@ class NumericTest(fixtures.TestBase):
[decimal.Decimal("15.7563"), None],
)
+
def test_float_as_float(self):
self._do_test(
Float(precision=8),
@@ -299,6 +413,7 @@ class NumericTest(fixtures.TestBase):
filter_=lambda n: n is not None and round(n, 5) or None
)
+
@testing.requires.precision_numerics_general
def test_precision_decimal(self):
numbers = set([
@@ -313,6 +428,7 @@ class NumericTest(fixtures.TestBase):
numbers,
)
+
@testing.requires.precision_numerics_enotation_large
def test_enotation_decimal(self):
"""test exceedingly small decimals.
@@ -342,6 +458,7 @@ class NumericTest(fixtures.TestBase):
numbers
)
+
@testing.requires.precision_numerics_enotation_large
def test_enotation_decimal_large(self):
"""test exceedingly large decimals.
@@ -389,7 +506,7 @@ class NumericTest(fixtures.TestBase):
__all__ = ('UnicodeVarcharTest', 'UnicodeTextTest',
'DateTest', 'DateTimeTest', 'TextTest',
- 'NumericTest',
+ 'NumericTest', 'IntegerTest',
'DateTimeHistoricTest', 'DateTimeCoercedToDateTimeTest',
'TimeMicrosecondsTest', 'TimeTest', 'DateTimeMicrosecondsTest',
'DateHistoricTest', 'StringTest')
diff --git a/lib/sqlalchemy/testing/util.py b/lib/sqlalchemy/testing/util.py
index 1288902f2..bde11a356 100644
--- a/lib/sqlalchemy/testing/util.py
+++ b/lib/sqlalchemy/testing/util.py
@@ -1,3 +1,9 @@
+# testing/util.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
from ..util import jython, pypy, defaultdict, decorator, py2k
import decimal
import gc
diff --git a/lib/sqlalchemy/testing/warnings.py b/lib/sqlalchemy/testing/warnings.py
index 6193acd88..74a8933a6 100644
--- a/lib/sqlalchemy/testing/warnings.py
+++ b/lib/sqlalchemy/testing/warnings.py
@@ -1,3 +1,9 @@
+# testing/warnings.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
from __future__ import absolute_import
import warnings
diff --git a/lib/sqlalchemy/types.py b/lib/sqlalchemy/types.py
index 017c8dd04..3994bd4a8 100644
--- a/lib/sqlalchemy/types.py
+++ b/lib/sqlalchemy/types.py
@@ -1,17 +1,14 @@
-# sqlalchemy/types.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# types.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-"""defines genericized SQL types, each represented by a subclass of
-:class:`~sqlalchemy.types.AbstractType`. Dialects define further subclasses
-of these types.
-
-For more information see the SQLAlchemy documentation on types.
+"""Compatibility namespace for sqlalchemy.sql.types.
"""
-__all__ = ['TypeEngine', 'TypeDecorator', 'AbstractType', 'UserDefinedType',
+
+__all__ = ['TypeEngine', 'TypeDecorator', 'UserDefinedType',
'INT', 'CHAR', 'VARCHAR', 'NCHAR', 'NVARCHAR', 'TEXT', 'Text',
'FLOAT', 'NUMERIC', 'REAL', 'DECIMAL', 'TIMESTAMP', 'DATETIME',
'CLOB', 'BLOB', 'BINARY', 'VARBINARY', 'BOOLEAN', 'BIGINT',
@@ -20,2478 +17,61 @@ __all__ = ['TypeEngine', 'TypeDecorator', 'AbstractType', 'UserDefinedType',
'Date', 'Time', 'LargeBinary', 'Binary', 'Boolean', 'Unicode',
'Concatenable', 'UnicodeText', 'PickleType', 'Interval', 'Enum']
-import datetime as dt
-import codecs
-
-from . import exc, schema, util, processors, events, event
-from .sql import operators
-from .sql.expression import _DefaultColumnComparator
-from .util import pickle
-from .sql.visitors import Visitable
-import decimal
-default = util.importlater("sqlalchemy.engine", "default")
-
-NoneType = type(None)
-if util.jython:
- import array
-
-
-class AbstractType(Visitable):
- """Base for all types - not needed except for backwards
- compatibility."""
-
-
-class TypeEngine(AbstractType):
- """Base for built-in types."""
-
- class Comparator(_DefaultColumnComparator):
- """Base class for custom comparison operations defined at the
- type level. See :attr:`.TypeEngine.comparator_factory`.
-
- The public base class for :class:`.TypeEngine.Comparator`
- is :class:`.ColumnOperators`.
-
- """
-
- def __init__(self, expr):
- self.expr = expr
-
- def __reduce__(self):
- return _reconstitute_comparator, (self.expr, )
-
- hashable = True
- """Flag, if False, means values from this type aren't hashable.
-
- Used by the ORM when uniquing result lists.
-
- """
-
- comparator_factory = Comparator
- """A :class:`.TypeEngine.Comparator` class which will apply
- to operations performed by owning :class:`.ColumnElement` objects.
-
- The :attr:`.comparator_factory` attribute is a hook consulted by
- the core expression system when column and SQL expression operations
- are performed. When a :class:`.TypeEngine.Comparator` class is
- associated with this attribute, it allows custom re-definition of
- all existing operators, as well as definition of new operators.
- Existing operators include those provided by Python operator overloading
- such as :meth:`.operators.ColumnOperators.__add__` and
- :meth:`.operators.ColumnOperators.__eq__`,
- those provided as standard
- attributes of :class:`.operators.ColumnOperators` such as
- :meth:`.operators.ColumnOperators.like`
- and :meth:`.operators.ColumnOperators.in_`.
-
- Rudimentary usage of this hook is allowed through simple subclassing
- of existing types, or alternatively by using :class:`.TypeDecorator`.
- See the documentation section :ref:`types_operators` for examples.
-
- .. versionadded:: 0.8 The expression system was enhanced to support
- customization of operators on a per-type level.
-
- """
-
- def copy_value(self, value):
- return value
-
- def bind_processor(self, dialect):
- """Return a conversion function for processing bind values.
-
- Returns a callable which will receive a bind parameter value
- as the sole positional argument and will return a value to
- send to the DB-API.
-
- If processing is not necessary, the method should return ``None``.
-
- :param dialect: Dialect instance in use.
-
- """
- return None
-
- def result_processor(self, dialect, coltype):
- """Return a conversion function for processing result row values.
-
- Returns a callable which will receive a result row column
- value as the sole positional argument and will return a value
- to return to the user.
-
- If processing is not necessary, the method should return ``None``.
-
- :param dialect: Dialect instance in use.
-
- :param coltype: DBAPI coltype argument received in cursor.description.
-
- """
- return None
-
- def column_expression(self, colexpr):
- """Given a SELECT column expression, return a wrapping SQL expression.
-
- This is typically a SQL function that wraps a column expression
- as rendered in the columns clause of a SELECT statement.
- It is used for special data types that require
- columns to be wrapped in some special database function in order
- to coerce the value before being sent back to the application.
- It is the SQL analogue of the :meth:`.TypeEngine.result_processor`
- method.
-
- The method is evaluated at statement compile time, as opposed
- to statement construction time.
-
- See also:
-
- :ref:`types_sql_value_processing`
-
- """
-
- return None
-
- @util.memoized_property
- def _has_column_expression(self):
- """memoized boolean, check if column_expression is implemented.
-
- Allows the method to be skipped for the vast majority of expression
- types that don't use this feature.
-
- """
-
- return self.__class__.column_expression.__code__ \
- is not TypeEngine.column_expression.__code__
-
- def bind_expression(self, bindvalue):
- """"Given a bind value (i.e. a :class:`.BindParameter` instance),
- return a SQL expression in its place.
-
- This is typically a SQL function that wraps the existing bound
- parameter within the statement. It is used for special data types
- that require literals being wrapped in some special database function
- in order to coerce an application-level value into a database-specific
- format. It is the SQL analogue of the
- :meth:`.TypeEngine.bind_processor` method.
-
- The method is evaluated at statement compile time, as opposed
- to statement construction time.
-
- Note that this method, when implemented, should always return
- the exact same structure, without any conditional logic, as it
- may be used in an executemany() call against an arbitrary number
- of bound parameter sets.
-
- See also:
-
- :ref:`types_sql_value_processing`
-
- """
- return None
-
- @util.memoized_property
- def _has_bind_expression(self):
- """memoized boolean, check if bind_expression is implemented.
-
- Allows the method to be skipped for the vast majority of expression
- types that don't use this feature.
-
- """
-
- return self.__class__.bind_expression.__code__ \
- is not TypeEngine.bind_expression.__code__
-
- def compare_values(self, x, y):
- """Compare two values for equality."""
-
- return x == y
-
- def get_dbapi_type(self, dbapi):
- """Return the corresponding type object from the underlying DB-API, if
- any.
-
- This can be useful for calling ``setinputsizes()``, for example.
-
- """
- return None
-
- @property
- def python_type(self):
- """Return the Python type object expected to be returned
- by instances of this type, if known.
-
- Basically, for those types which enforce a return type,
- or are known across the board to do such for all common
- DBAPIs (like ``int`` for example), will return that type.
-
- If a return type is not defined, raises
- ``NotImplementedError``.
-
- Note that any type also accommodates NULL in SQL which
- means you can also get back ``None`` from any type
- in practice.
-
- """
- raise NotImplementedError()
-
- def with_variant(self, type_, dialect_name):
- """Produce a new type object that will utilize the given
- type when applied to the dialect of the given name.
-
- e.g.::
-
- from sqlalchemy.types import String
- from sqlalchemy.dialects import mysql
-
- s = String()
-
- s = s.with_variant(mysql.VARCHAR(collation='foo'), 'mysql')
-
- The construction of :meth:`.TypeEngine.with_variant` is always
- from the "fallback" type to that which is dialect specific.
- The returned type is an instance of :class:`.Variant`, which
- itself provides a :meth:`~sqlalchemy.types.Variant.with_variant`
- that can be called repeatedly.
-
- :param type_: a :class:`.TypeEngine` that will be selected
- as a variant from the originating type, when a dialect
- of the given name is in use.
- :param dialect_name: base name of the dialect which uses
- this type. (i.e. ``'postgresql'``, ``'mysql'``, etc.)
-
- .. versionadded:: 0.7.2
-
- """
- return Variant(self, {dialect_name: type_})
-
- @util.memoized_property
- def _type_affinity(self):
- """Return a rudimental 'affinity' value expressing the general class
- of type."""
-
- typ = None
- for t in self.__class__.__mro__:
- if t is TypeEngine or t is UserDefinedType:
- return typ
- elif issubclass(t, TypeEngine):
- typ = t
- else:
- return self.__class__
-
- def dialect_impl(self, dialect):
- """Return a dialect-specific implementation for this
- :class:`.TypeEngine`.
-
- """
- try:
- return dialect._type_memos[self]['impl']
- except KeyError:
- return self._dialect_info(dialect)['impl']
-
- def _cached_bind_processor(self, dialect):
- """Return a dialect-specific bind processor for this type."""
-
- try:
- return dialect._type_memos[self]['bind']
- except KeyError:
- d = self._dialect_info(dialect)
- d['bind'] = bp = d['impl'].bind_processor(dialect)
- return bp
-
- def _cached_result_processor(self, dialect, coltype):
- """Return a dialect-specific result processor for this type."""
-
- try:
- return dialect._type_memos[self][coltype]
- except KeyError:
- d = self._dialect_info(dialect)
- # key assumption: DBAPI type codes are
- # constants. Else this dictionary would
- # grow unbounded.
- d[coltype] = rp = d['impl'].result_processor(dialect, coltype)
- return rp
-
- def _dialect_info(self, dialect):
- """Return a dialect-specific registry which
- caches a dialect-specific implementation, bind processing
- function, and one or more result processing functions."""
-
- if self in dialect._type_memos:
- return dialect._type_memos[self]
- else:
- impl = self._gen_dialect_impl(dialect)
- if impl is self:
- impl = self.adapt(type(self))
- # this can't be self, else we create a cycle
- assert impl is not self
- dialect._type_memos[self] = d = {'impl': impl}
- return d
-
- def _gen_dialect_impl(self, dialect):
- return dialect.type_descriptor(self)
-
- def adapt(self, cls, **kw):
- """Produce an "adapted" form of this type, given an "impl" class
- to work with.
-
- This method is used internally to associate generic
- types with "implementation" types that are specific to a particular
- dialect.
- """
- return util.constructor_copy(self, cls, **kw)
-
- def coerce_compared_value(self, op, value):
- """Suggest a type for a 'coerced' Python value in an expression.
-
- Given an operator and value, gives the type a chance
- to return a type which the value should be coerced into.
-
- The default behavior here is conservative; if the right-hand
- side is already coerced into a SQL type based on its
- Python type, it is usually left alone.
-
- End-user functionality extension here should generally be via
- :class:`.TypeDecorator`, which provides more liberal behavior in that
- it defaults to coercing the other side of the expression into this
- type, thus applying special Python conversions above and beyond those
- needed by the DBAPI to both ides. It also provides the public method
- :meth:`.TypeDecorator.coerce_compared_value` which is intended for
- end-user customization of this behavior.
-
- """
- _coerced_type = _type_map.get(type(value), NULLTYPE)
- if _coerced_type is NULLTYPE or _coerced_type._type_affinity \
- is self._type_affinity:
- return self
- else:
- return _coerced_type
-
- def _compare_type_affinity(self, other):
- return self._type_affinity is other._type_affinity
-
- def compile(self, dialect=None):
- """Produce a string-compiled form of this :class:`.TypeEngine`.
-
- When called with no arguments, uses a "default" dialect
- to produce a string result.
-
- :param dialect: a :class:`.Dialect` instance.
-
- """
- # arg, return value is inconsistent with
- # ClauseElement.compile()....this is a mistake.
-
- if not dialect:
- dialect = self._default_dialect
-
- return dialect.type_compiler.process(self)
-
- @property
- def _default_dialect(self):
- if self.__class__.__module__.startswith("sqlalchemy.dialects"):
- tokens = self.__class__.__module__.split(".")[0:3]
- mod = ".".join(tokens)
- return getattr(__import__(mod).dialects, tokens[-1]).dialect()
- else:
- return default.DefaultDialect()
-
- def __str__(self):
- if util.py2k:
- return unicode(self.compile()).\
- encode('ascii', 'backslashreplace')
- else:
- return str(self.compile())
-
- def __init__(self, *args, **kwargs):
- """Support implementations that were passing arguments"""
- if args or kwargs:
- util.warn_deprecated("Passing arguments to type object "
- "constructor %s is deprecated" % self.__class__)
-
- def __repr__(self):
- return util.generic_repr(self)
-
-
-def _reconstitute_comparator(expression):
- return expression.comparator
-
-
-class UserDefinedType(TypeEngine):
- """Base for user defined types.
-
- This should be the base of new types. Note that
- for most cases, :class:`.TypeDecorator` is probably
- more appropriate::
-
- import sqlalchemy.types as types
-
- class MyType(types.UserDefinedType):
- def __init__(self, precision = 8):
- self.precision = precision
-
- def get_col_spec(self):
- return "MYTYPE(%s)" % self.precision
-
- def bind_processor(self, dialect):
- def process(value):
- return value
- return process
-
- def result_processor(self, dialect, coltype):
- def process(value):
- return value
- return process
-
- Once the type is made, it's immediately usable::
-
- table = Table('foo', meta,
- Column('id', Integer, primary_key=True),
- Column('data', MyType(16))
- )
-
- """
- __visit_name__ = "user_defined"
-
- class Comparator(TypeEngine.Comparator):
- def _adapt_expression(self, op, other_comparator):
- if hasattr(self.type, 'adapt_operator'):
- util.warn_deprecated(
- "UserDefinedType.adapt_operator is deprecated. Create "
- "a UserDefinedType.Comparator subclass instead which "
- "generates the desired expression constructs, given a "
- "particular operator."
- )
- return self.type.adapt_operator(op), self.type
- else:
- return op, self.type
-
- comparator_factory = Comparator
-
- def coerce_compared_value(self, op, value):
- """Suggest a type for a 'coerced' Python value in an expression.
-
- Default behavior for :class:`.UserDefinedType` is the
- same as that of :class:`.TypeDecorator`; by default it returns
- ``self``, assuming the compared value should be coerced into
- the same type as this one. See
- :meth:`.TypeDecorator.coerce_compared_value` for more detail.
-
- .. versionchanged:: 0.8 :meth:`.UserDefinedType.coerce_compared_value`
- now returns ``self`` by default, rather than falling onto the
- more fundamental behavior of
- :meth:`.TypeEngine.coerce_compared_value`.
-
- """
-
- return self
-
-
-class TypeDecorator(TypeEngine):
- """Allows the creation of types which add additional functionality
- to an existing type.
-
- This method is preferred to direct subclassing of SQLAlchemy's
- built-in types as it ensures that all required functionality of
- the underlying type is kept in place.
-
- Typical usage::
-
- import sqlalchemy.types as types
-
- class MyType(types.TypeDecorator):
- '''Prefixes Unicode values with "PREFIX:" on the way in and
- strips it off on the way out.
- '''
-
- impl = types.Unicode
-
- def process_bind_param(self, value, dialect):
- return "PREFIX:" + value
-
- def process_result_value(self, value, dialect):
- return value[7:]
-
- def copy(self):
- return MyType(self.impl.length)
-
- The class-level "impl" attribute is required, and can reference any
- TypeEngine class. Alternatively, the load_dialect_impl() method
- can be used to provide different type classes based on the dialect
- given; in this case, the "impl" variable can reference
- ``TypeEngine`` as a placeholder.
-
- Types that receive a Python type that isn't similar to the ultimate type
- used may want to define the :meth:`TypeDecorator.coerce_compared_value`
- method. This is used to give the expression system a hint when coercing
- Python objects into bind parameters within expressions. Consider this
- expression::
-
- mytable.c.somecol + datetime.date(2009, 5, 15)
-
- Above, if "somecol" is an ``Integer`` variant, it makes sense that
- we're doing date arithmetic, where above is usually interpreted
- by databases as adding a number of days to the given date.
- The expression system does the right thing by not attempting to
- coerce the "date()" value into an integer-oriented bind parameter.
-
- However, in the case of ``TypeDecorator``, we are usually changing an
- incoming Python type to something new - ``TypeDecorator`` by default will
- "coerce" the non-typed side to be the same type as itself. Such as below,
- we define an "epoch" type that stores a date value as an integer::
-
- class MyEpochType(types.TypeDecorator):
- impl = types.Integer
-
- epoch = datetime.date(1970, 1, 1)
-
- def process_bind_param(self, value, dialect):
- return (value - self.epoch).days
-
- def process_result_value(self, value, dialect):
- return self.epoch + timedelta(days=value)
-
- Our expression of ``somecol + date`` with the above type will coerce the
- "date" on the right side to also be treated as ``MyEpochType``.
-
- This behavior can be overridden via the
- :meth:`~TypeDecorator.coerce_compared_value` method, which returns a type
- that should be used for the value of the expression. Below we set it such
- that an integer value will be treated as an ``Integer``, and any other
- value is assumed to be a date and will be treated as a ``MyEpochType``::
-
- def coerce_compared_value(self, op, value):
- if isinstance(value, int):
- return Integer()
- else:
- return self
-
- """
-
- __visit_name__ = "type_decorator"
-
- def __init__(self, *args, **kwargs):
- """Construct a :class:`.TypeDecorator`.
-
- Arguments sent here are passed to the constructor
- of the class assigned to the ``impl`` class level attribute,
- assuming the ``impl`` is a callable, and the resulting
- object is assigned to the ``self.impl`` instance attribute
- (thus overriding the class attribute of the same name).
-
- If the class level ``impl`` is not a callable (the unusual case),
- it will be assigned to the same instance attribute 'as-is',
- ignoring those arguments passed to the constructor.
-
- Subclasses can override this to customize the generation
- of ``self.impl`` entirely.
-
- """
-
- if not hasattr(self.__class__, 'impl'):
- raise AssertionError("TypeDecorator implementations "
- "require a class-level variable "
- "'impl' which refers to the class of "
- "type being decorated")
- self.impl = to_instance(self.__class__.impl, *args, **kwargs)
-
- coerce_to_is_types = (util.NoneType, )
- """Specify those Python types which should be coerced at the expression
- level to "IS <constant>" when compared using ``==`` (and same for
- ``IS NOT`` in conjunction with ``!=``.
-
- For most SQLAlchemy types, this includes ``NoneType``, as well as ``bool``.
-
- :class:`.TypeDecorator` modifies this list to only include ``NoneType``,
- as typedecorator implementations that deal with boolean types are common.
-
- Custom :class:`.TypeDecorator` classes can override this attribute to
- return an empty tuple, in which case no values will be coerced to
- constants.
-
- ..versionadded:: 0.8.2
- Added :attr:`.TypeDecorator.coerce_to_is_types` to allow for easier
- control of ``__eq__()`` ``__ne__()`` operations.
-
- """
-
- class Comparator(TypeEngine.Comparator):
- def operate(self, op, *other, **kwargs):
- kwargs['_python_is_types'] = self.expr.type.coerce_to_is_types
- return super(TypeDecorator.Comparator, self).operate(
- op, *other, **kwargs)
-
- def reverse_operate(self, op, other, **kwargs):
- kwargs['_python_is_types'] = self.expr.type.coerce_to_is_types
- return super(TypeDecorator.Comparator, self).reverse_operate(
- op, other, **kwargs)
-
- @property
- def comparator_factory(self):
- return type("TDComparator",
- (TypeDecorator.Comparator, self.impl.comparator_factory),
- {})
-
- def _gen_dialect_impl(self, dialect):
- """
- #todo
- """
- adapted = dialect.type_descriptor(self)
- if adapted is not self:
- return adapted
-
- # otherwise adapt the impl type, link
- # to a copy of this TypeDecorator and return
- # that.
- typedesc = self.load_dialect_impl(dialect).dialect_impl(dialect)
- tt = self.copy()
- if not isinstance(tt, self.__class__):
- raise AssertionError('Type object %s does not properly '
- 'implement the copy() method, it must '
- 'return an object of type %s' % (self,
- self.__class__))
- tt.impl = typedesc
- return tt
-
- @property
- def _type_affinity(self):
- """
- #todo
- """
- return self.impl._type_affinity
-
- def type_engine(self, dialect):
- """Return a dialect-specific :class:`.TypeEngine` instance
- for this :class:`.TypeDecorator`.
-
- In most cases this returns a dialect-adapted form of
- the :class:`.TypeEngine` type represented by ``self.impl``.
- Makes usage of :meth:`dialect_impl` but also traverses
- into wrapped :class:`.TypeDecorator` instances.
- Behavior can be customized here by overriding
- :meth:`load_dialect_impl`.
-
- """
- adapted = dialect.type_descriptor(self)
- if type(adapted) is not type(self):
- return adapted
- elif isinstance(self.impl, TypeDecorator):
- return self.impl.type_engine(dialect)
- else:
- return self.load_dialect_impl(dialect)
-
- def load_dialect_impl(self, dialect):
- """Return a :class:`.TypeEngine` object corresponding to a dialect.
-
- This is an end-user override hook that can be used to provide
- differing types depending on the given dialect. It is used
- by the :class:`.TypeDecorator` implementation of :meth:`type_engine`
- to help determine what type should ultimately be returned
- for a given :class:`.TypeDecorator`.
-
- By default returns ``self.impl``.
-
- """
- return self.impl
-
- def __getattr__(self, key):
- """Proxy all other undefined accessors to the underlying
- implementation."""
- return getattr(self.impl, key)
-
- def process_bind_param(self, value, dialect):
- """Receive a bound parameter value to be converted.
-
- Subclasses override this method to return the
- value that should be passed along to the underlying
- :class:`.TypeEngine` object, and from there to the
- DBAPI ``execute()`` method.
-
- The operation could be anything desired to perform custom
- behavior, such as transforming or serializing data.
- This could also be used as a hook for validating logic.
-
- This operation should be designed with the reverse operation
- in mind, which would be the process_result_value method of
- this class.
-
- :param value: Data to operate upon, of any type expected by
- this method in the subclass. Can be ``None``.
- :param dialect: the :class:`.Dialect` in use.
-
- """
-
- raise NotImplementedError()
-
- def process_result_value(self, value, dialect):
- """Receive a result-row column value to be converted.
-
- Subclasses should implement this method to operate on data
- fetched from the database.
-
- Subclasses override this method to return the
- value that should be passed back to the application,
- given a value that is already processed by
- the underlying :class:`.TypeEngine` object, originally
- from the DBAPI cursor method ``fetchone()`` or similar.
-
- The operation could be anything desired to perform custom
- behavior, such as transforming or serializing data.
- This could also be used as a hook for validating logic.
-
- :param value: Data to operate upon, of any type expected by
- this method in the subclass. Can be ``None``.
- :param dialect: the :class:`.Dialect` in use.
-
- This operation should be designed to be reversible by
- the "process_bind_param" method of this class.
-
- """
-
- raise NotImplementedError()
-
- @util.memoized_property
- def _has_bind_processor(self):
- """memoized boolean, check if process_bind_param is implemented.
-
- Allows the base process_bind_param to raise
- NotImplementedError without needing to test an expensive
- exception throw.
-
- """
-
- return self.__class__.process_bind_param.__code__ \
- is not TypeDecorator.process_bind_param.__code__
-
- def bind_processor(self, dialect):
- """Provide a bound value processing function for the
- given :class:`.Dialect`.
-
- This is the method that fulfills the :class:`.TypeEngine`
- contract for bound value conversion. :class:`.TypeDecorator`
- will wrap a user-defined implementation of
- :meth:`process_bind_param` here.
-
- User-defined code can override this method directly,
- though its likely best to use :meth:`process_bind_param` so that
- the processing provided by ``self.impl`` is maintained.
-
- :param dialect: Dialect instance in use.
-
- This method is the reverse counterpart to the
- :meth:`result_processor` method of this class.
-
- """
- if self._has_bind_processor:
- process_param = self.process_bind_param
- impl_processor = self.impl.bind_processor(dialect)
- if impl_processor:
- def process(value):
- return impl_processor(process_param(value, dialect))
-
- else:
- def process(value):
- return process_param(value, dialect)
-
- return process
- else:
- return self.impl.bind_processor(dialect)
-
- @util.memoized_property
- def _has_result_processor(self):
- """memoized boolean, check if process_result_value is implemented.
-
- Allows the base process_result_value to raise
- NotImplementedError without needing to test an expensive
- exception throw.
-
- """
- return self.__class__.process_result_value.__code__ \
- is not TypeDecorator.process_result_value.__code__
-
- def result_processor(self, dialect, coltype):
- """Provide a result value processing function for the given
- :class:`.Dialect`.
-
- This is the method that fulfills the :class:`.TypeEngine`
- contract for result value conversion. :class:`.TypeDecorator`
- will wrap a user-defined implementation of
- :meth:`process_result_value` here.
-
- User-defined code can override this method directly,
- though its likely best to use :meth:`process_result_value` so that
- the processing provided by ``self.impl`` is maintained.
-
- :param dialect: Dialect instance in use.
- :param coltype: An SQLAlchemy data type
-
- This method is the reverse counterpart to the
- :meth:`bind_processor` method of this class.
-
- """
- if self._has_result_processor:
- process_value = self.process_result_value
- impl_processor = self.impl.result_processor(dialect,
- coltype)
- if impl_processor:
- def process(value):
- return process_value(impl_processor(value), dialect)
-
- else:
- def process(value):
- return process_value(value, dialect)
-
- return process
- else:
- return self.impl.result_processor(dialect, coltype)
-
- def coerce_compared_value(self, op, value):
- """Suggest a type for a 'coerced' Python value in an expression.
-
- By default, returns self. This method is called by
- the expression system when an object using this type is
- on the left or right side of an expression against a plain Python
- object which does not yet have a SQLAlchemy type assigned::
-
- expr = table.c.somecolumn + 35
-
- Where above, if ``somecolumn`` uses this type, this method will
- be called with the value ``operator.add``
- and ``35``. The return value is whatever SQLAlchemy type should
- be used for ``35`` for this particular operation.
-
- """
- return self
-
- def copy(self):
- """Produce a copy of this :class:`.TypeDecorator` instance.
-
- This is a shallow copy and is provided to fulfill part of
- the :class:`.TypeEngine` contract. It usually does not
- need to be overridden unless the user-defined :class:`.TypeDecorator`
- has local state that should be deep-copied.
-
- """
-
- instance = self.__class__.__new__(self.__class__)
- instance.__dict__.update(self.__dict__)
- return instance
-
- def get_dbapi_type(self, dbapi):
- """Return the DBAPI type object represented by this
- :class:`.TypeDecorator`.
-
- By default this calls upon :meth:`.TypeEngine.get_dbapi_type` of the
- underlying "impl".
- """
- return self.impl.get_dbapi_type(dbapi)
-
- def compare_values(self, x, y):
- """Given two values, compare them for equality.
-
- By default this calls upon :meth:`.TypeEngine.compare_values`
- of the underlying "impl", which in turn usually
- uses the Python equals operator ``==``.
-
- This function is used by the ORM to compare
- an original-loaded value with an intercepted
- "changed" value, to determine if a net change
- has occurred.
-
- """
- return self.impl.compare_values(x, y)
-
- def __repr__(self):
- return util.generic_repr(self, to_inspect=self.impl)
-
-
-class Variant(TypeDecorator):
- """A wrapping type that selects among a variety of
- implementations based on dialect in use.
-
- The :class:`.Variant` type is typically constructed
- using the :meth:`.TypeEngine.with_variant` method.
-
- .. versionadded:: 0.7.2
-
- .. seealso:: :meth:`.TypeEngine.with_variant` for an example of use.
-
- """
-
- def __init__(self, base, mapping):
- """Construct a new :class:`.Variant`.
-
- :param base: the base 'fallback' type
- :param mapping: dictionary of string dialect names to
- :class:`.TypeEngine` instances.
-
- """
- self.impl = base
- self.mapping = mapping
-
- def load_dialect_impl(self, dialect):
- if dialect.name in self.mapping:
- return self.mapping[dialect.name]
- else:
- return self.impl
-
- def with_variant(self, type_, dialect_name):
- """Return a new :class:`.Variant` which adds the given
- type + dialect name to the mapping, in addition to the
- mapping present in this :class:`.Variant`.
-
- :param type_: a :class:`.TypeEngine` that will be selected
- as a variant from the originating type, when a dialect
- of the given name is in use.
- :param dialect_name: base name of the dialect which uses
- this type. (i.e. ``'postgresql'``, ``'mysql'``, etc.)
-
- """
-
- if dialect_name in self.mapping:
- raise exc.ArgumentError(
- "Dialect '%s' is already present in "
- "the mapping for this Variant" % dialect_name)
- mapping = self.mapping.copy()
- mapping[dialect_name] = type_
- return Variant(self.impl, mapping)
-
-
-def to_instance(typeobj, *arg, **kw):
- if typeobj is None:
- return NULLTYPE
-
- if util.callable(typeobj):
- return typeobj(*arg, **kw)
- else:
- return typeobj
-
-
-def adapt_type(typeobj, colspecs):
- if isinstance(typeobj, type):
- typeobj = typeobj()
- for t in typeobj.__class__.__mro__[0:-1]:
- try:
- impltype = colspecs[t]
- break
- except KeyError:
- pass
- else:
- # couldnt adapt - so just return the type itself
- # (it may be a user-defined type)
- return typeobj
- # if we adapted the given generic type to a database-specific type,
- # but it turns out the originally given "generic" type
- # is actually a subclass of our resulting type, then we were already
- # given a more specific type than that required; so use that.
- if (issubclass(typeobj.__class__, impltype)):
- return typeobj
- return typeobj.adapt(impltype)
-
-
-class NullType(TypeEngine):
- """An unknown type.
-
- :class:`.NullType` is used as a default type for those cases where
- a type cannot be determined, including:
-
- * During table reflection, when the type of a column is not recognized
- by the :class:`.Dialect`
- * When constructing SQL expressions using plain Python objects of
- unknown types (e.g. ``somecolumn == my_special_object``)
- * When a new :class:`.Column` is created, and the given type is passed
- as ``None`` or is not passed at all.
-
- The :class:`.NullType` can be used within SQL expression invocation
- without issue, it just has no behavior either at the expression construction
- level or at the bind-parameter/result processing level. :class:`.NullType`
- will result in a :class:`.CompileException` if the compiler is asked to render
- the type itself, such as if it is used in a :func:`.cast` operation
- or within a schema creation operation such as that invoked by
- :meth:`.MetaData.create_all` or the :class:`.CreateTable` construct.
-
- """
- __visit_name__ = 'null'
-
- class Comparator(TypeEngine.Comparator):
- def _adapt_expression(self, op, other_comparator):
- if isinstance(other_comparator, NullType.Comparator) or \
- not operators.is_commutative(op):
- return op, self.expr.type
- else:
- return other_comparator._adapt_expression(op, self)
- comparator_factory = Comparator
-
-NullTypeEngine = NullType
-
-
-class Concatenable(object):
- """A mixin that marks a type as supporting 'concatenation',
- typically strings."""
-
- class Comparator(TypeEngine.Comparator):
- def _adapt_expression(self, op, other_comparator):
- if op is operators.add and isinstance(other_comparator,
- (Concatenable.Comparator, NullType.Comparator)):
- return operators.concat_op, self.expr.type
- else:
- return op, self.expr.type
-
- comparator_factory = Comparator
-
-
-class _DateAffinity(object):
- """Mixin date/time specific expression adaptations.
-
- Rules are implemented within Date,Time,Interval,DateTime, Numeric,
- Integer. Based on http://www.postgresql.org/docs/current/static
- /functions-datetime.html.
-
- """
-
- @property
- def _expression_adaptations(self):
- raise NotImplementedError()
-
- class Comparator(TypeEngine.Comparator):
- _blank_dict = util.immutabledict()
-
- def _adapt_expression(self, op, other_comparator):
- othertype = other_comparator.type._type_affinity
- return op, \
- self.type._expression_adaptations.get(op, self._blank_dict).\
- get(othertype, NULLTYPE)
- comparator_factory = Comparator
-
-
-class String(Concatenable, TypeEngine):
- """The base for all string and character types.
-
- In SQL, corresponds to VARCHAR. Can also take Python unicode objects
- and encode to the database's encoding in bind params (and the reverse for
- result sets.)
-
- The `length` field is usually required when the `String` type is
- used within a CREATE TABLE statement, as VARCHAR requires a length
- on most databases.
-
- """
-
- __visit_name__ = 'string'
-
- def __init__(self, length=None, collation=None,
- convert_unicode=False,
- unicode_error=None,
- _warn_on_bytestring=False
- ):
- """
- Create a string-holding type.
-
- :param length: optional, a length for the column for use in
- DDL and CAST expressions. May be safely omitted if no ``CREATE
- TABLE`` will be issued. Certain databases may require a
- ``length`` for use in DDL, and will raise an exception when
- the ``CREATE TABLE`` DDL is issued if a ``VARCHAR``
- with no length is included. Whether the value is
- interpreted as bytes or characters is database specific.
-
- :param collation: Optional, a column-level collation for
- use in DDL and CAST expressions. Renders using the
- COLLATE keyword supported by SQLite, MySQL, and Postgresql.
- E.g.::
-
- >>> from sqlalchemy import cast, select, String
- >>> print select([cast('some string', String(collation='utf8'))])
- SELECT CAST(:param_1 AS VARCHAR COLLATE utf8) AS anon_1
-
- .. versionadded:: 0.8 Added support for COLLATE to all
- string types.
-
- :param convert_unicode: When set to ``True``, the
- :class:`.String` type will assume that
- input is to be passed as Python ``unicode`` objects,
- and results returned as Python ``unicode`` objects.
- If the DBAPI in use does not support Python unicode
- (which is fewer and fewer these days), SQLAlchemy
- will encode/decode the value, using the
- value of the ``encoding`` parameter passed to
- :func:`.create_engine` as the encoding.
-
- When using a DBAPI that natively supports Python
- unicode objects, this flag generally does not
- need to be set. For columns that are explicitly
- intended to store non-ASCII data, the :class:`.Unicode`
- or :class:`UnicodeText`
- types should be used regardless, which feature
- the same behavior of ``convert_unicode`` but
- also indicate an underlying column type that
- directly supports unicode, such as ``NVARCHAR``.
-
- For the extremely rare case that Python ``unicode``
- is to be encoded/decoded by SQLAlchemy on a backend
- that does natively support Python ``unicode``,
- the value ``force`` can be passed here which will
- cause SQLAlchemy's encode/decode services to be
- used unconditionally.
-
- :param unicode_error: Optional, a method to use to handle Unicode
- conversion errors. Behaves like the ``errors`` keyword argument to
- the standard library's ``string.decode()`` functions. This flag
- requires that ``convert_unicode`` is set to ``force`` - otherwise,
- SQLAlchemy is not guaranteed to handle the task of unicode
- conversion. Note that this flag adds significant performance
- overhead to row-fetching operations for backends that already
- return unicode objects natively (which most DBAPIs do). This
- flag should only be used as a last resort for reading
- strings from a column with varied or corrupted encodings.
-
- """
- if unicode_error is not None and convert_unicode != 'force':
- raise exc.ArgumentError("convert_unicode must be 'force' "
- "when unicode_error is set.")
-
- self.length = length
- self.collation = collation
- self.convert_unicode = convert_unicode
- self.unicode_error = unicode_error
- self._warn_on_bytestring = _warn_on_bytestring
-
- def bind_processor(self, dialect):
- if self.convert_unicode or dialect.convert_unicode:
- if dialect.supports_unicode_binds and \
- self.convert_unicode != 'force':
- if self._warn_on_bytestring:
- def process(value):
- if isinstance(value, util.binary_type):
- util.warn("Unicode type received non-unicode bind "
- "param value.")
- return value
- return process
- else:
- return None
- else:
- encoder = codecs.getencoder(dialect.encoding)
- warn_on_bytestring = self._warn_on_bytestring
-
- def process(value):
- if isinstance(value, util.text_type):
- return encoder(value, self.unicode_error)[0]
- elif warn_on_bytestring and value is not None:
- util.warn("Unicode type received non-unicode bind "
- "param value")
- return value
- return process
- else:
- return None
-
- def result_processor(self, dialect, coltype):
- wants_unicode = self.convert_unicode or dialect.convert_unicode
- needs_convert = wants_unicode and \
- (dialect.returns_unicode_strings is not True or
- self.convert_unicode == 'force')
-
- if needs_convert:
- to_unicode = processors.to_unicode_processor_factory(
- dialect.encoding, self.unicode_error)
-
- if dialect.returns_unicode_strings:
- # we wouldn't be here unless convert_unicode='force'
- # was specified, or the driver has erratic unicode-returning
- # habits. since we will be getting back unicode
- # in most cases, we check for it (decode will fail).
- def process(value):
- if isinstance(value, util.text_type):
- return value
- else:
- return to_unicode(value)
- return process
- else:
- # here, we assume that the object is not unicode,
- # avoiding expensive isinstance() check.
- return to_unicode
- else:
- return None
-
- @property
- def python_type(self):
- if self.convert_unicode:
- return util.text_type
- else:
- return str
-
- def get_dbapi_type(self, dbapi):
- return dbapi.STRING
-
-
-class Text(String):
- """A variably sized string type.
-
- In SQL, usually corresponds to CLOB or TEXT. Can also take Python
- unicode objects and encode to the database's encoding in bind
- params (and the reverse for result sets.) In general, TEXT objects
- do not have a length; while some databases will accept a length
- argument here, it will be rejected by others.
-
- """
- __visit_name__ = 'text'
-
-
-class Unicode(String):
- """A variable length Unicode string type.
-
- The :class:`.Unicode` type is a :class:`.String` subclass
- that assumes input and output as Python ``unicode`` data,
- and in that regard is equivalent to the usage of the
- ``convert_unicode`` flag with the :class:`.String` type.
- However, unlike plain :class:`.String`, it also implies an
- underlying column type that is explicitly supporting of non-ASCII
- data, such as ``NVARCHAR`` on Oracle and SQL Server.
- This can impact the output of ``CREATE TABLE`` statements
- and ``CAST`` functions at the dialect level, and can
- also affect the handling of bound parameters in some
- specific DBAPI scenarios.
-
- The encoding used by the :class:`.Unicode` type is usually
- determined by the DBAPI itself; most modern DBAPIs
- feature support for Python ``unicode`` objects as bound
- values and result set values, and the encoding should
- be configured as detailed in the notes for the target
- DBAPI in the :ref:`dialect_toplevel` section.
-
- For those DBAPIs which do not support, or are not configured
- to accommodate Python ``unicode`` objects
- directly, SQLAlchemy does the encoding and decoding
- outside of the DBAPI. The encoding in this scenario
- is determined by the ``encoding`` flag passed to
- :func:`.create_engine`.
-
- When using the :class:`.Unicode` type, it is only appropriate
- to pass Python ``unicode`` objects, and not plain ``str``.
- If a plain ``str`` is passed under Python 2, a warning
- is emitted. If you notice your application emitting these warnings but
- you're not sure of the source of them, the Python
- ``warnings`` filter, documented at
- http://docs.python.org/library/warnings.html,
- can be used to turn these warnings into exceptions
- which will illustrate a stack trace::
-
- import warnings
- warnings.simplefilter('error')
-
- For an application that wishes to pass plain bytestrings
- and Python ``unicode`` objects to the ``Unicode`` type
- equally, the bytestrings must first be decoded into
- unicode. The recipe at :ref:`coerce_to_unicode` illustrates
- how this is done.
-
- See also:
-
- :class:`.UnicodeText` - unlengthed textual counterpart
- to :class:`.Unicode`.
-
- """
-
- __visit_name__ = 'unicode'
-
- def __init__(self, length=None, **kwargs):
- """
- Create a :class:`.Unicode` object.
-
- Parameters are the same as that of :class:`.String`,
- with the exception that ``convert_unicode``
- defaults to ``True``.
-
- """
- kwargs.setdefault('convert_unicode', True)
- kwargs.setdefault('_warn_on_bytestring', True)
- super(Unicode, self).__init__(length=length, **kwargs)
-
-
-class UnicodeText(Text):
- """An unbounded-length Unicode string type.
-
- See :class:`.Unicode` for details on the unicode
- behavior of this object.
-
- Like :class:`.Unicode`, usage the :class:`.UnicodeText` type implies a
- unicode-capable type being used on the backend, such as
- ``NCLOB``, ``NTEXT``.
-
- """
-
- __visit_name__ = 'unicode_text'
-
- def __init__(self, length=None, **kwargs):
- """
- Create a Unicode-converting Text type.
-
- Parameters are the same as that of :class:`.Text`,
- with the exception that ``convert_unicode``
- defaults to ``True``.
-
- """
- kwargs.setdefault('convert_unicode', True)
- kwargs.setdefault('_warn_on_bytestring', True)
- super(UnicodeText, self).__init__(length=length, **kwargs)
-
-
-class Integer(_DateAffinity, TypeEngine):
- """A type for ``int`` integers."""
-
- __visit_name__ = 'integer'
-
- def get_dbapi_type(self, dbapi):
- return dbapi.NUMBER
-
- @property
- def python_type(self):
- return int
-
- @util.memoized_property
- def _expression_adaptations(self):
- # TODO: need a dictionary object that will
- # handle operators generically here, this is incomplete
- return {
- operators.add: {
- Date: Date,
- Integer: self.__class__,
- Numeric: Numeric,
- },
- operators.mul: {
- Interval: Interval,
- Integer: self.__class__,
- Numeric: Numeric,
- },
- operators.div: {
- Integer: self.__class__,
- Numeric: Numeric,
- },
- operators.truediv: {
- Integer: self.__class__,
- Numeric: Numeric,
- },
- operators.sub: {
- Integer: self.__class__,
- Numeric: Numeric,
- },
- }
-
-
-class SmallInteger(Integer):
- """A type for smaller ``int`` integers.
-
- Typically generates a ``SMALLINT`` in DDL, and otherwise acts like
- a normal :class:`.Integer` on the Python side.
-
- """
-
- __visit_name__ = 'small_integer'
-
-
-class BigInteger(Integer):
- """A type for bigger ``int`` integers.
-
- Typically generates a ``BIGINT`` in DDL, and otherwise acts like
- a normal :class:`.Integer` on the Python side.
-
- """
-
- __visit_name__ = 'big_integer'
-
-
-class Numeric(_DateAffinity, TypeEngine):
- """A type for fixed precision numbers.
-
- Typically generates DECIMAL or NUMERIC. Returns
- ``decimal.Decimal`` objects by default, applying
- conversion as needed.
-
- .. note::
-
- The `cdecimal <http://pypi.python.org/pypi/cdecimal/>`_ library
- is a high performing alternative to Python's built-in
- ``decimal.Decimal`` type, which performs very poorly in high volume
- situations. SQLAlchemy 0.7 is tested against ``cdecimal`` and supports
- it fully. The type is not necessarily supported by DBAPI
- implementations however, most of which contain an import for plain
- ``decimal`` in their source code, even though some such as psycopg2
- provide hooks for alternate adapters. SQLAlchemy imports ``decimal``
- globally as well. The most straightforward and
- foolproof way to use "cdecimal" given current DBAPI and Python support
- is to patch it directly into sys.modules before anything else is
- imported::
-
- import sys
- import cdecimal
- sys.modules["decimal"] = cdecimal
-
- While the global patch is a little ugly, it's particularly
- important to use just one decimal library at a time since
- Python Decimal and cdecimal Decimal objects
- are not currently compatible *with each other*::
-
- >>> import cdecimal
- >>> import decimal
- >>> decimal.Decimal("10") == cdecimal.Decimal("10")
- False
-
- SQLAlchemy will provide more natural support of
- cdecimal if and when it becomes a standard part of Python
- installations and is supported by all DBAPIs.
-
- """
-
- __visit_name__ = 'numeric'
-
- def __init__(self, precision=None, scale=None, asdecimal=True):
- """
- Construct a Numeric.
-
- :param precision: the numeric precision for use in DDL ``CREATE
- TABLE``.
-
- :param scale: the numeric scale for use in DDL ``CREATE TABLE``.
-
- :param asdecimal: default True. Return whether or not
- values should be sent as Python Decimal objects, or
- as floats. Different DBAPIs send one or the other based on
- datatypes - the Numeric type will ensure that return values
- are one or the other across DBAPIs consistently.
-
- When using the ``Numeric`` type, care should be taken to ensure
- that the asdecimal setting is apppropriate for the DBAPI in use -
- when Numeric applies a conversion from Decimal->float or float->
- Decimal, this conversion incurs an additional performance overhead
- for all result columns received.
-
- DBAPIs that return Decimal natively (e.g. psycopg2) will have
- better accuracy and higher performance with a setting of ``True``,
- as the native translation to Decimal reduces the amount of floating-
- point issues at play, and the Numeric type itself doesn't need
- to apply any further conversions. However, another DBAPI which
- returns floats natively *will* incur an additional conversion
- overhead, and is still subject to floating point data loss - in
- which case ``asdecimal=False`` will at least remove the extra
- conversion overhead.
-
- """
- self.precision = precision
- self.scale = scale
- self.asdecimal = asdecimal
-
- def get_dbapi_type(self, dbapi):
- return dbapi.NUMBER
-
- @property
- def python_type(self):
- if self.asdecimal:
- return decimal.Decimal
- else:
- return float
-
- def bind_processor(self, dialect):
- if dialect.supports_native_decimal:
- return None
- else:
- return processors.to_float
-
- def result_processor(self, dialect, coltype):
- if self.asdecimal:
- if dialect.supports_native_decimal:
- # we're a "numeric", DBAPI will give us Decimal directly
- return None
- else:
- util.warn('Dialect %s+%s does *not* support Decimal '
- 'objects natively, and SQLAlchemy must '
- 'convert from floating point - rounding '
- 'errors and other issues may occur. Please '
- 'consider storing Decimal numbers as strings '
- 'or integers on this platform for lossless '
- 'storage.' % (dialect.name, dialect.driver))
-
- # we're a "numeric", DBAPI returns floats, convert.
- if self.scale is not None:
- return processors.to_decimal_processor_factory(
- decimal.Decimal, self.scale)
- else:
- return processors.to_decimal_processor_factory(
- decimal.Decimal)
- else:
- if dialect.supports_native_decimal:
- return processors.to_float
- else:
- return None
-
- @util.memoized_property
- def _expression_adaptations(self):
- return {
- operators.mul: {
- Interval: Interval,
- Numeric: self.__class__,
- Integer: self.__class__,
- },
- operators.div: {
- Numeric: self.__class__,
- Integer: self.__class__,
- },
- operators.truediv: {
- Numeric: self.__class__,
- Integer: self.__class__,
- },
- operators.add: {
- Numeric: self.__class__,
- Integer: self.__class__,
- },
- operators.sub: {
- Numeric: self.__class__,
- Integer: self.__class__,
- }
- }
-
-
-class Float(Numeric):
- """A type for ``float`` numbers.
-
- Returns Python ``float`` objects by default, applying
- conversion as needed.
-
- """
-
- __visit_name__ = 'float'
-
- scale = None
-
- def __init__(self, precision=None, asdecimal=False, **kwargs):
- """
- Construct a Float.
-
- :param precision: the numeric precision for use in DDL ``CREATE
- TABLE``.
-
- :param asdecimal: the same flag as that of :class:`.Numeric`, but
- defaults to ``False``. Note that setting this flag to ``True``
- results in floating point conversion.
-
- :param \**kwargs: deprecated. Additional arguments here are ignored
- by the default :class:`.Float` type. For database specific
- floats that support additional arguments, see that dialect's
- documentation for details, such as
- :class:`sqlalchemy.dialects.mysql.FLOAT`.
-
- """
- self.precision = precision
- self.asdecimal = asdecimal
- if kwargs:
- util.warn_deprecated("Additional keyword arguments "
- "passed to Float ignored.")
-
- def result_processor(self, dialect, coltype):
- if self.asdecimal:
- return processors.to_decimal_processor_factory(decimal.Decimal)
- else:
- return None
-
- @util.memoized_property
- def _expression_adaptations(self):
- return {
- operators.mul: {
- Interval: Interval,
- Numeric: self.__class__,
- },
- operators.div: {
- Numeric: self.__class__,
- },
- operators.truediv: {
- Numeric: self.__class__,
- },
- operators.add: {
- Numeric: self.__class__,
- },
- operators.sub: {
- Numeric: self.__class__,
- }
- }
-
-
-class DateTime(_DateAffinity, TypeEngine):
- """A type for ``datetime.datetime()`` objects.
-
- Date and time types return objects from the Python ``datetime``
- module. Most DBAPIs have built in support for the datetime
- module, with the noted exception of SQLite. In the case of
- SQLite, date and time types are stored as strings which are then
- converted back to datetime objects when rows are returned.
-
- """
-
- __visit_name__ = 'datetime'
-
- def __init__(self, timezone=False):
- """Construct a new :class:`.DateTime`.
-
- :param timezone: boolean. If True, and supported by the
- backend, will produce 'TIMESTAMP WITH TIMEZONE'. For backends
- that don't support timezone aware timestamps, has no
- effect.
-
- """
- self.timezone = timezone
-
- def get_dbapi_type(self, dbapi):
- return dbapi.DATETIME
-
- @property
- def python_type(self):
- return dt.datetime
-
- @util.memoized_property
- def _expression_adaptations(self):
- return {
- operators.add: {
- Interval: self.__class__,
- },
- operators.sub: {
- Interval: self.__class__,
- DateTime: Interval,
- },
- }
-
-
-class Date(_DateAffinity, TypeEngine):
- """A type for ``datetime.date()`` objects."""
-
- __visit_name__ = 'date'
-
- def get_dbapi_type(self, dbapi):
- return dbapi.DATETIME
-
- @property
- def python_type(self):
- return dt.date
-
- @util.memoized_property
- def _expression_adaptations(self):
- return {
- operators.add: {
- Integer: self.__class__,
- Interval: DateTime,
- Time: DateTime,
- },
- operators.sub: {
- # date - integer = date
- Integer: self.__class__,
-
- # date - date = integer.
- Date: Integer,
-
- Interval: DateTime,
-
- # date - datetime = interval,
- # this one is not in the PG docs
- # but works
- DateTime: Interval,
- },
- }
-
-
-class Time(_DateAffinity, TypeEngine):
- """A type for ``datetime.time()`` objects."""
-
- __visit_name__ = 'time'
-
- def __init__(self, timezone=False):
- self.timezone = timezone
-
- def get_dbapi_type(self, dbapi):
- return dbapi.DATETIME
-
- @property
- def python_type(self):
- return dt.time
-
- @util.memoized_property
- def _expression_adaptations(self):
- return {
- operators.add: {
- Date: DateTime,
- Interval: self.__class__
- },
- operators.sub: {
- Time: Interval,
- Interval: self.__class__,
- },
- }
-
-
-class _Binary(TypeEngine):
- """Define base behavior for binary types."""
-
- def __init__(self, length=None):
- self.length = length
-
- @property
- def python_type(self):
- return util.binary_type
-
- # Python 3 - sqlite3 doesn't need the `Binary` conversion
- # here, though pg8000 does to indicate "bytea"
- def bind_processor(self, dialect):
- DBAPIBinary = dialect.dbapi.Binary
-
- def process(value):
- x = self
- if value is not None:
- return DBAPIBinary(value)
- else:
- return None
- return process
-
- # Python 3 has native bytes() type
- # both sqlite3 and pg8000 seem to return it,
- # psycopg2 as of 2.5 returns 'memoryview'
- if util.py2k:
- def result_processor(self, dialect, coltype):
- if util.jython:
- def process(value):
- if value is not None:
- if isinstance(value, array.array):
- return value.tostring()
- return str(value)
- else:
- return None
- else:
- process = processors.to_str
- return process
- else:
- def result_processor(self, dialect, coltype):
- def process(value):
- if value is not None:
- value = bytes(value)
- return value
- return process
-
- def coerce_compared_value(self, op, value):
- """See :meth:`.TypeEngine.coerce_compared_value` for a description."""
-
- if isinstance(value, util.string_types):
- return self
- else:
- return super(_Binary, self).coerce_compared_value(op, value)
-
- def get_dbapi_type(self, dbapi):
- return dbapi.BINARY
-
-
-class LargeBinary(_Binary):
- """A type for large binary byte data.
-
- The Binary type generates BLOB or BYTEA when tables are created,
- and also converts incoming values using the ``Binary`` callable
- provided by each DB-API.
-
- """
-
- __visit_name__ = 'large_binary'
-
- def __init__(self, length=None):
- """
- Construct a LargeBinary type.
-
- :param length: optional, a length for the column for use in
- DDL statements, for those BLOB types that accept a length
- (i.e. MySQL). It does *not* produce a small BINARY/VARBINARY
- type - use the BINARY/VARBINARY types specifically for those.
- May be safely omitted if no ``CREATE
- TABLE`` will be issued. Certain databases may require a
- *length* for use in DDL, and will raise an exception when
- the ``CREATE TABLE`` DDL is issued.
-
- """
- _Binary.__init__(self, length=length)
-
-
-class Binary(LargeBinary):
- """Deprecated. Renamed to LargeBinary."""
-
- def __init__(self, *arg, **kw):
- util.warn_deprecated('The Binary type has been renamed to '
- 'LargeBinary.')
- LargeBinary.__init__(self, *arg, **kw)
-
-
-class SchemaType(events.SchemaEventTarget):
- """Mark a type as possibly requiring schema-level DDL for usage.
-
- Supports types that must be explicitly created/dropped (i.e. PG ENUM type)
- as well as types that are complimented by table or schema level
- constraints, triggers, and other rules.
-
- :class:`.SchemaType` classes can also be targets for the
- :meth:`.DDLEvents.before_parent_attach` and
- :meth:`.DDLEvents.after_parent_attach` events, where the events fire off
- surrounding the association of the type object with a parent
- :class:`.Column`.
-
- .. seealso::
-
- :class:`.Enum`
-
- :class:`.Boolean`
-
-
- """
-
- def __init__(self, **kw):
- self.name = kw.pop('name', None)
- self.quote = kw.pop('quote', None)
- self.schema = kw.pop('schema', None)
- self.metadata = kw.pop('metadata', None)
- self.inherit_schema = kw.pop('inherit_schema', False)
- if self.metadata:
- event.listen(
- self.metadata,
- "before_create",
- util.portable_instancemethod(self._on_metadata_create)
- )
- event.listen(
- self.metadata,
- "after_drop",
- util.portable_instancemethod(self._on_metadata_drop)
- )
-
- def _set_parent(self, column):
- column._on_table_attach(util.portable_instancemethod(self._set_table))
-
- def _set_table(self, column, table):
- if self.inherit_schema:
- self.schema = table.schema
-
- event.listen(
- table,
- "before_create",
- util.portable_instancemethod(
- self._on_table_create)
- )
- event.listen(
- table,
- "after_drop",
- util.portable_instancemethod(self._on_table_drop)
- )
- if self.metadata is None:
- # TODO: what's the difference between self.metadata
- # and table.metadata here ?
- event.listen(
- table.metadata,
- "before_create",
- util.portable_instancemethod(self._on_metadata_create)
- )
- event.listen(
- table.metadata,
- "after_drop",
- util.portable_instancemethod(self._on_metadata_drop)
- )
-
- def copy(self, **kw):
- return self.adapt(self.__class__)
-
- def adapt(self, impltype, **kw):
- schema = kw.pop('schema', self.schema)
- metadata = kw.pop('metadata', self.metadata)
- return impltype(name=self.name,
- quote=self.quote,
- schema=schema,
- metadata=metadata,
- inherit_schema=self.inherit_schema,
- **kw
- )
-
- @property
- def bind(self):
- return self.metadata and self.metadata.bind or None
-
- def create(self, bind=None, checkfirst=False):
- """Issue CREATE ddl for this type, if applicable."""
-
- if bind is None:
- bind = schema._bind_or_error(self)
- t = self.dialect_impl(bind.dialect)
- if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
- t.create(bind=bind, checkfirst=checkfirst)
-
- def drop(self, bind=None, checkfirst=False):
- """Issue DROP ddl for this type, if applicable."""
-
- if bind is None:
- bind = schema._bind_or_error(self)
- t = self.dialect_impl(bind.dialect)
- if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
- t.drop(bind=bind, checkfirst=checkfirst)
-
- def _on_table_create(self, target, bind, **kw):
- t = self.dialect_impl(bind.dialect)
- if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
- t._on_table_create(target, bind, **kw)
-
- def _on_table_drop(self, target, bind, **kw):
- t = self.dialect_impl(bind.dialect)
- if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
- t._on_table_drop(target, bind, **kw)
-
- def _on_metadata_create(self, target, bind, **kw):
- t = self.dialect_impl(bind.dialect)
- if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
- t._on_metadata_create(target, bind, **kw)
-
- def _on_metadata_drop(self, target, bind, **kw):
- t = self.dialect_impl(bind.dialect)
- if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
- t._on_metadata_drop(target, bind, **kw)
-
-
-class Enum(String, SchemaType):
- """Generic Enum Type.
-
- The Enum type provides a set of possible string values which the
- column is constrained towards.
-
- By default, uses the backend's native ENUM type if available,
- else uses VARCHAR + a CHECK constraint.
-
- .. seealso::
-
- :class:`~.postgresql.ENUM` - PostgreSQL-specific type,
- which has additional functionality.
-
- """
-
- __visit_name__ = 'enum'
-
- def __init__(self, *enums, **kw):
- """Construct an enum.
-
- Keyword arguments which don't apply to a specific backend are ignored
- by that backend.
-
- :param \*enums: string or unicode enumeration labels. If unicode
- labels are present, the `convert_unicode` flag is auto-enabled.
-
- :param convert_unicode: Enable unicode-aware bind parameter and
- result-set processing for this Enum's data. This is set
- automatically based on the presence of unicode label strings.
-
- :param metadata: Associate this type directly with a ``MetaData``
- object. For types that exist on the target database as an
- independent schema construct (Postgresql), this type will be
- created and dropped within ``create_all()`` and ``drop_all()``
- operations. If the type is not associated with any ``MetaData``
- object, it will associate itself with each ``Table`` in which it is
- used, and will be created when any of those individual tables are
- created, after a check is performed for it's existence. The type is
- only dropped when ``drop_all()`` is called for that ``Table``
- object's metadata, however.
-
- :param name: The name of this type. This is required for Postgresql
- and any future supported database which requires an explicitly
- named type, or an explicitly named constraint in order to generate
- the type and/or a table that uses it.
-
- :param native_enum: Use the database's native ENUM type when
- available. Defaults to True. When False, uses VARCHAR + check
- constraint for all backends.
-
- :param schema: Schema name of this type. For types that exist on the
- target database as an independent schema construct (Postgresql),
- this parameter specifies the named schema in which the type is
- present.
-
- .. note::
-
- The ``schema`` of the :class:`.Enum` type does not
- by default make use of the ``schema`` established on the
- owning :class:`.Table`. If this behavior is desired,
- set the ``inherit_schema`` flag to ``True``.
-
- :param quote: Force quoting to be on or off on the type's name. If
- left as the default of `None`, the usual schema-level "case
- sensitive"/"reserved name" rules are used to determine if this
- type's name should be quoted.
-
- :param inherit_schema: When ``True``, the "schema" from the owning
- :class:`.Table` will be copied to the "schema" attribute of this
- :class:`.Enum`, replacing whatever value was passed for the
- ``schema`` attribute. This also takes effect when using the
- :meth:`.Table.tometadata` operation.
-
- .. versionadded:: 0.8
-
- """
- self.enums = enums
- self.native_enum = kw.pop('native_enum', True)
- convert_unicode = kw.pop('convert_unicode', None)
- if convert_unicode is None:
- for e in enums:
- if isinstance(e, util.text_type):
- convert_unicode = True
- break
- else:
- convert_unicode = False
-
- if self.enums:
- length = max(len(x) for x in self.enums)
- else:
- length = 0
- String.__init__(self,
- length=length,
- convert_unicode=convert_unicode,
- )
- SchemaType.__init__(self, **kw)
-
- def __repr__(self):
- return util.generic_repr(self, [
- ("native_enum", True),
- ("name", None)
- ])
-
- def _should_create_constraint(self, compiler):
- return not self.native_enum or \
- not compiler.dialect.supports_native_enum
-
- def _set_table(self, column, table):
- if self.native_enum:
- SchemaType._set_table(self, column, table)
-
- e = schema.CheckConstraint(
- column.in_(self.enums),
- name=self.name,
- _create_rule=util.portable_instancemethod(
- self._should_create_constraint)
- )
- table.append_constraint(e)
-
- def adapt(self, impltype, **kw):
- schema = kw.pop('schema', self.schema)
- metadata = kw.pop('metadata', self.metadata)
- if issubclass(impltype, Enum):
- return impltype(name=self.name,
- quote=self.quote,
- schema=schema,
- metadata=metadata,
- convert_unicode=self.convert_unicode,
- native_enum=self.native_enum,
- inherit_schema=self.inherit_schema,
- *self.enums,
- **kw
- )
- else:
- return super(Enum, self).adapt(impltype, **kw)
-
-
-class PickleType(TypeDecorator):
- """Holds Python objects, which are serialized using pickle.
-
- PickleType builds upon the Binary type to apply Python's
- ``pickle.dumps()`` to incoming objects, and ``pickle.loads()`` on
- the way out, allowing any pickleable Python object to be stored as
- a serialized binary field.
-
- To allow ORM change events to propagate for elements associated
- with :class:`.PickleType`, see :ref:`mutable_toplevel`.
-
- """
-
- impl = LargeBinary
-
- def __init__(self, protocol=pickle.HIGHEST_PROTOCOL,
- pickler=None, comparator=None):
- """
- Construct a PickleType.
-
- :param protocol: defaults to ``pickle.HIGHEST_PROTOCOL``.
-
- :param pickler: defaults to cPickle.pickle or pickle.pickle if
- cPickle is not available. May be any object with
- pickle-compatible ``dumps` and ``loads`` methods.
-
- :param comparator: a 2-arg callable predicate used
- to compare values of this type. If left as ``None``,
- the Python "equals" operator is used to compare values.
-
- """
- self.protocol = protocol
- self.pickler = pickler or pickle
- self.comparator = comparator
- super(PickleType, self).__init__()
-
- def __reduce__(self):
- return PickleType, (self.protocol,
- None,
- self.comparator)
-
- def bind_processor(self, dialect):
- impl_processor = self.impl.bind_processor(dialect)
- dumps = self.pickler.dumps
- protocol = self.protocol
- if impl_processor:
- def process(value):
- if value is not None:
- value = dumps(value, protocol)
- return impl_processor(value)
- else:
- def process(value):
- if value is not None:
- value = dumps(value, protocol)
- return value
- return process
-
- def result_processor(self, dialect, coltype):
- impl_processor = self.impl.result_processor(dialect, coltype)
- loads = self.pickler.loads
- if impl_processor:
- def process(value):
- value = impl_processor(value)
- if value is None:
- return None
- return loads(value)
- else:
- def process(value):
- if value is None:
- return None
- return loads(value)
- return process
-
- def compare_values(self, x, y):
- if self.comparator:
- return self.comparator(x, y)
- else:
- return x == y
-
-
-class Boolean(TypeEngine, SchemaType):
- """A bool datatype.
-
- Boolean typically uses BOOLEAN or SMALLINT on the DDL side, and on
- the Python side deals in ``True`` or ``False``.
-
- """
-
- __visit_name__ = 'boolean'
-
- def __init__(self, create_constraint=True, name=None):
- """Construct a Boolean.
-
- :param create_constraint: defaults to True. If the boolean
- is generated as an int/smallint, also create a CHECK constraint
- on the table that ensures 1 or 0 as a value.
-
- :param name: if a CHECK constraint is generated, specify
- the name of the constraint.
-
- """
- self.create_constraint = create_constraint
- self.name = name
-
- def _should_create_constraint(self, compiler):
- return not compiler.dialect.supports_native_boolean
-
- def _set_table(self, column, table):
- if not self.create_constraint:
- return
-
- e = schema.CheckConstraint(
- column.in_([0, 1]),
- name=self.name,
- _create_rule=util.portable_instancemethod(
- self._should_create_constraint)
- )
- table.append_constraint(e)
-
- @property
- def python_type(self):
- return bool
-
- def bind_processor(self, dialect):
- if dialect.supports_native_boolean:
- return None
- else:
- return processors.boolean_to_int
-
- def result_processor(self, dialect, coltype):
- if dialect.supports_native_boolean:
- return None
- else:
- return processors.int_to_boolean
-
-
-class Interval(_DateAffinity, TypeDecorator):
- """A type for ``datetime.timedelta()`` objects.
-
- The Interval type deals with ``datetime.timedelta`` objects. In
- PostgreSQL, the native ``INTERVAL`` type is used; for others, the
- value is stored as a date which is relative to the "epoch"
- (Jan. 1, 1970).
-
- Note that the ``Interval`` type does not currently provide date arithmetic
- operations on platforms which do not support interval types natively. Such
- operations usually require transformation of both sides of the expression
- (such as, conversion of both sides into integer epoch values first) which
- currently is a manual procedure (such as via
- :attr:`~sqlalchemy.sql.expression.func`).
-
- """
-
- impl = DateTime
- epoch = dt.datetime.utcfromtimestamp(0)
-
- def __init__(self, native=True,
- second_precision=None,
- day_precision=None):
- """Construct an Interval object.
-
- :param native: when True, use the actual
- INTERVAL type provided by the database, if
- supported (currently Postgresql, Oracle).
- Otherwise, represent the interval data as
- an epoch value regardless.
-
- :param second_precision: For native interval types
- which support a "fractional seconds precision" parameter,
- i.e. Oracle and Postgresql
-
- :param day_precision: for native interval types which
- support a "day precision" parameter, i.e. Oracle.
-
- """
- super(Interval, self).__init__()
- self.native = native
- self.second_precision = second_precision
- self.day_precision = day_precision
-
- def adapt(self, cls, **kw):
- if self.native and hasattr(cls, '_adapt_from_generic_interval'):
- return cls._adapt_from_generic_interval(self, **kw)
- else:
- return self.__class__(
- native=self.native,
- second_precision=self.second_precision,
- day_precision=self.day_precision,
- **kw)
-
- @property
- def python_type(self):
- return dt.timedelta
-
- def bind_processor(self, dialect):
- impl_processor = self.impl.bind_processor(dialect)
- epoch = self.epoch
- if impl_processor:
- def process(value):
- if value is not None:
- value = epoch + value
- return impl_processor(value)
- else:
- def process(value):
- if value is not None:
- value = epoch + value
- return value
- return process
-
- def result_processor(self, dialect, coltype):
- impl_processor = self.impl.result_processor(dialect, coltype)
- epoch = self.epoch
- if impl_processor:
- def process(value):
- value = impl_processor(value)
- if value is None:
- return None
- return value - epoch
- else:
- def process(value):
- if value is None:
- return None
- return value - epoch
- return process
-
- @util.memoized_property
- def _expression_adaptations(self):
- return {
- operators.add: {
- Date: DateTime,
- Interval: self.__class__,
- DateTime: DateTime,
- Time: Time,
- },
- operators.sub: {
- Interval: self.__class__
- },
- operators.mul: {
- Numeric: self.__class__
- },
- operators.truediv: {
- Numeric: self.__class__
- },
- operators.div: {
- Numeric: self.__class__
- }
- }
-
- @property
- def _type_affinity(self):
- return Interval
-
- def coerce_compared_value(self, op, value):
- """See :meth:`.TypeEngine.coerce_compared_value` for a description."""
-
- return self.impl.coerce_compared_value(op, value)
-
-
-class REAL(Float):
- """The SQL REAL type."""
-
- __visit_name__ = 'REAL'
-
-
-class FLOAT(Float):
- """The SQL FLOAT type."""
-
- __visit_name__ = 'FLOAT'
-
-
-class NUMERIC(Numeric):
- """The SQL NUMERIC type."""
-
- __visit_name__ = 'NUMERIC'
-
-
-class DECIMAL(Numeric):
- """The SQL DECIMAL type."""
-
- __visit_name__ = 'DECIMAL'
-
-
-class INTEGER(Integer):
- """The SQL INT or INTEGER type."""
-
- __visit_name__ = 'INTEGER'
-INT = INTEGER
-
-
-class SMALLINT(SmallInteger):
- """The SQL SMALLINT type."""
-
- __visit_name__ = 'SMALLINT'
-
-
-class BIGINT(BigInteger):
- """The SQL BIGINT type."""
-
- __visit_name__ = 'BIGINT'
-
-
-class TIMESTAMP(DateTime):
- """The SQL TIMESTAMP type."""
-
- __visit_name__ = 'TIMESTAMP'
-
- def get_dbapi_type(self, dbapi):
- return dbapi.TIMESTAMP
-
-
-class DATETIME(DateTime):
- """The SQL DATETIME type."""
-
- __visit_name__ = 'DATETIME'
-
-
-class DATE(Date):
- """The SQL DATE type."""
-
- __visit_name__ = 'DATE'
-
-
-class TIME(Time):
- """The SQL TIME type."""
-
- __visit_name__ = 'TIME'
-
-
-class TEXT(Text):
- """The SQL TEXT type."""
-
- __visit_name__ = 'TEXT'
-
-
-class CLOB(Text):
- """The CLOB type.
-
- This type is found in Oracle and Informix.
- """
-
- __visit_name__ = 'CLOB'
-
-
-class VARCHAR(String):
- """The SQL VARCHAR type."""
-
- __visit_name__ = 'VARCHAR'
-
-
-class NVARCHAR(Unicode):
- """The SQL NVARCHAR type."""
-
- __visit_name__ = 'NVARCHAR'
-
-
-class CHAR(String):
- """The SQL CHAR type."""
-
- __visit_name__ = 'CHAR'
-
-
-class NCHAR(Unicode):
- """The SQL NCHAR type."""
-
- __visit_name__ = 'NCHAR'
-
-
-class BLOB(LargeBinary):
- """The SQL BLOB type."""
-
- __visit_name__ = 'BLOB'
-
-
-class BINARY(_Binary):
- """The SQL BINARY type."""
-
- __visit_name__ = 'BINARY'
-
-
-class VARBINARY(_Binary):
- """The SQL VARBINARY type."""
-
- __visit_name__ = 'VARBINARY'
-
-
-class BOOLEAN(Boolean):
- """The SQL BOOLEAN type."""
-
- __visit_name__ = 'BOOLEAN'
-
-NULLTYPE = NullType()
-BOOLEANTYPE = Boolean()
-STRINGTYPE = String()
-
-_type_map = {
- int: Integer(),
- float: Numeric(),
- bool: BOOLEANTYPE,
- decimal.Decimal: Numeric(),
- dt.date: Date(),
- dt.datetime: DateTime(),
- dt.time: Time(),
- dt.timedelta: Interval(),
- NoneType: NULLTYPE
-}
-
-if util.py3k:
- _type_map[bytes] = LargeBinary()
- _type_map[str] = Unicode()
-else:
- _type_map[unicode] = Unicode()
- _type_map[str] = String()
-
+from .sql.type_api import (
+ adapt_type,
+ TypeEngine,
+ TypeDecorator,
+ Variant,
+ to_instance,
+ UserDefinedType
+)
+from .sql.sqltypes import (
+ BIGINT,
+ BINARY,
+ BLOB,
+ BOOLEAN,
+ BigInteger,
+ Binary,
+ _Binary,
+ Boolean,
+ CHAR,
+ CLOB,
+ Concatenable,
+ DATE,
+ DATETIME,
+ DECIMAL,
+ Date,
+ DateTime,
+ Enum,
+ FLOAT,
+ Float,
+ INT,
+ INTEGER,
+ Integer,
+ Interval,
+ LargeBinary,
+ NCHAR,
+ NVARCHAR,
+ NullType,
+ NULLTYPE,
+ NUMERIC,
+ Numeric,
+ PickleType,
+ REAL,
+ SchemaType,
+ SMALLINT,
+ SmallInteger,
+ String,
+ STRINGTYPE,
+ TEXT,
+ TIME,
+ TIMESTAMP,
+ Text,
+ Time,
+ Unicode,
+ UnicodeText,
+ VARBINARY,
+ VARCHAR,
+ _type_map
+ )
diff --git a/lib/sqlalchemy/util/__init__.py b/lib/sqlalchemy/util/__init__.py
index 104566215..eba64ed15 100644
--- a/lib/sqlalchemy/util/__init__.py
+++ b/lib/sqlalchemy/util/__init__.py
@@ -1,16 +1,16 @@
# util/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from .compat import callable, cmp, reduce, \
threading, py3k, py33, py2k, jython, pypy, cpython, win32, \
- pickle, dottedgetter, parse_qsl, namedtuple, next, WeakSet, reraise, \
+ pickle, dottedgetter, parse_qsl, namedtuple, next, reraise, \
raise_from_cause, text_type, string_types, int_types, binary_type, \
quote_plus, with_metaclass, print_, itertools_filterfalse, u, ue, b,\
- unquote_plus, b64decode, b64encode, byte_buffer, itertools_filter,\
- StringIO, inspect_getargspec
+ unquote_plus, unquote, b64decode, b64encode, byte_buffer, itertools_filter,\
+ iterbytes, StringIO, inspect_getargspec
from ._collections import KeyedTuple, ImmutableContainer, immutabledict, \
Properties, OrderedProperties, ImmutableProperties, OrderedDict, \
@@ -18,14 +18,15 @@ from ._collections import KeyedTuple, ImmutableContainer, immutabledict, \
column_dict, ordered_column_set, populate_column_dict, unique_list, \
UniqueAppender, PopulateDict, EMPTY_SET, to_list, to_set, \
to_column_set, update_copy, flatten_iterator, \
- LRUCache, ScopedRegistry, ThreadLocalRegistry, WeakSequence
+ LRUCache, ScopedRegistry, ThreadLocalRegistry, WeakSequence, \
+ coerce_generator_arg
from .langhelpers import iterate_attributes, class_hierarchy, \
portable_instancemethod, unbound_method_to_callable, \
getargspec_init, format_argspec_init, format_argspec_plus, \
get_func_kwargs, get_cls_kwargs, decorator, as_interface, \
memoized_property, memoized_instancemethod, md5_hex, \
- group_expirable_memoized_property, importlater, decode_slice, \
+ group_expirable_memoized_property, dependencies, decode_slice, \
monkeypatch_proxied_specials, asbool, bool_or_str, coerce_kw_type,\
duck_type_collection, assert_arg_type, symbol, dictlike_iteritems,\
classproperty, set_creation_order, warn_exception, warn, NoneType,\
diff --git a/lib/sqlalchemy/util/_collections.py b/lib/sqlalchemy/util/_collections.py
index 86a90828a..c0a24ba4f 100644
--- a/lib/sqlalchemy/util/_collections.py
+++ b/lib/sqlalchemy/util/_collections.py
@@ -1,16 +1,17 @@
# util/_collections.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Collection classes and helpers."""
-import itertools
+from __future__ import absolute_import
import weakref
import operator
from .compat import threading, itertools_filterfalse
from . import py2k
+import types
EMPTY_SET = frozenset()
@@ -650,19 +651,31 @@ class IdentitySet(object):
class WeakSequence(object):
- def __init__(self, elements):
- self._storage = weakref.WeakValueDictionary(
- (idx, element) for idx, element in enumerate(elements)
- )
+ def __init__(self, __elements=()):
+ self._storage = [
+ weakref.ref(element, self._remove) for element in __elements
+ ]
+
+ def append(self, item):
+ self._storage.append(weakref.ref(item, self._remove))
+
+ def _remove(self, ref):
+ self._storage.remove(ref)
+
+ def __len__(self):
+ return len(self._storage)
def __iter__(self):
- return iter(self._storage.values())
+ return (obj for obj in
+ (ref() for ref in self._storage) if obj is not None)
def __getitem__(self, index):
try:
- return self._storage[index]
+ obj = self._storage[index]
except KeyError:
raise IndexError("Index %s out of range" % index)
+ else:
+ return obj()
class OrderedIdentitySet(IdentitySet):
@@ -743,6 +756,11 @@ class UniqueAppender(object):
def __iter__(self):
return iter(self.data)
+def coerce_generator_arg(arg):
+ if len(arg) == 1 and isinstance(arg[0], types.GeneratorType):
+ return list(arg[0])
+ else:
+ return arg
def to_list(x, default=None):
if x is None:
diff --git a/lib/sqlalchemy/util/compat.py b/lib/sqlalchemy/util/compat.py
index a89762b4e..f1346406e 100644
--- a/lib/sqlalchemy/util/compat.py
+++ b/lib/sqlalchemy/util/compat.py
@@ -1,5 +1,5 @@
# util/compat.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -40,7 +40,7 @@ if py3k:
import builtins
from inspect import getfullargspec as inspect_getfullargspec
- from urllib.parse import quote_plus, unquote_plus, parse_qsl
+ from urllib.parse import quote_plus, unquote_plus, parse_qsl, quote, unquote
import configparser
from io import StringIO
@@ -95,7 +95,7 @@ if py3k:
else:
from inspect import getargspec as inspect_getfullargspec
inspect_getargspec = inspect_getfullargspec
- from urllib import quote_plus, unquote_plus
+ from urllib import quote_plus, unquote_plus, quote, unquote
from urlparse import parse_qsl
import ConfigParser as configparser
from StringIO import StringIO
@@ -149,29 +149,6 @@ else:
itertools_imap = itertools.imap
-
-try:
- from weakref import WeakSet
-except:
- import weakref
-
- class WeakSet(object):
- """Implement the small subset of set() which SQLAlchemy needs
- here. """
- def __init__(self, values=None):
- self._storage = weakref.WeakKeyDictionary()
- if values is not None:
- self._storage.update((value, None) for value in values)
-
- def __iter__(self):
- return iter(self._storage)
-
- def union(self, other):
- return WeakSet(set(self).union(other))
-
- def add(self, other):
- self._storage[other] = True
-
import time
if win32 or jython:
time_func = time.clock
diff --git a/lib/sqlalchemy/util/deprecations.py b/lib/sqlalchemy/util/deprecations.py
index c315d2da6..c8854dc32 100644
--- a/lib/sqlalchemy/util/deprecations.py
+++ b/lib/sqlalchemy/util/deprecations.py
@@ -1,5 +1,5 @@
# util/deprecations.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py
index ef1ff881d..82e37ce99 100644
--- a/lib/sqlalchemy/util/langhelpers.py
+++ b/lib/sqlalchemy/util/langhelpers.py
@@ -1,5 +1,5 @@
# util/langhelpers.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -19,6 +19,7 @@ from functools import update_wrapper
from .. import exc
import hashlib
from . import compat
+from . import _collections
def md5_hex(x):
if compat.py3k:
@@ -98,14 +99,60 @@ def decorator(target):
metadata = dict(target=targ_name, fn=fn_name)
metadata.update(format_argspec_plus(spec, grouped=False))
-
- code = 'lambda %(args)s: %(target)s(%(fn)s, %(apply_kw)s)' % (
- metadata)
- decorated = eval(code, {targ_name: target, fn_name: fn})
+ metadata['name'] = fn.__name__
+ code = """\
+def %(name)s(%(args)s):
+ return %(target)s(%(fn)s, %(apply_kw)s)
+""" % metadata
+ decorated = _exec_code_in_env(code,
+ {targ_name: target, fn_name: fn},
+ fn.__name__)
decorated.__defaults__ = getattr(fn, 'im_func', fn).__defaults__
return update_wrapper(decorated, fn)
return update_wrapper(decorate, target)
+def _exec_code_in_env(code, env, fn_name):
+ exec(code, env)
+ return env[fn_name]
+
+def public_factory(target, location):
+ """Produce a wrapping function for the given cls or classmethod.
+
+ Rationale here is so that the __init__ method of the
+ class can serve as documentation for the function.
+
+ """
+ if isinstance(target, type):
+ fn = target.__init__
+ callable_ = target
+ doc = "Construct a new :class:`.%s` object. \n\n"\
+ "This constructor is mirrored as a public API function; see :func:`~%s` "\
+ "for a full usage and argument description." % (
+ target.__name__, location, )
+ else:
+ fn = callable_ = target
+ doc = "This function is mirrored; see :func:`~%s` "\
+ "for a description of arguments." % location
+
+ location_name = location.split(".")[-1]
+ spec = compat.inspect_getfullargspec(fn)
+ del spec[0][0]
+ metadata = format_argspec_plus(spec, grouped=False)
+ metadata['name'] = location_name
+ code = """\
+def %(name)s(%(args)s):
+ return cls(%(apply_kw)s)
+""" % metadata
+ env = {'cls': callable_, 'symbol': symbol}
+ exec(code, env)
+ decorated = env[location_name]
+ decorated.__doc__ = fn.__doc__
+ if compat.py2k or hasattr(fn, '__func__'):
+ fn.__func__.__doc__ = doc
+ else:
+ fn.__doc__ = doc
+ return decorated
+
class PluginLoader(object):
@@ -134,8 +181,7 @@ class PluginLoader(object):
self.impls[name] = impl.load
return impl.load()
- from sqlalchemy import exc
- raise exc.ArgumentError(
+ raise exc.NoSuchModuleError(
"Can't load plugin: %s:%s" %
(self.group, name))
@@ -187,6 +233,7 @@ def get_cls_kwargs(cls, _set=None):
try:
+ # TODO: who doesn't have this constant?
from inspect import CO_VARKEYWORDS
def inspect_func_args(fn):
@@ -221,7 +268,9 @@ def get_callable_argspec(fn, no_self=False):
return compat.ArgSpec(spec.args[1:], spec.varargs, spec.keywords, spec.defaults)
elif hasattr(fn, '__func__'):
return compat.inspect_getargspec(fn.__func__)
- elif hasattr(fn, '__call__'):
+ elif hasattr(fn, '__call__') and \
+ not hasattr(fn.__call__, '__call__'): # functools.partial does this;
+ # not much we can do
return get_callable_argspec(fn.__call__)
else:
raise ValueError("Can't inspect function: %s" % fn)
@@ -360,44 +409,66 @@ def generic_repr(obj, additional_kw=(), to_inspect=None):
"""
if to_inspect is None:
- to_inspect = obj
+ to_inspect = [obj]
+ else:
+ to_inspect = _collections.to_list(to_inspect)
missing = object()
- def genargs():
+ pos_args = []
+ kw_args = _collections.OrderedDict()
+ vargs = None
+ for i, insp in enumerate(to_inspect):
try:
- (args, vargs, vkw, defaults) = \
- inspect.getargspec(to_inspect.__init__)
+ (_args, _vargs, vkw, defaults) = \
+ inspect.getargspec(insp.__init__)
except TypeError:
- return
+ continue
+ else:
+ default_len = defaults and len(defaults) or 0
+ if i == 0:
+ if _vargs:
+ vargs = _vargs
+ if default_len:
+ pos_args.extend(_args[1:-default_len])
+ else:
+ pos_args.extend(_args[1:])
+ else:
+ kw_args.update([
+ (arg, missing) for arg in _args[1:-default_len]
+ ])
+
+ if default_len:
+ kw_args.update([
+ (arg, default)
+ for arg, default
+ in zip(_args[-default_len:], defaults)
+ ])
+ output = []
+
+ output.extend(repr(getattr(obj, arg, None)) for arg in pos_args)
+
+ if vargs is not None and hasattr(obj, vargs):
+ output.extend([repr(val) for val in getattr(obj, vargs)])
+
+ for arg, defval in kw_args.items():
+ try:
+ val = getattr(obj, arg, missing)
+ if val is not missing and val != defval:
+ output.append('%s=%r' % (arg, val))
+ except:
+ pass
- default_len = defaults and len(defaults) or 0
+ if additional_kw:
+ for arg, defval in additional_kw:
+ try:
+ val = getattr(obj, arg, missing)
+ if val is not missing and val != defval:
+ output.append('%s=%r' % (arg, val))
+ except:
+ pass
- if not default_len:
- for arg in args[1:]:
- yield repr(getattr(obj, arg, None))
- if vargs is not None and hasattr(obj, vargs):
- yield ', '.join(repr(val) for val in getattr(obj, vargs))
- else:
- for arg in args[1:-default_len]:
- yield repr(getattr(obj, arg, None))
- for (arg, defval) in zip(args[-default_len:], defaults):
- try:
- val = getattr(obj, arg, missing)
- if val is not missing and val != defval:
- yield '%s=%r' % (arg, val)
- except:
- pass
- if additional_kw:
- for arg, defval in additional_kw:
- try:
- val = getattr(obj, arg, missing)
- if val is not missing and val != defval:
- yield '%s=%r' % (arg, val)
- except:
- pass
-
- return "%s(%s)" % (obj.__class__.__name__, ", ".join(genargs()))
+ return "%s(%s)" % (obj.__class__.__name__, ", ".join(output))
class portable_instancemethod(object):
@@ -619,7 +690,11 @@ class memoized_property(object):
return result
def _reset(self, obj):
- obj.__dict__.pop(self.__name__, None)
+ memoized_property.reset(obj, self.__name__)
+
+ @classmethod
+ def reset(cls, obj, name):
+ obj.__dict__.pop(name, None)
class memoized_instancemethod(object):
@@ -675,84 +750,135 @@ class group_expirable_memoized_property(object):
return memoized_instancemethod(fn)
-class importlater(object):
- """Deferred import object.
- e.g.::
-
- somesubmod = importlater("mypackage.somemodule", "somesubmod")
+def dependency_for(modulename):
+ def decorate(obj):
+ # TODO: would be nice to improve on this import silliness,
+ # unfortunately importlib doesn't work that great either
+ tokens = modulename.split(".")
+ mod = compat.import_(".".join(tokens[0:-1]), globals(), locals(), tokens[-1])
+ mod = getattr(mod, tokens[-1])
+ setattr(mod, obj.__name__, obj)
+ return obj
+ return decorate
- is equivalent to::
+class dependencies(object):
+ """Apply imported dependencies as arguments to a function.
- from mypackage.somemodule import somesubmod
+ E.g.::
- except evaluted upon attribute access to "somesubmod".
+ @util.dependencies(
+ "sqlalchemy.sql.widget",
+ "sqlalchemy.engine.default"
+        )
+ def some_func(self, widget, default, arg1, arg2, **kw):
+ # ...
- importlater() currently requires that resolve_all() be
- called, typically at the bottom of a package's __init__.py.
- This is so that __import__ still called only at
- module import time, and not potentially within
- a non-main thread later on.
+ Rationale is so that the impact of a dependency cycle can be
+ associated directly with the few functions that cause the cycle,
+ and not pollute the module-level namespace.
"""
- _unresolved = set()
+ def __init__(self, *deps):
+ self.import_deps = []
+ for dep in deps:
+ tokens = dep.split(".")
+ self.import_deps.append(
+ dependencies._importlater(
+ ".".join(tokens[0:-1]),
+ tokens[-1]
+ )
+ )
+
+ def __call__(self, fn):
+ import_deps = self.import_deps
+ spec = compat.inspect_getfullargspec(fn)
+
+ spec_zero = list(spec[0])
+ hasself = spec_zero[0] in ('self', 'cls')
+
+ for i in range(len(import_deps)):
+ spec[0][i + (1 if hasself else 0)] = "import_deps[%r]" % i
+
+ inner_spec = format_argspec_plus(spec, grouped=False)
+
+ for impname in import_deps:
+ del spec_zero[1 if hasself else 0]
+ spec[0][:] = spec_zero
+
+ outer_spec = format_argspec_plus(spec, grouped=False)
- def __init__(self, path, addtl=None):
- self._il_path = path
- self._il_addtl = addtl
- importlater._unresolved.add(self)
+ code = 'lambda %(args)s: fn(%(apply_kw)s)' % {
+ "args": outer_spec['args'],
+ "apply_kw": inner_spec['apply_kw']
+ }
+
+ decorated = eval(code, locals())
+ decorated.__defaults__ = getattr(fn, 'im_func', fn).__defaults__
+ return update_wrapper(decorated, fn)
@classmethod
- def resolve_all(cls):
- for m in list(importlater._unresolved):
- m._resolve()
+ def resolve_all(cls, path):
+ for m in list(dependencies._unresolved):
+ if m._full_path.startswith(path):
+ m._resolve()
+
+ _unresolved = set()
+ _by_key = {}
+
+ class _importlater(object):
+ _unresolved = set()
- @property
- def _full_path(self):
- if self._il_addtl:
+ _by_key = {}
+
+ def __new__(cls, path, addtl):
+ key = path + "." + addtl
+ if key in dependencies._by_key:
+ return dependencies._by_key[key]
+ else:
+ dependencies._by_key[key] = imp = object.__new__(cls)
+ return imp
+
+ def __init__(self, path, addtl):
+ self._il_path = path
+ self._il_addtl = addtl
+ dependencies._unresolved.add(self)
+
+
+ @property
+ def _full_path(self):
return self._il_path + "." + self._il_addtl
- else:
- return self._il_path
-
- @memoized_property
- def module(self):
- if self in importlater._unresolved:
- raise ImportError(
- "importlater.resolve_all() hasn't "
- "been called (this is %s %s)"
- % (self._il_path, self._il_addtl))
-
- m = self._initial_import
- if self._il_addtl:
- m = getattr(m, self._il_addtl)
- else:
- for token in self._il_path.split(".")[1:]:
- m = getattr(m, token)
- return m
- def _resolve(self):
- importlater._unresolved.discard(self)
- if self._il_addtl:
+ @memoized_property
+ def module(self):
+ if self in dependencies._unresolved:
+ raise ImportError(
+ "importlater.resolve_all() hasn't "
+ "been called (this is %s %s)"
+ % (self._il_path, self._il_addtl))
+
+ return getattr(self._initial_import, self._il_addtl)
+
+ def _resolve(self):
+ dependencies._unresolved.discard(self)
self._initial_import = compat.import_(
self._il_path, globals(), locals(),
[self._il_addtl])
- else:
- self._initial_import = compat.import_(self._il_path)
- def __getattr__(self, key):
- if key == 'module':
- raise ImportError("Could not resolve module %s"
- % self._full_path)
- try:
- attr = getattr(self.module, key)
- except AttributeError:
- raise AttributeError(
- "Module %s has no attribute '%s'" %
- (self._full_path, key)
- )
- self.__dict__[key] = attr
- return attr
+ def __getattr__(self, key):
+ if key == 'module':
+ raise ImportError("Could not resolve module %s"
+ % self._full_path)
+ try:
+ attr = getattr(self.module, key)
+ except AttributeError:
+ raise AttributeError(
+ "Module %s has no attribute '%s'" %
+ (self._full_path, key)
+ )
+ self.__dict__[key] = attr
+ return attr
# from paste.deploy.converters
@@ -956,7 +1082,7 @@ class _symbol(int):
return repr(self)
def __repr__(self):
- return "<symbol '%s>" % self.name
+ return "symbol(%r)" % self.name
_symbol.__name__ = 'symbol'
diff --git a/lib/sqlalchemy/util/queue.py b/lib/sqlalchemy/util/queue.py
index 537526bef..82ff55a5d 100644
--- a/lib/sqlalchemy/util/queue.py
+++ b/lib/sqlalchemy/util/queue.py
@@ -1,5 +1,5 @@
# util/queue.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -25,14 +25,7 @@ within QueuePool.
from collections import deque
from time import time as _time
from .compat import threading
-import sys
-if sys.version_info < (2, 6):
- def notify_all(condition):
- condition.notify()
-else:
- def notify_all(condition):
- condition.notify_all()
__all__ = ['Empty', 'Full', 'Queue', 'SAAbort']
@@ -158,7 +151,6 @@ class Queue:
return an item if one is immediately available, else raise the
``Empty`` exception (`timeout` is ignored in that case).
"""
-
self.not_empty.acquire()
try:
if not block:
@@ -166,7 +158,11 @@ class Queue:
raise Empty
elif timeout is None:
while self._empty():
- self.not_empty.wait()
+ # wait for only half a second, then
+ # loop around, so that we can see a change in
+ # _sqla_abort_context in case we missed the notify_all()
+ # called by abort()
+ self.not_empty.wait(.5)
if self._sqla_abort_context:
raise SAAbort(self._sqla_abort_context)
else:
@@ -195,7 +191,10 @@ class Queue:
if not self.not_full.acquire(False):
return
try:
- notify_all(self.not_empty)
+ # note that this is now optional
+ # as the waiters in get() both loop around
+ # to check the _sqla_abort_context flag periodically
+ self.not_empty.notify_all()
finally:
self.not_full.release()
diff --git a/lib/sqlalchemy/util/topological.py b/lib/sqlalchemy/util/topological.py
index de3dfd0ae..fe7e76896 100644
--- a/lib/sqlalchemy/util/topological.py
+++ b/lib/sqlalchemy/util/topological.py
@@ -1,5 +1,5 @@
# util/topological.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/setup.cfg b/setup.cfg
index 61c868a16..feaa1cdde 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,6 +1,9 @@
[egg_info]
tag_build = dev
+[wheel]
+universal = 1
+
[nosetests]
with-sqla_testing = true
exclude = ^examples
diff --git a/setup.py b/setup.py
index 5b506f529..8e7e62829 100644
--- a/setup.py
+++ b/setup.py
@@ -137,7 +137,7 @@ def run_setup(with_cext):
**kwargs
)
-if pypy or jython or py3k:
+if pypy or jython:
run_setup(False)
status_msgs(
"WARNING: C extensions are not supported on " +
diff --git a/test/aaa_profiling/test_memusage.py b/test/aaa_profiling/test_memusage.py
index 20c6f0a65..3766abb88 100644
--- a/test/aaa_profiling/test_memusage.py
+++ b/test/aaa_profiling/test_memusage.py
@@ -47,6 +47,7 @@ def profile_memory(times=50):
gc_collect()
samples[x] = len(get_objects_skipping_sqlite_issue())
+
print("sample gc sizes:", samples)
assert len(_sessions) == 0
@@ -307,7 +308,7 @@ class MemUsageTest(EnsureZeroed):
finally:
metadata.drop_all()
- @testing.crashes('mysql+cymysql', 'blocking with cymysql >= 0.6')
+ @testing.crashes('mysql+cymysql', 'blocking')
def test_unicode_warnings(self):
metadata = MetaData(testing.db)
table1 = Table('mytable', metadata, Column('col1', Integer,
@@ -603,6 +604,7 @@ class MemUsageTest(EnsureZeroed):
# in pysqlite itself. background at:
# http://thread.gmane.org/gmane.comp.python.db.pysqlite.user/2290
+ @testing.crashes('mysql+cymysql', 'blocking')
def test_join_cache(self):
metadata = MetaData(testing.db)
table1 = Table('table1', metadata, Column('id', Integer,
diff --git a/test/aaa_profiling/test_orm.py b/test/aaa_profiling/test_orm.py
index 6d71468b7..2c1e84afb 100644
--- a/test/aaa_profiling/test_orm.py
+++ b/test/aaa_profiling/test_orm.py
@@ -310,3 +310,63 @@ class DeferOptionsTest(fixtures.MappedTest):
*[defer(letter) for letter in ['x', 'y', 'z', 'p', 'q', 'r']]).\
all()
+
+class AttributeOverheadTest(fixtures.MappedTest):
+ @classmethod
+ def define_tables(cls, metadata):
+ Table('parent', metadata, Column('id', Integer,
+ primary_key=True,
+ test_needs_autoincrement=True), Column('data',
+ String(20)))
+ Table('child', metadata, Column('id', Integer,
+ primary_key=True, test_needs_autoincrement=True),
+ Column('data', String(20)), Column('parent_id',
+ Integer, ForeignKey('parent.id'), nullable=False))
+
+ @classmethod
+ def setup_classes(cls):
+ class Parent(cls.Basic):
+ pass
+
+ class Child(cls.Basic):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ Child, Parent, parent, child = (cls.classes.Child,
+ cls.classes.Parent,
+ cls.tables.parent,
+ cls.tables.child)
+
+ mapper(Parent, parent, properties={'children':
+ relationship(Child, backref='parent')})
+ mapper(Child, child)
+
+
+ def test_attribute_set(self):
+ Parent, Child = self.classes.Parent, self.classes.Child
+ p1 = Parent()
+ c1 = Child()
+
+ @profiling.function_call_count()
+ def go():
+ for i in range(30):
+ c1.parent = p1
+ c1.parent = None
+ c1.parent = p1
+ del c1.parent
+ go()
+
+ def test_collection_append_remove(self):
+ Parent, Child = self.classes.Parent, self.classes.Child
+ p1 = Parent()
+ children = [Child() for i in range(100)]
+
+ @profiling.function_call_count()
+ def go():
+ for child in children:
+ p1.children.append(child)
+ for child in children:
+ p1.children.remove(child)
+ go()
+
diff --git a/test/aaa_profiling/test_resultset.py b/test/aaa_profiling/test_resultset.py
index bbd8c4dba..d2f8c2256 100644
--- a/test/aaa_profiling/test_resultset.py
+++ b/test/aaa_profiling/test_resultset.py
@@ -53,6 +53,7 @@ class ResultSetTest(fixtures.TestBase, AssertsExecutionResults):
c1 in row
go()
+
class ExecutionTest(fixtures.TestBase):
def test_minimal_connection_execute(self):
diff --git a/test/aaa_profiling/test_zoomark.py b/test/aaa_profiling/test_zoomark.py
index 145f3c594..d850782e0 100644
--- a/test/aaa_profiling/test_zoomark.py
+++ b/test/aaa_profiling/test_zoomark.py
@@ -30,7 +30,6 @@ class ZooMarkTest(fixtures.TestBase):
"""
__requires__ = 'cpython',
__only_on__ = 'postgresql+psycopg2'
- __skip_if__ = lambda : sys.version_info < (2, 5),
def test_baseline_0_setup(self):
global metadata
diff --git a/test/aaa_profiling/test_zoomark_orm.py b/test/aaa_profiling/test_zoomark_orm.py
index ddcad681a..c9d1438aa 100644
--- a/test/aaa_profiling/test_zoomark_orm.py
+++ b/test/aaa_profiling/test_zoomark_orm.py
@@ -32,7 +32,6 @@ class ZooMarkTest(fixtures.TestBase):
__requires__ = 'cpython',
__only_on__ = 'postgresql+psycopg2'
- __skip_if__ = lambda : sys.version_info < (2, 5),
def test_baseline_0_setup(self):
global metadata, session
diff --git a/test/base/test_events.py b/test/base/test_events.py
index 1e0568f27..e985f8d5b 100644
--- a/test/base/test_events.py
+++ b/test/base/test_events.py
@@ -6,15 +6,12 @@ from sqlalchemy import event, exc
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.util import gc_collect
from sqlalchemy.testing.mock import Mock, call
-
+from sqlalchemy import testing
class EventsTest(fixtures.TestBase):
"""Test class- and instance-level event registration."""
def setUp(self):
- assert 'event_one' not in event._registrars
- assert 'event_two' not in event._registrars
-
class TargetEvents(event.Events):
def event_one(self, x, y):
pass
@@ -30,7 +27,7 @@ class EventsTest(fixtures.TestBase):
self.Target = Target
def tearDown(self):
- event._remove_dispatcher(self.Target.__dict__['dispatch'].events)
+ event.base._remove_dispatcher(self.Target.__dict__['dispatch'].events)
def test_register_class(self):
def listen(x, y):
@@ -84,7 +81,7 @@ class EventsTest(fixtures.TestBase):
eq_(len(self.Target().dispatch.event_one), 2)
eq_(len(t1.dispatch.event_one), 3)
- def test_append_vs_insert(self):
+ def test_append_vs_insert_cls(self):
def listen_one(x, y):
pass
@@ -103,6 +100,26 @@ class EventsTest(fixtures.TestBase):
[listen_three, listen_one, listen_two]
)
+ def test_append_vs_insert_instance(self):
+ def listen_one(x, y):
+ pass
+
+ def listen_two(x, y):
+ pass
+
+ def listen_three(x, y):
+ pass
+
+ target = self.Target()
+ event.listen(target, "event_one", listen_one)
+ event.listen(target, "event_one", listen_two)
+ event.listen(target, "event_one", listen_three, insert=True)
+
+ eq_(
+ list(target.dispatch.event_one),
+ [listen_three, listen_one, listen_two]
+ )
+
def test_decorator(self):
@event.listens_for(self.Target, "event_one")
def listen_one(x, y):
@@ -189,7 +206,7 @@ class NamedCallTest(fixtures.TestBase):
self.TargetOne = TargetOne
def tearDown(self):
- event._remove_dispatcher(self.TargetOne.__dict__['dispatch'].events)
+ event.base._remove_dispatcher(self.TargetOne.__dict__['dispatch'].events)
def test_kw_accept(self):
@@ -261,7 +278,7 @@ class LegacySignatureTest(fixtures.TestBase):
self.TargetOne = TargetOne
def tearDown(self):
- event._remove_dispatcher(self.TargetOne.__dict__['dispatch'].events)
+ event.base._remove_dispatcher(self.TargetOne.__dict__['dispatch'].events)
def test_legacy_accept(self):
canary = Mock()
@@ -294,6 +311,26 @@ class LegacySignatureTest(fixtures.TestBase):
canary(x, y, kw)
self._test_legacy_accept_kw(inst, canary)
+ def test_legacy_accept_partial(self):
+ canary = Mock()
+ def evt(a, x, y, **kw):
+ canary(a, x, y, **kw)
+ from functools import partial
+ evt_partial = partial(evt, 5)
+ target = self.TargetOne()
+ event.listen(target, "event_four", evt_partial)
+ # can't do legacy accept on a partial; we can't inspect it
+ assert_raises(
+ TypeError,
+ target.dispatch.event_four, 4, 5, 6, 7, foo="bar"
+ )
+ target.dispatch.event_four(4, 5, foo="bar")
+ eq_(
+ canary.mock_calls,
+ [call(5, 4, 5, foo="bar")]
+ )
+
+
def _test_legacy_accept_kw(self, target, canary):
target.dispatch.event_four(4, 5, 6, 7, foo="bar")
@@ -375,7 +412,7 @@ class ClsLevelListenTest(fixtures.TestBase):
def tearDown(self):
- event._remove_dispatcher(self.TargetOne.__dict__['dispatch'].events)
+ event.base._remove_dispatcher(self.TargetOne.__dict__['dispatch'].events)
def setUp(self):
class TargetEventsOne(event.Events):
@@ -386,7 +423,7 @@ class ClsLevelListenTest(fixtures.TestBase):
self.TargetOne = TargetOne
def tearDown(self):
- event._remove_dispatcher(
+ event.base._remove_dispatcher(
self.TargetOne.__dict__['dispatch'].events)
def test_lis_subcalss_lis(self):
@@ -473,8 +510,8 @@ class AcceptTargetsTest(fixtures.TestBase):
self.TargetTwo = TargetTwo
def tearDown(self):
- event._remove_dispatcher(self.TargetOne.__dict__['dispatch'].events)
- event._remove_dispatcher(self.TargetTwo.__dict__['dispatch'].events)
+ event.base._remove_dispatcher(self.TargetOne.__dict__['dispatch'].events)
+ event.base._remove_dispatcher(self.TargetTwo.__dict__['dispatch'].events)
def test_target_accept(self):
"""Test that events of the same name are routed to the correct
@@ -543,7 +580,7 @@ class CustomTargetsTest(fixtures.TestBase):
self.Target = Target
def tearDown(self):
- event._remove_dispatcher(self.Target.__dict__['dispatch'].events)
+ event.base._remove_dispatcher(self.Target.__dict__['dispatch'].events)
def test_indirect(self):
def listen(x, y):
@@ -593,14 +630,14 @@ class ListenOverrideTest(fixtures.TestBase):
def setUp(self):
class TargetEvents(event.Events):
@classmethod
- def _listen(cls, target, identifier, fn, add=False):
+ def _listen(cls, event_key, add=False):
+ fn = event_key.fn
if add:
def adapt(x, y):
fn(x + y)
- else:
- adapt = fn
+ event_key = event_key.with_wrapper(adapt)
- event.Events._listen(target, identifier, adapt)
+ event_key.base_listen()
def event_one(self, x, y):
pass
@@ -610,7 +647,7 @@ class ListenOverrideTest(fixtures.TestBase):
self.Target = Target
def tearDown(self):
- event._remove_dispatcher(self.Target.__dict__['dispatch'].events)
+ event.base._remove_dispatcher(self.Target.__dict__['dispatch'].events)
def test_listen_override(self):
listen_one = Mock()
@@ -700,7 +737,7 @@ class JoinTest(fixtures.TestBase):
for cls in (self.TargetElement,
self.TargetFactory, self.BaseTarget):
if 'dispatch' in cls.__dict__:
- event._remove_dispatcher(cls.__dict__['dispatch'].events)
+ event.base._remove_dispatcher(cls.__dict__['dispatch'].events)
def test_neither(self):
element = self.TargetFactory().create()
@@ -842,13 +879,19 @@ class JoinTest(fixtures.TestBase):
element.run_event(2)
element.run_event(3)
- # c1 gets no events due to _JoinedListener
- # fixing the "parent" at construction time.
- # this can be changed to be "live" at the cost
- # of performance.
+ # if _JoinedListener fixes .listeners
+ # at construction time, then we don't get
+ # the new listeners.
+ #eq_(l1.mock_calls, [])
+
+ # alternatively, if _JoinedListener shares the list
+ # using a @property, then we get them, at the arguable
+ # expense of the extra method call to access the .listeners
+ # collection
eq_(
- l1.mock_calls, []
+ l1.mock_calls, [call(element, 2), call(element, 3)]
)
+
eq_(
l2.mock_calls,
[call(element, 1), call(element, 2), call(element, 3)]
@@ -892,3 +935,160 @@ class JoinTest(fixtures.TestBase):
l1.mock_calls,
[call(element, 1), call(element, 2), call(element, 3)]
)
+
+class RemovalTest(fixtures.TestBase):
+ def _fixture(self):
+ class TargetEvents(event.Events):
+ def event_one(self, x, y):
+ pass
+
+ def event_two(self, x):
+ pass
+
+ def event_three(self, x):
+ pass
+
+ class Target(object):
+ dispatch = event.dispatcher(TargetEvents)
+ return Target
+
+ def test_clslevel(self):
+ Target = self._fixture()
+
+ m1 = Mock()
+
+ event.listen(Target, "event_two", m1)
+
+ t1 = Target()
+ t1.dispatch.event_two("x")
+
+ event.remove(Target, "event_two", m1)
+
+ t1.dispatch.event_two("y")
+
+ eq_(m1.mock_calls, [call("x")])
+
+ def test_clslevel_subclass(self):
+ Target = self._fixture()
+ class SubTarget(Target):
+ pass
+
+ m1 = Mock()
+
+ event.listen(Target, "event_two", m1)
+
+ t1 = SubTarget()
+ t1.dispatch.event_two("x")
+
+ event.remove(Target, "event_two", m1)
+
+ t1.dispatch.event_two("y")
+
+ eq_(m1.mock_calls, [call("x")])
+
+ def test_instance(self):
+ Target = self._fixture()
+
+ class Foo(object):
+ def __init__(self):
+ self.mock = Mock()
+
+ def evt(self, arg):
+ self.mock(arg)
+
+ f1 = Foo()
+ f2 = Foo()
+
+ event.listen(Target, "event_one", f1.evt)
+ event.listen(Target, "event_one", f2.evt)
+
+ t1 = Target()
+ t1.dispatch.event_one("x")
+
+ event.remove(Target, "event_one", f1.evt)
+
+ t1.dispatch.event_one("y")
+
+ eq_(f1.mock.mock_calls, [call("x")])
+ eq_(f2.mock.mock_calls, [call("x"), call("y")])
+
+ def test_propagate(self):
+ Target = self._fixture()
+
+ m1 = Mock()
+
+ t1 = Target()
+ t2 = Target()
+
+ event.listen(t1, "event_one", m1, propagate=True)
+ event.listen(t1, "event_two", m1, propagate=False)
+
+ t2.dispatch._update(t1.dispatch)
+
+ t1.dispatch.event_one("t1e1x")
+ t1.dispatch.event_two("t1e2x")
+ t2.dispatch.event_one("t2e1x")
+ t2.dispatch.event_two("t2e2x")
+
+ event.remove(t1, "event_one", m1)
+ event.remove(t1, "event_two", m1)
+
+ t1.dispatch.event_one("t1e1y")
+ t1.dispatch.event_two("t1e2y")
+ t2.dispatch.event_one("t2e1y")
+ t2.dispatch.event_two("t2e2y")
+
+ eq_(m1.mock_calls,
+ [call('t1e1x'), call('t1e2x'),
+ call('t2e1x')])
+
+ @testing.requires.predictable_gc
+ def test_listener_collection_removed_cleanup(self):
+ from sqlalchemy.event import registry
+
+ Target = self._fixture()
+
+ m1 = Mock()
+
+ t1 = Target()
+
+ event.listen(t1, "event_one", m1)
+
+ key = (id(t1), "event_one", id(m1))
+
+ assert key in registry._key_to_collection
+ collection_ref = list(registry._key_to_collection[key])[0]
+ assert collection_ref in registry._collection_to_key
+
+ t1.dispatch.event_one("t1")
+
+ del t1
+
+ gc_collect()
+
+ assert key not in registry._key_to_collection
+ assert collection_ref not in registry._collection_to_key
+
+ def test_remove_not_listened(self):
+ Target = self._fixture()
+
+ m1 = Mock()
+
+ t1 = Target()
+
+ event.listen(t1, "event_one", m1, propagate=True)
+ event.listen(t1, "event_three", m1)
+
+ event.remove(t1, "event_one", m1)
+ assert_raises_message(
+ exc.InvalidRequestError,
+ r"No listeners found for event <.*Target.*> / 'event_two' / <Mock.*> ",
+ event.remove, t1, "event_two", m1
+ )
+
+ event.remove(t1, "event_three", m1)
+
+
+
+
+
diff --git a/test/base/test_utils.py b/test/base/test_utils.py
index aefc6d421..86e4b190a 100644
--- a/test/base/test_utils.py
+++ b/test/base/test_utils.py
@@ -1,10 +1,10 @@
import copy
-from sqlalchemy import util, sql, exc
+from sqlalchemy import util, sql, exc, testing
from sqlalchemy.testing import assert_raises, assert_raises_message, fixtures
from sqlalchemy.testing import eq_, is_, ne_, fails_if
-from sqlalchemy.testing.util import picklers
-from sqlalchemy.util import classproperty
+from sqlalchemy.testing.util import picklers, gc_collect
+from sqlalchemy.util import classproperty, WeakSequence, get_callable_argspec
class KeyedTupleTest():
@@ -115,6 +115,36 @@ class KeyedTupleTest():
keyed_tuple[0] = 100
assert_raises(TypeError, should_raise)
+class WeakSequenceTest(fixtures.TestBase):
+ @testing.requires.predictable_gc
+ def test_cleanout_elements(self):
+ class Foo(object):
+ pass
+ f1, f2, f3 = Foo(), Foo(), Foo()
+ w = WeakSequence([f1, f2, f3])
+ eq_(len(w), 3)
+ eq_(len(w._storage), 3)
+ del f2
+ gc_collect()
+ eq_(len(w), 2)
+ eq_(len(w._storage), 2)
+
+ @testing.requires.predictable_gc
+ def test_cleanout_appended(self):
+ class Foo(object):
+ pass
+ f1, f2, f3 = Foo(), Foo(), Foo()
+ w = WeakSequence()
+ w.append(f1)
+ w.append(f2)
+ w.append(f3)
+ eq_(len(w), 3)
+ eq_(len(w._storage), 3)
+ del f2
+ gc_collect()
+ eq_(len(w), 2)
+ eq_(len(w._storage), 2)
+
class OrderedDictTest(fixtures.TestBase):
@@ -1154,6 +1184,33 @@ class ArgInspectionTest(fixtures.TestBase):
test(f3)
test(f4)
+ def test_callable_argspec_fn(self):
+ def foo(x, y, **kw):
+ pass
+ eq_(
+ get_callable_argspec(foo),
+ (['x', 'y'], None, 'kw', None)
+ )
+
+ def test_callable_argspec_method(self):
+ class Foo(object):
+ def foo(self, x, y, **kw):
+ pass
+ eq_(
+ get_callable_argspec(Foo.foo),
+ (['self', 'x', 'y'], None, 'kw', None)
+ )
+
+ def test_callable_argspec_partial(self):
+ from functools import partial
+ def foo(x, y, z, **kw):
+ pass
+ bar = partial(foo, 5)
+
+ assert_raises(
+ ValueError,
+ get_callable_argspec, bar
+ )
class SymbolTest(fixtures.TestBase):
@@ -1389,6 +1446,55 @@ class GenericReprTest(fixtures.TestBase):
"Foo(b=5, d=7)"
)
+ def test_multi_kw(self):
+ class Foo(object):
+ def __init__(self, a, b, c=3, d=4):
+ self.a = a
+ self.b = b
+ self.c = c
+ self.d = d
+ class Bar(Foo):
+ def __init__(self, e, f, g=5, **kw):
+ self.e = e
+ self.f = f
+ self.g = g
+ super(Bar, self).__init__(**kw)
+
+ eq_(
+ util.generic_repr(
+ Bar('e', 'f', g=7, a=6, b=5, d=9),
+ to_inspect=[Bar, Foo]
+ ),
+ "Bar('e', 'f', g=7, a=6, b=5, d=9)"
+ )
+
+ eq_(
+ util.generic_repr(
+ Bar('e', 'f', a=6, b=5),
+ to_inspect=[Bar, Foo]
+ ),
+ "Bar('e', 'f', a=6, b=5)"
+ )
+
+ def test_multi_kw_repeated(self):
+ class Foo(object):
+ def __init__(self, a=1, b=2):
+ self.a = a
+ self.b = b
+ class Bar(Foo):
+ def __init__(self, b=3, c=4, **kw):
+ self.c = c
+ super(Bar, self).__init__(b=b, **kw)
+
+ eq_(
+ util.generic_repr(
+ Bar(a='a', b='b', c='c'),
+ to_inspect=[Bar, Foo]
+ ),
+ "Bar(b='b', c='c', a='a')"
+ )
+
+
def test_discard_vargs(self):
class Foo(object):
def __init__(self, a, b, *args):
@@ -1586,3 +1692,5 @@ class TestClassProperty(fixtures.TestBase):
return d
eq_(B.something, {'foo': 1, 'bazz': 2})
+
+
diff --git a/test/dialect/mssql/test_compiler.py b/test/dialect/mssql/test_compiler.py
index 87037c6a4..f12ab0330 100644
--- a/test/dialect/mssql/test_compiler.py
+++ b/test/dialect/mssql/test_compiler.py
@@ -510,6 +510,29 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
"CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,1))"
)
+ def test_table_pkc_clustering(self):
+ metadata = MetaData()
+ tbl = Table('test', metadata,
+ Column('x', Integer, autoincrement=False),
+ Column('y', Integer, autoincrement=False),
+ PrimaryKeyConstraint("x", "y", mssql_clustered=True))
+ self.assert_compile(schema.CreateTable(tbl),
+ "CREATE TABLE test (x INTEGER NOT NULL, y INTEGER NOT NULL, "
+ "PRIMARY KEY CLUSTERED (x, y))"
+ )
+
+ def test_table_uc_clustering(self):
+ metadata = MetaData()
+ tbl = Table('test', metadata,
+ Column('x', Integer, autoincrement=False),
+ Column('y', Integer, autoincrement=False),
+ PrimaryKeyConstraint("x"),
+ UniqueConstraint("y", mssql_clustered=True))
+ self.assert_compile(schema.CreateTable(tbl),
+ "CREATE TABLE test (x INTEGER NOT NULL, y INTEGER NULL, "
+ "PRIMARY KEY (x), UNIQUE CLUSTERED (y))"
+ )
+
def test_index_clustering(self):
metadata = MetaData()
tbl = Table('test', metadata,
@@ -528,6 +551,27 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
"CREATE INDEX foo ON test (x DESC, y)"
)
+ def test_create_index_expr(self):
+ m = MetaData()
+ t1 = Table('foo', m,
+ Column('x', Integer)
+ )
+ self.assert_compile(
+ schema.CreateIndex(Index("bar", t1.c.x > 5)),
+ "CREATE INDEX bar ON foo (x > 5)"
+ )
+
+ def test_drop_index_w_schema(self):
+ m = MetaData()
+ t1 = Table('foo', m,
+ Column('x', Integer),
+ schema='bar'
+ )
+ self.assert_compile(
+ schema.DropIndex(Index("idx_foo", t1.c.x)),
+ "DROP INDEX idx_foo ON bar.foo"
+ )
+
def test_index_extra_include_1(self):
metadata = MetaData()
tbl = Table('test', metadata,
diff --git a/test/dialect/mssql/test_engine.py b/test/dialect/mssql/test_engine.py
index 2834f35ec..c07f30040 100644
--- a/test/dialect/mssql/test_engine.py
+++ b/test/dialect/mssql/test_engine.py
@@ -131,10 +131,11 @@ class ParseConnectTest(fixtures.TestBase):
for error in [
'Adaptive Server connection timed out',
+ 'Net-Lib error during Connection reset by peer',
'message 20003',
- "Error 10054",
- "Not connected to any MS SQL server",
- "Connection is closed"
+ 'Error 10054',
+ 'Not connected to any MS SQL server',
+ 'Connection is closed'
]:
eq_(dialect.is_disconnect(error, None, None), True)
diff --git a/test/dialect/mssql/test_query.py b/test/dialect/mssql/test_query.py
index bff737145..6a12744a7 100644
--- a/test/dialect/mssql/test_query.py
+++ b/test/dialect/mssql/test_query.py
@@ -232,9 +232,10 @@ class QueryTest(testing.AssertsExecutionResults, fixtures.TestBase):
con.execute("""drop trigger paj""")
meta.drop_all()
+ @testing.fails_on_everything_except('mssql+pyodbc', 'pyodbc-specific feature')
@testing.provide_metadata
def test_disable_scope_identity(self):
- engine = engines.testing_engine(options={"use_scope_identity":False})
+ engine = engines.testing_engine(options={"use_scope_identity": False})
metadata = self.metadata
metadata.bind = engine
t1 = Table('t1', metadata,
diff --git a/test/dialect/mysql/test_compiler.py b/test/dialect/mysql/test_compiler.py
index a77a25cc4..45f8405c8 100644
--- a/test/dialect/mysql/test_compiler.py
+++ b/test/dialect/mysql/test_compiler.py
@@ -6,6 +6,7 @@ from sqlalchemy import sql, exc, schema, types as sqltypes
from sqlalchemy.dialects.mysql import base as mysql
from sqlalchemy.testing import fixtures, AssertsCompiledSQL
from sqlalchemy import testing
+from sqlalchemy.sql import table, column
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
@@ -94,19 +95,57 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
"CREATE TABLE testtbl (data VARCHAR(255), "
"PRIMARY KEY (data) USING btree)")
- def test_skip_deferrable_kw(self):
+ def test_create_index_expr(self):
+ m = MetaData()
+ t1 = Table('foo', m,
+ Column('x', Integer)
+ )
+ self.assert_compile(
+ schema.CreateIndex(Index("bar", t1.c.x > 5)),
+ "CREATE INDEX bar ON foo (x > 5)"
+ )
+
+ def test_deferrable_initially_kw_not_ignored(self):
m = MetaData()
t1 = Table('t1', m, Column('id', Integer, primary_key=True))
t2 = Table('t2', m, Column('id', Integer,
- ForeignKey('t1.id', deferrable=True),
+ ForeignKey('t1.id', deferrable=True, initially="XYZ"),
primary_key=True))
self.assert_compile(
schema.CreateTable(t2),
"CREATE TABLE t2 (id INTEGER NOT NULL, "
- "PRIMARY KEY (id), FOREIGN KEY(id) REFERENCES t1 (id))"
+ "PRIMARY KEY (id), FOREIGN KEY(id) REFERENCES t1 (id) DEFERRABLE INITIALLY XYZ)"
)
+ def test_match_kw_raises(self):
+ m = MetaData()
+ t1 = Table('t1', m, Column('id', Integer, primary_key=True))
+ t2 = Table('t2', m, Column('id', Integer,
+ ForeignKey('t1.id', match="XYZ"),
+ primary_key=True))
+
+ assert_raises_message(
+ exc.CompileError,
+ "MySQL ignores the 'MATCH' keyword while at the same time causes "
+ "ON UPDATE/ON DELETE clauses to be ignored.",
+ schema.CreateTable(t2).compile, dialect=mysql.dialect()
+ )
+
+ def test_for_update(self):
+ table1 = table('mytable',
+ column('myid'), column('name'), column('description'))
+
+ self.assert_compile(
+ table1.select(table1.c.myid == 7).with_for_update(),
+ "SELECT mytable.myid, mytable.name, mytable.description "
+ "FROM mytable WHERE mytable.myid = %s FOR UPDATE")
+
+ self.assert_compile(
+ table1.select(table1.c.myid == 7).with_for_update(read=True),
+ "SELECT mytable.myid, mytable.name, mytable.description "
+ "FROM mytable WHERE mytable.myid = %s LOCK IN SHARE MODE")
+
class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
"""Tests MySQL-dialect specific compilation."""
@@ -302,8 +341,10 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
(VARCHAR, "CAST(t.col AS CHAR)"),
(NCHAR, "CAST(t.col AS CHAR)"),
(CHAR, "CAST(t.col AS CHAR)"),
+ (m.CHAR(charset='utf8'), "CAST(t.col AS CHAR CHARACTER SET utf8)"),
(CLOB, "CAST(t.col AS CHAR)"),
(TEXT, "CAST(t.col AS CHAR)"),
+ (m.TEXT(charset='utf8'), "CAST(t.col AS CHAR CHARACTER SET utf8)"),
(String(32), "CAST(t.col AS CHAR(32))"),
(Unicode(32), "CAST(t.col AS CHAR(32))"),
(CHAR(32), "CAST(t.col AS CHAR(32))"),
diff --git a/test/dialect/mysql/test_dialect.py b/test/dialect/mysql/test_dialect.py
index 62bdfc81b..2ff17f0f7 100644
--- a/test/dialect/mysql/test_dialect.py
+++ b/test/dialect/mysql/test_dialect.py
@@ -9,12 +9,17 @@ from sqlalchemy.testing import engines
import datetime
class DialectTest(fixtures.TestBase):
- __only_on__ = 'mysql'
+ def test_ssl_arguments_mysqldb(self):
+ from sqlalchemy.dialects.mysql import mysqldb
+ dialect = mysqldb.dialect()
+ self._test_ssl_arguments(dialect)
- @testing.only_on(['mysql+mysqldb', 'mysql+oursql'],
- 'requires particular SSL arguments')
- def test_ssl_arguments(self):
- dialect = testing.db.dialect
+ def test_ssl_arguments_oursql(self):
+ from sqlalchemy.dialects.mysql import oursql
+ dialect = oursql.dialect()
+ self._test_ssl_arguments(dialect)
+
+ def _test_ssl_arguments(self, dialect):
kwarg = dialect.create_connect_args(
make_url("mysql://scott:tiger@localhost:3306/test"
"?ssl_ca=/ca.pem&ssl_cert=/cert.pem&ssl_key=/key.pem")
@@ -33,6 +38,50 @@ class DialectTest(fixtures.TestBase):
}
)
+ def test_mysqlconnector_buffered_arg(self):
+ from sqlalchemy.dialects.mysql import mysqlconnector
+ dialect = mysqlconnector.dialect()
+ kw = dialect.create_connect_args(
+ make_url("mysql+mysqlconnector://u:p@host/db?buffered=true")
+ )[1]
+ eq_(kw['buffered'], True)
+
+ kw = dialect.create_connect_args(
+ make_url("mysql+mysqlconnector://u:p@host/db?buffered=false")
+ )[1]
+ eq_(kw['buffered'], False)
+
+ kw = dialect.create_connect_args(
+ make_url("mysql+mysqlconnector://u:p@host/db")
+ )[1]
+ eq_(kw['buffered'], True)
+
+ def test_mysqlconnector_raise_on_warnings_arg(self):
+ from sqlalchemy.dialects.mysql import mysqlconnector
+ dialect = mysqlconnector.dialect()
+ kw = dialect.create_connect_args(
+ make_url("mysql+mysqlconnector://u:p@host/db?raise_on_warnings=true")
+ )[1]
+ eq_(kw['raise_on_warnings'], True)
+
+ kw = dialect.create_connect_args(
+ make_url("mysql+mysqlconnector://u:p@host/db?raise_on_warnings=false")
+ )[1]
+ eq_(kw['raise_on_warnings'], False)
+
+ kw = dialect.create_connect_args(
+ make_url("mysql+mysqlconnector://u:p@host/db")
+ )[1]
+ eq_(kw['raise_on_warnings'], True)
+
+ @testing.only_on('mysql')
+ def test_random_arg(self):
+ dialect = testing.db.dialect
+ kw = dialect.create_connect_args(
+ make_url("mysql://u:p@host/db?foo=true")
+ )[1]
+ eq_(kw['foo'], "true")
+
class SQLModeDetectionTest(fixtures.TestBase):
__only_on__ = 'mysql'
diff --git a/test/dialect/mysql/test_reflection.py b/test/dialect/mysql/test_reflection.py
index b9e347d41..7494eaf43 100644
--- a/test/dialect/mysql/test_reflection.py
+++ b/test/dialect/mysql/test_reflection.py
@@ -140,33 +140,33 @@ class ReflectionTest(fixtures.TestBase, AssertsExecutionResults):
@testing.uses_deprecated('Manually quoting ENUM value literals')
def test_type_reflection(self):
# (ask_for, roundtripped_as_if_different)
- specs = [( String(1), mysql.MSString(1), ),
- ( String(3), mysql.MSString(3), ),
- ( Text(), mysql.MSText(), ),
- ( Unicode(1), mysql.MSString(1), ),
- ( Unicode(3), mysql.MSString(3), ),
- ( UnicodeText(), mysql.MSText(), ),
- ( mysql.MSChar(1), ),
- ( mysql.MSChar(3), ),
- ( NCHAR(2), mysql.MSChar(2), ),
- ( mysql.MSNChar(2), mysql.MSChar(2), ), # N is CREATE only
- ( mysql.MSNVarChar(22), mysql.MSString(22), ),
- ( SmallInteger(), mysql.MSSmallInteger(), ),
- ( SmallInteger(), mysql.MSSmallInteger(4), ),
- ( mysql.MSSmallInteger(), ),
- ( mysql.MSSmallInteger(4), mysql.MSSmallInteger(4), ),
- ( mysql.MSMediumInteger(), mysql.MSMediumInteger(), ),
- ( mysql.MSMediumInteger(8), mysql.MSMediumInteger(8), ),
- ( LargeBinary(3), mysql.TINYBLOB(), ),
- ( LargeBinary(), mysql.BLOB() ),
- ( mysql.MSBinary(3), mysql.MSBinary(3), ),
- ( mysql.MSVarBinary(3),),
- ( mysql.MSTinyBlob(),),
- ( mysql.MSBlob(),),
- ( mysql.MSBlob(1234), mysql.MSBlob()),
- ( mysql.MSMediumBlob(),),
- ( mysql.MSLongBlob(),),
- ( mysql.ENUM("''","'fleem'"), ),
+ specs = [(String(1), mysql.MSString(1), ),
+ (String(3), mysql.MSString(3), ),
+ (Text(), mysql.MSText(), ),
+ (Unicode(1), mysql.MSString(1), ),
+ (Unicode(3), mysql.MSString(3), ),
+ (UnicodeText(), mysql.MSText(), ),
+ (mysql.MSChar(1), ),
+ (mysql.MSChar(3), ),
+ (NCHAR(2), mysql.MSChar(2), ),
+ (mysql.MSNChar(2), mysql.MSChar(2), ), # N is CREATE only
+ (mysql.MSNVarChar(22), mysql.MSString(22), ),
+ (SmallInteger(), mysql.MSSmallInteger(), ),
+ (SmallInteger(), mysql.MSSmallInteger(4), ),
+ (mysql.MSSmallInteger(), ),
+ (mysql.MSSmallInteger(4), mysql.MSSmallInteger(4), ),
+ (mysql.MSMediumInteger(), mysql.MSMediumInteger(), ),
+ (mysql.MSMediumInteger(8), mysql.MSMediumInteger(8), ),
+ (LargeBinary(3), mysql.TINYBLOB(), ),
+ (LargeBinary(), mysql.BLOB() ),
+ (mysql.MSBinary(3), mysql.MSBinary(3), ),
+ (mysql.MSVarBinary(3),),
+ (mysql.MSTinyBlob(),),
+ (mysql.MSBlob(),),
+ (mysql.MSBlob(1234), mysql.MSBlob()),
+ (mysql.MSMediumBlob(),),
+ (mysql.MSLongBlob(),),
+ (mysql.ENUM("''","'fleem'"), ),
]
columns = [Column('c%i' % (i + 1), t[0]) for i, t in enumerate(specs)]
@@ -298,3 +298,22 @@ class RawReflectionTest(fixtures.TestBase):
assert regex.match(' PRIMARY KEY USING BTREE (`id`)')
assert regex.match(' PRIMARY KEY (`id`) USING BTREE')
+ def test_fk_reflection(self):
+ regex = self.parser._re_constraint
+
+ m = regex.match(' CONSTRAINT `addresses_user_id_fkey` '
+ 'FOREIGN KEY (`user_id`) '
+ 'REFERENCES `users` (`id`) '
+ 'ON DELETE CASCADE ON UPDATE CASCADE')
+ eq_(m.groups(), ('addresses_user_id_fkey', '`user_id`',
+ '`users`', '`id`', None, 'CASCADE', 'CASCADE'))
+
+
+ m = regex.match(' CONSTRAINT `addresses_user_id_fkey` '
+ 'FOREIGN KEY (`user_id`) '
+ 'REFERENCES `users` (`id`) '
+ 'ON DELETE CASCADE ON UPDATE SET NULL')
+ eq_(m.groups(), ('addresses_user_id_fkey', '`user_id`',
+ '`users`', '`id`', None, 'CASCADE', 'SET NULL'))
+
+
diff --git a/test/dialect/mysql/test_types.py b/test/dialect/mysql/test_types.py
index b918abe25..acf9c1e2f 100644
--- a/test/dialect/mysql/test_types.py
+++ b/test/dialect/mysql/test_types.py
@@ -4,12 +4,13 @@ from sqlalchemy.testing import eq_, assert_raises
from sqlalchemy import *
from sqlalchemy import sql, exc, schema
from sqlalchemy.util import u
+from sqlalchemy import util
from sqlalchemy.dialects.mysql import base as mysql
from sqlalchemy.testing import fixtures, AssertsCompiledSQL, AssertsExecutionResults
from sqlalchemy import testing
from sqlalchemy.testing.engines import utf8_engine
import datetime
-
+import decimal
class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
"Test MySQL column types"
@@ -141,10 +142,36 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
]
for type_, args, kw, res in columns:
+ type_inst = type_(*args, **kw)
self.assert_compile(
- type_(*args, **kw),
+ type_inst,
res
)
+ # test that repr() copies out all arguments
+ self.assert_compile(
+ eval("mysql.%r" % type_inst),
+ res
+ )
+
+ @testing.only_if('mysql')
+ @testing.provide_metadata
+ def test_precision_float_roundtrip(self):
+ t = Table('t', self.metadata,
+ Column('scale_value', mysql.DOUBLE(
+ precision=15, scale=12, asdecimal=True)),
+ Column('unscale_value', mysql.DOUBLE(
+ decimal_return_scale=12, asdecimal=True))
+ )
+ t.create(testing.db)
+ testing.db.execute(
+ t.insert(), scale_value=45.768392065789,
+ unscale_value=45.768392065789
+ )
+ result = testing.db.scalar(select([t.c.scale_value]))
+ eq_(result, decimal.Decimal("45.768392065789"))
+
+ result = testing.db.scalar(select([t.c.unscale_value]))
+ eq_(result, decimal.Decimal("45.768392065789"))
@testing.exclude('mysql', '<', (4, 1, 1), 'no charset support')
def test_charset(self):
@@ -212,14 +239,22 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
(mysql.ENUM, ["foo", "bar"], {'unicode':True},
'''ENUM('foo','bar') UNICODE'''),
- (String, [20], {"collation":"utf8"}, 'VARCHAR(20) COLLATE utf8')
+ (String, [20], {"collation": "utf8"}, 'VARCHAR(20) COLLATE utf8')
]
for type_, args, kw, res in columns:
+ type_inst = type_(*args, **kw)
self.assert_compile(
- type_(*args, **kw),
+ type_inst,
+ res
+ )
+ # test that repr() copies out all arguments
+ self.assert_compile(
+ eval("mysql.%r" % type_inst)
+ if type_ is not String
+ else eval("%r" % type_inst),
res
)
@@ -229,15 +264,23 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
def test_charset_collate_table(self):
t = Table('foo', self.metadata,
Column('id', Integer),
+ Column('data', UnicodeText),
mysql_default_charset='utf8',
- mysql_collate='utf8_unicode_ci'
+ mysql_collate='utf8_bin'
)
t.create()
m2 = MetaData(testing.db)
t2 = Table('foo', m2, autoload=True)
- eq_(t2.kwargs['mysql_collate'], 'utf8_unicode_ci')
+ eq_(t2.kwargs['mysql_collate'], 'utf8_bin')
eq_(t2.kwargs['mysql_default charset'], 'utf8')
+ # test [ticket:2906]
+ # in order to test the condition here, need to use
+ # MySQLdb 1.2.3 and also need to pass either use_unicode=1
+ # or charset=utf8 to the URL.
+ t.insert().execute(id=1, data=u('some text'))
+ assert isinstance(testing.db.scalar(select([t.c.data])), util.text_type)
+
def test_bit_50(self):
"""Exercise BIT types on 5.0+ (not valid for all engine types)"""
@@ -250,7 +293,9 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
@testing.only_if('mysql')
@testing.exclude('mysql', '<', (5, 0, 5), 'a 5.0+ feature')
- @testing.fails_on('mysql+oursql', 'some round trips fail, oursql bug ?')
+ @testing.fails_if(
+ lambda: testing.against("mysql+oursql") and util.py3k,
+ 'some round trips fail, oursql bug ?')
@testing.provide_metadata
def test_bit_50_roundtrip(self):
bit_table = Table('mysql_bits', self.metadata,
@@ -474,72 +519,24 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
self.assert_(colspec(table.c.y1).startswith('y1 YEAR'))
eq_(colspec(table.c.y5), 'y5 YEAR(4)')
- @testing.only_if('mysql')
- @testing.provide_metadata
- def test_set(self):
- """Exercise the SET type."""
- set_table = Table('mysql_set', self.metadata,
- Column('s1',
- mysql.MSSet("'dq'", "'sq'")), Column('s2',
- mysql.MSSet("'a'")), Column('s3',
- mysql.MSSet("'5'", "'7'", "'9'")))
- eq_(colspec(set_table.c.s1), "s1 SET('dq','sq')")
- eq_(colspec(set_table.c.s2), "s2 SET('a')")
- eq_(colspec(set_table.c.s3), "s3 SET('5','7','9')")
- set_table.create()
- reflected = Table('mysql_set', MetaData(testing.db),
- autoload=True)
- for table in set_table, reflected:
-
- def roundtrip(store, expected=None):
- expected = expected or store
- table.insert(store).execute()
- row = table.select().execute().first()
- self.assert_(list(row) == expected)
- table.delete().execute()
-
- roundtrip([None, None, None], [None] * 3)
- roundtrip(['', '', ''], [set([''])] * 3)
- roundtrip([set(['dq']), set(['a']), set(['5'])])
- roundtrip(['dq', 'a', '5'], [set(['dq']), set(['a']),
- set(['5'])])
- roundtrip([1, 1, 1], [set(['dq']), set(['a']), set(['5'
- ])])
- roundtrip([set(['dq', 'sq']), None, set(['9', '5', '7'
- ])])
- set_table.insert().execute({'s3': set(['5'])},
- {'s3': set(['5', '7'])}, {'s3': set(['5', '7', '9'])},
- {'s3': set(['7', '9'])})
-
- # NOTE: the string sent to MySQL here is sensitive to ordering.
- # for some reason the set ordering is always "5, 7" when we test on
- # MySQLdb but in Py3K this is not guaranteed. So basically our
- # SET type doesn't do ordering correctly (not sure how it can,
- # as we don't know how the SET was configured in the first place.)
- rows = select([set_table.c.s3],
- set_table.c.s3.in_([set(['5']), ['5', '7']])
- ).execute().fetchall()
- found = set([frozenset(row[0]) for row in rows])
- eq_(found, set([frozenset(['5']), frozenset(['5', '7'])]))
-
-class EnumTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
+class EnumSetTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
__only_on__ = 'mysql'
__dialect__ = mysql.dialect()
- @testing.uses_deprecated('Manually quoting ENUM value literals')
@testing.provide_metadata
def test_enum(self):
"""Exercise the ENUM type."""
+ with testing.expect_deprecated('Manually quoting ENUM value literals'):
+ e1, e2 = mysql.ENUM("'a'", "'b'"), mysql.ENUM("'a'", "'b'")
+
enum_table = Table('mysql_enum', self.metadata,
- Column('e1', mysql.ENUM("'a'", "'b'")),
- Column('e2', mysql.ENUM("'a'", "'b'"),
- nullable=False),
- Column('e2generic', Enum("a", "b"),
- nullable=False),
+ Column('e1', e1),
+ Column('e2', e2, nullable=False),
+ Column('e2generic', Enum("a", "b"), nullable=False),
Column('e3', mysql.ENUM("'a'", "'b'", strict=True)),
Column('e4', mysql.ENUM("'a'", "'b'", strict=True),
nullable=False),
@@ -587,6 +584,106 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
eq_(res, expected)
+ @testing.provide_metadata
+ def test_set(self):
+
+ with testing.expect_deprecated('Manually quoting SET value literals'):
+ e1, e2 = mysql.SET("'a'", "'b'"), mysql.SET("'a'", "'b'")
+
+ set_table = Table('mysql_set', self.metadata,
+ Column('e1', e1),
+ Column('e2', e2, nullable=False),
+ Column('e3', mysql.SET("a", "b")),
+ Column('e4', mysql.SET("'a'", "b")),
+ Column('e5', mysql.SET("'a'", "'b'", quoting="quoted"))
+ )
+
+ eq_(colspec(set_table.c.e1),
+ "e1 SET('a','b')")
+ eq_(colspec(set_table.c.e2),
+ "e2 SET('a','b') NOT NULL")
+ eq_(colspec(set_table.c.e3),
+ "e3 SET('a','b')")
+ eq_(colspec(set_table.c.e4),
+ "e4 SET('''a''','b')")
+ eq_(colspec(set_table.c.e5),
+ "e5 SET('a','b')")
+ set_table.create()
+
+ assert_raises(exc.DBAPIError, set_table.insert().execute,
+ e1=None, e2=None, e3=None, e4=None)
+
+ if testing.against("+oursql"):
+ assert_raises(exc.StatementError, set_table.insert().execute,
+ e1='c', e2='c', e3='c', e4='c')
+
+ set_table.insert().execute(e1='a', e2='a', e3='a', e4="'a'", e5="a,b")
+ set_table.insert().execute(e1='b', e2='b', e3='b', e4='b', e5="a,b")
+
+ res = set_table.select().execute().fetchall()
+
+ if testing.against("+oursql"):
+ expected = [
+ # 1st row with all c's, data truncated
+ (set(['']), set(['']), set(['']), set(['']), None),
+ ]
+ else:
+ expected = []
+
+ expected.extend([
+ (set(['a']), set(['a']), set(['a']), set(["'a'"]), set(['a', 'b'])),
+ (set(['b']), set(['b']), set(['b']), set(['b']), set(['a', 'b']))
+ ])
+
+ eq_(res, expected)
+
+ @testing.provide_metadata
+ def test_set_roundtrip_plus_reflection(self):
+ set_table = Table('mysql_set', self.metadata,
+ Column('s1',
+ mysql.SET("dq", "sq")),
+ Column('s2', mysql.SET("a")),
+ Column('s3', mysql.SET("5", "7", "9")))
+
+ eq_(colspec(set_table.c.s1), "s1 SET('dq','sq')")
+ eq_(colspec(set_table.c.s2), "s2 SET('a')")
+ eq_(colspec(set_table.c.s3), "s3 SET('5','7','9')")
+ set_table.create()
+ reflected = Table('mysql_set', MetaData(testing.db),
+ autoload=True)
+ for table in set_table, reflected:
+
+ def roundtrip(store, expected=None):
+ expected = expected or store
+ table.insert(store).execute()
+ row = table.select().execute().first()
+ self.assert_(list(row) == expected)
+ table.delete().execute()
+
+ roundtrip([None, None, None], [None] * 3)
+ roundtrip(['', '', ''], [set([''])] * 3)
+ roundtrip([set(['dq']), set(['a']), set(['5'])])
+ roundtrip(['dq', 'a', '5'], [set(['dq']), set(['a']),
+ set(['5'])])
+ roundtrip([1, 1, 1], [set(['dq']), set(['a']), set(['5'
+ ])])
+ roundtrip([set(['dq', 'sq']), None, set(['9', '5', '7'
+ ])])
+ set_table.insert().execute({'s3': set(['5'])},
+ {'s3': set(['5', '7'])}, {'s3': set(['5', '7', '9'])},
+ {'s3': set(['7', '9'])})
+
+ # NOTE: the string sent to MySQL here is sensitive to ordering.
+ # for some reason the set ordering is always "5, 7" when we test on
+ # MySQLdb but in Py3K this is not guaranteed. So basically our
+ # SET type doesn't do ordering correctly (not sure how it can,
+ # as we don't know how the SET was configured in the first place.)
+ rows = select([set_table.c.s3],
+ set_table.c.s3.in_([set(['5']), ['5', '7']])
+ ).execute().fetchall()
+ found = set([frozenset(row[0]) for row in rows])
+ eq_(found, set([frozenset(['5']), frozenset(['5', '7'])]))
+
def test_unicode_enum(self):
unicode_engine = utf8_engine()
metadata = MetaData(unicode_engine)
@@ -634,38 +731,64 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
"VARCHAR(1), CHECK (somecolumn IN ('x', "
"'y', 'z')))")
+ @testing.provide_metadata
@testing.exclude('mysql', '<', (4,), "3.23 can't handle an ENUM of ''")
- @testing.uses_deprecated('Manually quoting ENUM value literals')
def test_enum_parse(self):
- """More exercises for the ENUM type."""
- # MySQL 3.23 can't handle an ENUM of ''....
-
- enum_table = Table('mysql_enum', MetaData(testing.db),
- Column('e1', mysql.ENUM("'a'")),
- Column('e2', mysql.ENUM("''")),
- Column('e3', mysql.ENUM('a')),
- Column('e4', mysql.ENUM('')),
- Column('e5', mysql.ENUM("'a'", "''")),
- Column('e6', mysql.ENUM("''", "'a'")),
- Column('e7', mysql.ENUM("''", "'''a'''", "'b''b'", "''''")))
+ with testing.expect_deprecated('Manually quoting ENUM value literals'):
+ enum_table = Table('mysql_enum', self.metadata,
+ Column('e1', mysql.ENUM("'a'")),
+ Column('e2', mysql.ENUM("''")),
+ Column('e3', mysql.ENUM('a')),
+ Column('e4', mysql.ENUM('')),
+ Column('e5', mysql.ENUM("'a'", "''")),
+ Column('e6', mysql.ENUM("''", "'a'")),
+ Column('e7', mysql.ENUM("''", "'''a'''", "'b''b'", "''''")))
for col in enum_table.c:
self.assert_(repr(col))
- try:
- enum_table.create()
- reflected = Table('mysql_enum', MetaData(testing.db),
- autoload=True)
- for t in enum_table, reflected:
- eq_(t.c.e1.type.enums, ("a",))
- eq_(t.c.e2.type.enums, ("",))
- eq_(t.c.e3.type.enums, ("a",))
- eq_(t.c.e4.type.enums, ("",))
- eq_(t.c.e5.type.enums, ("a", ""))
- eq_(t.c.e6.type.enums, ("", "a"))
- eq_(t.c.e7.type.enums, ("", "'a'", "b'b", "'"))
- finally:
- enum_table.drop()
+
+ enum_table.create()
+ reflected = Table('mysql_enum', MetaData(testing.db),
+ autoload=True)
+ for t in enum_table, reflected:
+ eq_(t.c.e1.type.enums, ("a",))
+ eq_(t.c.e2.type.enums, ("",))
+ eq_(t.c.e3.type.enums, ("a",))
+ eq_(t.c.e4.type.enums, ("",))
+ eq_(t.c.e5.type.enums, ("a", ""))
+ eq_(t.c.e6.type.enums, ("", "a"))
+ eq_(t.c.e7.type.enums, ("", "'a'", "b'b", "'"))
+
+ @testing.provide_metadata
+ @testing.exclude('mysql', '<', (5,))
+ def test_set_parse(self):
+ with testing.expect_deprecated('Manually quoting SET value literals'):
+ set_table = Table('mysql_set', self.metadata,
+ Column('e1', mysql.SET("'a'")),
+ Column('e2', mysql.SET("''")),
+ Column('e3', mysql.SET('a')),
+ Column('e4', mysql.SET('')),
+ Column('e5', mysql.SET("'a'", "''")),
+ Column('e6', mysql.SET("''", "'a'")),
+ Column('e7', mysql.SET("''", "'''a'''", "'b''b'", "''''")))
+
+ for col in set_table.c:
+ self.assert_(repr(col))
+
+ set_table.create()
+
+ # don't want any warnings on reflection
+ reflected = Table('mysql_set', MetaData(testing.db),
+ autoload=True)
+ for t in set_table, reflected:
+ eq_(t.c.e1.type.values, ("a",))
+ eq_(t.c.e2.type.values, ("",))
+ eq_(t.c.e3.type.values, ("a",))
+ eq_(t.c.e4.type.values, ("",))
+ eq_(t.c.e5.type.values, ("a", ""))
+ eq_(t.c.e6.type.values, ("", "a"))
+ eq_(t.c.e7.type.values, ("", "'a'", "b'b", "'"))
def colspec(c):
return testing.db.dialect.ddl_compiler(
diff --git a/test/dialect/postgresql/test_compiler.py b/test/dialect/postgresql/test_compiler.py
index 11661b11f..e64afb186 100644
--- a/test/dialect/postgresql/test_compiler.py
+++ b/test/dialect/postgresql/test_compiler.py
@@ -16,6 +16,7 @@ from sqlalchemy.dialects.postgresql import base as postgresql
from sqlalchemy.dialects.postgresql import TSRANGE
from sqlalchemy.orm import mapper, aliased, Session
from sqlalchemy.sql import table, column, operators
+from sqlalchemy.util import u
class SequenceTest(fixtures.TestBase, AssertsCompiledSQL):
@@ -106,6 +107,45 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
'AS length_1', dialect=dialect)
+ def test_create_drop_enum(self):
+ # test escaping and unicode within CREATE TYPE for ENUM
+ typ = postgresql.ENUM(
+ "val1", "val2", "val's 3", u('méil'), name="myname")
+ self.assert_compile(postgresql.CreateEnumType(typ),
+ u("CREATE TYPE myname AS ENUM ('val1', 'val2', 'val''s 3', 'méil')")
+ )
+
+ typ = postgresql.ENUM(
+ "val1", "val2", "val's 3", name="PleaseQuoteMe")
+ self.assert_compile(postgresql.CreateEnumType(typ),
+ "CREATE TYPE \"PleaseQuoteMe\" AS ENUM "
+ "('val1', 'val2', 'val''s 3')"
+ )
+
+ def test_generic_enum(self):
+ e1 = Enum('x', 'y', 'z', name='somename')
+ e2 = Enum('x', 'y', 'z', name='somename', schema='someschema')
+ self.assert_compile(postgresql.CreateEnumType(e1),
+ "CREATE TYPE somename AS ENUM ('x', 'y', 'z')"
+ )
+ self.assert_compile(postgresql.CreateEnumType(e2),
+ "CREATE TYPE someschema.somename AS ENUM "
+ "('x', 'y', 'z')")
+ self.assert_compile(postgresql.DropEnumType(e1),
+ 'DROP TYPE somename')
+ self.assert_compile(postgresql.DropEnumType(e2),
+ 'DROP TYPE someschema.somename')
+ t1 = Table('sometable', MetaData(), Column('somecolumn', e1))
+ self.assert_compile(schema.CreateTable(t1),
+ 'CREATE TABLE sometable (somecolumn '
+ 'somename)')
+ t1 = Table('sometable', MetaData(), Column('somecolumn',
+ Enum('x', 'y', 'z', native_enum=False)))
+ self.assert_compile(schema.CreateTable(t1),
+ "CREATE TABLE sometable (somecolumn "
+ "VARCHAR(1), CHECK (somecolumn IN ('x', "
+ "'y', 'z')))")
+
def test_create_partial_index(self):
m = MetaData()
tbl = Table('testtbl', m, Column('data', Integer))
@@ -173,6 +213,27 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
'USING hash (data)',
dialect=postgresql.dialect())
+
+ def test_create_index_expr_gets_parens(self):
+ m = MetaData()
+ tbl = Table('testtbl', m, Column('x', Integer), Column('y', Integer))
+
+ idx1 = Index('test_idx1', 5 / (tbl.c.x + tbl.c.y))
+ self.assert_compile(
+ schema.CreateIndex(idx1),
+ "CREATE INDEX test_idx1 ON testtbl ((5 / (x + y)))"
+ )
+
+ def test_create_index_literals(self):
+ m = MetaData()
+ tbl = Table('testtbl', m, Column('data', Integer))
+
+ idx1 = Index('test_idx1', tbl.c.data + 5)
+ self.assert_compile(
+ schema.CreateIndex(idx1),
+ "CREATE INDEX test_idx1 ON testtbl ((data + 5))"
+ )
+
def test_exclude_constraint_min(self):
m = MetaData()
tbl = Table('testtbl', m,
@@ -228,6 +289,68 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
'SUBSTRING(%(substring_1)s FROM %(substring_2)s)')
+ def test_for_update(self):
+ table1 = table('mytable',
+ column('myid'), column('name'), column('description'))
+
+ self.assert_compile(
+ table1.select(table1.c.myid == 7).with_for_update(),
+ "SELECT mytable.myid, mytable.name, mytable.description "
+ "FROM mytable WHERE mytable.myid = %(myid_1)s FOR UPDATE")
+
+ self.assert_compile(
+ table1.select(table1.c.myid == 7).with_for_update(nowait=True),
+ "SELECT mytable.myid, mytable.name, mytable.description "
+ "FROM mytable WHERE mytable.myid = %(myid_1)s FOR UPDATE NOWAIT")
+
+ self.assert_compile(
+ table1.select(table1.c.myid == 7).with_for_update(read=True),
+ "SELECT mytable.myid, mytable.name, mytable.description "
+ "FROM mytable WHERE mytable.myid = %(myid_1)s FOR SHARE")
+
+ self.assert_compile(
+ table1.select(table1.c.myid == 7).
+ with_for_update(read=True, nowait=True),
+ "SELECT mytable.myid, mytable.name, mytable.description "
+ "FROM mytable WHERE mytable.myid = %(myid_1)s FOR SHARE NOWAIT")
+
+ self.assert_compile(
+ table1.select(table1.c.myid == 7).
+ with_for_update(of=table1.c.myid),
+ "SELECT mytable.myid, mytable.name, mytable.description "
+ "FROM mytable WHERE mytable.myid = %(myid_1)s "
+ "FOR UPDATE OF mytable")
+
+ self.assert_compile(
+ table1.select(table1.c.myid == 7).
+ with_for_update(read=True, nowait=True, of=table1),
+ "SELECT mytable.myid, mytable.name, mytable.description "
+ "FROM mytable WHERE mytable.myid = %(myid_1)s "
+ "FOR SHARE OF mytable NOWAIT")
+
+ self.assert_compile(
+ table1.select(table1.c.myid == 7).
+ with_for_update(read=True, nowait=True, of=table1.c.myid),
+ "SELECT mytable.myid, mytable.name, mytable.description "
+ "FROM mytable WHERE mytable.myid = %(myid_1)s "
+ "FOR SHARE OF mytable NOWAIT")
+
+ self.assert_compile(
+ table1.select(table1.c.myid == 7).
+ with_for_update(read=True, nowait=True,
+ of=[table1.c.myid, table1.c.name]),
+ "SELECT mytable.myid, mytable.name, mytable.description "
+ "FROM mytable WHERE mytable.myid = %(myid_1)s "
+ "FOR SHARE OF mytable NOWAIT")
+
+ ta = table1.alias()
+ self.assert_compile(
+ ta.select(ta.c.myid == 7).
+ with_for_update(of=[ta.c.myid, ta.c.name]),
+ "SELECT mytable_1.myid, mytable_1.name, mytable_1.description "
+ "FROM mytable AS mytable_1 "
+ "WHERE mytable_1.myid = %(myid_1)s FOR UPDATE OF mytable_1"
+ )
def test_reserved_words(self):
diff --git a/test/dialect/postgresql/test_dialect.py b/test/dialect/postgresql/test_dialect.py
index 1fc239cb7..fd6df2c98 100644
--- a/test/dialect/postgresql/test_dialect.py
+++ b/test/dialect/postgresql/test_dialect.py
@@ -17,6 +17,7 @@ from sqlalchemy.dialects.postgresql import base as postgresql
import logging
import logging.handlers
from sqlalchemy.testing.mock import Mock
+from sqlalchemy.engine.reflection import Inspector
class MiscTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
@@ -53,7 +54,11 @@ class MiscTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
'compiled by GCC gcc (GCC) 4.4.2, 64-bit', (8, 5)),
('EnterpriseDB 9.1.2.2 on x86_64-unknown-linux-gnu, '
'compiled by gcc (GCC) 4.1.2 20080704 (Red Hat 4.1.2-50), '
- '64-bit', (9, 1, 2))]:
+ '64-bit', (9, 1, 2)),
+ ('[PostgreSQL 9.2.4 ] VMware vFabric Postgres 9.2.4.0 '
+ 'release build 1080137', (9, 2, 4))
+
+ ]:
eq_(testing.db.dialect._get_server_version_info(mock_conn(string)),
version)
@@ -63,8 +68,10 @@ class MiscTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
assert testing.db.dialect.dbapi.__version__.\
startswith(".".join(str(x) for x in v))
+ # currently not passing with pg 9.3 that does not seem to generate
+    # any notices here, would rather find a way to mock this
@testing.only_on('postgresql+psycopg2', 'psycopg2-specific feature')
- def test_notice_logging(self):
+ def _test_notice_logging(self):
log = logging.getLogger('sqlalchemy.dialects.postgresql')
buf = logging.handlers.BufferingHandler(100)
lev = log.level
@@ -199,18 +206,32 @@ class MiscTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
assert_raises(exc.InvalidRequestError, testing.db.execute, stmt)
def test_serial_integer(self):
- for type_, expected in [
- (Integer, 'SERIAL'),
- (BigInteger, 'BIGSERIAL'),
- (SmallInteger, 'SMALLINT'),
- (postgresql.INTEGER, 'SERIAL'),
- (postgresql.BIGINT, 'BIGSERIAL'),
+
+ for version, type_, expected in [
+ (None, Integer, 'SERIAL'),
+ (None, BigInteger, 'BIGSERIAL'),
+ ((9, 1), SmallInteger, 'SMALLINT'),
+ ((9, 2), SmallInteger, 'SMALLSERIAL'),
+ (None, postgresql.INTEGER, 'SERIAL'),
+ (None, postgresql.BIGINT, 'BIGSERIAL'),
]:
m = MetaData()
t = Table('t', m, Column('c', type_, primary_key=True))
- ddl_compiler = testing.db.dialect.ddl_compiler(testing.db.dialect, schema.CreateTable(t))
+
+ if version:
+ dialect = postgresql.dialect()
+ dialect._get_server_version_info = Mock(return_value=version)
+ dialect.initialize(testing.db.connect())
+ else:
+ dialect = testing.db.dialect
+
+ ddl_compiler = dialect.ddl_compiler(
+ dialect,
+ schema.CreateTable(t)
+ )
eq_(
ddl_compiler.get_column_specification(t.c.c),
"c %s NOT NULL" % expected
)
+
diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py
index fb399b546..58f34d5d0 100644
--- a/test/dialect/postgresql/test_reflection.py
+++ b/test/dialect/postgresql/test_reflection.py
@@ -5,6 +5,7 @@ from sqlalchemy.testing.assertions import eq_, assert_raises, \
AssertsCompiledSQL, ComparesTables
from sqlalchemy.testing import engines, fixtures
from sqlalchemy import testing
+from sqlalchemy import inspect
from sqlalchemy import Table, Column, select, MetaData, text, Integer, \
String, Sequence, ForeignKey, join, Numeric, \
PrimaryKeyConstraint, DateTime, tuple_, Float, BigInteger, \
@@ -159,6 +160,17 @@ class ReflectionTest(fixtures.TestBase):
subject.join(referer).onclause))
@testing.provide_metadata
+ def test_reflect_default_over_128_chars(self):
+ Table('t', self.metadata,
+ Column('x', String(200), server_default="abcd" * 40)
+ ).create(testing.db)
+
+ m = MetaData()
+ t = Table('t', m, autoload=True, autoload_with=testing.db)
+ eq_(
+ t.c.x.server_default.arg.text, "'%s'::character varying" % ("abcd" * 40)
+ )
+ @testing.provide_metadata
def test_renamed_sequence_reflection(self):
metadata = self.metadata
t = Table('t', metadata, Column('id', Integer, primary_key=True))
@@ -416,6 +428,70 @@ class ReflectionTest(fixtures.TestBase):
eq_(ind, [{'unique': False, 'column_names': ['y'], 'name': 'idx1'}])
conn.close()
+ @testing.provide_metadata
+ def test_foreign_key_option_inspection(self):
+ metadata = self.metadata
+ Table('person', metadata,
+ Column('id', String(length=32), nullable=False, primary_key=True),
+ Column('company_id', ForeignKey('company.id',
+ name='person_company_id_fkey',
+ match='FULL', onupdate='RESTRICT', ondelete='RESTRICT',
+ deferrable=True, initially='DEFERRED'
+ )
+ )
+ )
+ Table('company', metadata,
+ Column('id', String(length=32), nullable=False, primary_key=True),
+ Column('name', String(length=255)),
+ Column('industry_id', ForeignKey('industry.id',
+ name='company_industry_id_fkey',
+ onupdate='CASCADE', ondelete='CASCADE',
+ deferrable=False, # PG default
+ initially='IMMEDIATE' # PG default
+ )
+ )
+ )
+ Table('industry', metadata,
+ Column('id', Integer(), nullable=False, primary_key=True),
+ Column('name', String(length=255))
+ )
+ fk_ref = {
+ 'person_company_id_fkey': {
+ 'name': 'person_company_id_fkey',
+ 'constrained_columns': ['company_id'],
+ 'referred_columns': ['id'],
+ 'referred_table': 'company',
+ 'referred_schema': None,
+ 'options': {
+ 'onupdate': 'RESTRICT',
+ 'deferrable': True,
+ 'ondelete': 'RESTRICT',
+ 'initially': 'DEFERRED',
+ 'match': 'FULL'
+ }
+ },
+ 'company_industry_id_fkey': {
+ 'name': 'company_industry_id_fkey',
+ 'constrained_columns': ['industry_id'],
+ 'referred_columns': ['id'],
+ 'referred_table': 'industry',
+ 'referred_schema': None,
+ 'options': {
+ 'onupdate': 'CASCADE',
+ 'deferrable': None,
+ 'ondelete': 'CASCADE',
+ 'initially': None,
+ 'match': None
+ }
+ }
+ }
+ metadata.create_all()
+ inspector = inspect(testing.db)
+ fks = inspector.get_foreign_keys('person') + \
+ inspector.get_foreign_keys('company')
+ for fk in fks:
+ eq_(fk, fk_ref[fk['name']])
+
class CustomTypeReflectionTest(fixtures.TestBase):
class CustomType(object):
diff --git a/test/dialect/postgresql/test_types.py b/test/dialect/postgresql/test_types.py
index 784f8bcbf..ba4b63e1a 100644
--- a/test/dialect/postgresql/test_types.py
+++ b/test/dialect/postgresql/test_types.py
@@ -10,18 +10,22 @@ from sqlalchemy import Table, Column, select, MetaData, text, Integer, \
PrimaryKeyConstraint, DateTime, tuple_, Float, BigInteger, \
func, literal_column, literal, bindparam, cast, extract, \
SmallInteger, Enum, REAL, update, insert, Index, delete, \
- and_, Date, TypeDecorator, Time, Unicode, Interval, or_, Text
+ and_, Date, TypeDecorator, Time, Unicode, Interval, or_, Text, \
+ type_coerce
from sqlalchemy.orm import Session, mapper, aliased
from sqlalchemy import exc, schema, types
from sqlalchemy.dialects.postgresql import base as postgresql
from sqlalchemy.dialects.postgresql import HSTORE, hstore, array, \
- INT4RANGE, INT8RANGE, NUMRANGE, DATERANGE, TSRANGE, TSTZRANGE
+ INT4RANGE, INT8RANGE, NUMRANGE, DATERANGE, TSRANGE, TSTZRANGE, \
+ JSON
import decimal
from sqlalchemy import util
from sqlalchemy.testing.util import round_decimal
from sqlalchemy.sql import table, column, operators
import logging
import re
+from sqlalchemy import inspect
+from sqlalchemy import event
class FloatCoercionTest(fixtures.TablesTest, AssertsExecutionResults):
__only_on__ = 'postgresql'
@@ -96,34 +100,10 @@ class FloatCoercionTest(fixtures.TablesTest, AssertsExecutionResults):
([5], [5], [6], [decimal.Decimal("6.4")])
)
-class EnumTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
+class EnumTest(fixtures.TestBase, AssertsExecutionResults):
__only_on__ = 'postgresql'
- __dialect__ = postgresql.dialect()
- def test_compile(self):
- e1 = Enum('x', 'y', 'z', name='somename')
- e2 = Enum('x', 'y', 'z', name='somename', schema='someschema')
- self.assert_compile(postgresql.CreateEnumType(e1),
- "CREATE TYPE somename AS ENUM ('x','y','z')"
- )
- self.assert_compile(postgresql.CreateEnumType(e2),
- "CREATE TYPE someschema.somename AS ENUM "
- "('x','y','z')")
- self.assert_compile(postgresql.DropEnumType(e1),
- 'DROP TYPE somename')
- self.assert_compile(postgresql.DropEnumType(e2),
- 'DROP TYPE someschema.somename')
- t1 = Table('sometable', MetaData(), Column('somecolumn', e1))
- self.assert_compile(schema.CreateTable(t1),
- 'CREATE TABLE sometable (somecolumn '
- 'somename)')
- t1 = Table('sometable', MetaData(), Column('somecolumn',
- Enum('x', 'y', 'z', native_enum=False)))
- self.assert_compile(schema.CreateTable(t1),
- "CREATE TABLE sometable (somecolumn "
- "VARCHAR(1), CHECK (somecolumn IN ('x', "
- "'y', 'z')))")
@testing.fails_on('postgresql+zxjdbc',
'zxjdbc fails on ENUM: column "XXX" is of type '
@@ -860,7 +840,8 @@ class SpecialTypesTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
Column('plain_interval', postgresql.INTERVAL),
Column('year_interval', y2m()),
Column('month_interval', d2s()),
- Column('precision_interval', postgresql.INTERVAL(precision=3))
+ Column('precision_interval', postgresql.INTERVAL(precision=3)),
+ Column('tsvector_document', postgresql.TSVECTOR)
)
metadata.create_all()
@@ -893,6 +874,17 @@ class SpecialTypesTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
self.assert_compile(type_, expected)
@testing.provide_metadata
+ def test_tsvector_round_trip(self):
+ t = Table('t1', self.metadata, Column('data', postgresql.TSVECTOR))
+ t.create()
+ testing.db.execute(t.insert(), data="a fat cat sat")
+ eq_(testing.db.scalar(select([t.c.data])), "'a' 'cat' 'fat' 'sat'")
+
+ testing.db.execute(t.update(), data="'a' 'cat' 'fat' 'mat' 'sat'")
+
+ eq_(testing.db.scalar(select([t.c.data])), "'a' 'cat' 'fat' 'mat' 'sat'")
+
+ @testing.provide_metadata
def test_bit_reflection(self):
metadata = self.metadata
t1 = Table('t1', metadata,
@@ -918,7 +910,6 @@ class UUIDTest(fixtures.TestBase):
__only_on__ = 'postgresql'
- @testing.requires.python25
@testing.fails_on('postgresql+zxjdbc',
'column "data" is of type uuid but expression is of type character varying')
@testing.fails_on('postgresql+pg8000', 'No support for UUID type')
@@ -932,7 +923,6 @@ class UUIDTest(fixtures.TestBase):
str(uuid.uuid4())
)
- @testing.requires.python25
@testing.fails_on('postgresql+zxjdbc',
'column "data" is of type uuid but expression is of type character varying')
@testing.fails_on('postgresql+pg8000', 'No support for UUID type')
@@ -978,13 +968,8 @@ class UUIDTest(fixtures.TestBase):
-class HStoreTest(fixtures.TestBase):
- def _assert_sql(self, construct, expected):
- dialect = postgresql.dialect()
- compiled = str(construct.compile(dialect=dialect))
- compiled = re.sub(r'\s+', ' ', compiled)
- expected = re.sub(r'\s+', ' ', expected)
- eq_(compiled, expected)
+class HStoreTest(AssertsCompiledSQL, fixtures.TestBase):
+ __dialect__ = 'postgresql'
def setup(self):
metadata = MetaData()
@@ -996,7 +981,7 @@ class HStoreTest(fixtures.TestBase):
def _test_where(self, whereclause, expected):
stmt = select([self.test_table]).where(whereclause)
- self._assert_sql(
+ self.assert_compile(
stmt,
"SELECT test_table.id, test_table.hash FROM test_table "
"WHERE %s" % expected
@@ -1004,7 +989,7 @@ class HStoreTest(fixtures.TestBase):
def _test_cols(self, colclause, expected, from_=True):
stmt = select([colclause])
- self._assert_sql(
+ self.assert_compile(
stmt,
(
"SELECT %s" +
@@ -1013,9 +998,8 @@ class HStoreTest(fixtures.TestBase):
)
def test_bind_serialize_default(self):
- from sqlalchemy.engine import default
- dialect = default.DefaultDialect()
+ dialect = postgresql.dialect()
proc = self.test_table.c.hash.type._cached_bind_processor(dialect)
eq_(
proc(util.OrderedDict([("key1", "value1"), ("key2", "value2")])),
@@ -1023,9 +1007,7 @@ class HStoreTest(fixtures.TestBase):
)
def test_bind_serialize_with_slashes_and_quotes(self):
- from sqlalchemy.engine import default
-
- dialect = default.DefaultDialect()
+ dialect = postgresql.dialect()
proc = self.test_table.c.hash.type._cached_bind_processor(dialect)
eq_(
proc({'\\"a': '\\"1'}),
@@ -1033,9 +1015,7 @@ class HStoreTest(fixtures.TestBase):
)
def test_parse_error(self):
- from sqlalchemy.engine import default
-
- dialect = default.DefaultDialect()
+ dialect = postgresql.dialect()
proc = self.test_table.c.hash.type._cached_result_processor(
dialect, None)
assert_raises_message(
@@ -1048,9 +1028,7 @@ class HStoreTest(fixtures.TestBase):
)
def test_result_deserialize_default(self):
- from sqlalchemy.engine import default
-
- dialect = default.DefaultDialect()
+ dialect = postgresql.dialect()
proc = self.test_table.c.hash.type._cached_result_processor(
dialect, None)
eq_(
@@ -1059,9 +1037,7 @@ class HStoreTest(fixtures.TestBase):
)
def test_result_deserialize_with_slashes_and_quotes(self):
- from sqlalchemy.engine import default
-
- dialect = default.DefaultDialect()
+ dialect = postgresql.dialect()
proc = self.test_table.c.hash.type._cached_result_processor(
dialect, None)
eq_(
@@ -1305,7 +1281,6 @@ class HStoreRoundTripTest(fixtures.TablesTest):
return engine
def test_reflect(self):
- from sqlalchemy import inspect
insp = inspect(testing.db)
cols = insp.get_columns('data_table')
assert isinstance(cols[2]['type'], HSTORE)
@@ -1677,3 +1652,320 @@ class DateTimeTZRangeTests(_RangeTypeMixin, fixtures.TablesTest):
def _data_obj(self):
return self.extras.DateTimeTZRange(*self.tstzs())
+
+
+class JSONTest(AssertsCompiledSQL, fixtures.TestBase):
+ __dialect__ = 'postgresql'
+
+ def setup(self):
+ metadata = MetaData()
+ self.test_table = Table('test_table', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('test_column', JSON)
+ )
+ self.jsoncol = self.test_table.c.test_column
+
+ def _test_where(self, whereclause, expected):
+ stmt = select([self.test_table]).where(whereclause)
+ self.assert_compile(
+ stmt,
+ "SELECT test_table.id, test_table.test_column FROM test_table "
+ "WHERE %s" % expected
+ )
+
+ def _test_cols(self, colclause, expected, from_=True):
+ stmt = select([colclause])
+ self.assert_compile(
+ stmt,
+ (
+ "SELECT %s" +
+ (" FROM test_table" if from_ else "")
+ ) % expected
+ )
+
+ def test_bind_serialize_default(self):
+ dialect = postgresql.dialect()
+ proc = self.test_table.c.test_column.type._cached_bind_processor(dialect)
+ eq_(
+ proc({"A": [1, 2, 3, True, False]}),
+ '{"A": [1, 2, 3, true, false]}'
+ )
+
+ def test_result_deserialize_default(self):
+ dialect = postgresql.dialect()
+ proc = self.test_table.c.test_column.type._cached_result_processor(
+ dialect, None)
+ eq_(
+ proc('{"A": [1, 2, 3, true, false]}'),
+ {"A": [1, 2, 3, True, False]}
+ )
+
+ # This test is a bit misleading -- in real life you will need to cast to do anything
+ def test_where_getitem(self):
+ self._test_where(
+ self.jsoncol['bar'] == None,
+ "(test_table.test_column -> %(test_column_1)s) IS NULL"
+ )
+
+ def test_where_path(self):
+ self._test_where(
+ self.jsoncol[("foo", 1)] == None,
+ "(test_table.test_column #> %(test_column_1)s) IS NULL"
+ )
+
+ def test_where_getitem_as_text(self):
+ self._test_where(
+ self.jsoncol['bar'].astext == None,
+ "(test_table.test_column ->> %(test_column_1)s) IS NULL"
+ )
+
+ def test_where_getitem_as_cast(self):
+ self._test_where(
+ self.jsoncol['bar'].cast(Integer) == 5,
+ "CAST(test_table.test_column ->> %(test_column_1)s AS INTEGER) "
+ "= %(param_1)s"
+ )
+
+ def test_where_path_as_text(self):
+ self._test_where(
+ self.jsoncol[("foo", 1)].astext == None,
+ "(test_table.test_column #>> %(test_column_1)s) IS NULL"
+ )
+
+ def test_cols_get(self):
+ self._test_cols(
+ self.jsoncol['foo'],
+ "test_table.test_column -> %(test_column_1)s AS anon_1",
+ True
+ )
+
+
+class JSONRoundTripTest(fixtures.TablesTest):
+ __only_on__ = ('postgresql >= 9.3',)
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table('data_table', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('name', String(30), nullable=False),
+ Column('data', JSON)
+ )
+
+ def _fixture_data(self, engine):
+ data_table = self.tables.data_table
+ engine.execute(
+ data_table.insert(),
+ {'name': 'r1', 'data': {"k1": "r1v1", "k2": "r1v2"}},
+ {'name': 'r2', 'data': {"k1": "r2v1", "k2": "r2v2"}},
+ {'name': 'r3', 'data': {"k1": "r3v1", "k2": "r3v2"}},
+ {'name': 'r4', 'data': {"k1": "r4v1", "k2": "r4v2"}},
+ {'name': 'r5', 'data': {"k1": "r5v1", "k2": "r5v2", "k3": 5}},
+ )
+
+ def _assert_data(self, compare):
+ data = testing.db.execute(
+ select([self.tables.data_table.c.data]).
+ order_by(self.tables.data_table.c.name)
+ ).fetchall()
+ eq_([d for d, in data], compare)
+
+ def _test_insert(self, engine):
+ engine.execute(
+ self.tables.data_table.insert(),
+ {'name': 'r1', 'data': {"k1": "r1v1", "k2": "r1v2"}}
+ )
+ self._assert_data([{"k1": "r1v1", "k2": "r1v2"}])
+
+ def _non_native_engine(self, json_serializer=None, json_deserializer=None):
+ if json_serializer is not None or json_deserializer is not None:
+ options = {
+ "json_serializer": json_serializer,
+ "json_deserializer": json_deserializer
+ }
+ else:
+ options = {}
+
+ if testing.against("postgresql+psycopg2"):
+ from psycopg2.extras import register_default_json
+ engine = engines.testing_engine(options=options)
+ @event.listens_for(engine, "connect")
+ def connect(dbapi_connection, connection_record):
+ engine.dialect._has_native_json = False
+ def pass_(value):
+ return value
+ register_default_json(dbapi_connection, loads=pass_)
+ elif options:
+ engine = engines.testing_engine(options=options)
+ else:
+ engine = testing.db
+ engine.connect()
+ return engine
+
+ def test_reflect(self):
+ insp = inspect(testing.db)
+ cols = insp.get_columns('data_table')
+ assert isinstance(cols[2]['type'], JSON)
+
+ @testing.only_on("postgresql+psycopg2")
+ def test_insert_native(self):
+ engine = testing.db
+ self._test_insert(engine)
+
+ def test_insert_python(self):
+ engine = self._non_native_engine()
+ self._test_insert(engine)
+
+
+ def _test_custom_serialize_deserialize(self, native):
+ import json
+ def loads(value):
+ value = json.loads(value)
+ value['x'] = value['x'] + '_loads'
+ return value
+
+ def dumps(value):
+ value = dict(value)
+ value['x'] = 'dumps_y'
+ return json.dumps(value)
+
+ if native:
+ engine = engines.testing_engine(options=dict(
+ json_serializer=dumps,
+ json_deserializer=loads
+ ))
+ else:
+ engine = self._non_native_engine(
+ json_serializer=dumps,
+ json_deserializer=loads
+ )
+
+ s = select([
+ cast(
+ {
+ "key": "value",
+ "x": "q"
+ },
+ JSON
+ )
+ ])
+ eq_(
+ engine.scalar(s),
+ {
+ "key": "value",
+ "x": "dumps_y_loads"
+ },
+ )
+
+ @testing.only_on("postgresql+psycopg2")
+ def test_custom_native(self):
+ self._test_custom_serialize_deserialize(True)
+
+ @testing.only_on("postgresql+psycopg2")
+ def test_custom_python(self):
+ self._test_custom_serialize_deserialize(False)
+
+
+ @testing.only_on("postgresql+psycopg2")
+ def test_criterion_native(self):
+ engine = testing.db
+ self._fixture_data(engine)
+ self._test_criterion(engine)
+
+ def test_criterion_python(self):
+ engine = self._non_native_engine()
+ self._fixture_data(engine)
+ self._test_criterion(engine)
+
+ def test_path_query(self):
+ engine = testing.db
+ self._fixture_data(engine)
+ data_table = self.tables.data_table
+ result = engine.execute(
+ select([data_table.c.data]).where(
+ data_table.c.data[('k1',)].astext == 'r3v1'
+ )
+ ).first()
+ eq_(result, ({'k1': 'r3v1', 'k2': 'r3v2'},))
+
+ def test_query_returned_as_text(self):
+ engine = testing.db
+ self._fixture_data(engine)
+ data_table = self.tables.data_table
+ result = engine.execute(
+ select([data_table.c.data['k1'].astext])
+ ).first()
+ assert isinstance(result[0], util.text_type)
+
+ def test_query_returned_as_int(self):
+ engine = testing.db
+ self._fixture_data(engine)
+ data_table = self.tables.data_table
+ result = engine.execute(
+ select([data_table.c.data['k3'].cast(Integer)]).where(
+ data_table.c.name == 'r5')
+ ).first()
+ assert isinstance(result[0], int)
+
+ def _test_criterion(self, engine):
+ data_table = self.tables.data_table
+ result = engine.execute(
+ select([data_table.c.data]).where(
+ data_table.c.data['k1'].astext == 'r3v1'
+ )
+ ).first()
+ eq_(result, ({'k1': 'r3v1', 'k2': 'r3v2'},))
+
+ def _test_fixed_round_trip(self, engine):
+ s = select([
+ cast(
+ {
+ "key": "value",
+ "key2": {"k1": "v1", "k2": "v2"}
+ },
+ JSON
+ )
+ ])
+ eq_(
+ engine.scalar(s),
+ {
+ "key": "value",
+ "key2": {"k1": "v1", "k2": "v2"}
+ },
+ )
+
+ def test_fixed_round_trip_python(self):
+ engine = self._non_native_engine()
+ self._test_fixed_round_trip(engine)
+
+ @testing.only_on("postgresql+psycopg2")
+ def test_fixed_round_trip_native(self):
+ engine = testing.db
+ self._test_fixed_round_trip(engine)
+
+ def _test_unicode_round_trip(self, engine):
+ s = select([
+ cast(
+ {
+ util.u('réveillé'): util.u('réveillé'),
+ "data": {"k1": util.u('drôle')}
+ },
+ JSON
+ )
+ ])
+ eq_(
+ engine.scalar(s),
+ {
+ util.u('réveillé'): util.u('réveillé'),
+ "data": {"k1": util.u('drôle')}
+ },
+ )
+
+
+ def test_unicode_round_trip_python(self):
+ engine = self._non_native_engine()
+ self._test_unicode_round_trip(engine)
+
+ @testing.only_on("postgresql+psycopg2")
+ def test_unicode_round_trip_native(self):
+ engine = testing.db
+ self._test_unicode_round_trip(engine)
diff --git a/test/dialect/test_firebird.py b/test/dialect/test_firebird.py
index 4a71b7d05..222e34b93 100644
--- a/test/dialect/test_firebird.py
+++ b/test/dialect/test_firebird.py
@@ -352,6 +352,15 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
for type_, args, kw, res in columns:
self.assert_compile(type_(*args, **kw), res)
+ def test_quoting_initial_chars(self):
+ self.assert_compile(
+ column("_somecol"),
+ '"_somecol"'
+ )
+ self.assert_compile(
+ column("$somecol"),
+ '"$somecol"'
+ )
class TypesTest(fixtures.TestBase):
__only_on__ = 'firebird'
diff --git a/test/dialect/test_informix.py b/test/dialect/test_informix.py
deleted file mode 100644
index 332edd24e..000000000
--- a/test/dialect/test_informix.py
+++ /dev/null
@@ -1,25 +0,0 @@
-from sqlalchemy import *
-from sqlalchemy.databases import informix
-from sqlalchemy.testing import *
-
-
-class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
-
- __dialect__ = informix.InformixDialect()
-
- def test_statements(self):
- meta = MetaData()
- t1 = Table('t1', meta, Column('col1', Integer,
- primary_key=True), Column('col2', String(50)))
- t2 = Table('t2', meta, Column('col1', Integer,
- primary_key=True), Column('col2', String(50)),
- Column('col3', Integer, ForeignKey('t1.col1')))
- self.assert_compile(t1.select(),
- 'SELECT t1.col1, t1.col2 FROM t1')
- self.assert_compile(select([t1, t2]).select_from(t1.join(t2)),
- 'SELECT t1.col1, t1.col2, t2.col1, '
- 't2.col2, t2.col3 FROM t1 JOIN t2 ON '
- 't1.col1 = t2.col3')
- self.assert_compile(t1.update().values({t1.c.col1: t1.c.col1
- + 1}), 'UPDATE t1 SET col1=(t1.col1 + ?)')
-
diff --git a/test/dialect/test_oracle.py b/test/dialect/test_oracle.py
index 71b2d96cb..8d0ff9776 100644
--- a/test/dialect/test_oracle.py
+++ b/test/dialect/test_oracle.py
@@ -18,7 +18,7 @@ from sqlalchemy.testing.schema import Table, Column
import datetime
import os
from sqlalchemy import sql
-
+from sqlalchemy.testing.mock import Mock
class OutParamTest(fixtures.TestBase, AssertsExecutionResults):
__only_on__ = 'oracle+cx_oracle'
@@ -26,31 +26,31 @@ class OutParamTest(fixtures.TestBase, AssertsExecutionResults):
@classmethod
def setup_class(cls):
testing.db.execute("""
-create or replace procedure foo(x_in IN number, x_out OUT number, y_out OUT number, z_out OUT varchar) IS
- retval number;
- begin
- retval := 6;
- x_out := 10;
- y_out := x_in * 15;
- z_out := NULL;
- end;
+ create or replace procedure foo(x_in IN number, x_out OUT number,
+ y_out OUT number, z_out OUT varchar) IS
+ retval number;
+ begin
+ retval := 6;
+ x_out := 10;
+ y_out := x_in * 15;
+ z_out := NULL;
+ end;
""")
def test_out_params(self):
- result = \
- testing.db.execute(text('begin foo(:x_in, :x_out, :y_out, '
+ result = testing.db.execute(text('begin foo(:x_in, :x_out, :y_out, '
':z_out); end;',
bindparams=[bindparam('x_in', Float),
outparam('x_out', Integer),
outparam('y_out', Float),
outparam('z_out', String)]), x_in=5)
- eq_(result.out_parameters, {'x_out': 10, 'y_out': 75, 'z_out'
- : None})
+ eq_(result.out_parameters,
+ {'x_out': 10, 'y_out': 75, 'z_out': None})
assert isinstance(result.out_parameters['x_out'], int)
@classmethod
def teardown_class(cls):
- testing.db.execute("DROP PROCEDURE foo")
+ testing.db.execute("DROP PROCEDURE foo")
class CXOracleArgsTest(fixtures.TestBase):
__only_on__ = 'oracle+cx_oracle'
@@ -92,7 +92,7 @@ class QuotedBindRoundTripTest(fixtures.TestBase):
metadata.create_all()
table.insert().execute(
- {"option":1, "plain":1, "union":1}
+ {"option": 1, "plain": 1, "union": 1}
)
eq_(
testing.db.execute(table.select()).first(),
@@ -106,8 +106,7 @@ class QuotedBindRoundTripTest(fixtures.TestBase):
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
-
- __dialect__ = oracle.dialect()
+ __dialect__ = "oracle" #oracle.dialect()
def test_true_false(self):
self.assert_compile(
@@ -218,6 +217,49 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
':ROWNUM_1) WHERE ora_rn > :ora_rn_1 FOR '
'UPDATE')
+ def test_for_update(self):
+ table1 = table('mytable',
+ column('myid'), column('name'), column('description'))
+
+ self.assert_compile(
+ table1.select(table1.c.myid == 7).with_for_update(),
+ "SELECT mytable.myid, mytable.name, mytable.description "
+ "FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE")
+
+ self.assert_compile(
+ table1.select(table1.c.myid == 7).with_for_update(of=table1.c.myid),
+ "SELECT mytable.myid, mytable.name, mytable.description "
+ "FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE OF mytable.myid")
+
+ self.assert_compile(
+ table1.select(table1.c.myid == 7).with_for_update(nowait=True),
+ "SELECT mytable.myid, mytable.name, mytable.description "
+ "FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE NOWAIT")
+
+ self.assert_compile(
+ table1.select(table1.c.myid == 7).
+ with_for_update(nowait=True, of=table1.c.myid),
+ "SELECT mytable.myid, mytable.name, mytable.description "
+ "FROM mytable WHERE mytable.myid = :myid_1 "
+ "FOR UPDATE OF mytable.myid NOWAIT")
+
+ self.assert_compile(
+ table1.select(table1.c.myid == 7).
+ with_for_update(nowait=True, of=[table1.c.myid, table1.c.name]),
+ "SELECT mytable.myid, mytable.name, mytable.description "
+ "FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE OF "
+ "mytable.myid, mytable.name NOWAIT")
+
+ ta = table1.alias()
+ self.assert_compile(
+ ta.select(ta.c.myid == 7).
+ with_for_update(of=[ta.c.myid, ta.c.name]),
+ "SELECT mytable_1.myid, mytable_1.name, mytable_1.description "
+ "FROM mytable mytable_1 "
+ "WHERE mytable_1.myid = :myid_1 FOR UPDATE OF "
+ "mytable_1.myid, mytable_1.name"
+ )
+
def test_limit_preserves_typing_information(self):
class MyType(TypeDecorator):
impl = Integer
@@ -250,7 +292,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
def test_use_binds_for_limits_enabled(self):
t = table('sometable', column('col1'), column('col2'))
- dialect = oracle.OracleDialect(use_binds_for_limits = True)
+ dialect = oracle.OracleDialect(use_binds_for_limits=True)
self.assert_compile(select([t]).limit(10),
"SELECT col1, col2 FROM (SELECT sometable.col1 AS col1, "
@@ -348,8 +390,8 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
)
query = select([table1, table2], or_(table1.c.name == 'fred',
- table1.c.myid == 10, table2.c.othername != 'jack'
- , 'EXISTS (select yay from foo where boo = lar)'
+ table1.c.myid == 10, table2.c.othername != 'jack',
+ 'EXISTS (select yay from foo where boo = lar)'
), from_obj=[outerjoin(table1, table2,
table1.c.myid == table2.c.otherid)])
self.assert_compile(query,
@@ -435,8 +477,8 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
'mytable.description AS description FROM '
'mytable LEFT OUTER JOIN myothertable ON '
'mytable.myid = myothertable.otherid) '
- 'anon_1 ON thirdtable.userid = anon_1.myid'
- , dialect=oracle.dialect(use_ansi=True))
+ 'anon_1 ON thirdtable.userid = anon_1.myid',
+ dialect=oracle.dialect(use_ansi=True))
self.assert_compile(q,
'SELECT thirdtable.userid, '
@@ -549,7 +591,8 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
def test_returning_insert_labeled(self):
t1 = table('t1', column('c1'), column('c2'), column('c3'))
self.assert_compile(
- t1.insert().values(c1=1).returning(t1.c.c2.label('c2_l'), t1.c.c3.label('c3_l')),
+ t1.insert().values(c1=1).returning(
+ t1.c.c2.label('c2_l'), t1.c.c3.label('c3_l')),
"INSERT INTO t1 (c1) VALUES (:c1) RETURNING "
"t1.c2, t1.c3 INTO :ret_0, :ret_1"
)
@@ -587,33 +630,52 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
schema.CreateIndex(Index("bar", t1.c.x)),
"CREATE INDEX alt_schema.bar ON alt_schema.foo (x)"
)
+
+ def test_create_index_expr(self):
+ m = MetaData()
+ t1 = Table('foo', m,
+ Column('x', Integer)
+ )
+ self.assert_compile(
+ schema.CreateIndex(Index("bar", t1.c.x > 5)),
+ "CREATE INDEX bar ON foo (x > 5)"
+ )
+
class CompatFlagsTest(fixtures.TestBase, AssertsCompiledSQL):
- __only_on__ = 'oracle'
- def test_ora8_flags(self):
- def server_version_info(self):
- return (8, 2, 5)
+ def _dialect(self, server_version, **kw):
+ def server_version_info(conn):
+ return server_version
- dialect = oracle.dialect(dbapi=testing.db.dialect.dbapi)
+ dialect = oracle.dialect(
+ dbapi=Mock(version="0.0.0", paramstyle="named"),
+ **kw)
dialect._get_server_version_info = server_version_info
+ dialect._check_unicode_returns = Mock()
+ dialect._check_unicode_description = Mock()
+ dialect._get_default_schema_name = Mock()
+ return dialect
+
+
+ def test_ora8_flags(self):
+ dialect = self._dialect((8, 2, 5))
# before connect, assume modern DB
assert dialect._supports_char_length
assert dialect._supports_nchar
assert dialect.use_ansi
- dialect.initialize(testing.db.connect())
+ dialect.initialize(Mock())
assert not dialect.implicit_returning
assert not dialect._supports_char_length
assert not dialect._supports_nchar
assert not dialect.use_ansi
- self.assert_compile(String(50),"VARCHAR2(50)",dialect=dialect)
- self.assert_compile(Unicode(50),"VARCHAR2(50)",dialect=dialect)
- self.assert_compile(UnicodeText(),"CLOB",dialect=dialect)
+ self.assert_compile(String(50), "VARCHAR2(50)", dialect=dialect)
+ self.assert_compile(Unicode(50), "VARCHAR2(50)", dialect=dialect)
+ self.assert_compile(UnicodeText(), "CLOB", dialect=dialect)
- dialect = oracle.dialect(implicit_returning=True,
- dbapi=testing.db.dialect.dbapi)
- dialect._get_server_version_info = server_version_info
+
+ dialect = self._dialect((8, 2, 5), implicit_returning=True)
dialect.initialize(testing.db.connect())
assert dialect.implicit_returning
@@ -621,26 +683,25 @@ class CompatFlagsTest(fixtures.TestBase, AssertsCompiledSQL):
def test_default_flags(self):
"""test with no initialization or server version info"""
- dialect = oracle.dialect(dbapi=testing.db.dialect.dbapi)
+ dialect = self._dialect(None)
+
assert dialect._supports_char_length
assert dialect._supports_nchar
assert dialect.use_ansi
- self.assert_compile(String(50),"VARCHAR2(50 CHAR)",dialect=dialect)
- self.assert_compile(Unicode(50),"NVARCHAR2(50)",dialect=dialect)
- self.assert_compile(UnicodeText(),"NCLOB",dialect=dialect)
+ self.assert_compile(String(50), "VARCHAR2(50 CHAR)", dialect=dialect)
+ self.assert_compile(Unicode(50), "NVARCHAR2(50)", dialect=dialect)
+ self.assert_compile(UnicodeText(), "NCLOB", dialect=dialect)
def test_ora10_flags(self):
- def server_version_info(self):
- return (10, 2, 5)
- dialect = oracle.dialect(dbapi=testing.db.dialect.dbapi)
- dialect._get_server_version_info = server_version_info
- dialect.initialize(testing.db.connect())
+ dialect = self._dialect((10, 2, 5))
+
+ dialect.initialize(Mock())
assert dialect._supports_char_length
assert dialect._supports_nchar
assert dialect.use_ansi
- self.assert_compile(String(50),"VARCHAR2(50 CHAR)",dialect=dialect)
- self.assert_compile(Unicode(50),"NVARCHAR2(50)",dialect=dialect)
- self.assert_compile(UnicodeText(),"NCLOB",dialect=dialect)
+ self.assert_compile(String(50), "VARCHAR2(50 CHAR)", dialect=dialect)
+ self.assert_compile(Unicode(50), "NVARCHAR2(50)", dialect=dialect)
+ self.assert_compile(UnicodeText(), "NCLOB", dialect=dialect)
class MultiSchemaTest(fixtures.TestBase, AssertsCompiledSQL):
@@ -664,9 +725,18 @@ create table test_schema.child(
parent_id integer references test_schema.parent(id)
);
+create table local_table(
+ id integer primary key,
+ data varchar2(50)
+);
+
create synonym test_schema.ptable for test_schema.parent;
create synonym test_schema.ctable for test_schema.child;
+create synonym test_schema_ptable for test_schema.parent;
+
+create synonym test_schema.local_table for local_table;
+
-- can't make a ref from local schema to the
-- remote schema's table without this,
-- *and* cant give yourself a grant !
@@ -682,15 +752,20 @@ grant references on test_schema.child to public;
for stmt in """
drop table test_schema.child;
drop table test_schema.parent;
+drop table local_table;
drop synonym test_schema.ctable;
drop synonym test_schema.ptable;
+drop synonym test_schema_ptable;
+drop synonym test_schema.local_table;
+
""".split(";"):
if stmt.strip():
testing.db.execute(stmt)
+ @testing.provide_metadata
def test_create_same_names_explicit_schema(self):
schema = testing.db.dialect.default_schema_name
- meta = MetaData(testing.db)
+ meta = self.metadata
parent = Table('parent', meta,
Column('pid', Integer, primary_key=True),
schema=schema
@@ -701,15 +776,31 @@ drop synonym test_schema.ptable;
schema=schema
)
meta.create_all()
- try:
- parent.insert().execute({'pid':1})
- child.insert().execute({'cid':1, 'pid':1})
- eq_(child.select().execute().fetchall(), [(1, 1)])
- finally:
- meta.drop_all()
+ parent.insert().execute({'pid': 1})
+ child.insert().execute({'cid': 1, 'pid': 1})
+ eq_(child.select().execute().fetchall(), [(1, 1)])
- def test_create_same_names_implicit_schema(self):
+ def test_reflect_alt_table_owner_local_synonym(self):
meta = MetaData(testing.db)
+ parent = Table('test_schema_ptable', meta, autoload=True,
+ oracle_resolve_synonyms=True)
+ self.assert_compile(parent.select(),
+ "SELECT test_schema_ptable.id, "
+ "test_schema_ptable.data FROM test_schema_ptable")
+ select([parent]).execute().fetchall()
+
+ def test_reflect_alt_synonym_owner_local_table(self):
+ meta = MetaData(testing.db)
+ parent = Table('local_table', meta, autoload=True,
+ oracle_resolve_synonyms=True, schema="test_schema")
+ self.assert_compile(parent.select(),
+ "SELECT test_schema.local_table.id, "
+ "test_schema.local_table.data FROM test_schema.local_table")
+ select([parent]).execute().fetchall()
+
+ @testing.provide_metadata
+ def test_create_same_names_implicit_schema(self):
+ meta = self.metadata
parent = Table('parent', meta,
Column('pid', Integer, primary_key=True),
)
@@ -718,12 +809,9 @@ drop synonym test_schema.ptable;
Column('pid', Integer, ForeignKey('parent.pid')),
)
meta.create_all()
- try:
- parent.insert().execute({'pid':1})
- child.insert().execute({'cid':1, 'pid':1})
- eq_(child.select().execute().fetchall(), [(1, 1)])
- finally:
- meta.drop_all()
+ parent.insert().execute({'pid': 1})
+ child.insert().execute({'cid': 1, 'pid': 1})
+ eq_(child.select().execute().fetchall(), [(1, 1)])
def test_reflect_alt_owner_explicit(self):
@@ -911,10 +999,17 @@ class DialectTypesTest(fixtures.TestBase, AssertsCompiledSQL):
dbapi = FakeDBAPI()
b = bindparam("foo", "hello world!")
- assert b.type.dialect_impl(dialect).get_dbapi_type(dbapi) == 'STRING'
+ eq_(
+ b.type.dialect_impl(dialect).get_dbapi_type(dbapi),
+ 'STRING'
+ )
b = bindparam("foo", "hello world!")
- assert b.type.dialect_impl(dialect).get_dbapi_type(dbapi) == 'STRING'
+ eq_(
+ b.type.dialect_impl(dialect).get_dbapi_type(dbapi),
+ 'STRING'
+ )
+
def test_long(self):
self.assert_compile(oracle.LONG(), "LONG")
@@ -943,14 +1038,14 @@ class DialectTypesTest(fixtures.TestBase, AssertsCompiledSQL):
self.assert_compile(oracle.RAW(35), "RAW(35)")
def test_char_length(self):
- self.assert_compile(VARCHAR(50),"VARCHAR(50 CHAR)")
+ self.assert_compile(VARCHAR(50), "VARCHAR(50 CHAR)")
oracle8dialect = oracle.dialect()
oracle8dialect.server_version_info = (8, 0)
- self.assert_compile(VARCHAR(50),"VARCHAR(50)",dialect=oracle8dialect)
+ self.assert_compile(VARCHAR(50), "VARCHAR(50)", dialect=oracle8dialect)
- self.assert_compile(NVARCHAR(50),"NVARCHAR2(50)")
- self.assert_compile(CHAR(50),"CHAR(50)")
+ self.assert_compile(NVARCHAR(50), "NVARCHAR2(50)")
+ self.assert_compile(CHAR(50), "CHAR(50)")
def test_varchar_types(self):
dialect = oracle.dialect()
@@ -961,6 +1056,12 @@ class DialectTypesTest(fixtures.TestBase, AssertsCompiledSQL):
(VARCHAR(50), "VARCHAR(50 CHAR)"),
(oracle.NVARCHAR2(50), "NVARCHAR2(50)"),
(oracle.VARCHAR2(50), "VARCHAR2(50 CHAR)"),
+ (String(), "VARCHAR2"),
+ (Unicode(), "NVARCHAR2"),
+ (NVARCHAR(), "NVARCHAR2"),
+ (VARCHAR(), "VARCHAR"),
+ (oracle.NVARCHAR2(), "NVARCHAR2"),
+ (oracle.VARCHAR2(), "VARCHAR2"),
]:
self.assert_compile(typ, exp, dialect=dialect)
@@ -998,36 +1099,36 @@ class TypesTest(fixtures.TestBase):
dict(id=3, data="value 3")
)
- eq_(t.select().where(t.c.data=='value 2').execute().fetchall(),
+ eq_(
+ t.select().where(t.c.data == 'value 2').execute().fetchall(),
[(2, 'value 2 ')]
- )
+ )
m2 = MetaData(testing.db)
t2 = Table('t1', m2, autoload=True)
assert type(t2.c.data.type) is CHAR
- eq_(t2.select().where(t2.c.data=='value 2').execute().fetchall(),
+ eq_(
+ t2.select().where(t2.c.data == 'value 2').execute().fetchall(),
[(2, 'value 2 ')]
- )
+ )
finally:
t.drop()
@testing.requires.returning
+ @testing.provide_metadata
def test_int_not_float(self):
- m = MetaData(testing.db)
+ m = self.metadata
t1 = Table('t1', m, Column('foo', Integer))
t1.create()
- try:
- r = t1.insert().values(foo=5).returning(t1.c.foo).execute()
- x = r.scalar()
- assert x == 5
- assert isinstance(x, int)
-
- x = t1.select().scalar()
- assert x == 5
- assert isinstance(x, int)
- finally:
- t1.drop()
+ r = t1.insert().values(foo=5).returning(t1.c.foo).execute()
+ x = r.scalar()
+ assert x == 5
+ assert isinstance(x, int)
+
+ x = t1.select().scalar()
+ assert x == 5
+ assert isinstance(x, int)
@testing.provide_metadata
def test_rowid(self):
@@ -1044,7 +1145,7 @@ class TypesTest(fixtures.TestBase):
# the ROWID type is not really needed here,
# as cx_oracle just treats it as a string,
# but we want to make sure the ROWID works...
- rowid_col= column('rowid', oracle.ROWID)
+ rowid_col = column('rowid', oracle.ROWID)
s3 = select([t.c.x, rowid_col]).\
where(rowid_col == cast(rowid, oracle.ROWID))
eq_(s3.select().execute().fetchall(),
@@ -1070,8 +1171,9 @@ class TypesTest(fixtures.TestBase):
eq_(row['day_interval'], datetime.timedelta(days=35,
seconds=5743))
+ @testing.provide_metadata
def test_numerics(self):
- m = MetaData(testing.db)
+ m = self.metadata
t1 = Table('t1', m,
Column('intcol', Integer),
Column('numericcol', Numeric(precision=9, scale=2)),
@@ -1084,41 +1186,38 @@ class TypesTest(fixtures.TestBase):
)
t1.create()
- try:
- t1.insert().execute(
- intcol=1,
- numericcol=5.2,
- floatcol1=6.5,
- floatcol2 = 8.5,
- doubleprec = 9.5,
- numbercol1=12,
- numbercol2=14.85,
- numbercol3=15.76
- )
-
- m2 = MetaData(testing.db)
- t2 = Table('t1', m2, autoload=True)
+ t1.insert().execute(
+ intcol=1,
+ numericcol=5.2,
+ floatcol1=6.5,
+ floatcol2=8.5,
+ doubleprec=9.5,
+ numbercol1=12,
+ numbercol2=14.85,
+ numbercol3=15.76
+ )
- for row in (
- t1.select().execute().first(),
- t2.select().execute().first()
- ):
- for i, (val, type_) in enumerate((
- (1, int),
- (decimal.Decimal("5.2"), decimal.Decimal),
- (6.5, float),
- (8.5, float),
- (9.5, float),
- (12, int),
- (decimal.Decimal("14.85"), decimal.Decimal),
- (15.76, float),
- )):
- eq_(row[i], val)
- assert isinstance(row[i], type_), '%r is not %r' \
- % (row[i], type_)
+ m2 = MetaData(testing.db)
+ t2 = Table('t1', m2, autoload=True)
+
+ for row in (
+ t1.select().execute().first(),
+ t2.select().execute().first()
+ ):
+ for i, (val, type_) in enumerate((
+ (1, int),
+ (decimal.Decimal("5.2"), decimal.Decimal),
+ (6.5, float),
+ (8.5, float),
+ (9.5, float),
+ (12, int),
+ (decimal.Decimal("14.85"), decimal.Decimal),
+ (15.76, float),
+ )):
+ eq_(row[i], val)
+ assert isinstance(row[i], type_), '%r is not %r' \
+ % (row[i], type_)
- finally:
- t1.drop()
def test_numeric_no_decimal_mode(self):
@@ -1150,28 +1249,26 @@ class TypesTest(fixtures.TestBase):
)
foo.create()
- foo.insert().execute(
- {'idata':5, 'ndata':decimal.Decimal("45.6"),
- 'ndata2':decimal.Decimal("45.0"),
- 'nidata':decimal.Decimal('53'), 'fdata':45.68392},
- )
+ foo.insert().execute({
+ 'idata': 5,
+ 'ndata': decimal.Decimal("45.6"),
+ 'ndata2': decimal.Decimal("45.0"),
+ 'nidata': decimal.Decimal('53'),
+ 'fdata': 45.68392
+ })
- stmt = """
- SELECT
- idata,
- ndata,
- ndata2,
- nidata,
- fdata
- FROM foo
- """
+ stmt = "SELECT idata, ndata, ndata2, nidata, fdata FROM foo"
row = testing.db.execute(stmt).fetchall()[0]
- eq_([type(x) for x in row], [int, decimal.Decimal, decimal.Decimal, int, float])
+ eq_(
+ [type(x) for x in row],
+ [int, decimal.Decimal, decimal.Decimal, int, float]
+ )
eq_(
row,
- (5, decimal.Decimal('45.6'), decimal.Decimal('45'), 53, 45.683920000000001)
+ (5, decimal.Decimal('45.6'), decimal.Decimal('45'),
+ 53, 45.683920000000001)
)
# with a nested subquery,
@@ -1195,7 +1292,10 @@ class TypesTest(fixtures.TestBase):
FROM dual
"""
row = testing.db.execute(stmt).fetchall()[0]
- eq_([type(x) for x in row], [int, decimal.Decimal, int, int, decimal.Decimal])
+ eq_(
+ [type(x) for x in row],
+ [int, decimal.Decimal, int, int, decimal.Decimal]
+ )
eq_(
row,
(5, decimal.Decimal('45.6'), 45, 53, decimal.Decimal('45.68392'))
@@ -1203,15 +1303,20 @@ class TypesTest(fixtures.TestBase):
row = testing.db.execute(text(stmt,
typemap={
- 'idata':Integer(),
- 'ndata':Numeric(20, 2),
- 'ndata2':Numeric(20, 2),
- 'nidata':Numeric(5, 0),
- 'fdata':Float()
+ 'idata': Integer(),
+ 'ndata': Numeric(20, 2),
+ 'ndata2': Numeric(20, 2),
+ 'nidata': Numeric(5, 0),
+ 'fdata': Float()
})).fetchall()[0]
- eq_([type(x) for x in row], [int, decimal.Decimal, decimal.Decimal, decimal.Decimal, float])
- eq_(row,
- (5, decimal.Decimal('45.6'), decimal.Decimal('45'), decimal.Decimal('53'), 45.683920000000001)
+ eq_(
+ [type(x) for x in row],
+ [int, decimal.Decimal, decimal.Decimal, decimal.Decimal, float]
+ )
+ eq_(
+ row,
+ (5, decimal.Decimal('45.6'), decimal.Decimal('45'),
+ decimal.Decimal('53'), 45.683920000000001)
)
stmt = """
@@ -1237,39 +1342,55 @@ class TypesTest(fixtures.TestBase):
)
WHERE ROWNUM >= 0) anon_1
"""
- row =testing.db.execute(stmt).fetchall()[0]
- eq_([type(x) for x in row], [int, decimal.Decimal, int, int, decimal.Decimal])
- eq_(row, (5, decimal.Decimal('45.6'), 45, 53, decimal.Decimal('45.68392')))
+ row = testing.db.execute(stmt).fetchall()[0]
+ eq_(
+ [type(x) for x in row],
+ [int, decimal.Decimal, int, int, decimal.Decimal]
+ )
+ eq_(
+ row,
+ (5, decimal.Decimal('45.6'), 45, 53, decimal.Decimal('45.68392'))
+ )
row = testing.db.execute(text(stmt,
typemap={
- 'anon_1_idata':Integer(),
- 'anon_1_ndata':Numeric(20, 2),
- 'anon_1_ndata2':Numeric(20, 2),
- 'anon_1_nidata':Numeric(5, 0),
- 'anon_1_fdata':Float()
+ 'anon_1_idata': Integer(),
+ 'anon_1_ndata': Numeric(20, 2),
+ 'anon_1_ndata2': Numeric(20, 2),
+ 'anon_1_nidata': Numeric(5, 0),
+ 'anon_1_fdata': Float()
})).fetchall()[0]
- eq_([type(x) for x in row], [int, decimal.Decimal, decimal.Decimal, decimal.Decimal, float])
- eq_(row,
- (5, decimal.Decimal('45.6'), decimal.Decimal('45'), decimal.Decimal('53'), 45.683920000000001)
+ eq_(
+ [type(x) for x in row],
+ [int, decimal.Decimal, decimal.Decimal, decimal.Decimal, float]
+ )
+ eq_(
+ row,
+ (5, decimal.Decimal('45.6'), decimal.Decimal('45'),
+ decimal.Decimal('53'), 45.683920000000001)
)
row = testing.db.execute(text(stmt,
typemap={
- 'anon_1_idata':Integer(),
- 'anon_1_ndata':Numeric(20, 2, asdecimal=False),
- 'anon_1_ndata2':Numeric(20, 2, asdecimal=False),
- 'anon_1_nidata':Numeric(5, 0, asdecimal=False),
- 'anon_1_fdata':Float(asdecimal=True)
+ 'anon_1_idata': Integer(),
+ 'anon_1_ndata': Numeric(20, 2, asdecimal=False),
+ 'anon_1_ndata2': Numeric(20, 2, asdecimal=False),
+ 'anon_1_nidata': Numeric(5, 0, asdecimal=False),
+ 'anon_1_fdata': Float(asdecimal=True)
})).fetchall()[0]
- eq_([type(x) for x in row], [int, float, float, float, decimal.Decimal])
- eq_(row,
+ eq_(
+ [type(x) for x in row],
+ [int, float, float, float, decimal.Decimal]
+ )
+ eq_(
+ row,
(5, 45.6, 45, 53, decimal.Decimal('45.68392'))
)
+ @testing.provide_metadata
def test_reflect_dates(self):
- metadata = MetaData(testing.db)
+ metadata = self.metadata
Table(
"date_types", metadata,
Column('d1', DATE),
@@ -1278,20 +1399,16 @@ class TypesTest(fixtures.TestBase):
Column('d4', oracle.INTERVAL(second_precision=5)),
)
metadata.create_all()
- try:
- m = MetaData(testing.db)
- t1 = Table(
- "date_types", m,
- autoload=True)
- assert isinstance(t1.c.d1.type, DATE)
- assert isinstance(t1.c.d2.type, TIMESTAMP)
- assert not t1.c.d2.type.timezone
- assert isinstance(t1.c.d3.type, TIMESTAMP)
- assert t1.c.d3.type.timezone
- assert isinstance(t1.c.d4.type, oracle.INTERVAL)
-
- finally:
- metadata.drop_all()
+ m = MetaData(testing.db)
+ t1 = Table(
+ "date_types", m,
+ autoload=True)
+ assert isinstance(t1.c.d1.type, DATE)
+ assert isinstance(t1.c.d2.type, TIMESTAMP)
+ assert not t1.c.d2.type.timezone
+ assert isinstance(t1.c.d3.type, TIMESTAMP)
+ assert t1.c.d3.type.timezone
+ assert isinstance(t1.c.d4.type, oracle.INTERVAL)
def test_reflect_all_types_schema(self):
types_table = Table('all_types', MetaData(testing.db),
@@ -1319,7 +1436,7 @@ class TypesTest(fixtures.TestBase):
@testing.provide_metadata
def test_reflect_nvarchar(self):
metadata = self.metadata
- t = Table('t', metadata,
+ Table('t', metadata,
Column('data', sqltypes.NVARCHAR(255))
)
metadata.create_all()
@@ -1341,22 +1458,20 @@ class TypesTest(fixtures.TestBase):
assert isinstance(res, util.text_type)
+ @testing.provide_metadata
def test_char_length(self):
- metadata = MetaData(testing.db)
+ metadata = self.metadata
t1 = Table('t1', metadata,
Column("c1", VARCHAR(50)),
Column("c2", NVARCHAR(250)),
Column("c3", CHAR(200))
)
t1.create()
- try:
- m2 = MetaData(testing.db)
- t2 = Table('t1', m2, autoload=True)
- eq_(t2.c.c1.type.length, 50)
- eq_(t2.c.c2.type.length, 250)
- eq_(t2.c.c3.type.length, 200)
- finally:
- t1.drop()
+ m2 = MetaData(testing.db)
+ t2 = Table('t1', m2, autoload=True)
+ eq_(t2.c.c1.type.length, 50)
+ eq_(t2.c.c2.type.length, 250)
+ eq_(t2.c.c3.type.length, 200)
@testing.provide_metadata
def test_long_type(self):
@@ -1372,8 +1487,6 @@ class TypesTest(fixtures.TestBase):
"xyz"
)
-
-
def test_longstring(self):
metadata = MetaData(testing.db)
testing.db.execute("""
@@ -1424,15 +1537,16 @@ class EuroNumericTest(fixtures.TestBase):
del os.environ['NLS_LANG']
self.engine.dispose()
- @testing.provide_metadata
def test_output_type_handler(self):
- metadata = self.metadata
for stmt, exp, kw in [
("SELECT 0.1 FROM DUAL", decimal.Decimal("0.1"), {}),
("SELECT 15 FROM DUAL", 15, {}),
- ("SELECT CAST(15 AS NUMERIC(3, 1)) FROM DUAL", decimal.Decimal("15"), {}),
- ("SELECT CAST(0.1 AS NUMERIC(5, 2)) FROM DUAL", decimal.Decimal("0.1"), {}),
- ("SELECT :num FROM DUAL", decimal.Decimal("2.5"), {'num':decimal.Decimal("2.5")})
+ ("SELECT CAST(15 AS NUMERIC(3, 1)) FROM DUAL",
+ decimal.Decimal("15"), {}),
+ ("SELECT CAST(0.1 AS NUMERIC(5, 2)) FROM DUAL",
+ decimal.Decimal("0.1"), {}),
+ ("SELECT :num FROM DUAL", decimal.Decimal("2.5"),
+ {'num': decimal.Decimal("2.5")})
]:
test_exp = self.engine.scalar(stmt, **kw)
eq_(
@@ -1513,97 +1627,86 @@ class BufferedColumnTest(fixtures.TestBase, AssertsCompiledSQL):
class UnsupportedIndexReflectTest(fixtures.TestBase):
__only_on__ = 'oracle'
- def setup(self):
- global metadata
- metadata = MetaData(testing.db)
- t1 = Table('test_index_reflect', metadata,
+ @testing.emits_warning("No column names")
+ @testing.provide_metadata
+ def test_reflect_functional_index(self):
+ metadata = self.metadata
+ Table('test_index_reflect', metadata,
Column('data', String(20), primary_key=True)
)
metadata.create_all()
- def teardown(self):
- metadata.drop_all()
-
- @testing.emits_warning("No column names")
- def test_reflect_functional_index(self):
testing.db.execute('CREATE INDEX DATA_IDX ON '
'TEST_INDEX_REFLECT (UPPER(DATA))')
m2 = MetaData(testing.db)
- t2 = Table('test_index_reflect', m2, autoload=True)
+ Table('test_index_reflect', m2, autoload=True)
class RoundTripIndexTest(fixtures.TestBase):
__only_on__ = 'oracle'
+ @testing.provide_metadata
def test_basic(self):
- engine = testing.db
- metadata = MetaData(engine)
+ metadata = self.metadata
- table=Table("sometable", metadata,
+ table = Table("sometable", metadata,
Column("id_a", Unicode(255), primary_key=True),
Column("id_b", Unicode(255), primary_key=True, unique=True),
Column("group", Unicode(255), primary_key=True),
Column("col", Unicode(255)),
- UniqueConstraint('col','group'),
+ UniqueConstraint('col', 'group'),
)
# "group" is a keyword, so lower case
normalind = Index('tableind', table.c.id_b, table.c.group)
- # create
metadata.create_all()
- try:
- # round trip, create from reflection
- mirror = MetaData(engine)
- mirror.reflect()
- metadata.drop_all()
- mirror.create_all()
-
- # inspect the reflected creation
- inspect = MetaData(engine)
- inspect.reflect()
-
- def obj_definition(obj):
- return obj.__class__, tuple([c.name for c in
- obj.columns]), getattr(obj, 'unique', None)
-
- # find what the primary k constraint name should be
- primaryconsname = engine.execute(
- text("""SELECT constraint_name
- FROM all_constraints
- WHERE table_name = :table_name
- AND owner = :owner
- AND constraint_type = 'P' """),
- table_name=table.name.upper(),
- owner=engine.url.username.upper()).fetchall()[0][0]
-
- reflectedtable = inspect.tables[table.name]
-
- # make a dictionary of the reflected objects:
-
- reflected = dict([(obj_definition(i), i) for i in
- reflectedtable.indexes
- | reflectedtable.constraints])
-
- # assert we got primary key constraint and its name, Error
- # if not in dict
-
- assert reflected[(PrimaryKeyConstraint, ('id_a', 'id_b',
- 'group'), None)].name.upper() \
- == primaryconsname.upper()
-
- # Error if not in dict
-
- assert reflected[(Index, ('id_b', 'group'), False)].name \
- == normalind.name
- assert (Index, ('id_b', ), True) in reflected
- assert (Index, ('col', 'group'), True) in reflected
- assert len(reflectedtable.constraints) == 1
- assert len(reflectedtable.indexes) == 3
+ mirror = MetaData(testing.db)
+ mirror.reflect()
+ metadata.drop_all()
+ mirror.create_all()
- finally:
- metadata.drop_all()
+ inspect = MetaData(testing.db)
+ inspect.reflect()
+ def obj_definition(obj):
+ return obj.__class__, tuple([c.name for c in
+ obj.columns]), getattr(obj, 'unique', None)
+ # find what the primary k constraint name should be
+ primaryconsname = testing.db.execute(
+ text("""SELECT constraint_name
+ FROM all_constraints
+ WHERE table_name = :table_name
+ AND owner = :owner
+ AND constraint_type = 'P' """),
+ table_name=table.name.upper(),
+ owner=testing.db.url.username.upper()).fetchall()[0][0]
+
+ reflectedtable = inspect.tables[table.name]
+
+ # make a dictionary of the reflected objects:
+
+ reflected = dict([(obj_definition(i), i) for i in
+ reflectedtable.indexes
+ | reflectedtable.constraints])
+
+ # assert we got primary key constraint and its name, Error
+ # if not in dict
+
+ assert reflected[(PrimaryKeyConstraint, ('id_a', 'id_b',
+ 'group'), None)].name.upper() \
+ == primaryconsname.upper()
+
+ # Error if not in dict
+
+ eq_(
+ reflected[(Index, ('id_b', 'group'), False)].name,
+ normalind.name
+ )
+ assert (Index, ('id_b', ), True) in reflected
+ assert (Index, ('col', 'group'), True) in reflected
+ eq_(len(reflectedtable.constraints), 1)
+ eq_(len(reflectedtable.indexes), 3)
class SequenceTest(fixtures.TestBase, AssertsCompiledSQL):
@@ -1650,11 +1753,11 @@ class ExecuteTest(fixtures.TestBase):
metadata.create_all()
t.insert().execute(
- {'id':1, 'data':1},
- {'id':2, 'data':7},
- {'id':3, 'data':12},
- {'id':4, 'data':15},
- {'id':5, 'data':32},
+ {'id': 1, 'data': 1},
+ {'id': 2, 'data': 7},
+ {'id': 3, 'data': 12},
+ {'id': 4, 'data': 15},
+ {'id': 5, 'data': 32},
)
# here, we can't use ORDER BY.
@@ -1679,7 +1782,7 @@ class UnicodeSchemaTest(fixtures.TestBase):
@testing.provide_metadata
def test_quoted_column_non_unicode(self):
metadata = self.metadata
- table=Table("atable", metadata,
+ table = Table("atable", metadata,
Column("_underscorecolumn", Unicode(255), primary_key=True),
)
metadata.create_all()
@@ -1688,14 +1791,14 @@ class UnicodeSchemaTest(fixtures.TestBase):
{'_underscorecolumn': u('’é')},
)
result = testing.db.execute(
- table.select().where(table.c._underscorecolumn==u('’é'))
+ table.select().where(table.c._underscorecolumn == u('’é'))
).scalar()
eq_(result, u('’é'))
@testing.provide_metadata
def test_quoted_column_unicode(self):
metadata = self.metadata
- table=Table("atable", metadata,
+ table = Table("atable", metadata,
Column(u("méil"), Unicode(255), primary_key=True),
)
metadata.create_all()
diff --git a/test/engine/test_bind.py b/test/engine/test_bind.py
index 973cf4d84..8f6c547f1 100644
--- a/test/engine/test_bind.py
+++ b/test/engine/test_bind.py
@@ -1,7 +1,7 @@
"""tests the "bind" attribute/argument across schema and SQL,
including the deprecated versions of these arguments"""
-from sqlalchemy.testing import eq_, assert_raises
+from sqlalchemy.testing import assert_raises, assert_raises_message
from sqlalchemy import engine, exc
from sqlalchemy import MetaData, ThreadLocalMetaData
from sqlalchemy import Integer, text
@@ -44,7 +44,7 @@ class BindTest(fixtures.TestBase):
testing.db.connect()
):
for args in [
- ([], {'bind':bind}),
+ ([], {'bind': bind}),
([bind], {})
]:
metadata.create_all(*args[0], **args[1])
@@ -56,18 +56,13 @@ class BindTest(fixtures.TestBase):
def test_create_drop_err_metadata(self):
metadata = MetaData()
- table = Table('test_table', metadata, Column('foo', Integer))
+ Table('test_table', metadata, Column('foo', Integer))
for meth in [metadata.create_all, metadata.drop_all]:
- try:
- meth()
- assert False
- except exc.UnboundExecutionError as e:
- eq_(str(e),
- "The MetaData is not bound to an Engine or "
- "Connection. Execution can not proceed without a "
- "database to execute against. Either execute with "
- "an explicit connection or assign the MetaData's "
- ".bind to enable implicit execution.")
+ assert_raises_message(
+ exc.UnboundExecutionError,
+ "MetaData object is not bound to an Engine or Connection.",
+ meth
+ )
def test_create_drop_err_table(self):
metadata = MetaData()
@@ -79,23 +74,16 @@ class BindTest(fixtures.TestBase):
table.create,
table.drop,
]:
- try:
- meth()
- assert False
- except exc.UnboundExecutionError as e:
- eq_(
- str(e),
- "The Table 'test_table' "
- "is not bound to an Engine or Connection. "
- "Execution can not proceed without a database to execute "
- "against. Either execute with an explicit connection or "
- "assign this Table's .metadata.bind to enable implicit "
- "execution.")
+ assert_raises_message(
+ exc.UnboundExecutionError,
+ "Table object 'test_table' is not bound to an Engine or Connection.",
+ meth
+ )
@testing.uses_deprecated()
def test_create_drop_bound(self):
- for meta in (MetaData,ThreadLocalMetaData):
+ for meta in (MetaData, ThreadLocalMetaData):
for bind in (
testing.db,
testing.db.connect()
@@ -136,7 +124,7 @@ class BindTest(fixtures.TestBase):
try:
for args in (
([bind], {}),
- ([], {'bind':bind}),
+ ([], {'bind': bind}),
):
metadata = MetaData(*args[0], **args[1])
table = Table('test_table', metadata,
diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py
index 1d2aebf97..d3bd3c2cd 100644
--- a/test/engine/test_execute.py
+++ b/test/engine/test_execute.py
@@ -1,4 +1,4 @@
-
+# coding: utf-8
from sqlalchemy.testing import eq_, assert_raises, assert_raises_message, \
config, is_
@@ -17,9 +17,9 @@ from sqlalchemy.testing.engines import testing_engine
import logging.handlers
from sqlalchemy.dialects.oracle.zxjdbc import ReturningParam
from sqlalchemy.engine import result as _result, default
-from sqlalchemy.engine.base import Connection, Engine
+from sqlalchemy.engine.base import Engine
from sqlalchemy.testing import fixtures
-from sqlalchemy.testing.mock import Mock, call
+from sqlalchemy.testing.mock import Mock, call, patch
users, metadata, users_autoinc = None, None, None
@@ -29,11 +29,11 @@ class ExecuteTest(fixtures.TestBase):
global users, users_autoinc, metadata
metadata = MetaData(testing.db)
users = Table('users', metadata,
- Column('user_id', INT, primary_key = True, autoincrement=False),
+ Column('user_id', INT, primary_key=True, autoincrement=False),
Column('user_name', VARCHAR(20)),
)
users_autoinc = Table('users_autoinc', metadata,
- Column('user_id', INT, primary_key = True,
+ Column('user_id', INT, primary_key=True,
test_needs_autoincrement=True),
Column('user_name', VARCHAR(20)),
)
@@ -59,10 +59,9 @@ class ExecuteTest(fixtures.TestBase):
scalar(stmt)
eq_(result, '%')
- @testing.fails_on_everything_except('firebird', 'maxdb',
+ @testing.fails_on_everything_except('firebird',
'sqlite', '+pyodbc',
- '+mxodbc', '+zxjdbc', 'mysql+oursql',
- 'informix+informixdb')
+ '+mxodbc', '+zxjdbc', 'mysql+oursql')
def test_raw_qmark(self):
def go(conn):
conn.execute('insert into users (user_id, user_name) '
@@ -182,7 +181,7 @@ class ExecuteTest(fixtures.TestBase):
finally:
conn.close()
- @testing.fails_on_everything_except('sqlite', 'oracle+cx_oracle', 'informix+informixdb')
+ @testing.fails_on_everything_except('sqlite', 'oracle+cx_oracle')
def test_raw_named(self):
def go(conn):
conn.execute('insert into users (user_id, user_name) '
@@ -204,19 +203,36 @@ class ExecuteTest(fixtures.TestBase):
finally:
conn.close()
+ @testing.engines.close_open_connections
def test_exception_wrapping_dbapi(self):
- def go(conn):
+ conn = testing.db.connect()
+ for _c in testing.db, conn:
assert_raises_message(
tsa.exc.DBAPIError,
r"not_a_valid_statement",
- conn.execute, 'not_a_valid_statement'
+ _c.execute, 'not_a_valid_statement'
)
- go(testing.db)
- conn = testing.db.connect()
- try:
- go(conn)
- finally:
- conn.close()
+
+ @testing.requires.sqlite
+ def test_exception_wrapping_non_dbapi_error(self):
+ e = create_engine('sqlite://')
+ e.dialect.is_disconnect = is_disconnect = Mock()
+
+ with e.connect() as c:
+ c.connection.cursor = Mock(
+ return_value=Mock(
+ execute=Mock(
+ side_effect=TypeError("I'm not a DBAPI error")
+ ))
+ )
+
+ assert_raises_message(
+ TypeError,
+ "I'm not a DBAPI error",
+ c.execute, "select "
+ )
+ eq_(is_disconnect.call_count, 0)
+
def test_exception_wrapping_non_dbapi_statement(self):
class MyType(TypeDecorator):
@@ -227,7 +243,7 @@ class ExecuteTest(fixtures.TestBase):
def _go(conn):
assert_raises_message(
tsa.exc.StatementError,
- r"nope \(original cause: Exception: nope\) 'SELECT 1 ",
+ r"nope \(original cause: Exception: nope\) u?'SELECT 1 ",
conn.execute,
select([1]).\
where(
@@ -241,6 +257,25 @@ class ExecuteTest(fixtures.TestBase):
finally:
conn.close()
+ def test_stmt_exception_non_ascii(self):
+ name = util.u('méil')
+ with testing.db.connect() as conn:
+ assert_raises_message(
+ tsa.exc.StatementError,
+ util.u(
+ "A value is required for bind parameter 'uname'"
+ r'.*SELECT users.user_name AS .m\\xe9il.') if util.py2k
+ else
+ util.u(
+ "A value is required for bind parameter 'uname'"
+ '.*SELECT users.user_name AS .méil.')
+ ,
+ conn.execute,
+ select([users.c.user_name.label(name)]).where(
+ users.c.user_name == bindparam("uname")),
+ {'uname_incorrect': 'foo'}
+ )
+
def test_stmt_exception_pickleable_no_dbapi(self):
self._test_stmt_exception_pickleable(Exception("hello world"))
@@ -326,17 +361,17 @@ class ExecuteTest(fixtures.TestBase):
def test_engine_level_options(self):
eng = engines.testing_engine(options={'execution_options':
{'foo': 'bar'}})
- conn = eng.contextual_connect()
- eq_(conn._execution_options['foo'], 'bar')
- eq_(conn.execution_options(bat='hoho')._execution_options['foo'
- ], 'bar')
- eq_(conn.execution_options(bat='hoho')._execution_options['bat'
- ], 'hoho')
- eq_(conn.execution_options(foo='hoho')._execution_options['foo'
- ], 'hoho')
- eng.update_execution_options(foo='hoho')
- conn = eng.contextual_connect()
- eq_(conn._execution_options['foo'], 'hoho')
+ with eng.contextual_connect() as conn:
+ eq_(conn._execution_options['foo'], 'bar')
+ eq_(conn.execution_options(bat='hoho')._execution_options['foo'
+ ], 'bar')
+ eq_(conn.execution_options(bat='hoho')._execution_options['bat'
+ ], 'hoho')
+ eq_(conn.execution_options(foo='hoho')._execution_options['foo'
+ ], 'hoho')
+ eng.update_execution_options(foo='hoho')
+ conn = eng.contextual_connect()
+ eq_(conn._execution_options['foo'], 'hoho')
@testing.requires.ad_hoc_engines
def test_generative_engine_execution_options(self):
@@ -383,8 +418,8 @@ class ExecuteTest(fixtures.TestBase):
event.listen(eng, "before_execute", l2)
event.listen(eng1, "before_execute", l3)
- eng.execute(select([1]))
- eng1.execute(select([1]))
+ eng.execute(select([1])).close()
+ eng1.execute(select([1])).close()
eq_(canary, ["l1", "l2", "l3", "l1", "l2"])
@@ -892,45 +927,44 @@ class ResultProxyTest(fixtures.TestBase):
def test_no_rowcount_on_selects_inserts(self):
"""assert that rowcount is only called on deletes and updates.
- This because cursor.rowcount can be expensive on some dialects
- such as Firebird.
+ This because cursor.rowcount may can be expensive on some dialects
+ such as Firebird, however many dialects require it be called
+ before the cursor is closed.
"""
metadata = self.metadata
engine = engines.testing_engine()
- metadata.bind = engine
t = Table('t1', metadata,
Column('data', String(10))
)
- metadata.create_all()
+ metadata.create_all(engine)
- class BreakRowcountMixin(object):
- @property
- def rowcount(self):
- assert False
+ with patch.object(engine.dialect.execution_ctx_cls, "rowcount") as mock_rowcount:
+ mock_rowcount.__get__ = Mock()
+ engine.execute(t.insert(),
+ {'data': 'd1'},
+ {'data': 'd2'},
+ {'data': 'd3'})
- execution_ctx_cls = engine.dialect.execution_ctx_cls
- engine.dialect.execution_ctx_cls = type("FakeCtx",
- (BreakRowcountMixin,
- execution_ctx_cls),
- {})
+ eq_(len(mock_rowcount.__get__.mock_calls), 0)
- try:
- r = t.insert().execute({'data': 'd1'}, {'data': 'd2'},
- {'data': 'd3'})
- eq_(t.select().execute().fetchall(), [('d1', ), ('d2', ),
- ('d3', )])
- assert_raises(AssertionError, t.update().execute, {'data'
- : 'd4'})
- assert_raises(AssertionError, t.delete().execute)
- finally:
- engine.dialect.execution_ctx_cls = execution_ctx_cls
+ eq_(
+ engine.execute(t.select()).fetchall(),
+ [('d1', ), ('d2', ), ('d3', )]
+ )
+ eq_(len(mock_rowcount.__get__.mock_calls), 0)
+
+ engine.execute(t.update(), {'data': 'd4'})
+
+ eq_(len(mock_rowcount.__get__.mock_calls), 1)
+
+ engine.execute(t.delete())
+ eq_(len(mock_rowcount.__get__.mock_calls), 2)
- @testing.requires.python26
def test_rowproxy_is_sequence(self):
import collections
from sqlalchemy.engine import RowProxy
@@ -1016,7 +1050,7 @@ class ResultProxyTest(fixtures.TestBase):
class ExecutionOptionsTest(fixtures.TestBase):
def test_dialect_conn_options(self):
- engine = testing_engine("sqlite://")
+ engine = testing_engine("sqlite://", options=dict(_initialize=False))
engine.dialect = Mock()
conn = engine.connect()
c2 = conn.execution_options(foo="bar")
diff --git a/test/engine/test_parseconnect.py b/test/engine/test_parseconnect.py
index 106bd0782..391b92144 100644
--- a/test/engine/test_parseconnect.py
+++ b/test/engine/test_parseconnect.py
@@ -1,13 +1,12 @@
-from sqlalchemy.testing import assert_raises, eq_
+from sqlalchemy.testing import assert_raises, eq_, assert_raises_message
from sqlalchemy.util.compat import configparser, StringIO
import sqlalchemy.engine.url as url
from sqlalchemy import create_engine, engine_from_config, exc, pool
-from sqlalchemy.engine.util import _coerce_config
from sqlalchemy.engine.default import DefaultDialect
import sqlalchemy as tsa
from sqlalchemy.testing import fixtures
from sqlalchemy import testing
-from sqlalchemy.testing.mock import Mock
+from sqlalchemy.testing.mock import Mock, MagicMock, patch
class ParseConnectTest(fixtures.TestBase):
@@ -15,6 +14,7 @@ class ParseConnectTest(fixtures.TestBase):
for text in (
'dbtype://username:password@hostspec:110//usr/db_file.db',
'dbtype://username:password@hostspec/database',
+ 'dbtype+apitype://username:password@hostspec/database',
'dbtype://username:password@hostspec',
'dbtype://username:password@/database',
'dbtype://username@hostspec',
@@ -22,25 +22,53 @@ class ParseConnectTest(fixtures.TestBase):
'dbtype://hostspec/database',
'dbtype://hostspec',
'dbtype://hostspec/?arg1=val1&arg2=val2',
- 'dbtype:///database',
+ 'dbtype+apitype:///database',
'dbtype:///:memory:',
'dbtype:///foo/bar/im/a/file',
'dbtype:///E:/work/src/LEM/db/hello.db',
'dbtype:///E:/work/src/LEM/db/hello.db?foo=bar&hoho=lala',
'dbtype://',
- 'dbtype://username:password@/db',
- 'dbtype:////usr/local/mailman/lists/_xtest@example.com/memb'
- 'ers.db',
- 'dbtype://username:apples%2Foranges@hostspec/mydatabase',
+ 'dbtype://username:password@/database',
+ 'dbtype:////usr/local/_xtest@example.com/members.db',
+ 'dbtype://username:apples%2Foranges@hostspec/database',
+ 'dbtype://username:password@[2001:da8:2004:1000:202:116:160:90]/database?foo=bar',
+ 'dbtype://username:password@[2001:da8:2004:1000:202:116:160:90]:80/database?foo=bar'
):
u = url.make_url(text)
- assert u.drivername == 'dbtype'
- assert u.username == 'username' or u.username is None
- assert u.password == 'password' or u.password \
- == 'apples/oranges' or u.password is None
- assert u.host == 'hostspec' or u.host == '127.0.0.1' \
- or not u.host
- assert str(u) == text
+
+ assert u.drivername in ('dbtype', 'dbtype+apitype')
+ assert u.username in ('username', None)
+ assert u.password in ('password', 'apples/oranges', None)
+ assert u.host in ('hostspec', '127.0.0.1',
+ '2001:da8:2004:1000:202:116:160:90', '', None), u.host
+ assert u.database in ('database',
+ '/usr/local/_xtest@example.com/members.db',
+ '/usr/db_file.db', ':memory:', '',
+ 'foo/bar/im/a/file',
+ 'E:/work/src/LEM/db/hello.db', None), u.database
+ eq_(str(u), text)
+
+ def test_rfc1738_password(self):
+ u = url.make_url("dbtype://user:pass word + other%3Awords@host/dbname")
+ eq_(u.password, "pass word + other:words")
+ eq_(str(u), "dbtype://user:pass word + other%3Awords@host/dbname")
+
+ u = url.make_url('dbtype://username:apples%2Foranges@hostspec/database')
+ eq_(u.password, "apples/oranges")
+ eq_(str(u), 'dbtype://username:apples%2Foranges@hostspec/database')
+
+ u = url.make_url('dbtype://username:apples%40oranges%40%40@hostspec/database')
+ eq_(u.password, "apples@oranges@@")
+ eq_(str(u), 'dbtype://username:apples%40oranges%40%40@hostspec/database')
+
+ u = url.make_url('dbtype://username%40:@hostspec/database')
+ eq_(u.password, '')
+ eq_(u.username, "username@")
+ eq_(str(u), 'dbtype://username%40:@hostspec/database')
+
+ u = url.make_url('dbtype://username:pass%2Fword@hostspec/database')
+ eq_(u.password, 'pass/word')
+ eq_(str(u), 'dbtype://username:pass%2Fword@hostspec/database')
class DialectImportTest(fixtures.TestBase):
def test_import_base_dialects(self):
@@ -81,50 +109,6 @@ class CreateEngineTest(fixtures.TestBase):
module=dbapi, _initialize=False)
c = e.connect()
- def test_coerce_config(self):
- raw = r"""
-[prefixed]
-sqlalchemy.url=postgresql://scott:tiger@somehost/test?fooz=somevalue
-sqlalchemy.convert_unicode=0
-sqlalchemy.echo=false
-sqlalchemy.echo_pool=1
-sqlalchemy.max_overflow=2
-sqlalchemy.pool_recycle=50
-sqlalchemy.pool_size=2
-sqlalchemy.pool_threadlocal=1
-sqlalchemy.pool_timeout=10
-[plain]
-url=postgresql://scott:tiger@somehost/test?fooz=somevalue
-convert_unicode=0
-echo=0
-echo_pool=1
-max_overflow=2
-pool_recycle=50
-pool_size=2
-pool_threadlocal=1
-pool_timeout=10
-"""
- ini = configparser.ConfigParser()
- ini.readfp(StringIO(raw))
-
- expected = {
- 'url': 'postgresql://scott:tiger@somehost/test?fooz=somevalue',
- 'convert_unicode': 0,
- 'echo': False,
- 'echo_pool': True,
- 'max_overflow': 2,
- 'pool_recycle': 50,
- 'pool_size': 2,
- 'pool_threadlocal': True,
- 'pool_timeout': 10,
- }
-
- prefixed = dict(ini.items('prefixed'))
- self.assert_(_coerce_config(prefixed, 'sqlalchemy.')
- == expected)
-
- plain = dict(ini.items('plain'))
- self.assert_(_coerce_config(plain, '') == expected)
def test_engine_from_config(self):
dbapi = mock_dbapi
@@ -141,19 +125,35 @@ pool_timeout=10
'z=somevalue')
assert e.echo is True
- for param, values in [
- ('convert_unicode', ('true', 'false', 'force')),
- ('echo', ('true', 'false', 'debug')),
- ('echo_pool', ('true', 'false', 'debug')),
- ('use_native_unicode', ('true', 'false')),
- ]:
- for value in values:
- config = {
- 'sqlalchemy.url': 'postgresql://scott:tiger@somehost/test',
- 'sqlalchemy.%s' % param : value
- }
- cfg = _coerce_config(config, 'sqlalchemy.')
- assert cfg[param] == {'true':True, 'false':False}.get(value, value)
+
+ def test_engine_from_config_custom(self):
+ from sqlalchemy import util
+ from sqlalchemy.dialects import registry
+ tokens = __name__.split(".")
+
+ class MyDialect(MockDialect):
+ engine_config_types = {
+ "foobar": int,
+ "bathoho": util.bool_or_str('force')
+ }
+
+ def __init__(self, foobar=None, bathoho=None, **kw):
+ self.foobar = foobar
+ self.bathoho = bathoho
+
+ global dialect
+ dialect = MyDialect
+ registry.register("mockdialect.barb",
+ ".".join(tokens[0:-1]), tokens[-1])
+
+ config = {
+ "sqlalchemy.url": "mockdialect+barb://",
+ "sqlalchemy.foobar": "5",
+ "sqlalchemy.bathoho": "false"
+ }
+ e = engine_from_config(config, _initialize=False)
+ eq_(e.dialect.foobar, 5)
+ eq_(e.dialect.bathoho, False)
def test_custom(self):
@@ -227,17 +227,38 @@ pool_timeout=10
@testing.requires.sqlite
def test_wraps_connect_in_dbapi(self):
- # sqlite uses SingletonThreadPool which doesnt have max_overflow
+ e = create_engine('sqlite://')
+ sqlite3 = e.dialect.dbapi
- assert_raises(TypeError, create_engine, 'sqlite://',
- max_overflow=5, module=mock_sqlite_dbapi)
- e = create_engine('sqlite://', connect_args={'use_unicode'
- : True}, convert_unicode=True)
+ dbapi = MockDBAPI()
+ dbapi.Error = sqlite3.Error,
+ dbapi.ProgrammingError = sqlite3.ProgrammingError
+ dbapi.connect = Mock(side_effect=sqlite3.ProgrammingError("random error"))
try:
- e.connect()
+ create_engine('sqlite://', module=dbapi).connect()
+ assert False
except tsa.exc.DBAPIError as de:
assert not de.connection_invalidated
+
+ @testing.requires.sqlite
+ def test_dont_touch_non_dbapi_exception_on_connect(self):
+ e = create_engine('sqlite://')
+ sqlite3 = e.dialect.dbapi
+
+ dbapi = MockDBAPI()
+ dbapi.Error = sqlite3.Error,
+ dbapi.ProgrammingError = sqlite3.ProgrammingError
+ dbapi.connect = Mock(side_effect=TypeError("I'm not a DBAPI error"))
+ e = create_engine('sqlite://', module=dbapi)
+ e.dialect.is_disconnect = is_disconnect = Mock()
+ assert_raises_message(
+ TypeError,
+ "I'm not a DBAPI error",
+ e.connect
+ )
+ eq_(is_disconnect.call_count, 0)
+
def test_ensure_dialect_does_is_disconnect_no_conn(self):
"""test that is_disconnect() doesn't choke if no connection, cursor given."""
dialect = testing.db.dialect
@@ -277,6 +298,10 @@ pool_timeout=10
assert e.url.drivername == e2.url.drivername == 'mysql'
assert e.url.username == e2.url.username == 'scott'
assert e2.url is u
+ assert str(u) == 'mysql://scott:tiger@localhost/test'
+ assert repr(u) == 'mysql://scott:***@localhost/test'
+ assert repr(e) == 'Engine(mysql://scott:***@localhost/test)'
+ assert repr(e2) == 'Engine(mysql://scott:***@localhost/test)'
def test_poolargs(self):
"""test that connection pool args make it thru"""
@@ -363,7 +388,7 @@ def MockDBAPI(**assert_kwargs):
)
return connection
- return Mock(
+ return MagicMock(
sqlite_version_info=(99, 9, 9,),
version_info=(99, 9, 9,),
sqlite_version='99.9.9',
diff --git a/test/engine/test_pool.py b/test/engine/test_pool.py
index 05c0487f8..2e4c2dc48 100644
--- a/test/engine/test_pool.py
+++ b/test/engine/test_pool.py
@@ -10,6 +10,8 @@ from sqlalchemy.testing import fixtures
from sqlalchemy.testing.mock import Mock, call
+join_timeout = 10
+
def MockDBAPI():
def cursor():
while True:
@@ -306,6 +308,13 @@ class PoolEventsTest(PoolTestBase):
return p, canary
+ def _invalidate_event_fixture(self):
+ p = self._queuepool_fixture()
+ canary = Mock()
+ event.listen(p, 'invalidate', canary)
+
+ return p, canary
+
def test_first_connect_event(self):
p, canary = self._first_connect_event_fixture()
@@ -409,6 +418,31 @@ class PoolEventsTest(PoolTestBase):
c1.close()
eq_(canary, ['reset'])
+ def test_invalidate_event_no_exception(self):
+ p, canary = self._invalidate_event_fixture()
+
+ c1 = p.connect()
+ c1.close()
+ assert not canary.called
+ c1 = p.connect()
+ dbapi_con = c1.connection
+ c1.invalidate()
+ assert canary.call_args_list[0][0][0] is dbapi_con
+ assert canary.call_args_list[0][0][2] is None
+
+ def test_invalidate_event_exception(self):
+ p, canary = self._invalidate_event_fixture()
+
+ c1 = p.connect()
+ c1.close()
+ assert not canary.called
+ c1 = p.connect()
+ dbapi_con = c1.connection
+ exc = Exception("hi")
+ c1.invalidate(exc)
+ assert canary.call_args_list[0][0][0] is dbapi_con
+ assert canary.call_args_list[0][0][2] is exc
+
def test_checkin_event_gc(self):
p, canary = self._checkin_event_fixture()
@@ -827,7 +861,7 @@ class QueuePoolTest(PoolTestBase):
th.start()
threads.append(th)
for th in threads:
- th.join()
+ th.join(join_timeout)
assert len(timeouts) > 0
for t in timeouts:
@@ -864,22 +898,109 @@ class QueuePoolTest(PoolTestBase):
th.start()
threads.append(th)
for th in threads:
- th.join()
+ th.join(join_timeout)
self.assert_(max(peaks) <= max_overflow)
lazy_gc()
assert not pool._refs
+
+ def test_overflow_reset_on_failed_connect(self):
+ dbapi = Mock()
+
+ def failing_dbapi():
+ time.sleep(2)
+ raise Exception("connection failed")
+
+ creator = dbapi.connect
+ def create():
+ return creator()
+
+ p = pool.QueuePool(creator=create, pool_size=2, max_overflow=3)
+ c1 = p.connect()
+ c2 = p.connect()
+ c3 = p.connect()
+ eq_(p._overflow, 1)
+ creator = failing_dbapi
+ assert_raises(Exception, p.connect)
+ eq_(p._overflow, 1)
+
+ @testing.requires.threading_with_mock
+ def test_hanging_connect_within_overflow(self):
+ """test that a single connect() call which is hanging
+ does not block other connections from proceeding."""
+
+ dbapi = Mock()
+ mutex = threading.Lock()
+
+ def hanging_dbapi():
+ time.sleep(2)
+ with mutex:
+ return dbapi.connect()
+
+ def fast_dbapi():
+ with mutex:
+ return dbapi.connect()
+
+ creator = threading.local()
+
+ def create():
+ return creator.mock_connector()
+
+ def run_test(name, pool, should_hang):
+ if should_hang:
+ creator.mock_connector = hanging_dbapi
+ else:
+ creator.mock_connector = fast_dbapi
+
+ conn = pool.connect()
+ conn.operation(name)
+ time.sleep(1)
+ conn.close()
+
+ p = pool.QueuePool(creator=create, pool_size=2, max_overflow=3)
+
+ threads = [
+ threading.Thread(
+ target=run_test, args=("success_one", p, False)),
+ threading.Thread(
+ target=run_test, args=("success_two", p, False)),
+ threading.Thread(
+ target=run_test, args=("overflow_one", p, True)),
+ threading.Thread(
+ target=run_test, args=("overflow_two", p, False)),
+ threading.Thread(
+ target=run_test, args=("overflow_three", p, False))
+ ]
+ for t in threads:
+ t.start()
+ time.sleep(.2)
+
+ for t in threads:
+ t.join(timeout=join_timeout)
+ eq_(
+ dbapi.connect().operation.mock_calls,
+ [call("success_one"), call("success_two"),
+ call("overflow_two"), call("overflow_three"),
+ call("overflow_one")]
+ )
+
+
@testing.requires.threading_with_mock
def test_waiters_handled(self):
"""test that threads waiting for connections are
handled when the pool is replaced.
"""
+ mutex = threading.Lock()
dbapi = MockDBAPI()
def creator():
- return dbapi.connect()
+ mutex.acquire()
+ try:
+ return dbapi.connect()
+ finally:
+ mutex.release()
success = []
for timeout in (None, 30):
@@ -897,21 +1018,27 @@ class QueuePoolTest(PoolTestBase):
c1 = p.connect()
c2 = p.connect()
+ threads = []
for i in range(2):
t = threading.Thread(target=waiter,
args=(p, timeout, max_overflow))
- t.setDaemon(True) # so the tests dont hang if this fails
+ t.daemon = True
t.start()
+ threads.append(t)
- c1.invalidate()
- c2.invalidate()
- p2 = p._replace()
+ # this sleep makes sure that the
+ # two waiter threads hit upon wait()
+ # inside the queue, before we invalidate the other
+ # two conns
time.sleep(.2)
+ p2 = p._replace()
+
+ for t in threads:
+ t.join(join_timeout)
eq_(len(success), 12, "successes: %s" % success)
@testing.requires.threading_with_mock
- @testing.requires.python26
def test_notify_waiters(self):
dbapi = MockDBAPI()
canary = []
@@ -924,9 +1051,7 @@ class QueuePoolTest(PoolTestBase):
p1 = pool.QueuePool(creator=creator1,
pool_size=1, timeout=None,
max_overflow=0)
- p2 = pool.QueuePool(creator=creator2,
- pool_size=1, timeout=None,
- max_overflow=-1)
+ p2 = pool.NullPool(creator=creator2)
def waiter(p):
conn = p.connect()
time.sleep(.5)
@@ -934,14 +1059,18 @@ class QueuePoolTest(PoolTestBase):
c1 = p1.connect()
+ threads = []
for i in range(5):
t = threading.Thread(target=waiter, args=(p1, ))
- t.setDaemon(True)
t.start()
+ threads.append(t)
time.sleep(.5)
eq_(canary, [1])
p1._pool.abort(p2)
- time.sleep(1)
+
+ for t in threads:
+ t.join(join_timeout)
+
eq_(canary, [1, 2, 2, 2, 2, 2])
def test_dispose_closes_pooled(self):
@@ -987,6 +1116,7 @@ class QueuePoolTest(PoolTestBase):
self._test_overflow(40, 5)
def test_mixed_close(self):
+ pool._refs.clear()
p = self._queuepool_fixture(pool_size=3, max_overflow=-1, use_threadlocal=True)
c1 = p.connect()
c2 = p.connect()
@@ -1198,6 +1328,96 @@ class QueuePoolTest(PoolTestBase):
c2 = p.connect()
assert c2.connection is not None
+class ResetOnReturnTest(PoolTestBase):
+ def _fixture(self, **kw):
+ dbapi = Mock()
+ return dbapi, pool.QueuePool(creator=lambda: dbapi.connect('foo.db'), **kw)
+
+ def test_plain_rollback(self):
+ dbapi, p = self._fixture(reset_on_return='rollback')
+
+ c1 = p.connect()
+ c1.close()
+ assert dbapi.connect().rollback.called
+ assert not dbapi.connect().commit.called
+
+ def test_plain_commit(self):
+ dbapi, p = self._fixture(reset_on_return='commit')
+
+ c1 = p.connect()
+ c1.close()
+ assert not dbapi.connect().rollback.called
+ assert dbapi.connect().commit.called
+
+ def test_plain_none(self):
+ dbapi, p = self._fixture(reset_on_return=None)
+
+ c1 = p.connect()
+ c1.close()
+ assert not dbapi.connect().rollback.called
+ assert not dbapi.connect().commit.called
+
+ def test_agent_rollback(self):
+ dbapi, p = self._fixture(reset_on_return='rollback')
+
+ class Agent(object):
+ def __init__(self, conn):
+ self.conn = conn
+
+ def rollback(self):
+ self.conn.special_rollback()
+
+ def commit(self):
+ self.conn.special_commit()
+
+ c1 = p.connect()
+ c1._reset_agent = Agent(c1)
+ c1.close()
+
+ assert dbapi.connect().special_rollback.called
+ assert not dbapi.connect().special_commit.called
+
+ assert not dbapi.connect().rollback.called
+ assert not dbapi.connect().commit.called
+
+ c1 = p.connect()
+ c1.close()
+ eq_(dbapi.connect().special_rollback.call_count, 1)
+ eq_(dbapi.connect().special_commit.call_count, 0)
+
+ assert dbapi.connect().rollback.called
+ assert not dbapi.connect().commit.called
+
+ def test_agent_commit(self):
+ dbapi, p = self._fixture(reset_on_return='commit')
+
+ class Agent(object):
+ def __init__(self, conn):
+ self.conn = conn
+
+ def rollback(self):
+ self.conn.special_rollback()
+
+ def commit(self):
+ self.conn.special_commit()
+
+ c1 = p.connect()
+ c1._reset_agent = Agent(c1)
+ c1.close()
+ assert not dbapi.connect().special_rollback.called
+ assert dbapi.connect().special_commit.called
+
+ assert not dbapi.connect().rollback.called
+ assert not dbapi.connect().commit.called
+
+ c1 = p.connect()
+ c1.close()
+
+ eq_(dbapi.connect().special_rollback.call_count, 0)
+ eq_(dbapi.connect().special_commit.call_count, 1)
+ assert not dbapi.connect().rollback.called
+ assert dbapi.connect().commit.called
+
class SingletonThreadPoolTest(PoolTestBase):
@testing.requires.threading_with_mock
@@ -1245,7 +1465,7 @@ class SingletonThreadPoolTest(PoolTestBase):
th.start()
threads.append(th)
for th in threads:
- th.join()
+ th.join(join_timeout)
assert len(p._all_conns) == 3
if strong_refs:
diff --git a/test/engine/test_reconnect.py b/test/engine/test_reconnect.py
index 0a964cf63..ba336a1bf 100644
--- a/test/engine/test_reconnect.py
+++ b/test/engine/test_reconnect.py
@@ -413,8 +413,6 @@ class RealReconnectTest(fixtures.TestBase):
def teardown(self):
self.engine.dispose()
- @testing.fails_on('+informixdb',
- "Wrong error thrown, fix in informixdb?")
def test_reconnect(self):
conn = self.engine.connect()
@@ -539,8 +537,6 @@ class RealReconnectTest(fixtures.TestBase):
# pool was recreated
assert engine.pool is not p1
- @testing.fails_on('+informixdb',
- "Wrong error thrown, fix in informixdb?")
def test_null_pool(self):
engine = \
engines.reconnecting_engine(options=dict(poolclass=pool.NullPool))
@@ -554,8 +550,6 @@ class RealReconnectTest(fixtures.TestBase):
eq_(conn.execute(select([1])).scalar(), 1)
assert not conn.invalidated
- @testing.fails_on('+informixdb',
- "Wrong error thrown, fix in informixdb?")
def test_close(self):
conn = self.engine.connect()
eq_(conn.execute(select([1])).scalar(), 1)
@@ -569,8 +563,6 @@ class RealReconnectTest(fixtures.TestBase):
conn = self.engine.connect()
eq_(conn.execute(select([1])).scalar(), 1)
- @testing.fails_on('+informixdb',
- "Wrong error thrown, fix in informixdb?")
def test_with_transaction(self):
conn = self.engine.connect()
trans = conn.begin()
@@ -651,8 +643,6 @@ class InvalidateDuringResultTest(fixtures.TestBase):
'+cymysql', '+pymysql', '+pg8000'
], "Buffers the result set and doesn't check for "
"connection close")
- @testing.fails_on('+informixdb',
- "Wrong error thrown, fix in informixdb?")
def test_invalidate_on_results(self):
conn = self.engine.connect()
result = conn.execute('select * from sometable')
diff --git a/test/engine/test_reflection.py b/test/engine/test_reflection.py
index 52cbc15e6..2f311f7e7 100644
--- a/test/engine/test_reflection.py
+++ b/test/engine/test_reflection.py
@@ -361,6 +361,27 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
self.assert_(isinstance(table.c.col4.type, sa.String))
@testing.provide_metadata
+ def test_override_upgrade_pk_flag(self):
+ meta = self.metadata
+ table = Table(
+ 'override_test', meta,
+ Column('col1', sa.Integer),
+ Column('col2', sa.String(20)),
+ Column('col3', sa.Numeric)
+ )
+ table.create()
+
+ meta2 = MetaData(testing.db)
+ table = Table(
+ 'override_test', meta2,
+ Column('col1', sa.Integer, primary_key=True),
+ autoload=True)
+
+ eq_(list(table.primary_key), [table.c.col1])
+ eq_(table.c.col1.primary_key, True)
+
+
+ @testing.provide_metadata
def test_override_pkfk(self):
"""test that you can override columns which contain foreign keys
to other reflected tables, where the foreign key column is also
@@ -602,6 +623,55 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
is a2.c.user_id
assert u2.join(a2).onclause.compare(u2.c.id == a2.c.user_id)
+ @testing.only_on(['postgresql', 'mysql'])
+ @testing.provide_metadata
+ def test_fk_options(self):
+ """test that foreign key reflection includes options (on
+ backends with {dialect}.get_foreign_keys() support)"""
+
+ if testing.against('postgresql'):
+ test_attrs = ('match', 'onupdate', 'ondelete', 'deferrable', 'initially')
+ addresses_user_id_fkey = sa.ForeignKey(
+ # Each option is specifically not a Postgres default, or
+ # it won't be returned by PG's inspection
+ 'users.id',
+ name = 'addresses_user_id_fkey',
+ match='FULL',
+ onupdate='RESTRICT',
+ ondelete='RESTRICT',
+ deferrable=True,
+ initially='DEFERRED'
+ )
+ elif testing.against('mysql'):
+ # MATCH, DEFERRABLE, and INITIALLY cannot be defined for MySQL
+ # ON UPDATE and ON DELETE have defaults of RESTRICT, which are
+ # elided by MySQL's inspection
+ addresses_user_id_fkey = sa.ForeignKey(
+ 'users.id',
+ name = 'addresses_user_id_fkey',
+ onupdate='CASCADE',
+ ondelete='CASCADE'
+ )
+ test_attrs = ('onupdate', 'ondelete')
+
+ meta = self.metadata
+ Table('users', meta,
+ Column('id', sa.Integer, primary_key=True),
+ Column('name', sa.String(30)),
+ test_needs_fk=True)
+ Table('addresses', meta,
+ Column('id', sa.Integer, primary_key=True),
+ Column('user_id', sa.Integer, addresses_user_id_fkey),
+ test_needs_fk=True)
+ meta.create_all()
+
+ meta2 = MetaData()
+ meta2.reflect(testing.db)
+ for fk in meta2.tables['addresses'].foreign_keys:
+ ref = addresses_user_id_fkey
+ for attr in test_attrs:
+ eq_(getattr(fk, attr), getattr(ref, attr))
+
def test_pks_not_uniques(self):
"""test that primary key reflection not tripped up by unique
indexes"""
@@ -705,10 +775,6 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
@testing.crashes('oracle', 'FIXME: unknown, confirm not fails_on')
- @testing.fails_on('+informixdb',
- "FIXME: should be supported via the "
- "DELIMITED env var but that breaks "
- "everything else for now")
@testing.provide_metadata
def test_reserved(self):
@@ -725,7 +791,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
# There's currently no way to calculate identifier case
# normalization in isolation, so...
- if testing.against('firebird', 'oracle', 'maxdb'):
+ if testing.against('firebird', 'oracle'):
check_col = 'TRUE'
else:
check_col = 'true'
@@ -778,6 +844,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
def test_reflect_uses_bind_engine_reflect(self):
self._test_reflect_uses_bind(lambda e: MetaData().reflect(e))
+
@testing.provide_metadata
def test_reflect_all(self):
existing = testing.db.table_names()
@@ -833,6 +900,18 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
m8.reflect
)
+ m8_e1 = MetaData(testing.db)
+ rt_c = Table('rt_c', m8_e1)
+ m8_e1.reflect(extend_existing=True)
+ eq_(set(m8_e1.tables.keys()), set(names))
+ eq_(rt_c.c.keys(), ['id'])
+
+ m8_e2 = MetaData(testing.db)
+ rt_c = Table('rt_c', m8_e2)
+ m8_e2.reflect(extend_existing=True, only=['rt_a', 'rt_c'])
+ eq_(set(m8_e2.tables.keys()), set(['rt_a', 'rt_c']))
+ eq_(rt_c.c.keys(), ['id'])
+
if existing:
print("Other tables present in database, skipping some checks.")
else:
@@ -1423,6 +1502,7 @@ class CaseSensitiveTest(fixtures.TablesTest):
class ColumnEventsTest(fixtures.TestBase):
+
@classmethod
def setup_class(cls):
cls.metadata = MetaData()
@@ -1430,7 +1510,16 @@ class ColumnEventsTest(fixtures.TestBase):
'to_reflect',
cls.metadata,
Column('x', sa.Integer, primary_key=True),
+ Column('y', sa.Integer),
+ test_needs_fk=True
)
+ cls.related = Table(
+ 'related',
+ cls.metadata,
+ Column('q', sa.Integer, sa.ForeignKey('to_reflect.x')),
+ test_needs_fk=True
+ )
+ sa.Index("some_index", cls.to_reflect.c.y)
cls.metadata.create_all(testing.db)
@classmethod
@@ -1440,7 +1529,7 @@ class ColumnEventsTest(fixtures.TestBase):
def teardown(self):
events.SchemaEventTarget.dispatch._clear()
- def _do_test(self, col, update, assert_):
+ def _do_test(self, col, update, assert_, tablename="to_reflect"):
# load the actual Table class, not the test
# wrapper
from sqlalchemy.schema import Table
@@ -1450,22 +1539,54 @@ class ColumnEventsTest(fixtures.TestBase):
if column_info['name'] == col:
column_info.update(update)
- t = Table('to_reflect', m, autoload=True, listeners=[
+ t = Table(tablename, m, autoload=True, listeners=[
('column_reflect', column_reflect),
])
assert_(t)
m = MetaData(testing.db)
event.listen(Table, 'column_reflect', column_reflect)
- t2 = Table('to_reflect', m, autoload=True)
+ t2 = Table(tablename, m, autoload=True)
assert_(t2)
def test_override_key(self):
+ def assertions(table):
+ eq_(table.c.YXZ.name, "x")
+ eq_(set(table.primary_key), set([table.c.YXZ]))
+
self._do_test(
"x", {"key": "YXZ"},
- lambda table: eq_(table.c.YXZ.name, "x")
+ assertions
)
+ def test_override_index(self):
+ def assertions(table):
+ idx = list(table.indexes)[0]
+ eq_(idx.columns, [table.c.YXZ])
+
+ self._do_test(
+ "y", {"key": "YXZ"},
+ assertions
+ )
+
+ def test_override_key_fk(self):
+ m = MetaData(testing.db)
+ def column_reflect(insp, table, column_info):
+
+ if column_info['name'] == 'q':
+ column_info['key'] = 'qyz'
+ elif column_info['name'] == 'x':
+ column_info['key'] = 'xyz'
+
+ to_reflect = Table("to_reflect", m, autoload=True, listeners=[
+ ('column_reflect', column_reflect),
+ ])
+ related = Table("related", m, autoload=True, listeners=[
+ ('column_reflect', column_reflect),
+ ])
+
+ assert related.c.qyz.references(to_reflect.c.xyz)
+
def test_override_type(self):
def assert_(table):
assert isinstance(table.c.x.type, sa.String)
diff --git a/test/engine/test_transaction.py b/test/engine/test_transaction.py
index ffc12b5b9..c373133d1 100644
--- a/test/engine/test_transaction.py
+++ b/test/engine/test_transaction.py
@@ -3,6 +3,7 @@ from sqlalchemy.testing import eq_, assert_raises, \
import sys
import time
import threading
+from sqlalchemy import event
from sqlalchemy.testing.engines import testing_engine
from sqlalchemy import create_engine, MetaData, INT, VARCHAR, Sequence, \
select, Integer, String, func, text, exc
@@ -29,7 +30,6 @@ class TransactionTest(fixtures.TestBase):
testing.db.execute(users.delete()).close()
@classmethod
- @testing.crashes('mysql+cymysql', 'deadlock')
def teardown_class(cls):
users.drop(testing.db)
@@ -342,7 +342,8 @@ class TransactionTest(fixtures.TestBase):
transaction = connection.begin_twophase()
connection.execute(users.insert(), user_id=1, user_name='user1')
transaction.prepare()
- connection.close()
+ connection.invalidate()
+
connection2 = testing.db.connect()
eq_(connection2.execute(select([users.c.user_id]).
order_by(users.c.user_id)).fetchall(),
@@ -379,6 +380,138 @@ class TransactionTest(fixtures.TestBase):
eq_(result.fetchall(), [('user1', ), ('user4', )])
conn.close()
+ @testing.requires.two_phase_transactions
+ def test_reset_rollback_two_phase_no_rollback(self):
+ # test [ticket:2907], essentially that the
+ # TwoPhaseTransaction is given the job of "reset on return"
+ # so that picky backends like MySQL correctly clear out
+ # their state when a connection is closed without handling
+ # the transaction explicitly.
+
+ eng = testing_engine()
+
+ # MySQL raises if you call straight rollback() on
+ # a connection with an XID present
+ @event.listens_for(eng, "invalidate")
+ def conn_invalidated(dbapi_con, con_record, exception):
+ dbapi_con.close()
+ raise exception
+
+ with eng.connect() as conn:
+ rec = conn.connection._connection_record
+ raw_dbapi_con = rec.connection
+ xa = conn.begin_twophase()
+ conn.execute(users.insert(), user_id=1, user_name='user1')
+
+ assert rec.connection is raw_dbapi_con
+
+ with eng.connect() as conn:
+ result = \
+ conn.execute(select([users.c.user_name]).
+ order_by(users.c.user_id))
+ eq_(result.fetchall(), [])
+
+class ResetAgentTest(fixtures.TestBase):
+ def test_begin_close(self):
+ with testing.db.connect() as connection:
+ trans = connection.begin()
+ assert connection.connection._reset_agent is trans
+ assert not trans.is_active
+
+ def test_begin_rollback(self):
+ with testing.db.connect() as connection:
+ trans = connection.begin()
+ assert connection.connection._reset_agent is trans
+ trans.rollback()
+ assert connection.connection._reset_agent is None
+
+ def test_begin_commit(self):
+ with testing.db.connect() as connection:
+ trans = connection.begin()
+ assert connection.connection._reset_agent is trans
+ trans.commit()
+ assert connection.connection._reset_agent is None
+
+ @testing.requires.savepoints
+ def test_begin_nested_close(self):
+ with testing.db.connect() as connection:
+ trans = connection.begin_nested()
+ assert connection.connection._reset_agent is trans
+ assert not trans.is_active
+
+ @testing.requires.savepoints
+ def test_begin_begin_nested_close(self):
+ with testing.db.connect() as connection:
+ trans = connection.begin()
+ trans2 = connection.begin_nested()
+ assert connection.connection._reset_agent is trans
+ assert trans2.is_active # was never closed
+ assert not trans.is_active
+
+ @testing.requires.savepoints
+ def test_begin_begin_nested_rollback_commit(self):
+ with testing.db.connect() as connection:
+ trans = connection.begin()
+ trans2 = connection.begin_nested()
+ assert connection.connection._reset_agent is trans
+ trans2.rollback()
+ assert connection.connection._reset_agent is trans
+ trans.commit()
+ assert connection.connection._reset_agent is None
+
+ @testing.requires.savepoints
+ def test_begin_begin_nested_rollback_rollback(self):
+ with testing.db.connect() as connection:
+ trans = connection.begin()
+ trans2 = connection.begin_nested()
+ assert connection.connection._reset_agent is trans
+ trans2.rollback()
+ assert connection.connection._reset_agent is trans
+ trans.rollback()
+ assert connection.connection._reset_agent is None
+
+ def test_begin_begin_rollback_rollback(self):
+ with testing.db.connect() as connection:
+ trans = connection.begin()
+ trans2 = connection.begin()
+ assert connection.connection._reset_agent is trans
+ trans2.rollback()
+ assert connection.connection._reset_agent is None
+ trans.rollback()
+ assert connection.connection._reset_agent is None
+
+ def test_begin_begin_commit_commit(self):
+ with testing.db.connect() as connection:
+ trans = connection.begin()
+ trans2 = connection.begin()
+ assert connection.connection._reset_agent is trans
+ trans2.commit()
+ assert connection.connection._reset_agent is trans
+ trans.commit()
+ assert connection.connection._reset_agent is None
+
+ @testing.requires.two_phase_transactions
+ def test_reset_via_agent_begin_twophase(self):
+ with testing.db.connect() as connection:
+ trans = connection.begin_twophase()
+ assert connection.connection._reset_agent is trans
+
+ @testing.requires.two_phase_transactions
+ def test_reset_via_agent_begin_twophase_commit(self):
+ with testing.db.connect() as connection:
+ trans = connection.begin_twophase()
+ assert connection.connection._reset_agent is trans
+ trans.commit()
+ assert connection.connection._reset_agent is None
+
+ @testing.requires.two_phase_transactions
+ def test_reset_via_agent_begin_twophase_rollback(self):
+ with testing.db.connect() as connection:
+ trans = connection.begin_twophase()
+ assert connection.connection._reset_agent is trans
+ trans.rollback()
+ assert connection.connection._reset_agent is None
+
class AutoRollbackTest(fixtures.TestBase):
@classmethod
@@ -504,7 +637,7 @@ class ExplicitAutoCommitTest(fixtures.TestBase):
conn2.close()
@testing.uses_deprecated(r'autocommit on select\(\) is deprecated',
- r'autocommit\(\) is deprecated')
+ r'``autocommit\(\)`` is deprecated')
def test_explicit_compiled_deprecated(self):
conn1 = testing.db.connect()
conn2 = testing.db.connect()
@@ -1036,7 +1169,6 @@ class ForUpdateTest(fixtures.TestBase):
@testing.crashes('mssql', 'FIXME: unknown')
@testing.crashes('firebird', 'FIXME: unknown')
@testing.crashes('sybase', 'FIXME: unknown')
- @testing.crashes('access', 'FIXME: unknown')
@testing.requires.independent_connections
def test_queued_update(self):
"""Test SELECT FOR UPDATE with concurrent modifications.
@@ -1101,7 +1233,6 @@ class ForUpdateTest(fixtures.TestBase):
@testing.crashes('mssql', 'FIXME: unknown')
@testing.crashes('firebird', 'FIXME: unknown')
@testing.crashes('sybase', 'FIXME: unknown')
- @testing.crashes('access', 'FIXME: unknown')
@testing.requires.independent_connections
def test_queued_select(self):
"""Simple SELECT FOR UPDATE conflict test"""
@@ -1113,7 +1244,6 @@ class ForUpdateTest(fixtures.TestBase):
@testing.fails_on('mysql', 'No support for NOWAIT')
@testing.crashes('firebird', 'FIXME: unknown')
@testing.crashes('sybase', 'FIXME: unknown')
- @testing.crashes('access', 'FIXME: unknown')
@testing.requires.independent_connections
def test_nowait_select(self):
"""Simple SELECT FOR UPDATE NOWAIT conflict test"""
diff --git a/test/ext/declarative/test_basic.py b/test/ext/declarative/test_basic.py
index 540f1623f..1f14d8164 100644
--- a/test/ext/declarative/test_basic.py
+++ b/test/ext/declarative/test_basic.py
@@ -4,14 +4,14 @@ from sqlalchemy.testing import eq_, assert_raises, \
from sqlalchemy.ext import declarative as decl
from sqlalchemy import exc
import sqlalchemy as sa
-from sqlalchemy import testing
+from sqlalchemy import testing, util
from sqlalchemy import MetaData, Integer, String, ForeignKey, \
ForeignKeyConstraint, Index
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.orm import relationship, create_session, class_mapper, \
joinedload, configure_mappers, backref, clear_mappers, \
deferred, column_property, composite,\
- Session
+ Session, properties
from sqlalchemy.testing import eq_
from sqlalchemy.util import classproperty, with_metaclass
from sqlalchemy.ext.declarative import declared_attr, AbstractConcreteBase, \
@@ -77,6 +77,26 @@ class DeclarativeTest(DeclarativeTestBase):
eq_(a1, Address(email='two'))
eq_(a1.user, User(name='u1'))
+ def test_unicode_string_resolve(self):
+ class User(Base, fixtures.ComparableEntity):
+ __tablename__ = 'users'
+
+ id = Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True)
+ name = Column('name', String(50))
+ addresses = relationship(util.u("Address"), backref="user")
+
+ class Address(Base, fixtures.ComparableEntity):
+ __tablename__ = 'addresses'
+
+ id = Column(Integer, primary_key=True,
+ test_needs_autoincrement=True)
+ email = Column(String(50), key='_email')
+ user_id = Column('user_id', Integer, ForeignKey('users.id'),
+ key='_user_id')
+
+ assert User.addresses.property.mapper.class_ is Address
+
def test_no_table(self):
def go():
class User(Base):
@@ -123,6 +143,71 @@ class DeclarativeTest(DeclarativeTestBase):
assert class_mapper(Bar).get_property('some_data').columns[0] \
is t.c.data
+ def test_column_named_twice(self):
+ def go():
+ class Foo(Base):
+ __tablename__ = 'foo'
+
+ id = Column(Integer, primary_key=True)
+ x = Column('x', Integer)
+ y = Column('x', Integer)
+ assert_raises_message(
+ sa.exc.SAWarning,
+ "On class 'Foo', Column object 'x' named directly multiple times, "
+ "only one will be used: x, y",
+ go
+ )
+
+
+ def test_column_repeated_under_prop(self):
+ def go():
+ class Foo(Base):
+ __tablename__ = 'foo'
+
+ id = Column(Integer, primary_key=True)
+ x = Column('x', Integer)
+ y = column_property(x)
+ z = Column('x', Integer)
+
+ assert_raises_message(
+ sa.exc.SAWarning,
+ "On class 'Foo', Column object 'x' named directly multiple times, "
+ "only one will be used: x, y, z",
+ go
+ )
+
+ def test_relationship_level_msg_for_invalid_callable(self):
+ class A(Base):
+ __tablename__ = 'a'
+ id = Column(Integer, primary_key=True)
+ class B(Base):
+ __tablename__ = 'b'
+ id = Column(Integer, primary_key=True)
+ a_id = Column(Integer, ForeignKey('a.id'))
+ a = relationship('a')
+ assert_raises_message(
+ sa.exc.ArgumentError,
+ "relationship 'a' expects a class or a mapper "
+ "argument .received: .*Table",
+ configure_mappers
+ )
+
+ def test_relationship_level_msg_for_invalid_object(self):
+ class A(Base):
+ __tablename__ = 'a'
+ id = Column(Integer, primary_key=True)
+ class B(Base):
+ __tablename__ = 'b'
+ id = Column(Integer, primary_key=True)
+ a_id = Column(Integer, ForeignKey('a.id'))
+ a = relationship(A.__table__)
+ assert_raises_message(
+ sa.exc.ArgumentError,
+ "relationship 'a' expects a class or a mapper "
+ "argument .received: .*Table",
+ configure_mappers
+ )
+
def test_difficult_class(self):
"""test no getattr() errors with a customized class"""
@@ -202,10 +287,10 @@ class DeclarativeTest(DeclarativeTestBase):
user = relationship("User", primaryjoin=user_id == User.id,
backref="addresses")
- assert mapperlib._new_mappers is True
+ assert mapperlib.Mapper._new_mappers is True
u = User()
assert User.addresses
- assert mapperlib._new_mappers is False
+ assert mapperlib.Mapper._new_mappers is False
def test_string_dependency_resolution(self):
from sqlalchemy.sql import desc
@@ -707,6 +792,64 @@ class DeclarativeTest(DeclarativeTestBase):
eq_(a1, Address(email='two'))
eq_(a1.user, User(name='u1'))
+ def test_alt_name_attr_subclass_column_inline(self):
+ # [ticket:2900]
+ class A(Base):
+ __tablename__ = 'a'
+ id = Column('id', Integer, primary_key=True)
+ data = Column('data')
+
+ class ASub(A):
+ brap = A.data
+ assert ASub.brap.property is A.data.property
+ assert isinstance(ASub.brap.original_property, properties.SynonymProperty)
+
+ def test_alt_name_attr_subclass_relationship_inline(self):
+ # [ticket:2900]
+ class A(Base):
+ __tablename__ = 'a'
+ id = Column('id', Integer, primary_key=True)
+ b_id = Column(Integer, ForeignKey('b.id'))
+ b = relationship("B", backref="as_")
+
+ class B(Base):
+ __tablename__ = 'b'
+ id = Column('id', Integer, primary_key=True)
+
+ configure_mappers()
+ class ASub(A):
+ brap = A.b
+ assert ASub.brap.property is A.b.property
+ assert isinstance(ASub.brap.original_property, properties.SynonymProperty)
+ ASub(brap=B())
+
+ def test_alt_name_attr_subclass_column_attrset(self):
+ # [ticket:2900]
+ class A(Base):
+ __tablename__ = 'a'
+ id = Column('id', Integer, primary_key=True)
+ data = Column('data')
+ A.brap = A.data
+ assert A.brap.property is A.data.property
+ assert isinstance(A.brap.original_property, properties.SynonymProperty)
+
+ def test_alt_name_attr_subclass_relationship_attrset(self):
+ # [ticket:2900]
+ class A(Base):
+ __tablename__ = 'a'
+ id = Column('id', Integer, primary_key=True)
+ b_id = Column(Integer, ForeignKey('b.id'))
+ b = relationship("B", backref="as_")
+ A.brap = A.b
+ class B(Base):
+ __tablename__ = 'b'
+ id = Column('id', Integer, primary_key=True)
+
+ assert A.brap.property is A.b.property
+ assert isinstance(A.brap.original_property, properties.SynonymProperty)
+ A(brap=B())
+
+
def test_eager_order_by(self):
class Address(Base, fixtures.ComparableEntity):
@@ -1276,8 +1419,10 @@ class DeclarativeTest(DeclarativeTestBase):
# case
sa.orm.configure_mappers()
- eq_(str(list(Address.user_id.property.columns[0].foreign_keys)[0]),
- "ForeignKey('users.id')")
+ eq_(
+ list(Address.user_id.property.columns[0].foreign_keys)[0].column,
+ User.__table__.c.id
+ )
Base.metadata.create_all()
u1 = User(name='u1', addresses=[Address(email='one'),
Address(email='two')])
diff --git a/test/ext/declarative/test_reflection.py b/test/ext/declarative/test_reflection.py
index 013439f93..f4bda6995 100644
--- a/test/ext/declarative/test_reflection.py
+++ b/test/ext/declarative/test_reflection.py
@@ -7,6 +7,8 @@ from sqlalchemy.orm import relationship, create_session, \
clear_mappers, \
Session
from sqlalchemy.testing import fixtures
+from sqlalchemy.testing.util import gc_collect
+from sqlalchemy.ext.declarative.base import _DeferredMapperConfig
class DeclarativeReflectionBase(fixtures.TablesTest):
__requires__ = 'reflectable_autoincrement',
@@ -47,9 +49,8 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase):
test_needs_fk=True,
)
- def test_basic(self):
- meta = MetaData(testing.db)
+ def test_basic(self):
class User(Base, fixtures.ComparableEntity):
__tablename__ = 'users'
@@ -80,8 +81,6 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase):
eq_(a1.user, User(name='u1'))
def test_rekey(self):
- meta = MetaData(testing.db)
-
class User(Base, fixtures.ComparableEntity):
__tablename__ = 'users'
@@ -114,8 +113,6 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase):
assert_raises(TypeError, User, name='u3')
def test_supplied_fk(self):
- meta = MetaData(testing.db)
-
class IMHandle(Base, fixtures.ComparableEntity):
__tablename__ = 'imhandles'
@@ -151,9 +148,8 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase):
class DeferredReflectBase(DeclarativeReflectionBase):
def teardown(self):
- super(DeferredReflectBase,self).teardown()
- from sqlalchemy.ext.declarative.base import _MapperConfig
- _MapperConfig.configs.clear()
+ super(DeferredReflectBase, self).teardown()
+ _DeferredMapperConfig._configs.clear()
Base = None
@@ -275,7 +271,7 @@ class DeferredReflectionTest(DeferredReflectBase):
@decl.declared_attr
def __mapper_args__(cls):
return {
- "order_by":cls.__table__.c.name
+ "order_by": cls.__table__.c.name
}
decl.DeferredReflection.prepare(testing.db)
@@ -297,6 +293,80 @@ class DeferredReflectionTest(DeferredReflectBase):
]
)
+ @testing.requires.predictable_gc
+ def test_cls_not_strong_ref(self):
+ class User(decl.DeferredReflection, fixtures.ComparableEntity,
+ Base):
+ __tablename__ = 'users'
+ class Address(decl.DeferredReflection, fixtures.ComparableEntity,
+ Base):
+ __tablename__ = 'addresses'
+ eq_(len(_DeferredMapperConfig._configs), 2)
+ del Address
+ gc_collect()
+ eq_(len(_DeferredMapperConfig._configs), 1)
+ decl.DeferredReflection.prepare(testing.db)
+ assert not _DeferredMapperConfig._configs
+
+class DeferredSecondaryReflectionTest(DeferredReflectBase):
+ @classmethod
+ def define_tables(cls, metadata):
+ Table('users', metadata,
+ Column('id', Integer,
+ primary_key=True, test_needs_autoincrement=True),
+ Column('name', String(50)), test_needs_fk=True)
+
+ Table('user_items', metadata,
+ Column('user_id', ForeignKey('users.id'), primary_key=True),
+ Column('item_id', ForeignKey('items.id'), primary_key=True),
+ test_needs_fk=True
+ )
+
+ Table('items', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),
+ test_needs_fk=True
+ )
+
+ def _roundtrip(self):
+
+ User = Base._decl_class_registry['User']
+ Item = Base._decl_class_registry['Item']
+
+ u1 = User(name='u1', items=[Item(name='i1'), Item(name='i2')])
+
+ sess = Session()
+ sess.add(u1)
+ sess.commit()
+
+ eq_(sess.query(User).all(), [User(name='u1',
+ items=[Item(name='i1'), Item(name='i2')])])
+
+ def test_string_resolution(self):
+ class User(decl.DeferredReflection, fixtures.ComparableEntity, Base):
+ __tablename__ = 'users'
+
+ items = relationship("Item", secondary="user_items")
+
+ class Item(decl.DeferredReflection, fixtures.ComparableEntity, Base):
+ __tablename__ = 'items'
+
+ decl.DeferredReflection.prepare(testing.db)
+ self._roundtrip()
+
+ def test_table_resolution(self):
+ class User(decl.DeferredReflection, fixtures.ComparableEntity, Base):
+ __tablename__ = 'users'
+
+ items = relationship("Item", secondary=Table("user_items", Base.metadata))
+
+ class Item(decl.DeferredReflection, fixtures.ComparableEntity, Base):
+ __tablename__ = 'items'
+
+ decl.DeferredReflection.prepare(testing.db)
+ self._roundtrip()
+
class DeferredInhReflectBase(DeferredReflectBase):
def _roundtrip(self):
Foo = Base._decl_class_registry['Foo']
@@ -338,11 +408,11 @@ class DeferredSingleInhReflectionTest(DeferredInhReflectBase):
class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
Base):
__tablename__ = 'foo'
- __mapper_args__ = {"polymorphic_on":"type",
- "polymorphic_identity":"foo"}
+ __mapper_args__ = {"polymorphic_on": "type",
+ "polymorphic_identity": "foo"}
class Bar(Foo):
- __mapper_args__ = {"polymorphic_identity":"bar"}
+ __mapper_args__ = {"polymorphic_identity": "bar"}
decl.DeferredReflection.prepare(testing.db)
self._roundtrip()
@@ -351,11 +421,11 @@ class DeferredSingleInhReflectionTest(DeferredInhReflectBase):
class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
Base):
__tablename__ = 'foo'
- __mapper_args__ = {"polymorphic_on":"type",
- "polymorphic_identity":"foo"}
+ __mapper_args__ = {"polymorphic_on": "type",
+ "polymorphic_identity": "foo"}
class Bar(Foo):
- __mapper_args__ = {"polymorphic_identity":"bar"}
+ __mapper_args__ = {"polymorphic_identity": "bar"}
bar_data = Column(String(30))
decl.DeferredReflection.prepare(testing.db)
@@ -365,12 +435,12 @@ class DeferredSingleInhReflectionTest(DeferredInhReflectBase):
class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
Base):
__tablename__ = 'foo'
- __mapper_args__ = {"polymorphic_on":"type",
- "polymorphic_identity":"foo"}
+ __mapper_args__ = {"polymorphic_on": "type",
+ "polymorphic_identity": "foo"}
id = Column(Integer, primary_key=True)
class Bar(Foo):
- __mapper_args__ = {"polymorphic_identity":"bar"}
+ __mapper_args__ = {"polymorphic_identity": "bar"}
decl.DeferredReflection.prepare(testing.db)
self._roundtrip()
@@ -395,12 +465,12 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase):
class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
Base):
__tablename__ = 'foo'
- __mapper_args__ = {"polymorphic_on":"type",
- "polymorphic_identity":"foo"}
+ __mapper_args__ = {"polymorphic_on": "type",
+ "polymorphic_identity": "foo"}
class Bar(Foo):
__tablename__ = 'bar'
- __mapper_args__ = {"polymorphic_identity":"bar"}
+ __mapper_args__ = {"polymorphic_identity": "bar"}
decl.DeferredReflection.prepare(testing.db)
self._roundtrip()
@@ -409,12 +479,12 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase):
class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
Base):
__tablename__ = 'foo'
- __mapper_args__ = {"polymorphic_on":"type",
- "polymorphic_identity":"foo"}
+ __mapper_args__ = {"polymorphic_on": "type",
+ "polymorphic_identity": "foo"}
class Bar(Foo):
__tablename__ = 'bar'
- __mapper_args__ = {"polymorphic_identity":"bar"}
+ __mapper_args__ = {"polymorphic_identity": "bar"}
bar_data = Column(String(30))
decl.DeferredReflection.prepare(testing.db)
@@ -424,13 +494,13 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase):
class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
Base):
__tablename__ = 'foo'
- __mapper_args__ = {"polymorphic_on":"type",
- "polymorphic_identity":"foo"}
+ __mapper_args__ = {"polymorphic_on": "type",
+ "polymorphic_identity": "foo"}
id = Column(Integer, primary_key=True)
class Bar(Foo):
__tablename__ = 'bar'
- __mapper_args__ = {"polymorphic_identity":"bar"}
+ __mapper_args__ = {"polymorphic_identity": "bar"}
decl.DeferredReflection.prepare(testing.db)
self._roundtrip()
@@ -439,12 +509,12 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase):
class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
Base):
__tablename__ = 'foo'
- __mapper_args__ = {"polymorphic_on":"type",
- "polymorphic_identity":"foo"}
+ __mapper_args__ = {"polymorphic_on": "type",
+ "polymorphic_identity": "foo"}
class Bar(Foo):
__tablename__ = 'bar'
- __mapper_args__ = {"polymorphic_identity":"bar"}
+ __mapper_args__ = {"polymorphic_identity": "bar"}
id = Column(Integer, ForeignKey('foo.id'), primary_key=True)
decl.DeferredReflection.prepare(testing.db)
diff --git a/test/ext/test_associationproxy.py b/test/ext/test_associationproxy.py
index 4cfb58481..3450eeb2f 100644
--- a/test/ext/test_associationproxy.py
+++ b/test/ext/test_associationproxy.py
@@ -9,7 +9,6 @@ from sqlalchemy.ext.associationproxy import *
from sqlalchemy.ext.associationproxy import _AssociationList
from sqlalchemy.testing import assert_raises_message
from sqlalchemy.testing.util import gc_collect
-from sqlalchemy.sql import not_
from sqlalchemy.testing import fixtures, AssertsCompiledSQL
from sqlalchemy import testing
from sqlalchemy.testing.schema import Table, Column
@@ -139,7 +138,7 @@ class _CollectionOperations(fixtures.TestBase):
self.assert_(len(p1._children) == 0)
self.assert_(len(p1.children) == 0)
- p1.children = ['a','b','c']
+ p1.children = ['a', 'b', 'c']
self.assert_(len(p1._children) == 3)
self.assert_(len(p1.children) == 3)
@@ -324,7 +323,7 @@ class CustomDictTest(DictTest):
self.assert_(len(p1._children) == 3)
self.assert_(len(p1.children) == 3)
- self.assert_(set(p1.children) == set(['d','e','f']))
+ self.assert_(set(p1.children) == set(['d', 'e', 'f']))
del ch
p1 = self.roundtrip(p1)
@@ -407,7 +406,7 @@ class SetTest(_CollectionOperations):
self.assert_(len(p1._children) == 0)
self.assert_(len(p1.children) == 0)
- p1.children = ['a','b','c']
+ p1.children = ['a', 'b', 'c']
self.assert_(len(p1._children) == 3)
self.assert_(len(p1.children) == 3)
@@ -421,13 +420,12 @@ class SetTest(_CollectionOperations):
self.assert_('b' in p1.children)
self.assert_('d' not in p1.children)
- self.assert_(p1.children == set(['a','b','c']))
+ self.assert_(p1.children == set(['a', 'b', 'c']))
- try:
- p1.children.remove('d')
- self.fail()
- except KeyError:
- pass
+ assert_raises(
+ KeyError,
+ p1.children.remove, "d"
+ )
self.assert_(len(p1.children) == 3)
p1.children.discard('d')
@@ -442,9 +440,9 @@ class SetTest(_CollectionOperations):
self.assert_(len(p1.children) == 2)
self.assert_(popped not in p1.children)
- p1.children = ['a','b','c']
+ p1.children = ['a', 'b', 'c']
p1 = self.roundtrip(p1)
- self.assert_(p1.children == set(['a','b','c']))
+ self.assert_(p1.children == set(['a', 'b', 'c']))
p1.children.discard('b')
p1 = self.roundtrip(p1)
@@ -476,12 +474,12 @@ class SetTest(_CollectionOperations):
Parent, Child = self.Parent, self.Child
p1 = Parent('P1')
- p1.children = ['a','b','c']
- control = set(['a','b','c'])
+ p1.children = ['a', 'b', 'c']
+ control = set(['a', 'b', 'c'])
- for other in (set(['a','b','c']), set(['a','b','c','d']),
- set(['a']), set(['a','b']),
- set(['c','d']), set(['e', 'f', 'g']),
+ for other in (set(['a', 'b', 'c']), set(['a', 'b', 'c', 'd']),
+ set(['a']), set(['a', 'b']),
+ set(['c', 'd']), set(['e', 'f', 'g']),
set()):
eq_(p1.children.union(other),
@@ -499,12 +497,12 @@ class SetTest(_CollectionOperations):
eq_(p1.children.issuperset(other),
control.issuperset(other))
- self.assert_((p1.children == other) == (control == other))
- self.assert_((p1.children != other) == (control != other))
- self.assert_((p1.children < other) == (control < other))
- self.assert_((p1.children <= other) == (control <= other))
- self.assert_((p1.children > other) == (control > other))
- self.assert_((p1.children >= other) == (control >= other))
+ self.assert_((p1.children == other) == (control == other))
+ self.assert_((p1.children != other) == (control != other))
+ self.assert_((p1.children < other) == (control < other))
+ self.assert_((p1.children <= other) == (control <= other))
+ self.assert_((p1.children > other) == (control > other))
+ self.assert_((p1.children >= other) == (control >= other))
def test_set_mutation(self):
Parent, Child = self.Parent, self.Child
@@ -513,9 +511,9 @@ class SetTest(_CollectionOperations):
for op in ('update', 'intersection_update',
'difference_update', 'symmetric_difference_update'):
for base in (['a', 'b', 'c'], []):
- for other in (set(['a','b','c']), set(['a','b','c','d']),
- set(['a']), set(['a','b']),
- set(['c','d']), set(['e', 'f', 'g']),
+ for other in (set(['a', 'b', 'c']), set(['a', 'b', 'c', 'd']),
+ set(['a']), set(['a', 'b']),
+ set(['c', 'd']), set(['e', 'f', 'g']),
set()):
p = Parent('p')
p.children = base[:]
@@ -544,9 +542,9 @@ class SetTest(_CollectionOperations):
# in-place mutations
for op in ('|=', '-=', '&=', '^='):
for base in (['a', 'b', 'c'], []):
- for other in (set(['a','b','c']), set(['a','b','c','d']),
- set(['a']), set(['a','b']),
- set(['c','d']), set(['e', 'f', 'g']),
+ for other in (set(['a', 'b', 'c']), set(['a', 'b', 'c', 'd']),
+ set(['a']), set(['a', 'b']),
+ set(['c', 'd']), set(['e', 'f', 'g']),
frozenset(['e', 'f', 'g']),
set()):
p = Parent('p')
@@ -599,12 +597,11 @@ class CustomObjectTest(_CollectionOperations):
# We didn't provide an alternate _AssociationList implementation
# for our ObjectCollection, so indexing will fail.
+ assert_raises(
+ TypeError,
+ p.children.__getitem__, 1
+ )
- try:
- v = p.children[1]
- self.fail()
- except TypeError:
- pass
class ProxyFactoryTest(ListTest):
def setup(self):
@@ -669,8 +666,9 @@ class ProxyFactoryTest(ListTest):
class ScalarTest(fixtures.TestBase):
+ @testing.provide_metadata
def test_scalar_proxy(self):
- metadata = MetaData(testing.db)
+ metadata = self.metadata
parents_table = Table('Parent', metadata,
Column('id', Integer, primary_key=True,
@@ -718,12 +716,8 @@ class ScalarTest(fixtures.TestBase):
p = Parent('p')
- # No child
- try:
- v = p.foo
- self.fail()
- except:
- pass
+ eq_(p.child, None)
+ eq_(p.foo, None)
p.child = Child(foo='a', bar='b', baz='c')
@@ -744,19 +738,13 @@ class ScalarTest(fixtures.TestBase):
p.child = None
- # No child again
- try:
- v = p.foo
- self.fail()
- except:
- pass
+ eq_(p.foo, None)
# Bogus creator for this scalar type
- try:
- p.foo = 'zzz'
- self.fail()
- except TypeError:
- pass
+ assert_raises(
+ TypeError,
+ setattr, p, "foo", "zzz"
+ )
p.bar = 'yyy'
@@ -786,6 +774,48 @@ class ScalarTest(fixtures.TestBase):
p2 = Parent('p2')
p2.bar = 'quux'
+ @testing.provide_metadata
+ def test_empty_scalars(self):
+ metadata = self.metadata
+
+ a = Table('a', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('name', String(50))
+ )
+ a2b = Table('a2b', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('id_a', Integer, ForeignKey('a.id')),
+ Column('id_b', Integer, ForeignKey('b.id')),
+ Column('name', String(50))
+ )
+ b = Table('b', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('name', String(50))
+ )
+ class A(object):
+ a2b_name = association_proxy("a2b_single", "name")
+ b_single = association_proxy("a2b_single", "b")
+
+ class A2B(object):
+ pass
+
+ class B(object):
+ pass
+
+ mapper(A, a, properties=dict(
+ a2b_single=relationship(A2B, uselist=False)
+ ))
+
+ mapper(A2B, a2b, properties=dict(
+ b=relationship(B)
+ ))
+ mapper(B, b)
+
+ a1 = A()
+ assert a1.a2b_name is None
+ assert a1.b_single is None
+
+
class LazyLoadTest(fixtures.TestBase):
def setup(self):
@@ -840,7 +870,7 @@ class LazyLoadTest(fixtures.TestBase):
collection_class=list)})
p = Parent('p')
- p.children = ['a','b','c']
+ p.children = ['a', 'b', 'c']
p = self.roundtrip(p)
@@ -858,7 +888,7 @@ class LazyLoadTest(fixtures.TestBase):
collection_class=list)})
p = Parent('p')
- p.children = ['a','b','c']
+ p.children = ['a', 'b', 'c']
p = self.roundtrip(p)
@@ -1024,7 +1054,7 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL):
@classmethod
def define_tables(cls, metadata):
Table('userkeywords', metadata,
- Column('keyword_id', Integer,ForeignKey('keywords.id'), primary_key=True),
+ Column('keyword_id', Integer, ForeignKey('keywords.id'), primary_key=True),
Column('user_id', Integer, ForeignKey('users.id'))
)
Table('users', metadata,
@@ -1094,15 +1124,15 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL):
cls.classes.Singular)
mapper(User, users, properties={
- 'singular':relationship(Singular)
+ 'singular': relationship(Singular)
})
mapper(Keyword, keywords, properties={
- 'user_keyword':relationship(UserKeyword, uselist=False)
+ 'user_keyword': relationship(UserKeyword, uselist=False)
})
mapper(UserKeyword, userkeywords, properties={
- 'user' : relationship(User, backref='user_keywords'),
- 'keyword' : relationship(Keyword)
+ 'user': relationship(User, backref='user_keywords'),
+ 'keyword': relationship(Keyword)
})
mapper(Singular, singular, properties={
'keywords': relationship(Keyword)
@@ -1300,7 +1330,7 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL):
self.session.query(User).filter(User.singular_value == None),
self.session.query(User).filter(
or_(
- User.singular.has(Singular.value==None),
+ User.singular.has(Singular.value == None),
User.singular == None
)
)
@@ -1324,7 +1354,7 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL):
self._equivalent(
self.session.query(User).filter(User.singular_value == "singular4"),
self.session.query(User).filter(
- User.singular.has(Singular.value=="singular4"),
+ User.singular.has(Singular.value == "singular4"),
)
)
@@ -1343,7 +1373,7 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL):
# a special case where we provide an empty has() on a
# non-object-targeted association proxy.
User = self.classes.User
- Singular = self.classes.Singular
+ self.classes.Singular
self._equivalent(
self.session.query(User).filter(User.singular_value.has()),
@@ -1356,7 +1386,7 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL):
# a special case where we provide an empty has() on a
# non-object-targeted association proxy.
User = self.classes.User
- Singular = self.classes.Singular
+ self.classes.Singular
self._equivalent(
self.session.query(User).filter(~User.singular_value.has()),
@@ -1368,7 +1398,7 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL):
def test_has_criterion_nul(self):
# but we don't allow that with any criterion...
User = self.classes.User
- Singular = self.classes.Singular
+ self.classes.Singular
assert_raises_message(
exc.ArgumentError,
@@ -1380,7 +1410,7 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL):
def test_has_kwargs_nul(self):
# ... or kwargs
User = self.classes.User
- Singular = self.classes.Singular
+ self.classes.Singular
assert_raises_message(
exc.ArgumentError,
@@ -1391,32 +1421,32 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL):
def test_filter_scalar_contains_fails_nul_nul(self):
Keyword = self.classes.Keyword
- assert_raises(exc.InvalidRequestError, lambda : \
- Keyword.user.contains(self.u))
+ assert_raises(exc.InvalidRequestError,
+ lambda: Keyword.user.contains(self.u))
def test_filter_scalar_any_fails_nul_nul(self):
Keyword = self.classes.Keyword
- assert_raises(exc.InvalidRequestError, lambda : \
- Keyword.user.any(name='user2'))
+ assert_raises(exc.InvalidRequestError,
+ lambda: Keyword.user.any(name='user2'))
def test_filter_collection_has_fails_ul_nul(self):
User = self.classes.User
- assert_raises(exc.InvalidRequestError, lambda : \
- User.keywords.has(keyword='quick'))
+ assert_raises(exc.InvalidRequestError,
+ lambda: User.keywords.has(keyword='quick'))
def test_filter_collection_eq_fails_ul_nul(self):
User = self.classes.User
- assert_raises(exc.InvalidRequestError, lambda : \
- User.keywords == self.kw)
+ assert_raises(exc.InvalidRequestError,
+ lambda: User.keywords == self.kw)
def test_filter_collection_ne_fails_ul_nul(self):
User = self.classes.User
- assert_raises(exc.InvalidRequestError, lambda : \
- User.keywords != self.kw)
+ assert_raises(exc.InvalidRequestError,
+ lambda: User.keywords != self.kw)
def test_join_separate_attr(self):
User = self.classes.User
@@ -1458,7 +1488,7 @@ class DictOfTupleUpdateTest(fixtures.TestBase):
b = Table('b', m, Column('id', Integer, primary_key=True),
Column('aid', Integer, ForeignKey('a.id')))
mapper(A, a, properties={
- 'orig':relationship(B, collection_class=attribute_mapped_collection('key'))
+ 'orig': relationship(B, collection_class=attribute_mapped_collection('key'))
})
mapper(B, b)
self.A = A
@@ -1467,22 +1497,22 @@ class DictOfTupleUpdateTest(fixtures.TestBase):
def test_update_one_elem_dict(self):
a1 = self.A()
a1.elements.update({("B", 3): 'elem2'})
- eq_(a1.elements, {("B",3):'elem2'})
+ eq_(a1.elements, {("B", 3): 'elem2'})
def test_update_multi_elem_dict(self):
a1 = self.A()
a1.elements.update({("B", 3): 'elem2', ("C", 4): "elem3"})
- eq_(a1.elements, {("B",3):'elem2', ("C", 4): "elem3"})
+ eq_(a1.elements, {("B", 3): 'elem2', ("C", 4): "elem3"})
def test_update_one_elem_list(self):
a1 = self.A()
a1.elements.update([(("B", 3), 'elem2')])
- eq_(a1.elements, {("B",3):'elem2'})
+ eq_(a1.elements, {("B", 3): 'elem2'})
def test_update_multi_elem_list(self):
a1 = self.A()
a1.elements.update([(("B", 3), 'elem2'), (("C", 4), "elem3")])
- eq_(a1.elements, {("B",3):'elem2', ("C", 4): "elem3"})
+ eq_(a1.elements, {("B", 3): 'elem2', ("C", 4): "elem3"})
def test_update_one_elem_varg(self):
a1 = self.A()
diff --git a/test/ext/test_automap.py b/test/ext/test_automap.py
new file mode 100644
index 000000000..9db85879d
--- /dev/null
+++ b/test/ext/test_automap.py
@@ -0,0 +1,146 @@
+from sqlalchemy.testing import fixtures, eq_
+from ..orm._fixtures import FixtureTest
+from sqlalchemy.ext.automap import automap_base
+from sqlalchemy.orm import relationship, interfaces, backref
+from sqlalchemy.ext.automap import generate_relationship
+from sqlalchemy.testing.mock import Mock, call
+
+class AutomapTest(fixtures.MappedTest):
+ @classmethod
+ def define_tables(cls, metadata):
+ FixtureTest.define_tables(metadata)
+
+ def test_relationship_o2m_default(self):
+ Base = automap_base(metadata=self.metadata)
+ Base.prepare()
+
+ User = Base.classes.users
+ Address = Base.classes.addresses
+
+ a1 = Address(email_address='e1')
+ u1 = User(name='u1', addresses_collection=[a1])
+ assert a1.users is u1
+
+ def test_relationship_explicit_override_o2m(self):
+ Base = automap_base(metadata=self.metadata)
+ prop = relationship("addresses", collection_class=set)
+ class User(Base):
+ __tablename__ = 'users'
+
+ addresses_collection = prop
+
+ Base.prepare()
+ assert User.addresses_collection.property is prop
+ Address = Base.classes.addresses
+
+ a1 = Address(email_address='e1')
+ u1 = User(name='u1', addresses_collection=set([a1]))
+ assert a1.user is u1
+
+ def test_relationship_explicit_override_m2o(self):
+ Base = automap_base(metadata=self.metadata)
+
+ prop = relationship("users")
+ class Address(Base):
+ __tablename__ = 'addresses'
+
+ users = prop
+
+ Base.prepare()
+ User = Base.classes.users
+
+ assert Address.users.property is prop
+ a1 = Address(email_address='e1')
+ u1 = User(name='u1', address_collection=[a1])
+ assert a1.users is u1
+
+
+ def test_relationship_self_referential(self):
+ Base = automap_base(metadata=self.metadata)
+ Base.prepare()
+
+ Node = Base.classes.nodes
+
+ n1 = Node()
+ n2 = Node()
+ n1.nodes_collection.append(n2)
+ assert n2.nodes is n1
+
+ def test_naming_schemes(self):
+ Base = automap_base(metadata=self.metadata)
+
+ def classname_for_table(base, tablename, table):
+ return str("cls_" + tablename)
+
+ def name_for_scalar_relationship(base, local_cls, referred_cls, constraint):
+ return "scalar_" + referred_cls.__name__
+
+ def name_for_collection_relationship(base, local_cls, referred_cls, constraint):
+ return "coll_" + referred_cls.__name__
+
+ Base.prepare(
+ classname_for_table=classname_for_table,
+ name_for_scalar_relationship=name_for_scalar_relationship,
+ name_for_collection_relationship=name_for_collection_relationship
+ )
+
+ User = Base.classes.cls_users
+ Address = Base.classes.cls_addresses
+
+ u1 = User()
+ a1 = Address()
+ u1.coll_cls_addresses.append(a1)
+ assert a1.scalar_cls_users is u1
+
+ def test_relationship_m2m(self):
+ Base = automap_base(metadata=self.metadata)
+
+ Base.prepare()
+
+ Order, Item = Base.classes.orders, Base.classes['items']
+
+ o1 = Order()
+ i1 = Item()
+ o1.items_collection.append(i1)
+ assert o1 in i1.orders_collection
+
+ def test_relationship_explicit_override_forwards_m2m(self):
+ Base = automap_base(metadata=self.metadata)
+
+ class Order(Base):
+ __tablename__ = 'orders'
+
+ items_collection = relationship("items",
+ secondary="order_items",
+ collection_class=set)
+ Base.prepare()
+
+ Item = Base.classes['items']
+
+ o1 = Order()
+ i1 = Item()
+ o1.items_collection.add(i1)
+
+ # it's 'order_collection' because the class name is
+ # "Order" !
+ assert isinstance(i1.order_collection, list)
+ assert o1 in i1.order_collection
+
+ def test_relationship_pass_params(self):
+ Base = automap_base(metadata=self.metadata)
+
+ mock = Mock()
+ def _gen_relationship(base, direction, return_fn, attrname,
+ local_cls, referred_cls, **kw):
+ mock(base, direction, attrname)
+ return generate_relationship(base, direction, return_fn,
+ attrname, local_cls, referred_cls, **kw)
+
+ Base.prepare(generate_relationship=_gen_relationship)
+ assert set(tuple(c[1]) for c in mock.mock_calls).issuperset([
+ (Base, interfaces.MANYTOONE, "nodes"),
+ (Base, interfaces.MANYTOMANY, "keywords_collection"),
+ (Base, interfaces.MANYTOMANY, "items_collection"),
+ (Base, interfaces.MANYTOONE, "users"),
+ (Base, interfaces.ONETOMANY, "addresses_collection"),
+ ])
diff --git a/test/ext/test_compiler.py b/test/ext/test_compiler.py
index c1f8b6258..5ed50442f 100644
--- a/test/ext/test_compiler.py
+++ b/test/ext/test_compiler.py
@@ -4,7 +4,7 @@ from sqlalchemy.sql.expression import ClauseElement, ColumnClause,\
FunctionElement, Select, \
BindParameter
-from sqlalchemy.schema import DDLElement
+from sqlalchemy.schema import DDLElement, CreateColumn, CreateTable
from sqlalchemy.ext.compiler import compiles, deregister
from sqlalchemy import exc
from sqlalchemy.sql import table, column, visitors
@@ -34,6 +34,22 @@ class UserDefinedTest(fixtures.TestBase, AssertsCompiledSQL):
"SELECT >>x<<, >>y<< WHERE >>MYTHINGY!<< = :MYTHINGY!_1"
)
+ def test_create_column_skip(self):
+ @compiles(CreateColumn)
+ def skip_xmin(element, compiler, **kw):
+ if element.element.name == 'xmin':
+ return None
+ else:
+ return compiler.visit_create_column(element, **kw)
+
+ t = Table('t', MetaData(), Column('a', Integer),
+ Column('xmin', Integer),
+ Column('c', Integer))
+
+ self.assert_compile(
+ CreateTable(t),
+ "CREATE TABLE t (a INTEGER, c INTEGER)"
+ )
def test_types(self):
class MyType(TypeEngine):
pass
diff --git a/test/ext/test_extendedattr.py b/test/ext/test_extendedattr.py
index a550ae4d0..7a733696a 100644
--- a/test/ext/test_extendedattr.py
+++ b/test/ext/test_extendedattr.py
@@ -61,6 +61,8 @@ class MyTypesManager(instrumentation.InstrumentationManager):
class MyListLike(list):
# add @appender, @remover decorators as needed
_sa_iterator = list.__iter__
+ _sa_linker = None
+ _sa_converter = None
def _sa_appender(self, item, _sa_initiator=None):
if _sa_initiator is not False:
self._sa_adapter.fire_append_event(item, _sa_initiator)
diff --git a/test/ext/test_mutable.py b/test/ext/test_mutable.py
index 25c182f1d..ee1b8075e 100644
--- a/test/ext/test_mutable.py
+++ b/test/ext/test_mutable.py
@@ -153,9 +153,6 @@ class MutableWithScalarPickleTest(_MutableDictTestBase, fixtures.MappedTest):
self._test_non_mutable()
class MutableWithScalarJSONTest(_MutableDictTestBase, fixtures.MappedTest):
- # json introduced in 2.6
- __skip_if__ = lambda: sys.version_info < (2, 6),
-
@classmethod
def define_tables(cls, metadata):
import json
@@ -245,9 +242,6 @@ class MutableAssociationScalarPickleTest(_MutableDictTestBase, fixtures.MappedTe
)
class MutableAssociationScalarJSONTest(_MutableDictTestBase, fixtures.MappedTest):
- # json introduced in 2.6
- __skip_if__ = lambda: sys.version_info < (2, 6),
-
@classmethod
def define_tables(cls, metadata):
import json
diff --git a/test/ext/test_serializer.py b/test/ext/test_serializer.py
index 84fff1304..ffeac55c1 100644
--- a/test/ext/test_serializer.py
+++ b/test/ext/test_serializer.py
@@ -1,13 +1,15 @@
+# coding: utf-8
from sqlalchemy.ext import serializer
from sqlalchemy import testing
from sqlalchemy import Integer, String, ForeignKey, select, \
- desc, func, util
+ desc, func, util, MetaData
from sqlalchemy.testing.schema import Table
from sqlalchemy.testing.schema import Column
from sqlalchemy.orm import relationship, sessionmaker, scoped_session, \
class_mapper, mapper, joinedload, configure_mappers, aliased
-from sqlalchemy.testing import eq_
+from sqlalchemy.testing import eq_, AssertsCompiledSQL
+from sqlalchemy.util import u, ue
from sqlalchemy.testing import fixtures
@@ -19,7 +21,7 @@ class Address(fixtures.ComparableEntity):
users = addresses = Session = None
-class SerializeTest(fixtures.MappedTest):
+class SerializeTest(AssertsCompiledSQL, fixtures.MappedTest):
run_setup_mappers = 'once'
run_inserts = 'once'
@@ -77,7 +79,6 @@ class SerializeTest(fixtures.MappedTest):
assert serializer.loads(serializer.dumps(User.name, -1), None,
None) is User.name
- @testing.requires.python26 # crashes in 2.5
def test_expression(self):
expr = \
select([users]).select_from(users.join(addresses)).limit(5)
@@ -124,19 +125,20 @@ class SerializeTest(fixtures.MappedTest):
eq_(q2.all(), [User(name='fred')])
eq_(list(q2.values(User.id, User.name)), [(9, 'fred')])
- @testing.requires.non_broken_pickle
- def test_query_three(self):
- ua = aliased(User)
- q = \
- Session.query(ua).join(ua.addresses).\
- filter(Address.email.like('%fred%'))
- q2 = serializer.loads(serializer.dumps(q, -1), users.metadata,
- Session)
- eq_(q2.all(), [User(name='fred')])
-
+ # fails too often/randomly
+ #@testing.requires.non_broken_pickle
+ #def test_query_three(self):
+ # ua = aliased(User)
+ # q = \
+ # Session.query(ua).join(ua.addresses).\
+ # filter(Address.email.like('%fred%'))
+ # q2 = serializer.loads(serializer.dumps(q, -1), users.metadata,
+ # Session)
+ # eq_(q2.all(), [User(name='fred')])
+ #
# try to pull out the aliased entity here...
- ua_2 = q2._entities[0].entity_zero.entity
- eq_(list(q2.values(ua_2.id, ua_2.name)), [(9, 'fred')])
+ # ua_2 = q2._entities[0].entity_zero.entity
+ # eq_(list(q2.values(ua_2.id, ua_2.name)), [(9, 'fred')])
@testing.requires.non_broken_pickle
def test_orm_join(self):
@@ -149,7 +151,6 @@ class SerializeTest(fixtures.MappedTest):
assert j2.right is j.right
assert j2._target_adapter._next
- @testing.requires.python26 # namedtuple workaround not serializable in 2.5
@testing.exclude('sqlite', '<=', (3, 5, 9),
'id comparison failing on the buildbot')
def test_aliases(self):
@@ -172,6 +173,22 @@ class SerializeTest(fixtures.MappedTest):
x = serializer.loads(ser, users.metadata)
eq_(str(r), str(x))
+ def test_unicode(self):
+ m = MetaData()
+ t = Table(ue('\u6e2c\u8a66'), m,
+ Column(ue('\u6e2c\u8a66_id'), Integer))
+
+ expr = select([t]).where(t.c[ue('\u6e2c\u8a66_id')] == 5)
+
+ expr2 = serializer.loads(serializer.dumps(expr, -1), m)
+
+ self.assert_compile(
+ expr2,
+ ue('SELECT "\u6e2c\u8a66"."\u6e2c\u8a66_id" FROM "\u6e2c\u8a66" '
+ 'WHERE "\u6e2c\u8a66"."\u6e2c\u8a66_id" = :\u6e2c\u8a66_id_1'),
+ dialect="default"
+ )
+
if __name__ == '__main__':
testing.main()
diff --git a/test/orm/_fixtures.py b/test/orm/_fixtures.py
index c21833619..0f6e522d4 100644
--- a/test/orm/_fixtures.py
+++ b/test/orm/_fixtures.py
@@ -64,10 +64,13 @@ class FixtureTest(fixtures.MappedTest):
cls.classes.CompositePk, cls.tables.nodes, \
cls.classes.Order, cls.tables.orders, cls.tables.addresses
- mapper(User, users, properties={
- 'addresses':relationship(Address, backref='user', order_by=addresses.c.id),
- 'orders':relationship(Order, backref='user', order_by=orders.c.id), # o2m, m2o
- })
+ # use OrderedDict on this one to support some tests that
+ # assert the order of attributes (e.g. orm/test_inspect)
+ mapper(User, users, properties=util.OrderedDict(
+ [('addresses', relationship(Address, backref='user', order_by=addresses.c.id)),
+ ('orders', relationship(Order, backref='user', order_by=orders.c.id)), # o2m, m2o
+ ]
+ ))
mapper(Address, addresses, properties={
'dingaling':relationship(Dingaling, uselist=False, backref="address") #o2o
})
diff --git a/test/orm/inheritance/test_assorted_poly.py b/test/orm/inheritance/test_assorted_poly.py
index d05a22f39..da0e3b1a3 100644
--- a/test/orm/inheritance/test_assorted_poly.py
+++ b/test/orm/inheritance/test_assorted_poly.py
@@ -16,6 +16,7 @@ from test.orm import _fixtures
from sqlalchemy.testing import eq_
from sqlalchemy.testing.schema import Table, Column
+
class AttrSettable(object):
def __init__(self, **kwargs):
[setattr(self, k, v) for k, v in kwargs.items()]
diff --git a/test/orm/inheritance/test_basic.py b/test/orm/inheritance/test_basic.py
index 41a167e72..1737d1ccb 100644
--- a/test/orm/inheritance/test_basic.py
+++ b/test/orm/inheritance/test_basic.py
@@ -964,7 +964,6 @@ class EagerLazyTest(fixtures.MappedTest):
Column('foo_id', Integer, ForeignKey('foo.id'))
)
- @testing.fails_on('maxdb', 'FIXME: unknown')
def test_basic(self):
class Foo(object): pass
class Bar(Foo): pass
diff --git a/test/orm/inheritance/test_manytomany.py b/test/orm/inheritance/test_manytomany.py
index 31c4ba40a..51b797940 100644
--- a/test/orm/inheritance/test_manytomany.py
+++ b/test/orm/inheritance/test_manytomany.py
@@ -201,7 +201,6 @@ class InheritTest3(fixtures.MappedTest):
found = [repr(l[0])] + sorted([repr(o) for o in l[0].foos])
eq_(found, compare)
- @testing.fails_on('maxdb', 'FIXME: unknown')
def testadvanced(self):
class Foo(object):
def __init__(self, data=None):
diff --git a/test/orm/inheritance/test_poly_linked_list.py b/test/orm/inheritance/test_poly_linked_list.py
index 1915007de..ec263b3b0 100644
--- a/test/orm/inheritance/test_poly_linked_list.py
+++ b/test/orm/inheritance/test_poly_linked_list.py
@@ -115,19 +115,15 @@ class PolymorphicCircularTest(fixtures.MappedTest):
configure_mappers()
assert table1_mapper.primary_key == (table1.c.id,), table1_mapper.primary_key
- @testing.fails_on('maxdb', 'FIXME: unknown')
def testone(self):
self._testlist([Table1, Table2, Table1, Table2])
- @testing.fails_on('maxdb', 'FIXME: unknown')
def testtwo(self):
self._testlist([Table3])
- @testing.fails_on('maxdb', 'FIXME: unknown')
def testthree(self):
self._testlist([Table2, Table1, Table1B, Table3, Table3, Table1B, Table1B, Table2, Table1])
- @testing.fails_on('maxdb', 'FIXME: unknown')
def testfour(self):
self._testlist([
Table2('t2', [Data('data1'), Data('data2')]),
diff --git a/test/orm/inheritance/test_relationship.py b/test/orm/inheritance/test_relationship.py
index ecb4bf407..db2cd1ec6 100644
--- a/test/orm/inheritance/test_relationship.py
+++ b/test/orm/inheritance/test_relationship.py
@@ -154,6 +154,7 @@ class SelfReferentialJ2JTest(fixtures.MappedTest):
managers.c.person_id == engineers.c.reports_to_id,
backref='engineers')})
+
def test_has(self):
m1 = Manager(name='dogbert')
e1 = Engineer(name='dilbert', primary_language='java', reports_to=m1)
@@ -415,7 +416,6 @@ class M2MFilterTest(fixtures.MappedTest):
sess = create_session()
e1 = sess.query(Person).filter(Engineer.name == 'e1').one()
- # this works
eq_(sess.query(Organization)
.filter(~Organization.engineers
.of_type(Engineer)
@@ -1354,9 +1354,144 @@ class SubClassToSubClassMultiTest(AssertsCompiledSQL, fixtures.MappedTest):
"JOIN ep2 ON anon_1.base2_id = ep2.base2_id"
)
+class JoinAcrossJoinedInhMultiPath(fixtures.DeclarativeMappedTest,
+ testing.AssertsCompiledSQL):
+ """test long join paths with a joined-inh in the middle, where we go multiple
+ times across the same joined-inh to the same target but with other classes
+ in the middle. E.g. test [ticket:2908]
+ """
+
+
+ run_setup_mappers = 'once'
+ __dialect__ = 'default'
+
+ @classmethod
+ def setup_classes(cls):
+ Base = cls.DeclarativeBasic
+
+ class Root(Base):
+ __tablename__ = 'root'
+
+ id = Column(Integer, primary_key=True)
+ sub1_id = Column(Integer, ForeignKey('sub1.id'))
+
+ intermediate = relationship("Intermediate")
+ sub1 = relationship("Sub1")
+
+ class Intermediate(Base):
+ __tablename__ = 'intermediate'
+
+ id = Column(Integer, primary_key=True)
+ sub1_id = Column(Integer, ForeignKey('sub1.id'))
+ root_id = Column(Integer, ForeignKey('root.id'))
+ sub1 = relationship("Sub1")
+
+ class Parent(Base):
+ __tablename__ = 'parent'
+
+ id = Column(Integer, primary_key=True)
+
+ class Sub1(Parent):
+ __tablename__ = 'sub1'
+ id = Column(Integer, ForeignKey('parent.id'),
+ primary_key=True)
+
+ target = relationship("Target")
+
+ class Target(Base):
+ __tablename__ = 'target'
+ id = Column(Integer, primary_key=True)
+ sub1_id = Column(Integer, ForeignKey('sub1.id'))
+
+ def test_join(self):
+ Root, Intermediate, Sub1, Target = \
+ self.classes.Root, self.classes.Intermediate, \
+ self.classes.Sub1, self.classes.Target
+ s1_alias = aliased(Sub1)
+ s2_alias = aliased(Sub1)
+ t1_alias = aliased(Target)
+ t2_alias = aliased(Target)
+
+ sess = Session()
+ q = sess.query(Root).\
+ join(s1_alias, Root.sub1).join(t1_alias, s1_alias.target).\
+ join(Root.intermediate).join(s2_alias, Intermediate.sub1).\
+ join(t2_alias, s2_alias.target)
+ self.assert_compile(q,
+ "SELECT root.id AS root_id, root.sub1_id AS root_sub1_id "
+ "FROM root "
+ "JOIN (SELECT parent.id AS parent_id, sub1.id AS sub1_id "
+ "FROM parent JOIN sub1 ON parent.id = sub1.id) AS anon_1 "
+ "ON anon_1.sub1_id = root.sub1_id "
+ "JOIN target AS target_1 ON anon_1.sub1_id = target_1.sub1_id "
+ "JOIN intermediate ON root.id = intermediate.root_id "
+ "JOIN (SELECT parent.id AS parent_id, sub1.id AS sub1_id "
+ "FROM parent JOIN sub1 ON parent.id = sub1.id) AS anon_2 "
+ "ON anon_2.sub1_id = intermediate.sub1_id "
+ "JOIN target AS target_2 ON anon_2.sub1_id = target_2.sub1_id")
+
+ def test_join_flat(self):
+ Root, Intermediate, Sub1, Target = \
+ self.classes.Root, self.classes.Intermediate, \
+ self.classes.Sub1, self.classes.Target
+ s1_alias = aliased(Sub1, flat=True)
+ s2_alias = aliased(Sub1, flat=True)
+ t1_alias = aliased(Target)
+ t2_alias = aliased(Target)
+
+ sess = Session()
+ q = sess.query(Root).\
+ join(s1_alias, Root.sub1).join(t1_alias, s1_alias.target).\
+ join(Root.intermediate).join(s2_alias, Intermediate.sub1).\
+ join(t2_alias, s2_alias.target)
+ self.assert_compile(q,
+ "SELECT root.id AS root_id, root.sub1_id AS root_sub1_id "
+ "FROM root "
+ "JOIN (parent AS parent_1 JOIN sub1 AS sub1_1 ON parent_1.id = sub1_1.id) "
+ "ON sub1_1.id = root.sub1_id "
+ "JOIN target AS target_1 ON sub1_1.id = target_1.sub1_id "
+ "JOIN intermediate ON root.id = intermediate.root_id "
+ "JOIN (parent AS parent_2 JOIN sub1 AS sub1_2 ON parent_2.id = sub1_2.id) "
+ "ON sub1_2.id = intermediate.sub1_id "
+ "JOIN target AS target_2 ON sub1_2.id = target_2.sub1_id"
+ )
+
+ def test_joinedload(self):
+ Root, Intermediate, Sub1, Target = \
+ self.classes.Root, self.classes.Intermediate, \
+ self.classes.Sub1, self.classes.Target
+
+ sess = Session()
+ q = sess.query(Root).\
+ options(
+ joinedload(Root.sub1).joinedload(Sub1.target),
+ joinedload(Root.intermediate).joinedload(Intermediate.sub1).\
+ joinedload(Sub1.target),
+ )
+ self.assert_compile(q,
+ "SELECT root.id AS root_id, root.sub1_id AS root_sub1_id, "
+ "target_1.id AS target_1_id, target_1.sub1_id AS target_1_sub1_id, "
+ "sub1_1.id AS sub1_1_id, parent_1.id AS parent_1_id, "
+ "intermediate_1.id AS intermediate_1_id, "
+ "intermediate_1.sub1_id AS intermediate_1_sub1_id, "
+ "intermediate_1.root_id AS intermediate_1_root_id, "
+ "target_2.id AS target_2_id, target_2.sub1_id AS target_2_sub1_id, "
+ "sub1_2.id AS sub1_2_id, parent_2.id AS parent_2_id "
+ "FROM root "
+ "LEFT OUTER JOIN intermediate AS intermediate_1 "
+ "ON root.id = intermediate_1.root_id "
+ "LEFT OUTER JOIN (parent AS parent_1 JOIN sub1 AS sub1_1 "
+ "ON parent_1.id = sub1_1.id) ON sub1_1.id = intermediate_1.sub1_id "
+ "LEFT OUTER JOIN target AS target_1 ON sub1_1.id = target_1.sub1_id "
+ "LEFT OUTER JOIN (parent AS parent_2 JOIN sub1 AS sub1_2 "
+ "ON parent_2.id = sub1_2.id) ON sub1_2.id = root.sub1_id "
+ "LEFT OUTER JOIN target AS target_2 ON sub1_2.id = target_2.sub1_id")
+
+
class MultipleAdaptUsesEntityOverTableTest(AssertsCompiledSQL, fixtures.MappedTest):
__dialect__ = 'default'
run_create_tables = None
+ run_deletes = None
@classmethod
def define_tables(cls, metadata):
diff --git a/test/orm/inheritance/test_selects.py b/test/orm/inheritance/test_selects.py
index dd9c8c8b8..94f5faf8f 100644
--- a/test/orm/inheritance/test_selects.py
+++ b/test/orm/inheritance/test_selects.py
@@ -1,50 +1,101 @@
-from sqlalchemy import *
-from sqlalchemy.orm import *
+from sqlalchemy import String, Integer, ForeignKey, select
+from sqlalchemy.orm import mapper, Session
from sqlalchemy import testing
-from sqlalchemy.testing import fixtures
+from sqlalchemy.testing import fixtures, eq_
+from sqlalchemy.testing.schema import Table, Column
class InheritingSelectablesTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
- global foo, bar, baz
foo = Table('foo', metadata,
Column('a', String(30), primary_key=1),
Column('b', String(30), nullable=0))
- bar = foo.select(foo.c.b == 'bar').alias('bar')
- baz = foo.select(foo.c.b == 'baz').alias('baz')
+ cls.tables.bar = foo.select(foo.c.b == 'bar').alias('bar')
+ cls.tables.baz = foo.select(foo.c.b == 'baz').alias('baz')
def test_load(self):
+ foo, bar, baz = self.tables.foo, self.tables.bar, self.tables.baz
# TODO: add persistence test also
testing.db.execute(foo.insert(), a='not bar', b='baz')
testing.db.execute(foo.insert(), a='also not bar', b='baz')
testing.db.execute(foo.insert(), a='i am bar', b='bar')
testing.db.execute(foo.insert(), a='also bar', b='bar')
- class Foo(fixtures.ComparableEntity): pass
- class Bar(Foo): pass
- class Baz(Foo): pass
+ class Foo(fixtures.ComparableEntity):
+ pass
+ class Bar(Foo):
+ pass
+ class Baz(Foo):
+ pass
mapper(Foo, foo, polymorphic_on=foo.c.b)
mapper(Baz, baz,
- with_polymorphic=('*', foo.join(baz, foo.c.b=='baz').alias('baz')),
+ with_polymorphic=('*', foo.join(baz, foo.c.b == 'baz').alias('baz')),
inherits=Foo,
- inherit_condition=(foo.c.a==baz.c.a),
+ inherit_condition=(foo.c.a == baz.c.a),
inherit_foreign_keys=[baz.c.a],
polymorphic_identity='baz')
mapper(Bar, bar,
- with_polymorphic=('*', foo.join(bar, foo.c.b=='bar').alias('bar')),
+ with_polymorphic=('*', foo.join(bar, foo.c.b == 'bar').alias('bar')),
inherits=Foo,
- inherit_condition=(foo.c.a==bar.c.a),
+ inherit_condition=(foo.c.a == bar.c.a),
inherit_foreign_keys=[bar.c.a],
polymorphic_identity='bar')
- s = sessionmaker(bind=testing.db)()
+ s = Session()
assert [Baz(), Baz(), Bar(), Bar()] == s.query(Foo).order_by(Foo.b.desc()).all()
assert [Bar(), Bar()] == s.query(Bar).all()
+
+class JoinFromSelectPersistenceTest(fixtures.MappedTest):
+ """test for [ticket:2885]"""
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table('base', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('type', String(50))
+ )
+ Table('child', metadata,
+ # 1. name of column must be different, so that we rely on
+ # mapper._table_to_equated to link the two cols
+ Column('child_id', Integer, ForeignKey('base.id'), primary_key=True),
+ Column('name', String(50))
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class Base(cls.Comparable):
+ pass
+ class Child(Base):
+ pass
+
+ def test_map_to_select(self):
+ Base, Child = self.classes.Base, self.classes.Child
+ base, child = self.tables.base, self.tables.child
+
+ base_select = select([base]).alias()
+ mapper(Base, base_select, polymorphic_on=base_select.c.type,
+ polymorphic_identity='base')
+ mapper(Child, child, inherits=Base,
+ polymorphic_identity='child')
+
+ sess = Session()
+
+ # 2. use an id other than "1" here so can't rely on
+ # the two inserts having the same id
+ c1 = Child(id=12, name='c1')
+ sess.add(c1)
+
+ sess.commit()
+ sess.close()
+
+ c1 = sess.query(Child).one()
+ eq_(c1.name, 'c1')
diff --git a/test/orm/test_assorted_eager.py b/test/orm/test_assorted_eager.py
index 83fccbf7a..09c8ea732 100644
--- a/test/orm/test_assorted_eager.py
+++ b/test/orm/test_assorted_eager.py
@@ -120,8 +120,8 @@ class EagerTest(fixtures.MappedTest):
self.tables.categories)
# I want to display a list of tests owned by owner 1
- # if someoption is false or he hasn't specified it yet (null)
- # but not if he set it to true (example someoption is for hiding)
+ # if someoption is false or they haven't specified it yet (null)
+ # but not if they set it to true (example someoption is for hiding)
# desired output for owner 1
# test_id, cat_name
@@ -286,7 +286,6 @@ class EagerTest2(fixtures.MappedTest):
lazy='joined',
backref=backref('middle', lazy='joined')))),
- @testing.fails_on('maxdb', 'FIXME: unknown')
def test_eager_terminate(self):
"""Eager query generation does not include the same mapper's table twice.
@@ -339,7 +338,6 @@ class EagerTest3(fixtures.MappedTest):
class Stat(cls.Basic):
pass
- @testing.fails_on('maxdb', 'FIXME: unknown')
def test_nesting_with_functions(self):
Stat, Foo, stats, foo, Data, datas = (self.classes.Stat,
self.classes.Foo,
@@ -423,7 +421,6 @@ class EagerTest4(fixtures.MappedTest):
class Employee(cls.Basic):
pass
- @testing.fails_on('maxdb', 'FIXME: unknown')
def test_basic(self):
Department, Employee, employees, departments = (self.classes.Department,
self.classes.Employee,
@@ -774,7 +771,6 @@ class EagerTest8(fixtures.MappedTest):
class Joined(cls.Comparable):
pass
- @testing.fails_on('maxdb', 'FIXME: unknown')
def test_nested_joins(self):
task, Task_Type, Joined, prj, task_type, msg = (self.tables.task,
self.classes.Task_Type,
@@ -867,7 +863,6 @@ class EagerTest9(fixtures.MappedTest):
backref=backref('entries', lazy='joined',
order_by=entries.c.entry_id))))
- @testing.fails_on('maxdb', 'FIXME: unknown')
def test_joinedload_on_path(self):
Entry, Account, Transaction = (self.classes.Entry,
self.classes.Account,
diff --git a/test/orm/test_attributes.py b/test/orm/test_attributes.py
index 4bcecb71b..c282bc44c 100644
--- a/test/orm/test_attributes.py
+++ b/test/orm/test_attributes.py
@@ -294,6 +294,7 @@ class AttributesTest(fixtures.ORMTest):
assert state.obj() is None
assert state.dict == {}
+ @testing.requires.predictable_gc
def test_object_dereferenced_error(self):
class Foo(object):
pass
@@ -317,7 +318,8 @@ class AttributesTest(fixtures.ORMTest):
)
def test_deferred(self):
- class Foo(object):pass
+ class Foo(object):
+ pass
data = {'a':'this is a', 'b':12}
def loader(state, keys):
@@ -1162,12 +1164,8 @@ class BackrefTest(fixtures.ORMTest):
p2.children.append(c1)
assert c1.parent is p2
- # note its still in p1.children -
- # the event model currently allows only
- # one level deep. without the parent_token,
- # it keeps going until a ValueError is raised
- # and this condition changes.
- assert c1 in p1.children
+ # event propagates to remove as of [ticket:2789]
+ assert c1 not in p1.children
class CyclicBackrefAssertionTest(fixtures.TestBase):
"""test that infinite recursion due to incorrect backref assignments
@@ -1341,7 +1339,7 @@ class PendingBackrefTest(fixtures.ORMTest):
]
)
- def test_lazy_history(self):
+ def test_lazy_history_collection(self):
Post, Blog, lazy_posts = self._fixture()
p1, p2, p3 = Post("post 1"), Post("post 2"), Post("post 3")
@@ -1513,6 +1511,12 @@ class HistoryTest(fixtures.TestBase):
return Foo, Bar
def _someattr_history(self, f, **kw):
+ passive = kw.pop('passive', None)
+ if passive is True:
+ kw['passive'] = attributes.PASSIVE_NO_INITIALIZE
+ elif passive is False:
+ kw['passive'] = attributes.PASSIVE_OFF
+
return attributes.get_state_history(
attributes.instance_state(f),
'someattr', **kw)
@@ -1687,19 +1691,19 @@ class HistoryTest(fixtures.TestBase):
Foo = self._fixture(uselist=True, useobject=True,
active_history=True)
f = Foo()
- eq_(self._someattr_history(f, passive=True), ((), (), ()))
+ eq_(self._someattr_history(f, passive=True), (None, None, None))
def test_scalar_obj_never_set(self):
Foo = self._fixture(uselist=False, useobject=True,
active_history=True)
f = Foo()
- eq_(self._someattr_history(f, passive=True), ((), (), ()))
+ eq_(self._someattr_history(f, passive=True), (None, None, None))
def test_scalar_never_set(self):
Foo = self._fixture(uselist=False, useobject=False,
active_history=True)
f = Foo()
- eq_(self._someattr_history(f, passive=True), ((), (), ()))
+ eq_(self._someattr_history(f, passive=True), (None, None, None))
def test_scalar_active_set(self):
Foo = self._fixture(uselist=False, useobject=False,
@@ -1795,6 +1799,24 @@ class HistoryTest(fixtures.TestBase):
eq_(self._someattr_history(f), (['two'], (), ()))
+ def test_scalar_passive_flag(self):
+ Foo = self._fixture(uselist=False, useobject=False,
+ active_history=True)
+ f = Foo()
+ f.someattr = 'one'
+ eq_(self._someattr_history(f), (['one'], (), ()))
+
+ self._commit_someattr(f)
+
+ state = attributes.instance_state(f)
+ state._expire_attribute_pre_commit(state.dict, 'someattr')
+
+ def scalar_loader(state, toload):
+ state.dict['someattr'] = 'one'
+ state.manager.deferred_scalar_loader = scalar_loader
+
+ eq_(self._someattr_history(f), ((), ['one'], ()))
+
def test_scalar_inplace_mutation_set(self):
Foo = self._fixture(uselist=False, useobject=False,
@@ -1850,6 +1872,7 @@ class HistoryTest(fixtures.TestBase):
f.someattr = ['a']
eq_(self._someattr_history(f), ([['a']], (), ()))
+
def test_use_object_init(self):
Foo, Bar = self._two_obj_fixture(uselist=False)
f = Foo()
diff --git a/test/orm/test_backref_mutations.py b/test/orm/test_backref_mutations.py
index 925eedfa9..e9448d41c 100644
--- a/test/orm/test_backref_mutations.py
+++ b/test/orm/test_backref_mutations.py
@@ -75,10 +75,8 @@ class O2MCollectionTest(_fixtures.FixtureTest):
# backref fires
assert a1.user is u2
- # doesn't extend to the previous collection tho,
- # which was already loaded.
- # flushing at this point means its anyone's guess.
- assert a1 in u1.addresses
+ # a1 removed from u1.addresses as of [ticket:2789]
+ assert a1 not in u1.addresses
assert a1 in u2.addresses
def test_collection_move_notloaded(self):
@@ -699,9 +697,8 @@ class O2MStaleBackrefTest(_fixtures.FixtureTest):
u1.addresses.append(a1)
u2.addresses.append(a1)
- # events haven't updated
- # u1.addresses here.
- u1.addresses.remove(a1)
+ # a1 removed from u1.addresses as of [ticket:2789]
+ assert a1 not in u1.addresses
assert a1.user is u2
assert a1 in u2.addresses
diff --git a/test/orm/test_bundle.py b/test/orm/test_bundle.py
new file mode 100644
index 000000000..29b8e9382
--- /dev/null
+++ b/test/orm/test_bundle.py
@@ -0,0 +1,289 @@
+from sqlalchemy.testing import fixtures, eq_
+from sqlalchemy.testing.schema import Table, Column
+from sqlalchemy.orm import Bundle, Session
+from sqlalchemy.testing import AssertsCompiledSQL
+from sqlalchemy import Integer, select, ForeignKey, String, func
+from sqlalchemy.orm import mapper, relationship, aliased
+
+class BundleTest(fixtures.MappedTest, AssertsCompiledSQL):
+ __dialect__ = 'default'
+
+ run_inserts = 'once'
+ run_setup_mappers = 'once'
+ run_deletes = None
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table('data', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('d1', String(10)),
+ Column('d2', String(10)),
+ Column('d3', String(10))
+ )
+
+ Table('other', metadata,
+ Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('data_id', ForeignKey('data.id')),
+ Column('o1', String(10))
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class Data(cls.Basic):
+ pass
+ class Other(cls.Basic):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ mapper(cls.classes.Data, cls.tables.data, properties={
+ 'others': relationship(cls.classes.Other)
+ })
+ mapper(cls.classes.Other, cls.tables.other)
+
+ @classmethod
+ def insert_data(cls):
+ sess = Session()
+ sess.add_all([
+ cls.classes.Data(d1='d%dd1' % i, d2='d%dd2' % i, d3='d%dd3' % i,
+ others=[cls.classes.Other(o1="d%do%d" % (i, j)) for j in range(5)])
+ for i in range(10)
+ ])
+ sess.commit()
+
+ def test_c_attr(self):
+ Data = self.classes.Data
+
+ b1 = Bundle('b1', Data.d1, Data.d2)
+
+ self.assert_compile(
+ select([b1.c.d1, b1.c.d2]),
+ "SELECT data.d1, data.d2 FROM data"
+ )
+
+ def test_result(self):
+ Data = self.classes.Data
+ sess = Session()
+
+ b1 = Bundle('b1', Data.d1, Data.d2)
+
+ eq_(
+ sess.query(b1).filter(b1.c.d1.between('d3d1', 'd5d1')).all(),
+ [(('d3d1', 'd3d2'),), (('d4d1', 'd4d2'),), (('d5d1', 'd5d2'),)]
+ )
+
+ def test_subclass(self):
+ Data = self.classes.Data
+ sess = Session()
+
+ class MyBundle(Bundle):
+ def create_row_processor(self, query, procs, labels):
+ def proc(row, result):
+ return dict(
+ zip(labels, (proc(row, result) for proc in procs))
+ )
+ return proc
+
+ b1 = MyBundle('b1', Data.d1, Data.d2)
+
+ eq_(
+ sess.query(b1).filter(b1.c.d1.between('d3d1', 'd5d1')).all(),
+ [({'d2': 'd3d2', 'd1': 'd3d1'},),
+ ({'d2': 'd4d2', 'd1': 'd4d1'},),
+ ({'d2': 'd5d2', 'd1': 'd5d1'},)]
+ )
+
+ def test_multi_bundle(self):
+ Data = self.classes.Data
+ Other = self.classes.Other
+
+ d1 = aliased(Data)
+
+ b1 = Bundle('b1', d1.d1, d1.d2)
+ b2 = Bundle('b2', Data.d1, Other.o1)
+
+ sess = Session()
+
+ q = sess.query(b1, b2).join(Data.others).join(d1, d1.id == Data.id).\
+ filter(b1.c.d1 == 'd3d1')
+ eq_(
+ q.all(),
+ [
+ (('d3d1', 'd3d2'), ('d3d1', 'd3o0')),
+ (('d3d1', 'd3d2'), ('d3d1', 'd3o1')),
+ (('d3d1', 'd3d2'), ('d3d1', 'd3o2')),
+ (('d3d1', 'd3d2'), ('d3d1', 'd3o3')),
+ (('d3d1', 'd3d2'), ('d3d1', 'd3o4'))]
+ )
+
+ def test_single_entity(self):
+ Data = self.classes.Data
+ sess = Session()
+
+ b1 = Bundle('b1', Data.d1, Data.d2, single_entity=True)
+
+ eq_(
+ sess.query(b1).
+ filter(b1.c.d1.between('d3d1', 'd5d1')).
+ all(),
+ [('d3d1', 'd3d2'), ('d4d1', 'd4d2'), ('d5d1', 'd5d2')]
+ )
+
+ def test_single_entity_flag_but_multi_entities(self):
+ Data = self.classes.Data
+ sess = Session()
+
+ b1 = Bundle('b1', Data.d1, Data.d2, single_entity=True)
+ b2 = Bundle('b1', Data.d3, single_entity=True)
+
+ eq_(
+ sess.query(b1, b2).
+ filter(b1.c.d1.between('d3d1', 'd5d1')).
+ all(),
+ [
+ (('d3d1', 'd3d2'), ('d3d3',)),
+ (('d4d1', 'd4d2'), ('d4d3',)),
+ (('d5d1', 'd5d2'), ('d5d3',))
+ ]
+ )
+
+ def test_bundle_nesting(self):
+ Data = self.classes.Data
+ sess = Session()
+
+ b1 = Bundle('b1', Data.d1, Bundle('b2', Data.d2, Data.d3))
+
+ eq_(
+ sess.query(b1).
+ filter(b1.c.d1.between('d3d1', 'd7d1')).
+ filter(b1.c.b2.c.d2.between('d4d2', 'd6d2')).
+ all(),
+ [(('d4d1', ('d4d2', 'd4d3')),), (('d5d1', ('d5d2', 'd5d3')),),
+ (('d6d1', ('d6d2', 'd6d3')),)]
+ )
+
+ def test_bundle_nesting_unions(self):
+ Data = self.classes.Data
+ sess = Session()
+
+ b1 = Bundle('b1', Data.d1, Bundle('b2', Data.d2, Data.d3))
+
+ q1 = sess.query(b1).\
+ filter(b1.c.d1.between('d3d1', 'd7d1')).\
+ filter(b1.c.b2.c.d2.between('d4d2', 'd5d2'))
+
+ q2 = sess.query(b1).\
+ filter(b1.c.d1.between('d3d1', 'd7d1')).\
+ filter(b1.c.b2.c.d2.between('d5d2', 'd6d2'))
+
+ eq_(
+ q1.union(q2).all(),
+ [(('d4d1', ('d4d2', 'd4d3')),), (('d5d1', ('d5d2', 'd5d3')),),
+ (('d6d1', ('d6d2', 'd6d3')),)]
+ )
+
+ # naming structure is preserved
+ row = q1.union(q2).first()
+ eq_(row.b1.d1, 'd4d1')
+ eq_(row.b1.b2.d2, 'd4d2')
+
+
+ def test_query_count(self):
+ Data = self.classes.Data
+ b1 = Bundle('b1', Data.d1, Data.d2)
+ eq_(Session().query(b1).count(), 10)
+
+ def test_join_relationship(self):
+ Data = self.classes.Data
+
+ sess = Session()
+ b1 = Bundle('b1', Data.d1, Data.d2)
+ q = sess.query(b1).join(Data.others)
+ self.assert_compile(q,
+ "SELECT data.d1 AS data_d1, data.d2 AS data_d2 FROM data "
+ "JOIN other ON data.id = other.data_id"
+ )
+
+ def test_join_selectable(self):
+ Data = self.classes.Data
+ Other = self.classes.Other
+
+ sess = Session()
+ b1 = Bundle('b1', Data.d1, Data.d2)
+ q = sess.query(b1).join(Other)
+ self.assert_compile(q,
+ "SELECT data.d1 AS data_d1, data.d2 AS data_d2 FROM data "
+ "JOIN other ON data.id = other.data_id"
+ )
+
+
+ def test_joins_from_adapted_entities(self):
+ Data = self.classes.Data
+
+ # test for #1853 in terms of bundles
+ # specifically this exercises adapt_to_selectable()
+
+ b1 = Bundle('b1', Data.id, Data.d1, Data.d2)
+
+ session = Session()
+ first = session.query(b1)
+ second = session.query(b1)
+ unioned = first.union(second)
+ subquery = session.query(Data.id).subquery()
+ joined = unioned.outerjoin(subquery, subquery.c.id == Data.id)
+ joined = joined.order_by(Data.id, Data.d1, Data.d2)
+
+ self.assert_compile(
+ joined,
+ "SELECT anon_1.data_id AS anon_1_data_id, anon_1.data_d1 AS anon_1_data_d1, "
+ "anon_1.data_d2 AS anon_1_data_d2 FROM "
+ "(SELECT data.id AS data_id, data.d1 AS data_d1, data.d2 AS data_d2 FROM "
+ "data UNION SELECT data.id AS data_id, data.d1 AS data_d1, "
+ "data.d2 AS data_d2 FROM data) AS anon_1 "
+ "LEFT OUTER JOIN (SELECT data.id AS id FROM data) AS anon_2 "
+ "ON anon_2.id = anon_1.data_id "
+ "ORDER BY anon_1.data_id, anon_1.data_d1, anon_1.data_d2")
+
+ # tuple nesting still occurs
+ eq_(
+ joined.all(),
+ [((1, 'd0d1', 'd0d2'),), ((2, 'd1d1', 'd1d2'),),
+ ((3, 'd2d1', 'd2d2'),), ((4, 'd3d1', 'd3d2'),),
+ ((5, 'd4d1', 'd4d2'),), ((6, 'd5d1', 'd5d2'),),
+ ((7, 'd6d1', 'd6d2'),), ((8, 'd7d1', 'd7d2'),),
+ ((9, 'd8d1', 'd8d2'),), ((10, 'd9d1', 'd9d2'),)]
+ )
+
+ def test_filter_by(self):
+ Data = self.classes.Data
+
+ b1 = Bundle('b1', Data.id, Data.d1, Data.d2)
+
+ sess = Session()
+
+ self.assert_compile(
+ sess.query(b1).filter_by(d1='d1'),
+ "SELECT data.id AS data_id, data.d1 AS data_d1, "
+ "data.d2 AS data_d2 FROM data WHERE data.d1 = :d1_1"
+ )
+
+ def test_clause_expansion(self):
+ Data = self.classes.Data
+
+ b1 = Bundle('b1', Data.id, Data.d1, Data.d2)
+
+ sess = Session()
+ self.assert_compile(
+ sess.query(Data).order_by(b1),
+ "SELECT data.id AS data_id, data.d1 AS data_d1, "
+ "data.d2 AS data_d2, data.d3 AS data_d3 FROM data "
+ "ORDER BY data.id, data.d1, data.d2"
+ )
+
+ self.assert_compile(
+ sess.query(func.row_number().over(order_by=b1)),
+ "SELECT row_number() OVER (ORDER BY data.id, data.d1, data.d2) "
+ "AS anon_1 FROM data"
+ )
+
diff --git a/test/orm/test_cascade.py b/test/orm/test_cascade.py
index 12196b4e7..615ae815d 100644
--- a/test/orm/test_cascade.py
+++ b/test/orm/test_cascade.py
@@ -1217,7 +1217,6 @@ class M2OCascadeDeleteOrphanTestOne(fixtures.MappedTest):
sess.flush()
sess.close()
- @testing.fails_on('maxdb', 'FIXME: unknown')
def test_orphan(self):
prefs, User, extra = (self.tables.prefs,
self.classes.User,
@@ -1282,7 +1281,6 @@ class M2OCascadeDeleteOrphanTestOne(fixtures.MappedTest):
assert p2 in sess
sess.commit()
- @testing.fails_on('maxdb', 'FIXME: unknown')
def test_orphan_on_update(self):
prefs, User, extra = (self.tables.prefs,
self.classes.User,
@@ -1715,7 +1713,7 @@ class M2MCascadeTest(fixtures.MappedTest):
a1.bs.remove(b1)
sess.flush()
- assert atob.count().scalar() ==0
+ assert atob.count().scalar() == 0
assert b.count().scalar() == 0
assert a.count().scalar() == 1
diff --git a/test/orm/test_collection.py b/test/orm/test_collection.py
index c9f9f6951..f94c742b3 100644
--- a/test/orm/test_collection.py
+++ b/test/orm/test_collection.py
@@ -128,9 +128,9 @@ class CollectionsTest(fixtures.ORMTest):
control = list()
def assert_eq():
- self.assert_(set(direct) == canary.data)
- self.assert_(set(adapter) == canary.data)
- self.assert_(direct == control)
+ eq_(set(direct), canary.data)
+ eq_(set(adapter), canary.data)
+ eq_(direct, control)
# assume append() is available for list tests
e = creator()
@@ -260,6 +260,11 @@ class CollectionsTest(fixtures.ORMTest):
control[-2:-1] = values
assert_eq()
+ values = [creator()]
+ direct[0:0] = values
+ control[0:0] = values
+ assert_eq()
+
if hasattr(direct, '__delitem__') or hasattr(direct, '__delslice__'):
for i in range(1, 4):
@@ -279,6 +284,16 @@ class CollectionsTest(fixtures.ORMTest):
del control[:]
assert_eq()
+ if hasattr(direct, 'clear'):
+ for i in range(1, 4):
+ e = creator()
+ direct.append(e)
+ control.append(e)
+
+ direct.clear()
+ control.clear()
+ assert_eq()
+
if hasattr(direct, 'extend'):
values = [creator(), creator(), creator()]
@@ -499,9 +514,9 @@ class CollectionsTest(fixtures.ORMTest):
control = set()
def assert_eq():
- self.assert_(set(direct) == canary.data)
- self.assert_(set(adapter) == canary.data)
- self.assert_(direct == control)
+ eq_(set(direct), canary.data)
+ eq_(set(adapter), canary.data)
+ eq_(direct, control)
def addall(*values):
for item in values:
@@ -519,10 +534,6 @@ class CollectionsTest(fixtures.ORMTest):
addall(e)
addall(e)
- if hasattr(direct, 'pop'):
- direct.pop()
- control.pop()
- assert_eq()
if hasattr(direct, 'remove'):
e = creator()
@@ -593,11 +604,19 @@ class CollectionsTest(fixtures.ORMTest):
except TypeError:
assert True
- if hasattr(direct, 'clear'):
- addall(creator(), creator())
- direct.clear()
- control.clear()
- assert_eq()
+ addall(creator(), creator())
+ direct.clear()
+ control.clear()
+ assert_eq()
+
+ # note: the clear test previously needs
+ # to have executed in order for this to
+ # pass in all cases; else there's the possibility
+ # of non-deterministic behavior.
+ addall(creator())
+ direct.pop()
+ control.pop()
+ assert_eq()
if hasattr(direct, 'difference_update'):
zap()
@@ -739,6 +758,7 @@ class CollectionsTest(fixtures.ORMTest):
except TypeError:
assert True
+
def _test_set_bulk(self, typecallable, creator=None):
if creator is None:
creator = self.entity_maker
@@ -809,6 +829,8 @@ class CollectionsTest(fixtures.ORMTest):
self.data.remove(item)
def discard(self, item):
self.data.discard(item)
+ def clear(self):
+ self.data.clear()
def pop(self):
return self.data.pop()
def update(self, other):
@@ -841,6 +863,8 @@ class CollectionsTest(fixtures.ORMTest):
self.data.update(other)
def __iter__(self):
return iter(self.data)
+ def clear(self):
+ self.data.clear()
__hash__ = object.__hash__
def __eq__(self, other):
return self.data == other
@@ -967,11 +991,10 @@ class CollectionsTest(fixtures.ORMTest):
control.update(d)
assert_eq()
- if sys.version_info >= (2, 4):
- kw = dict([(ee.a, ee) for ee in [e, creator()]])
- direct.update(**kw)
- control.update(**kw)
- assert_eq()
+ kw = dict([(ee.a, ee) for ee in [e, creator()]])
+ direct.update(**kw)
+ control.update(**kw)
+ assert_eq()
def _test_dict_bulk(self, typecallable, creator=None):
if creator is None:
diff --git a/test/orm/test_composites.py b/test/orm/test_composites.py
index 5e7b91f3e..f13720ef3 100644
--- a/test/orm/test_composites.py
+++ b/test/orm/test_composites.py
@@ -1,18 +1,14 @@
from sqlalchemy.testing import assert_raises, assert_raises_message
import sqlalchemy as sa
from sqlalchemy import testing
-from sqlalchemy import MetaData, Integer, String, ForeignKey, func, \
- util, select
+from sqlalchemy import Integer, String, ForeignKey, \
+ select
from sqlalchemy.testing.schema import Table, Column
-from sqlalchemy.orm import mapper, relationship, backref, \
- class_mapper, CompositeProperty, \
- validates, aliased
-from sqlalchemy.orm import attributes, \
- composite, relationship, \
- Session
+from sqlalchemy.orm import mapper, relationship, \
+ CompositeProperty, aliased
+from sqlalchemy.orm import composite, Session, configure_mappers
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
-from test.orm import _fixtures
class PointTest(fixtures.MappedTest):
@@ -214,17 +210,45 @@ class PointTest(fixtures.MappedTest):
((), [Point(x=None, y=None)], ())
)
- def test_query_cols(self):
+ def test_query_cols_legacy(self):
Edge = self.classes.Edge
sess = self._fixture()
eq_(
- sess.query(Edge.start, Edge.end).all(),
+ sess.query(Edge.start.clauses, Edge.end.clauses).all(),
[(3, 4, 5, 6), (14, 5, 2, 7)]
)
+ def test_query_cols(self):
+ Edge = self.classes.Edge
+ Point = self.classes.Point
+
+ sess = self._fixture()
+
+ start, end = Edge.start, Edge.end
+
+ eq_(
+ sess.query(start, end).filter(start == Point(3, 4)).all(),
+ [(Point(3, 4), Point(5, 6))]
+ )
+
+ def test_query_cols_labeled(self):
+ Edge = self.classes.Edge
+ Point = self.classes.Point
+
+ sess = self._fixture()
+
+ start, end = Edge.start, Edge.end
+
+ row = sess.query(start.label('s1'), end).filter(start == Point(3, 4)).first()
+ eq_(row.s1.x, 3)
+ eq_(row.s1.y, 4)
+ eq_(row.end.x, 5)
+ eq_(row.end.y, 6)
+
def test_delete(self):
+ Point = self.classes.Point
Graph, Edge = self.classes.Graph, self.classes.Edge
sess = self._fixture()
@@ -235,7 +259,10 @@ class PointTest(fixtures.MappedTest):
sess.flush()
eq_(
sess.query(Edge.start, Edge.end).all(),
- [(3, 4, 5, 6), (14, 5, None, None)]
+ [
+ (Point(x=3, y=4), Point(x=5, y=6)),
+ (Point(x=14, y=5), Point(x=None, y=None))
+ ]
)
def test_save_null(self):
@@ -712,6 +739,24 @@ class ConfigurationTest(fixtures.MappedTest):
})
self._test_roundtrip()
+ def test_check_prop_type(self):
+ edge, Edge, Point = (self.tables.edge,
+ self.classes.Edge,
+ self.classes.Point)
+ mapper(Edge, edge, properties={
+ 'start': sa.orm.composite(Point, (edge.c.x1,), edge.c.y1),
+ })
+ assert_raises_message(
+ sa.exc.ArgumentError,
+ # note that we also are checking that the tuple
+ # renders here, so the "%" operator in the string needs to
+ # apply the tuple also
+ r"Composite expects Column objects or mapped "
+ "attributes/attribute names as "
+ "arguments, got: \(Column",
+ configure_mappers
+ )
+
class ComparatorTest(fixtures.MappedTest, testing.AssertsCompiledSQL):
__dialect__ = 'default'
@@ -863,3 +908,15 @@ class ComparatorTest(fixtures.MappedTest, testing.AssertsCompiledSQL):
"edge_1.x2, edge_1.y2"
)
+ def test_clause_expansion(self):
+ self._fixture(False)
+ Edge = self.classes.Edge
+ from sqlalchemy.orm import configure_mappers
+ configure_mappers()
+
+ self.assert_compile(
+ select([Edge]).order_by(Edge.start),
+ "SELECT edge.id, edge.x1, edge.y1, edge.x2, edge.y2 FROM edge "
+ "ORDER BY edge.x1, edge.y1"
+ )
+
diff --git a/test/orm/test_default_strategies.py b/test/orm/test_default_strategies.py
index c1668cdd4..b1175fc51 100644
--- a/test/orm/test_default_strategies.py
+++ b/test/orm/test_default_strategies.py
@@ -149,11 +149,13 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest):
def test_star_must_be_alone(self):
sess = self._downgrade_fixture()
User = self.classes.User
+ opt = sa.orm.subqueryload('*', User.addresses)
assert_raises_message(
sa.exc.ArgumentError,
- "Wildcard identifier '\*' must be specified alone.",
- sa.orm.subqueryload, '*', User.addresses
+ "Wildcard token cannot be followed by another entity",
+ sess.query(User).options, opt
)
+
def test_select_with_joinedload(self):
"""Mapper load strategy defaults can be downgraded with
lazyload('*') option, while explicit joinedload() option
@@ -283,6 +285,23 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest):
# verify everything loaded, with no additional sql needed
self._assert_fully_loaded(users)
+ def test_joined_path_wildcards(self):
+ sess = self._upgrade_fixture()
+ users = []
+
+ # test upgrade all to joined: 1 sql
+ def go():
+ users[:] = sess.query(self.classes.User)\
+ .options(sa.orm.joinedload('.*'))\
+ .options(sa.orm.joinedload("addresses.*"))\
+ .options(sa.orm.joinedload("orders.*"))\
+ .options(sa.orm.joinedload("orders.items.*"))\
+ .order_by(self.classes.User.id)\
+ .all()
+
+ self.assert_sql_count(testing.db, go, 1)
+ self._assert_fully_loaded(users)
+
def test_joined_with_lazyload(self):
"""Mapper load strategy defaults can be upgraded with
joinedload('*') option, while explicit lazyload() option
@@ -350,6 +369,24 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest):
# verify everything loaded, with no additional sql needed
self._assert_fully_loaded(users)
+ def test_subquery_path_wildcards(self):
+ sess = self._upgrade_fixture()
+ users = []
+
+ # test upgrade all to subquery: 1 sql + 4 relationships = 5
+ def go():
+ users[:] = sess.query(self.classes.User)\
+ .options(sa.orm.subqueryload('.*'))\
+ .options(sa.orm.subqueryload('addresses.*'))\
+ .options(sa.orm.subqueryload('orders.*'))\
+ .options(sa.orm.subqueryload('orders.items.*'))\
+ .order_by(self.classes.User.id)\
+ .all()
+ self.assert_sql_count(testing.db, go, 5)
+
+ # verify everything loaded, with no additional sql needed
+ self._assert_fully_loaded(users)
+
def test_subquery_with_lazyload(self):
"""Mapper load strategy defaults can be upgraded with
subqueryload('*') option, while explicit lazyload() option
diff --git a/test/orm/test_deferred.py b/test/orm/test_deferred.py
new file mode 100644
index 000000000..88e7e8663
--- /dev/null
+++ b/test/orm/test_deferred.py
@@ -0,0 +1,566 @@
+import sqlalchemy as sa
+from sqlalchemy import testing, util
+from sqlalchemy.orm import mapper, deferred, defer, undefer, Load, \
+ load_only, undefer_group, create_session, synonym, relationship, Session,\
+ joinedload, defaultload
+from sqlalchemy.testing import eq_, AssertsCompiledSQL
+from test.orm import _fixtures
+from sqlalchemy.orm import strategies
+
+class DeferredTest(AssertsCompiledSQL, _fixtures.FixtureTest):
+
+ def test_basic(self):
+ """A basic deferred load."""
+
+ Order, orders = self.classes.Order, self.tables.orders
+
+
+ mapper(Order, orders, order_by=orders.c.id, properties={
+ 'description': deferred(orders.c.description)})
+
+ o = Order()
+ self.assert_(o.description is None)
+
+ q = create_session().query(Order)
+ def go():
+ l = q.all()
+ o2 = l[2]
+ x = o2.description
+
+ self.sql_eq_(go, [
+ ("SELECT orders.id AS orders_id, "
+ "orders.user_id AS orders_user_id, "
+ "orders.address_id AS orders_address_id, "
+ "orders.isopen AS orders_isopen "
+ "FROM orders ORDER BY orders.id", {}),
+ ("SELECT orders.description AS orders_description "
+ "FROM orders WHERE orders.id = :param_1",
+ {'param_1':3})])
+
+ def test_unsaved(self):
+ """Deferred loading does not kick in when just PK cols are set."""
+
+ Order, orders = self.classes.Order, self.tables.orders
+
+
+ mapper(Order, orders, properties={
+ 'description': deferred(orders.c.description)})
+
+ sess = create_session()
+ o = Order()
+ sess.add(o)
+ o.id = 7
+ def go():
+ o.description = "some description"
+ self.sql_count_(0, go)
+
+ def test_synonym_group_bug(self):
+ orders, Order = self.tables.orders, self.classes.Order
+
+ mapper(Order, orders, properties={
+ 'isopen':synonym('_isopen', map_column=True),
+ 'description':deferred(orders.c.description, group='foo')
+ })
+
+ sess = create_session()
+ o1 = sess.query(Order).get(1)
+ eq_(o1.description, "order 1")
+
+ def test_unsaved_2(self):
+ Order, orders = self.classes.Order, self.tables.orders
+
+ mapper(Order, orders, properties={
+ 'description': deferred(orders.c.description)})
+
+ sess = create_session()
+ o = Order()
+ sess.add(o)
+ def go():
+ o.description = "some description"
+ self.sql_count_(0, go)
+
+ def test_unsaved_group(self):
+ """Deferred loading doesnt kick in when just PK cols are set"""
+
+ orders, Order = self.tables.orders, self.classes.Order
+
+
+ mapper(Order, orders, order_by=orders.c.id, properties=dict(
+ description=deferred(orders.c.description, group='primary'),
+ opened=deferred(orders.c.isopen, group='primary')))
+
+ sess = create_session()
+ o = Order()
+ sess.add(o)
+ o.id = 7
+ def go():
+ o.description = "some description"
+ self.sql_count_(0, go)
+
+ def test_unsaved_group_2(self):
+ orders, Order = self.tables.orders, self.classes.Order
+
+ mapper(Order, orders, order_by=orders.c.id, properties=dict(
+ description=deferred(orders.c.description, group='primary'),
+ opened=deferred(orders.c.isopen, group='primary')))
+
+ sess = create_session()
+ o = Order()
+ sess.add(o)
+ def go():
+ o.description = "some description"
+ self.sql_count_(0, go)
+
+ def test_save(self):
+ Order, orders = self.classes.Order, self.tables.orders
+
+ m = mapper(Order, orders, properties={
+ 'description': deferred(orders.c.description)})
+
+ sess = create_session()
+ o2 = sess.query(Order).get(2)
+ o2.isopen = 1
+ sess.flush()
+
+ def test_group(self):
+ """Deferred load with a group"""
+
+ orders, Order = self.tables.orders, self.classes.Order
+
+ mapper(Order, orders, properties=util.OrderedDict([
+ ('userident', deferred(orders.c.user_id, group='primary')),
+ ('addrident', deferred(orders.c.address_id, group='primary')),
+ ('description', deferred(orders.c.description, group='primary')),
+ ('opened', deferred(orders.c.isopen, group='primary'))
+ ]))
+
+ sess = create_session()
+ q = sess.query(Order).order_by(Order.id)
+ def go():
+ l = q.all()
+ o2 = l[2]
+ eq_(o2.opened, 1)
+ eq_(o2.userident, 7)
+ eq_(o2.description, 'order 3')
+
+ self.sql_eq_(go, [
+ ("SELECT orders.id AS orders_id "
+ "FROM orders ORDER BY orders.id", {}),
+ ("SELECT orders.user_id AS orders_user_id, "
+ "orders.address_id AS orders_address_id, "
+ "orders.description AS orders_description, "
+ "orders.isopen AS orders_isopen "
+ "FROM orders WHERE orders.id = :param_1",
+ {'param_1':3})])
+
+ o2 = q.all()[2]
+ eq_(o2.description, 'order 3')
+ assert o2 not in sess.dirty
+ o2.description = 'order 3'
+ def go():
+ sess.flush()
+ self.sql_count_(0, go)
+
+ def test_preserve_changes(self):
+ """A deferred load operation doesn't revert modifications on attributes"""
+
+ orders, Order = self.tables.orders, self.classes.Order
+
+ mapper(Order, orders, properties = {
+ 'userident': deferred(orders.c.user_id, group='primary'),
+ 'description': deferred(orders.c.description, group='primary'),
+ 'opened': deferred(orders.c.isopen, group='primary')
+ })
+ sess = create_session()
+ o = sess.query(Order).get(3)
+ assert 'userident' not in o.__dict__
+ o.description = 'somenewdescription'
+ eq_(o.description, 'somenewdescription')
+ def go():
+ eq_(o.opened, 1)
+ self.assert_sql_count(testing.db, go, 1)
+ eq_(o.description, 'somenewdescription')
+ assert o in sess.dirty
+
+ def test_commits_state(self):
+ """
+ When deferred elements are loaded via a group, they get the proper
+ CommittedState and don't result in changes being committed
+
+ """
+
+ orders, Order = self.tables.orders, self.classes.Order
+
+ mapper(Order, orders, properties = {
+ 'userident': deferred(orders.c.user_id, group='primary'),
+ 'description': deferred(orders.c.description, group='primary'),
+ 'opened': deferred(orders.c.isopen, group='primary')})
+
+ sess = create_session()
+ o2 = sess.query(Order).get(3)
+
+ # this will load the group of attributes
+ eq_(o2.description, 'order 3')
+ assert o2 not in sess.dirty
+ # this will mark it as 'dirty', but nothing actually changed
+ o2.description = 'order 3'
+ # therefore the flush() shouldnt actually issue any SQL
+ self.assert_sql_count(testing.db, sess.flush, 0)
+
+ def test_map_selectable_wo_deferred(self):
+ """test mapping to a selectable with deferred cols,
+ the selectable doesn't include the deferred col.
+
+ """
+
+ Order, orders = self.classes.Order, self.tables.orders
+
+
+ order_select = sa.select([
+ orders.c.id,
+ orders.c.user_id,
+ orders.c.address_id,
+ orders.c.description,
+ orders.c.isopen]).alias()
+ mapper(Order, order_select, properties={
+ 'description':deferred(order_select.c.description)
+ })
+
+ sess = Session()
+ o1 = sess.query(Order).order_by(Order.id).first()
+ assert 'description' not in o1.__dict__
+ eq_(o1.description, 'order 1')
+
+
+class DeferredOptionsTest(AssertsCompiledSQL, _fixtures.FixtureTest):
+ __dialect__ = 'default'
+
+ def test_options(self):
+ """Options on a mapper to create deferred and undeferred columns"""
+
+ orders, Order = self.tables.orders, self.classes.Order
+
+
+ mapper(Order, orders)
+
+ sess = create_session()
+ q = sess.query(Order).order_by(Order.id).options(defer('user_id'))
+
+ def go():
+ q.all()[0].user_id
+
+ self.sql_eq_(go, [
+ ("SELECT orders.id AS orders_id, "
+ "orders.address_id AS orders_address_id, "
+ "orders.description AS orders_description, "
+ "orders.isopen AS orders_isopen "
+ "FROM orders ORDER BY orders.id", {}),
+ ("SELECT orders.user_id AS orders_user_id "
+ "FROM orders WHERE orders.id = :param_1",
+ {'param_1':1})])
+ sess.expunge_all()
+
+ q2 = q.options(undefer('user_id'))
+ self.sql_eq_(q2.all, [
+ ("SELECT orders.id AS orders_id, "
+ "orders.user_id AS orders_user_id, "
+ "orders.address_id AS orders_address_id, "
+ "orders.description AS orders_description, "
+ "orders.isopen AS orders_isopen "
+ "FROM orders ORDER BY orders.id",
+ {})])
+
+ def test_undefer_group(self):
+ orders, Order = self.tables.orders, self.classes.Order
+
+ mapper(Order, orders, properties=util.OrderedDict([
+ ('userident', deferred(orders.c.user_id, group='primary')),
+ ('description', deferred(orders.c.description, group='primary')),
+ ('opened', deferred(orders.c.isopen, group='primary'))
+ ]
+ ))
+
+ sess = create_session()
+ q = sess.query(Order).order_by(Order.id)
+ def go():
+ l = q.options(undefer_group('primary')).all()
+ o2 = l[2]
+ eq_(o2.opened, 1)
+ eq_(o2.userident, 7)
+ eq_(o2.description, 'order 3')
+
+ self.sql_eq_(go, [
+ ("SELECT orders.user_id AS orders_user_id, "
+ "orders.description AS orders_description, "
+ "orders.isopen AS orders_isopen, "
+ "orders.id AS orders_id, "
+ "orders.address_id AS orders_address_id "
+ "FROM orders ORDER BY orders.id",
+ {})])
+
+ def test_undefer_star(self):
+ orders, Order = self.tables.orders, self.classes.Order
+
+ mapper(Order, orders, properties=util.OrderedDict([
+ ('userident', deferred(orders.c.user_id)),
+ ('description', deferred(orders.c.description)),
+ ('opened', deferred(orders.c.isopen))
+ ]
+ ))
+
+ sess = create_session()
+ q = sess.query(Order).options(Load(Order).undefer('*'))
+ self.assert_compile(q,
+ "SELECT orders.user_id AS orders_user_id, "
+ "orders.description AS orders_description, "
+ "orders.isopen AS orders_isopen, "
+ "orders.id AS orders_id, "
+ "orders.address_id AS orders_address_id FROM orders"
+ )
+
+ def test_locates_col(self):
+ """Manually adding a column to the result undefers the column."""
+
+ orders, Order = self.tables.orders, self.classes.Order
+
+
+ mapper(Order, orders, properties={
+ 'description': deferred(orders.c.description)})
+
+ sess = create_session()
+ o1 = sess.query(Order).order_by(Order.id).first()
+ def go():
+ eq_(o1.description, 'order 1')
+ self.sql_count_(1, go)
+
+ sess = create_session()
+ o1 = (sess.query(Order).
+ order_by(Order.id).
+ add_column(orders.c.description).first())[0]
+ def go():
+ eq_(o1.description, 'order 1')
+ self.sql_count_(0, go)
+
+ def test_deep_options(self):
+ users, items, order_items, Order, Item, User, orders = (self.tables.users,
+ self.tables.items,
+ self.tables.order_items,
+ self.classes.Order,
+ self.classes.Item,
+ self.classes.User,
+ self.tables.orders)
+
+ mapper(Item, items, properties=dict(
+ description=deferred(items.c.description)))
+ mapper(Order, orders, properties=dict(
+ items=relationship(Item, secondary=order_items)))
+ mapper(User, users, properties=dict(
+ orders=relationship(Order, order_by=orders.c.id)))
+
+ sess = create_session()
+ q = sess.query(User).order_by(User.id)
+ l = q.all()
+ item = l[0].orders[1].items[1]
+ def go():
+ eq_(item.description, 'item 4')
+ self.sql_count_(1, go)
+ eq_(item.description, 'item 4')
+
+ sess.expunge_all()
+ l = q.options(undefer('orders.items.description')).all()
+ item = l[0].orders[1].items[1]
+ def go():
+ eq_(item.description, 'item 4')
+ self.sql_count_(0, go)
+ eq_(item.description, 'item 4')
+
+ def test_path_entity(self):
+ """test the legacy *addl_attrs argument."""
+
+ User = self.classes.User
+ Order = self.classes.Order
+ Item = self.classes.Item
+
+ users = self.tables.users
+ orders = self.tables.orders
+ items = self.tables.items
+ order_items = self.tables.order_items
+
+ mapper(User, users, properties={
+ "orders": relationship(Order, lazy="joined")
+ })
+ mapper(Order, orders, properties={
+ "items": relationship(Item, secondary=order_items, lazy="joined")
+ })
+ mapper(Item, items)
+
+ sess = create_session()
+
+ exp = ("SELECT users.id AS users_id, users.name AS users_name, "
+ "items_1.id AS items_1_id, orders_1.id AS orders_1_id, "
+ "orders_1.user_id AS orders_1_user_id, orders_1.address_id "
+ "AS orders_1_address_id, orders_1.description AS "
+ "orders_1_description, orders_1.isopen AS orders_1_isopen "
+ "FROM users LEFT OUTER JOIN orders AS orders_1 "
+ "ON users.id = orders_1.user_id LEFT OUTER JOIN "
+ "(order_items AS order_items_1 JOIN items AS items_1 "
+ "ON items_1.id = order_items_1.item_id) "
+ "ON orders_1.id = order_items_1.order_id")
+
+ q = sess.query(User).options(defer(User.orders, Order.items, Item.description))
+ self.assert_compile(q, exp)
+
+
+ def test_chained_multi_col_options(self):
+ users, User = self.tables.users, self.classes.User
+ orders, Order = self.tables.orders, self.classes.Order
+
+ mapper(User, users, properties={
+ "orders": relationship(Order)
+ })
+ mapper(Order, orders)
+
+ sess = create_session()
+ q = sess.query(User).options(
+ joinedload(User.orders).defer("description").defer("isopen")
+ )
+ self.assert_compile(q,
+ "SELECT users.id AS users_id, users.name AS users_name, "
+ "orders_1.id AS orders_1_id, orders_1.user_id AS orders_1_user_id, "
+ "orders_1.address_id AS orders_1_address_id FROM users "
+ "LEFT OUTER JOIN orders AS orders_1 ON users.id = orders_1.user_id"
+ )
+
+ def test_load_only(self):
+ orders, Order = self.tables.orders, self.classes.Order
+
+ mapper(Order, orders)
+
+ sess = create_session()
+ q = sess.query(Order).options(load_only("isopen", "description"))
+ self.assert_compile(q,
+ "SELECT orders.description AS orders_description, "
+ "orders.isopen AS orders_isopen FROM orders")
+
+ def test_load_only_w_deferred(self):
+ orders, Order = self.tables.orders, self.classes.Order
+
+ mapper(Order, orders, properties={
+ "description": deferred(orders.c.description)
+ })
+
+ sess = create_session()
+ q = sess.query(Order).options(
+ load_only("isopen", "description"),
+ undefer("user_id")
+ )
+ self.assert_compile(q,
+ "SELECT orders.description AS orders_description, "
+ "orders.user_id AS orders_user_id, "
+ "orders.isopen AS orders_isopen FROM orders")
+
+ def test_load_only_propagate_unbound(self):
+ self._test_load_only_propagate(False)
+
+ def test_load_only_propagate_bound(self):
+ self._test_load_only_propagate(True)
+
+ def _test_load_only_propagate(self, use_load):
+ User = self.classes.User
+ Address = self.classes.Address
+
+ users = self.tables.users
+ addresses = self.tables.addresses
+
+ mapper(User, users, properties={
+ "addresses": relationship(Address)
+ })
+ mapper(Address, addresses)
+
+ sess = create_session()
+ expected = [
+ ("SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users WHERE users.id IN (:id_1, :id_2)", {'id_2': 8, 'id_1': 7}),
+ ("SELECT addresses.id AS addresses_id, "
+ "addresses.email_address AS addresses_email_address "
+ "FROM addresses WHERE :param_1 = addresses.user_id", {'param_1': 7}),
+ ("SELECT addresses.id AS addresses_id, "
+ "addresses.email_address AS addresses_email_address "
+ "FROM addresses WHERE :param_1 = addresses.user_id", {'param_1': 8}),
+ ]
+
+ if use_load:
+ opt = Load(User).defaultload(User.addresses).load_only("id", "email_address")
+ else:
+ opt = defaultload(User.addresses).load_only("id", "email_address")
+ q = sess.query(User).options(opt).filter(User.id.in_([7, 8]))
+ def go():
+ for user in q:
+ user.addresses
+
+ self.sql_eq_(go, expected)
+
+
+ def test_load_only_parent_specific(self):
+ User = self.classes.User
+ Address = self.classes.Address
+ Order = self.classes.Order
+
+ users = self.tables.users
+ addresses = self.tables.addresses
+ orders = self.tables.orders
+
+ mapper(User, users)
+ mapper(Address, addresses)
+ mapper(Order, orders)
+
+ sess = create_session()
+ q = sess.query(User, Order, Address).options(
+ Load(User).load_only("name"),
+ Load(Order).load_only("id"),
+ Load(Address).load_only("id", "email_address")
+ )
+
+ self.assert_compile(q,
+ "SELECT users.name AS users_name, orders.id AS orders_id, "
+ "addresses.id AS addresses_id, addresses.email_address "
+ "AS addresses_email_address FROM users, orders, addresses"
+ )
+
+ def test_load_only_path_specific(self):
+ User = self.classes.User
+ Address = self.classes.Address
+ Order = self.classes.Order
+
+ users = self.tables.users
+ addresses = self.tables.addresses
+ orders = self.tables.orders
+
+ mapper(User, users, properties=util.OrderedDict([
+ ("addresses", relationship(Address, lazy="joined")),
+ ("orders", relationship(Order, lazy="joined"))
+ ]))
+
+ mapper(Address, addresses)
+ mapper(Order, orders)
+
+ sess = create_session()
+
+ q = sess.query(User).options(
+ load_only("name").defaultload("addresses").load_only("id", "email_address"),
+ defaultload("orders").load_only("id")
+ )
+
+ # hmmmm joinedload seems to be forcing users.id into here...
+ self.assert_compile(
+ q,
+ "SELECT users.name AS users_name, users.id AS users_id, "
+ "addresses_1.id AS addresses_1_id, "
+ "addresses_1.email_address AS addresses_1_email_address, "
+ "orders_1.id AS orders_1_id FROM users "
+ "LEFT OUTER JOIN addresses AS addresses_1 "
+ "ON users.id = addresses_1.user_id "
+ "LEFT OUTER JOIN orders AS orders_1 ON users.id = orders_1.user_id"
+ )
+
+
diff --git a/test/orm/test_eager_relations.py b/test/orm/test_eager_relations.py
index e53ff6669..f2ba3cc27 100644
--- a/test/orm/test_eager_relations.py
+++ b/test/orm/test_eager_relations.py
@@ -4,7 +4,8 @@ from sqlalchemy.testing import eq_, is_, is_not_
import sqlalchemy as sa
from sqlalchemy import testing
from sqlalchemy.orm import joinedload, deferred, undefer, \
- joinedload_all, backref, eagerload, Session, immediateload
+ joinedload_all, backref, eagerload, Session, immediateload,\
+ defaultload, Load
from sqlalchemy import Integer, String, Date, ForeignKey, and_, select, \
func
from sqlalchemy.testing.schema import Table, Column
@@ -599,7 +600,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
assert 'orders' not in noeagers[0].__dict__
assert 'addresses' not in noeagers[0].__dict__
- @testing.fails_on('maxdb', 'FIXME: unknown')
def test_limit(self):
"""Limit operations combined with lazy-load relationships."""
@@ -654,7 +654,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
eq_(self.static.user_address_result, l)
self.assert_sql_count(testing.db, go, 1)
- @testing.fails_on('maxdb', 'FIXME: unknown')
def test_limit_2(self):
keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
self.tables.items,
@@ -676,7 +675,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
eq_(self.static.item_keyword_result[1:3], l)
- @testing.fails_on('maxdb', 'FIXME: unknown')
def test_limit_3(self):
"""test that the ORDER BY is propagated from the inner
select to the outer select, when using the
@@ -708,7 +706,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
q = sess.query(User)
- if not testing.against('maxdb', 'mssql'):
+ if not testing.against('mssql'):
l = q.join('orders').order_by(Order.user_id.desc()).limit(2).offset(1)
eq_([
User(id=9,
@@ -943,7 +941,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
eq_([User(id=7, address=Address(id=1))], l)
self.assert_sql_count(testing.db, go, 1)
- @testing.fails_on('maxdb', 'FIXME: unknown')
def test_many_to_one(self):
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
@@ -1412,6 +1409,52 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
"WHERE orders.description = :description_1"
)
+ def test_propagated_lazyload_wildcard_unbound(self):
+ self._test_propagated_lazyload_wildcard(False)
+
+ def test_propagated_lazyload_wildcard_bound(self):
+ self._test_propagated_lazyload_wildcard(True)
+
+ def _test_propagated_lazyload_wildcard(self, use_load):
+ users, items, order_items, Order, Item, User, orders = (self.tables.users,
+ self.tables.items,
+ self.tables.order_items,
+ self.classes.Order,
+ self.classes.Item,
+ self.classes.User,
+ self.tables.orders)
+
+ mapper(User, users, properties=dict(
+ orders=relationship(Order, lazy="select")
+ ))
+ mapper(Order, orders, properties=dict(
+ items=relationship(Item, secondary=order_items, lazy="joined")
+ ))
+ mapper(Item, items)
+
+ sess = create_session()
+
+ if use_load:
+ opt = Load(User).defaultload("orders").lazyload("*")
+ else:
+ opt = defaultload("orders").lazyload("*")
+
+ q = sess.query(User).filter(User.id == 7).options(opt)
+
+ def go():
+ for u in q:
+ u.orders
+
+ self.sql_eq_(go, [
+ ("SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users WHERE users.id = :id_1", {"id_1": 7}),
+ ("SELECT orders.id AS orders_id, orders.user_id AS orders_user_id, "
+ "orders.address_id AS orders_address_id, "
+ "orders.description AS orders_description, "
+ "orders.isopen AS orders_isopen FROM orders "
+ "WHERE :param_1 = orders.user_id", {"param_1": 7}),
+ ])
+
class SubqueryAliasingTest(fixtures.MappedTest, testing.AssertsCompiledSQL):
@@ -1875,7 +1918,6 @@ class SelfReferentialEagerTest(fixtures.MappedTest):
Column('parent_id', Integer, ForeignKey('nodes.id')),
Column('data', String(30)))
- @testing.fails_on('maxdb', 'FIXME: unknown')
def test_basic(self):
nodes = self.tables.nodes
@@ -2061,7 +2103,6 @@ class SelfReferentialEagerTest(fixtures.MappedTest):
)
)
- @testing.fails_on('maxdb', 'FIXME: unknown')
def test_no_depth(self):
nodes = self.tables.nodes
@@ -2166,7 +2207,8 @@ class MixedSelfReferentialEagerTest(fixtures.MappedTest):
options(
joinedload('parent_b1'),
joinedload('parent_b2'),
- joinedload('parent_z')).
+ joinedload('parent_z')
+ ).
filter(B.id.in_([2, 8, 11])).order_by(B.id).all(),
[
B(id=2, parent_z=A(id=1), parent_b1=B(id=1), parent_b2=None),
@@ -2804,7 +2846,7 @@ class CyclicalInheritingEagerTestThree(fixtures.DeclarativeMappedTest,
Director = self.classes.Director
sess = create_session()
self.assert_compile(
- sess.query(PersistentObject).options(joinedload(Director.other, join_depth=1)),
+ sess.query(PersistentObject).options(joinedload(Director.other)),
"SELECT persistent.id AS persistent_id, director.id AS director_id, "
"director.other_id AS director_other_id, "
"director.name AS director_name, persistent_1.id AS "
diff --git a/test/orm/test_events.py b/test/orm/test_events.py
index d2dae8ba3..a84ead0fa 100644
--- a/test/orm/test_events.py
+++ b/test/orm/test_events.py
@@ -18,8 +18,6 @@ from sqlalchemy.testing.mock import Mock, call
class _RemoveListeners(object):
def teardown(self):
- # TODO: need to get remove() functionality
- # going
events.MapperEvents._clear()
events.InstanceEvents._clear()
events.SessionEvents._clear()
@@ -362,14 +360,25 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
class SubUser(User):
pass
- canary = []
+ class SubSubUser(SubUser):
+ pass
+
+ canary = Mock()
def evt(x, y, z):
canary.append(x)
- event.listen(User, "before_insert", evt, propagate=True, raw=True)
+ event.listen(User, "before_insert", canary, propagate=True, raw=True)
m = mapper(SubUser, users)
m.dispatch.before_insert(5, 6, 7)
- eq_(canary, [5])
+ eq_(canary.mock_calls,
+ [call(5, 6, 7)])
+
+ m2 = mapper(SubSubUser, users)
+
+ m2.dispatch.before_insert(8, 9, 10)
+ eq_(canary.mock_calls,
+ [call(5, 6, 7), call(8, 9, 10)])
+
def test_deferred_map_event_subclass_no_propagate(self):
"""
@@ -416,6 +425,35 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
m.dispatch.before_insert(5, 6, 7)
eq_(canary, [5])
+ def test_deferred_map_event_subclass_post_mapping_propagate_two(self):
+ """
+ 1. map only subclass of class
+ 2. mapper event listen on class, w propagate
+ 3. event fire should receive event
+
+ """
+ users, User = (self.tables.users,
+ self.classes.User)
+
+ class SubUser(User):
+ pass
+
+ class SubSubUser(SubUser):
+ pass
+
+ m = mapper(SubUser, users)
+
+ canary = Mock()
+ event.listen(User, "before_insert", canary, propagate=True, raw=True)
+
+ m2 = mapper(SubSubUser, users)
+
+ m.dispatch.before_insert(5, 6, 7)
+ eq_(canary.mock_calls, [call(5, 6, 7)])
+
+ m2.dispatch.before_insert(8, 9, 10)
+ eq_(canary.mock_calls, [call(5, 6, 7), call(8, 9, 10)])
+
def test_deferred_instance_event_subclass_post_mapping_propagate(self):
"""
1. map only subclass of class
@@ -507,23 +545,25 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
class SubUser2(User):
pass
- canary = []
- def evt(x):
- canary.append(x)
- event.listen(User, "load", evt, propagate=True, raw=True)
+ canary = Mock()
+ event.listen(User, "load", canary, propagate=True, raw=False)
+ # reversing these fixes....
m = mapper(SubUser, users)
m2 = mapper(User, users)
- m.class_manager.dispatch.load(5)
- eq_(canary, [5])
+ instance = Mock()
+ m.class_manager.dispatch.load(instance)
- m2.class_manager.dispatch.load(5)
- eq_(canary, [5, 5])
+ eq_(canary.mock_calls, [call(instance.obj())])
+
+ m2.class_manager.dispatch.load(instance)
+ eq_(canary.mock_calls, [call(instance.obj()), call(instance.obj())])
m3 = mapper(SubUser2, users)
- m3.class_manager.dispatch.load(5)
- eq_(canary, [5, 5, 5])
+ m3.class_manager.dispatch.load(instance)
+ eq_(canary.mock_calls, [call(instance.obj()),
+ call(instance.obj()), call(instance.obj())])
def test_deferred_instance_event_subclass_no_propagate(self):
"""
@@ -577,21 +617,17 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
class Bar(object):
pass
- listeners = instrumentation._instrumentation_factory.dispatch.\
- attribute_instrument.listeners
- assert not listeners
+ dispatch = instrumentation._instrumentation_factory.dispatch
+ assert not dispatch.attribute_instrument
- canary = []
- def evt(x):
- canary.append(x)
- event.listen(Bar, "attribute_instrument", evt)
+ event.listen(Bar, "attribute_instrument", lambda: None)
- eq_(len(listeners), 1)
+ eq_(len(dispatch.attribute_instrument), 1)
del Bar
gc_collect()
- assert not listeners
+ assert not dispatch.attribute_instrument
def test_deferred_instrument_event_subclass_propagate(self):
@@ -678,6 +714,70 @@ class LoadTest(_fixtures.FixtureTest):
eq_(canary, ['load'])
+class RemovalTest(_fixtures.FixtureTest):
+ run_inserts = None
+
+
+ def test_attr_propagated(self):
+ User = self.classes.User
+
+ users, addresses, User = (self.tables.users,
+ self.tables.addresses,
+ self.classes.User)
+
+ class AdminUser(User):
+ pass
+
+ mapper(User, users)
+ mapper(AdminUser, addresses, inherits=User)
+
+ fn = Mock()
+ event.listen(User.name, "set", fn, propagate=True)
+
+ au = AdminUser()
+ au.name = 'ed'
+
+ eq_(fn.call_count, 1)
+
+ event.remove(User.name, "set", fn)
+
+ au.name = 'jack'
+
+ eq_(fn.call_count, 1)
+
+ def test_unmapped_listen(self):
+ users = self.tables.users
+
+ class Foo(object):
+ pass
+
+ fn = Mock()
+
+ event.listen(Foo, "before_insert", fn, propagate=True)
+
+ class User(Foo):
+ pass
+
+ m = mapper(User, users)
+
+ u1 = User()
+ m.dispatch.before_insert(m, None, attributes.instance_state(u1))
+ eq_(fn.call_count, 1)
+
+ event.remove(Foo, "before_insert", fn)
+
+ # existing event is removed
+ m.dispatch.before_insert(m, None, attributes.instance_state(u1))
+ eq_(fn.call_count, 1)
+
+ # the _HoldEvents is also cleaned out
+ class Bar(Foo):
+ pass
+ m = mapper(Bar, users)
+ b1 = Bar()
+ m.dispatch.before_insert(m, None, attributes.instance_state(b1))
+ eq_(fn.call_count, 1)
+
class RefreshTest(_fixtures.FixtureTest):
run_inserts = None
diff --git a/test/orm/test_expire.py b/test/orm/test_expire.py
index 6eb124cd2..edd243181 100644
--- a/test/orm/test_expire.py
+++ b/test/orm/test_expire.py
@@ -4,7 +4,7 @@ from sqlalchemy.testing import eq_, assert_raises, assert_raises_message
from sqlalchemy.testing.util import gc_collect
import sqlalchemy as sa
from sqlalchemy import testing
-from sqlalchemy import Integer, String, ForeignKey, exc as sa_exc
+from sqlalchemy import Integer, String, ForeignKey, exc as sa_exc, FetchedValue
from sqlalchemy.testing.schema import Table
from sqlalchemy.testing.schema import Column
from sqlalchemy.orm import mapper, relationship, create_session, \
@@ -12,7 +12,7 @@ from sqlalchemy.orm import mapper, relationship, create_session, \
strategies, state, lazyload, backref, Session
from sqlalchemy.testing import fixtures
from test.orm import _fixtures
-
+from sqlalchemy.sql import select
class ExpireTest(_fixtures.FixtureTest):
@@ -375,7 +375,7 @@ class ExpireTest(_fixtures.FixtureTest):
o = sess.query(Order).get(3)
sess.expire(o)
- orders.update(id=3).execute(description='order 3 modified')
+ orders.update().execute(description='order 3 modified')
assert o.isopen == 1
assert attributes.instance_state(o).dict['description'] == 'order 3 modified'
def go():
@@ -850,11 +850,11 @@ class ExpireTest(_fixtures.FixtureTest):
assert len(u.addresses) == 3
sess.expire(u)
assert 'addresses' not in u.__dict__
- print("-------------------------------------------")
sess.query(User).filter_by(id=8).all()
assert 'addresses' in u.__dict__
assert len(u.addresses) == 3
+ @testing.requires.predictable_gc
def test_expire_all(self):
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
@@ -869,16 +869,16 @@ class ExpireTest(_fixtures.FixtureTest):
sess = create_session()
userlist = sess.query(User).order_by(User.id).all()
- assert self.static.user_address_result == userlist
- assert len(list(sess)) == 9
+ eq_(self.static.user_address_result, userlist)
+ eq_(len(list(sess)), 9)
sess.expire_all()
gc_collect()
- assert len(list(sess)) == 4 # since addresses were gc'ed
+ eq_(len(list(sess)), 4) # since addresses were gc'ed
userlist = sess.query(User).order_by(User.id).all()
u = userlist[1]
eq_(self.static.user_address_result, userlist)
- assert len(list(sess)) == 9
+ eq_(len(list(sess)), 9)
def test_state_change_col_to_deferred(self):
"""Behavioral test to verify the current activity of loader callables."""
@@ -1184,6 +1184,152 @@ class ExpiredPendingTest(_fixtures.FixtureTest):
assert len(u1.addresses) == 3
+class LifecycleTest(fixtures.MappedTest):
+ @classmethod
+ def define_tables(cls, metadata):
+ Table("data", metadata,
+ Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('data', String(30)),
+ )
+ Table("data_fetched", metadata,
+ Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('data', String(30), FetchedValue()),
+ )
+ Table("data_defer", metadata,
+ Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('data', String(30)),
+ Column('data2', String(30)),
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class Data(cls.Comparable):
+ pass
+ class DataFetched(cls.Comparable):
+ pass
+ class DataDefer(cls.Comparable):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ mapper(cls.classes.Data, cls.tables.data)
+ mapper(cls.classes.DataFetched, cls.tables.data_fetched)
+ mapper(cls.classes.DataDefer, cls.tables.data_defer, properties={
+ "data": deferred(cls.tables.data_defer.c.data)
+ })
+
+ def test_attr_not_inserted(self):
+ Data = self.classes.Data
+
+ sess = create_session()
+
+ d1 = Data()
+ sess.add(d1)
+ sess.flush()
+
+ # we didn't insert a value for 'data',
+ # so its not in dict, but also when we hit it, it isn't
+ # expired because there's no column default on it or anything like that
+ assert 'data' not in d1.__dict__
+ def go():
+ eq_(d1.data, None)
+
+ self.assert_sql_count(
+ testing.db,
+ go,
+ 0
+ )
+
+ def test_attr_not_inserted_expired(self):
+ Data = self.classes.Data
+
+ sess = create_session()
+
+ d1 = Data()
+ sess.add(d1)
+ sess.flush()
+
+ assert 'data' not in d1.__dict__
+
+ # with an expire, we emit
+ sess.expire(d1)
+
+ def go():
+ eq_(d1.data, None)
+
+ self.assert_sql_count(
+ testing.db,
+ go,
+ 1
+ )
+
+ def test_attr_not_inserted_fetched(self):
+ Data = self.classes.DataFetched
+
+ sess = create_session()
+
+ d1 = Data()
+ sess.add(d1)
+ sess.flush()
+
+ assert 'data' not in d1.__dict__
+ def go():
+ eq_(d1.data, None)
+
+ # this one is marked as "fetch" so we emit SQL
+ self.assert_sql_count(
+ testing.db,
+ go,
+ 1
+ )
+
+ def test_cols_missing_in_load(self):
+ Data = self.classes.Data
+
+ sess = create_session()
+
+ d1 = Data(data='d1')
+ sess.add(d1)
+ sess.flush()
+ sess.close()
+
+ sess = create_session()
+ d1 = sess.query(Data).from_statement(select([Data.id])).first()
+
+ # cols not present in the row are implicitly expired
+ def go():
+ eq_(d1.data, 'd1')
+
+ self.assert_sql_count(
+ testing.db, go, 1
+ )
+
+ def test_deferred_cols_missing_in_load_state_reset(self):
+ Data = self.classes.DataDefer
+
+ sess = create_session()
+
+ d1 = Data(data='d1')
+ sess.add(d1)
+ sess.flush()
+ sess.close()
+
+ sess = create_session()
+ d1 = sess.query(Data).from_statement(
+ select([Data.id])).options(undefer(Data.data)).first()
+ d1.data = 'd2'
+
+ # the deferred loader has to clear out any state
+ # on the col, including that 'd2' here
+ d1 = sess.query(Data).populate_existing().first()
+
+ def go():
+ eq_(d1.data, 'd1')
+
+ self.assert_sql_count(
+ testing.db, go, 1
+ )
+
class RefreshTest(_fixtures.FixtureTest):
def test_refresh(self):
@@ -1290,7 +1436,6 @@ class RefreshTest(_fixtures.FixtureTest):
s.expire(u)
assert len(u.addresses) == 3
- @testing.fails_on('maxdb', 'FIXME: unknown')
def test_refresh2(self):
"""test a hang condition that was occurring on expire/refresh"""
diff --git a/test/orm/test_froms.py b/test/orm/test_froms.py
index 2403f4aae..fd4bef71a 100644
--- a/test/orm/test_froms.py
+++ b/test/orm/test_froms.py
@@ -675,19 +675,18 @@ class AddEntityEquivalenceTest(fixtures.MappedTest, AssertsCompiledSQL):
class InstancesTest(QueryTest, AssertsCompiledSQL):
- def test_from_alias(self):
+ def test_from_alias_one(self):
User, addresses, users = (self.classes.User,
self.tables.addresses,
self.tables.users)
-
- query = users.select(users.c.id==7).\
- union(users.select(users.c.id>7)).\
+ query = users.select(users.c.id == 7).\
+ union(users.select(users.c.id > 7)).\
alias('ulist').\
outerjoin(addresses).\
select(use_labels=True,
order_by=['ulist.id', addresses.c.id])
- sess =create_session()
+ sess = create_session()
q = sess.query(User)
def go():
@@ -697,7 +696,19 @@ class InstancesTest(QueryTest, AssertsCompiledSQL):
assert self.static.user_address_result == l
self.assert_sql_count(testing.db, go, 1)
- sess.expunge_all()
+ def test_from_alias_two(self):
+ User, addresses, users = (self.classes.User,
+ self.tables.addresses,
+ self.tables.users)
+
+ query = users.select(users.c.id == 7).\
+ union(users.select(users.c.id > 7)).\
+ alias('ulist').\
+ outerjoin(addresses).\
+ select(use_labels=True,
+ order_by=['ulist.id', addresses.c.id])
+ sess = create_session()
+ q = sess.query(User)
def go():
l = q.options(contains_alias('ulist'),
@@ -706,6 +717,19 @@ class InstancesTest(QueryTest, AssertsCompiledSQL):
assert self.static.user_address_result == l
self.assert_sql_count(testing.db, go, 1)
+ def test_from_alias_three(self):
+ User, addresses, users = (self.classes.User,
+ self.tables.addresses,
+ self.tables.users)
+
+ query = users.select(users.c.id == 7).\
+ union(users.select(users.c.id > 7)).\
+ alias('ulist').\
+ outerjoin(addresses).\
+ select(use_labels=True,
+ order_by=['ulist.id', addresses.c.id])
+ sess = create_session()
+
# better way. use select_entity_from()
def go():
l = sess.query(User).select_entity_from(query).\
@@ -713,12 +737,19 @@ class InstancesTest(QueryTest, AssertsCompiledSQL):
assert self.static.user_address_result == l
self.assert_sql_count(testing.db, go, 1)
+ def test_from_alias_four(self):
+ User, addresses, users = (self.classes.User,
+ self.tables.addresses,
+ self.tables.users)
+
+ sess = create_session()
+
# same thing, but alias addresses, so that the adapter
# generated by select_entity_from() is wrapped within
# the adapter created by contains_eager()
adalias = addresses.alias()
- query = users.select(users.c.id==7).\
- union(users.select(users.c.id>7)).\
+ query = users.select(users.c.id == 7).\
+ union(users.select(users.c.id > 7)).\
alias('ulist').\
outerjoin(adalias).\
select(use_labels=True,
@@ -902,6 +933,11 @@ class InstancesTest(QueryTest, AssertsCompiledSQL):
order_by(users.c.id, oalias.c.id, ialias.c.id)
# test using Alias with more than one level deep
+
+ # new way:
+ #from sqlalchemy.orm.strategy_options import Load
+ #opt = Load(User).contains_eager('orders', alias=oalias).contains_eager('items', alias=ialias)
+
def go():
l = list(q.options(
contains_eager('orders', alias=oalias),
@@ -1781,7 +1817,6 @@ class SelectFromTest(QueryTest, AssertsCompiledSQL):
users, User = self.tables.users, self.classes.User
-
mapper(User, users)
sess = create_session()
@@ -1790,21 +1825,21 @@ class SelectFromTest(QueryTest, AssertsCompiledSQL):
ualias = aliased(User)
self.assert_compile(
- sess.query(User).join(sel, User.id>sel.c.id),
+ sess.query(User).join(sel, User.id > sel.c.id),
"SELECT users.id AS users_id, users.name AS users_name FROM "
"users JOIN (SELECT users.id AS id, users.name AS name FROM "
"users WHERE users.id IN (:id_1, :id_2)) AS anon_1 ON users.id > anon_1.id",
)
self.assert_compile(
- sess.query(ualias).select_entity_from(sel).filter(ualias.id>sel.c.id),
+ sess.query(ualias).select_entity_from(sel).filter(ualias.id > sel.c.id),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name FROM "
"users AS users_1, (SELECT users.id AS id, users.name AS name FROM "
"users WHERE users.id IN (:id_1, :id_2)) AS anon_1 WHERE users_1.id > anon_1.id",
)
self.assert_compile(
- sess.query(ualias).select_entity_from(sel).join(ualias, ualias.id>sel.c.id),
+ sess.query(ualias).select_entity_from(sel).join(ualias, ualias.id > sel.c.id),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM (SELECT users.id AS id, users.name AS name "
"FROM users WHERE users.id IN (:id_1, :id_2)) AS anon_1 "
@@ -1812,29 +1847,26 @@ class SelectFromTest(QueryTest, AssertsCompiledSQL):
)
self.assert_compile(
- sess.query(ualias).select_entity_from(sel).join(ualias, ualias.id>User.id),
+ sess.query(ualias).select_entity_from(sel).join(ualias, ualias.id > User.id),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM (SELECT users.id AS id, users.name AS name FROM "
"users WHERE users.id IN (:id_1, :id_2)) AS anon_1 "
- "JOIN users AS users_1 ON anon_1.id < users_1.id"
+ "JOIN users AS users_1 ON users_1.id > anon_1.id"
)
salias = aliased(User, sel)
self.assert_compile(
- sess.query(salias).join(ualias, ualias.id>salias.id),
+ sess.query(salias).join(ualias, ualias.id > salias.id),
"SELECT anon_1.id AS anon_1_id, anon_1.name AS anon_1_name FROM "
"(SELECT users.id AS id, users.name AS name FROM users WHERE users.id "
"IN (:id_1, :id_2)) AS anon_1 JOIN users AS users_1 ON users_1.id > anon_1.id",
)
-
- # this one uses an explicit join(left, right, onclause) so works
self.assert_compile(
- sess.query(ualias).select_entity_from(join(sel, ualias, ualias.id>sel.c.id)),
+ sess.query(ualias).select_entity_from(join(sel, ualias, ualias.id > sel.c.id)),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name FROM "
"(SELECT users.id AS id, users.name AS name FROM users WHERE users.id "
- "IN (:id_1, :id_2)) AS anon_1 JOIN users AS users_1 ON users_1.id > anon_1.id",
- use_default_dialect=True
+ "IN (:id_1, :id_2)) AS anon_1 JOIN users AS users_1 ON users_1.id > anon_1.id"
)
@@ -1848,25 +1880,31 @@ class SelectFromTest(QueryTest, AssertsCompiledSQL):
self.assert_compile(
sess.query(User).select_from(ua).join(User, ua.name > User.name),
"SELECT users.id AS users_id, users.name AS users_name "
- "FROM users AS users_1 JOIN users ON users.name < users_1.name"
+ "FROM users AS users_1 JOIN users ON users_1.name > users.name"
)
self.assert_compile(
sess.query(User.name).select_from(ua).join(User, ua.name > User.name),
"SELECT users.name AS users_name FROM users AS users_1 "
- "JOIN users ON users.name < users_1.name"
+ "JOIN users ON users_1.name > users.name"
)
self.assert_compile(
sess.query(ua.name).select_from(ua).join(User, ua.name > User.name),
"SELECT users_1.name AS users_1_name FROM users AS users_1 "
- "JOIN users ON users.name < users_1.name"
+ "JOIN users ON users_1.name > users.name"
)
self.assert_compile(
sess.query(ua).select_from(User).join(ua, ua.name > User.name),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
- "FROM users JOIN users AS users_1 ON users.name < users_1.name"
+ "FROM users JOIN users AS users_1 ON users_1.name > users.name"
+ )
+
+ self.assert_compile(
+ sess.query(ua).select_from(User).join(ua, User.name > ua.name),
+ "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
+ "FROM users JOIN users AS users_1 ON users.name > users_1.name"
)
# this is tested in many other places here, just adding it
diff --git a/test/orm/test_generative.py b/test/orm/test_generative.py
index 52858cc26..cbe559db9 100644
--- a/test/orm/test_generative.py
+++ b/test/orm/test_generative.py
@@ -43,7 +43,6 @@ class GenerativeQueryTest(fixtures.MappedTest):
assert res.order_by(Foo.bar)[0].bar == 5
assert res.order_by(sa.desc(Foo.bar))[0].bar == 95
- @testing.fails_on('maxdb', 'FIXME: unknown')
def test_slice(self):
Foo = self.classes.Foo
diff --git a/test/orm/test_inspect.py b/test/orm/test_inspect.py
index 61c1fd93e..5f5457943 100644
--- a/test/orm/test_inspect.py
+++ b/test/orm/test_inspect.py
@@ -130,8 +130,8 @@ class TestORMInspection(_fixtures.FixtureTest):
User = self.classes.User
insp = inspect(User)
eq_(
- set(insp.attrs.keys()),
- set(['addresses', 'orders', 'id', 'name', 'name_syn'])
+ list(insp.attrs.keys()),
+ ['addresses', 'orders', 'id', 'name', 'name_syn']
)
def test_col_filter(self):
@@ -365,7 +365,7 @@ class TestORMInspection(_fixtures.FixtureTest):
[]
)
- def test_instance_state_attr_hist(self):
+ def test_instance_state_collection_attr_hist(self):
User = self.classes.User
u1 = User(name='ed')
insp = inspect(u1)
@@ -379,6 +379,48 @@ class TestORMInspection(_fixtures.FixtureTest):
hist.unchanged, []
)
+ def test_instance_state_scalar_attr_hist(self):
+ User = self.classes.User
+ u1 = User(name='ed')
+ sess = Session()
+ sess.add(u1)
+ sess.commit()
+ assert 'name' not in u1.__dict__
+ insp = inspect(u1)
+ hist = insp.attrs.name.history
+ eq_(
+ hist.unchanged, None
+ )
+ assert 'name' not in u1.__dict__
+
+ def test_instance_state_collection_attr_load_hist(self):
+ User = self.classes.User
+ u1 = User(name='ed')
+ insp = inspect(u1)
+ hist = insp.attrs.addresses.load_history()
+ eq_(
+ hist.unchanged, ()
+ )
+ u1.addresses
+ hist = insp.attrs.addresses.load_history()
+ eq_(
+ hist.unchanged, []
+ )
+
+ def test_instance_state_scalar_attr_hist_load(self):
+ User = self.classes.User
+ u1 = User(name='ed')
+ sess = Session()
+ sess.add(u1)
+ sess.commit()
+ assert 'name' not in u1.__dict__
+ insp = inspect(u1)
+ hist = insp.attrs.name.load_history()
+ eq_(
+ hist.unchanged, ['ed']
+ )
+ assert 'name' in u1.__dict__
+
def test_instance_state_ident_transient(self):
User = self.classes.User
u1 = User(name='ed')
diff --git a/test/orm/test_joins.py b/test/orm/test_joins.py
index 21b82f408..5f48b39b1 100644
--- a/test/orm/test_joins.py
+++ b/test/orm/test_joins.py
@@ -333,7 +333,32 @@ class InheritedJoinTest(fixtures.MappedTest, AssertsCompiledSQL):
, use_default_dialect = True
)
+ def test_auto_aliasing_multi_link(self):
+ # test [ticket:2903]
+ sess = create_session()
+ Company, Engineer, Manager, Boss = self.classes.Company, \
+ self.classes.Engineer, \
+ self.classes.Manager, self.classes.Boss
+ q = sess.query(Company).\
+ join(Company.employees.of_type(Engineer)).\
+ join(Company.employees.of_type(Manager)).\
+ join(Company.employees.of_type(Boss))
+
+ self.assert_compile(q,
+ "SELECT companies.company_id AS companies_company_id, "
+ "companies.name AS companies_name FROM companies "
+ "JOIN (people JOIN engineers ON people.person_id = engineers.person_id) "
+ "ON companies.company_id = people.company_id "
+ "JOIN (people AS people_1 JOIN managers AS managers_1 "
+ "ON people_1.person_id = managers_1.person_id) "
+ "ON companies.company_id = people_1.company_id "
+ "JOIN (people AS people_2 JOIN managers AS managers_2 "
+ "ON people_2.person_id = managers_2.person_id JOIN boss AS boss_1 "
+ "ON managers_2.person_id = boss_1.boss_id) "
+ "ON companies.company_id = people_2.company_id",
+ use_default_dialect=True
+ )
class JoinTest(QueryTest, AssertsCompiledSQL):
@@ -1582,12 +1607,14 @@ class MultiplePathTest(fixtures.MappedTest, AssertsCompiledSQL):
self.tables.t1t2_2,
self.tables.t1)
- class T1(object):pass
- class T2(object):pass
+ class T1(object):
+ pass
+ class T2(object):
+ pass
mapper(T1, t1, properties={
- 't2s_1':relationship(T2, secondary=t1t2_1),
- 't2s_2':relationship(T2, secondary=t1t2_2),
+ 't2s_1': relationship(T2, secondary=t1t2_1),
+ 't2s_2': relationship(T2, secondary=t1t2_2),
})
mapper(T2, t2)
diff --git a/test/orm/test_lazy_relations.py b/test/orm/test_lazy_relations.py
index 66b1eb5e4..37d290b58 100644
--- a/test/orm/test_lazy_relations.py
+++ b/test/orm/test_lazy_relations.py
@@ -178,7 +178,7 @@ class LazyTest(_fixtures.FixtureTest):
sess = create_session()
q = sess.query(User)
- if testing.against('maxdb', 'mssql'):
+ if testing.against('mssql'):
l = q.limit(2).all()
assert self.static.user_all_result[:2] == l
else:
diff --git a/test/orm/test_lockmode.py b/test/orm/test_lockmode.py
index 0fe82f394..fc473a329 100644
--- a/test/orm/test_lockmode.py
+++ b/test/orm/test_lockmode.py
@@ -2,12 +2,13 @@ from sqlalchemy.engine import default
from sqlalchemy.databases import *
from sqlalchemy.orm import mapper
from sqlalchemy.orm import Session
-from sqlalchemy.testing import AssertsCompiledSQL
+from sqlalchemy.testing import AssertsCompiledSQL, eq_
from sqlalchemy.testing import assert_raises_message
+from sqlalchemy import exc
from test.orm import _fixtures
-class LockModeTest(_fixtures.FixtureTest, AssertsCompiledSQL):
+class LegacyLockModeTest(_fixtures.FixtureTest):
run_inserts = None
@classmethod
@@ -15,100 +16,184 @@ class LockModeTest(_fixtures.FixtureTest, AssertsCompiledSQL):
User, users = cls.classes.User, cls.tables.users
mapper(User, users)
- def test_default_update(self):
+ def _assert_legacy(self, arg, read=False, nowait=False):
+ User = self.classes.User
+ s = Session()
+ q = s.query(User).with_lockmode(arg)
+ sel = q._compile_context().statement
+
+ if arg is None:
+ assert q._for_update_arg is None
+ assert sel._for_update_arg is None
+ return
+
+ assert q._for_update_arg.read is read
+ assert q._for_update_arg.nowait is nowait
+
+ assert sel._for_update_arg.read is read
+ assert sel._for_update_arg.nowait is nowait
+
+ def test_false_legacy(self):
+ self._assert_legacy(None)
+
+ def test_plain_legacy(self):
+ self._assert_legacy("update")
+
+ def test_nowait_legacy(self):
+ self._assert_legacy("update_nowait", nowait=True)
+
+ def test_read_legacy(self):
+ self._assert_legacy("read", read=True)
+
+ def test_unknown_legacy_lock_mode(self):
User = self.classes.User
sess = Session()
- self.assert_compile(sess.query(User.id).with_lockmode('update'),
- "SELECT users.id AS users_id FROM users FOR UPDATE",
- dialect=default.DefaultDialect()
+ assert_raises_message(
+ exc.ArgumentError, "Unknown with_lockmode argument: 'unknown_mode'",
+ sess.query(User.id).with_lockmode, 'unknown_mode'
)
- def test_not_supported_by_dialect_should_just_use_update(self):
+class ForUpdateTest(_fixtures.FixtureTest):
+ @classmethod
+ def setup_mappers(cls):
+ User, users = cls.classes.User, cls.tables.users
+ mapper(User, users)
+
+ def _assert(self, read=False, nowait=False, of=None,
+ assert_q_of=None, assert_sel_of=None):
User = self.classes.User
- sess = Session()
- self.assert_compile(sess.query(User.id).with_lockmode('read'),
- "SELECT users.id AS users_id FROM users FOR UPDATE",
- dialect=default.DefaultDialect()
+ s = Session()
+ q = s.query(User).with_for_update(read=read, nowait=nowait, of=of)
+ sel = q._compile_context().statement
+
+ assert q._for_update_arg.read is read
+ assert sel._for_update_arg.read is read
+
+ assert q._for_update_arg.nowait is nowait
+ assert sel._for_update_arg.nowait is nowait
+
+ eq_(q._for_update_arg.of, assert_q_of)
+ eq_(sel._for_update_arg.of, assert_sel_of)
+
+ def test_read(self):
+ self._assert(read=True)
+
+ def test_plain(self):
+ self._assert()
+
+ def test_nowait(self):
+ self._assert(nowait=True)
+
+ def test_of_single_col(self):
+ User, users = self.classes.User, self.tables.users
+ self._assert(
+ of=User.id,
+ assert_q_of=[users.c.id],
+ assert_sel_of=[users.c.id]
)
- def test_none_lock_mode(self):
+class CompileTest(_fixtures.FixtureTest, AssertsCompiledSQL):
+ """run some compile tests, even though these are redundant."""
+ run_inserts = None
+
+ @classmethod
+ def setup_mappers(cls):
+ User, users = cls.classes.User, cls.tables.users
+ Address, addresses = cls.classes.Address, cls.tables.addresses
+ mapper(User, users)
+ mapper(Address, addresses)
+
+ def test_default_update(self):
User = self.classes.User
sess = Session()
- self.assert_compile(sess.query(User.id).with_lockmode(None),
- "SELECT users.id AS users_id FROM users",
+ self.assert_compile(sess.query(User.id).with_for_update(),
+ "SELECT users.id AS users_id FROM users FOR UPDATE",
dialect=default.DefaultDialect()
)
- def test_unknown_lock_mode(self):
+ def test_not_supported_by_dialect_should_just_use_update(self):
User = self.classes.User
sess = Session()
- assert_raises_message(
- Exception, "Unknown lockmode 'unknown_mode'",
- self.assert_compile,
- sess.query(User.id).with_lockmode('unknown_mode'), None,
+ self.assert_compile(sess.query(User.id).with_for_update(read=True),
+ "SELECT users.id AS users_id FROM users FOR UPDATE",
dialect=default.DefaultDialect()
)
def test_postgres_read(self):
User = self.classes.User
sess = Session()
- self.assert_compile(sess.query(User.id).with_lockmode('read'),
+ self.assert_compile(sess.query(User.id).with_for_update(read=True),
"SELECT users.id AS users_id FROM users FOR SHARE",
- dialect=postgresql.dialect()
+ dialect="postgresql"
)
def test_postgres_read_nowait(self):
User = self.classes.User
sess = Session()
- self.assert_compile(sess.query(User.id).with_lockmode('read_nowait'),
+ self.assert_compile(sess.query(User.id).
+ with_for_update(read=True, nowait=True),
"SELECT users.id AS users_id FROM users FOR SHARE NOWAIT",
- dialect=postgresql.dialect()
+ dialect="postgresql"
)
def test_postgres_update(self):
User = self.classes.User
sess = Session()
- self.assert_compile(sess.query(User.id).with_lockmode('update'),
+ self.assert_compile(sess.query(User.id).with_for_update(),
"SELECT users.id AS users_id FROM users FOR UPDATE",
- dialect=postgresql.dialect()
+ dialect="postgresql"
)
- def test_postgres_update_nowait(self):
+ def test_postgres_update_of(self):
User = self.classes.User
sess = Session()
- self.assert_compile(sess.query(User.id).with_lockmode('update_nowait'),
- "SELECT users.id AS users_id FROM users FOR UPDATE NOWAIT",
- dialect=postgresql.dialect()
+ self.assert_compile(sess.query(User.id).with_for_update(of=User.id),
+ "SELECT users.id AS users_id FROM users FOR UPDATE OF users",
+ dialect="postgresql"
)
- def test_oracle_update(self):
+ def test_postgres_update_of_entity(self):
User = self.classes.User
sess = Session()
- self.assert_compile(sess.query(User.id).with_lockmode('update'),
- "SELECT users.id AS users_id FROM users FOR UPDATE",
- dialect=oracle.dialect()
+ self.assert_compile(sess.query(User.id).with_for_update(of=User),
+ "SELECT users.id AS users_id FROM users FOR UPDATE OF users",
+ dialect="postgresql"
)
- def test_oracle_update_nowait(self):
+ def test_postgres_update_of_entity_list(self):
User = self.classes.User
+ Address = self.classes.Address
+
sess = Session()
- self.assert_compile(sess.query(User.id).with_lockmode('update_nowait'),
- "SELECT users.id AS users_id FROM users FOR UPDATE NOWAIT",
- dialect=oracle.dialect()
+ self.assert_compile(sess.query(User.id, Address.id).
+ with_for_update(of=[User, Address]),
+ "SELECT users.id AS users_id, addresses.id AS addresses_id "
+ "FROM users, addresses FOR UPDATE OF users, addresses",
+ dialect="postgresql"
)
- def test_mysql_read(self):
+ def test_postgres_update_of_list(self):
User = self.classes.User
sess = Session()
- self.assert_compile(sess.query(User.id).with_lockmode('read'),
- "SELECT users.id AS users_id FROM users LOCK IN SHARE MODE",
- dialect=mysql.dialect()
+ self.assert_compile(sess.query(User.id).
+ with_for_update(of=[User.id, User.id, User.id]),
+ "SELECT users.id AS users_id FROM users FOR UPDATE OF users",
+ dialect="postgresql"
)
- def test_mysql_update(self):
+
+ def test_oracle_update(self):
User = self.classes.User
sess = Session()
- self.assert_compile(sess.query(User.id).with_lockmode('update'),
+ self.assert_compile(sess.query(User.id).with_for_update(),
"SELECT users.id AS users_id FROM users FOR UPDATE",
- dialect=mysql.dialect()
+ dialect="oracle"
+ )
+
+ def test_mysql_read(self):
+ User = self.classes.User
+ sess = Session()
+ self.assert_compile(sess.query(User.id).with_for_update(read=True),
+ "SELECT users.id AS users_id FROM users LOCK IN SHARE MODE",
+ dialect="mysql"
)
diff --git a/test/orm/test_mapper.py b/test/orm/test_mapper.py
index 06ec4ce27..32126e0dd 100644
--- a/test/orm/test_mapper.py
+++ b/test/orm/test_mapper.py
@@ -17,6 +17,7 @@ from sqlalchemy.testing import fixtures
from test.orm import _fixtures
from sqlalchemy.testing.assertsql import CompiledSQL
import logging
+import logging.handlers
class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -58,7 +59,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
addresses = self.tables.addresses
Address = self.classes.Address
- from sqlalchemy.orm.util import _is_mapped_class, _is_aliased_class
+ from sqlalchemy.orm.base import _is_mapped_class, _is_aliased_class
class Foo(object):
x = "something"
@@ -95,7 +96,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_entity_descriptor(self):
users = self.tables.users
- from sqlalchemy.orm.util import _entity_descriptor
+ from sqlalchemy.orm.base import _entity_descriptor
class Foo(object):
x = "something"
@@ -195,16 +196,16 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
mapper(User, users)
sa.orm.configure_mappers()
- assert sa.orm.mapperlib._new_mappers is False
+ assert sa.orm.mapperlib.Mapper._new_mappers is False
m = mapper(Address, addresses, properties={
'user': relationship(User, backref="addresses")})
assert m.configured is False
- assert sa.orm.mapperlib._new_mappers is True
+ assert sa.orm.mapperlib.Mapper._new_mappers is True
u = User()
assert User.addresses
- assert sa.orm.mapperlib._new_mappers is False
+ assert sa.orm.mapperlib.Mapper._new_mappers is False
def test_configure_on_session(self):
User, users = self.classes.User, self.tables.users
@@ -302,6 +303,22 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
})
assert User.addresses.property is m.get_property('addresses')
+ def test_unicode_relationship_backref_names(self):
+ # test [ticket:2901]
+ users, Address, addresses, User = (self.tables.users,
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
+
+ mapper(Address, addresses)
+ mapper(User, users, properties={
+ util.u('addresses'): relationship(Address, backref=util.u('user'))
+ })
+ u1 = User()
+ a1 = Address()
+ u1.addresses.append(a1)
+ assert a1.user is u1
+
def test_configure_on_prop_1(self):
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
@@ -1566,6 +1583,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
class_mapper, 5
)
+ def test_unmapped_not_type_error_iter_ok(self):
+ assert_raises_message(
+ sa.exc.ArgumentError,
+ r"Class object expected, got '\(5, 6\)'.",
+ class_mapper, (5, 6)
+ )
+
def test_unmapped_subclass_error_postmap(self):
users = self.tables.users
@@ -1706,7 +1730,6 @@ class ORMLoggingTest(_fixtures.FixtureTest):
class OptionsTest(_fixtures.FixtureTest):
- @testing.fails_on('maxdb', 'FIXME: unknown')
def test_synonym_options(self):
Address, addresses, users, User = (self.classes.Address,
self.tables.addresses,
@@ -1749,7 +1772,6 @@ class OptionsTest(_fixtures.FixtureTest):
eq_(l, self.static.user_address_result)
self.sql_count_(0, go)
- @testing.fails_on('maxdb', 'FIXME: unknown')
def test_eager_options_with_limit(self):
Address, addresses, users, User = (self.classes.Address,
self.tables.addresses,
@@ -1775,7 +1797,6 @@ class OptionsTest(_fixtures.FixtureTest):
eq_(u.id, 8)
eq_(len(u.addresses), 3)
- @testing.fails_on('maxdb', 'FIXME: unknown')
def test_lazy_options_with_limit(self):
Address, addresses, users, User = (self.classes.Address,
self.tables.addresses,
@@ -1924,12 +1945,11 @@ class OptionsTest(_fixtures.FixtureTest):
oalias = aliased(Order)
opt1 = sa.orm.joinedload(User.orders, Order.items)
- opt2a, opt2b = sa.orm.contains_eager(User.orders, Order.items, alias=oalias)
- u1 = sess.query(User).join(oalias, User.orders).options(opt1, opt2a, opt2b).first()
+ opt2 = sa.orm.contains_eager(User.orders, Order.items, alias=oalias)
+ u1 = sess.query(User).join(oalias, User.orders).options(opt1, opt2).first()
ustate = attributes.instance_state(u1)
assert opt1 in ustate.load_options
- assert opt2a not in ustate.load_options
- assert opt2b not in ustate.load_options
+ assert opt2 not in ustate.load_options
class DeepOptionsTest(_fixtures.FixtureTest):
@@ -2038,139 +2058,6 @@ class DeepOptionsTest(_fixtures.FixtureTest):
x = u[0].orders[1].items[0].keywords[1]
self.sql_count_(2, go)
-class ValidatorTest(_fixtures.FixtureTest):
- def test_scalar(self):
- users = self.tables.users
- canary = []
- class User(fixtures.ComparableEntity):
- @validates('name')
- def validate_name(self, key, name):
- canary.append((key, name))
- assert name != 'fred'
- return name + ' modified'
-
- mapper(User, users)
- sess = create_session()
- u1 = User(name='ed')
- eq_(u1.name, 'ed modified')
- assert_raises(AssertionError, setattr, u1, "name", "fred")
- eq_(u1.name, 'ed modified')
- eq_(canary, [('name', 'ed'), ('name', 'fred')])
- sess.add(u1)
- sess.flush()
- sess.expunge_all()
- eq_(sess.query(User).filter_by(name='ed modified').one(), User(name='ed'))
-
- def test_collection(self):
- users, addresses, Address = (self.tables.users,
- self.tables.addresses,
- self.classes.Address)
-
- canary = []
- class User(fixtures.ComparableEntity):
- @validates('addresses')
- def validate_address(self, key, ad):
- canary.append((key, ad))
- assert '@' in ad.email_address
- return ad
-
- mapper(User, users, properties={'addresses':relationship(Address)})
- mapper(Address, addresses)
- sess = create_session()
- u1 = User(name='edward')
- a0 = Address(email_address='noemail')
- assert_raises(AssertionError, u1.addresses.append, a0)
- a1 = Address(id=15, email_address='foo@bar.com')
- u1.addresses.append(a1)
- eq_(canary, [('addresses', a0), ('addresses', a1)])
- sess.add(u1)
- sess.flush()
- sess.expunge_all()
- eq_(
- sess.query(User).filter_by(name='edward').one(),
- User(name='edward', addresses=[Address(email_address='foo@bar.com')])
- )
-
- def test_validators_dict(self):
- users, addresses, Address = (self.tables.users,
- self.tables.addresses,
- self.classes.Address)
-
- class User(fixtures.ComparableEntity):
-
- @validates('name')
- def validate_name(self, key, name):
- assert name != 'fred'
- return name + ' modified'
-
- @validates('addresses')
- def validate_address(self, key, ad):
- assert '@' in ad.email_address
- return ad
-
- def simple_function(self, key, value):
- return key, value
-
- u_m = mapper(User,
- users,
- properties={'addresses':relationship(Address)})
- mapper(Address, addresses)
-
- eq_(
- dict((k, v[0].__name__) for k, v in list(u_m.validators.items())),
- {'name':'validate_name',
- 'addresses':'validate_address'}
- )
-
- def test_validator_w_removes(self):
- users, addresses, Address = (self.tables.users,
- self.tables.addresses,
- self.classes.Address)
- canary = []
- class User(fixtures.ComparableEntity):
-
- @validates('name', include_removes=True)
- def validate_name(self, key, item, remove):
- canary.append((key, item, remove))
- return item
-
- @validates('addresses', include_removes=True)
- def validate_address(self, key, item, remove):
- canary.append((key, item, remove))
- return item
-
- mapper(User,
- users,
- properties={'addresses':relationship(Address)})
- mapper(Address, addresses)
-
- u1 = User()
- u1.name = "ed"
- u1.name = "mary"
- del u1.name
- a1, a2, a3 = Address(), Address(), Address()
- u1.addresses.append(a1)
- u1.addresses.remove(a1)
- u1.addresses = [a1, a2]
- u1.addresses = [a2, a3]
-
- eq_(canary, [
- ('name', 'ed', False),
- ('name', 'mary', False),
- ('name', 'mary', True),
- # append a1
- ('addresses', a1, False),
- # remove a1
- ('addresses', a1, True),
- # set to [a1, a2] - this is two appends
- ('addresses', a1, False), ('addresses', a2, False),
- # set to [a2, a3] - this is a remove of a1,
- # append of a3. the appends are first.
- ('addresses', a3, False),
- ('addresses', a1, True),
- ]
- )
-
class ComparatorFactoryTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_kwarg_accepted(self):
users, Address = self.tables.users, self.classes.Address
@@ -2241,18 +2128,18 @@ class ComparatorFactoryTest(_fixtures.FixtureTest, AssertsCompiledSQL):
self.tables.addresses,
self.classes.User)
- from sqlalchemy.orm.properties import PropertyLoader
+ from sqlalchemy.orm.properties import RelationshipProperty
# NOTE: this API changed in 0.8, previously __clause_element__()
# gave the parent selecatable, now it gives the
# primaryjoin/secondaryjoin
- class MyFactory(PropertyLoader.Comparator):
+ class MyFactory(RelationshipProperty.Comparator):
__hash__ = None
def __eq__(self, other):
return func.foobar(self._source_selectable().c.user_id) == \
func.foobar(other.id)
- class MyFactory2(PropertyLoader.Comparator):
+ class MyFactory2(RelationshipProperty.Comparator):
__hash__ = None
def __eq__(self, other):
return func.foobar(self._source_selectable().c.id) == \
@@ -2285,349 +2172,6 @@ class ComparatorFactoryTest(_fixtures.FixtureTest, AssertsCompiledSQL):
dialect=default.DefaultDialect())
-class DeferredTest(_fixtures.FixtureTest):
-
- def test_basic(self):
- """A basic deferred load."""
-
- Order, orders = self.classes.Order, self.tables.orders
-
-
- mapper(Order, orders, order_by=orders.c.id, properties={
- 'description': deferred(orders.c.description)})
-
- o = Order()
- self.assert_(o.description is None)
-
- q = create_session().query(Order)
- def go():
- l = q.all()
- o2 = l[2]
- x = o2.description
-
- self.sql_eq_(go, [
- ("SELECT orders.id AS orders_id, "
- "orders.user_id AS orders_user_id, "
- "orders.address_id AS orders_address_id, "
- "orders.isopen AS orders_isopen "
- "FROM orders ORDER BY orders.id", {}),
- ("SELECT orders.description AS orders_description "
- "FROM orders WHERE orders.id = :param_1",
- {'param_1':3})])
-
- def test_unsaved(self):
- """Deferred loading does not kick in when just PK cols are set."""
-
- Order, orders = self.classes.Order, self.tables.orders
-
-
- mapper(Order, orders, properties={
- 'description': deferred(orders.c.description)})
-
- sess = create_session()
- o = Order()
- sess.add(o)
- o.id = 7
- def go():
- o.description = "some description"
- self.sql_count_(0, go)
-
- def test_synonym_group_bug(self):
- orders, Order = self.tables.orders, self.classes.Order
-
- mapper(Order, orders, properties={
- 'isopen':synonym('_isopen', map_column=True),
- 'description':deferred(orders.c.description, group='foo')
- })
-
- sess = create_session()
- o1 = sess.query(Order).get(1)
- eq_(o1.description, "order 1")
-
- def test_unsaved_2(self):
- Order, orders = self.classes.Order, self.tables.orders
-
- mapper(Order, orders, properties={
- 'description': deferred(orders.c.description)})
-
- sess = create_session()
- o = Order()
- sess.add(o)
- def go():
- o.description = "some description"
- self.sql_count_(0, go)
-
- def test_unsaved_group(self):
- """Deferred loading doesnt kick in when just PK cols are set"""
-
- orders, Order = self.tables.orders, self.classes.Order
-
-
- mapper(Order, orders, order_by=orders.c.id, properties=dict(
- description=deferred(orders.c.description, group='primary'),
- opened=deferred(orders.c.isopen, group='primary')))
-
- sess = create_session()
- o = Order()
- sess.add(o)
- o.id = 7
- def go():
- o.description = "some description"
- self.sql_count_(0, go)
-
- def test_unsaved_group_2(self):
- orders, Order = self.tables.orders, self.classes.Order
-
- mapper(Order, orders, order_by=orders.c.id, properties=dict(
- description=deferred(orders.c.description, group='primary'),
- opened=deferred(orders.c.isopen, group='primary')))
-
- sess = create_session()
- o = Order()
- sess.add(o)
- def go():
- o.description = "some description"
- self.sql_count_(0, go)
-
- def test_save(self):
- Order, orders = self.classes.Order, self.tables.orders
-
- m = mapper(Order, orders, properties={
- 'description': deferred(orders.c.description)})
-
- sess = create_session()
- o2 = sess.query(Order).get(2)
- o2.isopen = 1
- sess.flush()
-
- def test_group(self):
- """Deferred load with a group"""
-
- orders, Order = self.tables.orders, self.classes.Order
-
- mapper(Order, orders, properties=util.OrderedDict([
- ('userident', deferred(orders.c.user_id, group='primary')),
- ('addrident', deferred(orders.c.address_id, group='primary')),
- ('description', deferred(orders.c.description, group='primary')),
- ('opened', deferred(orders.c.isopen, group='primary'))
- ]))
-
- sess = create_session()
- q = sess.query(Order).order_by(Order.id)
- def go():
- l = q.all()
- o2 = l[2]
- eq_(o2.opened, 1)
- eq_(o2.userident, 7)
- eq_(o2.description, 'order 3')
-
- self.sql_eq_(go, [
- ("SELECT orders.id AS orders_id "
- "FROM orders ORDER BY orders.id", {}),
- ("SELECT orders.user_id AS orders_user_id, "
- "orders.address_id AS orders_address_id, "
- "orders.description AS orders_description, "
- "orders.isopen AS orders_isopen "
- "FROM orders WHERE orders.id = :param_1",
- {'param_1':3})])
-
- o2 = q.all()[2]
- eq_(o2.description, 'order 3')
- assert o2 not in sess.dirty
- o2.description = 'order 3'
- def go():
- sess.flush()
- self.sql_count_(0, go)
-
- def test_preserve_changes(self):
- """A deferred load operation doesn't revert modifications on attributes"""
-
- orders, Order = self.tables.orders, self.classes.Order
-
- mapper(Order, orders, properties = {
- 'userident': deferred(orders.c.user_id, group='primary'),
- 'description': deferred(orders.c.description, group='primary'),
- 'opened': deferred(orders.c.isopen, group='primary')
- })
- sess = create_session()
- o = sess.query(Order).get(3)
- assert 'userident' not in o.__dict__
- o.description = 'somenewdescription'
- eq_(o.description, 'somenewdescription')
- def go():
- eq_(o.opened, 1)
- self.assert_sql_count(testing.db, go, 1)
- eq_(o.description, 'somenewdescription')
- assert o in sess.dirty
-
- def test_commits_state(self):
- """
- When deferred elements are loaded via a group, they get the proper
- CommittedState and don't result in changes being committed
-
- """
-
- orders, Order = self.tables.orders, self.classes.Order
-
- mapper(Order, orders, properties = {
- 'userident':deferred(orders.c.user_id, group='primary'),
- 'description':deferred(orders.c.description, group='primary'),
- 'opened':deferred(orders.c.isopen, group='primary')})
-
- sess = create_session()
- o2 = sess.query(Order).get(3)
-
- # this will load the group of attributes
- eq_(o2.description, 'order 3')
- assert o2 not in sess.dirty
- # this will mark it as 'dirty', but nothing actually changed
- o2.description = 'order 3'
- # therefore the flush() shouldnt actually issue any SQL
- self.assert_sql_count(testing.db, sess.flush, 0)
-
- def test_options(self):
- """Options on a mapper to create deferred and undeferred columns"""
-
- orders, Order = self.tables.orders, self.classes.Order
-
-
- mapper(Order, orders)
-
- sess = create_session()
- q = sess.query(Order).order_by(Order.id).options(defer('user_id'))
-
- def go():
- q.all()[0].user_id
-
- self.sql_eq_(go, [
- ("SELECT orders.id AS orders_id, "
- "orders.address_id AS orders_address_id, "
- "orders.description AS orders_description, "
- "orders.isopen AS orders_isopen "
- "FROM orders ORDER BY orders.id", {}),
- ("SELECT orders.user_id AS orders_user_id "
- "FROM orders WHERE orders.id = :param_1",
- {'param_1':1})])
- sess.expunge_all()
-
- q2 = q.options(sa.orm.undefer('user_id'))
- self.sql_eq_(q2.all, [
- ("SELECT orders.id AS orders_id, "
- "orders.user_id AS orders_user_id, "
- "orders.address_id AS orders_address_id, "
- "orders.description AS orders_description, "
- "orders.isopen AS orders_isopen "
- "FROM orders ORDER BY orders.id",
- {})])
-
- def test_undefer_group(self):
- orders, Order = self.tables.orders, self.classes.Order
-
- mapper(Order, orders, properties=util.OrderedDict([
- ('userident',deferred(orders.c.user_id, group='primary')),
- ('description',deferred(orders.c.description, group='primary')),
- ('opened',deferred(orders.c.isopen, group='primary'))
- ]
- ))
-
- sess = create_session()
- q = sess.query(Order).order_by(Order.id)
- def go():
- l = q.options(sa.orm.undefer_group('primary')).all()
- o2 = l[2]
- eq_(o2.opened, 1)
- eq_(o2.userident, 7)
- eq_(o2.description, 'order 3')
-
- self.sql_eq_(go, [
- ("SELECT orders.user_id AS orders_user_id, "
- "orders.description AS orders_description, "
- "orders.isopen AS orders_isopen, "
- "orders.id AS orders_id, "
- "orders.address_id AS orders_address_id "
- "FROM orders ORDER BY orders.id",
- {})])
-
- def test_locates_col(self):
- """Manually adding a column to the result undefers the column."""
-
- orders, Order = self.tables.orders, self.classes.Order
-
-
- mapper(Order, orders, properties={
- 'description':deferred(orders.c.description)})
-
- sess = create_session()
- o1 = sess.query(Order).order_by(Order.id).first()
- def go():
- eq_(o1.description, 'order 1')
- self.sql_count_(1, go)
-
- sess = create_session()
- o1 = (sess.query(Order).
- order_by(Order.id).
- add_column(orders.c.description).first())[0]
- def go():
- eq_(o1.description, 'order 1')
- self.sql_count_(0, go)
-
- def test_map_selectable_wo_deferred(self):
- """test mapping to a selectable with deferred cols,
- the selectable doesn't include the deferred col.
-
- """
-
- Order, orders = self.classes.Order, self.tables.orders
-
-
- order_select = sa.select([
- orders.c.id,
- orders.c.user_id,
- orders.c.address_id,
- orders.c.description,
- orders.c.isopen]).alias()
- mapper(Order, order_select, properties={
- 'description':deferred(order_select.c.description)
- })
-
- sess = Session()
- o1 = sess.query(Order).order_by(Order.id).first()
- assert 'description' not in o1.__dict__
- eq_(o1.description, 'order 1')
-
- def test_deep_options(self):
- users, items, order_items, Order, Item, User, orders = (self.tables.users,
- self.tables.items,
- self.tables.order_items,
- self.classes.Order,
- self.classes.Item,
- self.classes.User,
- self.tables.orders)
-
- mapper(Item, items, properties=dict(
- description=deferred(items.c.description)))
- mapper(Order, orders, properties=dict(
- items=relationship(Item, secondary=order_items)))
- mapper(User, users, properties=dict(
- orders=relationship(Order, order_by=orders.c.id)))
-
- sess = create_session()
- q = sess.query(User).order_by(User.id)
- l = q.all()
- item = l[0].orders[1].items[1]
- def go():
- eq_(item.description, 'item 4')
- self.sql_count_(1, go)
- eq_(item.description, 'item 4')
-
- sess.expunge_all()
- l = q.options(sa.orm.undefer('orders.items.description')).all()
- item = l[0].orders[1].items[1]
- def go():
- eq_(item.description, 'item 4')
- self.sql_count_(0, go)
- eq_(item.description, 'item 4')
-
class SecondaryOptionsTest(fixtures.MappedTest):
"""test that the contains_eager() option doesn't bleed into a secondary load."""
diff --git a/test/orm/test_naturalpks.py b/test/orm/test_naturalpks.py
index 173408b82..a4663217f 100644
--- a/test/orm/test_naturalpks.py
+++ b/test/orm/test_naturalpks.py
@@ -391,7 +391,7 @@ class NaturalPKTest(fixtures.MappedTest):
# mysqldb executemany() of the association table fails to
# report the correct row count
@testing.fails_if(lambda: testing.against('mysql')
- and not testing.against('+zxjdbc'))
+ and not (testing.against('+zxjdbc') or testing.against('+cymysql')))
def test_manytomany_nonpassive(self):
self._test_manytomany(False)
diff --git a/test/orm/test_of_type.py b/test/orm/test_of_type.py
index 67baddb52..836d85cc7 100644
--- a/test/orm/test_of_type.py
+++ b/test/orm/test_of_type.py
@@ -506,7 +506,7 @@ class SubclassRelationshipTest(testing.AssertsCompiledSQL, fixtures.DeclarativeM
"FROM job AS job_1 LEFT OUTER JOIN subjob AS subjob_1 "
"ON job_1.id = subjob_1.id "
"WHERE data_container.id = job_1.container_id "
- "AND job.id > job_1.id)"
+ "AND job_1.id < job.id)"
)
def test_any_walias(self):
@@ -531,7 +531,7 @@ class SubclassRelationshipTest(testing.AssertsCompiledSQL, fixtures.DeclarativeM
"WHERE EXISTS (SELECT 1 "
"FROM job AS job_1 "
"WHERE data_container.id = job_1.container_id "
- "AND job.id > job_1.id AND job_1.type = :type_1)"
+ "AND job_1.id < job.id AND job_1.type = :type_1)"
)
def test_join_wpoly(self):
diff --git a/test/orm/test_options.py b/test/orm/test_options.py
new file mode 100644
index 000000000..6eba38d15
--- /dev/null
+++ b/test/orm/test_options.py
@@ -0,0 +1,760 @@
+from sqlalchemy import inspect
+from sqlalchemy.orm import attributes, mapper, relationship, backref, \
+ configure_mappers, create_session, synonym, Session, class_mapper, \
+ aliased, column_property, joinedload_all, joinedload, Query,\
+ util as orm_util, Load
+import sqlalchemy as sa
+from sqlalchemy import testing
+from sqlalchemy.testing.assertions import eq_, assert_raises, assert_raises_message
+from test.orm import _fixtures
+
+class QueryTest(_fixtures.FixtureTest):
+ run_setup_mappers = 'once'
+ run_inserts = 'once'
+ run_deletes = None
+
+ @classmethod
+ def setup_mappers(cls):
+ cls._setup_stock_mapping()
+
+class PathTest(object):
+ def _make_path(self, path):
+ r = []
+ for i, item in enumerate(path):
+ if i % 2 == 0:
+ if isinstance(item, type):
+ item = class_mapper(item)
+ else:
+ if isinstance(item, str):
+ item = inspect(r[-1]).mapper.attrs[item]
+ r.append(item)
+ return tuple(r)
+
+ def _make_path_registry(self, path):
+ return orm_util.PathRegistry.coerce(self._make_path(path))
+
+ def _assert_path_result(self, opt, q, paths):
+ q._attributes = q._attributes.copy()
+ attr = {}
+
+ for val in opt._to_bind:
+ val._bind_loader(q, attr, False)
+
+ assert_paths = [k[1] for k in attr]
+ eq_(
+ set([p for p in assert_paths]),
+ set([self._make_path(p) for p in paths])
+ )
+
+class LoadTest(PathTest, QueryTest):
+
+ def test_gen_path_attr_entity(self):
+ User = self.classes.User
+ Address = self.classes.Address
+
+ l = Load(User)
+ eq_(
+ l._generate_path(inspect(User)._path_registry, User.addresses, "relationship"),
+ self._make_path_registry([User, "addresses", Address])
+ )
+
+ def test_gen_path_attr_column(self):
+ User = self.classes.User
+
+ l = Load(User)
+ eq_(
+ l._generate_path(inspect(User)._path_registry, User.name, "column"),
+ self._make_path_registry([User, "name"])
+ )
+
+ def test_gen_path_string_entity(self):
+ User = self.classes.User
+ Address = self.classes.Address
+
+ l = Load(User)
+ eq_(
+ l._generate_path(inspect(User)._path_registry, "addresses", "relationship"),
+ self._make_path_registry([User, "addresses", Address])
+ )
+
+ def test_gen_path_string_column(self):
+ User = self.classes.User
+
+ l = Load(User)
+ eq_(
+ l._generate_path(inspect(User)._path_registry, "name", "column"),
+ self._make_path_registry([User, "name"])
+ )
+
+ def test_gen_path_invalid_from_col(self):
+ User = self.classes.User
+
+ l = Load(User)
+ l.path = self._make_path_registry([User, "name"])
+ assert_raises_message(
+ sa.exc.ArgumentError,
+ "Attribute 'name' of entity 'Mapper|User|users' does "
+ "not refer to a mapped entity",
+ l._generate_path, l.path, User.addresses, "relationship"
+
+ )
+ def test_gen_path_attr_entity_invalid_raiseerr(self):
+ User = self.classes.User
+ Order = self.classes.Order
+
+ l = Load(User)
+
+ assert_raises_message(
+ sa.exc.ArgumentError,
+ "Attribute 'Order.items' does not link from element 'Mapper|User|users'",
+ l._generate_path,
+ inspect(User)._path_registry, Order.items, "relationship",
+ )
+
+ def test_gen_path_attr_entity_invalid_noraiseerr(self):
+ User = self.classes.User
+ Order = self.classes.Order
+
+ l = Load(User)
+
+ eq_(
+ l._generate_path(
+ inspect(User)._path_registry, Order.items, "relationship", False
+ ),
+ None
+ )
+
+ def test_set_strat_ent(self):
+ User = self.classes.User
+
+ l1 = Load(User)
+ l2 = l1.joinedload("addresses")
+ eq_(
+ l1.context,
+ {
+ ('loader', self._make_path([User, "addresses"])): l2
+ }
+ )
+
+ def test_set_strat_col(self):
+ User = self.classes.User
+
+ l1 = Load(User)
+ l2 = l1.defer("name")
+ l3 = list(l2.context.values())[0]
+ eq_(
+ l1.context,
+ {
+ ('loader', self._make_path([User, "name"])): l3
+ }
+ )
+
+
+class OptionsTest(PathTest, QueryTest):
+
+ def _option_fixture(self, *arg):
+ from sqlalchemy.orm import strategy_options
+
+ return strategy_options._UnboundLoad._from_keys(
+ strategy_options._UnboundLoad.joinedload, arg, True, {})
+
+
+
+ def test_get_path_one_level_string(self):
+ User = self.classes.User
+
+ sess = Session()
+ q = sess.query(User)
+
+ opt = self._option_fixture("addresses")
+ self._assert_path_result(opt, q, [(User, 'addresses')])
+
+ def test_get_path_one_level_attribute(self):
+ User = self.classes.User
+
+ sess = Session()
+ q = sess.query(User)
+
+ opt = self._option_fixture(User.addresses)
+ self._assert_path_result(opt, q, [(User, 'addresses')])
+
+ def test_path_on_entity_but_doesnt_match_currentpath(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ # ensure "current path" is fully consumed before
+ # matching against current entities.
+ # see [ticket:2098]
+ sess = Session()
+ q = sess.query(User)
+ opt = self._option_fixture('email_address', 'id')
+ q = sess.query(Address)._with_current_path(
+ orm_util.PathRegistry.coerce([inspect(User),
+ inspect(User).attrs.addresses])
+ )
+ self._assert_path_result(opt, q, [])
+
+ def test_get_path_one_level_with_unrelated(self):
+ Order = self.classes.Order
+
+ sess = Session()
+ q = sess.query(Order)
+ opt = self._option_fixture("addresses")
+ self._assert_path_result(opt, q, [])
+
+ def test_path_multilevel_string(self):
+ Item, User, Order = (self.classes.Item,
+ self.classes.User,
+ self.classes.Order)
+
+ sess = Session()
+ q = sess.query(User)
+
+ opt = self._option_fixture("orders.items.keywords")
+ self._assert_path_result(opt, q, [
+ (User, 'orders'),
+ (User, 'orders', Order, 'items'),
+ (User, 'orders', Order, 'items', Item, 'keywords')
+ ])
+
+ def test_path_multilevel_attribute(self):
+ Item, User, Order = (self.classes.Item,
+ self.classes.User,
+ self.classes.Order)
+
+ sess = Session()
+ q = sess.query(User)
+
+ opt = self._option_fixture(User.orders, Order.items, Item.keywords)
+ self._assert_path_result(opt, q, [
+ (User, 'orders'),
+ (User, 'orders', Order, 'items'),
+ (User, 'orders', Order, 'items', Item, 'keywords')
+ ])
+
+ def test_with_current_matching_string(self):
+ Item, User, Order = (self.classes.Item,
+ self.classes.User,
+ self.classes.Order)
+
+ sess = Session()
+ q = sess.query(Item)._with_current_path(
+ self._make_path_registry([User, 'orders', Order, 'items'])
+ )
+
+ opt = self._option_fixture("orders.items.keywords")
+ self._assert_path_result(opt, q, [
+ (Item, 'keywords')
+ ])
+
+ def test_with_current_matching_attribute(self):
+ Item, User, Order = (self.classes.Item,
+ self.classes.User,
+ self.classes.Order)
+
+ sess = Session()
+ q = sess.query(Item)._with_current_path(
+ self._make_path_registry([User, 'orders', Order, 'items'])
+ )
+
+ opt = self._option_fixture(User.orders, Order.items, Item.keywords)
+ self._assert_path_result(opt, q, [
+ (Item, 'keywords')
+ ])
+
+ def test_with_current_nonmatching_string(self):
+ Item, User, Order = (self.classes.Item,
+ self.classes.User,
+ self.classes.Order)
+
+ sess = Session()
+ q = sess.query(Item)._with_current_path(
+ self._make_path_registry([User, 'orders', Order, 'items'])
+ )
+
+ opt = self._option_fixture("keywords")
+ self._assert_path_result(opt, q, [])
+
+ opt = self._option_fixture("items.keywords")
+ self._assert_path_result(opt, q, [])
+
+ def test_with_current_nonmatching_attribute(self):
+ Item, User, Order = (self.classes.Item,
+ self.classes.User,
+ self.classes.Order)
+
+ sess = Session()
+ q = sess.query(Item)._with_current_path(
+ self._make_path_registry([User, 'orders', Order, 'items'])
+ )
+
+ opt = self._option_fixture(Item.keywords)
+ self._assert_path_result(opt, q, [])
+
+ opt = self._option_fixture(Order.items, Item.keywords)
+ self._assert_path_result(opt, q, [])
+
+ def test_from_base_to_subclass_attr(self):
+ Dingaling, Address = self.classes.Dingaling, self.classes.Address
+
+ sess = Session()
+ class SubAddr(Address):
+ pass
+ mapper(SubAddr, inherits=Address, properties={
+ 'flub': relationship(Dingaling)
+ })
+
+ q = sess.query(Address)
+ opt = self._option_fixture(SubAddr.flub)
+
+ self._assert_path_result(opt, q, [(SubAddr, 'flub')])
+
+ def test_from_subclass_to_subclass_attr(self):
+ Dingaling, Address = self.classes.Dingaling, self.classes.Address
+
+ sess = Session()
+ class SubAddr(Address):
+ pass
+ mapper(SubAddr, inherits=Address, properties={
+ 'flub': relationship(Dingaling)
+ })
+
+ q = sess.query(SubAddr)
+ opt = self._option_fixture(SubAddr.flub)
+
+ self._assert_path_result(opt, q, [(SubAddr, 'flub')])
+
+ def test_from_base_to_base_attr_via_subclass(self):
+ Dingaling, Address = self.classes.Dingaling, self.classes.Address
+
+ sess = Session()
+ class SubAddr(Address):
+ pass
+ mapper(SubAddr, inherits=Address, properties={
+ 'flub': relationship(Dingaling)
+ })
+
+ q = sess.query(Address)
+ opt = self._option_fixture(SubAddr.user)
+
+ self._assert_path_result(opt, q,
+ [(Address, inspect(Address).attrs.user)])
+
+ def test_of_type(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = Session()
+ class SubAddr(Address):
+ pass
+ mapper(SubAddr, inherits=Address)
+
+ q = sess.query(User)
+ opt = self._option_fixture(User.addresses.of_type(SubAddr), SubAddr.user)
+
+ u_mapper = inspect(User)
+ a_mapper = inspect(Address)
+ self._assert_path_result(opt, q, [
+ (u_mapper, u_mapper.attrs.addresses),
+ (u_mapper, u_mapper.attrs.addresses, a_mapper, a_mapper.attrs.user)
+ ])
+
+ def test_of_type_plus_level(self):
+ Dingaling, User, Address = (self.classes.Dingaling,
+ self.classes.User,
+ self.classes.Address)
+
+ sess = Session()
+ class SubAddr(Address):
+ pass
+ mapper(SubAddr, inherits=Address, properties={
+ 'flub': relationship(Dingaling)
+ })
+
+ q = sess.query(User)
+ opt = self._option_fixture(User.addresses.of_type(SubAddr), SubAddr.flub)
+
+ u_mapper = inspect(User)
+ sa_mapper = inspect(SubAddr)
+ self._assert_path_result(opt, q, [
+ (u_mapper, u_mapper.attrs.addresses),
+ (u_mapper, u_mapper.attrs.addresses, sa_mapper, sa_mapper.attrs.flub)
+ ])
+
+ def test_aliased_single(self):
+ User = self.classes.User
+
+ sess = Session()
+ ualias = aliased(User)
+ q = sess.query(ualias)
+ opt = self._option_fixture(ualias.addresses)
+ self._assert_path_result(opt, q, [(inspect(ualias), 'addresses')])
+
+ def test_with_current_aliased_single(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = Session()
+ ualias = aliased(User)
+ q = sess.query(ualias)._with_current_path(
+ self._make_path_registry([Address, 'user'])
+ )
+ opt = self._option_fixture(Address.user, ualias.addresses)
+ self._assert_path_result(opt, q, [(inspect(ualias), 'addresses')])
+
+ def test_with_current_aliased_single_nonmatching_option(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = Session()
+ ualias = aliased(User)
+ q = sess.query(User)._with_current_path(
+ self._make_path_registry([Address, 'user'])
+ )
+ opt = self._option_fixture(Address.user, ualias.addresses)
+ self._assert_path_result(opt, q, [])
+
+ def test_with_current_aliased_single_nonmatching_entity(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = Session()
+ ualias = aliased(User)
+ q = sess.query(ualias)._with_current_path(
+ self._make_path_registry([Address, 'user'])
+ )
+ opt = self._option_fixture(Address.user, User.addresses)
+ self._assert_path_result(opt, q, [])
+
+ def test_multi_entity_opt_on_second(self):
+ Item = self.classes.Item
+ Order = self.classes.Order
+ opt = self._option_fixture(Order.items)
+ sess = Session()
+ q = sess.query(Item, Order)
+ self._assert_path_result(opt, q, [(Order, "items")])
+
+ def test_multi_entity_opt_on_string(self):
+ Item = self.classes.Item
+ Order = self.classes.Order
+ opt = self._option_fixture("items")
+ sess = Session()
+ q = sess.query(Item, Order)
+ self._assert_path_result(opt, q, [])
+
+ def test_multi_entity_no_mapped_entities(self):
+ Item = self.classes.Item
+ Order = self.classes.Order
+ opt = self._option_fixture("items")
+ sess = Session()
+ q = sess.query(Item.id, Order.id)
+ self._assert_path_result(opt, q, [])
+
+ def test_path_exhausted(self):
+ User = self.classes.User
+ Item = self.classes.Item
+ Order = self.classes.Order
+ opt = self._option_fixture(User.orders)
+ sess = Session()
+ q = sess.query(Item)._with_current_path(
+ self._make_path_registry([User, 'orders', Order, 'items'])
+ )
+ self._assert_path_result(opt, q, [])
+
+ def test_chained(self):
+ User = self.classes.User
+ Order = self.classes.Order
+ Item = self.classes.Item
+ sess = Session()
+ q = sess.query(User)
+ opt = self._option_fixture(User.orders).joinedload("items")
+ self._assert_path_result(opt, q, [
+ (User, 'orders'),
+ (User, 'orders', Order, "items")
+ ])
+
+ def test_chained_plus_dotted(self):
+ User = self.classes.User
+ Order = self.classes.Order
+ Item = self.classes.Item
+ sess = Session()
+ q = sess.query(User)
+ opt = self._option_fixture("orders.items").joinedload("keywords")
+ self._assert_path_result(opt, q, [
+ (User, 'orders'),
+ (User, 'orders', Order, "items"),
+ (User, 'orders', Order, "items", Item, "keywords")
+ ])
+
+ def test_chained_plus_multi(self):
+ User = self.classes.User
+ Order = self.classes.Order
+ Item = self.classes.Item
+ sess = Session()
+ q = sess.query(User)
+ opt = self._option_fixture(User.orders, Order.items).joinedload("keywords")
+ self._assert_path_result(opt, q, [
+ (User, 'orders'),
+ (User, 'orders', Order, "items"),
+ (User, 'orders', Order, "items", Item, "keywords")
+ ])
+
+
+class OptionsNoPropTest(_fixtures.FixtureTest):
+ """test the error messages emitted when using property
+ options in conjunction with column-only entities, or
+ for nonexistent options
+
+ """
+
+ run_create_tables = False
+ run_inserts = None
+ run_deletes = None
+
+ def test_option_with_mapper_basestring(self):
+ Item = self.classes.Item
+
+ self._assert_option([Item], 'keywords')
+
+ def test_option_with_mapper_PropCompatator(self):
+ Item = self.classes.Item
+
+ self._assert_option([Item], Item.keywords)
+
+ def test_option_with_mapper_then_column_basestring(self):
+ Item = self.classes.Item
+
+ self._assert_option([Item, Item.id], 'keywords')
+
+ def test_option_with_mapper_then_column_PropComparator(self):
+ Item = self.classes.Item
+
+ self._assert_option([Item, Item.id], Item.keywords)
+
+ def test_option_with_column_then_mapper_basestring(self):
+ Item = self.classes.Item
+
+ self._assert_option([Item.id, Item], 'keywords')
+
+ def test_option_with_column_then_mapper_PropComparator(self):
+ Item = self.classes.Item
+
+ self._assert_option([Item.id, Item], Item.keywords)
+
+ def test_option_with_column_basestring(self):
+ Item = self.classes.Item
+
+ message = \
+ "Query has only expression-based entities - "\
+ "can't find property named 'keywords'."
+ self._assert_eager_with_just_column_exception(Item.id,
+ 'keywords', message)
+
+ def test_option_with_column_PropComparator(self):
+ Item = self.classes.Item
+
+ self._assert_eager_with_just_column_exception(Item.id,
+ Item.keywords,
+ "Query has only expression-based entities "
+ "- can't find property named 'keywords'."
+ )
+
+ def test_option_against_nonexistent_PropComparator(self):
+ Item = self.classes.Item
+ Keyword = self.classes.Keyword
+ self._assert_eager_with_entity_exception(
+ [Keyword],
+ (joinedload(Item.keywords), ),
+ r"Can't find property 'keywords' on any entity specified "
+ r"in this Query. Note the full path from root "
+ r"\(Mapper\|Keyword\|keywords\) to target entity must be specified."
+ )
+
+ def test_option_against_nonexistent_basestring(self):
+ Item = self.classes.Item
+ self._assert_eager_with_entity_exception(
+ [Item],
+ (joinedload("foo"), ),
+ r"Can't find property named 'foo' on the mapped "
+ r"entity Mapper\|Item\|items in this Query."
+ )
+
+ def test_option_against_nonexistent_twolevel_basestring(self):
+ Item = self.classes.Item
+ self._assert_eager_with_entity_exception(
+ [Item],
+ (joinedload("keywords.foo"), ),
+ r"Can't find property named 'foo' on the mapped entity "
+ r"Mapper\|Keyword\|keywords in this Query."
+ )
+
+ def test_option_against_nonexistent_twolevel_all(self):
+ Item = self.classes.Item
+ self._assert_eager_with_entity_exception(
+ [Item],
+ (joinedload_all("keywords.foo"), ),
+ r"Can't find property named 'foo' on the mapped entity "
+ r"Mapper\|Keyword\|keywords in this Query."
+ )
+
+ @testing.fails_if(lambda: True,
+ "PropertyOption doesn't yet check for relation/column on end result")
+ def test_option_against_non_relation_basestring(self):
+ Item = self.classes.Item
+ Keyword = self.classes.Keyword
+ self._assert_eager_with_entity_exception(
+ [Keyword, Item],
+ (joinedload_all("keywords"), ),
+ r"Attribute 'keywords' of entity 'Mapper\|Keyword\|keywords' "
+ "does not refer to a mapped entity"
+ )
+
+ @testing.fails_if(lambda: True,
+ "PropertyOption doesn't yet check for relation/column on end result")
+ def test_option_against_multi_non_relation_basestring(self):
+ Item = self.classes.Item
+ Keyword = self.classes.Keyword
+ self._assert_eager_with_entity_exception(
+ [Keyword, Item],
+ (joinedload_all("keywords"), ),
+ r"Attribute 'keywords' of entity 'Mapper\|Keyword\|keywords' "
+ "does not refer to a mapped entity"
+ )
+
+ def test_option_against_wrong_entity_type_basestring(self):
+ Item = self.classes.Item
+ self._assert_eager_with_entity_exception(
+ [Item],
+ (joinedload_all("id", "keywords"), ),
+ r"Attribute 'id' of entity 'Mapper\|Item\|items' does not "
+ r"refer to a mapped entity"
+ )
+
+ def test_option_against_multi_non_relation_twolevel_basestring(self):
+ Item = self.classes.Item
+ Keyword = self.classes.Keyword
+ self._assert_eager_with_entity_exception(
+ [Keyword, Item],
+ (joinedload_all("id", "keywords"), ),
+ r"Attribute 'id' of entity 'Mapper\|Keyword\|keywords' "
+ "does not refer to a mapped entity"
+ )
+
+ def test_option_against_multi_nonexistent_basestring(self):
+ Item = self.classes.Item
+ Keyword = self.classes.Keyword
+ self._assert_eager_with_entity_exception(
+ [Keyword, Item],
+ (joinedload_all("description"), ),
+ r"Can't find property named 'description' on the mapped "
+ r"entity Mapper\|Keyword\|keywords in this Query."
+ )
+
+ def test_option_against_multi_no_entities_basestring(self):
+ Item = self.classes.Item
+ Keyword = self.classes.Keyword
+ self._assert_eager_with_entity_exception(
+ [Keyword.id, Item.id],
+ (joinedload_all("keywords"), ),
+ r"Query has only expression-based entities - can't find property "
+ "named 'keywords'."
+ )
+
+ def test_option_against_wrong_multi_entity_type_attr_one(self):
+ Item = self.classes.Item
+ Keyword = self.classes.Keyword
+ self._assert_eager_with_entity_exception(
+ [Keyword, Item],
+ (joinedload_all(Keyword.id, Item.keywords), ),
+ r"Attribute 'id' of entity 'Mapper\|Keyword\|keywords' "
+ "does not refer to a mapped entity"
+ )
+
+ def test_option_against_wrong_multi_entity_type_attr_two(self):
+ Item = self.classes.Item
+ Keyword = self.classes.Keyword
+ self._assert_eager_with_entity_exception(
+ [Keyword, Item],
+ (joinedload_all(Keyword.keywords, Item.keywords), ),
+ r"Attribute 'keywords' of entity 'Mapper\|Keyword\|keywords' "
+ "does not refer to a mapped entity"
+ )
+
+ def test_option_against_wrong_multi_entity_type_attr_three(self):
+ Item = self.classes.Item
+ Keyword = self.classes.Keyword
+ self._assert_eager_with_entity_exception(
+ [Keyword.id, Item.id],
+ (joinedload_all(Keyword.keywords, Item.keywords), ),
+ r"Query has only expression-based entities - "
+ "can't find property named 'keywords'."
+ )
+
+ def test_wrong_type_in_option(self):
+ Item = self.classes.Item
+ Keyword = self.classes.Keyword
+ self._assert_eager_with_entity_exception(
+ [Item],
+ (joinedload_all(Keyword), ),
+ r"mapper option expects string key or list of attributes"
+ )
+
+ def test_non_contiguous_all_option(self):
+ User = self.classes.User
+ self._assert_eager_with_entity_exception(
+ [User],
+ (joinedload_all(User.addresses, User.orders), ),
+ r"Attribute 'User.orders' does not link "
+ "from element 'Mapper|Address|addresses'"
+ )
+
+ def test_non_contiguous_all_option_of_type(self):
+ User = self.classes.User
+ Order = self.classes.Order
+ self._assert_eager_with_entity_exception(
+ [User],
+ (joinedload_all(User.addresses, User.orders.of_type(Order)), ),
+ r"Attribute 'User.orders' does not link "
+ "from element 'Mapper|Address|addresses'"
+ )
+
+ @classmethod
+ def setup_mappers(cls):
+ users, User, addresses, Address, orders, Order = (
+ cls.tables.users, cls.classes.User,
+ cls.tables.addresses, cls.classes.Address,
+ cls.tables.orders, cls.classes.Order)
+ mapper(User, users, properties={
+ 'addresses': relationship(Address),
+ 'orders': relationship(Order)
+ })
+ mapper(Address, addresses)
+ mapper(Order, orders)
+ keywords, items, item_keywords, Keyword, Item = (cls.tables.keywords,
+ cls.tables.items,
+ cls.tables.item_keywords,
+ cls.classes.Keyword,
+ cls.classes.Item)
+ mapper(Keyword, keywords, properties={
+ "keywords": column_property(keywords.c.name + "some keyword")
+ })
+ mapper(Item, items,
+ properties=dict(keywords=relationship(Keyword,
+ secondary=item_keywords)))
+
+ def _assert_option(self, entity_list, option):
+ Item = self.classes.Item
+
+ q = create_session().query(*entity_list).\
+ options(joinedload(option))
+ key = ('loader', (inspect(Item), inspect(Item).attrs.keywords))
+ assert key in q._attributes
+
+ def _assert_eager_with_entity_exception(self, entity_list, options,
+ message):
+ assert_raises_message(sa.exc.ArgumentError,
+ message,
+ create_session().query(*entity_list).options,
+ *options)
+
+ def _assert_eager_with_just_column_exception(self, column,
+ eager_option, message):
+ assert_raises_message(sa.exc.ArgumentError, message,
+ create_session().query(column).options,
+ joinedload(eager_option))
+
diff --git a/test/orm/test_pickled.py b/test/orm/test_pickled.py
index b54af93f2..35f1b19d1 100644
--- a/test/orm/test_pickled.py
+++ b/test/orm/test_pickled.py
@@ -20,6 +20,7 @@ from sqlalchemy.testing.pickleable import User, Address, Dingaling, Order, \
class PickleTest(fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
@@ -170,6 +171,7 @@ class PickleTest(fixtures.MappedTest):
sess.add(u2)
assert u2.addresses
+ @testing.requires.non_broken_pickle
def test_instance_deferred_cols(self):
users, addresses = (self.tables.users,
self.tables.addresses)
@@ -240,6 +242,7 @@ class PickleTest(fixtures.MappedTest):
eq_(u1, u2)
+ @testing.requires.non_broken_pickle
def test_options_with_descriptors(self):
users, addresses, dingalings = (self.tables.users,
self.tables.addresses,
@@ -267,7 +270,7 @@ class PickleTest(fixtures.MappedTest):
sa.orm.joinedload("addresses", Address.dingaling),
]:
opt2 = pickle.loads(pickle.dumps(opt))
- eq_(opt.key, opt2.key)
+ eq_(opt.path, opt2.path)
u1 = sess.query(User).options(opt).first()
u2 = pickle.loads(pickle.dumps(u1))
diff --git a/test/orm/test_query.py b/test/orm/test_query.py
index 0973dc357..fea2337ca 100644
--- a/test/orm/test_query.py
+++ b/test/orm/test_query.py
@@ -19,7 +19,7 @@ from sqlalchemy.testing.assertions import eq_, assert_raises, assert_raises_mess
from sqlalchemy.testing import AssertsCompiledSQL
from test.orm import _fixtures
from sqlalchemy.testing import fixtures, engines
-
+from sqlalchemy.orm import Bundle
from sqlalchemy.orm.util import join, outerjoin, with_parent
class QueryTest(_fixtures.FixtureTest):
@@ -74,6 +74,7 @@ class RowTupleTest(QueryTest):
address_alias = aliased(Address, name='aalias')
fn = func.count(User.id)
name_label = User.name.label('uname')
+ bundle = Bundle('b1', User.id, User.name)
for q, asserted in [
(
sess.query(User),
@@ -112,6 +113,15 @@ class RowTupleTest(QueryTest):
'expr':fn
},
]
+ ),
+ (
+ sess.query(bundle),
+ [
+ {'aliased': False,
+ 'expr': bundle,
+ 'type': Bundle,
+ 'name': 'b1'}
+ ]
)
]:
eq_(
@@ -119,6 +129,7 @@ class RowTupleTest(QueryTest):
asserted
)
+
def test_unhashable_type(self):
from sqlalchemy.types import TypeDecorator, Integer
from sqlalchemy.sql import type_coerce
@@ -216,10 +227,13 @@ class RawSelectTest(QueryTest, AssertsCompiledSQL):
where(uu.id == Address.user_id).\
correlate(uu).as_scalar()
]),
- # curious, "address.user_id = uu.id" is reversed here
+ # for a long time, "uu.id = address.user_id" was reversed;
+ # this was resolved as of #2872 and had to do with
+ # InstrumentedAttribute.__eq__() taking precedence over
+ # QueryableAttribute.__eq__()
"SELECT uu.name, addresses.id, "
"(SELECT count(addresses.id) AS count_1 "
- "FROM addresses WHERE addresses.user_id = uu.id) AS anon_1 "
+ "FROM addresses WHERE uu.id = addresses.user_id) AS anon_1 "
"FROM users AS uu, addresses"
)
@@ -688,6 +702,7 @@ class InvalidGenerationsTest(QueryTest, AssertsCompiledSQL):
meth, q, *arg, **kw
)
+
class OperatorTest(QueryTest, AssertsCompiledSQL):
"""test sql.Comparator implementation for MapperProperties"""
@@ -1325,7 +1340,6 @@ class FilterTest(QueryTest, AssertsCompiledSQL):
assert [User(id=10)] == sess.query(User).outerjoin("addresses", aliased=True).filter(~User.addresses.any()).all()
- @testing.crashes('maxdb', 'can dump core')
def test_has(self):
Dingaling, User, Address = (self.classes.Dingaling,
self.classes.User,
@@ -1726,16 +1740,37 @@ class AggregateTest(QueryTest):
class ExistsTest(QueryTest, AssertsCompiledSQL):
+ __dialect__ = 'default'
def test_exists(self):
User = self.classes.User
sess = create_session()
- q1 = sess.query(User).filter(User.name == 'fred')
+
+ q1 = sess.query(User)
self.assert_compile(sess.query(q1.exists()),
'SELECT EXISTS ('
+ 'SELECT 1 FROM users'
+ ') AS anon_1'
+ )
+
+ q2 = sess.query(User).filter(User.name == 'fred')
+ self.assert_compile(sess.query(q2.exists()),
+ 'SELECT EXISTS ('
'SELECT 1 FROM users WHERE users.name = :name_1'
- ') AS anon_1',
- dialect=default.DefaultDialect()
+ ') AS anon_1'
+ )
+
+ def test_exists_col_warning(self):
+ User = self.classes.User
+ Address = self.classes.Address
+ sess = create_session()
+
+ q1 = sess.query(User, Address).filter(User.id == Address.user_id)
+ self.assert_compile(sess.query(q1.exists()),
+ 'SELECT EXISTS ('
+ 'SELECT 1 FROM users, addresses '
+ 'WHERE users.id = addresses.user_id'
+ ') AS anon_1'
)
@@ -1955,7 +1990,7 @@ class HintsTest(QueryTest, AssertsCompiledSQL):
"SELECT users.id AS users_id, users.name AS users_name, "
"users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM users INNER JOIN users AS users_1 USE INDEX (col1_index,col2_index) "
- "ON users.id < users_1.id",
+ "ON users_1.id > users.id",
dialect=dialect
)
@@ -2443,584 +2478,3 @@ class ExecutionOptionsTest(QueryTest):
q1.all()
-class OptionsTest(QueryTest):
- """Test the _process_paths() method of PropertyOption."""
-
- def _option_fixture(self, *arg):
- from sqlalchemy.orm import interfaces
- class Opt(interfaces.PropertyOption):
- pass
- return Opt(arg)
-
- def _make_path(self, path):
- r = []
- for i, item in enumerate(path):
- if i % 2 == 0:
- if isinstance(item, type):
- item = class_mapper(item)
- else:
- if isinstance(item, str):
- item = inspect(r[-1]).mapper.attrs[item]
- r.append(item)
- return tuple(r)
-
- def _make_path_registry(self, path):
- return orm_util.PathRegistry.coerce(self._make_path(path))
-
- def _assert_path_result(self, opt, q, paths):
- q._attributes = q._attributes.copy()
- assert_paths = opt._process_paths(q, False)
- eq_(
- [p.path for p in assert_paths],
- [self._make_path(p) for p in paths]
- )
-
- def test_get_path_one_level_string(self):
- User = self.classes.User
-
- sess = Session()
- q = sess.query(User)
-
- opt = self._option_fixture("addresses")
- self._assert_path_result(opt, q, [(User, 'addresses')])
-
- def test_get_path_one_level_attribute(self):
- User = self.classes.User
-
- sess = Session()
- q = sess.query(User)
-
- opt = self._option_fixture(User.addresses)
- self._assert_path_result(opt, q, [(User, 'addresses')])
-
- def test_path_on_entity_but_doesnt_match_currentpath(self):
- User, Address = self.classes.User, self.classes.Address
-
- # ensure "current path" is fully consumed before
- # matching against current entities.
- # see [ticket:2098]
- sess = Session()
- q = sess.query(User)
- opt = self._option_fixture('email_address', 'id')
- q = sess.query(Address)._with_current_path(
- orm_util.PathRegistry.coerce([inspect(User),
- inspect(User).attrs.addresses])
- )
- self._assert_path_result(opt, q, [])
-
- def test_get_path_one_level_with_unrelated(self):
- Order = self.classes.Order
-
- sess = Session()
- q = sess.query(Order)
- opt = self._option_fixture("addresses")
- self._assert_path_result(opt, q, [])
-
- def test_path_multilevel_string(self):
- Item, User, Order = (self.classes.Item,
- self.classes.User,
- self.classes.Order)
-
- sess = Session()
- q = sess.query(User)
-
- opt = self._option_fixture("orders.items.keywords")
- self._assert_path_result(opt, q, [
- (User, 'orders'),
- (User, 'orders', Order, 'items'),
- (User, 'orders', Order, 'items', Item, 'keywords')
- ])
-
- def test_path_multilevel_attribute(self):
- Item, User, Order = (self.classes.Item,
- self.classes.User,
- self.classes.Order)
-
- sess = Session()
- q = sess.query(User)
-
- opt = self._option_fixture(User.orders, Order.items, Item.keywords)
- self._assert_path_result(opt, q, [
- (User, 'orders'),
- (User, 'orders', Order, 'items'),
- (User, 'orders', Order, 'items', Item, 'keywords')
- ])
-
- def test_with_current_matching_string(self):
- Item, User, Order = (self.classes.Item,
- self.classes.User,
- self.classes.Order)
-
- sess = Session()
- q = sess.query(Item)._with_current_path(
- self._make_path_registry([User, 'orders', Order, 'items'])
- )
-
- opt = self._option_fixture("orders.items.keywords")
- self._assert_path_result(opt, q, [
- (Item, 'keywords')
- ])
-
- def test_with_current_matching_attribute(self):
- Item, User, Order = (self.classes.Item,
- self.classes.User,
- self.classes.Order)
-
- sess = Session()
- q = sess.query(Item)._with_current_path(
- self._make_path_registry([User, 'orders', Order, 'items'])
- )
-
- opt = self._option_fixture(User.orders, Order.items, Item.keywords)
- self._assert_path_result(opt, q, [
- (Item, 'keywords')
- ])
-
- def test_with_current_nonmatching_string(self):
- Item, User, Order = (self.classes.Item,
- self.classes.User,
- self.classes.Order)
-
- sess = Session()
- q = sess.query(Item)._with_current_path(
- self._make_path_registry([User, 'orders', Order, 'items'])
- )
-
- opt = self._option_fixture("keywords")
- self._assert_path_result(opt, q, [])
-
- opt = self._option_fixture("items.keywords")
- self._assert_path_result(opt, q, [])
-
- def test_with_current_nonmatching_attribute(self):
- Item, User, Order = (self.classes.Item,
- self.classes.User,
- self.classes.Order)
-
- sess = Session()
- q = sess.query(Item)._with_current_path(
- self._make_path_registry([User, 'orders', Order, 'items'])
- )
-
- opt = self._option_fixture(Item.keywords)
- self._assert_path_result(opt, q, [])
-
- opt = self._option_fixture(Order.items, Item.keywords)
- self._assert_path_result(opt, q, [])
-
- def test_from_base_to_subclass_attr(self):
- Dingaling, Address = self.classes.Dingaling, self.classes.Address
-
- sess = Session()
- class SubAddr(Address):
- pass
- mapper(SubAddr, inherits=Address, properties={
- 'flub': relationship(Dingaling)
- })
-
- q = sess.query(Address)
- opt = self._option_fixture(SubAddr.flub)
-
- self._assert_path_result(opt, q, [(SubAddr, 'flub')])
-
- def test_from_subclass_to_subclass_attr(self):
- Dingaling, Address = self.classes.Dingaling, self.classes.Address
-
- sess = Session()
- class SubAddr(Address):
- pass
- mapper(SubAddr, inherits=Address, properties={
- 'flub': relationship(Dingaling)
- })
-
- q = sess.query(SubAddr)
- opt = self._option_fixture(SubAddr.flub)
-
- self._assert_path_result(opt, q, [(SubAddr, 'flub')])
-
- def test_from_base_to_base_attr_via_subclass(self):
- Dingaling, Address = self.classes.Dingaling, self.classes.Address
-
- sess = Session()
- class SubAddr(Address):
- pass
- mapper(SubAddr, inherits=Address, properties={
- 'flub': relationship(Dingaling)
- })
-
- q = sess.query(Address)
- opt = self._option_fixture(SubAddr.user)
-
- self._assert_path_result(opt, q,
- [(Address, inspect(Address).attrs.user)])
-
- def test_of_type(self):
- User, Address = self.classes.User, self.classes.Address
-
- sess = Session()
- class SubAddr(Address):
- pass
- mapper(SubAddr, inherits=Address)
-
- q = sess.query(User)
- opt = self._option_fixture(User.addresses.of_type(SubAddr), SubAddr.user)
-
- u_mapper = inspect(User)
- a_mapper = inspect(Address)
- self._assert_path_result(opt, q, [
- (u_mapper, u_mapper.attrs.addresses),
- (u_mapper, u_mapper.attrs.addresses, a_mapper, a_mapper.attrs.user)
- ])
-
- def test_of_type_plus_level(self):
- Dingaling, User, Address = (self.classes.Dingaling,
- self.classes.User,
- self.classes.Address)
-
- sess = Session()
- class SubAddr(Address):
- pass
- mapper(SubAddr, inherits=Address, properties={
- 'flub': relationship(Dingaling)
- })
-
- q = sess.query(User)
- opt = self._option_fixture(User.addresses.of_type(SubAddr), SubAddr.flub)
-
- u_mapper = inspect(User)
- sa_mapper = inspect(SubAddr)
- self._assert_path_result(opt, q, [
- (u_mapper, u_mapper.attrs.addresses),
- (u_mapper, u_mapper.attrs.addresses, sa_mapper, sa_mapper.attrs.flub)
- ])
-
- def test_aliased_single(self):
- User = self.classes.User
-
- sess = Session()
- ualias = aliased(User)
- q = sess.query(ualias)
- opt = self._option_fixture(ualias.addresses)
- self._assert_path_result(opt, q, [(inspect(ualias), 'addresses')])
-
- def test_with_current_aliased_single(self):
- User, Address = self.classes.User, self.classes.Address
-
- sess = Session()
- ualias = aliased(User)
- q = sess.query(ualias)._with_current_path(
- self._make_path_registry([Address, 'user'])
- )
- opt = self._option_fixture(Address.user, ualias.addresses)
- self._assert_path_result(opt, q, [(inspect(ualias), 'addresses')])
-
- def test_with_current_aliased_single_nonmatching_option(self):
- User, Address = self.classes.User, self.classes.Address
-
- sess = Session()
- ualias = aliased(User)
- q = sess.query(User)._with_current_path(
- self._make_path_registry([Address, 'user'])
- )
- opt = self._option_fixture(Address.user, ualias.addresses)
- self._assert_path_result(opt, q, [])
-
- def test_with_current_aliased_single_nonmatching_entity(self):
- User, Address = self.classes.User, self.classes.Address
-
- sess = Session()
- ualias = aliased(User)
- q = sess.query(ualias)._with_current_path(
- self._make_path_registry([Address, 'user'])
- )
- opt = self._option_fixture(Address.user, User.addresses)
- self._assert_path_result(opt, q, [])
-
- def test_multi_entity_opt_on_second(self):
- Item = self.classes.Item
- Order = self.classes.Order
- opt = self._option_fixture(Order.items)
- sess = Session()
- q = sess.query(Item, Order)
- self._assert_path_result(opt, q, [(Order, "items")])
-
- def test_multi_entity_opt_on_string(self):
- Item = self.classes.Item
- Order = self.classes.Order
- opt = self._option_fixture("items")
- sess = Session()
- q = sess.query(Item, Order)
- self._assert_path_result(opt, q, [])
-
- def test_multi_entity_no_mapped_entities(self):
- Item = self.classes.Item
- Order = self.classes.Order
- opt = self._option_fixture("items")
- sess = Session()
- q = sess.query(Item.id, Order.id)
- self._assert_path_result(opt, q, [])
-
- def test_path_exhausted(self):
- User = self.classes.User
- Item = self.classes.Item
- Order = self.classes.Order
- opt = self._option_fixture(User.orders)
- sess = Session()
- q = sess.query(Item)._with_current_path(
- self._make_path_registry([User, 'orders', Order, 'items'])
- )
- self._assert_path_result(opt, q, [])
-
-class OptionsNoPropTest(_fixtures.FixtureTest):
- """test the error messages emitted when using property
- options in conjunection with column-only entities, or
- for not existing options
-
- """
-
- run_create_tables = False
- run_inserts = None
- run_deletes = None
-
- def test_option_with_mapper_basestring(self):
- Item = self.classes.Item
-
- self._assert_option([Item], 'keywords')
-
- def test_option_with_mapper_PropCompatator(self):
- Item = self.classes.Item
-
- self._assert_option([Item], Item.keywords)
-
- def test_option_with_mapper_then_column_basestring(self):
- Item = self.classes.Item
-
- self._assert_option([Item, Item.id], 'keywords')
-
- def test_option_with_mapper_then_column_PropComparator(self):
- Item = self.classes.Item
-
- self._assert_option([Item, Item.id], Item.keywords)
-
- def test_option_with_column_then_mapper_basestring(self):
- Item = self.classes.Item
-
- self._assert_option([Item.id, Item], 'keywords')
-
- def test_option_with_column_then_mapper_PropComparator(self):
- Item = self.classes.Item
-
- self._assert_option([Item.id, Item], Item.keywords)
-
- def test_option_with_column_basestring(self):
- Item = self.classes.Item
-
- message = \
- "Query has only expression-based entities - "\
- "can't find property named 'keywords'."
- self._assert_eager_with_just_column_exception(Item.id,
- 'keywords', message)
-
- def test_option_with_column_PropComparator(self):
- Item = self.classes.Item
-
- self._assert_eager_with_just_column_exception(Item.id,
- Item.keywords,
- "Query has only expression-based entities "
- "- can't find property named 'keywords'."
- )
-
- def test_option_against_nonexistent_PropComparator(self):
- Item = self.classes.Item
- Keyword = self.classes.Keyword
- self._assert_eager_with_entity_exception(
- [Keyword],
- (joinedload(Item.keywords), ),
- r"Can't find property 'keywords' on any entity specified "
- r"in this Query. Note the full path from root "
- r"\(Mapper\|Keyword\|keywords\) to target entity must be specified."
- )
-
- def test_option_against_nonexistent_basestring(self):
- Item = self.classes.Item
- self._assert_eager_with_entity_exception(
- [Item],
- (joinedload("foo"), ),
- r"Can't find property named 'foo' on the mapped "
- r"entity Mapper\|Item\|items in this Query."
- )
-
- def test_option_against_nonexistent_twolevel_basestring(self):
- Item = self.classes.Item
- self._assert_eager_with_entity_exception(
- [Item],
- (joinedload("keywords.foo"), ),
- r"Can't find property named 'foo' on the mapped entity "
- r"Mapper\|Keyword\|keywords in this Query."
- )
-
- def test_option_against_nonexistent_twolevel_all(self):
- Item = self.classes.Item
- self._assert_eager_with_entity_exception(
- [Item],
- (joinedload_all("keywords.foo"), ),
- r"Can't find property named 'foo' on the mapped entity "
- r"Mapper\|Keyword\|keywords in this Query."
- )
-
- @testing.fails_if(lambda:True,
- "PropertyOption doesn't yet check for relation/column on end result")
- def test_option_against_non_relation_basestring(self):
- Item = self.classes.Item
- Keyword = self.classes.Keyword
- self._assert_eager_with_entity_exception(
- [Keyword, Item],
- (joinedload_all("keywords"), ),
- r"Attribute 'keywords' of entity 'Mapper\|Keyword\|keywords' "
- "does not refer to a mapped entity"
- )
-
- @testing.fails_if(lambda:True,
- "PropertyOption doesn't yet check for relation/column on end result")
- def test_option_against_multi_non_relation_basestring(self):
- Item = self.classes.Item
- Keyword = self.classes.Keyword
- self._assert_eager_with_entity_exception(
- [Keyword, Item],
- (joinedload_all("keywords"), ),
- r"Attribute 'keywords' of entity 'Mapper\|Keyword\|keywords' "
- "does not refer to a mapped entity"
- )
-
- def test_option_against_wrong_entity_type_basestring(self):
- Item = self.classes.Item
- self._assert_eager_with_entity_exception(
- [Item],
- (joinedload_all("id", "keywords"), ),
- r"Attribute 'id' of entity 'Mapper\|Item\|items' does not "
- r"refer to a mapped entity"
- )
-
- def test_option_against_multi_non_relation_twolevel_basestring(self):
- Item = self.classes.Item
- Keyword = self.classes.Keyword
- self._assert_eager_with_entity_exception(
- [Keyword, Item],
- (joinedload_all("id", "keywords"), ),
- r"Attribute 'id' of entity 'Mapper\|Keyword\|keywords' "
- "does not refer to a mapped entity"
- )
-
- def test_option_against_multi_nonexistent_basestring(self):
- Item = self.classes.Item
- Keyword = self.classes.Keyword
- self._assert_eager_with_entity_exception(
- [Keyword, Item],
- (joinedload_all("description"), ),
- r"Can't find property named 'description' on the mapped "
- r"entity Mapper\|Keyword\|keywords in this Query."
- )
-
- def test_option_against_multi_no_entities_basestring(self):
- Item = self.classes.Item
- Keyword = self.classes.Keyword
- self._assert_eager_with_entity_exception(
- [Keyword.id, Item.id],
- (joinedload_all("keywords"), ),
- r"Query has only expression-based entities - can't find property "
- "named 'keywords'."
- )
-
- def test_option_against_wrong_multi_entity_type_attr_one(self):
- Item = self.classes.Item
- Keyword = self.classes.Keyword
- self._assert_eager_with_entity_exception(
- [Keyword, Item],
- (joinedload_all(Keyword.id, Item.keywords), ),
- r"Attribute 'Keyword.id' of entity 'Mapper\|Keyword\|keywords' "
- "does not refer to a mapped entity"
- )
-
- def test_option_against_wrong_multi_entity_type_attr_two(self):
- Item = self.classes.Item
- Keyword = self.classes.Keyword
- self._assert_eager_with_entity_exception(
- [Keyword, Item],
- (joinedload_all(Keyword.keywords, Item.keywords), ),
- r"Attribute 'Keyword.keywords' of entity 'Mapper\|Keyword\|keywords' "
- "does not refer to a mapped entity"
- )
-
- def test_option_against_wrong_multi_entity_type_attr_three(self):
- Item = self.classes.Item
- Keyword = self.classes.Keyword
- self._assert_eager_with_entity_exception(
- [Keyword.id, Item.id],
- (joinedload_all(Keyword.keywords, Item.keywords), ),
- r"Query has only expression-based entities - "
- "can't find property named 'keywords'."
- )
-
- def test_wrong_type_in_option(self):
- Item = self.classes.Item
- Keyword = self.classes.Keyword
- self._assert_eager_with_entity_exception(
- [Item],
- (joinedload_all(Keyword), ),
- r"mapper option expects string key or list of attributes"
- )
-
- def test_non_contiguous_all_option(self):
- User = self.classes.User
- self._assert_eager_with_entity_exception(
- [User],
- (joinedload_all(User.addresses, User.orders), ),
- r"Attribute 'User.orders' does not link "
- "from element 'Mapper|Address|addresses'"
- )
-
- @classmethod
- def setup_mappers(cls):
- users, User, addresses, Address, orders, Order = (
- cls.tables.users, cls.classes.User,
- cls.tables.addresses, cls.classes.Address,
- cls.tables.orders, cls.classes.Order)
- mapper(User, users, properties={
- 'addresses': relationship(Address),
- 'orders': relationship(Order)
- })
- mapper(Address, addresses)
- mapper(Order, orders)
- keywords, items, item_keywords, Keyword, Item = (cls.tables.keywords,
- cls.tables.items,
- cls.tables.item_keywords,
- cls.classes.Keyword,
- cls.classes.Item)
- mapper(Keyword, keywords, properties={
- "keywords": column_property(keywords.c.name + "some keyword")
- })
- mapper(Item, items,
- properties=dict(keywords=relationship(Keyword,
- secondary=item_keywords)))
-
- def _assert_option(self, entity_list, option):
- Item = self.classes.Item
-
- q = create_session().query(*entity_list).\
- options(joinedload(option))
- key = ('loaderstrategy', (inspect(Item), inspect(Item).attrs.keywords))
- assert key in q._attributes
-
- def _assert_eager_with_entity_exception(self, entity_list, options,
- message):
- assert_raises_message(sa.exc.ArgumentError,
- message,
- create_session().query(*entity_list).options,
- *options)
-
- def _assert_eager_with_just_column_exception(self, column,
- eager_option, message):
- assert_raises_message(sa.exc.ArgumentError, message,
- create_session().query(column).options,
- joinedload(eager_option))
-
diff --git a/test/orm/test_relationships.py b/test/orm/test_relationships.py
index 8dc9c3c52..717f136c0 100644
--- a/test/orm/test_relationships.py
+++ b/test/orm/test_relationships.py
@@ -14,6 +14,7 @@ from sqlalchemy.testing import eq_, startswith_, AssertsCompiledSQL, is_
from sqlalchemy.testing import fixtures
from test.orm import _fixtures
from sqlalchemy import exc
+from sqlalchemy import inspect
class _RelationshipErrors(object):
def _assert_raises_no_relevant_fks(self, fn, expr, relname,
@@ -1516,6 +1517,117 @@ class TypedAssociationTable(fixtures.MappedTest):
assert t3.count().scalar() == 1
+class ViewOnlyHistoryTest(fixtures.MappedTest):
+ @classmethod
+ def define_tables(cls, metadata):
+ Table("t1", metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)))
+ Table("t2", metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)),
+ Column('t1id', Integer, ForeignKey('t1.id')))
+
+ def _assert_fk(self, a1, b1, is_set):
+ s = Session(testing.db)
+ s.add_all([a1, b1])
+ s.flush()
+
+ if is_set:
+ eq_(b1.t1id, a1.id)
+ else:
+ eq_(b1.t1id, None)
+
+ return s
+
+ def test_o2m_viewonly_oneside(self):
+ class A(fixtures.ComparableEntity):
+ pass
+ class B(fixtures.ComparableEntity):
+ pass
+
+ mapper(A, self.tables.t1, properties={
+ "bs": relationship(B, viewonly=True,
+ backref=backref("a", viewonly=False))
+ })
+ mapper(B, self.tables.t2)
+
+ a1 = A()
+ b1 = B()
+ a1.bs.append(b1)
+ assert b1.a is a1
+ assert not inspect(a1).attrs.bs.history.has_changes()
+ assert inspect(b1).attrs.a.history.has_changes()
+
+ sess = self._assert_fk(a1, b1, True)
+
+ a1.bs.remove(b1)
+ assert a1 not in sess.dirty
+ assert b1 in sess.dirty
+
+ def test_m2o_viewonly_oneside(self):
+ class A(fixtures.ComparableEntity):
+ pass
+ class B(fixtures.ComparableEntity):
+ pass
+
+ mapper(A, self.tables.t1, properties={
+ "bs": relationship(B, viewonly=False,
+ backref=backref("a", viewonly=True))
+ })
+ mapper(B, self.tables.t2)
+
+ a1 = A()
+ b1 = B()
+ b1.a = a1
+ assert b1 in a1.bs
+ assert inspect(a1).attrs.bs.history.has_changes()
+ assert not inspect(b1).attrs.a.history.has_changes()
+
+ sess = self._assert_fk(a1, b1, True)
+
+ a1.bs.remove(b1)
+ assert a1 in sess.dirty
+ assert b1 not in sess.dirty
+
+ def test_o2m_viewonly_only(self):
+ class A(fixtures.ComparableEntity):
+ pass
+ class B(fixtures.ComparableEntity):
+ pass
+
+ mapper(A, self.tables.t1, properties={
+ "bs": relationship(B, viewonly=True)
+ })
+ mapper(B, self.tables.t2)
+
+ a1 = A()
+ b1 = B()
+ a1.bs.append(b1)
+ assert not inspect(a1).attrs.bs.history.has_changes()
+
+ self._assert_fk(a1, b1, False)
+
+ def test_m2o_viewonly_only(self):
+ class A(fixtures.ComparableEntity):
+ pass
+ class B(fixtures.ComparableEntity):
+ pass
+
+ mapper(A, self.tables.t1)
+ mapper(B, self.tables.t2, properties={
+ 'a': relationship(A, viewonly=True)
+ })
+
+ a1 = A()
+ b1 = B()
+ b1.a = a1
+ assert not inspect(b1).attrs.a.history.has_changes()
+
+ self._assert_fk(a1, b1, False)
+
class ViewOnlyM2MBackrefTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
@@ -1551,6 +1663,8 @@ class ViewOnlyM2MBackrefTest(fixtures.MappedTest):
a1 = A()
b1 = B(as_=[a1])
+ assert not inspect(b1).attrs.as_.history.has_changes()
+
sess.add(a1)
sess.flush()
eq_(
@@ -2232,7 +2346,7 @@ class InvalidRemoteSideTest(fixtures.MappedTest):
assert_raises_message(sa.exc.ArgumentError,
"T1.t1s and back-reference T1.parent are "
- "both of the same direction <symbol 'ONETOMANY>. Did you "
+ r"both of the same direction symbol\('ONETOMANY'\). Did you "
"mean to set remote_side on the many-to-one side ?",
configure_mappers)
@@ -2247,7 +2361,7 @@ class InvalidRemoteSideTest(fixtures.MappedTest):
assert_raises_message(sa.exc.ArgumentError,
"T1.t1s and back-reference T1.parent are "
- "both of the same direction <symbol 'MANYTOONE>. Did you "
+ r"both of the same direction symbol\('MANYTOONE'\). Did you "
"mean to set remote_side on the many-to-one side ?",
configure_mappers)
@@ -2261,7 +2375,7 @@ class InvalidRemoteSideTest(fixtures.MappedTest):
# can't be sure of ordering here
assert_raises_message(sa.exc.ArgumentError,
- "both of the same direction <symbol 'ONETOMANY>. Did you "
+ r"both of the same direction symbol\('ONETOMANY'\). Did you "
"mean to set remote_side on the many-to-one side ?",
configure_mappers)
@@ -2277,7 +2391,7 @@ class InvalidRemoteSideTest(fixtures.MappedTest):
# can't be sure of ordering here
assert_raises_message(sa.exc.ArgumentError,
- "both of the same direction <symbol 'MANYTOONE>. Did you "
+ r"both of the same direction symbol\('MANYTOONE'\). Did you "
"mean to set remote_side on the many-to-one side ?",
configure_mappers)
diff --git a/test/orm/test_session.py b/test/orm/test_session.py
index 45164483b..4eb498ee9 100644
--- a/test/orm/test_session.py
+++ b/test/orm/test_session.py
@@ -395,6 +395,27 @@ class SessionStateTest(_fixtures.FixtureTest):
run_inserts = None
+ def test_info(self):
+ s = Session()
+ eq_(s.info, {})
+
+ maker = sessionmaker(info={"global": True, "s1": 5})
+
+ s1 = maker()
+ s2 = maker(info={"s1": 6, "s2": True})
+
+ eq_(s1.info, {"global": True, "s1": 5})
+ eq_(s2.info, {"global": True, "s1": 6, "s2": True})
+ s2.info["global"] = False
+ s2.info["s1"] = 7
+
+ s3 = maker()
+ eq_(s3.info, {"global": True, "s1": 5})
+
+ maker2 = sessionmaker()
+ s4 = maker2(info={'s4': 8})
+ eq_(s4.info, {'s4': 8})
+
@testing.requires.independent_connections
@engines.close_open_connections
def test_autoflush(self):
@@ -418,7 +439,6 @@ class SessionStateTest(_fixtures.FixtureTest):
eq_(bind.connect().execute("select count(1) from users").scalar(), 1)
sess.close()
- @testing.requires.python26
def test_with_no_autoflush(self):
User, users = self.classes.User, self.tables.users
diff --git a/test/orm/test_subquery_relations.py b/test/orm/test_subquery_relations.py
index a6cc37691..f36820e70 100644
--- a/test/orm/test_subquery_relations.py
+++ b/test/orm/test_subquery_relations.py
@@ -10,6 +10,7 @@ from sqlalchemy.testing import eq_, assert_raises, \
assert_raises_message
from sqlalchemy.testing.assertsql import CompiledSQL
from sqlalchemy.testing import fixtures
+from sqlalchemy.testing.entities import ComparableEntity
from test.orm import _fixtures
import sqlalchemy as sa
@@ -632,7 +633,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
], q.all())
self.assert_sql_count(testing.db, go, 6)
- @testing.fails_on('maxdb', 'FIXME: unknown')
def test_limit(self):
"""Limit operations combined with lazy-load relationships."""
@@ -706,7 +706,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
eq_([User(id=7, address=Address(id=1))], l)
self.assert_sql_count(testing.db, go, 2)
- @testing.fails_on('maxdb', 'FIXME: unknown')
def test_many_to_one(self):
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
@@ -1144,7 +1143,6 @@ class SelfReferentialTest(fixtures.MappedTest):
Column('parent_id', Integer, ForeignKey('nodes.id')),
Column('data', String(30)))
- @testing.fails_on('maxdb', 'FIXME: unknown')
def test_basic(self):
nodes = self.tables.nodes
@@ -1309,7 +1307,6 @@ class SelfReferentialTest(fixtures.MappedTest):
]), d)
self.assert_sql_count(testing.db, go, 3)
- @testing.fails_on('maxdb', 'FIXME: unknown')
def test_no_depth(self):
"""no join depth is set, so no eager loading occurs."""
@@ -1563,3 +1560,251 @@ class CyclicalInheritingEagerTestTwo(fixtures.DeclarativeMappedTest,
d = session.query(Director).options(subqueryload('*')).first()
assert len(list(session)) == 3
+
+class SubqueryloadDistinctTest(fixtures.DeclarativeMappedTest,
+ testing.AssertsCompiledSQL):
+ __dialect__ = 'default'
+
+ run_inserts = 'once'
+ run_deletes = None
+
+ @classmethod
+ def setup_classes(cls):
+ Base = cls.DeclarativeBasic
+
+ class Director(Base):
+ __tablename__ = 'director'
+ id = Column(Integer, primary_key=True,
+ test_needs_autoincrement=True)
+ name = Column(String(50))
+
+ class DirectorPhoto(Base):
+ __tablename__ = 'director_photo'
+ id = Column(Integer, primary_key=True,
+ test_needs_autoincrement=True)
+ path = Column(String(255))
+ director_id = Column(Integer, ForeignKey('director.id'))
+ director = relationship(Director, backref="photos")
+
+ class Movie(Base):
+ __tablename__ = 'movie'
+ id = Column(Integer, primary_key=True, test_needs_autoincrement=True)
+ director_id = Column(Integer, ForeignKey('director.id'))
+ director = relationship(Director, backref="movies")
+ title = Column(String(50))
+ credits = relationship("Credit", backref="movie")
+
+ class Credit(Base):
+ __tablename__ = 'credit'
+ id = Column(Integer, primary_key=True, test_needs_autoincrement=True)
+ movie_id = Column(Integer, ForeignKey('movie.id'))
+
+ @classmethod
+ def insert_data(cls):
+ Movie = cls.classes.Movie
+ Director = cls.classes.Director
+ DirectorPhoto = cls.classes.DirectorPhoto
+ Credit = cls.classes.Credit
+
+ d = Director(name='Woody Allen')
+ d.photos = [DirectorPhoto(path='/1.jpg'),
+ DirectorPhoto(path='/2.jpg')]
+ d.movies = [Movie(title='Manhattan', credits=[Credit(), Credit()]),
+ Movie(title='Sweet and Lowdown', credits=[Credit()])]
+ sess = create_session()
+ sess.add_all([d])
+ sess.flush()
+
+ def test_distinct_strategy_opt_m2o(self):
+ self._run_test_m2o(True, None)
+ self._run_test_m2o(False, None)
+
+ def test_distinct_unrelated_opt_m2o(self):
+ self._run_test_m2o(None, True)
+ self._run_test_m2o(None, False)
+
+ def _run_test_m2o(self,
+ director_strategy_level,
+ photo_strategy_level):
+
+ # test where the innermost is m2o, e.g.
+ # Movie->director
+
+ Movie = self.classes.Movie
+ Director = self.classes.Director
+
+ Movie.director.property.distinct_target_key = director_strategy_level
+ Director.photos.property.distinct_target_key = photo_strategy_level
+
+ # the DISTINCT is controlled by
+ # only the Movie->director relationship, *not* the
+ # Director.photos
+ expect_distinct = director_strategy_level in (True, None)
+
+ s = create_session()
+
+ q = (
+ s.query(Movie)
+ .options(
+ subqueryload(Movie.director)
+ .subqueryload(Director.photos)
+ )
+ )
+ ctx = q._compile_context()
+
+ q2 = ctx.attributes[
+ ('subquery', (inspect(Movie), inspect(Movie).attrs.director))
+ ]
+ self.assert_compile(
+ q2,
+ 'SELECT director.id AS director_id, '
+ 'director.name AS director_name, '
+ 'anon_1.movie_director_id AS anon_1_movie_director_id '
+ 'FROM (SELECT%s movie.director_id AS movie_director_id '
+ 'FROM movie) AS anon_1 '
+ 'JOIN director ON director.id = anon_1.movie_director_id '
+ 'ORDER BY anon_1.movie_director_id' % (
+ " DISTINCT" if expect_distinct else "")
+ )
+
+ ctx2 = q2._compile_context()
+ result = s.execute(q2)
+ rows = result.fetchall()
+
+ if expect_distinct:
+ eq_(rows, [
+ (1, 'Woody Allen', 1),
+ ])
+ else:
+ eq_(rows, [
+ (1, 'Woody Allen', 1), (1, 'Woody Allen', 1),
+ ])
+
+ q3 = ctx2.attributes[
+ ('subquery', (inspect(Director), inspect(Director).attrs.photos))
+ ]
+
+ self.assert_compile(
+ q3,
+ 'SELECT director_photo.id AS director_photo_id, '
+ 'director_photo.path AS director_photo_path, '
+ 'director_photo.director_id AS director_photo_director_id, '
+ 'director_1.id AS director_1_id '
+ 'FROM (SELECT%s movie.director_id AS movie_director_id '
+ 'FROM movie) AS anon_1 '
+ 'JOIN director AS director_1 ON director_1.id = anon_1.movie_director_id '
+ 'JOIN director_photo ON director_1.id = director_photo.director_id '
+ 'ORDER BY director_1.id' % (
+ " DISTINCT" if expect_distinct else "")
+ )
+ result = s.execute(q3)
+ rows = result.fetchall()
+ if expect_distinct:
+ eq_(set(tuple(t) for t in rows), set([
+ (1, u'/1.jpg', 1, 1),
+ (2, u'/2.jpg', 1, 1),
+ ]))
+ else:
+ # oracle might not order the way we expect here
+ eq_(set(tuple(t) for t in rows), set([
+ (1, u'/1.jpg', 1, 1),
+ (2, u'/2.jpg', 1, 1),
+ (1, u'/1.jpg', 1, 1),
+ (2, u'/2.jpg', 1, 1),
+ ]))
+
+
+ movies = q.all()
+
+ # check number of persistent objects in session
+ eq_(len(list(s)), 5)
+
+ def test_cant_do_distinct_in_joins(self):
+ """the DISTINCT feature here works when the m2o is in the innermost
+ mapper, but when we are just joining along relationships outside
+ of that, we can still have dupes, and there's no solution to that.
+
+ """
+ Movie = self.classes.Movie
+ Credit = self.classes.Credit
+
+ s = create_session()
+
+ q = (
+ s.query(Credit)
+ .options(
+ subqueryload(Credit.movie)
+ .subqueryload(Movie.director)
+ )
+ )
+
+ ctx = q._compile_context()
+
+ q2 = ctx.attributes[
+ ('subquery', (inspect(Credit), Credit.movie.property))
+ ]
+ ctx2 = q2._compile_context()
+ q3 = ctx2.attributes[
+ ('subquery', (inspect(Movie), Movie.director.property))
+ ]
+
+ result = s.execute(q3)
+ eq_(
+ result.fetchall(),
+ [
+ (1, 'Woody Allen', 1), (1, 'Woody Allen', 1),
+ ]
+ )
+
+
+class JoinedNoLoadConflictTest(fixtures.DeclarativeMappedTest):
+ """test for [ticket:2887]"""
+
+ @classmethod
+ def setup_classes(cls):
+ Base = cls.DeclarativeBasic
+
+ class Parent(ComparableEntity, Base):
+ __tablename__ = 'parent'
+
+ id = Column(Integer, primary_key=True, test_needs_autoincrement=True)
+ name = Column(String(20))
+
+ children = relationship('Child',
+ back_populates='parent',
+ lazy='noload'
+ )
+
+ class Child(ComparableEntity, Base):
+ __tablename__ = 'child'
+
+ id = Column(Integer, primary_key=True, test_needs_autoincrement=True)
+ name = Column(String(20))
+ parent_id = Column(Integer, ForeignKey('parent.id'))
+
+ parent = relationship('Parent', back_populates='children', lazy='joined')
+
+ @classmethod
+ def insert_data(cls):
+ Parent = cls.classes.Parent
+ Child = cls.classes.Child
+
+ s = Session()
+ s.add(Parent(name='parent', children=[Child(name='c1')]))
+ s.commit()
+
+ def test_subqueryload_on_joined_noload(self):
+ Parent = self.classes.Parent
+ Child = self.classes.Child
+
+ s = Session()
+
+ # here we have Parent->subqueryload->Child->joinedload->parent->noload->children.
+ # the actual subqueryload has to emit *after* we've started populating
+ # Parent->subqueryload->child.
+ parent = s.query(Parent).options([subqueryload('children')]).first()
+ eq_(
+ parent.children,
+ [Child(name='c1')]
+ )
+
diff --git a/test/orm/test_unitofwork.py b/test/orm/test_unitofwork.py
index 4b9799d47..386280a50 100644
--- a/test/orm/test_unitofwork.py
+++ b/test/orm/test_unitofwork.py
@@ -849,21 +849,40 @@ class DefaultTest(fixtures.MappedTest):
eq_(h5.foober, 'im the new foober')
@testing.fails_on('firebird', 'Data type unknown on the parameter')
+ @testing.fails_on("oracle+cx_oracle", "seems like a cx_oracle bug")
def test_eager_defaults(self):
hohoval, default_t, Hoho = (self.other.hohoval,
self.tables.default_t,
self.classes.Hoho)
+ Secondary = self.classes.Secondary
+
+ mapper(Hoho, default_t, eager_defaults=True, properties={
+ "sec": relationship(Secondary),
+ "syn": sa.orm.synonym(default_t.c.counter)
+ })
- mapper(Hoho, default_t, eager_defaults=True)
+ mapper(Secondary, self.tables.secondary_table)
h1 = Hoho()
session = create_session()
session.add(h1)
- session.flush()
+
+ if testing.db.dialect.implicit_returning:
+ self.sql_count_(1, session.flush)
+ else:
+ self.sql_count_(2, session.flush)
self.sql_count_(0, lambda: eq_(h1.hoho, hohoval))
+ # no actual eager defaults, make sure error isn't raised
+ h2 = Hoho(hoho=hohoval, counter=5)
+ session.add(h2)
+ session.flush()
+ eq_(h2.hoho, hohoval)
+ eq_(h2.counter, 5)
+
+
def test_insert_nopostfetch(self):
default_t, Hoho = self.tables.default_t, self.classes.Hoho
diff --git a/test/orm/test_update_delete.py b/test/orm/test_update_delete.py
index 6915ac8a2..ac94fde2f 100644
--- a/test/orm/test_update_delete.py
+++ b/test/orm/test_update_delete.py
@@ -545,12 +545,14 @@ class UpdateDeleteFromTest(fixtures.MappedTest):
def define_tables(cls, metadata):
Table('users', metadata,
Column('id', Integer, primary_key=True),
+ Column('samename', String(10)),
)
Table('documents', metadata,
Column('id', Integer, primary_key=True),
Column('user_id', None, ForeignKey('users.id')),
Column('title', String(32)),
- Column('flag', Boolean)
+ Column('flag', Boolean),
+ Column('samename', String(10)),
)
@classmethod
@@ -659,6 +661,34 @@ class UpdateDeleteFromTest(fixtures.MappedTest):
])
)
+ @testing.only_on('mysql', 'Multi table update')
+ def test_update_from_multitable_same_names(self):
+ Document = self.classes.Document
+ User = self.classes.User
+
+ s = Session()
+
+ s.query(Document).\
+ filter(User.id == Document.user_id).\
+ filter(User.id == 2).update({
+ Document.samename: 'd_samename',
+ User.samename: 'u_samename'
+ }
+ )
+ eq_(
+ s.query(User.id, Document.samename, User.samename).
+ filter(User.id == Document.user_id).
+ order_by(User.id).all(),
+ [
+ (1, None, None),
+ (1, None, None),
+ (2, 'd_samename', 'u_samename'),
+ (2, 'd_samename', 'u_samename'),
+ (3, None, None),
+ (3, None, None),
+ ]
+ )
+
class ExpressionUpdateTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
@@ -786,3 +816,5 @@ class InheritTest(fixtures.DeclarativeMappedTest):
set(s.query(Person.name, Engineer.engineer_name)),
set([('e1', 'e1', ), ('e22', 'e55')])
)
+
+
diff --git a/test/orm/test_utils.py b/test/orm/test_utils.py
index d15881075..ae225ad92 100644
--- a/test/orm/test_utils.py
+++ b/test/orm/test_utils.py
@@ -5,27 +5,31 @@ from sqlalchemy import util
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import Table
-from sqlalchemy.orm import aliased, with_polymorphic
-from sqlalchemy.orm import mapper, create_session
+from sqlalchemy.orm import aliased, with_polymorphic, synonym
+from sqlalchemy.orm import mapper, create_session, Session
from sqlalchemy.testing import fixtures
from test.orm import _fixtures
from sqlalchemy.testing import eq_, is_
-from sqlalchemy.orm.util import PathRegistry
+from sqlalchemy.orm.path_registry import PathRegistry, RootRegistry
from sqlalchemy import inspect
+from sqlalchemy.ext.hybrid import hybrid_property, hybrid_method
+from sqlalchemy.testing import AssertsCompiledSQL
-class AliasedClassTest(fixtures.TestBase):
- def point_map(self, cls):
+class AliasedClassTest(fixtures.TestBase, AssertsCompiledSQL):
+ __dialect__ = 'default'
+
+ def _fixture(self, cls, properties={}):
table = Table('point', MetaData(),
Column('id', Integer(), primary_key=True),
Column('x', Integer),
Column('y', Integer))
- mapper(cls, table)
+ mapper(cls, table, properties=properties)
return table
def test_simple(self):
class Point(object):
pass
- table = self.point_map(Point)
+ table = self._fixture(Point)
alias = aliased(Point)
@@ -36,48 +40,51 @@ class AliasedClassTest(fixtures.TestBase):
assert Point.id.__clause_element__().table is table
assert alias.id.__clause_element__().table is not table
- def test_notcallable(self):
+ def test_not_instantiatable(self):
class Point(object):
pass
- table = self.point_map(Point)
+ table = self._fixture(Point)
alias = aliased(Point)
assert_raises(TypeError, alias)
- def test_instancemethods(self):
+ def test_instancemethod(self):
class Point(object):
def zero(self):
self.x, self.y = 0, 0
- table = self.point_map(Point)
+ table = self._fixture(Point)
alias = aliased(Point)
assert Point.zero
+ # TODO: I don't quite understand this
+ # still
if util.py2k:
- # TODO: what is this testing ??
assert not getattr(alias, 'zero')
+ else:
+ assert getattr(alias, 'zero')
- def test_classmethods(self):
+ def test_classmethod(self):
class Point(object):
@classmethod
def max_x(cls):
return 100
- table = self.point_map(Point)
+ table = self._fixture(Point)
alias = aliased(Point)
assert Point.max_x
assert alias.max_x
- assert Point.max_x() == alias.max_x()
+ assert Point.max_x() == alias.max_x() == 100
- def test_simpleproperties(self):
+ def test_simple_property(self):
class Point(object):
@property
def max_x(self):
return 100
- table = self.point_map(Point)
+ table = self._fixture(Point)
alias = aliased(Point)
assert Point.max_x
@@ -86,7 +93,6 @@ class AliasedClassTest(fixtures.TestBase):
assert Point.max_x is alias.max_x
def test_descriptors(self):
- """Tortured..."""
class descriptor(object):
def __init__(self, fn):
@@ -105,7 +111,7 @@ class AliasedClassTest(fixtures.TestBase):
def thing(self, arg):
return arg.center
- table = self.point_map(Point)
+ table = self._fixture(Point)
alias = aliased(Point)
assert Point.thing != (0, 0)
@@ -115,74 +121,106 @@ class AliasedClassTest(fixtures.TestBase):
assert alias.thing != (0, 0)
assert alias.thing.method() == 'method'
- def test_hybrid_descriptors(self):
+ def _assert_has_table(self, expr, table):
from sqlalchemy import Column # override testlib's override
- import types
-
- class MethodDescriptor(object):
- def __init__(self, func):
- self.func = func
- def __get__(self, instance, owner):
- if instance is None:
- if util.py2k:
- args = (self.func, owner, owner.__class__)
- else:
- args = (self.func, owner)
- else:
- if util.py2k:
- args = (self.func, instance, owner)
- else:
- args = (self.func, instance)
- return types.MethodType(*args)
-
- class PropertyDescriptor(object):
- def __init__(self, fget, fset, fdel):
- self.fget = fget
- self.fset = fset
- self.fdel = fdel
- def __get__(self, instance, owner):
- if instance is None:
- return self.fget(owner)
- else:
- return self.fget(instance)
- def __set__(self, instance, value):
- self.fset(instance, value)
- def __delete__(self, instance):
- self.fdel(instance)
- hybrid = MethodDescriptor
- def hybrid_property(fget, fset=None, fdel=None):
- return PropertyDescriptor(fget, fset, fdel)
-
- def assert_table(expr, table):
- for child in expr.get_children():
- if isinstance(child, Column):
- assert child.table is table
+ for child in expr.get_children():
+ if isinstance(child, Column):
+ assert child.table is table
+ def test_hybrid_descriptor_one(self):
class Point(object):
def __init__(self, x, y):
self.x, self.y = x, y
- @hybrid
+
+ @hybrid_method
def left_of(self, other):
return self.x < other.x
- double_x = hybrid_property(lambda self: self.x * 2)
+ self._fixture(Point)
+ alias = aliased(Point)
+ sess = Session()
+
+ self.assert_compile(
+ sess.query(alias).filter(alias.left_of(Point)),
+ "SELECT point_1.id AS point_1_id, point_1.x AS point_1_x, "
+ "point_1.y AS point_1_y FROM point AS point_1, point "
+ "WHERE point_1.x < point.x"
+ )
+
+ def test_hybrid_descriptor_two(self):
+ class Point(object):
+ def __init__(self, x, y):
+ self.x, self.y = x, y
+
+ @hybrid_property
+ def double_x(self):
+ return self.x * 2
- table = self.point_map(Point)
+ self._fixture(Point)
alias = aliased(Point)
- alias_table = alias.x.__clause_element__().table
- assert table is not alias_table
- p1 = Point(-10, -10)
- p2 = Point(20, 20)
+ eq_(str(Point.double_x), "point.x * :x_1")
+ eq_(str(alias.double_x), "point_1.x * :x_1")
- assert p1.left_of(p2)
- assert p1.double_x == -20
+ sess = Session()
+
+ self.assert_compile(
+ sess.query(alias).filter(alias.double_x > Point.x),
+ "SELECT point_1.id AS point_1_id, point_1.x AS point_1_x, "
+ "point_1.y AS point_1_y FROM point AS point_1, point "
+ "WHERE point_1.x * :x_1 > point.x"
+ )
+
+ def test_hybrid_descriptor_three(self):
+ class Point(object):
+ def __init__(self, x, y):
+ self.x, self.y = x, y
- assert_table(Point.double_x, table)
- assert_table(alias.double_x, alias_table)
+ @hybrid_property
+ def x_alone(self):
+ return self.x
- assert_table(Point.left_of(p2), table)
- assert_table(alias.left_of(p2), alias_table)
+ self._fixture(Point)
+ alias = aliased(Point)
+
+ eq_(str(Point.x_alone), "Point.x")
+ eq_(str(alias.x_alone), "AliasedClass_Point.x")
+
+ assert Point.x_alone is Point.x
+
+ eq_(str(alias.x_alone == alias.x), "point_1.x = point_1.x")
+
+ a2 = aliased(Point)
+ eq_(str(a2.x_alone == alias.x), "point_1.x = point_2.x")
+
+ sess = Session()
+
+ self.assert_compile(
+ sess.query(alias).filter(alias.x_alone > Point.x),
+ "SELECT point_1.id AS point_1_id, point_1.x AS point_1_x, "
+ "point_1.y AS point_1_y FROM point AS point_1, point "
+ "WHERE point_1.x > point.x"
+ )
+
+ def test_proxy_descriptor_one(self):
+ class Point(object):
+ def __init__(self, x, y):
+ self.x, self.y = x, y
+
+ self._fixture(Point, properties={
+ 'x_syn': synonym("x")
+ })
+ alias = aliased(Point)
+
+ eq_(str(Point.x_syn), "Point.x_syn")
+ eq_(str(alias.x_syn), "AliasedClass_Point.x_syn")
+
+ sess = Session()
+ self.assert_compile(
+ sess.query(alias.x_syn).filter(alias.x_syn > Point.x_syn),
+ "SELECT point_1.x AS point_1_x FROM point AS point_1, point "
+ "WHERE point_1.x > point.x"
+ )
class IdentityKeyTest(_fixtures.FixtureTest):
run_inserts = None
@@ -241,12 +279,12 @@ class PathRegistryTest(_fixtures.FixtureTest):
def test_root_registry(self):
umapper = inspect(self.classes.User)
is_(
- orm_util.RootRegistry()[umapper],
+ RootRegistry()[umapper],
umapper._path_registry
)
eq_(
- orm_util.RootRegistry()[umapper],
- orm_util.PathRegistry.coerce((umapper,))
+ RootRegistry()[umapper],
+ PathRegistry.coerce((umapper,))
)
def test_expand(self):
diff --git a/test/orm/test_validators.py b/test/orm/test_validators.py
new file mode 100644
index 000000000..417554f46
--- /dev/null
+++ b/test/orm/test_validators.py
@@ -0,0 +1,281 @@
+from test.orm import _fixtures
+from sqlalchemy.testing import fixtures, assert_raises, eq_, ne_
+from sqlalchemy.orm import mapper, Session, validates, relationship
+from sqlalchemy.testing.mock import Mock, call
+
+
+class ValidatorTest(_fixtures.FixtureTest):
+ def test_scalar(self):
+ users = self.tables.users
+ canary = Mock()
+
+ class User(fixtures.ComparableEntity):
+ @validates('name')
+ def validate_name(self, key, name):
+ canary(key, name)
+ ne_(name, 'fred')
+ return name + ' modified'
+
+ mapper(User, users)
+ sess = Session()
+ u1 = User(name='ed')
+ eq_(u1.name, 'ed modified')
+ assert_raises(AssertionError, setattr, u1, "name", "fred")
+ eq_(u1.name, 'ed modified')
+ eq_(canary.mock_calls, [call('name', 'ed'), call('name', 'fred')])
+
+ sess.add(u1)
+ sess.commit()
+
+ eq_(
+ sess.query(User).filter_by(name='ed modified').one(),
+ User(name='ed')
+ )
+
+ def test_collection(self):
+ users, addresses, Address = (self.tables.users,
+ self.tables.addresses,
+ self.classes.Address)
+
+ canary = Mock()
+ class User(fixtures.ComparableEntity):
+ @validates('addresses')
+ def validate_address(self, key, ad):
+ canary(key, ad)
+ assert '@' in ad.email_address
+ return ad
+
+ mapper(User, users, properties={
+ 'addresses': relationship(Address)}
+ )
+ mapper(Address, addresses)
+ sess = Session()
+ u1 = User(name='edward')
+ a0 = Address(email_address='noemail')
+ assert_raises(AssertionError, u1.addresses.append, a0)
+ a1 = Address(id=15, email_address='foo@bar.com')
+ u1.addresses.append(a1)
+ eq_(canary.mock_calls, [call('addresses', a0), call('addresses', a1)])
+ sess.add(u1)
+ sess.commit()
+
+ eq_(
+ sess.query(User).filter_by(name='edward').one(),
+ User(name='edward', addresses=[Address(email_address='foo@bar.com')])
+ )
+
+ def test_validators_dict(self):
+ users, addresses, Address = (self.tables.users,
+ self.tables.addresses,
+ self.classes.Address)
+
+ class User(fixtures.ComparableEntity):
+
+ @validates('name')
+ def validate_name(self, key, name):
+ ne_(name, 'fred')
+ return name + ' modified'
+
+ @validates('addresses')
+ def validate_address(self, key, ad):
+ assert '@' in ad.email_address
+ return ad
+
+ def simple_function(self, key, value):
+ return key, value
+
+ u_m = mapper(User, users, properties={
+ 'addresses': relationship(Address)
+ }
+ )
+ mapper(Address, addresses)
+
+ eq_(
+ dict((k, v[0].__name__) for k, v in list(u_m.validators.items())),
+ {'name': 'validate_name',
+ 'addresses': 'validate_address'}
+ )
+
+ def test_validator_w_removes(self):
+ users, addresses, Address = (self.tables.users,
+ self.tables.addresses,
+ self.classes.Address)
+ canary = Mock()
+ class User(fixtures.ComparableEntity):
+
+ @validates('name', include_removes=True)
+ def validate_name(self, key, item, remove):
+ canary(key, item, remove)
+ return item
+
+ @validates('addresses', include_removes=True)
+ def validate_address(self, key, item, remove):
+ canary(key, item, remove)
+ return item
+
+ mapper(User, users, properties={
+ 'addresses': relationship(Address)
+ })
+ mapper(Address, addresses)
+
+ u1 = User()
+ u1.name = "ed"
+ u1.name = "mary"
+ del u1.name
+ a1, a2, a3 = Address(), Address(), Address()
+ u1.addresses.append(a1)
+ u1.addresses.remove(a1)
+ u1.addresses = [a1, a2]
+ u1.addresses = [a2, a3]
+
+ eq_(canary.mock_calls, [
+ call('name', 'ed', False),
+ call('name', 'mary', False),
+ call('name', 'mary', True),
+ # append a1
+ call('addresses', a1, False),
+ # remove a1
+ call('addresses', a1, True),
+ # set to [a1, a2] - this is two appends
+ call('addresses', a1, False), call('addresses', a2, False),
+ # set to [a2, a3] - this is a remove of a1,
+ # append of a3. the appends are first.
+ call('addresses', a3, False),
+ call('addresses', a1, True),
+ ]
+ )
+
+ def test_validator_wo_backrefs_wo_removes(self):
+ self._test_validator_backrefs(False, False)
+
+ def test_validator_wo_backrefs_w_removes(self):
+ self._test_validator_backrefs(False, True)
+
+ def test_validator_w_backrefs_wo_removes(self):
+ self._test_validator_backrefs(True, False)
+
+ def test_validator_w_backrefs_w_removes(self):
+ self._test_validator_backrefs(True, True)
+
+ def _test_validator_backrefs(self, include_backrefs, include_removes):
+ users, addresses = (self.tables.users,
+ self.tables.addresses)
+ canary = Mock()
+ class User(fixtures.ComparableEntity):
+
+ if include_removes:
+ @validates('addresses', include_removes=True,
+ include_backrefs=include_backrefs)
+ def validate_address(self, key, item, remove):
+ canary(key, item, remove)
+ return item
+ else:
+ @validates('addresses', include_removes=False,
+ include_backrefs=include_backrefs)
+ def validate_address(self, key, item):
+ canary(key, item)
+ return item
+
+ class Address(fixtures.ComparableEntity):
+ if include_removes:
+ @validates('user', include_backrefs=include_backrefs,
+ include_removes=True)
+ def validate_user(self, key, item, remove):
+ canary(key, item, remove)
+ return item
+ else:
+ @validates('user', include_backrefs=include_backrefs)
+ def validate_user(self, key, item):
+ canary(key, item)
+ return item
+
+ mapper(User, users, properties={
+ 'addresses': relationship(Address, backref="user")
+ })
+ mapper(Address, addresses)
+
+ u1 = User()
+ u2 = User()
+ a1, a2 = Address(), Address()
+
+ # 3 append/set, two removes
+ u1.addresses.append(a1)
+ u1.addresses.append(a2)
+ a2.user = u2
+ del a1.user
+ u2.addresses.remove(a2)
+
+ # copy, so that generation of the
+ # comparisons don't get caught
+ calls = list(canary.mock_calls)
+
+ if include_backrefs:
+ if include_removes:
+ eq_(calls,
+ [
+ # append #1
+ call('addresses', Address(), False),
+
+ # backref for append
+ call('user', User(addresses=[]), False),
+
+ # append #2
+ call('addresses', Address(user=None), False),
+
+ # backref for append
+ call('user', User(addresses=[]), False),
+
+ # assign a2.user = u2
+ call('user', User(addresses=[]), False),
+
+ # backref for u1.addresses.remove(a2)
+ call('addresses', Address(user=None), True),
+
+ # backref for u2.addresses.append(a2)
+ call('addresses', Address(user=None), False),
+
+ # del a1.user
+ call('user', User(addresses=[]), True),
+
+ # backref for u1.addresses.remove(a1)
+ call('addresses', Address(), True),
+
+ # u2.addresses.remove(a2)
+ call('addresses', Address(user=None), True),
+
+ # backref for a2.user = None
+ call('user', None, False)
+ ]
+ )
+ else:
+ eq_(calls,
+ [
+ call('addresses', Address()),
+ call('user', User(addresses=[])),
+ call('addresses', Address(user=None)),
+ call('user', User(addresses=[])),
+ call('user', User(addresses=[])),
+ call('addresses', Address(user=None)),
+ call('user', None)
+ ]
+ )
+ else:
+ if include_removes:
+ eq_(calls,
+ [
+ call('addresses', Address(), False),
+ call('addresses', Address(user=None), False),
+ call('user', User(addresses=[]), False),
+ call('user', User(addresses=[]), True),
+ call('addresses', Address(user=None), True)
+ ]
+
+ )
+ else:
+ eq_(calls,
+ [
+ call('addresses', Address()),
+ call('addresses', Address(user=None)),
+ call('user', User(addresses=[]))
+ ]
+ )
diff --git a/test/orm/test_versioning.py b/test/orm/test_versioning.py
index abb08c536..9379543ed 100644
--- a/test/orm/test_versioning.py
+++ b/test/orm/test_versioning.py
@@ -11,7 +11,7 @@ from sqlalchemy.orm import mapper, relationship, Session, \
from sqlalchemy.testing import eq_, ne_, assert_raises, assert_raises_message
from sqlalchemy.testing import fixtures
from test.orm import _fixtures
-from sqlalchemy.testing import fixtures
+from sqlalchemy.testing.assertsql import AllOf, CompiledSQL
_uuids = [
@@ -461,12 +461,12 @@ class AlternateGeneratorTest(fixtures.MappedTest):
cls.classes.P)
mapper(P, p, version_id_col=p.c.version_id,
- version_id_generator=lambda x:make_uuid(),
+ version_id_generator=lambda x: make_uuid(),
properties={
- 'c':relationship(C, uselist=False, cascade='all, delete-orphan')
+ 'c': relationship(C, uselist=False, cascade='all, delete-orphan')
})
mapper(C, c, version_id_col=c.c.version_id,
- version_id_generator=lambda x:make_uuid(),
+ version_id_generator=lambda x: make_uuid(),
)
@testing.emits_warning_on('+zxjdbc', r'.*does not support updated rowcount')
@@ -643,3 +643,276 @@ class InheritanceTwoVersionIdsTest(fixtures.MappedTest):
mapper,
Sub, sub, inherits=Base,
version_id_col=sub.c.version_id)
+
+
+class ServerVersioningTest(fixtures.MappedTest):
+ run_define_tables = 'each'
+
+ @classmethod
+ def define_tables(cls, metadata):
+ from sqlalchemy.sql import ColumnElement
+ from sqlalchemy.ext.compiler import compiles
+ import itertools
+
+ counter = itertools.count(1)
+
+ class IncDefault(ColumnElement):
+ pass
+
+ @compiles(IncDefault)
+ def compile(element, compiler, **kw):
+ # cache the counter value on the statement
+ # itself so the assertsql system gets the same
+ # value when it compiles the statement a second time
+ stmt = compiler.statement
+ if hasattr(stmt, "_counter"):
+ return stmt._counter
+ else:
+ stmt._counter = str(next(counter))
+ return stmt._counter
+
+ Table('version_table', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('version_id', Integer, nullable=False,
+ default=IncDefault(), onupdate=IncDefault()),
+ Column('value', String(40), nullable=False))
+
+ @classmethod
+ def setup_classes(cls):
+ class Foo(cls.Basic):
+ pass
+ class Bar(cls.Basic):
+ pass
+
+ def _fixture(self, expire_on_commit=True):
+ Foo, version_table = self.classes.Foo, self.tables.version_table
+
+ mapper(Foo, version_table,
+ version_id_col=version_table.c.version_id,
+ version_id_generator=False,
+ )
+
+ s1 = Session(expire_on_commit=expire_on_commit)
+ return s1
+
+ def test_insert_col(self):
+ sess = self._fixture()
+
+ f1 = self.classes.Foo(value='f1')
+ sess.add(f1)
+
+ statements = [
+ # note that the assertsql tests the rule against
+ # "default" - on a "returning" backend, the statement
+ # includes "RETURNING"
+ CompiledSQL(
+ "INSERT INTO version_table (version_id, value) "
+ "VALUES (1, :value)",
+ lambda ctx: [{'value': 'f1'}]
+ )
+ ]
+ if not testing.db.dialect.implicit_returning:
+ # DBs without implicit returning, we must immediately
+ # SELECT for the new version id
+ statements.append(
+ CompiledSQL(
+ "SELECT version_table.version_id AS version_table_version_id "
+ "FROM version_table WHERE version_table.id = :param_1",
+ lambda ctx: [{"param_1": 1}]
+ )
+ )
+ self.assert_sql_execution(testing.db, sess.flush, *statements)
+
+ def test_update_col(self):
+ sess = self._fixture()
+
+ f1 = self.classes.Foo(value='f1')
+ sess.add(f1)
+ sess.flush()
+
+ f1.value = 'f2'
+
+ statements = [
+ # note that the assertsql tests the rule against
+ # "default" - on a "returning" backend, the statement
+ # includes "RETURNING"
+ CompiledSQL(
+ "UPDATE version_table SET version_id=2, value=:value "
+ "WHERE version_table.id = :version_table_id AND "
+ "version_table.version_id = :version_table_version_id",
+ lambda ctx: [{"version_table_id": 1,
+ "version_table_version_id": 1, "value": "f2"}]
+ )
+ ]
+ if not testing.db.dialect.implicit_returning:
+ # DBs without implicit returning, we must immediately
+ # SELECT for the new version id
+ statements.append(
+ CompiledSQL(
+ "SELECT version_table.version_id AS version_table_version_id "
+ "FROM version_table WHERE version_table.id = :param_1",
+ lambda ctx: [{"param_1": 1}]
+ )
+ )
+ self.assert_sql_execution(testing.db, sess.flush, *statements)
+
+
+ def test_delete_col(self):
+ sess = self._fixture()
+
+ f1 = self.classes.Foo(value='f1')
+ sess.add(f1)
+ sess.flush()
+
+ sess.delete(f1)
+
+ statements = [
+ # note that the assertsql tests the rule against
+ # "default" - on a "returning" backend, the statement
+ # includes "RETURNING"
+ CompiledSQL(
+ "DELETE FROM version_table "
+ "WHERE version_table.id = :id AND "
+ "version_table.version_id = :version_id",
+ lambda ctx: [{"id": 1, "version_id": 1}]
+ )
+ ]
+ self.assert_sql_execution(testing.db, sess.flush, *statements)
+
+ def test_concurrent_mod_err_expire_on_commit(self):
+ sess = self._fixture()
+
+ f1 = self.classes.Foo(value='f1')
+ sess.add(f1)
+ sess.commit()
+
+ f1.value
+
+ s2 = Session()
+ f2 = s2.query(self.classes.Foo).first()
+ f2.value = 'f2'
+ s2.commit()
+
+ f1.value = 'f3'
+
+ assert_raises_message(
+ orm.exc.StaleDataError,
+ r"UPDATE statement on table 'version_table' expected to "
+ r"update 1 row\(s\); 0 were matched.",
+ sess.commit
+ )
+
+ def test_concurrent_mod_err_noexpire_on_commit(self):
+ sess = self._fixture(expire_on_commit=False)
+
+ f1 = self.classes.Foo(value='f1')
+ sess.add(f1)
+ sess.commit()
+
+ # here, we're not expired overall, so no load occurs and we
+ # stay without a version id, unless we've emitted
+ # a SELECT for it within the flush.
+ f1.value
+
+ s2 = Session(expire_on_commit=False)
+ f2 = s2.query(self.classes.Foo).first()
+ f2.value = 'f2'
+ s2.commit()
+
+ f1.value = 'f3'
+
+ assert_raises_message(
+ orm.exc.StaleDataError,
+ r"UPDATE statement on table 'version_table' expected to "
+ r"update 1 row\(s\); 0 were matched.",
+ sess.commit
+ )
+
+class ManualVersionTest(fixtures.MappedTest):
+ run_define_tables = 'each'
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table("a", metadata,
+ Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('data', String(30)),
+ Column('vid', Integer)
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class A(cls.Basic):
+ pass
+
+
+ @classmethod
+ def setup_mappers(cls):
+ mapper(cls.classes.A, cls.tables.a,
+ version_id_col=cls.tables.a.c.vid,
+ version_id_generator=False)
+
+ def test_insert(self):
+ sess = Session()
+ a1 = self.classes.A()
+
+ a1.vid = 1
+ sess.add(a1)
+ sess.commit()
+
+ eq_(a1.vid, 1)
+
+ def test_update(self):
+ sess = Session()
+ a1 = self.classes.A()
+
+ a1.vid = 1
+ a1.data = 'd1'
+ sess.add(a1)
+ sess.commit()
+
+ a1.vid = 2
+ a1.data = 'd2'
+
+ sess.commit()
+
+ eq_(a1.vid, 2)
+
+ def test_update_concurrent_check(self):
+ sess = Session()
+ a1 = self.classes.A()
+
+ a1.vid = 1
+ a1.data = 'd1'
+ sess.add(a1)
+ sess.commit()
+
+ a1.vid = 2
+ sess.execute(self.tables.a.update().values(vid=3))
+ a1.data = 'd2'
+ assert_raises(
+ orm_exc.StaleDataError,
+ sess.commit
+ )
+
+ def test_update_version_conditional(self):
+ sess = Session()
+ a1 = self.classes.A()
+
+ a1.vid = 1
+ a1.data = 'd1'
+ sess.add(a1)
+ sess.commit()
+
+ # change the data and UPDATE without
+ # incrementing version id
+ a1.data = 'd2'
+ sess.commit()
+
+ eq_(a1.vid, 1)
+
+ a1.data = 'd3'
+ a1.vid = 2
+ sess.commit()
+
+ eq_(a1.vid, 2) \ No newline at end of file
diff --git a/test/perf/orm2010.py b/test/perf/orm2010.py
index 937e6ddff..8036ac268 100644
--- a/test/perf/orm2010.py
+++ b/test/perf/orm2010.py
@@ -1,7 +1,7 @@
-# monkeypatch the "cdecimal" library in.
-# this is a drop-in replacement for "decimal".
-# All SQLA versions support cdecimal except
-# for the MS-SQL dialect, which is fixed in 0.7
+import warnings
+warnings.filterwarnings("ignore", r".*Decimal objects natively")
+
+# speed up cdecimal if available
try:
import cdecimal
import sys
@@ -13,11 +13,7 @@ from sqlalchemy import __version__
from sqlalchemy import Column, Integer, create_engine, ForeignKey, \
String, Numeric
-if __version__ < "0.6":
- from sqlalchemy.orm.session import Session
- from sqlalchemy.orm import relation as relationship
-else:
- from sqlalchemy.orm import Session, relationship
+from sqlalchemy.orm import Session, relationship
from sqlalchemy.ext.declarative import declarative_base
import random
@@ -33,7 +29,7 @@ class Employee(Base):
name = Column(String(100), nullable=False)
type = Column(String(50), nullable=False)
- __mapper_args__ = {'polymorphic_on':type}
+ __mapper_args__ = {'polymorphic_on': type}
class Boss(Employee):
__tablename__ = 'boss'
@@ -41,7 +37,7 @@ class Boss(Employee):
id = Column(Integer, ForeignKey('employee.id'), primary_key=True)
golf_average = Column(Numeric)
- __mapper_args__ = {'polymorphic_identity':'boss'}
+ __mapper_args__ = {'polymorphic_identity': 'boss'}
class Grunt(Employee):
__tablename__ = 'grunt'
@@ -51,32 +47,10 @@ class Grunt(Employee):
employer_id = Column(Integer, ForeignKey('boss.id'))
- # Configure an 'employer' relationship, where Grunt references
- # Boss. This is a joined-table subclass to subclass relationship,
- # which is a less typical case.
-
- # In 0.7, "Boss.id" is the "id" column of "boss", as would be expected.
- if __version__ >= "0.7":
- employer = relationship("Boss", backref="employees",
- primaryjoin=Boss.id==employer_id)
-
- # Prior to 0.7, "Boss.id" is the "id" column of "employee".
- # Long story. So we hardwire the relationship against the "id"
- # column of Boss' table.
- elif __version__ >= "0.6":
- employer = relationship("Boss", backref="employees",
- primaryjoin=Boss.__table__.c.id==employer_id)
-
- # In 0.5, the many-to-one loader wouldn't recognize the above as a
- # simple "identity map" fetch. So to give 0.5 a chance to emit
- # the same amount of SQL as 0.6, we hardwire the relationship against
- # "employee.id" to work around the bug.
- else:
- employer = relationship("Boss", backref="employees",
- primaryjoin=Employee.__table__.c.id==employer_id,
- foreign_keys=employer_id)
+ employer = relationship("Boss", backref="employees",
+ primaryjoin=Boss.id == employer_id)
- __mapper_args__ = {'polymorphic_identity':'grunt'}
+ __mapper_args__ = {'polymorphic_identity': 'grunt'}
if os.path.exists('orm2010.db'):
os.remove('orm2010.db')
@@ -88,101 +62,122 @@ Base.metadata.create_all(engine)
sess = Session(engine)
-def runit():
- # create 1000 Boss objects.
+def runit(status, factor=1):
+ num_bosses = 100 * factor
+ num_grunts = num_bosses * 100
+
bosses = [
Boss(
name="Boss %d" % i,
golf_average=Decimal(random.randint(40, 150))
)
- for i in range(1000)
+ for i in range(num_bosses)
]
sess.add_all(bosses)
+ status("Added %d boss objects" % num_bosses)
-
- # create 10000 Grunt objects.
grunts = [
Grunt(
name="Grunt %d" % i,
savings=Decimal(random.randint(5000000, 15000000) / 100)
)
- for i in range(10000)
+ for i in range(num_grunts)
]
+ status("Added %d grunt objects" % num_grunts)
- # Assign each Grunt a Boss. Look them up in the DB
- # to simulate a little bit of two-way activity with the
- # DB while we populate. Autoflush occurs on each query.
- # In 0.7 executemany() is used for all the "boss" and "grunt"
- # tables since priamry key fetching is not needed.
while grunts:
+ # this doesn't associate grunts with bosses evenly,
+ # just associates lots of them with a relatively small
+ # handful of bosses
+ batch_size = 100
+ batch_num = (num_grunts - len(grunts)) / batch_size
boss = sess.query(Boss).\
- filter_by(name="Boss %d" % (101 - len(grunts) / 100)).\
+ filter_by(name="Boss %d" % batch_num).\
first()
- for grunt in grunts[0:100]:
+ for grunt in grunts[0:batch_size]:
grunt.employer = boss
- grunts = grunts[100:]
+ grunts = grunts[batch_size:]
sess.commit()
+ status("Associated grunts w/ bosses and committed")
+
+ # do some heavier reading
+ for i in range(int(round(factor / 2.0))):
+ status("Heavy query run #%d" % (i + 1))
- report = []
+ report = []
- # load all the Grunts, print a report with their name, stats,
- # and their bosses' stats.
- for grunt in sess.query(Grunt):
- # here, the overhead of a many-to-one fetch of
- # "grunt.employer" directly from the identity map
- # is less than half of that of 0.6.
- report.append((
- grunt.name,
- grunt.savings,
- grunt.employer.name,
- grunt.employer.golf_average
- ))
+ # load all the Grunts, print a report with their name, stats,
+ # and their bosses' stats.
+ for grunt in sess.query(Grunt):
+ report.append((
+ grunt.name,
+ grunt.savings,
+ grunt.employer.name,
+ grunt.employer.golf_average
+ ))
-import cProfile, os, pstats
+ sess.close() # close out the session
-filename = "orm2010.profile"
-cProfile.runctx('runit()', globals(), locals(), filename)
-stats = pstats.Stats(filename)
+def run_with_profile(runsnake=False, dump=False):
+ import cProfile
+ import pstats
+ filename = "orm2010.profile"
-counts_by_methname = dict((key[2], stats.stats[key][0]) for key in stats.stats)
+ def status(msg):
+ print(msg)
-print("SQLA Version: %s" % __version__)
-print("Total calls %d" % stats.total_calls)
-print("Total cpu seconds: %.2f" % stats.total_tt)
-print('Total execute calls: %d' \
- % counts_by_methname["<method 'execute' of 'sqlite3.Cursor' "
- "objects>"])
-print('Total executemany calls: %d' \
- % counts_by_methname.get("<method 'executemany' of 'sqlite3.Cursor' "
- "objects>", 0))
+ cProfile.runctx('runit(status)', globals(), locals(), filename)
+ stats = pstats.Stats(filename)
-#stats.sort_stats('time', 'calls')
-#stats.print_stats()
-os.system("runsnake %s" % filename)
+ counts_by_methname = dict((key[2], stats.stats[key][0]) for key in stats.stats)
-# SQLA Version: 0.7b1
-# Total calls 4956750
-# Total execute calls: 11201
-# Total executemany calls: 101
+ print("SQLA Version: %s" % __version__)
+ print("Total calls %d" % stats.total_calls)
+ print("Total cpu seconds: %.2f" % stats.total_tt)
+ print('Total execute calls: %d' \
+ % counts_by_methname["<method 'execute' of 'sqlite3.Cursor' "
+ "objects>"])
+ print('Total executemany calls: %d' \
+ % counts_by_methname.get("<method 'executemany' of 'sqlite3.Cursor' "
+ "objects>", 0))
-# SQLA Version: 0.6.6
-# Total calls 7963214
-# Total execute calls: 22201
-# Total executemany calls: 0
+ if dump:
+ stats.sort_stats('time', 'calls')
+ stats.print_stats()
-# SQLA Version: 0.5.8
-# Total calls 10556480
-# Total execute calls: 22201
-# Total executemany calls: 0
+ if runsnake:
+ os.system("runsnake %s" % filename)
+def run_with_time():
+ import time
+ now = time.time()
+ def status(msg):
+ print("%d - %s" % (time.time() - now, msg))
+ runit(status, 10)
+ print("Total time: %d" % (time.time() - now))
+if __name__ == '__main__':
+ import argparse
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--profile', action='store_true',
+ help='run shorter test suite w/ cprofilng')
+ parser.add_argument('--dump', action='store_true',
+ help='dump full call profile (implies --profile)')
+ parser.add_argument('--runsnake', action='store_true',
+ help='invoke runsnakerun (implies --profile)')
+ args = parser.parse_args()
+ args.profile = args.profile or args.dump or args.runsnake
+ if args.profile:
+ run_with_profile(runsnake=args.runsnake, dump=args.dump)
+ else:
+ run_with_time()
diff --git a/test/profiles.txt b/test/profiles.txt
index c2ea3e959..4f833ef1c 100644
--- a/test/profiles.txt
+++ b/test/profiles.txt
@@ -1,21 +1,22 @@
# /Users/classic/dev/sqlalchemy/test/profiles.txt
# This file is written out on a per-environment basis.
-# For each test in aaa_profiling, the corresponding function and
+# For each test in aaa_profiling, the corresponding function and
# environment is located within this file. If it doesn't exist,
# the test is skipped.
-# If a callcount does exist, it is compared to what we received.
+# If a callcount does exist, it is compared to what we received.
# assertions are raised if the counts do not match.
-#
-# To add a new callcount test, apply the function_call_count
-# decorator and re-run the tests using the --write-profiles
+#
+# To add a new callcount test, apply the function_call_count
+# decorator and re-run the tests using the --write-profiles
# option - this file will be rewritten including the new count.
-#
+#
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_insert
test.aaa_profiling.test_compiler.CompileTest.test_insert 2.6_sqlite_pysqlite_nocextensions 72
test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_cextensions 72
test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_nocextensions 72
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_oursql_nocextensions 72
test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_oracle_cx_oracle_nocextensions 72
test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_cextensions 72
test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_nocextensions 72
@@ -23,8 +24,12 @@ test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_cex
test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_nocextensions 72
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.2_postgresql_psycopg2_nocextensions 74
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.2_sqlite_pysqlite_nocextensions 74
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_mysql_oursql_cextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_mysql_oursql_nocextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_oracle_cx_oracle_nocextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_postgresql_psycopg2_cextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_postgresql_psycopg2_nocextensions 74
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_sqlite_pysqlite_cextensions 76
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_sqlite_pysqlite_nocextensions 74
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_select
@@ -32,64 +37,75 @@ test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_sqlite_pysqlite_noc
test.aaa_profiling.test_compiler.CompileTest.test_select 2.6_sqlite_pysqlite_nocextensions 141
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_cextensions 141
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_nocextensions 141
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_oracle_cx_oracle_nocextensions 141
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_oursql_nocextensions 148
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_oracle_cx_oracle_nocextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_cextensions 141
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_nocextensions 141
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_cextensions 141
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_nocextensions 141
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.2_postgresql_psycopg2_nocextensions 151
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.2_sqlite_pysqlite_nocextensions 151
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_oracle_cx_oracle_nocextensions 153
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_postgresql_psycopg2_nocextensions 151
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_sqlite_pysqlite_nocextensions 151
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_mysql_oursql_cextensions 163
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_mysql_oursql_nocextensions 163
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_postgresql_psycopg2_cextensions 163
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_postgresql_psycopg2_nocextensions 163
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_sqlite_pysqlite_cextensions 163
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_select_labels
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.6_sqlite_pysqlite_nocextensions 175
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_cextensions 175
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_nocextensions 175
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_oursql_nocextensions 181
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_oracle_cx_oracle_nocextensions 175
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_cextensions 175
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_nocextensions 175
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_cextensions 175
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_nocextensions 175
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.2_postgresql_psycopg2_nocextensions 185
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.2_sqlite_pysqlite_nocextensions 185
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_oracle_cx_oracle_nocextensions 187
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_postgresql_psycopg2_nocextensions 185
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_sqlite_pysqlite_nocextensions 185
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_mysql_oursql_cextensions 196
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_mysql_oursql_nocextensions 196
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_postgresql_psycopg2_cextensions 196
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_postgresql_psycopg2_nocextensions 196
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_sqlite_pysqlite_cextensions 196
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_update
test.aaa_profiling.test_compiler.CompileTest.test_update 2.6_sqlite_pysqlite_nocextensions 75
test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_cextensions 75
test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_nocextensions 75
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_oursql_nocextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_oracle_cx_oracle_nocextensions 75
test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_cextensions 75
test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_nocextensions 75
test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_cextensions 75
test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_nocextensions 75
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.2_postgresql_psycopg2_nocextensions 75
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.2_sqlite_pysqlite_nocextensions 75
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_oracle_cx_oracle_nocextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_postgresql_psycopg2_nocextensions 75
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_sqlite_pysqlite_nocextensions 75
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_mysql_oursql_cextensions 80
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_mysql_oursql_nocextensions 80
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_postgresql_psycopg2_cextensions 80
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_postgresql_psycopg2_nocextensions 80
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_sqlite_pysqlite_cextensions 80
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.6_sqlite_pysqlite_nocextensions 137
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_cextensions 137
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_nocextensions 137
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_oracle_cx_oracle_nocextensions 137
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_cextensions 137
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_nocextensions 137
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_cextensions 137
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_nocextensions 137
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.2_postgresql_psycopg2_nocextensions 136
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.2_sqlite_pysqlite_nocextensions 136
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_oracle_cx_oracle_nocextensions 138
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_postgresql_psycopg2_nocextensions 136
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_sqlite_pysqlite_nocextensions 136
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_cextensions 149
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_nocextensions 149
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_oursql_nocextensions 149
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_oracle_cx_oracle_nocextensions 149
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_cextensions 149
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_nocextensions 149
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_cextensions 149
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_nocextensions 149
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_mysql_oursql_cextensions 151
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_mysql_oursql_nocextensions 151
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_postgresql_psycopg2_cextensions 151
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_postgresql_psycopg2_nocextensions 151
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_sqlite_pysqlite_cextensions 151
+
+# TEST: test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set
+
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_cextensions 4265
+
+# TEST: test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove
+
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_sqlite_pysqlite_cextensions 6525
# TEST: test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline
@@ -99,6 +115,9 @@ test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycop
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycopg2_nocextensions 51049
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_cextensions 30008
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_nocextensions 39025
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_postgresql_psycopg2_cextensions 32141
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_postgresql_psycopg2_nocextensions 41144
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_sqlite_pysqlite_cextensions 31190
# TEST: test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols
@@ -108,6 +127,9 @@ test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql_psycopg2_nocextensions 32835
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_cextensions 29812
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_nocextensions 32817
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_postgresql_psycopg2_cextensions 31858
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_postgresql_psycopg2_nocextensions 34861
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_sqlite_pysqlite_cextensions 30960
# TEST: test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity
@@ -122,7 +144,9 @@ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.2_postgresql_psycopg2_nocextensions 18987
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.2_sqlite_pysqlite_nocextensions 18987
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_oracle_cx_oracle_nocextensions 18987
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_postgresql_psycopg2_cextensions 18987
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_postgresql_psycopg2_nocextensions 18987
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_sqlite_pysqlite_cextensions 18987
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_sqlite_pysqlite_nocextensions 18987
# TEST: test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity
@@ -138,7 +162,9 @@ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.2_postgresql_psycopg2_nocextensions 121790
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.2_sqlite_pysqlite_nocextensions 121822
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_oracle_cx_oracle_nocextensions 130792
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_postgresql_psycopg2_cextensions 126077
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_postgresql_psycopg2_nocextensions 121822
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_sqlite_pysqlite_cextensions 164074
# TEST: test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks
@@ -152,7 +178,9 @@ test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_nocextensions 21790
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.2_postgresql_psycopg2_nocextensions 20424
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_oracle_cx_oracle_nocextensions 21244
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_postgresql_psycopg2_cextensions 20268
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_postgresql_psycopg2_nocextensions 20344
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_sqlite_pysqlite_cextensions 23404
# TEST: test.aaa_profiling.test_orm.MergeTest.test_merge_load
@@ -166,7 +194,9 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_cexten
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_nocextensions 1521
test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.2_postgresql_psycopg2_nocextensions 1332
test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_oracle_cx_oracle_nocextensions 1366
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_postgresql_psycopg2_cextensions 1358
test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_postgresql_psycopg2_nocextensions 1357
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_sqlite_pysqlite_cextensions 1598
# TEST: test.aaa_profiling.test_orm.MergeTest.test_merge_no_load
@@ -181,7 +211,9 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_noc
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.2_postgresql_psycopg2_nocextensions 127,19
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.2_sqlite_pysqlite_nocextensions 127,19
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_oracle_cx_oracle_nocextensions 134,19
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_postgresql_psycopg2_cextensions 132,20
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_postgresql_psycopg2_nocextensions 127,19
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_cextensions 134,19
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_nocextensions 127,19
# TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect
@@ -197,7 +229,9 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlit
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.2_postgresql_psycopg2_nocextensions 75
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.2_sqlite_pysqlite_nocextensions 75
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_oracle_cx_oracle_nocextensions 74
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_postgresql_psycopg2_cextensions 74
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_postgresql_psycopg2_nocextensions 74
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_sqlite_pysqlite_cextensions 74
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_sqlite_pysqlite_nocextensions 74
# TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect
@@ -213,7 +247,9 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqli
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.2_postgresql_psycopg2_nocextensions 23
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.2_sqlite_pysqlite_nocextensions 23
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_oracle_cx_oracle_nocextensions 22
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_postgresql_psycopg2_cextensions 23
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_postgresql_psycopg2_nocextensions 22
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_sqlite_pysqlite_cextensions 23
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_sqlite_pysqlite_nocextensions 22
# TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect
@@ -229,7 +265,9 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sq
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.2_postgresql_psycopg2_nocextensions 8
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.2_sqlite_pysqlite_nocextensions 8
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_oracle_cx_oracle_nocextensions 8
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_postgresql_psycopg2_cextensions 8
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_postgresql_psycopg2_nocextensions 8
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_sqlite_pysqlite_cextensions 8
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_sqlite_pysqlite_nocextensions 8
# TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute
@@ -245,7 +283,9 @@ test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.2_postgresql_psycopg2_nocextensions 41
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.2_sqlite_pysqlite_nocextensions 41
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_oracle_cx_oracle_nocextensions 41
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_postgresql_psycopg2_cextensions 41
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_postgresql_psycopg2_nocextensions 41
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_sqlite_pysqlite_cextensions 41
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_sqlite_pysqlite_nocextensions 41
# TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute
@@ -261,7 +301,9 @@ test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.2_postgresql_psycopg2_nocextensions 71
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.2_sqlite_pysqlite_nocextensions 71
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_oracle_cx_oracle_nocextensions 71
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_postgresql_psycopg2_cextensions 71
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_postgresql_psycopg2_nocextensions 71
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_sqlite_pysqlite_cextensions 71
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_sqlite_pysqlite_nocextensions 71
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile
@@ -277,93 +319,106 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.2_postgresql_psycopg2_nocextensions 15
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.2_sqlite_pysqlite_nocextensions 15
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_oracle_cx_oracle_nocextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_postgresql_psycopg2_cextensions 15
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_postgresql_psycopg2_nocextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_sqlite_pysqlite_cextensions 15
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_sqlite_pysqlite_nocextensions 15
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_string
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.6_sqlite_pysqlite_nocextensions 15447
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_cextensions 485
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_cextensions 512
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_nocextensions 15505
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_oracle_cx_oracle_nocextensions 35582
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_cextensions 20471
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_nocextensions 35491
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_cextensions 427
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_cextensions 455
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_nocextensions 15447
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.2_postgresql_psycopg2_nocextensions 14459
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.2_sqlite_pysqlite_nocextensions 14430
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_oracle_cx_oracle_nocextensions 14548
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_postgresql_psycopg2_cextensions 497
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_postgresql_psycopg2_nocextensions 14457
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_sqlite_pysqlite_cextensions 453
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_sqlite_pysqlite_nocextensions 14430
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_unicode
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.6_sqlite_pysqlite_nocextensions 15447
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_cextensions 485
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_cextensions 512
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_nocextensions 45505
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_oracle_cx_oracle_nocextensions 35572
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_cextensions 20471
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_nocextensions 35491
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_cextensions 427
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_cextensions 455
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_nocextensions 15447
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.2_postgresql_psycopg2_nocextensions 14459
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.2_sqlite_pysqlite_nocextensions 14430
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_oracle_cx_oracle_nocextensions 14548
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_postgresql_psycopg2_cextensions 497
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_postgresql_psycopg2_nocextensions 14457
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_sqlite_pysqlite_cextensions 453
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_sqlite_pysqlite_nocextensions 14430
# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate 2.7_postgresql_psycopg2_nocextensions 5175
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate 2.7_postgresql_psycopg2_cextensions 5340
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate 2.7_postgresql_psycopg2_nocextensions 5470
test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate 3.2_postgresql_psycopg2_nocextensions 4828
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate 3.3_postgresql_psycopg2_nocextensions 4792
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate 3.3_postgresql_psycopg2_cextensions 5157
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate 3.3_postgresql_psycopg2_nocextensions 5179
# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 2.7_postgresql_psycopg2_cextensions 256
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 2.7_postgresql_psycopg2_nocextensions 256
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 3.3_postgresql_psycopg2_nocextensions 251
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 2.7_postgresql_psycopg2_cextensions 270
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 2.7_postgresql_psycopg2_nocextensions 270
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 3.3_postgresql_psycopg2_cextensions 259
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 3.3_postgresql_psycopg2_nocextensions 259
# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_cextensions 3425
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_cextensions 3625
test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_nocextensions 3749
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 3.2_postgresql_psycopg2_nocextensions 3401
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 3.3_postgresql_psycopg2_nocextensions 3385
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 3.3_postgresql_psycopg2_cextensions 3569
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 3.3_postgresql_psycopg2_nocextensions 3665
# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 2.7_postgresql_psycopg2_cextensions 11045
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 2.7_postgresql_psycopg2_nocextensions 12747
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 3.2_postgresql_psycopg2_nocextensions 11849
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 3.3_postgresql_psycopg2_nocextensions 11803
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 2.7_postgresql_psycopg2_cextensions 11688
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 2.7_postgresql_psycopg2_nocextensions 13440
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 3.3_postgresql_psycopg2_cextensions 11548
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 3.3_postgresql_psycopg2_nocextensions 12720
# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates
test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates 2.7_postgresql_psycopg2_cextensions 1050
test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates 2.7_postgresql_psycopg2_nocextensions 1167
test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates 3.2_postgresql_psycopg2_nocextensions 1114
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates 3.3_postgresql_psycopg2_cextensions 1044
test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates 3.3_postgresql_psycopg2_nocextensions 1106
# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 2.7_postgresql_psycopg2_cextensions 1811
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 2.7_postgresql_psycopg2_nocextensions 1858
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 2.7_postgresql_psycopg2_cextensions 1905
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 2.7_postgresql_psycopg2_nocextensions 1958
test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 3.2_postgresql_psycopg2_nocextensions 1731
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 3.3_postgresql_psycopg2_nocextensions 1721
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 3.3_postgresql_psycopg2_cextensions 1846
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 3.3_postgresql_psycopg2_nocextensions 1853
# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 2.7_postgresql_psycopg2_cextensions 2300
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 2.7_postgresql_psycopg2_cextensions 2424
test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 2.7_postgresql_psycopg2_nocextensions 2559
test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 3.2_postgresql_psycopg2_nocextensions 2483
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 3.3_postgresql_psycopg2_nocextensions 2473
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 3.3_postgresql_psycopg2_cextensions 2460
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 3.3_postgresql_psycopg2_nocextensions 2652
# TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate 2.7_postgresql_psycopg2_cextensions 6157
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate 2.7_postgresql_psycopg2_nocextensions 6276
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate 3.2_postgresql_psycopg2_nocextensions 6252
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate 3.3_postgresql_psycopg2_cextensions 6286
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate 3.3_postgresql_psycopg2_nocextensions 6251
# TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert
@@ -371,20 +426,23 @@ test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate 3.3_pos
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert 2.7_postgresql_psycopg2_cextensions 391
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert 2.7_postgresql_psycopg2_nocextensions 398
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert 3.2_postgresql_psycopg2_nocextensions 395
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert 3.3_postgresql_psycopg2_cextensions 391
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert 3.3_postgresql_psycopg2_nocextensions 394
# TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_cextensions 6422
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_nocextensions 6654
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_cextensions 6765
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_nocextensions 7056
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 3.2_postgresql_psycopg2_nocextensions 6560
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 3.3_postgresql_psycopg2_nocextensions 6560
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 3.3_postgresql_psycopg2_cextensions 6895
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 3.3_postgresql_psycopg2_nocextensions 6999
# TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions 2.7_postgresql_psycopg2_cextensions 19145
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions 2.7_postgresql_psycopg2_nocextensions 20576
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions 3.2_postgresql_psycopg2_nocextensions 20279
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions 3.3_postgresql_psycopg2_cextensions 20117
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions 3.3_postgresql_psycopg2_nocextensions 20279
# TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates
@@ -392,6 +450,7 @@ test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions 3.3_p
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates 2.7_postgresql_psycopg2_cextensions 1063
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates 2.7_postgresql_psycopg2_nocextensions 1171
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates 3.2_postgresql_psycopg2_nocextensions 1120
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates 3.3_postgresql_psycopg2_cextensions 1059
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates 3.3_postgresql_psycopg2_nocextensions 1113
# TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing
@@ -399,4 +458,5 @@ test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates 3.3_po
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing 2.7_postgresql_psycopg2_cextensions 2686
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing 2.7_postgresql_psycopg2_nocextensions 2749
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing 3.2_postgresql_psycopg2_nocextensions 2749
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing 3.3_postgresql_psycopg2_cextensions 2796
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing 3.3_postgresql_psycopg2_nocextensions 2749
diff --git a/test/requirements.py b/test/requirements.py
index a56c037d1..29b7d9997 100644
--- a/test/requirements.py
+++ b/test/requirements.py
@@ -85,8 +85,6 @@ class DefaultRequirements(SuiteRequirements):
no_support('oracle', 'not supported by database'),
no_support('mssql', 'not supported by database'),
no_support('sybase', 'not supported by database'),
- no_support('maxdb', 'FIXME: verify not supported by database'),
- no_support('informix', 'not supported by database'),
])
@property
@@ -125,6 +123,18 @@ class DefaultRequirements(SuiteRequirements):
)
@property
+ def insert_from_select(self):
+ return skip_if(
+ ["firebird"], "crashes for unknown reason"
+ )
+
+ @property
+ def fetch_rows_post_commit(self):
+ return skip_if(
+ ["firebird"], "not supported"
+ )
+
+ @property
def binary_comparisons(self):
"""target database/driver can allow BLOB/BINARY fields to be compared
against a bound parameter value.
@@ -226,7 +236,6 @@ class DefaultRequirements(SuiteRequirements):
"sqlite",
"sybase",
("mysql", "<", (5, 0, 3)),
- ("informix", "<", (11, 55, "xC3"))
], "savepoints not supported")
@@ -283,14 +292,14 @@ class DefaultRequirements(SuiteRequirements):
"""Target database must support INTERSECT or equivalent."""
return fails_if([
- "firebird", "mysql", "sybase", "informix"
+ "firebird", "mysql", "sybase",
], 'no support for INTERSECT')
@property
def except_(self):
"""Target database must support EXCEPT or equivalent (i.e. MINUS)."""
return fails_if([
- "firebird", "mysql", "sybase", "informix"
+ "firebird", "mysql", "sybase",
], 'no support for EXCEPT')
@property
@@ -313,7 +322,6 @@ class DefaultRequirements(SuiteRequirements):
return skip_if([
no_support('firebird', 'no SA implementation'),
- no_support('maxdb', 'two-phase xact not supported by database'),
no_support('mssql', 'two-phase xact not supported by drivers'),
no_support('oracle', 'two-phase xact not implemented in SQLA/oracle'),
no_support('drizzle', 'two-phase xact not supported by database'),
@@ -366,7 +374,6 @@ class DefaultRequirements(SuiteRequirements):
"""Target driver must support some degree of non-ascii symbol names."""
# TODO: expand to exclude MySQLdb versions w/ broken unicode
return skip_if([
- no_support('maxdb', 'database support flakey'),
no_support('oracle', 'FIXME: no support in database?'),
no_support('sybase', 'FIXME: guessing, needs confirmation'),
no_support('mssql+pymssql', 'no FreeTDS support'),
@@ -394,7 +401,7 @@ class DefaultRequirements(SuiteRequirements):
return fails_on_everything_except('mysql+mysqldb', 'mysql+oursql',
'sqlite+pysqlite', 'mysql+pymysql',
'mysql+cymysql',
- 'sybase', 'mssql+pyodbc', 'mssql+mxodbc')
+ 'sybase', 'mssql')
@property
def implements_get_lastrowid(self):
@@ -408,7 +415,8 @@ class DefaultRequirements(SuiteRequirements):
cursor object.
"""
- return fails_on_everything_except('mysql+mysqldb', 'mysql+oursql',
+ return skip_if('mssql+pymssql', 'crashes on pymssql') + \
+ fails_on_everything_except('mysql+mysqldb', 'mysql+oursql',
'sqlite+pysqlite', 'mysql+pymysql',
'mysql+cymysql')
@@ -432,6 +440,15 @@ class DefaultRequirements(SuiteRequirements):
'sybase')
@property
+ def datetime_literals(self):
+ """target dialect supports rendering of a date, time, or datetime as a
+ literal string, e.g. via the TypeEngine.literal_processor() method.
+
+ """
+
+ return fails_on_everything_except("sqlite")
+
+ @property
def datetime(self):
"""target dialect supports representation of Python
datetime.datetime() objects."""
@@ -486,23 +503,24 @@ class DefaultRequirements(SuiteRequirements):
def precision_numerics_general(self):
"""target backend has general support for moderately high-precision
numerics."""
- return fails_if('mssql+pymssql', 'FIXME: improve pymssql dec handling')
+ return exclusions.open()
@property
def precision_numerics_enotation_small(self):
"""target backend supports Decimal() objects using E notation
to represent very small values."""
- return fails_if('mssql+pymssql', 'FIXME: improve pymssql dec handling')
+ # NOTE: this exclusion isn't used in current tests.
+ return exclusions.open()
@property
def precision_numerics_enotation_large(self):
"""target backend supports Decimal() objects using E notation
to represent very large values."""
- return fails_if(
- ("sybase+pyodbc", None, None,
+ return skip_if(
+ [("sybase+pyodbc", None, None,
"Don't know how do get these values through FreeTDS + Sybase"),
- ("firebird", None, None, "Precision must be from 1 to 18"),
+ ("firebird", None, None, "Precision must be from 1 to 18"),]
)
@property
@@ -537,8 +555,39 @@ class DefaultRequirements(SuiteRequirements):
)
@property
+ def precision_generic_float_type(self):
+ """target backend will return native floating point numbers with at
+ least seven decimal places when using the generic Float type."""
+
+ return fails_if([
+ ('mysql', None, None,
+ 'mysql FLOAT type only returns 4 decimals'),
+ ('firebird', None, None,
+ "firebird FLOAT type isn't high precision"),
+ ])
+
+ @property
def floats_to_four_decimals(self):
- return fails_if("mysql+oursql", "Floating point error")
+ return fails_if([
+ ("mysql+oursql", None, None, "Floating point error"),
+ ("firebird", None, None,
+ "Firebird still has FP inaccuracy even "
+ "with only four decimal places"),
+ ('mssql+pyodbc', None, None,
+ 'mssql+pyodbc has FP inaccuracy even with '
+ 'only four decimal places '
+ ),
+ ('mssql+pymssql', None, None,
+ 'mssql+pymssql has FP inaccuracy even with '
+ 'only four decimal places '
+ )
+ ])
+
+ @property
+ def fetch_null_from_numeric(self):
+ return skip_if(
+ ("mssql+pyodbc", None, None, "crashes due to bug #351"),
+ )
@property
def python2(self):
@@ -555,20 +604,6 @@ class DefaultRequirements(SuiteRequirements):
)
@property
- def python26(self):
- return skip_if(
- lambda: sys.version_info < (2, 6),
- "Python version 2.6 or greater is required"
- )
-
- @property
- def python25(self):
- return skip_if(
- lambda: sys.version_info < (2, 5),
- "Python version 2.5 or greater is required"
- )
-
- @property
def cpython(self):
return only_if(lambda: util.cpython,
"cPython interpreter needed"
@@ -579,8 +614,9 @@ class DefaultRequirements(SuiteRequirements):
def non_broken_pickle(self):
from sqlalchemy.util import pickle
return only_if(
- lambda: pickle.__name__ == 'cPickle' or sys.version_info >= (3, 2),
- "Needs cPickle or newer Python 3 pickle"
+ lambda: not util.pypy and pickle.__name__ == 'cPickle'
+ or sys.version_info >= (3, 2),
+ "Needs cPickle+cPython or newer Python 3 pickle"
)
diff --git a/test/sql/test_case_statement.py b/test/sql/test_case_statement.py
index 944a15384..998a55cd8 100644
--- a/test/sql/test_case_statement.py
+++ b/test/sql/test_case_statement.py
@@ -32,7 +32,6 @@ class CaseTest(fixtures.TestBase, AssertsCompiledSQL):
info_table.drop()
@testing.fails_on('firebird', 'FIXME: unknown')
- @testing.fails_on('maxdb', 'FIXME: unknown')
@testing.requires.subqueries
def test_case(self):
inner = select([case([
@@ -130,7 +129,6 @@ class CaseTest(fixtures.TestBase, AssertsCompiledSQL):
@testing.fails_on('firebird', 'FIXME: unknown')
- @testing.fails_on('maxdb', 'FIXME: unknown')
def testcase_with_dict(self):
query = select([case({
info_table.c.pk < 3: 'lessthan3',
diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py
index bdfcccb22..53b9f68fc 100644
--- a/test/sql/test_compiler.py
+++ b/test/sql/test_compiler.py
@@ -18,7 +18,7 @@ from sqlalchemy import Integer, String, MetaData, Table, Column, select, \
literal, and_, null, type_coerce, alias, or_, literal_column,\
Float, TIMESTAMP, Numeric, Date, Text, collate, union, except_,\
intersect, union_all, Boolean, distinct, join, outerjoin, asc, desc,\
- over, subquery, case
+ over, subquery, case, true
import decimal
from sqlalchemy.util import u
from sqlalchemy import exc, sql, util, types, schema
@@ -272,9 +272,10 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
"SELECT foo() AS foo_1"
)
+ # this is native_boolean=False for default dialect
self.assert_compile(
select([not_(True)], use_labels=True),
- "SELECT NOT :param_1"
+ "SELECT :param_1 = 0"
)
self.assert_compile(
@@ -852,6 +853,17 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
'otherid_1': 9, 'myid_1': 12}
)
+ # test a generator
+ self.assert_compile(
+ and_(
+ conj for conj in [
+ table1.c.myid == 12,
+ table1.c.name == 'asdf'
+ ]
+ ),
+ "mytable.myid = :myid_1 AND mytable.name = :name_1"
+ )
+
def test_nested_conjunctions_short_circuit(self):
"""test that empty or_(), and_() conjunctions are collapsed by
an enclosing conjunction."""
@@ -874,6 +886,26 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
"SELECT t.x FROM t WHERE t.x = :x_1 OR t.x = :x_2"
)
+ def test_true_short_circuit(self):
+ t = table('t', column('x'))
+
+ self.assert_compile(
+ select([t]).where(true()),
+ "SELECT t.x FROM t WHERE 1 = 1",
+ dialect=default.DefaultDialect(supports_native_boolean=False)
+ )
+ self.assert_compile(
+ select([t]).where(true()),
+ "SELECT t.x FROM t WHERE true",
+ dialect=default.DefaultDialect(supports_native_boolean=True)
+ )
+
+ self.assert_compile(
+ select([t]),
+ "SELECT t.x FROM t",
+ dialect=default.DefaultDialect(supports_native_boolean=True)
+ )
+
def test_distinct(self):
self.assert_compile(
select([table1.c.myid.distinct()]),
@@ -1024,80 +1056,22 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
def test_for_update(self):
self.assert_compile(
- table1.select(table1.c.myid == 7, for_update=True),
+ table1.select(table1.c.myid == 7).with_for_update(),
"SELECT mytable.myid, mytable.name, mytable.description "
"FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE")
- self.assert_compile(
- table1.select(table1.c.myid == 7, for_update=False),
- "SELECT mytable.myid, mytable.name, mytable.description "
- "FROM mytable WHERE mytable.myid = :myid_1")
-
# not supported by dialect, should just use update
self.assert_compile(
- table1.select(table1.c.myid == 7, for_update='nowait'),
- "SELECT mytable.myid, mytable.name, mytable.description "
- "FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE")
-
- # unknown lock mode
- self.assert_compile(
- table1.select(table1.c.myid == 7, for_update='unknown_mode'),
+ table1.select(table1.c.myid == 7).with_for_update(nowait=True),
"SELECT mytable.myid, mytable.name, mytable.description "
"FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE")
- # ----- mysql
-
- self.assert_compile(
- table1.select(table1.c.myid == 7, for_update=True),
- "SELECT mytable.myid, mytable.name, mytable.description "
- "FROM mytable WHERE mytable.myid = %s FOR UPDATE",
- dialect=mysql.dialect())
-
- self.assert_compile(
- table1.select(table1.c.myid == 7, for_update="read"),
- "SELECT mytable.myid, mytable.name, mytable.description "
- "FROM mytable WHERE mytable.myid = %s LOCK IN SHARE MODE",
- dialect=mysql.dialect())
-
- # ----- oracle
-
- self.assert_compile(
- table1.select(table1.c.myid == 7, for_update=True),
- "SELECT mytable.myid, mytable.name, mytable.description "
- "FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE",
- dialect=oracle.dialect())
-
- self.assert_compile(
- table1.select(table1.c.myid == 7, for_update="nowait"),
- "SELECT mytable.myid, mytable.name, mytable.description "
- "FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE NOWAIT",
- dialect=oracle.dialect())
-
- # ----- postgresql
-
- self.assert_compile(
- table1.select(table1.c.myid == 7, for_update=True),
- "SELECT mytable.myid, mytable.name, mytable.description "
- "FROM mytable WHERE mytable.myid = %(myid_1)s FOR UPDATE",
- dialect=postgresql.dialect())
-
- self.assert_compile(
- table1.select(table1.c.myid == 7, for_update="nowait"),
- "SELECT mytable.myid, mytable.name, mytable.description "
- "FROM mytable WHERE mytable.myid = %(myid_1)s FOR UPDATE NOWAIT",
- dialect=postgresql.dialect())
-
- self.assert_compile(
- table1.select(table1.c.myid == 7, for_update="read"),
- "SELECT mytable.myid, mytable.name, mytable.description "
- "FROM mytable WHERE mytable.myid = %(myid_1)s FOR SHARE",
- dialect=postgresql.dialect())
+ assert_raises_message(
+ exc.ArgumentError,
+ "Unknown for_update argument: 'unknown_mode'",
+ table1.select, table1.c.myid == 7, for_update='unknown_mode'
+ )
- self.assert_compile(
- table1.select(table1.c.myid == 7, for_update="read_nowait"),
- "SELECT mytable.myid, mytable.name, mytable.description "
- "FROM mytable WHERE mytable.myid = %(myid_1)s FOR SHARE NOWAIT",
- dialect=postgresql.dialect())
def test_alias(self):
# test the alias for a table1. column names stay the same,
@@ -1171,172 +1145,6 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
dialect=mysql.dialect()
)
- def test_text(self):
- self.assert_compile(
- text("select * from foo where lala = bar"),
- "select * from foo where lala = bar"
- )
-
- # test bytestring
- self.assert_compile(select(
- ["foobar(a)", "pk_foo_bar(syslaal)"],
- "a = 12",
- from_obj=["foobar left outer join lala on foobar.foo = lala.foo"]
- ),
- "SELECT foobar(a), pk_foo_bar(syslaal) FROM foobar "
- "left outer join lala on foobar.foo = lala.foo WHERE a = 12"
- )
-
- # test unicode
- self.assert_compile(select(
- ["foobar(a)", "pk_foo_bar(syslaal)"],
- "a = 12",
- from_obj=["foobar left outer join lala on foobar.foo = lala.foo"]
- ),
- "SELECT foobar(a), pk_foo_bar(syslaal) FROM foobar "
- "left outer join lala on foobar.foo = lala.foo WHERE a = 12"
- )
-
- # test building a select query programmatically with text
- s = select()
- s.append_column("column1")
- s.append_column("column2")
- s.append_whereclause("column1=12")
- s.append_whereclause("column2=19")
- s = s.order_by("column1")
- s.append_from("table1")
- self.assert_compile(s, "SELECT column1, column2 FROM table1 WHERE "
- "column1=12 AND column2=19 ORDER BY column1")
-
- self.assert_compile(
- select(["column1", "column2"],
- from_obj=table1).alias('somealias').select(),
- "SELECT somealias.column1, somealias.column2 FROM "
- "(SELECT column1, column2 FROM mytable) AS somealias"
- )
-
- # test that use_labels doesnt interfere with literal columns
- self.assert_compile(
- select(["column1", "column2", table1.c.myid], from_obj=table1,
- use_labels=True),
- "SELECT column1, column2, mytable.myid AS mytable_myid "
- "FROM mytable"
- )
-
- # test that use_labels doesnt interfere
- # with literal columns that have textual labels
- self.assert_compile(
- select(["column1 AS foobar", "column2 AS hoho", table1.c.myid],
- from_obj=table1, use_labels=True),
- "SELECT column1 AS foobar, column2 AS hoho, "
- "mytable.myid AS mytable_myid FROM mytable"
- )
-
- # test that "auto-labeling of subquery columns"
- # doesnt interfere with literal columns,
- # exported columns dont get quoted
- self.assert_compile(
- select(["column1 AS foobar", "column2 AS hoho", table1.c.myid],
- from_obj=[table1]).select(),
- "SELECT column1 AS foobar, column2 AS hoho, myid FROM "
- "(SELECT column1 AS foobar, column2 AS hoho, "
- "mytable.myid AS myid FROM mytable)"
- )
-
- self.assert_compile(
- select(['col1', 'col2'], from_obj='tablename').alias('myalias'),
- "SELECT col1, col2 FROM tablename"
- )
-
- def test_binds_in_text(self):
- self.assert_compile(
- text("select * from foo where lala=:bar and hoho=:whee",
- bindparams=[bindparam('bar', 4), bindparam('whee', 7)]),
- "select * from foo where lala=:bar and hoho=:whee",
- checkparams={'bar': 4, 'whee': 7},
- )
-
- self.assert_compile(
- text("select * from foo where clock='05:06:07'"),
- "select * from foo where clock='05:06:07'",
- checkparams={},
- params={},
- )
-
- dialect = postgresql.dialect()
- self.assert_compile(
- text("select * from foo where lala=:bar and hoho=:whee",
- bindparams=[bindparam('bar', 4), bindparam('whee', 7)]),
- "select * from foo where lala=%(bar)s and hoho=%(whee)s",
- checkparams={'bar': 4, 'whee': 7},
- dialect=dialect
- )
-
- # test escaping out text() params with a backslash
- self.assert_compile(
- text("select * from foo where clock='05:06:07' "
- "and mork='\:mindy'"),
- "select * from foo where clock='05:06:07' and mork=':mindy'",
- checkparams={},
- params={},
- dialect=dialect
- )
-
- dialect = sqlite.dialect()
- self.assert_compile(
- text("select * from foo where lala=:bar and hoho=:whee",
- bindparams=[bindparam('bar', 4), bindparam('whee', 7)]),
- "select * from foo where lala=? and hoho=?",
- checkparams={'bar': 4, 'whee': 7},
- dialect=dialect
- )
-
- self.assert_compile(select(
- [table1, table2.c.otherid, "sysdate()", "foo, bar, lala"],
- and_(
- "foo.id = foofoo(lala)",
- "datetime(foo) = Today",
- table1.c.myid == table2.c.otherid,
- )
- ),
- "SELECT mytable.myid, mytable.name, mytable.description, "
- "myothertable.otherid, sysdate(), foo, bar, lala "
- "FROM mytable, myothertable WHERE foo.id = foofoo(lala) AND "
- "datetime(foo) = Today AND mytable.myid = myothertable.otherid")
-
- self.assert_compile(select(
- [alias(table1, 't'), "foo.f"],
- "foo.f = t.id",
- from_obj=["(select f from bar where lala=heyhey) foo"]
- ),
- "SELECT t.myid, t.name, t.description, foo.f FROM mytable AS t, "
- "(select f from bar where lala=heyhey) foo WHERE foo.f = t.id")
-
- # test Text embedded within select_from(), using binds
- generate_series = text(
- "generate_series(:x, :y, :z) as s(a)",
- bindparams=[bindparam('x', None),
- bindparam('y', None), bindparam('z', None)]
- )
-
- s = select([
- (func.current_date() +
- literal_column("s.a")).label("dates")
- ]).select_from(generate_series)
- self.assert_compile(
- s,
- "SELECT CURRENT_DATE + s.a AS dates FROM "
- "generate_series(:x, :y, :z) as s(a)",
- checkparams={'y': None, 'x': None, 'z': None}
- )
-
- self.assert_compile(
- s.params(x=5, y=6, z=7),
- "SELECT CURRENT_DATE + s.a AS dates FROM "
- "generate_series(:x, :y, :z) as s(a)",
- checkparams={'y': 6, 'x': 5, 'z': 7}
- )
-
@testing.emits_warning('.*empty sequence.*')
def test_render_binds_as_literal(self):
"""test a compiler that renders binds inline into
@@ -1377,8 +1185,9 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
dialect=dialect
)
- assert_raises(
+ assert_raises_message(
exc.CompileError,
+ "Bind parameter 'foo' without a renderable value not allowed here.",
bindparam("foo").in_([]).compile, dialect=dialect
)
@@ -1422,58 +1231,6 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
"/ values.val1 > :param_1"
)
- def test_collate(self):
- for expr in (select([table1.c.name.collate('latin1_german2_ci')]),
- select([collate(table1.c.name, 'latin1_german2_ci')])):
- self.assert_compile(
- expr, "SELECT mytable.name COLLATE latin1_german2_ci "
- "AS anon_1 FROM mytable")
-
- assert table1.c.name.collate('latin1_german2_ci').type is \
- table1.c.name.type
-
- expr = select([table1.c.name.collate('latin1_german2_ci').\
- label('k1')]).order_by('k1')
- self.assert_compile(expr,
- "SELECT mytable.name "
- "COLLATE latin1_german2_ci AS k1 FROM mytable ORDER BY k1")
-
- expr = select([collate('foo', 'latin1_german2_ci').label('k1')])
- self.assert_compile(expr,
- "SELECT :param_1 COLLATE latin1_german2_ci AS k1")
-
- expr = select([table1.c.name.collate('latin1_german2_ci').like('%x%')])
- self.assert_compile(expr,
- "SELECT mytable.name COLLATE latin1_german2_ci "
- "LIKE :param_1 AS anon_1 FROM mytable")
-
- expr = select([table1.c.name.like(collate('%x%',
- 'latin1_german2_ci'))])
- self.assert_compile(expr,
- "SELECT mytable.name "
- "LIKE :param_1 COLLATE latin1_german2_ci AS anon_1 "
- "FROM mytable")
-
- expr = select([table1.c.name.collate('col1').like(
- collate('%x%', 'col2'))])
- self.assert_compile(expr,
- "SELECT mytable.name COLLATE col1 "
- "LIKE :param_1 COLLATE col2 AS anon_1 "
- "FROM mytable")
-
- expr = select([func.concat('a', 'b').\
- collate('latin1_german2_ci').label('x')])
- self.assert_compile(expr,
- "SELECT concat(:param_1, :param_2) "
- "COLLATE latin1_german2_ci AS x")
-
-
- expr = select([table1.c.name]).\
- order_by(table1.c.name.collate('latin1_german2_ci'))
- self.assert_compile(expr,
- "SELECT mytable.name FROM mytable ORDER BY "
- "mytable.name COLLATE latin1_german2_ci")
-
def test_percent_chars(self):
t = table("table%name",
column("percent%"),
@@ -2785,10 +2542,6 @@ class DDLTest(fixtures.TestBase, AssertsCompiledSQL):
schema.CreateTable(t1).compile
)
- # there's some unicode issue in the assertion
- # regular expression that appears to be resolved
- # in 2.6, not exactly sure what it is
- @testing.requires.python26
def test_reraise_of_column_spec_issue_unicode(self):
MyType = self._illegal_type_fixture()
t1 = Table('t', MetaData(),
@@ -2800,6 +2553,22 @@ class DDLTest(fixtures.TestBase, AssertsCompiledSQL):
schema.CreateTable(t1).compile
)
+ def test_system_flag(self):
+ m = MetaData()
+ t = Table('t', m, Column('x', Integer),
+ Column('y', Integer, system=True),
+ Column('z', Integer))
+ self.assert_compile(
+ schema.CreateTable(t),
+ "CREATE TABLE t (x INTEGER, z INTEGER)"
+ )
+ m2 = MetaData()
+ t2 = t.tometadata(m2)
+ self.assert_compile(
+ schema.CreateTable(t2),
+ "CREATE TABLE t (x INTEGER, z INTEGER)"
+ )
+
class InlineDefaultTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -2909,6 +2678,7 @@ class SchemaTest(fixtures.TestBase, AssertsCompiledSQL):
"(:rem_id, :datatype_id, :value)")
+
class CorrelateTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -3238,13 +3008,34 @@ class CorrelateTest(fixtures.TestBase, AssertsCompiledSQL):
)
class CoercionTest(fixtures.TestBase, AssertsCompiledSQL):
- __dialect__ = 'default'
+ __dialect__ = default.DefaultDialect(supports_native_boolean=True)
def _fixture(self):
m = MetaData()
return Table('foo', m,
Column('id', Integer))
+ bool_table = table('t', column('x', Boolean))
+
+ def test_coerce_bool_where(self):
+ self.assert_compile(
+ select([self.bool_table]).where(self.bool_table.c.x),
+ "SELECT t.x FROM t WHERE t.x"
+ )
+
+ def test_coerce_bool_where_non_native(self):
+ self.assert_compile(
+ select([self.bool_table]).where(self.bool_table.c.x),
+ "SELECT t.x FROM t WHERE t.x = 1",
+ dialect=default.DefaultDialect(supports_native_boolean=False)
+ )
+
+ self.assert_compile(
+ select([self.bool_table]).where(~self.bool_table.c.x),
+ "SELECT t.x FROM t WHERE t.x = 0",
+ dialect=default.DefaultDialect(supports_native_boolean=False)
+ )
+
def test_null_constant(self):
self.assert_compile(_literal_as_text(None), "NULL")
@@ -3257,12 +3048,12 @@ class CoercionTest(fixtures.TestBase, AssertsCompiledSQL):
def test_val_and_false(self):
t = self._fixture()
self.assert_compile(and_(t.c.id == 1, False),
- "foo.id = :id_1 AND false")
+ "false")
def test_val_and_true_coerced(self):
t = self._fixture()
self.assert_compile(and_(t.c.id == 1, True),
- "foo.id = :id_1 AND true")
+ "foo.id = :id_1")
def test_val_is_null_coerced(self):
t = self._fixture()
@@ -3270,26 +3061,21 @@ class CoercionTest(fixtures.TestBase, AssertsCompiledSQL):
"foo.id IS NULL")
def test_val_and_None(self):
- # current convention is None in and_() or
- # other clauselist is ignored. May want
- # to revise this at some point.
t = self._fixture()
self.assert_compile(and_(t.c.id == 1, None),
- "foo.id = :id_1")
+ "foo.id = :id_1 AND NULL")
def test_None_and_val(self):
- # current convention is None in and_() or
- # other clauselist is ignored. May want
- # to revise this at some point.
t = self._fixture()
- self.assert_compile(and_(t.c.id == 1, None),
- "foo.id = :id_1")
+ self.assert_compile(and_(None, t.c.id == 1),
+ "NULL AND foo.id = :id_1")
def test_None_and_nothing(self):
# current convention is None in and_()
# returns None May want
# to revise this at some point.
- assert and_(None) is None
+ self.assert_compile(
+ and_(None), "NULL")
def test_val_and_null(self):
t = self._fixture()
diff --git a/test/sql/test_constraints.py b/test/sql/test_constraints.py
index b44a65190..cb4b73ec8 100644
--- a/test/sql/test_constraints.py
+++ b/test/sql/test_constraints.py
@@ -544,6 +544,28 @@ class ConstraintCompilationTest(fixtures.TestBase, AssertsCompiledSQL):
"FOREIGN KEY(foo_bar) REFERENCES foo (bar))"
)
+ def test_empty_pkc(self):
+ # test that an empty primary key is ignored
+ metadata = MetaData()
+ tbl = Table('test', metadata,
+ Column('x', Integer, autoincrement=False),
+ Column('y', Integer, autoincrement=False),
+ PrimaryKeyConstraint())
+ self.assert_compile(schema.CreateTable(tbl),
+ "CREATE TABLE test (x INTEGER, y INTEGER)"
+ )
+
+ def test_empty_uc(self):
+ # test that an empty constraint is ignored
+ metadata = MetaData()
+ tbl = Table('test', metadata,
+ Column('x', Integer, autoincrement=False),
+ Column('y', Integer, autoincrement=False),
+ UniqueConstraint())
+ self.assert_compile(schema.CreateTable(tbl),
+ "CREATE TABLE test (x INTEGER, y INTEGER)"
+ )
+
def test_deferrable_column_check(self):
t = Table('tbl', MetaData(),
Column('a', Integer),
@@ -726,6 +748,27 @@ class ConstraintCompilationTest(fixtures.TestBase, AssertsCompiledSQL):
"ALTER TABLE tbl ADD PRIMARY KEY (a)"
)
+ def test_render_check_constraint_sql_literal(self):
+ t, t2 = self._constraint_create_fixture()
+
+ constraint = CheckConstraint(t.c.a > 5)
+
+ self.assert_compile(
+ schema.AddConstraint(constraint),
+ "ALTER TABLE tbl ADD CHECK (a > 5)"
+ )
+
+ def test_render_index_sql_literal(self):
+ t, t2 = self._constraint_create_fixture()
+
+ constraint = Index('name', t.c.a + 5)
+
+ self.assert_compile(
+ schema.CreateIndex(constraint),
+ "CREATE INDEX name ON tbl (a + 5)"
+ )
+
+
class ConstraintAPITest(fixtures.TestBase):
def test_double_fk_usage_raises(self):
f = ForeignKey('b.id')
diff --git a/test/sql/test_cte.py b/test/sql/test_cte.py
index 28756873f..0f6831375 100644
--- a/test/sql/test_cte.py
+++ b/test/sql/test_cte.py
@@ -312,6 +312,22 @@ class CTETest(fixtures.TestBase, AssertsCompiledSQL):
"FROM regional_sales"
)
+ def test_multi_subq_quote(self):
+ cte = select([literal(1).label("id")]).cte(name='CTE')
+
+ s1 = select([cte.c.id]).alias()
+ s2 = select([cte.c.id]).alias()
+
+ s = select([s1, s2])
+ self.assert_compile(
+ s,
+ 'WITH "CTE" AS (SELECT :param_1 AS id) '
+ 'SELECT anon_1.id, anon_2.id FROM '
+ '(SELECT "CTE".id AS id FROM "CTE") AS anon_1, '
+ '(SELECT "CTE".id AS id FROM "CTE") AS anon_2'
+ )
+
+
def test_positional_binds(self):
orders = table('orders',
column('order'),
@@ -351,3 +367,32 @@ class CTETest(fixtures.TestBase, AssertsCompiledSQL):
dialect=dialect
)
+
+ def test_all_aliases(self):
+ orders = table('order', column('order'))
+ s = select([orders.c.order]).cte("regional_sales")
+
+ r1 = s.alias()
+ r2 = s.alias()
+
+ s2 = select([r1, r2]).where(r1.c.order > r2.c.order)
+
+ self.assert_compile(
+ s2,
+ 'WITH regional_sales AS (SELECT "order"."order" '
+ 'AS "order" FROM "order") '
+ 'SELECT anon_1."order", anon_2."order" '
+ 'FROM regional_sales AS anon_1, '
+ 'regional_sales AS anon_2 WHERE anon_1."order" > anon_2."order"'
+ )
+
+ s3 = select([orders]).select_from(orders.join(r1, r1.c.order == orders.c.order))
+
+ self.assert_compile(
+ s3,
+ 'WITH regional_sales AS '
+ '(SELECT "order"."order" AS "order" '
+ 'FROM "order")'
+ ' SELECT "order"."order" '
+ 'FROM "order" JOIN regional_sales AS anon_1 ON anon_1."order" = "order"."order"'
+ ) \ No newline at end of file
diff --git a/test/engine/test_ddlemit.py b/test/sql/test_ddlemit.py
index e773d0ced..be75f63ec 100644
--- a/test/engine/test_ddlemit.py
+++ b/test/sql/test_ddlemit.py
@@ -1,5 +1,5 @@
from sqlalchemy.testing import fixtures
-from sqlalchemy.engine.ddl import SchemaGenerator, SchemaDropper
+from sqlalchemy.sql.ddl import SchemaGenerator, SchemaDropper
from sqlalchemy.engine import default
from sqlalchemy import MetaData, Table, Column, Integer, Sequence
from sqlalchemy import schema
diff --git a/test/sql/test_defaults.py b/test/sql/test_defaults.py
index 1508c0532..1622c4ed8 100644
--- a/test/sql/test_defaults.py
+++ b/test/sql/test_defaults.py
@@ -45,9 +45,14 @@ class DefaultTest(fixtures.TestBase):
# since its a "branched" connection
conn.close()
- use_function_defaults = testing.against('postgresql', 'mssql', 'maxdb')
+ use_function_defaults = testing.against('postgresql', 'mssql')
is_oracle = testing.against('oracle')
+ class MyClass(object):
+ @classmethod
+ def gen_default(cls, ctx):
+ return "hi"
+
# select "count(1)" returns different results on different DBs also
# correct for "current_date" compatible as column default, value
# differences
@@ -68,9 +73,7 @@ class DefaultTest(fixtures.TestBase):
f2 = sa.select([func.length('abcdefghijk')], bind=db).scalar()
def1 = currenttime
deftype = sa.Date
- if testing.against('maxdb'):
- def2 = sa.text("curdate")
- elif testing.against('mssql'):
+ if testing.against('mssql'):
def2 = sa.text("getdate()")
else:
def2 = sa.text("current_date")
@@ -125,7 +128,12 @@ class DefaultTest(fixtures.TestBase):
# combo
Column('col9', String(20),
default='py',
- server_default='ddl'))
+ server_default='ddl'),
+
+ # python method w/ context
+ Column('col10', String(20), default=MyClass.gen_default)
+ )
+
t.create()
@classmethod
@@ -285,7 +293,7 @@ class DefaultTest(fixtures.TestBase):
today = datetime.date.today()
eq_(l.fetchall(), [
(x, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, 'py')
+ 12, today, 'py', 'hi')
for x in range(51, 54)])
t.insert().execute(col9=None)
@@ -295,7 +303,7 @@ class DefaultTest(fixtures.TestBase):
eq_(t.select(t.c.col1 == 54).execute().fetchall(),
[(54, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, None)])
+ 12, today, None, 'hi')])
@testing.fails_on('firebird', 'Data type unknown')
def test_insertmany(self):
@@ -311,11 +319,11 @@ class DefaultTest(fixtures.TestBase):
today = datetime.date.today()
eq_(l.fetchall(),
[(51, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, 'py'),
+ 12, today, 'py', 'hi'),
(52, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, 'py'),
+ 12, today, 'py', 'hi'),
(53, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, 'py')])
+ 12, today, 'py', 'hi')])
def test_no_embed_in_sql(self):
"""Using a DefaultGenerator, Sequence, DefaultClause
@@ -379,11 +387,11 @@ class DefaultTest(fixtures.TestBase):
today = datetime.date.today()
eq_(l.fetchall(),
[(51, 'im the update', f2, ts, ts, ctexec, False, False,
- 13, today, 'py'),
+ 13, today, 'py', 'hi'),
(52, 'im the update', f2, ts, ts, ctexec, True, False,
- 13, today, 'py'),
+ 13, today, 'py', 'hi'),
(53, 'im the update', f2, ts, ts, ctexec, True, False,
- 13, today, 'py')])
+ 13, today, 'py', 'hi')])
@testing.fails_on('firebird', 'Data type unknown')
def test_update(self):
@@ -395,7 +403,7 @@ class DefaultTest(fixtures.TestBase):
l = l.first()
eq_(l,
(pk, 'im the update', f2, None, None, ctexec, True, False,
- 13, datetime.date.today(), 'py'))
+ 13, datetime.date.today(), 'py', 'hi'))
eq_(11, f2)
@testing.fails_on('firebird', 'Data type unknown')
@@ -607,6 +615,33 @@ class AutoIncrementTest(fixtures.TablesTest):
nonai.insert().execute(id=1, data='row 1')
+
+ def test_col_w_sequence_non_autoinc_no_firing(self):
+ metadata = self.metadata
+ # plain autoincrement/PK table in the actual schema
+ Table("x", metadata,
+ Column("set_id", Integer, primary_key=True)
+ )
+ metadata.create_all()
+
+ # for the INSERT use a table with a Sequence
+ # and autoincrement=False. Using a ForeignKey
+ # would have the same effect
+ dataset_no_autoinc = Table("x", MetaData(),
+ Column("set_id", Integer, Sequence("some_seq"),
+ primary_key=True, autoincrement=False)
+ )
+
+ testing.db.execute(
+ dataset_no_autoinc.insert()
+ )
+ eq_(
+ testing.db.scalar(dataset_no_autoinc.count()), 1
+ )
+
+
+
+
class SequenceDDLTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
@@ -879,6 +914,7 @@ class SequenceTest(fixtures.TestBase, testing.AssertsCompiledSQL):
assert not self._has_sequence('s1')
assert not self._has_sequence('s2')
+
cartitems = sometable = metadata = None
class TableBoundSequenceTest(fixtures.TestBase):
__requires__ = ('sequences',)
diff --git a/test/sql/test_functions.py b/test/sql/test_functions.py
index ee503dbb7..ee1d61f85 100644
--- a/test/sql/test_functions.py
+++ b/test/sql/test_functions.py
@@ -21,13 +21,12 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
functions._registry.clear()
def test_compile(self):
- for dialect in all_dialects(exclude=('sybase', 'access',
- 'informix', 'maxdb')):
+ for dialect in all_dialects(exclude=('sybase', )):
bindtemplate = BIND_TEMPLATES[dialect.paramstyle]
self.assert_compile(func.current_timestamp(),
"CURRENT_TIMESTAMP", dialect=dialect)
self.assert_compile(func.localtime(), "LOCALTIME", dialect=dialect)
- if dialect.name in ('firebird', 'maxdb'):
+ if dialect.name in ('firebird',):
self.assert_compile(func.nosuchfunction(),
"nosuchfunction", dialect=dialect)
else:
diff --git a/test/sql/test_generative.py b/test/sql/test_generative.py
index 09b20d8ea..5a65cecef 100644
--- a/test/sql/test_generative.py
+++ b/test/sql/test_generative.py
@@ -428,13 +428,13 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
class Vis(CloningVisitor):
def visit_textclause(self, text):
text.text = text.text + " SOME MODIFIER=:lala"
- text.bindparams['lala'] = bindparam('lala')
+ text._bindparams['lala'] = bindparam('lala')
clause2 = Vis().traverse(clause)
assert c1 == str(clause)
assert str(clause2) == c1 + " SOME MODIFIER=:lala"
- assert list(clause.bindparams.keys()) == ['bar']
- assert set(clause2.bindparams.keys()) == set(['bar', 'lala'])
+ assert list(clause._bindparams.keys()) == ['bar']
+ assert set(clause2._bindparams.keys()) == set(['bar', 'lala'])
def test_select(self):
s2 = select([t1])
diff --git a/test/sql/test_insert.py b/test/sql/test_insert.py
index e1171532d..5c3b9b6c9 100644
--- a/test/sql/test_insert.py
+++ b/test/sql/test_insert.py
@@ -133,6 +133,35 @@ class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
checkparams={"name_1": "foo"}
)
+ def test_insert_from_select_select_alt_ordering(self):
+ table1 = self.tables.mytable
+ sel = select([table1.c.name, table1.c.myid]).where(table1.c.name == 'foo')
+ ins = self.tables.myothertable.insert().\
+ from_select(("othername", "otherid"), sel)
+ self.assert_compile(
+ ins,
+ "INSERT INTO myothertable (othername, otherid) "
+ "SELECT mytable.name, mytable.myid FROM mytable "
+ "WHERE mytable.name = :name_1",
+ checkparams={"name_1": "foo"}
+ )
+
+ def test_insert_from_select_select_no_defaults(self):
+ metadata = MetaData()
+ table = Table('sometable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('foo', Integer, default=func.foobar()))
+ table1 = self.tables.mytable
+ sel = select([table1.c.myid]).where(table1.c.name == 'foo')
+ ins = table.insert().\
+ from_select(["id"], sel)
+ self.assert_compile(
+ ins,
+ "INSERT INTO sometable (id) SELECT mytable.myid "
+ "FROM mytable WHERE mytable.name = :name_1",
+ checkparams={"name_1": "foo"}
+ )
+
def test_insert_mix_select_values_exception(self):
table1 = self.tables.mytable
sel = select([table1.c.myid, table1.c.name]).where(table1.c.name == 'foo')
diff --git a/test/sql/test_join_rewriting.py b/test/sql/test_join_rewriting.py
index 5a9bdd1d3..801d5ce9a 100644
--- a/test/sql/test_join_rewriting.py
+++ b/test/sql/test_join_rewriting.py
@@ -1,10 +1,11 @@
from sqlalchemy import Table, Column, Integer, MetaData, ForeignKey, select
-from sqlalchemy.testing import fixtures, AssertsCompiledSQL
+from sqlalchemy.testing import fixtures, AssertsCompiledSQL, eq_
from sqlalchemy import util
from sqlalchemy.engine import default
from sqlalchemy import testing
+
m = MetaData()
a = Table('a', m,
@@ -30,6 +31,15 @@ e = Table('e', m,
Column('id', Integer, primary_key=True)
)
+b_key = Table('b_key', m,
+ Column('id', Integer, primary_key=True, key='bid'),
+ )
+
+a_to_b_key = Table('a_to_b_key', m,
+ Column('aid', Integer, ForeignKey('a.id')),
+ Column('bid', Integer, ForeignKey('b_key.bid')),
+ )
+
class _JoinRewriteTestBase(AssertsCompiledSQL):
def _test(self, s, assert_):
self.assert_compile(
@@ -38,10 +48,22 @@ class _JoinRewriteTestBase(AssertsCompiledSQL):
)
compiled = s.compile(dialect=self.__dialect__)
- for key, col in zip([c.key for c in s.c], s.inner_columns):
+
+ # column name should be in result map, as we never render
+ # .key in SQL
+ for key, col in zip([c.name for c in s.c], s.inner_columns):
key = key % compiled.anon_map
assert col in compiled.result_map[key][1]
+ _a_bkeyselect_bkey = ""
+
+ def test_a_bkeyselect_bkey(self):
+ assoc = a_to_b_key.select().alias()
+ j1 = assoc.join(b_key)
+ j2 = a.join(j1)
+
+ s = select([a, b_key], use_labels=True).select_from(j2)
+ self._test(s, self._a_bkeyselect_bkey)
def test_a_bc(self):
j1 = b.join(c)
@@ -60,6 +82,27 @@ class _JoinRewriteTestBase(AssertsCompiledSQL):
self._test(s, self._a_bc)
+ def test_a_bkeyassoc(self):
+ j1 = b_key.join(a_to_b_key)
+ j2 = a.join(j1)
+
+ s = select([a, b_key.c.bid], use_labels=True).\
+ select_from(j2)
+
+ self._test(s, self._a_bkeyassoc)
+
+ def test_a_bkeyassoc_aliased(self):
+ bkey_alias = b_key.alias()
+ a_to_b_key_alias = a_to_b_key.alias()
+
+ j1 = bkey_alias.join(a_to_b_key_alias)
+ j2 = a.join(j1)
+
+ s = select([a, bkey_alias.c.bid], use_labels=True).\
+ select_from(j2)
+
+ self._test(s, self._a_bkeyassoc_aliased)
+
def test_a__b_dc(self):
j1 = c.join(d)
j2 = b.join(j1)
@@ -94,6 +137,7 @@ class _JoinRewriteTestBase(AssertsCompiledSQL):
self._a_bc_comma_a1_selbc
)
+
class JoinRewriteTest(_JoinRewriteTestBase, fixtures.TestBase):
"""test rendering of each join with right-nested rewritten as
aliased SELECT statements.."""
@@ -149,6 +193,36 @@ class JoinRewriteTest(_JoinRewriteTestBase, fixtures.TestBase):
"ON a_1.id = anon_2.b_a_id ORDER BY anon_2.b_id"
)
+ _a_bkeyassoc = (
+ "SELECT a.id AS a_id, anon_1.b_key_id AS b_key_id "
+ "FROM a JOIN "
+ "(SELECT b_key.id AS b_key_id, a_to_b_key.aid AS a_to_b_key_aid, "
+ "a_to_b_key.bid AS a_to_b_key_bid FROM b_key "
+ "JOIN a_to_b_key ON b_key.id = a_to_b_key.bid) AS anon_1 "
+ "ON a.id = anon_1.a_to_b_key_aid"
+ )
+
+ _a_bkeyassoc_aliased = (
+ "SELECT a.id AS a_id, anon_1.b_key_1_id AS b_key_1_id "
+ "FROM a JOIN (SELECT b_key_1.id AS b_key_1_id, "
+ "a_to_b_key_1.aid AS a_to_b_key_1_aid, "
+ "a_to_b_key_1.bid AS a_to_b_key_1_bid FROM b_key AS b_key_1 "
+ "JOIN a_to_b_key AS a_to_b_key_1 ON b_key_1.id = a_to_b_key_1.bid) AS "
+ "anon_1 ON a.id = anon_1.a_to_b_key_1_aid"
+ )
+
+ _a_bkeyselect_bkey = (
+ "SELECT a.id AS a_id, anon_2.anon_1_aid AS anon_1_aid, "
+ "anon_2.anon_1_bid AS anon_1_bid, anon_2.b_key_id AS b_key_id "
+ "FROM a JOIN (SELECT anon_1.aid AS anon_1_aid, anon_1.bid AS anon_1_bid, "
+ "b_key.id AS b_key_id "
+ "FROM (SELECT a_to_b_key.aid AS aid, a_to_b_key.bid AS bid "
+ "FROM a_to_b_key) AS anon_1 "
+ "JOIN b_key ON b_key.id = anon_1.bid) AS anon_2 ON a.id = anon_2.anon_1_aid"
+ )
+
+
+
class JoinPlainTest(_JoinRewriteTestBase, fixtures.TestBase):
"""test rendering of each join with normal nesting."""
@util.classproperty
@@ -156,6 +230,12 @@ class JoinPlainTest(_JoinRewriteTestBase, fixtures.TestBase):
dialect = default.DefaultDialect()
return dialect
+ _a_bkeyselect_bkey = (
+ "SELECT a.id AS a_id, b_key.id AS b_key_id FROM a JOIN "
+ "((SELECT a_to_b_key.aid AS aid, a_to_b_key.bid AS bid "
+ "FROM a_to_b_key) AS anon_1 JOIN b_key ON b_key.id = anon_1.bid) "
+ "ON a.id = anon_1.aid"
+ )
_a__b_dc = (
"SELECT a.id AS a_id, b.id AS b_id, "
"b.a_id AS b_a_id, c.id AS c_id, "
@@ -194,6 +274,19 @@ class JoinPlainTest(_JoinRewriteTestBase, fixtures.TestBase):
"ON a_1.id = anon_1.b_a_id ORDER BY anon_1.b_id"
)
+ _a_bkeyassoc = (
+ "SELECT a.id AS a_id, b_key.id AS b_key_id "
+ "FROM a JOIN "
+ "(b_key JOIN a_to_b_key ON b_key.id = a_to_b_key.bid) "
+ "ON a.id = a_to_b_key.aid"
+ )
+
+ _a_bkeyassoc_aliased = (
+ "SELECT a.id AS a_id, b_key_1.id AS b_key_1_id FROM a "
+ "JOIN (b_key AS b_key_1 JOIN a_to_b_key AS a_to_b_key_1 "
+ "ON b_key_1.id = a_to_b_key_1.bid) ON a.id = a_to_b_key_1.aid"
+ )
+
class JoinNoUseLabelsTest(_JoinRewriteTestBase, fixtures.TestBase):
@util.classproperty
def __dialect__(cls):
@@ -208,6 +301,12 @@ class JoinNoUseLabelsTest(_JoinRewriteTestBase, fixtures.TestBase):
assert_
)
+ _a_bkeyselect_bkey = (
+ "SELECT a.id, b_key.id FROM a JOIN ((SELECT a_to_b_key.aid AS aid, "
+ "a_to_b_key.bid AS bid FROM a_to_b_key) AS anon_1 "
+ "JOIN b_key ON b_key.id = anon_1.bid) ON a.id = anon_1.aid"
+ )
+
_a__b_dc = (
"SELECT a.id, b.id, "
"b.a_id, c.id, "
@@ -245,10 +344,21 @@ class JoinNoUseLabelsTest(_JoinRewriteTestBase, fixtures.TestBase):
"ON a_1.id = anon_1.b_a_id ORDER BY anon_1.b_id"
)
+ _a_bkeyassoc = (
+ "SELECT a.id, b_key.id FROM a JOIN (b_key JOIN a_to_b_key "
+ "ON b_key.id = a_to_b_key.bid) ON a.id = a_to_b_key.aid"
+ )
+
+ _a_bkeyassoc_aliased = (
+ "SELECT a.id, b_key_1.id FROM a JOIN (b_key AS b_key_1 "
+ "JOIN a_to_b_key AS a_to_b_key_1 ON b_key_1.id = a_to_b_key_1.bid) "
+ "ON a.id = a_to_b_key_1.aid"
+ )
+
class JoinExecTest(_JoinRewriteTestBase, fixtures.TestBase):
"""invoke the SQL on the current backend to ensure compatibility"""
- _a_bc = _a_bc_comma_a1_selbc = _a__b_dc = None
+ _a_bc = _a_bc_comma_a1_selbc = _a__b_dc = _a_bkeyassoc = _a_bkeyassoc_aliased = None
@classmethod
def setup_class(cls):
@@ -259,7 +369,9 @@ class JoinExecTest(_JoinRewriteTestBase, fixtures.TestBase):
m.drop_all(testing.db)
def _test(self, selectable, assert_):
- testing.db.execute(selectable)
+ result = testing.db.execute(selectable)
+ for col in selectable.inner_columns:
+ assert col in result._metadata._keymap
class DialectFlagTest(fixtures.TestBase, AssertsCompiledSQL):
diff --git a/test/sql/test_metadata.py b/test/sql/test_metadata.py
index 851e9b920..f933a2494 100644
--- a/test/sql/test_metadata.py
+++ b/test/sql/test_metadata.py
@@ -5,15 +5,16 @@ from sqlalchemy.testing import emits_warning
import pickle
from sqlalchemy import Integer, String, UniqueConstraint, \
CheckConstraint, ForeignKey, MetaData, Sequence, \
- ForeignKeyConstraint, ColumnDefault, Index, event,\
- events, Unicode, types as sqltypes
-from sqlalchemy.testing.schema import Table, Column
+ ForeignKeyConstraint, PrimaryKeyConstraint, ColumnDefault, Index, event,\
+ events, Unicode, types as sqltypes, bindparam, \
+ Table, Column
from sqlalchemy import schema, exc
import sqlalchemy as tsa
from sqlalchemy.testing import fixtures
from sqlalchemy import testing
from sqlalchemy.testing import ComparesTables, AssertsCompiledSQL
-from sqlalchemy.testing import eq_, is_
+from sqlalchemy.testing import eq_, is_, mock
+from contextlib import contextmanager
class MetaDataTest(fixtures.TestBase, ComparesTables):
def test_metadata_connect(self):
@@ -236,6 +237,45 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
go
)
+ def test_fk_given_non_col(self):
+ not_a_col = bindparam('x')
+ assert_raises_message(
+ exc.ArgumentError,
+ "String, Column, or Column-bound argument expected, got Bind",
+ ForeignKey, not_a_col
+ )
+
+ def test_fk_given_non_col_clauseelem(self):
+ class Foo(object):
+ def __clause_element__(self):
+ return bindparam('x')
+ assert_raises_message(
+ exc.ArgumentError,
+ "String, Column, or Column-bound argument expected, got Bind",
+ ForeignKey, Foo()
+ )
+
+ def test_fk_given_col_non_table(self):
+ t = Table('t', MetaData(), Column('x', Integer))
+ xa = t.alias().c.x
+ assert_raises_message(
+ exc.ArgumentError,
+ "ForeignKey received Column not bound to a Table, got: .*Alias",
+ ForeignKey, xa
+ )
+
+ def test_fk_given_col_non_table_clauseelem(self):
+ t = Table('t', MetaData(), Column('x', Integer))
+ class Foo(object):
+ def __clause_element__(self):
+ return t.alias().c.x
+
+ assert_raises_message(
+ exc.ArgumentError,
+ "ForeignKey received Column not bound to a Table, got: .*Alias",
+ ForeignKey, Foo()
+ )
+
def test_fk_no_such_target_col_error_upfront(self):
meta = MetaData()
a = Table('a', meta, Column('a', Integer))
@@ -268,6 +308,7 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
@testing.exclude('mysql', '<', (4, 1, 1), 'early types are squirrely')
def test_to_metadata(self):
+ from sqlalchemy.testing.schema import Table
meta = MetaData()
table = Table('mytable', meta,
@@ -280,7 +321,7 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
Column('description', String(30),
CheckConstraint("description='hi'")),
UniqueConstraint('name'),
- test_needs_fk=True,
+ test_needs_fk=True
)
table2 = Table('othertable', meta,
@@ -288,7 +329,7 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
Column('myid', Integer,
ForeignKey('mytable.myid'),
),
- test_needs_fk=True,
+ test_needs_fk=True
)
def test_to_metadata():
@@ -447,13 +488,11 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
Column('description', String(30),
CheckConstraint("description='hi'")),
UniqueConstraint('name'),
- test_needs_fk=True,
)
table2 = Table('othertable', meta,
Column('id', Integer, primary_key=True),
Column('myid', Integer, ForeignKey('mytable.myid')),
- test_needs_fk=True,
)
meta2 = MetaData()
@@ -474,14 +513,12 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
Column('description', String(30),
CheckConstraint("description='hi'")),
UniqueConstraint('name'),
- test_needs_fk=True,
schema='myschema',
)
table2 = Table('othertable', meta,
Column('id', Integer, primary_key=True),
Column('myid', Integer, ForeignKey('myschema.mytable.myid')),
- test_needs_fk=True,
schema='myschema',
)
@@ -494,6 +531,47 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
eq_(str(table_c.join(table2_c).onclause),
'myschema.mytable.myid = myschema.othertable.myid')
+ def test_tometadata_copy_info(self):
+ m = MetaData()
+ fk = ForeignKey('t2.id')
+ c = Column('c', Integer, fk)
+ ck = CheckConstraint('c > 5')
+ t = Table('t', m, c, ck)
+
+ m.info['minfo'] = True
+ fk.info['fkinfo'] = True
+ c.info['cinfo'] = True
+ ck.info['ckinfo'] = True
+ t.info['tinfo'] = True
+ t.primary_key.info['pkinfo'] = True
+ fkc = [const for const in t.constraints if
+ isinstance(const, ForeignKeyConstraint)][0]
+ fkc.info['fkcinfo'] = True
+
+ m2 = MetaData()
+ t2 = t.tometadata(m2)
+
+ m.info['minfo'] = False
+ fk.info['fkinfo'] = False
+ c.info['cinfo'] = False
+ ck.info['ckinfo'] = False
+ t.primary_key.info['pkinfo'] = False
+ fkc.info['fkcinfo'] = False
+
+ eq_(m2.info, {})
+ eq_(t2.info, {"tinfo": True})
+ eq_(t2.c.c.info, {"cinfo": True})
+ eq_(list(t2.c.c.foreign_keys)[0].info, {"fkinfo": True})
+ eq_(t2.primary_key.info, {"pkinfo": True})
+
+ fkc2 = [const for const in t2.constraints
+ if isinstance(const, ForeignKeyConstraint)][0]
+ eq_(fkc2.info, {"fkcinfo": True})
+
+ ck2 = [const for const in
+ t2.constraints if isinstance(const, CheckConstraint)][0]
+ eq_(ck2.info, {"ckinfo": True})
+
def test_tometadata_kwargs(self):
meta = MetaData()
@@ -506,6 +584,8 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
meta2 = MetaData()
table_c = table.tometadata(meta2)
+ eq_(table.kwargs, {"mysql_engine": "InnoDB"})
+
eq_(table.kwargs, table_c.kwargs)
def test_tometadata_indexes(self):
@@ -581,11 +661,13 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
kw['quote_schema'] = quote_schema
t = Table(name, metadata, **kw)
eq_(t.schema, exp_schema, "test %d, table schema" % i)
- eq_(t.quote_schema, exp_quote_schema,
+ eq_(t.schema.quote if t.schema is not None else None,
+ exp_quote_schema,
"test %d, table quote_schema" % i)
seq = Sequence(name, metadata=metadata, **kw)
eq_(seq.schema, exp_schema, "test %d, seq schema" % i)
- eq_(seq.quote_schema, exp_quote_schema,
+ eq_(seq.schema.quote if seq.schema is not None else None,
+ exp_quote_schema,
"test %d, seq quote_schema" % i)
def test_manual_dependencies(self):
@@ -614,13 +696,11 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
Column('name', String(40), nullable=True),
Column('description', String(30), CheckConstraint("description='hi'")),
UniqueConstraint('name'),
- test_needs_fk=True
)
table2 = Table('othertable', meta,
Column('id', Integer, primary_key=True),
Column('myid', Integer, ForeignKey('myschema.mytable.myid')),
- test_needs_fk=True
)
meta2 = MetaData(schema='someschema')
@@ -641,13 +721,11 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
Column('description', String(30),
CheckConstraint("description='hi'")),
UniqueConstraint('name'),
- test_needs_fk=True,
)
table2 = Table('othertable', meta,
Column('id', Integer, primary_key=True),
Column('myid', Integer, ForeignKey('mytable.myid')),
- test_needs_fk=True,
)
meta2 = MetaData()
@@ -764,6 +842,77 @@ class TableTest(fixtures.TestBase, AssertsCompiledSQL):
)
is_(t._autoincrement_column, t.c.id)
+ def test_pk_args_standalone(self):
+ m = MetaData()
+ t = Table('t', m,
+ Column('x', Integer, primary_key=True),
+ PrimaryKeyConstraint(mssql_clustered=True)
+ )
+ eq_(
+ list(t.primary_key), [t.c.x]
+ )
+ eq_(
+ t.primary_key.dialect_kwargs, {"mssql_clustered": True}
+ )
+
+ def test_pk_cols_sets_flags(self):
+ m = MetaData()
+ t = Table('t', m,
+ Column('x', Integer),
+ Column('y', Integer),
+ Column('z', Integer),
+ PrimaryKeyConstraint('x', 'y')
+ )
+ eq_(t.c.x.primary_key, True)
+ eq_(t.c.y.primary_key, True)
+ eq_(t.c.z.primary_key, False)
+
+ def test_pk_col_mismatch_one(self):
+ m = MetaData()
+ assert_raises_message(
+ exc.SAWarning,
+ "Table 't' specifies columns 'x' as primary_key=True, "
+ "not matching locally specified columns 'q'",
+ Table, 't', m,
+ Column('x', Integer, primary_key=True),
+ Column('q', Integer),
+ PrimaryKeyConstraint('q')
+ )
+
+ def test_pk_col_mismatch_two(self):
+ m = MetaData()
+ assert_raises_message(
+ exc.SAWarning,
+ "Table 't' specifies columns 'a', 'b', 'c' as primary_key=True, "
+ "not matching locally specified columns 'b', 'c'",
+ Table, 't', m,
+ Column('a', Integer, primary_key=True),
+ Column('b', Integer, primary_key=True),
+ Column('c', Integer, primary_key=True),
+ PrimaryKeyConstraint('b', 'c')
+ )
+
+ @testing.emits_warning("Table 't'")
+ def test_pk_col_mismatch_three(self):
+ m = MetaData()
+ t = Table('t', m,
+ Column('x', Integer, primary_key=True),
+ Column('q', Integer),
+ PrimaryKeyConstraint('q')
+ )
+ eq_(list(t.primary_key), [t.c.q])
+
+ @testing.emits_warning("Table 't'")
+ def test_pk_col_mismatch_four(self):
+ m = MetaData()
+ t = Table('t', m,
+ Column('a', Integer, primary_key=True),
+ Column('b', Integer, primary_key=True),
+ Column('c', Integer, primary_key=True),
+ PrimaryKeyConstraint('b', 'c')
+ )
+ eq_(list(t.primary_key), [t.c.b, t.c.c])
+
class SchemaTypeTest(fixtures.TestBase):
class MyType(sqltypes.SchemaType, sqltypes.TypeEngine):
column = None
@@ -1039,7 +1188,7 @@ class UseExistingTest(fixtures.TablesTest):
meta2 = self._useexisting_fixture()
users = Table('users', meta2, quote=True, autoload=True,
keep_existing=True)
- assert not users.quote
+ assert not users.name.quote
def test_keep_existing_add_column(self):
meta2 = self._useexisting_fixture()
@@ -1055,12 +1204,15 @@ class UseExistingTest(fixtures.TablesTest):
autoload=True, keep_existing=True)
assert isinstance(users.c.name.type, Unicode)
+ @testing.skip_if(
+ lambda: testing.db.dialect.requires_name_normalize,
+ "test depends on lowercase as case insensitive")
def test_keep_existing_quote_no_orig(self):
meta2 = self._notexisting_fixture()
users = Table('users', meta2, quote=True,
autoload=True,
keep_existing=True)
- assert users.quote
+ assert users.name.quote
def test_keep_existing_add_column_no_orig(self):
meta2 = self._notexisting_fixture()
@@ -1080,7 +1232,7 @@ class UseExistingTest(fixtures.TablesTest):
meta2 = self._useexisting_fixture()
users = Table('users', meta2, quote=True,
keep_existing=True)
- assert not users.quote
+ assert not users.name.quote
def test_keep_existing_add_column_no_reflection(self):
meta2 = self._useexisting_fixture()
@@ -1097,9 +1249,12 @@ class UseExistingTest(fixtures.TablesTest):
def test_extend_existing_quote(self):
meta2 = self._useexisting_fixture()
- users = Table('users', meta2, quote=True, autoload=True,
- extend_existing=True)
- assert users.quote
+ assert_raises_message(
+ tsa.exc.ArgumentError,
+ "Can't redefine 'quote' or 'quote_schema' arguments",
+ Table, 'users', meta2, quote=True, autoload=True,
+ extend_existing=True
+ )
def test_extend_existing_add_column(self):
meta2 = self._useexisting_fixture()
@@ -1115,12 +1270,15 @@ class UseExistingTest(fixtures.TablesTest):
autoload=True, extend_existing=True)
assert isinstance(users.c.name.type, Unicode)
+ @testing.skip_if(
+ lambda: testing.db.dialect.requires_name_normalize,
+ "test depends on lowercase as case insensitive")
def test_extend_existing_quote_no_orig(self):
meta2 = self._notexisting_fixture()
users = Table('users', meta2, quote=True,
autoload=True,
extend_existing=True)
- assert users.quote
+ assert users.name.quote
def test_extend_existing_add_column_no_orig(self):
meta2 = self._notexisting_fixture()
@@ -1138,9 +1296,12 @@ class UseExistingTest(fixtures.TablesTest):
def test_extend_existing_quote_no_reflection(self):
meta2 = self._useexisting_fixture()
- users = Table('users', meta2, quote=True,
- extend_existing=True)
- assert users.quote
+ assert_raises_message(
+ tsa.exc.ArgumentError,
+ "Can't redefine 'quote' or 'quote_schema' arguments",
+ Table, 'users', meta2, quote=True,
+ extend_existing=True
+ )
def test_extend_existing_add_column_no_reflection(self):
meta2 = self._useexisting_fixture()
@@ -1546,6 +1707,28 @@ class ColumnDefinitionTest(AssertsCompiledSQL, fixtures.TestBase):
assert c.name == 'named'
assert c.name == c.key
+ def test_unique_index_flags_default_to_none(self):
+ c = Column(Integer)
+ eq_(c.unique, None)
+ eq_(c.index, None)
+
+ c = Column('c', Integer, index=True)
+ eq_(c.unique, None)
+ eq_(c.index, True)
+
+ t = Table('t', MetaData(), c)
+ eq_(list(t.indexes)[0].unique, False)
+
+ c = Column(Integer, unique=True)
+ eq_(c.unique, True)
+ eq_(c.index, None)
+
+ c = Column('c', Integer, index=True, unique=True)
+ eq_(c.unique, True)
+ eq_(c.index, True)
+
+ t = Table('t', MetaData(), c)
+ eq_(list(t.indexes)[0].unique, True)
def test_bogus(self):
assert_raises(exc.ArgumentError, Column, 'foo', name='bar')
@@ -1841,7 +2024,6 @@ class ColumnOptionsTest(fixtures.TestBase):
c.info['bar'] = 'zip'
assert c.info['bar'] == 'zip'
-
class CatchAllEventsTest(fixtures.TestBase):
def teardown(self):
@@ -1890,6 +2072,7 @@ class CatchAllEventsTest(fixtures.TestBase):
parent.__class__.__name__))
def after_attach(obj, parent):
+ assert hasattr(obj, 'name') # so we can change it
canary.append("%s->%s" % (target.__name__, parent))
event.listen(target, "before_parent_attach", before_attach)
event.listen(target, "after_parent_attach", after_attach)
@@ -1897,14 +2080,15 @@ class CatchAllEventsTest(fixtures.TestBase):
for target in [
schema.ForeignKeyConstraint, schema.PrimaryKeyConstraint,
schema.UniqueConstraint,
- schema.CheckConstraint
+ schema.CheckConstraint,
+ schema.Index
]:
evt(target)
m = MetaData()
Table('t1', m,
Column('id', Integer, Sequence('foo_id'), primary_key=True),
- Column('bar', String, ForeignKey('t2.id')),
+ Column('bar', String, ForeignKey('t2.id'), index=True),
Column('bat', Integer, unique=True),
)
Table('t2', m,
@@ -1912,17 +2096,291 @@ class CatchAllEventsTest(fixtures.TestBase):
Column('bar', Integer),
Column('bat', Integer),
CheckConstraint("bar>5"),
- UniqueConstraint('bar', 'bat')
+ UniqueConstraint('bar', 'bat'),
+ Index(None, 'bar', 'bat')
)
eq_(
canary,
[
'PrimaryKeyConstraint->Table', 'PrimaryKeyConstraint->t1',
+ 'Index->Table', 'Index->t1',
'ForeignKeyConstraint->Table', 'ForeignKeyConstraint->t1',
'UniqueConstraint->Table', 'UniqueConstraint->t1',
'PrimaryKeyConstraint->Table', 'PrimaryKeyConstraint->t2',
'CheckConstraint->Table', 'CheckConstraint->t2',
- 'UniqueConstraint->Table', 'UniqueConstraint->t2'
+ 'UniqueConstraint->Table', 'UniqueConstraint->t2',
+ 'Index->Table', 'Index->t2'
]
)
+class DialectKWArgTest(fixtures.TestBase):
+ @contextmanager
+ def _fixture(self):
+ from sqlalchemy.engine.default import DefaultDialect
+ class ParticipatingDialect(DefaultDialect):
+ construct_arguments = [
+ (schema.Index, {
+ "x": 5,
+ "y": False,
+ "z_one": None
+ }),
+ (schema.ForeignKeyConstraint, {
+ "foobar": False
+ })
+ ]
+
+ class ParticipatingDialect2(DefaultDialect):
+ construct_arguments = [
+ (schema.Index, {
+ "x": 9,
+ "y": True,
+ "pp": "default"
+ }),
+ (schema.Table, {
+ "*": None
+ })
+ ]
+
+ class NonParticipatingDialect(DefaultDialect):
+ construct_arguments = None
+
+ def load(dialect_name):
+ if dialect_name == "participating":
+ return ParticipatingDialect
+ elif dialect_name == "participating2":
+ return ParticipatingDialect2
+ elif dialect_name == "nonparticipating":
+ return NonParticipatingDialect
+ else:
+ raise exc.NoSuchModuleError("no dialect %r" % dialect_name)
+ with mock.patch("sqlalchemy.dialects.registry.load", load):
+ yield
+
+ def test_participating(self):
+ with self._fixture():
+ idx = Index('a', 'b', 'c', participating_y=True)
+ eq_(
+ idx.dialect_options,
+ {"participating": {"x": 5, "y": True, "z_one": None}}
+ )
+ eq_(
+ idx.dialect_kwargs,
+ {
+ 'participating_y': True,
+ }
+ )
+
+ def test_nonparticipating(self):
+ with self._fixture():
+ idx = Index('a', 'b', 'c', nonparticipating_y=True, nonparticipating_q=5)
+ eq_(
+ idx.dialect_kwargs,
+ {
+ 'nonparticipating_y': True,
+ 'nonparticipating_q': 5
+ }
+ )
+
+ def test_unknown_dialect_warning(self):
+ with self._fixture():
+ assert_raises_message(
+ exc.SAWarning,
+ "Can't validate argument 'unknown_y'; can't locate "
+ "any SQLAlchemy dialect named 'unknown'",
+ Index, 'a', 'b', 'c', unknown_y=True
+ )
+
+ def test_participating_bad_kw(self):
+ with self._fixture():
+ assert_raises_message(
+ exc.ArgumentError,
+ "Argument 'participating_q_p_x' is not accepted by dialect "
+ "'participating' on behalf of "
+ "<class 'sqlalchemy.sql.schema.Index'>",
+ Index, 'a', 'b', 'c', participating_q_p_x=8
+ )
+
+ def test_participating_unknown_schema_item(self):
+ with self._fixture():
+ # the dialect doesn't include UniqueConstraint in
+ # its registry at all.
+ assert_raises_message(
+ exc.ArgumentError,
+ "Argument 'participating_q_p_x' is not accepted by dialect "
+ "'participating' on behalf of "
+ "<class 'sqlalchemy.sql.schema.UniqueConstraint'>",
+ UniqueConstraint, 'a', 'b', participating_q_p_x=8
+ )
+
+ @testing.emits_warning("Can't validate")
+ def test_unknown_dialect_warning_still_populates(self):
+ with self._fixture():
+ idx = Index('a', 'b', 'c', unknown_y=True)
+ eq_(idx.dialect_kwargs, {"unknown_y": True}) # still populates
+
+ @testing.emits_warning("Can't validate")
+ def test_unknown_dialect_warning_still_populates_multiple(self):
+ with self._fixture():
+ idx = Index('a', 'b', 'c', unknown_y=True, unknown_z=5,
+ otherunknown_foo='bar', participating_y=8)
+ eq_(
+ idx.dialect_options,
+ {
+ "unknown": {'y': True, 'z': 5, '*': None},
+ "otherunknown": {'foo': 'bar', '*': None},
+ "participating": {'x': 5, 'y': 8, 'z_one': None}
+ }
+ )
+ eq_(idx.dialect_kwargs,
+ {'unknown_z': 5, 'participating_y': 8,
+ 'unknown_y': True,
+ 'otherunknown_foo': 'bar'}
+ ) # still populates
+
+ def test_combined(self):
+ with self._fixture():
+ idx = Index('a', 'b', 'c', participating_x=7,
+ nonparticipating_y=True)
+
+ eq_(
+ idx.dialect_options,
+ {
+ 'participating': {'y': False, 'x': 7, 'z_one': None},
+ 'nonparticipating': {'y': True, '*': None}
+ }
+ )
+ eq_(
+ idx.dialect_kwargs,
+ {
+ 'participating_x': 7,
+ 'nonparticipating_y': True,
+ }
+ )
+
+ def test_multiple_participating(self):
+ with self._fixture():
+ idx = Index('a', 'b', 'c',
+ participating_x=7,
+ participating2_x=15,
+ participating2_y="lazy"
+ )
+ eq_(
+ idx.dialect_options,
+ {
+ "participating": {'x': 7, 'y': False, 'z_one': None},
+ "participating2": {'x': 15, 'y': 'lazy', 'pp': 'default'},
+ }
+ )
+ eq_(
+ idx.dialect_kwargs,
+ {
+ 'participating_x': 7,
+ 'participating2_x': 15,
+ 'participating2_y': 'lazy'
+ }
+ )
+
+ def test_foreign_key_propagate(self):
+ with self._fixture():
+ m = MetaData()
+ fk = ForeignKey('t2.id', participating_foobar=True)
+ t = Table('t', m, Column('id', Integer, fk))
+ fkc = [c for c in t.constraints if isinstance(c, ForeignKeyConstraint)][0]
+ eq_(
+ fkc.dialect_kwargs,
+ {'participating_foobar': True}
+ )
+
+ def test_foreign_key_propagate_exceptions_delayed(self):
+ with self._fixture():
+ m = MetaData()
+ fk = ForeignKey('t2.id', participating_fake=True)
+ c1 = Column('id', Integer, fk)
+ assert_raises_message(
+ exc.ArgumentError,
+ "Argument 'participating_fake' is not accepted by "
+ "dialect 'participating' on behalf of "
+ "<class 'sqlalchemy.sql.schema.ForeignKeyConstraint'>",
+ Table, 't', m, c1
+ )
+
+ def test_wildcard(self):
+ with self._fixture():
+ m = MetaData()
+ t = Table('x', m, Column('x', Integer),
+ participating2_xyz='foo',
+ participating2_engine='InnoDB',
+ )
+ eq_(
+ t.dialect_kwargs,
+ {
+ 'participating2_xyz': 'foo',
+ 'participating2_engine': 'InnoDB'
+ }
+ )
+
+ def test_uninit_wildcard(self):
+ with self._fixture():
+ m = MetaData()
+ t = Table('x', m, Column('x', Integer))
+ eq_(
+ t.dialect_options['participating2'], {'*': None}
+ )
+ eq_(
+ t.dialect_kwargs, {}
+ )
+
+ def test_not_contains_wildcard(self):
+ with self._fixture():
+ m = MetaData()
+ t = Table('x', m, Column('x', Integer))
+ assert 'foobar' not in t.dialect_options['participating2']
+
+ def test_contains_wildcard(self):
+ with self._fixture():
+ m = MetaData()
+ t = Table('x', m, Column('x', Integer), participating2_foobar=5)
+ assert 'foobar' in t.dialect_options['participating2']
+
+
+ def test_update(self):
+ with self._fixture():
+ idx = Index('a', 'b', 'c', participating_x=20)
+ eq_(idx.dialect_kwargs, {
+ "participating_x": 20,
+ })
+ idx._validate_dialect_kwargs({
+ "participating_x": 25,
+ "participating_z_one": "default"})
+ eq_(idx.dialect_options, {
+ "participating": {"x": 25, "y": False, "z_one": "default"}
+ })
+ eq_(idx.dialect_kwargs, {
+ "participating_x": 25,
+ 'participating_z_one': "default"
+ })
+
+ idx._validate_dialect_kwargs({
+ "participating_x": 25,
+ "participating_z_one": "default"})
+
+ eq_(idx.dialect_options, {
+ "participating": {"x": 25, "y": False, "z_one": "default"}
+ })
+ eq_(idx.dialect_kwargs, {
+ "participating_x": 25,
+ 'participating_z_one': "default"
+ })
+
+ idx._validate_dialect_kwargs({
+ "participating_y": True,
+ 'participating2_y': "p2y"})
+ eq_(idx.dialect_options, {
+ "participating": {"x": 25, "y": True, "z_one": "default"},
+ "participating2": {"y": "p2y", "pp": "default", "x": 9}
+ })
+ eq_(idx.dialect_kwargs, {
+ "participating_x": 25,
+ "participating_y": True,
+ 'participating2_y': "p2y",
+ "participating_z_one": "default"})
diff --git a/test/sql/test_operators.py b/test/sql/test_operators.py
index b3919d0da..670d088d2 100644
--- a/test/sql/test_operators.py
+++ b/test/sql/test_operators.py
@@ -9,14 +9,18 @@ from sqlalchemy.sql import operators, table
import operator
from sqlalchemy import String, Integer
from sqlalchemy import exc
+from sqlalchemy.engine import default
+from sqlalchemy.sql.elements import _literal_as_text
from sqlalchemy.schema import Column, Table, MetaData
-from sqlalchemy.types import TypeEngine, TypeDecorator, UserDefinedType
+from sqlalchemy.types import TypeEngine, TypeDecorator, UserDefinedType, Boolean
from sqlalchemy.dialects import mysql, firebird, postgresql, oracle, \
sqlite, mssql
from sqlalchemy import util
import datetime
import collections
from sqlalchemy import text, literal_column
+from sqlalchemy import and_, not_, between, or_
+from sqlalchemy.sql import true, false, null
class LoopOperate(operators.ColumnOperators):
def operate(self, op, *other, **kwargs):
@@ -35,11 +39,11 @@ class DefaultColumnComparatorTest(fixtures.TestBase):
left = column('left')
assert left.comparator.operate(operator, right).compare(
- BinaryExpression(left, right, operator)
+ BinaryExpression(_literal_as_text(left), _literal_as_text(right), operator)
)
assert operator(left, right).compare(
- BinaryExpression(left, right, operator)
+ BinaryExpression(_literal_as_text(left), _literal_as_text(right), operator)
)
self._loop_test(operator, right)
@@ -352,7 +356,6 @@ class ExtensionOperatorTest(fixtures.TestBase, testing.AssertsCompiledSQL):
"x -> :x_1"
)
- @testing.requires.python26
def test_op_not_an_iterator(self):
# see [ticket:2726]
class MyType(UserDefinedType):
@@ -385,7 +388,205 @@ class ExtensionOperatorTest(fixtures.TestBase, testing.AssertsCompiledSQL):
"x -> :x_1"
)
-from sqlalchemy import and_, not_, between
+
+class BooleanEvalTest(fixtures.TestBase, testing.AssertsCompiledSQL):
+ """test standalone booleans being wrapped in an AsBoolean, as well
+ as true/false compilation."""
+
+ def _dialect(self, native_boolean):
+ d = default.DefaultDialect()
+ d.supports_native_boolean = native_boolean
+ return d
+
+ def test_one(self):
+ c = column('x', Boolean)
+ self.assert_compile(
+ select([c]).where(c),
+ "SELECT x WHERE x",
+ dialect=self._dialect(True)
+ )
+
+ def test_two(self):
+ c = column('x', Boolean)
+ self.assert_compile(
+ select([c]).where(c),
+ "SELECT x WHERE x = 1",
+ dialect=self._dialect(False)
+ )
+
+ def test_three(self):
+ c = column('x', Boolean)
+ self.assert_compile(
+ select([c]).where(~c),
+ "SELECT x WHERE x = 0",
+ dialect=self._dialect(False)
+ )
+
+ def test_four(self):
+ c = column('x', Boolean)
+ self.assert_compile(
+ select([c]).where(~c),
+ "SELECT x WHERE NOT x",
+ dialect=self._dialect(True)
+ )
+
+ def test_five(self):
+ c = column('x', Boolean)
+ self.assert_compile(
+ select([c]).having(c),
+ "SELECT x HAVING x = 1",
+ dialect=self._dialect(False)
+ )
+
+ def test_six(self):
+ self.assert_compile(
+ or_(false(), true()),
+ "1 = 1",
+ dialect=self._dialect(False)
+ )
+
+ def test_eight(self):
+ self.assert_compile(
+ and_(false(), true()),
+ "false",
+ dialect=self._dialect(True)
+ )
+
+ def test_nine(self):
+ self.assert_compile(
+ and_(false(), true()),
+ "0 = 1",
+ dialect=self._dialect(False)
+ )
+
+ def test_ten(self):
+ c = column('x', Boolean)
+ self.assert_compile(
+ c == 1,
+ "x = :x_1",
+ dialect=self._dialect(False)
+ )
+
+ def test_eleven(self):
+ c = column('x', Boolean)
+ self.assert_compile(
+ c.is_(true()),
+ "x IS true",
+ dialect=self._dialect(True)
+ )
+
+ def test_twelve(self):
+ c = column('x', Boolean)
+ # I don't have a solution for this one yet,
+ # other than adding some heavy-handed conditionals
+ # into compiler
+ self.assert_compile(
+ c.is_(true()),
+ "x IS 1",
+ dialect=self._dialect(False)
+ )
+
+
+class ConjunctionTest(fixtures.TestBase, testing.AssertsCompiledSQL):
+ """test interaction of and_()/or_() with boolean , null constants
+ """
+ __dialect__ = default.DefaultDialect(supports_native_boolean=True)
+
+ def test_one(self):
+ self.assert_compile(~and_(true()), "false")
+
+ def test_two(self):
+ self.assert_compile(or_(~and_(true())), "false")
+
+ def test_three(self):
+ self.assert_compile(or_(and_()), "")
+
+ def test_four(self):
+ x = column('x')
+ self.assert_compile(
+ and_(or_(x == 5), or_(x == 7)),
+ "x = :x_1 AND x = :x_2")
+
+
+ def test_five(self):
+ x = column("x")
+ self.assert_compile(
+ and_(true()._ifnone(None), x == 7),
+ "x = :x_1"
+ )
+
+ def test_six(self):
+ x = column("x")
+ self.assert_compile(or_(true(), x == 7), "true")
+ self.assert_compile(or_(x == 7, true()), "true")
+ self.assert_compile(~or_(x == 7, true()), "false")
+
+ def test_six_pt_five(self):
+ x = column("x")
+ self.assert_compile(select([x]).where(or_(x == 7, true())),
+ "SELECT x WHERE true")
+
+ self.assert_compile(select([x]).where(or_(x == 7, true())),
+ "SELECT x WHERE 1 = 1",
+ dialect=default.DefaultDialect(supports_native_boolean=False))
+
+ def test_seven(self):
+ x = column("x")
+ self.assert_compile(
+ and_(true(), x == 7, true(), x == 9),
+ "x = :x_1 AND x = :x_2")
+
+ def test_eight(self):
+ x = column("x")
+ self.assert_compile(
+ or_(false(), x == 7, false(), x == 9),
+ "x = :x_1 OR x = :x_2")
+
+ def test_nine(self):
+ x = column("x")
+ self.assert_compile(
+ and_(x == 7, x == 9, false(), x == 5),
+ "false"
+ )
+ self.assert_compile(
+ ~and_(x == 7, x == 9, false(), x == 5),
+ "true"
+ )
+
+ def test_ten(self):
+ self.assert_compile(
+ and_(None, None),
+ "NULL AND NULL"
+ )
+
+ def test_eleven(self):
+ x = column("x")
+ self.assert_compile(
+ select([x]).where(None).where(None),
+ "SELECT x WHERE NULL AND NULL"
+ )
+
+ def test_twelve(self):
+ x = column("x")
+ self.assert_compile(
+ select([x]).where(and_(None, None)),
+ "SELECT x WHERE NULL AND NULL"
+ )
+
+ def test_thirteen(self):
+ x = column("x")
+ self.assert_compile(
+ select([x]).where(~and_(None, None)),
+ "SELECT x WHERE NOT (NULL AND NULL)"
+ )
+
+ def test_fourteen(self):
+ x = column("x")
+ self.assert_compile(
+ select([x]).where(~null()),
+ "SELECT x WHERE NOT NULL"
+ )
+
class OperatorPrecedenceTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
@@ -472,6 +673,58 @@ class OperatorPrecedenceTest(fixtures.TestBase, testing.AssertsCompiledSQL):
self.table2.c.field).is_(None)),
"SELECT op.field FROM op WHERE (op.field MATCH op.field) IS NULL")
+ def test_operator_precedence_collate_1(self):
+ self.assert_compile(
+ self.table1.c.name == literal('foo').collate('utf-8'),
+ "mytable.name = (:param_1 COLLATE utf-8)"
+ )
+
+ def test_operator_precedence_collate_2(self):
+ self.assert_compile(
+ (self.table1.c.name == literal('foo')).collate('utf-8'),
+ "mytable.name = :param_1 COLLATE utf-8"
+ )
+
+ def test_operator_precedence_collate_3(self):
+ self.assert_compile(
+ self.table1.c.name.collate('utf-8') == 'foo',
+ "(mytable.name COLLATE utf-8) = :param_1"
+ )
+
+ def test_operator_precedence_collate_4(self):
+ self.assert_compile(
+ and_(
+ (self.table1.c.name == literal('foo')).collate('utf-8'),
+ (self.table2.c.field == literal('bar')).collate('utf-8'),
+ ),
+ "mytable.name = :param_1 COLLATE utf-8 "
+ "AND op.field = :param_2 COLLATE utf-8"
+ )
+
+ def test_operator_precedence_collate_5(self):
+ self.assert_compile(
+ select([self.table1.c.name]).order_by(
+ self.table1.c.name.collate('utf-8').desc()),
+ "SELECT mytable.name FROM mytable "
+ "ORDER BY mytable.name COLLATE utf-8 DESC"
+ )
+
+ def test_operator_precedence_collate_6(self):
+ self.assert_compile(
+ select([self.table1.c.name]).order_by(
+ self.table1.c.name.collate('utf-8').desc().nullslast()),
+ "SELECT mytable.name FROM mytable "
+ "ORDER BY mytable.name COLLATE utf-8 DESC NULLS LAST"
+ )
+
+ def test_operator_precedence_collate_7(self):
+ self.assert_compile(
+ select([self.table1.c.name]).order_by(
+ self.table1.c.name.collate('utf-8').asc()),
+ "SELECT mytable.name FROM mytable "
+ "ORDER BY mytable.name COLLATE utf-8 ASC"
+ )
+
def test_commutative_operators(self):
self.assert_compile(
literal("a") + literal("b") * literal("c"),
@@ -768,6 +1021,17 @@ class InTest(fixtures.TestBase, testing.AssertsCompiledSQL):
"mytable.myid IN (NULL)"
)
+ @testing.emits_warning('.*empty sequence.*')
+ def test_in_29(self):
+ self.assert_compile(self.table1.c.myid.notin_([]),
+ "mytable.myid = mytable.myid")
+
+ @testing.emits_warning('.*empty sequence.*')
+ def test_in_30(self):
+ self.assert_compile(~self.table1.c.myid.in_([]),
+ "mytable.myid = mytable.myid")
+
+
class MathOperatorTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
diff --git a/test/sql/test_query.py b/test/sql/test_query.py
index 39c896266..40c63b179 100644
--- a/test/sql/test_query.py
+++ b/test/sql/test_query.py
@@ -68,7 +68,7 @@ class QueryTest(fixtures.TestBase):
r"A value is required for bind parameter 'user_name', in "
"parameter group 2 \(original cause: (sqlalchemy.exc.)?InvalidRequestError: A "
"value is required for bind parameter 'user_name', in "
- "parameter group 2\) 'INSERT INTO query_users",
+ "parameter group 2\) u?'INSERT INTO query_users",
users.insert().execute,
{'user_id':7, 'user_name':'jack'},
{'user_id':8, 'user_name':'ed'},
@@ -1090,6 +1090,19 @@ class QueryTest(fixtures.TestBase):
eq_(len(r), 1)
+ def test_sorting_in_python(self):
+ users.insert().execute(
+ dict(user_id=1, user_name='foo'),
+ dict(user_id=2, user_name='bar'),
+ dict(user_id=3, user_name='def'),
+ )
+
+ rows = users.select().order_by(users.c.user_name).execute().fetchall()
+
+ eq_(rows, [(2, 'bar'), (3, 'def'), (1, 'foo')])
+
+ eq_(sorted(rows), [(1, 'foo'), (2, 'bar'), (3, 'def')])
+
def test_column_order_with_simple_query(self):
# should return values in column definition order
users.insert().execute(user_id=1, user_name='foo')
@@ -1110,7 +1123,6 @@ class QueryTest(fixtures.TestBase):
@testing.crashes('oracle', 'FIXME: unknown, varify not fails_on()')
@testing.crashes('firebird', 'An identifier must begin with a letter')
- @testing.crashes('maxdb', 'FIXME: unknown, verify not fails_on()')
def test_column_accessor_shadow(self):
meta = MetaData(testing.db)
shadowed = Table('test_shadowed', meta,
@@ -1900,7 +1912,6 @@ class CompoundTest(fixtures.TestBase):
eq_(u.execute().fetchall(), wanted)
@testing.fails_on('firebird', "doesn't like ORDER BY with UNIONs")
- @testing.fails_on('maxdb', 'FIXME: unknown')
@testing.requires.subqueries
def test_union_ordered_alias(self):
(s1, s2) = (
@@ -1919,7 +1930,6 @@ class CompoundTest(fixtures.TestBase):
@testing.fails_on('firebird', "has trouble extracting anonymous column from union subquery")
@testing.fails_on('mysql', 'FIXME: unknown')
@testing.fails_on('sqlite', 'FIXME: unknown')
- @testing.fails_on('informix', "FIXME: unknown (maybe the second alias isn't allows)")
def test_union_all(self):
e = union_all(
select([t1.c.col3]),
diff --git a/test/sql/test_quote.py b/test/sql/test_quote.py
index c92f1ac80..3cab3dc79 100644
--- a/test/sql/test_quote.py
+++ b/test/sql/test_quote.py
@@ -1,9 +1,10 @@
from sqlalchemy import *
from sqlalchemy import sql, schema
from sqlalchemy.sql import compiler
-from sqlalchemy.testing import fixtures, AssertsCompiledSQL
+from sqlalchemy.testing import fixtures, AssertsCompiledSQL, eq_
from sqlalchemy import testing
-
+from sqlalchemy.sql.elements import quoted_name, _truncated_label, _anonymous_label
+from sqlalchemy.testing.util import picklers
class QuoteTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -61,6 +62,49 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL):
assert 'MixedCase' in t2.c
+ @testing.provide_metadata
+ def test_has_table_case_sensitive(self):
+ preparer = testing.db.dialect.identifier_preparer
+ if testing.db.dialect.requires_name_normalize:
+ testing.db.execute("CREATE TABLE TAB1 (id INTEGER)")
+ else:
+ testing.db.execute("CREATE TABLE tab1 (id INTEGER)")
+ testing.db.execute('CREATE TABLE %s (id INTEGER)' %
+ preparer.quote_identifier("tab2"))
+ testing.db.execute('CREATE TABLE %s (id INTEGER)' %
+ preparer.quote_identifier("TAB3"))
+ testing.db.execute('CREATE TABLE %s (id INTEGER)' %
+ preparer.quote_identifier("TAB4"))
+
+ t1 = Table('tab1', self.metadata,
+ Column('id', Integer, primary_key=True),
+ )
+ t2 = Table('tab2', self.metadata,
+ Column('id', Integer, primary_key=True),
+ quote=True
+ )
+ t3 = Table('TAB3', self.metadata,
+ Column('id', Integer, primary_key=True),
+ )
+ t4 = Table('TAB4', self.metadata,
+ Column('id', Integer, primary_key=True),
+ quote=True)
+
+ insp = inspect(testing.db)
+ assert testing.db.has_table(t1.name)
+ eq_([c['name'] for c in insp.get_columns(t1.name)], ['id'])
+
+ assert testing.db.has_table(t2.name)
+ eq_([c['name'] for c in insp.get_columns(t2.name)], ['id'])
+
+ assert testing.db.has_table(t3.name)
+ eq_([c['name'] for c in insp.get_columns(t3.name)], ['id'])
+
+ assert testing.db.has_table(t4.name)
+ eq_([c['name'] for c in insp.get_columns(t4.name)], ['id'])
+
+
+
def test_basic(self):
table1.insert().execute(
{'lowercase': 1, 'UPPERCASE': 2, 'MixedCase': 3, 'a123': 4},
@@ -299,7 +343,7 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL):
'FROM create.foreign'
)
- def test_subquery(self):
+ def test_subquery_one(self):
# Lower case names, should not quote
metadata = MetaData()
t1 = Table('t1', metadata,
@@ -318,6 +362,7 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL):
'WHERE anon.col1 = :col1_1'
)
+ def test_subquery_two(self):
# Lower case names, quotes on, should quote
metadata = MetaData()
t1 = Table('t1', metadata,
@@ -336,6 +381,7 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL):
'WHERE anon."col1" = :col1_1'
)
+ def test_subquery_three(self):
# Not lower case names, should quote
metadata = MetaData()
t1 = Table('T1', metadata,
@@ -355,6 +401,8 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL):
'"Anon"."Col1" = :Col1_1'
)
+ def test_subquery_four(self):
+
# Not lower case names, quotes off, should not quote
metadata = MetaData()
t1 = Table('T1', metadata,
@@ -513,7 +561,7 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL):
') AS "Alias1"'
)
- def test_apply_labels(self):
+ def test_apply_labels_should_quote(self):
# Not lower case names, should quote
metadata = MetaData()
t1 = Table('T1', metadata,
@@ -527,6 +575,7 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL):
'"Foo"."T1"'
)
+ def test_apply_labels_shouldnt_quote(self):
# Not lower case names, quotes off
metadata = MetaData()
t1 = Table('T1', metadata,
@@ -563,7 +612,20 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL):
'CREATE INDEX foo ON t ("x")'
)
+ def test_quote_flag_propagate_anon_label(self):
+ m = MetaData()
+ t = Table('t', m, Column('x', Integer, quote=True))
+ self.assert_compile(
+ select([t.alias()]).apply_labels(),
+ 'SELECT t_1."x" AS "t_1_x" FROM t AS t_1'
+ )
+
+ t2 = Table('t2', m, Column('x', Integer), quote=True)
+ self.assert_compile(
+ select([t2.c.x]).apply_labels(),
+ 'SELECT "t2".x AS "t2_x" FROM "t2"'
+ )
class PreparerTest(fixtures.TestBase):
"""Test the db-agnostic quoting services of IdentifierPreparer."""
@@ -619,3 +681,95 @@ class PreparerTest(fixtures.TestBase):
a_eq(unformat('`foo`.bar'), ['foo', 'bar'])
a_eq(unformat('`foo`.`b``a``r`.`baz`'), ['foo', 'b`a`r', 'baz'])
+class QuotedIdentTest(fixtures.TestBase):
+ def test_concat_quotetrue(self):
+ q1 = quoted_name("x", True)
+ self._assert_not_quoted("y" + q1)
+
+ def test_concat_quotefalse(self):
+ q1 = quoted_name("x", False)
+ self._assert_not_quoted("y" + q1)
+
+ def test_concat_quotenone(self):
+ q1 = quoted_name("x", None)
+ self._assert_not_quoted("y" + q1)
+
+ def test_rconcat_quotetrue(self):
+ q1 = quoted_name("x", True)
+ self._assert_not_quoted("y" + q1)
+
+ def test_rconcat_quotefalse(self):
+ q1 = quoted_name("x", False)
+ self._assert_not_quoted("y" + q1)
+
+ def test_rconcat_quotenone(self):
+ q1 = quoted_name("x", None)
+ self._assert_not_quoted("y" + q1)
+
+ def test_concat_anon(self):
+ q1 = _anonymous_label(quoted_name("x", True))
+ assert isinstance(q1, _anonymous_label)
+ value = q1 + "y"
+ assert isinstance(value, _anonymous_label)
+ self._assert_quoted(value, True)
+
+ def test_rconcat_anon(self):
+ q1 = _anonymous_label(quoted_name("x", True))
+ assert isinstance(q1, _anonymous_label)
+ value = "y" + q1
+ assert isinstance(value, _anonymous_label)
+ self._assert_quoted(value, True)
+
+ def test_coerce_quoted_switch(self):
+ q1 = quoted_name("x", False)
+ q2 = quoted_name(q1, True)
+ eq_(q2.quote, True)
+
+ def test_coerce_quoted_none(self):
+ q1 = quoted_name("x", False)
+ q2 = quoted_name(q1, None)
+ eq_(q2.quote, False)
+
+ def test_coerce_quoted_retain(self):
+ q1 = quoted_name("x", False)
+ q2 = quoted_name(q1, False)
+ eq_(q2.quote, False)
+
+ def test_coerce_none(self):
+ q1 = quoted_name(None, False)
+ eq_(q1, None)
+
+ def test_apply_map_quoted(self):
+ q1 = _anonymous_label(quoted_name("x%s", True))
+ q2 = q1.apply_map(('bar'))
+ eq_(q2, "xbar")
+ eq_(q2.quote, True)
+
+ def test_apply_map_plain(self):
+ q1 = _anonymous_label(quoted_name("x%s", None))
+ q2 = q1.apply_map(('bar'))
+ eq_(q2, "xbar")
+ self._assert_not_quoted(q2)
+
+ def test_pickle_quote(self):
+ q1 = quoted_name("x", True)
+ for loads, dumps in picklers():
+ q2 = loads(dumps(q1))
+ eq_(str(q1), str(q2))
+ eq_(q1.quote, q2.quote)
+
+ def test_pickle_anon_label(self):
+ q1 = _anonymous_label(quoted_name("x", True))
+ for loads, dumps in picklers():
+ q2 = loads(dumps(q1))
+ assert isinstance(q2, _anonymous_label)
+ eq_(str(q1), str(q2))
+ eq_(q1.quote, q2.quote)
+
+ def _assert_quoted(self, value, quote):
+ assert isinstance(value, quoted_name)
+ eq_(value.quote, quote)
+
+ def _assert_not_quoted(self, value):
+ assert not isinstance(value, quoted_name)
+
diff --git a/test/sql/test_returning.py b/test/sql/test_returning.py
index 6a42b0625..e7245aa3c 100644
--- a/test/sql/test_returning.py
+++ b/test/sql/test_returning.py
@@ -6,6 +6,7 @@ from sqlalchemy.types import TypeDecorator
from sqlalchemy.testing import fixtures, AssertsExecutionResults, engines, \
assert_raises_message
from sqlalchemy import exc as sa_exc
+import itertools
class ReturningTest(fixtures.TestBase, AssertsExecutionResults):
__requires__ = 'returning',
@@ -184,6 +185,129 @@ class KeyReturningTest(fixtures.TestBase, AssertsExecutionResults):
assert row[table.c.foo_id] == row['id'] == 1
+class ReturnDefaultsTest(fixtures.TablesTest):
+ __requires__ = ('returning', )
+ run_define_tables = 'each'
+
+ @classmethod
+ def define_tables(cls, metadata):
+ from sqlalchemy.sql import ColumnElement
+ from sqlalchemy.ext.compiler import compiles
+
+ counter = itertools.count()
+
+ class IncDefault(ColumnElement):
+ pass
+
+ @compiles(IncDefault)
+ def compile(element, compiler, **kw):
+ return str(next(counter))
+
+ Table("t1", metadata,
+ Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
+ Column("data", String(50)),
+ Column("insdef", Integer, default=IncDefault()),
+ Column("upddef", Integer, onupdate=IncDefault())
+ )
+
+ def test_chained_insert_pk(self):
+ t1 = self.tables.t1
+ result = testing.db.execute(
+ t1.insert().values(upddef=1).return_defaults(t1.c.insdef)
+ )
+ eq_(
+ [result.returned_defaults[k] for k in (t1.c.id, t1.c.insdef)],
+ [1, 0]
+ )
+
+ def test_arg_insert_pk(self):
+ t1 = self.tables.t1
+ result = testing.db.execute(
+ t1.insert(return_defaults=[t1.c.insdef]).values(upddef=1)
+ )
+ eq_(
+ [result.returned_defaults[k] for k in (t1.c.id, t1.c.insdef)],
+ [1, 0]
+ )
+
+ def test_chained_update_pk(self):
+ t1 = self.tables.t1
+ testing.db.execute(
+ t1.insert().values(upddef=1)
+ )
+ result = testing.db.execute(t1.update().values(data='d1').
+ return_defaults(t1.c.upddef))
+ eq_(
+ [result.returned_defaults[k] for k in (t1.c.upddef,)],
+ [1]
+ )
+
+ def test_arg_update_pk(self):
+ t1 = self.tables.t1
+ testing.db.execute(
+ t1.insert().values(upddef=1)
+ )
+ result = testing.db.execute(t1.update(return_defaults=[t1.c.upddef]).
+ values(data='d1'))
+ eq_(
+ [result.returned_defaults[k] for k in (t1.c.upddef,)],
+ [1]
+ )
+
+ def test_insert_non_default(self):
+ """test that a column not marked at all as a
+ default works with this feature."""
+
+ t1 = self.tables.t1
+ result = testing.db.execute(
+ t1.insert().values(upddef=1).return_defaults(t1.c.data)
+ )
+ eq_(
+ [result.returned_defaults[k] for k in (t1.c.id, t1.c.data,)],
+ [1, None]
+ )
+
+ def test_update_non_default(self):
+ """test that a column not marked at all as a
+ default works with this feature."""
+
+ t1 = self.tables.t1
+ testing.db.execute(
+ t1.insert().values(upddef=1)
+ )
+ result = testing.db.execute(t1.update().
+ values(upddef=2).return_defaults(t1.c.data))
+ eq_(
+ [result.returned_defaults[k] for k in (t1.c.data,)],
+ [None]
+ )
+
+ @testing.fails_on("oracle+cx_oracle", "seems like a cx_oracle bug")
+ def test_insert_non_default_plus_default(self):
+ t1 = self.tables.t1
+ result = testing.db.execute(
+ t1.insert().values(upddef=1).return_defaults(
+ t1.c.data, t1.c.insdef)
+ )
+ eq_(
+ dict(result.returned_defaults),
+ {"id": 1, "data": None, "insdef": 0}
+ )
+
+ @testing.fails_on("oracle+cx_oracle", "seems like a cx_oracle bug")
+ def test_update_non_default_plus_default(self):
+ t1 = self.tables.t1
+ testing.db.execute(
+ t1.insert().values(upddef=1)
+ )
+ result = testing.db.execute(t1.update().
+ values(insdef=2).return_defaults(
+ t1.c.data, t1.c.upddef))
+ eq_(
+ dict(result.returned_defaults),
+ {"data": None, 'upddef': 1}
+ )
+
class ImplicitReturningFlag(fixtures.TestBase):
def test_flag_turned_off(self):
e = engines.testing_engine(options={'implicit_returning':False})
diff --git a/test/sql/test_selectable.py b/test/sql/test_selectable.py
index df174fb25..8c7bf43b0 100644
--- a/test/sql/test_selectable.py
+++ b/test/sql/test_selectable.py
@@ -10,6 +10,7 @@ from sqlalchemy.sql import util as sql_util, visitors, expression
from sqlalchemy import exc
from sqlalchemy.sql import table, column, null
from sqlalchemy import util
+from sqlalchemy.schema import Column, Table, MetaData
metadata = MetaData()
table1 = Table('table1', metadata,
@@ -513,6 +514,18 @@ class SelectableTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled
"SELECT c FROM (SELECT (SELECT (SELECT table1.col1 AS a FROM table1) AS b) AS c)"
)
+ def test_self_referential_select_raises(self):
+ t = table('t', column('x'))
+
+ s = select([t])
+
+ s.append_whereclause(s.c.x > 5)
+ assert_raises_message(
+ exc.InvalidRequestError,
+ r"select\(\) construct refers to itself as a FROM",
+ s.compile
+ )
+
def test_unusual_column_elements_text(self):
"""test that .c excludes text()."""
@@ -1460,6 +1473,12 @@ class AnnotationsTest(fixtures.TestBase):
c1.name = 'somename'
eq_(c1_a.name, 'somename')
+ def test_late_table_add(self):
+ c1 = Column("foo", Integer)
+ c1_a = c1._annotate({"foo": "bar"})
+ t = Table('t', MetaData(), c1)
+ is_(c1_a.table, t)
+
def test_custom_constructions(self):
from sqlalchemy.schema import Column
class MyColumn(Column):
@@ -1884,3 +1903,64 @@ class WithLabelsTest(fixtures.TestBase):
['t1_x', 't2_x']
)
self._assert_result_keys(sel, ['t1_a', 't2_b'])
+
+class ForUpdateTest(fixtures.TestBase, AssertsCompiledSQL):
+ __dialect__ = "default"
+
+ def _assert_legacy(self, leg, read=False, nowait=False):
+ t = table('t', column('c'))
+ s1 = select([t], for_update=leg)
+
+ if leg is False:
+ assert s1._for_update_arg is None
+ assert s1.for_update is None
+ else:
+ eq_(
+ s1._for_update_arg.read, read
+ )
+ eq_(
+ s1._for_update_arg.nowait, nowait
+ )
+ eq_(s1.for_update, leg)
+
+ def test_false_legacy(self):
+ self._assert_legacy(False)
+
+ def test_plain_true_legacy(self):
+ self._assert_legacy(True)
+
+ def test_read_legacy(self):
+ self._assert_legacy("read", read=True)
+
+ def test_nowait_legacy(self):
+ self._assert_legacy("nowait", nowait=True)
+
+ def test_read_nowait_legacy(self):
+ self._assert_legacy("read_nowait", read=True, nowait=True)
+
+ def test_legacy_setter(self):
+ t = table('t', column('c'))
+ s = select([t])
+ s.for_update = 'nowait'
+ eq_(s._for_update_arg.nowait, True)
+
+ def test_basic_clone(self):
+ t = table('t', column('c'))
+ s = select([t]).with_for_update(read=True, of=t.c.c)
+ s2 = visitors.ReplacingCloningVisitor().traverse(s)
+ assert s2._for_update_arg is not s._for_update_arg
+ eq_(s2._for_update_arg.read, True)
+ eq_(s2._for_update_arg.of, [t.c.c])
+ self.assert_compile(s2,
+ "SELECT t.c FROM t FOR SHARE OF t",
+ dialect="postgresql")
+
+ def test_adapt(self):
+ t = table('t', column('c'))
+ s = select([t]).with_for_update(read=True, of=t.c.c)
+ a = t.alias()
+ s2 = sql_util.ClauseAdapter(a).traverse(s)
+ eq_(s2._for_update_arg.of, [a.c.c])
+ self.assert_compile(s2,
+ "SELECT t_1.c FROM t AS t_1 FOR SHARE OF t_1",
+ dialect="postgresql")
diff --git a/test/sql/test_text.py b/test/sql/test_text.py
new file mode 100644
index 000000000..37346437e
--- /dev/null
+++ b/test/sql/test_text.py
@@ -0,0 +1,371 @@
+"""Test the TextClause and related constructs."""
+
+from sqlalchemy.testing import fixtures, AssertsCompiledSQL, eq_, assert_raises_message
+from sqlalchemy import text, select, Integer, String, Float, \
+ bindparam, and_, func, literal_column, exc
+from sqlalchemy.types import NullType
+from sqlalchemy.sql import table, column
+
+table1 = table('mytable',
+ column('myid', Integer),
+ column('name', String),
+ column('description', String),
+)
+
+table2 = table(
+ 'myothertable',
+ column('otherid', Integer),
+ column('othername', String),
+)
+
+class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
+ __dialect__ = 'default'
+
+ def test_basic(self):
+ self.assert_compile(
+ text("select * from foo where lala = bar"),
+ "select * from foo where lala = bar"
+ )
+
+class SelectCompositionTest(fixtures.TestBase, AssertsCompiledSQL):
+ """test the usage of text() implicit within the select() construct
+ when strings are passed."""
+
+ __dialect__ = 'default'
+
+ def test_select_composition_one(self):
+ self.assert_compile(select(
+ ["foobar(a)", "pk_foo_bar(syslaal)"],
+ "a = 12",
+ from_obj=["foobar left outer join lala on foobar.foo = lala.foo"]
+ ),
+ "SELECT foobar(a), pk_foo_bar(syslaal) FROM foobar "
+ "left outer join lala on foobar.foo = lala.foo WHERE a = 12"
+ )
+
+ def test_select_composition_two(self):
+ s = select()
+ s.append_column("column1")
+ s.append_column("column2")
+ s.append_whereclause("column1=12")
+ s.append_whereclause("column2=19")
+ s = s.order_by("column1")
+ s.append_from("table1")
+ self.assert_compile(s, "SELECT column1, column2 FROM table1 WHERE "
+ "column1=12 AND column2=19 ORDER BY column1")
+
+ def test_select_composition_three(self):
+ self.assert_compile(
+ select(["column1", "column2"],
+ from_obj=table1).alias('somealias').select(),
+ "SELECT somealias.column1, somealias.column2 FROM "
+ "(SELECT column1, column2 FROM mytable) AS somealias"
+ )
+
+ def test_select_composition_four(self):
+ # test that use_labels doesnt interfere with literal columns
+ self.assert_compile(
+ select(["column1", "column2", table1.c.myid], from_obj=table1,
+ use_labels=True),
+ "SELECT column1, column2, mytable.myid AS mytable_myid "
+ "FROM mytable"
+ )
+
+ def test_select_composition_five(self):
+ # test that use_labels doesnt interfere
+ # with literal columns that have textual labels
+ self.assert_compile(
+ select(["column1 AS foobar", "column2 AS hoho", table1.c.myid],
+ from_obj=table1, use_labels=True),
+ "SELECT column1 AS foobar, column2 AS hoho, "
+ "mytable.myid AS mytable_myid FROM mytable"
+ )
+
+ def test_select_composition_six(self):
+ # test that "auto-labeling of subquery columns"
+ # doesnt interfere with literal columns,
+ # exported columns dont get quoted
+ self.assert_compile(
+ select(["column1 AS foobar", "column2 AS hoho", table1.c.myid],
+ from_obj=[table1]).select(),
+ "SELECT column1 AS foobar, column2 AS hoho, myid FROM "
+ "(SELECT column1 AS foobar, column2 AS hoho, "
+ "mytable.myid AS myid FROM mytable)"
+ )
+
+ def test_select_composition_seven(self):
+ self.assert_compile(
+ select(['col1', 'col2'], from_obj='tablename').alias('myalias'),
+ "SELECT col1, col2 FROM tablename"
+ )
+
+ def test_select_composition_eight(self):
+ self.assert_compile(select(
+ [table1.alias('t'), "foo.f"],
+ "foo.f = t.id",
+ from_obj=["(select f from bar where lala=heyhey) foo"]
+ ),
+ "SELECT t.myid, t.name, t.description, foo.f FROM mytable AS t, "
+ "(select f from bar where lala=heyhey) foo WHERE foo.f = t.id")
+
+ def test_select_bundle_columns(self):
+ self.assert_compile(select(
+ [table1, table2.c.otherid, "sysdate()", "foo, bar, lala"],
+ and_(
+ "foo.id = foofoo(lala)",
+ "datetime(foo) = Today",
+ table1.c.myid == table2.c.otherid,
+ )
+ ),
+ "SELECT mytable.myid, mytable.name, mytable.description, "
+ "myothertable.otherid, sysdate(), foo, bar, lala "
+ "FROM mytable, myothertable WHERE foo.id = foofoo(lala) AND "
+ "datetime(foo) = Today AND mytable.myid = myothertable.otherid")
+
+class BindParamTest(fixtures.TestBase, AssertsCompiledSQL):
+ __dialect__ = 'default'
+
+ def test_legacy(self):
+ t = text("select * from foo where lala=:bar and hoho=:whee",
+ bindparams=[bindparam('bar', 4), bindparam('whee', 7)])
+
+ self.assert_compile(
+ t,
+ "select * from foo where lala=:bar and hoho=:whee",
+ checkparams={'bar': 4, 'whee': 7},
+ )
+
+ def test_positional(self):
+ t = text("select * from foo where lala=:bar and hoho=:whee")
+ t = t.bindparams(bindparam('bar', 4), bindparam('whee', 7))
+
+ self.assert_compile(
+ t,
+ "select * from foo where lala=:bar and hoho=:whee",
+ checkparams={'bar': 4, 'whee': 7},
+ )
+
+ def test_kw(self):
+ t = text("select * from foo where lala=:bar and hoho=:whee")
+ t = t.bindparams(bar=4, whee=7)
+
+ self.assert_compile(
+ t,
+ "select * from foo where lala=:bar and hoho=:whee",
+ checkparams={'bar': 4, 'whee': 7},
+ )
+
+ def test_positional_plus_kw(self):
+ t = text("select * from foo where lala=:bar and hoho=:whee")
+ t = t.bindparams(bindparam('bar', 4), whee=7)
+
+ self.assert_compile(
+ t,
+ "select * from foo where lala=:bar and hoho=:whee",
+ checkparams={'bar': 4, 'whee': 7},
+ )
+
+ def test_literal_binds(self):
+ t = text("select * from foo where lala=:bar and hoho=:whee")
+ t = t.bindparams(bindparam('bar', 4), whee='whee')
+
+ self.assert_compile(
+ t,
+ "select * from foo where lala=4 and hoho='whee'",
+ checkparams={},
+ literal_binds=True
+ )
+
+ def _assert_type_map(self, t, compare):
+ map_ = dict(
+ (b.key, b.type) for b in t._bindparams.values()
+ )
+ for k in compare:
+ assert compare[k]._type_affinity is map_[k]._type_affinity
+
+ def test_typing_construction(self):
+ t = text("select * from table :foo :bar :bat")
+
+ self._assert_type_map(t, {"foo": NullType(),
+ "bar": NullType(),
+ "bat": NullType()})
+
+ t = t.bindparams(bindparam('foo', type_=String))
+
+ self._assert_type_map(t, {"foo": String(),
+ "bar": NullType(),
+ "bat": NullType()})
+
+ t = t.bindparams(bindparam('bar', type_=Integer))
+
+ self._assert_type_map(t, {"foo": String(),
+ "bar": Integer(),
+ "bat": NullType()})
+
+ t = t.bindparams(bat=45.564)
+
+ self._assert_type_map(t, {"foo": String(),
+ "bar": Integer(),
+ "bat": Float()})
+
+
+ def test_binds_compiled_named(self):
+ self.assert_compile(
+ text("select * from foo where lala=:bar and hoho=:whee").
+ bindparams(bar=4, whee=7),
+ "select * from foo where lala=%(bar)s and hoho=%(whee)s",
+ checkparams={'bar': 4, 'whee': 7},
+ dialect="postgresql"
+ )
+
+ def test_binds_compiled_positional(self):
+ self.assert_compile(
+ text("select * from foo where lala=:bar and hoho=:whee").
+ bindparams(bar=4, whee=7),
+ "select * from foo where lala=? and hoho=?",
+ checkparams={'bar': 4, 'whee': 7},
+ dialect="sqlite"
+ )
+
+ def test_missing_bind_kw(self):
+ assert_raises_message(
+ exc.ArgumentError,
+ "This text\(\) construct doesn't define a bound parameter named 'bar'",
+ text(":foo").bindparams,
+ foo=5, bar=7
+ )
+
+ def test_missing_bind_posn(self):
+ assert_raises_message(
+ exc.ArgumentError,
+ "This text\(\) construct doesn't define a bound parameter named 'bar'",
+ text(":foo").bindparams,
+ bindparam('foo', value=5), bindparam('bar', value=7)
+ )
+
+ def test_escaping_colons(self):
+ # test escaping out text() params with a backslash
+ self.assert_compile(
+ text("select * from foo where clock='05:06:07' "
+ "and mork='\:mindy'"),
+ "select * from foo where clock='05:06:07' and mork=':mindy'",
+ checkparams={},
+ params={},
+ dialect="postgresql"
+ )
+
+
+ def test_text_in_select_nonfrom(self):
+
+ generate_series = text("generate_series(:x, :y, :z) as s(a)").\
+ bindparams(x=None, y=None, z=None)
+
+ s = select([
+ (func.current_date() + literal_column("s.a")).label("dates")
+ ]).select_from(generate_series)
+
+ self.assert_compile(
+ s,
+ "SELECT CURRENT_DATE + s.a AS dates FROM "
+ "generate_series(:x, :y, :z) as s(a)",
+ checkparams={'y': None, 'x': None, 'z': None}
+ )
+
+ self.assert_compile(
+ s.params(x=5, y=6, z=7),
+ "SELECT CURRENT_DATE + s.a AS dates FROM "
+ "generate_series(:x, :y, :z) as s(a)",
+ checkparams={'y': 6, 'x': 5, 'z': 7}
+ )
+
+class AsFromTest(fixtures.TestBase, AssertsCompiledSQL):
+ __dialect__ = 'default'
+
+ def test_basic_toplevel_resultmap_positional(self):
+ t = text("select id, name from user").columns(
+ column('id', Integer),
+ column('name')
+ )
+
+ compiled = t.compile()
+ eq_(
+ compiled.result_map,
+ {
+ 'id': ('id', (t.c.id,), t.c.id.type),
+ 'name': ('name', (t.c.name,), t.c.name.type)
+ }
+ )
+
+ def test_basic_toplevel_resultmap(self):
+ t = text("select id, name from user").columns(id=Integer, name=String)
+
+ compiled = t.compile()
+ eq_(
+ compiled.result_map,
+ {
+ 'id': ('id', (t.c.id,), t.c.id.type),
+ 'name': ('name', (t.c.name,), t.c.name.type)
+ }
+ )
+
+ def test_basic_subquery_resultmap(self):
+ t = text("select id, name from user").columns(id=Integer, name=String)
+
+ stmt = select([table1.c.myid]).select_from(
+ table1.join(t, table1.c.myid == t.c.id))
+ compiled = stmt.compile()
+ eq_(
+ compiled.result_map,
+ {
+ "myid": ("myid",
+ (table1.c.myid, "myid", "myid"), table1.c.myid.type),
+ }
+ )
+
+ def test_cte(self):
+ t = text("select id, name from user").columns(id=Integer, name=String).cte('t')
+
+ s = select([table1]).where(table1.c.myid == t.c.id)
+ self.assert_compile(
+ s,
+ "WITH t AS (select id, name from user) "
+ "SELECT mytable.myid, mytable.name, mytable.description "
+ "FROM mytable, t WHERE mytable.myid = t.id"
+ )
+
+
+ def test_alias(self):
+ t = text("select id, name from user").columns(id=Integer, name=String).alias('t')
+
+ s = select([table1]).where(table1.c.myid == t.c.id)
+ self.assert_compile(
+ s,
+ "SELECT mytable.myid, mytable.name, mytable.description "
+ "FROM mytable, (select id, name from user) AS t "
+ "WHERE mytable.myid = t.id"
+ )
+
+ def test_scalar_subquery(self):
+ t = text("select id from user").columns(id=Integer)
+ subq = t.as_scalar()
+
+ assert subq.type._type_affinity is Integer()._type_affinity
+
+ s = select([table1.c.myid, subq]).where(table1.c.myid == subq)
+ self.assert_compile(
+ s,
+ "SELECT mytable.myid, (select id from user) AS anon_1 "
+ "FROM mytable WHERE mytable.myid = (select id from user)"
+ )
+
+ def test_build_bindparams(self):
+ t = text("select id from user :foo :bar :bat")
+ t = t.bindparams(bindparam("foo", type_=Integer))
+ t = t.columns(id=Integer)
+ t = t.bindparams(bar=String)
+ t = t.bindparams(bindparam('bat', value='bat'))
+
+ eq_(
+ set(t.element._bindparams),
+ set(["bat", "foo", "bar"])
+ ) \ No newline at end of file
diff --git a/test/sql/test_types.py b/test/sql/test_types.py
index 2a22224a2..3a263aab2 100644
--- a/test/sql/test_types.py
+++ b/test/sql/test_types.py
@@ -8,6 +8,7 @@ from sqlalchemy import exc, types, util, dialects
for name in dialects.__all__:
__import__("sqlalchemy.dialects.%s" % name)
from sqlalchemy.sql import operators, column, table
+from sqlalchemy.schema import CheckConstraint, AddConstraint
from sqlalchemy.engine import default
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy import testing
@@ -166,14 +167,6 @@ class AdaptTest(fixtures.TestBase):
t1 = typ()
repr(t1)
- def test_plain_init_deprecation_warning(self):
- for typ in (Integer, Date, SmallInteger):
- assert_raises_message(
- exc.SADeprecationWarning,
- "Passing arguments to type object "
- "constructor %s is deprecated" % typ,
- typ, 11
- )
class TypeAffinityTest(fixtures.TestBase):
def test_type_affinity(self):
@@ -272,6 +265,36 @@ class UserDefinedTest(fixtures.TablesTest, AssertsCompiledSQL):
for col in row[3], row[4]:
assert isinstance(col, util.text_type)
+ def test_typedecorator_literal_render(self):
+ class MyType(types.TypeDecorator):
+ impl = String
+
+ def process_literal_param(self, value, dialect):
+ return "HI->%s<-THERE" % value
+
+ self.assert_compile(
+ select([literal("test", MyType)]),
+ "SELECT 'HI->test<-THERE' AS anon_1",
+ dialect='default',
+ literal_binds=True
+ )
+
+ def test_typedecorator_literal_render_fallback_bound(self):
+ # fall back to process_bind_param for literal
+ # value rendering.
+ class MyType(types.TypeDecorator):
+ impl = String
+
+ def process_bind_param(self, value, dialect):
+ return "HI->%s<-THERE" % value
+
+ self.assert_compile(
+ select([literal("test", MyType)]),
+ "SELECT 'HI->test<-THERE' AS anon_1",
+ dialect='default',
+ literal_binds=True
+ )
+
def test_typedecorator_impl(self):
for impl_, exp, kw in [
(Float, "FLOAT", {}),
@@ -381,75 +404,6 @@ class UserDefinedTest(fixtures.TablesTest, AssertsCompiledSQL):
eq_(a.foo, 'foo')
eq_(a.dialect_specific_args['bar'], 'bar')
- @testing.provide_metadata
- def test_type_coerce(self):
- """test ad-hoc usage of custom types with type_coerce()."""
-
- metadata = self.metadata
- class MyType(types.TypeDecorator):
- impl = String
-
- def process_bind_param(self, value, dialect):
- return value[0:-8]
-
- def process_result_value(self, value, dialect):
- return value + "BIND_OUT"
-
- t = Table('t', metadata, Column('data', String(50)))
- metadata.create_all()
-
- t.insert().values(data=type_coerce('d1BIND_OUT', MyType)).execute()
-
- eq_(
- select([type_coerce(t.c.data, MyType)]).execute().fetchall(),
- [('d1BIND_OUT', )]
- )
-
- eq_(
- select([t.c.data, type_coerce(t.c.data, MyType)]).execute().fetchall(),
- [('d1', 'd1BIND_OUT')]
- )
-
- eq_(
- select([t.c.data, type_coerce(t.c.data, MyType)]).
- alias().select().execute().fetchall(),
- [('d1', 'd1BIND_OUT')]
- )
-
- eq_(
- select([t.c.data, type_coerce(t.c.data, MyType)]).\
- where(type_coerce(t.c.data, MyType) == 'd1BIND_OUT').\
- execute().fetchall(),
- [('d1', 'd1BIND_OUT')]
- )
-
- eq_(
- select([t.c.data, type_coerce(t.c.data, MyType)]).\
- where(t.c.data == type_coerce('d1BIND_OUT', MyType)).\
- execute().fetchall(),
- [('d1', 'd1BIND_OUT')]
- )
-
- eq_(
- select([t.c.data, type_coerce(t.c.data, MyType)]).\
- where(t.c.data == type_coerce(None, MyType)).\
- execute().fetchall(),
- []
- )
-
- eq_(
- select([t.c.data, type_coerce(t.c.data, MyType)]).\
- where(type_coerce(t.c.data, MyType) == None).\
- execute().fetchall(),
- []
- )
-
- eq_(
- testing.db.scalar(
- select([type_coerce(literal('d1BIND_OUT'), MyType)])
- ),
- 'd1BIND_OUT'
- )
@classmethod
def define_tables(cls, metadata):
@@ -550,6 +504,220 @@ class UserDefinedTest(fixtures.TablesTest, AssertsCompiledSQL):
Column('goofy9', MyNewIntSubClass, nullable=False),
)
+class TypeCoerceCastTest(fixtures.TablesTest):
+
+ @classmethod
+ def define_tables(cls, metadata):
+ class MyType(types.TypeDecorator):
+ impl = String
+
+ def process_bind_param(self, value, dialect):
+ return "BIND_IN" + str(value)
+
+ def process_result_value(self, value, dialect):
+ return value + "BIND_OUT"
+
+ cls.MyType = MyType
+
+ Table('t', metadata,
+ Column('data', String(50))
+ )
+
+ @testing.fails_on("oracle",
+ "oracle doesn't like CAST in the VALUES of an INSERT")
+ def test_insert_round_trip_cast(self):
+ self._test_insert_round_trip(cast)
+
+ def test_insert_round_trip_type_coerce(self):
+ self._test_insert_round_trip(type_coerce)
+
+ def _test_insert_round_trip(self, coerce_fn):
+ MyType = self.MyType
+ t = self.tables.t
+
+ t.insert().values(data=coerce_fn('d1', MyType)).execute()
+
+ eq_(
+ select([coerce_fn(t.c.data, MyType)]).execute().fetchall(),
+ [('BIND_INd1BIND_OUT', )]
+ )
+
+ @testing.fails_on("oracle",
+ "ORA-00906: missing left parenthesis - "
+ "seems to be CAST(:param AS type)")
+ def test_coerce_from_nulltype_cast(self):
+ self._test_coerce_from_nulltype(cast)
+
+ def test_coerce_from_nulltype_type_coerce(self):
+ self._test_coerce_from_nulltype(type_coerce)
+
+ def _test_coerce_from_nulltype(self, coerce_fn):
+ MyType = self.MyType
+
+ # test coerce from nulltype - e.g. use an object that
+ # doesn't match to a known type
+ class MyObj(object):
+ def __str__(self):
+ return "THISISMYOBJ"
+
+ eq_(
+ testing.db.execute(
+ select([coerce_fn(MyObj(), MyType)])
+ ).fetchall(),
+ [('BIND_INTHISISMYOBJBIND_OUT',)]
+ )
+
+ @testing.fails_on("oracle",
+ "oracle doesn't like CAST in the VALUES of an INSERT")
+ def test_vs_non_coerced_cast(self):
+ self._test_vs_non_coerced(cast)
+
+ def test_vs_non_coerced_type_coerce(self):
+ self._test_vs_non_coerced(type_coerce)
+
+ def _test_vs_non_coerced(self, coerce_fn):
+ MyType = self.MyType
+ t = self.tables.t
+
+ t.insert().values(data=coerce_fn('d1', MyType)).execute()
+
+ eq_(
+ select([t.c.data, coerce_fn(t.c.data, MyType)]).execute().fetchall(),
+ [('BIND_INd1', 'BIND_INd1BIND_OUT')]
+ )
+
+ @testing.fails_on("oracle",
+ "oracle doesn't like CAST in the VALUES of an INSERT")
+ def test_vs_non_coerced_alias_cast(self):
+ self._test_vs_non_coerced_alias(cast)
+
+ def test_vs_non_coerced_alias_type_coerce(self):
+ self._test_vs_non_coerced_alias(type_coerce)
+
+ def _test_vs_non_coerced_alias(self, coerce_fn):
+ MyType = self.MyType
+ t = self.tables.t
+
+ t.insert().values(data=coerce_fn('d1', MyType)).execute()
+
+ eq_(
+ select([t.c.data, coerce_fn(t.c.data, MyType)]).
+ alias().select().execute().fetchall(),
+ [('BIND_INd1', 'BIND_INd1BIND_OUT')]
+ )
+
+ @testing.fails_on("oracle",
+ "oracle doesn't like CAST in the VALUES of an INSERT")
+ def test_vs_non_coerced_where_cast(self):
+ self._test_vs_non_coerced_where(cast)
+
+ def test_vs_non_coerced_where_type_coerce(self):
+ self._test_vs_non_coerced_where(type_coerce)
+
+ def _test_vs_non_coerced_where(self, coerce_fn):
+ MyType = self.MyType
+
+ t = self.tables.t
+ t.insert().values(data=coerce_fn('d1', MyType)).execute()
+
+ # coerce on left side
+ eq_(
+ select([t.c.data, coerce_fn(t.c.data, MyType)]).\
+ where(coerce_fn(t.c.data, MyType) == 'd1').\
+ execute().fetchall(),
+ [('BIND_INd1', 'BIND_INd1BIND_OUT')]
+ )
+
+ # coerce on right side
+ eq_(
+ select([t.c.data, coerce_fn(t.c.data, MyType)]).\
+ where(t.c.data == coerce_fn('d1', MyType)).\
+ execute().fetchall(),
+ [('BIND_INd1', 'BIND_INd1BIND_OUT')]
+ )
+
+ @testing.fails_on("oracle",
+ "oracle doesn't like CAST in the VALUES of an INSERT")
+ def test_coerce_none_cast(self):
+ self._test_coerce_none(cast)
+
+ def test_coerce_none_type_coerce(self):
+ self._test_coerce_none(type_coerce)
+
+ def _test_coerce_none(self, coerce_fn):
+ MyType = self.MyType
+
+ t = self.tables.t
+ t.insert().values(data=coerce_fn('d1', MyType)).execute()
+ eq_(
+ select([t.c.data, coerce_fn(t.c.data, MyType)]).\
+ where(t.c.data == coerce_fn(None, MyType)).\
+ execute().fetchall(),
+ []
+ )
+
+ eq_(
+ select([t.c.data, coerce_fn(t.c.data, MyType)]).\
+ where(coerce_fn(t.c.data, MyType) == None).\
+ execute().fetchall(),
+ []
+ )
+
+ @testing.fails_on("oracle",
+ "oracle doesn't like CAST in the VALUES of an INSERT")
+ def test_resolve_clause_element_cast(self):
+ self._test_resolve_clause_element(cast)
+
+ def test_resolve_clause_element_type_coerce(self):
+ self._test_resolve_clause_element(type_coerce)
+
+ def _test_resolve_clause_element(self, coerce_fn):
+ MyType = self.MyType
+
+ t = self.tables.t
+ t.insert().values(data=coerce_fn('d1', MyType)).execute()
+
+ class MyFoob(object):
+ def __clause_element__(self):
+ return t.c.data
+
+ eq_(
+ testing.db.execute(
+ select([t.c.data, coerce_fn(MyFoob(), MyType)])
+ ).fetchall(),
+ [('BIND_INd1', 'BIND_INd1BIND_OUT')]
+ )
+
+ @testing.fails_on("oracle",
+ "ORA-00906: missing left parenthesis - "
+ "seems to be CAST(:param AS type)")
+ def test_cast_existing_typed(self):
+ MyType = self.MyType
+ coerce_fn = cast
+
+ # when cast() is given an already typed value,
+ # the type does not take effect on the value itself.
+ eq_(
+ testing.db.scalar(
+ select([coerce_fn(literal('d1'), MyType)])
+ ),
+ 'd1BIND_OUT'
+ )
+
+ def test_type_coerce_existing_typed(self):
+ MyType = self.MyType
+ coerce_fn = type_coerce
+ # type_coerce does upgrade the given expression to the
+ # given type.
+ eq_(
+ testing.db.scalar(
+ select([coerce_fn(literal('d1'), MyType)])
+ ),
+ 'BIND_INd1BIND_OUT'
+ )
+
+
+
class VariantTest(fixtures.TestBase, AssertsCompiledSQL):
def setup(self):
class UTypeOne(types.UserDefinedType):
@@ -685,8 +853,11 @@ class UnicodeTest(fixtures.TestBase):
testing.db.dialect.returns_unicode_strings,
True if util.py3k else False
)
-
-
+ elif testing.against('oracle+cx_oracle'):
+ eq_(
+ testing.db.dialect.returns_unicode_strings,
+ True if util.py3k else "conditional"
+ )
else:
expected = (testing.db.name, testing.db.driver) in \
(
@@ -699,7 +870,6 @@ class UnicodeTest(fixtures.TestBase):
('mysql', 'mysqlconnector'),
('sqlite', 'pysqlite'),
('oracle', 'zxjdbc'),
- ('oracle', 'cx_oracle'),
)
eq_(
@@ -768,7 +938,7 @@ class UnicodeTest(fixtures.TestBase):
)
-class EnumTest(fixtures.TestBase):
+class EnumTest(AssertsCompiledSQL, fixtures.TestBase):
@classmethod
def setup_class(cls):
global enum_table, non_native_enum_table, metadata
@@ -851,6 +1021,42 @@ class EnumTest(fixtures.TestBase):
{'id': 4, 'someenum': 'four'}
)
+ def test_non_native_constraint_custom_type(self):
+ class Foob(object):
+ def __init__(self, name):
+ self.name = name
+
+ class MyEnum(types.SchemaType, TypeDecorator):
+ def __init__(self, values):
+ self.impl = Enum(
+ *[v.name for v in values],
+ name="myenum",
+ native_enum=False
+ )
+
+
+ def _set_table(self, table, column):
+ self.impl._set_table(table, column)
+
+ # future method
+ def process_literal_param(self, value, dialect):
+ return value.name
+
+ def process_bind_param(self, value, dialect):
+ return value.name
+
+ m = MetaData()
+ t1 = Table('t', m, Column('x', MyEnum([Foob('a'), Foob('b')])))
+ const = [c for c in t1.constraints if isinstance(c, CheckConstraint)][0]
+
+ self.assert_compile(
+ AddConstraint(const),
+ "ALTER TABLE t ADD CONSTRAINT myenum CHECK (x IN ('a', 'b'))",
+ dialect="default"
+ )
+
+
+
@testing.fails_on('mysql',
"the CHECK constraint doesn't raise an exception for unknown reason")
def test_non_native_constraint(self):
@@ -873,6 +1079,14 @@ class EnumTest(fixtures.TestBase):
# depending on backend.
assert "('x'," in e.print_sql()
+ def test_repr(self):
+ e = Enum("x", "y", name="somename", convert_unicode=True,
+ quote=True, inherit_schema=True)
+ eq_(
+ repr(e),
+ "Enum('x', 'y', name='somename', inherit_schema=True)"
+ )
+
class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
__excluded_on__ = (
('mysql', '<', (4, 1, 1)), # screwy varbinary types
@@ -995,6 +1209,8 @@ class ExpressionTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled
def process(value):
return value / 10
return process
+
+ class MyOldCustomType(MyCustomType):
def adapt_operator(self, op):
return {operators.add: operators.sub,
operators.sub: operators.add}.get(op, op)
@@ -1071,6 +1287,26 @@ class ExpressionTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled
datetime.date(2007, 10, 15), 25, 'BIND_INfooBIND_OUT')]
)
+ def test_bind_adapt_update(self):
+ bp = bindparam("somevalue")
+ stmt = test_table.update().values(avalue=bp)
+ compiled = stmt.compile()
+ eq_(bp.type._type_affinity, types.NullType)
+ eq_(compiled.binds['somevalue'].type._type_affinity, MyCustomType)
+
+ def test_bind_adapt_insert(self):
+ bp = bindparam("somevalue")
+ stmt = test_table.insert().values(avalue=bp)
+ compiled = stmt.compile()
+ eq_(bp.type._type_affinity, types.NullType)
+ eq_(compiled.binds['somevalue'].type._type_affinity, MyCustomType)
+
+ def test_bind_adapt_expression(self):
+ bp = bindparam("somevalue")
+ stmt = test_table.c.avalue == bp
+ eq_(bp.type._type_affinity, types.NullType)
+ eq_(stmt.right.type._type_affinity, MyCustomType)
+
def test_literal_adapt(self):
# literals get typed based on the types dictionary, unless
# compatible with the left side type
@@ -1150,15 +1386,18 @@ class ExpressionTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled
)
self.assert_compile(
and_(c1 == True, c2 == True, c3 == True),
- "x = :x_1 AND x = true AND x = :x_2"
+ "x = :x_1 AND x = true AND x = :x_2",
+ dialect=default.DefaultDialect(supports_native_boolean=True)
)
self.assert_compile(
and_(c1 == 3, c2 == 3, c3 == 3),
- "x = :x_1 AND x = :x_2 AND x = :x_3"
+ "x = :x_1 AND x = :x_2 AND x = :x_3",
+ dialect=default.DefaultDialect(supports_native_boolean=True)
)
self.assert_compile(
and_(c1.is_(True), c2.is_(True), c3.is_(True)),
- "x IS :x_1 AND x IS true AND x IS :x_2"
+ "x IS :x_1 AND x IS true AND x IS :x_2",
+ dialect=default.DefaultDialect(supports_native_boolean=True)
)
@@ -1202,7 +1441,9 @@ class ExpressionTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled
assert expr.right.type._type_affinity is MyFoobarType
# untyped bind - it gets assigned MyFoobarType
- expr = column("foo", MyFoobarType) + bindparam("foo")
+ bp = bindparam("foo")
+ expr = column("foo", MyFoobarType) + bp
+ assert bp.type._type_affinity is types.NullType
assert expr.right.type._type_affinity is MyFoobarType
expr = column("foo", MyFoobarType) + bindparam("foo", type_=Integer)
@@ -1453,7 +1694,7 @@ class IntervalTest(fixtures.TestBase, AssertsExecutionResults):
eq_(row['non_native_interval'], None)
-class BooleanTest(fixtures.TestBase, AssertsExecutionResults):
+class BooleanTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
@classmethod
def setup_class(cls):
global bool_table
@@ -1515,6 +1756,35 @@ class BooleanTest(fixtures.TestBase, AssertsExecutionResults):
testing.db.execute(
"insert into booltest (id, unconstrained_value) values (1, 5)")
+ def test_non_native_constraint_custom_type(self):
+ class Foob(object):
+ def __init__(self, value):
+ self.value = value
+
+ class MyBool(types.SchemaType, TypeDecorator):
+ impl = Boolean()
+
+ def _set_table(self, table, column):
+ self.impl._set_table(table, column)
+
+ # future method
+ def process_literal_param(self, value, dialect):
+ return value.value
+
+ def process_bind_param(self, value, dialect):
+ return value.value
+
+ m = MetaData()
+ t1 = Table('t', m, Column('x', MyBool()))
+ const = [c for c in t1.constraints if isinstance(c, CheckConstraint)][0]
+
+ self.assert_compile(
+ AddConstraint(const),
+ "ALTER TABLE t ADD CHECK (x IN (0, 1))",
+ dialect="sqlite"
+ )
+
+
class PickleTest(fixtures.TestBase):
def test_eq_comparison(self):
p1 = PickleType()
diff --git a/test/sql/test_unicode.py b/test/sql/test_unicode.py
index ffcef903f..8a8cbd06c 100644
--- a/test/sql/test_unicode.py
+++ b/test/sql/test_unicode.py
@@ -2,7 +2,7 @@
"""verrrrry basic unicode column name testing"""
from sqlalchemy import *
-from sqlalchemy.testing import fixtures, engines
+from sqlalchemy.testing import fixtures, engines, eq_
from sqlalchemy import testing
from sqlalchemy.testing.engines import utf8_engine
from sqlalchemy.sql import column
@@ -114,6 +114,20 @@ class UnicodeSchemaTest(fixtures.TestBase):
meta.drop_all()
metadata.create_all()
+ def test_repr(self):
+
+ m = MetaData()
+ t = Table(ue('\u6e2c\u8a66'), m, Column(ue('\u6e2c\u8a66_id'), Integer))
+
+ # I hardly understand what's going on with the backslashes in
+ # this one on py2k vs. py3k
+ eq_(
+ repr(t),
+ (
+ "Table('\\u6e2c\\u8a66', MetaData(bind=None), "
+ "Column('\\u6e2c\\u8a66_id', Integer(), table=<\u6e2c\u8a66>), "
+ "schema=None)"))
+
class EscapesDefaultsTest(fixtures.TestBase):
def test_default_exec(self):
metadata = MetaData(testing.db)
diff --git a/test/sql/test_update.py b/test/sql/test_update.py
index a8510f374..10306372b 100644
--- a/test/sql/test_update.py
+++ b/test/sql/test_update.py
@@ -192,22 +192,6 @@ class UpdateTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL):
'UPDATE A B C D mytable SET myid=%s, name=%s, description=%s',
dialect=mysql.dialect())
- def test_alias(self):
- table1 = self.tables.mytable
- talias1 = table1.alias('t1')
-
- self.assert_compile(update(talias1, talias1.c.myid == 7),
- 'UPDATE mytable AS t1 '
- 'SET name=:name '
- 'WHERE t1.myid = :myid_1',
- params={table1.c.name: 'fred'})
-
- self.assert_compile(update(talias1, table1.c.myid == 7),
- 'UPDATE mytable AS t1 '
- 'SET name=:name '
- 'FROM mytable '
- 'WHERE mytable.myid = :myid_1',
- params={table1.c.name: 'fred'})
def test_update_to_expression(self):
"""test update from an expression.
@@ -268,6 +252,64 @@ class UpdateFromCompileTest(_UpdateFromTestBase, fixtures.TablesTest,
run_create_tables = run_inserts = run_deletes = None
+ def test_alias_one(self):
+ table1 = self.tables.mytable
+ talias1 = table1.alias('t1')
+
+ # this case is nonsensical. the UPDATE is entirely
+ # against the alias, but we name the table-bound column
+ # in values. The behavior here isn't really defined
+ self.assert_compile(
+ update(talias1, talias1.c.myid == 7).
+ values({table1.c.name: "fred"}),
+ 'UPDATE mytable AS t1 '
+ 'SET name=:name '
+ 'WHERE t1.myid = :myid_1')
+
+ def test_alias_two(self):
+ table1 = self.tables.mytable
+ talias1 = table1.alias('t1')
+
+ # Here, compared to
+ # test_alias_one(), here we actually have UPDATE..FROM,
+ # which is causing the "table1.c.name" param to be handled
+ # as an "extra table", hence we see the full table name rendered.
+ self.assert_compile(
+ update(talias1, table1.c.myid == 7).
+ values({table1.c.name: 'fred'}),
+ 'UPDATE mytable AS t1 '
+ 'SET name=:mytable_name '
+ 'FROM mytable '
+ 'WHERE mytable.myid = :myid_1',
+ checkparams={'mytable_name': 'fred', 'myid_1': 7},
+ )
+
+ def test_alias_two_mysql(self):
+ table1 = self.tables.mytable
+ talias1 = table1.alias('t1')
+
+ self.assert_compile(
+ update(talias1, table1.c.myid == 7).
+ values({table1.c.name: 'fred'}),
+ "UPDATE mytable AS t1, mytable SET mytable.name=%s "
+ "WHERE mytable.myid = %s",
+ checkparams={'mytable_name': 'fred', 'myid_1': 7},
+ dialect='mysql')
+
+ def test_update_from_multitable_same_name_mysql(self):
+ users, addresses = self.tables.users, self.tables.addresses
+
+ self.assert_compile(
+ users.update().
+ values(name='newname').\
+ values({addresses.c.name: "new address"}).\
+ where(users.c.id == addresses.c.user_id),
+ "UPDATE users, addresses SET addresses.name=%s, "
+ "users.name=%s WHERE users.id = addresses.user_id",
+ checkparams={u'addresses_name': 'new address', 'name': 'newname'},
+ dialect='mysql'
+ )
+
def test_render_table(self):
users, addresses = self.tables.users, self.tables.addresses
@@ -455,6 +497,36 @@ class UpdateFromRoundTripTest(_UpdateFromTestBase, fixtures.TablesTest):
(10, 'chuck')]
self._assert_users(users, expected)
+ @testing.only_on('mysql', 'Multi table update')
+ def test_exec_multitable_same_name(self):
+ users, addresses = self.tables.users, self.tables.addresses
+
+ values = {
+ addresses.c.name: 'ad_ed2',
+ users.c.name: 'ed2'
+ }
+
+ testing.db.execute(
+ addresses.update().
+ values(values).
+ where(users.c.id == addresses.c.user_id).
+ where(users.c.name == 'ed'))
+
+ expected = [
+ (1, 7, 'x', 'jack@bean.com'),
+ (2, 8, 'ad_ed2', 'ed@wood.com'),
+ (3, 8, 'ad_ed2', 'ed@bettyboop.com'),
+ (4, 8, 'ad_ed2', 'ed@lala.com'),
+ (5, 9, 'x', 'fred@fred.com')]
+ self._assert_addresses(addresses, expected)
+
+ expected = [
+ (7, 'jack'),
+ (8, 'ed2'),
+ (9, 'fred'),
+ (10, 'chuck')]
+ self._assert_users(users, expected)
+
def _assert_addresses(self, addresses, expected):
stmt = addresses.select().order_by(addresses.c.id)
eq_(testing.db.execute(stmt).fetchall(), expected)
@@ -478,7 +550,16 @@ class UpdateFromMultiTableUpdateDefaultsTest(_UpdateFromTestBase,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('user_id', None, ForeignKey('users.id')),
- Column('email_address', String(50), nullable=False))
+ Column('email_address', String(50), nullable=False),
+ )
+
+ Table('foobar', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('user_id', None, ForeignKey('users.id')),
+ Column('data', String(30)),
+ Column('some_update', String(30), onupdate='im the other update')
+ )
@classmethod
def fixtures(cls):
@@ -494,6 +575,12 @@ class UpdateFromMultiTableUpdateDefaultsTest(_UpdateFromTestBase,
(3, 8, 'ed@bettyboop.com'),
(4, 9, 'fred@fred.com')
),
+ foobar=(
+ ('id', 'user_id', 'data'),
+ (2, 8, 'd1'),
+ (3, 8, 'd2'),
+ (4, 9, 'd3')
+ )
)
@testing.only_on('mysql', 'Multi table update')
@@ -525,6 +612,37 @@ class UpdateFromMultiTableUpdateDefaultsTest(_UpdateFromTestBase,
self._assert_users(users, expected)
@testing.only_on('mysql', 'Multi table update')
+ def test_defaults_second_table_same_name(self):
+ users, foobar = self.tables.users, self.tables.foobar
+
+ values = {
+ foobar.c.data: foobar.c.data + 'a',
+ users.c.name: 'ed2'
+ }
+
+ ret = testing.db.execute(
+ users.update().
+ values(values).
+ where(users.c.id == foobar.c.user_id).
+ where(users.c.name == 'ed'))
+
+ eq_(
+ set(ret.prefetch_cols()),
+ set([users.c.some_update, foobar.c.some_update])
+ )
+
+ expected = [
+ (2, 8, 'd1a', 'im the other update'),
+ (3, 8, 'd2a', 'im the other update'),
+ (4, 9, 'd3', None)]
+ self._assert_foobar(foobar, expected)
+
+ expected = [
+ (8, 'ed2', 'im the update'),
+ (9, 'fred', 'value')]
+ self._assert_users(users, expected)
+
+ @testing.only_on('mysql', 'Multi table update')
def test_no_defaults_second_table(self):
users, addresses = self.tables.users, self.tables.addresses
@@ -548,6 +666,10 @@ class UpdateFromMultiTableUpdateDefaultsTest(_UpdateFromTestBase,
(9, 'fred', 'value')]
self._assert_users(users, expected)
+ def _assert_foobar(self, foobar, expected):
+ stmt = foobar.select().order_by(foobar.c.id)
+ eq_(testing.db.execute(stmt).fetchall(), expected)
+
def _assert_addresses(self, addresses, expected):
stmt = addresses.select().order_by(addresses.c.id)
eq_(testing.db.execute(stmt).fetchall(), expected)