summary | refs | log | tree | commit | diff
path: root/examples/performance
diff options
context:
space:
mode:
author    Mike Bayer <mike_mp@zzzcomputing.com>  2014-09-03 20:30:52 -0400
committer Mike Bayer <mike_mp@zzzcomputing.com>  2014-09-03 20:30:52 -0400
commit    eb81531275c07a0ab8c74eadc7881cfcff27ba21 (patch)
tree      c94916d3db85c9a79454c8db244c25d9dbedfcf1 /examples/performance
parent    cbef6a7d58ee42e33167a14e6a31a124aa0bf08e (diff)
download  sqlalchemy-eb81531275c07a0ab8c74eadc7881cfcff27ba21.tar.gz
tweak
Diffstat (limited to 'examples/performance')
-rw-r--r--  examples/performance/bulk_inserts.py      | 11
-rw-r--r--  examples/performance/large_resultsets.py  |  4
2 files changed, 9 insertions, 6 deletions
diff --git a/examples/performance/bulk_inserts.py b/examples/performance/bulk_inserts.py
index 648d5f2aa..531003aa6 100644
--- a/examples/performance/bulk_inserts.py
+++ b/examples/performance/bulk_inserts.py
@@ -1,3 +1,8 @@
+"""This series of tests illustrates different ways to INSERT a large number
+of rows in bulk.
+
+
+"""
from . import Profiler
from sqlalchemy.ext.declarative import declarative_base
@@ -69,7 +74,7 @@ def test_flush_pk_given(n):
@Profiler.profile
def test_bulk_save(n):
- """Batched INSERT statements via the ORM in "bulk", discarding PK values."""
+ """Batched INSERT statements via the ORM in "bulk", discarding PKs."""
session = Session(bind=engine)
session.bulk_save_objects([
Customer(
@@ -83,7 +88,7 @@ def test_bulk_save(n):
@Profiler.profile
def test_bulk_insert_mappings(n):
- """Batched INSERT statements via the ORM "bulk", using dictionaries instead of objects"""
+ """Batched INSERT statements via the ORM "bulk", using dictionaries."""
session = Session(bind=engine)
session.bulk_insert_mappings(Customer, [
dict(
@@ -112,7 +117,7 @@ def test_core_insert(n):
@Profiler.profile
def test_dbapi_raw(n):
- """The DBAPI's pure C API inserting rows in bulk, no pure Python at all"""
+ """The DBAPI's API inserting rows in bulk."""
conn = engine.pool._creator()
cursor = conn.cursor()
diff --git a/examples/performance/large_resultsets.py b/examples/performance/large_resultsets.py
index c9ce23d61..77c0246fc 100644
--- a/examples/performance/large_resultsets.py
+++ b/examples/performance/large_resultsets.py
@@ -121,7 +121,7 @@ def test_core_fetchmany(n):
@Profiler.profile
def test_dbapi_fetchall_plus_append_objects(n):
- """Load rows using DBAPI fetchall(), make a list of objects."""
+ """Load rows using DBAPI fetchall(), generate an object for each row."""
_test_dbapi_raw(n, True)
@@ -156,12 +156,10 @@ def _test_dbapi_raw(n, make_objects):
cursor.execute(sql)
if make_objects:
- result = []
for row in cursor.fetchall():
# ensure that we fully fetch!
customer = SimpleCustomer(
id=row[0], name=row[1], description=row[2])
- result.append(customer)
else:
for row in cursor.fetchall():
# ensure that we fully fetch!