diff options
| author | Jason Kirtland <jek@discorporate.us> | 2008-01-12 22:03:42 +0000 |
|---|---|---|
| committer | Jason Kirtland <jek@discorporate.us> | 2008-01-12 22:03:42 +0000 |
| commit | 17d3c8764e020379e54053bca0b0a2bc71d48aa0 (patch) | |
| tree | 0b46f1ddc57292b8f5bfbc28ab1679230f63e426 /test/perf | |
| parent | c194962019d1bc7322e20b82c33aa1bab3bc2a28 (diff) | |
| download | sqlalchemy-17d3c8764e020379e54053bca0b0a2bc71d48aa0.tar.gz | |
- testbase is gone, replaced by testenv
- Importing testenv has no side effects — explicit setup functions provide behavior similar to the old import-time behavior of testbase
- testing.db has the configured db
- Fixed up the perf/* scripts
Diffstat (limited to 'test/perf')
| -rw-r--r-- | test/perf/cascade_speed.py | 6 | ||||
| -rw-r--r-- | test/perf/insertspeed.py | 14 | ||||
| -rw-r--r-- | test/perf/masscreate.py | 17 | ||||
| -rw-r--r-- | test/perf/masscreate2.py | 23 | ||||
| -rw-r--r-- | test/perf/masseagerload.py | 12 | ||||
| -rw-r--r-- | test/perf/massload.py | 28 | ||||
| -rw-r--r-- | test/perf/massload2.py | 47 | ||||
| -rw-r--r-- | test/perf/masssave.py | 18 | ||||
| -rw-r--r-- | test/perf/objselectspeed.py | 4 | ||||
| -rw-r--r-- | test/perf/objupdatespeed.py | 4 | ||||
| -rw-r--r-- | test/perf/ormsession.py | 12 | ||||
| -rw-r--r-- | test/perf/poolload.py | 12 | ||||
| -rw-r--r-- | test/perf/threaded_compile.py | 17 | ||||
| -rw-r--r-- | test/perf/wsgi.py | 10 |
14 files changed, 109 insertions, 115 deletions
diff --git a/test/perf/cascade_speed.py b/test/perf/cascade_speed.py index 34d046381..dbf41a7f7 100644 --- a/test/perf/cascade_speed.py +++ b/test/perf/cascade_speed.py @@ -1,10 +1,10 @@ -import testbase +import testenv; testenv.simple_setup() from sqlalchemy import * from sqlalchemy.orm import * -from testlib import * from timeit import Timer import sys + meta = MetaData() orders = Table('orders', meta, @@ -62,7 +62,7 @@ class TimeTrial(object): for valueid in range(5): val = Value() val.attribute = attr - + def run(self, number): s = create_session() self.order = order = Order() diff --git a/test/perf/insertspeed.py b/test/perf/insertspeed.py index cb6a9bf34..32877560e 100644 --- a/test/perf/insertspeed.py +++ b/test/perf/insertspeed.py @@ -1,8 +1,8 @@ -import testbase +import testenv; testenv.simple_setup() import sys, time from sqlalchemy import * from sqlalchemy.orm import * -from testlib import * +from testlib import profiling db = create_engine('sqlite://') metadata = MetaData(db) @@ -21,7 +21,7 @@ def sqlite_unprofiled_insertmany(n): c = conn.cursor() persons = [('john doe', 1, 35) for i in xrange(n)] c.executemany("insert into Person(name, sex, age) values (?,?,?)", persons) - + @profiling.profiled('sa_profiled_insert_many', always=True) def sa_profiled_insert_many(n): i = Person_table.insert() @@ -35,7 +35,7 @@ def sqlite_unprofiled_insert(n): c = conn.cursor() for j in xrange(n): c.execute("insert into Person(name, sex, age) values (?,?,?)", - ('john doe', 1, 35)) + ('john doe', 1, 35)) def sa_unprofiled_insert(n): # Another option is to build Person_table.insert() outside of the @@ -59,7 +59,7 @@ def run_timed(fn, label, *args, **kw): sys.stdout.write("%s (%s): " % (label, ', '.join([str(a) for a in args]))) sys.stdout.flush() - + t = time.clock() fn(*args, **kw) t2 = time.clock() @@ -80,7 +80,7 @@ def all(): run_timed(sqlite_unprofiled_insertmany, 'pysqlite bulk insert', 50000) - + run_timed(sa_unprofiled_insertmany, 'SQLAlchemy bulk insert', 50000) @@ 
-102,7 +102,7 @@ def all(): run_profiled(sa_profiled_insert, 'SQLAlchemy individual insert/select, profiled', 1000) - + finally: metadata.drop_all() diff --git a/test/perf/masscreate.py b/test/perf/masscreate.py index 346a725e3..ae32f83e2 100644 --- a/test/perf/masscreate.py +++ b/test/perf/masscreate.py @@ -1,7 +1,7 @@ # times how long it takes to create 26000 objects -import testbase +import testenv; testenv.simple_setup() -from sqlalchemy.orm.attributes import * +from sqlalchemy.orm import attributes import time import gc @@ -13,18 +13,17 @@ class User(object): class Address(object): pass -attr_manager = AttributeManager() if manage_attributes: - attr_manager.register_attribute(User, 'id', uselist=False) - attr_manager.register_attribute(User, 'name', uselist=False) - attr_manager.register_attribute(User, 'addresses', uselist=True, trackparent=True) - attr_manager.register_attribute(Address, 'email', uselist=False) + attributes.register_attribute(User, 'id', False, False) + attributes.register_attribute(User, 'name', False, False) + attributes.register_attribute(User, 'addresses', True, False, trackparent=True) + attributes.register_attribute(Address, 'email', False, False) now = time.time() for i in range(0,130): u = User() if init_attributes: - attr_manager.init_attr(u) + attributes.manage(u) u.id = i u.name = "user " + str(i) if not manage_attributes: @@ -32,7 +31,7 @@ for i in range(0,130): for j in range(0,200): a = Address() if init_attributes: - attr_manager.init_attr(a) + attributes.manage(a) a.email = 'foo@bar.com' u.addresses.append(a) # gc.collect() diff --git a/test/perf/masscreate2.py b/test/perf/masscreate2.py index 2e29a6327..25d4b4915 100644 --- a/test/perf/masscreate2.py +++ b/test/perf/masscreate2.py @@ -1,37 +1,36 @@ -import testbase +import testenv; testenv.simple_setup() import gc import random, string -from sqlalchemy.orm.attributes import * +from sqlalchemy.orm import attributes # with this test, run top. 
make sure the Python process doenst grow in size arbitrarily. class User(object): pass - + class Address(object): pass -attr_manager = AttributeManager() -attr_manager.register_attribute(User, 'id', uselist=False) -attr_manager.register_attribute(User, 'name', uselist=False) -attr_manager.register_attribute(User, 'addresses', uselist=True) -attr_manager.register_attribute(Address, 'email', uselist=False) -attr_manager.register_attribute(Address, 'user', uselist=False) - +attributes.register_attribute(User, 'id', False, False) +attributes.register_attribute(User, 'name', False, False) +attributes.register_attribute(User, 'addresses', True, False) +attributes.register_attribute(Address, 'email', False, False) +attributes.register_attribute(Address, 'user', False, False) + for i in xrange(1000): for j in xrange(1000): u = User() + attributes.manage(u) u.name = str(random.randint(0, 100000000)) for k in xrange(10): a = Address() a.email_address = str(random.randint(0, 100000000)) + attributes.manage(a) u.addresses.append(a) a.user = u print "clearing" #managed_attributes.clear() gc.collect() - - diff --git a/test/perf/masseagerload.py b/test/perf/masseagerload.py index 38696e85b..bc2834ff7 100644 --- a/test/perf/masseagerload.py +++ b/test/perf/masseagerload.py @@ -1,4 +1,4 @@ -import testbase +import testenv; testenv.configure_for_tests() from sqlalchemy import * from sqlalchemy.orm import * from testlib import * @@ -6,11 +6,11 @@ from testlib import * NUM = 500 DIVISOR = 50 -meta = MetaData(testbase.db) -items = Table('items', meta, +meta = MetaData(testing.db) +items = Table('items', meta, Column('item_id', Integer, primary_key=True), Column('value', String(100))) -subitems = Table('subitems', meta, +subitems = Table('subitems', meta, Column('sub_id', Integer, primary_key=True), Column('parent_id', Integer, ForeignKey('items.item_id')), Column('value', String(100))) @@ -33,12 +33,12 @@ def load(): z = ((x-1) * DIVISOR) + y l.append({'sub_id':z,'value':'this is item 
#%d' % z, 'parent_id':x}) #print l - subitems.insert().execute(*l) + subitems.insert().execute(*l) @profiling.profiled('masseagerload', always=True, sort=['cumulative']) def masseagerload(session): query = session.query(Item) - l = query.select() + l = query.all() print "loaded ", len(l), " items each with ", len(l[0].subs), "subitems" def all(): diff --git a/test/perf/massload.py b/test/perf/massload.py index 92cf0fe92..6fdda3f4d 100644 --- a/test/perf/massload.py +++ b/test/perf/massload.py @@ -1,4 +1,4 @@ -import testbase +import testenv; testenv.configure_for_tests() import time #import gc #import sqlalchemy.orm.attributes as attributes @@ -6,19 +6,22 @@ from sqlalchemy import * from sqlalchemy.orm import * from testlib import * -NUM = 2500 +""" +we are testing session.expunge() here, also that the attributes and unitofwork +packages dont keep dereferenced stuff hanging around. + +for best results, dont run with sqlite :memory: database, and keep an eye on +top while it runs """ -we are testing session.expunge() here, also that the attributes and unitofwork packages dont keep dereferenced -stuff hanging around. 
-for best results, dont run with sqlite :memory: database, and keep an eye on top while it runs""" +NUM = 2500 class LoadTest(AssertMixin): def setUpAll(self): global items, meta - meta = MetaData(testbase.db) - items = Table('items', meta, + meta = MetaData(testing.db) + items = Table('items', meta, Column('item_id', Integer, primary_key=True), Column('value', String(100))) items.create() @@ -30,10 +33,10 @@ class LoadTest(AssertMixin): for y in range(x*500-500 + 1, x*500 + 1): l.append({'item_id':y, 'value':'this is item #%d' % y}) items.insert().execute(*l) - + def testload(self): class Item(object):pass - + m = mapper(Item, items) sess = create_session() now = time.time() @@ -41,7 +44,7 @@ class LoadTest(AssertMixin): for x in range (1,NUM/100): # this is not needed with cpython which clears non-circular refs immediately #gc.collect() - l = query.select(items.c.item_id.between(x*100 - 100 + 1, x*100)) + l = query.filter(items.c.item_id.between(x*100 - 100 + 1, x*100)).all() assert len(l) == 100 print "loaded ", len(l), " items " # modifying each object will insure that the objects get placed in the "dirty" list @@ -56,6 +59,7 @@ class LoadTest(AssertMixin): #objectstore.expunge(*l) total = time.time() -now print "total time ", total - + + if __name__ == "__main__": - testbase.main() + testenv.main() diff --git a/test/perf/massload2.py b/test/perf/massload2.py index d6424eb07..a3deb932f 100644 --- a/test/perf/massload2.py +++ b/test/perf/massload2.py @@ -1,45 +1,38 @@ -import sys -sys.path.insert(0, './lib/') - -try: -# import sqlalchemy.mods.threadlocal - pass -except: - pass -from sqlalchemy import * -from testbase import Table, Column +import testenv; testenv.simple_setup() import time +from sqlalchemy import * +from sqlalchemy.orm import * -metadata = create_engine('sqlite://', echo=True) +metadata = MetaData(create_engine('sqlite://', echo=True)) -t1s = Table( 't1s', metadata, +t1s = Table( 't1s', metadata, Column( 'id', Integer, primary_key=True), 
Column('data', String(100)) - ) + ) -t2s = Table( 't2s', metadata, +t2s = Table( 't2s', metadata, Column( 'id', Integer, primary_key=True), Column( 't1id', Integer, ForeignKey("t1s.id"), nullable=True )) -t3s = Table( 't3s', metadata, +t3s = Table( 't3s', metadata, Column( 'id', Integer, primary_key=True), Column( 't2id', Integer, ForeignKey("t2s.id"), nullable=True )) -t4s = Table( 't4s', metadata, - Column( 'id', Integer, primary_key=True), +t4s = Table( 't4s', metadata, + Column( 'id', Integer, primary_key=True), Column( 't3id', Integer, ForeignKey("t3s.id"), nullable=True )) - + [t.create() for t in [t1s,t2s,t3s,t4s]] class T1( object ): pass class T2( object ): pass class T3( object ): pass -class T4( object ): pass +class T4( object ): pass mapper( T1, t1s ) -mapper( T2, t2s ) -mapper( T3, t3s ) -mapper( T4, t4s ) +mapper( T2, t2s ) +mapper( T3, t3s ) +mapper( T4, t4s ) cascade = "all, delete-orphan" use_backref = True @@ -55,22 +48,22 @@ else: now = time.time() print "start" -sess = create_session() +sess = create_session() o1 = T1() -sess.save(o1) +sess.save(o1) for i2 in range(10): o2 = T2() o1.t2s.append( o2 ) - + for i3 in range( 10 ): o3 = T3() o2.t3s.append( o3 ) - + for i4 in range( 10 ): o3.t4s.append ( T4() ) print i2, i3, i4 -print len([s for s in sess]) +print len([s for s in sess]) print "flushing" sess.flush() total = time.time() - now diff --git a/test/perf/masssave.py b/test/perf/masssave.py index dd03f3962..c0d041a41 100644 --- a/test/perf/masssave.py +++ b/test/perf/masssave.py @@ -1,29 +1,29 @@ -import testbase +import testenv; testenv.configure_for_tests() import types from sqlalchemy import * from sqlalchemy.orm import * from testlib import * -NUM = 250000 +NUM = 2500 class SaveTest(AssertMixin): def setUpAll(self): global items, metadata - metadata = MetaData(testbase.db) - items = Table('items', metadata, + metadata = MetaData(testing.db) + items = Table('items', metadata, Column('item_id', Integer, primary_key=True), Column('value', 
String(100))) items.create() def tearDownAll(self): clear_mappers() metadata.drop_all() - + def testsave(self): class Item(object):pass - + m = mapper(Item, items) - + for x in range(0,NUM/50): sess = create_session() query = sess.query(Item) @@ -48,5 +48,7 @@ class SaveTest(AssertMixin): rep.sort(sorter) for x in rep[0:30]: print x + + if __name__ == "__main__": - testbase.main() + testenv.main() diff --git a/test/perf/objselectspeed.py b/test/perf/objselectspeed.py index f01c74c81..896fd4c49 100644 --- a/test/perf/objselectspeed.py +++ b/test/perf/objselectspeed.py @@ -1,8 +1,8 @@ -import testbase +import testenv; testenv.simple_setup() import time, gc, resource from sqlalchemy import * from sqlalchemy.orm import * -from testlib import * + db = create_engine('sqlite://') metadata = MetaData(db) diff --git a/test/perf/objupdatespeed.py b/test/perf/objupdatespeed.py index d355d5ece..a49eb4724 100644 --- a/test/perf/objupdatespeed.py +++ b/test/perf/objupdatespeed.py @@ -1,4 +1,4 @@ -import testbase +import testenv; testenv.configure_for_tests() import time, gc, resource from sqlalchemy import * from sqlalchemy.orm import * @@ -6,7 +6,7 @@ from testlib import * NUM = 100 -metadata = MetaData(testbase.db) +metadata = MetaData(testing.db) Person_table = Table('Person', metadata, Column('id', Integer, primary_key=True), Column('name', String(40)), diff --git a/test/perf/ormsession.py b/test/perf/ormsession.py index 3cd21ea98..b0187a787 100644 --- a/test/perf/ormsession.py +++ b/test/perf/ormsession.py @@ -1,4 +1,4 @@ -import testbase +import testenv; testenv.configure_for_tests() import time from datetime import datetime @@ -78,7 +78,7 @@ def insert_data(): q_sub_per_item = 10 q_customers = 1000 - con = testbase.db.connect() + con = testing.db.connect() transaction = con.begin() data, subdata = [], [] @@ -146,8 +146,8 @@ def insert_data(): def run_queries(): session = create_session() # no explicit transaction here. 
- - # build a report of summarizing the last 50 purchases and + + # build a report of summarizing the last 50 purchases and # the top 20 items from all purchases q = session.query(Purchase). \ @@ -165,7 +165,7 @@ def run_queries(): for item in purchase.items: report.append(item.name) report.extend([s.name for s in item.subitems]) - + # mix a little low-level with orm # pull a report of the top 20 items of all time _item_id = purchaseitems.c.item_id @@ -212,7 +212,7 @@ def default(): @profiled('all') def main(): - metadata.bind = testbase.db + metadata.bind = testing.db try: define_tables() setup_mappers() diff --git a/test/perf/poolload.py b/test/perf/poolload.py index 1a2ff6978..8d66da84f 100644 --- a/test/perf/poolload.py +++ b/test/perf/poolload.py @@ -1,11 +1,11 @@ # load test of connection pool - -import testbase +import testenv; testenv.configure_for_tests() +import thread, time from sqlalchemy import * import sqlalchemy.pool as pool -import thread,time +from testlib import testing -db = create_engine(testbase.db.url, pool_timeout=30, echo_pool=True) +db = create_engine(testing.db.url, pool_timeout=30, echo_pool=True) metadata = MetaData(db) users_table = Table('users', metadata, @@ -30,8 +30,8 @@ def runfast(): # time.sleep(.005) # result.close() print "runfast cycle complete" - -#thread.start_new_thread(runslow, ()) + +#thread.start_new_thread(runslow, ()) for x in xrange(0,50): thread.start_new_thread(runfast, ()) diff --git a/test/perf/threaded_compile.py b/test/perf/threaded_compile.py index 13ec31fd6..6809f2560 100644 --- a/test/perf/threaded_compile.py +++ b/test/perf/threaded_compile.py @@ -1,21 +1,21 @@ """test that mapper compilation is threadsafe, including -when additional mappers are created while the existing +when additional mappers are created while the existing collection is being compiled.""" -import testbase +import testenv; testenv.simple_setup() from sqlalchemy import * from sqlalchemy.orm import * import thread, time from sqlalchemy.orm 
import mapperlib -from testlib import * + meta = MetaData('sqlite:///foo.db') -t1 = Table('t1', meta, +t1 = Table('t1', meta, Column('c1', Integer, primary_key=True), Column('c2', String(30)) ) - + t2 = Table('t2', meta, Column('c1', Integer, primary_key=True), Column('c2', String(30)), @@ -32,13 +32,13 @@ class T1(object): class T2(object): pass - + class FakeLock(object): def acquire(self):pass def release(self):pass # uncomment this to disable the mutex in mapper compilation; -# should produce thread collisions +# should produce thread collisions #mapperlib._COMPILE_MUTEX = FakeLock() def run1(): @@ -62,7 +62,7 @@ def run3(): class_mapper(Foo).compile() foo() time.sleep(.05) - + mapper(T1, t1, properties={'t2':relation(T2, backref="t1")}) mapper(T2, t2) print "START" @@ -74,4 +74,3 @@ for j in range(0, 5): thread.start_new_thread(run3, ()) print "WAIT" time.sleep(5) - diff --git a/test/perf/wsgi.py b/test/perf/wsgi.py index d22eeb76a..6fc8149bc 100644 --- a/test/perf/wsgi.py +++ b/test/perf/wsgi.py @@ -1,7 +1,7 @@ #!/usr/bin/python """Uses ``wsgiref``, standard in Python 2.5 and also in the cheeseshop.""" -import testbase +import testenv; testenv.configure_for_tests() from sqlalchemy import * from sqlalchemy.orm import * import thread @@ -14,8 +14,8 @@ logging.basicConfig() logging.getLogger('sqlalchemy.pool').setLevel(logging.INFO) threadids = set() -meta = MetaData(testbase.db) -foo = Table('foo', meta, +meta = MetaData(testing.db) +foo = Table('foo', meta, Column('id', Integer, primary_key=True), Column('data', String(30))) class Foo(object): @@ -41,7 +41,7 @@ def serve(environ, start_response): " total threads ", len(threadids)) return [str("\n".join([x.data for x in l]))] - + if __name__ == '__main__': from wsgiref import simple_server try: @@ -51,5 +51,3 @@ if __name__ == '__main__': server.serve_forever() finally: meta.drop_all() - - |
