author    Aymeric Augustin <aymeric.augustin@m4x.org>  2014-03-22 21:28:12 +0100
committer Aymeric Augustin <aymeric.augustin@m4x.org>  2014-03-22 21:35:46 +0100
commit    b66e85342b6ab28b7f48b47fe3fcfbccd5d9943f (patch)
tree      8655a0b67f2a697c3550a84189be3021ce7b276c /tests/delete_regress
parent    3a97f992fbfbcf8b0480875b257e5d541a4b8315 (diff)
download  django-b66e85342b6ab28b7f48b47fe3fcfbccd5d9943f.tar.gz
Fixed #22308 -- Regression from 0f956085.
Rewrote the test for #9479 according to the original ticket.
Diffstat (limited to 'tests/delete_regress')
-rw-r--r--  tests/delete_regress/tests.py  32
1 file changed, 15 insertions, 17 deletions
diff --git a/tests/delete_regress/tests.py b/tests/delete_regress/tests.py
index 0d33c9df9f..abfa248aea 100644
--- a/tests/delete_regress/tests.py
+++ b/tests/delete_regress/tests.py
@@ -15,46 +15,44 @@ from .models import (Book, Award, AwardNote, Person, Child, Toy, PlayedWith,
 # Can't run this test under SQLite, because you can't
 # get two connections to an in-memory database.
+@skipUnlessDBFeature('test_db_allows_multiple_connections')
 class DeleteLockingTest(TransactionTestCase):
 
     available_apps = ['delete_regress']
 
     def setUp(self):
-        transaction.set_autocommit(False)
         # Create a second connection to the default database
         new_connections = ConnectionHandler(settings.DATABASES)
         self.conn2 = new_connections[DEFAULT_DB_ALIAS]
         self.conn2.set_autocommit(False)
 
     def tearDown(self):
-        transaction.rollback()
-        transaction.set_autocommit(True)
         # Close down the second connection.
         self.conn2.rollback()
         self.conn2.close()
 
-    @skipUnlessDBFeature('test_db_allows_multiple_connections')
     def test_concurrent_delete(self):
-        "Deletes on concurrent transactions don't collide and lock the database. Regression for #9479"
-
-        # Create some dummy data
+        """Concurrent deletes don't collide and lock the database (#9479)."""
         with transaction.atomic():
             Book.objects.create(id=1, pagecount=100)
             Book.objects.create(id=2, pagecount=200)
             Book.objects.create(id=3, pagecount=300)
 
-        self.assertEqual(3, Book.objects.count())
-
-        # Delete something using connection 2.
-        cursor2 = self.conn2.cursor()
-        cursor2.execute('DELETE from delete_regress_book WHERE id=1')
-        self.conn2._commit()
-
-        # Now perform a queryset delete that covers the object
-        # deleted in connection 2. This causes an infinite loop
-        # under MySQL InnoDB unless we keep track of already
-        # deleted objects.
         with transaction.atomic():
+            # Start a transaction on the main connection.
+            self.assertEqual(3, Book.objects.count())
+
+            # Delete something using another database connection.
+            with self.conn2.cursor() as cursor2:
+                cursor2.execute("DELETE from delete_regress_book WHERE id = 1")
+            self.conn2.commit()
+
+            # In the same transaction on the main connection, perform a
+            # queryset delete that covers the object deleted with the other
+            # connection. This causes an infinite loop under MySQL InnoDB
+            # unless we keep track of already deleted objects.
             Book.objects.filter(pagecount__lt=250).delete()
+
         self.assertEqual(1, Book.objects.count())
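
Note on the "keep track of already deleted objects" comment above: the test exercises a deletion path that must remember which rows it has already handled, so that a row removed by a concurrent transaction cannot make the queryset delete loop forever under MySQL InnoDB. The snippet below is a minimal, hypothetical sketch of that idea in isolation; the function names and batch-based structure are illustrative assumptions, not Django's actual deletion Collector.

# Hypothetical sketch, not Django code: remembering already-handled primary
# keys guarantees termination even when rows vanish concurrently.
def delete_in_batches(fetch_candidate_pks, delete_pks):
    """Repeatedly fetch and delete candidate rows, skipping ones already seen."""
    seen = set()
    while True:
        # Skip anything already processed (or deleted by another transaction
        # after we first saw it).
        batch = [pk for pk in fetch_candidate_pks() if pk not in seen]
        if not batch:
            return seen  # no new work: stop instead of spinning forever
        seen.update(batch)
        delete_pks(batch)

# Example usage with in-memory stand-ins for the database calls:
remaining = {1, 2, 3}
deleted = delete_in_batches(lambda: sorted(remaining),
                            lambda pks: [remaining.discard(pk) for pk in pks])
assert deleted == {1, 2, 3}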