summaryrefslogtreecommitdiff
path: root/test
diff options
context:
space:
mode:
authorGunnar Aastrand Grimnes <gromgull@users.noreply.github.com>2014-07-10 21:02:42 +0200
committerGunnar Aastrand Grimnes <gromgull@users.noreply.github.com>2014-07-10 21:02:42 +0200
commitd70e84c2358cb54c739a6c375bcb42bbc4833295 (patch)
treeb3e3d742ee9e78cc9cb1c551452e249b8aaa8c51 /test
parent9321c66aa97bf8320d28162d1b70d52b90ff9fe3 (diff)
parente1d115bc43ecf396c4a79e7a83a9b5ed4dcce9f0 (diff)
downloadrdflib-d70e84c2358cb54c739a6c375bcb42bbc4833295.tar.gz
Merge pull request #402 from bcogrel/sparqlstore-graph-awareness
SPARQLStore graph awareness
Diffstat (limited to 'test')
-rw-r--r--test/test_dataset.py91
-rw-r--r--test/test_sparqlupdatestore.py10
2 files changed, 74 insertions, 27 deletions
diff --git a/test/test_dataset.py b/test/test_dataset.py
index 0559364f..8ead20c4 100644
--- a/test/test_dataset.py
+++ b/test/test_dataset.py
@@ -24,41 +24,57 @@ class DatasetTestCase(unittest.TestCase):
if self.store == "SQLite":
_, self.tmppath = mkstemp(
prefix='test', dir='/tmp', suffix='.sqlite')
+ elif self.store == "SPARQLUpdateStore":
+ root = "http://localhost:3030/ukpp/"
+ self.graph.open((root + "sparql", root + "update"))
else:
self.tmppath = mkdtemp()
- self.graph.open(self.tmppath, create=True)
- self.michel = URIRef(u'michel')
- self.tarek = URIRef(u'tarek')
- self.bob = URIRef(u'bob')
- self.likes = URIRef(u'likes')
- self.hates = URIRef(u'hates')
- self.pizza = URIRef(u'pizza')
- self.cheese = URIRef(u'cheese')
-
- self.c1 = URIRef(u'context-1')
- self.c2 = URIRef(u'context-2')
+
+ if self.store != "SPARQLUpdateStore":
+ self.graph.open(self.tmppath, create=True)
+ self.michel = URIRef(u'urn:michel')
+ self.tarek = URIRef(u'urn:tarek')
+ self.bob = URIRef(u'urn:bob')
+ self.likes = URIRef(u'urn:likes')
+ self.hates = URIRef(u'urn:hates')
+ self.pizza = URIRef(u'urn:pizza')
+ self.cheese = URIRef(u'urn:cheese')
+
+ # Use regular URIs because SPARQL endpoints like Fuseki alter short names
+ self.c1 = URIRef(u'urn:context-1')
+ self.c2 = URIRef(u'urn:context-2')
# delete the graph for each test!
self.graph.remove((None, None, None))
+ for c in self.graph.contexts():
+ c.remove((None, None, None))
+ assert len(c) == 0
+ self.graph.remove_graph(c)
def tearDown(self):
self.graph.close()
- if os.path.isdir(self.tmppath):
- shutil.rmtree(self.tmppath)
+ if self.store == "SPARQLUpdateStore":
+ pass
else:
- os.remove(self.tmppath)
+ if os.path.isdir(self.tmppath):
+ shutil.rmtree(self.tmppath)
+ else:
+ os.remove(self.tmppath)
- def testGraphAware(self):
+ def testGraphAware(self):
+
if not self.graph.store.graph_aware: return
g = self.graph
g1 = g.graph(self.c1)
-
-
- # added graph exists
- self.assertEquals(set(x.identifier for x in self.graph.contexts()),
- set([self.c1, DATASET_DEFAULT_GRAPH_ID]))
+
+ # Some SPARQL endpoint backends (e.g. TDB) do not consider
+ # empty named graphs
+ if self.store != "SPARQLUpdateStore":
+ # added graph exists
+ self.assertEquals(set(x.identifier for x in self.graph.contexts()),
+ set([self.c1, DATASET_DEFAULT_GRAPH_ID]))
# added graph is empty
self.assertEquals(len(g1), 0)
@@ -66,7 +82,7 @@ class DatasetTestCase(unittest.TestCase):
g1.add( (self.tarek, self.likes, self.pizza) )
# added graph still exists
- self.assertEquals(set(x.identifier for x in self.graph.contexts()),
+ self.assertEquals(set(x.identifier for x in self.graph.contexts()),
set([self.c1, DATASET_DEFAULT_GRAPH_ID]))
# added graph contains one triple
@@ -77,9 +93,12 @@ class DatasetTestCase(unittest.TestCase):
# added graph is empty
self.assertEquals(len(g1), 0)
- # graph still exists, although empty
- self.assertEquals(set(x.identifier for x in self.graph.contexts()),
- set([self.c1, DATASET_DEFAULT_GRAPH_ID]))
+ # Some SPARQL endpoint backends (e.g. TDB) do not consider
+ # empty named graphs
+ if self.store != "SPARQLUpdateStore":
+ # graph still exists, although empty
+ self.assertEquals(set(x.identifier for x in self.graph.contexts()),
+ set([self.c1, DATASET_DEFAULT_GRAPH_ID]))
g.remove_graph(self.c1)
@@ -87,7 +106,11 @@ class DatasetTestCase(unittest.TestCase):
self.assertEquals(set(x.identifier for x in self.graph.contexts()),
set([DATASET_DEFAULT_GRAPH_ID]))
- def testDefaultGraph(self):
+ def testDefaultGraph(self):
+        # Sometimes the default graph is read-only (e.g. TDB in union mode)
+ if self.store == "SPARQLUpdateStore":
+ print "Please make sure updating the default graph " \
+ "is supported by your SPARQL endpoint"
self.graph.add(( self.tarek, self.likes, self.pizza))
self.assertEquals(len(self.graph), 1)
@@ -103,7 +126,11 @@ class DatasetTestCase(unittest.TestCase):
self.assertEquals(set(x.identifier for x in self.graph.contexts()),
set([DATASET_DEFAULT_GRAPH_ID]))
- def testNotUnion(self):
+ def testNotUnion(self):
+ # Union depends on the SPARQL endpoint configuration
+ if self.store == "SPARQLUpdateStore":
+ print "Please make sure your SPARQL endpoint has not configured " \
+ "its default graph as the union of the named graphs"
g1 = self.graph.graph(self.c1)
g1.add((self.tarek, self.likes, self.pizza))
@@ -120,13 +147,23 @@ if __name__ == '__main__':
pluginname = sys.argv[1]
tests = 0
+
for s in plugin.plugins(pluginname, plugin.Store):
if s.name in ('default', 'IOMemory', 'Auditable',
- 'Concurrent', 'SPARQLStore', 'SPARQLUpdateStore'):
+ 'Concurrent', 'SPARQLStore'):
continue # these are tested by default
+
if not s.getClass().graph_aware:
continue
+ if s.name == "SPARQLUpdateStore":
+ import urllib2
+ try:
+ assert len(urllib2.urlopen("http://localhost:3030/").read()) > 0
+ except:
+ sys.stderr.write("No SPARQL endpoint for %s (tests skipped)\n" % s.name)
+ continue
+
locals()["t%d" % tests] = type("%sContextTestCase" % s.name, (
DatasetTestCase,), {"store": s.name})
tests += 1
diff --git a/test/test_sparqlupdatestore.py b/test/test_sparqlupdatestore.py
index eacad25e..2d54d0cb 100644
--- a/test/test_sparqlupdatestore.py
+++ b/test/test_sparqlupdatestore.py
@@ -258,6 +258,16 @@ class TestSparql11(unittest.TestCase):
'only michel likes pizza'
)
+ def testEmptyNamedGraph(self):
+ empty_graph_iri = u"urn:empty-graph-1"
+ self.graph.update(u"CREATE GRAPH <%s>" % empty_graph_iri)
+ named_graphs = [unicode(r[0]) for r in self.graph.query(
+ "SELECT ?name WHERE { GRAPH ?name {} }")]
+ # Some SPARQL endpoint backends (like TDB) are not able to find empty named graphs
+ # (at least with this query)
+ if empty_graph_iri in named_graphs:
+ self.assertTrue(empty_graph_iri in [unicode(g.identifier)
+ for g in self.graph.contexts()])
from nose import SkipTest
import urllib2