summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorNicholas Car <nicholas.car@surroundaustralia.com>2021-07-02 21:24:43 +1000
committerNicholas Car <nicholas.car@surroundaustralia.com>2021-07-02 21:24:43 +1000
commit9ed2c799d311fb88cdfaba07b2b76315a0e25c96 (patch)
tree5cca1a13059bf59d3ff685ea50bba728a49934d5
parentf00f678d4258011a169fa3b0ec031c1b00572995 (diff)
parent7cf5c38ead0c2d2ac590eb5ab72677250f52e6ab (diff)
downloadrdflib-9ed2c799d311fb88cdfaba07b2b76315a0e25c96.tar.gz
merge master
-rw-r--r--.drone.yml22
-rw-r--r--examples/datasets.py146
-rw-r--r--examples/simple_example.py59
-rw-r--r--examples/sleepycat_example.py2
-rw-r--r--examples/sparqlstore_example.py2
-rw-r--r--rdflib/compat.py32
-rw-r--r--rdflib/extras/infixowl.py6
-rw-r--r--rdflib/graph.py113
-rw-r--r--rdflib/namespace.py144
-rw-r--r--rdflib/parser.py22
-rw-r--r--rdflib/paths.py12
-rw-r--r--rdflib/plugin.py8
-rw-r--r--rdflib/plugins/parsers/ntriples.py2
-rw-r--r--rdflib/plugins/serializers/turtle.py2
-rw-r--r--rdflib/plugins/sparql/algebra.py430
-rw-r--r--rdflib/plugins/sparql/parser.py13
-rw-r--r--rdflib/plugins/stores/memory.py29
-rw-r--r--rdflib/plugins/stores/sparqlconnector.py2
-rw-r--r--rdflib/plugins/stores/sparqlstore.py8
-rw-r--r--rdflib/query.py29
-rw-r--r--rdflib/resource.py2
-rw-r--r--rdflib/term.py31
-rw-r--r--setup.cfg6
-rw-r--r--setup.py2
-rw-r--r--test/helper.py4
-rw-r--r--test/test_bnode_ncname.py33
-rw-r--r--test/test_conneg.py91
-rw-r--r--test/test_core_sparqlstore.py26
-rw-r--r--test/test_dawg.py29
-rw-r--r--test/test_graph_http.py168
-rw-r--r--test/test_graph_operator.py27
-rw-r--r--test/test_issue274.py2
-rw-r--r--test/test_literal.py5
-rw-r--r--test/test_rdfxml.py5
-rw-r--r--test/test_serialize.py43
-rw-r--r--test/test_sparql_parser.py49
-rw-r--r--test/test_sparqlstore.py393
-rw-r--r--test/test_swap_n3.py5
-rw-r--r--test/testutils.py300
-rw-r--r--test/translate_algebra/main.py789
-rw-r--r--test/translate_algebra/test_base.py150
-rw-r--r--test/translate_algebra/test_data/test_functions__functional_forms.txt20
-rw-r--r--test/translate_algebra/test_data/test_functions__functional_forms_not_exists.txt6
-rw-r--r--test/translate_algebra/test_data/test_functions__functions_on_dates_and_time.txt5
-rw-r--r--test/translate_algebra/test_data/test_functions__functions_on_numerics.txt3
-rw-r--r--test/translate_algebra/test_data/test_functions__functions_on_rdf_terms.txt16
-rw-r--r--test/translate_algebra/test_data/test_functions__functions_on_strings.txt18
-rw-r--r--test/translate_algebra/test_data/test_functions__hash_functions.txt9
-rw-r--r--test/translate_algebra/test_data/test_graph_patterns__aggregate_join.txt13
-rw-r--r--test/translate_algebra/test_data/test_graph_patterns__bgp.txt11
-rw-r--r--test/translate_algebra/test_data/test_graph_patterns__extend.txt12
-rw-r--r--test/translate_algebra/test_data/test_graph_patterns__filter.txt13
-rw-r--r--test/translate_algebra/test_data/test_graph_patterns__graph.txt23
-rw-r--r--test/translate_algebra/test_data/test_graph_patterns__group.txt6
-rw-r--r--test/translate_algebra/test_data/test_graph_patterns__having.txt10
-rw-r--r--test/translate_algebra/test_data/test_graph_patterns__join.txt11
-rw-r--r--test/translate_algebra/test_data/test_graph_patterns__left_join.txt14
-rw-r--r--test/translate_algebra/test_data/test_graph_patterns__minus.txt8
-rw-r--r--test/translate_algebra/test_data/test_graph_patterns__union.txt14
-rw-r--r--test/translate_algebra/test_data/test_integration__complex_query1.txt34
-rw-r--r--test/translate_algebra/test_data/test_operators__arithmetics.txt3
-rw-r--r--test/translate_algebra/test_data/test_operators__conditional_and.txt5
-rw-r--r--test/translate_algebra/test_data/test_operators__conditional_or.txt5
-rw-r--r--test/translate_algebra/test_data/test_operators__relational.txt5
-rw-r--r--test/translate_algebra/test_data/test_operators__unary.txt5
-rw-r--r--test/translate_algebra/test_data/test_other__service1.txt16
-rw-r--r--test/translate_algebra/test_data/test_other__service2.txt13
-rw-r--r--test/translate_algebra/test_data/test_other__values.txt14
-rw-r--r--test/translate_algebra/test_data/test_property_path__alternative_path.txt7
-rw-r--r--test/translate_algebra/test_data/test_property_path__inverse_path.txt11
-rw-r--r--test/translate_algebra/test_data/test_property_path__negated_property_set.txt7
-rw-r--r--test/translate_algebra/test_data/test_property_path__one_or_more_path.txt7
-rw-r--r--test/translate_algebra/test_data/test_property_path__predicate_path.txt1
-rw-r--r--test/translate_algebra/test_data/test_property_path__sequence_path.txt7
-rw-r--r--test/translate_algebra/test_data/test_property_path__zero_or_more_path.txt7
-rw-r--r--test/translate_algebra/test_data/test_property_path__zero_or_one_path.txt7
-rw-r--r--test/translate_algebra/test_data/test_solution_modifiers__distinct.txt6
-rw-r--r--test/translate_algebra/test_data/test_solution_modifiers__order_by.txt7
-rw-r--r--test/translate_algebra/test_data/test_solution_modifiers__project.txt2
-rw-r--r--test/translate_algebra/test_data/test_solution_modifiers__reduced.txt7
-rw-r--r--test/translate_algebra/test_data/test_solution_modifiers__slice.txt8
-rw-r--r--test/translate_algebra/test_data/test_solution_modifiers__to_multiset.txt11
-rw-r--r--tox.ini24
83 files changed, 3191 insertions, 483 deletions
diff --git a/.drone.yml b/.drone.yml
index e40f9358..d6d839db 100644
--- a/.drone.yml
+++ b/.drone.yml
@@ -17,8 +17,8 @@ steps:
- pip install --default-timeout 60 coveralls && export HAS_COVERALLS=1
- python setup.py install
- flake8 --exit-zero rdflib
- - PYTHONWARNINGS=default nosetests --with-timer --timer-top-n 42 --with-coverage --cover-tests --cover-package=rdflib
- - coverage report
+ - PYTHONWARNINGS=default nosetests --with-timer --timer-top-n 42
+
---
kind: pipeline
@@ -31,7 +31,18 @@ platform:
steps:
- name: test
image: python:3.7
+ environment:
+ COVERALLS_SERVICE_NAME: RDFLib-Drone
+ COVERALLS_REPO_TOKEN:
+ from_secret: coveralls_token
commands:
+ - export COVERALLS_SERVICE_NUMBER="$DRONE_BUILD_NUMBER"
+ - export COVERALLS_SERVICE_JOB_ID="$DRONE_STAGE_NAME"
+ - export COVERALLS_SERVICE_JOB_NUMBER="$DRONE_BUILD_NUMBER"
+ - export COVERALLS_FLAG_NAME="$DRONE_STAGE_KIND"
+ - export COVERALLS_GIT_REPO="$DRONE_REPO_NAME"
+ - export COVERALLS_GIT_BRANCH="$DRONE_SOURCE_BRANCH"
+ - export CI_BRANCH="$DRONE_SOURCE_BRANCH"
- bash .travis.fuseki_install_optional.sh
- pip install --default-timeout 60 -r requirements.txt
- pip install --default-timeout 60 -r requirements.dev.txt
@@ -39,7 +50,8 @@ steps:
- python setup.py install
- flake8 --exit-zero rdflib
- PYTHONWARNINGS=default nosetests --with-timer --timer-top-n 42 --with-coverage --cover-tests --cover-package=rdflib
- - coverage report
+ - coverage report --skip-covered
+ - coveralls
---
kind: pipeline
@@ -59,5 +71,5 @@ steps:
- pip install --default-timeout 60 coveralls && export HAS_COVERALLS=1
- python setup.py install
- flake8 --exit-zero rdflib
- - PYTHONWARNINGS=default nosetests --with-timer --timer-top-n 42 --with-coverage --cover-tests --cover-package=rdflib
- - coverage report
+ - PYTHONWARNINGS=default nosetests --with-timer --timer-top-n 42
+
diff --git a/examples/datasets.py b/examples/datasets.py
new file mode 100644
index 00000000..76f8ae73
--- /dev/null
+++ b/examples/datasets.py
@@ -0,0 +1,146 @@
+"""
+An RDFLib Dataset is a slight extension to ConjunctiveGraph: it uses simpler terminology
+and has a few additional convenience method extensions, for example add() can be used to
+add quads directly to a specific Graph within the Dataset.
+
+This example file shows how to decalre a Dataset, add content to it, serialise it, query it
+and remove things from it.
+"""
+
+from rdflib import Dataset, URIRef, Literal, Namespace
+
+#
+# Create & Add
+#
+
+# Create an empty Dataset
+d = Dataset()
+# Add a namespace prefix to it, just like for Graph
+d.bind("ex", Namespace("http://example.com/"))
+
+# Declare a Graph URI to be used to identify a Graph
+graph_1 = URIRef("http://example.com/graph-1")
+
+# Add an empty Graph, identified by graph_1, to the Dataset
+d.graph(identifier=graph_1)
+
+# Add two quads to Graph graph_1 in the Dataset
+d.add((
+ URIRef("http://example.com/subject-x"),
+ URIRef("http://example.com/predicate-x"),
+ Literal("Triple X"),
+ graph_1
+))
+d.add((
+ URIRef("http://example.com/subject-z"),
+ URIRef("http://example.com/predicate-z"),
+ Literal("Triple Z"),
+ graph_1
+))
+
+# Add another quad to the Dataset to a non-existent Graph:
+# the Graph is created automatically
+d.add((
+ URIRef("http://example.com/subject-y"),
+ URIRef("http://example.com/predicate-y"),
+ Literal("Triple Y"),
+ URIRef("http://example.com/graph-2")
+))
+
+# printing the Dataset like this: print(d.serialize(format="trig"))
+# produces a result like this:
+"""
+@prefix ex: <http://example.com/> .
+
+ex:graph-1 {
+ ex:subject-x ex:predicate-x "Triple X" .
+
+ ex:subject-z ex:predicate-z "Triple Z" .
+}
+
+ex:graph-2 {
+ ex:subject-y ex:predicate-y "Triple Y" .
+}
+"""
+print("Printing Serialised Dataset:")
+print("---")
+print(d.serialize(format="trig"))
+print("---")
+print()
+print()
+
+#
+# Use & Query
+#
+
+# print the length of the Dataset, i.e. the count of all triples in all Graphs
+# we should get
+"""
+3
+"""
+print("Printing Dataset Length:")
+print("---")
+print(len(d))
+print("---")
+print()
+print()
+
+# Query one graph in the Dataset for all it's triples
+# we should get
+"""
+(rdflib.term.URIRef('http://example.com/subject-z'), rdflib.term.URIRef('http://example.com/predicate-z'), rdflib.term.Literal('Triple Z'))
+(rdflib.term.URIRef('http://example.com/subject-x'), rdflib.term.URIRef('http://example.com/predicate-x'), rdflib.term.Literal('Triple X'))
+"""
+print("Printing all triple from one Graph in the Dataset:")
+print("---")
+for triple in d.triples((None, None, None, graph_1)):
+ print(triple)
+print("---")
+print()
+print()
+
+# Query the union of all graphs in the dataset for all triples
+# we should get Nothing:
+"""
+"""
+# A Dataset's default union graph does not exist by default (default_union property is False)
+print("Attempt #1 to print all triples in the Dataset:")
+print("---")
+for triple in d.triples((None, None, None, None)):
+ print(triple)
+print("---")
+print()
+print()
+
+# Set the Dataset's default_union property to True and re-query
+d.default_union = True
+print("Attempt #2 to print all triples in the Dataset:")
+print("---")
+for triple in d.triples((None, None, None, None)):
+ print(triple)
+print("---")
+print()
+print()
+
+
+#
+# Remove
+#
+
+# Remove Graph graph_1 from the Dataset
+d.remove_graph(graph_1)
+
+# printing the Dataset like this: print(d.serialize(format="trig"))
+# now produces a result like this:
+
+"""
+ex:graph-2 {
+ ex:subject-y ex:predicate-y "Triple Y" .
+}
+"""
+print("Printing Serialised Dataset after graph_1 removal:")
+print("---")
+print(d.serialize(format="trig").strip())
+print("---")
+print()
+print()
diff --git a/examples/simple_example.py b/examples/simple_example.py
new file mode 100644
index 00000000..077382a3
--- /dev/null
+++ b/examples/simple_example.py
@@ -0,0 +1,59 @@
+from rdflib import Graph, Literal, BNode, RDF
+from rdflib.namespace import FOAF, DC
+
+if __name__ == "__main__":
+
+ store = Graph()
+
+ # Bind a few prefix, namespace pairs for pretty output
+ store.bind("dc", DC)
+ store.bind("foaf", FOAF)
+
+ # Create an identifier to use as the subject for Donna.
+ donna = BNode()
+
+ # Add triples using store's add method.
+ store.add((donna, RDF.type, FOAF.Person))
+ store.add((donna, FOAF.nick, Literal("donna", lang="foo")))
+ store.add((donna, FOAF.name, Literal("Donna Fales")))
+
+ # Iterate over triples in store and print them out.
+ print("--- printing raw triples ---")
+ for s, p, o in store:
+ print(s, p, o)
+
+ # For each foaf:Person in the store print out its mbox property.
+ print()
+ print("--- printing mboxes ---")
+ for person in store.subjects(RDF.type, FOAF["Person"]):
+ for mbox in store.objects(person, FOAF["mbox"]):
+ print(mbox)
+
+ print("--- saving RDF to a file (donna_foaf.rdf) ---")
+ # Serialize the store as RDF/XML to the file donna_foaf.rdf.
+ store.serialize("donna_foaf.rdf", format="pretty-xml", max_depth=3)
+
+ # Let's show off the serializers
+ print()
+ print("RDF Serializations:")
+
+ # Serialize as XML
+ print("--- start: rdf-xml ---")
+ print(store.serialize(format="pretty-xml"))
+ print("--- end: rdf-xml ---\n")
+
+ # Serialize as Turtle
+ print("--- start: turtle ---")
+ print(store.serialize(format="turtle"))
+ print("--- end: turtle ---\n")
+
+ # Serialize as NTriples
+ print("--- start: ntriples ---")
+ print(store.serialize(format="nt"))
+ print("--- end: ntriples ---\n")
+
+ # Serialize as JSON-LD
+ # only if you have the JSON-LD plugin installed!
+ print("--- start: JSON-LD ---")
+ print(store.serialize(format="json-ld"))
+ print("--- end: JSON-LD ---\n")
diff --git a/examples/sleepycat_example.py b/examples/sleepycat_example.py
index 1130da0e..484484b9 100644
--- a/examples/sleepycat_example.py
+++ b/examples/sleepycat_example.py
@@ -39,8 +39,6 @@ if __name__ == "__main__":
# close when done, otherwise sleepycat will leak lock entries.
graph.close()
- graph = None
-
# reopen the graph
graph = ConjunctiveGraph("Sleepycat")
diff --git a/examples/sparqlstore_example.py b/examples/sparqlstore_example.py
index 7d0ac681..9d9d3fd4 100644
--- a/examples/sparqlstore_example.py
+++ b/examples/sparqlstore_example.py
@@ -17,7 +17,7 @@ if __name__ == "__main__":
print(
"According to DBPedia, Berlin has a population of {0:,}".format(
- int(pop), ",d"
+ int(pop)
).replace(",", ".")
)
print()
diff --git a/rdflib/compat.py b/rdflib/compat.py
index a2194ac0..6020a630 100644
--- a/rdflib/compat.py
+++ b/rdflib/compat.py
@@ -6,29 +6,16 @@ and different versions of support libraries.
import re
import codecs
import warnings
+import typing as t
-
-# clean ElementTree import
-try:
- from lxml import etree
-except ImportError:
+if t.TYPE_CHECKING:
+ import xml.etree.ElementTree as etree
+else:
try:
- # Python 2.5
- import xml.etree.cElementTree as etree
+ from lxml import etree
except ImportError:
- try:
- # Python 2.5
- import xml.etree.ElementTree as etree
- except ImportError:
- try:
- # normal cElementTree install
- import cElementTree as etree
- except ImportError:
- try:
- # normal ElementTree install
- import elementtree.ElementTree as etree
- except ImportError:
- raise Exception("Failed to import ElementTree from any known place")
+ import xml.etree.ElementTree as etree
+
try:
etree_register_namespace = etree.register_namespace
@@ -117,6 +104,11 @@ def decodeUnicodeEscape(s):
s is a unicode string
replace ``\\n`` and ``\\u00AC`` unicode escapes
"""
+ if "\\" not in s:
+ # Most of times, there are no backslashes in strings.
+ # In the general case, it could use maketrans and translate.
+ return s
+
s = s.replace("\\t", "\t")
s = s.replace("\\n", "\n")
s = s.replace("\\r", "\r")
diff --git a/rdflib/extras/infixowl.py b/rdflib/extras/infixowl.py
index d3464bee..83f02bce 100644
--- a/rdflib/extras/infixowl.py
+++ b/rdflib/extras/infixowl.py
@@ -989,7 +989,7 @@ class Class(AnnotatableTerms):
for annotation in self.graph.objects(subject=self, predicate=term):
yield annotation
- annotation = property(_get_annotation, lambda x: x)
+ annotation = property(_get_annotation, lambda x: x) # type: ignore[arg-type,misc]
def _get_extentQuery(self):
return (Variable("CLASS"), RDF.type, self.identifier)
@@ -1496,14 +1496,14 @@ class BooleanClass(OWLRDFListProxy, Class):
@BooleanClassExtentHelper(OWL_NS.intersectionOf)
@Callable
- def getIntersections():
+ def getIntersections(): # type: ignore[misc]
pass
getIntersections = Callable(getIntersections)
@BooleanClassExtentHelper(OWL_NS.unionOf)
@Callable
- def getUnions():
+ def getUnions(): # type: ignore[misc]
pass
getUnions = Callable(getUnions)
diff --git a/rdflib/graph.py b/rdflib/graph.py
index d2a5e872..1b83b22e 100644
--- a/rdflib/graph.py
+++ b/rdflib/graph.py
@@ -1,4 +1,4 @@
-from typing import Optional, Union
+from typing import Optional, Union, Type, cast, overload
import logging
from warnings import warn
import random
@@ -19,8 +19,9 @@ from rdflib.exceptions import ParserError
import os
import shutil
import tempfile
+import pathlib
-from io import BytesIO
+from io import BytesIO, BufferedIOBase
from urllib.parse import urlparse
assert Literal # avoid warning
@@ -557,7 +558,10 @@ class Graph(Node):
def __add__(self, other):
"""Set-theoretic union
BNode IDs are not changed."""
- retval = Graph()
+ try:
+ retval = type(self)()
+ except TypeError:
+ retval = Graph()
for (prefix, uri) in set(list(self.namespaces()) + list(other.namespaces())):
retval.bind(prefix, uri)
for x in self:
@@ -569,7 +573,10 @@ class Graph(Node):
def __mul__(self, other):
"""Set-theoretic intersection.
BNode IDs are not changed."""
- retval = Graph()
+ try:
+ retval = type(self)()
+ except TypeError:
+ retval = Graph()
for x in other:
if x in self:
retval.add(x)
@@ -578,7 +585,10 @@ class Graph(Node):
def __sub__(self, other):
"""Set-theoretic difference.
BNode IDs are not changed."""
- retval = Graph()
+ try:
+ retval = type(self)()
+ except TypeError:
+ retval = Graph()
for x in self:
if x not in other:
retval.add(x)
@@ -956,8 +966,82 @@ class Graph(Node):
"""Turn uri into an absolute URI if it's not one already"""
return self.namespace_manager.absolutize(uri, defrag)
+ # no destination and non-None positional encoding
+ @overload
+ def serialize(
+ self, destination: None, format: str, base: Optional[str], encoding: str, **args
+ ) -> bytes:
+ ...
+
+ # no destination and non-None keyword encoding
+ @overload
def serialize(
- self, destination=None, format="turtle", base=None, encoding=None, **args
+ self,
+ *,
+ destination: None = ...,
+ format: str = ...,
+ base: Optional[str] = ...,
+ encoding: str,
+ **args
+ ) -> bytes:
+ ...
+
+ # no destination and None positional encoding
+ @overload
+ def serialize(
+ self,
+ destination: None,
+ format: str,
+ base: Optional[str],
+ encoding: None,
+ **args
+ ) -> str:
+ ...
+
+ # no destination and None keyword encoding
+ @overload
+ def serialize(
+ self,
+ *,
+ destination: None = ...,
+ format: str = ...,
+ base: Optional[str] = ...,
+ encoding: None = None,
+ **args
+ ) -> str:
+ ...
+
+ # non-none destination
+ @overload
+ def serialize(
+ self,
+ destination: Union[str, BufferedIOBase],
+ format: str = ...,
+ base: Optional[str] = ...,
+ encoding: Optional[str] = ...,
+ **args
+ ) -> None:
+ ...
+
+ # fallback
+ @overload
+ def serialize(
+ self,
+ destination: Union[str, BufferedIOBase, None] = None,
+ format: str = "turtle",
+ base: Optional[str] = None,
+ encoding: Optional[str] = None,
+ **args
+ ) -> Optional[Union[bytes, str]]:
+ ...
+
+ def serialize(
+ self,
+ destination: Union[str, BufferedIOBase, None] = None,
+ format: str = "turtle",
+ base: Optional[str] = None,
+ encoding: Optional[str] = None,
+ **args
) -> Optional[Union[bytes, str]]:
"""Serialize the Graph to destination
@@ -978,6 +1062,7 @@ class Graph(Node):
base = self.base
serializer = plugin.get(format, Serializer)(self)
+ stream: BufferedIOBase
if destination is None:
stream = BytesIO()
if encoding is None:
@@ -987,16 +1072,19 @@ class Graph(Node):
serializer.serialize(stream, base=base, encoding=encoding, **args)
return stream.getvalue()
if hasattr(destination, "write"):
- stream = destination
+ stream = cast(BufferedIOBase, destination)
serializer.serialize(stream, base=base, encoding=encoding, **args)
else:
- location = destination
+ if isinstance(destination, pathlib.PurePath):
+ location = str(destination)
+ else:
+ location = cast(str, destination)
scheme, netloc, path, params, _query, fragment = urlparse(location)
if netloc != "":
print(
"WARNING: not saving as location" + "is not a local file reference"
)
- return
+ return None
fd, name = tempfile.mkstemp()
stream = os.fdopen(fd, "wb")
serializer.serialize(stream, base=base, encoding=encoding, **args)
@@ -1007,6 +1095,7 @@ class Graph(Node):
else:
shutil.copy(name, dest)
os.remove(name)
+ return None
def print(self, format="turtle", encoding="utf-8", out=None):
print(
@@ -1146,8 +1235,8 @@ class Graph(Node):
def query(
self,
query_object,
- processor: str = "sparql",
- result: str = "sparql",
+ processor: Union[str, query.Processor] = "sparql",
+ result: Union[str, Type[query.Result]] = "sparql",
initNs=None,
initBindings=None,
use_store_provided: bool = True,
@@ -1183,7 +1272,7 @@ class Graph(Node):
pass # store has no own implementation
if not isinstance(result, query.Result):
- result = plugin.get(result, query.Result)
+ result = plugin.get(cast(str, result), query.Result)
if not isinstance(processor, query.Processor):
processor = plugin.get(processor, query.Processor)(self)
diff --git a/rdflib/namespace.py b/rdflib/namespace.py
index 3563b8c1..a53f2d1c 100644
--- a/rdflib/namespace.py
+++ b/rdflib/namespace.py
@@ -428,87 +428,101 @@ PROF = Namespace("http://www.w3.org/ns/dx/prof/")
PROV = ClosedNamespace(
uri=URIRef("http://www.w3.org/ns/prov#"),
terms=[
- "Entity",
"Activity",
+ "ActivityInfluence",
"Agent",
- "wasGeneratedBy",
- "wasDerivedFrom",
- "wasAttributedTo",
- "startedAtTime",
- "used",
- "wasInformedBy",
- "endedAtTime",
- "wasAssociatedWith",
- "actedOnBehalfOf",
+ "AgentInfluence",
+ "Association",
+ "Attribution",
+ "Bundle",
"Collection",
+ "Communication",
+ "Delegation",
+ "Derivation",
"EmptyCollection",
- "Bundle",
+ "End",
+ "Entity",
+ "EntityInfluence",
+ "Generation",
+ "Influence",
+ "InstantaneousEvent",
+ "Invalidation",
+ "Location",
+ "Organization",
"Person",
+ "Plan",
+ "PrimarySource",
+ "Quotation",
+ "Revision",
+ "Role",
"SoftwareAgent",
- "Organization",
- "Location",
+ "Start",
+ "Usage",
+ "actedOnBehalfOf",
+ "activity",
+ "agent",
"alternateOf",
- "specializationOf",
+ "aq",
+ "atLocation",
+ "atTime",
+ "category",
+ "component",
+ "constraints",
+ "definition",
+ "dm",
+ "editorialNote",
+ "editorsDefinition",
+ "endedAtTime",
+ "entity",
+ "generated",
"generatedAtTime",
- "hadPrimarySource",
- "value",
- "wasQuotedFrom",
- "wasRevisionOf",
- "invalidatedAtTime",
- "wasInvalidatedBy",
+ "hadActivity",
+ "hadGeneration",
"hadMember",
- "wasStartedBy",
- "wasEndedBy",
- "invalidated",
+ "hadPlan",
+ "hadPrimarySource",
+ "hadRole",
+ "hadUsage",
"influenced",
- "atLocation",
- "generated",
- "Influence",
- "EntityInfluence",
- "Usage",
- "Start",
- "End",
- "Derivation",
- "PrimarySource",
- "Quotation",
- "Revision",
- "ActivityInfluence",
- "Generation",
- "Communication",
- "Invalidation",
- "AgentInfluence",
- "Attribution",
- "Association",
- "Plan",
- "Delegation",
- "InstantaneousEvent",
- "Role",
- "wasInfluencedBy",
- "qualifiedInfluence",
- "qualifiedGeneration",
+ "influencer",
+ "invalidated",
+ "invalidatedAtTime",
+ "inverse",
+ "n",
+ "order",
+ "qualifiedAssociation",
+ "qualifiedAttribution",
+ "qualifiedCommunication",
+ "qualifiedDelegation",
"qualifiedDerivation",
+ "qualifiedEnd",
+ "qualifiedForm",
+ "qualifiedGeneration",
+ "qualifiedInfluence",
+ "qualifiedInvalidation",
"qualifiedPrimarySource",
"qualifiedQuotation",
"qualifiedRevision",
- "qualifiedAttribution",
- "qualifiedInvalidation",
"qualifiedStart",
"qualifiedUsage",
- "qualifiedCommunication",
- "qualifiedAssociation",
- "qualifiedEnd",
- "qualifiedDelegation",
- "influencer",
- "entity",
- "hadUsage",
- "hadGeneration",
- "activity",
- "agent",
- "hadPlan",
- "hadActivity",
- "atTime",
- "hadRole",
- ],
+ "sharesDefinitionWith",
+ "specializationOf",
+ "startedAtTime",
+ "unqualifiedForm",
+ "used",
+ "value",
+ "wasAssociatedWith",
+ "wasAttributedTo",
+ "wasDerivedFrom",
+ "wasEndedBy",
+ "wasGeneratedBy",
+ "wasInfluencedBy",
+ "wasInformedBy",
+ "wasInvalidatedBy",
+ "wasQuotedFrom",
+ "wasRevisionOf",
+ "wasStartedBy"
+ ]
)
QB = Namespace("http://purl.org/linked-data/cube#")
RDF = _RDFNamespace()
diff --git a/rdflib/parser.py b/rdflib/parser.py
index a6f155a6..ce4e5a2d 100644
--- a/rdflib/parser.py
+++ b/rdflib/parser.py
@@ -22,6 +22,7 @@ from urllib.request import Request
from urllib.request import url2pathname
from urllib.parse import urljoin
from urllib.request import urlopen
+from urllib.error import HTTPError
from xml.sax import xmlreader
@@ -39,7 +40,7 @@ __all__ = [
class Parser(object):
- __slots__ = set()
+ __slots__ = ()
def __init__(self):
pass
@@ -160,7 +161,22 @@ class URLInputSource(InputSource):
)
req = Request(system_id, None, myheaders)
- file = urlopen(req)
+
+ def _urlopen(req: Request):
+ try:
+ return urlopen(req)
+ except HTTPError as ex:
+ # 308 (Permanent Redirect) is not supported by current python version(s)
+ # See https://bugs.python.org/issue40321
+ # This custom error handling should be removed once all
+ # supported versions of python support 308.
+ if ex.code == 308:
+ req.full_url = ex.headers.get("Location")
+ return _urlopen(req)
+ else:
+ raise
+
+ file = _urlopen(req)
# Fix for issue 130 https://github.com/RDFLib/rdflib/issues/130
self.url = file.geturl() # in case redirections took place
self.setPublicId(self.url)
@@ -222,7 +238,7 @@ def create_input_source(
else:
if isinstance(source, str):
location = source
- elif isinstance(source, pathlib.Path):
+ elif isinstance(source, pathlib.PurePath):
location = str(source)
elif isinstance(source, bytes):
data = source
diff --git a/rdflib/paths.py b/rdflib/paths.py
index 532130ee..48cb2e7c 100644
--- a/rdflib/paths.py
+++ b/rdflib/paths.py
@@ -182,6 +182,7 @@ No vars specified:
from rdflib.term import URIRef, Node
+from typing import Union, Callable
# property paths
@@ -192,6 +193,13 @@ ZeroOrOne = "?"
class Path(object):
+
+ __or__: Callable[["Path", Union["URIRef", "Path"]], "AlternativePath"]
+ __invert__: Callable[["Path"], "InvPath"]
+ __neg__: Callable[["Path"], "NegatedPath"]
+ __truediv__: Callable[["Path", Union["URIRef", "Path"]], "SequencePath"]
+ __mul__: Callable[["Path", str], "MulPath"]
+
def eval(self, graph, subj=None, obj=None):
raise NotImplementedError()
@@ -500,7 +508,9 @@ else:
# as it would introduce circular imports)
URIRef.__or__ = path_alternative
- URIRef.__mul__ = mul_path
+ # ignore typing here as URIRef inherits from str,
+ # which has an incompatible definition of __mul__.
+ URIRef.__mul__ = mul_path # type: ignore
URIRef.__invert__ = inv_path
URIRef.__neg__ = neg_path
URIRef.__truediv__ = path_sequence
diff --git a/rdflib/plugin.py b/rdflib/plugin.py
index e1d47628..1e364e2c 100644
--- a/rdflib/plugin.py
+++ b/rdflib/plugin.py
@@ -36,6 +36,7 @@ from rdflib.query import (
UpdateProcessor,
)
from rdflib.exceptions import Error
+from typing import Type, TypeVar
__all__ = ["register", "get", "plugins", "PluginException", "Plugin", "PKGPlugin"]
@@ -85,7 +86,7 @@ class PKGPlugin(Plugin):
return self._class
-def register(name, kind, module_path, class_name):
+def register(name: str, kind, module_path, class_name):
"""
Register the plugin for (name, kind). The module_path and
class_name should be the path to a plugin class.
@@ -94,7 +95,10 @@ def register(name, kind, module_path, class_name):
_plugins[(name, kind)] = p
-def get(name, kind):
+PluginT = TypeVar("PluginT")
+
+
+def get(name: str, kind: Type[PluginT]) -> Type[PluginT]:
"""
Return the class for the specified (name, kind). Raises a
PluginException if unable to do so.
diff --git a/rdflib/plugins/parsers/ntriples.py b/rdflib/plugins/parsers/ntriples.py
index e728fc35..e4fe5833 100644
--- a/rdflib/plugins/parsers/ntriples.py
+++ b/rdflib/plugins/parsers/ntriples.py
@@ -307,7 +307,7 @@ class NTParser(Parser):
See http://www.w3.org/TR/rdf-testcases/#ntriples"""
- __slots__ = set()
+ __slots__ = ()
@classmethod
def parse(cls, source, sink, **kwargs):
diff --git a/rdflib/plugins/serializers/turtle.py b/rdflib/plugins/serializers/turtle.py
index 8ec9b479..2209b864 100644
--- a/rdflib/plugins/serializers/turtle.py
+++ b/rdflib/plugins/serializers/turtle.py
@@ -42,7 +42,7 @@ class RecursiveSerializer(Serializer):
predicateOrder = [RDF.type, RDFS.label]
maxDepth = 10
indentString = " "
- roundtrip_prefixes = tuple()
+ roundtrip_prefixes = ()
def __init__(self, store):
diff --git a/rdflib/plugins/sparql/algebra.py b/rdflib/plugins/sparql/algebra.py
index 466f8d4b..b4d48284 100644
--- a/rdflib/plugins/sparql/algebra.py
+++ b/rdflib/plugins/sparql/algebra.py
@@ -28,6 +28,9 @@ from pyparsing import ParseResults
# ---------------------------
# Some convenience methods
+from rdflib.term import Identifier
+
+
def OrderBy(p, expr):
return CompValue("OrderBy", p=p, expr=expr)
@@ -795,6 +798,433 @@ def translateQuery(q, base=None, initNs=None):
return Query(prologue, res)
+class ExpressionNotCoveredException(Exception):
+ pass
+
+
+def translateAlgebra(query_algebra: Query = None):
+ """
+
+ :param query_algebra: An algebra returned by the function call algebra.translateQuery(parse_tree).
+ :return: The query form generated from the SPARQL 1.1 algebra tree for select queries.
+
+ """
+ def overwrite(text):
+ file = open("query.txt", "w+")
+ file.write(text)
+ file.close()
+
+ def replace(old, new, search_from_match: str = None, search_from_match_occurrence: int = None, count: int = 1):
+ # Read in the file
+ with open('query.txt', 'r') as file:
+ filedata = file.read()
+
+ def find_nth(haystack, needle, n):
+ start = haystack.lower().find(needle)
+ while start >= 0 and n > 1:
+ start = haystack.lower().find(needle, start + len(needle))
+ n -= 1
+ return start
+
+ if search_from_match and search_from_match_occurrence:
+ position = find_nth(filedata, search_from_match, search_from_match_occurrence)
+ filedata_pre = filedata[:position]
+ filedata_post = filedata[position:].replace(old, new, count)
+ filedata = filedata_pre + filedata_post
+ else:
+ filedata = filedata.replace(old, new, count)
+
+ # Write the file out again
+ with open('query.txt', 'w') as file:
+ file.write(filedata)
+
+ def convert_node_arg(node_arg):
+ if isinstance(node_arg, Identifier):
+ return node_arg.n3()
+ elif isinstance(node_arg, CompValue):
+ return "{" + node_arg.name + "}"
+ elif isinstance(node_arg, Expr):
+ return "{" + node_arg.name + "}"
+ elif isinstance(node_arg, str):
+ return node_arg
+ else:
+ raise ExpressionNotCoveredException(
+ "The expression {0} might not be covered yet.".format(node_arg))
+
+ def sparql_query_text(node):
+ """
+ https://www.w3.org/TR/sparql11-query/#sparqlSyntax
+
+ :param node:
+ :return:
+ """
+
+ if isinstance(node, CompValue):
+ # 18.2 Query Forms
+ if node.name == "SelectQuery":
+ overwrite("-*-SELECT-*- " + "{" + node.p.name + "}")
+
+ # 18.2 Graph Patterns
+ elif node.name == "BGP":
+ # Identifiers or Paths
+ # Negated path throws a type error. Probably n3() method of negated paths should be fixed
+ triples = "".join(triple[0].n3() + " " + triple[1].n3() + " " + triple[2].n3() + "."
+ for triple in node.triples)
+ replace("{BGP}", triples)
+ # The dummy -*-SELECT-*- is placed during a SelectQuery or Multiset pattern in order to be able
+ # to match extended variables in a specific Select-clause (see "Extend" below)
+ replace("-*-SELECT-*-", "SELECT", count=-1)
+ # If there is no "Group By" clause the placeholder will simply be deleted. Otherwise there will be
+ # no matching {GroupBy} placeholder because it has already been replaced by "group by variables"
+ replace("{GroupBy}", "", count=-1)
+ replace("{Having}", "", count=-1)
+ elif node.name == "Join":
+ replace("{Join}", "{" + node.p1.name + "}{" + node.p2.name + "}") #
+ elif node.name == "LeftJoin":
+ replace("{LeftJoin}", "{" + node.p1.name + "}OPTIONAL{{" + node.p2.name + "}}")
+ elif node.name == "Filter":
+ if isinstance(node.expr, CompValue):
+ expr = node.expr.name
+ else:
+ raise ExpressionNotCoveredException("This expression might not be covered yet.")
+ if node.p:
+ # Filter with p=AggregateJoin = Having
+ if node.p.name == "AggregateJoin":
+ replace("{Filter}", "{" + node.p.name + "}")
+ replace("{Having}", "HAVING({" + expr + "})")
+ else:
+ replace("{Filter}", "FILTER({" + expr + "}) {" + node.p.name + "}")
+ else:
+ replace("{Filter}", "FILTER({" + expr + "})")
+
+ elif node.name == "Union":
+ replace("{Union}", "{{" + node.p1.name + "}}UNION{{" + node.p2.name + "}}")
+ elif node.name == "Graph":
+ expr = "GRAPH " + node.term.n3() + " {{" + node.p.name + "}}"
+ replace("{Graph}", expr)
+ elif node.name == "Extend":
+ query_string = open('query.txt', 'r').read().lower()
+ select_occurrences = query_string.count('-*-select-*-')
+ replace(node.var.n3(), "(" + convert_node_arg(node.expr) + " as " + node.var.n3() + ")",
+ search_from_match='-*-select-*-', search_from_match_occurrence=select_occurrences)
+ replace("{Extend}", "{" + node.p.name + "}")
+ elif node.name == "Minus":
+ expr = "{" + node.p1.name + "}MINUS{{" + node.p2.name + "}}"
+ replace("{Minus}", expr)
+ elif node.name == "Group":
+ group_by_vars = []
+ if node.expr:
+ for var in node.expr:
+ if isinstance(var, Identifier):
+ group_by_vars.append(var.n3())
+ else:
+ raise ExpressionNotCoveredException("This expression might not be covered yet.")
+ replace("{Group}", "{" + node.p.name + "}")
+ replace("{GroupBy}", "GROUP BY " + " ".join(group_by_vars) + " ")
+ else:
+ replace("{Group}", "{" + node.p.name + "}")
+ elif node.name == "AggregateJoin":
+ replace("{AggregateJoin}", "{" + node.p.name + "}")
+ for agg_func in node.A:
+ if isinstance(agg_func.res, Identifier):
+ identifier = agg_func.res.n3()
+ else:
+ raise ExpressionNotCoveredException("This expression might not be covered yet.")
+ agg_func_name = agg_func.name.split('_')[1]
+ distinct = ""
+ if agg_func.distinct:
+ distinct = agg_func.distinct + " "
+ if agg_func_name == 'GroupConcat':
+ replace(identifier, "GROUP_CONCAT" + "(" + distinct
+ + agg_func.vars.n3() + ";SEPARATOR=" + agg_func.separator.n3() + ")")
+ else:
+ replace(identifier,
+ agg_func_name.upper() + "(" + distinct + convert_node_arg(agg_func.vars) + ")")
+ # For non-aggregated variables the aggregation function "sample" is automatically assigned.
+ # However, we do not want to have "sample" wrapped around non-aggregated variables. That is
+ # why we replace it. If "sample" is used on purpose it will not be replaced as the alias
+ # must be different from the variable in this case.
+ replace("(SAMPLE({0}) as {0})".format(convert_node_arg(agg_func.vars)),
+ convert_node_arg(agg_func.vars))
+ elif node.name == "GroupGraphPatternSub":
+ replace("GroupGraphPatternSub", " ".join([convert_node_arg(pattern) for pattern in node.part]))
+ elif node.name == "TriplesBlock":
+ print("triplesblock")
+ replace("{TriplesBlock}", "".join(triple[0].n3() + " " + triple[1].n3() + " " + triple[2].n3() + "."
+ for triple in node.triples))
+
+ # 18.2 Solution modifiers
+ elif node.name == "ToList":
+ raise ExpressionNotCoveredException("This expression might not be covered yet.")
+ elif node.name == "OrderBy":
+ order_conditions = []
+ for c in node.expr:
+ if isinstance(c.expr, Identifier):
+ var = c.expr.n3()
+ if c.order is not None:
+ cond = var + "(" + c.order + ")"
+ else:
+ cond = var
+ order_conditions.append(cond)
+ else:
+ raise ExpressionNotCoveredException("This expression might not be covered yet.")
+ replace("{OrderBy}", "{" + node.p.name + "}")
+ replace("{OrderConditions}", " ".join(order_conditions) + " ")
+ elif node.name == "Project":
+ project_variables = []
+ for var in node.PV:
+ if isinstance(var, Identifier):
+ project_variables.append(var.n3())
+ else:
+ raise ExpressionNotCoveredException("This expression might not be covered yet.")
+ order_by_pattern = ""
+ if node.p.name == "OrderBy":
+ order_by_pattern = "ORDER BY {OrderConditions}"
+ replace("{Project}", " ".join(project_variables) + "{{" + node.p.name + "}}"
+ + "{GroupBy}" + order_by_pattern + "{Having}")
+ elif node.name == "Distinct":
+ replace("{Distinct}", "DISTINCT {" + node.p.name + "}")
+ elif node.name == "Reduced":
+ replace("{Reduced}", "REDUCED {" + node.p.name + "}")
+ elif node.name == "Slice":
+ slice = "OFFSET " + str(node.start) + " LIMIT " + str(node.length)
+ replace("{Slice}", "{" + node.p.name + "}" + slice)
+ elif node.name == "ToMultiSet":
+ if node.p.name == "values":
+ replace("{ToMultiSet}", "{{" + node.p.name + "}}")
+ else:
+ replace("{ToMultiSet}", "{-*-SELECT-*- " + "{" + node.p.name + "}" + "}")
+
+ # 18.2 Property Path
+
+ # 17 Expressions and Testing Values
+ # # 17.3 Operator Mapping
+ elif node.name == "RelationalExpression":
+ expr = convert_node_arg(node.expr)
+ op = node.op
+ if isinstance(list, type(node.other)):
+ other = "(" + ", ".join(convert_node_arg(expr) for expr in node.other) + ")"
+ else:
+ other = convert_node_arg(node.other)
+ condition = "{left} {operator} {right}".format(left=expr, operator=op, right=other)
+ replace("{RelationalExpression}", condition)
+ elif node.name == "ConditionalAndExpression":
+ inner_nodes = " && ".join([convert_node_arg(expr) for expr in node.other])
+ replace("{ConditionalAndExpression}", convert_node_arg(node.expr) + " && " + inner_nodes)
+ elif node.name == "ConditionalOrExpression":
+ inner_nodes = " || ".join([convert_node_arg(expr) for expr in node.other])
+ replace("{ConditionalOrExpression}", "(" + convert_node_arg(node.expr) + " || " + inner_nodes + ")")
+ elif node.name == "MultiplicativeExpression":
+ left_side = convert_node_arg(node.expr)
+ multiplication = left_side
+ for i, operator in enumerate(node.op):
+ multiplication += operator + " " + convert_node_arg(node.other[i]) + " "
+ replace("{MultiplicativeExpression}", multiplication)
+ elif node.name == "AdditiveExpression":
+ left_side = convert_node_arg(node.expr)
+ addition = left_side
+ for i, operator in enumerate(node.op):
+ addition += operator + " " + convert_node_arg(node.other[i]) + " "
+ replace("{AdditiveExpression}", addition)
+ elif node.name == "UnaryNot":
+ replace("{UnaryNot}", "!" + convert_node_arg(node.expr))
+
+ # # 17.4 Function Definitions
+ # # # 17.4.1 Functional Forms
+ elif node.name.endswith('BOUND'):
+ bound_var = convert_node_arg(node.arg)
+ replace("{Builtin_BOUND}", "bound(" + bound_var + ")")
+ elif node.name.endswith('IF'):
+ arg2 = convert_node_arg(node.arg2)
+ arg3 = convert_node_arg(node.arg3)
+
+ if_expression = "IF(" + "{" + node.arg1.name + "}, " + arg2 + ", " + arg3 + ")"
+ replace("{Builtin_IF}", if_expression)
+ elif node.name.endswith('COALESCE'):
+ replace("{Builtin_COALESCE}", "COALESCE(" + ", ".join(convert_node_arg(arg) for arg in node.arg) + ")")
+ elif node.name.endswith('Builtin_EXISTS'):
+ # The node's name which we get with node.graph.name returns "Join" instead of GroupGraphPatternSub
+ # According to https://www.w3.org/TR/2013/REC-sparql11-query-20130321/#rExistsFunc
+ # ExistsFunc can only have a GroupGraphPattern as parameter. However, when we print the query algebra
+ # we get a GroupGraphPatternSub
+ replace("{Builtin_EXISTS}", "EXISTS " + "{{" + node.graph.name + "}}")
+ traverse(node.graph, visitPre=sparql_query_text)
+ return node.graph
+ elif node.name.endswith('Builtin_NOTEXISTS'):
+ # The node's name which we get with node.graph.name returns "Join" instead of GroupGraphPatternSub
+ # According to https://www.w3.org/TR/2013/REC-sparql11-query-20130321/#rNotExistsFunc
+ # NotExistsFunc can only have a GroupGraphPattern as parameter. However, when we print the query algebra
+ # we get a GroupGraphPatternSub
+ print(node.graph.name)
+ replace("{Builtin_NOTEXISTS}", "NOT EXISTS " + "{{" + node.graph.name + "}}")
+ traverse(node.graph, visitPre=sparql_query_text)
+ return node.graph
+ # # # # 17.4.1.5 logical-or: Covered in "RelationalExpression"
+ # # # # 17.4.1.6 logical-and: Covered in "RelationalExpression"
+ # # # # 17.4.1.7 RDFterm-equal: Covered in "RelationalExpression"
+ elif node.name.endswith('sameTerm'):
+ replace("{Builtin_sameTerm}", "SAMETERM(" + convert_node_arg(node.arg1)
+ + ", " + convert_node_arg(node.arg2) + ")")
+ # # # # IN: Covered in "RelationalExpression"
+ # # # # NOT IN: Covered in "RelationalExpression"
+
+ # # # 17.4.2 Functions on RDF Terms
+ elif node.name.endswith('Builtin_isIRI'):
+ replace("{Builtin_isIRI}", "isIRI(" + convert_node_arg(node.arg) + ")")
+ elif node.name.endswith('Builtin_isBLANK'):
+ replace("{Builtin_isBLANK}", "isBLANK(" + convert_node_arg(node.arg) + ")")
+ elif node.name.endswith('Builtin_isLITERAL'):
+ replace("{Builtin_isLITERAL}", "isLITERAL(" + convert_node_arg(node.arg) + ")")
+ elif node.name.endswith('Builtin_isNUMERIC'):
+ replace("{Builtin_isNUMERIC}", "isNUMERIC(" + convert_node_arg(node.arg) + ")")
+ elif node.name.endswith('Builtin_STR'):
+ replace("{Builtin_STR}", "STR(" + convert_node_arg(node.arg) + ")")
+ elif node.name.endswith('Builtin_LANG'):
+ replace("{Builtin_LANG}", "LANG(" + convert_node_arg(node.arg) + ")")
+ elif node.name.endswith('Builtin_DATATYPE'):
+ replace("{Builtin_DATATYPE}", "DATATYPE(" + convert_node_arg(node.arg) + ")")
+ elif node.name.endswith('Builtin_IRI'):
+ replace("{Builtin_IRI}", "IRI(" + convert_node_arg(node.arg) + ")")
+ elif node.name.endswith('Builtin_BNODE'):
+ replace("{Builtin_BNODE}", "BNODE(" + convert_node_arg(node.arg) + ")")
+ elif node.name.endswith('STRDT'):
+ replace("{Builtin_STRDT}", "STRDT(" + convert_node_arg(node.arg1)
+ + ", " + convert_node_arg(node.arg2) + ")")
+ elif node.name.endswith('Builtin_STRLANG'):
+ replace("{Builtin_STRLANG}", "STRLANG(" + convert_node_arg(node.arg1)
+ + ", " + convert_node_arg(node.arg2) + ")")
+ elif node.name.endswith('Builtin_UUID'):
+ replace("{Builtin_UUID}", "UUID()")
+ elif node.name.endswith('Builtin_STRUUID'):
+ replace("{Builtin_STRUUID}", "STRUUID()")
+
+ # # # 17.4.3 Functions on Strings
+ elif node.name.endswith('Builtin_STRLEN'):
+ replace("{Builtin_STRLEN}", "STRLEN(" + convert_node_arg(node.arg) + ")")
+ elif node.name.endswith('Builtin_SUBSTR'):
+ args = [node.arg.n3(), node.start]
+ if node.length:
+ args.append(node.length)
+ expr = "SUBSTR(" + ", ".join(args) + ")"
+ replace("{Builtin_SUBSTR}", expr)
+ elif node.name.endswith('Builtin_UCASE'):
+ replace("{Builtin_UCASE}", "UCASE(" + convert_node_arg(node.arg) + ")")
+ elif node.name.endswith('Builtin_LCASE'):
+ replace("{Builtin_LCASE}", "LCASE(" + convert_node_arg(node.arg) + ")")
+ elif node.name.endswith('Builtin_STRSTARTS'):
+ replace("{Builtin_STRSTARTS}", "STRSTARTS(" + convert_node_arg(node.arg1)
+ + ", " + convert_node_arg(node.arg2) + ")")
+ elif node.name.endswith('Builtin_STRENDS'):
+ replace("{Builtin_STRENDS}", "STRENDS(" + convert_node_arg(node.arg1)
+ + ", " + convert_node_arg(node.arg2) + ")")
+ elif node.name.endswith('Builtin_CONTAINS'):
+ replace("{Builtin_CONTAINS}", "CONTAINS(" + convert_node_arg(node.arg1)
+ + ", " + convert_node_arg(node.arg2) + ")")
+ elif node.name.endswith('Builtin_STRBEFORE'):
+ replace("{Builtin_STRBEFORE}", "STRBEFORE(" + convert_node_arg(node.arg1)
+ + ", " + convert_node_arg(node.arg2) + ")")
+ elif node.name.endswith('Builtin_STRAFTER'):
+ replace("{Builtin_STRAFTER}", "STRAFTER(" + convert_node_arg(node.arg1)
+ + ", " + convert_node_arg(node.arg2) + ")")
+ elif node.name.endswith('Builtin_ENCODE_FOR_URI'):
+ replace("{Builtin_ENCODE_FOR_URI}", "ENCODE_FOR_URI(" + convert_node_arg(node.arg) + ")")
+ elif node.name.endswith('Builtin_CONCAT'):
+ expr = 'CONCAT({vars})'.format(vars=", ".join(elem.n3() for elem in node.arg))
+ replace("{Builtin_CONCAT}", expr)
+ elif node.name.endswith('Builtin_LANGMATCHES'):
+ replace("{Builtin_LANGMATCHES}", "LANGMATCHES(" + convert_node_arg(node.arg1)
+ + ", " + convert_node_arg(node.arg2) + ")")
+ elif node.name.endswith('REGEX'):
+ args = [convert_node_arg(node.text), convert_node_arg(node.pattern)]
+ expr = "REGEX(" + ", ".join(args) + ")"
+ replace("{Builtin_REGEX}", expr)
+ elif node.name.endswith('REPLACE'):
+ replace("{Builtin_REPLACE}", "REPLACE(" + convert_node_arg(node.arg)
+ + ", " + convert_node_arg(node.pattern) + ", " + convert_node_arg(node.replacement) + ")")
+
+ # # # 17.4.4 Functions on Numerics
+ elif node.name == 'Builtin_ABS':
+ replace("{Builtin_ABS}", "ABS(" + convert_node_arg(node.arg) + ")")
+ elif node.name == 'Builtin_ROUND':
+ replace("{Builtin_ROUND}", "ROUND(" + convert_node_arg(node.arg) + ")")
+ elif node.name == 'Builtin_CEIL':
+ replace("{Builtin_CEIL}", "CEIL(" + convert_node_arg(node.arg) + ")")
+ elif node.name == 'Builtin_FLOOR':
+ replace("{Builtin_FLOOR}", "FLOOR(" + convert_node_arg(node.arg) + ")")
+ elif node.name == 'Builtin_RAND':
+ replace("{Builtin_RAND}", "RAND()")
+
+ # # # 17.4.5 Functions on Dates and Times
+ elif node.name == 'Builtin_NOW':
+ replace("{Builtin_NOW}", "NOW()")
+ elif node.name == 'Builtin_YEAR':
+ replace("{Builtin_YEAR}", "YEAR(" + convert_node_arg(node.arg) + ")")
+ elif node.name == 'Builtin_MONTH':
+ replace("{Builtin_MONTH}", "MONTH(" + convert_node_arg(node.arg) + ")")
+ elif node.name == 'Builtin_DAY':
+ replace("{Builtin_DAY}", "DAY(" + convert_node_arg(node.arg) + ")")
+ elif node.name == 'Builtin_HOURS':
+ replace("{Builtin_HOURS}", "HOURS(" + convert_node_arg(node.arg) + ")")
+ elif node.name == 'Builtin_MINUTES':
+ replace("{Builtin_MINUTES}", "MINUTES(" + convert_node_arg(node.arg) + ")")
+ elif node.name == 'Builtin_SECONDS':
+ replace("{Builtin_SECONDS}", "SECONDS(" + convert_node_arg(node.arg) + ")")
+ elif node.name == 'Builtin_TIMEZONE':
+ replace("{Builtin_TIMEZONE}", "TIMEZONE(" + convert_node_arg(node.arg) + ")")
+ elif node.name == 'Builtin_TZ':
+ replace("{Builtin_TZ}", "TZ(" + convert_node_arg(node.arg) + ")")
+
+ # # # 17.4.6 Hash functions
+ elif node.name == 'Builtin_MD5':
+ replace("{Builtin_MD5}", "MD5(" + convert_node_arg(node.arg) + ")")
+ elif node.name == 'Builtin_SHA1':
+ replace("{Builtin_SHA1}", "SHA1(" + convert_node_arg(node.arg) + ")")
+ elif node.name == 'Builtin_SHA256':
+ replace("{Builtin_SHA256}", "SHA256(" + convert_node_arg(node.arg) + ")")
+ elif node.name == 'Builtin_SHA384':
+ replace("{Builtin_SHA384}", "SHA384(" + convert_node_arg(node.arg) + ")")
+ elif node.name == 'Builtin_SHA512':
+ replace("{Builtin_SHA512}", "SHA512(" + convert_node_arg(node.arg) + ")")
+
+ # Other
+ elif node.name == 'values':
+ columns = []
+ for key in node.res[0].keys():
+ if isinstance(key, Identifier):
+ columns.append(key.n3())
+ else:
+ raise ExpressionNotCoveredException("The expression {0} might not be covered yet.".format(key))
+ values = "VALUES (" + " ".join(columns) + ")"
+
+ rows = ""
+ for elem in node.res:
+ row = []
+ for term in elem.values():
+ if isinstance(term, Identifier):
+ row.append(term.n3()) # n3() is not part of Identifier class but every subclass has it
+ elif isinstance(term, str):
+ row.append(term)
+ else:
+ raise ExpressionNotCoveredException(
+ "The expression {0} might not be covered yet.".format(term))
+ rows += "(" + " ".join(row) + ")"
+
+ replace("values", values + "{" + rows + "}")
+ elif node.name == 'ServiceGraphPattern':
+ replace("{ServiceGraphPattern}", "SERVICE " + convert_node_arg(node.term)
+ + "{" + node.graph.name + "}")
+ traverse(node.graph, visitPre=sparql_query_text)
+ return node.graph
+ # else:
+ # raise ExpressionNotCoveredException("The expression {0} might not be covered yet.".format(node.name))
+
+ traverse(query_algebra.algebra, visitPre=sparql_query_text)
+ query_from_algebra = open("query.txt", "r").read()
+ os.remove("query.txt")
+
+ return query_from_algebra
+
+
def pprintAlgebra(q):
def pp(p, ind=" "):
# if isinstance(p, list):
diff --git a/rdflib/plugins/sparql/parser.py b/rdflib/plugins/sparql/parser.py
index 1de46918..bcf9ed38 100644
--- a/rdflib/plugins/sparql/parser.py
+++ b/rdflib/plugins/sparql/parser.py
@@ -584,10 +584,15 @@ TriplesNodePath <<= CollectionPath | BlankNodePropertyListPath
TriplesSameSubject = VarOrTerm + PropertyListNotEmpty | TriplesNode + PropertyList
TriplesSameSubject.setParseAction(expandTriples)
-# [52] TriplesTemplate ::= TriplesSameSubject ( '.' Optional(TriplesTemplate) )?
-TriplesTemplate = Forward()
-TriplesTemplate <<= ParamList("triples", TriplesSameSubject) + Optional(
- Suppress(".") + Optional(TriplesTemplate)
+# [52] TriplesTemplate ::= TriplesSameSubject ( '.' TriplesTemplate? )?
+# NOTE: pyparsing.py handling of recursive rules is limited by python's recursion
+# limit.
+# (https://docs.python.org/3/library/sys.html#sys.setrecursionlimit)
+# To accommodate arbitrary amounts of triples this rule is rewritten to not be
+# recursive:
+# [52*] TriplesTemplate ::= TriplesSameSubject ( '.' TriplesSameSubject? )*
+TriplesTemplate = ParamList("triples", TriplesSameSubject) + ZeroOrMore(
+ Suppress(".") + Optional(ParamList("triples", TriplesSameSubject))
)
# [51] QuadsNotTriples ::= 'GRAPH' VarOrIri '{' Optional(TriplesTemplate) '}'
diff --git a/rdflib/plugins/stores/memory.py b/rdflib/plugins/stores/memory.py
index 8bf40a21..e2446c1c 100644
--- a/rdflib/plugins/stores/memory.py
+++ b/rdflib/plugins/stores/memory.py
@@ -222,7 +222,6 @@ class Memory(Store):
if context is not None:
self.__all_contexts.add(context)
subject, predicate, object_ = triple
- self.__add_triple_context(triple, context, quoted)
spo = self.__spo
try:
@@ -233,7 +232,19 @@ class Memory(Store):
o = po[predicate]
except LookupError:
o = po[predicate] = {}
- o[object_] = 1
+
+ try:
+ _ = o[object_]
+ # This cannot be reached if (s, p, o) was not inserted before.
+ triple_exists = True
+ except KeyError:
+ o[object_] = 1
+ triple_exists = False
+ self.__add_triple_context(triple, triple_exists, context, quoted)
+
+ if triple_exists:
+            # No need to insert this triple twice.
+ return
pos = self.__pos
try:
@@ -436,13 +447,11 @@ class Memory(Store):
pass # we didn't know this graph, no problem
# internal utility methods below
- def __add_triple_context(self, triple, context, quoted):
+ def __add_triple_context(self, triple, triple_exists, context, quoted):
"""add the given context to the set of contexts for the triple"""
ctx = self.__ctx_to_str(context)
quoted = bool(quoted)
- try:
- subj, pred, obj = triple
- _ = self.__spo[subj][pred][obj]
+ if triple_exists:
# we know the triple exists somewhere in the store
try:
triple_context = self.__tripleContexts[triple]
@@ -456,7 +465,7 @@ class Memory(Store):
if not quoted:
triple_context[None] = quoted
- except KeyError:
+ else:
# the triple didn't exist before in the store
if quoted: # this context only
triple_context = self.__tripleContexts[triple] = {ctx: quoted}
@@ -509,15 +518,13 @@ class Memory(Store):
return None
try:
# ctx could be a graph. In that case, use its identifier
- ctx_str = "{}:{}".format(
- str(ctx.identifier.__class__.__name__), str(ctx.identifier)
- )
+ ctx_str = "{}:{}".format(ctx.identifier.__class__.__name__, ctx.identifier)
self.__context_obj_map[ctx_str] = ctx
return ctx_str
except AttributeError:
# otherwise, ctx should be a URIRef or BNode or str
if isinstance(ctx, str):
- ctx_str = "{}:{}".format(str(ctx.__class__.__name__), str(ctx))
+ ctx_str = "{}:{}".format(ctx.__class__.__name__, ctx)
if ctx_str in self.__context_obj_map:
return ctx_str
self.__context_obj_map[ctx_str] = ctx
diff --git a/rdflib/plugins/stores/sparqlconnector.py b/rdflib/plugins/stores/sparqlconnector.py
index ffd8b30a..e0b70e08 100644
--- a/rdflib/plugins/stores/sparqlconnector.py
+++ b/rdflib/plugins/stores/sparqlconnector.py
@@ -128,7 +128,7 @@ class SPARQLConnector(object):
params["using-graph-uri"] = default_graph
if named_graph is not None:
- params["using-named-graph-uri"] = default_graph
+ params["using-named-graph-uri"] = named_graph
headers = {
"Accept": _response_mime_types[self.returnFormat],
diff --git a/rdflib/plugins/stores/sparqlstore.py b/rdflib/plugins/stores/sparqlstore.py
index 26436f20..5cb8809d 100644
--- a/rdflib/plugins/stores/sparqlstore.py
+++ b/rdflib/plugins/stores/sparqlstore.py
@@ -16,6 +16,7 @@ from rdflib.store import Store
from rdflib import Variable, BNode
from rdflib.graph import DATASET_DEFAULT_GRAPH_ID
from rdflib.term import Node
+from typing import Union, Tuple
# Defines some SPARQL keywords
LIMIT = "LIMIT"
@@ -34,7 +35,7 @@ def _node_to_sparql(node):
return node.n3()
-class SPARQLStore(SPARQLConnector, Store):
+class SPARQLStore(SPARQLConnector, Store): # type: ignore[misc]
"""An RDFLib store around a SPARQL endpoint
This is context-aware and should work as expected
@@ -522,7 +523,7 @@ class SPARQLUpdateStore(SPARQLStore):
self._edits = None
self._updates = 0
- def open(self, configuration: str or tuple, create=False):
+ def open(self, configuration: Union[str, Tuple[str, str]], create=False):
"""This method is included so that calls to this Store via Graph, e.g. Graph("SPARQLStore"),
can set the required parameters
"""
@@ -557,7 +558,8 @@ class SPARQLUpdateStore(SPARQLStore):
self.commit()
return SPARQLStore.__len__(self, *args, **kwargs)
- def open(self, configuration, create=False):
+ # TODO: FIXME: open is defined twice
+ def open(self, configuration, create=False): # type: ignore[no-redef]
"""
sets the endpoint URLs for this SPARQLStore
:param configuration: either a tuple of (query_endpoint, update_endpoint),
diff --git a/rdflib/query.py b/rdflib/query.py
index c67f15ea..da174cd1 100644
--- a/rdflib/query.py
+++ b/rdflib/query.py
@@ -4,9 +4,9 @@ import shutil
import tempfile
import warnings
import types
-from typing import Optional
+from typing import Optional, Union, cast
-from io import BytesIO
+from io import BytesIO, BufferedIOBase
from urllib.parse import urlparse
@@ -207,11 +207,11 @@ class Result(object):
return parser.parse(source, content_type=content_type, **kwargs)
def serialize(
- self,
- destination: Optional[str] = None,
- encoding: str = "utf-8",
- format: str = "xml",
- **args,
+ self,
+ destination: Optional[Union[str, BufferedIOBase]] = None,
+ encoding: str = "utf-8",
+ format: str = "xml",
+ **args,
) -> Optional[bytes]:
"""
Serialize the query result.
@@ -223,7 +223,7 @@ class Result(object):
- txt: :class:`~rdflib.plugins.sparql.results.txtresults.TXTResultSerializer`
- xml: :class:`~rdflib.plugins.sparql.results.xmlresults.XMLResultSerializer`
- :param destination: Path of file output.
+ :param destination: Path of file output or BufferedIOBase object to write the output to.
:param encoding: Encoding of output.
:param format: One of ['csv', 'json', 'txt', xml']
:param args:
@@ -239,21 +239,21 @@ class Result(object):
serializer = plugin.get(format, ResultSerializer)(self)
if destination is None:
- stream = BytesIO()
- stream2 = EncodeOnlyUnicode(stream)
+ streamb: BytesIO = BytesIO()
+ stream2 = EncodeOnlyUnicode(streamb)
serializer.serialize(stream2, encoding=encoding, **args)
- return stream.getvalue()
+ return streamb.getvalue()
if hasattr(destination, "write"):
- stream = destination
+ stream = cast(BufferedIOBase, destination)
serializer.serialize(stream, encoding=encoding, **args)
else:
- location = destination
+ location = cast(str, destination)
scheme, netloc, path, params, query, fragment = urlparse(location)
if netloc != "":
print(
"WARNING: not saving as location" + "is not a local file reference"
)
- return
+ return None
fd, name = tempfile.mkstemp()
stream = os.fdopen(fd, "wb")
serializer.serialize(stream, encoding=encoding, **args)
@@ -263,6 +263,7 @@ class Result(object):
else:
shutil.copy(name, path)
os.remove(name)
+ return None
def __len__(self):
if self.type == "ASK":
diff --git a/rdflib/resource.py b/rdflib/resource.py
index bc1e07b0..fe99f4da 100644
--- a/rdflib/resource.py
+++ b/rdflib/resource.py
@@ -355,8 +355,6 @@ class Resource(object):
def __unicode__(self):
return str(self._identifier)
- __str__ = __unicode__
-
def add(self, p, o):
if isinstance(o, Resource):
o = o._identifier
diff --git a/rdflib/term.py b/rdflib/term.py
index 61d76ffc..d0c5e8a8 100644
--- a/rdflib/term.py
+++ b/rdflib/term.py
@@ -63,11 +63,15 @@ from urllib.parse import urljoin
from urllib.parse import urlparse
from decimal import Decimal
+from typing import TYPE_CHECKING, Dict, Callable, Union, Type
+
+if TYPE_CHECKING:
+ from .paths import AlternativePath, InvPath, NegatedPath, SequencePath, Path
logger = logging.getLogger(__name__)
skolem_genid = "/.well-known/genid/"
rdflib_skolem_genid = "/.well-known/genid/rdflib/"
-skolems = {}
+skolems: Dict[str, "BNode"] = {}
_invalid_uri_chars = '<>" {}|\\^`'
@@ -218,6 +222,11 @@ class URIRef(Identifier):
__slots__ = ()
+ __or__: Callable[["URIRef", Union["URIRef", "Path"]], "AlternativePath"]
+ __invert__: Callable[["URIRef"], "InvPath"]
+ __neg__: Callable[["URIRef"], "NegatedPath"]
+ __truediv__: Callable[["URIRef", Union["URIRef", "Path"]], "SequencePath"]
+
def __new__(cls, value, base=None):
if base is not None:
ends_in_hash = value.endswith("#")
@@ -969,10 +978,11 @@ class Literal(Identifier):
"""
# don't use super()... for efficiency reasons, see Identifier.__hash__
res = str.__hash__(self)
- if self.language:
- res ^= hash(self.language.lower())
- if self.datatype:
- res ^= hash(self.datatype)
+ # Directly accessing the member is faster than the property.
+ if self._language:
+ res ^= hash(self._language.lower())
+ if self._datatype:
+ res ^= hash(self._datatype)
return res
def __eq__(self, other):
@@ -1015,11 +1025,12 @@ class Literal(Identifier):
return True
if other is None:
return False
+ # Directly accessing the member is faster than the property.
if isinstance(other, Literal):
return (
- self.datatype == other.datatype
- and (self.language.lower() if self.language else None)
- == (other.language.lower() if other.language else None)
+ self._datatype == other._datatype
+ and (self._language.lower() if self._language else None)
+ == (other._language.lower() if other._language else None)
and str.__eq__(self, other)
)
@@ -1544,7 +1555,7 @@ _GenericPythonToXSDRules = [
(bool, (lambda i: str(i).lower(), _XSD_BOOLEAN)),
(int, (None, _XSD_INTEGER)),
(long_type, (None, _XSD_INTEGER)),
- (Decimal, (None, _XSD_DECIMAL)),
+ (Decimal, (lambda i: f"{i:f}", _XSD_DECIMAL)),
(datetime, (lambda i: i.isoformat(), _XSD_DATETIME)),
(date, (lambda i: i.isoformat(), _XSD_DATE)),
(time, (lambda i: i.isoformat(), _XSD_TIME)),
@@ -1721,7 +1732,7 @@ class Statement(Node, tuple):
# See http://www.w3.org/TR/sparql11-query/#modOrderBy
# we leave "space" for more subclasses of Node elsewhere
# default-dict to grazefully fail for new subclasses
-_ORDERING = defaultdict(int)
+_ORDERING: Dict[Type[Node], int] = defaultdict(int)
_ORDERING.update({BNode: 10, Variable: 20, URIRef: 30, Literal: 40})
diff --git a/setup.cfg b/setup.cfg
index b6cf95e7..a334b051 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -27,3 +27,9 @@ exclude_lines =
if 0:
if __name__ == .__main__.:
if __name__==.__main__.:
+
+[mypy]
+python_version = 3.6
+warn_unused_configs = True
+ignore_missing_imports = True
+disallow_subclassing_any = False
diff --git a/setup.py b/setup.py
index e2edc084..8d5a73c1 100644
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@ import re
from setuptools import setup, find_packages
kwargs = {}
-kwargs["install_requires"] = ["isodate", "pyparsing"]
+kwargs["install_requires"] = ["isodate", "pyparsing", "setuptools"]
kwargs["tests_require"] = [
"html5lib",
"networkx",
diff --git a/test/helper.py b/test/helper.py
index 240d90c7..acecde3d 100644
--- a/test/helper.py
+++ b/test/helper.py
@@ -7,7 +7,9 @@ import rdflib.query
MAX_RETRY = 10
BACKOFF_FACTOR = 1.5
-def query_with_retry(graph: rdflib.Graph, query: str, **kwargs) -> rdflib.query.Result:
+
+
+def query_with_retry(graph: rdflib.Graph, query: str, **kwargs) -> rdflib.query.Result: # type: ignore[return]
"""Query graph an retry on failure, returns preloaded result
The tests run against outside network targets which results
diff --git a/test/test_bnode_ncname.py b/test/test_bnode_ncname.py
index 3e621579..4077997b 100644
--- a/test/test_bnode_ncname.py
+++ b/test/test_bnode_ncname.py
@@ -1,38 +1,7 @@
# -*- coding: utf-8 -*-
import re
from hashlib import md5
-
-
-try:
- from uuid import uuid4
-except ImportError:
-
- def uuid4():
- """
- Generates a uuid on behalf of Python 2.4
- """
- import random
- import os
- import time
- import socket
-
- try:
- preseed = os.urandom(16)
- except NotImplementedError:
- preseed = ""
- # Have doubts about this. random.seed will just hash the string
- random.seed("%s%s%s" % (preseed, os.getpid(), time.time()))
- del preseed
- t = int(time.time() * 1000.0)
- r = int(random.random() * 100000000000000000)
- try:
- a = socket.gethostbyname(socket.gethostname())
- except:
- # if we can't get a network address, just imagine one
- a = random.random() * 100000000000000000
- strdata = str(t) + " " + str(r) + " " + str(a)
- data = md5(strdata.encode("ascii")).hexdigest()
- yield data
+from uuid import uuid4
# Adapted from http://icodesnip.com/snippet/python/simple-universally-unique-id-uuid-or-guid
diff --git a/test/test_conneg.py b/test/test_conneg.py
deleted file mode 100644
index b8eee3bc..00000000
--- a/test/test_conneg.py
+++ /dev/null
@@ -1,91 +0,0 @@
-import time
-
-
-from rdflib import Graph
-
-import _thread
-from http.server import HTTPServer, BaseHTTPRequestHandler
-
-"""
-Test that correct content negoation headers are passed
-by graph.parse
-"""
-
-
-xmltestdoc = """<?xml version="1.0" encoding="UTF-8"?>
-<rdf:RDF
- xmlns="http://example.org/"
- xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
->
- <rdf:Description rdf:about="http://example.org/a">
- <b rdf:resource="http://example.org/c"/>
- </rdf:Description>
-</rdf:RDF>
-"""
-
-n3testdoc = """@prefix : <http://example.org/> .
-
-:a :b :c .
-"""
-
-nttestdoc = "<http://example.org/a> <http://example.org/b> <http://example.org/c> .\n"
-
-
-class TestHTTPHandler(BaseHTTPRequestHandler):
- def do_GET(self):
-
- self.send_response(200, "OK")
- # fun fun fun parsing accept header.
-
- acs = self.headers["Accept"].split(",")
- acq = [x.split(";") for x in acs if ";" in x]
- acn = [(x, "q=1") for x in acs if ";" not in x]
- acs = [(x[0], float(x[1].strip()[2:])) for x in acq + acn]
- ac = sorted(acs, key=lambda x: x[1])
- ct = ac[-1]
-
- if "application/rdf+xml" in ct:
- rct = "application/rdf+xml"
- content = xmltestdoc
- elif "text/n3" in ct:
- rct = "text/n3"
- content = n3testdoc
- elif "text/plain" in ct:
- rct = "text/plain"
- content = nttestdoc
-
- self.send_header("Content-type", rct)
- self.end_headers()
- self.wfile.write(content.encode("utf-8"))
-
- def log_message(self, *args):
- pass
-
-
-def runHttpServer(server_class=HTTPServer, handler_class=TestHTTPHandler):
- """Start a server than can handle 3 requests :)"""
- server_address = ("localhost", 12345)
- httpd = server_class(server_address, handler_class)
-
- httpd.handle_request()
- httpd.handle_request()
- httpd.handle_request()
-
-
-def testConNeg():
- _thread.start_new_thread(runHttpServer, tuple())
- # hang on a second while server starts
- time.sleep(1)
- graph = Graph()
- graph.parse("http://localhost:12345/foo", format="xml")
- graph.parse("http://localhost:12345/foo", format="n3")
- graph.parse("http://localhost:12345/foo", format="nt")
-
-
-if __name__ == "__main__":
-
- import sys
- import nose
-
- if len(sys.argv) == 1:
- nose.main(defaultTest=sys.argv[0])
diff --git a/test/test_core_sparqlstore.py b/test/test_core_sparqlstore.py
deleted file mode 100644
index 622e4a24..00000000
--- a/test/test_core_sparqlstore.py
+++ /dev/null
@@ -1,26 +0,0 @@
-import unittest
-from rdflib.graph import Graph
-
-
-class TestSPARQLStoreGraphCore(unittest.TestCase):
-
- store_name = "SPARQLStore"
- path = "http://dbpedia.org/sparql"
- storetest = True
- create = False
-
- def setUp(self):
- self.graph = Graph(store="SPARQLStore")
- self.graph.open(self.path, create=self.create)
- ns = list(self.graph.namespaces())
- assert len(ns) > 0, ns
-
- def tearDown(self):
- self.graph.close()
-
- def test(self):
- print("Done")
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/test/test_dawg.py b/test/test_dawg.py
index 2b1884fc..0ded4bda 100644
--- a/test/test_dawg.py
+++ b/test/test_dawg.py
@@ -6,27 +6,12 @@ import sys
sys.setrecursionlimit(6000) # default is 1000
-try:
- from collections import Counter
-except:
-
- # cheap Counter impl for py 2.5
- # not a complete implementation - only good enough for the use here!
- from collections import defaultdict
- from operator import itemgetter
-
- class Counter(defaultdict):
- def __init__(self):
- defaultdict.__init__(self, int)
-
- def most_common(self, N):
- return [
- x[0] for x in sorted(self.items(), key=itemgetter(1), reverse=True)[:10]
- ]
+from collections import Counter
import datetime
import isodate
+import typing
from rdflib import Dataset, Graph, URIRef, BNode
@@ -101,8 +86,8 @@ DETAILEDASSERT = True
NAME = None
-fails = Counter()
-errors = Counter()
+fails: typing.Counter[str] = Counter()
+errors: typing.Counter[str] = Counter()
failed_tests = []
error_tests = []
@@ -122,7 +107,7 @@ try:
]
)
except IOError:
- skiptests = set()
+ skiptests = dict()
def _fmt(f):
@@ -606,8 +591,8 @@ if __name__ == "__main__":
print("Most common fails:")
for failed in fails.most_common(10):
- failed = str(failed)
- print(failed[:450] + (failed[450:] and "..."))
+ failed_str = str(failed)
+ print(failed_str[:450] + (failed_str[450:] and "..."))
print("\n----------------------------------------------------\n")
diff --git a/test/test_graph_http.py b/test/test_graph_http.py
new file mode 100644
index 00000000..1ee8292e
--- /dev/null
+++ b/test/test_graph_http.py
@@ -0,0 +1,168 @@
+from rdflib import Graph, Namespace
+
+from http.server import BaseHTTPRequestHandler
+from urllib.error import HTTPError
+from .testutils import SimpleHTTPMock, MockHTTPResponse, ctx_http_server, GraphHelper
+import unittest
+
+
+"""
+Test that correct content negotiation headers are passed
+by graph.parse
+"""
+
+
+xmltestdoc = """<?xml version="1.0" encoding="UTF-8"?>
+<rdf:RDF
+ xmlns="http://example.org/"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+>
+ <rdf:Description rdf:about="http://example.org/a">
+ <b rdf:resource="http://example.org/c"/>
+ </rdf:Description>
+</rdf:RDF>
+"""
+
+n3testdoc = """@prefix : <http://example.org/> .
+
+:a :b :c .
+"""
+
+nttestdoc = "<http://example.org/a> <http://example.org/b> <http://example.org/c> .\n"
+
+
+class ContentNegotiationHandler(BaseHTTPRequestHandler):
+ def do_GET(self):
+
+ self.send_response(200, "OK")
+ # fun fun fun parsing accept header.
+
+ acs = self.headers["Accept"].split(",")
+ acq = [x.split(";") for x in acs if ";" in x]
+ acn = [(x, "q=1") for x in acs if ";" not in x]
+ acs = [(x[0], float(x[1].strip()[2:])) for x in acq + acn]
+ ac = sorted(acs, key=lambda x: x[1])
+ ct = ac[-1]
+
+ if "application/rdf+xml" in ct:
+ rct = "application/rdf+xml"
+ content = xmltestdoc
+ elif "text/n3" in ct:
+ rct = "text/n3"
+ content = n3testdoc
+ elif "text/plain" in ct:
+ rct = "text/plain"
+ content = nttestdoc
+
+ self.send_header("Content-type", rct)
+ self.end_headers()
+ self.wfile.write(content.encode("utf-8"))
+
+ def log_message(self, *args):
+ pass
+
+
+class TestGraphHTTP(unittest.TestCase):
+ def content_negotiation(self) -> None:
+ EG = Namespace("http://example.org/")
+ expected = Graph()
+ expected.add((EG["a"], EG["b"], EG["c"]))
+ expected_triples = GraphHelper.triple_set(expected)
+
+ with ctx_http_server(ContentNegotiationHandler) as server:
+ (host, port) = server.server_address
+ url = f"http://{host}:{port}/foo"
+ for format in ("xml", "n3", "nt"):
+ graph = Graph()
+ graph.parse(url, format=format)
+ self.assertEqual(expected_triples, GraphHelper.triple_set(graph))
+
+ def test_3xx(self) -> None:
+ EG = Namespace("http://example.com/")
+ expected = Graph()
+ expected.add((EG["a"], EG["b"], EG["c"]))
+ expected_triples = GraphHelper.triple_set(expected)
+
+ httpmock = SimpleHTTPMock()
+ with ctx_http_server(httpmock.Handler) as server:
+ (host, port) = server.server_address
+ url = f"http://{host}:{port}/"
+
+ for idx in range(3):
+ httpmock.do_get_responses.append(
+ MockHTTPResponse(
+ 302, "FOUND", "".encode(), {"Location": [f"{url}loc/302/{idx}"]}
+ )
+ )
+ for idx in range(3):
+ httpmock.do_get_responses.append(
+ MockHTTPResponse(
+ 303,
+ "See Other",
+ "".encode(),
+ {"Location": [f"{url}loc/303/{idx}"]},
+ )
+ )
+ for idx in range(3):
+ httpmock.do_get_responses.append(
+ MockHTTPResponse(
+ 308,
+ "Permanent Redirect",
+ "".encode(),
+ {"Location": [f"{url}loc/308/{idx}"]},
+ )
+ )
+
+ httpmock.do_get_responses.append(
+ MockHTTPResponse(
+ 200,
+ "OK",
+ f"<{EG['a']}> <{EG['b']}> <{EG['c']}>.".encode(),
+ {"Content-Type": ["text/turtle"]},
+ )
+ )
+
+ graph = Graph()
+ graph.parse(location=url, format="turtle")
+ self.assertEqual(expected_triples, GraphHelper.triple_set(graph))
+
+ httpmock.do_get_mock.assert_called()
+ assert len(httpmock.do_get_requests) == 10
+ for request in httpmock.do_get_requests:
+ self.assertRegex(request.headers.get("Accept"), "text/turtle")
+
+ request_paths = [request.path for request in httpmock.do_get_requests]
+ self.assertEqual(
+ request_paths,
+ [
+ "/",
+ "/loc/302/0",
+ "/loc/302/1",
+ "/loc/302/2",
+ "/loc/303/0",
+ "/loc/303/1",
+ "/loc/303/2",
+ "/loc/308/0",
+ "/loc/308/1",
+ "/loc/308/2",
+ ],
+ )
+
+ def test_5xx(self):
+ httpmock = SimpleHTTPMock()
+ with ctx_http_server(httpmock.Handler) as server:
+ (host, port) = server.server_address
+ url = f"http://{host}:{port}/"
+ response = MockHTTPResponse(500, "Internal Server Error", "".encode(), {})
+ httpmock.do_get_responses.append(response)
+
+ graph = Graph()
+
+ with self.assertRaises(HTTPError) as raised:
+ graph.parse(location=url, format="turtle")
+
+ self.assertEqual(raised.exception.code, 500)
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/test/test_graph_operator.py b/test/test_graph_operator.py
new file mode 100644
index 00000000..87502c54
--- /dev/null
+++ b/test/test_graph_operator.py
@@ -0,0 +1,27 @@
+from rdflib import Graph
+
+
+class MyGraph(Graph):
+ def my_method(self):
+ pass
+
+
+def test_subclass_add_operator():
+ g = MyGraph()
+
+ g = g + g
+ assert "my_method" in dir(g)
+
+
+def test_subclass_sub_operator():
+ g = MyGraph()
+
+ g = g - g
+ assert "my_method" in dir(g)
+
+
+def test_subclass_mul_operator():
+ g = MyGraph()
+
+ g = g * g
+ assert "my_method" in dir(g)
diff --git a/test/test_issue274.py b/test/test_issue274.py
index 79fc4d15..d52d80e4 100644
--- a/test/test_issue274.py
+++ b/test/test_issue274.py
@@ -164,7 +164,7 @@ def test_cast_bool_to_bool():
eq_(list(res)[0][0], Literal("true", datatype=XSD.boolean))
-def test_cast_bool_to_bool():
+def test_call_exf():
res = query("""SELECT (ex:f(42, "hello") as ?x) {}""")
eq_(len(list(res)), 0)
diff --git a/test/test_literal.py b/test/test_literal.py
index 80de82a6..0248ce52 100644
--- a/test/test_literal.py
+++ b/test/test_literal.py
@@ -1,3 +1,4 @@
+from decimal import Decimal
import unittest
import datetime
@@ -225,6 +226,9 @@ class TestXsdLiterals(unittest.TestCase):
("", XSD.hexBinary, bytes),
("UkRGTGli", XSD.base64Binary, bytes),
("", XSD.base64Binary, bytes),
+ ("0.0000000000000000000000000000001", XSD.decimal, Decimal),
+ ("0.1", XSD.decimal, Decimal),
+ ("1", XSD.integer, int),
]
self.check_make_literals(inputs)
@@ -243,6 +247,7 @@ class TestXsdLiterals(unittest.TestCase):
("1921-05-01+00:00", XSD.date, datetime.date),
("1921-05-01+00:00", XSD.date, datetime.date),
("1921-05-01T00:00:00Z", XSD.dateTime, datetime.datetime),
+ ("1e-31", XSD.decimal, None), # This is not a valid decimal value
]
self.check_make_literals(inputs)
diff --git a/test/test_rdfxml.py b/test/test_rdfxml.py
index eef09219..c64d6a7c 100644
--- a/test/test_rdfxml.py
+++ b/test/test_rdfxml.py
@@ -20,13 +20,8 @@ _logger = logging.getLogger("parser_rdfcore")
verbose = 0
-sw = StreamWriter(sys.stdout)
-
-
def write(msg):
_logger.info(msg + "\n")
- # sw.write(msg+"\n")
-
class TestStore(Graph):
__test__ = False
diff --git a/test/test_serialize.py b/test/test_serialize.py
new file mode 100644
index 00000000..cfc8a9d7
--- /dev/null
+++ b/test/test_serialize.py
@@ -0,0 +1,43 @@
+import unittest
+from rdflib import Graph, URIRef
+from tempfile import NamedTemporaryFile, TemporaryDirectory
+from pathlib import Path, PurePath
+
+
+class TestSerialize(unittest.TestCase):
+ def setUp(self) -> None:
+
+ graph = Graph()
+ subject = URIRef("example:subject")
+ predicate = URIRef("example:predicate")
+ object = URIRef("example:object")
+ self.triple = (
+ subject,
+ predicate,
+ object,
+ )
+ graph.add(self.triple)
+ self.graph = graph
+ return super().setUp()
+
+ def test_serialize_to_purepath(self):
+ with TemporaryDirectory() as td:
+ tfpath = PurePath(td) / "out.nt"
+ self.graph.serialize(destination=tfpath, format="nt")
+ graph_check = Graph()
+ graph_check.parse(source=tfpath, format="nt")
+
+ self.assertEqual(self.triple, next(iter(graph_check)))
+
+ def test_serialize_to_path(self):
+ with NamedTemporaryFile() as tf:
+ tfpath = Path(tf.name)
+ self.graph.serialize(destination=tfpath, format="nt")
+ graph_check = Graph()
+ graph_check.parse(source=tfpath, format="nt")
+
+ self.assertEqual(self.triple, next(iter(graph_check)))
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/test/test_sparql_parser.py b/test/test_sparql_parser.py
new file mode 100644
index 00000000..f5d6267e
--- /dev/null
+++ b/test/test_sparql_parser.py
@@ -0,0 +1,49 @@
+from rdflib import Graph, Literal
+from rdflib.term import Node
+from rdflib.namespace import Namespace
+from rdflib.plugins.sparql.processor import processUpdate
+import unittest
+import sys
+import math
+from typing import Set, Tuple
+
+
+def triple_set(graph: Graph) -> Set[Tuple[Node, Node, Node]]:
+ return set(graph.triples((None, None, None)))
+
+
+class SPARQLParserTests(unittest.TestCase):
+ def test_insert_recursionlimit(self) -> None:
+ # These values are experimentally determined
+ # to cause the RecursionError reported in
+ # https://github.com/RDFLib/rdflib/issues/1336
+ resource_count = math.ceil(sys.getrecursionlimit() / (33 - 3))
+ self.do_insert(resource_count)
+
+ def test_insert_large(self) -> None:
+ self.do_insert(200)
+
+ def do_insert(self, resource_count: int) -> None:
+ EGV = Namespace("http://example.org/vocab#")
+ EGI = Namespace("http://example.org/instance#")
+ prop0, prop1, prop2 = EGV["prop0"], EGV["prop1"], EGV["prop2"]
+ g0 = Graph()
+ for index in range(resource_count):
+ resource = EGI[f"resource{index}"]
+ g0.add((resource, prop0, Literal(index)))
+ g0.add((resource, prop1, Literal("example resource")))
+ g0.add((resource, prop2, Literal(f"resource #{index}")))
+
+ g0ntriples = g0.serialize(format="ntriples")
+ g1 = Graph()
+
+ self.assertNotEqual(triple_set(g0), triple_set(g1))
+
+ processUpdate(g1, f"INSERT DATA {{ {g0ntriples!s} }}")
+
+ self.assertEqual(triple_set(g0), triple_set(g1))
+
+
+if __name__ == "__main__":
+
+ unittest.main()
diff --git a/test/test_sparqlstore.py b/test/test_sparqlstore.py
index 8b11a4c4..7859a375 100644
--- a/test/test_sparqlstore.py
+++ b/test/test_sparqlstore.py
@@ -1,39 +1,40 @@
from rdflib import Graph, URIRef, Literal
-from urllib.request import urlopen
-from urllib.error import HTTPError
import unittest
-from nose import SkipTest
-from http.server import BaseHTTPRequestHandler, HTTPServer, SimpleHTTPRequestHandler
+from http.server import BaseHTTPRequestHandler, HTTPServer
import socket
from threading import Thread
-from contextlib import contextmanager
-from unittest.mock import MagicMock, Mock, patch
-import typing as t
-import random
-import collections
-from urllib.parse import ParseResult, urlparse, parse_qs
+from unittest.mock import patch
from rdflib.namespace import RDF, XSD, XMLNS, FOAF, RDFS
from rdflib.plugins.stores.sparqlstore import SPARQLConnector
-import email.message
+from typing import ClassVar
from . import helper
+from .testutils import (
+ MockHTTPResponse,
+ ServedSimpleHTTPMock,
+)
-try:
- assert len(urlopen("http://dbpedia.org/sparql").read()) > 0
-except:
- raise SkipTest("No HTTP connection.")
+class SPARQLStoreFakeDBPediaTestCase(unittest.TestCase):
+ store_name = "SPARQLStore"
+ path: ClassVar[str]
+ httpmock: ClassVar[ServedSimpleHTTPMock]
+ @classmethod
+ def setUpClass(cls) -> None:
+ super().setUpClass()
+ cls.httpmock = ServedSimpleHTTPMock()
+ cls.path = f"{cls.httpmock.url}/sparql"
-class SPARQLStoreDBPediaTestCase(unittest.TestCase):
- store_name = "SPARQLStore"
- path = "http://dbpedia.org/sparql"
- storetest = True
- create = False
+ @classmethod
+ def tearDownClass(cls) -> None:
+ super().tearDownClass()
+ cls.httpmock.stop()
def setUp(self):
+ self.httpmock.reset()
self.graph = Graph(store="SPARQLStore")
- self.graph.open(self.path, create=self.create)
+ self.graph.open(self.path, create=True)
ns = list(self.graph.namespaces())
assert len(ns) > 0, ns
@@ -43,11 +44,29 @@ class SPARQLStoreDBPediaTestCase(unittest.TestCase):
def test_Query(self):
query = "select distinct ?Concept where {[] a ?Concept} LIMIT 1"
_query = SPARQLConnector.query
+ self.httpmock.do_get_responses.append(
+ MockHTTPResponse(
+ 200,
+ "OK",
+ b"""\
+<sparql xmlns="http://www.w3.org/2005/sparql-results#" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.w3.org/2001/sw/DataAccess/rf1/result2.xsd">
+ <head>
+ <variable name="Concept"/>
+ </head>
+ <results distinct="false" ordered="true">
+ <result>
+ <binding name="Concept"><uri>http://www.w3.org/2000/01/rdf-schema#Datatype</uri></binding>
+ </result>
+ </results>
+</sparql>""",
+ {"Content-Type": ["application/sparql-results+xml; charset=UTF-8"]},
+ )
+ )
with patch("rdflib.plugins.stores.sparqlstore.SPARQLConnector.query") as mock:
SPARQLConnector.query.side_effect = lambda *args, **kwargs: _query(
self.graph.store, *args, **kwargs
)
- res = helper.query_with_retry(self.graph, query, initNs={})
+ res = self.graph.query(query, initNs={})
count = 0
for i in res:
count += 1
@@ -62,24 +81,97 @@ class SPARQLStoreDBPediaTestCase(unittest.TestCase):
(mquery, _, _) = unpacker(*args, *kwargs)
for _, uri in self.graph.namespaces():
assert mquery.count(f"<{uri}>") == 1
+ self.assertEqual(self.httpmock.do_get_mock.call_count, 1)
+ req = self.httpmock.do_get_requests.pop(0)
+ self.assertRegex(req.path, r"^/sparql")
+ self.assertIn(query, req.path_query["query"][0])
def test_initNs(self):
query = """\
SELECT ?label WHERE
{ ?s a xyzzy:Concept ; xyzzy:prefLabel ?label . } LIMIT 10
"""
- res = helper.query_with_retry(self.graph,
+ self.httpmock.do_get_responses.append(
+ MockHTTPResponse(
+ 200,
+ "OK",
+ """\
+<sparql xmlns="http://www.w3.org/2005/sparql-results#" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.w3.org/2001/sw/DataAccess/rf1/result2.xsd">
+ <head>
+ <variable name="label"/>
+ </head>
+ <results distinct="false" ordered="true">
+ <result>
+ <binding name="label"><literal xml:lang="en">189</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 Scottish Football League</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 United States collegiate men&#39;s ice hockey season</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 Western Conference men&#39;s basketball season</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 collegiate men&#39;s basketball independents season in the United States</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 domestic association football cups</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 domestic association football leagues</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 in American ice hockey by league</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 in American ice hockey by team</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 in Belgian football</literal></binding>
+ </result>
+ </results>
+</sparql>""".encode(
+ "utf8"
+ ),
+ {"Content-Type": ["application/sparql-results+xml; charset=UTF-8"]},
+ )
+ )
+ res = self.graph.query(
query, initNs={"xyzzy": "http://www.w3.org/2004/02/skos/core#"}
)
for i in res:
assert type(i[0]) == Literal, i[0].n3()
+ self.assertEqual(self.httpmock.do_get_mock.call_count, 1)
+ req = self.httpmock.do_get_requests.pop(0)
+ self.assertRegex(req.path, r"^/sparql")
+ self.assertIn(query, req.path_query["query"][0])
+
def test_noinitNs(self):
query = """\
SELECT ?label WHERE
{ ?s a xyzzy:Concept ; xyzzy:prefLabel ?label . } LIMIT 10
"""
- self.assertRaises(ValueError, self.graph.query, query)
+ self.httpmock.do_get_responses.append(
+ MockHTTPResponse(
+ 400,
+ "Bad Request",
+ b"""\
+Virtuoso 37000 Error SP030: SPARQL compiler, line 1: Undefined namespace prefix in prefix:localpart notation at 'xyzzy:Concept' before ';'
+
+SPARQL query:
+SELECT ?label WHERE { ?s a xyzzy:Concept ; xyzzy:prefLabel ?label . } LIMIT 10""",
+ {"Content-Type": ["text/plain"]},
+ )
+ )
+ with self.assertRaises(ValueError):
+ self.graph.query(query)
+ self.assertEqual(self.httpmock.do_get_mock.call_count, 1)
+ req = self.httpmock.do_get_requests.pop(0)
+ self.assertRegex(req.path, r"^/sparql")
+ self.assertIn(query, req.path_query["query"][0])
def test_query_with_added_prolog(self):
prologue = """\
@@ -89,9 +181,60 @@ class SPARQLStoreDBPediaTestCase(unittest.TestCase):
SELECT ?label WHERE
{ ?s a xyzzy:Concept ; xyzzy:prefLabel ?label . } LIMIT 10
"""
+ self.httpmock.do_get_responses.append(
+ MockHTTPResponse(
+ 200,
+ "OK",
+ """\
+<sparql xmlns="http://www.w3.org/2005/sparql-results#" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.w3.org/2001/sw/DataAccess/rf1/result2.xsd">
+ <head>
+ <variable name="label"/>
+ </head>
+ <results distinct="false" ordered="true">
+ <result>
+ <binding name="label"><literal xml:lang="en">189</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 Scottish Football League</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 United States collegiate men&#39;s ice hockey season</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 Western Conference men&#39;s basketball season</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 collegiate men&#39;s basketball independents season in the United States</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 domestic association football cups</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 domestic association football leagues</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 in American ice hockey by league</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 in American ice hockey by team</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 in Belgian football</literal></binding>
+ </result>
+ </results>
+</sparql>""".encode(
+ "utf8"
+ ),
+ {"Content-Type": ["application/sparql-results+xml; charset=UTF-8"]},
+ )
+ )
res = helper.query_with_retry(self.graph, prologue + query)
for i in res:
assert type(i[0]) == Literal, i[0].n3()
+ self.assertEqual(self.httpmock.do_get_mock.call_count, 1)
+ req = self.httpmock.do_get_requests.pop(0)
+ self.assertRegex(req.path, r"^/sparql")
+ self.assertIn(query, req.path_query["query"][0])
def test_query_with_added_rdf_prolog(self):
prologue = """\
@@ -102,9 +245,60 @@ class SPARQLStoreDBPediaTestCase(unittest.TestCase):
SELECT ?label WHERE
{ ?s a xyzzy:Concept ; xyzzy:prefLabel ?label . } LIMIT 10
"""
+ self.httpmock.do_get_responses.append(
+ MockHTTPResponse(
+ 200,
+ "OK",
+ """\
+<sparql xmlns="http://www.w3.org/2005/sparql-results#" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.w3.org/2001/sw/DataAccess/rf1/result2.xsd">
+ <head>
+ <variable name="label"/>
+ </head>
+ <results distinct="false" ordered="true">
+ <result>
+ <binding name="label"><literal xml:lang="en">189</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 Scottish Football League</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 United States collegiate men&#39;s ice hockey season</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 Western Conference men&#39;s basketball season</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 collegiate men&#39;s basketball independents season in the United States</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 domestic association football cups</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 domestic association football leagues</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 in American ice hockey by league</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 in American ice hockey by team</literal></binding>
+ </result>
+ <result>
+ <binding name="label"><literal xml:lang="en">1899–1900 in Belgian football</literal></binding>
+ </result>
+ </results>
+</sparql>""".encode(
+ "utf8"
+ ),
+ {"Content-Type": ["application/sparql-results+xml; charset=UTF-8"]},
+ )
+ )
res = helper.query_with_retry(self.graph, prologue + query)
for i in res:
assert type(i[0]) == Literal, i[0].n3()
+ self.assertEqual(self.httpmock.do_get_mock.call_count, 1)
+ req = self.httpmock.do_get_requests.pop(0)
+ self.assertRegex(req.path, r"^/sparql")
+ self.assertIn(query, req.path_query["query"][0])
def test_counting_graph_and_store_queries(self):
query = """
@@ -117,21 +311,62 @@ class SPARQLStoreDBPediaTestCase(unittest.TestCase):
g = Graph("SPARQLStore")
g.open(self.path)
count = 0
- result = helper.query_with_retry(g, query)
+ response = MockHTTPResponse(
+ 200,
+ "OK",
+ """\
+ <sparql xmlns="http://www.w3.org/2005/sparql-results#" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.w3.org/2001/sw/DataAccess/rf1/result2.xsd">
+ <head>
+ <variable name="s"/>
+ </head>
+ <results distinct="false" ordered="true">
+ <result>
+ <binding name="s"><uri>http://www.openlinksw.com/virtrdf-data-formats#default-iid</uri></binding>
+ </result>
+ <result>
+ <binding name="s"><uri>http://www.openlinksw.com/virtrdf-data-formats#default-iid-nullable</uri></binding>
+ </result>
+ <result>
+ <binding name="s"><uri>http://www.openlinksw.com/virtrdf-data-formats#default-iid-blank</uri></binding>
+ </result>
+ <result>
+ <binding name="s"><uri>http://www.openlinksw.com/virtrdf-data-formats#default-iid-blank-nullable</uri></binding>
+ </result>
+ <result>
+ <binding name="s"><uri>http://www.openlinksw.com/virtrdf-data-formats#default-iid-nonblank</uri></binding>
+ </result>
+ </results>
+ </sparql>""".encode(
+ "utf8"
+ ),
+ {"Content-Type": ["application/sparql-results+xml; charset=UTF-8"]},
+ )
+
+ self.httpmock.do_get_responses.append(response)
+
+ result = g.query(query)
for _ in result:
count += 1
- assert count == 5, "Graph(\"SPARQLStore\") didn't return 5 records"
+ assert count == 5, 'Graph("SPARQLStore") didn\'t return 5 records'
from rdflib.plugins.stores.sparqlstore import SPARQLStore
+
st = SPARQLStore(query_endpoint=self.path)
count = 0
- result = helper.query_with_retry(st, query)
+ self.httpmock.do_get_responses.append(response)
+ result = st.query(query)
for _ in result:
count += 1
assert count == 5, "SPARQLStore() didn't return 5 records"
+ self.assertEqual(self.httpmock.do_get_mock.call_count, 2)
+ for _ in range(2):
+ req = self.httpmock.do_get_requests.pop(0)
+ self.assertRegex(req.path, r"^/sparql")
+ self.assertIn(query, req.path_query["query"][0])
+
class SPARQLStoreUpdateTestCase(unittest.TestCase):
def setUp(self):
@@ -222,93 +457,7 @@ class SPARQL11ProtocolStoreMock(BaseHTTPRequestHandler):
return
-def get_random_ip(parts: t.List[str] = None) -> str:
- if parts is None:
- parts = ["127"]
- for index in range(4 - len(parts)):
- parts.append(f"{random.randint(0, 255)}")
- return ".".join(parts)
-
-
-@contextmanager
-def ctx_http_server(handler: t.Type[BaseHTTPRequestHandler]) -> t.Iterator[HTTPServer]:
- host = get_random_ip()
- server = HTTPServer((host, 0), handler)
- server_thread = Thread(target=server.serve_forever)
- server_thread.daemon = True
- server_thread.start()
- yield server
- server.shutdown()
- server.socket.close()
- server_thread.join()
-
-
-GenericT = t.TypeVar("GenericT", bound=t.Any)
-
-
-def make_spypair(method: GenericT) -> t.Tuple[GenericT, Mock]:
- m = MagicMock()
-
- def wrapper(self: t.Any, *args: t.Any, **kwargs: t.Any) -> t.Any:
- m(*args, **kwargs)
- return method(self, *args, **kwargs)
-
- setattr(wrapper, "mock", m)
- return t.cast(GenericT, wrapper), m
-
-
-HeadersT = t.Dict[str, t.List[str]]
-PathQueryT = t.Dict[str, t.List[str]]
-
-
-class MockHTTPRequests(t.NamedTuple):
- path: str
- parsed_path: ParseResult
- path_query: PathQueryT
- headers: email.message.Message
-
-
-class MockHTTPResponse(t.NamedTuple):
- status_code: int
- reason_phrase: str
- body: bytes
- headers: HeadersT = collections.defaultdict(list)
-
-
class SPARQLMockTests(unittest.TestCase):
- requests: t.List[MockHTTPRequests] = []
- responses: t.List[MockHTTPResponse] = []
-
- def setUp(self):
- _tc = self
-
- class Handler(SimpleHTTPRequestHandler):
- tc = _tc
-
- def _do_GET(self):
- parsed_path = urlparse(self.path)
- path_query = parse_qs(parsed_path.query)
- request = MockHTTPRequests(
- self.path, parsed_path, path_query, self.headers
- )
- self.tc.requests.append(request)
-
- response = self.tc.responses.pop(0)
- self.send_response(response.status_code, response.reason_phrase)
- for header, values in response.headers.items():
- for value in values:
- self.send_header(header, value)
- self.end_headers()
-
- self.wfile.write(response.body)
- self.wfile.flush()
- return
-
- (do_GET, do_GET_mock) = make_spypair(_do_GET)
- self.Handler = Handler
- self.requests.clear()
- self.responses.clear()
-
def test_query(self):
triples = {
(RDFS.Resource, RDF.type, RDFS.Class),
@@ -318,9 +467,9 @@ class SPARQLMockTests(unittest.TestCase):
}
rows = "\n".join([f'"{s}","{p}","{o}"' for s, p, o in triples])
response_body = f"s,p,o\n{rows}".encode()
- response = MockHTTPResponse(200, "OK", response_body)
- response.headers["Content-Type"].append("text/csv; charset=utf-8")
- self.responses.append(response)
+ response = MockHTTPResponse(
+ 200, "OK", response_body, {"Content-Type": ["text/csv; charset=utf-8"]}
+ )
graph = Graph(store="SPARQLStore", identifier="http://example.com")
graph.bind("xsd", XSD)
@@ -330,9 +479,9 @@ class SPARQLMockTests(unittest.TestCase):
assert len(list(graph.namespaces())) >= 4
- with ctx_http_server(self.Handler) as server:
- (host, port) = server.server_address
- url = f"http://{host}:{port}/query"
+ with ServedSimpleHTTPMock() as httpmock:
+ httpmock.do_get_responses.append(response)
+ url = f"{httpmock.url}/query"
graph.open(url)
query_result = graph.query("SELECT ?s ?p ?o WHERE { ?s ?p ?o }")
@@ -341,9 +490,9 @@ class SPARQLMockTests(unittest.TestCase):
for triple in triples:
assert triple in rows
- self.Handler.do_GET_mock.assert_called_once()
- assert len(self.requests) == 1
- request = self.requests.pop()
+ httpmock.do_get_mock.assert_called_once()
+ assert len(httpmock.do_get_requests) == 1
+ request = httpmock.do_get_requests.pop()
assert len(request.path_query["query"]) == 1
query = request.path_query["query"][0]
diff --git a/test/test_swap_n3.py b/test/test_swap_n3.py
index f7071bec..2ecafe30 100644
--- a/test/test_swap_n3.py
+++ b/test/test_swap_n3.py
@@ -3,10 +3,7 @@ import os
import sys
import unittest
-try:
- maketrans = str.maketrans
-except AttributeError:
- from string import maketrans
+maketrans = str.maketrans
import rdflib
"""
diff --git a/test/testutils.py b/test/testutils.py
index 5fd46d4d..f95619d2 100644
--- a/test/testutils.py
+++ b/test/testutils.py
@@ -1,12 +1,41 @@
import sys
+from types import TracebackType
import isodate
import datetime
+import random
+from contextlib import AbstractContextManager, contextmanager
+from typing import (
+ List,
+ Optional,
+ TYPE_CHECKING,
+ Type,
+ Iterator,
+ Set,
+ Tuple,
+ Dict,
+ Any,
+ TypeVar,
+ cast,
+ NamedTuple,
+)
+from urllib.parse import ParseResult, urlparse, parse_qs
from traceback import print_exc
+from threading import Thread
+from http.server import BaseHTTPRequestHandler, HTTPServer, SimpleHTTPRequestHandler
+import email.message
from nose import SkipTest
from .earl import add_test, report
+import unittest
from rdflib import BNode, Graph, ConjunctiveGraph
+from rdflib.term import Node
+from unittest.mock import MagicMock, Mock
+from urllib.error import HTTPError
+from urllib.request import urlopen
+
+if TYPE_CHECKING:
+ import typing_extensions as te
# TODO: make an introspective version (like this one) of
@@ -105,3 +134,274 @@ def nose_tst_earl_report(generator, earl_report_name=None):
report.serialize(earl_report, format="n3")
report.serialize("test_reports/%s-latest.ttl" % earl_report_name, format="n3")
print("Wrote EARL-report to '%s'" % earl_report)
+
+
+def get_random_ip(parts: List[str] = None) -> str:
+ if parts is None:
+ parts = ["127"]
+ for _ in range(4 - len(parts)):
+ parts.append(f"{random.randint(0, 255)}")
+ return ".".join(parts)
+
+
+@contextmanager
+def ctx_http_server(handler: Type[BaseHTTPRequestHandler]) -> Iterator[HTTPServer]:
+ host = get_random_ip()
+ server = HTTPServer((host, 0), handler)
+ server_thread = Thread(target=server.serve_forever)
+ server_thread.daemon = True
+ server_thread.start()
+ yield server
+ server.shutdown()
+ server.socket.close()
+ server_thread.join()
+
+
+class GraphHelper:
+ @classmethod
+ def triple_set(cls, graph: Graph) -> Set[Tuple[Node, Node, Node]]:
+ return set(graph.triples((None, None, None)))
+
+ @classmethod
+ def equals(cls, lhs: Graph, rhs: Graph) -> bool:
+ return cls.triple_set(lhs) == cls.triple_set(rhs)
+
+
+GenericT = TypeVar("GenericT", bound=Any)
+
+
+def make_spypair(method: GenericT) -> Tuple[GenericT, Mock]:
+ m = MagicMock()
+
+ def wrapper(self: Any, *args: Any, **kwargs: Any) -> Any:
+ m(*args, **kwargs)
+ return method(self, *args, **kwargs)
+
+ setattr(wrapper, "mock", m)
+ return cast(GenericT, wrapper), m
+
+
+HeadersT = Dict[str, List[str]]
+PathQueryT = Dict[str, List[str]]
+
+
+class MockHTTPRequests(NamedTuple):
+ method: str
+ path: str
+ parsed_path: ParseResult
+ path_query: PathQueryT
+ headers: email.message.Message
+
+
+class MockHTTPResponse(NamedTuple):
+ status_code: int
+ reason_phrase: str
+ body: bytes
+ headers: HeadersT
+
+
+class SimpleHTTPMock:
+ """
+ SimpleHTTPMock allows testing of code that relies on an HTTP server.
+
+ NOTE: Currently only the GET method is supported.
+
+    Objects of this class have a list of responses for each method (GET, POST, etc...)
+    and return these responses for these methods in sequence.
+
+    All requests received are appended to a method-specific list.
+
+ Example usage:
+ >>> httpmock = SimpleHTTPMock()
+ >>> with ctx_http_server(httpmock.Handler) as server:
+ ... url = "http://{}:{}".format(*server.server_address)
+ ... # add a response the server should give:
+ ... httpmock.do_get_responses.append(
+ ... MockHTTPResponse(404, "Not Found", b"gone away", {})
+ ... )
+ ...
+ ... # send a request to get the first response
+ ... http_error: Optional[HTTPError] = None
+ ... try:
+ ... urlopen(f"{url}/bad/path")
+ ... except HTTPError as caught:
+ ... http_error = caught
+ ...
+ ... assert http_error is not None
+ ... assert http_error.code == 404
+ ...
+ ... # get and validate request that the mock received
+ ... req = httpmock.do_get_requests.pop(0)
+ ... assert req.path == "/bad/path"
+ """
+
+ # TODO: add additional methods (POST, PUT, ...) similar to get
+ def __init__(self):
+ self.do_get_requests: List[MockHTTPRequests] = []
+ self.do_get_responses: List[MockHTTPResponse] = []
+
+ _http_mock = self
+
+ class Handler(SimpleHTTPRequestHandler):
+ http_mock = _http_mock
+
+ def _do_GET(self):
+ parsed_path = urlparse(self.path)
+ path_query = parse_qs(parsed_path.query)
+ request = MockHTTPRequests(
+ "GET", self.path, parsed_path, path_query, self.headers
+ )
+ self.http_mock.do_get_requests.append(request)
+
+ response = self.http_mock.do_get_responses.pop(0)
+ self.send_response(response.status_code, response.reason_phrase)
+ for header, values in response.headers.items():
+ for value in values:
+ self.send_header(header, value)
+ self.end_headers()
+
+ self.wfile.write(response.body)
+ self.wfile.flush()
+ return
+
+ (do_GET, do_GET_mock) = make_spypair(_do_GET)
+
+ def log_message(self, format: str, *args: Any) -> None:
+ pass
+
+ self.Handler = Handler
+ self.do_get_mock = Handler.do_GET_mock
+
+ def reset(self):
+ self.do_get_requests.clear()
+ self.do_get_responses.clear()
+ self.do_get_mock.reset_mock()
+
+
+class SimpleHTTPMockTests(unittest.TestCase):
+ def test_example(self) -> None:
+ httpmock = SimpleHTTPMock()
+ with ctx_http_server(httpmock.Handler) as server:
+ url = "http://{}:{}".format(*server.server_address)
+ # add two responses the server should give:
+ httpmock.do_get_responses.append(
+ MockHTTPResponse(404, "Not Found", b"gone away", {})
+ )
+ httpmock.do_get_responses.append(
+ MockHTTPResponse(200, "OK", b"here it is", {})
+ )
+
+ # send a request to get the first response
+ with self.assertRaises(HTTPError) as raised:
+ urlopen(f"{url}/bad/path")
+ assert raised.exception.code == 404
+
+ # get and validate request that the mock received
+ req = httpmock.do_get_requests.pop(0)
+ self.assertEqual(req.path, "/bad/path")
+
+ # send a request to get the second response
+ resp = urlopen(f"{url}/")
+ self.assertEqual(resp.status, 200)
+ self.assertEqual(resp.read(), b"here it is")
+
+ httpmock.do_get_responses.append(
+ MockHTTPResponse(404, "Not Found", b"gone away", {})
+ )
+ httpmock.do_get_responses.append(
+ MockHTTPResponse(200, "OK", b"here it is", {})
+ )
+
+
+class ServedSimpleHTTPMock(SimpleHTTPMock, AbstractContextManager):
+ """
+    ServedSimpleHTTPMock is a SimpleHTTPMock with an attached HTTP server.
+
+ Example usage:
+ >>> with ServedSimpleHTTPMock() as httpmock:
+ ... # add a response the server should give:
+ ... httpmock.do_get_responses.append(
+ ... MockHTTPResponse(404, "Not Found", b"gone away", {})
+ ... )
+ ...
+ ... # send a request to get the first response
+ ... http_error: Optional[HTTPError] = None
+ ... try:
+ ... urlopen(f"{httpmock.url}/bad/path")
+ ... except HTTPError as caught:
+ ... http_error = caught
+ ...
+ ... assert http_error is not None
+ ... assert http_error.code == 404
+ ...
+ ... # get and validate request that the mock received
+ ... req = httpmock.do_get_requests.pop(0)
+ ... assert req.path == "/bad/path"
+ """
+
+ def __init__(self):
+ super().__init__()
+ host = get_random_ip()
+ self.server = HTTPServer((host, 0), self.Handler)
+ self.server_thread = Thread(target=self.server.serve_forever)
+ self.server_thread.daemon = True
+ self.server_thread.start()
+
+ def stop(self) -> None:
+ self.server.shutdown()
+ self.server.socket.close()
+ self.server_thread.join()
+
+ @property
+ def address_string(self) -> str:
+ (host, port) = self.server.server_address
+ return f"{host}:{port}"
+
+ @property
+ def url(self) -> str:
+ return f"http://{self.address_string}"
+
+ def __enter__(self) -> "ServedSimpleHTTPMock":
+ return self
+
+ def __exit__(
+ self,
+ __exc_type: Optional[Type[BaseException]],
+ __exc_value: Optional[BaseException],
+ __traceback: Optional[TracebackType],
+ ) -> "te.Literal[False]":
+ self.stop()
+ return False
+
+
+class ServedSimpleHTTPMockTests(unittest.TestCase):
+ def test_example(self) -> None:
+ with ServedSimpleHTTPMock() as httpmock:
+ # add two responses the server should give:
+ httpmock.do_get_responses.append(
+ MockHTTPResponse(404, "Not Found", b"gone away", {})
+ )
+ httpmock.do_get_responses.append(
+ MockHTTPResponse(200, "OK", b"here it is", {})
+ )
+
+ # send a request to get the first response
+ with self.assertRaises(HTTPError) as raised:
+ urlopen(f"{httpmock.url}/bad/path")
+ assert raised.exception.code == 404
+
+ # get and validate request that the mock received
+ req = httpmock.do_get_requests.pop(0)
+ self.assertEqual(req.path, "/bad/path")
+
+ # send a request to get the second response
+ resp = urlopen(f"{httpmock.url}/")
+ self.assertEqual(resp.status, 200)
+ self.assertEqual(resp.read(), b"here it is")
+
+ httpmock.do_get_responses.append(
+ MockHTTPResponse(404, "Not Found", b"gone away", {})
+ )
+ httpmock.do_get_responses.append(
+ MockHTTPResponse(200, "OK", b"here it is", {})
+ )
diff --git a/test/translate_algebra/main.py b/test/translate_algebra/main.py
new file mode 100644
index 00000000..b11a6e88
--- /dev/null
+++ b/test/translate_algebra/main.py
@@ -0,0 +1,789 @@
+from test_base import Test, TestExecution, format_text
+from rdflib.plugins.sparql.algebra import translateAlgebra
+import rdflib.plugins.sparql.parser as parser
+import rdflib.plugins.sparql.algebra as algebra
+import sys
+import logging
+
+
+def _pprint_query(query: str):
+ p = '{'
+ q = "}"
+ i = 0
+ f = 1
+
+ for e in query:
+ if e in p:
+ f or print()
+ print(' ' * i + e)
+ i += 4
+ f = 1
+ elif e in q:
+ f or print()
+ i -= 4
+ f = 1
+ print(' ' * i + e)
+ else:
+ not f or print(' ' * i, end='')
+ f = print(e, end='')
+
+
+class TestAlgebraToTest(TestExecution):
+
+ def __init__(self, annotated_tests: bool = False):
+ super().__init__(annotated_tests)
+ self.rdf_engine = None
+ self.query_text = None
+ self.query_algebra = None
+ self.query_from_algebra = None
+ self.query_from_query_from_algebra = None
+
+ def before_single_test(self, test_name: str):
+ """
+
+ :return:
+ """
+
+ print("Executing before_single_tests ...")
+
+ if self.annotated_tests:
+ test_name = test_name[2:]
+
+ self.query_text = open("test_data/{0}.txt".format(test_name), "r").read()
+
+ def test_functions__functional_forms(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+
+ test = Test(test_number=1,
+ tc_desc='Test if functional forms are properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ try:
+ self.rdf_engine.get_data(self.query_from_query_from_algebra, yn_timestamp_query=False)
+ except Exception as e:
+ print(e)
+ print("The query must be executable. Otherwise, the test has failed.")
+ return Test(test_number=test.test_number, tc_desc=test.tc_desc, expected_result="0",
+ actual_result="not_executable")
+
+ return test
+
+ def test_functions__functional_forms_not_exists(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=2,
+ tc_desc='Test if the not exists form is properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_functions__functions_on_rdf_terms(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=3,
+ tc_desc='Test if functions on rdf terms are properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_functions__functions_on_strings(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=4,
+ tc_desc='Test if functions on strings are properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_functions__functions_on_numerics(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=5,
+ tc_desc='Test if functions on numerics are properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_functions__hash_functions(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=6,
+ tc_desc='Test if hash functions are properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_functions__functions_on_dates_and_time(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=7,
+ tc_desc='Test if functions on dates and time are properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_graph_patterns__aggregate_join(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=8,
+ tc_desc='Test if aggregate join including all aggregation functions '
+ 'are properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_graph_patterns__bgp(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=9,
+ tc_desc='Test if basic graph patterns are properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_graph_patterns__extend(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=10,
+ tc_desc='Test if "extend" (=Bind explicitly or implicitly in projection) '
+ 'gets properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_graph_patterns__filter(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=11,
+ tc_desc='Test if filter gets properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_graph_patterns__graph(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=12,
+ tc_desc='Test if "graph" gets properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_graph_patterns__group(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=13,
+ tc_desc='Test if "group" gets properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_graph_patterns__having(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=14,
+ tc_desc='Test if "having" gets properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_graph_patterns__join(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=15,
+ tc_desc='Test if "join" gets properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_graph_patterns__left_join(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=16,
+ tc_desc='Test if "left join" gets properly translated into "OPTIONAL {...}" in the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_graph_patterns__minus(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=17,
+ tc_desc='Test if "minus" gets properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_graph_patterns__union(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=18,
+ tc_desc='Test if "union" gets properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_operators__arithmetics(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=19,
+ tc_desc='Test if arithmetics are properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_operators__conditional_and(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=20,
+ tc_desc='Test if "conditional ands (&&)" are properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_operators__conditional_or(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=21,
+ tc_desc='Test if "conditional ors (||)" are properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_operators__relational(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=22,
+ tc_desc='Test if relational expressions are properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_operators__unary(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=23,
+ tc_desc='Test if unary expressions are properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_other__service1(self):
+ tc_desc = 'Test if a nested service pattern is properly translated ' \
+ 'into the query text. ' \
+ 'The query must also be executable and shall not violate any SPARQL query syntax.'
+ try:
+ query_tree = parser.parseQuery(self.query_text)
+ except Exception as e:
+ print(e)
+ return Test(test_number=24, tc_desc=tc_desc, expected_result="0",
+ actual_result="Not executable. Error returned from parseQuery")
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=24,
+ tc_desc=tc_desc,
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_other__service2(self):
+ tc_desc = 'Test if "service" along with its service string is properly translated ' \
+ 'into the query text. ' \
+ 'The query must also be executable and shall not violate any SPARQL query syntax.'
+ try:
+ query_tree = parser.parseQuery(self.query_text)
+ except Exception as e:
+ print(e)
+ return Test(test_number=25, tc_desc=tc_desc, expected_result="0",
+ actual_result="Not executable. Error returned from parseQuery().")
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=25,
+ tc_desc=tc_desc,
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_other__values(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=26,
+ tc_desc='Test if "values" gets properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_property_path__alternative_path(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=27,
+ tc_desc='Test if an alternative path gets properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_property_path__inverse_path(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=28,
+ tc_desc='Test if an inverse path gets properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_property_path__negated_property_set(self):
+ tc_desc = 'Test if a negated property set gets properly translated into the query text. ' \
+ 'The query must also be executable and shall not violate any SPARQL query syntax.'
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ try:
+ self.query_from_algebra = translateAlgebra(query_algebra)
+ except TypeError as e:
+ print(e)
+ return Test(test_number=29, tc_desc=tc_desc, expected_result="0",
+ actual_result="Not executable. n3() method of NegatedPath class should be fixed. ")
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=29,
+ tc_desc=tc_desc,
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_property_path__one_or_more_path(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=30,
+ tc_desc='Test if a oneOrMore path gets properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_property_path__sequence_path(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=31,
+ tc_desc='Test if a sequence path gets properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_property_path__zero_or_more_path(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=32,
+ tc_desc='Test if a zeroOrMore path gets properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_property_path__zero_or_one_path(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=33,
+ tc_desc='Test if a zeroOrOne path gets properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_solution_modifiers__distinct(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=34,
+ tc_desc='Test if "distinct" gets properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_solution_modifiers__order_by(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=35,
+ tc_desc='Test if "order by" gets properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_solution_modifiers__reduced(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=36,
+ tc_desc='Test if "reduced" gets properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_solution_modifiers__slice(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=37,
+                    tc_desc='Test if slice gets properly translated into the limit and offset. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_solution_modifiers__to_multiset(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=38,
+ tc_desc='Test if subqueries get properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+ def test_integration__complex_query1(self):
+ query_tree = parser.parseQuery(self.query_text)
+ query_algebra = algebra.translateQuery(query_tree)
+ self.query_from_algebra = translateAlgebra(query_algebra)
+
+ query_tree_2 = parser.parseQuery(self.query_from_algebra)
+ query_algebra_2 = algebra.translateQuery(query_tree_2)
+ self.query_from_query_from_algebra = translateAlgebra(query_algebra_2)
+ _pprint_query(self.query_from_query_from_algebra)
+
+ test = Test(test_number=39,
+ tc_desc='Test a query with multiple graph patterns and solution modifiers '
+ 'gets properly translated into the query text. '
+ 'The query must also be executable and shall not violate any SPARQL query syntax.',
+ expected_result=self.query_from_algebra,
+ actual_result=self.query_from_query_from_algebra)
+
+ return test
+
+
+t = TestAlgebraToTest(annotated_tests=False)
+t.run_tests()
+t.print_test_results()
diff --git a/test/translate_algebra/test_base.py b/test/translate_algebra/test_base.py
new file mode 100644
index 00000000..f341b43a
--- /dev/null
+++ b/test/translate_algebra/test_base.py
@@ -0,0 +1,150 @@
+import pandas as pd
+import configparser
+import os
+import sys
+from tabulate import tabulate
+import logging
+
+
+def format_text(comment, max_line_length):
+ # accumulated line length
+ ACC_length = 0
+ words = comment.split(" ")
+ formatted_text = ""
+ for word in words:
+        # if the accumulated length plus this word and a trailing space still fits on the line
+ if ACC_length + (len(word) + 1) <= max_line_length:
+ # append the word and a space
+ formatted_text = formatted_text + word + " "
+ # length = length + length of word + length of space
+ ACC_length = ACC_length + len(word) + 1
+ else:
+ # append a line break, then the word and a space
+ formatted_text = formatted_text + "\n" + word + " "
+ # reset counter of length to the length of a word and a space
+ ACC_length = len(word) + 1
+ return formatted_text
+
+
+class Test:
+
+ def __init__(self, tc_desc: str, expected_result: str = None, actual_result: str = None, test_number: int = None,
+ test_name: str = None):
+ self.test_number = test_number
+ self.test_name = test_name
+ self.tc_desc = tc_desc
+ self.actual_result = actual_result
+ self.expected_result = expected_result
+ self.yn_passed = False
+
+ def test(self):
+ """
+
+ :return:
+ """
+ assert self.actual_result
+
+ if self.expected_result == self.actual_result:
+ self.yn_passed = True
+
+
+class TestExecution:
+
+ def __init__(self, annotated_tests: bool = False):
+ """
+
+ :param annotated_tests: If this flag is set only tests with the prefix "x_test" will be executed.
+ """
+ test_module_path = os.path.dirname(sys.modules[__class__.__module__].__file__)
+ config_path = test_module_path + "/../config.ini"
+ self.test_config = configparser.ConfigParser()
+ self.test_config.read(config_path)
+ self.annotated_tests = annotated_tests
+ self.tests = []
+
+ def before_all_tests(self):
+ """
+
+ :return:
+ """
+
+ print("Executing before_tests ...")
+
+ def before_single_test(self, test_name: str):
+ """
+
+ :return:
+ """
+
+ print("Executing before_single_tests ...")
+
+ def after_single_test(self):
+ """
+
+ :return:
+ """
+
+ print("Executing after_single_test")
+
+ def after_all_tests(self):
+ """
+
+ :return:
+ """
+
+ print("Executing after_tests ...")
+
+ def run_tests(self):
+ """
+
+ :return:
+ """
+ print("Executing tests ...")
+ logging.getLogger().setLevel(int(self.test_config.get('TEST', 'log_level')))
+
+ self.before_all_tests()
+ test_prefix = 'test_'
+ if self.annotated_tests:
+ test_prefix = 'x_test_'
+ test_functions = [func for func in dir(self) if callable(getattr(self, func)) and func.startswith(test_prefix)]
+ try:
+ test_number = 1
+ for func in test_functions:
+ logging.info("Executing test: " + func)
+ self.before_single_test(func)
+ test_function = getattr(self, func)
+ test = test_function()
+ test_number += 1
+ test.test_name = func
+ test.test()
+ self.tests.append(test)
+ self.after_single_test()
+ except Exception as e:
+ print(e)
+ finally:
+ self.after_all_tests()
+
+ def print_test_results(self):
+ """
+
+ :return:
+ """
+
+ tests_df = pd.DataFrame(columns=['test_number', 'test_passed', 'test_name', 'test_case_description',
+ 'expected_result', 'actual_result'])
+ for test in self.tests:
+ if isinstance(test, Test):
+ formatted_tc_desc = format_text(test.tc_desc, 100)
+ formatted_expected_result = format_text(test.expected_result, 50)
+ formatted_actual_result = format_text(test.actual_result, 50)
+
+ tests_df = tests_df.append({'test_number': test.test_number,
+ 'test_passed': test.yn_passed,
+ 'test_name': test.test_name,
+ 'test_case_description': formatted_tc_desc,
+ 'expected_result': formatted_expected_result,
+ 'actual_result': formatted_actual_result}, ignore_index=True)
+
+ tests_df.sort_values('test_number', inplace=True)
+ pdtabulate = lambda df: tabulate(df, headers='keys', tablefmt='grid', )
+ print(pdtabulate(tests_df))
diff --git a/test/translate_algebra/test_data/test_functions__functional_forms.txt b/test/translate_algebra/test_data/test_functions__functional_forms.txt
new file mode 100644
index 00000000..d6ccd90b
--- /dev/null
+++ b/test/translate_algebra/test_data/test_functions__functional_forms.txt
@@ -0,0 +1,20 @@
+PREFIX foaf: <http://xmlns.com/foaf/0.1/>
+PREFIX dc: <http://purl.org/dc/elements/1.1/>
+PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
+SELECT (IF(?givenName = "Obama", if(?givenName = "Obama", "yes", "no"), "no") as ?givenName2) (COALESCE(1/0 *3 *4 +5 + 6, ?x) as ?col)
+WHERE {
+ ?x foaf:givenName ?givenName .
+ OPTIONAL {
+ ?x dc:date ?date
+ } .
+ FILTER ( bound(?date) )
+ FILTER NOT EXISTS {
+ ?givenName foaf:name ?name.
+ filter(?givenName = "Clark")
+ }
+ FILTER EXISTS {
+ ?givenName foaf:name ?name.
+ filter((?givenName = "Obama" || ?givenName = "Obama2") && ?givenName = "Stern")
+ FILTER (sameTerm(?givenName, ?givenName) && !sameTerm(?givenName, ?givenName2))
+ }
+} \ No newline at end of file
diff --git a/test/translate_algebra/test_data/test_functions__functional_forms_not_exists.txt b/test/translate_algebra/test_data/test_functions__functional_forms_not_exists.txt
new file mode 100644
index 00000000..4d846eec
--- /dev/null
+++ b/test/translate_algebra/test_data/test_functions__functional_forms_not_exists.txt
@@ -0,0 +1,6 @@
+PREFIX citing: <https://github.com/GreenfishK/DataCitation/citing/>
+
+select ?s ?p ?o {
+ ?s ?p ?o .
+ filter not exists {?s citing:valid_from ?valid_from} .
+}
diff --git a/test/translate_algebra/test_data/test_functions__functions_on_dates_and_time.txt b/test/translate_algebra/test_data/test_functions__functions_on_dates_and_time.txt
new file mode 100644
index 00000000..4eb62f98
--- /dev/null
+++ b/test/translate_algebra/test_data/test_functions__functions_on_dates_and_time.txt
@@ -0,0 +1,5 @@
+select (now() as ?now) (year(now()) as ?year) (month(now()) as ?month) (day(now()) as ?day)
+(hours(now()) as ?hours) (minutes(now()) as ?minutes) (seconds(now()) as ?seconds)
+(timezone(now()) as ?timezone) (tz(now()) as ?tz) where {
+ ?s ?p ?o .
+} limit 1
diff --git a/test/translate_algebra/test_data/test_functions__functions_on_numerics.txt b/test/translate_algebra/test_data/test_functions__functions_on_numerics.txt
new file mode 100644
index 00000000..626a8e9d
--- /dev/null
+++ b/test/translate_algebra/test_data/test_functions__functions_on_numerics.txt
@@ -0,0 +1,3 @@
+select (abs(1.6) as ?abs) (round(5.5) as ?round) (ceil(5.5) as ?ceil) (floor(5.5) as ?floor) (rand() as ?rand) where {
+ ?s ?p ?o .
+} limit 1
diff --git a/test/translate_algebra/test_data/test_functions__functions_on_rdf_terms.txt b/test/translate_algebra/test_data/test_functions__functions_on_rdf_terms.txt
new file mode 100644
index 00000000..c92fd2f6
--- /dev/null
+++ b/test/translate_algebra/test_data/test_functions__functions_on_rdf_terms.txt
@@ -0,0 +1,16 @@
+PREFIX foaf: <http://xmlns.com/foaf/0.1/>
+PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
+SELECT ?name ?mbox (STRDT("iiii", <http://example/romanNumeral>) as ?strdt) (STRLANG("chat", "en") as ?strlang) (uuid() as ?uuid) (STRUUID() as ?struuid)
+WHERE {
+ ?x foaf:name ?name ;
+ foaf:mbox ?mbox .
+ FILTER isIRI(?mbox)
+ FILTER isBlank(?mbox)
+ FILTER isLiteral(?mbox)
+ FILTER isNumeric(?mbox)
+ FILTER STR(?mbox)
+ FILTER LANG(?mbox)
+ FILTER DATATYPE(?mbox)
+ FILTER IRI(?mobox)
+ FILTER BNODE("string")
+} \ No newline at end of file
diff --git a/test/translate_algebra/test_data/test_functions__functions_on_strings.txt b/test/translate_algebra/test_data/test_functions__functions_on_strings.txt
new file mode 100644
index 00000000..a5921c6e
--- /dev/null
+++ b/test/translate_algebra/test_data/test_functions__functions_on_strings.txt
@@ -0,0 +1,18 @@
+select
+(strlen("chat") as ?strlen)
+(substr("foobar", 4) as ?substr)
+(ucase("foo") as ?ucase)
+(lcase("FOO") as ?lcase)
+(strStarts("foobar", "foo") as ?strstarts)
+(strEnds("foobar", "bar") as ?strends)
+(contains("foobar", "bar") as ?contains)
+(strbefore("abc","b") as ?strbefore)
+(strafter("abc","b") as ?strafter)
+(encode_for_uri("Los Angeles") as ?encode_for_uri)
+(concat("foo"@en, "bar"@en) as ?concat)
+(replace("abcd", "b", "Z") as ?replace)
+(regex(substr("foobar", 4), "bar", "bar") as ?regex)
+where {
+ ?s ?p ?o .
+ FILTER langMatches(lang(?o), "EN" )
+} limit 1
diff --git a/test/translate_algebra/test_data/test_functions__hash_functions.txt b/test/translate_algebra/test_data/test_functions__hash_functions.txt
new file mode 100644
index 00000000..6d83468e
--- /dev/null
+++ b/test/translate_algebra/test_data/test_functions__hash_functions.txt
@@ -0,0 +1,9 @@
+select
+(md5("abc") as ?md5)
+(sha1("abc") as ?sha1)
+(SHA256("abc") as ?SHA256)
+(SHA384("abc") as ?SHA384)
+(SHA512("abc") as ?SHA512)
+where {
+ ?s ?p ?o .
+} limit 1
diff --git a/test/translate_algebra/test_data/test_graph_patterns__aggregate_join.txt b/test/translate_algebra/test_data/test_graph_patterns__aggregate_join.txt
new file mode 100644
index 00000000..965e5c97
--- /dev/null
+++ b/test/translate_algebra/test_data/test_graph_patterns__aggregate_join.txt
@@ -0,0 +1,13 @@
+PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
+select
+(sum(?s) as ?sum)
+(count(distinct ?s) as ?count)
+(min(?s) as ?min)
+(max(?s) as ?max)
+(avg(?s) as ?avg)
+(sample(?s) as ?sample)
+(GROUP_CONCAT(?s;SEPARATOR="|") AS ?group_concat)
+where {
+ ?s ?p ?o .
+ filter(?s = rdf:type)
+} limit 1
diff --git a/test/translate_algebra/test_data/test_graph_patterns__bgp.txt b/test/translate_algebra/test_data/test_graph_patterns__bgp.txt
new file mode 100644
index 00000000..61a248e5
--- /dev/null
+++ b/test/translate_algebra/test_data/test_graph_patterns__bgp.txt
@@ -0,0 +1,11 @@
+PREFIX pub: <http://ontology.ontotext.com/taxonomy/>
+PREFIX publishing: <http://ontology.ontotext.com/publishing#>
+
+select ?personLabel ?party_label ?document ?mention where {
+ ?mention publishing:hasInstance ?person .
+ ?document publishing:containsMention ?mention .
+ ?person pub:memberOfPoliticalParty ?party .
+ ?person pub:preferredLabel ?personLabel .
+ ?party pub:hasValue ?value .
+ ?value pub:preferredLabel ?party_label .
+} order by ?mention \ No newline at end of file
diff --git a/test/translate_algebra/test_data/test_graph_patterns__extend.txt b/test/translate_algebra/test_data/test_graph_patterns__extend.txt
new file mode 100644
index 00000000..76cbabc9
--- /dev/null
+++ b/test/translate_algebra/test_data/test_graph_patterns__extend.txt
@@ -0,0 +1,12 @@
+PREFIX pub: <http://ontology.ontotext.com/taxonomy/>
+PREFIX publishing: <http://ontology.ontotext.com/publishing#>
+
+select ?personLabel ?party_name ?document (?mention as ?men) where {
+ ?mention publishing:hasInstance ?person .
+ ?document publishing:containsMention ?mention .
+ ?person pub:memberOfPoliticalParty ?party .
+ ?person pub:preferredLabel ?personLabel .
+ ?party pub:hasValue ?value .
+ ?value pub:preferredLabel ?party_label .
+ Bind(?party_label as ?party_name)
+} order by ?mention \ No newline at end of file
diff --git a/test/translate_algebra/test_data/test_graph_patterns__filter.txt b/test/translate_algebra/test_data/test_graph_patterns__filter.txt
new file mode 100644
index 00000000..215bf729
--- /dev/null
+++ b/test/translate_algebra/test_data/test_graph_patterns__filter.txt
@@ -0,0 +1,13 @@
+PREFIX pub: <http://ontology.ontotext.com/taxonomy/>
+PREFIX publishing: <http://ontology.ontotext.com/publishing#>
+
+select ?personLabel ?party_label ?document ?mention where {
+ ?mention publishing:hasInstance ?person .
+ ?document publishing:containsMention ?mention .
+ ?person pub:memberOfPoliticalParty ?party .
+ ?person pub:preferredLabel ?personLabel .
+ ?party pub:hasValue ?value .
+ ?value pub:preferredLabel ?party_label .
+
+ filter((?personLabel = "Barack Obama"@en || ?personLabel = "Judy Chu"@en) && ?personLabel = "Michelle Obama"@en )
+} order by ?mention \ No newline at end of file
diff --git a/test/translate_algebra/test_data/test_graph_patterns__graph.txt b/test/translate_algebra/test_data/test_graph_patterns__graph.txt
new file mode 100644
index 00000000..40d6ee4e
--- /dev/null
+++ b/test/translate_algebra/test_data/test_graph_patterns__graph.txt
@@ -0,0 +1,23 @@
+PREFIX data: <http://example.org/foaf/>
+PREFIX foaf: <http://xmlns.com/foaf/0.1/>
+PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
+
+SELECT ?mbox ?nick ?ppd
+FROM NAMED <http://example.org/foaf/aliceFoaf>
+FROM NAMED <http://example.org/foaf/bobFoaf>
+WHERE
+{
+ GRAPH data:aliceFoaf
+ {
+ ?alice foaf:mbox <mailto:alice@work.example> ;
+ foaf:knows ?whom .
+ ?whom foaf:mbox ?mbox ;
+ rdfs:seeAlso ?ppd .
+ ?ppd a foaf:PersonalProfileDocument .
+ } .
+ GRAPH ?ppd
+ {
+ ?w foaf:mbox ?mbox ;
+ foaf:nick ?nick
+ }
+} \ No newline at end of file
diff --git a/test/translate_algebra/test_data/test_graph_patterns__group.txt b/test/translate_algebra/test_data/test_graph_patterns__group.txt
new file mode 100644
index 00000000..27e60d97
--- /dev/null
+++ b/test/translate_algebra/test_data/test_graph_patterns__group.txt
@@ -0,0 +1,6 @@
+PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
+
+SELECT (SUM(?val) AS ?sum) (COUNT(?a) AS ?count)
+WHERE {
+ ?a rdf:value ?val .
+} GROUP BY ?a \ No newline at end of file
diff --git a/test/translate_algebra/test_data/test_graph_patterns__having.txt b/test/translate_algebra/test_data/test_graph_patterns__having.txt
new file mode 100644
index 00000000..38f081c0
--- /dev/null
+++ b/test/translate_algebra/test_data/test_graph_patterns__having.txt
@@ -0,0 +1,10 @@
+PREFIX : <http://books.example/>
+SELECT (SUM(?lprice) AS ?totalPrice)
+WHERE {
+ ?org :affiliates ?auth .
+ ?auth :writesBook ?book .
+ ?book :price ?lprice .
+ filter(?lprice < 5)
+}
+GROUP BY ?org
+HAVING (SUM(?lprice) > 10) \ No newline at end of file
diff --git a/test/translate_algebra/test_data/test_graph_patterns__join.txt b/test/translate_algebra/test_data/test_graph_patterns__join.txt
new file mode 100644
index 00000000..91248c88
--- /dev/null
+++ b/test/translate_algebra/test_data/test_graph_patterns__join.txt
@@ -0,0 +1,11 @@
+PREFIX ex: <http://people.example/>
+SELECT ?select ?minName
+WHERE {
+ ex:alice ex:knows ?select .
+ {
+ SELECT (MIN(?name) AS ?minName) ?select
+ WHERE {
+ ?select ex:name ?name .
+ } GROUP BY ?select
+ }
+} \ No newline at end of file
diff --git a/test/translate_algebra/test_data/test_graph_patterns__left_join.txt b/test/translate_algebra/test_data/test_graph_patterns__left_join.txt
new file mode 100644
index 00000000..c2543b4c
--- /dev/null
+++ b/test/translate_algebra/test_data/test_graph_patterns__left_join.txt
@@ -0,0 +1,14 @@
+PREFIX foaf: <http://xmlns.com/foaf/0.1/>
+PREFIX dc: <http://purl.org/dc/elements/1.1/>
+PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
+SELECT ?givenName
+WHERE {
+ ?x foaf:givenName ?givenName .
+ OPTIONAL {
+ ?x dc:date ?date
+ } .
+ OPTIONAL {
+ ?x dc:datetime ?datetime
+ } .
+ FILTER ( bound(?date) )
+} \ No newline at end of file
diff --git a/test/translate_algebra/test_data/test_graph_patterns__minus.txt b/test/translate_algebra/test_data/test_graph_patterns__minus.txt
new file mode 100644
index 00000000..2ad733d9
--- /dev/null
+++ b/test/translate_algebra/test_data/test_graph_patterns__minus.txt
@@ -0,0 +1,8 @@
+PREFIX : <http://example/>
+SELECT * WHERE {
+ ?x :p ?n
+ MINUS {
+ ?x :q ?m .
+ FILTER(?n = ?m)
+ }
+} \ No newline at end of file
diff --git a/test/translate_algebra/test_data/test_graph_patterns__union.txt b/test/translate_algebra/test_data/test_graph_patterns__union.txt
new file mode 100644
index 00000000..aeaaf897
--- /dev/null
+++ b/test/translate_algebra/test_data/test_graph_patterns__union.txt
@@ -0,0 +1,14 @@
+PREFIX dc10: <http://purl.org/dc/elements/1.0/>
+PREFIX dc11: <http://purl.org/dc/elements/1.1/>
+SELECT ?title ?author
+WHERE {
+ {
+ ?book dc10:title ?title .
+ ?book dc10:creator ?author
+ }
+ UNION
+ {
+ ?book dc11:title ?title .
+ ?book dc11:creator ?author
+ }
+} \ No newline at end of file
diff --git a/test/translate_algebra/test_data/test_integration__complex_query1.txt b/test/translate_algebra/test_data/test_integration__complex_query1.txt
new file mode 100644
index 00000000..38700755
--- /dev/null
+++ b/test/translate_algebra/test_data/test_integration__complex_query1.txt
@@ -0,0 +1,34 @@
+# Prefixes
+PREFIX pub: <http://ontology.ontotext.com/taxonomy/>
+PREFIX publishing: <http://ontology.ontotext.com/publishing#>
+PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
+
+select ?document ?mention ?personLabel ?party_label {
+ {
+ select * {
+ ?document publishing:containsMention ?mention .
+ ?person pub:memberOfPoliticalParty ?party .
+ ?person pub:preferredLabel ?personLabel .
+ ?party pub:hasValue ?value .
+ ?value pub:preferredLabel ?party_label .
+ filter(?personLabel = "Judy Chu"@en)
+
+ {
+ Select * where {
+ ?mention publishing:hasInstance ?person .
+
+ }
+ }
+ }
+ }
+ union
+ {
+ select * where {
+ ?mention publishing:hasInstance ?person .
+ ?document publishing:containsMention ?mention .
+ ?person pub:memberOfPoliticalParty / pub:hasValue / pub:preferredLabel ?party_label .
+ ?person pub:preferredLabel ?personLabel .
+ filter(?personLabel = "Barack Obama"@en)
+ }
+ }
+} \ No newline at end of file
diff --git a/test/translate_algebra/test_data/test_operators__arithmetics.txt b/test/translate_algebra/test_data/test_operators__arithmetics.txt
new file mode 100644
index 00000000..ac3be172
--- /dev/null
+++ b/test/translate_algebra/test_data/test_operators__arithmetics.txt
@@ -0,0 +1,3 @@
+select (2*4 -5 + 3 as ?test_arithmetics) where {
+ ?s ?p ?o .
+} limit 1
diff --git a/test/translate_algebra/test_data/test_operators__conditional_and.txt b/test/translate_algebra/test_data/test_operators__conditional_and.txt
new file mode 100644
index 00000000..1a050a19
--- /dev/null
+++ b/test/translate_algebra/test_data/test_operators__conditional_and.txt
@@ -0,0 +1,5 @@
+PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
+select (2*4 -5 + 3 as ?test_arithmetics) where {
+ ?s ?p ?o .
+ filter(2=2 && 1=1)
+} limit 1
diff --git a/test/translate_algebra/test_data/test_operators__conditional_or.txt b/test/translate_algebra/test_data/test_operators__conditional_or.txt
new file mode 100644
index 00000000..517ff32b
--- /dev/null
+++ b/test/translate_algebra/test_data/test_operators__conditional_or.txt
@@ -0,0 +1,5 @@
+PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
+select (2*4 -5 + 3 as ?test_arithmetics) where {
+ ?s ?p ?o .
+ filter(2=2 || 1=1)
+} limit 1
diff --git a/test/translate_algebra/test_data/test_operators__relational.txt b/test/translate_algebra/test_data/test_operators__relational.txt
new file mode 100644
index 00000000..15c21a47
--- /dev/null
+++ b/test/translate_algebra/test_data/test_operators__relational.txt
@@ -0,0 +1,5 @@
+PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
+select (2*4 -5 + 3 as ?test_arithmetics) where {
+ ?s ?p ?o .
+ filter(3>2 || 1<2 || 2>=2 || 2<=2 || 1!=2)
+} limit 1
diff --git a/test/translate_algebra/test_data/test_operators__unary.txt b/test/translate_algebra/test_data/test_operators__unary.txt
new file mode 100644
index 00000000..770b20d6
--- /dev/null
+++ b/test/translate_algebra/test_data/test_operators__unary.txt
@@ -0,0 +1,5 @@
+PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
+select (2*4 -5 + 3 as ?test_arithmetics) where {
+ ?s ?p ?o .
+ filter(?o || ?o)
+} limit 1
diff --git a/test/translate_algebra/test_data/test_other__service1.txt b/test/translate_algebra/test_data/test_other__service1.txt
new file mode 100644
index 00000000..feedc495
--- /dev/null
+++ b/test/translate_algebra/test_data/test_other__service1.txt
@@ -0,0 +1,16 @@
+# https://www.w3.org/TR/sparql11-federated-query/
+PREFIX foaf: <http://xmlns.com/foaf/0.1/>
+SELECT ?person ?interest ?known
+WHERE
+{
+ SERVICE <http://people.example.org/sparql> {
+ ?person foaf:name ?name .
+ OPTIONAL {
+ ?person foaf:interest ?interest .
+ SERVICE <http://people2.example.org/sparql> {
+ ?person foaf:knows ?known .
+ }
+ }
+ }
+}
+# Error message: maximum recursion depth exceeded in comparison
diff --git a/test/translate_algebra/test_data/test_other__service2.txt b/test/translate_algebra/test_data/test_other__service2.txt
new file mode 100644
index 00000000..d3ba6559
--- /dev/null
+++ b/test/translate_algebra/test_data/test_other__service2.txt
@@ -0,0 +1,13 @@
+# https://www.w3.org/TR/sparql11-federated-query/
+
+PREFIX foaf: <http://xmlns.com/foaf/0.1/>
+
+SELECT ?name
+FROM <http://example.org/myfoaf.rdf>
+WHERE
+{
+ <http://example.org/myfoaf/I> foaf:knows ?person .
+ SERVICE <http://people.example.org/sparql> {
+ ?person foaf:name ?name . }
+}
+# Error message: Unknown namespace prefix : foaf
diff --git a/test/translate_algebra/test_data/test_other__values.txt b/test/translate_algebra/test_data/test_other__values.txt
new file mode 100644
index 00000000..ba0bea64
--- /dev/null
+++ b/test/translate_algebra/test_data/test_other__values.txt
@@ -0,0 +1,14 @@
+PREFIX dc: <http://purl.org/dc/elements/1.1/>
+PREFIX : <http://example.org/book/>
+PREFIX ns: <http://example.org/ns#>
+
+SELECT ?book ?title ?price
+{
+ ?book dc:title ?title ;
+ ns:price ?price .
+ VALUES (?book ?title)
+ {
+ (UNDEF "SPARQL Tutorial")
+ (:book2 UNDEF)
+ }
+} \ No newline at end of file
diff --git a/test/translate_algebra/test_data/test_property_path__alternative_path.txt b/test/translate_algebra/test_data/test_property_path__alternative_path.txt
new file mode 100644
index 00000000..8015f1c2
--- /dev/null
+++ b/test/translate_algebra/test_data/test_property_path__alternative_path.txt
@@ -0,0 +1,7 @@
+PREFIX dc: <http://purl.org/dc/elements/1.1/>
+PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
+PREFIX : <http://example.org/book/>
+
+select * where {
+ { :book1 dc:title|rdfs:label ?displayString }
+} limit 100
diff --git a/test/translate_algebra/test_data/test_property_path__inverse_path.txt b/test/translate_algebra/test_data/test_property_path__inverse_path.txt
new file mode 100644
index 00000000..8f7dde78
--- /dev/null
+++ b/test/translate_algebra/test_data/test_property_path__inverse_path.txt
@@ -0,0 +1,11 @@
+PREFIX dc: <http://purl.org/dc/elements/1.1/>
+PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
+PREFIX : <http://example.org/book/>
+PREFIX foaf: <http://xmlns.com/foaf/0.1/>
+
+select * where {
+ {
+ ?x foaf:mbox <mailto:alice@example> .
+ ?x ^foaf:knows/foaf:name ?name .
+ }
+} limit 100
diff --git a/test/translate_algebra/test_data/test_property_path__negated_property_set.txt b/test/translate_algebra/test_data/test_property_path__negated_property_set.txt
new file mode 100644
index 00000000..b5bf2b87
--- /dev/null
+++ b/test/translate_algebra/test_data/test_property_path__negated_property_set.txt
@@ -0,0 +1,7 @@
+# Throws a type error when n3() of the NegatedPath class is called. Probably the n3() method should be fixed
+PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
+select * where
+{
+ ?x !(rdf:type|^rdf:type) ?y
+}
+limit 100 \ No newline at end of file
diff --git a/test/translate_algebra/test_data/test_property_path__one_or_more_path.txt b/test/translate_algebra/test_data/test_property_path__one_or_more_path.txt
new file mode 100644
index 00000000..0a52e1d2
--- /dev/null
+++ b/test/translate_algebra/test_data/test_property_path__one_or_more_path.txt
@@ -0,0 +1,7 @@
+PREFIX foaf: <http://xmlns.com/foaf/0.1/>
+select * where
+{
+ ?x foaf:mbox <mailto:alice@example> .
+ ?x foaf:knows+/foaf:name ?name .
+}
+limit 100 \ No newline at end of file
diff --git a/test/translate_algebra/test_data/test_property_path__predicate_path.txt b/test/translate_algebra/test_data/test_property_path__predicate_path.txt
new file mode 100644
index 00000000..8b2c8318
--- /dev/null
+++ b/test/translate_algebra/test_data/test_property_path__predicate_path.txt
@@ -0,0 +1 @@
+# Just an IRI. Nothing to test. Also no examples found here: https://www.w3.org/TR/sparql11-query/ \ No newline at end of file
diff --git a/test/translate_algebra/test_data/test_property_path__sequence_path.txt b/test/translate_algebra/test_data/test_property_path__sequence_path.txt
new file mode 100644
index 00000000..b0a87575
--- /dev/null
+++ b/test/translate_algebra/test_data/test_property_path__sequence_path.txt
@@ -0,0 +1,7 @@
+PREFIX foaf: <http://xmlns.com/foaf/0.1/>
+select * where
+ {
+ ?x foaf:mbox <mailto:alice@example> .
+ ?x foaf:knows/foaf:knows/foaf:name ?name .
+ }
+limit 100 \ No newline at end of file
diff --git a/test/translate_algebra/test_data/test_property_path__zero_or_more_path.txt b/test/translate_algebra/test_data/test_property_path__zero_or_more_path.txt
new file mode 100644
index 00000000..4c53bcf7
--- /dev/null
+++ b/test/translate_algebra/test_data/test_property_path__zero_or_more_path.txt
@@ -0,0 +1,7 @@
+PREFIX foaf: <http://xmlns.com/foaf/0.1/>
+select * where
+{
+ ?x foaf:mbox <mailto:alice@example> .
+ ?x foaf:knows*/foaf:name ?name .
+}
+limit 100 \ No newline at end of file
diff --git a/test/translate_algebra/test_data/test_property_path__zero_or_one_path.txt b/test/translate_algebra/test_data/test_property_path__zero_or_one_path.txt
new file mode 100644
index 00000000..e1adfabc
--- /dev/null
+++ b/test/translate_algebra/test_data/test_property_path__zero_or_one_path.txt
@@ -0,0 +1,7 @@
+PREFIX foaf: <http://xmlns.com/foaf/0.1/>
+select * where
+{
+ ?x foaf:mbox <mailto:alice@example> .
+ ?x foaf:knows?/foaf:name ?name .
+}
+limit 100 \ No newline at end of file
diff --git a/test/translate_algebra/test_data/test_solution_modifiers__distinct.txt b/test/translate_algebra/test_data/test_solution_modifiers__distinct.txt
new file mode 100644
index 00000000..8aad6f56
--- /dev/null
+++ b/test/translate_algebra/test_data/test_solution_modifiers__distinct.txt
@@ -0,0 +1,6 @@
+PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
+select distinct ?x (count(distinct ?y) as ?cnt) where
+{
+ ?x (rdf:type|^rdf:type) ?y
+}
+group by ?x \ No newline at end of file
diff --git a/test/translate_algebra/test_data/test_solution_modifiers__order_by.txt b/test/translate_algebra/test_data/test_solution_modifiers__order_by.txt
new file mode 100644
index 00000000..09e2aa8a
--- /dev/null
+++ b/test/translate_algebra/test_data/test_solution_modifiers__order_by.txt
@@ -0,0 +1,7 @@
+PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
+select distinct ?x (count(distinct ?y) as ?cnt) where
+{
+ ?x (rdf:type|^rdf:type) ?y
+}
+group by ?x
+order by ?x \ No newline at end of file
diff --git a/test/translate_algebra/test_data/test_solution_modifiers__project.txt b/test/translate_algebra/test_data/test_solution_modifiers__project.txt
new file mode 100644
index 00000000..04ed2f2d
--- /dev/null
+++ b/test/translate_algebra/test_data/test_solution_modifiers__project.txt
@@ -0,0 +1,2 @@
+# Just the projection of variables within the select clause. It is comprised in every other test
+# Nothing to test explicitly \ No newline at end of file
diff --git a/test/translate_algebra/test_data/test_solution_modifiers__reduced.txt b/test/translate_algebra/test_data/test_solution_modifiers__reduced.txt
new file mode 100644
index 00000000..c6463015
--- /dev/null
+++ b/test/translate_algebra/test_data/test_solution_modifiers__reduced.txt
@@ -0,0 +1,7 @@
+PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
+select reduced ?x (count(distinct ?y) as ?cnt) where
+{
+ ?x (rdf:type|^rdf:type) ?y
+}
+group by ?x
+order by ?x \ No newline at end of file
diff --git a/test/translate_algebra/test_data/test_solution_modifiers__slice.txt b/test/translate_algebra/test_data/test_solution_modifiers__slice.txt
new file mode 100644
index 00000000..a99481a4
--- /dev/null
+++ b/test/translate_algebra/test_data/test_solution_modifiers__slice.txt
@@ -0,0 +1,8 @@
+PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
+select reduced ?x (count(distinct ?y) as ?cnt) where
+{
+ ?x (rdf:type|^rdf:type) ?y
+}
+group by ?x
+order by ?x
+limit 50 \ No newline at end of file
diff --git a/test/translate_algebra/test_data/test_solution_modifiers__to_multiset.txt b/test/translate_algebra/test_data/test_solution_modifiers__to_multiset.txt
new file mode 100644
index 00000000..09ed78aa
--- /dev/null
+++ b/test/translate_algebra/test_data/test_solution_modifiers__to_multiset.txt
@@ -0,0 +1,11 @@
+PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
+select reduced ?x (count(distinct ?y) as ?cnt) where
+{
+ ?x (rdf:type|^rdf:type) ?y
+ {
+ ?y ?a ?z
+ }
+}
+group by ?x
+order by ?x
+limit 50 \ No newline at end of file
diff --git a/tox.ini b/tox.ini
index b5b588fb..2fd97716 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,6 +1,6 @@
[tox]
envlist =
- py35,py36,py37,py38
+ py36,py37,py38,py39
[testenv]
setenv =
@@ -8,15 +8,10 @@ setenv =
commands =
{envpython} setup.py clean --all
{envpython} setup.py build
- {envpython} run_tests.py --with-xunit
+ {envpython} run_tests.py
deps =
- nose
- isodate
- html5lib
- pyparsing
- bsddb3
- six
- SPARQLWrapper>=1.6.2
+ -rrequirements.txt
+ -rrequirements.dev.txt
[testenv:cover]
basepython =
@@ -25,12 +20,7 @@ commands =
{envpython} run_tests.py --where=./ \
--with-coverage --cover-html --cover-html-dir=./coverage \
--cover-package=rdflib --cover-inclusive
+
deps =
- coverage
- nose
- isodate
- html5lib
- pyparsing
- bsddb3
- six
- SPARQLWrapper>=1.6.2
+ -rrequirements.txt
+ -rrequirements.dev.txt