blob: 6e60bfe5d147c25953cb18adbf3c4d296120da1f (
plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
|
# Test of various things you can do with an RDF schema for definitions.
# Using SurfRDF: https://github.com/cosminbasca/surfrdf
import rdflib
import surf
import yaml
import os
import warnings
import parse
import validate
# Which backend to load the definitions into.
DATABASE = 'memory'
#DATABASE = 'virtuoso'

# Register the example-schema namespace so surf exposes it as
# surf.ns.BASEROCK below.
surf.ns.register(baserock='http://baserock.org/definitions/example-schema#')

if DATABASE == 'memory':
    # In-memory rdflib store, used for testing.
    store = surf.Store(reader='rdflib', writer='rdflib',
                       rdflib_store='IOMemory')
elif DATABASE == 'virtuoso':
    # For importing into a Virtuoso database.
    # See: https://pythonhosted.org/SuRF/integration/virtuoso.html
    # Note that you need to work around a bug in Virtuoso in order to
    # use this. See https://github.com/RDFLib/rdflib/issues/298 for more
    # info. Virtuoso expects the url parameter in the SPARQL update
    # request to be called 'query' rather than 'update'. You can change
    # this on line 488 of SPARQLWrapper/Wrapper.py of
    # <https://github.com/RDFLib/sparqlwrapper/> as a nasty workaround.
    store = surf.Store(reader='sparql_protocol',
                       writer='sparql_protocol',
                       endpoint='http://localhost:8890/sparql',
                       default_context='http://example.com')

session = surf.Session(store)

# Import every morphology file into the store as RDF resources.
parse.load_all_morphologies(session, store)

# Load the OWL schema describing the definitions format.
schema = rdflib.Graph()
schema.parse("baserock-owl-schema.turtle", format="turtle")

# Grab the raw rdflib graph backing the store so it can be checked
# against the schema. Only works for the 'memory' database, but I don't
# really care any more.
all_data = store.reader.graph

validate.check_data_against_schema(
    data=all_data,
    schema=schema)
def serialize_to_json_ld(surflib_resource):
    """Render a SuRF resource's RDF graph as JSON-LD text.

    The @vocab entry in the context keeps property names short in the
    output; @language tags plain literals as English.
    """
    json_ld_context = {
        "@vocab": "http://baserock.org/definitions/example-schema#",
        "@language": "en"
    }
    # Serialising as 'json-ld' requires the rdflib-jsonld Python module.
    return surflib_resource.graph().serialize(
        format='json-ld', indent=4, context=json_ld_context)
# Dump every Cluster resource in the store as JSON-LD.
# NOTE: print(x) with a single argument behaves identically under
# Python 2's print statement and Python 3's print function, unlike the
# original `print x` form, which is a syntax error on Python 3.
Cluster = session.get_class(surf.ns.BASEROCK.Cluster)
for cluster in Cluster.all():
    # load() fetches the resource's full attribute set from the store
    # before serialisation.
    cluster.load()
    print(serialize_to_json_ld(cluster))
|