diff --git a/rdflib/plugins/stores/sparqlwrapper.py b/rdflib/plugins/stores/sparqlconnector.py similarity index 83% rename from rdflib/plugins/stores/sparqlwrapper.py rename to rdflib/plugins/stores/sparqlconnector.py index c3fbb946f..ee981419a 100644 --- a/rdflib/plugins/stores/sparqlwrapper.py +++ b/rdflib/plugins/stores/sparqlconnector.py @@ -11,16 +11,20 @@ log = logging.getLogger(__name__) -class SPARQLWrapperException(Exception): +class SPARQLConnectorException(Exception): pass - +# TODO: Pull in these from the result implementation plugins? _response_mime_types = { 'xml': 'application/sparql-results+xml, application/rdf+xml', + 'json': 'application/sparql-results+json', + 'csv': 'text/csv', + 'tsv': 'text/tab-separated-values', + 'application/rdf+xml': 'application/rdf+xml', } -class SPARQLWrapper(object): +class SPARQLConnector(object): """ this class deals with nitty gritty details of talking to a SPARQL server @@ -56,14 +60,14 @@ def method(self): @method.setter def method(self, method): if method not in ('GET', 'POST'): - raise SPARQLWrapperException('Method must be "GET" or "POST"') + raise SPARQLConnectorException('Method must be "GET" or "POST"') self._method = method def query(self, query, default_graph=None): if not self.query_endpoint: - raise SPARQLWrapperException("Query endpoint not set!") + raise SPARQLConnectorException("Query endpoint not set!") params = {'query': query} if default_graph: @@ -85,7 +89,7 @@ def query(self, query, default_graph=None): elif self.method == 'POST': args['data'] = params else: - raise SPARQLWrapperException("Unknown method %s" % self.method) + raise SPARQLConnectorException("Unknown method %s" % self.method) res = self.session.request(self.method, **args) @@ -95,7 +99,7 @@ def query(self, query, default_graph=None): def update(self, update, default_graph=None): if not self.update_endpoint: - raise SPARQLWrapperException("Query endpoint not set!") + raise SPARQLConnectorException("Query endpoint not set!") params = 
{} diff --git a/rdflib/plugins/stores/sparqlstore.py b/rdflib/plugins/stores/sparqlstore.py index 42e8d2a20..5f7446ced 100644 --- a/rdflib/plugins/stores/sparqlstore.py +++ b/rdflib/plugins/stores/sparqlstore.py @@ -14,7 +14,7 @@ import re import collections -from .sparqlwrapper import SPARQLWrapper +from .sparqlconnector import SPARQLConnector from rdflib.plugins.stores.regexmatching import NATIVE_REGEX @@ -37,11 +37,10 @@ def _node_to_sparql(node): return node.n3() -class SPARQLStore(SPARQLWrapper, Store): - """ - An RDFLib store around a SPARQL endpoint +class SPARQLStore(SPARQLConnector, Store): + """An RDFLib store around a SPARQL endpoint - This is in theory context-aware and should work as expected + This is context-aware and should work as expected when a context is specified. For ConjunctiveGraphs, reading is done from the "default graph". Exactly @@ -51,7 +50,7 @@ class SPARQLStore(SPARQLWrapper, Store): motivated by the SPARQL 1.1. Fuseki/TDB has a flag for specifying that the default graph - is the union of all graphs (tdb:unionDefaultGraph in the Fuseki config). + is the union of all graphs (``tdb:unionDefaultGraph`` in the Fuseki config). .. warning:: By default the SPARQL Store does not support blank-nodes! @@ -61,9 +60,9 @@ class SPARQLStore(SPARQLWrapper, Store): See http://www.w3.org/TR/sparql11-query/#BGPsparqlBNodes - You can make use of such extensions through the node_to_sparql and - node_from_result arguments. For example if you want to transform - BNode('0001') into "", you can use a function like this: + You can make use of such extensions through the ``node_to_sparql`` + argument. For example if you want to transform BNode('0001') into + "", you can use a function like this: >>> def my_bnode_ext(node): ... if isinstance(node, BNode): @@ -72,6 +71,22 @@ class SPARQLStore(SPARQLWrapper, Store): >>> store = SPARQLStore('http://dbpedia.org/sparql', ... 
node_to_sparql=my_bnode_ext) + You can request a particular result serialization with the + ``returnFormat`` parameter. This is a string that must have a + matching plugin registered. Built in is support for ``xml``, + ``json``, ``csv``, ``tsv`` and ``application/rdf+xml``. + + The underlying SPARQLConnector builds on the requests library. + Any extra kwargs passed to the SPARQLStore connector are passed to + requests when doing HTTP calls. I.e. you have full control of + cookies/auth/headers. + + For example: + + >>> store = SPARQLStore('...my endpoint ...', auth=('user','pass')) + + will use HTTP basic auth. + """ formula_aware = False transaction_aware = False @@ -83,11 +98,11 @@ def __init__(self, sparql11=True, context_aware=True, node_to_sparql=_node_to_sparql, returnFormat='xml', - **sparqlwrapper_kwargs): + **sparqlconnector_kwargs): """ """ super(SPARQLStore, self).__init__( - endpoint, returnFormat=returnFormat, **sparqlwrapper_kwargs) + endpoint, returnFormat=returnFormat, **sparqlconnector_kwargs) self.node_to_sparql = node_to_sparql self.nsBindings = {} @@ -353,7 +368,7 @@ def _is_contextual(self, graph): return graph.identifier != DATASET_DEFAULT_GRAPH_ID def close(self, commit_pending_transaction=None): - SPARQLWrapper.close(self) + SPARQLConnector.close(self) class SPARQLUpdateStore(SPARQLStore): @@ -588,7 +603,7 @@ def _update(self, update): self._updates += 1 - SPARQLWrapper.update(self, update) + SPARQLConnector.update(self, update) def update(self, query, initNs={}, diff --git a/requirements.txt b/requirements.txt index a481da766..215e257a8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,5 +2,6 @@ flake8 html5lib isodate pyparsing +requests six doctest-ignore-unicode