MARMOTTA-259: Initial Implementation of Sparql ServiceDescription
Project: http://git-wip-us.apache.org/repos/asf/marmotta/repo Commit: http://git-wip-us.apache.org/repos/asf/marmotta/commit/d2c2193d Tree: http://git-wip-us.apache.org/repos/asf/marmotta/tree/d2c2193d Diff: http://git-wip-us.apache.org/repos/asf/marmotta/diff/d2c2193d Branch: refs/heads/develop Commit: d2c2193dce6034cf9628062c8827970fdad1ed2f Parents: 74bf1bf Author: Jakob Frank <[email protected]> Authored: Tue Dec 10 15:48:01 2013 +0100 Committer: Jakob Frank <[email protected]> Committed: Tue Dec 10 15:48:01 2013 +0100 ---------------------------------------------------------------------- .../marmotta/commons/vocabulary/SPARQL_SD.java | 345 +++++++++++++++++++ .../sparql/api/sparql/SparqlService.java | 16 +- .../services/sparql/SparqlServiceImpl.java | 187 ++++++++-- .../sparql/webservices/SparqlWebService.java | 64 +++- 4 files changed, 577 insertions(+), 35 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/marmotta/blob/d2c2193d/commons/marmotta-commons/src/main/java/org/apache/marmotta/commons/vocabulary/SPARQL_SD.java ---------------------------------------------------------------------- diff --git a/commons/marmotta-commons/src/main/java/org/apache/marmotta/commons/vocabulary/SPARQL_SD.java b/commons/marmotta-commons/src/main/java/org/apache/marmotta/commons/vocabulary/SPARQL_SD.java new file mode 100644 index 0000000..a83efe4 --- /dev/null +++ b/commons/marmotta-commons/src/main/java/org/apache/marmotta/commons/vocabulary/SPARQL_SD.java @@ -0,0 +1,345 @@ +package org.apache.marmotta.commons.vocabulary; + +import org.openrdf.model.URI; +import org.openrdf.model.ValueFactory; +import org.openrdf.model.impl.ValueFactoryImpl; + +/** + * Namespace SPARQL_SD - SparqlServiceDescription. 
+ * @see http://www.w3.org/TR/sparql11-service-description/ + */ +public class SPARQL_SD { + + public static final String NAMESPACE = "http://www.w3.org/ns/sparql-service-description#"; + + public static final String PREFIX = "sd"; + + /** + * An instance of sd:Aggregate represents an aggregate that may be used in a + * SPARQL aggregate query (for instance in a HAVING clause or SELECT + * expression) besides the standard list of supported aggregates COUNT, SUM, + * MIN, MAX, AVG, GROUP_CONCAT, and SAMPLE. + */ + public static final URI Aggregate; + + /** + * sd:BasicFederatedQuery, when used as the object of the sd:feature + * property, indicates that the SPARQL service supports basic federated + * query using the SERVICE keyword as defined by SPARQL 1.1 Federation + * Extensions. + */ + public static final URI BasicFederatedQuery; + + /** + * An instance of sd:Dataset represents a RDF Dataset comprised of a default + * graph and zero or more named graphs. + */ + public static final URI Dataset; + + /** + * sd:DereferencesURIs, when used as the object of the sd:feature property, + * indicates that a SPARQL service will dereference URIs used in FROM/FROM + * NAMED and USING/USING NAMED clauses and use the resulting RDF in the + * dataset during query evaluation. + */ + public static final URI DereferencesURIs; + + /** + * sd:EmptyGraphs, when used as the object of the sd:feature property, + * indicates that the underlying graph store supports empty graphs. A graph + * store that supports empty graphs MUST NOT remove graphs that are left + * empty after triples are removed from them. + */ + public static final URI EmptyGraphs; + + /** + * An instance of sd:EntailmentProfile represents a profile of an entailment + * regime. An entailment profile MAY impose restrictions on what constitutes + * valid RDF with respect to entailment. 
+ */ + public static final URI EntailmentProfile; + + /** + * An instance of sd:EntailmentRegime represents an entailment regime used + * in basic graph pattern matching (as described by SPARQL 1.1 Query + * Language). + */ + public static final URI EntailmentRegime; + + /** + * An instance of sd:Feature represents a feature of a SPARQL service. + * Specific types of features include functions, aggregates, languages, and + * entailment regimes and profiles. This document defines five instances of + * sd:Feature: sd:DereferencesURIs, sd:UnionDefaultGraph, + * sd:RequiresDataset, sd:EmptyGraphs, and sd:BasicFederatedQuery. + */ + public static final URI Feature; + + /** + * An instance of sd:Function represents a function that may be used in a + * SPARQL SELECT expression or a FILTER, HAVING, GROUP BY, ORDER BY, or BIND + * clause. + */ + public static final URI Function; + + /** + * An instance of sd:Graph represents the description of an RDF graph. + */ + public static final URI Graph; + + /** + * An instance of sd:GraphCollection represents a collection of zero or more + * named graph descriptions. Each named graph description belonging to an + * sd:GraphCollection MUST be linked with the sd:namedGraph predicate. + */ + public static final URI GraphCollection; + + /** + * An instance of sd:Language represents one of the SPARQL languages, + * including specific configurations providing particular features or + * extensions. This document defines three instances of sd:Language: + * sd:SPARQL10Query, sd:SPARQL11Query, and sd:SPARQL11Update. + */ + public static final URI Language; + + /** + * An instance of sd:NamedGraph represents a named graph having a name (via + * sd:name) and an optional graph description (via sd:graph). 
+ */ + public static final URI NamedGraph; + + /** + * sd:RequiresDataset, when used as the object of the sd:feature property, + * indicates that the SPARQL service requires an explicit dataset + * declaration (based on either FROM/FROM NAMED clauses in a query, + * USING/USING NAMED clauses in an update, or the appropriate SPARQL + * Protocol parameters). + */ + public static final URI RequiresDataset; + + /** + * sd:SPARQL10Query is an sd:Language representing the SPARQL 1.0 Query + * language. + */ + public static final URI SPARQL10Query; + + /** + * sd:SPARQL11Query is an sd:Language representing the SPARQL 1.1 Query + * language. + */ + public static final URI SPARQL11Query; + + /** + * sd:SPARQLUpdate is an sd:Language representing the SPARQL 1.1 Update + * language. + */ + public static final URI SPARQL11Update; + + /** + * An instance of sd:Service represents a SPARQL service made available via + * the SPARQL Protocol. + */ + public static final URI Service; + + /** + * sd:UnionDefaultGraph, when used as the object of the sd:feature property, + * indicates that the default graph of the dataset used during query and + * update evaluation (when an explicit dataset is not specified) is + * comprised of the union of all the named graphs in that dataset. + */ + public static final URI UnionDefaultGraph; + + /** + * Relates an instance of sd:Service to a description of the graphs which + * are allowed in the construction of a dataset either via the SPARQL + * Protocol, with FROM/FROM NAMED clauses in a query, or with USING/USING + * NAMED in an update request, if the service limits the scope of dataset + * construction. + */ + public static final URI availableGraphs; + + /** + * Relates an instance of sd:Service to a description of the default dataset + * available when no explicit dataset is specified in the query, update + * request or via protocol parameters. 
+ */ + public static final URI defaultDataset; + + /** + * Relates an instance of sd:Service with a resource representing an + * entailment regime used for basic graph pattern matching. This property is + * intended for use when a single entailment regime by default applies to + * all graphs in the default dataset of the service. In situations where a + * different entailment regime applies to a specific graph in the dataset, + * the sd:entailmentRegime property should be used to indicate this fact in + * the description of that graph. + */ + public static final URI defaultEntailmentRegime; + + /** + * Relates an instance of sd:Dataset to the description of its default + * graph. + */ + public static final URI defaultGraph; + + /** + * Relates an instance of sd:Service with a resource representing a + * supported profile of the default entailment regime (as declared by + * sd:defaultEntailmentRegime). + */ + public static final URI defaultSupportedEntailmentProfile; + + /** + * The SPARQL endpoint of an sd:Service that implements the SPARQL Protocol + * service. The object of the sd:endpoint property is an IRI. + */ + public static final URI endpoint; + + /** + * Relates a named graph description with a resource representing an + * entailment regime used for basic graph pattern matching over that graph. + */ + public static final URI entailmentRegime; + + /** + * Relates an instance of sd:Service to an aggregate that may be used in a + * SPARQL aggregate query (for instance in a HAVING clause or SELECT + * expression) besides the standard list of supported aggregates COUNT, SUM, + * MIN, MAX, AVG, GROUP_CONCAT, and SAMPLE + */ + public static final URI extensionAggregate; + + /** + * Relates an instance of sd:Service to a function that may be used in a + * SPARQL SELECT expression or a FILTER, HAVING, GROUP BY, ORDER BY, or BIND + * clause. 
+ */ + public static final URI extensionFunction; + + /** + * Relates an instance of sd:Service with a resource representing a + * supported feature. + */ + public static final URI feature; + + /** + * Relates a named graph to its graph description. + */ + public static final URI graph; + + /** + * Relates an instance of sd:Service to a format that is supported for + * parsing RDF input; for example, via a SPARQL 1.1 Update LOAD statement, + * or when URIs are dereferenced in FROM/FROM NAMED/USING/USING NAMED + * clauses. + */ + public static final URI inputFormat; + + /** + * Relates an instance of sd:Service to a resource representing an + * implemented extension to the SPARQL Query or Update language. + */ + public static final URI languageExtension; + + /** + * Relates a named graph to the name by which it may be referenced in a + * FROM/FROM NAMED clause. The object of the sd:name property is an IRI. + */ + public static final URI name; + + /** + * Relates an instance of sd:GraphCollection (or its subclass sd:Dataset) to + * the description of one of its named graphs. The description of such a + * named graph MUST include the sd:name property and MAY include the + * sd:graph property. + */ + public static final URI namedGraph; + + /** + * Relates an instance of sd:Service to a resource representing an + * implemented feature that extends the SPARQL Query or Update language and + * that is accessed by using the named property. + */ + public static final URI propertyFeature; + + /** + * Relates an instance of sd:Service to a format that is supported for + * serializing query results. + */ + public static final URI resultFormat; + + /** + * Relates a named graph description with a resource representing a + * supported profile of the entailment regime (as declared by + * sd:entailmentRegime) used for basic graph pattern matching over that + * graph. 
+ */ + public static final URI supportedEntailmentProfile; + + /** + * Relates an instance of sd:Service to a SPARQL language (e.g. Query and + * Update) that it implements. + */ + public static final URI supportedLanguage; + + static { + ValueFactory factory = ValueFactoryImpl.getInstance(); + Aggregate = factory.createURI(SPARQL_SD.NAMESPACE, "Aggregate"); + BasicFederatedQuery = factory.createURI(SPARQL_SD.NAMESPACE, + "BasicFederatedQuery"); + Dataset = factory.createURI(SPARQL_SD.NAMESPACE, "Dataset"); + DereferencesURIs = factory.createURI(SPARQL_SD.NAMESPACE, + "DereferencesURIs"); + EmptyGraphs = factory.createURI(SPARQL_SD.NAMESPACE, "EmptyGraphs"); + EntailmentProfile = factory.createURI(SPARQL_SD.NAMESPACE, + "EntailmentProfile"); + EntailmentRegime = factory.createURI(SPARQL_SD.NAMESPACE, + "EntailmentRegime"); + Feature = factory.createURI(SPARQL_SD.NAMESPACE, "Feature"); + Function = factory.createURI(SPARQL_SD.NAMESPACE, "Function"); + Graph = factory.createURI(SPARQL_SD.NAMESPACE, "Graph"); + GraphCollection = factory.createURI(SPARQL_SD.NAMESPACE, + "GraphCollection"); + Language = factory.createURI(SPARQL_SD.NAMESPACE, "Language"); + NamedGraph = factory.createURI(SPARQL_SD.NAMESPACE, "NamedGraph"); + RequiresDataset = factory.createURI(SPARQL_SD.NAMESPACE, + "RequiresDataset"); + SPARQL10Query = factory.createURI(SPARQL_SD.NAMESPACE, "SPARQL10Query"); + SPARQL11Query = factory.createURI(SPARQL_SD.NAMESPACE, "SPARQL11Query"); + SPARQL11Update = factory.createURI(SPARQL_SD.NAMESPACE, + "SPARQL11Update"); + Service = factory.createURI(SPARQL_SD.NAMESPACE, "Service"); + UnionDefaultGraph = factory.createURI(SPARQL_SD.NAMESPACE, + "UnionDefaultGraph"); + availableGraphs = factory.createURI(SPARQL_SD.NAMESPACE, + "availableGraphs"); + defaultDataset = factory.createURI(SPARQL_SD.NAMESPACE, + "defaultDataset"); + defaultEntailmentRegime = factory.createURI(SPARQL_SD.NAMESPACE, + "defaultEntailmentRegime"); + defaultGraph = 
factory.createURI(SPARQL_SD.NAMESPACE, "defaultGraph"); + defaultSupportedEntailmentProfile = factory.createURI( + SPARQL_SD.NAMESPACE, "defaultSupportedEntailmentProfile"); + endpoint = factory.createURI(SPARQL_SD.NAMESPACE, "endpoint"); + entailmentRegime = factory.createURI(SPARQL_SD.NAMESPACE, + "entailmentRegime"); + extensionAggregate = factory.createURI(SPARQL_SD.NAMESPACE, + "extensionAggregate"); + extensionFunction = factory.createURI(SPARQL_SD.NAMESPACE, + "extensionFunction"); + feature = factory.createURI(SPARQL_SD.NAMESPACE, "feature"); + graph = factory.createURI(SPARQL_SD.NAMESPACE, "graph"); + inputFormat = factory.createURI(SPARQL_SD.NAMESPACE, "inputFormat"); + languageExtension = factory.createURI(SPARQL_SD.NAMESPACE, + "languageExtension"); + name = factory.createURI(SPARQL_SD.NAMESPACE, "name"); + namedGraph = factory.createURI(SPARQL_SD.NAMESPACE, "namedGraph"); + propertyFeature = factory.createURI(SPARQL_SD.NAMESPACE, + "propertyFeature"); + resultFormat = factory.createURI(SPARQL_SD.NAMESPACE, "resultFormat"); + supportedEntailmentProfile = factory.createURI(SPARQL_SD.NAMESPACE, + "supportedEntailmentProfile"); + supportedLanguage = factory.createURI(SPARQL_SD.NAMESPACE, + "supportedLanguage"); + } +} http://git-wip-us.apache.org/repos/asf/marmotta/blob/d2c2193d/platform/marmotta-sparql/src/main/java/org/apache/marmotta/platform/sparql/api/sparql/SparqlService.java ---------------------------------------------------------------------- diff --git a/platform/marmotta-sparql/src/main/java/org/apache/marmotta/platform/sparql/api/sparql/SparqlService.java b/platform/marmotta-sparql/src/main/java/org/apache/marmotta/platform/sparql/api/sparql/SparqlService.java index 67984eb..d6bda04 100644 --- a/platform/marmotta-sparql/src/main/java/org/apache/marmotta/platform/sparql/api/sparql/SparqlService.java +++ b/platform/marmotta-sparql/src/main/java/org/apache/marmotta/platform/sparql/api/sparql/SparqlService.java @@ -35,7 +35,8 @@ import 
org.openrdf.query.resultio.BooleanQueryResultWriter; import org.openrdf.query.resultio.QueryResultWriter; import org.openrdf.query.resultio.TupleQueryResultWriter; import org.openrdf.repository.RepositoryException; -import org.openrdf.rio.RDFHandler; +import org.openrdf.rio.RDFHandlerException; +import org.openrdf.rio.RDFWriter; /** * Add file description here! @@ -140,5 +141,18 @@ public interface SparqlService { */ @Deprecated void query(QueryLanguage queryLanguage, String query, QueryResultWriter writer, int timeoutInSeconds) throws MarmottaException, MalformedQueryException, QueryEvaluationException, TimeoutException; + + /** + * Write a SparqlServiceDescription of the Service to the provided {@link RDFWriter}. + * + * @param writer the {@link RDFWriter} to write the description to + * @param string the URI of the SPARQL Endpoint + * @param isUpdate if the endpoint is a update or select/query endpoint + * + * @see http://www.w3.org/TR/sparql11-service-description/ + */ + void createServiceDescription(RDFWriter writer, String string, boolean isUpdate) throws RDFHandlerException; + + } http://git-wip-us.apache.org/repos/asf/marmotta/blob/d2c2193d/platform/marmotta-sparql/src/main/java/org/apache/marmotta/platform/sparql/services/sparql/SparqlServiceImpl.java ---------------------------------------------------------------------- diff --git a/platform/marmotta-sparql/src/main/java/org/apache/marmotta/platform/sparql/services/sparql/SparqlServiceImpl.java b/platform/marmotta-sparql/src/main/java/org/apache/marmotta/platform/sparql/services/sparql/SparqlServiceImpl.java index bdefc8b..c3dd7b1 100644 --- a/platform/marmotta-sparql/src/main/java/org/apache/marmotta/platform/sparql/services/sparql/SparqlServiceImpl.java +++ b/platform/marmotta-sparql/src/main/java/org/apache/marmotta/platform/sparql/services/sparql/SparqlServiceImpl.java @@ -17,12 +17,16 @@ */ package org.apache.marmotta.platform.sparql.services.sparql; +import info.aduna.lang.FileFormat; + import 
java.io.IOException; import java.io.OutputStream; import java.util.HashMap; +import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; @@ -36,6 +40,8 @@ import javax.annotation.PostConstruct; import javax.enterprise.context.ApplicationScoped; import javax.inject.Inject; +import org.apache.commons.lang3.StringUtils; +import org.apache.marmotta.commons.vocabulary.SPARQL_SD; import org.apache.marmotta.platform.core.api.config.ConfigurationService; import org.apache.marmotta.platform.core.api.templating.TemplatingService; import org.apache.marmotta.platform.core.api.triplestore.SesameService; @@ -44,10 +50,15 @@ import org.apache.marmotta.platform.core.exception.MarmottaException; import org.apache.marmotta.platform.sparql.api.sparql.QueryType; import org.apache.marmotta.platform.sparql.api.sparql.SparqlService; import org.apache.marmotta.platform.sparql.services.sparqlio.rdf.SPARQLGraphResultWriter; -import org.apache.marmotta.platform.sparql.services.sparqlio.sparqlhtml.SPARQLResultsHTMLFormat; import org.apache.marmotta.platform.sparql.services.sparqlio.sparqlhtml.SPARQLHTMLSettings; import org.apache.marmotta.platform.sparql.webservices.SparqlWebService; +import org.openrdf.model.BNode; +import org.openrdf.model.Resource; +import org.openrdf.model.URI; import org.openrdf.model.Value; +import org.openrdf.model.ValueFactory; +import org.openrdf.model.impl.ValueFactoryImpl; +import org.openrdf.model.vocabulary.RDF; import org.openrdf.query.Binding; import org.openrdf.query.BindingSet; import org.openrdf.query.BooleanQuery; @@ -70,17 +81,24 @@ import org.openrdf.query.parser.QueryParser; import org.openrdf.query.parser.QueryParserUtil; import org.openrdf.query.resultio.BooleanQueryResultFormat; import org.openrdf.query.resultio.BooleanQueryResultWriter; +import 
org.openrdf.query.resultio.QueryResultFormat; import org.openrdf.query.resultio.QueryResultIO; import org.openrdf.query.resultio.QueryResultWriter; import org.openrdf.query.resultio.TupleQueryResultFormat; import org.openrdf.query.resultio.TupleQueryResultWriter; +import org.openrdf.repository.Repository; import org.openrdf.repository.RepositoryConnection; import org.openrdf.repository.RepositoryException; +import org.openrdf.repository.RepositoryResult; +import org.openrdf.repository.sail.SailRepository; +import org.openrdf.repository.sail.SailRepositoryConnection; import org.openrdf.rio.RDFFormat; -import org.openrdf.rio.RDFHandler; import org.openrdf.rio.RDFHandlerException; +import org.openrdf.rio.RDFWriter; +import org.openrdf.rio.RDFWriterRegistry; import org.openrdf.rio.Rio; import org.openrdf.rio.UnsupportedRDFormatException; +import org.openrdf.sail.memory.MemoryStore; import org.slf4j.Logger; /** @@ -93,6 +111,28 @@ import org.slf4j.Logger; public class SparqlServiceImpl implements SparqlService { /** + * @deprecated beginning with Sesame 2.8, use {@link RDFFormat#getStandardURI()} or {@link QueryResultFormat#getStandardURI()} + */ + @Deprecated + private static final Map<FileFormat, String> w3cFormatID = new HashMap<FileFormat, String>() {{ + put(RDFFormat.JSONLD, "http://www.w3.org/ns/formats/JSON-LD"); + put(RDFFormat.N3, "http://www.w3.org/ns/formats/N3"); + put(RDFFormat.NTRIPLES, "http://www.w3.org/ns/formats/N-Triples"); + put(RDFFormat.NQUADS, "http://www.w3.org/ns/formats/N-Quads"); + put(RDFFormat.RDFA, "http://www.w3.org/ns/formats/RDFa"); + put(RDFFormat.RDFJSON, "http://www.w3.org/ns/formats/RDF_JSON"); + put(RDFFormat.RDFXML, "http://www.w3.org/ns/formats/RDF_XML"); + put(RDFFormat.TURTLE, "http://www.w3.org/ns/formats/Turtle"); + put(RDFFormat.TRIG, "http://www.w3.org/ns/formats/TriG"); + + put(TupleQueryResultFormat.CSV, "http://www.w3.org/ns/formats/SPARQL_Results_CSV"); + put(TupleQueryResultFormat.JSON, 
"http://www.w3.org/ns/formats/SPARQL_Results_JSON"); + put(TupleQueryResultFormat.TSV, "http://www.w3.org/ns/formats/SPARQL_Results_TSV"); + put(TupleQueryResultFormat.SPARQL, "http://www.w3.org/ns/formats/SPARQL_Results_XML"); + }}; + + + /** * Get the seam logger for issuing logging statements. */ @Inject @@ -134,17 +174,17 @@ public class SparqlServiceImpl implements SparqlService { } return sparqlQuery; } - + @Override public QueryType getQueryType(QueryLanguage language, String query) throws MalformedQueryException { - QueryParser parser = QueryParserUtil.createParser(language); - ParsedQuery parsedQuery = parser.parseQuery(query, configurationService.getServerUri() + SparqlWebService.PATH + "/" + SparqlWebService.SELECT); + QueryParser parser = QueryParserUtil.createParser(language); + ParsedQuery parsedQuery = parser.parseQuery(query, configurationService.getServerUri() + SparqlWebService.PATH + "/" + SparqlWebService.SELECT); if (parsedQuery instanceof ParsedTupleQuery) { return QueryType.TUPLE; } else if (parsedQuery instanceof ParsedBooleanQuery) { - return QueryType.BOOL; + return QueryType.BOOL; } else if (parsedQuery instanceof ParsedGraphQuery) { - return QueryType.GRAPH; + return QueryType.GRAPH; } else { return null; } @@ -215,7 +255,7 @@ public class SparqlServiceImpl implements SparqlService { } } } - + @Override @Deprecated public void query(final QueryLanguage queryLanguage, final String query, final QueryResultWriter writer, final int timeoutInSeconds) throws MarmottaException, MalformedQueryException, QueryEvaluationException, TimeoutException { @@ -226,29 +266,29 @@ public class SparqlServiceImpl implements SparqlService { public Boolean call() throws Exception { long start = System.currentTimeMillis(); try { - RepositoryConnection connection = sesameService.getConnection(); - try { - connection.begin(); - Query sparqlQuery = connection.prepareQuery(queryLanguage, query); - - if (sparqlQuery instanceof TupleQuery) { - query((TupleQuery) 
sparqlQuery, (TupleQueryResultWriter)writer); - } else if (sparqlQuery instanceof BooleanQuery) { - query((BooleanQuery) sparqlQuery, (BooleanQueryResultWriter)writer); - } else if (sparqlQuery instanceof GraphQuery) { - query((GraphQuery) sparqlQuery, ((SPARQLGraphResultWriter)writer).getOutputStream(), ((SPARQLGraphResultWriter)writer).getFormat()); - } else { - connection.rollback(); - throw new InvalidArgumentException("SPARQL query type " + sparqlQuery.getClass() + " not supported!"); - } - - connection.commit(); - } catch (Exception ex) { - connection.rollback(); - throw ex; - } finally { - connection.close(); - } + RepositoryConnection connection = sesameService.getConnection(); + try { + connection.begin(); + Query sparqlQuery = connection.prepareQuery(queryLanguage, query); + + if (sparqlQuery instanceof TupleQuery) { + query((TupleQuery) sparqlQuery, (TupleQueryResultWriter)writer); + } else if (sparqlQuery instanceof BooleanQuery) { + query((BooleanQuery) sparqlQuery, (BooleanQueryResultWriter)writer); + } else if (sparqlQuery instanceof GraphQuery) { + query((GraphQuery) sparqlQuery, ((SPARQLGraphResultWriter)writer).getOutputStream(), ((SPARQLGraphResultWriter)writer).getFormat()); + } else { + connection.rollback(); + throw new InvalidArgumentException("SPARQL query type " + sparqlQuery.getClass() + " not supported!"); + } + + connection.commit(); + } catch (Exception ex) { + connection.rollback(); + throw ex; + } finally { + connection.close(); + } } catch(RepositoryException e) { log.error("error while getting repository connection: {}", e); throw new MarmottaException("error while getting repository connection", e); @@ -281,7 +321,7 @@ public class SparqlServiceImpl implements SparqlService { } } - @Override + @Override public void query(final QueryLanguage language, final String query, final OutputStream output, final String format, int timeoutInSeconds) throws MarmottaException, TimeoutException, MalformedQueryException { log.debug("executing 
SPARQL query:\n{}", query); Future<Boolean> future = executorService.submit(new Callable<Boolean>() { @@ -510,6 +550,89 @@ public class SparqlServiceImpl implements SparqlService { return result; } + @Override + public void createServiceDescription(RDFWriter writer, String requestURL, boolean isUpdate) throws RDFHandlerException { + try { + writer.startRDF(); + final ValueFactory vf = new ValueFactoryImpl(); + writer.handleNamespace(SPARQL_SD.PREFIX, SPARQL_SD.NAMESPACE); + writer.handleNamespace("formats", "http://www.w3.org/ns/formats/"); + writer.handleNamespace("void", "http://rdfs.org/ns/void#"); + + final BNode sd = vf.createBNode(); + writer.handleStatement(vf.createStatement(sd, RDF.TYPE, SPARQL_SD.Service)); + writer.handleStatement(vf.createStatement(sd, SPARQL_SD.endpoint, vf.createURI(requestURL))); + writer.handleStatement(vf.createStatement(sd, SPARQL_SD.supportedLanguage, isUpdate?SPARQL_SD.SPARQL11Update:SPARQL_SD.SPARQL11Query)); + + if (!isUpdate) { + // FIXME: really? these types? 
+ final Set<FileFormat> formats = new HashSet<>(); + formats.addAll(RDFWriterRegistry.getInstance().getKeys()); + formats.addAll(TupleQueryResultFormat.values()); + for (FileFormat f: formats) { + final String formatUri = w3cFormatID.get(f); + if (StringUtils.isNotBlank(formatUri)) { + writer.handleStatement(vf.createStatement(sd, SPARQL_SD.resultFormat, vf.createURI(formatUri))); + } else { + final BNode fNode = vf.createBNode(); + writer.handleStatement(vf.createStatement(sd, SPARQL_SD.resultFormat, fNode)); + writer.handleStatement(vf.createStatement(fNode, RDF.TYPE, vf.createURI("http://www.w3.org/ns/formats/Format"))); + writer.handleStatement(vf.createStatement(fNode, vf.createURI("http://www.w3.org/ns/formats/media_type"), vf.createLiteral(f.getDefaultMIMEType()))); + writer.handleStatement(vf.createStatement(fNode, vf.createURI("http://www.w3.org/ns/formats/preferred_suffix"), vf.createLiteral("."+f.getDefaultFileExtension()))); + } + } + } + + final BNode dataset = vf.createBNode(); + writer.handleStatement(vf.createStatement(sd, SPARQL_SD.defaultDataset, dataset)); + writer.handleStatement(vf.createStatement(dataset, RDF.TYPE, SPARQL_SD.Dataset)); + + final RepositoryConnection kiwiCon = sesameService.getConnection(); + try { + kiwiCon.begin(); + // FIXME: Default graph, in KiWi this is all - is it not? + final BNode defaultGraph = vf.createBNode(); + writer.handleStatement(vf.createStatement(dataset, SPARQL_SD.defaultGraph, defaultGraph)); + writer.handleStatement(vf.createStatement(defaultGraph, RDF.TYPE, SPARQL_SD.Graph)); + // TODO: Number of triples here? This can be expensive! 
+ writer.handleStatement(vf.createStatement(defaultGraph, vf.createURI("http://rdfs.org/ns/void#triples"), vf.createLiteral(kiwiCon.size()))); + + final RepositoryResult<Resource> cID = kiwiCon.getContextIDs(); + try { + while (cID.hasNext()) { + final Resource c = cID.next(); + if (c instanceof URI) { + // A named graph + final BNode ng = vf.createBNode(); + writer.handleStatement(vf.createStatement(dataset, SPARQL_SD.namedGraph, ng)); + writer.handleStatement(vf.createStatement(ng, RDF.TYPE, SPARQL_SD.NamedGraph)); + writer.handleStatement(vf.createStatement(ng, SPARQL_SD.name, c)); + final BNode g = vf.createBNode(); + writer.handleStatement(vf.createStatement(ng, SPARQL_SD.graph, g)); + writer.handleStatement(vf.createStatement(g, RDF.TYPE, SPARQL_SD.Graph)); + // TODO: Number of triples here? This can be expensive! + writer.handleStatement(vf.createStatement(g, vf.createURI("http://rdfs.org/ns/void#triples"), vf.createLiteral(kiwiCon.size(c)))); + + } + } + } finally { + cID.close(); + } + + kiwiCon.commit(); + } catch (final Throwable t){ + kiwiCon.rollback(); + throw t; + } finally { + kiwiCon.close(); + } + + writer.endRDF(); + } catch (RepositoryException e) { + throw new RDFHandlerException("Could not build SparqlServiceDescription"); + } + } + private TupleQueryResultWriter getTupleResultWriter(String format, OutputStream os) { TupleQueryResultFormat resultFormat; if(format == null) { http://git-wip-us.apache.org/repos/asf/marmotta/blob/d2c2193d/platform/marmotta-sparql/src/main/java/org/apache/marmotta/platform/sparql/webservices/SparqlWebService.java ---------------------------------------------------------------------- diff --git a/platform/marmotta-sparql/src/main/java/org/apache/marmotta/platform/sparql/webservices/SparqlWebService.java b/platform/marmotta-sparql/src/main/java/org/apache/marmotta/platform/sparql/webservices/SparqlWebService.java index 974ef7f..f1e8084 100644 --- 
a/platform/marmotta-sparql/src/main/java/org/apache/marmotta/platform/sparql/webservices/SparqlWebService.java +++ b/platform/marmotta-sparql/src/main/java/org/apache/marmotta/platform/sparql/webservices/SparqlWebService.java @@ -17,12 +17,15 @@ */ package org.apache.marmotta.platform.sparql.webservices; +import static org.openrdf.rio.RDFFormat.RDFXML; + import java.io.IOException; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.net.URI; import java.net.URISyntaxException; import java.net.URLDecoder; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -43,8 +46,10 @@ import javax.ws.rs.Path; import javax.ws.rs.QueryParam; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Context; +import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.ResponseBuilder; +import javax.ws.rs.core.Response.Status; import javax.ws.rs.core.StreamingOutput; import javax.ws.rs.core.UriBuilder; @@ -59,6 +64,7 @@ import org.apache.marmotta.platform.core.exception.MarmottaException; import org.apache.marmotta.platform.core.util.WebServiceUtil; import org.apache.marmotta.platform.sparql.api.sparql.QueryType; import org.apache.marmotta.platform.sparql.api.sparql.SparqlService; +import org.jboss.resteasy.spi.NoLogWebApplicationException; import org.openrdf.query.MalformedQueryException; import org.openrdf.query.QueryLanguage; import org.openrdf.query.UpdateExecutionException; @@ -66,6 +72,10 @@ import org.openrdf.query.resultio.BooleanQueryResultWriterRegistry; import org.openrdf.query.resultio.QueryResultIO; import org.openrdf.query.resultio.TupleQueryResultFormat; import org.openrdf.query.resultio.TupleQueryResultWriterRegistry; +import org.openrdf.rio.RDFFormat; +import org.openrdf.rio.RDFHandlerException; +import org.openrdf.rio.RDFWriter; +import org.openrdf.rio.Rio; import org.slf4j.Logger; import com.google.common.collect.Lists; @@ 
-75,7 +85,8 @@ import com.google.common.io.CharStreams; * Execute SPARQL query (both query and update) on the LMF triple store * according the SPARQL 1.1 Protocol * - * @link http://www.w3.org/TR/sparql11-protocol/ + * @see http://www.w3.org/TR/sparql11-protocol/ + * @see http://www.w3.org/TR/sparql11-service-description/ * * @author Sebastian Schaffert * @author Sergio Fernández @@ -168,7 +179,10 @@ public class SparqlWebService { */ @GET @Path(SELECT) - public Response selectGet(@QueryParam("query") String query, @QueryParam("output") String resultType, @Context HttpServletRequest request) { + public Response selectGet(@QueryParam("query") String query, @QueryParam("output") String resultType, @Context HttpServletRequest request) { + if (StringUtils.isBlank(query)) { + return createServiceDescriptionResponse(request, false); + } //get real return type: even it is not in the standard, this is useful if(resultType != null && outputMapper.containsKey(resultType)) resultType = outputMapper.get(resultType); return select(query, resultType, request); @@ -340,6 +354,9 @@ public class SparqlWebService { @Path(UPDATE) public Response updateGet(@QueryParam("update") String update, @QueryParam("query") String query, @QueryParam("output") String resultType, @Context HttpServletRequest request) { String q = getUpdateQuery(update, query); + if (StringUtils.isBlank(q)) { + return createServiceDescriptionResponse(request, true); + } return update(q, resultType, request); } @@ -474,6 +491,49 @@ public class SparqlWebService { return params; } + private Response createServiceDescriptionResponse(final HttpServletRequest request, final boolean isUpdate) { + final List<ContentType> acceptedTypes; + if (StringUtils.isBlank(request.getHeader("Accept"))) { + acceptedTypes = Collections.singletonList(MarmottaHttpUtils.parseContentType(RDFXML.getDefaultMIMEType())); + } else { + acceptedTypes = MarmottaHttpUtils.parseAcceptHeader(request.getHeader("Accept")); + } + + ContentType _bestType 
= null; + RDFFormat _format = null; + for (ContentType ct : acceptedTypes) { + final RDFFormat f = Rio.getWriterFormatForMIMEType(ct.getMime()); + if (f != null) { + _bestType = ct; + _format = f; + break; + } + } + if (_bestType == null || _format == null) { + // FIXME: todo + return Response.status(Status.BAD_REQUEST).entity("Could not determine Format").build(); + } + + final RDFFormat format = _format; + final ContentType returnType = _bestType; + + final StreamingOutput entity = new StreamingOutput() { + @Override + public void write(OutputStream outputStream) throws IOException, + WebApplicationException { + try { + final RDFWriter writer = Rio.createWriter(format, outputStream); + sparqlService.createServiceDescription(writer, request.getRequestURL().toString(), isUpdate); + } catch (RDFHandlerException e) { + log.warn("Could not send SparqlServiceDescription: {}", e); + throw new NoLogWebApplicationException(e, Response.serverError().entity(e).build()); + } + } + }; + + return Response.ok(entity, new MediaType(returnType.getType(), returnType.getSubtype(), returnType.getCharset().name())).build(); + } + private Response buildQueryResponse(final ContentType format, final String query, final QueryType queryType) throws Exception { StreamingOutput entity = new StreamingOutput() { @Override
