diff --git a/jena-arq/src/main/java/org/apache/jena/http/sys/ExecHTTPBuilder.java b/jena-arq/src/main/java/org/apache/jena/http/sys/ExecHTTPBuilder.java index c62c468534f..9bdd292bf6d 100644 --- a/jena-arq/src/main/java/org/apache/jena/http/sys/ExecHTTPBuilder.java +++ b/jena-arq/src/main/java/org/apache/jena/http/sys/ExecHTTPBuilder.java @@ -27,6 +27,7 @@ import org.apache.jena.query.*; import org.apache.jena.riot.WebContent; import org.apache.jena.riot.web.HttpNames; +import org.apache.jena.sparql.adapter.ParseCheckUtils; import org.apache.jena.sparql.core.Var; import org.apache.jena.sparql.engine.binding.Binding; import org.apache.jena.sparql.exec.http.Params; @@ -46,7 +47,7 @@ public abstract class ExecHTTPBuilder { protected String serviceURL = null; private Query query = null; protected String queryString = null; - protected boolean parseCheck = true; + protected Boolean parseCheck = null; private HttpClient httpClient = null; protected Map httpHeaders = new HashMap<>(); protected Params params = Params.create(); @@ -88,6 +89,10 @@ public Y parseCheck(boolean parseCheck) { return thisBuilder(); } + protected boolean effectiveParseCheck() { + return ParseCheckUtils.effectiveParseCheck(parseCheck, contextAcc); + } + /** Set the query - this also sets the query string to agree with the query argument. */ public Y query(Query query) { Objects.requireNonNull(query); @@ -102,14 +107,14 @@ public Y query(Query query) { */ public Y query(String queryStr) { Objects.requireNonNull(queryStr); - Query query = parseCheck ? QueryFactory.create(queryStr) : null; + Query query = effectiveParseCheck() ? QueryFactory.create(queryStr) : null; setQuery(query, queryStr); return thisBuilder(); } public Y query(String queryStr, Syntax syntax) { Objects.requireNonNull(queryStr); - Query query = QueryFactory.create(queryStr, syntax); + Query query = effectiveParseCheck() ? QueryFactory.create(queryStr, syntax) : null; setQuery(query, queryStr); return thisBuilder(); } diff --git a/jena-arq/src/main/java/org/apache/jena/http/sys/ExecUpdateHTTPBuilder.java b/jena-arq/src/main/java/org/apache/jena/http/sys/ExecUpdateHTTPBuilder.java index 22e551c4cee..83f1275528d 100644 --- a/jena-arq/src/main/java/org/apache/jena/http/sys/ExecUpdateHTTPBuilder.java +++ b/jena-arq/src/main/java/org/apache/jena/http/sys/ExecUpdateHTTPBuilder.java @@ -19,13 +19,18 @@ package org.apache.jena.http.sys; import java.net.http.HttpClient; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; import org.apache.jena.graph.Node; import org.apache.jena.http.HttpEnv; import org.apache.jena.query.ARQ; +import org.apache.jena.sparql.adapter.ParseCheckUtils; import org.apache.jena.sparql.core.Var; import org.apache.jena.sparql.engine.binding.Binding; import org.apache.jena.sparql.exec.http.Params; @@ -37,110 +42,14 @@ import org.apache.jena.sys.JenaSystem; import org.apache.jena.update.Update; import org.apache.jena.update.UpdateException; -import org.apache.jena.update.UpdateFactory; import org.apache.jena.update.UpdateRequest; public abstract class ExecUpdateHTTPBuilder { - /** Update element. Either an Update object or a string. 
*/ - private record UpdateElt(Update update, String updateString) { - UpdateElt(Update update) { this(Objects.requireNonNull(update), null); } - UpdateElt(String updateString) { this(null, Objects.requireNonNull(updateString)); } - boolean isParsed() { return update != null; } - - @Override - public String toString() { - return isParsed() - ? new UpdateRequest(update()).toString() // Reuse UpdateRequest's serialization approach - : updateString(); - } - } - - /** Accumulator for update elements. Can build an overall string or UpdateRequest from the elements. */ - private class UpdateEltAcc implements Iterable { - /** Delimiter for joining multiple SPARQL update strings into a single one. - * The delimiter takes into account that the last line of a statement may be a single-line-comment. */ - public static final String DELIMITER = "\n;\n"; - - private List updateOperations = new ArrayList<>(); - private List updateOperationsView = Collections.unmodifiableList(updateOperations); - private boolean isParsed = true; // True iff there are no strings in updateOperations - - public boolean isParsed() { - return isParsed; - } - - public void add(UpdateElt updateElt) { - isParsed = isParsed && updateElt.isParsed(); - updateOperations.add(updateElt); - } - - public void add(Update update) { - add(new UpdateElt(update)); - } - - /** Add a string by parsing it. */ - public void add(String updateRequestString) { - UpdateRequest updateRequest = UpdateFactory.create(updateRequestString); - add(updateRequest); - } - - public void add(UpdateRequest updateRequest) { - updateRequest.getOperations().forEach(this::add); - } - - /** Add a string without parsing it. */ - public void addString(String updateRequestString) { - add(new UpdateElt(updateRequestString)); - } - - /** Attempt to build an UpdateRequest from the state of this accumulator. Attempts to parse any string elements. 
*/ - public UpdateRequest buildUpdateRequest() { - return addToUpdateRequest(new UpdateRequest()); - } - - public UpdateRequest addToUpdateRequest(UpdateRequest updateRequest) { - for (UpdateElt elt : updateOperations) { - if (elt.isParsed()) { - updateRequest.add(elt.update()); - } else { - try { - updateRequest.add(elt.updateString()); - } catch (Exception e) { - // Expose the string that failed to parse - e.addSuppressed(new RuntimeException("Failed to parse: " + elt.updateString())); - throw e; - } - } - } - return updateRequest; - } - - public void clear() { - updateOperations.clear(); - isParsed = true; - } - - public boolean isEmpty() { - return updateOperations.isEmpty(); - } - - @Override - public Iterator iterator() { - return updateOperationsView.iterator(); - } - - public String buildString() { - return updateOperations.stream() - .map(UpdateElt::toString) - .collect(Collectors.joining(DELIMITER)); - } - } - static { JenaSystem.init(); } protected String serviceURL; - protected boolean parseCheck = true; + protected Boolean parseCheck = null; private UpdateEltAcc updateEltAcc = new UpdateEltAcc(); protected Params params = Params.create(); @@ -173,7 +82,7 @@ public Y update(UpdateRequest updateRequest) { public Y update(String updateRequestString) { Objects.requireNonNull(updateRequestString); - if (parseCheck) { + if (effectiveParseCheck()) { updateEltAcc.add(updateRequestString); } else { updateEltAcc.addString(updateRequestString); @@ -205,6 +114,10 @@ public Y parseCheck(boolean parseCheck) { return thisBuilder(); } + protected boolean effectiveParseCheck() { + return ParseCheckUtils.effectiveParseCheck(parseCheck, contextAcc); + } + public Y substitution(Binding binding) { binding.forEach(this.substitutionMap::put); return thisBuilder(); diff --git a/jena-arq/src/main/java/org/apache/jena/http/sys/UpdateElt.java b/jena-arq/src/main/java/org/apache/jena/http/sys/UpdateElt.java new file mode 100644 index 00000000000..f7f73d061e9 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/http/sys/UpdateElt.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.http.sys; + +import java.util.Objects; + +import org.apache.jena.update.Update; +import org.apache.jena.update.UpdateRequest; + +/** Update element. Either an Update object or a string. */ +public record UpdateElt(Update update, String updateString) { + UpdateElt(Update update) { this(Objects.requireNonNull(update), null); } + UpdateElt(String updateString) { this(null, Objects.requireNonNull(updateString)); } + public boolean isParsed() { return update != null; } + + @Override + public String toString() { + return isParsed() + ? 
new UpdateRequest(update()).toString() // Reuse UpdateRequest's serialization approach + : updateString(); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/http/sys/UpdateEltAcc.java b/jena-arq/src/main/java/org/apache/jena/http/sys/UpdateEltAcc.java new file mode 100644 index 00000000000..b65554fd0f5 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/http/sys/UpdateEltAcc.java @@ -0,0 +1,110 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.http.sys; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.stream.Collectors; + +import org.apache.jena.update.Update; +import org.apache.jena.update.UpdateFactory; +import org.apache.jena.update.UpdateRequest; + +/** Accumulator for update elements. Can build an overall string or UpdateRequest from the elements. */ +public class UpdateEltAcc implements Iterable { + /** Delimiter for joining multiple SPARQL update strings into a single one. + * The delimiter takes into account that the last line of a statement may be a single-line-comment. */ + public static final String DELIMITER = "\n;\n"; + + private List updateOperations = new ArrayList<>(); + private List updateOperationsView = Collections.unmodifiableList(updateOperations); + private boolean isParsed = true; // True iff there are no strings in updateOperations + + public boolean isParsed() { + return isParsed; + } + + public void add(UpdateElt updateElt) { + isParsed = isParsed && updateElt.isParsed(); + updateOperations.add(updateElt); + } + + public void add(Update update) { + add(new UpdateElt(update)); + } + + /** Add a string by parsing it. */ + public void add(String updateRequestString) { + UpdateRequest updateRequest = UpdateFactory.create(updateRequestString); + add(updateRequest); + } + + public void add(UpdateRequest updateRequest) { + updateRequest.getOperations().forEach(this::add); + } + + /** Add a string without parsing it. */ + public void addString(String updateRequestString) { + add(new UpdateElt(updateRequestString)); + } + + /** Attempt to build an UpdateRequest from the state of this accumulator. Attempts to parse any string elements. 
*/ + public UpdateRequest buildUpdateRequest() { + return addToUpdateRequest(new UpdateRequest()); + } + + public UpdateRequest addToUpdateRequest(UpdateRequest updateRequest) { + for (UpdateElt elt : updateOperations) { + if (elt.isParsed()) { + updateRequest.add(elt.update()); + } else { + try { + updateRequest.add(elt.updateString()); + } catch (Exception e) { + // Expose the string that failed to parse + e.addSuppressed(new RuntimeException("Failed to parse: " + elt.updateString())); + throw e; + } + } + } + return updateRequest; + } + + public void clear() { + updateOperations.clear(); + isParsed = true; + } + + public boolean isEmpty() { + return updateOperations.isEmpty(); + } + + @Override + public Iterator iterator() { + return updateOperationsView.iterator(); + } + + public String buildString() { + return updateOperations.stream() + .map(UpdateElt::toString) + .collect(Collectors.joining(DELIMITER)); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/query/QueryExecutionDatasetBuilder.java b/jena-arq/src/main/java/org/apache/jena/query/QueryExecutionDatasetBuilder.java index 882e1056426..f9b0b7e7c43 100644 --- a/jena-arq/src/main/java/org/apache/jena/query/QueryExecutionDatasetBuilder.java +++ b/jena-arq/src/main/java/org/apache/jena/query/QueryExecutionDatasetBuilder.java @@ -30,6 +30,7 @@ import org.apache.jena.sparql.engine.binding.Binding; import org.apache.jena.sparql.engine.binding.BindingLib; import org.apache.jena.sparql.exec.QueryExecDatasetBuilder; +import org.apache.jena.sparql.exec.QueryExecDatasetBuilderDeferred; import org.apache.jena.sparql.exec.QueryExecutionCompat; import org.apache.jena.sparql.util.Context; import org.apache.jena.sparql.util.Symbol; @@ -46,7 +47,7 @@ public class QueryExecutionDatasetBuilder implements QueryExecutionBuilder { private Dataset dataset = null; public QueryExecutionDatasetBuilder() { - builder = QueryExecDatasetBuilder.create(); + builder = QueryExecDatasetBuilderDeferred.create(); } @Override diff --git a/jena-arq/src/main/java/org/apache/jena/riot/system/StreamRDFOps.java b/jena-arq/src/main/java/org/apache/jena/riot/system/StreamRDFOps.java index dae935e5816..8483ab18997 100644 --- a/jena-arq/src/main/java/org/apache/jena/riot/system/StreamRDFOps.java +++ b/jena-arq/src/main/java/org/apache/jena/riot/system/StreamRDFOps.java @@ -104,6 +104,11 @@ public static void sendGraphToStream(Graph graph, StreamRDF stream, String baseU stream.base(baseURI); if ( prefixMap != null ) sendPrefixesToStream(prefixMap, stream) ; + sendGraphTriplesToStream(graph, stream); + } + + /** Send only the triples of graph to a StreamRDF */ + public static void sendGraphTriplesToStream(Graph graph, StreamRDF stream) { ExtendedIterator iter = graph.find(null, null, null) ; try { StreamRDFOps.sendTriplesToStream(iter, stream) ; diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/ARQConstants.java b/jena-arq/src/main/java/org/apache/jena/sparql/ARQConstants.java index 8c19d110b04..1d3fb188539 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/ARQConstants.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/ARQConstants.java @@ -317,6 +317,12 @@ public class ARQConstants public static final Symbol registryExtensions = SystemARQ.allocSymbol("registryExtensions") ; - public static void init() {} + public static final Symbol registrySparqlDispatchers = + SystemARQ.allocSymbol("registrySparqlDispatchers") ; + + /** Symbol for disabling parse checks of queries and updates when executing them against a dataset */ + public static final 
Symbol parseCheck = + SystemARQ.allocSymbol("parseCheck") ; + public static void init() {} } diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/adapter/DatasetGraphSPARQL.java b/jena-arq/src/main/java/org/apache/jena/sparql/adapter/DatasetGraphSPARQL.java new file mode 100644 index 00000000000..e26d1a9706c --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/adapter/DatasetGraphSPARQL.java @@ -0,0 +1,537 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.adapter; + +import static org.apache.jena.query.ReadWrite.WRITE; + +import java.util.Iterator; +import java.util.List; +import java.util.Objects; +import java.util.function.Function; + +import org.apache.jena.atlas.iterator.Iter; +import org.apache.jena.atlas.iterator.IteratorCloseable; +import org.apache.jena.graph.Graph; +import org.apache.jena.graph.Node; +import org.apache.jena.graph.Triple; +import org.apache.jena.query.Query; +import org.apache.jena.query.QueryFactory; +import org.apache.jena.query.ReadWrite; +import org.apache.jena.query.TxnType; +import org.apache.jena.riot.system.PrefixMap; +import org.apache.jena.riot.system.PrefixMapFactory; +import org.apache.jena.riot.system.Prefixes; +import org.apache.jena.riot.system.StreamRDF; +import org.apache.jena.sparql.JenaTransactionException; +import org.apache.jena.sparql.core.BasicPattern; +import org.apache.jena.sparql.core.DatasetGraphBase; +import org.apache.jena.sparql.core.GraphView; +import org.apache.jena.sparql.core.Quad; +import org.apache.jena.sparql.core.Substitute; +import org.apache.jena.sparql.core.Transactional; +import org.apache.jena.sparql.core.TransactionalNull; +import org.apache.jena.sparql.core.Var; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.engine.binding.BindingFactory; +import org.apache.jena.sparql.exec.QueryExec; +import org.apache.jena.sparql.exec.UpdateExec; +import org.apache.jena.sparql.expr.Expr; +import org.apache.jena.sparql.expr.aggregate.AggCount; +import org.apache.jena.sparql.modify.request.QuadAcc; +import org.apache.jena.sparql.modify.request.QuadDataAcc; +import org.apache.jena.sparql.modify.request.Target; +import org.apache.jena.sparql.modify.request.UpdateClear; +import org.apache.jena.sparql.modify.request.UpdateDataDelete; +import org.apache.jena.sparql.modify.request.UpdateDataInsert; +import org.apache.jena.sparql.modify.request.UpdateDeleteWhere; +import org.apache.jena.sparql.modify.request.UpdateDrop; +import org.apache.jena.sparql.syntax.Element; +import org.apache.jena.sparql.syntax.ElementNamedGraph; +import org.apache.jena.sparql.syntax.ElementTriplesBlock; +import org.apache.jena.sparql.syntax.ElementUnion; +import org.apache.jena.sparql.util.Context; +import 
org.apache.jena.update.Update; +import org.apache.jena.update.UpdateRequest; + +/** + * This class provides a base implementation of the Jena DatasetGraph interface + * by means of SPARQL statement executions. Efficiency not guaranteed. + * + *
+ * <p>
+ * This class also serves as a base for custom DatasetGraph implementations. + * For such custom dataset graphs, specialized {@link SparqlAdapterProvider} implementations + * can be created that deliver improved SPARQL update and query performance over direct use of the + * DatasetGraph API. + * + *
+ * <p>
+ * All returned iterators must be closed to free any allocated resources. + * + *
+ * <p>
+ * This base class does not support transactions. + * + *
+ * <p>
+ * All inserts are passed on as SPARQL update requests. + * Beware that blank nodes are likely to become renamed across separate requests. + * + *
+ * <p>
+ * All method invocations build single, possibly compound, update request. + */ +public abstract class DatasetGraphSPARQL + extends DatasetGraphBase +{ + private PrefixMap prefixes = PrefixMapFactory.create(); + private Transactional transactional = TransactionalNull.create(); + + public DatasetGraphSPARQL() { + super(); + initContext(); + } + + protected PrefixMap getPrefixes() { + return prefixes; + } + + protected Transactional getTransactional() { + return transactional; + } + + protected void initContext() { + Context cxt = getContext(); + // Use the context to advertise that SPARQL statements should not be parsed. + ParseCheckUtils.setParseCheck(cxt, false); + } + + protected abstract QueryExec query(Query query); + protected abstract UpdateExec update(UpdateRequest UpdateRequest); + + protected void execUpdate(Update update) { + execUpdate(new UpdateRequest(update)); + } + + protected void execUpdate(UpdateRequest updateRequest) { + UpdateExec uExec = update(updateRequest); + uExec.execute(); + } + + /** + * This method must return a StreamRDF instance that handles bulk inserts of RDF tuples (triples or quads). + * The default implementation flushes every 1000 tuples. + * Alternative implementations could e.g. flush by the string length of the update request. + */ + protected StreamRDF newUpdateSink() { + StreamRDF sink = new StreamRDFToUpdateRequest(this::execUpdate, Prefixes.adapt(getPrefixes()), 1000); + return sink; + } + + @Override + public Iterator listGraphNodes() { + QueryExec qExec = query(graphsQuery); + return Iter.onClose( + Iter.map(qExec.select(), b -> b.get(vg)), + qExec::close); + } + + @Override + public Iterator find(Node g, Node s, Node p, Node o) { + Iterator result; + if (g == null || Node.ANY.equals(g)) { + result = findTriplesOrQuads(this::query, s, p, o); + } else if (Quad.isDefaultGraph(g)) { + Iterator base = findTriples(this::query, s, p, o); + result = Iter.map(base, t -> Quad.create(Quad.defaultGraphIRI, t)); + } else { + result = findQuads(this::query, g, s, p, o); + } + return result; + } + + @Override + public Iterator findNG(Node g, Node s, Node p, Node o) { + Iterator result = findQuads(this::query, g, s, p, o); + return result; + } + + @Override + public Graph getDefaultGraph() { + DatasetGraphSPARQL self = this; + return new GraphView(this, Quad.defaultGraphNodeGenerated) { + @Override + protected int graphBaseSize() { + long size = sizeLong(); + return (size < Integer.MAX_VALUE) ? (int)size : Integer.MAX_VALUE; + } + + @Override + public long sizeLong() { + long result = fetchLong(self::query, defaultGraphSizeQuery, vc); + return result; + } + }; + } + + @Override + public Graph getGraph(Node graphNode) { + DatasetGraphSPARQL self = this; + return new GraphView(this, graphNode) { + @Override + protected int graphBaseSize() { + long size = sizeLong(); + return (size < Integer.MAX_VALUE) ? 
(int)size : Integer.MAX_VALUE; + } + + @Override + public long sizeLong() { + Query q = createQueryNamedGraphSize(graphNode, vc); + long result = fetchLong(self::query, q, vc); + return result; + } + }; + } + + @Override + public void addGraph(Node graphName, Graph graph) { + StreamRDF sink = newUpdateSink(); + try { + sink.start(); + StreamRDFToUpdateRequest.sendGraphTriplesToStream(graph, graphName, sink); + } finally { + sink.finish(); + } + } + + @Override + public void removeGraph(Node graphName) { + Objects.requireNonNull(graphName); + delete(graphName, Node.ANY, Node.ANY, Node.ANY); + // UpdateRequest ur = new UpdateRequest(new UpdateDrop(graphName, true)); + // execUpdate(ur); + } + + @Override + public void add(Quad quad) { + Quad q = harmonizeTripleInQuad(quad); + if (!q.isConcrete()) { + throw new IllegalArgumentException("Concrete quad expected."); + } + Update update = new UpdateDataInsert(new QuadDataAcc(List.of(q))); + execUpdate(new UpdateRequest(update)); + } + + @Override + public void delete(Quad quad) { + Quad q = harmonizeTripleInQuad(quad); + if (!q.isConcrete()) { + throw new IllegalArgumentException("Concrete quad expected."); + } + Update update = new UpdateDataDelete(new QuadDataAcc(List.of(q))); + execUpdate(update); + } + + @Override + public void deleteAny(Node g, Node s, Node p, Node o) { + boolean allowDrop = true; + UpdateRequest updateRequest; + if (allowDrop && isWildcard(s) && isWildcard(p) && isWildcard(o)) { + updateRequest = new UpdateRequest(buildDeleteByGraph(g)); + } else { + updateRequest = buildDeleteByPattern(g, s, p, o); + } + execUpdate(updateRequest); + } + + @Override + public long size() { + long result = fetchLong(this::query, graphsCountQuery, vc); + return result; + } + + @Override + public boolean supportsTransactions() { + return false; + } + + @Override + public boolean supportsTransactionAbort() { + return false; + } + + @Override + public void abort() { + getTransactional().abort(); + } + + @Override + public void begin(ReadWrite readWrite) { + getTransactional().begin(readWrite); + } + + @Override + public void commit() { + getTransactional().commit(); + } + + @Override + public void end() { + // Note: AbstractTestRDFConnection.transaction_bad_01() expects + // a JenaTransactionException to be thrown if the + // conditions of the if-statement below are satisfied. + try { + if (isInTransaction()) { + if (transactionMode().equals(WRITE)) { + String msg = "end() called for WRITE transaction without commit or abort having been called. 
This causes a forced abort."; + throw new JenaTransactionException(msg); + } + } + } finally { + getTransactional().end(); + } + } + + @Override + public boolean isInTransaction() { + return getTransactional().isInTransaction(); + } + + @Override + public void begin(TxnType type) { + getTransactional().begin(type); + } + + @Override + public boolean promote(Promote mode) { + return getTransactional().promote(mode); + } + + @Override + public ReadWrite transactionMode() { + return getTransactional().transactionMode(); + } + + @Override + public TxnType transactionType() { + return getTransactional().transactionType(); + } + + @Override + public PrefixMap prefixes() { + return prefixes; + } + + // ----- SPARQL Statement Generation ----- + + private static final Var vg = Var.alloc("g"); + private static final Var vs = Var.alloc("s"); + private static final Var vp = Var.alloc("p"); + private static final Var vo = Var.alloc("o"); + private static final Query graphsQuery = QueryFactory.create("SELECT ?g { GRAPH ?g { } }"); + + private static final Var vc = Var.alloc("c"); + private static final Query graphsCountQuery = QueryFactory.create("SELECT (COUNT(*) AS ?c) { GRAPH ?g { } }"); + + private static final Query defaultGraphSizeQuery = QueryFactory.create("SELECT (COUNT(*) AS ?c) { ?s ?p ?o }"); + + private static IteratorCloseable findTriples(Function executor, Node s, Node p, Node o) { + Triple triple = matchTriple(Triple.create(s, p, o)); + Query query = createQueryTriple(triple); + QueryExec qExec = executor.apply(query); + return Iter.onClose( + Iter.map(qExec.select(), b -> Substitute.substitute(triple, b)), + qExec::close); + } + + private static IteratorCloseable findQuads(Function executor, Node g, Node s, Node p, Node o) { + Quad quad = matchQuad(g, s, p, o); + Query query = createQueryQuad(quad); + QueryExec qExec = executor.apply(query); + return Iter.onClose( + Iter.map(qExec.select(), b -> Substitute.substitute(quad, b)), + qExec::close); + } + + private static IteratorCloseable findTriplesOrQuads(Function executor, Node s, Node p, Node o) { + Quad quad = matchQuad(vg, s, p, o); + Query query = createQueryTriplesAndQuads(s, p, o); + QueryExec qExec = executor.apply(query); + return Iter.onClose( + Iter.map(qExec.select(), b -> { + if (!b.contains(vg)) { + // Unbound graph variable -> default graph. + b = BindingFactory.binding(b, vg, Quad.defaultGraphIRI); + } + return Substitute.substitute(quad, b); + }), + qExec::close); + } + + private static long fetchLong(Function executor, Query query, Var numberVar) { + long result; + try (QueryExec qExec = executor.apply(query)) { + Binding b = qExec.select().next(); + Number number = (Number)b.get(numberVar).getLiteralValue(); + result = number.longValue(); + } + return result; + } + + private static Node matchNode(Node n, Node d) { + return n == null || n.equals(Node.ANY) ? d : n; + } + + private static Triple matchTriple(Triple triple) { + return matchTriple(triple.getSubject(), triple.getPredicate(), triple.getObject()); + } + + private static Triple matchTriple(Node s, Node p, Node o) { + return Triple.create(matchNode(s, vs), matchNode(p, vp), matchNode(o, vo)); + } + + private static Quad harmonizeTripleInQuad(Quad quad) { + Quad result = quad.isTriple() ? new Quad(Quad.defaultGraphIRI, quad.asTriple()) : quad; + return result; + } + + private static Quad matchQuad(Node g, Node s, Node p, Node o) { + return Quad.create(matchNode(g, vg), matchNode(s, vs), matchNode(p, vp), matchNode(o, vo)); + } + + /** + * Generates the query: + *

+     * <pre>
+     * SELECT * {
+     *     { ?s ?p ?o }
+     *   UNION
+     *     { GRAPH ?g { ?s ?p ?o } }
+     * }
+     * </pre>
+ */ + private static Query createQueryTriplesAndQuads(Node s, Node p, Node o) { + BasicPattern bgpTriples = new BasicPattern(); + bgpTriples.add(matchTriple(s, p, o)); + + Quad quad = matchQuad(vg, s, p, o); + BasicPattern bgpQuads = new BasicPattern(); + bgpQuads.add(quad.asTriple()); + + ElementUnion union = new ElementUnion(); + union.addElement(new ElementTriplesBlock(bgpTriples)); + union.addElement(new ElementNamedGraph(vg, new ElementTriplesBlock(bgpQuads))); + + Query query = new Query(); + query.setQuerySelectType(); + query.setQueryResultStar(true); + query.setQueryPattern(union); + return query; + } + + /** + * Generates the query: + *
+     * <pre>
+     * SELECT * { GRAPH ?g { ?s ?p ?o } }
+     * </pre>
+ */ + private static Query createQueryQuad(Quad quad) { + BasicPattern bgp = new BasicPattern(); + bgp.add(quad.asTriple()); + Element element = new ElementTriplesBlock(bgp); + element = new ElementNamedGraph(quad.getGraph(), element); + + Query query = new Query(); + query.setQuerySelectType(); + query.setQueryResultStar(true); + query.setQueryPattern(element); + return query; + } + + /** + * Generates the query: + *
+     * <pre>
+     * SELECT * { ?s ?p ?o }
+     * </pre>
+ */ + private static Query createQueryTriple(Triple m) { + BasicPattern bgp = new BasicPattern(); + bgp.add(m); + Element element = new ElementTriplesBlock(bgp); + + Query query = new Query(); + query.setQuerySelectType(); + query.setQueryResultStar(true); + query.setQueryPattern(element); + return query; + } + + /** + * Generates the query: + *
+     * <pre>
+     * SELECT (COUNT(*) AS ?c) { GRAPH <g> { ?s ?p ?o } }
+     * </pre>
+ */ + private static Query createQueryNamedGraphSize(Node graphName, Var outputVar) { + BasicPattern bgp = new BasicPattern(); + bgp.add(Triple.create(vs, vp, vo)); + Element element = new ElementNamedGraph(graphName, new ElementTriplesBlock(bgp)); + + Query query = new Query(); + query.setQuerySelectType(); + query.setQueryPattern(element); + Expr exprAgg = query.allocAggregate(new AggCount()); + query.getProject().add(outputVar, exprAgg); + return query; + } + + private static Update buildDelete(Node g, Node s, Node p, Node o) { + Quad quad = matchQuad(g, s, p, o); + Update update = quad.isConcrete() + ? new UpdateDataDelete(new QuadDataAcc(List.of(quad))) + : new UpdateDeleteWhere(new QuadAcc(List.of(quad))); + return update; + } + + private static UpdateRequest buildDeleteByPattern(Node g, Node s, Node p, Node o) { + UpdateRequest updateRequest = new UpdateRequest(); + if (isWildcard(g)) { + updateRequest.add(buildDelete(Quad.defaultGraphIRI, s, p, o)); + updateRequest.add(buildDelete(g, s, p, o)); + } else { + updateRequest.add(buildDelete(g, s, p, o)); + } + return updateRequest; + } + + private static Update buildDeleteByGraph(Node g) { + Target target = chooseTarget(g); + boolean silent = true; + boolean useDrop = true; + Update update = useDrop + ? new UpdateDrop(target, silent) + : new UpdateClear(target, silent); + return update; + } + + private static Target chooseTarget(Node g) { + Target target = Quad.isDefaultGraph(g) + ? Target.DEFAULT + : Quad.isUnionGraph(g) + ? Target.NAMED + : (g == null || Node.ANY.equals(g)) + ? Target.ALL + : Target.create(g); + return target; + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/adapter/ParseCheckUtils.java b/jena-arq/src/main/java/org/apache/jena/sparql/adapter/ParseCheckUtils.java new file mode 100644 index 00000000000..9371f156a83 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/adapter/ParseCheckUtils.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.jena.sparql.adapter; + +import java.util.Optional; + +import org.apache.jena.sparql.ARQConstants; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.sparql.util.ContextAccumulator; + +public class ParseCheckUtils +{ + // ----- Parse Check ----- + + public static void setParseCheck(Context cxt, Boolean value) { + cxt.set(ARQConstants.parseCheck, value); + } + + public static Optional getParseCheck(DatasetGraph dsg) { + return Optional.ofNullable(dsg).map(DatasetGraph::getContext).flatMap(ParseCheckUtils::getParseCheck); + } + + public static Optional getParseCheck(Context cxt) { + return Optional.ofNullable(cxt).map(c -> c.get(ARQConstants.parseCheck)); + } + + public static Optional getParseCheck(ContextAccumulator cxtAcc) { + return Optional.ofNullable(cxtAcc).map(ca -> ca.get(ARQConstants.parseCheck)); + } + + public static boolean effectiveParseCheck(Boolean parseCheck, Context cxt) { + return Optional.ofNullable(parseCheck).orElseGet(() -> getParseCheck(cxt).orElse(true)); + } + + public static boolean effectiveParseCheck(Boolean parseCheck, ContextAccumulator cxtAcc) { + return Optional.ofNullable(parseCheck).orElseGet(() -> getParseCheck(cxtAcc).orElse(true)); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/adapter/SparqlAdapter.java b/jena-arq/src/main/java/org/apache/jena/sparql/adapter/SparqlAdapter.java new file mode 100644 index 00000000000..9190c50a6e6 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/adapter/SparqlAdapter.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.adapter; + +import org.apache.jena.sparql.exec.QueryExecBuilder; +import org.apache.jena.sparql.exec.UpdateExecBuilder; + +public interface SparqlAdapter { + QueryExecBuilder newQuery(); + UpdateExecBuilder newUpdate(); +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/adapter/SparqlAdapterDefault.java b/jena-arq/src/main/java/org/apache/jena/sparql/adapter/SparqlAdapterDefault.java new file mode 100644 index 00000000000..7a2cdbc6953 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/adapter/SparqlAdapterDefault.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.adapter; + +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.exec.QueryExecBuilder; +import org.apache.jena.sparql.exec.QueryExecDatasetBuilderImpl; +import org.apache.jena.sparql.exec.UpdateExecBuilder; +import org.apache.jena.sparql.exec.UpdateExecDatasetBuilderImpl; + +/** + * The default adapter that executes SPARQL statements + * against a DatasetGraph using the default engines. + */ +public class SparqlAdapterDefault + implements SparqlAdapter +{ + private final DatasetGraph dsg; + + public SparqlAdapterDefault(DatasetGraph dsg) { + super(); + this.dsg = dsg; + } + + @Override + public QueryExecBuilder newQuery() { + return QueryExecDatasetBuilderImpl.create() + .dataset(dsg) + // Execution tracking via pre-configured builder. Alternative: hard code into build() method. + // .transformExec(QueryExecTransformExecTracking.get()) + ; + } + + @Override + public UpdateExecBuilder newUpdate() { + return UpdateExecDatasetBuilderImpl.create() + .dataset(dsg) + // Execution tracking via pre-configured builder. Alternative: hard code into build() method. + // .transformExec(UpdateExecTransformExecTracking.get()) + ; + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/adapter/SparqlAdapterProvider.java b/jena-arq/src/main/java/org/apache/jena/sparql/adapter/SparqlAdapterProvider.java new file mode 100644 index 00000000000..134095a0261 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/adapter/SparqlAdapterProvider.java @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.adapter; + +import org.apache.jena.sparql.core.DatasetGraph; + +public interface SparqlAdapterProvider { + SparqlAdapter adapt(DatasetGraph dsg); +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/adapter/SparqlAdapterProviderMain.java b/jena-arq/src/main/java/org/apache/jena/sparql/adapter/SparqlAdapterProviderMain.java new file mode 100644 index 00000000000..55c103f244a --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/adapter/SparqlAdapterProviderMain.java @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.adapter; + +import org.apache.jena.sparql.core.DatasetGraph; + +public class SparqlAdapterProviderMain + implements SparqlAdapterProvider +{ + @Override + public SparqlAdapter adapt(DatasetGraph dsg) { + return new SparqlAdapterDefault(dsg); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/adapter/SparqlAdapterRegistry.java b/jena-arq/src/main/java/org/apache/jena/sparql/adapter/SparqlAdapterRegistry.java new file mode 100644 index 00000000000..ad2bcb6a22e --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/adapter/SparqlAdapterRegistry.java @@ -0,0 +1,147 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.adapter; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +import org.apache.jena.sparql.ARQConstants; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.core.DatasetGraphOne; +import org.apache.jena.sparql.util.Context; + +public class SparqlAdapterRegistry { + List providers = Collections.synchronizedList(new ArrayList<>()); + + // Singleton + private static SparqlAdapterRegistry registry; + static { init(); } + + static public SparqlAdapterRegistry get() + { + return registry; + } + + public List getProviders() { + return providers; + } + + /** If there is a registry in the context then return it otherwise yield the global instance */ + static public SparqlAdapterRegistry chooseRegistry(Context context) + { + SparqlAdapterRegistry result = get(context); + if (result == null) { + result = get(); + } + return result; + } + + /** Get the query engine registry from the context or null if there is none. + * Returns null if the context is null. */ + static public SparqlAdapterRegistry get(Context context) + { + SparqlAdapterRegistry result = context == null + ? 
null + : context.get(ARQConstants.registrySparqlDispatchers); + return result; + } + + static public void set(Context context, ParseCheckUtils registry) + { + context.set(ARQConstants.registrySparqlDispatchers, registry); + } + + public SparqlAdapterRegistry copy() { + SparqlAdapterRegistry result = new SparqlAdapterRegistry(); + result.providers.addAll(providers); + return result; + } + + /** Create a copy of the registry from the context or return a new instance */ + public static SparqlAdapterRegistry copyFrom(Context context) { + SparqlAdapterRegistry tmp = get(context); + SparqlAdapterRegistry result = tmp != null + ? tmp.copy() + : new SparqlAdapterRegistry(); + return result; + } + + public SparqlAdapterRegistry() { } + + private static void init() + { + registry = new SparqlAdapterRegistry(); + + registry.add(new SparqlAdapterProviderMain()); + } + + // ----- Query ----- + + /** Add a link provider to the default registry */ + public static void addProvider(SparqlAdapterProvider f) { get().add(f); } + + /** Add a query dispatcher */ + public void add(SparqlAdapterProvider f) + { + // Add to low end so that newer factories are tried first + providers.add(0, f); + } + + /** Remove a query dispatcher */ + public static void removeProvider(SparqlAdapterProvider f) { get().remove(f); } + + /** Remove a query dispatcher */ + public void remove(SparqlAdapterProvider f) { providers.remove(f); } + + /** Check whether a query dispatcher is already registered in the default registry */ + public static boolean containsFactory(SparqlAdapterProvider f) { return get().contains(f); } + + /** Check whether a query dispatcher is already registered */ + public boolean contains(SparqlAdapterProvider f) { return providers.contains(f); } + + // FIXME Do we need a separate context (originating from builder config) to choose the adapter registry? + // Probably yes! + public static SparqlAdapter adapt(DatasetGraph dsg) { + /** If the dataset is null then use Jena's ARQ query engine. */ + if (dsg == null) { + return new SparqlAdapterDefault(dsg); + } + + /** FIXME Unwrap Graph view over a DatasetGraph? */ +// if (dsg instanceof DatasetGraphOne dsg1) { +// dsg1.getBacking() +// } + + + Context cxt = dsg.getContext(); + SparqlAdapterRegistry registry = chooseRegistry(cxt); + + SparqlAdapter result = null; + for (SparqlAdapterProvider provider : registry.providers) { + result = provider.adapt(dsg); + if (result != null) { + break; + } + } + Objects.requireNonNull(result, "No provider found for " + dsg.getClass()); + return result; + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/adapter/StreamRDFToUpdateRequest.java b/jena-arq/src/main/java/org/apache/jena/sparql/adapter/StreamRDFToUpdateRequest.java new file mode 100644 index 00000000000..ab227d48459 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/adapter/StreamRDFToUpdateRequest.java @@ -0,0 +1,161 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.adapter; + +import java.util.Objects; +import java.util.function.Consumer; + +import org.apache.jena.graph.Graph; +import org.apache.jena.graph.Node; +import org.apache.jena.graph.Triple; +import org.apache.jena.riot.system.StreamRDF; +import org.apache.jena.riot.system.StreamRDFOps; +import org.apache.jena.riot.system.StreamRDFWrapper; +import org.apache.jena.shared.PrefixMapping; +import org.apache.jena.sparql.core.Quad; +import org.apache.jena.sparql.modify.request.QuadDataAcc; +import org.apache.jena.sparql.modify.request.UpdateDataInsert; +import org.apache.jena.update.UpdateRequest; + +/** + * {@link StreamRDF} that writes to an {@link RDFLink}. + */ +/* package */ class StreamRDFToUpdateRequest implements StreamRDF { + public static final int DFT_BUFFER_SIZE = 1000; + + private Consumer sink; + private int bufferSize; + private PrefixMapping prefixes; + private QuadDataAcc quadAcc = new QuadDataAcc(); + + /** + * Constructs the StreamRDFToRDFLink using default {@value #DFT_BUFFER_SIZE} quad buffer size. + * + * @param link the link to talk to. + */ + public StreamRDFToUpdateRequest(Consumer sink) { + this(sink, null); + } + + public StreamRDFToUpdateRequest(Consumer sink, PrefixMapping prefixes) { + this(sink, prefixes, DFT_BUFFER_SIZE); + } + + public StreamRDFToUpdateRequest(Consumer sink, PrefixMapping prefixes, int bufferSize) { + super(); + if (bufferSize < 1) { + throw new IllegalArgumentException("Buffer size must be at least 1"); + } + + this.sink = Objects.requireNonNull(sink); + this.prefixes = prefixes; + this.bufferSize = bufferSize; + } + + /** + * See if we should flush the buffer. + */ + private void isBufferFull() { + if ( quadAcc.getQuads().size() >= bufferSize ) { + flush(); + } + } + + /** + * Flushes the buffer to the connection. + */ + private void flush() { + if (!quadAcc.getQuads().isEmpty()) { + UpdateRequest updateRequest = new UpdateRequest(new UpdateDataInsert(quadAcc)); + if (prefixes != null) { + updateRequest.setPrefixMapping(prefixes); + } + try { + sink.accept(updateRequest); + } finally { + quadAcc.close(); + } + quadAcc = new QuadDataAcc(); + } + } + + @Override + public void start() { + // does nothing. + } + + @Override + public void triple(Triple triple) { + quadAcc.addTriple(triple); + isBufferFull(); + } + + @Override + public void quad(Quad quad) { + quadAcc.addQuad(quad); + isBufferFull(); + } + + @Override + public void base(String base) { + // do nothing + } + + @Override + public void version(String version) {} + + @Override + public void prefix(String prefix, String iri) { + if (prefixes != null) { + prefixes.setNsPrefix(prefix, iri); + } + } + + @Override + public void finish() { + flush(); + quadAcc.close(); + } + + // ----- Utils; move to StreamRDFOps? 
----- + + static class StreamRDFTriplesToQuads + extends StreamRDFWrapper { + + protected final Node graphName; + + public StreamRDFTriplesToQuads(StreamRDF other, Node graphName) { + super(other); + this.graphName = Objects.requireNonNull(graphName); + } + + @Override + public void triple(Triple triple) { + Quad quad = Quad.create(graphName, triple); + get().quad(quad); + } + } + + /** Send triples of the source graph as quads in the given target graph to the sink. */ + static void sendGraphTriplesToStream(Graph sourceGraph, Node targetGraphName, StreamRDF sink) { + boolean isSinkDefaultGraph = targetGraphName == null || Quad.isDefaultGraph(targetGraphName); + StreamRDF effectiveSink = isSinkDefaultGraph ? sink : new StreamRDFTriplesToQuads(sink, targetGraphName); + StreamRDFOps.sendGraphTriplesToStream(sourceGraph, effectiveSink); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/engine/Timeouts.java b/jena-arq/src/main/java/org/apache/jena/sparql/engine/Timeouts.java index 5eb30c690ba..e91e63293ee 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/engine/Timeouts.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/engine/Timeouts.java @@ -233,13 +233,23 @@ public static String toString(Timeout timeout) { return result; } - // Set times from context if not set directly. e..g Context provides default values. - // Contrast with SPARQLQueryProcessor where the context is limiting values of the protocol parameter. + /** + * Update unset values in the builder with values from the context. + * + * Set times from context if not set directly, i.e. context provides default values. + * Contrast with SPARQLQueryProcessor where the context is limiting values of the protocol parameter. + */ public static void applyDefaultQueryTimeoutFromContext(TimeoutBuilderImpl builder, Context cxt) { Timeout queryTimeout = extractQueryTimeout(cxt); applyDefaultTimeout(builder, queryTimeout); } + /** Update unset values in the builder with values from the context. */ + public static void applyDefaultUpdateTimeoutFromContext(TimeoutBuilderImpl builder, Context cxt) { + Timeout queryTimeout = extractUpdateTimeout(cxt); + applyDefaultTimeout(builder, queryTimeout); + } + /** Returns milliseconds if the given time unit is null. */ private static TimeUnit nullToMillis(TimeUnit unit) { return unit != null ? unit : TimeUnit.MILLISECONDS; diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExec.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExec.java index fb8236d465c..7f2b69a6e9c 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExec.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExec.java @@ -26,6 +26,8 @@ import org.apache.jena.graph.Triple; import org.apache.jena.query.Query; import org.apache.jena.query.QueryExecution; +import org.apache.jena.sparql.adapter.SparqlAdapter; +import org.apache.jena.sparql.adapter.SparqlAdapterRegistry; import org.apache.jena.sparql.core.DatasetGraph; import org.apache.jena.sparql.core.DatasetGraphFactory; import org.apache.jena.sparql.core.Quad; @@ -46,24 +48,41 @@ public interface QueryExec extends AutoCloseable { * to get a {@link QueryExecDatasetBuilder}. */ public static QueryExecBuilder dataset(DatasetGraph dataset) { - return QueryExecDatasetBuilder.create().dataset(dataset); + return SparqlAdapterRegistry.adapt(dataset).newQuery(); } /** Create a {@link QueryExecBuilder} for a graph. 
*/ public static QueryExecBuilder graph(Graph graph) { - return QueryExecDatasetBuilder.create().graph(graph); + DatasetGraph dsg = DatasetGraphFactory.wrap(graph); + return dataset(dsg); } /** Create a {@link QueryExecBuilder} for a remote endpoint. */ public static QueryExecBuilder service(String serviceURL) { + // FIXME Apply execution tracking here or in the builder + // if there is a global event tracker? return QueryExecHTTPBuilder.create().endpoint(serviceURL); } - /** Create an uninitialized {@link QueryExecDatasetBuilder}. */ + /** + * Create an uninitialized {@link QueryExecDatasetBuilderDeferred}. + * This builder is not tied to a specific dataset implementation, and + * its dataset property can be freely modified. + * Upon build, the specialized builder for the dataset is chosen and the + * settings are transferred. + */ public static QueryExecDatasetBuilder newBuilder() { - return QueryExecDatasetBuilder.create(); + return QueryExecDatasetBuilderDeferred.create(); } + /** + * Create an uninitialized {@link QueryExecDatasetBuilder} that goes + * to the ARQ's native QueryEngineFactory system. + * Does not use the {@link SparqlAdapter} indirection mechanism. */ +// public static QueryExecDatasetBuilder newNativeBuilder() { +// return QueryExecDatasetBuilderImpl.create(); +// } + /** * The dataset against which the query will execute. May be null - the dataset * may be remote or the query itself has a dataset description. diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecBuilder.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecBuilder.java index ddfba85abc0..eceeb09704e 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecBuilder.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecBuilder.java @@ -30,6 +30,7 @@ import org.apache.jena.sparql.algebra.TableFactory; import org.apache.jena.sparql.core.Var; import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.exec.tracker.QueryExecTransform; import org.apache.jena.sparql.util.Context; import org.apache.jena.sparql.util.Symbol; @@ -71,10 +72,21 @@ public default QueryExecBuilder substitution(String var, Node value) { return substitution(Var.alloc(var), value); } + public QueryExecBuilder transformExec(QueryExecTransform queryExecTransform); + /** Set the overall query execution timeout. */ @Override public QueryExecBuilder timeout(long value, TimeUnit timeUnit); + @Override + public QueryExecBuilder timeout(long timeout); + + @Override + public QueryExecBuilder initialTimeout(long timeout, TimeUnit timeUnit); + + @Override + public QueryExecBuilder overallTimeout(long timeout, TimeUnit timeUnit); + /** * Build the {@link QueryExec}. Further changes to he builder do not affect this * {@link QueryExec}. 
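For orientation, a minimal usage sketch of the reworked entry point follows; the class name and the in-memory dataset are illustrative assumptions rather than part of the patch. QueryExec.dataset(...) now resolves its builder through SparqlAdapterRegistry.adapt(dsg), so a SparqlAdapterProvider registered for a custom DatasetGraph implementation is picked up transparently.

import org.apache.jena.sparql.core.DatasetGraph;
import org.apache.jena.sparql.core.DatasetGraphFactory;
import org.apache.jena.sparql.exec.QueryExec;

public class AdapterDispatchExample {                 // hypothetical example class, not part of this patch
    public static void main(String[] args) {
        DatasetGraph dsg = DatasetGraphFactory.createTxnMem();
        // The builder is resolved via SparqlAdapterRegistry.adapt(dsg) rather than by
        // constructing QueryExecDatasetBuilder directly; a provider registered for this
        // dataset type would supply a specialized builder here.
        try (QueryExec exec = QueryExec.dataset(dsg)
                                       .query("SELECT * { ?s ?p ?o }")
                                       .build()) {
            exec.select().forEachRemaining(System.out::println);
        }
    }
}

With no custom provider registered, SparqlAdapterProviderMain supplies SparqlAdapterDefault, which delegates to the existing dataset builders, so behaviour for plain datasets is unchanged.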
diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecBuilderAdapter.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecBuilderAdapter.java index 897abe353f6..ab31c6ea8a9 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecBuilderAdapter.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecBuilderAdapter.java @@ -18,6 +18,8 @@ package org.apache.jena.sparql.exec; +import java.util.ArrayList; +import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.concurrent.TimeUnit; @@ -31,6 +33,7 @@ import org.apache.jena.sparql.core.ResultBinding; import org.apache.jena.sparql.core.Var; import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.exec.tracker.QueryExecTransform; import org.apache.jena.sparql.util.Context; import org.apache.jena.sparql.util.ModelUtils; import org.apache.jena.sparql.util.Symbol; @@ -39,6 +42,7 @@ public class QueryExecBuilderAdapter implements QueryExecBuilder { protected QueryExecutionBuilder builder; + protected List queryExecTransforms = new ArrayList<>(); protected QueryExecBuilderAdapter(QueryExecutionBuilder builder) { super(); @@ -67,7 +71,7 @@ private static Optional tryCast(Class clz, Object obj) { } @Override - public QueryExecMod initialTimeout(long timeout, TimeUnit timeUnit) { + public QueryExecBuilder initialTimeout(long timeout, TimeUnit timeUnit) { tryCast(QueryExecutionDatasetBuilder.class, builder) .orElseThrow(() -> new UnsupportedOperationException("QueryExecBuilderAdapter.initialTimeout()")) .initialTimeout(timeout, timeUnit); @@ -75,7 +79,7 @@ public QueryExecMod initialTimeout(long timeout, TimeUnit timeUnit) { } @Override - public QueryExecMod overallTimeout(long timeout, TimeUnit timeUnit) { + public QueryExecBuilder overallTimeout(long timeout, TimeUnit timeUnit) { tryCast(QueryExecutionDatasetBuilder.class, builder) .orElseThrow(() -> new UnsupportedOperationException("QueryExecBuilderAdapter.overallTimeout()")) .overallTimeout(timeout, timeUnit); @@ -152,10 +156,20 @@ public QueryExecBuilder timeout(long timeout) { return timeout(timeout, TimeUnit.MILLISECONDS); } + @Override + public QueryExecBuilder transformExec(QueryExecTransform queryExecTransform) { + Objects.requireNonNull(queryExecTransform); + queryExecTransforms.add(queryExecTransform); + return this; + } + @Override public QueryExec build() { QueryExecution qExec = builder.build(); QueryExec result = QueryExecAdapter.adapt(qExec); + for (QueryExecTransform queryExecTransform : queryExecTransforms) { + result = queryExecTransform.transform(result); + } return result; } } diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecBuilderWrapper.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecBuilderWrapper.java new file mode 100644 index 00000000000..d75141bf46a --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecBuilderWrapper.java @@ -0,0 +1,133 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec; + +import java.util.concurrent.TimeUnit; + +import org.apache.jena.graph.Node; +import org.apache.jena.query.ARQ; +import org.apache.jena.query.Query; +import org.apache.jena.query.Syntax; +import org.apache.jena.sparql.core.Var; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.exec.tracker.QueryExecTransform; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.sparql.util.Symbol; + +public class QueryExecBuilderWrapper + extends QueryExecModWrapper + implements QueryExecBuilder +{ + public QueryExecBuilderWrapper(T delegate) { + super(delegate); + } + + @Override + public X query(Query query) { + getDelegate().query(query); + return self(); + } + + /** Set the query. */ + @Override + public X query(String queryString) { + getDelegate().query(queryString); + return self(); + } + + @Override + public QueryExecBuilder parseCheck(boolean parseCheck) { + getDelegate().parseCheck(parseCheck); + return self(); + } + + /** Set the query. */ + @Override + public X query(String queryString, Syntax syntax) { + getDelegate().query(queryString, syntax); + return self(); + } + + /** Set a context entry. */ + @Override + public X set(Symbol symbol, Object value) { + getDelegate().set(symbol, value); + return self(); + } + + /** Set a context entry. */ + @Override + public X set(Symbol symbol, boolean value) { + getDelegate().set(symbol, value); + return self(); + } + + /** + * Set the context. If not set, defaults to the system context + * ({@link ARQ#getContext}). + */ + @Override + public X context(Context context) { + getDelegate().context(context); + return self(); + } + + /** Provide a set of (Var, Node) for substitution in the query when QueryExec is built. */ + @Override + public X substitution(Binding binding) { + getDelegate().substitution(binding); + return self(); + } + + /** Provide a (Var, Node) for substitution in the query when QueryExec is built. */ + @Override + public X substitution(Var var, Node value) { + getDelegate().substitution(var, value); + return self(); + } + + /** Provide a (var name, Node) for substitution in the query when QueryExec is built. */ + @Override + public X substitution(String var, Node value) { + getDelegate().substitution(var, value); + return self(); + } + + /** Set the overall query execution timeout. */ + @Override + public X timeout(long value, TimeUnit timeUnit) { + getDelegate().timeout(value, timeUnit); + return self(); + } + + @Override + public QueryExecBuilder transformExec(QueryExecTransform queryExecTransform) { + getDelegate().transformExec(queryExecTransform); + return self(); + } + + /** + * Build the {@link QueryExec}. Further changes to the builder do not affect this + * {@link QueryExec}. 
+ */ + @Override + public QueryExec build() { + return getDelegate().build(); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDataset.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDataset.java index 32fdd214076..feca6688c5f 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDataset.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDataset.java @@ -18,10 +18,15 @@ package org.apache.jena.sparql.exec; -import java.util.*; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.NoSuchElementException; +import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; + import org.apache.jena.atlas.iterator.Iter; import org.apache.jena.atlas.json.JsonArray; import org.apache.jena.atlas.json.JsonObject; @@ -48,10 +53,10 @@ import org.apache.jena.sparql.engine.Plan; import org.apache.jena.sparql.engine.QueryEngineFactory; import org.apache.jena.sparql.engine.QueryIterator; +import org.apache.jena.sparql.engine.Timeouts.Timeout; import org.apache.jena.sparql.engine.binding.Binding; import org.apache.jena.sparql.engine.binding.BindingFactory; import org.apache.jena.sparql.engine.iterator.QueryIteratorWrapper; -import org.apache.jena.sparql.engine.Timeouts.Timeout; import org.apache.jena.sparql.graph.GraphOps; import org.apache.jena.sparql.modify.TemplateLib; import org.apache.jena.sparql.syntax.ElementGroup; @@ -63,7 +68,7 @@ public class QueryExecDataset implements QueryExec { - public static QueryExecDatasetBuilder newBuilder() { return QueryExecDatasetBuilder.create(); } + public static QueryExecDatasetBuilder newBuilder() { return QueryExecDatasetBuilderDeferred.create(); } private final Query query; private String queryString = null; diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDatasetBuilder.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDatasetBuilder.java index 4c3d84b6591..dd8161dbb26 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDatasetBuilder.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDatasetBuilder.java @@ -18,207 +18,72 @@ package org.apache.jena.sparql.exec; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; import java.util.concurrent.TimeUnit; -import org.apache.jena.atlas.logging.Log; -import org.apache.jena.graph.Graph; import org.apache.jena.graph.Node; -import org.apache.jena.query.*; -import org.apache.jena.sparql.ARQConstants; +import org.apache.jena.query.Query; +import org.apache.jena.query.Syntax; import org.apache.jena.sparql.core.DatasetGraph; -import org.apache.jena.sparql.core.DatasetGraphFactory; import org.apache.jena.sparql.core.Var; -import org.apache.jena.sparql.engine.QueryEngineFactory; -import org.apache.jena.sparql.engine.QueryEngineRegistry; import org.apache.jena.sparql.engine.binding.Binding; -import org.apache.jena.sparql.engine.Timeouts; -import org.apache.jena.sparql.engine.Timeouts.Timeout; -import org.apache.jena.sparql.engine.Timeouts.TimeoutBuilderImpl; -import org.apache.jena.sparql.syntax.syntaxtransform.QueryTransformOps; +import org.apache.jena.sparql.exec.tracker.QueryExecTransform; import org.apache.jena.sparql.util.Context; -import org.apache.jena.sparql.util.ContextAccumulator; import org.apache.jena.sparql.util.Symbol; -import org.apache.jena.sys.JenaSystem; -/** - * Query execution for 
local datasets - builder style. - */ -public class QueryExecDatasetBuilder implements QueryExecMod, QueryExecBuilder { - - static { JenaSystem.init(); } +/** Interface for dataset-centric query exec builders. */ +public interface QueryExecDatasetBuilder + extends QueryExecBuilder +{ + /** Create an uninitialized {@link QueryExecDatasetBuilderDeferred}. */ + public static QueryExecDatasetBuilder newBuilder() { + return QueryExecDatasetBuilderDeferred.create(); + } - /** Create a new builder of {@link QueryExec} for a local dataset. */ public static QueryExecDatasetBuilder create() { - QueryExecDatasetBuilder builder = new QueryExecDatasetBuilder(); - return builder; + return QueryExecDatasetBuilderDeferred.create(); } - private static final long UNSET = -1; - - private DatasetGraph dataset = null; - private Query query = null; - private String queryString = null; - - private ContextAccumulator contextAcc = - ContextAccumulator.newBuilder(()->ARQ.getContext(), ()->Context.fromDataset(dataset)); - - // Uses query rewrite to replace variables by values. - private Map substitutionMap = null; + // TODO SparqlAdapter binds QueryExecBuilder to a dsg - must not set it afterwards. + // However, QueryExecDatasetBuilder{Deferred, Impl} would both allow for changing the dataset. + QueryExecDatasetBuilder dataset(DatasetGraph dsg); - // Uses initial binding to execution (old, original) feature - private Binding initialBinding = null; - private TimeoutBuilderImpl timeoutBuilder = new TimeoutBuilderImpl(); + @Override public QueryExecDatasetBuilder query(Query query); + @Override public QueryExecDatasetBuilder query(String queryString); + @Override public QueryExecDatasetBuilder query(String queryString, Syntax syntax); + @Override public QueryExecDatasetBuilder parseCheck(boolean parseCheck); + @Override public QueryExecDatasetBuilder set(Symbol symbol, Object value); + @Override public QueryExecDatasetBuilder set(Symbol symbol, boolean value); + @Override public QueryExecDatasetBuilder context(Context context); + @Override public QueryExecDatasetBuilder substitution(Binding binding); - private QueryExecDatasetBuilder() { } + @Override public QueryExecDatasetBuilder substitution(Var var, Node value); - public Query getQuery() { return query; } - public String getQueryString() { return queryString; } - - @Override - public QueryExecDatasetBuilder query(Query query) { - this.query = query; - return this; + /** Provide a (var name, Node) for substitution in the query when QueryExec is built. */ + @Override public default QueryExecDatasetBuilder substitution(String var, Node value) { + return substitution(Var.alloc(var), value); } @Override - public QueryExecDatasetBuilder query(String queryString) { - query(queryString, Syntax.syntaxARQ); - return this; - } + public QueryExecDatasetBuilder transformExec(QueryExecTransform queryExecTransform); - /** The parse-check flag has no effect for query execs over datasets. */ + /** Set the overall query execution timeout. 
*/ @Override - public QueryExecDatasetBuilder parseCheck(boolean parseCheck) { - return this; - } + public QueryExecDatasetBuilder timeout(long value, TimeUnit timeUnit); @Override - public QueryExecDatasetBuilder query(String queryString, Syntax syntax) { - this.queryString = queryString; - this.query = QueryFactory.create(queryString, syntax); - return this; - } - - public QueryExecDatasetBuilder dataset(DatasetGraph dsg) { - this.dataset = dsg; - return this; - } - - public QueryExecDatasetBuilder graph(Graph graph) { - DatasetGraph dsg = DatasetGraphFactory.wrap(graph); - dataset(dsg); - return this; - } + public QueryExecDatasetBuilder timeout(long timeout); @Override - public QueryExecDatasetBuilder set(Symbol symbol, Object value) { - contextAcc.set(symbol, value); - return this; - } + public QueryExecDatasetBuilder initialTimeout(long timeout, TimeUnit timeUnit); @Override - public QueryExecDatasetBuilder set(Symbol symbol, boolean value) { - contextAcc.set(symbol, value); - return this; - } + public QueryExecDatasetBuilder overallTimeout(long timeout, TimeUnit timeUnit); - @Override - public QueryExecDatasetBuilder context(Context cxt) { - contextAcc.context(cxt); - return this; - } - - @Override - public Context getContext() { - return contextAcc.context(); - } - - @Override - public QueryExecDatasetBuilder substitution(Binding binding) { - ensureSubstitutionMap(); - binding.forEach(this.substitutionMap::put); - return this; - } - - @Override - public QueryExecDatasetBuilder substitution(Var var, Node value) { - ensureSubstitutionMap(); - this.substitutionMap.put(var, value); - return this; - } - - private void ensureSubstitutionMap() { - if ( substitutionMap == null ) - substitutionMap = new HashMap<>(); - } /** @deprecated Use {@link #substitution(Binding)} */ @Deprecated(forRemoval = true) - public QueryExecDatasetBuilder initialBinding(Binding binding) { - this.initialBinding = binding; - return this; - } - - @Override - public QueryExecDatasetBuilder timeout(long timeout) { - return timeout(timeout, TimeUnit.MILLISECONDS); - } + public QueryExecDatasetBuilder initialBinding(Binding binding); - @Override - public QueryExecDatasetBuilder timeout(long timeout, TimeUnit timeUnit) { - timeoutBuilder.timeout(timeout, timeUnit); - return this; - } - - @Override - public QueryExecDatasetBuilder initialTimeout(long timeout, TimeUnit timeUnit) { - timeoutBuilder.initialTimeout(timeout, timeUnit); - return this; - } - - @Override - public QueryExecDatasetBuilder overallTimeout(long timeout, TimeUnit timeUnit) { - timeoutBuilder.overallTimeout(timeout, timeUnit); - return this; - } - - @Override - public QueryExec build() { - Objects.requireNonNull(query, "No query for QueryExec"); - // Queries can have FROM/FROM NAMED or VALUES to get data. - //Objects.requireNonNull(dataset, "No dataset for QueryExec"); - query.ensureResultVars(); - Context cxt = getContext(); - - QueryEngineFactory qeFactory = QueryEngineRegistry.findFactory(query, dataset, cxt); - if ( qeFactory == null ) { - Log.warn(QueryExecDatasetBuilder.class, "Failed to find a QueryEngineFactory"); - return null; - } - - // Initial bindings / parameterized query - Query queryActual = query; - String queryStringActual = queryString; - - if ( substitutionMap != null && ! 
substitutionMap.isEmpty() ) { - queryActual = QueryTransformOps.replaceVars(query, substitutionMap); - queryStringActual = null; - } - - Timeouts.applyDefaultQueryTimeoutFromContext(this.timeoutBuilder, cxt); - - if ( dataset != null ) - cxt.set(ARQConstants.sysCurrentDataset, DatasetFactory.wrap(dataset)); - if ( queryActual != null ) - cxt.set(ARQConstants.sysCurrentQuery, queryActual); - - Timeout timeout = timeoutBuilder.build(); - - QueryExec qExec = new QueryExecDataset(queryActual, queryStringActual, dataset, cxt, qeFactory, - timeout, initialBinding); - return qExec; - } + Query getQuery(); + String getQueryString(); } diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDatasetBuilderBase.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDatasetBuilderBase.java new file mode 100644 index 00000000000..900b02f0b96 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDatasetBuilderBase.java @@ -0,0 +1,193 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.TimeUnit; + +import org.apache.jena.graph.Node; +import org.apache.jena.query.ARQ; +import org.apache.jena.query.Query; +import org.apache.jena.query.QueryFactory; +import org.apache.jena.query.Syntax; +import org.apache.jena.sparql.adapter.ParseCheckUtils; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.core.Var; +import org.apache.jena.sparql.engine.Timeouts.TimeoutBuilderImpl; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.exec.tracker.QueryExecTransform; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.sparql.util.ContextAccumulator; +import org.apache.jena.sparql.util.Symbol; +import org.apache.jena.sys.JenaSystem; + +/** + * Query execution for local datasets - builder style. + */ +// TODO Merge this class into QueryExecDatasetBuilderDeferredBase and remove it? +public abstract class QueryExecDatasetBuilderBase> + implements QueryExecDatasetBuilder +{ + + static { JenaSystem.init(); } + + protected DatasetGraph dataset = null; + protected Query query = null; + protected String queryString = null; + protected Syntax syntax = null; + protected Boolean parseCheck = null; + + protected ContextAccumulator contextAcc = + ContextAccumulator.newBuilder(()->ARQ.getContext(), ()->Context.fromDataset(dataset)); + + // Uses query rewrite to replace variables by values. 
+ protected Map substitutionMap = null; + + // Uses initial binding to execution (old, original) feature + protected Binding initialBinding = null; + protected TimeoutBuilderImpl timeoutBuilder = new TimeoutBuilderImpl(); + + protected List queryExecTransforms = new ArrayList<>(); + + protected QueryExecDatasetBuilderBase() { } + + public Query getQuery() { return query; } + public String getQueryString() { return queryString; } + + @SuppressWarnings("unchecked") + protected X thisBuilder() { + return (X)this; + } + + @Override + public X query(Query query) { + this.query = query; + this.queryString = null; + this.syntax = null; + return thisBuilder(); + } + + @Override + public X query(String queryString) { + query(queryString, Syntax.syntaxARQ); + return thisBuilder(); + } + + @Override + public X query(String queryString, Syntax syntax) { + boolean parseCheck = effectiveParseCheck(); + this.query = parseCheck ? QueryFactory.create(queryString, syntax) : null; + this.queryString = queryString; + this.syntax = syntax; + return thisBuilder(); + } + + @Override + public X parseCheck(boolean parseCheck) { + this.parseCheck = parseCheck; + return thisBuilder(); + } + + protected boolean effectiveParseCheck() { + return ParseCheckUtils.effectiveParseCheck(parseCheck, contextAcc); + } + + @Override + public X set(Symbol symbol, Object value) { + contextAcc.set(symbol, value); + return thisBuilder(); + } + + @Override + public X set(Symbol symbol, boolean value) { + contextAcc.set(symbol, value); + return thisBuilder(); + } + + @Override + public X context(Context cxt) { + contextAcc.context(cxt); + return thisBuilder(); + } + + @Override + public Context getContext() { + return contextAcc.context(); + } + + @Override + public X substitution(Binding binding) { + ensureSubstitutionMap(); + binding.forEach(this.substitutionMap::put); + return thisBuilder(); + } + + @Override + public X substitution(Var var, Node value) { + ensureSubstitutionMap(); + this.substitutionMap.put(var, value); + return thisBuilder(); + } + + private void ensureSubstitutionMap() { + if ( substitutionMap == null ) + substitutionMap = new HashMap<>(); + } + + /** @deprecated Use {@link #substitution(Binding)} */ + @Deprecated(forRemoval = true) + public X initialBinding(Binding binding) { + this.initialBinding = binding; + return thisBuilder(); + } + + @Override + public X timeout(long timeout) { + return timeout(timeout, TimeUnit.MILLISECONDS); + } + + @Override + public X timeout(long timeout, TimeUnit timeUnit) { + timeoutBuilder.timeout(timeout, timeUnit); + return thisBuilder(); + } + + @Override + public X initialTimeout(long timeout, TimeUnit timeUnit) { + timeoutBuilder.initialTimeout(timeout, timeUnit); + return thisBuilder(); + } + + @Override + public X overallTimeout(long timeout, TimeUnit timeUnit) { + timeoutBuilder.overallTimeout(timeout, timeUnit); + return thisBuilder(); + } + + @Override + public X transformExec(QueryExecTransform queryExecTransform) { + Objects.requireNonNull(queryExecTransform); + queryExecTransforms.add(queryExecTransform); + return thisBuilder(); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDatasetBuilderDeferred.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDatasetBuilderDeferred.java new file mode 100644 index 00000000000..552a0db5599 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDatasetBuilderDeferred.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * 
or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec; + +import org.apache.jena.sparql.adapter.SparqlAdapter; +import org.apache.jena.sparql.adapter.SparqlAdapterRegistry; + +/** + * QueryExecBuilder that chooses the actual builder only when build is called. + * The base class handles transferring the settings. + */ +public class QueryExecDatasetBuilderDeferred + extends QueryExecDatasetBuilderDeferredBase +{ + public static QueryExecDatasetBuilderDeferred create() { + return new QueryExecDatasetBuilderDeferred(); + } + + @Override + protected QueryExecBuilder newActualExecBuilder() { + SparqlAdapter adapter = SparqlAdapterRegistry.adapt(dataset); + return adapter.newQuery(); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDatasetBuilderDeferredBase.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDatasetBuilderDeferredBase.java new file mode 100644 index 00000000000..7165ba55e98 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDatasetBuilderDeferredBase.java @@ -0,0 +1,109 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec; + +import java.util.Map.Entry; + +import org.apache.jena.graph.Graph; +import org.apache.jena.graph.Node; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.core.DatasetGraphFactory; +import org.apache.jena.sparql.core.Var; +import org.apache.jena.sparql.engine.Timeouts.Timeout; +import org.apache.jena.sparql.exec.tracker.QueryExecTransform; +import org.apache.jena.sparql.util.Context; + +/** + * Deferred QueryExecBuilder that during build creates the target builder. + * The settings of this builder are then transferred to the target builder. 
+ */ +public abstract class QueryExecDatasetBuilderDeferredBase> + extends QueryExecDatasetBuilderBase +{ + @Override + public QueryExecDatasetBuilderDeferredBase dataset(DatasetGraph dsg) { + this.dataset = dsg; + return thisBuilder(); + } + + public QueryExecDatasetBuilderDeferredBase graph(Graph graph) { + DatasetGraph dsg = DatasetGraphFactory.wrap(graph); + dataset(dsg); + return thisBuilder(); + } + + /** This method must be implemented. */ + protected abstract QueryExecBuilder newActualExecBuilder(); + + @Override + public QueryExec build() { + QueryExecBuilder qeb = newActualExecBuilder(); + qeb = applySettings(qeb); + QueryExec qe = qeb.build(); + return qe; + } + + /** Transfer settings from this builder to to the destination. */ + protected QueryExecBuilder applySettings(QueryExecBuilder dest) { + // Make sure to set parseCheck before setting the query string. + if (parseCheck != null) { + dest = dest.parseCheck(parseCheck); + } + + if (query != null) { + dest = dest.query(query); + } else if (queryString != null) { + dest = dest.query(queryString, syntax); + } + + // Transfer context settings. + // Because of QueryExecCompat we just transfer the built context: + Context cxt = contextAcc.context(); + dest = dest.context(cxt); + + if (initialBinding != null) { + if (dest instanceof QueryExecDatasetBuilder dsBuilder) { + dest = dsBuilder.initialBinding(initialBinding); + } else { + throw new UnsupportedOperationException("The target builder does not support initial bindings."); + } + } + + if (substitutionMap != null) { + for (Entry e : substitutionMap.entrySet()) { + dest = dest.substitution(e.getKey(), e.getValue()); + } + } + + Timeout timeout = timeoutBuilder.build(); + if (timeout.hasInitialTimeout()) { + dest = dest.initialTimeout(timeout.initialTimeout().amount(), timeout.initialTimeout().unit()); + } + + if (timeout.hasOverallTimeout()) { + dest = dest.overallTimeout(timeout.overallTimeout().amount(), timeout.overallTimeout().unit()); + } + + for (QueryExecTransform execTransform : queryExecTransforms) { + dest = dest.transformExec(execTransform); + } + + return dest; + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDatasetBuilderImpl.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDatasetBuilderImpl.java new file mode 100644 index 00000000000..d2a7404356d --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecDatasetBuilderImpl.java @@ -0,0 +1,242 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.jena.sparql.exec; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.TimeUnit; + +import org.apache.jena.atlas.logging.Log; +import org.apache.jena.graph.Graph; +import org.apache.jena.graph.Node; +import org.apache.jena.query.ARQ; +import org.apache.jena.query.DatasetFactory; +import org.apache.jena.query.Query; +import org.apache.jena.query.QueryFactory; +import org.apache.jena.query.Syntax; +import org.apache.jena.sparql.ARQConstants; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.core.DatasetGraphFactory; +import org.apache.jena.sparql.core.Var; +import org.apache.jena.sparql.engine.QueryEngineFactory; +import org.apache.jena.sparql.engine.QueryEngineRegistry; +import org.apache.jena.sparql.engine.Timeouts; +import org.apache.jena.sparql.engine.Timeouts.Timeout; +import org.apache.jena.sparql.engine.Timeouts.TimeoutBuilderImpl; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.exec.tracker.QueryExecTransform; +import org.apache.jena.sparql.syntax.syntaxtransform.QueryTransformOps; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.sparql.util.ContextAccumulator; +import org.apache.jena.sparql.util.Symbol; +import org.apache.jena.sys.JenaSystem; + +/** + * Query execution for local datasets - builder style. + */ +public class QueryExecDatasetBuilderImpl implements QueryExecMod, QueryExecDatasetBuilder { + + static { JenaSystem.init(); } + + /** Create a new builder of {@link QueryExec} for a local dataset. */ + public static QueryExecDatasetBuilder create() { + return new QueryExecDatasetBuilderImpl(); + } + + private DatasetGraph dataset = null; + private Query query = null; + private String queryString = null; + + private ContextAccumulator contextAcc = + ContextAccumulator.newBuilder(()->ARQ.getContext(), ()->Context.fromDataset(dataset)); + + // Uses query rewrite to replace variables by values. 
+ private Map substitutionMap = null; + + // Uses initial binding to execution (old, original) feature + private Binding initialBinding = null; + private TimeoutBuilderImpl timeoutBuilder = new TimeoutBuilderImpl(); + + private List queryExecTransforms = new ArrayList<>(); + + private QueryExecDatasetBuilderImpl() { } + + public Query getQuery() { return query; } + public String getQueryString() { return queryString; } + + @Override + public QueryExecDatasetBuilderImpl query(Query query) { + this.query = query; + this.queryString = null; + return this; + } + + @Override + public QueryExecDatasetBuilderImpl query(String queryString) { + query(queryString, Syntax.syntaxARQ); + return this; + } + + @Override + public QueryExecDatasetBuilderImpl query(String queryString, Syntax syntax) { + this.query = QueryFactory.create(queryString, syntax); + return this; + } + + @Override + public QueryExecDatasetBuilderImpl parseCheck(boolean parseCheck) { + return this; + } + + public QueryExecDatasetBuilderImpl dataset(DatasetGraph dsg) { + this.dataset = dsg; + return this; + } + + public QueryExecDatasetBuilderImpl graph(Graph graph) { + DatasetGraph dsg = DatasetGraphFactory.wrap(graph); + dataset(dsg); + return this; + } + + @Override + public QueryExecDatasetBuilderImpl set(Symbol symbol, Object value) { + contextAcc.set(symbol, value); + return this; + } + + @Override + public QueryExecDatasetBuilderImpl set(Symbol symbol, boolean value) { + contextAcc.set(symbol, value); + return this; + } + + @Override + public QueryExecDatasetBuilderImpl context(Context cxt) { + contextAcc.context(cxt); + return this; + } + + @Override + public Context getContext() { + return contextAcc.context(); + } + + @Override + public QueryExecDatasetBuilderImpl substitution(Binding binding) { + ensureSubstitutionMap(); + binding.forEach(this.substitutionMap::put); + return this; + } + + @Override + public QueryExecDatasetBuilderImpl substitution(Var var, Node value) { + ensureSubstitutionMap(); + this.substitutionMap.put(var, value); + return this; + } + + private void ensureSubstitutionMap() { + if ( substitutionMap == null ) + substitutionMap = new HashMap<>(); + } + + /** @deprecated Use {@link #substitution(Binding)} */ + @Deprecated(forRemoval = true) + @Override + public QueryExecDatasetBuilderImpl initialBinding(Binding binding) { + this.initialBinding = binding; + return this; + } + + @Override + public QueryExecDatasetBuilderImpl transformExec(QueryExecTransform queryExecTransform) { + Objects.requireNonNull(queryExecTransform); + queryExecTransforms.add(queryExecTransform); + return this; + } + + @Override + public QueryExecDatasetBuilderImpl timeout(long timeout) { + return timeout(timeout, TimeUnit.MILLISECONDS); + } + + @Override + public QueryExecDatasetBuilderImpl timeout(long timeout, TimeUnit timeUnit) { + timeoutBuilder.timeout(timeout, timeUnit); + return this; + } + + @Override + public QueryExecDatasetBuilderImpl initialTimeout(long timeout, TimeUnit timeUnit) { + timeoutBuilder.initialTimeout(timeout, timeUnit); + return this; + } + + @Override + public QueryExecDatasetBuilderImpl overallTimeout(long timeout, TimeUnit timeUnit) { + timeoutBuilder.overallTimeout(timeout, timeUnit); + return this; + } + + @Override + public QueryExec build() { + Objects.requireNonNull(query, "No query for QueryExec"); + // Queries can have FROM/FROM NAMED or VALUES to get data. 
+ //Objects.requireNonNull(dataset, "No dataset for QueryExec"); + query.ensureResultVars(); + Context cxt = getContext(); + + QueryEngineFactory qeFactory = QueryEngineRegistry.findFactory(query, dataset, cxt); + if ( qeFactory == null ) { + Log.warn(QueryExecDatasetBuilderImpl.class, "Failed to find a QueryEngineFactory"); + return null; + } + + // Initial bindings / parameterized query + Query queryActual = query; + String queryStringActual = queryString; + + if ( substitutionMap != null && ! substitutionMap.isEmpty() ) { + queryActual = QueryTransformOps.replaceVars(query, substitutionMap); + queryStringActual = null; + } + + Timeouts.applyDefaultQueryTimeoutFromContext(this.timeoutBuilder, cxt); + + if ( dataset != null ) + cxt.set(ARQConstants.sysCurrentDataset, DatasetFactory.wrap(dataset)); + if ( queryActual != null ) + cxt.set(ARQConstants.sysCurrentQuery, queryActual); + + Timeout timeout = timeoutBuilder.build(); + + QueryExec qExec = new QueryExecDataset(queryActual, queryStringActual, dataset, cxt, qeFactory, + timeout, initialBinding); + + for (QueryExecTransform queryExecTransform : queryExecTransforms) { + qExec = queryExecTransform.transform(qExec); + } + + return qExec; + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecModWrapper.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecModWrapper.java new file mode 100644 index 00000000000..acfc2a127c7 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecModWrapper.java @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.jena.sparql.exec; + +import java.util.concurrent.TimeUnit; + +import org.apache.jena.sparql.util.Context; + +public class QueryExecModWrapper + implements QueryExecMod +{ + protected T delegate; + + public QueryExecModWrapper(T delegate) { + super(); + this.delegate = delegate; + } + + protected T getDelegate() { + return delegate; + } + + @SuppressWarnings("unchecked") + public X self() { + return (X)this; + } + + @Override + public X timeout(long timeout) { + getDelegate().timeout(timeout, TimeUnit.MILLISECONDS); + return self(); + } + + @Override + public X timeout(long timeout, TimeUnit timeoutUnits) { + getDelegate().timeout(timeout, timeoutUnits); + return self(); + } + + @Override + public X initialTimeout(long timeout, TimeUnit timeUnit) { + getDelegate().initialTimeout(timeout, timeUnit); + return self(); + } + + @Override + public X overallTimeout(long timeout, TimeUnit timeUnit) { + getDelegate().overallTimeout(timeout, timeUnit); + return self(); + } + + @Override + public Context getContext() { + return getDelegate().getContext(); + } + + @Override + public QueryExec build() { + return getDelegate().build(); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecWrapper.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecWrapper.java new file mode 100644 index 00000000000..725c6fb479f --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/QueryExecWrapper.java @@ -0,0 +1,151 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.jena.sparql.exec; + +import java.util.Iterator; +import java.util.function.Supplier; + +import org.apache.jena.atlas.json.JsonArray; +import org.apache.jena.atlas.json.JsonObject; +import org.apache.jena.graph.Graph; +import org.apache.jena.graph.Triple; +import org.apache.jena.query.Query; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.core.Quad; +import org.apache.jena.sparql.util.Context; + +public abstract class QueryExecWrapper + implements QueryExec +{ + private X delegate; + + public QueryExecWrapper(X delegate) { + super(); + this.delegate = delegate; + } + + protected X getDelegate() { + return delegate; + } + + @Override + public Context getContext() { + return getDelegate().getContext(); + } + + @Override + public Query getQuery() { + return getDelegate().getQuery(); + } + + @Override + public String getQueryString() { + return getDelegate().getQueryString(); + } + + @Override + public void close() { + getDelegate().close(); + } + + @Override + public boolean isClosed() { + return getDelegate().isClosed(); + } + + @Override + public void abort() { + getDelegate().abort(); + } + + @Override + public RowSet select() { + return exec(() -> getDelegate().select()); + } + + @Override + public Graph construct() { + return exec(() -> getDelegate().construct()); + } + + @Override + public Graph construct(Graph graph) { + return exec(() -> getDelegate().construct(graph)); + } + + @Override + public Graph describe() { + return exec(() -> getDelegate().describe()); + } + + @Override + public Graph describe(Graph graph) { + return exec(() -> getDelegate().describe(graph)); + } + + @Override + public boolean ask() { + return exec(() -> getDelegate().ask()); + } + + @Override + public Iterator constructTriples() { + return exec(() -> getDelegate().constructTriples()); + } + + @Override + public Iterator describeTriples() { + return exec(() -> getDelegate().describeTriples()); + } + + @Override + public Iterator constructQuads() { + return exec(() -> getDelegate().constructQuads()); + } + + @Override + public DatasetGraph constructDataset() { + return exec(() -> getDelegate().constructDataset()); + } + + @Override + public DatasetGraph constructDataset(DatasetGraph dataset) { + return exec(() -> getDelegate().constructDataset(dataset)); + } + + @Override + public JsonArray execJson() { + return exec(() -> getDelegate().execJson()); + } + + @Override + public Iterator execJsonItems() { + return exec(() -> getDelegate().execJsonItems()); + } + + @Override + public DatasetGraph getDataset() { + return getDelegate().getDataset(); + } + + protected T exec(Supplier supplier) { + T result = supplier.get(); + return result; + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExec.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExec.java index 1ef542bce01..fbbbca45af6 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExec.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExec.java @@ -19,6 +19,7 @@ package org.apache.jena.sparql.exec; import org.apache.jena.graph.Graph; +import org.apache.jena.sparql.adapter.SparqlAdapterRegistry; import org.apache.jena.sparql.core.DatasetGraph; import org.apache.jena.sparql.core.DatasetGraphFactory; import org.apache.jena.sparql.exec.http.UpdateExecHTTP; @@ -28,7 +29,7 @@ public interface UpdateExec extends UpdateProcessor { /** Create a {@link UpdateExecBuilder} for a dataset. 
*/ public static UpdateExecBuilder dataset(DatasetGraph dataset) { - return UpdateExecDatasetBuilder.create().dataset(dataset); + return SparqlAdapterRegistry.adapt(dataset).newUpdate(); } /** @@ -37,7 +38,7 @@ public static UpdateExecBuilder dataset(DatasetGraph dataset) { */ public static UpdateExecBuilder dataset(Graph graph) { DatasetGraph dsg = DatasetGraphFactory.wrap(graph); - return UpdateExecDatasetBuilder.create().dataset(dsg); + return dataset(dsg); } /** Create a {@link UpdateExecBuilder} for a remote endpoint. */ @@ -45,11 +46,7 @@ public static UpdateExecBuilder service(String serviceURL) { return UpdateExecHTTP.newBuilder().endpoint(serviceURL); } - public static UpdateExecDatasetBuilder newBuilder() { - return UpdateExecDatasetBuilder.create(); + public static UpdateExecDatasetBuilderDeferred newBuilder() { + return new UpdateExecDatasetBuilderDeferred(); } - - /** Execute */ - @Override - public void execute(); } diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecAdapter.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecAdapter.java index b97c2af0420..45968427517 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecAdapter.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecAdapter.java @@ -20,6 +20,7 @@ import org.apache.jena.sparql.util.Context; import org.apache.jena.update.UpdateExecution; +import org.apache.jena.update.UpdateRequest; public class UpdateExecAdapter implements UpdateExec { @@ -38,6 +39,16 @@ protected UpdateExecAdapter(UpdateExecution updateProc) { this.updateProc = updateProc; } + @Override + public UpdateRequest getUpdateRequest() { + return updateProc.getUpdateRequest(); + } + + @Override + public String getUpdateRequestString() { + return updateProc.getUpdateRequestString(); + } + @Override public void execute() { updateProc.execute(); } diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecBuilder.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecBuilder.java index 02f56413c90..e7aa0c6342e 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecBuilder.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecBuilder.java @@ -24,6 +24,7 @@ import org.apache.jena.query.ARQ; import org.apache.jena.sparql.core.Var; import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.exec.tracker.UpdateExecTransform; import org.apache.jena.sparql.util.Context; import org.apache.jena.sparql.util.Symbol; import org.apache.jena.update.Update; @@ -67,6 +68,9 @@ public default UpdateExecBuilder substitution(String var, Node value) { return substitution(Var.alloc(var), value); } + /** Add a transform that gets applied when building the UpdateExec instance. 
*/ + public UpdateExecBuilder transformExec(UpdateExecTransform updateExecTransform); + public UpdateExecBuilder timeout(long value, TimeUnit timeUnit); public UpdateExec build(); diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecBuilderAdapter.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecBuilderAdapter.java index 461373bb0e7..a23646d89af 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecBuilderAdapter.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecBuilderAdapter.java @@ -18,6 +18,8 @@ package org.apache.jena.sparql.exec; +import java.util.ArrayList; +import java.util.List; import java.util.Objects; import java.util.concurrent.TimeUnit; @@ -25,6 +27,7 @@ import org.apache.jena.sparql.core.ResultBinding; import org.apache.jena.sparql.core.Var; import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.exec.tracker.UpdateExecTransform; import org.apache.jena.sparql.util.Context; import org.apache.jena.sparql.util.ModelUtils; import org.apache.jena.sparql.util.Symbol; @@ -38,6 +41,7 @@ public class UpdateExecBuilderAdapter implements UpdateExecBuilder { protected UpdateExecutionBuilder builder; + protected List updateExecTransforms = new ArrayList<>(); protected UpdateExecBuilderAdapter(UpdateExecutionBuilder delegate) { super(); @@ -125,10 +129,20 @@ public UpdateExecBuilder timeout(long timeout, TimeUnit timeoutUnit) { return this; } + @Override + public UpdateExecBuilder transformExec(UpdateExecTransform updateExecTransform) { + Objects.requireNonNull(updateExecTransform); + updateExecTransforms.add(updateExecTransform); + return this; + } + @Override public UpdateExec build() { UpdateExecution updateExec = builder.build(); UpdateExec result = UpdateExecAdapter.adapt(updateExec); + for (UpdateExecTransform updateExecTransform : updateExecTransforms) { + result = updateExecTransform.transform(result); + } return result; } } diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecBuilderWrapper.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecBuilderWrapper.java new file mode 100644 index 00000000000..8b613f32218 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecBuilderWrapper.java @@ -0,0 +1,133 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.jena.sparql.exec; + +import java.util.concurrent.TimeUnit; + +import org.apache.jena.graph.Node; +import org.apache.jena.sparql.core.Var; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.exec.tracker.UpdateExecTransform; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.sparql.util.Symbol; +import org.apache.jena.update.Update; +import org.apache.jena.update.UpdateRequest; + +public class UpdateExecBuilderWrapper + implements UpdateExecBuilder +{ + private T delegate; + + public UpdateExecBuilderWrapper(T delegate) { + super(); + this.delegate = delegate; + } + + protected T getDelegate() { + return delegate; + } + + @SuppressWarnings("unchecked") + protected X self() { + return (X)this; + } + + @Override + public X update(UpdateRequest request) { + getDelegate().update(request); + return self(); + } + + @Override + public X update(Update update) { + getDelegate().update(update); + return self(); + } + + @Override + public X update(String updateString) { + getDelegate().update(updateString); + return self(); + } + + @Override + public X parseCheck(boolean parseCheck) { + getDelegate().parseCheck(parseCheck); + return self(); + } + + @Override + public X set(Symbol symbol, Object value) { + getDelegate().set(symbol, value); + return self(); + } + + @Override + public X set(Symbol symbol, boolean value) { + getDelegate().set(symbol, value); + return self(); + } + + @Override + public X context(Context context) { + getDelegate().context(context); + return self(); + } + + @Override + public X substitution(Binding binding) { + getDelegate().substitution(binding); + return self(); + } + + @Override + public X substitution(Var var, Node value) { + getDelegate().substitution(var, value); + return self(); + } + + @Override + public X substitution(String var, Node value) { + getDelegate().substitution(var, value); + return self(); + } + + @Override + public X timeout(long value, TimeUnit timeUnit) { + getDelegate().timeout(value, timeUnit); + return self(); + } + + @Override + public UpdateExecBuilder transformExec(UpdateExecTransform updateExecTransform) { + getDelegate().transformExec(updateExecTransform); + return self(); + } + + @Override + public UpdateExec build() { + UpdateExec result = getDelegate().build(); + return result; + } + + @Override + public void execute() { + getDelegate().execute(); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecDatasetBuilder.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecDatasetBuilder.java index e1601177833..83a7eb4313a 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecDatasetBuilder.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecDatasetBuilder.java @@ -18,181 +18,43 @@ package org.apache.jena.sparql.exec; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; import java.util.concurrent.TimeUnit; import org.apache.jena.graph.Node; -import org.apache.jena.query.ARQ; import org.apache.jena.sparql.core.DatasetGraph; import org.apache.jena.sparql.core.Var; import org.apache.jena.sparql.engine.binding.Binding; -import org.apache.jena.sparql.engine.Timeouts.Timeout; -import org.apache.jena.sparql.engine.Timeouts.TimeoutBuilderImpl; -import org.apache.jena.sparql.modify.UpdateEngineFactory; -import org.apache.jena.sparql.modify.UpdateEngineRegistry; -import org.apache.jena.sparql.syntax.syntaxtransform.UpdateTransformOps; +import 
org.apache.jena.sparql.exec.tracker.UpdateExecTransform; import org.apache.jena.sparql.util.Context; -import org.apache.jena.sparql.util.ContextAccumulator; import org.apache.jena.sparql.util.Symbol; import org.apache.jena.update.Update; -import org.apache.jena.update.UpdateException; -import org.apache.jena.update.UpdateFactory; import org.apache.jena.update.UpdateRequest; -public class UpdateExecDatasetBuilder implements UpdateExecBuilder { - - public static UpdateExecDatasetBuilder create() { return new UpdateExecDatasetBuilder(); } - - private DatasetGraph dataset = null; - private ContextAccumulator contextAcc = ContextAccumulator.newBuilder(()->ARQ.getContext(), ()->Context.fromDataset(dataset)); - - // Uses query rewrite to replace variables by values. - private Map substitutionMap = null; - - private Binding initialBinding = null; - - private TimeoutBuilderImpl timeoutBuilder = new TimeoutBuilderImpl(); - - private UpdateRequest update = null; - private UpdateRequest updateRequest = new UpdateRequest(); - - private UpdateExecDatasetBuilder() {} - - /** Append the updates in an {@link UpdateRequest} to the {@link UpdateRequest} being built. */ - @Override - public UpdateExecDatasetBuilder update(UpdateRequest updateRequest) { - Objects.requireNonNull(updateRequest); - add(updateRequest); - return this; - } - - /** Add the {@link Update} to the {@link UpdateRequest} being built. */ - @Override - public UpdateExecDatasetBuilder update(Update update) { - Objects.requireNonNull(update); - add(update); - return this; - } - - /** Parse and update operations to the {@link UpdateRequest} being built. */ - @Override - public UpdateExecDatasetBuilder update(String updateRequestString) { - UpdateRequest more = UpdateFactory.create(updateRequestString); - add(more); - return this; - } - - /** Hint has no effect on update execs over datasets. */ - @Override - public UpdateExecBuilder parseCheck(boolean parseCheck) { - return this; - } - - public UpdateExecDatasetBuilder dataset(DatasetGraph dsg) { - this.dataset = dsg; - return this; - } - - /** Set the {@link Context}. - * This defaults to the global settings of {@code ARQ.getContext()}. - * If there was a previous call of {@code context} the multiple contexts are merged. 
- * */ - @Override - public UpdateExecDatasetBuilder context(Context context) { - if ( context == null ) - return this; - this.contextAcc.context(context); - return this; - } - - @Override - public UpdateExecDatasetBuilder set(Symbol symbol, Object value) { - this.contextAcc.set(symbol, value); - return this; - } - - @Override - public UpdateExecDatasetBuilder set(Symbol symbol, boolean value) { - this.contextAcc.set(symbol, value); - return this; - } - - public Context getContext() { - return contextAcc.context(); - } - - @Override - public UpdateExecDatasetBuilder substitution(Binding binding) { - ensureSubstitutionMap(); - binding.forEach(this.substitutionMap::put); - return this; - } - - @Override - public UpdateExecDatasetBuilder substitution(Var var, Node value) { - ensureSubstitutionMap(); - this.substitutionMap.put(var, value); - return this; - } - - private void ensureSubstitutionMap() { - if ( substitutionMap == null ) - substitutionMap = new HashMap<>(); - } - - @Override - public UpdateExecDatasetBuilder timeout(long timeout, TimeUnit timeoutUnit) { - this.timeoutBuilder.timeout(timeout, timeoutUnit); - return this; - } +public interface UpdateExecDatasetBuilder + extends UpdateExecBuilder +{ + public static UpdateExecDatasetBuilder create() { return new UpdateExecDatasetBuilderDeferred(); } /** @deprecated Use {@link #substitution(Binding)} */ @Deprecated(forRemoval = true) - public UpdateExecDatasetBuilder initialBinding(Binding initialBinding) { - this.initialBinding = initialBinding; - return this; - } - - @Override - public UpdateExec build() { - Objects.requireNonNull(dataset, "No dataset for update"); - Objects.requireNonNull(updateRequest, "No update request"); - - UpdateRequest actualUpdate = updateRequest; - - if ( substitutionMap != null && ! 
substitutionMap.isEmpty() ) - actualUpdate = UpdateTransformOps.transform(actualUpdate, substitutionMap); - - Context cxt = getContext(); - UpdateEngineFactory f = UpdateEngineRegistry.get().find(dataset, cxt); - if ( f == null ) - throw new UpdateException("Failed to find an UpdateEngine"); - - Timeout timeout = timeoutBuilder.build(); - - UpdateExec uExec = new UpdateExecDataset(actualUpdate, dataset, initialBinding, cxt, f, timeout); - return uExec; - } - - // Abbreviated forms - - @Override - public void execute() { - build().execute(); - } - - public void execute(DatasetGraph dsg) { - dataset(dsg); - execute(); - } - - private void add(UpdateRequest request) { - request.getOperations().forEach(this::add); - } - - private void add(Update update) { - this.updateRequest.add(update); - } + public UpdateExecDatasetBuilder initialBinding(Binding initialBinding); + + public UpdateExecDatasetBuilder dataset(DatasetGraph dataset); + + @Override public UpdateExecDatasetBuilder update(UpdateRequest request); + @Override public UpdateExecDatasetBuilder update(Update update); + @Override public UpdateExecDatasetBuilder update(String updateString); + @Override public UpdateExecDatasetBuilder parseCheck(boolean parseCheck); + @Override public UpdateExecDatasetBuilder set(Symbol symbol, Object value); + @Override public UpdateExecDatasetBuilder set(Symbol symbol, boolean value); + @Override public UpdateExecDatasetBuilder context(Context context); + @Override public UpdateExecDatasetBuilder substitution(Binding binding); + @Override public UpdateExecDatasetBuilder substitution(Var var, Node value); + @Override public default UpdateExecBuilder substitution(String var, Node value) { + return substitution(Var.alloc(var), value); + } + + /** Add a transform that gets applied when building the UpdateExec instance. */ + @Override public UpdateExecDatasetBuilder transformExec(UpdateExecTransform updateExecTransform); + @Override public UpdateExecDatasetBuilder timeout(long value, TimeUnit timeUnit); } diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecDatasetBuilderBase.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecDatasetBuilderBase.java new file mode 100644 index 00000000000..26fc48a8206 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecDatasetBuilderBase.java @@ -0,0 +1,187 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.jena.sparql.exec; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.TimeUnit; + +import org.apache.jena.graph.Node; +import org.apache.jena.http.sys.UpdateEltAcc; +import org.apache.jena.query.ARQ; +import org.apache.jena.sparql.adapter.ParseCheckUtils; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.core.Var; +import org.apache.jena.sparql.engine.Timeouts.TimeoutBuilderImpl; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.exec.tracker.UpdateExecTransform; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.sparql.util.ContextAccumulator; +import org.apache.jena.sparql.util.Symbol; +import org.apache.jena.update.Update; +import org.apache.jena.update.UpdateFactory; +import org.apache.jena.update.UpdateRequest; + +public abstract class UpdateExecDatasetBuilderBase> + implements UpdateExecDatasetBuilder +{ + + protected DatasetGraph dataset = null; + protected ContextAccumulator contextAcc = ContextAccumulator.newBuilder(()->ARQ.getContext(), ()->Context.fromDataset(dataset)); + + // Uses query rewrite to replace variables by values. + protected Map substitutionMap = null; + + protected Binding initialBinding = null; + + protected TimeoutBuilderImpl timeoutBuilder = new TimeoutBuilderImpl(); + + protected Boolean parseCheck = null; + protected UpdateEltAcc updateEltAcc = new UpdateEltAcc(); + + protected List updateExecTransforms = new ArrayList<>(); + + // private UpdateExecBuilder() {} + + @SuppressWarnings("unchecked") + private X thisBuilder() { + return (X)this; + } + + /** Append the updates in an {@link UpdateRequest} to the {@link UpdateRequest} being built. */ + @Override + public X update(UpdateRequest updateRequest) { + Objects.requireNonNull(updateRequest); + add(updateRequest); + return thisBuilder(); + } + + /** Add the {@link Update} to the {@link UpdateRequest} being built. */ + @Override + public X update(Update update) { + Objects.requireNonNull(update); + add(update); + return thisBuilder(); + } + + /** Parse and update operations to the {@link UpdateRequest} being built. */ + @Override + public X update(String updateRequestString) { + if (effectiveParseCheck()) { + UpdateRequest more = UpdateFactory.create(updateRequestString); + add(more); + } else { + updateEltAcc.add(updateRequestString); + } + return thisBuilder(); + } + + @Override + public X parseCheck(boolean parseCheck) { + this.parseCheck = parseCheck; + return thisBuilder(); + } + + protected boolean effectiveParseCheck() { + return ParseCheckUtils.effectiveParseCheck(parseCheck, contextAcc); + } + + public X dataset(DatasetGraph dsg) { + this.dataset = dsg; + return thisBuilder(); + } + + /** Set the {@link Context}. + * This defaults to the global settings of {@code ARQ.getContext()}. + * If there was a previous call of {@code context} the multiple contexts are merged. 
+ * */ + @Override + public X context(Context context) { + if ( context == null ) + return thisBuilder(); + this.contextAcc.context(context); + return thisBuilder(); + } + + @Override + public X set(Symbol symbol, Object value) { + this.contextAcc.set(symbol, value); + return thisBuilder(); + } + + @Override + public X set(Symbol symbol, boolean value) { + this.contextAcc.set(symbol, value); + return thisBuilder(); + } + + public Context getContext() { + return contextAcc.context(); + } + + @Override + public X substitution(Binding binding) { + ensureSubstitutionMap(); + binding.forEach(this.substitutionMap::put); + return thisBuilder(); + } + + @Override + public X substitution(Var var, Node value) { + ensureSubstitutionMap(); + this.substitutionMap.put(var, value); + return thisBuilder(); + } + + private void ensureSubstitutionMap() { + if ( substitutionMap == null ) + substitutionMap = new HashMap<>(); + } + + @Override + public X timeout(long timeout, TimeUnit timeoutUnit) { + this.timeoutBuilder.timeout(timeout, timeoutUnit); + return thisBuilder(); + } + + /** @deprecated Use {@link #substitution(Binding)} */ + @Deprecated(forRemoval = true) + public X initialBinding(Binding initialBinding) { + this.initialBinding = initialBinding; + return thisBuilder(); + } + + @Override + public X transformExec(UpdateExecTransform updateExecTransform) { + updateExecTransforms.add(updateExecTransform); + return thisBuilder(); + } + + private void add(UpdateRequest request) { + updateEltAcc.add(request); + } + + private void add(Update update) { + updateEltAcc.add(update); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecDatasetBuilderDeferred.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecDatasetBuilderDeferred.java new file mode 100644 index 00000000000..4475736ace9 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecDatasetBuilderDeferred.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec; + +import org.apache.jena.sparql.adapter.SparqlAdapter; +import org.apache.jena.sparql.adapter.SparqlAdapterRegistry; + +/** + * QueryExecBuilder that chooses the actual builder only when build is called. 
+ */ +public class UpdateExecDatasetBuilderDeferred + extends UpdateExecDatasetBuilderDeferredBase +{ + public static UpdateExecDatasetBuilder create() { return new UpdateExecDatasetBuilderDeferred(); } + + @Override + protected UpdateExecBuilder newActualExecBuilder() { + SparqlAdapter adapter = SparqlAdapterRegistry.adapt(dataset); + return adapter.newUpdate(); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecDatasetBuilderDeferredBase.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecDatasetBuilderDeferredBase.java new file mode 100644 index 00000000000..8c1ade62851 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecDatasetBuilderDeferredBase.java @@ -0,0 +1,102 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec; + +import java.util.Map.Entry; + +import org.apache.jena.graph.Node; +import org.apache.jena.http.sys.UpdateElt; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.core.Var; +import org.apache.jena.sparql.engine.Timeouts.Timeout; +import org.apache.jena.sparql.exec.tracker.UpdateExecTransform; +import org.apache.jena.sparql.util.Context; + +/** + * QueryExecBuilder that chooses the actual builder only when build is called. + */ +public abstract class UpdateExecDatasetBuilderDeferredBase> + extends UpdateExecDatasetBuilderBase +{ + // Abbreviated forms + + @Override + public void execute() { + build().execute(); + } + + public void execute(DatasetGraph dsg) { + dataset(dsg); + execute(); + } + + protected abstract UpdateExecBuilder newActualExecBuilder(); + + @Override + public UpdateExec build() { + UpdateExecBuilder ueb = newActualExecBuilder(); + ueb = applySettings(ueb); + UpdateExec ue = ueb.build(); + return ue; + } + + /** Transfer settings from this builder to to the destination. */ + protected UpdateExecBuilder applySettings(UpdateExecBuilder dest) { + if (parseCheck != null) { + dest = dest.parseCheck(parseCheck); + } + + for (UpdateElt updateElt : updateEltAcc) { + if (updateElt.isParsed()) { + dest.update(updateElt.update()); + } else { + dest.update(updateElt.updateString()); + } + } + + // Transfer the built context. 
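+        // The accumulated context combines ARQ.getContext(), the context of the dataset (if
+        // one has been set on this builder) and any symbols set explicitly via set(); passing
+        // it to dest.context(...) merges those settings into the destination builder.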
+ Context cxt = contextAcc.context(); + dest = dest.context(cxt); + + if (substitutionMap != null) { + for (Entry e : substitutionMap.entrySet()) { + dest = dest.substitution(e.getKey(), e.getValue()); + } + } + + if (initialBinding != null) { + if (dest instanceof UpdateExecDatasetBuilder dsBuilder) { + dsBuilder.initialBinding(initialBinding); + } else { + throw new UnsupportedOperationException("The target builder does not support initial bindings."); + } + } + + Timeout timeout = timeoutBuilder.build(); + if (timeout.hasOverallTimeout()) { + dest = dest.timeout(timeout.overallTimeout().amount(), timeout.overallTimeout().unit()); + } + + for (UpdateExecTransform execTransform : updateExecTransforms) { + dest = dest.transformExec(execTransform); + } + + return dest; + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecDatasetBuilderImpl.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecDatasetBuilderImpl.java new file mode 100644 index 00000000000..c1fea9c414c --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecDatasetBuilderImpl.java @@ -0,0 +1,215 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.TimeUnit; + +import org.apache.jena.graph.Node; +import org.apache.jena.query.ARQ; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.core.Var; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.exec.tracker.UpdateExecTransform; +import org.apache.jena.sparql.engine.Timeouts.Timeout; +import org.apache.jena.sparql.engine.Timeouts.TimeoutBuilderImpl; +import org.apache.jena.sparql.modify.UpdateEngineFactory; +import org.apache.jena.sparql.modify.UpdateEngineRegistry; +import org.apache.jena.sparql.syntax.syntaxtransform.UpdateTransformOps; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.sparql.util.ContextAccumulator; +import org.apache.jena.sparql.util.Symbol; +import org.apache.jena.update.Update; +import org.apache.jena.update.UpdateException; +import org.apache.jena.update.UpdateFactory; +import org.apache.jena.update.UpdateRequest; + +public class UpdateExecDatasetBuilderImpl implements UpdateExecDatasetBuilder { + + public static UpdateExecDatasetBuilderImpl create() { return new UpdateExecDatasetBuilderImpl(); } + + private DatasetGraph dataset = null; + private ContextAccumulator contextAcc = ContextAccumulator.newBuilder(()->ARQ.getContext(), ()->Context.fromDataset(dataset)); + + // Uses query rewrite to replace variables by values. 
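+    // Illustrative example: after
+    //     builder.update("DELETE WHERE { ?s ?p ?o }").substitution(Var.alloc("s"), x)
+    // build() applies UpdateTransformOps.transform so that ?s is replaced by the node x
+    // before the update request is executed.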
+ private Map substitutionMap = null; + + private Binding initialBinding = null; + + private TimeoutBuilderImpl timeoutBuilder = new TimeoutBuilderImpl(); + + private UpdateRequest update = null; + private UpdateRequest updateRequest = new UpdateRequest(); + + private List updateExecTransforms = new ArrayList<>(); + + private UpdateExecDatasetBuilderImpl() {} + + /** Append the updates in an {@link UpdateRequest} to the {@link UpdateRequest} being built. */ + @Override + public UpdateExecDatasetBuilderImpl update(UpdateRequest updateRequest) { + Objects.requireNonNull(updateRequest); + add(updateRequest); + return this; + } + + /** Add the {@link Update} to the {@link UpdateRequest} being built. */ + @Override + public UpdateExecDatasetBuilderImpl update(Update update) { + Objects.requireNonNull(update); + add(update); + return this; + } + + /** Parse and update operations to the {@link UpdateRequest} being built. */ + @Override + public UpdateExecDatasetBuilderImpl update(String updateRequestString) { + UpdateRequest more = UpdateFactory.create(updateRequestString); + add(more); + return this; + } + + /** Hint has no effect on update execs over datasets. */ + @Override + public UpdateExecDatasetBuilderImpl parseCheck(boolean parseCheck) { + return this; + } + + public UpdateExecDatasetBuilderImpl dataset(DatasetGraph dsg) { + this.dataset = dsg; + return this; + } + + /** Set the {@link Context}. + * This defaults to the global settings of {@code ARQ.getContext()}. + * If there was a previous call of {@code context} the multiple contexts are merged. + * */ + @Override + public UpdateExecDatasetBuilderImpl context(Context context) { + if ( context == null ) + return this; + this.contextAcc.context(context); + return this; + } + + @Override + public UpdateExecDatasetBuilderImpl set(Symbol symbol, Object value) { + this.contextAcc.set(symbol, value); + return this; + } + + @Override + public UpdateExecDatasetBuilderImpl set(Symbol symbol, boolean value) { + this.contextAcc.set(symbol, value); + return this; + } + + public Context getContext() { + return contextAcc.context(); + } + + @Override + public UpdateExecDatasetBuilderImpl substitution(Binding binding) { + ensureSubstitutionMap(); + binding.forEach(this.substitutionMap::put); + return this; + } + + @Override + public UpdateExecDatasetBuilderImpl substitution(Var var, Node value) { + ensureSubstitutionMap(); + this.substitutionMap.put(var, value); + return this; + } + + private void ensureSubstitutionMap() { + if ( substitutionMap == null ) + substitutionMap = new HashMap<>(); + } + + @Override + public UpdateExecDatasetBuilderImpl timeout(long timeout, TimeUnit timeoutUnit) { + this.timeoutBuilder.timeout(timeout, timeoutUnit); + return this; + } + + /** @deprecated Use {@link #substitution(Binding)} */ + @Deprecated(forRemoval = true) + public UpdateExecDatasetBuilderImpl initialBinding(Binding initialBinding) { + this.initialBinding = initialBinding; + return this; + } + + @Override + public UpdateExecDatasetBuilderImpl transformExec(UpdateExecTransform updateExecTransform) { + Objects.requireNonNull(updateExecTransform); + updateExecTransforms.add(updateExecTransform); + return this; + } + + @Override + public UpdateExec build() { + Objects.requireNonNull(dataset, "No dataset for update"); + Objects.requireNonNull(updateRequest, "No update request"); + + UpdateRequest actualUpdate = updateRequest; + + if ( substitutionMap != null && ! 
substitutionMap.isEmpty() ) + actualUpdate = UpdateTransformOps.transform(actualUpdate, substitutionMap); + + Context cxt = getContext(); + UpdateEngineFactory f = UpdateEngineRegistry.get().find(dataset, cxt); + if ( f == null ) + throw new UpdateException("Failed to find an UpdateEngine"); + + Timeout timeout = timeoutBuilder.build(); + + UpdateExec uExec = new UpdateExecDataset(actualUpdate, dataset, initialBinding, cxt, f, timeout); + + for (UpdateExecTransform updateExecTransform : updateExecTransforms) { + uExec = updateExecTransform.transform(uExec); + } + + return uExec; + } + + // Abbreviated forms + + @Override + public void execute() { + build().execute(); + } + + public void execute(DatasetGraph dsg) { + dataset(dsg); + execute(); + } + + private void add(UpdateRequest request) { + request.getOperations().forEach(this::add); + } + + private void add(Update update) { + this.updateRequest.add(update); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecWrapper.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecWrapper.java new file mode 100644 index 00000000000..274447df082 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/UpdateExecWrapper.java @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.jena.sparql.exec; + +import org.apache.jena.sparql.exec.tracker.UpdateProcessorWrapper; + +public class UpdateExecWrapper + extends UpdateProcessorWrapper + implements UpdateExec +{ + public UpdateExecWrapper(T delegate) { + super(delegate); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/QueryExecHTTP.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/QueryExecHTTP.java index 9e4582ad466..92755966309 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/QueryExecHTTP.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/QueryExecHTTP.java @@ -18,61 +18,9 @@ package org.apache.jena.sparql.exec.http; -import static org.apache.jena.http.HttpLib.*; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.net.http.HttpClient; -import java.net.http.HttpRequest; -import java.net.http.HttpRequest.BodyPublishers; -import java.net.http.HttpResponse; -import java.nio.charset.Charset; -import java.nio.charset.StandardCharsets; -import java.util.*; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.TimeUnit; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.io.input.ClosedInputStream; -import org.apache.commons.io.input.ProxyInputStream; -import org.apache.jena.atlas.RuntimeIOException; -import org.apache.jena.atlas.io.IO; -import org.apache.jena.atlas.iterator.Iter; -import org.apache.jena.atlas.json.JSON; -import org.apache.jena.atlas.json.JsonArray; -import org.apache.jena.atlas.json.JsonObject; -import org.apache.jena.atlas.lib.InternalErrorException; -import org.apache.jena.atlas.lib.Pair; -import org.apache.jena.atlas.logging.Log; -import org.apache.jena.atlas.web.HttpException; -import org.apache.jena.atlas.web.MediaType; -import org.apache.jena.graph.Graph; -import org.apache.jena.graph.Triple; -import org.apache.jena.http.AsyncHttpRDF; -import org.apache.jena.http.HttpEnv; -import org.apache.jena.http.HttpLib; -import org.apache.jena.query.*; -import org.apache.jena.riot.*; -import org.apache.jena.riot.resultset.ResultSetLang; -import org.apache.jena.riot.resultset.ResultSetReaderRegistry; -import org.apache.jena.riot.web.HttpNames; -import org.apache.jena.sparql.ARQException; -import org.apache.jena.sparql.core.DatasetGraph; -import org.apache.jena.sparql.core.DatasetGraphFactory; -import org.apache.jena.sparql.core.Quad; -import org.apache.jena.sparql.engine.http.HttpParams; -import org.apache.jena.sparql.engine.http.QueryExceptionHTTP; import org.apache.jena.sparql.exec.QueryExec; -import org.apache.jena.sparql.exec.RowSet; -import org.apache.jena.sparql.util.Context; -import org.apache.jena.web.HttpSC; -/** - * A {@link QueryExec} implementation where queries are executed against a remote - * service over HTTP. - */ -public class QueryExecHTTP implements QueryExec { +public interface QueryExecHTTP extends QueryExec { public static QueryExecHTTPBuilder newBuilder() { return QueryExecHTTPBuilder.create(); } @@ -80,715 +28,17 @@ public static QueryExecHTTPBuilder service(String serviceURL) { return QueryExecHTTP.newBuilder().endpoint(serviceURL); } - // Blazegraph has a bug : it impacts wikidata. - // Unless the charset is set, wikidata interprets a POST as ISO-8859-??? (c.f. POST as form). - // https://github.com/blazegraph/database/issues/224 - // Only applies to SendMode.asPost of a SPARQL query. 
- public static final String QUERY_MIME_TYPE = WebContent.contentTypeSPARQLQuery+";charset="+WebContent.charsetUTF8; - private final Query query; - private final String queryString; - private final String service; - private final Context context; - - // Params - private Params params = null; - - private final QuerySendMode sendMode; - private int urlLimit = HttpEnv.urlLimit; - - // Protocol - private List defaultGraphURIs = new ArrayList<>(); - private List namedGraphURIs = new ArrayList<>(); - - private boolean closed = false; - - // Timeout of query execution. - private long readTimeout = -1; - private TimeUnit readTimeoutUnit = TimeUnit.MILLISECONDS; - - private final String selectAcceptHeader; - private final String askAcceptHeader; - private final String graphAcceptHeader; - private final String datasetAcceptHeader; + String getAcceptHeaderSelect(); - // If this is non-null, it overrides the use of any Content-Type above. - @Deprecated(forRemoval = true) // Deprecated in favor of setting the other header fields. - private String overrideAcceptHeader = null; + String getAcceptHeaderAsk(); - // Received content type - private String httpResponseContentType = null; + String getAcceptHeaderDescribe(); - private HttpClient httpClient = HttpEnv.getDftHttpClient(); - private Map httpHeaders; + String getAcceptHeaderConstructGraph(); - // ----- Cancellation ----- - - private volatile boolean isAborted = false; - private final Object abortLock = new Object(); - private volatile CompletableFuture> future = null; - - // Releasing HTTP input streams is important. We remember this for SELECT result - // set streaming, and will close it when the execution is closed - // This is the physical InputStream of the HTTP request which will only be closed by close(). - private InputStream retainedConnection = null; - - // This is a wrapped view of retainedConnection that will be closed by abort(). - private volatile InputStream retainedConnectionView = null; - - // Whether abort cancels an async HTTP request's future immediately. - private boolean cancelFutureOnAbort = true; - - /** - * This constructor is superseded by the other one which has more parameters. - * The recommended way to create instances of this class is via {@link QueryExecHTTPBuilder}. - */ - @Deprecated(forRemoval = true) - public QueryExecHTTP(String serviceURL, Query query, String queryString, int urlLimit, - HttpClient httpClient, Map httpHeaders, Params params, Context context, - List defaultGraphURIs, List namedGraphURIs, - QuerySendMode sendMode, String overrideAcceptHeader, - long timeout, TimeUnit timeoutUnit) { - // Content Types: these list the standard formats and also include */* - this(serviceURL, query, queryString, urlLimit, - httpClient, httpHeaders, params, context, - defaultGraphURIs, namedGraphURIs, - sendMode, - dft(overrideAcceptHeader, WebContent.defaultSparqlResultsHeader), - dft(overrideAcceptHeader, WebContent.defaultSparqlAskHeader), - dft(overrideAcceptHeader, WebContent.defaultGraphAcceptHeader), - dft(overrideAcceptHeader, WebContent.defaultDatasetAcceptHeader), - timeout, timeoutUnit); - - // Handling of legacy overrideAcceptHeader. - this.overrideAcceptHeader = overrideAcceptHeader; - // Important - handled as special case because the defaults vary by query type. 
- if ( httpHeaders.containsKey(HttpNames.hAccept) ) { - if ( this.overrideAcceptHeader != null ) { - String acceptHeader = httpHeaders.get(HttpNames.hAccept); - this.overrideAcceptHeader = acceptHeader; - } - this.httpHeaders.remove(HttpNames.hAccept); - } - } - - protected QueryExecHTTP(String serviceURL, Query query, String queryString, int urlLimit, - HttpClient httpClient, Map httpHeaders, Params params, Context context, - List defaultGraphURIs, List namedGraphURIs, - QuerySendMode sendMode, - String selectAcceptHeader, String askAcceptHeader, - String graphAcceptHeader, String datasetAcceptHeader, - long timeout, TimeUnit timeoutUnit) { - this.context = ( context == null ) ? ARQ.getContext().copy() : context.copy(); - this.service = serviceURL; - this.query = query; - this.queryString = queryString; - this.urlLimit = urlLimit; - this.httpHeaders = httpHeaders; - this.defaultGraphURIs = defaultGraphURIs; - this.namedGraphURIs = namedGraphURIs; - this.sendMode = Objects.requireNonNull(sendMode); - this.selectAcceptHeader = selectAcceptHeader; - this.askAcceptHeader = askAcceptHeader; - this.graphAcceptHeader = graphAcceptHeader; - this.datasetAcceptHeader = datasetAcceptHeader; - this.httpHeaders = httpHeaders; - this.params = params; - this.readTimeout = timeout; - this.readTimeoutUnit = timeoutUnit; - this.httpClient = HttpLib.dft(httpClient, HttpEnv.getDftHttpClient()); - } - - public String getAcceptHeaderSelect() { - return selectAcceptHeader; - } - - public String getAcceptHeaderAsk() { - return askAcceptHeader; - } - - public String getAcceptHeaderDescribe() { - return graphAcceptHeader; - } - - public String getAcceptHeaderConstructGraph() { - return graphAcceptHeader; - } - - public String getAcceptHeaderConstructDataset() { - return datasetAcceptHeader; - } - - /** Getter for the override accept header. Only used for testing. */ - @Deprecated(forRemoval = true) - public String getAppProvidedAcceptHeader() { - return overrideAcceptHeader; - } + String getAcceptHeaderConstructDataset(); /** The Content-Type response header received (null before the remote operation is attempted). */ - public String getHttpResponseContentType() { - return httpResponseContentType; - } - - @Override - public RowSet select() { - checkNotClosed(); - check(QueryType.SELECT); - RowSet rs = execRowSet(); - return rs; - } - - private RowSet execRowSet() { - HttpRequest request = effectiveHttpRequest(selectAcceptHeader); - HttpResponse response = executeQuery(request); - InputStream in = registerInputStream(response); - // Don't assume the endpoint actually gives back the content type we asked for - String actualContentType = responseHeader(response, HttpNames.hContentType); - - // Remember the response. - httpResponseContentType = actualContentType; - - // More reliable to use the format-defined charsets e.g. JSON -> UTF-8 - actualContentType = removeCharset(actualContentType); - - if (false) { - byte b[] = IO.readWholeFile(in); - String str = new String(b); - System.out.println(str); - in = new ByteArrayInputStream(b); - } - - if (actualContentType == null || actualContentType.equals("")) - actualContentType = WebContent.contentTypeResultsXML; - - // Map to lang, with pragmatic alternatives. - Lang lang = WebContent.contentTypeToLangResultSet(actualContentType); - boolean unknownLang = lang == null; - boolean unsupportedFormat = !unknownLang && !ResultSetReaderRegistry.isRegistered(lang); - if ( unknownLang || unsupportedFormat ) { - String errorTerm = unknownLang ? 
"recognized" : "supported"; - String errorMsg = String.format("Endpoint returned Content-Type: %s which is not %s for SELECT queries", - actualContentType, errorTerm); - raiseException(errorMsg, request, response, in); - } - - // This returns a streaming result set for some formats. - // Do not close the InputStream at this point. - ResultSet result = ResultSetMgr.read(in, lang); - return RowSet.adapt(result); - } - - @Override - public boolean ask() { - checkNotClosed(); - check(QueryType.ASK); - HttpRequest request = effectiveHttpRequest(askAcceptHeader); - HttpResponse response = executeQuery(request); - InputStream in = registerInputStream(response); - - String actualContentType = responseHeader(response, HttpNames.hContentType); - httpResponseContentType = actualContentType; - actualContentType = removeCharset(actualContentType); - - // If the server fails to return a Content-Type then we will assume - // the server returned the type we asked for - if (actualContentType == null || actualContentType.equals("")) - actualContentType = askAcceptHeader; - - Lang lang = RDFLanguages.contentTypeToLang(actualContentType); - if ( lang == null ) { - // Any specials : - // application/xml for application/sparql-results+xml - // application/json for application/sparql-results+json - if (actualContentType.equals(WebContent.contentTypeXML)) - lang = ResultSetLang.RS_XML; - else if ( actualContentType.equals(WebContent.contentTypeJSON)) - lang = ResultSetLang.RS_JSON; - } - if (lang == null) { - raiseException("Endpoint returned Content-Type: " + actualContentType + " which is not supported for ASK queries", request, response, in); - } - try { - boolean result = ResultSetMgr.readBoolean(in, lang); - return result; - } finally { - finishInputStream(in); - } - } - - private String removeCharset(String contentType) { - if ( contentType == null ) - return contentType; - int idx = contentType.indexOf(';'); - if ( idx < 0 ) - return contentType; - return contentType.substring(0,idx); - } - - @Override - public Graph construct(Graph graph) { - checkNotClosed(); - check(QueryType.CONSTRUCT); - return execGraph(graph, graphAcceptHeader); - } - - @Override - public Iterator constructTriples() { - checkNotClosed(); - check(QueryType.CONSTRUCT); - return execTriples(graphAcceptHeader); - } - - @Override - public Iterator constructQuads(){ - checkNotClosed(); - return execQuads(); - } - - @Override - public DatasetGraph constructDataset(){ - checkNotClosed(); - return constructDataset(DatasetGraphFactory.createTxnMem()); - } - - @Override - public DatasetGraph constructDataset(DatasetGraph dataset){ - checkNotClosed(); - check(QueryType.CONSTRUCT); - return execDataset(dataset); - } - - @Override - public Graph describe(Graph graph) { - checkNotClosed(); - check(QueryType.DESCRIBE); - return execGraph(graph, graphAcceptHeader); - } - - @Override - public Iterator describeTriples() { - checkNotClosed(); - return execTriples(graphAcceptHeader); - } - - private Graph execGraph(Graph graph, String acceptHeader) { - Pair p = execRdfWorker(acceptHeader, WebContent.contentTypeRDFXML); - InputStream in = p.getLeft(); - Lang lang = p.getRight(); - try { - RDFDataMgr.read(graph, in, lang); - } finally { - finishInputStream(in); - } - return graph; - } - - private DatasetGraph execDataset(DatasetGraph dataset) { - Pair p = execRdfWorker(datasetAcceptHeader, WebContent.contentTypeNQuads); - InputStream in = p.getLeft(); - Lang lang = p.getRight(); - try { - RDFDataMgr.read(dataset, in, lang); - } finally { - 
finishInputStream(in); - } - return dataset; - } - - @SuppressWarnings("removal") - private Iterator execTriples(String acceptHeader) { - Pair p = execRdfWorker(acceptHeader, WebContent.contentTypeRDFXML); - InputStream input = p.getLeft(); - Lang lang = p.getRight(); - // Base URI? - // Unless N-Triples, this creates a thread. - Iterator iter = RDFDataMgr.createIteratorTriples(input, lang, null); - return Iter.onCloseIO(iter, input); - } - - @SuppressWarnings("removal") - private Iterator execQuads() { - checkNotClosed(); - Pair p = execRdfWorker(datasetAcceptHeader, WebContent.contentTypeNQuads); - InputStream input = p.getLeft(); - Lang lang = p.getRight(); - // Unless N-Quads, this creates a thread. - Iterator iter = RDFDataMgr.createIteratorQuads(input, lang, null); - return Iter.onCloseIO(iter, input); - } - - // Any RDF data back (CONSTRUCT, DESCRIBE, QUADS) - // ifNoContentType - some wild guess at the content type. - private Pair execRdfWorker(String contentType, String ifNoContentType) { - checkNotClosed(); - String thisAcceptHeader = contentType; - HttpRequest request = effectiveHttpRequest(thisAcceptHeader); - HttpResponse response = executeQuery(request); - InputStream in = registerInputStream(response); - - // Don't assume the endpoint actually gives back the content type we asked for - String actualContentType = responseHeader(response, HttpNames.hContentType); - httpResponseContentType = actualContentType; - actualContentType = removeCharset(actualContentType); - - // If the server fails to return a Content-Type then we will assume - // the server returned the type we asked for - if (actualContentType == null || actualContentType.equals("")) - actualContentType = ifNoContentType; - - Lang lang = RDFLanguages.contentTypeToLang(actualContentType); - if ( ! RDFLanguages.isQuads(lang) && ! RDFLanguages.isTriples(lang) ) { - raiseException("Endpoint returned Content Type: " - + actualContentType - + " which is not a valid RDF syntax", request, response, in); - } - return Pair.create(in, lang); - } - - @Override - public JsonArray execJson() { - checkNotClosed(); - check(QueryType.CONSTRUCT_JSON); - String thisAcceptHeader = dft(overrideAcceptHeader, WebContent.contentTypeJSON); - HttpRequest request = effectiveHttpRequest(thisAcceptHeader); - HttpResponse response = executeQuery(request); - InputStream in = registerInputStream(response); - try { - return JSON.parseAny(in).getAsArray(); - } finally { finishInputStream(in); } - } - - @Override - public Iterator execJsonItems() { - JsonArray array = execJson().getAsArray(); - List x = new ArrayList<>(array.size()); - array.forEach(elt->{ - if ( ! elt.isObject()) - throw new QueryExecException("Item in an array from a JSON query isn't an object"); - x.add(elt.getAsObject()); - }); - return x.iterator(); - } - - private void check(QueryType queryType) { - if ( query == null ) { - // Pass through the queryString. - return; - } - if ( query.queryType() != queryType ) - throw new QueryExecException("Not the right form of query. Expected "+queryType+" but got "+query.queryType()); - } - - @Override - public Context getContext() { - return context; - } - - @Override - public DatasetGraph getDataset() { - return null; - } - - // This may be null - if we were created form a query string, - // we don't guarantee to parse it so we let through non-SPARQL - // extensions to the far end. 
- @Override - public Query getQuery() { - if ( query != null ) - return query; - if ( queryString != null ) { - // Object not created with a Query object, may be because there is foreign - // syntax in the query or may be because the query string was available and the app - // didn't want the overhead of parsing it every time. - // Try to parse it else return null; - try { return QueryFactory.create(queryString, Syntax.syntaxARQ); } - catch (QueryParseException ex) {} - return null; - } - return null; - } - - /** - * Return the query string. If this was supplied as a string, - * there is no guarantee this is legal SPARQL syntax. - */ - @Override - public String getQueryString() { - return queryString; - } - - private static long asMillis(long duration, TimeUnit timeUnit) { - return (duration < 0) ? duration : timeUnit.toMillis(duration); - } - - private void raiseException(String errorMsg, HttpRequest request, HttpResponse response, InputStream in) { - int bodySummaryLength = 1024; - int statusCode = response.statusCode(); - String statusCodeMsg = HttpSC.getMessage(statusCode); - - // Determine the charset for extracting an excerpt of the body - String actualContentType = responseHeader(response, HttpNames.hContentType); - MediaType ct = MediaType.create(actualContentType); - String charsetName = ct == null ? null : ct.getCharset(); - Charset charset = null; - try { - charset = charsetName == null ? null : Charset.forName(charsetName); - } catch (Throwable e) { - // Silently ignore - } - if (charset == null) { - charset = StandardCharsets.UTF_8; - } - - String bodyStr; - try { - bodyStr = in == null ? "(no data supplied)" : IO.abbreviate(in, charset, bodySummaryLength, "..."); - } catch (Throwable e) { - // No need to rethrow because we are already about to throw - bodyStr = "(failed to retrieve HTTP body due to: " + e.getMessage() + ")"; - } - - throw new QueryException(String.format( - "%s.\nStatus code %d %s, Method %s, Request Headers: %s\nBody (extracted with charset %s): %s", - errorMsg, statusCode, statusCodeMsg, request.method(), request.headers().map(), charset.name(), bodyStr)); - } - - /** - * Build the effective HTTP request ready for use with {@link #executeQuery(HttpRequest)}. - */ - private HttpRequest effectiveHttpRequest(String reqAcceptHeader) { - if (closed) - throw new ARQException("HTTP execution already closed"); - - // SERVICE specials. - - Params thisParams = Params.create(params); - - if ( defaultGraphURIs != null ) { - for ( String dft : defaultGraphURIs ) - thisParams.add( HttpParams.pDefaultGraph, dft ); - } - if ( namedGraphURIs != null ) { - for ( String name : namedGraphURIs ) - thisParams.add( HttpParams.pNamedGraph, name ); - } - - HttpLib.modifyByService(service, context, thisParams, httpHeaders); - - HttpRequest request = makeRequest(thisParams, reqAcceptHeader); - return request; - } - - private HttpRequest makeRequest(Params thisParams, String reqAcceptHeader) { - QuerySendMode actualSendMode = actualSendMode(); - HttpRequest.Builder requestBuilder; - switch(actualSendMode) { - case asGetAlways : - requestBuilder = executeQueryGet(thisParams, reqAcceptHeader); - break; - case asPostForm : - requestBuilder = executeQueryPostForm(thisParams, reqAcceptHeader); - break; - case asPost : - requestBuilder = executeQueryPostBody(thisParams, reqAcceptHeader); - break; - default : - // Should not happen! 
- throw new InternalErrorException("Invalid value for 'actualSendMode' "+actualSendMode); - } - return requestBuilder.build(); - } - - /** - * Execute an HttpRequest and wait for the HttpResponse. - * A call to {@link #abort()} interrupts the wait. - * The response is returned after status code processing so the caller can assume the - * query execution was successful and return 200. - * Use {@link HttpLib#getInputStream} to access the body. - */ - private HttpResponse executeQuery(HttpRequest request) { - checkNotClosed(); - - if (future != null) { - throw new IllegalStateException("Execution was already started."); - } - - try { - synchronized (abortLock) { - checkNotAborted(); - logQuery(queryString, request); - future = HttpLib.executeAsync(httpClient, request); - } - - HttpResponse response = AsyncHttpRDF.getOrElseThrow(future, request); - HttpLib.handleHttpStatusCode(response); - return response; - } catch (HttpException httpEx) { - throw QueryExceptionHTTP.rewrap(httpEx); - } - } - - private QuerySendMode actualSendMode() { - int thisLengthLimit = urlLimit; - switch(sendMode) { - case asGetAlways : - case asPostForm : - case asPost : - return sendMode; - case asGetWithLimitBody : - case asGetWithLimitForm : - break; - } - - // Only QuerySendMode.asGetWithLimitBody and QuerySendMode.asGetWithLimitForm here. - String requestURL = service; - // Other params (query= has not been added at this point) - int paramsLength = params.httpString().length(); - int qEncodedLength = calcEncodeStringLength(queryString); - - // URL Length, including service (for safety) - int length = service.length() - + /* ?query= */ 1 + HttpParams.pQuery.length() - + /* encoded query */ qEncodedLength - + /* &other params*/ 1 + paramsLength; - if ( length <= thisLengthLimit ) - return QuerySendMode.asGetAlways; - return (sendMode==QuerySendMode.asGetWithLimitBody) ? QuerySendMode.asPost : QuerySendMode.asPostForm; - } - - private static int calcEncodeStringLength(String str) { - // Could approximate by counting non-queryString character and adding that *2 to the length of the string. - String qs = HttpLib.urlEncodeQueryString(str); - int encodedLength = qs.length(); - return encodedLength; - } - - private HttpRequest.Builder executeQueryGet(Params thisParams, String acceptHeader) { - thisParams.add(HttpParams.pQuery, queryString); - String requestURL = requestURL(service, thisParams.httpString()); - HttpRequest.Builder builder = HttpLib.requestBuilder(requestURL, httpHeaders, readTimeout, readTimeoutUnit); - acceptHeader(builder, acceptHeader); - return builder.GET(); - } - - private HttpRequest.Builder executeQueryPostForm(Params thisParams, String acceptHeader) { - thisParams.add(HttpParams.pQuery, queryString); - String requestURL = service; - String formBody = thisParams.httpString(); - HttpRequest.Builder builder = HttpLib.requestBuilder(requestURL, httpHeaders, readTimeout, readTimeoutUnit); - acceptHeader(builder, acceptHeader); - // Use an HTML form. - contentTypeHeader(builder, WebContent.contentTypeHTMLForm); - // Already UTF-8 encoded to ASCII. - return builder.POST(BodyPublishers.ofString(formBody, StandardCharsets.US_ASCII)); - } - - // Use SPARQL query body and MIME type. 
- private HttpRequest.Builder executeQueryPostBody(Params thisParams, String acceptHeader) { - // Use thisParams (for default-graph-uri etc) - String requestURL = requestURL(service, thisParams.httpString()); - HttpRequest.Builder builder = HttpLib.requestBuilder(requestURL, httpHeaders, readTimeout, readTimeoutUnit); - contentTypeHeader(builder, QUERY_MIME_TYPE); - acceptHeader(builder, acceptHeader); - return builder.POST(BodyPublishers.ofString(queryString)); - } - - private static void logQuery(String queryString, HttpRequest request) {} - - /** - * Cancel query evaluation - */ - @Override - public void abort() { - // Setting abort to true causes the next read from - // retainedConnectionView (if already created) to - // fail with a QueryCancelledException. - isAborted = true; - if (cancelFutureOnAbort) { - cancelFuture(future); - } - } - - private InputStream registerInputStream(HttpResponse httpResponse) { - InputStream in = HttpLib.getInputStream(httpResponse); - registerInputStream(in); - return in; - } - - /** - * Set the given input stream as the 'retainedConnection' and create a corresponding - * asynchronously abortable 'retainedConnectionView'. The latter is returned. - * If execution was already aborted then a {@link QueryCancelledException} is raised. - */ - private InputStream registerInputStream(InputStream input) { - synchronized (abortLock) { - this.retainedConnection = input; - // Note: Used ProxyInputStream because the ctor of CloseShieldInputStream is deprecated. - this.retainedConnectionView = new ProxyInputStream(input) { - @Override - protected void beforeRead(int n) throws IOException { - checkNotAborted(); - super.beforeRead(n); - } - @Override - public void close() { - this.in = ClosedInputStream.INSTANCE; - } - }; - - // If already aborted then bail out before starting the parsers. - checkNotAborted(); - } - return retainedConnectionView; - } - - @Override - public void close() { - closed = true; - // No need to handle the future here, because the possible states are: - // - Null because no execution was started -> retainedConnection is null. - // - Cancelled by asynchronous abort -> retainedConnection is null. - // - Completed successfully by the same thread that now closes the retainedConnection - // -> retainedConnection is non-null. - IOUtils.closeQuietly(retainedConnectionView); - closeRetainedConnection(); - } - - private static void cancelFuture(CompletableFuture future) { - if (future != null) { - future.cancel(true); - } - } - - private void closeRetainedConnection() { - if (retainedConnection != null) { - try { - // This call may take a long time if the response has not been consumed - // as HTTP client will consume the remaining response so it can re-use the - // connection. If we're closing when we're not at the end of the stream then - // issue a warning to the logs - if (retainedConnection.read() != -1) - Log.warn(this, "HTTP response not fully consumed, if HTTP Client is reusing connections (its default behaviour) then it will consume the remaining response data which may take a long time and cause this application to become unresponsive"); - retainedConnection.close(); - } catch (RuntimeIOException | java.io.IOException e) { - // If we are closing early and the underlying stream is chunk encoded - // the close() can result in a IOException. TypedInputStream catches - // and re-wraps that and we want to suppress both forms. 
- } finally { - retainedConnection = null; - } - } - } - - private void checkNotClosed() { - if ( closed ) - throw new QueryExecException("HTTP QueryExecHTTP has been closed"); - } - - protected void checkNotAborted() { - if ( isAborted ) - throw new QueryCancelledException(); - } + String getHttpResponseContentType(); - @Override - public boolean isClosed() { return closed; } } diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/QueryExecHTTPBuilder.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/QueryExecHTTPBuilder.java index 86f0bba7e85..a8eebf18866 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/QueryExecHTTPBuilder.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/QueryExecHTTPBuilder.java @@ -21,18 +21,28 @@ import static org.apache.jena.http.HttpLib.copyArray; import java.net.http.HttpClient; +import java.util.ArrayList; import java.util.HashMap; +import java.util.List; +import java.util.Objects; import java.util.concurrent.TimeUnit; import org.apache.jena.http.sys.ExecHTTPBuilder; import org.apache.jena.query.Query; import org.apache.jena.sparql.exec.QueryExecBuilder; import org.apache.jena.sparql.exec.QueryExecMod; +import org.apache.jena.sparql.exec.tracker.QueryExecTransform; import org.apache.jena.sparql.util.Context; public class QueryExecHTTPBuilder extends ExecHTTPBuilder implements QueryExecMod, QueryExecBuilder { - public static QueryExecHTTPBuilder create() { return new QueryExecHTTPBuilder(); } + protected List queryExecTransforms = new ArrayList<>(); + + public static QueryExecHTTPBuilder create() { + return new QueryExecHTTPBuilder() + // .transformExec(qExec -> TaskEventBroker.track(ARQ.getContext(), qExec)) + ; + } public static QueryExecHTTPBuilder service(String serviceURL) { return create().endpoint(serviceURL); } @@ -45,7 +55,7 @@ protected QueryExecHTTPBuilder thisBuilder() { @Override protected QueryExecHTTP buildX(HttpClient hClient, Query queryActual, String queryStringActual, Context cxt) { - return new QueryExecHTTP(serviceURL, queryActual, queryStringActual, urlLimit, + QueryExecHTTP result = new QueryExecHTTPImpl(serviceURL, queryActual, queryStringActual, urlLimit, hClient, new HashMap<>(httpHeaders), Params.create(params), cxt, copyArray(defaultGraphURIs), copyArray(namedGraphURIs), @@ -55,6 +65,10 @@ protected QueryExecHTTP buildX(HttpClient hClient, Query queryActual, String que graphAcceptHeader, datasetAcceptHeader, timeout, timeoutUnit); + for (QueryExecTransform queryExecTransform : queryExecTransforms) { + result = QueryExecHTTPWrapper.transform(result, queryExecTransform); + } + return result; } @Override @@ -74,6 +88,13 @@ public QueryExecHTTPBuilder overallTimeout(long timeout, TimeUnit timeUnit) { return thisBuilder(); } + @Override + public QueryExecHTTPBuilder transformExec(QueryExecTransform queryExecTransform) { + Objects.requireNonNull(queryExecTransform); + queryExecTransforms.add(queryExecTransform); + return thisBuilder(); + } + @Override public Context getContext() { return null; diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/QueryExecHTTPImpl.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/QueryExecHTTPImpl.java new file mode 100644 index 00000000000..b4f6cdea030 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/QueryExecHTTPImpl.java @@ -0,0 +1,811 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec.http; + +import static org.apache.jena.http.HttpLib.acceptHeader; +import static org.apache.jena.http.HttpLib.contentTypeHeader; +import static org.apache.jena.http.HttpLib.dft; +import static org.apache.jena.http.HttpLib.finishInputStream; +import static org.apache.jena.http.HttpLib.requestURL; +import static org.apache.jena.http.HttpLib.responseHeader; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpRequest.BodyPublishers; +import java.net.http.HttpResponse; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.util.*; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.TimeUnit; + +import org.apache.commons.io.IOUtils; +import org.apache.commons.io.input.ClosedInputStream; +import org.apache.commons.io.input.ProxyInputStream; +import org.apache.jena.atlas.RuntimeIOException; +import org.apache.jena.atlas.io.IO; +import org.apache.jena.atlas.iterator.Iter; +import org.apache.jena.atlas.json.JSON; +import org.apache.jena.atlas.json.JsonArray; +import org.apache.jena.atlas.json.JsonObject; +import org.apache.jena.atlas.lib.InternalErrorException; +import org.apache.jena.atlas.lib.Pair; +import org.apache.jena.atlas.logging.Log; +import org.apache.jena.atlas.web.HttpException; +import org.apache.jena.atlas.web.MediaType; +import org.apache.jena.graph.Graph; +import org.apache.jena.graph.Triple; +import org.apache.jena.http.AsyncHttpRDF; +import org.apache.jena.http.HttpEnv; +import org.apache.jena.http.HttpLib; +import org.apache.jena.query.ARQ; +import org.apache.jena.query.Query; +import org.apache.jena.query.QueryCancelledException; +import org.apache.jena.query.QueryException; +import org.apache.jena.query.QueryExecException; +import org.apache.jena.query.QueryFactory; +import org.apache.jena.query.QueryParseException; +import org.apache.jena.query.QueryType; +import org.apache.jena.query.ResultSet; +import org.apache.jena.query.Syntax; +import org.apache.jena.riot.Lang; +import org.apache.jena.riot.RDFDataMgr; +import org.apache.jena.riot.RDFLanguages; +import org.apache.jena.riot.ResultSetMgr; +import org.apache.jena.riot.WebContent; +import org.apache.jena.riot.resultset.ResultSetLang; +import org.apache.jena.riot.resultset.ResultSetReaderRegistry; +import org.apache.jena.riot.web.HttpNames; +import org.apache.jena.sparql.ARQException; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.core.DatasetGraphFactory; +import org.apache.jena.sparql.core.Quad; +import org.apache.jena.sparql.engine.http.HttpParams; +import org.apache.jena.sparql.engine.http.QueryExceptionHTTP; +import org.apache.jena.sparql.exec.QueryExec; +import 
org.apache.jena.sparql.exec.RowSet; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.web.HttpSC; + +/** + * A {@link QueryExec} implementation where queries are executed against a remote + * service over HTTP. + */ +public class QueryExecHTTPImpl implements QueryExecHTTP { + + // Blazegraph has a bug : it impacts wikidata. + // Unless the charset is set, wikidata interprets a POST as ISO-8859-??? (c.f. POST as form). + // https://github.com/blazegraph/database/issues/224 + // Only applies to SendMode.asPost of a SPARQL query. + public static final String QUERY_MIME_TYPE = WebContent.contentTypeSPARQLQuery+";charset="+WebContent.charsetUTF8; + private final Query query; + private final String queryString; + private final String service; + private final Context context; + + // Params + private Params params = null; + + private final QuerySendMode sendMode; + private int urlLimit = HttpEnv.urlLimit; + + // Protocol + private List defaultGraphURIs = new ArrayList<>(); + private List namedGraphURIs = new ArrayList<>(); + + private boolean closed = false; + + // Timeout of query execution. + private long readTimeout = -1; + private TimeUnit readTimeoutUnit = TimeUnit.MILLISECONDS; + + private final String selectAcceptHeader; + private final String askAcceptHeader; + private final String graphAcceptHeader; + private final String datasetAcceptHeader; + + // If this is non-null, it overrides the use of any Content-Type above. + @Deprecated(forRemoval = true) // Deprecated in favor of setting the other header fields. + private String overrideAcceptHeader = null; + + // Received content type + private String httpResponseContentType = null; + + private HttpClient httpClient = HttpEnv.getDftHttpClient(); + private Map httpHeaders; + + // ----- Cancellation ----- + + private volatile boolean isAborted = false; + private final Object abortLock = new Object(); + private volatile CompletableFuture> future = null; + + // Releasing HTTP input streams is important. We remember this for SELECT result + // set streaming, and will close it when the execution is closed + // This is the physical InputStream of the HTTP request which will only be closed by close(). + private InputStream retainedConnection = null; + + // This is a wrapped view of retainedConnection that will be closed by abort(). + private volatile InputStream retainedConnectionView = null; + + // Whether abort cancels an async HTTP request's future immediately. + private boolean cancelFutureOnAbort = true; + + /** + * Constructor variant with one accept header for all query types. + * The recommended way to create instances of this class is via {@link QueryExecHTTPBuilder}. + */ + public QueryExecHTTPImpl(String serviceURL, Query query, String queryString, int urlLimit, + HttpClient httpClient, Map httpHeaders, Params params, Context context, + List defaultGraphURIs, List namedGraphURIs, + QuerySendMode sendMode, String overrideAcceptHeader, + long timeout, TimeUnit timeoutUnit) { + // Content Types: these list the standard formats and also include */* + this(serviceURL, query, queryString, urlLimit, + httpClient, httpHeaders, params, context, + defaultGraphURIs, namedGraphURIs, + sendMode, + dft(overrideAcceptHeader, WebContent.defaultSparqlResultsHeader), + dft(overrideAcceptHeader, WebContent.defaultSparqlAskHeader), + dft(overrideAcceptHeader, WebContent.defaultGraphAcceptHeader), + dft(overrideAcceptHeader, WebContent.defaultDatasetAcceptHeader), + timeout, timeoutUnit); + + // Handling of legacy overrideAcceptHeader. 
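+        // An Accept header supplied in httpHeaders replaces a non-null override value and is
+        // removed from the generic header map so that it is not sent verbatim with every request.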
+ this.overrideAcceptHeader = overrideAcceptHeader; + // Important - handled as special case because the defaults vary by query type. + if ( httpHeaders.containsKey(HttpNames.hAccept) ) { + if ( this.overrideAcceptHeader != null ) { + String acceptHeader = httpHeaders.get(HttpNames.hAccept); + this.overrideAcceptHeader = acceptHeader; + } + this.httpHeaders.remove(HttpNames.hAccept); + } + } + + protected QueryExecHTTPImpl(String serviceURL, Query query, String queryString, int urlLimit, + HttpClient httpClient, Map httpHeaders, Params params, Context context, + List defaultGraphURIs, List namedGraphURIs, + QuerySendMode sendMode, + String selectAcceptHeader, String askAcceptHeader, + String graphAcceptHeader, String datasetAcceptHeader, + long timeout, TimeUnit timeoutUnit) { + this.context = ( context == null ) ? ARQ.getContext().copy() : context.copy(); + this.service = serviceURL; + this.query = query; + this.queryString = queryString; + this.urlLimit = urlLimit; + this.httpHeaders = httpHeaders; + this.defaultGraphURIs = defaultGraphURIs; + this.namedGraphURIs = namedGraphURIs; + this.sendMode = Objects.requireNonNull(sendMode); + this.selectAcceptHeader = selectAcceptHeader; + this.askAcceptHeader = askAcceptHeader; + this.graphAcceptHeader = graphAcceptHeader; + this.datasetAcceptHeader = datasetAcceptHeader; + this.httpHeaders = httpHeaders; + this.params = params; + this.readTimeout = timeout; + this.readTimeoutUnit = timeoutUnit; + this.httpClient = HttpLib.dft(httpClient, HttpEnv.getDftHttpClient()); + } + + @Override + public String getAcceptHeaderSelect() { + return selectAcceptHeader; + } + + @Override + public String getAcceptHeaderAsk() { + return askAcceptHeader; + } + + @Override + public String getAcceptHeaderDescribe() { + return graphAcceptHeader; + } + + @Override + public String getAcceptHeaderConstructGraph() { + return graphAcceptHeader; + } + + @Override + public String getAcceptHeaderConstructDataset() { + return datasetAcceptHeader; + } + + /** Getter for the override accept header. Only used for testing. */ + @Deprecated(forRemoval = true) + public String getAppProvidedAcceptHeader() { + return overrideAcceptHeader; + } + + /** The Content-Type response header received (null before the remote operation is attempted). */ + @Override + public String getHttpResponseContentType() { + return httpResponseContentType; + } + + @Override + public RowSet select() { + checkNotClosed(); + check(QueryType.SELECT); + RowSet rs = execRowSet(); + return rs; + } + + private RowSet execRowSet() { + HttpRequest request = effectiveHttpRequest(selectAcceptHeader); + HttpResponse response = executeQuery(request); + InputStream in = registerInputStream(response); + // Don't assume the endpoint actually gives back the content type we asked for + String actualContentType = responseHeader(response, HttpNames.hContentType); + + // Remember the response. + httpResponseContentType = actualContentType; + + // More reliable to use the format-defined charsets e.g. JSON -> UTF-8 + actualContentType = removeCharset(actualContentType); + + if (false) { + byte b[] = IO.readWholeFile(in); + String str = new String(b); + System.out.println(str); + in = new ByteArrayInputStream(b); + } + + if (actualContentType == null || actualContentType.equals("")) + actualContentType = WebContent.contentTypeResultsXML; + + // Map to lang, with pragmatic alternatives. 
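+        // A content type that does not map to a registered result-set reader is not guessed
+        // at; it is reported through raiseException below.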
+ Lang lang = WebContent.contentTypeToLangResultSet(actualContentType); + boolean unknownLang = lang == null; + boolean unsupportedFormat = !unknownLang && !ResultSetReaderRegistry.isRegistered(lang); + if ( unknownLang || unsupportedFormat ) { + String errorTerm = unknownLang ? "recognized" : "supported"; + String errorMsg = String.format("Endpoint returned Content-Type: %s which is not %s for SELECT queries", + actualContentType, errorTerm); + raiseException(errorMsg, request, response, in); + } + + // This returns a streaming result set for some formats. + // Do not close the InputStream at this point. + ResultSet result = ResultSetMgr.read(in, lang); + return RowSet.adapt(result); + } + + @Override + public boolean ask() { + checkNotClosed(); + check(QueryType.ASK); + HttpRequest request = effectiveHttpRequest(askAcceptHeader); + HttpResponse response = executeQuery(request); + InputStream in = registerInputStream(response); + + String actualContentType = responseHeader(response, HttpNames.hContentType); + httpResponseContentType = actualContentType; + actualContentType = removeCharset(actualContentType); + + // If the server fails to return a Content-Type then we will assume + // the server returned the type we asked for + if (actualContentType == null || actualContentType.equals("")) + actualContentType = askAcceptHeader; + + Lang lang = RDFLanguages.contentTypeToLang(actualContentType); + if ( lang == null ) { + // Any specials : + // application/xml for application/sparql-results+xml + // application/json for application/sparql-results+json + if (actualContentType.equals(WebContent.contentTypeXML)) + lang = ResultSetLang.RS_XML; + else if ( actualContentType.equals(WebContent.contentTypeJSON)) + lang = ResultSetLang.RS_JSON; + } + if (lang == null) { + raiseException("Endpoint returned Content-Type: " + actualContentType + " which is not supported for ASK queries", request, response, in); + } + try { + boolean result = ResultSetMgr.readBoolean(in, lang); + return result; + } finally { + finishInputStream(in); + } + } + + private String removeCharset(String contentType) { + if ( contentType == null ) + return contentType; + int idx = contentType.indexOf(';'); + if ( idx < 0 ) + return contentType; + return contentType.substring(0,idx); + } + + @Override + public Graph construct(Graph graph) { + checkNotClosed(); + check(QueryType.CONSTRUCT); + return execGraph(graph, graphAcceptHeader); + } + + @Override + public Iterator constructTriples() { + checkNotClosed(); + check(QueryType.CONSTRUCT); + return execTriples(graphAcceptHeader); + } + + @Override + public Iterator constructQuads(){ + checkNotClosed(); + return execQuads(); + } + + @Override + public DatasetGraph constructDataset(){ + checkNotClosed(); + return constructDataset(DatasetGraphFactory.createTxnMem()); + } + + @Override + public DatasetGraph constructDataset(DatasetGraph dataset){ + checkNotClosed(); + check(QueryType.CONSTRUCT); + return execDataset(dataset); + } + + @Override + public Graph describe(Graph graph) { + checkNotClosed(); + check(QueryType.DESCRIBE); + return execGraph(graph, graphAcceptHeader); + } + + @Override + public Iterator describeTriples() { + checkNotClosed(); + return execTriples(graphAcceptHeader); + } + + private Graph execGraph(Graph graph, String acceptHeader) { + Pair p = execRdfWorker(acceptHeader, WebContent.contentTypeRDFXML); + InputStream in = p.getLeft(); + Lang lang = p.getRight(); + try { + RDFDataMgr.read(graph, in, lang); + } finally { + finishInputStream(in); + } + return graph; 
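
For orientation, here is a minimal caller-side sketch of the SELECT and ASK paths implemented above (the endpoint URL is a placeholder, and the QueryExecHTTP.service(...) entry point is assumed to be the usual builder shortcut). It illustrates that each query form gets its own default Accept header and that the Content-Type actually returned by the server is available afterwards through getHttpResponseContentType():

    import org.apache.jena.sparql.exec.RowSet;
    import org.apache.jena.sparql.exec.RowSetOps;
    import org.apache.jena.sparql.exec.http.QueryExecHTTP;

    public class QueryExecHTTPSelectAskSketch {
        public static void main(String[] args) {
            // SELECT: the builder applies the SELECT accept header; select() streams a RowSet.
            try (QueryExecHTTP qExec = QueryExecHTTP.service("https://example.org/sparql")   // placeholder endpoint
                    .query("SELECT * { ?s ?p ?o } LIMIT 5")
                    .build()) {
                RowSet rows = qExec.select();
                RowSetOps.out(System.out, rows);
                // Content-Type reported by the endpoint, recorded once the request has run.
                System.out.println("Response Content-Type: " + qExec.getHttpResponseContentType());
            }

            // ASK: a different default accept header applies and the result is a boolean.
            try (QueryExecHTTP qExec = QueryExecHTTP.service("https://example.org/sparql")
                    .query("ASK { ?s ?p ?o }")
                    .build()) {
                System.out.println("ASK result: " + qExec.ask());
            }
        }
    }
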
+ } + + private DatasetGraph execDataset(DatasetGraph dataset) { + Pair p = execRdfWorker(datasetAcceptHeader, WebContent.contentTypeNQuads); + InputStream in = p.getLeft(); + Lang lang = p.getRight(); + try { + RDFDataMgr.read(dataset, in, lang); + } finally { + finishInputStream(in); + } + return dataset; + } + + @SuppressWarnings("removal") + private Iterator execTriples(String acceptHeader) { + Pair p = execRdfWorker(acceptHeader, WebContent.contentTypeRDFXML); + InputStream input = p.getLeft(); + Lang lang = p.getRight(); + // Base URI? + // Unless N-Triples, this creates a thread. + Iterator iter = RDFDataMgr.createIteratorTriples(input, lang, null); + return Iter.onCloseIO(iter, input); + } + + @SuppressWarnings("removal") + private Iterator execQuads() { + checkNotClosed(); + Pair p = execRdfWorker(datasetAcceptHeader, WebContent.contentTypeNQuads); + InputStream input = p.getLeft(); + Lang lang = p.getRight(); + // Unless N-Quads, this creates a thread. + Iterator iter = RDFDataMgr.createIteratorQuads(input, lang, null); + return Iter.onCloseIO(iter, input); + } + + // Any RDF data back (CONSTRUCT, DESCRIBE, QUADS) + // ifNoContentType - some wild guess at the content type. + private Pair execRdfWorker(String contentType, String ifNoContentType) { + checkNotClosed(); + String thisAcceptHeader = contentType; + HttpRequest request = effectiveHttpRequest(thisAcceptHeader); + HttpResponse response = executeQuery(request); + InputStream in = registerInputStream(response); + + // Don't assume the endpoint actually gives back the content type we asked for + String actualContentType = responseHeader(response, HttpNames.hContentType); + httpResponseContentType = actualContentType; + actualContentType = removeCharset(actualContentType); + + // If the server fails to return a Content-Type then we will assume + // the server returned the type we asked for + if (actualContentType == null || actualContentType.equals("")) + actualContentType = ifNoContentType; + + Lang lang = RDFLanguages.contentTypeToLang(actualContentType); + if ( ! RDFLanguages.isQuads(lang) && ! RDFLanguages.isTriples(lang) ) { + raiseException("Endpoint returned Content Type: " + + actualContentType + + " which is not a valid RDF syntax", request, response, in); + } + return Pair.create(in, lang); + } + + @Override + public JsonArray execJson() { + checkNotClosed(); + check(QueryType.CONSTRUCT_JSON); + String thisAcceptHeader = dft(overrideAcceptHeader, WebContent.contentTypeJSON); + HttpRequest request = effectiveHttpRequest(thisAcceptHeader); + HttpResponse response = executeQuery(request); + InputStream in = registerInputStream(response); + try { + return JSON.parseAny(in).getAsArray(); + } finally { finishInputStream(in); } + } + + @Override + public Iterator execJsonItems() { + JsonArray array = execJson().getAsArray(); + List x = new ArrayList<>(array.size()); + array.forEach(elt->{ + if ( ! elt.isObject()) + throw new QueryExecException("Item in an array from a JSON query isn't an object"); + x.add(elt.getAsObject()); + }); + return x.iterator(); + } + + private void check(QueryType queryType) { + if ( query == null ) { + // Pass through the queryString. + return; + } + if ( query.queryType() != queryType ) + throw new QueryExecException("Not the right form of query. 
Expected "+queryType+" but got "+query.queryType()); + } + + @Override + public Context getContext() { + return context; + } + + @Override + public DatasetGraph getDataset() { + return null; + } + + // This may be null - if we were created form a query string, + // we don't guarantee to parse it so we let through non-SPARQL + // extensions to the far end. + @Override + public Query getQuery() { + if ( query != null ) + return query; + if ( queryString != null ) { + // Object not created with a Query object, may be because there is foreign + // syntax in the query or may be because the query string was available and the app + // didn't want the overhead of parsing it every time. + // Try to parse it else return null; + try { return QueryFactory.create(queryString, Syntax.syntaxARQ); } + catch (QueryParseException ex) {} + return null; + } + return null; + } + + /** + * Return the query string. If this was supplied as a string, + * there is no guarantee this is legal SPARQL syntax. + */ + @Override + public String getQueryString() { + return queryString; + } + + private static long asMillis(long duration, TimeUnit timeUnit) { + return (duration < 0) ? duration : timeUnit.toMillis(duration); + } + + private void raiseException(String errorMsg, HttpRequest request, HttpResponse response, InputStream in) { + int bodySummaryLength = 1024; + int statusCode = response.statusCode(); + String statusCodeMsg = HttpSC.getMessage(statusCode); + + // Determine the charset for extracting an excerpt of the body + String actualContentType = responseHeader(response, HttpNames.hContentType); + MediaType ct = MediaType.create(actualContentType); + String charsetName = ct == null ? null : ct.getCharset(); + Charset charset = null; + try { + charset = charsetName == null ? null : Charset.forName(charsetName); + } catch (Throwable e) { + // Silently ignore + } + if (charset == null) { + charset = StandardCharsets.UTF_8; + } + + String bodyStr; + try { + bodyStr = in == null ? "(no data supplied)" : IO.abbreviate(in, charset, bodySummaryLength, "..."); + } catch (Throwable e) { + // No need to rethrow because we are already about to throw + bodyStr = "(failed to retrieve HTTP body due to: " + e.getMessage() + ")"; + } + + throw new QueryException(String.format( + "%s.\nStatus code %d %s, Method %s, Request Headers: %s\nBody (extracted with charset %s): %s", + errorMsg, statusCode, statusCodeMsg, request.method(), request.headers().map(), charset.name(), bodyStr)); + } + + /** + * Build the effective HTTP request ready for use with {@link #executeQuery(HttpRequest)}. + */ + private HttpRequest effectiveHttpRequest(String reqAcceptHeader) { + if (closed) + throw new ARQException("HTTP execution already closed"); + + // SERVICE specials. 
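
Since getQuery() only parses lazily and getQueryString() hands back whatever string was supplied, a caller can deliberately skip client-side parsing and push vendor-specific syntax straight to the server. A sketch under those assumptions (placeholder endpoint; parseCheck(false) is the builder option shown earlier in this change set):

    import org.apache.jena.sparql.exec.RowSetOps;
    import org.apache.jena.sparql.exec.http.QueryExecHTTP;

    public class ParseCheckOffSketch {
        public static void main(String[] args) {
            // Undeclared prefixes such as bif: would fail QueryFactory.create on the client.
            String queryString = "SELECT * { ?s rdfs:label ?o . ?o bif:contains 'Leipzig' } LIMIT 3";
            try (QueryExecHTTP qExec = QueryExecHTTP.service("https://example.org/sparql")   // placeholder endpoint
                    .parseCheck(false)          // pass the string through without parsing it locally
                    .query(queryString)
                    .build()) {
                RowSetOps.out(System.out, qExec.select());
                // getQuery() returns a parsed Query only if the string happens to be legal ARQ
                // syntax, otherwise null; getQueryString() always returns the original string.
            }
        }
    }
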
+ + Params thisParams = Params.create(params); + + if ( defaultGraphURIs != null ) { + for ( String dft : defaultGraphURIs ) + thisParams.add( HttpParams.pDefaultGraph, dft ); + } + if ( namedGraphURIs != null ) { + for ( String name : namedGraphURIs ) + thisParams.add( HttpParams.pNamedGraph, name ); + } + + HttpLib.modifyByService(service, context, thisParams, httpHeaders); + + HttpRequest request = makeRequest(thisParams, reqAcceptHeader); + return request; + } + + private HttpRequest makeRequest(Params thisParams, String reqAcceptHeader) { + QuerySendMode actualSendMode = actualSendMode(); + HttpRequest.Builder requestBuilder; + switch(actualSendMode) { + case asGetAlways : + requestBuilder = executeQueryGet(thisParams, reqAcceptHeader); + break; + case asPostForm : + requestBuilder = executeQueryPostForm(thisParams, reqAcceptHeader); + break; + case asPost : + requestBuilder = executeQueryPostBody(thisParams, reqAcceptHeader); + break; + default : + // Should not happen! + throw new InternalErrorException("Invalid value for 'actualSendMode' "+actualSendMode); + } + return requestBuilder.build(); + } + + /** + * Execute an HttpRequest and wait for the HttpResponse. + * A call to {@link #abort()} interrupts the wait. + * The response is returned after status code processing so the caller can assume the + * query execution was successful and return 200. + * Use {@link HttpLib#getInputStream} to access the body. + */ + private HttpResponse executeQuery(HttpRequest request) { + checkNotClosed(); + + if (future != null) { + throw new IllegalStateException("Execution was already started."); + } + + try { + synchronized (abortLock) { + checkNotAborted(); + logQuery(queryString, request); + future = HttpLib.executeAsync(httpClient, request); + } + + HttpResponse response = AsyncHttpRDF.getOrElseThrow(future, request); + HttpLib.handleHttpStatusCode(response); + return response; + } catch (HttpException httpEx) { + throw QueryExceptionHTTP.rewrap(httpEx); + } + } + + private QuerySendMode actualSendMode() { + int thisLengthLimit = urlLimit; + switch(sendMode) { + case asGetAlways : + case asPostForm : + case asPost : + return sendMode; + case asGetWithLimitBody : + case asGetWithLimitForm : + break; + } + + // Only QuerySendMode.asGetWithLimitBody and QuerySendMode.asGetWithLimitForm here. + String requestURL = service; + // Other params (query= has not been added at this point) + int paramsLength = params.httpString().length(); + int qEncodedLength = calcEncodeStringLength(queryString); + + // URL Length, including service (for safety) + int length = service.length() + + /* ?query= */ 1 + HttpParams.pQuery.length() + + /* encoded query */ qEncodedLength + + /* &other params*/ 1 + paramsLength; + if ( length <= thisLengthLimit ) + return QuerySendMode.asGetAlways; + return (sendMode==QuerySendMode.asGetWithLimitBody) ? QuerySendMode.asPost : QuerySendMode.asPostForm; + } + + private static int calcEncodeStringLength(String str) { + // Could approximate by counting non-queryString character and adding that *2 to the length of the string. 
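
The send-mode decision above is just arithmetic on an estimated request URL length. A rough standalone illustration of that calculation, in which URLEncoder and the literal "query" stand in for HttpLib.urlEncodeQueryString and HttpParams.pQuery, and 2048 is an assumed limit (the real value comes from HttpEnv.urlLimit or the builder):

    import java.net.URLEncoder;
    import java.nio.charset.StandardCharsets;

    public class SendModeLengthSketch {
        public static void main(String[] args) {
            String service = "https://example.org/sparql";      // placeholder endpoint
            String queryString = "SELECT * { ?s ?p ?o } LIMIT 10";
            String otherParams = "";                             // e.g. default-graph-uri values
            int urlLimit = 2048;                                 // assumed limit for this sketch

            int encodedQueryLength = URLEncoder.encode(queryString, StandardCharsets.UTF_8).length();
            int length = service.length()
                    + 1 + "query".length()                       // "?query="
                    + encodedQueryLength
                    + 1 + otherParams.length();                  // "&..." if there are other params
            String decision = (length <= urlLimit) ? "send as GET" : "fall back to POST";
            System.out.println("Estimated URL length " + length + " -> " + decision);
        }
    }
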
+ String qs = HttpLib.urlEncodeQueryString(str); + int encodedLength = qs.length(); + return encodedLength; + } + + private HttpRequest.Builder executeQueryGet(Params thisParams, String acceptHeader) { + thisParams.add(HttpParams.pQuery, queryString); + String requestURL = requestURL(service, thisParams.httpString()); + HttpRequest.Builder builder = HttpLib.requestBuilder(requestURL, httpHeaders, readTimeout, readTimeoutUnit); + acceptHeader(builder, acceptHeader); + return builder.GET(); + } + + private HttpRequest.Builder executeQueryPostForm(Params thisParams, String acceptHeader) { + thisParams.add(HttpParams.pQuery, queryString); + String requestURL = service; + String formBody = thisParams.httpString(); + HttpRequest.Builder builder = HttpLib.requestBuilder(requestURL, httpHeaders, readTimeout, readTimeoutUnit); + acceptHeader(builder, acceptHeader); + // Use an HTML form. + contentTypeHeader(builder, WebContent.contentTypeHTMLForm); + // Already UTF-8 encoded to ASCII. + return builder.POST(BodyPublishers.ofString(formBody, StandardCharsets.US_ASCII)); + } + + // Use SPARQL query body and MIME type. + private HttpRequest.Builder executeQueryPostBody(Params thisParams, String acceptHeader) { + // Use thisParams (for default-graph-uri etc) + String requestURL = requestURL(service, thisParams.httpString()); + HttpRequest.Builder builder = HttpLib.requestBuilder(requestURL, httpHeaders, readTimeout, readTimeoutUnit); + contentTypeHeader(builder, QUERY_MIME_TYPE); + acceptHeader(builder, acceptHeader); + return builder.POST(BodyPublishers.ofString(queryString)); + } + + private static void logQuery(String queryString, HttpRequest request) {} + + /** + * Cancel query evaluation + */ + @Override + public void abort() { + // Setting abort to true causes the next read from + // retainedConnectionView (if already created) to + // fail with a QueryCancelledException. + isAborted = true; + if (cancelFutureOnAbort) { + cancelFuture(future); + } + } + + private InputStream registerInputStream(HttpResponse httpResponse) { + InputStream in = HttpLib.getInputStream(httpResponse); + registerInputStream(in); + return in; + } + + /** + * Set the given input stream as the 'retainedConnection' and create a corresponding + * asynchronously abortable 'retainedConnectionView'. The latter is returned. + * If execution was already aborted then a {@link QueryCancelledException} is raised. + */ + private InputStream registerInputStream(InputStream input) { + synchronized (abortLock) { + this.retainedConnection = input; + // Note: Used ProxyInputStream because the ctor of CloseShieldInputStream is deprecated. + this.retainedConnectionView = new ProxyInputStream(input) { + @Override + protected void beforeRead(int n) throws IOException { + checkNotAborted(); + super.beforeRead(n); + } + @Override + public void close() { + this.in = ClosedInputStream.INSTANCE; + } + }; + + // If already aborted then bail out before starting the parsers. + checkNotAborted(); + } + return retainedConnectionView; + } + + @Override + public void close() { + closed = true; + // No need to handle the future here, because the possible states are: + // - Null because no execution was started -> retainedConnection is null. + // - Cancelled by asynchronous abort -> retainedConnection is null. + // - Completed successfully by the same thread that now closes the retainedConnection + // -> retainedConnection is non-null. 
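
The cancellation design above rests on wrapping the HTTP body in a ProxyInputStream whose beforeRead hook checks an abort flag, while close() merely detaches the view. A self-contained sketch of that pattern (names are illustrative; a plain IOException stands in for Jena's QueryCancelledException):

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;
    import java.util.concurrent.atomic.AtomicBoolean;

    import org.apache.commons.io.input.ClosedInputStream;
    import org.apache.commons.io.input.ProxyInputStream;

    public class AbortableStreamSketch {
        public static void main(String[] args) throws IOException {
            AtomicBoolean aborted = new AtomicBoolean(false);
            InputStream physical = new ByteArrayInputStream("result data".getBytes(StandardCharsets.UTF_8));

            InputStream abortableView = new ProxyInputStream(physical) {
                @Override
                protected void beforeRead(int n) throws IOException {
                    if (aborted.get())
                        throw new IOException("execution aborted");   // Jena raises QueryCancelledException here
                    super.beforeRead(n);
                }
                @Override
                public void close() {
                    this.in = ClosedInputStream.INSTANCE;             // detach; the physical stream is closed elsewhere
                }
            };

            System.out.println((char) abortableView.read());          // normal read
            aborted.set(true);                                         // simulate abort() from another thread
            try {
                abortableView.read();
            } catch (IOException e) {
                System.out.println("Read after abort failed as expected: " + e.getMessage());
            }
        }
    }
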
+ IOUtils.closeQuietly(retainedConnectionView); + closeRetainedConnection(); + } + + private static void cancelFuture(CompletableFuture future) { + if (future != null) { + future.cancel(true); + } + } + + private void closeRetainedConnection() { + if (retainedConnection != null) { + try { + // This call may take a long time if the response has not been consumed + // as HTTP client will consume the remaining response so it can re-use the + // connection. If we're closing when we're not at the end of the stream then + // issue a warning to the logs + if (retainedConnection.read() != -1) + Log.warn(this, "HTTP response not fully consumed, if HTTP Client is reusing connections (its default behaviour) then it will consume the remaining response data which may take a long time and cause this application to become unresponsive"); + retainedConnection.close(); + } catch (RuntimeIOException | java.io.IOException e) { + // If we are closing early and the underlying stream is chunk encoded + // the close() can result in a IOException. TypedInputStream catches + // and re-wraps that and we want to suppress both forms. + } finally { + retainedConnection = null; + } + } + } + + private void checkNotClosed() { + if ( closed ) + throw new QueryExecException("HTTP QueryExecHTTP has been closed"); + } + + protected void checkNotAborted() { + if ( isAborted ) + throw new QueryCancelledException(); + } + + @Override + public boolean isClosed() { return closed; } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/QueryExecHTTPWrapper.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/QueryExecHTTPWrapper.java new file mode 100644 index 00000000000..4fdec2cc41f --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/QueryExecHTTPWrapper.java @@ -0,0 +1,97 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec.http; + +import org.apache.jena.sparql.exec.QueryExec; +import org.apache.jena.sparql.exec.QueryExecWrapper; +import org.apache.jena.sparql.exec.tracker.QueryExecTransform; + +/** + * Wrapper for QueryExecHTTP instances. + * Uses one delegate for execution and another for information. + */ +public class QueryExecHTTPWrapper + extends QueryExecWrapper + implements QueryExecHTTP +{ + private final QueryExecHTTP httpDelegate; + + // Closing the exec delegate is assumed to close the http delegate. + public static QueryExecHTTP transform(QueryExecHTTP qExec, QueryExecTransform transform) { + QueryExecHTTP httpDelegate = qExec; + QueryExec execDelegate = qExec; + + // Unwrap an existing wrapper. 
+ if (qExec instanceof QueryExecHTTPWrapper wrapper) { + httpDelegate = wrapper.getDelegateHttp(); + execDelegate = wrapper.getDelegate(); + } + + QueryExec qe = transform.transform(execDelegate); + if (qe instanceof QueryExecHTTP qeh) { + return qeh; + } + + return new QueryExecHTTPWrapper(httpDelegate, qe); + } + + public QueryExecHTTPWrapper(QueryExecHTTP delegate) { + this(delegate, delegate); + } + + public QueryExecHTTPWrapper(QueryExecHTTP httpDelegate, QueryExec execDelegate) { + super(execDelegate); + this.httpDelegate = httpDelegate; + } + + /** Delegate for HTTP metadata. May be different from the execution delegate obtained from {@link #getDelegate()}. */ + protected QueryExecHTTP getDelegateHttp() { + return httpDelegate; + } + + @Override + public String getAcceptHeaderSelect() { + return getDelegateHttp().getAcceptHeaderSelect(); + } + + @Override + public String getAcceptHeaderAsk() { + return getDelegateHttp().getAcceptHeaderAsk(); + } + + @Override + public String getAcceptHeaderDescribe() { + return getDelegateHttp().getAcceptHeaderDescribe(); + } + + @Override + public String getAcceptHeaderConstructGraph() { + return getDelegateHttp().getAcceptHeaderConstructGraph(); + } + + @Override + public String getAcceptHeaderConstructDataset() { + return getDelegateHttp().getAcceptHeaderConstructDataset(); + } + + @Override + public String getHttpResponseContentType() { + return getDelegateHttp().getHttpResponseContentType(); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/QueryExecutionHTTPBuilder.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/QueryExecutionHTTPBuilder.java index fa8cc473c09..073479f6024 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/QueryExecutionHTTPBuilder.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/QueryExecutionHTTPBuilder.java @@ -50,7 +50,7 @@ protected QueryExecutionHTTPBuilder thisBuilder() { @Override protected QueryExecutionHTTP buildX(HttpClient hClient, Query queryActual, String queryStringActual, Context cxt) { - QueryExecHTTP qExec = new QueryExecHTTP(serviceURL, queryActual, queryStringActual, urlLimit, + QueryExecHTTP qExec = new QueryExecHTTPImpl(serviceURL, queryActual, queryStringActual, urlLimit, hClient, new HashMap<>(httpHeaders), Params.create(params), cxt, copyArray(defaultGraphURIs), copyArray(namedGraphURIs), sendMode, diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/UpdateExecHTTP.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/UpdateExecHTTP.java index a2ebaa1cda3..d0e8dd8c31e 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/UpdateExecHTTP.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/UpdateExecHTTP.java @@ -18,32 +18,9 @@ package org.apache.jena.sparql.exec.http; -import static org.apache.jena.http.HttpLib.*; - -import java.io.InputStream; -import java.net.http.HttpClient; -import java.net.http.HttpRequest; -import java.net.http.HttpRequest.BodyPublisher; -import java.net.http.HttpRequest.BodyPublishers; -import java.net.http.HttpResponse; -import java.nio.charset.StandardCharsets; -import java.util.List; -import java.util.Map; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; - -import org.apache.jena.atlas.logging.Log; -import org.apache.jena.http.HttpEnv; -import org.apache.jena.http.HttpLib; -import org.apache.jena.riot.WebContent; -import org.apache.jena.riot.web.HttpNames; -import 
org.apache.jena.sparql.engine.http.HttpParams; import org.apache.jena.sparql.exec.UpdateExec; -import org.apache.jena.sparql.util.Context; -import org.apache.jena.update.UpdateRequest; - -public class UpdateExecHTTP implements UpdateExec { +public interface UpdateExecHTTP extends UpdateExec { public static UpdateExecHTTPBuilder newBuilder() { return UpdateExecHTTPBuilder.create(); } @@ -51,142 +28,4 @@ public static UpdateExecHTTPBuilder newBuilder() { public static UpdateExecHTTPBuilder service(String endpointURL) { return UpdateExecHTTPBuilder.create().endpoint(endpointURL); } - - private final Context context; - private final String service; - - // UpdateRequest as an object - may be null. - private final UpdateRequest update; - private final String updateString; - private final Map httpHeaders; - private final HttpClient httpClient; - private final UpdateSendMode sendMode; - private final Params params; - private final List usingGraphURIs; - private final List usingNamedGraphURIs; - private final long timeout; - private final TimeUnit timeoutUnit; - - private AtomicBoolean cancelSignal = new AtomicBoolean(false); - private volatile InputStream retainedConnection = null; - - /*package*/ UpdateExecHTTP(String serviceURL, UpdateRequest update, String updateString, - HttpClient httpClient, Params params, - List usingGraphURIs, - List usingNamedGraphURIs, - Map httpHeaders, UpdateSendMode sendMode, - Context context, - long timeout, TimeUnit timeoutUnit) { - this.context = context; - this.service = serviceURL; - this.update = update; - // Builder ensures one or the other is set. - this.updateString = ( updateString != null ) ? updateString : update.toString(); - this.httpClient = dft(httpClient, HttpEnv.getDftHttpClient()); - this.params = params; - this.usingGraphURIs = usingGraphURIs; - this.usingNamedGraphURIs = usingNamedGraphURIs; - this.httpHeaders = httpHeaders; - this.sendMode = sendMode; - this.timeout = timeout; - this.timeoutUnit = timeoutUnit; - } - - @Override - public Context getContext() { - return context; - } - - @Override - public UpdateRequest getUpdateRequest() { - return update; - } - - @Override - public String getUpdateRequestString() { - return updateString; - } - - @Override - public void execute() { - Params thisParams = Params.create(params); - if ( usingGraphURIs != null ) { - for ( String uri : usingGraphURIs ) - thisParams.add(HttpNames.paramUsingGraphURI, uri); - } - if ( usingNamedGraphURIs != null ) { - for ( String uri : usingNamedGraphURIs ) - thisParams.add(HttpNames.paramUsingNamedGraphURI, uri); - } - - modifyByService(service, context, thisParams, httpHeaders); - - switch(sendMode) { - case asPost : - executePostBody(thisParams); break; - case asPostForm : - executePostForm(thisParams); break; - } - } - - private void executePostBody(Params thisParams) { - String str = updateString; - String requestURL = service; - if ( thisParams.count() > 0 ) { - String qs = thisParams.httpString(); - requestURL = requestURL(requestURL, qs); - } - executeUpdate(requestURL, BodyPublishers.ofString(str), WebContent.contentTypeSPARQLUpdate); - } - - private void executePostForm(Params thisParams) { - String requestURL = service; - thisParams.add(HttpParams.pUpdate, updateString); - String formString = thisParams.httpString(); - // Everything goes into the form body, no use of the request URI query string. 
- executeUpdate(requestURL, BodyPublishers.ofString(formString, StandardCharsets.US_ASCII), WebContent.contentTypeHTMLForm); - } - - private String executeUpdate(String requestURL, BodyPublisher body, String contentType) { - HttpRequest.Builder builder = HttpLib.requestBuilder(requestURL, httpHeaders, timeout, timeoutUnit); - builder = contentTypeHeader(builder, contentType); - HttpRequest request = builder.POST(body).build(); - logUpdate(updateString, request); - HttpResponse response = HttpLib.execute(httpClient, request); - // Consumes and closes the input stream. - return HttpLib.handleResponseRtnString(response, this::setRetainedConnection); - } - - // abort() may be called while waiting for the remote update to complete. - // Capture the input stream (from HttpLib.handleResponseRtnString) - private void setRetainedConnection(InputStream in) { - synchronized (cancelSignal) { - retainedConnection = in; - if (cancelSignal.get()) { - abort(); - } - } - } - - private static void logUpdate(String updateString, HttpRequest request) {} - - /** - * Best effort that tries to close an underlying HTTP connection. - * May still hang waiting for the HTTP request to complete. - */ - @Override - public void abort() { - cancelSignal.set(true); - synchronized (cancelSignal) { - try { - InputStream in = retainedConnection; - if (in != null) { - HttpLib.finishInputStream(in); - retainedConnection = null; - } - } catch (Exception ex) { - Log.warn(this, "Error during abort", ex); - } - } - } } diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/UpdateExecHTTPBuilder.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/UpdateExecHTTPBuilder.java index c38ff2c72c4..b38adc4ff46 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/UpdateExecHTTPBuilder.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/UpdateExecHTTPBuilder.java @@ -21,15 +21,21 @@ import static org.apache.jena.http.HttpLib.copyArray; import java.net.http.HttpClient; +import java.util.ArrayList; import java.util.HashMap; +import java.util.List; +import java.util.Objects; import org.apache.jena.http.sys.ExecUpdateHTTPBuilder; import org.apache.jena.sparql.exec.UpdateExecBuilder; +import org.apache.jena.sparql.exec.tracker.UpdateExecTransform; import org.apache.jena.sparql.util.Context; import org.apache.jena.update.UpdateRequest; public class UpdateExecHTTPBuilder extends ExecUpdateHTTPBuilder implements UpdateExecBuilder { + protected List updateExecTransforms = new ArrayList<>(); + public static UpdateExecHTTPBuilder create() { return new UpdateExecHTTPBuilder(); } private UpdateExecHTTPBuilder() {} @@ -41,10 +47,21 @@ protected UpdateExecHTTPBuilder thisBuilder() { @Override protected UpdateExecHTTP buildX(HttpClient hClient, UpdateRequest updateActual, String updateStringActual, Context cxt) { - return new UpdateExecHTTP(serviceURL, updateActual, updateStringActual, hClient, params, + UpdateExecHTTP result = new UpdateExecHTTPImpl(serviceURL, updateActual, updateStringActual, hClient, params, copyArray(usingGraphURIs), copyArray(usingNamedGraphURIs), new HashMap<>(httpHeaders), sendMode, cxt, timeout, timeoutUnit); + for (UpdateExecTransform updateExecTransform : updateExecTransforms) { + result = UpdateExecHTTPWrapper.transform(result, updateExecTransform); + } + return result; + } + + @Override + public UpdateExecHTTPBuilder transformExec(UpdateExecTransform updateExecTransform) { + Objects.requireNonNull(updateExecTransform); + updateExecTransforms.add(updateExecTransform); + 
return this; } } diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/UpdateExecHTTPImpl.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/UpdateExecHTTPImpl.java new file mode 100644 index 00000000000..399fdef305d --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/UpdateExecHTTPImpl.java @@ -0,0 +1,186 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec.http; + +import static org.apache.jena.http.HttpLib.contentTypeHeader; +import static org.apache.jena.http.HttpLib.dft; +import static org.apache.jena.http.HttpLib.modifyByService; +import static org.apache.jena.http.HttpLib.requestURL; + +import java.io.InputStream; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpRequest.BodyPublisher; +import java.net.http.HttpRequest.BodyPublishers; +import java.net.http.HttpResponse; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; + +import org.apache.jena.atlas.logging.Log; +import org.apache.jena.http.HttpEnv; +import org.apache.jena.http.HttpLib; +import org.apache.jena.riot.WebContent; +import org.apache.jena.riot.web.HttpNames; +import org.apache.jena.sparql.engine.http.HttpParams; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.update.UpdateRequest; + +public class UpdateExecHTTPImpl implements UpdateExecHTTP { + + private final Context context; + private final String service; + + // UpdateRequest as an object - may be null. + private final UpdateRequest update; + private final String updateString; + private final Map httpHeaders; + private final HttpClient httpClient; + private final UpdateSendMode sendMode; + private final Params params; + private final List usingGraphURIs; + private final List usingNamedGraphURIs; + private final long timeout; + private final TimeUnit timeoutUnit; + + private AtomicBoolean cancelSignal = new AtomicBoolean(false); + private volatile InputStream retainedConnection = null; + + /*package*/ UpdateExecHTTPImpl(String serviceURL, UpdateRequest update, String updateString, + HttpClient httpClient, Params params, + List usingGraphURIs, + List usingNamedGraphURIs, + Map httpHeaders, UpdateSendMode sendMode, + Context context, + long timeout, TimeUnit timeoutUnit) { + this.context = context; + this.service = serviceURL; + this.update = update; + // Builder ensures one or the other is set. + this.updateString = ( updateString != null ) ? 
updateString : update.toString(); + this.httpClient = dft(httpClient, HttpEnv.getDftHttpClient()); + this.params = params; + this.usingGraphURIs = usingGraphURIs; + this.usingNamedGraphURIs = usingNamedGraphURIs; + this.httpHeaders = httpHeaders; + this.sendMode = sendMode; + this.timeout = timeout; + this.timeoutUnit = timeoutUnit; + } + + @Override + public Context getContext() { + return context; + } + + @Override + public UpdateRequest getUpdateRequest() { + return update; + } + + @Override + public String getUpdateRequestString() { + return updateString; + } + + @Override + public void execute() { + Params thisParams = Params.create(params); + if ( usingGraphURIs != null ) { + for ( String uri : usingGraphURIs ) + thisParams.add(HttpNames.paramUsingGraphURI, uri); + } + if ( usingNamedGraphURIs != null ) { + for ( String uri : usingNamedGraphURIs ) + thisParams.add(HttpNames.paramUsingNamedGraphURI, uri); + } + + modifyByService(service, context, thisParams, httpHeaders); + + switch(sendMode) { + case asPost : + executePostBody(thisParams); break; + case asPostForm : + executePostForm(thisParams); break; + } + } + + private void executePostBody(Params thisParams) { + String str = updateString; + String requestURL = service; + if ( thisParams.count() > 0 ) { + String qs = thisParams.httpString(); + requestURL = requestURL(requestURL, qs); + } + executeUpdate(requestURL, BodyPublishers.ofString(str), WebContent.contentTypeSPARQLUpdate); + } + + private void executePostForm(Params thisParams) { + String requestURL = service; + thisParams.add(HttpParams.pUpdate, updateString); + String formString = thisParams.httpString(); + // Everything goes into the form body, no use of the request URI query string. + executeUpdate(requestURL, BodyPublishers.ofString(formString, StandardCharsets.US_ASCII), WebContent.contentTypeHTMLForm); + } + + private String executeUpdate(String requestURL, BodyPublisher body, String contentType) { + HttpRequest.Builder builder = HttpLib.requestBuilder(requestURL, httpHeaders, timeout, timeoutUnit); + builder = contentTypeHeader(builder, contentType); + HttpRequest request = builder.POST(body).build(); + logUpdate(updateString, request); + HttpResponse response = HttpLib.execute(httpClient, request); + // Consumes and closes the input stream. + return HttpLib.handleResponseRtnString(response, this::setRetainedConnection); + } + + // abort() may be called while waiting for the remote update to complete. + // Capture the input stream (from HttpLib.handleResponseRtnString) + private void setRetainedConnection(InputStream in) { + synchronized (cancelSignal) { + retainedConnection = in; + if (cancelSignal.get()) { + abort(); + } + } + } + + private static void logUpdate(String updateString, HttpRequest request) {} + + /** + * Best effort that tries to close an underlying HTTP connection. + * May still hang waiting for the HTTP request to complete. 
+ */ + @Override + public void abort() { + cancelSignal.set(true); + synchronized (cancelSignal) { + try { + InputStream in = retainedConnection; + if (in != null) { + HttpLib.finishInputStream(in); + retainedConnection = null; + } + } catch (Exception ex) { + Log.warn(this, "Error during abort", ex); + } + } + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/UpdateExecHTTPWrapper.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/UpdateExecHTTPWrapper.java new file mode 100644 index 00000000000..6b7e0283ac5 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/UpdateExecHTTPWrapper.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec.http; + +import org.apache.jena.sparql.exec.UpdateExec; +import org.apache.jena.sparql.exec.UpdateExecWrapper; +import org.apache.jena.sparql.exec.tracker.UpdateExecTransform; + +public class UpdateExecHTTPWrapper + extends UpdateExecWrapper + implements UpdateExecHTTP +{ + private final UpdateExecHTTP httpDelegate; + + // Closing the exec delegate is assumed to close the http delegate. + public static UpdateExecHTTP transform(UpdateExecHTTP qExec, UpdateExecTransform transform) { + UpdateExecHTTP httpDelegate = qExec; + UpdateExec execDelegate = qExec; + + // Unwrap an existing wrapper. 
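
The transformExec hook added to UpdateExecHTTPBuilder lets callers wrap or inspect the UpdateExec that the builder produces, with UpdateExecHTTPWrapper keeping the HTTP-specific accessors available when the transform returns something other than an UpdateExecHTTP. A usage sketch under those assumptions (placeholder endpoint; the transform simply logs and returns the execution unchanged):

    import org.apache.jena.sparql.exec.UpdateExec;
    import org.apache.jena.sparql.exec.http.UpdateExecHTTP;
    import org.apache.jena.sparql.exec.tracker.UpdateExecTransform;

    public class UpdateTransformExecSketch {
        public static void main(String[] args) {
            UpdateExecTransform logging = (UpdateExec exec) -> {
                System.out.println("Built update execution: " + exec.getClass().getSimpleName());
                return exec;   // returning the same instance keeps the UpdateExecHTTP type
            };

            UpdateExecHTTP uExec = UpdateExecHTTP.service("https://example.org/update")   // placeholder endpoint
                    .update("INSERT DATA { <urn:example:s> <urn:example:p> <urn:example:o> }")
                    .transformExec(logging)
                    .build();
            uExec.execute();   // blocks until the remote endpoint responds
        }
    }
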
+ if (qExec instanceof UpdateExecHTTPWrapper wrapper) { + httpDelegate = wrapper.getHttpDelegate(); + execDelegate = wrapper.getDelegate(); + } + + UpdateExec ue = transform.transform(execDelegate); + if (ue instanceof UpdateExecHTTP ueh) { + return ueh; + } + + return new UpdateExecHTTPWrapper(httpDelegate, ue); + } + + public UpdateExecHTTPWrapper(UpdateExecHTTP delegate) { + this(delegate, delegate); + } + + public UpdateExecHTTPWrapper(UpdateExecHTTP httpDelegate, UpdateExec execDelegate) { + super(execDelegate); + this.httpDelegate = httpDelegate; + } + + protected UpdateExecHTTP getHttpDelegate() { + return httpDelegate; + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/UpdateExecutionHTTPBuilder.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/UpdateExecutionHTTPBuilder.java index f4aecfb0b7c..0889bfe0f62 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/UpdateExecutionHTTPBuilder.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/http/UpdateExecutionHTTPBuilder.java @@ -48,7 +48,7 @@ protected UpdateExecutionHTTPBuilder thisBuilder() { @Override protected UpdateExecutionHTTP buildX(HttpClient hClient, UpdateRequest updateActual, String updateStringActual, Context cxt) { - UpdateExecHTTP uExec = new UpdateExecHTTP(serviceURL, updateActual, updateStringActual, hClient, params, + UpdateExecHTTP uExec = new UpdateExecHTTPImpl(serviceURL, updateActual, updateStringActual, hClient, params, copyArray(usingGraphURIs), copyArray(usingNamedGraphURIs), new HashMap<>(httpHeaders), diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/QueryExecTransform.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/QueryExecTransform.java new file mode 100644 index 00000000000..9a9dd25a84d --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/QueryExecTransform.java @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec.tracker; + +import org.apache.jena.sparql.exec.QueryExec; + +public interface QueryExecTransform { + QueryExec transform(QueryExec queryExec); +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/UpdateExecTransform.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/UpdateExecTransform.java new file mode 100644 index 00000000000..6bf4816d6d8 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/UpdateExecTransform.java @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec.tracker; + +import org.apache.jena.sparql.exec.UpdateExec; + +public interface UpdateExecTransform { + UpdateExec transform(UpdateExec updateExec); +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/UpdateProcessorWrapper.java b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/UpdateProcessorWrapper.java new file mode 100644 index 00000000000..d2a6f0d2b47 --- /dev/null +++ b/jena-arq/src/main/java/org/apache/jena/sparql/exec/tracker/UpdateProcessorWrapper.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.sparql.exec.tracker; + +import org.apache.jena.update.UpdateProcessor; +import org.apache.jena.update.UpdateRequest; + +public class UpdateProcessorWrapper + implements UpdateProcessor +{ + private T delegate; + + public UpdateProcessorWrapper(T delegate) { + super(); + this.delegate = delegate; + } + + protected T getDelegate() { + return delegate; + } + + @Override + public UpdateRequest getUpdateRequest() { + return getDelegate().getUpdateRequest(); + } + + @Override + public String getUpdateRequestString() { + return getDelegate().getUpdateRequestString(); + } + + @Override + public void execute() { + getDelegate().execute(); + } +} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/graph/GraphSPARQLService.java b/jena-arq/src/main/java/org/apache/jena/sparql/graph/GraphSPARQLService.java deleted file mode 100644 index 4f66bd46c00..00000000000 --- a/jena-arq/src/main/java/org/apache/jena/sparql/graph/GraphSPARQLService.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.jena.sparql.graph; - -import org.apache.jena.graph.Graph ; -import org.apache.jena.graph.Node ; -import org.apache.jena.graph.Triple ; -import org.apache.jena.graph.impl.GraphBase ; -import org.apache.jena.sparql.algebra.Op ; -import org.apache.jena.sparql.algebra.op.OpBGP ; -import org.apache.jena.sparql.core.BasicPattern ; -import org.apache.jena.sparql.core.Var ; -import org.apache.jena.util.iterator.ExtendedIterator ; -import org.slf4j.Logger ; -import org.slf4j.LoggerFactory ; - -/** - * This class provides the Jena Graph interface to a remote SPARQL endpoint. - * Efficiency not guaranteed. - */ - -public class GraphSPARQLService extends GraphBase implements Graph -{ - private static Logger log = LoggerFactory.getLogger(GraphSPARQLService.class) ; - - private String serviceURI ; - private String graphIRI = null ; - - // Remote default graph - public GraphSPARQLService(String serviceURI) - { - this.serviceURI = serviceURI ; - this.graphIRI = null ; - } - - // Remote named graph - public GraphSPARQLService(String serviceURI, String graphIRI) - { - this.serviceURI = serviceURI ; - this.graphIRI = graphIRI ; - } - -// @Override -// public Capabilities getCapabilities() -// { -// if (capabilities == null) -// capabilities = new AllCapabilities() -// { @Override public boolean handlesLiteralTyping() { return false; } }; -// return capabilities; -// } - - @Override - protected ExtendedIterator graphBaseFind(Triple m) - { - Node s = m.getMatchSubject() ; - Var sVar = null ; - if ( s == null ) - { - sVar = Var.alloc("s") ; - s = sVar ; - } - - Node p = m.getMatchPredicate() ; - Var pVar = null ; - if ( p == null ) - { - pVar = Var.alloc("p") ; - p = pVar ; - } - - Node o = m.getMatchObject() ; - Var oVar = null ; - if ( o == null ) - { - oVar = Var.alloc("o") ; - o = oVar ; - } - - Triple triple = Triple.create(s, p ,o) ; - - // Evaluate as an algebra expression - BasicPattern pattern = new BasicPattern() ; - pattern.add(triple) ; - Op op = new OpBGP(pattern) ; - -// // Make remote execution object. 
-// System.err.println("GraphSPARQLService.graphBaseFind: Unimplemented : remote service execution") ; -// //Plan plan = factory.create(op, getDataset(), BindingRoot.create(), null) ; -// -// QueryIterator qIter = plan.iterator() ; -// List triples = new ArrayList() ; -// -// for (; qIter.hasNext() ; ) -// { -// Binding b = qIter.nextBinding() ; -// Node sResult = s ; -// Node pResult = p ; -// Node oResult = o ; -// if ( sVar != null ) -// sResult = b.get(sVar) ; -// if ( pVar != null ) -// pResult = b.get(pVar) ; -// if ( oVar != null ) -// oResult = b.get(oVar) ; -// Triple resultTriple = Triple.create(sResult, pResult, oResult) ; -// if ( log.isDebugEnabled() ) -// log.debug(" "+resultTriple) ; -// triples.add(resultTriple) ; -// } -// qIter.close() ; -// return WrappedIterator.createNoRemove(triples.iterator()) ; - return null ; - } - -} diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/modify/UpdateEngineWorker.java b/jena-arq/src/main/java/org/apache/jena/sparql/modify/UpdateEngineWorker.java index 3bc9b710393..fee378cb332 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/modify/UpdateEngineWorker.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/modify/UpdateEngineWorker.java @@ -575,7 +575,8 @@ protected Iterator evalBindings(Query query, DatasetGraph dsg, Binding updateRemainingQueryTimeout(context); // Not QueryExecDataset.dataset(...) because of initialBinding. - QueryExecDatasetBuilder builder = QueryExecDatasetBuilder.create().dataset(dsg).query(query).context(context); + // Note: QueryExecDatasetBuilderImpl.create() -> default engine builder, QueryExecDataset.create() -> deferred builder. + QueryExecDatasetBuilder builder = QueryExecDatasetBuilderImpl.create().dataset(dsg).query(query).context(context); if ( inputBinding != null ) { // Must use initialBinding - it puts the input in the results, unlike substitution. 
builder.initialBinding(inputBinding); diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/modify/UpdateProcessorBase.java b/jena-arq/src/main/java/org/apache/jena/sparql/modify/UpdateProcessorBase.java index d51b96e050e..5d1b9953411 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/modify/UpdateProcessorBase.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/modify/UpdateProcessorBase.java @@ -18,13 +18,14 @@ package org.apache.jena.sparql.modify; -import java.util.concurrent.atomic.AtomicBoolean; +import java.util.Objects ; +import java.util.concurrent.atomic.AtomicBoolean ; import org.apache.jena.atlas.iterator.Iter ; import org.apache.jena.sparql.core.DatasetGraph ; -import org.apache.jena.sparql.engine.binding.Binding ; import org.apache.jena.sparql.engine.Timeouts; import org.apache.jena.sparql.engine.Timeouts.Timeout; +import org.apache.jena.sparql.engine.binding.Binding ; import org.apache.jena.sparql.util.Context ; import org.apache.jena.update.UpdateProcessor ; import org.apache.jena.update.UpdateRequest ; @@ -70,6 +71,16 @@ public UpdateProcessorBase(UpdateRequest request, } } + @Override + public UpdateRequest getUpdateRequest() { + return request; + } + + @Override + public String getUpdateRequestString() { + return Objects.toString(request); + } + @Override public void execute() { UpdateEngine uProc = factory.create(datasetGraph, inputBinding, context); diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/util/ContextAccumulator.java b/jena-arq/src/main/java/org/apache/jena/sparql/util/ContextAccumulator.java index 52e91147a6a..87783368040 100644 --- a/jena-arq/src/main/java/org/apache/jena/sparql/util/ContextAccumulator.java +++ b/jena-arq/src/main/java/org/apache/jena/sparql/util/ContextAccumulator.java @@ -93,6 +93,32 @@ public ContextAccumulator set(Symbol symbol, boolean value) { return this; } + /** + * Return the current value of a symbol without building + * the full context. + * Allows builders to inspect current settings, such as parseCheck. + * */ + public T get(Symbol symbol) { + T result = addedContext.get(symbol); + if (result == null) { + Context extra = extra(); + if (extra != null) { + result = extra.get(symbol); + } + + if (result == null) { + Context base = baseContext != null + ? baseContext + : baseContext(); + + if (base != null) { + result = base.get(symbol); + } + } + } + return result; + } + public ContextAccumulator context(Context context) { if ( context == null ) return this; diff --git a/jena-arq/src/main/java/org/apache/jena/update/UpdateAction.java b/jena-arq/src/main/java/org/apache/jena/update/UpdateAction.java index 987b76e7e68..98470b0bb3d 100644 --- a/jena-arq/src/main/java/org/apache/jena/update/UpdateAction.java +++ b/jena-arq/src/main/java/org/apache/jena/update/UpdateAction.java @@ -19,6 +19,7 @@ package org.apache.jena.update; import java.io.InputStream; +import java.util.concurrent.atomic.AtomicBoolean; import org.apache.jena.atlas.io.IO; import org.apache.jena.graph.Graph; @@ -37,6 +38,7 @@ import org.apache.jena.sparql.modify.UsingList; import org.apache.jena.sparql.modify.UsingUpdateSink; import org.apache.jena.sparql.modify.request.UpdateWithUsing; +import org.apache.jena.sparql.util.Context; /** * A class of forms for executing SPARQL Update operations. 
parse means the update @@ -356,7 +358,7 @@ public static void parseExecute(UsingList usingList, DatasetGraph dataset, Strin if ( in == null ) throw new UpdateException("File could not be opened: " + fileName); } - parseExecute(usingList, dataset, in, inputBinding, baseURI, syntax); + parseExecute(usingList, dataset, in, inputBinding, baseURI, syntax, null); if ( in != System.in ) IO.close(in); } @@ -408,7 +410,7 @@ public static void parseExecute(UsingList usingList, DatasetGraph dataset, Input * @param syntax The update language syntax */ public static void parseExecute(UsingList usingList, DatasetGraph dataset, InputStream input, String baseURI, Syntax syntax) { - parseExecute(usingList, dataset, input, (Binding)null, baseURI, syntax); + parseExecute(usingList, dataset, input, (Binding)null, baseURI, syntax, null); } /** @@ -427,7 +429,7 @@ public static void parseExecute(UsingList usingList, DatasetGraph dataset, Input */ public static void parseExecute(UsingList usingList, DatasetGraph dataset, InputStream input, QuerySolution inputBinding, String baseURI, Syntax syntax) { - parseExecute(usingList, dataset, input, BindingLib.asBinding(inputBinding), baseURI, syntax); + parseExecute(usingList, dataset, input, BindingLib.asBinding(inputBinding), baseURI, syntax, null); } /** @@ -445,25 +447,82 @@ public static void parseExecute(UsingList usingList, DatasetGraph dataset, Input * @param syntax The update language syntax */ public static void parseExecute(UsingList usingList, DatasetGraph dataset, InputStream input, Binding inputBinding, String baseURI, - Syntax syntax) { + Syntax syntax, Context context) { @SuppressWarnings("removal") - UpdateProcessorStreaming uProc = UpdateStreaming.createStreaming(dataset, inputBinding); + UpdateProcessorStreaming uProc = UpdateStreaming.makeStreaming(dataset, inputBinding, context); if ( uProc == null ) throw new ARQException("No suitable update procesors are registered/able to execute your updates"); - uProc.startRequest(); - try { - UpdateSink sink = new UsingUpdateSink(uProc.getUpdateSink(), usingList); + UpdateExec uExec = new UpdateExecStreaming(usingList, dataset, input, inputBinding, baseURI, syntax, context); + + // TODO UpdateExec tracking could go here. + + uExec.execute(); + } + + private static class UpdateExecStreaming + implements UpdateExec { + + private UsingList usingList; + private DatasetGraph dataset; + private InputStream input; + private Binding inputBinding; + private String baseURI; + private Syntax syntax; + private Context context; + + /** InputStream is not owned by this class - must be closed externally. */ + public UpdateExecStreaming(UsingList usingList, DatasetGraph dataset, InputStream input, Binding inputBinding, + String baseURI, Syntax syntax, Context context) { + super(); + this.usingList = usingList; + this.dataset = dataset; + this.input = input; + this.inputBinding = inputBinding; + this.baseURI = baseURI; + this.syntax = syntax; + this.context = context; + } + + @Override + public String getUpdateRequestString() { + // Options to provide more information about the request: + // - Use a BufferedInputStream to peek the beginning of the request + // - Use an InputStreamWrapper that appends read bytes (up to some cap) to the string returned by this methods. + return "# streaming update."; + } + + @Override + public void abort() { + // Could improve abort by aborting the parser and uProc. + // Could also call inputStream.close() but that'd kill the parser with EOF - ugly. 
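
The streaming path keeps the update request out of memory: the parser feeds operations straight into the engine's UpdateSink, which is why getUpdateRequestString() can only return a placeholder. A caller-side sketch of the pre-existing entry point this code serves (the file name is a placeholder; the stream is owned by the caller):

    import java.io.InputStream;

    import org.apache.jena.atlas.io.IO;
    import org.apache.jena.query.Syntax;
    import org.apache.jena.sparql.core.DatasetGraph;
    import org.apache.jena.sparql.core.DatasetGraphFactory;
    import org.apache.jena.sparql.modify.UsingList;
    import org.apache.jena.update.UpdateAction;

    public class StreamingUpdateSketch {
        public static void main(String[] args) {
            DatasetGraph dsg = DatasetGraphFactory.create();
            InputStream in = IO.openFile("update.ru");      // placeholder file of SPARQL Update operations
            try {
                // Operations are parsed and executed as they are read; nothing is buffered as a string.
                UpdateAction.parseExecute(new UsingList(), dsg, in, null, Syntax.syntaxARQ);
            } finally {
                IO.close(in);                                // the input stream is not closed by parseExecute
            }
        }
    }
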
+ AtomicBoolean cancelSignal = Context.getCancelSignal(context); + if (cancelSignal != null) { + cancelSignal.set(true); + } + } + + @Override + public void execute() { + @SuppressWarnings("removal") + UpdateProcessorStreaming uProc = UpdateStreaming.makeStreaming(dataset, inputBinding, context); + if ( uProc == null ) + throw new ARQException("No suitable update procesors are registered/able to execute your updates"); + + uProc.startRequest(); try { - UpdateParser parser = UpdateFactory.setupParser(uProc.getPrologue(), baseURI, syntax); - parser.parse(sink, uProc.getPrologue(), input); + UpdateSink sink = new UsingUpdateSink(uProc.getUpdateSink(), usingList); + try { + UpdateParser parser = UpdateFactory.setupParser(uProc.getPrologue(), baseURI, syntax); + parser.parse(sink, uProc.getPrologue(), input); + } + finally { + sink.close(); + } } finally { - sink.close(); + uProc.finishRequest(); } } - finally { - uProc.finishRequest(); - } } } diff --git a/jena-arq/src/test/java/org/apache/jena/sparql/core/TS_SparqlCore.java b/jena-arq/src/test/java/org/apache/jena/sparql/core/TS_SparqlCore.java index 3ea561bfb4f..9e7f47012d1 100644 --- a/jena-arq/src/test/java/org/apache/jena/sparql/core/TS_SparqlCore.java +++ b/jena-arq/src/test/java/org/apache/jena/sparql/core/TS_SparqlCore.java @@ -46,6 +46,8 @@ , TestDatasetGraphFilteredView.class , TestSpecialDatasets.class , TestDatasetMisc.class + + , TestDatasetGraphOverSparql.class }) public class TS_SparqlCore diff --git a/jena-arq/src/test/java/org/apache/jena/sparql/core/TestDatasetGraphOverSparql.java b/jena-arq/src/test/java/org/apache/jena/sparql/core/TestDatasetGraphOverSparql.java new file mode 100644 index 00000000000..d037c3e8ddd --- /dev/null +++ b/jena-arq/src/test/java/org/apache/jena/sparql/core/TestDatasetGraphOverSparql.java @@ -0,0 +1,101 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.jena.sparql.core; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.apache.jena.graph.Node; +import org.apache.jena.graph.NodeFactory; +import org.apache.jena.query.Query; +import org.apache.jena.riot.Lang; +import org.apache.jena.riot.RDFParser; +import org.apache.jena.sparql.adapter.DatasetGraphSPARQL; +import org.apache.jena.sparql.exec.QueryExec; +import org.apache.jena.sparql.exec.UpdateExec; +import org.apache.jena.update.UpdateRequest; +import org.junit.jupiter.api.Test; + +public class TestDatasetGraphOverSparql extends AbstractDatasetGraphTests { + + @Override + protected DatasetGraph emptyDataset() { + DatasetGraph backend = DatasetGraphFactory.create(); + + DatasetGraph frontend = new DatasetGraphSPARQL() { + @Override + protected UpdateExec update(UpdateRequest update) { + return UpdateExec.dataset(backend).update(update).build(); + } + + @Override + protected QueryExec query(Query query) { + return QueryExec.dataset(backend).query(query).build(); + } + }; + + return frontend; + } + + @Test + public void deleteDefaultGraph() { + DatasetGraph dsg = testDataset(); + dsg.deleteAny(Quad.defaultGraphIRI, Node.ANY, Node.ANY, Node.ANY); + assertFalse(dsg.isEmpty()); + assertTrue(dsg.getDefaultGraph().isEmpty()); + } + + @Test + public void deleteNamedGraph() { + DatasetGraph dsg = testDataset(); + dsg.deleteAny(NodeFactory.createURI("http://www.example.org/g"), Node.ANY, Node.ANY, Node.ANY); + assertEquals(0, dsg.size()); + assertFalse(dsg.isEmpty()); + } + + @Test + public void deleteAllNamedGraphs() { + DatasetGraph dsg = testDataset(); + dsg.deleteAny(Quad.unionGraph, Node.ANY, Node.ANY, Node.ANY); + assertEquals(0, dsg.size()); + assertFalse(dsg.isEmpty()); + } + + @Test + public void deleteAllGraphs() { + DatasetGraph dsg = testDataset(); + dsg.deleteAny(Node.ANY, Node.ANY, Node.ANY, Node.ANY); + assertTrue(dsg.isEmpty()); + } + + private DatasetGraph testDataset() { + DatasetGraph dsg = emptyDataset(); + RDFParser.fromString(""" + PREFIX eg: <http://www.example.org/> + eg:s1 eg:p eg:o . + eg:g { + eg:s2 eg:p eg:o + } + """, Lang.TRIG).parse(dsg); + assertFalse(dsg.getDefaultGraph().isEmpty()); + assertEquals(1, dsg.size()); + return dsg; + } +} diff --git a/jena-examples/src/main/java/arq/examples/ExampleDBpediaViaRemoteDataset.java b/jena-examples/src/main/java/arq/examples/ExampleDBpediaViaRemoteDataset.java new file mode 100644 index 00000000000..543786d9726 --- /dev/null +++ b/jena-examples/src/main/java/arq/examples/ExampleDBpediaViaRemoteDataset.java @@ -0,0 +1,105 @@ +package arq.examples; + +import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; + +import org.apache.jena.query.DatasetFactory; +import org.apache.jena.rdfconnection.RDFConnection; +import org.apache.jena.rdflink.RDFLinkHTTP; +import org.apache.jena.rdflink.dataset.DatasetGraphOverRDFLink; +import org.apache.jena.sparql.adapter.SparqlAdapterRegistry; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.core.DatasetGraphFactory; +import org.apache.jena.sparql.exec.QueryExec; +import org.apache.jena.sparql.exec.QueryExecBuilder; +import org.apache.jena.sparql.exec.QueryExecBuilderAdapter; +import org.apache.jena.sparql.exec.QueryExecWrapper; +import org.apache.jena.sparql.exec.RowSetOps; + +/** + * An example that sends a query to the DBpedia endpoint via the {@link SparqlAdapterRegistry} abstraction.
+ * + */ +public class ExampleDBpediaViaRemoteDataset { + public static void main(String... args) { + execLocal("Local Execution"); + System.out.println(); + execRemote("Remote Execution"); + } + + private static void execLocal(String label) { + execQuery(label, DatasetGraphFactory.empty(), "SELECT * { BIND('test' AS ?x) }"); + } + + private static void execRemote(String label) { + // The query string is sent to the DBpedia endpoint as is (without parsing). + // By default, Jena would fail to parse it because of the undeclared prefixes. + String queryString = """ + SELECT * + FROM + { + ?s rdfs:label ?o . + ?o bif:contains 'Leipzig' + } + LIMIT 3 + """; + + // Execution via DatasetGraph + DatasetGraph dsg = new DatasetGraphOverRDFLink(() -> + RDFLinkHTTP.newBuilder() + .destination("http://dbpedia.org/sparql") + .build()); + + execQuery(label, dsg, queryString); + System.out.println(); + + // Execution via RDFLink (via DatasetGraph) + try (RDFConnection conn = RDFConnection.connect(DatasetFactory.wrap(dsg))) { + QueryExecBuilder builder = QueryExecBuilderAdapter.adapt(conn.newQuery()).query(queryString); + execQuery(label + " via Connection", dsg, builder); + } + } + + private static void execQuery(String label, DatasetGraph dsg, String queryString) { + QueryExecBuilder builder; + builder = QueryExec.dataset(dsg).query(queryString); + execQuery(label + " Direct", dsg, builder); + System.out.println(); + builder = QueryExec.newBuilder().dataset(dsg).query(queryString); + execQuery(label + " Deferred", dsg, builder); + } + + private static void execQuery(String label, DatasetGraph dsg, QueryExecBuilder builder) { + try (QueryExec qe = builder + .timeout(10, TimeUnit.SECONDS) + .transformExec(e -> new QueryExecWrapperDemo(label, e)).build()) { + System.out.println(label + ": Dataset type: " + className(dsg)); + System.out.println(label + ": QueryExecBuilder type: " + className(builder)); + System.out.println(label + ": QueryExec type: " + className(qe)); + RowSetOps.out(System.out, qe.select()); + } + } + + private static class QueryExecWrapperDemo extends QueryExecWrapper { + private final String label; + + public QueryExecWrapperDemo(String label, QueryExec delegate) { + super(delegate); + this.label = label; + } + + protected T exec(Supplier supplier) { + T r = supplier.get(); + System.out.println(label + ": Execution result object type: " + className(r)); + return r; + }; + } + + private static String className(Object obj) { + return obj == null ? 
"(null)" : className(obj.getClass()); + } + + private static String className(Class clz) { + return clz.getSimpleName(); + } +} diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLink.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLink.java index 59554543dd9..49c2ab826a4 100644 --- a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLink.java +++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLink.java @@ -29,7 +29,6 @@ import org.apache.jena.http.HttpEnv; import org.apache.jena.query.Query; import org.apache.jena.query.QueryExecution; -import org.apache.jena.query.QueryFactory; import org.apache.jena.rdfconnection.Isolation; import org.apache.jena.rdfconnection.JenaConnectionException; import org.apache.jena.rdfconnection.LibSec; @@ -44,7 +43,6 @@ import org.apache.jena.sparql.exec.UpdateExecBuilder; import org.apache.jena.system.Txn; import org.apache.jena.update.Update; -import org.apache.jena.update.UpdateFactory; import org.apache.jena.update.UpdateRequest; /** @@ -355,7 +353,9 @@ public default boolean queryAsk(Query query) { * @return QueryExecution */ @Override - public QueryExec query(Query query); + default public QueryExec query(Query query) { + return newQuery().query(query).build(); + } /** * Setup a SPARQL query execution. @@ -374,7 +374,7 @@ public default boolean queryAsk(Query query) { */ @Override public default QueryExec query(String queryString) { - return query(QueryFactory.create(queryString)); + return newQuery().query(queryString).build(); } /** @@ -405,7 +405,7 @@ public default QueryExec query(String queryString) { */ @Override public default void update(Update update) { - update(new UpdateRequest(update)); + newUpdate().update(update).execute(); } /** @@ -413,7 +413,9 @@ public default void update(Update update) { * @param update */ @Override - public void update(UpdateRequest update); + public default void update(UpdateRequest update) { + newUpdate().update(update).execute(); + } /** * Execute a SPARQL Update. @@ -421,7 +423,7 @@ public default void update(Update update) { */ @Override public default void update(String updateString) { - update(UpdateFactory.create(updateString)); + newUpdate().update(updateString).execute(); } /** Fetch the default graph. diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLinkDataset.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLinkDataset.java index c86b107bfb5..52c2c9f2761 100644 --- a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLinkDataset.java +++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLinkDataset.java @@ -41,6 +41,7 @@ import org.apache.jena.sparql.graph.GraphFactory; import org.apache.jena.sparql.graph.GraphReadOnly; import org.apache.jena.system.Txn; +import org.apache.jena.update.Update; import org.apache.jena.update.UpdateRequest; /** @@ -68,7 +69,7 @@ private RDFLinkDataset(DatasetGraph dataset) { this(dataset, Isolation.NONE); } - /*package*/ RDFLinkDataset(DatasetGraph dataset, Isolation isolation) { + /*package*/ public RDFLinkDataset(DatasetGraph dataset, Isolation isolation) { this.dataset = dataset; this.isolation = isolation; } @@ -76,9 +77,8 @@ private RDFLinkDataset(DatasetGraph dataset) { @Override public QueryExec query(Query query) { checkOpen(); - //return QueryExec.newBuilder().dataset(dataset).query(query).build(); // Delayed. 
- return QueryExecApp.create(QueryExec.dataset(dataset).query(query), + return QueryExecApp.create(newQuery().query(query), dataset, query, null); @@ -101,10 +101,22 @@ public UpdateExecBuilder newUpdate() { return UpdateExec.dataset(dataset); } + @Override + public void update(Update update) { + checkOpen(); + Txn.executeWrite(dataset, ()->newUpdate().update(update).execute()); + } + @Override public void update(UpdateRequest update) { checkOpen(); - Txn.executeWrite(dataset, ()->UpdateExecDatasetBuilder.create().update(update).execute(dataset)); + Txn.executeWrite(dataset, ()->newUpdate().update(update).execute()); + } + + @Override + public void update(String updateString) { + checkOpen(); + Txn.executeWrite(dataset, ()->newUpdate().update(updateString).execute()); } @Override diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLinkFuseki.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLinkFuseki.java index 515796235e4..8b323d26eea 100644 --- a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLinkFuseki.java +++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLinkFuseki.java @@ -19,6 +19,7 @@ package org.apache.jena.rdflink; import java.net.http.HttpClient; +import java.util.List; import org.apache.jena.riot.Lang; import org.apache.jena.riot.RDFFormat; @@ -26,6 +27,8 @@ import org.apache.jena.sparql.core.Transactional; import org.apache.jena.sparql.exec.http.QuerySendMode; import org.apache.jena.sparql.exec.http.UpdateSendMode; +import org.apache.jena.sparql.exec.tracker.QueryExecTransform; +import org.apache.jena.sparql.exec.tracker.UpdateExecTransform; /** * Implementation of the {@link RDFLink} interface for connecting to an Apache Jena Fuseki. @@ -89,7 +92,8 @@ protected RDFLinkFuseki(RDFLinkHTTPBuilder base) { base.acceptDataset, base.acceptGraph, base.acceptSparqlResults, base.acceptSelectResult, base.acceptAskResult, base.parseCheckQueries, base.parseCheckUpdates, - base.querySendMode, base.updateSendMode); + base.querySendMode, base.updateSendMode, + base.queryExecTransforms, base.updateExecTransforms); } protected RDFLinkFuseki(Transactional txnLifecycle, HttpClient httpClient, String destination, @@ -97,13 +101,15 @@ protected RDFLinkFuseki(Transactional txnLifecycle, HttpClient httpClient, Strin String acceptDataset, String acceptGraph, String acceptSparqlResults, String acceptSelectResult, String acceptAskResult, boolean parseCheckQueries, boolean parseCheckUpdates, - QuerySendMode querySendMode, UpdateSendMode updateSendMode) { + QuerySendMode querySendMode, UpdateSendMode updateSendMode, + List queryExecTransforms, List updateExecTransforms) { super(txnLifecycle, httpClient, destination, queryURL, updateURL, gspURL, outputQuads, outputTriples, acceptDataset, acceptGraph, acceptSparqlResults, acceptSelectResult, acceptAskResult, parseCheckQueries, parseCheckUpdates, - querySendMode, updateSendMode); + querySendMode, updateSendMode, + queryExecTransforms, updateExecTransforms); } // Fuseki specific operations. 
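For orientation, here is a minimal sketch (not part of this patch) of what the queryExecTransforms plumbing above ultimately does: each registered transform wraps the QueryExec produced by a builder, in the same way ExampleDBpediaViaRemoteDataset wraps executions with QueryExecWrapperDemo. The class names TransformExecSketch and LoggingCloseWrapper are illustrative, and the sketch assumes the transformExec(...) hook on the dataset query builder behaves as in that example.

import org.apache.jena.sparql.core.DatasetGraph;
import org.apache.jena.sparql.core.DatasetGraphFactory;
import org.apache.jena.sparql.exec.QueryExec;
import org.apache.jena.sparql.exec.QueryExecWrapper;
import org.apache.jena.sparql.exec.RowSetOps;

/** Illustrative only: per-execution wrapping via the transformExec(...) hook. */
public class TransformExecSketch {

    /** Wrapper that reports when its underlying execution is closed. */
    static class LoggingCloseWrapper extends QueryExecWrapper {
        LoggingCloseWrapper(QueryExec delegate) { super(delegate); }

        @Override
        public void close() {
            try {
                super.close();
            } finally {
                System.out.println("QueryExec closed");
            }
        }
    }

    public static void main(String... args) {
        DatasetGraph dsg = DatasetGraphFactory.create();   // empty in-memory dataset
        try (QueryExec qe = QueryExec.dataset(dsg)
                .query("SELECT * { ?s ?p ?o }")
                .transformExec(e -> new LoggingCloseWrapper(e))  // same hook the link-level transform lists feed into
                .build()) {
            RowSetOps.out(System.out, qe.select());
        }
    }
}

A transform registered on an RDFLinkHTTP/RDFLinkFuseki builder is applied in the same way to every execution created through that link, since the link's builder factories loop over the transform lists.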
diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLinkHTTP.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLinkHTTP.java index b4edd28ece8..7846c13c705 100644 --- a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLinkHTTP.java +++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLinkHTTP.java @@ -19,6 +19,7 @@ package org.apache.jena.rdflink; import java.net.http.HttpClient; +import java.util.List; import java.util.Objects; import java.util.function.Consumer; @@ -42,7 +43,10 @@ import org.apache.jena.sparql.exec.http.QuerySendMode; import org.apache.jena.sparql.exec.http.UpdateExecHTTPBuilder; import org.apache.jena.sparql.exec.http.UpdateSendMode; +import org.apache.jena.sparql.exec.tracker.QueryExecTransform; +import org.apache.jena.sparql.exec.tracker.UpdateExecTransform; import org.apache.jena.system.Txn; +import org.apache.jena.update.Update; import org.apache.jena.update.UpdateFactory; import org.apache.jena.update.UpdateRequest; @@ -84,6 +88,10 @@ public class RDFLinkHTTP implements RDFLink { protected final QuerySendMode querySendMode; protected final UpdateSendMode updateSendMode; + protected final List queryExecTransforms; + protected final List updateExecTransforms; + + /** Create a {@link RDFLinkHTTPBuilder}. */ public static RDFLinkHTTPBuilder newBuilder() { return new RDFLinkHTTPBuilder(); @@ -109,7 +117,8 @@ protected RDFLinkHTTP(Transactional txnLifecycle, HttpClient httpClient, String String acceptSparqlResultsFallback, String acceptSelectResult, String acceptAskResult, boolean parseCheckQueries, boolean parseCheckUpdates, - QuerySendMode querySendMode, UpdateSendMode updateSendMode) { + QuerySendMode querySendMode, UpdateSendMode updateSendMode, + List queryExecTransforms, List updateExecTransforms) { // Any defaults. HttpClient hc = httpClient!=null ? httpClient : HttpEnv.getDftHttpClient(); if ( txnLifecycle == null ) @@ -132,6 +141,8 @@ protected RDFLinkHTTP(Transactional txnLifecycle, HttpClient httpClient, String this.parseCheckUpdates = parseCheckUpdates; this.querySendMode = querySendMode; this.updateSendMode = updateSendMode; + this.queryExecTransforms = queryExecTransforms; + this.updateExecTransforms = updateExecTransforms; } @Override @@ -268,6 +279,11 @@ private QueryExecHTTPBuilder createQExecBuilder() { .acceptHeaderAskQuery(HttpLib.dft(acceptAskResult, acceptSparqlResults)) .acceptHeaderGraph(HttpLib.dft(acceptGraph, acceptSparqlResults)) .acceptHeaderDataset(HttpLib.dft(acceptDataset, acceptSparqlResults)); + + for (QueryExecTransform queryExecTransform : queryExecTransforms) { + builder = builder.transformExec(queryExecTransform); + } + return builder; } @@ -293,8 +309,14 @@ public UpdateExecBuilder newUpdate() { /** Create a builder, configured with the link setup. 
*/ private UpdateExecHTTPBuilder createUExecBuilder() { - return UpdateExecHTTPBuilder.create().endpoint(svcUpdate).httpClient(httpClient) + UpdateExecHTTPBuilder builder = UpdateExecHTTPBuilder.create().endpoint(svcUpdate).httpClient(httpClient) .sendMode(updateSendMode).parseCheck(parseCheckUpdates); + + for (UpdateExecTransform updateExecTransform : updateExecTransforms) { + builder = builder.transformExec(updateExecTransform); + } + + return builder; } @Override @@ -303,6 +325,12 @@ public void update(String updateString) { updateExec(null, updateString); } + @Override + public void update(Update update) { + Objects.requireNonNull(update); + updateExec(new UpdateRequest(update), null); + } + @Override public void update(UpdateRequest update) { Objects.requireNonNull(update); @@ -313,10 +341,10 @@ private void updateExec(UpdateRequest update, String updateString ) { checkUpdate(); if ( update == null && updateString == null ) throw new InternalErrorException("Both update request and update string are null"); - UpdateRequest actual = null; + UpdateRequest parsed = null; // Kept for inspection if ( update == null ) { if ( parseCheckUpdates ) - actual = UpdateFactory.create(updateString); + parsed = UpdateFactory.create(updateString); } // Use the update string as provided if possible, otherwise serialize the update. String updateStringToSend = ( updateString != null ) ? updateString : update.toString(); diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLinkHTTPBuilder.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLinkHTTPBuilder.java index 2b77cbf862f..ef6d978fc55 100644 --- a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLinkHTTPBuilder.java +++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/RDFLinkHTTPBuilder.java @@ -21,6 +21,8 @@ import static java.util.Objects.requireNonNull; import java.net.http.HttpClient; +import java.util.ArrayList; +import java.util.List; import java.util.Objects; import java.util.function.Function; @@ -30,6 +32,8 @@ import org.apache.jena.sparql.core.TransactionalLock; import org.apache.jena.sparql.exec.http.QuerySendMode; import org.apache.jena.sparql.exec.http.UpdateSendMode; +import org.apache.jena.sparql.exec.tracker.QueryExecTransform; +import org.apache.jena.sparql.exec.tracker.UpdateExecTransform; import org.apache.jena.sys.JenaSystem; /** Builder class for {@link RDFLinkHTTP} */ @@ -70,6 +74,9 @@ public class RDFLinkHTTPBuilder { protected QuerySendMode querySendMode = QuerySendMode.systemDefault; protected UpdateSendMode updateSendMode = UpdateSendMode.systemDefault; + protected List queryExecTransforms = new ArrayList<>(); + protected List updateExecTransforms = new ArrayList<>(); + protected RDFLinkHTTPBuilder() { // Default settings are the member declarations. } @@ -97,6 +104,9 @@ protected RDFLinkHTTPBuilder(RDFLinkHTTP base) { querySendMode = base.querySendMode; updateSendMode = base.updateSendMode; + + queryExecTransforms = base.queryExecTransforms; + updateExecTransforms = base.updateExecTransforms; } /** URL of the remote SPARQL endpoint. 
@@ -341,6 +351,7 @@ protected RDFLinkHTTP buildConnection() { acceptDataset, acceptGraph, acceptSparqlResults, acceptSelectResult, acceptAskResult, parseCheckQueries, parseCheckUpdates, - querySendMode, updateSendMode); + querySendMode, updateSendMode, + queryExecTransforms, updateExecTransforms); } } diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/DatasetGraphOverRDFLink.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/DatasetGraphOverRDFLink.java new file mode 100644 index 00000000000..6a7c0dfcb65 --- /dev/null +++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/DatasetGraphOverRDFLink.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.rdflink.dataset; + +import org.apache.jena.atlas.lib.Creator; +import org.apache.jena.query.Query; +import org.apache.jena.rdflink.RDFLink; +import org.apache.jena.rdflink.dataset.todelete.QueryExecWrapperCloseRDFLink; +import org.apache.jena.sparql.adapter.DatasetGraphSPARQL; +import org.apache.jena.sparql.exec.QueryExec; +import org.apache.jena.sparql.exec.UpdateExec; +import org.apache.jena.update.UpdateRequest; + +/** + * DatasetGraph implementation that implements all methods + * against an RDFLink. + * All returned iterators are backed by a fresh RDFLink instance. + * The iterators must be closed to free the resources. + */ +public class DatasetGraphOverRDFLink + extends DatasetGraphSPARQL +{ + private Creator rdfLinkCreator; + + public DatasetGraphOverRDFLink(Creator rdfLinkCreator) { + super(); + this.rdfLinkCreator = rdfLinkCreator; + } + + /** This method can be overridden. */ + public RDFLink newLink() { + RDFLink link = rdfLinkCreator.create(); + return link; + } + + public DatasetGraphOverRDFLink() { + initContext(); + } + + @Override + protected QueryExec query(Query query) { + RDFLink link = newLink(); + QueryExec base = link.query(query); + QueryExec result = new QueryExecWrapperCloseRDFLink(base, link); + return result; + } + + @Override + protected UpdateExec update(UpdateRequest update) { + return new UpdateExecOverRDFLink(this::newLink, null, null, null, false, update, null); + } +} diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/InitDatasetGraphOverRDFLink.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/InitDatasetGraphOverRDFLink.java new file mode 100644 index 00000000000..8861d55727e --- /dev/null +++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/InitDatasetGraphOverRDFLink.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.rdflink.dataset; + +import org.apache.jena.sparql.adapter.SparqlAdapterRegistry; +import org.apache.jena.sparql.system.InitARQ; +import org.apache.jena.sys.JenaSubsystemLifecycle; + +/** + * Initialize SPARQL dispatcher for {@link DatasetGraphOverRDFLink}. + */ +public class InitDatasetGraphOverRDFLink implements JenaSubsystemLifecycle { + + @Override + public void start() { + init(); + } + + @Override + public void stop() {} + + /** Initialize after {@link InitARQ} and before {@link InitExecTracking}. */ + @Override + public int level() { + return 40 ; + } + + private static boolean initialized = false; + + public synchronized static void init() { + if (!initialized) { + initialized = true; + + SparqlAdapterRegistry.addProvider(new SparqlAdapterProviderForDatasetGraphOverRDFLink()); + } + } +} diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/QueryExecDatasetBuilderOverRDFLink.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/QueryExecDatasetBuilderOverRDFLink.java new file mode 100644 index 00000000000..9a421b278e7 --- /dev/null +++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/QueryExecDatasetBuilderOverRDFLink.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.rdflink.dataset; + +import org.apache.jena.atlas.lib.Creator; +import org.apache.jena.rdflink.RDFLink; +import org.apache.jena.rdflink.dataset.todelete.QueryExecWrapperCloseRDFLink; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.exec.QueryExecBuilder; +import org.apache.jena.sparql.exec.QueryExecDatasetBuilderDeferredBase; + +/** + * A QueryExecBuilder over a creator of RDFLinks. + * Link creation is deferred: The life cycle of the link is tied to + * that of the QueryExec created by this builder. + * This means that the link is created when the QueryExec is built, and + * is closed when the QueryExec is closed. 
+ **/ +public class QueryExecDatasetBuilderOverRDFLink + extends QueryExecDatasetBuilderDeferredBase +{ + private Creator<RDFLink> linkCreator; + + public QueryExecDatasetBuilderOverRDFLink(Creator<RDFLink> linkCreator, DatasetGraph dataset) { + super(); + this.linkCreator = linkCreator; + this.dataset = dataset; + } + + @Override + protected QueryExecBuilder newActualExecBuilder() { + RDFLink link = linkCreator.create(); + boolean parseCheck = effectiveParseCheck(); + QueryExecBuilder qeb = link.newQuery() + .parseCheck(parseCheck) + .transformExec(qe -> new QueryExecWrapperCloseRDFLink(qe, link)); + return qeb; + } +} diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/SparqlAdapterForRDFLinkCreator.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/SparqlAdapterForRDFLinkCreator.java new file mode 100644 index 00000000000..c1768e8e95b --- /dev/null +++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/SparqlAdapterForRDFLinkCreator.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.rdflink.dataset; + +import org.apache.jena.atlas.lib.Creator; +import org.apache.jena.rdflink.RDFLink; +import org.apache.jena.sparql.adapter.SparqlAdapter; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.exec.QueryExecBuilder; +import org.apache.jena.sparql.exec.QueryExecDatasetBuilder; +import org.apache.jena.sparql.exec.UpdateExec; +import org.apache.jena.sparql.exec.UpdateExecBuilder; +import org.apache.jena.sparql.exec.UpdateExecDatasetBuilder; + +/** + * Adapter that wraps a source of RDFLinks with builders that mimic those of DatasetGraphs. + * The adapter thus returns {@link QueryExecDatasetBuilder} and {@link UpdateExecDatasetBuilder} views + * over the link source. + * + * The life cycle of links is as follows: + * For queries, a link is only created when {@link QueryExecDatasetBuilder#build()} is called. The link is closed + * when the corresponding QueryExec is closed. + * For updates, a link is only created during {@link UpdateExec#execute()} and closed when this method completes.
+ */ +public class SparqlAdapterForRDFLinkCreator + implements SparqlAdapter +{ + private Creator rdfLinkCreator; + private DatasetGraph dataset; + + public SparqlAdapterForRDFLinkCreator(Creator rdfLinkCreator, DatasetGraph dataset) { + super(); + this.rdfLinkCreator = rdfLinkCreator; + this.dataset = dataset; + } + + @Override + public QueryExecBuilder newQuery() { + return new QueryExecDatasetBuilderOverRDFLink(rdfLinkCreator, dataset); + } + + @Override + public UpdateExecBuilder newUpdate() { + return new UpdateExecDatasetBuilderOverRDFLink(rdfLinkCreator, dataset); + } +} diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/SparqlAdapterProviderForDatasetGraphOverRDFLink.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/SparqlAdapterProviderForDatasetGraphOverRDFLink.java new file mode 100644 index 00000000000..c0eb54bc049 --- /dev/null +++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/SparqlAdapterProviderForDatasetGraphOverRDFLink.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.rdflink.dataset; + +import org.apache.jena.atlas.lib.Creator; +import org.apache.jena.rdflink.RDFLink; +import org.apache.jena.sparql.adapter.SparqlAdapter; +import org.apache.jena.sparql.adapter.SparqlAdapterProvider; +import org.apache.jena.sparql.core.DatasetGraph; + +public class SparqlAdapterProviderForDatasetGraphOverRDFLink + implements SparqlAdapterProvider +{ + @Override + public SparqlAdapter adapt(DatasetGraph dsg) { + SparqlAdapter adapter = null; + if (dsg instanceof DatasetGraphOverRDFLink d) { + adapter = new SparqlAdapterForRDFLinkCreator(new Adapter(d), d); + } + return adapter; + } + + private static class Adapter + implements Creator + { + private DatasetGraphOverRDFLink dsg; + + public Adapter(DatasetGraphOverRDFLink dsg) { + super(); + this.dsg = dsg; + } + + public DatasetGraph getDataset() { + return dsg; + } + + @Override + public RDFLink create() { + return dsg.newLink(); + } + } +} diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/UpdateExecDatasetBuilderOverRDFLink.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/UpdateExecDatasetBuilderOverRDFLink.java new file mode 100644 index 00000000000..f8830fdf335 --- /dev/null +++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/UpdateExecDatasetBuilderOverRDFLink.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.rdflink.dataset; + +import org.apache.jena.atlas.lib.Creator; +import org.apache.jena.rdflink.RDFLink; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.exec.UpdateExec; +import org.apache.jena.sparql.exec.UpdateExecDatasetBuilderBase; +import org.apache.jena.update.UpdateRequest; + +public class UpdateExecDatasetBuilderOverRDFLink + extends UpdateExecDatasetBuilderBase +{ + private Creator linkCreator; + + public UpdateExecDatasetBuilderOverRDFLink(Creator linkCreator, DatasetGraph dataset) { + super(); + this.linkCreator = linkCreator; + this.dataset = dataset; + } + + @Override + public UpdateExec build() { + UpdateRequest updateRequest = null; + String updateRequestString = null; + if (updateEltAcc.isParsed()) { + updateRequest = updateEltAcc.buildUpdateRequest(); + } else { + updateRequestString = updateEltAcc.buildString(); + } + + boolean parseCheck = effectiveParseCheck(); + return new UpdateExecOverRDFLink(linkCreator, initialBinding, substitutionMap, contextAcc.context(), parseCheck, updateRequest, updateRequestString); + } +} diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/UpdateExecOverRDFLink.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/UpdateExecOverRDFLink.java new file mode 100644 index 00000000000..c87ab0c579f --- /dev/null +++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/UpdateExecOverRDFLink.java @@ -0,0 +1,172 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.jena.rdflink.dataset; + +import java.util.Map; +import java.util.Map.Entry; +import java.util.Optional; + +import org.apache.jena.atlas.lib.Creator; +import org.apache.jena.graph.Node; +import org.apache.jena.query.QueryCancelledException; +import org.apache.jena.rdflink.RDFLink; +import org.apache.jena.sparql.adapter.ParseCheckUtils; +import org.apache.jena.sparql.core.Var; +import org.apache.jena.sparql.engine.Timeouts; +import org.apache.jena.sparql.engine.Timeouts.Timeout; +import org.apache.jena.sparql.engine.binding.Binding; +import org.apache.jena.sparql.exec.UpdateExec; +import org.apache.jena.sparql.exec.UpdateExecBuilder; +import org.apache.jena.sparql.exec.UpdateExecDatasetBuilder; +import org.apache.jena.sparql.util.Context; +import org.apache.jena.update.UpdateRequest; + +/** + * Deferred update execution that allocates all resources in the + * execute() method. Note, that UpdateExec does not have a close method, + * so all needed resources are allocated and closed during {@link #execute()}. + */ +public class UpdateExecOverRDFLink + implements UpdateExec +{ + private Creator linkCreator; + private Binding initialBinding; + private Map substitutionMap; + private Context requestContext; + + private boolean parseCheck; + private UpdateRequest updateRequest; + private String updateRequestString; + + private Object cancelLock = new Object(); + + private volatile boolean isAborted = false; + private volatile boolean isExecStarted = false; + private volatile UpdateExec delegate = null; + + public UpdateExecOverRDFLink(Creator linkCreator, Binding initialBinding, Map substitutionMap, Context context, + boolean parseCheck, UpdateRequest updateRequest, String updateRequestString) { + super(); + this.linkCreator = linkCreator; + this.initialBinding = initialBinding; + this.substitutionMap = substitutionMap; + this.requestContext = context; + this.parseCheck = parseCheck; + this.updateRequest = updateRequest; + this.updateRequestString = updateRequestString; + } + + @Override + public UpdateRequest getUpdateRequest() { + return updateRequest; + } + + @Override + public String getUpdateRequestString() { + return updateRequestString; + } + + /** + * If the execution has not been started then the context configured with this instance + * is returned. Otherwise the context of the delegate is returned. + */ + @Override + public Context getContext() { + return delegate == null ? 
requestContext : delegate.getContext(); + } + + @Override + public void abort() { + synchronized (cancelLock) { + isAborted = true; + if (delegate != null) { + delegate.abort(); + } + } + } + + @Override + public void execute() { + RDFLink link = null; + try { + synchronized (cancelLock) { + if (isExecStarted) { + throw new IllegalStateException("Execution was already started."); + } + isExecStarted = true; + + if (isAborted) { + throw new QueryCancelledException(); + } + + link = linkCreator.create(); + UpdateExecBuilder r = link.newUpdate(); + + r = r.parseCheck(parseCheck); + + if (requestContext != null) { + r = r.context(requestContext); + Timeout timeout = Timeouts.extractUpdateTimeout(requestContext); + applyTimeouts(r, timeout); + } + + if (initialBinding != null) { + if (r instanceof UpdateExecDatasetBuilder db) { + r = db.initialBinding(initialBinding); + } else { + throw new UnsupportedOperationException("Cannot apply initial binding to delegate."); + } + } + + if (substitutionMap != null) { + for (Entry<Var, Node> e : substitutionMap.entrySet()) { + r = r.substitution(e.getKey(), e.getValue()); + } + } + + Optional<Boolean> parseCheck = ParseCheckUtils.getParseCheck(requestContext); + if (parseCheck.isPresent()) { + r = r.parseCheck(parseCheck.get()); + } + + if (updateRequest != null) { + r = r.update(updateRequest); + } else { + r = r.update(updateRequestString); + } + + delegate = r.build(); + } + + delegate.execute(); + } finally { + if (link != null) { + link.close(); + } + } + } + + private static void applyTimeouts(UpdateExecBuilder uExec, Timeout t) { + if (t != null) { + if (t.hasOverallTimeout()) { + uExec.timeout(t.overallTimeout().amount(), t.overallTimeout().unit()); + } + } + } +} diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/todelete/QueryExecWrapperCloseRDFLink.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/todelete/QueryExecWrapperCloseRDFLink.java new file mode 100644 index 00000000000..cf0348bc62f --- /dev/null +++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdflink/dataset/todelete/QueryExecWrapperCloseRDFLink.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jena.rdflink.dataset.todelete; + +import org.apache.jena.rdflink.RDFLink; +import org.apache.jena.sparql.exec.QueryExec; +import org.apache.jena.sparql.exec.QueryExecWrapper; + +// TODO No longer needed with exec transforms.
+public class QueryExecWrapperCloseRDFLink + extends QueryExecWrapper +{ + protected RDFLink link; + + public QueryExecWrapperCloseRDFLink(QueryExec delegate, RDFLink link) { + super(delegate); + this.link = link; + } + + @Override + public void close() { + try { + super.close(); + } finally { + link.close(); + } + } +} diff --git a/jena-rdfconnection/src/main/resources/META-INF/services/org.apache.jena.sys.JenaSubsystemLifecycle b/jena-rdfconnection/src/main/resources/META-INF/services/org.apache.jena.sys.JenaSubsystemLifecycle new file mode 100644 index 00000000000..2b6ade72848 --- /dev/null +++ b/jena-rdfconnection/src/main/resources/META-INF/services/org.apache.jena.sys.JenaSubsystemLifecycle @@ -0,0 +1 @@ +org.apache.jena.rdflink.dataset.InitDatasetGraphOverRDFLink diff --git a/jena-rdfconnection/src/test/java/org/apache/jena/rdfconnection/TS_RDFConnection.java b/jena-rdfconnection/src/test/java/org/apache/jena/rdfconnection/TS_RDFConnection.java index 629de0d2153..a486c0c89a4 100644 --- a/jena-rdfconnection/src/test/java/org/apache/jena/rdfconnection/TS_RDFConnection.java +++ b/jena-rdfconnection/src/test/java/org/apache/jena/rdfconnection/TS_RDFConnection.java @@ -28,6 +28,7 @@ TestRDFConnectionLocalTxnMem.class , TestRDFConnectionLocalMRSW.class , TestLibRDFConn.class + , TestRDFConnectionToDatasetGraphOverRDFLink.class , TestRDFConnectionRewrapping.class , TestRDFConnectionHTTPHeaders.class }) diff --git a/jena-rdfconnection/src/test/java/org/apache/jena/rdfconnection/TestRDFConnectionToDatasetGraphOverRDFLink.java b/jena-rdfconnection/src/test/java/org/apache/jena/rdfconnection/TestRDFConnectionToDatasetGraphOverRDFLink.java new file mode 100644 index 00000000000..d7bfa0a0482 --- /dev/null +++ b/jena-rdfconnection/src/test/java/org/apache/jena/rdfconnection/TestRDFConnectionToDatasetGraphOverRDFLink.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.jena.rdfconnection; + +import org.apache.jena.query.Dataset; +import org.apache.jena.query.DatasetFactory; +import org.apache.jena.query.Query; +import org.apache.jena.rdflink.RDFLink; +import org.apache.jena.rdflink.dataset.DatasetGraphOverRDFLink; +import org.apache.jena.sparql.core.DatasetGraph; +import org.apache.jena.sparql.core.DatasetGraphFactory; +import org.apache.jena.sparql.exec.QueryExec; +import org.apache.jena.sparql.exec.UpdateExec; +import org.apache.jena.update.UpdateRequest; + +public class TestRDFConnectionToDatasetGraphOverRDFLink extends AbstractTestRDFConnection { + @Override + protected boolean supportsAbort() { return false; } + + @Override + protected RDFConnection connection() { + DatasetGraph backendDsg = DatasetGraphFactory.create(); + DatasetGraph frontendDsg = new DatasetGraphOverRDFLink(() -> RDFLink.connect(backendDsg)) { + @Override + protected QueryExec query(Query query) { + // Future: With RDFLink unwrapping in jena-rdfconnection this method should be bypassed + // using an internal call to frontendDsg.newLink().newQuery(). + // throw new UnsupportedOperationException("Should not be called"); + + return super.query(query); + } + + @Override + protected UpdateExec update(UpdateRequest updateRequest) { + // Future: With RDFLink unwrapping in jena-rdfconnection this method should be bypassed + // using an internal call to frontendDsg.newLink().newUpdate(). + // throw new UnsupportedOperationException("Should not be called"); + + return super.update(updateRequest); + } + }; + Dataset dataset = DatasetFactory.wrap(frontendDsg); + return RDFConnection.connect(dataset); + } +}
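To close, a minimal usage sketch (not part of this patch): it wires a DatasetGraphOverRDFLink over a local in-memory backend, as the test above does, and then drives it through the ordinary QueryExec/UpdateExec dataset builders so that execution is dispatched via the SparqlAdapterRegistry rather than the local engine. RemoteDatasetSketch is an illustrative name, and the sketch assumes UpdateExec.dataset(...) dispatches through the registry in the same way as the query path shown in ExampleDBpediaViaRemoteDataset.

import org.apache.jena.rdflink.RDFLink;
import org.apache.jena.rdflink.dataset.DatasetGraphOverRDFLink;
import org.apache.jena.sparql.core.DatasetGraph;
import org.apache.jena.sparql.core.DatasetGraphFactory;
import org.apache.jena.sparql.exec.QueryExec;
import org.apache.jena.sparql.exec.RowSetOps;
import org.apache.jena.sparql.exec.UpdateExec;

public class RemoteDatasetSketch {
    public static void main(String... args) {
        // Plain in-memory dataset standing in for a remote endpoint.
        DatasetGraph backend = DatasetGraphFactory.create();

        // Front-end dataset: every execution opens a fresh RDFLink onto the backend.
        DatasetGraph frontend = new DatasetGraphOverRDFLink(() -> RDFLink.connect(backend));

        // The update is routed through the adapter registry rather than the local update engine.
        UpdateExec.dataset(frontend)
                  .update("INSERT DATA { <urn:ex:s> <urn:ex:p> <urn:ex:o> }")
                  .execute();

        // The query likewise runs over a fresh link, which is closed together with the QueryExec.
        try (QueryExec qe = QueryExec.dataset(frontend).query("SELECT * { ?s ?p ?o }").build()) {
            RowSetOps.out(System.out, qe.select());
        }
    }
}

Against an HTTP endpoint, only the link creator changes, e.g. () -> RDFLinkHTTP.newBuilder().destination(...).build() as in ExampleDBpediaViaRemoteDataset.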