diff --git a/src/main/java/org/topbraid/jenax/functions/AbstractFunction.java b/src/main/java/org/topbraid/jenax/functions/AbstractFunction.java
index c1a51cb5..64ed35a0 100644
--- a/src/main/java/org/topbraid/jenax/functions/AbstractFunction.java
+++ b/src/main/java/org/topbraid/jenax/functions/AbstractFunction.java
@@ -31,6 +31,7 @@
import org.apache.jena.sparql.function.Function;
import org.apache.jena.sparql.function.FunctionEnv;
import org.apache.jena.sparql.sse.SSE;
+import org.apache.jena.sparql.util.Context;
import org.apache.jena.sparql.util.FmtUtils;
import org.topbraid.jenax.statistics.ExecStatistics;
import org.topbraid.jenax.statistics.ExecStatisticsManager;
@@ -44,10 +45,8 @@
public abstract class AbstractFunction implements Function {
@Override
- public void build(String uri, ExprList args) {
- }
+ public void build(String uri, ExprList args, Context context){}
-
@Override
public NodeValue exec(Binding binding, ExprList args, String uri, FunctionEnv env) {
Node[] nodes = new Node[args.size()];
diff --git a/src/main/java/org/topbraid/jenax/progress/ProgressMonitor.java b/src/main/java/org/topbraid/jenax/progress/ProgressMonitor.java
index 36f661e9..a445a2e3 100644
--- a/src/main/java/org/topbraid/jenax/progress/ProgressMonitor.java
+++ b/src/main/java/org/topbraid/jenax/progress/ProgressMonitor.java
@@ -21,60 +21,66 @@
/**
* Inspired by the Eclipse IProgressMonitor, this interface supports monitoring long-running processes with intermediate
* status messages and the ability to cancel.
- *
+ *
* @author Holger Knublauch
*/
public interface ProgressMonitor {
- /**
- * Typically used by the (long-running) process to determine whether the user has requested cancellation.
- * The process should then find a suitable, clean termination.
- * @return true if cancel was requested
- */
- boolean isCanceled();
-
-
- /**
- * Informs the progress monitor that a new task has been started, with a given number of expected steps.
- * A UI connected to the ProgressMonitor would typically display something like a progress bar and the task name.
- * @param label the name of the task
-     * @param totalWork  the number of steps (see <code>worked</code>) that is expected to be needed to complete the task
- */
- void beginTask(String label, int totalWork);
-
-
- /**
- * Informs the progress monitor that all is completed.
- */
- void done();
-
-
- /**
- * Typically called from a parallel thread triggered by the user, this informs the progress monitor that it needs to
-     * return <code>true</code> for <code>isCanceled</code>.
- * Once a process has been canceled, it should not be un-canceled.
- * @param value true if canceled
- */
- void setCanceled(boolean value);
-
-
- /**
- * Changes the name or label of the current task.
- * @param value
- */
- void setTaskName(String value);
-
-
- /**
- * Sets the label that serves as sub-task, typically printed under the main task name.
- * @param label the subtask label
- */
- void subTask(String label);
-
-
- /**
-     * Informs the progress monitor that one or more steps have been completed towards the current task (see <code>beginTask</code>).
- * @param amount the number of steps completed
- */
- void worked(int amount);
+ /**
+ * Typically used by the (long-running) process to determine whether the user has requested cancellation.
+ * The process should then find a suitable, clean termination.
+ *
+ * @return true if cancel was requested
+ */
+ boolean isCanceled();
+
+
+ /**
+ * Informs the progress monitor that a new task has been started, with a given number of expected steps.
+ * A UI connected to the ProgressMonitor would typically display something like a progress bar and the task name.
+ *
+ * @param label the name of the task
+     * @param totalWork the number of steps (see <code>worked</code>) that is expected to be needed to complete the task
+ */
+ void beginTask(String label, int totalWork);
+
+
+ /**
+ * Informs the progress monitor that all is completed.
+ */
+ void done();
+
+
+ /**
+ * Typically called from a parallel thread triggered by the user, this informs the progress monitor that it needs to
+     * return <code>true</code> for <code>isCanceled</code>.
+ * Once a process has been canceled, it should not be un-canceled.
+ *
+ * @param value true if canceled
+ */
+ void setCanceled(boolean value);
+
+
+ /**
+ * Changes the name or label of the current task.
+ *
+ * @param value the task name
+ */
+ void setTaskName(String value);
+
+
+ /**
+ * Sets the label that serves as sub-task, typically printed under the main task name.
+ *
+ * @param label the subtask label
+ */
+ void subTask(String label);
+
+
+ /**
+     * Informs the progress monitor that one or more steps have been completed towards the current task (see <code>beginTask</code>).
+ *
+ * @param amount the number of steps completed
+ */
+ void worked(int amount);
}
diff --git a/src/main/java/org/topbraid/jenax/util/DiffGraph.java b/src/main/java/org/topbraid/jenax/util/DiffGraph.java
index 75c77542..138af200 100644
--- a/src/main/java/org/topbraid/jenax/util/DiffGraph.java
+++ b/src/main/java/org/topbraid/jenax/util/DiffGraph.java
@@ -1,225 +1,218 @@
package org.topbraid.jenax.util;
-import java.util.HashSet;
-import java.util.Set;
-
import org.apache.jena.graph.Graph;
+import org.apache.jena.graph.GraphMemFactory;
import org.apache.jena.graph.Node;
import org.apache.jena.graph.Triple;
import org.apache.jena.graph.impl.GraphMatcher;
import org.apache.jena.graph.impl.GraphWithPerform;
-import org.apache.jena.mem.GraphMem;
import org.apache.jena.shared.PrefixMapping;
import org.apache.jena.shared.impl.PrefixMappingImpl;
import org.apache.jena.util.iterator.ExtendedIterator;
+import java.util.HashSet;
+import java.util.Set;
+
/**
* A WrappedGraph that filters out deleted triples or adds added triples, without
* modifying the underlying base graph.
- *
+ *
* This class is for single-threaded use only, typically used as temporary graph layer on top of an existing
* graph for the duration of some algorithm.
- *
+ *
* This runs in two modes, based on the updateBaseGraph flag.
- *
+ *
* By default/legacy (false) the system will only add triples that exist in none of the subgraphs of the delegate graph
* and claim to delete triples even if they exist in subgraphs only.
- *
+ *
* If true, the adds will always be applied even if one of the subgraphs already contains the triple.
* This is making sure that transformations will always produce all requested triples.
* Furthermore this mode is more correct w.r.t. deletes because it will only allow deleting triples from the editable graph.
- *
+ *
* @author Holger Knublauch
*/
public class DiffGraph extends TransparentWrappedGraph {
- /**
- * This graph has additional triples that are not in the delegate.
- */
- protected GraphWithPerform addedGraph = new GraphMem();
-
- /**
- * This Set has triples that are in the delegate but are excluded from the filtered graph.
- */
- protected Set<Triple> deletedTriples = new HashSet<>();
-
- private PrefixMapping pm;
-
- // The graph that the triples will be added to
- private Graph updateableGraph;
-
-
- public DiffGraph(Graph delegate) {
- this(delegate, false);
- }
-
-
- public DiffGraph(Graph delegate, boolean updateBaseGraph) {
- super(delegate);
- if(updateBaseGraph) {
- updateableGraph = JenaUtil.getBaseGraph(delegate);
- }
- else {
- updateableGraph = delegate;
- }
- }
-
-
- @Override
- public void add(Triple t) {
- performAdd(t);
- }
-
-
- @Override
- public void delete(Triple t) {
- performDelete(t);
- }
-
-
- public Graph getAddedGraph() {
- return addedGraph;
- }
-
-
- @Override
- public boolean contains(Node s, Node p, Node o) {
- return contains(Triple.create(s == null ? Node.ANY : s, p == null ? Node.ANY : p, o == null ? Node.ANY : o));
- }
-
-
- @Override
- public boolean contains(Triple t) {
- if(addedGraph.contains(t)) {
- return true;
- }
- else {
- ExtendedIterator<Triple> it = base.find(t);
- while(it.hasNext()) {
- Triple n = it.next();
- if(!deletedTriples.contains(n)) {
- it.close();
- return true;
- }
- }
- return false;
- }
- }
-
-
- // TODO: If the delegate does not use equals for add and delete
- // but sameValueAs then this code is incorrect.
- // Specifically we should be able to show bugs with TDB which does
- // something different from either equals or sameValueAs.
- protected boolean containsByEquals(Graph g, Triple t) {
- ExtendedIterator<Triple> it = g.find(t);
- try {
- while (it.hasNext()) {
- if (t.equals(it.next()))
- return true;
- }
- }
- finally {
- it.close();
- }
- return false;
- }
-
-
- @Override
- public ExtendedIterator<Triple> find(Node s, Node p, Node o) {
-
- // First get the underlying base query (without any buffered triples)
- ExtendedIterator<Triple> base = super.find(s, p, o);
-
- // If deleted triples exist then continue with a filtered iterator
- if(deletedTriples.size() > 0) {
- // base without deleted triples.
- base = base.filterDrop(deletedTriples::contains);
- }
-
- // If added triples exist then chain the two together
- // this iterator supports remove and removes correctly for this graph
- ExtendedIterator<Triple> added = addedGraph.find(s, p, o);
- if(added.hasNext()) {
- return base.andThen(added); // base and added are guaranteed disjoint
- }
- else {
- return base;
- }
- }
-
-
- @Override
- public ExtendedIterator<Triple> find(Triple m) {
- return find(m.getMatchSubject(), m.getMatchPredicate(), m.getMatchObject());
- }
-
-
- public Set<Triple> getDeletedTriples() {
- return deletedTriples;
- }
-
-
- @Override
- public PrefixMapping getPrefixMapping() {
- if (pm == null) {
- // copy delegate's prefix mapping.
- pm = new PrefixMappingImpl().setNsPrefixes(base.getPrefixMapping());
- }
- return pm;
- }
-
-
- @Override
- public boolean isEmpty() {
- if (!addedGraph.isEmpty()) {
- return false;
- }
- if (deletedTriples.isEmpty()) {
- return base.isEmpty();
- }
- ExtendedIterator<Triple> it = find(Triple.ANY);
- try {
- return !it.hasNext();
- }
- finally {
- it.close();
- }
- }
-
-
+ /**
+ * This graph has additional triples that are not in the delegate.
+ */
+ protected GraphWithPerform addedGraph = (GraphWithPerform) GraphMemFactory.createDefaultGraph();
+
+ /**
+ * This Set has triples that are in the delegate but are excluded from the filtered graph.
+ */
+    protected Set<Triple> deletedTriples = new HashSet<>();
+
+ private PrefixMapping pm;
+
+ // The graph that the triples will be added to
+ private Graph updateableGraph;
+
+
+ public DiffGraph(Graph delegate) {
+ this(delegate, false);
+ }
+
+
+ public DiffGraph(Graph delegate, boolean updateBaseGraph) {
+ super(delegate);
+ if (updateBaseGraph) {
+ updateableGraph = JenaUtil.getBaseGraph(delegate);
+ } else {
+ updateableGraph = delegate;
+ }
+ }
+
+
+ @Override
+ public void add(Triple t) {
+ performAdd(t);
+ }
+
+
+ @Override
+ public void delete(Triple t) {
+ performDelete(t);
+ }
+
+
+ public Graph getAddedGraph() {
+ return addedGraph;
+ }
+
+
+ @Override
+ public boolean contains(Node s, Node p, Node o) {
+ return contains(Triple.create(s == null ? Node.ANY : s, p == null ? Node.ANY : p, o == null ? Node.ANY : o));
+ }
+
+
@Override
- public boolean isIsomorphicWith(Graph g) {
- return g != null && GraphMatcher.equals(this, g);
- }
-
-
- @Override
- public void performAdd(Triple t) {
- if (deletedTriples.contains(t)) {
- deletedTriples.remove(t);
- }
- else if(!containsByEquals(addedGraph, t) && !containsByEquals(updateableGraph, t)) {
- addedGraph.add(t);
- }
- }
-
-
- @Override
- public void performDelete(Triple t) {
- if(containsByEquals(addedGraph, t)) {
- addedGraph.delete(t);
- }
- else if(containsByEquals(updateableGraph, t)) {
- deletedTriples.add(t);
- }
- }
-
-
- @Override
- public int size() {
- return super.size() - deletedTriples.size() + addedGraph.size();
- }
+ public boolean contains(Triple t) {
+ if (addedGraph.contains(t)) {
+ return true;
+ } else {
+            ExtendedIterator<Triple> it = base.find(t);
+ while (it.hasNext()) {
+ Triple n = it.next();
+ if (!deletedTriples.contains(n)) {
+ it.close();
+ return true;
+ }
+ }
+ return false;
+ }
+ }
+
+
+ // TODO: If the delegate does not use equals for add and delete
+ // but sameValueAs then this code is incorrect.
+ // Specifically we should be able to show bugs with TDB which does
+ // something different from either equals or sameValueAs.
+ protected boolean containsByEquals(Graph g, Triple t) {
+        ExtendedIterator<Triple> it = g.find(t);
+ try {
+ while (it.hasNext()) {
+ if (t.equals(it.next()))
+ return true;
+ }
+ } finally {
+ it.close();
+ }
+ return false;
+ }
+
+
+ @Override
+    public ExtendedIterator<Triple> find(Node s, Node p, Node o) {
+
+ // First get the underlying base query (without any buffered triples)
+        ExtendedIterator<Triple> base = super.find(s, p, o);
+
+ // If deleted triples exist then continue with a filtered iterator
+ if (deletedTriples.size() > 0) {
+ // base without deleted triples.
+ base = base.filterDrop(deletedTriples::contains);
+ }
+
+ // If added triples exist then chain the two together
+ // this iterator supports remove and removes correctly for this graph
+        ExtendedIterator<Triple> added = addedGraph.find(s, p, o);
+ if (added.hasNext()) {
+ return base.andThen(added); // base and added are guaranteed disjoint
+ } else {
+ return base;
+ }
+ }
+
+
+ @Override
+    public ExtendedIterator<Triple> find(Triple m) {
+ return find(m.getMatchSubject(), m.getMatchPredicate(), m.getMatchObject());
+ }
+
+
+    public Set<Triple> getDeletedTriples() {
+ return deletedTriples;
+ }
+
+
+ @Override
+ public PrefixMapping getPrefixMapping() {
+ if (pm == null) {
+ // copy delegate's prefix mapping.
+ pm = new PrefixMappingImpl().setNsPrefixes(base.getPrefixMapping());
+ }
+ return pm;
+ }
+
+
+ @Override
+ public boolean isEmpty() {
+ if (!addedGraph.isEmpty()) {
+ return false;
+ }
+ if (deletedTriples.isEmpty()) {
+ return base.isEmpty();
+ }
+        ExtendedIterator<Triple> it = find(Triple.ANY);
+ try {
+ return !it.hasNext();
+ } finally {
+ it.close();
+ }
+ }
+
+
+ @Override
+ public boolean isIsomorphicWith(Graph g) {
+ return g != null && GraphMatcher.equals(this, g);
+ }
+
+
+ @Override
+ public void performAdd(Triple t) {
+ if (deletedTriples.contains(t)) {
+ deletedTriples.remove(t);
+ } else if (!containsByEquals(addedGraph, t) && !containsByEquals(updateableGraph, t)) {
+ addedGraph.add(t);
+ }
+ }
+
+
+ @Override
+ public void performDelete(Triple t) {
+ if (containsByEquals(addedGraph, t)) {
+ addedGraph.delete(t);
+ } else if (containsByEquals(updateableGraph, t)) {
+ deletedTriples.add(t);
+ }
+ }
+
+
+ @Override
+ public int size() {
+ return super.size() - deletedTriples.size() + addedGraph.size();
+ }
}
diff --git a/src/main/java/org/topbraid/jenax/util/GraphNotFoundException.java b/src/main/java/org/topbraid/jenax/util/GraphNotFoundException.java
index 0e8a470c..e1597b44 100644
--- a/src/main/java/org/topbraid/jenax/util/GraphNotFoundException.java
+++ b/src/main/java/org/topbraid/jenax/util/GraphNotFoundException.java
@@ -19,17 +19,16 @@
/**
* An Exception thrown if a named graph could not be resolved
* while setting the default graph of a dataset.
- *
+ *
* This is subclassing RuntimeException because otherwise a lot of
* existing code would have to catch GraphNotFoundException
* (where it would otherwise have crashed with a NullPointerException anyway).
- *
+ *
* @author Holger Knublauch
*/
-@SuppressWarnings("serial")
public class GraphNotFoundException extends RuntimeException {
- public GraphNotFoundException(String message) {
- super(message);
- }
+ public GraphNotFoundException(String message) {
+ super(message);
+ }
}
diff --git a/src/main/java/org/topbraid/jenax/util/JenaUtil.java b/src/main/java/org/topbraid/jenax/util/JenaUtil.java
index b092f064..0a8c069b 100644
--- a/src/main/java/org/topbraid/jenax/util/JenaUtil.java
+++ b/src/main/java/org/topbraid/jenax/util/JenaUtil.java
@@ -17,41 +17,15 @@
package org.topbraid.jenax.util;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.function.BiFunction;
-
import org.apache.jena.enhanced.EnhGraph;
-import org.apache.jena.graph.Factory;
import org.apache.jena.graph.Graph;
+import org.apache.jena.graph.GraphMemFactory;
import org.apache.jena.graph.Node;
import org.apache.jena.graph.compose.MultiUnion;
import org.apache.jena.ontology.OntModel;
import org.apache.jena.ontology.OntModelSpec;
-import org.apache.jena.query.ARQ;
-import org.apache.jena.query.Dataset;
-import org.apache.jena.query.Query;
-import org.apache.jena.query.QueryExecution;
-import org.apache.jena.query.QuerySolution;
-import org.apache.jena.query.QuerySolutionMap;
-import org.apache.jena.query.ResultSet;
-import org.apache.jena.rdf.model.Literal;
-import org.apache.jena.rdf.model.Model;
-import org.apache.jena.rdf.model.ModelFactory;
-import org.apache.jena.rdf.model.NodeIterator;
-import org.apache.jena.rdf.model.Property;
-import org.apache.jena.rdf.model.RDFList;
-import org.apache.jena.rdf.model.RDFNode;
-import org.apache.jena.rdf.model.Resource;
-import org.apache.jena.rdf.model.Statement;
-import org.apache.jena.rdf.model.StmtIterator;
+import org.apache.jena.query.*;
+import org.apache.jena.rdf.model.*;
import org.apache.jena.rdf.model.impl.PropertyImpl;
import org.apache.jena.rdf.model.impl.StmtIteratorImpl;
import org.apache.jena.shared.PrefixMapping;
@@ -62,14 +36,7 @@
import org.apache.jena.sparql.engine.binding.Binding;
import org.apache.jena.sparql.engine.binding.BindingBuilder;
import org.apache.jena.sparql.engine.binding.BindingRoot;
-import org.apache.jena.sparql.expr.E_Function;
-import org.apache.jena.sparql.expr.Expr;
-import org.apache.jena.sparql.expr.ExprEvalException;
-import org.apache.jena.sparql.expr.ExprList;
-import org.apache.jena.sparql.expr.ExprTransform;
-import org.apache.jena.sparql.expr.ExprTransformer;
-import org.apache.jena.sparql.expr.ExprVar;
-import org.apache.jena.sparql.expr.NodeValue;
+import org.apache.jena.sparql.expr.*;
import org.apache.jena.sparql.expr.nodevalue.NodeFunctions;
import org.apache.jena.sparql.function.FunctionEnv;
import org.apache.jena.sparql.graph.NodeTransform;
@@ -78,8 +45,8 @@
import org.apache.jena.sparql.syntax.syntaxtransform.ExprTransformNodeElement;
import org.apache.jena.sparql.syntax.syntaxtransform.QueryTransformOps;
import org.apache.jena.sparql.util.Context;
+import org.apache.jena.sparql.util.NodeCmp;
import org.apache.jena.sparql.util.NodeFactoryExtra;
-import org.apache.jena.sparql.util.NodeUtils;
import org.apache.jena.util.iterator.ExtendedIterator;
import org.apache.jena.vocabulary.OWL;
import org.apache.jena.vocabulary.RDF;
@@ -87,1191 +54,1196 @@
import org.apache.jena.vocabulary.XSD;
import org.topbraid.jenax.progress.ProgressMonitor;
+import java.util.*;
+import java.util.function.BiFunction;
+
/**
* Some convenience methods to operate on Jena Models.
- *
+ *
* These methods are not as stable as the rest of the API, but
* they may be of general use.
- *
+ *
* @author Holger Knublauch
*/
public class JenaUtil {
- // Unstable
- private static JenaUtilHelper helper = new JenaUtilHelper();
-
- // Leave this line under the helper line above!
- private static Model dummyModel = JenaUtil.createDefaultModel();
-
- public static final String WITH_IMPORTS_PREFIX = "http://rdfex.org/withImports?uri=";
-
-
-
- /**
- * Sets the helper which allows the behavior of some JenaUtil
- * methods to be modified by the system using the SPIN library.
- * Note: Should not be used outside of TopBraid - not stable.
- * @param h the JenaUtilHelper
- * @return the old helper
- */
- public static JenaUtilHelper setHelper(JenaUtilHelper h) {
- JenaUtilHelper old = helper;
- helper = h;
- return old;
- }
-
-
- /**
- * Gets the current helper object.
- * Note: Should not be used outside of TopBraid - not stable.
- * @return the helper
- */
- public final static JenaUtilHelper getHelper() {
- return helper;
- }
-
-
- /**
- * Populates a result set of resources reachable from a subject via zero or more steps with a given predicate.
- * Implementation note: the results set need only implement {@link Collection#add(Object)}.
- * @param results The transitive objects reached from subject via triples with the given predicate
- * @param subject the subject to start traversal at
- * @param predicate the predicate to walk
- */
- public static void addTransitiveObjects(Set<Resource> results, Resource subject, Property predicate) {
- helper.setGraphReadOptimization(true);
- try {
- addTransitiveObjects(results, new HashSet<>(), subject, predicate);
- }
- finally {
- helper.setGraphReadOptimization(false);
- }
- }
-
-
- private static void addTransitiveObjects(Set<Resource> resources, Set<Resource> reached,
- Resource subject, Property predicate) {
- resources.add(subject);
- reached.add(subject);
- StmtIterator it = subject.listProperties(predicate);
- try {
- while (it.hasNext()) {
- RDFNode object = it.next().getObject();
- if (object instanceof Resource) {
- if (!reached.contains(object)) {
- addTransitiveObjects(resources, reached, (Resource)object, predicate);
- }
- }
- }
- }
- finally {
- it.close();
- }
- }
-
-
- private static void addTransitiveSubjects(Set<Resource> reached, Resource object,
- Property predicate, ProgressMonitor monitor) {
- if (object != null) {
- reached.add(object);
- StmtIterator it = object.getModel().listStatements(null, predicate, object);
- try {
- while (it.hasNext()) {
- if (monitor != null && monitor.isCanceled()) {
- it.close();
- return;
- }
- Resource subject = it.next().getSubject();
- if (!reached.contains(subject)) {
- addTransitiveSubjects(reached, subject, predicate, monitor);
- }
- }
- }
- finally {
- it.close();
- }
- }
- }
-
-
- /**
- * Turns a QuerySolution into a Binding.
- * @param map the input QuerySolution
- * @return a Binding or null if the input is null
- */
- public static Binding asBinding(final QuerySolution map) {
- if(map != null) {
- BindingBuilder builder = BindingBuilder.create();
- Iterator<String> varNames = map.varNames();
- while(varNames.hasNext()) {
- String varName = varNames.next();
- RDFNode node = map.get(varName);
- if(node != null) {
- builder.add(Var.alloc(varName), node.asNode());
- }
- }
- return builder.build();
- }
- else {
- return null;
- }
- }
-
-
- /**
- * Turns a Binding into a QuerySolutionMap.
- * @param binding the Binding to convert
- * @return a QuerySolutionMap
- */
- public static QuerySolutionMap asQuerySolutionMap(Binding binding) {
- QuerySolutionMap map = new QuerySolutionMap();
- Iterator<Var> vars = binding.vars();
- while(vars.hasNext()) {
- Var var = vars.next();
- Node node = binding.get(var);
- if(node != null) {
- map.add(var.getName(), dummyModel.asRDFNode(node));
- }
- }
- return map;
- }
-
-
- /**
- * Returns a set of resources reachable from an object via one or more reversed steps with a given predicate.
- * @param object the object to start traversal at
- * @param predicate the predicate to walk
- * @param monitor an optional progress monitor to allow cancelation
- * @return the reached resources
- */
- public static Set<Resource> getAllTransitiveSubjects(Resource object, Property predicate, ProgressMonitor monitor) {
- Set<Resource> set = new HashSet<>();
- helper.setGraphReadOptimization(true);
- try {
- addTransitiveSubjects(set, object, predicate, monitor);
- }
- finally {
- helper.setGraphReadOptimization(false);
- }
- set.remove(object);
- return set;
- }
-
-
- /**
- * Casts a Resource into a Property.
- * @param resource the Resource to cast
- * @return resource as an instance of Property
- */
- public static Property asProperty(Resource resource) {
- if(resource instanceof Property) {
- return (Property) resource;
- }
- else {
- return new PropertyImpl(resource.asNode(), (EnhGraph)resource.getModel());
- }
- }
-
-
- public static void collectBaseGraphs(Graph graph, Set<Graph> baseGraphs) {
- if(graph instanceof MultiUnion) {
- MultiUnion union = (MultiUnion)graph;
- collectBaseGraphs(union.getBaseGraph(), baseGraphs);
- for(Object subGraph : union.getSubGraphs()) {
- collectBaseGraphs((Graph)subGraph, baseGraphs);
- }
- }
- else if(graph != null) {
- baseGraphs.add(graph);
- }
- }
-
-
- /**
- * Creates a new Graph. By default this will deliver a plain in-memory graph,
- * but other implementations may deliver graphs with concurrency support and
- * other features.
- * @return a default graph
- * @see #createDefaultModel()
- */
- public static Graph createDefaultGraph() {
- return helper.createDefaultGraph();
- }
-
-
- /**
- * Wraps the result of {@link #createDefaultGraph()} into a Model and initializes namespaces.
- * @return a default Model
- * @see #createDefaultGraph()
- */
- public static Model createDefaultModel() {
- Model m = ModelFactory.createModelForGraph(createDefaultGraph());
- initNamespaces(m);
- return m;
- }
-
-
- /**
- * Creates a memory Graph with no reification.
- * @return a new memory graph
- */
- public static Graph createMemoryGraph() {
- return Factory.createDefaultGraph();
- }
-
-
- /**
- * Creates a memory Model with no reification.
- * @return a new memory Model
- */
- public static Model createMemoryModel() {
- return ModelFactory.createModelForGraph(createMemoryGraph());
- }
-
-
- public static MultiUnion createMultiUnion() {
- return helper.createMultiUnion();
- }
-
-
- public static MultiUnion createMultiUnion(Graph[] graphs) {
- return helper.createMultiUnion(graphs);
- }
-
-
- public static MultiUnion createMultiUnion(Iterator<Graph> graphs) {
- return helper.createMultiUnion(graphs);
- }
-
-
- /**
- * Gets all instances of a given class and its subclasses.
- * @param cls the class to get the instances of
- * @return the instances
- */
- public static Set<Resource> getAllInstances(Resource cls) {
- JenaUtil.setGraphReadOptimization(true);
- try {
- Model model = cls.getModel();
- Set<Resource> classes = getAllSubClasses(cls);
- classes.add(cls);
- Set<Resource> results = new HashSet<>();
- for(Resource subClass : classes) {
- StmtIterator it = model.listStatements(null, RDF.type, subClass);
- while (it.hasNext()) {
- results.add(it.next().getSubject());
- }
- }
- return results;
- }
- finally {
- JenaUtil.setGraphReadOptimization(false);
- }
- }
-
-
- public static Set<Resource> getAllSubClasses(Resource cls) {
- return getAllTransitiveSubjects(cls, RDFS.subClassOf);
- }
-
-
- /**
- * Returns a set consisting of a given class and all its subclasses.
- * Similar to rdfs:subClassOf*.
- * @param cls the class to return with its subclasses
- * @return the Set of class resources
- */
- public static Set<Resource> getAllSubClassesStar(Resource cls) {
- Set<Resource> results = getAllTransitiveSubjects(cls, RDFS.subClassOf);
- results.add(cls);
- return results;
- }
-
-
- public static Set<Resource> getAllSubProperties(Property superProperty) {
- return getAllTransitiveSubjects(superProperty, RDFS.subPropertyOf);
- }
-
-
- public static Set<Resource> getAllSuperClasses(Resource cls) {
- return getAllTransitiveObjects(cls, RDFS.subClassOf);
- }
-
-
- /**
- * Returns a set consisting of a given class and all its superclasses.
- * Similar to rdfs:subClassOf*.
- * @param cls the class to return with its superclasses
- * @return the Set of class resources
- */
- public static Set<Resource> getAllSuperClassesStar(Resource cls) {
- Set<Resource> results = getAllTransitiveObjects(cls, RDFS.subClassOf);
- results.add(cls);
- return results;
- }
-
-
- public static Set<Resource> getAllSuperProperties(Property subProperty) {
- return getAllTransitiveObjects(subProperty, RDFS.subPropertyOf);
- }
-
-
- /**
- * Returns a set of resources reachable from a subject via one or more steps with a given predicate.
- * @param subject the subject to start at
- * @param predicate the predicate to traverse
- * @return the reached resources
- */
- public static Set<Resource> getAllTransitiveObjects(Resource subject, Property predicate) {
- Set<Resource> set = new HashSet<>();
- addTransitiveObjects(set, subject, predicate);
- set.remove(subject);
- return set;
- }
-
-
- private static Set<Resource> getAllTransitiveSubjects(Resource object, Property predicate) {
- return getAllTransitiveSubjects(object, predicate, null);
- }
-
-
- public static Set<Resource> getAllTypes(Resource instance) {
- Set<Resource> types = new HashSet<>();
- StmtIterator it = instance.listProperties(RDF.type);
- try {
- while (it.hasNext()) {
- Resource type = it.next().getResource();
- types.add(type);
- types.addAll(getAllSuperClasses(type));
- }
- }
- finally {
- it.close();
- }
- return types;
- }
-
-
- /**
- * Gets the "base graph" of a Model, walking into MultiUnions if needed.
- * @param model the Model to get the base graph of
- * @return the base graph or null if the model contains a MultiUnion that doesn't declare one
- */
- public static Graph getBaseGraph(final Model model) {
- return getBaseGraph(model.getGraph());
- }
-
-
- public static Graph getBaseGraph(Graph graph) {
- Graph baseGraph = graph;
- while(baseGraph instanceof MultiUnion) {
- baseGraph = ((MultiUnion)baseGraph).getBaseGraph();
- }
- return baseGraph;
- }
-
-
- public static Model getBaseModel(Model model) {
- Graph baseGraph = getBaseGraph(model);
- if(baseGraph == model.getGraph()) {
- return model;
- }
- else {
- return ModelFactory.createModelForGraph(baseGraph);
- }
- }
-
-
- /**
- * For a given subject resource and a given collection of (label/comment) properties this finds the most
- * suitable value of either property for a given list of languages (usually from the current user's preferences).
- * For example, if the user's languages are [ "en-AU" ] then the function will prefer "mate"@en-AU over
- * "friend"@en and never return "freund"@de. The function falls back to literals that have no language
- * if no better literal has been found.
- * @param resource the subject resource
- * @param langs the allowed languages
- * @param properties the properties to check
- * @return the best suitable value or null
- */
- public static Literal getBestStringLiteral(Resource resource, List<String> langs, Iterable<Property> properties) {
- return getBestStringLiteral(resource, langs, properties, (r,p) -> r.listProperties(p));
- }
-
-
- public static Literal getBestStringLiteral(Resource resource, List<String> langs, Iterable<Property> properties, BiFunction<Resource,Property,ExtendedIterator<Statement>> getter) {
- String prefLang = langs.isEmpty() ? null : langs.get(0);
- Literal label = null;
- int bestLang = -1;
- for(Property predicate : properties) {
- ExtendedIterator<Statement> it = getter.apply(resource, predicate);
- while(it.hasNext()) {
- RDFNode object = it.next().getObject();
- if(object.isLiteral()) {
- Literal literal = (Literal)object;
- String lang = literal.getLanguage();
- if(lang.length() == 0 && label == null) {
- label = literal;
- }
- else if(prefLang != null && prefLang.equalsIgnoreCase(lang)) {
- it.close();
- return literal;
- }
- else {
- // 1) Never use a less suitable language
- // 2) Never replace an already existing label (esp: skos:prefLabel) unless new lang is better
- // 3) Fall back to more special languages if no other was found (e.g. use en-GB if only "en" is accepted)
- int startLang = bestLang < 0 ? langs.size() - 1 : (label != null ? bestLang - 1 : bestLang);
- for(int i = startLang; i > 0; i--) {
- String langi = langs.get(i);
- if(langi.equalsIgnoreCase(lang)) {
- label = literal;
- bestLang = i;
- }
- else if(label == null && lang.contains("-") && NodeFunctions.langMatches(lang, langi)) {
- label = literal;
- }
- }
- }
- }
- }
- }
- return label;
- }
-
-
- /**
- * Gets the "first" declared rdfs:range of a given property.
- * If multiple ranges exist, the behavior is undefined.
- * Note that this method does not consider ranges defined on
- * super-properties.
- * @param property the property to get the range of
- * @return the "first" range Resource or null
- */
- public static Resource getFirstDirectRange(Resource property) {
- return property.getPropertyResourceValue(RDFS.range);
- }
-
-
- private static Resource getFirstRange(Resource property, Set<Resource> reached) {
- Resource directRange = getFirstDirectRange(property);
- if(directRange != null) {
- return directRange;
- }
- StmtIterator it = property.listProperties(RDFS.subPropertyOf);
- while (it.hasNext()) {
- Statement ss = it.next();
- if (ss.getObject().isURIResource()) {
- Resource superProperty = ss.getResource();
- if (!reached.contains(superProperty)) {
- reached.add(superProperty);
- Resource r = getFirstRange(superProperty, reached);
- if (r != null) {
- it.close();
- return r;
- }
- }
- }
- }
- return null;
- }
-
-
- /**
- * Gets the "first" declared rdfs:range of a given property.
- * If multiple ranges exist, the behavior is undefined.
- * This method walks up to super-properties if no direct match exists.
- * @param property the property to get the range of
- * @return the "first" range Resource or null
- */
- public static Resource getFirstRange(Resource property) {
- return getFirstRange(property, new HashSet<>());
- }
-
-
- public static Set<Resource> getImports(Resource graph) {
- Set<Resource> results = new HashSet<>();
- for(Property importProperty : ImportProperties.get().getImportProperties()) {
- results.addAll(JenaUtil.getResourceProperties(graph, importProperty));
- }
- return results;
- }
-
-
- public static Integer getIntegerProperty(Resource subject, Property predicate) {
- Statement s = subject.getProperty(predicate);
- if(s != null && s.getObject().isLiteral()) {
- return s.getInt();
- }
- else {
- return null;
- }
- }
-
-
- public static RDFList getListProperty(Resource subject, Property predicate) {
- Statement s = subject.getProperty(predicate);
- if(s != null && s.getObject().canAs(RDFList.class)) {
- return s.getResource().as(RDFList.class);
- }
- else {
- return null;
- }
- }
-
-
- public static List<Literal> getLiteralProperties(Resource subject, Property predicate) {
- List<Literal> results = new LinkedList<>();
- StmtIterator it = subject.listProperties(predicate);
- while(it.hasNext()) {
- Statement s = it.next();
- if(s.getObject().isLiteral()) {
- results.add(s.getLiteral());
- }
- }
- return results;
- }
-
-
- /**
- * Walks up the class hierarchy starting at a given class until one of them
- * returns a value for a given Function.
- * @param cls the class to start at
- * @param function the Function to execute on each class
- * @param <T> the requested result type
- * @return the "first" non-null value, or null
- */
- public static <T> T getNearest(Resource cls, java.util.function.Function<Resource,T> function) {
- T result = function.apply(cls);
- if(result != null) {
- return result;
- }
- return getNearest(cls, function, new HashSet<>());
- }
-
-
- private static <T> T getNearest(Resource cls, java.util.function.Function<Resource,T> function, Set<Resource> reached) {
- reached.add(cls);
- StmtIterator it = cls.listProperties(RDFS.subClassOf);
- while(it.hasNext()) {
- Statement s = it.next();
- if(s.getObject().isResource() && !reached.contains(s.getResource())) {
- T result = function.apply(s.getResource());
- if(result == null) {
- result = getNearest(s.getResource(), function, reached);
- }
- if(result != null) {
- it.close();
- return result;
- }
- }
- }
- return null;
- }
-
-
- /**
- * Overcomes a design mismatch with Jena: if the base model does not declare a default namespace then the
- * default namespace of an import is returned - this is not desirable for TopBraid-like scenarios.
- * @param model the Model to operate on
- * @param prefix the prefix to get the URI of
- * @return the URI of prefix
- */
- public static String getNsPrefixURI(Model model, String prefix) {
- if ("".equals(prefix) && model.getGraph() instanceof MultiUnion) {
- Graph baseGraph = ((MultiUnion)model.getGraph()).getBaseGraph();
- if(baseGraph != null) {
- return baseGraph.getPrefixMapping().getNsPrefixURI(prefix);
- }
- else {
- return model.getNsPrefixURI(prefix);
- }
- }
- else {
- return model.getNsPrefixURI(prefix);
- }
- }
-
-
- public static RDFNode getProperty(Resource subject, Property predicate) {
- Statement s = subject.getProperty(predicate);
- if(s != null) {
- return s.getObject();
- }
- else {
- return null;
- }
- }
-
-
- public static Resource getResourcePropertyWithType(Resource subject, Property predicate, Resource type) {
- StmtIterator it = subject.listProperties(predicate);
- while(it.hasNext()) {
- Statement s = it.next();
- if(s.getObject().isResource() && JenaUtil.hasIndirectType(s.getResource(), type)) {
- it.close();
- return s.getResource();
- }
- }
- return null;
- }
-
-
- public static List<Resource> getResourceProperties(Resource subject, Property predicate) {
- List<Resource> results = new LinkedList<>();
- StmtIterator it = subject.listProperties(predicate);
- while(it.hasNext()) {
- Statement s = it.next();
- if(s.getObject().isResource()) {
- results.add(s.getResource());
- }
- }
- return results;
- }
-
-
- public static Resource getURIResourceProperty(Resource subject, Property predicate) {
- Statement s = subject.getProperty(predicate);
- if(s != null && s.getObject().isURIResource()) {
- return s.getResource();
- }
- else {
- return null;
- }
- }
-
-
- public static List<Resource> getURIResourceProperties(Resource subject, Property predicate) {
- List<Resource> results = new LinkedList<>();
- StmtIterator it = subject.listProperties(predicate);
- while(it.hasNext()) {
- Statement s = it.next();
- if(s.getObject().isURIResource()) {
- results.add(s.getResource());
- }
- }
- return results;
- }
-
-
- public static String getStringProperty(Resource subject, Property predicate) {
- Statement s = subject.getProperty(predicate);
- if(s != null && s.getObject().isLiteral()) {
- return s.getString();
- }
- else {
- return null;
- }
- }
-
-
- public static boolean getBooleanProperty(Resource subject, Property predicate) {
- Statement s = subject.getProperty(predicate);
- if(s != null && s.getObject().isLiteral()) {
- return s.getBoolean();
- }
- else {
- return false;
- }
- }
-
-
- public static Double getDoubleProperty(Resource subject, Property predicate) {
- Statement s = subject.getProperty(predicate);
- if(s != null && s.getObject().isLiteral()) {
- return s.getDouble();
- }
- else {
- return null;
- }
- }
-
-
- public static double getDoubleProperty(Resource subject, Property predicate, double defaultValue) {
- Double d = getDoubleProperty(subject, predicate);
- if(d != null) {
- return d;
- }
- else {
- return defaultValue;
- }
- }
-
-
- public static List<Graph> getSubGraphs(MultiUnion union) {
- List<Graph> results = new LinkedList<>();
- Graph baseGraph = union.getBaseGraph();
- if(baseGraph != null) {
- results.add(baseGraph);
- }
- results.addAll(union.getSubGraphs());
- return results;
- }
-
-
- /**
- * Gets a Set of all superclasses (rdfs:subClassOf) of a given Resource.
- * @param subClass the subClass Resource
- * @return a Collection of class resources
- */
- public static Collection<Resource> getSuperClasses(Resource subClass) {
- NodeIterator it = subClass.getModel().listObjectsOfProperty(subClass, RDFS.subClassOf);
- Set<Resource> results = new HashSet<>();
- while (it.hasNext()) {
- RDFNode node = it.nextNode();
- if (node instanceof Resource) {
- results.add((Resource)node);
- }
- }
- return results;
- }
-
-
- /**
- * Gets the "first" type of a given Resource.
- * @param instance the instance to get the type of
- * @return the type or null
- */
- public static Resource getType(Resource instance) {
- return instance.getPropertyResourceValue(RDF.type);
- }
-
-
- /**
- * Gets a Set of all rdf:types of a given Resource.
- * @param instance the instance Resource
- * @return a Collection of type resources
- */
- public static List<Resource> getTypes(Resource instance) {
- return JenaUtil.getResourceProperties(instance, RDF.type);
- }
-
-
- /**
- * Checks whether a given Resource is an instance of a given type, or
- * a subclass thereof. Make sure that the expectedType parameter is associated
- * with the right Model, because the system will try to walk up the superclasses
- * of expectedType. The expectedType may have no Model, in which case
- * the method will use the instance's Model.
- * @param instance the Resource to test
- * @param expectedType the type that instance is expected to have
- * @return true if resource has rdf:type expectedType
- */
- public static boolean hasIndirectType(Resource instance, Resource expectedType) {
-
- if(expectedType.getModel() == null) {
- expectedType = expectedType.inModel(instance.getModel());
- }
-
- StmtIterator it = instance.listProperties(RDF.type);
- while(it.hasNext()) {
- Statement s = it.next();
- if(s.getObject().isResource()) {
- Resource actualType = s.getResource();
- if(actualType.equals(expectedType) || JenaUtil.hasSuperClass(actualType, expectedType)) {
- it.close();
- return true;
- }
- }
- }
- return false;
- }
-
-
- /**
- * Checks whether a given class has a given (transitive) super class.
- * @param subClass the sub-class
- * @param superClass the super-class
- * @return true if subClass has superClass (somewhere up the tree)
- */
- public static boolean hasSuperClass(Resource subClass, Resource superClass) {
- return hasSuperClass(subClass, superClass, new HashSet<>());
- }
-
-
- private static boolean hasSuperClass(Resource subClass, Resource superClass, Set<Resource> reached) {
- StmtIterator it = subClass.listProperties(RDFS.subClassOf);
- while(it.hasNext()) {
- Statement s = it.next();
- if(superClass.equals(s.getObject())) {
- it.close();
- return true;
- }
- else if(!reached.contains(s.getResource())) {
- reached.add(s.getResource());
- if(hasSuperClass(s.getResource(), superClass, reached)) {
- it.close();
- return true;
- }
- }
- }
- return false;
- }
-
-
- /**
- * Checks whether a given property has a given (transitive) super property.
- * @param subProperty the sub-property
- * @param superProperty the super-property
- * @return true if subProperty has superProperty (somewhere up the tree)
- */
- public static boolean hasSuperProperty(Property subProperty, Property superProperty) {
- return getAllSuperProperties(subProperty).contains(superProperty);
- }
-
-
- /**
- * Sets the usual default namespaces for rdf, rdfs, owl and xsd.
- * @param graph the Graph to modify
- */
- public static void initNamespaces(Graph graph) {
- PrefixMapping prefixMapping = graph.getPrefixMapping();
- initNamespaces(prefixMapping);
- }
-
-
- /**
- * Sets the usual default namespaces for rdf, rdfs, owl and xsd.
- * @param prefixMapping the Model to modify
- */
- public static void initNamespaces(PrefixMapping prefixMapping) {
- ensurePrefix(prefixMapping, "rdf", RDF.getURI());
- ensurePrefix(prefixMapping, "rdfs", RDFS.getURI());
- ensurePrefix(prefixMapping, "owl", OWL.getURI());
- ensurePrefix(prefixMapping, "xsd", XSD.getURI());
- }
-
- private static void ensurePrefix(PrefixMapping prefixMapping, String prefix, String uristr) {
- // set if not present, or if different
- if (!uristr.equals(prefixMapping.getNsPrefixURI(prefix))) {
- prefixMapping.setNsPrefix(prefix, uristr);
- }
- }
-
- /**
- * Checks whether a given graph (possibly a MultiUnion) only contains
- * GraphMemBase instances.
- * @param graph the Graph to test
- * @return true if graph is a memory graph
- */
- public static boolean isMemoryGraph(Graph graph) {
- if(graph instanceof MultiUnion) {
- for(Graph subGraph : JenaUtil.getSubGraphs((MultiUnion)graph)) {
- if(!isMemoryGraph(subGraph)) {
- return false;
- }
- }
- return true;
- }
- else {
- return helper.isMemoryGraph(graph);
- }
- }
-
-
- /**
- * Gets an Iterator over all Statements of a given property or its sub-properties
- * at a given subject instance. Note that the predicate and subject should be
- * both attached to a Model to avoid NPEs.
- * @param subject the subject (may be null)
- * @param predicate the predicate
- * @return a StmtIterator
- */
- public static StmtIterator listAllProperties(Resource subject, Property predicate) {
- List<Statement> results = new LinkedList<>();
- helper.setGraphReadOptimization(true);
- try {
- listAllProperties(subject, predicate, new HashSet<>(), results);
- }
- finally {
- helper.setGraphReadOptimization(false);
- }
- return new StmtIteratorImpl(results.iterator());
- }
-
-
- private static void listAllProperties(Resource subject, Property predicate, Set<Property> reached,
- List<Statement> results) {
- reached.add(predicate);
- StmtIterator sit;
- Model model;
- if (subject != null) {
- sit = subject.listProperties(predicate);
- model = subject.getModel();
- }
- else {
- model = predicate.getModel();
- sit = model.listStatements(null, predicate, (RDFNode)null);
- }
- while (sit.hasNext()) {
- results.add(sit.next());
- }
-
- // Iterate into direct subproperties
- StmtIterator it = model.listStatements(null, RDFS.subPropertyOf, predicate);
- while (it.hasNext()) {
- Statement sps = it.next();
- if (!reached.contains(sps.getSubject())) {
- Property subProperty = asProperty(sps.getSubject());
- listAllProperties(subject, subProperty, reached, results);
- }
- }
- }
-
-
- /**
- * This indicates that no further changes to the model are needed.
- * Some implementations may give runtime exceptions if this is violated.
- * @param m the Model to get as a read-only variant
- * @return A read-only model
- */
- public static Model asReadOnlyModel(Model m) {
- return helper.asReadOnlyModel(m);
- }
-
-
- /**
- * This indicates that no further changes to the graph are needed.
- * Some implementations may give runtime exceptions if this is violated.
- * @param g the Graph to get as a read-only variant
- * @return a read-only graph
- */
- public static Graph asReadOnlyGraph(Graph g) {
- return helper.asReadOnlyGraph(g);
- }
-
-
- // Internal to TopBraid only
- public static OntModel createOntologyModel(OntModelSpec spec, Model base) {
- return helper.createOntologyModel(spec,base);
- }
-
-
- /**
- * Allows some environments, e.g. TopBraid, to prioritize
- * a block of code for reading graphs, with no update occurring.
- * The top of the block should call this with true
- * with a matching call with false in a finally
- * block.
- *
- * Note: Unstable - don't use outside of TopBraid.
- *
- * @param onOrOff true to switch on
- */
- public static void setGraphReadOptimization(boolean onOrOff) {
- helper.setGraphReadOptimization(onOrOff);
- }
-
-
- /**
- * Ensure that we there is a read-only, thread safe version of the
- * graph. If the graph is not, then create a deep clone that is
- * both.
- *
- * Note: Unstable - don't use outside of TopBraid.
- *
- * @param g The given graph
- * @return A read-only, thread safe version of the given graph.
- */
- public static Graph deepCloneForReadOnlyThreadSafe(Graph g) {
- return helper.deepCloneReadOnlyGraph(g);
- }
-
-
- /**
- * Calls a SPARQL expression and returns the result, using some initial bindings.
- *
- * @param expression the expression to execute (must contain absolute URIs)
- * @param initialBinding the initial bindings for the unbound variables
- * @param dataset the query Dataset or null for default
- * @return the result or null
- */
- public static Node invokeExpression(String expression, QuerySolution initialBinding, Dataset dataset) {
- if (dataset == null) {
- dataset = ARQFactory.get().getDataset(ModelFactory.createDefaultModel());
- }
- Query query = ARQFactory.get().createExpressionQuery(expression);
- try(QueryExecution qexec = ARQFactory.get().createQueryExecution(query, dataset, initialBinding)) {
- ResultSet rs = qexec.execSelect();
- Node result = null;
- if (rs.hasNext()) {
- QuerySolution qs = rs.next();
- String firstVarName = rs.getResultVars().get(0);
- RDFNode rdfNode = qs.get(firstVarName);
- if (rdfNode != null) {
- result = rdfNode.asNode();
- }
- }
- return result;
- }
- }
-
-
- /**
- * Calls a given SPARQL function with no arguments.
- *
- * @param function the URI resource of the function to call
- * @param dataset the Dataset to operate on or null for default
- * @return the result of the function call
- */
- public static Node invokeFunction0(Resource function, Dataset dataset) {
- ExprList args = new ExprList();
- return invokeFunction(function, args, dataset);
- }
-
-
- /**
- * Calls a given SPARQL function with one argument.
- *
- * @param function the URI resource of the function to call
- * @param argument the first argument
- * @param dataset the Dataset to operate on or null for default
- * @return the result of the function call
- */
- public static Node invokeFunction1(Resource function, RDFNode argument, Dataset dataset) {
- ExprList args = new ExprList();
- args.add(argument != null ? NodeValue.makeNode(argument.asNode()) : new ExprVar("arg1"));
- return invokeFunction(function, args, dataset);
- }
-
-
- public static Node invokeFunction1(Resource function, Node argument, Dataset dataset) {
- return invokeFunction1(function, toRDFNode(argument), dataset);
- }
-
-
- /**
- * Calls a given SPARQL function with two arguments.
- *
- * @param function the URI resource of the function to call
- * @param argument1 the first argument
- * @param argument2 the second argument
- * @param dataset the Dataset to operate on or null for default
- * @return the result of the function call
- */
- public static Node invokeFunction2(Resource function, RDFNode argument1, RDFNode argument2, Dataset dataset) {
- ExprList args = new ExprList();
- args.add(argument1 != null ? NodeValue.makeNode(argument1.asNode()) : new ExprVar("arg1"));
- args.add(argument2 != null ? NodeValue.makeNode(argument2.asNode()) : new ExprVar("arg2"));
- return invokeFunction(function, args, dataset);
- }
-
-
- public static Node invokeFunction2(Resource function, Node argument1, Node argument2, Dataset dataset) {
- return invokeFunction2(function, toRDFNode(argument1), toRDFNode(argument2), dataset);
- }
-
-
- public static Node invokeFunction3(Resource function, RDFNode argument1, RDFNode argument2, RDFNode argument3, Dataset dataset) {
- ExprList args = new ExprList();
- args.add(argument1 != null ? NodeValue.makeNode(argument1.asNode()) : new ExprVar("arg1"));
- args.add(argument2 != null ? NodeValue.makeNode(argument2.asNode()) : new ExprVar("arg2"));
- args.add(argument3 != null ? NodeValue.makeNode(argument3.asNode()) : new ExprVar("arg3"));
- return invokeFunction(function, args, dataset);
- }
-
-
- private static Node invokeFunction(Resource function, ExprList args, Dataset dataset) {
-
- if (dataset == null) {
- dataset = ARQFactory.get().getDataset(ModelFactory.createDefaultModel());
- }
-
- E_Function expr = new E_Function(function.getURI(), args);
- DatasetGraph dsg = dataset.asDatasetGraph();
- Context cxt = ARQ.getContext().copy();
- cxt.set(ARQConstants.sysCurrentTime, NodeFactoryExtra.nowAsDateTime());
- FunctionEnv env = new ExecutionContext(cxt, dsg.getDefaultGraph(), dsg, null);
- try {
- NodeValue r = expr.eval(BindingRoot.create(), env);
- if(r != null) {
- return r.asNode();
- }
- }
- catch(ExprEvalException ex) {
- }
- return null;
- }
-
-
- public static Node invokeFunction3(Resource function, Node argument1, Node argument2, Node argument3, Dataset dataset) {
- return invokeFunction3(function, toRDFNode(argument1), toRDFNode(argument2), toRDFNode(argument3), dataset);
- }
-
-
- /**
- * Temp patch for a bug in Jena's syntaxtransform, also applying substitutions on
- * HAVING clauses.
- * @param query the Query to transform
- * @param substitutions the variable bindings
- * @return a new Query with the bindings applied
- */
- public static Query queryWithSubstitutions(Query query, final Map<Var,Node> substitutions) {
- Query result = QueryTransformOps.transform(query, substitutions);
-
- // TODO: Replace this hack once there is a Jena patch
- if(result.hasHaving()) {
- NodeTransform nodeTransform = new NodeTransform() {
- @Override
- public Node apply(Node node) {
- Node n = substitutions.get(node) ;
- if ( n == null ) {
- return node ;
- }
- return n ;
- }
- };
- ElementTransform eltrans = new ElementTransformSubst(substitutions) ;
- ExprTransform exprTrans = new ExprTransformNodeElement(nodeTransform, eltrans) ;
- List<Expr> havingExprs = result.getHavingExprs();
- for(int i = 0; i < havingExprs.size(); i++) {
- Expr old = havingExprs.get(i);
- Expr neo = ExprTransformer.transform(exprTrans, old) ;
- if ( neo != old ) {
- havingExprs.set(i, neo);
- }
- }
- }
- return result;
- }
-
-
- public static void sort(List<Resource> nodes) {
- Collections.sort(nodes, new Comparator<Resource>() {
- @Override
- public int compare(Resource o1, Resource o2) {
- return NodeUtils.compareRDFTerms(o1.asNode(), o2.asNode());
- }
- });
- }
-
-
- public static RDFNode toRDFNode(Node node) {
- if(node != null) {
- return dummyModel.asRDFNode(node);
- }
- else {
- return null;
- }
- }
-
-
- public static String withImports(String uri) {
- if(!uri.startsWith(WITH_IMPORTS_PREFIX)) {
- return WITH_IMPORTS_PREFIX + uri;
- }
- else {
- return uri;
- }
- }
-
-
- public static String withoutImports(String uri) {
- if(uri.startsWith(WITH_IMPORTS_PREFIX)) {
- return uri.substring(WITH_IMPORTS_PREFIX.length());
- }
- else {
- return uri;
- }
- }
+ // Unstable
+ private static JenaUtilHelper helper = new JenaUtilHelper();
+
+ // Leave this line under the helper line above!
+ private static Model dummyModel = JenaUtil.createDefaultModel();
+
+ public static final String WITH_IMPORTS_PREFIX = "http://rdfex.org/withImports?uri=";
+
+
+ /**
+ * Sets the helper which allows the behavior of some JenaUtil
+ * methods to be modified by the system using the SPIN library.
+ * Note: Should not be used outside of TopBraid - not stable.
+ *
+ * @param h the JenaUtilHelper
+ * @return the old helper
+ */
+ public static JenaUtilHelper setHelper(JenaUtilHelper h) {
+ JenaUtilHelper old = helper;
+ helper = h;
+ return old;
+ }
+
+
+ /**
+ * Gets the current helper object.
+ * Note: Should not be used outside of TopBraid - not stable.
+ *
+ * @return the helper
+ */
+ public static JenaUtilHelper getHelper() {
+ return helper;
+ }
+
+
+ /**
+ * Populates a result set of resources reachable from a subject via zero or more steps with a given predicate.
+ * Implementation note: the results set need only implement {@link Collection#add(Object)}.
+ *
+ * @param results The transitive objects reached from subject via triples with the given predicate
+ * @param subject the subject to start traversal at
+ * @param predicate the predicate to walk
+ */
+ public static void addTransitiveObjects(Set<Resource> results, Resource subject, Property predicate) {
+ helper.setGraphReadOptimization(true);
+ try {
+ addTransitiveObjects(results, new HashSet<>(), subject, predicate);
+ } finally {
+ helper.setGraphReadOptimization(false);
+ }
+ }
+
+
+ private static void addTransitiveObjects(Set<Resource> resources, Set<Resource> reached,
+ Resource subject, Property predicate) {
+ resources.add(subject);
+ reached.add(subject);
+ StmtIterator it = subject.listProperties(predicate);
+ try {
+ while (it.hasNext()) {
+ RDFNode object = it.next().getObject();
+ if (object instanceof Resource) {
+ if (!reached.contains(object)) {
+ addTransitiveObjects(resources, reached, (Resource) object, predicate);
+ }
+ }
+ }
+ } finally {
+ it.close();
+ }
+ }
+
+
+ private static void addTransitiveSubjects(Set<Resource> reached, Resource object,
+ Property predicate, ProgressMonitor monitor) {
+ if (object != null) {
+ reached.add(object);
+ StmtIterator it = object.getModel().listStatements(null, predicate, object);
+ try {
+ while (it.hasNext()) {
+ if (monitor != null && monitor.isCanceled()) {
+ it.close();
+ return;
+ }
+ Resource subject = it.next().getSubject();
+ if (!reached.contains(subject)) {
+ addTransitiveSubjects(reached, subject, predicate, monitor);
+ }
+ }
+ } finally {
+ it.close();
+ }
+ }
+ }
+
+
+ /**
+ * Turns a QuerySolution into a Binding.
+ *
+ * @param map the input QuerySolution
+ * @return a Binding or null if the input is null
+ */
+ public static Binding asBinding(final QuerySolution map) {
+ if (map != null) {
+ BindingBuilder builder = BindingBuilder.create();
+ Iterator<String> varNames = map.varNames();
+ while (varNames.hasNext()) {
+ String varName = varNames.next();
+ RDFNode node = map.get(varName);
+ if (node != null) {
+ builder.add(Var.alloc(varName), node.asNode());
+ }
+ }
+ return builder.build();
+ } else {
+ return null;
+ }
+ }
+
+
+ /**
+ * Turns a Binding into a QuerySolutionMap.
+ *
+ * @param binding the Binding to convert
+ * @return a QuerySolutionMap
+ */
+ public static QuerySolutionMap asQuerySolutionMap(Binding binding) {
+ QuerySolutionMap map = new QuerySolutionMap();
+ Iterator<Var> vars = binding.vars();
+ while (vars.hasNext()) {
+ Var var = vars.next();
+ Node node = binding.get(var);
+ if (node != null) {
+ map.add(var.getName(), dummyModel.asRDFNode(node));
+ }
+ }
+ return map;
+ }
+
+
+ /**
+ * Returns a set of resources reachable from an object via one or more reversed steps with a given predicate.
+ *
+ * @param object the object to start traversal at
+ * @param predicate the predicate to walk
+ * @param monitor an optional progress monitor to allow cancelation
+ * @return the reached resources
+ */
+ public static Set<Resource> getAllTransitiveSubjects(Resource object, Property predicate, ProgressMonitor monitor) {
+ Set<Resource> set = new HashSet<>();
+ helper.setGraphReadOptimization(true);
+ try {
+ addTransitiveSubjects(set, object, predicate, monitor);
+ } finally {
+ helper.setGraphReadOptimization(false);
+ }
+ set.remove(object);
+ return set;
+ }
+
+
+ /**
+ * Casts a Resource into a Property.
+ *
+ * @param resource the Resource to cast
+ * @return resource as an instance of Property
+ */
+ public static Property asProperty(Resource resource) {
+ if (resource instanceof Property) {
+ return (Property) resource;
+ } else {
+ return new PropertyImpl(resource.asNode(), (EnhGraph) resource.getModel());
+ }
+ }
+
+
+ public static void collectBaseGraphs(Graph graph, Set<Graph> baseGraphs) {
+ if (graph instanceof MultiUnion) {
+ MultiUnion union = (MultiUnion) graph;
+ collectBaseGraphs(union.getBaseGraph(), baseGraphs);
+ for (Object subGraph : union.getSubGraphs()) {
+ collectBaseGraphs((Graph) subGraph, baseGraphs);
+ }
+ } else if (graph != null) {
+ baseGraphs.add(graph);
+ }
+ }
+
+
+ /**
+ * Creates a new Graph. By default, this will deliver a plain in-memory graph,
+ * but other implementations may deliver graphs with concurrency support and
+ * other features.
+ *
+ * @return a default graph
+ * @see #createDefaultModel()
+ */
+ public static Graph createDefaultGraph() {
+ return helper.createDefaultGraph();
+ }
+
+
+ /**
+ * Wraps the result of {@link #createDefaultGraph()} into a Model and initializes namespaces.
+ *
+ * @return a default Model
+ * @see #createDefaultGraph()
+ */
+ public static Model createDefaultModel() {
+ Model m = ModelFactory.createModelForGraph(createDefaultGraph());
+ initNamespaces(m);
+ return m;
+ }
+
+
+ /**
+ * Creates a memory Graph with no reification.
+ *
+ * @return a new memory graph
+ */
+ public static Graph createMemoryGraph() {
+ return GraphMemFactory.createDefaultGraph();
+ }
+
+
+ /**
+ * Creates a memory Model with no reification.
+ *
+ * @return a new memory Model
+ */
+ public static Model createMemoryModel() {
+ return ModelFactory.createModelForGraph(createMemoryGraph());
+ }
+
+
+ public static MultiUnion createMultiUnion() {
+ return helper.createMultiUnion();
+ }
+
+
+ public static MultiUnion createMultiUnion(Graph[] graphs) {
+ return helper.createMultiUnion(graphs);
+ }
+
+
+ public static MultiUnion createMultiUnion(Iterator<Graph> graphs) {
+ return helper.createMultiUnion(graphs);
+ }
+
+
+ /**
+ * Gets all instances of a given class and its subclasses.
+ *
+ * @param cls the class to get the instances of
+ * @return the instances
+ */
+ public static Set<Resource> getAllInstances(Resource cls) {
+ JenaUtil.setGraphReadOptimization(true);
+ try {
+ Model model = cls.getModel();
+ Set<Resource> classes = getAllSubClasses(cls);
+ classes.add(cls);
+ Set<Resource> results = new HashSet<>();
+ for (Resource subClass : classes) {
+ StmtIterator it = model.listStatements(null, RDF.type, subClass);
+ while (it.hasNext()) {
+ results.add(it.next().getSubject());
+ }
+ }
+ return results;
+ } finally {
+ JenaUtil.setGraphReadOptimization(false);
+ }
+ }
+
+
+ public static Set<Resource> getAllSubClasses(Resource cls) {
+ return getAllTransitiveSubjects(cls, RDFS.subClassOf);
+ }
+
+
+ /**
+ * Returns a set consisting of a given class and all its subclasses.
+ * Similar to rdfs:subClassOf*.
+ *
+ * @param cls the class to return with its subclasses
+ * @return the Set of class resources
+ */
+ public static Set<Resource> getAllSubClassesStar(Resource cls) {
+ Set<Resource> results = getAllTransitiveSubjects(cls, RDFS.subClassOf);
+ results.add(cls);
+ return results;
+ }
+
+
+ public static Set<Resource> getAllSubProperties(Property superProperty) {
+ return getAllTransitiveSubjects(superProperty, RDFS.subPropertyOf);
+ }
+
+
+ public static Set<Resource> getAllSuperClasses(Resource cls) {
+ return getAllTransitiveObjects(cls, RDFS.subClassOf);
+ }
+
+
+ /**
+ * Returns a set consisting of a given class and all its superclasses.
+ * Similar to rdfs:subClassOf*.
+ *
+ * @param cls the class to return with its superclasses
+ * @return the Set of class resources
+ */
+ public static Set<Resource> getAllSuperClassesStar(Resource cls) {
+ Set<Resource> results = getAllTransitiveObjects(cls, RDFS.subClassOf);
+ results.add(cls);
+ return results;
+ }
+
+
+ public static Set<Resource> getAllSuperProperties(Property subProperty) {
+ return getAllTransitiveObjects(subProperty, RDFS.subPropertyOf);
+ }
+
+
+ /**
+ * Returns a set of resources reachable from a subject via one or more steps with a given predicate.
+ *
+ * @param subject the subject to start at
+ * @param predicate the predicate to traverse
+ * @return the reached resources
+ */
+ public static Set<Resource> getAllTransitiveObjects(Resource subject, Property predicate) {
+ Set<Resource> set = new HashSet<>();
+ addTransitiveObjects(set, subject, predicate);
+ set.remove(subject);
+ return set;
+ }
+
+
+ private static Set<Resource> getAllTransitiveSubjects(Resource object, Property predicate) {
+ return getAllTransitiveSubjects(object, predicate, null);
+ }
+
+
+ public static Set<Resource> getAllTypes(Resource instance) {
+ Set<Resource> types = new HashSet<>();
+ StmtIterator it = instance.listProperties(RDF.type);
+ try {
+ while (it.hasNext()) {
+ Resource type = it.next().getResource();
+ types.add(type);
+ types.addAll(getAllSuperClasses(type));
+ }
+ } finally {
+ it.close();
+ }
+ return types;
+ }
+
+
+ /**
+ * Gets the "base graph" of a Model, walking into MultiUnions if needed.
+ *
+ * @param model the Model to get the base graph of
+ * @return the base graph or null if the model contains a MultiUnion that doesn't declare one
+ */
+ public static Graph getBaseGraph(final Model model) {
+ return getBaseGraph(model.getGraph());
+ }
+
+
+ public static Graph getBaseGraph(Graph graph) {
+ Graph baseGraph = graph;
+ while (baseGraph instanceof MultiUnion) {
+ baseGraph = ((MultiUnion) baseGraph).getBaseGraph();
+ }
+ return baseGraph;
+ }
+
+
+ public static Model getBaseModel(Model model) {
+ Graph baseGraph = getBaseGraph(model);
+ if (baseGraph == model.getGraph()) {
+ return model;
+ } else {
+ return ModelFactory.createModelForGraph(baseGraph);
+ }
+ }
+
+
+ /**
+ * For a given subject resource and a given collection of (label/comment) properties this finds the most
+ * suitable value of either property for a given list of languages (usually from the current user's preferences).
+ * For example, if the user's languages are [ "en-AU" ] then the function will prefer "mate"@en-AU over
+ * "friend"@en and never return "freund"@de. The function falls back to literals that have no language
+ * if no better literal has been found.
+ *
+ * @param resource the subject resource
+ * @param langs the allowed languages
+ * @param properties the properties to check
+ * @return the best suitable value or null
+ */
+ public static Literal getBestStringLiteral(Resource resource, List<String> langs, Iterable<Property> properties) {
+ return getBestStringLiteral(resource, langs, properties, (r, p) -> r.listProperties(p));
+ }
+
+
+ public static Literal getBestStringLiteral(Resource resource, List<String> langs, Iterable<Property> properties, BiFunction<Resource, Property, ExtendedIterator<Statement>> getter) {
+ String prefLang = langs.isEmpty() ? null : langs.get(0);
+ Literal label = null;
+ int bestLang = -1;
+ for (Property predicate : properties) {
+ ExtendedIterator<Statement> it = getter.apply(resource, predicate);
+ while (it.hasNext()) {
+ RDFNode object = it.next().getObject();
+ if (object.isLiteral()) {
+ Literal literal = (Literal) object;
+ String lang = literal.getLanguage();
+ if (lang.length() == 0 && label == null) {
+ label = literal;
+ } else if (prefLang != null && prefLang.equalsIgnoreCase(lang)) {
+ it.close();
+ return literal;
+ } else {
+ // 1) Never use a less suitable language
+ // 2) Never replace an already existing label (esp: skos:prefLabel) unless new lang is better
+ // 3) Fall back to more special languages if no other was found (e.g. use en-GB if only "en" is accepted)
+ int startLang = bestLang < 0 ? langs.size() - 1 : (label != null ? bestLang - 1 : bestLang);
+ for (int i = startLang; i > 0; i--) {
+ String langi = langs.get(i);
+ if (langi.equalsIgnoreCase(lang)) {
+ label = literal;
+ bestLang = i;
+ } else if (label == null && lang.contains("-") && NodeFunctions.langMatches(lang, langi)) {
+ label = literal;
+ }
+ }
+ }
+ }
+ }
+ }
+ return label;
+ }
+
+
+ /**
+ * Gets the "first" declared rdfs:range of a given property.
+ * If multiple ranges exist, the behavior is undefined.
+ * Note that this method does not consider ranges defined on
+ * super-properties.
+ *
+ * @param property the property to get the range of
+ * @return the "first" range Resource or null
+ */
+ public static Resource getFirstDirectRange(Resource property) {
+ return property.getPropertyResourceValue(RDFS.range);
+ }
+
+
+ private static Resource getFirstRange(Resource property, Set reached) {
+ Resource directRange = getFirstDirectRange(property);
+ if (directRange != null) {
+ return directRange;
+ }
+ StmtIterator it = property.listProperties(RDFS.subPropertyOf);
+ while (it.hasNext()) {
+ Statement ss = it.next();
+ if (ss.getObject().isURIResource()) {
+ Resource superProperty = ss.getResource();
+ if (!reached.contains(superProperty)) {
+ reached.add(superProperty);
+ Resource r = getFirstRange(superProperty, reached);
+ if (r != null) {
+ it.close();
+ return r;
+ }
+ }
+ }
+ }
+ return null;
+ }
+
+
+ /**
+ * Gets the "first" declared rdfs:range of a given property.
+ * If multiple ranges exist, the behavior is undefined.
+ * This method walks up to super-properties if no direct match exists.
+ *
+ * @param property the property to get the range of
+ * @return the "first" range Resource or null
+ */
+ public static Resource getFirstRange(Resource property) {
+ return getFirstRange(property, new HashSet<>());
+ }
+
+
+ public static Set getImports(Resource graph) {
+ Set results = new HashSet<>();
+ for (Property importProperty : ImportProperties.get().getImportProperties()) {
+ results.addAll(JenaUtil.getResourceProperties(graph, importProperty));
+ }
+ return results;
+ }
+
+
+ public static Integer getIntegerProperty(Resource subject, Property predicate) {
+ Statement s = subject.getProperty(predicate);
+ if (s != null && s.getObject().isLiteral()) {
+ return s.getInt();
+ } else {
+ return null;
+ }
+ }
+
+
+ public static RDFList getListProperty(Resource subject, Property predicate) {
+ Statement s = subject.getProperty(predicate);
+ if (s != null && s.getObject().canAs(RDFList.class)) {
+ return s.getResource().as(RDFList.class);
+ } else {
+ return null;
+ }
+ }
+
+
+ public static List getLiteralProperties(Resource subject, Property predicate) {
+ List results = new LinkedList<>();
+ StmtIterator it = subject.listProperties(predicate);
+ while (it.hasNext()) {
+ Statement s = it.next();
+ if (s.getObject().isLiteral()) {
+ results.add(s.getLiteral());
+ }
+ }
+ return results;
+ }
+
+
+ /**
+ * Walks up the class hierarchy starting at a given class until one of them
+ * returns a value for a given Function.
+ *
+ * @param cls the class to start at
+ * @param function the Function to execute on each class
+ * @param the requested result type
+ * @return the "first" non-null value, or null
+ */
+ public static T getNearest(Resource cls, java.util.function.Function function) {
+ T result = function.apply(cls);
+ if (result != null) {
+ return result;
+ }
+ return getNearest(cls, function, new HashSet<>());
+ }
+
+
+ private static T getNearest(Resource cls, java.util.function.Function function, Set reached) {
+ reached.add(cls);
+ StmtIterator it = cls.listProperties(RDFS.subClassOf);
+ while (it.hasNext()) {
+ Statement s = it.next();
+ if (s.getObject().isResource() && !reached.contains(s.getResource())) {
+ T result = function.apply(s.getResource());
+ if (result == null) {
+ result = getNearest(s.getResource(), function, reached);
+ }
+ if (result != null) {
+ it.close();
+ return result;
+ }
+ }
+ }
+ return null;
+ }
+
+
+ /**
+ * Overcomes a design mismatch with Jena: if the base model does not declare a default namespace then the
+ * default namespace of an import is returned - this is not desirable for TopBraid-like scenarios.
+ *
+ * @param model the Model to operate on
+ * @param prefix the prefix to get the URI of
+ * @return the URI of prefix
+ */
+ public static String getNsPrefixURI(Model model, String prefix) {
+ if ("".equals(prefix) && model.getGraph() instanceof MultiUnion) {
+ Graph baseGraph = ((MultiUnion) model.getGraph()).getBaseGraph();
+ if (baseGraph != null) {
+ return baseGraph.getPrefixMapping().getNsPrefixURI(prefix);
+ } else {
+ return model.getNsPrefixURI(prefix);
+ }
+ } else {
+ return model.getNsPrefixURI(prefix);
+ }
+ }
+
+
+ public static RDFNode getProperty(Resource subject, Property predicate) {
+ Statement s = subject.getProperty(predicate);
+ if (s != null) {
+ return s.getObject();
+ } else {
+ return null;
+ }
+ }
+
+
+ public static Resource getResourcePropertyWithType(Resource subject, Property predicate, Resource type) {
+ StmtIterator it = subject.listProperties(predicate);
+ while (it.hasNext()) {
+ Statement s = it.next();
+ if (s.getObject().isResource() && JenaUtil.hasIndirectType(s.getResource(), type)) {
+ it.close();
+ return s.getResource();
+ }
+ }
+ return null;
+ }
+
+
+ public static List getResourceProperties(Resource subject, Property predicate) {
+ List results = new LinkedList<>();
+ StmtIterator it = subject.listProperties(predicate);
+ while (it.hasNext()) {
+ Statement s = it.next();
+ if (s.getObject().isResource()) {
+ results.add(s.getResource());
+ }
+ }
+ return results;
+ }
+
+
+ public static Resource getURIResourceProperty(Resource subject, Property predicate) {
+ Statement s = subject.getProperty(predicate);
+ if (s != null && s.getObject().isURIResource()) {
+ return s.getResource();
+ } else {
+ return null;
+ }
+ }
+
+
+ public static List getURIResourceProperties(Resource subject, Property predicate) {
+ List results = new LinkedList<>();
+ StmtIterator it = subject.listProperties(predicate);
+ while (it.hasNext()) {
+ Statement s = it.next();
+ if (s.getObject().isURIResource()) {
+ results.add(s.getResource());
+ }
+ }
+ return results;
+ }
+
+
+ public static String getStringProperty(Resource subject, Property predicate) {
+ Statement s = subject.getProperty(predicate);
+ if (s != null && s.getObject().isLiteral()) {
+ return s.getString();
+ } else {
+ return null;
+ }
+ }
+
+
+ public static boolean getBooleanProperty(Resource subject, Property predicate) {
+ Statement s = subject.getProperty(predicate);
+ if (s != null && s.getObject().isLiteral()) {
+ return s.getBoolean();
+ } else {
+ return false;
+ }
+ }
+
+
+ public static Double getDoubleProperty(Resource subject, Property predicate) {
+ Statement s = subject.getProperty(predicate);
+ if (s != null && s.getObject().isLiteral()) {
+ return s.getDouble();
+ } else {
+ return null;
+ }
+ }
+
+
+ public static double getDoubleProperty(Resource subject, Property predicate, double defaultValue) {
+ Double d = getDoubleProperty(subject, predicate);
+ if (d != null) {
+ return d;
+ } else {
+ return defaultValue;
+ }
+ }
+
+
+ public static List getSubGraphs(MultiUnion union) {
+ List results = new LinkedList<>();
+ Graph baseGraph = union.getBaseGraph();
+ if (baseGraph != null) {
+ results.add(baseGraph);
+ }
+ results.addAll(union.getSubGraphs());
+ return results;
+ }
+
+
+ /**
+ * Gets a Set of all superclasses (rdfs:subClassOf) of a given Resource.
+ *
+ * @param subClass the subClass Resource
+ * @return a Collection of class resources
+ */
+ public static Collection getSuperClasses(Resource subClass) {
+ NodeIterator it = subClass.getModel().listObjectsOfProperty(subClass, RDFS.subClassOf);
+ Set results = new HashSet<>();
+ while (it.hasNext()) {
+ RDFNode node = it.nextNode();
+ if (node instanceof Resource) {
+ results.add((Resource) node);
+ }
+ }
+ return results;
+ }
+
+
+ /**
+ * Gets the "first" type of a given Resource.
+ *
+ * @param instance the instance to get the type of
+ * @return the type or null
+ */
+ public static Resource getType(Resource instance) {
+ return instance.getPropertyResourceValue(RDF.type);
+ }
+
+
+ /**
+ * Gets a Set of all rdf:types of a given Resource.
+ *
+ * @param instance the instance Resource
+ * @return a Collection of type resources
+ */
+ public static List getTypes(Resource instance) {
+ return JenaUtil.getResourceProperties(instance, RDF.type);
+ }
+
+
+ /**
+ * Checks whether a given Resource is an instance of a given type, or
+ * a subclass thereof. Make sure that the expectedType parameter is associated
+ * with the right Model, because the system will try to walk up the superclasses
+ * of expectedType. The expectedType may have no Model, in which case
+ * the method will use the instance's Model.
+ *
+ * @param instance the Resource to test
+ * @param expectedType the type that instance is expected to have
+ * @return true if resource has rdf:type expectedType
+ */
+ public static boolean hasIndirectType(Resource instance, Resource expectedType) {
+
+ if (expectedType.getModel() == null) {
+ expectedType = expectedType.inModel(instance.getModel());
+ }
+
+ StmtIterator it = instance.listProperties(RDF.type);
+ while (it.hasNext()) {
+ Statement s = it.next();
+ if (s.getObject().isResource()) {
+ Resource actualType = s.getResource();
+ if (actualType.equals(expectedType) || JenaUtil.hasSuperClass(actualType, expectedType)) {
+ it.close();
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
+
+ /**
+ * Checks whether a given class has a given (transitive) super class.
+ *
+ * @param subClass the sub-class
+ * @param superClass the super-class
+ * @return true if subClass has superClass (somewhere up the tree)
+ */
+ public static boolean hasSuperClass(Resource subClass, Resource superClass) {
+ return hasSuperClass(subClass, superClass, new HashSet<>());
+ }
+
+
+ private static boolean hasSuperClass(Resource subClass, Resource superClass, Set reached) {
+ StmtIterator it = subClass.listProperties(RDFS.subClassOf);
+ while (it.hasNext()) {
+ Statement s = it.next();
+ if (superClass.equals(s.getObject())) {
+ it.close();
+ return true;
+ } else if (!reached.contains(s.getResource())) {
+ reached.add(s.getResource());
+ if (hasSuperClass(s.getResource(), superClass, reached)) {
+ it.close();
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
+
+ /**
+ * Checks whether a given property has a given (transitive) super property.
+ *
+ * @param subProperty the sub-property
+ * @param superProperty the super-property
+ * @return true if subProperty has superProperty (somewhere up the tree)
+ */
+ public static boolean hasSuperProperty(Property subProperty, Property superProperty) {
+ return getAllSuperProperties(subProperty).contains(superProperty);
+ }
+
+
+ /**
+ * Sets the usual default namespaces for rdf, rdfs, owl and xsd.
+ *
+ * @param graph the Graph to modify
+ */
+ public static void initNamespaces(Graph graph) {
+ PrefixMapping prefixMapping = graph.getPrefixMapping();
+ initNamespaces(prefixMapping);
+ }
+
+
+ /**
+ * Sets the usual default namespaces for rdf, rdfs, owl and xsd.
+ *
+ * @param prefixMapping the Model to modify
+ */
+ public static void initNamespaces(PrefixMapping prefixMapping) {
+ ensurePrefix(prefixMapping, "rdf", RDF.getURI());
+ ensurePrefix(prefixMapping, "rdfs", RDFS.getURI());
+ ensurePrefix(prefixMapping, "owl", OWL.getURI());
+ ensurePrefix(prefixMapping, "xsd", XSD.getURI());
+ }
+
+ private static void ensurePrefix(PrefixMapping prefixMapping, String prefix, String uristr) {
+ // set if not present, or if different
+ if (!uristr.equals(prefixMapping.getNsPrefixURI(prefix))) {
+ prefixMapping.setNsPrefix(prefix, uristr);
+ }
+ }
+
+ /**
+ * Checks whether a given graph (possibly a MultiUnion) only contains
+ * GraphMemBase instances.
+ *
+ * @param graph the Graph to test
+ * @return true if graph is a memory graph
+ */
+ public static boolean isMemoryGraph(Graph graph) {
+ if (graph instanceof MultiUnion) {
+ for (Graph subGraph : JenaUtil.getSubGraphs((MultiUnion) graph)) {
+ if (!isMemoryGraph(subGraph)) {
+ return false;
+ }
+ }
+ return true;
+ } else {
+ return helper.isMemoryGraph(graph);
+ }
+ }
+
+
+ /**
+ * Gets an Iterator over all Statements of a given property or its sub-properties
+ * at a given subject instance. Note that the predicate and subject should be
+ * both attached to a Model to avoid NPEs.
+ *
+ * @param subject the subject (may be null)
+ * @param predicate the predicate
+ * @return a StmtIterator
+ */
+ public static StmtIterator listAllProperties(Resource subject, Property predicate) {
+ List results = new LinkedList<>();
+ helper.setGraphReadOptimization(true);
+ try {
+ listAllProperties(subject, predicate, new HashSet<>(), results);
+ } finally {
+ helper.setGraphReadOptimization(false);
+ }
+ return new StmtIteratorImpl(results.iterator());
+ }
+
+
+ private static void listAllProperties(Resource subject, Property predicate, Set reached,
+ List results) {
+ reached.add(predicate);
+ StmtIterator sit;
+ Model model;
+ if (subject != null) {
+ sit = subject.listProperties(predicate);
+ model = subject.getModel();
+ } else {
+ model = predicate.getModel();
+ sit = model.listStatements(null, predicate, (RDFNode) null);
+ }
+ while (sit.hasNext()) {
+ results.add(sit.next());
+ }
+
+ // Iterate into direct subproperties
+ StmtIterator it = model.listStatements(null, RDFS.subPropertyOf, predicate);
+ while (it.hasNext()) {
+ Statement sps = it.next();
+ if (!reached.contains(sps.getSubject())) {
+ Property subProperty = asProperty(sps.getSubject());
+ listAllProperties(subject, subProperty, reached, results);
+ }
+ }
+ }
+
+
+ /**
+ * This indicates that no further changes to the model are needed.
+ * Some implementations may give runtime exceptions if this is violated.
+ *
+ * @param m the Model to get as a read-only variant
+ * @return A read-only model
+ */
+ public static Model asReadOnlyModel(Model m) {
+ return helper.asReadOnlyModel(m);
+ }
+
+
+ /**
+ * This indicates that no further changes to the graph are needed.
+ * Some implementations may give runtime exceptions if this is violated.
+ *
+ * @param g the Graph to get as a read-only variant
+ * @return a read-only graph
+ */
+ public static Graph asReadOnlyGraph(Graph g) {
+ return helper.asReadOnlyGraph(g);
+ }
+
+
/**
 * Creates an OntModel for the given spec and base Model, delegating to the
 * environment-specific helper. Internal to TopBraid only.
 *
 * @param spec the OntModelSpec to use
 * @param base the base Model of the new OntModel
 * @return the new OntModel
 */
public static OntModel createOntologyModel(OntModelSpec spec, Model base) {
    return helper.createOntologyModel(spec, base);
}
+
+
/**
 * Allows some environments, e.g. TopBraid, to prioritize
 * a block of code for reading graphs, with no update occurring.
 * The top of the block should call this with {@code true},
 * with a matching call with {@code false} in a finally
 * block.
 *
 * Note: Unstable - don't use outside of TopBraid.
 *
 * @param onOrOff  true to switch on
 */
public static void setGraphReadOptimization(boolean onOrOff) {
    helper.setGraphReadOptimization(onOrOff);
}
+
+
/**
 * Ensures that there is a read-only, thread safe version of the
 * graph. If the graph is not, then create a deep clone that is
 * both.
 *
 * Note: Unstable - don't use outside of TopBraid.
 *
 * @param g The given graph
 * @return A read-only, thread safe version of the given graph.
 */
public static Graph deepCloneForReadOnlyThreadSafe(Graph g) {
    return helper.deepCloneReadOnlyGraph(g);
}
+
+
+ /**
+ * Calls a SPARQL expression and returns the result, using some initial bindings.
+ *
+ * @param expression the expression to execute (must contain absolute URIs)
+ * @param initialBinding the initial bindings for the unbound variables
+ * @param dataset the query Dataset or null for default
+ * @return the result or null
+ */
+ public static Node invokeExpression(String expression, QuerySolution initialBinding, Dataset dataset) {
+ if (dataset == null) {
+ dataset = ARQFactory.get().getDataset(ModelFactory.createDefaultModel());
+ }
+ Query query = ARQFactory.get().createExpressionQuery(expression);
+ try (QueryExecution qexec = ARQFactory.get().createQueryExecution(query, dataset, initialBinding)) {
+ ResultSet rs = qexec.execSelect();
+ Node result = null;
+ if (rs.hasNext()) {
+ QuerySolution qs = rs.next();
+ String firstVarName = rs.getResultVars().get(0);
+ RDFNode rdfNode = qs.get(firstVarName);
+ if (rdfNode != null) {
+ result = rdfNode.asNode();
+ }
+ }
+ return result;
+ }
+ }
+
+
+ /**
+ * Calls a given SPARQL function with no arguments.
+ *
+ * @param function the URI resource of the function to call
+ * @param dataset the Dataset to operate on or null for default
+ * @return the result of the function call
+ */
+ public static Node invokeFunction0(Resource function, Dataset dataset) {
+ ExprList args = new ExprList();
+ return invokeFunction(function, args, dataset);
+ }
+
+
+ /**
+ * Calls a given SPARQL function with one argument.
+ *
+ * @param function the URI resource of the function to call
+ * @param argument the first argument
+ * @param dataset the Dataset to operate on or null for default
+ * @return the result of the function call
+ */
+ public static Node invokeFunction1(Resource function, RDFNode argument, Dataset dataset) {
+ ExprList args = new ExprList();
+ args.add(argument != null ? NodeValue.makeNode(argument.asNode()) : new ExprVar("arg1"));
+ return invokeFunction(function, args, dataset);
+ }
+
+
+ public static Node invokeFunction1(Resource function, Node argument, Dataset dataset) {
+ return invokeFunction1(function, toRDFNode(argument), dataset);
+ }
+
+
+ /**
+ * Calls a given SPARQL function with two arguments.
+ *
+ * @param function the URI resource of the function to call
+ * @param argument1 the first argument
+ * @param argument2 the second argument
+ * @param dataset the Dataset to operate on or null for default
+ * @return the result of the function call
+ */
+ public static Node invokeFunction2(Resource function, RDFNode argument1, RDFNode argument2, Dataset dataset) {
+ ExprList args = new ExprList();
+ args.add(argument1 != null ? NodeValue.makeNode(argument1.asNode()) : new ExprVar("arg1"));
+ args.add(argument2 != null ? NodeValue.makeNode(argument2.asNode()) : new ExprVar("arg2"));
+ return invokeFunction(function, args, dataset);
+ }
+
+
+ public static Node invokeFunction2(Resource function, Node argument1, Node argument2, Dataset dataset) {
+ return invokeFunction2(function, toRDFNode(argument1), toRDFNode(argument2), dataset);
+ }
+
+
+ public static Node invokeFunction3(Resource function, RDFNode argument1, RDFNode argument2, RDFNode argument3, Dataset dataset) {
+ ExprList args = new ExprList();
+ args.add(argument1 != null ? NodeValue.makeNode(argument1.asNode()) : new ExprVar("arg1"));
+ args.add(argument2 != null ? NodeValue.makeNode(argument2.asNode()) : new ExprVar("arg2"));
+ args.add(argument3 != null ? NodeValue.makeNode(argument3.asNode()) : new ExprVar("arg3"));
+ return invokeFunction(function, args, dataset);
+ }
+
+
/**
 * Evaluates a SPARQL function call with the given argument expressions.
 *
 * @param function the URI resource of the function to call
 * @param args the argument expressions
 * @param dataset the Dataset to operate on, or null for an empty default
 * @return the result Node, or null if evaluation failed or produced no value
 */
private static Node invokeFunction(Resource function, ExprList args, Dataset dataset) {

    if (dataset == null) {
        // Fall back to an empty in-memory Dataset so evaluation can still proceed
        dataset = ARQFactory.get().getDataset(ModelFactory.createDefaultModel());
    }

    E_Function expr = new E_Function(function.getURI(), args);
    DatasetGraph dsg = dataset.asDatasetGraph();
    Context cxt = ARQ.getContext().copy();
    // Pin the current time in the context for this evaluation
    cxt.set(ARQConstants.sysCurrentTime, NodeFactoryExtra.nowAsDateTime());
    FunctionEnv env = new ExecutionContext(cxt, dsg.getDefaultGraph(), dsg, null);
    try {
        NodeValue r = expr.eval(BindingRoot.create(), env);
        if (r != null) {
            return r.asNode();
        }
    } catch (ExprEvalException ex) {
        // Deliberately ignored: SPARQL evaluation errors map to a null result
    }
    return null;
}
+
+
+ public static Node invokeFunction3(Resource function, Node argument1, Node argument2, Node argument3, Dataset dataset) {
+ return invokeFunction3(function, toRDFNode(argument1), toRDFNode(argument2), toRDFNode(argument3), dataset);
+ }
+
+
+ /**
+ * Temp patch for a bug in Jena's syntaxtransform, also applying substitutions on
+ * HAVING clauses.
+ *
+ * @param query the Query to transform
+ * @param substitutions the variable bindings
+ * @return a new Query with the bindings applied
+ */
+ public static Query queryWithSubstitutions(Query query, final Map substitutions) {
+ Query result = QueryTransformOps.transform(query, substitutions);
+
+ // TODO: Replace this hack once there is a Jena patch
+ if (result.hasHaving()) {
+ NodeTransform nodeTransform = new NodeTransform() {
+ @Override
+ public Node apply(Node node) {
+ Node n = substitutions.get(node);
+ if (n == null) {
+ return node;
+ }
+ return n;
+ }
+ };
+ ElementTransform eltrans = new ElementTransformSubst(substitutions);
+ ExprTransform exprTrans = new ExprTransformNodeElement(nodeTransform, eltrans);
+ List havingExprs = result.getHavingExprs();
+ for (int i = 0; i < havingExprs.size(); i++) {
+ Expr old = havingExprs.get(i);
+ Expr neo = ExprTransformer.transform(exprTrans, old);
+ if (neo != old) {
+ havingExprs.set(i, neo);
+ }
+ }
+ }
+ return result;
+ }
+
+
+ public static void sort(List nodes) {
+ Collections.sort(nodes, new Comparator() {
+ @Override
+ public int compare(Resource o1, Resource o2) {
+ return NodeCmp.compareRDFTerms(o1.asNode(), o2.asNode());
+ }
+ });
+ }
+
+
+ public static RDFNode toRDFNode(Node node) {
+ if (node != null) {
+ return dummyModel.asRDFNode(node);
+ } else {
+ return null;
+ }
+ }
+
+
+ public static String withImports(String uri) {
+ if (!uri.startsWith(WITH_IMPORTS_PREFIX)) {
+ return WITH_IMPORTS_PREFIX + uri;
+ } else {
+ return uri;
+ }
+ }
+
+
+ public static String withoutImports(String uri) {
+ if (uri.startsWith(WITH_IMPORTS_PREFIX)) {
+ return uri.substring(WITH_IMPORTS_PREFIX.length());
+ } else {
+ return uri;
+ }
+ }
}
diff --git a/src/main/java/org/topbraid/jenax/util/JenaUtilHelper.java b/src/main/java/org/topbraid/jenax/util/JenaUtilHelper.java
index 801a8e92..472aad57 100644
--- a/src/main/java/org/topbraid/jenax/util/JenaUtilHelper.java
+++ b/src/main/java/org/topbraid/jenax/util/JenaUtilHelper.java
@@ -18,7 +18,6 @@
import java.util.Iterator;
-import org.apache.jena.graph.Factory;
import org.apache.jena.graph.Graph;
import org.apache.jena.graph.compose.MultiUnion;
import org.apache.jena.mem.GraphMemBase;
@@ -26,6 +25,7 @@
import org.apache.jena.ontology.OntModelSpec;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
+import org.apache.jena.sparql.graph.GraphFactory;
/**
* This is an extension point for the SPIN library
@@ -72,7 +72,7 @@ public MultiUnion createMultiUnion(Graph[] graphs) {
* @return the default Graph
*/
public Graph createDefaultGraph() {
- return Factory.createDefaultGraph();
+ return GraphFactory.createDefaultGraph();
}
diff --git a/src/main/java/org/topbraid/jenax/util/PrefixUtils.java b/src/main/java/org/topbraid/jenax/util/PrefixUtils.java
index 48267f9b..3b4c4695 100644
--- a/src/main/java/org/topbraid/jenax/util/PrefixUtils.java
+++ b/src/main/java/org/topbraid/jenax/util/PrefixUtils.java
@@ -9,7 +9,7 @@ public class PrefixUtils {
/**
* Make the {@code dstGraph} prefix map the same {@code srcGraph} prefix map,
- * only making chnages where necessary.
+ * only making changes where necessary.
* @param dstGraph the destination graph
* @param srcGraph the source graph
* @return false if no changes where made.
diff --git a/src/main/java/org/topbraid/jenax/util/QueryExecutionFactoryFilter.java b/src/main/java/org/topbraid/jenax/util/QueryExecutionFactoryFilter.java
index 65da64b2..5cbeda87 100644
--- a/src/main/java/org/topbraid/jenax/util/QueryExecutionFactoryFilter.java
+++ b/src/main/java/org/topbraid/jenax/util/QueryExecutionFactoryFilter.java
@@ -16,151 +16,150 @@
*/
package org.topbraid.jenax.util;
-import java.net.http.HttpClient;
-import java.time.format.DateTimeFormatter;
-import java.util.List;
-
-import org.apache.jena.atlas.lib.DateTimeUtils ;
-import org.apache.jena.query.Dataset;
-import org.apache.jena.query.Query;
-import org.apache.jena.query.QueryExecution;
-import org.apache.jena.query.QueryExecutionFactory;
-import org.apache.jena.query.QuerySolution;
+import org.apache.jena.atlas.lib.DateTimeUtils;
+import org.apache.jena.query.*;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.sparql.exec.http.QueryExecutionHTTP;
import org.apache.jena.sparql.exec.http.QueryExecutionHTTPBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.net.http.HttpClient;
+import java.time.format.DateTimeFormatter;
+import java.util.List;
+
public class QueryExecutionFactoryFilter {
- static final String LOG_NAME = "QueryLog";
- private Logger logger;
- private static QueryExecutionFactoryFilter singleton = new QueryExecutionFactoryFilter();
-
- // ---- Support for controlling printing queries while running. See function "printQuery".
- private static boolean PRINT = false;
+ static final String LOG_NAME = "QueryLog";
+ private Logger logger;
+ private static QueryExecutionFactoryFilter singleton = new QueryExecutionFactoryFilter();
+
+ // ---- Support for controlling printing queries while running. See function "printQuery".
+ private static boolean PRINT = false;
// ---- Support for controlling printing queries while running.
-
- /**
- * Gets the singleton instance of this class.
- * @return the singleton
- */
- public static QueryExecutionFactoryFilter get() {
- return singleton;
- }
-
- private QueryExecutionFactoryFilter() {
- logger = LoggerFactory.getLogger(LOG_NAME);
- }
-
- public QueryExecution create(Query query, Model model) {
- analyzeRequest(query, model, null);
- return QueryExecutionFactory.create(query, model);
- }
-
- public QueryExecution create(Query query, Model model, QuerySolution initialBinding) {
- analyzeRequest(query, model, initialBinding);
- return QueryExecution.create()
- .query(query)
- .model(model)
- .initialBinding(initialBinding)
- .build();
- }
-
- public QueryExecution create(Query query, Dataset dataset) {
- analyzeRequest(query, dataset, null);
- return QueryExecutionFactory.create(query, dataset);
- }
-
- public QueryExecution create(Query query, Dataset dataset, QuerySolution initialBinding) {
- analyzeRequest(query, dataset, initialBinding);
- return QueryExecutionFactory.create(query, dataset, initialBinding);
- }
-
- public QueryExecution sparqlService(String service, Query query) {
- return sparqlServiceBuilder(service, query).build();
- }
-
+
+ /**
+ * Gets the singleton instance of this class.
+ *
+ * @return the singleton
+ */
+ public static QueryExecutionFactoryFilter get() {
+ return singleton;
+ }
+
+ private QueryExecutionFactoryFilter() {
+ logger = LoggerFactory.getLogger(LOG_NAME);
+ }
+
+ public QueryExecution create(Query query, Model model) {
+ analyzeRequest(query, model, null);
+ return QueryExecutionFactory.create(query, model);
+ }
+
+ public QueryExecution create(Query query, Model model, QuerySolution initialBinding) {
+ analyzeRequest(query, model, initialBinding);
+ return QueryExecution.create()
+ .query(query)
+ .model(model)
+ .initialBinding(initialBinding)
+ .build();
+ }
+
+ public QueryExecution create(Query query, Dataset dataset) {
+ analyzeRequest(query, dataset, null);
+ return QueryExecutionFactory.create(query, dataset);
+ }
+
+ public QueryExecution create(Query query, Dataset dataset, QuerySolution querySolution) {
+ analyzeRequest(query, dataset, querySolution);
+ return QueryExecution.dataset(dataset).query(query).substitution(querySolution).build();
+ }
+
+ public QueryExecution sparqlService(String service, Query query) {
+ return sparqlServiceBuilder(service, query).build();
+ }
+
public QueryExecution sparqlService(String service, Query query, HttpClient httpClient) {
- return sparqlServiceBuilder(service, query, httpClient).build();
+ return sparqlServiceBuilder(service, query, httpClient).build();
+ }
+
+ public QueryExecutionHTTP sparqlService(String service, Query query, HttpClient httpClient, List defaultGraphURIs, List namedGraphURIs) {
+ QueryExecutionHTTPBuilder builder = sparqlServiceBuilder(service, query, httpClient);
+ defaultGraphURIs.forEach(builder::addDefaultGraphURI);
+ namedGraphURIs.forEach(builder::addNamedGraphURI);
+ return builder.build();
}
- public QueryExecutionHTTP sparqlService(String service, Query query, HttpClient httpClient, List defaultGraphURIs, List namedGraphURIs) {
- QueryExecutionHTTPBuilder builder = sparqlServiceBuilder(service, query, httpClient);
- defaultGraphURIs.forEach(uri -> builder.addDefaultGraphURI(uri));
- namedGraphURIs.forEach(uri -> builder.addNamedGraphURI(uri));
- return builder.build();
- }
-
private QueryExecutionHTTPBuilder sparqlServiceBuilder(String service, Query query, HttpClient httpClient) {
- return sparqlServiceBuilder(service, query).httpClient(httpClient);
- }
-
+ return sparqlServiceBuilder(service, query).httpClient(httpClient);
+ }
+
private QueryExecutionHTTPBuilder sparqlServiceBuilder(String service, Query query) {
- return QueryExecution.service(service).query(query);
+ return QueryExecution.service(service).query(query);
}
-
- private void analyzeRequest(Query query, Model model, QuerySolution initialBinding) {
+
+ private void analyzeRequest(Query query, Model model, QuerySolution initialBinding) {
printQuery(query, initialBinding);
- if(logger.isTraceEnabled()) {
- logger.trace("QUERY[" + analyzeQuery(query)
- + "]\nMODEL[" + analyzeModel(model) + "]"
- + serializeBindings(initialBinding));
- }
- }
-
- private void analyzeRequest(Query query, Dataset dataset, QuerySolution initialBinding) {
- printQuery(query, initialBinding);
-
- if(logger.isTraceEnabled()) {
- logger.trace("QUERY[" + analyzeQuery(query)
- + "]\nDATASET[" + analyzeDataset(dataset) + "]"
- + serializeBindings(initialBinding));
- }
- }
-
- private static final DateTimeFormatter timestamp = DateTimeFormatter.ofPattern("HH:mm:ss.SSS");
- // Development support. Dynmically controlled print query.
- private void printQuery(Query query, QuerySolution initialBinding) {
- if ( PRINT ) {
- String time = DateTimeUtils.nowAsString(timestamp);
+ if (logger.isTraceEnabled()) {
+ logger.trace("QUERY[" + analyzeQuery(query)
+ + "]\nMODEL[" + analyzeModel(model) + "]"
+ + serializeBindings(initialBinding));
+ }
+ }
+
+ private void analyzeRequest(Query query, Dataset dataset, QuerySolution initialBinding) {
+ printQuery(query, initialBinding);
+
+ if (logger.isTraceEnabled()) {
+ logger.trace("QUERY[" + analyzeQuery(query)
+ + "]\nDATASET[" + analyzeDataset(dataset) + "]"
+ + serializeBindings(initialBinding));
+ }
+ }
+
+ private static final DateTimeFormatter timestamp = DateTimeFormatter.ofPattern("HH:mm:ss.SSS");
+
+ // Development support. Dynamically controlled print query.
+ private void printQuery(Query query, QuerySolution initialBinding) {
+ if (PRINT) {
+ String time = DateTimeUtils.nowAsString(timestamp);
System.err.print("~~ ");
System.err.print(time);
System.err.println(" ~~");
System.err.println(initialBinding);
System.err.print(query);
}
- }
+ }
/**
* Allow query printing to be switched on/off around specific sections of code that
* are issuing queries.
- * @param value true to enable
+ *
+ * @param value true to enable
*/
public static void enableQueryPrinting(boolean value) {
PRINT = value;
}
-
- private String serializeBindings(QuerySolution bindings) {
- if(bindings == null) return "";
- return "\nINITIAL BINDINGS[" + bindings.toString() + "]";
- }
-
- private String analyzeQuery(Query query) {
- if(query == null) return "null query";
- return query.toString();
- }
-
- private String analyzeModel(Model model) {
- if(model == null) return "null model";
-
- return "this space for rent";
- }
-
- private String analyzeDataset(Dataset dataset) {
- if(dataset == null) return "null dataset";
-
- return "A Dataset";
- }
+
+ private String serializeBindings(QuerySolution bindings) {
+ if (bindings == null) return "";
+ return "\nINITIAL BINDINGS[" + bindings.toString() + "]";
+ }
+
+ private String analyzeQuery(Query query) {
+ if (query == null) return "null query";
+ return query.toString();
+ }
+
+ private String analyzeModel(Model model) {
+ if (model == null) return "null model";
+
+ return "this space for rent";
+ }
+
+ private String analyzeDataset(Dataset dataset) {
+ if (dataset == null) return "null dataset";
+
+ return "A Dataset";
+ }
}
diff --git a/src/main/java/org/topbraid/jenax/util/RDFLabels.java b/src/main/java/org/topbraid/jenax/util/RDFLabels.java
index 161687b8..9a3a3de6 100644
--- a/src/main/java/org/topbraid/jenax/util/RDFLabels.java
+++ b/src/main/java/org/topbraid/jenax/util/RDFLabels.java
@@ -30,7 +30,7 @@
/**
* A singleton that is used to render resources into strings.
- * By default this displays qnames (if possible).
+ * By default, this displays qnames (if possible).
* Can be changed, for example, to switch to displaying rdfs:labels
* instead of qnames etc.
*
diff --git a/src/main/java/org/topbraid/jenax/util/SystemTriples.java b/src/main/java/org/topbraid/jenax/util/SystemTriples.java
index 6dad3c2e..3b3401c6 100644
--- a/src/main/java/org/topbraid/jenax/util/SystemTriples.java
+++ b/src/main/java/org/topbraid/jenax/util/SystemTriples.java
@@ -17,11 +17,6 @@
package org.topbraid.jenax.util;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.jena.datatypes.xsd.impl.XMLLiteralType;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.rdf.model.StmtIterator;
@@ -31,92 +26,97 @@
import org.apache.jena.vocabulary.RDFS;
import org.apache.jena.vocabulary.XSD;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.List;
+
/**
* Provides access to the RDF/RDFS/OWL system triples.
- *
+ *
* TopBraid and this API adds some extra triples (such as declaring
* superclasses for each system class) that make life easier.
- *
+ *
* @author Holger Knublauch
*/
public class SystemTriples {
- private static Model vocabulary;
-
-
- private static void ensureSuperClasses(Resource metaClass, Resource superClass) {
- List toAdd = collectMissingSuperClasses(metaClass, superClass);
- for (Resource c: toAdd) {
- vocabulary.add(c, RDFS.subClassOf, superClass);
- }
- }
-
-
- private static List collectMissingSuperClasses(Resource metaClass,
- Resource superClass) {
- List toAdd = new ArrayList();
- StmtIterator it = vocabulary.listStatements(null, RDF.type, metaClass);
- while (it.hasNext()) {
- Resource c = it.nextStatement().getSubject();
- if (!c.equals(superClass)) {
- if (c.getProperty(RDFS.subClassOf) == null) {
- toAdd.add(c);
- }
- }
- }
- return toAdd;
- }
-
-
- /**
- * Gets the system ontology (a shared copy).
- * @return the system ontology
- */
- public static synchronized Model getVocabularyModel() {
- if (vocabulary == null) {
- vocabulary = JenaUtil.createDefaultModel();
- org.topbraid.jenax.util.JenaUtil.initNamespaces(vocabulary.getGraph());
- vocabulary.setNsPrefix("xsd", XSD.getURI());
- InputStream ttl = SystemTriples.class.getResourceAsStream("/rdf/system-triples.ttl");
- vocabulary.read(ttl, "urn:x:dummy", FileUtils.langTurtle);
- ensureSuperClasses(RDFS.Class, RDFS.Resource);
- ensureSuperClasses(OWL.Class, OWL.Thing);
-
- // Remove owl imports rdfs which only causes trouble
- vocabulary.removeAll(null, OWL.imports, null);
-
- vocabulary.add(OWL.Thing, RDFS.subClassOf, RDFS.Resource);
- vocabulary.add(OWL.inverseOf, RDF.type, OWL.SymmetricProperty);
- vocabulary.add(OWL.equivalentClass, RDF.type, OWL.SymmetricProperty);
- vocabulary.add(OWL.equivalentProperty, RDF.type, OWL.SymmetricProperty);
- vocabulary.add(OWL.equivalentProperty, RDFS.range, RDF.Property);
- vocabulary.add(OWL.differentFrom, RDF.type, OWL.SymmetricProperty);
- vocabulary.add(OWL.sameAs, RDF.type, OWL.SymmetricProperty);
- vocabulary.add(OWL.disjointWith, RDF.type, OWL.SymmetricProperty);
- Resource xml = vocabulary.getResource(XMLLiteralType.theXMLLiteralType.getURI());
- vocabulary.add(xml, RDFS.subClassOf, RDFS.Resource);
- for(String uri : JenaDatatypes.getDatatypeURIs()) {
- Resource r = vocabulary.getResource(uri);
- if (r.getProperty(RDF.type) == null) {
- vocabulary.add(r, RDF.type, RDFS.Datatype);
- vocabulary.add(r, RDFS.subClassOf, RDFS.Literal);
- }
- }
-
- // vocabulary.add(RDF.HTML, RDFS.label, "HTML");
-
- // Triples were formally in OWL 1, but dropped from OWL 2
- vocabulary.add(RDFS.comment, RDF.type, OWL.AnnotationProperty);
- vocabulary.add(RDFS.label, RDF.type, OWL.AnnotationProperty);
- vocabulary.add(RDFS.isDefinedBy, RDF.type, OWL.AnnotationProperty);
- vocabulary.add(RDFS.seeAlso, RDF.type, OWL.AnnotationProperty);
-
- // Add rdfs:labels for XSD types
- for(Resource datatype : vocabulary.listSubjectsWithProperty(RDF.type, RDFS.Datatype).toList()) {
- datatype.addProperty(RDFS.label, datatype.getLocalName());
+ private static Model vocabulary;
+
+
+ private static void ensureSuperClasses(Resource metaClass, Resource superClass) {
+ List toAdd = collectMissingSuperClasses(metaClass, superClass);
+ for (Resource c : toAdd) {
+ vocabulary.add(c, RDFS.subClassOf, superClass);
+ }
+ }
+
+
+ private static List collectMissingSuperClasses(Resource metaClass,
+ Resource superClass) {
+ List toAdd = new ArrayList<>();
+ StmtIterator it = vocabulary.listStatements(null, RDF.type, metaClass);
+ while (it.hasNext()) {
+ Resource c = it.nextStatement().getSubject();
+ if (!c.equals(superClass)) {
+ if (c.getProperty(RDFS.subClassOf) == null) {
+ toAdd.add(c);
+ }
+ }
+ }
+ return toAdd;
+ }
+
+
+ /**
+ * Gets the system ontology (a shared copy).
+ *
+ * @return the system ontology
+ */
+ public static synchronized Model getVocabularyModel() {
+ if (vocabulary == null) {
+ vocabulary = JenaUtil.createDefaultModel();
+ org.topbraid.jenax.util.JenaUtil.initNamespaces(vocabulary.getGraph());
+ vocabulary.setNsPrefix("xsd", XSD.getURI());
+ InputStream ttl = SystemTriples.class.getResourceAsStream("/rdf/system-triples.ttl");
+ vocabulary.read(ttl, "urn:x:dummy", FileUtils.langTurtle);
+ ensureSuperClasses(RDFS.Class, RDFS.Resource);
+ ensureSuperClasses(OWL.Class, OWL.Thing);
+
+ // Remove owl imports rdfs which only causes trouble
+ vocabulary.removeAll(null, OWL.imports, null);
+
+ vocabulary.add(OWL.Thing, RDFS.subClassOf, RDFS.Resource);
+ vocabulary.add(OWL.inverseOf, RDF.type, OWL.SymmetricProperty);
+ vocabulary.add(OWL.equivalentClass, RDF.type, OWL.SymmetricProperty);
+ vocabulary.add(OWL.equivalentProperty, RDF.type, OWL.SymmetricProperty);
+ vocabulary.add(OWL.equivalentProperty, RDFS.range, RDF.Property);
+ vocabulary.add(OWL.differentFrom, RDF.type, OWL.SymmetricProperty);
+ vocabulary.add(OWL.sameAs, RDF.type, OWL.SymmetricProperty);
+ vocabulary.add(OWL.disjointWith, RDF.type, OWL.SymmetricProperty);
+ Resource xml = vocabulary.getResource(RDF.dtXMLLiteral.getURI());
+ vocabulary.add(xml, RDFS.subClassOf, RDFS.Resource);
+ for (String uri : JenaDatatypes.getDatatypeURIs()) {
+ Resource r = vocabulary.getResource(uri);
+ if (r.getProperty(RDF.type) == null) {
+ vocabulary.add(r, RDF.type, RDFS.Datatype);
+ vocabulary.add(r, RDFS.subClassOf, RDFS.Literal);
+ }
+ }
+
+ // vocabulary.add(RDF.HTML, RDFS.label, "HTML");
+
+        // Triples were formerly in OWL 1, but dropped from OWL 2
+ vocabulary.add(RDFS.comment, RDF.type, OWL.AnnotationProperty);
+ vocabulary.add(RDFS.label, RDF.type, OWL.AnnotationProperty);
+ vocabulary.add(RDFS.isDefinedBy, RDF.type, OWL.AnnotationProperty);
+ vocabulary.add(RDFS.seeAlso, RDF.type, OWL.AnnotationProperty);
+
+ // Add rdfs:labels for XSD types
+ for (Resource datatype : vocabulary.listSubjectsWithProperty(RDF.type, RDFS.Datatype).toList()) {
+ datatype.addProperty(RDFS.label, datatype.getLocalName());
}
- vocabulary = JenaUtil.asReadOnlyModel(vocabulary);
- }
- return vocabulary;
- }
+ vocabulary = JenaUtil.asReadOnlyModel(vocabulary);
+ }
+ return vocabulary;
+ }
}
diff --git a/src/main/java/org/topbraid/shacl/arq/SHACLARQFunction.java b/src/main/java/org/topbraid/shacl/arq/SHACLARQFunction.java
index 4be99e60..e5abc50c 100644
--- a/src/main/java/org/topbraid/shacl/arq/SHACLARQFunction.java
+++ b/src/main/java/org/topbraid/shacl/arq/SHACLARQFunction.java
@@ -17,11 +17,6 @@
package org.topbraid.shacl.arq;
-import java.io.ByteArrayOutputStream;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
import org.apache.jena.atlas.io.IndentedWriter;
import org.apache.jena.graph.Graph;
import org.apache.jena.graph.Node;
@@ -54,192 +49,184 @@
import org.topbraid.shacl.model.SHParameterizable;
import org.topbraid.shacl.vocabulary.DASH;
+import java.io.ByteArrayOutputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
/**
* An ARQ function that is based on a SHACL function definition.
- *
+ *
* @author Holger Knublauch
*/
public abstract class SHACLARQFunction implements org.apache.jena.sparql.function.Function, OptionalArgsFunction, DeclarativeFunctionFactory {
-
- private boolean cachable;
-
- protected List paramNames = new ArrayList();
-
- private List optional = new ArrayList();
-
- private SHFunction shFunction;
-
-
- /**
- * Constructs a new SHACLARQFunction based on a given sh:Function.
- * The shaclFunction must be associated with the Model containing
- * the triples of its definition.
- * @param shaclFunction the SHACL function
- */
- protected SHACLARQFunction(SHFunction shaclFunction) {
- this.shFunction = shaclFunction;
- if(shaclFunction != null) {
- this.cachable = shaclFunction.hasProperty(DASH.cachable, JenaDatatypes.TRUE);
- }
- }
-
-
- protected void addParameters(SHParameterizable parameterizable) {
- JenaUtil.setGraphReadOptimization(true);
- try {
- for(SHParameter param : parameterizable.getOrderedParameters()) {
- String varName = param.getVarName();
- if(varName == null) {
- throw new IllegalStateException(param + " of " + parameterizable + " does not have a valid predicate");
- }
- paramNames.add(varName);
- optional.add(param.isOptional());
- }
- }
- finally {
- JenaUtil.setGraphReadOptimization(false);
- }
- }
-
-
- @Override
- public void build(String uri, ExprList args) {
- }
-
-
- @Override
+
+ private boolean cachable;
+
+ protected List paramNames = new ArrayList();
+
+ private List optional = new ArrayList();
+
+ private SHFunction shFunction;
+
+
+ /**
+ * Constructs a new SHACLARQFunction based on a given sh:Function.
+ * The shaclFunction must be associated with the Model containing
+ * the triples of its definition.
+ *
+ * @param shaclFunction the SHACL function
+ */
+ protected SHACLARQFunction(SHFunction shaclFunction) {
+ this.shFunction = shaclFunction;
+ if (shaclFunction != null) {
+ this.cachable = shaclFunction.hasProperty(DASH.cachable, JenaDatatypes.TRUE);
+ }
+ }
+
+
+ protected void addParameters(SHParameterizable parameterizable) {
+ JenaUtil.setGraphReadOptimization(true);
+ try {
+ for (SHParameter param : parameterizable.getOrderedParameters()) {
+ String varName = param.getVarName();
+ if (varName == null) {
+ throw new IllegalStateException(param + " of " + parameterizable + " does not have a valid predicate");
+ }
+ paramNames.add(varName);
+ optional.add(param.isOptional());
+ }
+ } finally {
+ JenaUtil.setGraphReadOptimization(false);
+ }
+ }
+
+ @Override
public org.apache.jena.sparql.function.Function create(String uri) {
- return this;
- }
+ return this;
+ }
+
-
- @Override
+ @Override
public NodeValue exec(Binding binding, ExprList args, String uri, FunctionEnv env) {
-
- Graph activeGraph = env.getActiveGraph();
- Model model = activeGraph != null ?
- ModelFactory.createModelForGraph(activeGraph) :
- ModelFactory.createDefaultModel();
-
- QuerySolutionMap bindings = new QuerySolutionMap();
-
- Node[] paramsForCache;
- if(cachable) {
- paramsForCache = new Node[args.size()];
- }
- else {
- paramsForCache = null;
- }
- for(int i = 0; i < args.size(); i++) {
- Expr expr = args.get(i);
- if(expr != null && (!expr.isVariable() || binding.contains(expr.asVar()))) {
- NodeValue x = expr.eval(binding, env);
- if(x != null) {
- String paramName;
- if(i < paramNames.size()) {
- paramName = paramNames.get(i);
- }
- else {
- paramName = "arg" + (i + 1);
- }
- bindings.add(paramName, model.asRDFNode(x.asNode()));
- if(cachable) {
- paramsForCache[i] = x.asNode();
- }
- }
- else if(!optional.get(i)) {
- throw new ExprEvalException("Missing SHACL function argument");
- }
- }
- }
-
- Dataset dataset = DatasetFactory.wrap(env.getDataset());
-
- if(ExecStatisticsManager.get().isRecording() && ExecStatisticsManager.get().isRecordingDeclarativeFunctions()) {
- StringBuffer sb = new StringBuffer();
- sb.append("SHACL Function ");
- sb.append(SSE.str(NodeFactory.createURI(uri), model));
- sb.append("(");
- for(int i = 0; i < args.size(); i++) {
- if(i > 0) {
- sb.append(", ");
- }
- Expr expr = args.get(i);
- expr = Substitute.substitute(expr, binding);
- if(expr == null) {
- sb.append("?unbound");
- }
- else {
- ByteArrayOutputStream bos = new ByteArrayOutputStream();
- IndentedWriter iOut = new IndentedWriter(bos);
- ExprUtils.fmtSPARQL(iOut, expr, new SerializationContext(model));
- iOut.flush();
- sb.append(bos.toString());
- }
- }
- sb.append(")");
- long startTime = System.currentTimeMillis();
- NodeValue result;
- try {
- if(cachable) {
- result = SHACLFunctionsCache.get().execute(this, dataset, model, bindings, paramsForCache);
- }
- else {
- result = executeBody(dataset, model, bindings);
- }
- sb.append(" = ");
- sb.append(FmtUtils.stringForNode(result.asNode(), model));
- }
- catch(ExprEvalException ex) {
- sb.append(" : ");
- sb.append(ex.getLocalizedMessage());
- throw ex;
- }
- finally {
- long endTime = System.currentTimeMillis();
- ExecStatistics stats = new ExecStatistics(sb.toString(), getQueryString(), endTime - startTime, startTime, NodeFactory.createURI(uri));
- ExecStatisticsManager.get().addSilently(Collections.singleton(stats));
- }
- return result;
- }
- else {
- if(cachable) {
- return SHACLFunctionsCache.get().execute(this, dataset, model, bindings, paramsForCache);
- }
- else {
- return executeBody(dataset, model, bindings);
- }
- }
- }
-
-
- public abstract NodeValue executeBody(Dataset dataset, Model model, QuerySolution bindings);
-
-
- protected abstract String getQueryString();
-
-
- /**
- * Gets the underlying sh:Function Model object for this ARQ function.
- * @return the sh:Function (may be null)
- */
- public SHFunction getSHACLFunction() {
- return shFunction;
- }
-
-
- /**
- * Gets the names of the declared parameters, in order from left to right.
- * @return the parameter names
- */
- public String[] getParamNames() {
- return paramNames.toArray(new String[0]);
- }
-
-
- @Override
- public boolean isOptionalArg(int index) {
- return optional.get(index);
- }
+
+ Graph activeGraph = env.getActiveGraph();
+ Model model = activeGraph != null ?
+ ModelFactory.createModelForGraph(activeGraph) :
+ ModelFactory.createDefaultModel();
+
+ QuerySolutionMap bindings = new QuerySolutionMap();
+
+ Node[] paramsForCache;
+ if (cachable) {
+ paramsForCache = new Node[args.size()];
+ } else {
+ paramsForCache = null;
+ }
+ for (int i = 0; i < args.size(); i++) {
+ Expr expr = args.get(i);
+ if (expr != null && (!expr.isVariable() || binding.contains(expr.asVar()))) {
+ NodeValue x = expr.eval(binding, env);
+ if (x != null) {
+ String paramName;
+ if (i < paramNames.size()) {
+ paramName = paramNames.get(i);
+ } else {
+ paramName = "arg" + (i + 1);
+ }
+ bindings.add(paramName, model.asRDFNode(x.asNode()));
+ if (cachable) {
+ paramsForCache[i] = x.asNode();
+ }
+ } else if (!optional.get(i)) {
+ throw new ExprEvalException("Missing SHACL function argument");
+ }
+ }
+ }
+
+ Dataset dataset = DatasetFactory.wrap(env.getDataset());
+
+ if (ExecStatisticsManager.get().isRecording() && ExecStatisticsManager.get().isRecordingDeclarativeFunctions()) {
+ StringBuffer sb = new StringBuffer();
+ sb.append("SHACL Function ");
+ sb.append(SSE.str(NodeFactory.createURI(uri), model));
+ sb.append("(");
+ for (int i = 0; i < args.size(); i++) {
+ if (i > 0) {
+ sb.append(", ");
+ }
+ Expr expr = args.get(i);
+ expr = Substitute.substitute(expr, binding);
+ if (expr == null) {
+ sb.append("?unbound");
+ } else {
+ ByteArrayOutputStream bos = new ByteArrayOutputStream();
+ IndentedWriter iOut = new IndentedWriter(bos);
+ ExprUtils.fmtSPARQL(iOut, expr, new SerializationContext(model));
+ iOut.flush();
+ sb.append(bos);
+ }
+ }
+ sb.append(")");
+ long startTime = System.currentTimeMillis();
+ NodeValue result;
+ try {
+ if (cachable) {
+ result = SHACLFunctionsCache.get().execute(this, dataset, model, bindings, paramsForCache);
+ } else {
+ result = executeBody(dataset, model, bindings);
+ }
+ sb.append(" = ");
+ sb.append(FmtUtils.stringForNode(result.asNode(), model));
+ } catch (ExprEvalException ex) {
+ sb.append(" : ");
+ sb.append(ex.getLocalizedMessage());
+ throw ex;
+ } finally {
+ long endTime = System.currentTimeMillis();
+ ExecStatistics stats = new ExecStatistics(sb.toString(), getQueryString(), endTime - startTime, startTime, NodeFactory.createURI(uri));
+ ExecStatisticsManager.get().addSilently(Collections.singleton(stats));
+ }
+ return result;
+ } else {
+ if (cachable) {
+ return SHACLFunctionsCache.get().execute(this, dataset, model, bindings, paramsForCache);
+ } else {
+ return executeBody(dataset, model, bindings);
+ }
+ }
+ }
+
+
+ public abstract NodeValue executeBody(Dataset dataset, Model model, QuerySolution bindings);
+
+
+ protected abstract String getQueryString();
+
+
+ /**
+ * Gets the underlying sh:Function Model object for this ARQ function.
+ *
+ * @return the sh:Function (may be null)
+ */
+ public SHFunction getSHACLFunction() {
+ return shFunction;
+ }
+
+
+ /**
+ * Gets the names of the declared parameters, in order from left to right.
+ *
+ * @return the parameter names
+ */
+ public String[] getParamNames() {
+ return paramNames.toArray(new String[0]);
+ }
+
+
+ @Override
+ public boolean isOptionalArg(int index) {
+ return optional.get(index);
+ }
}
diff --git a/src/main/java/org/topbraid/shacl/arq/SHACLFunctionsCache.java b/src/main/java/org/topbraid/shacl/arq/SHACLFunctionsCache.java
index a15b8cd0..adaca999 100644
--- a/src/main/java/org/topbraid/shacl/arq/SHACLFunctionsCache.java
+++ b/src/main/java/org/topbraid/shacl/arq/SHACLFunctionsCache.java
@@ -16,11 +16,6 @@
*/
package org.topbraid.shacl.arq;
-import java.util.Collections;
-import java.util.LinkedHashMap;
-import java.util.Map;
-import java.util.Map.Entry;
-
import org.apache.jena.graph.Node;
import org.apache.jena.query.Dataset;
import org.apache.jena.query.QuerySolution;
@@ -28,145 +23,145 @@
import org.apache.jena.sparql.expr.ExprEvalException;
import org.apache.jena.sparql.expr.NodeValue;
+import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+
/**
* A cache that remembers previous calls to SHACL functions marked with sh:cachable.
- *
+ *
* @author Holger Knublauch
*/
public class SHACLFunctionsCache {
- private static SHACLFunctionsCache singleton = new SHACLFunctionsCache();
-
- public static SHACLFunctionsCache get() {
- return singleton;
- }
-
- public static void set(SHACLFunctionsCache value) {
- SHACLFunctionsCache.singleton = value;
- }
-
-
- private static final int capacity = 10000;
-
- @SuppressWarnings("serial")
- private static class MyCache extends LinkedHashMap {
-
- MyCache() {
- super(capacity + 1, 1.1f, true);
- }
-
- @Override
- protected boolean removeEldestEntry(Entry eldest) {
- if(size() > capacity) {
- return true;
- }
- else {
- return false;
- }
- }
- };
-
- private Map cache = Collections.synchronizedMap(new MyCache());
-
-
- public void clear() {
- cache.clear();
- }
-
-
- public NodeValue execute(SHACLARQFunction function, Dataset dataset, Model defaultModel, QuerySolution bindings, Node[] args) {
- Key key = new Key(function.getSHACLFunction().getURI(), args);
- Result result = cache.get(key);
- if(result == null) {
- result = new Result();
- try {
- result.nodeValue = function.executeBody(dataset, defaultModel, bindings);
- }
- catch(ExprEvalException ex) {
- result.ex = ex;
- }
- cache.put(key, result);
- }
- if(result.ex != null) {
- throw new ExprEvalException(result.ex.getMessage());
- }
- else {
- return result.nodeValue;
- }
- }
-
-
- private static class Key {
-
- private int hashCode;
-
- private Node[] args;
-
- private String functionURI;
-
-
- Key(String functionURI, Node[] args) {
- this.args = args;
- this.functionURI = functionURI;
- hashCode = functionURI.hashCode();
- for(Node arg : args) {
- if(arg != null) {
- hashCode += arg.hashCode();
- }
- }
- }
-
-
- private boolean argEquals(Node arg1, Node arg2) {
- if(arg1 == null) {
- return arg2 == null;
- }
- else if(arg2 == null) {
- return false;
- }
- else {
- return arg1.equals(arg2);
- }
- }
-
-
- @Override
- public boolean equals(Object obj) {
-
- if(!(obj instanceof Key)) {
- return false;
- }
-
- Key other = (Key) obj;
- if(!functionURI.equals(other.functionURI)) {
- return false;
- }
-
- if(args.length != other.args.length) {
- return false;
- }
-
- for(int i = 0; i < args.length; i++) {
- if(!argEquals(args[i], other.args[i])) {
- return false;
- }
- }
-
- return true;
- }
-
-
- @Override
- public int hashCode() {
- return hashCode;
- }
- }
-
-
- private static class Result {
-
- ExprEvalException ex;
-
- NodeValue nodeValue;
- }
+ private static SHACLFunctionsCache singleton = new SHACLFunctionsCache();
+
+ public static SHACLFunctionsCache get() {
+ return singleton;
+ }
+
+ public static void set(SHACLFunctionsCache value) {
+ SHACLFunctionsCache.singleton = value;
+ }
+
+
+ private static final int capacity = 10000;
+
+ @SuppressWarnings("serial")
+ private static class MyCache extends LinkedHashMap {
+
+ MyCache() {
+ super(capacity + 1, 1.1f, true);
+ }
+
+ @Override
+ protected boolean removeEldestEntry(Entry eldest) {
+ if (size() > capacity) {
+ return true;
+ } else {
+ return false;
+ }
+ }
+ }
+
+ private Map cache = Collections.synchronizedMap(new MyCache());
+
+
+ public void clear() {
+ cache.clear();
+ }
+
+
+ public NodeValue execute(SHACLARQFunction function, Dataset dataset, Model defaultModel, QuerySolution bindings, Node[] args) {
+ Key key = new Key(function.getSHACLFunction().getURI(), args);
+ Result result = cache.get(key);
+ if (result == null) {
+ result = new Result();
+ try {
+ result.nodeValue = function.executeBody(dataset, defaultModel, bindings);
+ } catch (ExprEvalException ex) {
+ result.ex = ex;
+ }
+ cache.put(key, result);
+ }
+ if (result.ex != null) {
+ throw new ExprEvalException(result.ex.getMessage());
+ } else {
+ return result.nodeValue;
+ }
+ }
+
+
+ private static class Key {
+
+ private int hashCode;
+
+ private Node[] args;
+
+ private String functionURI;
+
+
+ Key(String functionURI, Node[] args) {
+ this.args = args;
+ this.functionURI = functionURI;
+ hashCode = functionURI.hashCode();
+ for (Node arg : args) {
+ if (arg != null) {
+ hashCode += arg.hashCode();
+ }
+ }
+ }
+
+
+ private boolean argEquals(Node arg1, Node arg2) {
+ if (arg1 == null) {
+ return arg2 == null;
+ } else if (arg2 == null) {
+ return false;
+ } else {
+ return arg1.equals(arg2);
+ }
+ }
+
+
+ @Override
+ public boolean equals(Object obj) {
+
+ if (!(obj instanceof Key)) {
+ return false;
+ }
+
+ Key other = (Key) obj;
+ if (!functionURI.equals(other.functionURI)) {
+ return false;
+ }
+
+ if (args.length != other.args.length) {
+ return false;
+ }
+
+ for (int i = 0; i < args.length; i++) {
+ if (!argEquals(args[i], other.args[i])) {
+ return false;
+ }
+ }
+
+ return true;
+ }
+
+
+ @Override
+ public int hashCode() {
+ return hashCode;
+ }
+ }
+
+
+ private static class Result {
+
+ ExprEvalException ex;
+
+ NodeValue nodeValue;
+ }
}
diff --git a/src/main/java/org/topbraid/shacl/arq/SHACLPaths.java b/src/main/java/org/topbraid/shacl/arq/SHACLPaths.java
index e70052d1..d4a1f070 100644
--- a/src/main/java/org/topbraid/shacl/arq/SHACLPaths.java
+++ b/src/main/java/org/topbraid/shacl/arq/SHACLPaths.java
@@ -57,7 +57,7 @@
import org.topbraid.shacl.vocabulary.SH;
/**
- * Utilties to manage the conversion between SHACL paths and SPARQL 1.1 property paths.
+ * Utilities to manage the conversion between SHACL paths and SPARQL 1.1 property paths.
*
* @author Holger Knublauch
*/
diff --git a/src/main/java/org/topbraid/shacl/arq/functions/CheckRegexSyntaxFunction.java b/src/main/java/org/topbraid/shacl/arq/functions/CheckRegexSyntaxFunction.java
index dd96f9e9..e3efee2f 100644
--- a/src/main/java/org/topbraid/shacl/arq/functions/CheckRegexSyntaxFunction.java
+++ b/src/main/java/org/topbraid/shacl/arq/functions/CheckRegexSyntaxFunction.java
@@ -50,7 +50,7 @@ protected NodeValue exec(Node regexNode, FunctionEnv env) {
/**
* Convert the specified exception's message to a system-independent
* format while preserving the message's embedded regex unchanged.
- * This allows whomever catches the exception to inspect the original regex
+ * This allows whoever catches the exception to inspect the original regex
* unchanged.
*
* @see PatternSyntaxException#getMessage()
diff --git a/src/main/java/org/topbraid/shacl/arq/functions/SHACLSPARQLARQFunction.java b/src/main/java/org/topbraid/shacl/arq/functions/SHACLSPARQLARQFunction.java
index cb076778..74d626c7 100644
--- a/src/main/java/org/topbraid/shacl/arq/functions/SHACLSPARQLARQFunction.java
+++ b/src/main/java/org/topbraid/shacl/arq/functions/SHACLSPARQLARQFunction.java
@@ -29,6 +29,7 @@
import org.apache.jena.sparql.expr.ExprEvalException;
import org.apache.jena.sparql.expr.ExprList;
import org.apache.jena.sparql.expr.NodeValue;
+import org.apache.jena.sparql.util.Context;
import org.topbraid.jenax.util.ARQFactory;
import org.topbraid.jenax.util.DatasetWithDifferentDefaultModel;
import org.topbraid.jenax.util.JenaUtil;
@@ -54,7 +55,10 @@ public class SHACLSPARQLARQFunction extends SHACLARQFunction {
private org.apache.jena.query.Query arqQuery;
private String queryString;
-
+
+ @Override
+ public void build(String uri, ExprList args, Context context) {
+ }
/**
* Constructs a new SHACLSPARQLARQFunction based on a given sh:ConstraintComponent
@@ -106,11 +110,6 @@ public SHACLSPARQLARQFunction(SHSPARQLFunction shaclFunction) {
addParameters(shaclFunction);
}
-
-
- @Override
- public void build(String uri, ExprList args) {
- }
@Override
diff --git a/src/main/java/org/topbraid/shacl/engine/ShapesGraph.java b/src/main/java/org/topbraid/shacl/engine/ShapesGraph.java
index 31c509d9..4e23b31b 100644
--- a/src/main/java/org/topbraid/shacl/engine/ShapesGraph.java
+++ b/src/main/java/org/topbraid/shacl/engine/ShapesGraph.java
@@ -16,21 +16,8 @@
*/
package org.topbraid.shacl.engine;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.function.Predicate;
-
import org.apache.jena.graph.Node;
-import org.apache.jena.rdf.model.Model;
-import org.apache.jena.rdf.model.Property;
-import org.apache.jena.rdf.model.RDFNode;
-import org.apache.jena.rdf.model.Resource;
-import org.apache.jena.rdf.model.StmtIterator;
+import org.apache.jena.rdf.model.*;
import org.apache.jena.shared.PrefixMapping;
import org.apache.jena.sparql.graph.PrefixMappingMem;
import org.apache.jena.sparql.util.FmtUtils;
@@ -49,284 +36,291 @@
import org.topbraid.shacl.vocabulary.DASH;
import org.topbraid.shacl.vocabulary.SH;
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.function.Predicate;
+
/**
* Represents a shapes graph as input to an engine (e.g. validation or inferencing).
* This is basically a collection of Shapes with some data structures that avoid repetitive computation.
- *
+ *
* @author Holger Knublauch
*/
public class ShapesGraph {
-
- private final static Map EMPTY = new HashMap<>();
-
- // May be defined to skip certain constraints (which are computed on demand)
- private Predicate constraintFilter;
-
- // Map of sh:defaultValue expressions. Outer keys are sh:path predicates, inner keys are (node) shapes.
- private Map> defaultValueMap = new ConcurrentHashMap<>();
-
- // Can be used to bypass TDB's slow prefix mapping
- private PrefixMapping fastPrefixMapping;
-
- // Cache of shapeFilter results
- private Map ignoredShapes = new ConcurrentHashMap<>();
-
- // Mapping of properties (e.g., sh:datatype) to their constraint components (e.g., sh:DatatypeConstraintComponent)
- private Map parametersMap = new ConcurrentHashMap<>();
-
- // The root shapes where whole-graph validation and inferencing would start
- private List rootShapes;
-
- // Can be used to skip certain shapes
- private Predicate shapeFilter;
-
- // Map of Jena Nodes to their Shape instances, computed on demand
- private Map shapesMap = new ConcurrentHashMap<>();
-
- // The Jena Model of the shape definitions
- private Model shapesModel;
-
- // Map of sh:values expressions. Outer keys are sh:path predicates, inner keys are (node) shapes.
- private Map> valuesMap = new ConcurrentHashMap<>();
-
-
- /**
- * Constructs a new ShapesGraph.
- * This should not be called directly, only from ShapesGraphFactory.
- * @param shapesModel the Model containing the shape definitions
- */
- public ShapesGraph(Model shapesModel) {
- this.shapesModel = shapesModel;
- }
-
-
- public ShapesGraph clone() {
- ShapesGraph clone = new ShapesGraph(shapesModel);
- clone.constraintFilter = this.constraintFilter;
- clone.shapeFilter = this.shapeFilter;
- return clone;
- }
-
-
- public Constraint createConstraint(Shape shape, SHConstraintComponent component, List params, RDFNode parameterValue) {
- return new Constraint(shape, component, params, parameterValue);
- }
-
-
- public SHConstraintComponent getComponentWithParameter(Property parameter) {
- return parametersMap.computeIfAbsent(parameter, p -> {
- StmtIterator it = shapesModel.listStatements(null, SH.path, parameter);
- while(it.hasNext()) {
- Resource param = it.next().getSubject();
- if(!param.hasProperty(SH.optional, JenaDatatypes.TRUE)) {
- StmtIterator i2 = shapesModel.listStatements(null, SH.parameter, param);
- while(i2.hasNext()) {
- Resource r = i2.next().getSubject();
- if(JenaUtil.hasIndirectType(r, SH.ConstraintComponent)) {
- i2.close();
- it.close();
- SHConstraintComponent cc = SHFactory.asConstraintComponent(r);
- return cc;
- }
- }
- }
- }
- return null;
- });
- }
-
-
- // Added for cases where repeated access to the prefixes causes many (TDB) loads, produces a faster in-memory PrefixMapping
- public synchronized PrefixMapping getFastPrefixMapping() {
- if(fastPrefixMapping == null) {
- fastPrefixMapping = new PrefixMappingMem();
- Map pm = shapesModel.getNsPrefixMap();
- for(String prefix : pm.keySet()) {
- fastPrefixMapping.setNsPrefix(prefix, pm.get(prefix));
- }
- }
- return fastPrefixMapping;
- }
-
-
- public String getPathString(Resource path) {
- if(path.isURIResource()) {
- return FmtUtils.stringForNode(path.asNode(), getFastPrefixMapping());
- }
- else {
- return SHACLPaths.getPathString(path);
- }
- }
-
-
- /**
- * Gets all non-deactivated shapes that declare a target and pass the provided filter.
- * @return the root shapes
- */
- public synchronized List getRootShapes() {
- if(rootShapes == null) {
-
- // Collect all shapes, as identified by target and/or type
- Set candidates = new HashSet<>();
- candidates.addAll(shapesModel.listSubjectsWithProperty(SH.target).toList());
- candidates.addAll(shapesModel.listSubjectsWithProperty(SH.targetClass).toList());
- candidates.addAll(shapesModel.listSubjectsWithProperty(SH.targetNode).toList());
- candidates.addAll(shapesModel.listSubjectsWithProperty(SH.targetObjectsOf).toList());
- candidates.addAll(shapesModel.listSubjectsWithProperty(SH.targetSubjectsOf).toList());
- for(Resource shape : JenaUtil.getAllInstances(shapesModel.getResource(SH.NodeShape.getURI()))) {
- if(JenaUtil.hasIndirectType(shape, RDFS.Class)) {
- candidates.add(shape);
- }
- }
- for(Resource shape : JenaUtil.getAllInstances(shapesModel.getResource(SH.PropertyShape.getURI()))) {
- if(JenaUtil.hasIndirectType(shape, RDFS.Class)) {
- candidates.add(shape);
- }
- }
-
- // Turn the shape Resource objects into Shape instances
- this.rootShapes = new LinkedList();
- for(Resource candidate : candidates) {
- SHShape shape = SHFactory.asShape(candidate);
- if(!shape.isDeactivated() && !isIgnored(shape.asNode())) {
- this.rootShapes.add(getShape(shape.asNode()));
- }
- }
- }
- return rootShapes;
- }
-
-
- public Shape getShape(Node node) {
- return shapesMap.computeIfAbsent(node, n -> new Shape(this, SHFactory.asShape(shapesModel.asRDFNode(node))));
- }
-
-
- /**
- * Gets a Map from (node) shapes to NodeExpressions derived from sh:defaultValue statements.
- * @param predicate the predicate to infer
- * @return a Map which is empty if the predicate is not mentioned in any inferences
- */
- public Map getDefaultValueNodeExpressionsMap(Resource predicate) {
- return getExpressionsMap(defaultValueMap, predicate, SH.defaultValue);
- }
-
-
- /**
- * Gets a Map from (node) shapes to NodeExpressions derived from sh:values statements.
- * Can be used to efficiently figure out how to infer the values of a given instance, based on the rdf:types
- * of the instance.
- * @param predicate the predicate to infer
- * @return a Map which is empty if the predicate is not mentioned in any inferences
- */
- public Map getValuesNodeExpressionsMap(Resource predicate) {
- return getExpressionsMap(valuesMap, predicate, SH.values);
- }
-
-
- private Map getExpressionsMap(Map> valuesMap, Resource predicate, Property systemPredicate) {
- return valuesMap.computeIfAbsent(predicate.asNode(), p -> {
-
- Map> map = new HashMap<>();
- StmtIterator it = shapesModel.listStatements(null, SH.path, predicate);
- while(it.hasNext()) {
- Resource ps = it.next().getSubject();
- if(ps.hasProperty(systemPredicate) && !ps.hasProperty(SH.deactivated, JenaDatatypes.TRUE)) {
- StmtIterator nit = shapesModel.listStatements(null, SH.property, ps);
- while(nit.hasNext()) {
- Resource nodeShape = nit.next().getSubject();
- if(!nodeShape.hasProperty(SH.deactivated, JenaDatatypes.TRUE)) {
- Node shapeNode = nodeShape.asNode();
- addExpressions(map, ps, shapeNode, systemPredicate);
- for(Resource targetClass : JenaUtil.getResourceProperties(nodeShape, SH.targetClass)) {
- addExpressions(map, ps, targetClass.asNode(), systemPredicate);
- }
- for(Resource targetClass : JenaUtil.getResourceProperties(nodeShape, DASH.applicableToClass)) {
- addExpressions(map, ps, targetClass.asNode(), systemPredicate);
- }
- }
- }
- }
- }
-
- if(map.isEmpty()) {
- // Return a non-null but empty value to avoid re-computation (null not supported by ConcurrentHashMap)
- return EMPTY;
- }
- else {
- Map result = new HashMap<>();
- for(Node key : map.keySet()) {
- List list = map.get(key);
- if(list.size() > 1) {
- RDFNode exprNode = shapesModel.asRDFNode(key);
- result.put(key, new DistinctExpression(exprNode, new UnionExpression(exprNode, list)));
- }
- else {
- result.put(key, list.get(0));
- }
- }
- return result;
- }
- });
- }
-
-
- private void addExpressions(Map> map, Resource ps, Node shapeNode, Property systemPredicate) {
- map.computeIfAbsent(shapeNode, n -> {
- List exprs = new LinkedList<>();
- StmtIterator vit = ps.listProperties(systemPredicate);
- while(vit.hasNext()) {
- RDFNode expr = vit.next().getObject();
- NodeExpression nodeExpression = NodeExpressionFactory.get().create(expr);
- exprs.add(nodeExpression);
- }
- return exprs;
- });
- }
-
-
- public Model getShapesModel() {
- return shapesModel;
- }
-
-
- public boolean isIgnored(Node shapeNode) {
- if(shapeFilter == null) {
- return false;
- }
- else {
- return ignoredShapes.computeIfAbsent(shapeNode, node -> {
- SHShape shape = SHFactory.asShape(shapesModel.asRDFNode(shapeNode));
- return !shapeFilter.test(shape);
- });
- }
- }
-
-
- public boolean isIgnoredConstraint(Constraint constraint) {
- return constraintFilter != null && !constraintFilter.test(constraint);
- }
-
-
- /**
- * Sets a filter Predicate that can be used to ignore certain constraints.
- * See for example CoreConstraintFilter.
- * Such filters must return true if the Constraint should be used, false to ignore.
- * This method should be called immediately after the constructor only.
- * @param value the new constraint filter
- */
- public void setConstraintFilter(Predicate value) {
- this.constraintFilter = value;
- }
-
-
- /**
- * Sets a filter Predicate that can be used to ignore certain shapes.
- * Such filters must return true if the shape should be used, false to ignore.
- * This method should be called immediately after the constructor only.
- * @param value the new shape filter
- */
- public void setShapeFilter(Predicate value) {
- this.shapeFilter = value;
- }
+
+ private final static Map<Node, NodeExpression> EMPTY = new HashMap<>();
+
+ // May be defined to skip certain constraints (which are computed on demand)
+ private Predicate<Constraint> constraintFilter;
+
+ // Map of sh:defaultValue expressions. Outer keys are sh:path predicates, inner keys are (node) shapes.
+ private Map<Node, Map<Node, NodeExpression>> defaultValueMap = new ConcurrentHashMap<>();
+
+ // Can be used to bypass TDB's slow prefix mapping
+ private PrefixMapping fastPrefixMapping;
+
+ // Cache of shapeFilter results
+ private Map<Node, Boolean> ignoredShapes = new ConcurrentHashMap<>();
+
+ // Mapping of properties (e.g., sh:datatype) to their constraint components (e.g., sh:DatatypeConstraintComponent)
+ private Map<Property, SHConstraintComponent> parametersMap = new ConcurrentHashMap<>();
+
+ // The root shapes where whole-graph validation and inferencing would start
+ private List<Shape> rootShapes;
+
+ // Can be used to skip certain shapes
+ private Predicate<SHShape> shapeFilter;
+
+ // Map of Jena Nodes to their Shape instances, computed on demand
+ private Map<Node, Shape> shapesMap = new ConcurrentHashMap<>();
+
+ // The Jena Model of the shape definitions
+ private Model shapesModel;
+
+ // Map of sh:values expressions. Outer keys are sh:path predicates, inner keys are (node) shapes.
+ private Map<Node, Map<Node, NodeExpression>> valuesMap = new ConcurrentHashMap<>();
+
+
+ /**
+ * Constructs a new ShapesGraph.
+ * This should not be called directly, only from ShapesGraphFactory.
+ *
+ * @param shapesModel the Model containing the shape definitions
+ */
+ public ShapesGraph(Model shapesModel) {
+ this.shapesModel = shapesModel;
+ }
+
+
+ @Override
+ public ShapesGraph clone() {
+ ShapesGraph clone = new ShapesGraph(shapesModel);
+ clone.constraintFilter = this.constraintFilter;
+ clone.shapeFilter = this.shapeFilter;
+ return clone;
+ }
+
+
+ public Constraint createConstraint(Shape shape, SHConstraintComponent component, List<SHParameter> params, RDFNode parameterValue) {
+ return new Constraint(shape, component, params, parameterValue);
+ }
+
+
+ public SHConstraintComponent getComponentWithParameter(Property parameter) {
+ return parametersMap.computeIfAbsent(parameter, p -> {
+ StmtIterator it = shapesModel.listStatements(null, SH.path, parameter);
+ while (it.hasNext()) {
+ Resource param = it.next().getSubject();
+ if (!param.hasProperty(SH.optional, JenaDatatypes.TRUE)) {
+ StmtIterator i2 = shapesModel.listStatements(null, SH.parameter, param);
+ while (i2.hasNext()) {
+ Resource r = i2.next().getSubject();
+ if (JenaUtil.hasIndirectType(r, SH.ConstraintComponent)) {
+ i2.close();
+ it.close();
+ SHConstraintComponent cc = SHFactory.asConstraintComponent(r);
+ return cc;
+ }
+ }
+ }
+ }
+ return null;
+ });
+ }
+
+
+ // Added for cases where repeated access to the prefixes causes many (TDB) loads, produces a faster in-memory PrefixMapping
+ public synchronized PrefixMapping getFastPrefixMapping() {
+ if (fastPrefixMapping == null) {
+ fastPrefixMapping = new PrefixMappingMem();
+ Map<String, String> pm = shapesModel.getNsPrefixMap();
+ for (String prefix : pm.keySet()) {
+ fastPrefixMapping.setNsPrefix(prefix, pm.get(prefix));
+ }
+ }
+ return fastPrefixMapping;
+ }
+
+
+ public String getPathString(Resource path) {
+ if (path.isURIResource()) {
+ return FmtUtils.stringForNode(path.asNode(), getFastPrefixMapping());
+ } else {
+ return SHACLPaths.getPathString(path);
+ }
+ }
+
+
+ /**
+ * Gets all non-deactivated shapes that declare a target and pass the provided filter.
+ *
+ * @return the root shapes
+ */
+ public synchronized List<Shape> getRootShapes() {
+ if (rootShapes == null) {
+
+ // Collect all shapes, as identified by target and/or type
+ Set<Resource> candidates = new HashSet<>();
+ candidates.addAll(shapesModel.listSubjectsWithProperty(SH.target).toList());
+ candidates.addAll(shapesModel.listSubjectsWithProperty(SH.targetClass).toList());
+ candidates.addAll(shapesModel.listSubjectsWithProperty(SH.targetNode).toList());
+ candidates.addAll(shapesModel.listSubjectsWithProperty(SH.targetObjectsOf).toList());
+ candidates.addAll(shapesModel.listSubjectsWithProperty(SH.targetSubjectsOf).toList());
+ for (Resource shape : JenaUtil.getAllInstances(shapesModel.getResource(SH.NodeShape.getURI()))) {
+ if (JenaUtil.hasIndirectType(shape, RDFS.Class)) {
+ candidates.add(shape);
+ }
+ }
+ for (Resource shape : JenaUtil.getAllInstances(shapesModel.getResource(SH.PropertyShape.getURI()))) {
+ if (JenaUtil.hasIndirectType(shape, RDFS.Class)) {
+ candidates.add(shape);
+ }
+ }
+
+ // Turn the shape Resource objects into Shape instances
+ this.rootShapes = new LinkedList<>();
+ for (Resource candidate : candidates) {
+ SHShape shape = SHFactory.asShape(candidate);
+ if (!shape.isDeactivated() && !isIgnored(shape.asNode())) {
+ this.rootShapes.add(getShape(shape.asNode()));
+ }
+ }
+ }
+ return rootShapes;
+ }
+
+
+ public Shape getShape(Node node) {
+ return shapesMap.computeIfAbsent(node, n -> new Shape(this, SHFactory.asShape(shapesModel.asRDFNode(node))));
+ }
+
+
+ /**
+ * Gets a Map from (node) shapes to NodeExpressions derived from sh:defaultValue statements.
+ *
+ * @param predicate the predicate to infer
+ * @return a Map which is empty if the predicate is not mentioned in any inferences
+ */
+ public Map<Node, NodeExpression> getDefaultValueNodeExpressionsMap(Resource predicate) {
+ return getExpressionsMap(defaultValueMap, predicate, SH.defaultValue);
+ }
+
+
+ /**
+ * Gets a Map from (node) shapes to NodeExpressions derived from sh:values statements.
+ * Can be used to efficiently figure out how to infer the values of a given instance, based on the rdf:types
+ * of the instance.
+ *
+ * @param predicate the predicate to infer
+ * @return a Map which is empty if the predicate is not mentioned in any inferences
+ */
+ public Map<Node, NodeExpression> getValuesNodeExpressionsMap(Resource predicate) {
+ return getExpressionsMap(valuesMap, predicate, SH.values);
+ }
+
+
+ private Map<Node, NodeExpression> getExpressionsMap(Map<Node, Map<Node, NodeExpression>> valuesMap, Resource predicate, Property systemPredicate) {
+ return valuesMap.computeIfAbsent(predicate.asNode(), p -> {
+
+ Map<Node, List<NodeExpression>> map = new HashMap<>();
+ StmtIterator it = shapesModel.listStatements(null, SH.path, predicate);
+ while (it.hasNext()) {
+ Resource ps = it.next().getSubject();
+ if (ps.hasProperty(systemPredicate) && !ps.hasProperty(SH.deactivated, JenaDatatypes.TRUE)) {
+ StmtIterator nit = shapesModel.listStatements(null, SH.property, ps);
+ while (nit.hasNext()) {
+ Resource nodeShape = nit.next().getSubject();
+ if (!nodeShape.hasProperty(SH.deactivated, JenaDatatypes.TRUE)) {
+ Node shapeNode = nodeShape.asNode();
+ addExpressions(map, ps, shapeNode, systemPredicate);
+ for (Resource targetClass : JenaUtil.getResourceProperties(nodeShape, SH.targetClass)) {
+ addExpressions(map, ps, targetClass.asNode(), systemPredicate);
+ }
+ for (Resource targetClass : JenaUtil.getResourceProperties(nodeShape, DASH.applicableToClass)) {
+ addExpressions(map, ps, targetClass.asNode(), systemPredicate);
+ }
+ }
+ }
+ }
+ }
+
+ if (map.isEmpty()) {
+ // Return a non-null but empty value to avoid re-computation (null not supported by ConcurrentHashMap)
+ return EMPTY;
+ } else {
+ Map<Node, NodeExpression> result = new HashMap<>();
+ for (Node key : map.keySet()) {
+ List<NodeExpression> list = map.get(key);
+ if (list.size() > 1) {
+ RDFNode exprNode = shapesModel.asRDFNode(key);
+ result.put(key, new DistinctExpression(exprNode, new UnionExpression(exprNode, list)));
+ } else {
+ result.put(key, list.get(0));
+ }
+ }
+ return result;
+ }
+ });
+ }
+
+
+ private void addExpressions(Map<Node, List<NodeExpression>> map, Resource ps, Node shapeNode, Property systemPredicate) {
+ map.computeIfAbsent(shapeNode, n -> {
+ List<NodeExpression> exprs = new LinkedList<>();
+ StmtIterator vit = ps.listProperties(systemPredicate);
+ while (vit.hasNext()) {
+ RDFNode expr = vit.next().getObject();
+ NodeExpression nodeExpression = NodeExpressionFactory.get().create(expr);
+ exprs.add(nodeExpression);
+ }
+ return exprs;
+ });
+ }
+
+
+ public Model getShapesModel() {
+ return shapesModel;
+ }
+
+
+ public boolean isIgnored(Node shapeNode) {
+ if (shapeFilter == null) {
+ return false;
+ } else {
+ return ignoredShapes.computeIfAbsent(shapeNode, node -> {
+ SHShape shape = SHFactory.asShape(shapesModel.asRDFNode(shapeNode));
+ return !shapeFilter.test(shape);
+ });
+ }
+ }
+
+
+ public boolean isIgnoredConstraint(Constraint constraint) {
+ return constraintFilter != null && !constraintFilter.test(constraint);
+ }
+
+
+ /**
+ * Sets a filter Predicate that can be used to ignore certain constraints.
+ * See for example CoreConstraintFilter.
+ * Such filters must return true if the Constraint should be used, false to ignore.
+ * This method should be called immediately after the constructor only.
+ *
+ * @param value the new constraint filter
+ */
+ public void setConstraintFilter(Predicate<Constraint> value) {
+ this.constraintFilter = value;
+ }
+
+
+ /**
+ * Sets a filter Predicate that can be used to ignore certain shapes.
+ * Such filters must return true if the shape should be used, false to ignore.
+ * This method should be called immediately after the constructor only.
+ *
+ * @param value the new shape filter
+ */
+ public void setShapeFilter(Predicate<SHShape> value) {
+ this.shapeFilter = value;
+ }
}
diff --git a/src/main/java/org/topbraid/shacl/engine/filters/ExcludeMetaShapesFilter.java b/src/main/java/org/topbraid/shacl/engine/filters/ExcludeMetaShapesFilter.java
index 623dfca6..c49b030e 100644
--- a/src/main/java/org/topbraid/shacl/engine/filters/ExcludeMetaShapesFilter.java
+++ b/src/main/java/org/topbraid/shacl/engine/filters/ExcludeMetaShapesFilter.java
@@ -16,11 +16,6 @@
*/
package org.topbraid.shacl.engine.filters;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Set;
-import java.util.function.Predicate;
-
import org.apache.jena.rdf.model.Resource;
import org.topbraid.jenax.util.JenaUtil;
import org.topbraid.shacl.model.SHShape;
@@ -28,30 +23,34 @@
import org.topbraid.shacl.vocabulary.SH;
import org.topbraid.shacl.vocabulary.TOSH;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.function.Predicate;
+
/**
* A Predicate that can be used to bypass any shapes that are also constraint components
* and any shapes from the tosh namespace.
- *
+ *
* @author Holger Knublauch
*/
public class ExcludeMetaShapesFilter implements Predicate<SHShape> {
-
- private static Set systemShapes = new HashSet<>();
- static {
- Collections.addAll(systemShapes, DASH.Editor, DASH.GraphStoreTestCase, DASH.InferencingTestCase, DASH.QueryTestCase, DASH.ValidationTestCase, DASH.Viewer, DASH.Widget);
- }
-
-
- public static void addSystemShapes(Resource... shapes) {
- for(Resource shape : shapes) {
- systemShapes.add(shape);
- }
- }
-
-
- @Override
- public boolean test(SHShape shape) {
- return !JenaUtil.hasIndirectType(shape, SH.Parameter) && !systemShapes.contains(shape) &&
- (shape.isAnon() || !shape.getURI().startsWith(TOSH.NS));
- }
+
+ private static Set<Resource> systemShapes = new HashSet<>();
+
+ static {
+ Collections.addAll(systemShapes, DASH.Editor, DASH.GraphStoreTestCase, DASH.InferencingTestCase, DASH.QueryTestCase, DASH.ValidationTestCase, DASH.Viewer, DASH.Widget);
+ }
+
+
+ public static void addSystemShapes(Resource... shapes) {
+ Collections.addAll(systemShapes, shapes);
+ }
+
+
+ @Override
+ public boolean test(SHShape shape) {
+ return !JenaUtil.hasIndirectType(shape, SH.Parameter) && !systemShapes.contains(shape) &&
+ (shape.isAnon() || !shape.getURI().startsWith(TOSH.NS));
+ }
}
diff --git a/src/main/java/org/topbraid/shacl/entailment/SHACLEntailment.java b/src/main/java/org/topbraid/shacl/entailment/SHACLEntailment.java
index ed04a062..ef592ae9 100644
--- a/src/main/java/org/topbraid/shacl/entailment/SHACLEntailment.java
+++ b/src/main/java/org/topbraid/shacl/entailment/SHACLEntailment.java
@@ -16,10 +16,6 @@
*/
package org.topbraid.shacl.entailment;
-import java.net.URI;
-import java.util.HashMap;
-import java.util.Map;
-
import org.apache.jena.query.Dataset;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
@@ -31,64 +27,65 @@
import org.topbraid.shacl.rules.RulesEntailment;
import org.topbraid.shacl.vocabulary.SH;
+import java.net.URI;
+import java.util.HashMap;
+import java.util.Map;
+
/**
* Singleton to support sh:entailment.
* Extensions may install their own Engines.
- *
+ *
* @author Holger Knublauch
*/
public class SHACLEntailment {
-
- public final static Resource RDFS = ResourceFactory.createResource("http://www.w3.org/ns/entailment/RDFS");
-
- public static interface Engine {
-
- Model createModelWithEntailment(Dataset dataset, URI shapesGraphURI, ShapesGraph shapesGraph, ProgressMonitor monitor) throws InterruptedException;
- }
-
- private static SHACLEntailment singleton = new SHACLEntailment();
-
- public static SHACLEntailment get() {
- return singleton;
- }
-
- private Map engines = new HashMap<>();
-
-
- protected SHACLEntailment() {
- setEngine(RDFS.getURI(), new Engine() {
- @Override
- public Model createModelWithEntailment(Dataset dataset, URI shapesGraphURI, ShapesGraph shapesGraph, ProgressMonitor monitor) {
- return ModelFactory.createRDFSModel(dataset.getDefaultModel());
- }
- });
- setEngine(SH.Rules.getURI(), new RulesEntailment());
- }
-
-
- public Engine getEngine(String uri) {
- return engines.get(uri);
- }
-
-
- public void setEngine(String uri, Engine engine) {
- engines.put(uri, engine);
- }
-
-
- public Dataset withEntailment(Dataset dataset, URI shapesGraphURI, ShapesGraph shapesGraph, Resource entailment, ProgressMonitor monitor) throws InterruptedException {
- if(entailment == null || dataset.getDefaultModel() == null) {
- return dataset;
- }
- else {
- Engine engine = getEngine(entailment.getURI());
- if(engine != null) {
- Model newDefaultModel = engine.createModelWithEntailment(dataset, shapesGraphURI, shapesGraph, monitor);
- return new DatasetWithDifferentDefaultModel(newDefaultModel, dataset);
- }
- else {
- return null;
- }
- }
- }
+
+ public final static Resource RDFS = ResourceFactory.createResource("http://www.w3.org/ns/entailment/RDFS");
+
+ public interface Engine {
+ Model createModelWithEntailment(Dataset dataset, URI shapesGraphURI, ShapesGraph shapesGraph, ProgressMonitor monitor) throws InterruptedException;
+ }
+
+ private static final SHACLEntailment singleton = new SHACLEntailment();
+
+ public static SHACLEntailment get() {
+ return singleton;
+ }
+
+ private Map<String, Engine> engines = new HashMap<>();
+
+
+ protected SHACLEntailment() {
+ setEngine(RDFS.getURI(), new Engine() {
+ @Override
+ public Model createModelWithEntailment(Dataset dataset, URI shapesGraphURI, ShapesGraph shapesGraph, ProgressMonitor monitor) {
+ return ModelFactory.createRDFSModel(dataset.getDefaultModel());
+ }
+ });
+ setEngine(SH.Rules.getURI(), new RulesEntailment());
+ }
+
+
+ public Engine getEngine(String uri) {
+ return engines.get(uri);
+ }
+
+
+ public void setEngine(String uri, Engine engine) {
+ engines.put(uri, engine);
+ }
+
+
+ public Dataset withEntailment(Dataset dataset, URI shapesGraphURI, ShapesGraph shapesGraph, Resource entailment, ProgressMonitor monitor) throws InterruptedException {
+ if (entailment == null || dataset.getDefaultModel() == null) {
+ return dataset;
+ } else {
+ Engine engine = getEngine(entailment.getURI());
+ if (engine != null) {
+ Model newDefaultModel = engine.createModelWithEntailment(dataset, shapesGraphURI, shapesGraph, monitor);
+ return new DatasetWithDifferentDefaultModel(newDefaultModel, dataset);
+ } else {
+ return null;
+ }
+ }
+ }
}
diff --git a/src/main/java/org/topbraid/shacl/expr/AbstractNodeExpression.java b/src/main/java/org/topbraid/shacl/expr/AbstractNodeExpression.java
index 0302f16a..5aff66e2 100644
--- a/src/main/java/org/topbraid/shacl/expr/AbstractNodeExpression.java
+++ b/src/main/java/org/topbraid/shacl/expr/AbstractNodeExpression.java
@@ -1,56 +1,56 @@
package org.topbraid.shacl.expr;
-import java.util.Collections;
-import java.util.List;
-
import org.apache.jena.rdf.model.RDFNode;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.util.iterator.ExtendedIterator;
+import java.util.Collections;
+import java.util.List;
+
public abstract class AbstractNodeExpression implements NodeExpression {
-
- private final static List EMPTY = Collections.emptyList();
- private RDFNode expr;
-
-
- protected AbstractNodeExpression(RDFNode expr) {
- this.expr = expr;
- }
+ private final static List<NodeExpression> EMPTY = Collections.emptyList();
+
+ private final RDFNode expr;
+
+
+ protected AbstractNodeExpression(RDFNode expr) {
+ this.expr = expr;
+ }
- @Override
- public ExtendedIterator evalReverse(RDFNode valueNode, NodeExpressionContext context) {
- throw new IllegalStateException("Reverse evaluation is not supported for this node expression: " + toString());
- }
+ @Override
+ public ExtendedIterator<RDFNode> evalReverse(RDFNode valueNode, NodeExpressionContext context) {
+ throw new IllegalStateException("Reverse evaluation is not supported for this node expression: " + this);
+ }
- @Override
- public List getInputExpressions() {
- return EMPTY;
- }
+ @Override
+ public List<NodeExpression> getInputExpressions() {
+ return EMPTY;
+ }
- @Override
- public Resource getOutputShape(Resource contextShape) {
- return null;
- }
+ @Override
+ public Resource getOutputShape(Resource contextShape) {
+ return null;
+ }
- @Override
- public RDFNode getRDFNode() {
- return expr;
- }
+ @Override
+ public RDFNode getRDFNode() {
+ return expr;
+ }
- @Override
- public boolean isReversible(NodeExpressionContext context) {
- return false;
- }
+ @Override
+ public boolean isReversible(NodeExpressionContext context) {
+ return false;
+ }
- @Override
- public String toString() {
- return getFunctionalSyntax();
- }
+ @Override
+ public String toString() {
+ return getFunctionalSyntax();
+ }
}
diff --git a/src/main/java/org/topbraid/shacl/expr/AbstractSPARQLExpression.java b/src/main/java/org/topbraid/shacl/expr/AbstractSPARQLExpression.java
index ff858369..f65e089f 100644
--- a/src/main/java/org/topbraid/shacl/expr/AbstractSPARQLExpression.java
+++ b/src/main/java/org/topbraid/shacl/expr/AbstractSPARQLExpression.java
@@ -1,9 +1,5 @@
package org.topbraid.shacl.expr;
-import java.util.Collections;
-import java.util.LinkedList;
-import java.util.List;
-
import org.apache.jena.graph.NodeFactory;
import org.apache.jena.query.Query;
import org.apache.jena.query.QueryExecution;
@@ -18,79 +14,81 @@
import org.topbraid.jenax.util.JenaDatatypes;
import org.topbraid.shacl.vocabulary.SH;
+import java.util.Collections;
+import java.util.LinkedList;
+import java.util.List;
+
/**
* Node expressions based on a SPARQL query, identified by sh:select or sh:ask.
- *
+ *
* This node expression type is not part of the SHACL-AF 1.0 document, but a candidate for 1.1.
- *
+ *
* @author Holger Knublauch
*/
public abstract class AbstractSPARQLExpression extends AbstractInputExpression {
-
- private Query query;
-
- private String queryString;
-
-
- protected AbstractSPARQLExpression(Resource expr, Query query, NodeExpression input, String queryString) {
- super(expr, input);
- this.query = query;
- this.queryString = queryString;
- }
-
- @Override
- public ExtendedIterator eval(RDFNode focusNode, NodeExpressionContext context) {
- List focusNodes;
- NodeExpression input = getInput();
- if(input != null) {
- focusNodes = input.eval(focusNode, context).toList();
- }
- else {
- focusNodes = Collections.singletonList(focusNode);
- }
- List results = new LinkedList<>();
- for(RDFNode f : focusNodes) {
- QuerySolutionMap binding = new QuerySolutionMap();
- binding.add(SH.thisVar.getName(), f);
- try(QueryExecution qexec = ARQFactory.get().createQueryExecution(query, context.getDataset(), binding)) {
- if(query.isAskType()) {
- results.add(qexec.execAsk() ? JenaDatatypes.TRUE : JenaDatatypes.FALSE);
- }
- else {
- ResultSet rs = qexec.execSelect();
- String varName = rs.getResultVars().get(0);
- while(rs.hasNext()) {
- RDFNode node = rs.next().get(varName);
- if(node != null) {
- results.add(node);
- }
- }
- }
- }
- }
- return WrappedIterator.create(results.iterator());
- }
+ private Query query;
+
+ private String queryString;
+
+
+ protected AbstractSPARQLExpression(Resource expr, Query query, NodeExpression input, String queryString) {
+ super(expr, input);
+ this.query = query;
+ this.queryString = queryString;
+ }
+
+
+ @Override
+ public ExtendedIterator<RDFNode> eval(RDFNode focusNode, NodeExpressionContext context) {
+ List<RDFNode> focusNodes;
+ NodeExpression input = getInput();
+ if (input != null) {
+ focusNodes = input.eval(focusNode, context).toList();
+ } else {
+ focusNodes = Collections.singletonList(focusNode);
+ }
+ List<RDFNode> results = new LinkedList<>();
+ for (RDFNode f : focusNodes) {
+ QuerySolutionMap binding = new QuerySolutionMap();
+ binding.add(SH.thisVar.getName(), f);
+ try (QueryExecution qexec = ARQFactory.get().createQueryExecution(query, context.getDataset(), binding)) {
+ if (query.isAskType()) {
+ results.add(qexec.execAsk() ? JenaDatatypes.TRUE : JenaDatatypes.FALSE);
+ } else {
+ ResultSet rs = qexec.execSelect();
+ String varName = rs.getResultVars().get(0);
+ while (rs.hasNext()) {
+ RDFNode node = rs.next().get(varName);
+ if (node != null) {
+ results.add(node);
+ }
+ }
+ }
+ }
+ }
+ return WrappedIterator.create(results.iterator());
+ }
+
+
+ @Override
+ public List<String> getFunctionalSyntaxArguments() {
+ List<String> results = new LinkedList<>();
+ results.add(FmtUtils.stringForNode(NodeFactory.createLiteralString(queryString)));
+ NodeExpression input = getInput();
+ if (input != null) {
+ results.add(input.getFunctionalSyntax());
+ }
+ return results;
+ }
+
+
+ public Query getQuery() {
+ return query;
+ }
- @Override
- public List getFunctionalSyntaxArguments() {
- List results = new LinkedList<>();
- results.add(FmtUtils.stringForNode(NodeFactory.createLiteral(queryString)));
- NodeExpression input = getInput();
- if(input != null) {
- results.add(input.getFunctionalSyntax());
- }
- return results;
- }
-
-
- public Query getQuery() {
- return query;
- }
-
-
- public String getQueryString() {
- return queryString;
- }
+ public String getQueryString() {
+ return queryString;
+ }
}
diff --git a/src/main/java/org/topbraid/shacl/expr/ComplexNodeExpression.java b/src/main/java/org/topbraid/shacl/expr/ComplexNodeExpression.java
index 3074aab9..5c25219c 100644
--- a/src/main/java/org/topbraid/shacl/expr/ComplexNodeExpression.java
+++ b/src/main/java/org/topbraid/shacl/expr/ComplexNodeExpression.java
@@ -16,40 +16,40 @@
*/
package org.topbraid.shacl.expr;
+import org.apache.jena.rdf.model.RDFNode;
+
import java.util.Iterator;
import java.util.List;
-import org.apache.jena.rdf.model.RDFNode;
-
public abstract class ComplexNodeExpression extends AbstractNodeExpression {
-
- protected ComplexNodeExpression(RDFNode expr) {
- super(expr);
- }
-
-
- @Override
- public String getFunctionalSyntax() {
- String str = getFunctionalSyntaxName();
- str += "(";
- List<String> args = getFunctionalSyntaxArguments();
- Iterator<String> it = args.iterator();
- while(it.hasNext()) {
- String next = it.next();
- str += next;
- if(it.hasNext()) {
- str += ", ";
- }
- }
- str += ")";
- return str;
- }
-
-
- protected String getFunctionalSyntaxName() {
- return getTypeId().toString();
- }
-
-
- public abstract List<String> getFunctionalSyntaxArguments();
+
+ protected ComplexNodeExpression(RDFNode expr) {
+ super(expr);
+ }
+
+
+ @Override
+ public String getFunctionalSyntax() {
+ String str = getFunctionalSyntaxName();
+ str += "(";
+ List<String> args = getFunctionalSyntaxArguments();
+ Iterator<String> it = args.iterator();
+ while (it.hasNext()) {
+ String next = it.next();
+ str += next;
+ if (it.hasNext()) {
+ str += ", ";
+ }
+ }
+ str += ")";
+ return str;
+ }
+
+
+ protected String getFunctionalSyntaxName() {
+ return getTypeId();
+ }
+
+
+ public abstract List<String> getFunctionalSyntaxArguments();
}
diff --git a/src/main/java/org/topbraid/shacl/expr/PathEvaluator.java b/src/main/java/org/topbraid/shacl/expr/PathEvaluator.java
index 907ea4bb..f99622bd 100644
--- a/src/main/java/org/topbraid/shacl/expr/PathEvaluator.java
+++ b/src/main/java/org/topbraid/shacl/expr/PathEvaluator.java
@@ -16,17 +16,8 @@
*/
package org.topbraid.shacl.expr;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-
import org.apache.jena.graph.Node;
-import org.apache.jena.rdf.model.Literal;
-import org.apache.jena.rdf.model.Model;
-import org.apache.jena.rdf.model.Property;
-import org.apache.jena.rdf.model.RDFNode;
-import org.apache.jena.rdf.model.Resource;
+import org.apache.jena.rdf.model.*;
import org.apache.jena.sparql.path.P_Inverse;
import org.apache.jena.sparql.path.P_Link;
import org.apache.jena.sparql.path.Path;
@@ -38,220 +29,218 @@
import org.topbraid.shacl.engine.ShapesGraph;
import org.topbraid.shacl.expr.lib.DistinctExpression;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
/**
* An object that computes the values of a sh:path node expression.
* This implements consistent handling of inferred values.
- *
+ *
* Inferences are limited to simple forward paths consisting of a single predicate.
- *
+ *
* @author Holger Knublauch
*/
public class PathEvaluator {
-
- private NodeExpression input;
-
- private boolean isInverse;
-
- private Path jenaPath;
-
- private Property predicate;
-
-
- /**
- * Constructs a PathEvaluator for a single "forward" property look-up.
- * @param predicate the predicate
- */
- public PathEvaluator(Property predicate) {
- this.predicate = predicate;
- }
-
-
- /**
- * Constructs a PathEvaluator for an arbitrary SPARQL path (except single forward properties).
- * @param path the path
- * @param shapesModel the shapes Model
- */
- public PathEvaluator(Path path, Model shapesModel) {
- this.jenaPath = path;
- isInverse = jenaPath instanceof P_Inverse && ((P_Inverse)jenaPath).getSubPath() instanceof P_Link;
- if(isInverse) {
- P_Link link = (P_Link) ((P_Inverse)jenaPath).getSubPath();
- predicate = shapesModel.getProperty(link.getNode().getURI());
- }
- }
+
+ private NodeExpression input;
+
+ private boolean isInverse;
+
+ private Path jenaPath;
+
+ private Property predicate;
+
+
+ /**
+ * Constructs a PathEvaluator for a single "forward" property look-up.
+ *
+ * @param predicate the predicate
+ */
+ public PathEvaluator(Property predicate) {
+ this.predicate = predicate;
+ }
+
+
+ /**
+ * Constructs a PathEvaluator for an arbitrary SPARQL path (except single forward properties).
+ *
+ * @param path the path
+ * @param shapesModel the shapes Model
+ */
+ public PathEvaluator(Path path, Model shapesModel) {
+ this.jenaPath = path;
+ isInverse = jenaPath instanceof P_Inverse && ((P_Inverse) jenaPath).getSubPath() instanceof P_Link;
+ if (isInverse) {
+ P_Link link = (P_Link) ((P_Inverse) jenaPath).getSubPath();
+ predicate = shapesModel.getProperty(link.getNode().getURI());
+ }
+ }
+
+
+ public ExtendedIterator<RDFNode> eval(RDFNode focusNode, NodeExpressionContext context) {
+ if (input == null) {
+ ExtendedIterator<RDFNode> asserted = evalFocusNode(focusNode, context);
+ return withDefaultValues(withInferences(asserted, focusNode, context), focusNode, context);
+ } else {
+ Iterator<RDFNode> it = input.eval(focusNode, context);
+ if (it.hasNext()) {
+ RDFNode first = it.next();
+ ExtendedIterator<RDFNode> result = withDefaultValues(withInferences(evalFocusNode(first, context), first, context), first, context);
+ while (it.hasNext()) {
+ RDFNode n = it.next();
+ result = result.andThen(withDefaultValues(withInferences(evalFocusNode(n, context), n, context), first, context));
+ }
+ return result;
+ } else {
+ return WrappedIterator.emptyIterator();
+ }
+ }
+ }
+
+
+ public ExtendedIterator<RDFNode> evalReverse(RDFNode valueNode, NodeExpressionContext context) {
+ // See isReversible, this only supports trivial cases for now
+ if (isInverse) {
+ if (valueNode instanceof Literal) {
+ return WrappedIterator.emptyIterator();
+ } else {
+ return context.getDataset().getDefaultModel().listObjectsOfProperty((Resource) valueNode, predicate);
+ }
+ } else {
+ return context.getDataset().getDefaultModel().listSubjectsWithProperty(predicate, valueNode).mapWith(r -> (RDFNode) r);
+ }
+ }
+
+
+ /**
+ * Gets the executed Jena Path or null if this is just a simple forward property.
+ *
+ * @return the executed Jena Path
+ */
+ public Path getJenaPath() {
+ return jenaPath;
+ }
+
+
+ /**
+ * Gets the predicate if this is a simple forward property path.
+ * Returns null for inverse paths.
+ *
+ * @return the predicate or null
+ */
+ public Property getPredicate() {
+ if (predicate != null && !isInverse) {
+ return predicate;
+ } else {
+ return null;
+ }
+ }
+
+
+ /**
+ * Checks if the values of this may be inferred.
+ * This is the case if this uses a single forward property path and there are any sh:values or sh:defaultValue statements on
+ * that predicate in the provided shapes graph.
+ * The actual computation on whether the values are inferred depends on the actual focus node, which is why this is
+ * only a "maybe".
+ * This function may be used to exclude optimizations that are possible if we know that no inferences can exist.
+ *
+ * @param shapesGraph the ShapesGraph (which caches previous results)
+ * @return true if there may be sh:values statements
+ */
+ public boolean isMaybeInferred(ShapesGraph shapesGraph) {
+ if (predicate != null && !isInverse) {
+ return !shapesGraph.getValuesNodeExpressionsMap(predicate).isEmpty() || !shapesGraph.getDefaultValueNodeExpressionsMap(predicate).isEmpty();
+ } else {
+ return false;
+ }
+ }
+
+
+ public boolean isReversible(ShapesGraph shapesGraph) {
+ // Very conservative algorithm for now
+ return input == null && !isMaybeInferred(shapesGraph) && jenaPath == null;
+ }
+
+
+ public void setInput(NodeExpression input) {
+ this.input = input;
+ }
+
+
+ private ExtendedIterator