diff --git a/.gitattributes b/.gitattributes index 53f1bbb5..0a23fec6 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,7 +1,7 @@ # By default, normalize line endings * text=auto -# Force microsofty files to use CRLF +# Force microsoft files to use CRLF *.{cmd,[cC][mM][dD]} text eol=crlf *.{bat,[bB][aA][tT]} text eol=crlf *.{reg,[rR][eE][gG]} text eol=crlf diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 8665a113..1ba1706d 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -25,10 +25,10 @@ jobs: include: - package: "x86_64" platform: linux/amd64 - base: eclipse-temurin:11-alpine + base: eclipse-temurin:21-alpine - package: "arm64" platform: linux/arm64 - base: amazoncorretto:11-alpine3.18-jdk + base: amazoncorretto:21-alpine3.20-jdk steps: - name: lowercase image name diff --git a/.github/workflows/maven-test-pr.yml b/.github/workflows/maven-test-pr.yml index c2d826f6..d42d8301 100644 --- a/.github/workflows/maven-test-pr.yml +++ b/.github/workflows/maven-test-pr.yml @@ -7,13 +7,18 @@ on: jobs: test: runs-on: ubuntu-latest + strategy: + matrix: + distribution: [ "temurin", "corretto" ] + java: [ "17", "21" ] + name: Testing with Java ${{ matrix.java }} (${{ matrix.distribution }}) steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Set up JDK uses: actions/setup-java@8df1039502a15bceb9433410b1a100fbe190c53b # v4.5.0 with: - java-version: "11" - distribution: "temurin" + distribution: ${{ matrix.distribution }} + java-version: ${{ matrix.java }} - name: Test with Maven - run: mvn -B test + run: mvn -B test \ No newline at end of file diff --git a/pom.xml b/pom.xml index cbfef6f7..902f47cd 100644 --- a/pom.xml +++ b/pom.xml @@ -68,11 +68,11 @@ UTF-8 - 4.10.0 + 5.2.0 4.13.2 1.7.36 2.20.0 - 11 + 17 diff --git a/src/main/java/org/topbraid/jenax/functions/AbstractFunction.java b/src/main/java/org/topbraid/jenax/functions/AbstractFunction.java index c1a51cb5..64ed35a0 100644 --- a/src/main/java/org/topbraid/jenax/functions/AbstractFunction.java +++ b/src/main/java/org/topbraid/jenax/functions/AbstractFunction.java @@ -31,6 +31,7 @@ import org.apache.jena.sparql.function.Function; import org.apache.jena.sparql.function.FunctionEnv; import org.apache.jena.sparql.sse.SSE; +import org.apache.jena.sparql.util.Context; import org.apache.jena.sparql.util.FmtUtils; import org.topbraid.jenax.statistics.ExecStatistics; import org.topbraid.jenax.statistics.ExecStatisticsManager; @@ -44,10 +45,8 @@ public abstract class AbstractFunction implements Function { @Override - public void build(String uri, ExprList args) { - } + public void build(String uri, ExprList args, Context context){} - @Override public NodeValue exec(Binding binding, ExprList args, String uri, FunctionEnv env) { Node[] nodes = new Node[args.size()]; diff --git a/src/main/java/org/topbraid/jenax/progress/ProgressMonitor.java b/src/main/java/org/topbraid/jenax/progress/ProgressMonitor.java index 36f661e9..a445a2e3 100644 --- a/src/main/java/org/topbraid/jenax/progress/ProgressMonitor.java +++ b/src/main/java/org/topbraid/jenax/progress/ProgressMonitor.java @@ -21,60 +21,66 @@ /** * Inspired by the Eclipse IProgressMonitor, this interface supports monitoring long-running processes with intermediate * status messages and the ability to cancel. 
- * + * * @author Holger Knublauch */ public interface ProgressMonitor { - /** - * Typically used by the (long-running) process to determine whether the user has requested cancellation. - * The process should then find a suitable, clean termination. - * @return true if cancel was requested - */ - boolean isCanceled(); - - - /** - * Informs the progress monitor that a new task has been started, with a given number of expected steps. - * A UI connected to the ProgressMonitor would typically display something like a progress bar and the task name. - * @param label the name of the task - * @param totalWork the number of steps (see worked) that is expected to be needed to complete the task - */ - void beginTask(String label, int totalWork); - - - /** - * Informs the progress monitor that all is completed. - */ - void done(); - - - /** - * Typically called from a parallel thread triggered by the user, this informs the progress monitor that it needs to - * return true for isCanceled. - * Once a process has been canceled, it should not be un-canceled. - * @param value true if canceled - */ - void setCanceled(boolean value); - - - /** - * Changes the name or label of the current task. - * @param value - */ - void setTaskName(String value); - - - /** - * Sets the label that serves as sub-task, typically printed under the main task name. - * @param label the subtask label - */ - void subTask(String label); - - - /** - * Informs the progress monitor that one or more steps have been completed towards the current task (see beginTask). - * @param amount the number of steps completed - */ - void worked(int amount); + /** + * Typically used by the (long-running) process to determine whether the user has requested cancellation. + * The process should then find a suitable, clean termination. + * + * @return true if cancel was requested + */ + boolean isCanceled(); + + + /** + * Informs the progress monitor that a new task has been started, with a given number of expected steps. + * A UI connected to the ProgressMonitor would typically display something like a progress bar and the task name. + * + * @param label the name of the task + * @param totalWork the number of steps (see worked) that is expected to be needed to complete the task + */ + void beginTask(String label, int totalWork); + + + /** + * Informs the progress monitor that all is completed. + */ + void done(); + + + /** + * Typically called from a parallel thread triggered by the user, this informs the progress monitor that it needs to + * return true for isCanceled. + * Once a process has been canceled, it should not be un-canceled. + * + * @param value true if canceled + */ + void setCanceled(boolean value); + + + /** + * Changes the name or label of the current task. + * + * @param value the task name + */ + void setTaskName(String value); + + + /** + * Sets the label that serves as sub-task, typically printed under the main task name. + * + * @param label the subtask label + */ + void subTask(String label); + + + /** + * Informs the progress monitor that one or more steps have been completed towards the current task (see beginTask). 
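// Illustrative usage sketch (not part of this patch): how a long-running process would typically
// drive a ProgressMonitor, using only the methods declared by this interface. The class and
// method names below are hypothetical.
import java.util.List;

import org.apache.jena.rdf.model.Resource;
import org.topbraid.jenax.progress.ProgressMonitor;

class ProgressMonitorUsageSketch {
    void processAll(List<Resource> items, ProgressMonitor monitor) {
        monitor.beginTask("Processing resources", items.size());
        for (Resource item : items) {
            if (monitor.isCanceled()) {
                return; // terminate cleanly once cancellation has been requested
            }
            monitor.subTask(String.valueOf(item));
            // ... the actual per-item work would happen here ...
            monitor.worked(1);
        }
        monitor.done();
    }
}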
+ * + * @param amount the number of steps completed + */ + void worked(int amount); } diff --git a/src/main/java/org/topbraid/jenax/util/DiffGraph.java b/src/main/java/org/topbraid/jenax/util/DiffGraph.java index 75c77542..138af200 100644 --- a/src/main/java/org/topbraid/jenax/util/DiffGraph.java +++ b/src/main/java/org/topbraid/jenax/util/DiffGraph.java @@ -1,225 +1,218 @@ package org.topbraid.jenax.util; -import java.util.HashSet; -import java.util.Set; - import org.apache.jena.graph.Graph; +import org.apache.jena.graph.GraphMemFactory; import org.apache.jena.graph.Node; import org.apache.jena.graph.Triple; import org.apache.jena.graph.impl.GraphMatcher; import org.apache.jena.graph.impl.GraphWithPerform; -import org.apache.jena.mem.GraphMem; import org.apache.jena.shared.PrefixMapping; import org.apache.jena.shared.impl.PrefixMappingImpl; import org.apache.jena.util.iterator.ExtendedIterator; +import java.util.HashSet; +import java.util.Set; + /** * A WrappedGraph that filters out deleted triples or adds added triples, without * modifying the underlying base graph. - * + *
* This class is for single-threaded use only, typically used as a temporary graph layer on top of an existing * graph for the duration of some algorithm. - * +
* This runs in two modes, based on the updateBaseGraph flag. - * +
* By default/legacy (false) the system will only add triples that exist in none of the subgraphs of the delegate graph * and claim to delete triples even if they exist in subgraphs only. - * +
* If true, the adds will always be applied even if one of the subgraphs already contains the triple. * This is making sure that transformations will always produce all requested triples. * Furthermore this mode is more correct w.r.t. deletes because it will only allow deleting triples from the editable graph. - * + * * @author Holger Knublauch */ public class DiffGraph extends TransparentWrappedGraph { - /** - * This graph has additional triples that are not in the delegate. - */ - protected GraphWithPerform addedGraph = new GraphMem(); - - /** - * This Set has triples that are in the delegate but are excluded from the filtered graph. - */ - protected Set deletedTriples = new HashSet<>(); - - private PrefixMapping pm; - - // The graph that the triples will be added to - private Graph updateableGraph; - - - public DiffGraph(Graph delegate) { - this(delegate, false); - } - - - public DiffGraph(Graph delegate, boolean updateBaseGraph) { - super(delegate); - if(updateBaseGraph) { - updateableGraph = JenaUtil.getBaseGraph(delegate); - } - else { - updateableGraph = delegate; - } - } - - - @Override - public void add(Triple t) { - performAdd(t); - } - - - @Override - public void delete(Triple t) { - performDelete(t); - } - - - public Graph getAddedGraph() { - return addedGraph; - } - - - @Override - public boolean contains(Node s, Node p, Node o) { - return contains(Triple.create(s == null ? Node.ANY : s, p == null ? Node.ANY : p, o == null ? Node.ANY : o)); - } - - - @Override - public boolean contains(Triple t) { - if(addedGraph.contains(t)) { - return true; - } - else { - ExtendedIterator it = base.find(t); - while(it.hasNext()) { - Triple n = it.next(); - if(!deletedTriples.contains(n)) { - it.close(); - return true; - } - } - return false; - } - } - - - // TODO: If the delegate does not use equals for add and delete - // but sameValueAs then this code is incorrect. - // Specifically we should be able to show bugs with TDB which does - // something different from either equals or sameValueAs. - protected boolean containsByEquals(Graph g, Triple t) { - ExtendedIterator it = g.find(t); - try { - while (it.hasNext()) { - if (t.equals(it.next())) - return true; - } - } - finally { - it.close(); - } - return false; - } - - - @Override - public ExtendedIterator find(Node s, Node p, Node o) { - - // First get the underlying base query (without any buffered triples) - ExtendedIterator base = super.find(s, p, o); - - // If deleted triples exist then continue with a filtered iterator - if(deletedTriples.size() > 0) { - // base without deleted triples. - base = base.filterDrop(deletedTriples::contains); - } - - // If added triples exist then chain the two together - // this iterator supports remove and removes correctly for this graph - ExtendedIterator added = addedGraph.find(s, p, o); - if(added.hasNext()) { - return base.andThen(added); // base and added are guaranteed disjoint - } - else { - return base; - } - } - - - @Override - public ExtendedIterator find(Triple m) { - return find(m.getMatchSubject(), m.getMatchPredicate(), m.getMatchObject()); - } - - - public Set getDeletedTriples() { - return deletedTriples; - } - - - @Override - public PrefixMapping getPrefixMapping() { - if (pm == null) { - // copy delegate's prefix mapping. 
- pm = new PrefixMappingImpl().setNsPrefixes(base.getPrefixMapping()); - } - return pm; - } - - - @Override - public boolean isEmpty() { - if (!addedGraph.isEmpty()) { - return false; - } - if (deletedTriples.isEmpty()) { - return base.isEmpty(); - } - ExtendedIterator it = find(Triple.ANY); - try { - return !it.hasNext(); - } - finally { - it.close(); - } - } - - + /** + * This graph has additional triples that are not in the delegate. + */ + protected GraphWithPerform addedGraph = (GraphWithPerform) GraphMemFactory.createDefaultGraph(); + + /** + * This Set has triples that are in the delegate but are excluded from the filtered graph. + */ + protected Set deletedTriples = new HashSet<>(); + + private PrefixMapping pm; + + // The graph that the triples will be added to + private Graph updateableGraph; + + + public DiffGraph(Graph delegate) { + this(delegate, false); + } + + + public DiffGraph(Graph delegate, boolean updateBaseGraph) { + super(delegate); + if (updateBaseGraph) { + updateableGraph = JenaUtil.getBaseGraph(delegate); + } else { + updateableGraph = delegate; + } + } + + + @Override + public void add(Triple t) { + performAdd(t); + } + + + @Override + public void delete(Triple t) { + performDelete(t); + } + + + public Graph getAddedGraph() { + return addedGraph; + } + + + @Override + public boolean contains(Node s, Node p, Node o) { + return contains(Triple.create(s == null ? Node.ANY : s, p == null ? Node.ANY : p, o == null ? Node.ANY : o)); + } + + @Override - public boolean isIsomorphicWith(Graph g) { - return g != null && GraphMatcher.equals(this, g); - } - - - @Override - public void performAdd(Triple t) { - if (deletedTriples.contains(t)) { - deletedTriples.remove(t); - } - else if(!containsByEquals(addedGraph, t) && !containsByEquals(updateableGraph, t)) { - addedGraph.add(t); - } - } - - - @Override - public void performDelete(Triple t) { - if(containsByEquals(addedGraph, t)) { - addedGraph.delete(t); - } - else if(containsByEquals(updateableGraph, t)) { - deletedTriples.add(t); - } - } - - - @Override - public int size() { - return super.size() - deletedTriples.size() + addedGraph.size(); - } + public boolean contains(Triple t) { + if (addedGraph.contains(t)) { + return true; + } else { + ExtendedIterator it = base.find(t); + while (it.hasNext()) { + Triple n = it.next(); + if (!deletedTriples.contains(n)) { + it.close(); + return true; + } + } + return false; + } + } + + + // TODO: If the delegate does not use equals for add and delete + // but sameValueAs then this code is incorrect. + // Specifically we should be able to show bugs with TDB which does + // something different from either equals or sameValueAs. + protected boolean containsByEquals(Graph g, Triple t) { + ExtendedIterator it = g.find(t); + try { + while (it.hasNext()) { + if (t.equals(it.next())) + return true; + } + } finally { + it.close(); + } + return false; + } + + + @Override + public ExtendedIterator find(Node s, Node p, Node o) { + + // First get the underlying base query (without any buffered triples) + ExtendedIterator base = super.find(s, p, o); + + // If deleted triples exist then continue with a filtered iterator + if (deletedTriples.size() > 0) { + // base without deleted triples. 
+ base = base.filterDrop(deletedTriples::contains); + } + + // If added triples exist then chain the two together + // this iterator supports remove and removes correctly for this graph + ExtendedIterator added = addedGraph.find(s, p, o); + if (added.hasNext()) { + return base.andThen(added); // base and added are guaranteed disjoint + } else { + return base; + } + } + + + @Override + public ExtendedIterator find(Triple m) { + return find(m.getMatchSubject(), m.getMatchPredicate(), m.getMatchObject()); + } + + + public Set getDeletedTriples() { + return deletedTriples; + } + + + @Override + public PrefixMapping getPrefixMapping() { + if (pm == null) { + // copy delegate's prefix mapping. + pm = new PrefixMappingImpl().setNsPrefixes(base.getPrefixMapping()); + } + return pm; + } + + + @Override + public boolean isEmpty() { + if (!addedGraph.isEmpty()) { + return false; + } + if (deletedTriples.isEmpty()) { + return base.isEmpty(); + } + ExtendedIterator it = find(Triple.ANY); + try { + return !it.hasNext(); + } finally { + it.close(); + } + } + + + @Override + public boolean isIsomorphicWith(Graph g) { + return g != null && GraphMatcher.equals(this, g); + } + + + @Override + public void performAdd(Triple t) { + if (deletedTriples.contains(t)) { + deletedTriples.remove(t); + } else if (!containsByEquals(addedGraph, t) && !containsByEquals(updateableGraph, t)) { + addedGraph.add(t); + } + } + + + @Override + public void performDelete(Triple t) { + if (containsByEquals(addedGraph, t)) { + addedGraph.delete(t); + } else if (containsByEquals(updateableGraph, t)) { + deletedTriples.add(t); + } + } + + + @Override + public int size() { + return super.size() - deletedTriples.size() + addedGraph.size(); + } } diff --git a/src/main/java/org/topbraid/jenax/util/GraphNotFoundException.java b/src/main/java/org/topbraid/jenax/util/GraphNotFoundException.java index 0e8a470c..e1597b44 100644 --- a/src/main/java/org/topbraid/jenax/util/GraphNotFoundException.java +++ b/src/main/java/org/topbraid/jenax/util/GraphNotFoundException.java @@ -19,17 +19,16 @@ /** * An Exception thrown if a named graph could not be resolved * while setting the default graph of a dataset. - * + *
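// Illustrative usage sketch for the DiffGraph class shown in the hunk above (not part of this patch):
// additions and deletions are buffered in the wrapper and never modify the underlying base graph.
import org.apache.jena.graph.Graph;
import org.apache.jena.graph.NodeFactory;
import org.apache.jena.graph.Triple;
import org.apache.jena.rdf.model.ModelFactory;
import org.topbraid.jenax.util.DiffGraph;

class DiffGraphUsageSketch {
    static void example() {
        Graph base = ModelFactory.createDefaultModel().getGraph();
        Triple t = Triple.create(
                NodeFactory.createURI("http://example.org/s"),
                NodeFactory.createURI("http://example.org/p"),
                NodeFactory.createURI("http://example.org/o"));
        base.add(t);

        // updateBaseGraph = true: deletes are only recorded for triples in the editable base graph
        DiffGraph diff = new DiffGraph(base, true);
        diff.delete(t);                                 // recorded in getDeletedTriples()
        boolean stillInBase = base.contains(t);         // true - the base graph is untouched
        boolean visibleThroughDiff = diff.contains(t);  // false - filtered out by the wrapper
    }
}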
* This is subclassing RuntimeException because otherwise a lot of * existing code would have to catch GraphNotFoundException * (where it would otherwise have crashed with a NullPointerException anyway). - * + * * @author Holger Knublauch */ -@SuppressWarnings("serial") public class GraphNotFoundException extends RuntimeException { - public GraphNotFoundException(String message) { - super(message); - } + public GraphNotFoundException(String message) { + super(message); + } } diff --git a/src/main/java/org/topbraid/jenax/util/JenaUtil.java b/src/main/java/org/topbraid/jenax/util/JenaUtil.java index b092f064..0a8c069b 100644 --- a/src/main/java/org/topbraid/jenax/util/JenaUtil.java +++ b/src/main/java/org/topbraid/jenax/util/JenaUtil.java @@ -17,41 +17,15 @@ package org.topbraid.jenax.util; -import java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.function.BiFunction; - import org.apache.jena.enhanced.EnhGraph; -import org.apache.jena.graph.Factory; import org.apache.jena.graph.Graph; +import org.apache.jena.graph.GraphMemFactory; import org.apache.jena.graph.Node; import org.apache.jena.graph.compose.MultiUnion; import org.apache.jena.ontology.OntModel; import org.apache.jena.ontology.OntModelSpec; -import org.apache.jena.query.ARQ; -import org.apache.jena.query.Dataset; -import org.apache.jena.query.Query; -import org.apache.jena.query.QueryExecution; -import org.apache.jena.query.QuerySolution; -import org.apache.jena.query.QuerySolutionMap; -import org.apache.jena.query.ResultSet; -import org.apache.jena.rdf.model.Literal; -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.ModelFactory; -import org.apache.jena.rdf.model.NodeIterator; -import org.apache.jena.rdf.model.Property; -import org.apache.jena.rdf.model.RDFList; -import org.apache.jena.rdf.model.RDFNode; -import org.apache.jena.rdf.model.Resource; -import org.apache.jena.rdf.model.Statement; -import org.apache.jena.rdf.model.StmtIterator; +import org.apache.jena.query.*; +import org.apache.jena.rdf.model.*; import org.apache.jena.rdf.model.impl.PropertyImpl; import org.apache.jena.rdf.model.impl.StmtIteratorImpl; import org.apache.jena.shared.PrefixMapping; @@ -62,14 +36,7 @@ import org.apache.jena.sparql.engine.binding.Binding; import org.apache.jena.sparql.engine.binding.BindingBuilder; import org.apache.jena.sparql.engine.binding.BindingRoot; -import org.apache.jena.sparql.expr.E_Function; -import org.apache.jena.sparql.expr.Expr; -import org.apache.jena.sparql.expr.ExprEvalException; -import org.apache.jena.sparql.expr.ExprList; -import org.apache.jena.sparql.expr.ExprTransform; -import org.apache.jena.sparql.expr.ExprTransformer; -import org.apache.jena.sparql.expr.ExprVar; -import org.apache.jena.sparql.expr.NodeValue; +import org.apache.jena.sparql.expr.*; import org.apache.jena.sparql.expr.nodevalue.NodeFunctions; import org.apache.jena.sparql.function.FunctionEnv; import org.apache.jena.sparql.graph.NodeTransform; @@ -78,8 +45,8 @@ import org.apache.jena.sparql.syntax.syntaxtransform.ExprTransformNodeElement; import org.apache.jena.sparql.syntax.syntaxtransform.QueryTransformOps; import org.apache.jena.sparql.util.Context; +import org.apache.jena.sparql.util.NodeCmp; import org.apache.jena.sparql.util.NodeFactoryExtra; -import org.apache.jena.sparql.util.NodeUtils; import 
org.apache.jena.util.iterator.ExtendedIterator; import org.apache.jena.vocabulary.OWL; import org.apache.jena.vocabulary.RDF; @@ -87,1191 +54,1196 @@ import org.apache.jena.vocabulary.XSD; import org.topbraid.jenax.progress.ProgressMonitor; +import java.util.*; +import java.util.function.BiFunction; + /** * Some convenience methods to operate on Jena Models. - * + *
* These methods are not as stable as the rest of the API, but * they may be of general use. - * + * * @author Holger Knublauch */ public class JenaUtil { - // Unstable - private static JenaUtilHelper helper = new JenaUtilHelper(); - - // Leave this line under the helper line above! - private static Model dummyModel = JenaUtil.createDefaultModel(); - - public static final String WITH_IMPORTS_PREFIX = "http://rdfex.org/withImports?uri="; - - - - /** - * Sets the helper which allows the behavior of some JenaUtil - * methods to be modified by the system using the SPIN library. - * Note: Should not be used outside of TopBraid - not stable. - * @param h the JenaUtilHelper - * @return the old helper - */ - public static JenaUtilHelper setHelper(JenaUtilHelper h) { - JenaUtilHelper old = helper; - helper = h; - return old; - } - - - /** - * Gets the current helper object. - * Note: Should not be used outside of TopBraid - not stable. - * @return the helper - */ - public final static JenaUtilHelper getHelper() { - return helper; - } - - - /** - * Populates a result set of resources reachable from a subject via zero or more steps with a given predicate. - * Implementation note: the results set need only implement {@link Collection#add(Object)}. - * @param results The transitive objects reached from subject via triples with the given predicate - * @param subject the subject to start traversal at - * @param predicate the predicate to walk - */ - public static void addTransitiveObjects(Set results, Resource subject, Property predicate) { - helper.setGraphReadOptimization(true); - try { - addTransitiveObjects(results, new HashSet(), subject, predicate); - } - finally { - helper.setGraphReadOptimization(false); - } - } - - - private static void addTransitiveObjects(Set resources, Set reached, - Resource subject, Property predicate) { - resources.add(subject); - reached.add(subject); - StmtIterator it = subject.listProperties(predicate); - try { - while (it.hasNext()) { - RDFNode object = it.next().getObject(); - if (object instanceof Resource) { - if (!reached.contains(object)) { - addTransitiveObjects(resources, reached, (Resource)object, predicate); - } - } - } - } - finally { - it.close(); - } - } - - - private static void addTransitiveSubjects(Set reached, Resource object, - Property predicate, ProgressMonitor monitor) { - if (object != null) { - reached.add(object); - StmtIterator it = object.getModel().listStatements(null, predicate, object); - try { - while (it.hasNext()) { - if (monitor != null && monitor.isCanceled()) { - it.close(); - return; - } - Resource subject = it.next().getSubject(); - if (!reached.contains(subject)) { - addTransitiveSubjects(reached, subject, predicate, monitor); - } - } - } - finally { - it.close(); - } - } - } - - - /** - * Turns a QuerySolution into a Binding. - * @param map the input QuerySolution - * @return a Binding or null if the input is null - */ - public static Binding asBinding(final QuerySolution map) { - if(map != null) { - BindingBuilder builder = BindingBuilder.create(); - Iterator varNames = map.varNames(); - while(varNames.hasNext()) { - String varName = varNames.next(); - RDFNode node = map.get(varName); - if(node != null) { - builder.add(Var.alloc(varName), node.asNode()); - } - } - return builder.build(); - } - else { - return null; - } - } - - - /** - * Turns a Binding into a QuerySolutionMap. 
- * @param binding the Binding to convert - * @return a QuerySolutionMap - */ - public static QuerySolutionMap asQuerySolutionMap(Binding binding) { - QuerySolutionMap map = new QuerySolutionMap(); - Iterator vars = binding.vars(); - while(vars.hasNext()) { - Var var = vars.next(); - Node node = binding.get(var); - if(node != null) { - map.add(var.getName(), dummyModel.asRDFNode(node)); - } - } - return map; - } - - - /** - * Returns a set of resources reachable from an object via one or more reversed steps with a given predicate. - * @param object the object to start traversal at - * @param predicate the predicate to walk - * @param monitor an optional progress monitor to allow cancelation - * @return the reached resources - */ - public static Set getAllTransitiveSubjects(Resource object, Property predicate, ProgressMonitor monitor) { - Set set = new HashSet<>(); - helper.setGraphReadOptimization(true); - try { - addTransitiveSubjects(set, object, predicate, monitor); - } - finally { - helper.setGraphReadOptimization(false); - } - set.remove(object); - return set; - } - - - /** - * Casts a Resource into a Property. - * @param resource the Resource to cast - * @return resource as an instance of Property - */ - public static Property asProperty(Resource resource) { - if(resource instanceof Property) { - return (Property) resource; - } - else { - return new PropertyImpl(resource.asNode(), (EnhGraph)resource.getModel()); - } - } - - - public static void collectBaseGraphs(Graph graph, Set baseGraphs) { - if(graph instanceof MultiUnion) { - MultiUnion union = (MultiUnion)graph; - collectBaseGraphs(union.getBaseGraph(), baseGraphs); - for(Object subGraph : union.getSubGraphs()) { - collectBaseGraphs((Graph)subGraph, baseGraphs); - } - } - else if(graph != null) { - baseGraphs.add(graph); - } - } - - - /** - * Creates a new Graph. By default this will deliver a plain in-memory graph, - * but other implementations may deliver graphs with concurrency support and - * other features. - * @return a default graph - * @see #createDefaultModel() - */ - public static Graph createDefaultGraph() { - return helper.createDefaultGraph(); - } - - - /** - * Wraps the result of {@link #createDefaultGraph()} into a Model and initializes namespaces. - * @return a default Model - * @see #createDefaultGraph() - */ - public static Model createDefaultModel() { - Model m = ModelFactory.createModelForGraph(createDefaultGraph()); - initNamespaces(m); - return m; - } - - - /** - * Creates a memory Graph with no reification. - * @return a new memory graph - */ - public static Graph createMemoryGraph() { - return Factory.createDefaultGraph(); - } - - - /** - * Creates a memory Model with no reification. - * @return a new memory Model - */ - public static Model createMemoryModel() { - return ModelFactory.createModelForGraph(createMemoryGraph()); - } - - - public static MultiUnion createMultiUnion() { - return helper.createMultiUnion(); - } - - - public static MultiUnion createMultiUnion(Graph[] graphs) { - return helper.createMultiUnion(graphs); - } - - - public static MultiUnion createMultiUnion(Iterator graphs) { - return helper.createMultiUnion(graphs); - } - - - /** - * Gets all instances of a given class and its subclasses. 
- * @param cls the class to get the instances of - * @return the instances - */ - public static Set getAllInstances(Resource cls) { - JenaUtil.setGraphReadOptimization(true); - try { - Model model = cls.getModel(); - Set classes = getAllSubClasses(cls); - classes.add(cls); - Set results = new HashSet<>(); - for(Resource subClass : classes) { - StmtIterator it = model.listStatements(null, RDF.type, subClass); - while (it.hasNext()) { - results.add(it.next().getSubject()); - } - } - return results; - } - finally { - JenaUtil.setGraphReadOptimization(false); - } - } - - - public static Set getAllSubClasses(Resource cls) { - return getAllTransitiveSubjects(cls, RDFS.subClassOf); - } - - - /** - * Returns a set consisting of a given class and all its subclasses. - * Similar to rdfs:subClassOf*. - * @param cls the class to return with its subclasses - * @return the Set of class resources - */ - public static Set getAllSubClassesStar(Resource cls) { - Set results = getAllTransitiveSubjects(cls, RDFS.subClassOf); - results.add(cls); - return results; - } - - - public static Set getAllSubProperties(Property superProperty) { - return getAllTransitiveSubjects(superProperty, RDFS.subPropertyOf); - } - - - public static Set getAllSuperClasses(Resource cls) { - return getAllTransitiveObjects(cls, RDFS.subClassOf); - } - - - /** - * Returns a set consisting of a given class and all its superclasses. - * Similar to rdfs:subClassOf*. - * @param cls the class to return with its superclasses - * @return the Set of class resources - */ - public static Set getAllSuperClassesStar(Resource cls) { - Set results = getAllTransitiveObjects(cls, RDFS.subClassOf); - results.add(cls); - return results; - } - - - public static Set getAllSuperProperties(Property subProperty) { - return getAllTransitiveObjects(subProperty, RDFS.subPropertyOf); - } - - - /** - * Returns a set of resources reachable from a subject via one or more steps with a given predicate. - * @param subject the subject to start at - * @param predicate the predicate to traverse - * @return the reached resources - */ - public static Set getAllTransitiveObjects(Resource subject, Property predicate) { - Set set = new HashSet<>(); - addTransitiveObjects(set, subject, predicate); - set.remove(subject); - return set; - } - - - private static Set getAllTransitiveSubjects(Resource object, Property predicate) { - return getAllTransitiveSubjects(object, predicate, null); - } - - - public static Set getAllTypes(Resource instance) { - Set types = new HashSet<>(); - StmtIterator it = instance.listProperties(RDF.type); - try { - while (it.hasNext()) { - Resource type = it.next().getResource(); - types.add(type); - types.addAll(getAllSuperClasses(type)); - } - } - finally { - it.close(); - } - return types; - } - - - /** - * Gets the "base graph" of a Model, walking into MultiUnions if needed. 
- * @param model the Model to get the base graph of - * @return the base graph or null if the model contains a MultiUnion that doesn't declare one - */ - public static Graph getBaseGraph(final Model model) { - return getBaseGraph(model.getGraph()); - } - - - public static Graph getBaseGraph(Graph graph) { - Graph baseGraph = graph; - while(baseGraph instanceof MultiUnion) { - baseGraph = ((MultiUnion)baseGraph).getBaseGraph(); - } - return baseGraph; - } - - - public static Model getBaseModel(Model model) { - Graph baseGraph = getBaseGraph(model); - if(baseGraph == model.getGraph()) { - return model; - } - else { - return ModelFactory.createModelForGraph(baseGraph); - } - } - - - /** - * For a given subject resource and a given collection of (label/comment) properties this finds the most - * suitable value of either property for a given list of languages (usually from the current user's preferences). - * For example, if the user's languages are [ "en-AU" ] then the function will prefer "mate"@en-AU over - * "friend"@en and never return "freund"@de. The function falls back to literals that have no language - * if no better literal has been found. - * @param resource the subject resource - * @param langs the allowed languages - * @param properties the properties to check - * @return the best suitable value or null - */ - public static Literal getBestStringLiteral(Resource resource, List langs, Iterable properties) { - return getBestStringLiteral(resource, langs, properties, (r,p) -> r.listProperties(p)); - } - - - public static Literal getBestStringLiteral(Resource resource, List langs, Iterable properties, BiFunction> getter) { - String prefLang = langs.isEmpty() ? null : langs.get(0); - Literal label = null; - int bestLang = -1; - for(Property predicate : properties) { - ExtendedIterator it = getter.apply(resource, predicate); - while(it.hasNext()) { - RDFNode object = it.next().getObject(); - if(object.isLiteral()) { - Literal literal = (Literal)object; - String lang = literal.getLanguage(); - if(lang.length() == 0 && label == null) { - label = literal; - } - else if(prefLang != null && prefLang.equalsIgnoreCase(lang)) { - it.close(); - return literal; - } - else { - // 1) Never use a less suitable language - // 2) Never replace an already existing label (esp: skos:prefLabel) unless new lang is better - // 3) Fall back to more special languages if no other was found (e.g. use en-GB if only "en" is accepted) - int startLang = bestLang < 0 ? langs.size() - 1 : (label != null ? bestLang - 1 : bestLang); - for(int i = startLang; i > 0; i--) { - String langi = langs.get(i); - if(langi.equalsIgnoreCase(lang)) { - label = literal; - bestLang = i; - } - else if(label == null && lang.contains("-") && NodeFunctions.langMatches(lang, langi)) { - label = literal; - } - } - } - } - } - } - return label; - } - - - /** - * Gets the "first" declared rdfs:range of a given property. - * If multiple ranges exist, the behavior is undefined. - * Note that this method does not consider ranges defined on - * super-properties. 
- * @param property the property to get the range of - * @return the "first" range Resource or null - */ - public static Resource getFirstDirectRange(Resource property) { - return property.getPropertyResourceValue(RDFS.range); - } - - - private static Resource getFirstRange(Resource property, Set reached) { - Resource directRange = getFirstDirectRange(property); - if(directRange != null) { - return directRange; - } - StmtIterator it = property.listProperties(RDFS.subPropertyOf); - while (it.hasNext()) { - Statement ss = it.next(); - if (ss.getObject().isURIResource()) { - Resource superProperty = ss.getResource(); - if (!reached.contains(superProperty)) { - reached.add(superProperty); - Resource r = getFirstRange(superProperty, reached); - if (r != null) { - it.close(); - return r; - } - } - } - } - return null; - } - - - /** - * Gets the "first" declared rdfs:range of a given property. - * If multiple ranges exist, the behavior is undefined. - * This method walks up to super-properties if no direct match exists. - * @param property the property to get the range of - * @return the "first" range Resource or null - */ - public static Resource getFirstRange(Resource property) { - return getFirstRange(property, new HashSet<>()); - } - - - public static Set getImports(Resource graph) { - Set results = new HashSet<>(); - for(Property importProperty : ImportProperties.get().getImportProperties()) { - results.addAll(JenaUtil.getResourceProperties(graph, importProperty)); - } - return results; - } - - - public static Integer getIntegerProperty(Resource subject, Property predicate) { - Statement s = subject.getProperty(predicate); - if(s != null && s.getObject().isLiteral()) { - return s.getInt(); - } - else { - return null; - } - } - - - public static RDFList getListProperty(Resource subject, Property predicate) { - Statement s = subject.getProperty(predicate); - if(s != null && s.getObject().canAs(RDFList.class)) { - return s.getResource().as(RDFList.class); - } - else { - return null; - } - } - - - public static List getLiteralProperties(Resource subject, Property predicate) { - List results = new LinkedList<>(); - StmtIterator it = subject.listProperties(predicate); - while(it.hasNext()) { - Statement s = it.next(); - if(s.getObject().isLiteral()) { - results.add(s.getLiteral()); - } - } - return results; - } - - - /** - * Walks up the class hierarchy starting at a given class until one of them - * returns a value for a given Function. 
- * @param cls the class to start at - * @param function the Function to execute on each class - * @param the requested result type - * @return the "first" non-null value, or null - */ - public static T getNearest(Resource cls, java.util.function.Function function) { - T result = function.apply(cls); - if(result != null) { - return result; - } - return getNearest(cls, function, new HashSet<>()); - } - - - private static T getNearest(Resource cls, java.util.function.Function function, Set reached) { - reached.add(cls); - StmtIterator it = cls.listProperties(RDFS.subClassOf); - while(it.hasNext()) { - Statement s = it.next(); - if(s.getObject().isResource() && !reached.contains(s.getResource())) { - T result = function.apply(s.getResource()); - if(result == null) { - result = getNearest(s.getResource(), function, reached); - } - if(result != null) { - it.close(); - return result; - } - } - } - return null; - } - - - /** - * Overcomes a design mismatch with Jena: if the base model does not declare a default namespace then the - * default namespace of an import is returned - this is not desirable for TopBraid-like scenarios. - * @param model the Model to operate on - * @param prefix the prefix to get the URI of - * @return the URI of prefix - */ - public static String getNsPrefixURI(Model model, String prefix) { - if ("".equals(prefix) && model.getGraph() instanceof MultiUnion) { - Graph baseGraph = ((MultiUnion)model.getGraph()).getBaseGraph(); - if(baseGraph != null) { - return baseGraph.getPrefixMapping().getNsPrefixURI(prefix); - } - else { - return model.getNsPrefixURI(prefix); - } - } - else { - return model.getNsPrefixURI(prefix); - } - } - - - public static RDFNode getProperty(Resource subject, Property predicate) { - Statement s = subject.getProperty(predicate); - if(s != null) { - return s.getObject(); - } - else { - return null; - } - } - - - public static Resource getResourcePropertyWithType(Resource subject, Property predicate, Resource type) { - StmtIterator it = subject.listProperties(predicate); - while(it.hasNext()) { - Statement s = it.next(); - if(s.getObject().isResource() && JenaUtil.hasIndirectType(s.getResource(), type)) { - it.close(); - return s.getResource(); - } - } - return null; - } - - - public static List getResourceProperties(Resource subject, Property predicate) { - List results = new LinkedList<>(); - StmtIterator it = subject.listProperties(predicate); - while(it.hasNext()) { - Statement s = it.next(); - if(s.getObject().isResource()) { - results.add(s.getResource()); - } - } - return results; - } - - - public static Resource getURIResourceProperty(Resource subject, Property predicate) { - Statement s = subject.getProperty(predicate); - if(s != null && s.getObject().isURIResource()) { - return s.getResource(); - } - else { - return null; - } - } - - - public static List getURIResourceProperties(Resource subject, Property predicate) { - List results = new LinkedList<>(); - StmtIterator it = subject.listProperties(predicate); - while(it.hasNext()) { - Statement s = it.next(); - if(s.getObject().isURIResource()) { - results.add(s.getResource()); - } - } - return results; - } - - - public static String getStringProperty(Resource subject, Property predicate) { - Statement s = subject.getProperty(predicate); - if(s != null && s.getObject().isLiteral()) { - return s.getString(); - } - else { - return null; - } - } - - - public static boolean getBooleanProperty(Resource subject, Property predicate) { - Statement s = subject.getProperty(predicate); - if(s != null && 
s.getObject().isLiteral()) { - return s.getBoolean(); - } - else { - return false; - } - } - - - public static Double getDoubleProperty(Resource subject, Property predicate) { - Statement s = subject.getProperty(predicate); - if(s != null && s.getObject().isLiteral()) { - return s.getDouble(); - } - else { - return null; - } - } - - - public static double getDoubleProperty(Resource subject, Property predicate, double defaultValue) { - Double d = getDoubleProperty(subject, predicate); - if(d != null) { - return d; - } - else { - return defaultValue; - } - } - - - public static List getSubGraphs(MultiUnion union) { - List results = new LinkedList<>(); - Graph baseGraph = union.getBaseGraph(); - if(baseGraph != null) { - results.add(baseGraph); - } - results.addAll(union.getSubGraphs()); - return results; - } - - - /** - * Gets a Set of all superclasses (rdfs:subClassOf) of a given Resource. - * @param subClass the subClass Resource - * @return a Collection of class resources - */ - public static Collection getSuperClasses(Resource subClass) { - NodeIterator it = subClass.getModel().listObjectsOfProperty(subClass, RDFS.subClassOf); - Set results = new HashSet<>(); - while (it.hasNext()) { - RDFNode node = it.nextNode(); - if (node instanceof Resource) { - results.add((Resource)node); - } - } - return results; - } - - - /** - * Gets the "first" type of a given Resource. - * @param instance the instance to get the type of - * @return the type or null - */ - public static Resource getType(Resource instance) { - return instance.getPropertyResourceValue(RDF.type); - } - - - /** - * Gets a Set of all rdf:types of a given Resource. - * @param instance the instance Resource - * @return a Collection of type resources - */ - public static List getTypes(Resource instance) { - return JenaUtil.getResourceProperties(instance, RDF.type); - } - - - /** - * Checks whether a given Resource is an instance of a given type, or - * a subclass thereof. Make sure that the expectedType parameter is associated - * with the right Model, because the system will try to walk up the superclasses - * of expectedType. The expectedType may have no Model, in which case - * the method will use the instance's Model. - * @param instance the Resource to test - * @param expectedType the type that instance is expected to have - * @return true if resource has rdf:type expectedType - */ - public static boolean hasIndirectType(Resource instance, Resource expectedType) { - - if(expectedType.getModel() == null) { - expectedType = expectedType.inModel(instance.getModel()); - } - - StmtIterator it = instance.listProperties(RDF.type); - while(it.hasNext()) { - Statement s = it.next(); - if(s.getObject().isResource()) { - Resource actualType = s.getResource(); - if(actualType.equals(expectedType) || JenaUtil.hasSuperClass(actualType, expectedType)) { - it.close(); - return true; - } - } - } - return false; - } - - - /** - * Checks whether a given class has a given (transitive) super class. 
- * @param subClass the sub-class - * @param superClass the super-class - * @return true if subClass has superClass (somewhere up the tree) - */ - public static boolean hasSuperClass(Resource subClass, Resource superClass) { - return hasSuperClass(subClass, superClass, new HashSet<>()); - } - - - private static boolean hasSuperClass(Resource subClass, Resource superClass, Set reached) { - StmtIterator it = subClass.listProperties(RDFS.subClassOf); - while(it.hasNext()) { - Statement s = it.next(); - if(superClass.equals(s.getObject())) { - it.close(); - return true; - } - else if(!reached.contains(s.getResource())) { - reached.add(s.getResource()); - if(hasSuperClass(s.getResource(), superClass, reached)) { - it.close(); - return true; - } - } - } - return false; - } - - - /** - * Checks whether a given property has a given (transitive) super property. - * @param subProperty the sub-property - * @param superProperty the super-property - * @return true if subProperty has superProperty (somewhere up the tree) - */ - public static boolean hasSuperProperty(Property subProperty, Property superProperty) { - return getAllSuperProperties(subProperty).contains(superProperty); - } - - - /** - * Sets the usual default namespaces for rdf, rdfs, owl and xsd. - * @param graph the Graph to modify - */ - public static void initNamespaces(Graph graph) { - PrefixMapping prefixMapping = graph.getPrefixMapping(); - initNamespaces(prefixMapping); - } - - - /** - * Sets the usual default namespaces for rdf, rdfs, owl and xsd. - * @param prefixMapping the Model to modify - */ - public static void initNamespaces(PrefixMapping prefixMapping) { - ensurePrefix(prefixMapping, "rdf", RDF.getURI()); - ensurePrefix(prefixMapping, "rdfs", RDFS.getURI()); - ensurePrefix(prefixMapping, "owl", OWL.getURI()); - ensurePrefix(prefixMapping, "xsd", XSD.getURI()); - } - - private static void ensurePrefix(PrefixMapping prefixMapping, String prefix, String uristr) { - // set if not present, or if different - if (!uristr.equals(prefixMapping.getNsPrefixURI(prefix))) { - prefixMapping.setNsPrefix(prefix, uristr); - } - } - - /** - * Checks whether a given graph (possibly a MultiUnion) only contains - * GraphMemBase instances. - * @param graph the Graph to test - * @return true if graph is a memory graph - */ - public static boolean isMemoryGraph(Graph graph) { - if(graph instanceof MultiUnion) { - for(Graph subGraph : JenaUtil.getSubGraphs((MultiUnion)graph)) { - if(!isMemoryGraph(subGraph)) { - return false; - } - } - return true; - } - else { - return helper.isMemoryGraph(graph); - } - } - - - /** - * Gets an Iterator over all Statements of a given property or its sub-properties - * at a given subject instance. Note that the predicate and subject should be - * both attached to a Model to avoid NPEs. 
- * @param subject the subject (may be null) - * @param predicate the predicate - * @return a StmtIterator - */ - public static StmtIterator listAllProperties(Resource subject, Property predicate) { - List results = new LinkedList<>(); - helper.setGraphReadOptimization(true); - try { - listAllProperties(subject, predicate, new HashSet<>(), results); - } - finally { - helper.setGraphReadOptimization(false); - } - return new StmtIteratorImpl(results.iterator()); - } - - - private static void listAllProperties(Resource subject, Property predicate, Set reached, - List results) { - reached.add(predicate); - StmtIterator sit; - Model model; - if (subject != null) { - sit = subject.listProperties(predicate); - model = subject.getModel(); - } - else { - model = predicate.getModel(); - sit = model.listStatements(null, predicate, (RDFNode)null); - } - while (sit.hasNext()) { - results.add(sit.next()); - } - - // Iterate into direct subproperties - StmtIterator it = model.listStatements(null, RDFS.subPropertyOf, predicate); - while (it.hasNext()) { - Statement sps = it.next(); - if (!reached.contains(sps.getSubject())) { - Property subProperty = asProperty(sps.getSubject()); - listAllProperties(subject, subProperty, reached, results); - } - } - } - - - /** - * This indicates that no further changes to the model are needed. - * Some implementations may give runtime exceptions if this is violated. - * @param m the Model to get as a read-only variant - * @return A read-only model - */ - public static Model asReadOnlyModel(Model m) { - return helper.asReadOnlyModel(m); - } - - - /** - * This indicates that no further changes to the graph are needed. - * Some implementations may give runtime exceptions if this is violated. - * @param g the Graph to get as a read-only variant - * @return a read-only graph - */ - public static Graph asReadOnlyGraph(Graph g) { - return helper.asReadOnlyGraph(g); - } - - - // Internal to TopBraid only - public static OntModel createOntologyModel(OntModelSpec spec, Model base) { - return helper.createOntologyModel(spec,base); - } - - - /** - * Allows some environments, e.g. TopBraid, to prioritize - * a block of code for reading graphs, with no update occurring. - * The top of the block should call this with true - * with a matching call with false in a finally - * block. - * - * Note: Unstable - don't use outside of TopBraid. - * - * @param onOrOff true to switch on - */ - public static void setGraphReadOptimization(boolean onOrOff) { - helper.setGraphReadOptimization(onOrOff); - } - - - /** - * Ensure that we there is a read-only, thread safe version of the - * graph. If the graph is not, then create a deep clone that is - * both. - * - * Note: Unstable - don't use outside of TopBraid. - * - * @param g The given graph - * @return A read-only, thread safe version of the given graph. - */ - public static Graph deepCloneForReadOnlyThreadSafe(Graph g) { - return helper.deepCloneReadOnlyGraph(g); - } - - - /** - * Calls a SPARQL expression and returns the result, using some initial bindings. 
- * - * @param expression the expression to execute (must contain absolute URIs) - * @param initialBinding the initial bindings for the unbound variables - * @param dataset the query Dataset or null for default - * @return the result or null - */ - public static Node invokeExpression(String expression, QuerySolution initialBinding, Dataset dataset) { - if (dataset == null) { - dataset = ARQFactory.get().getDataset(ModelFactory.createDefaultModel()); - } - Query query = ARQFactory.get().createExpressionQuery(expression); - try(QueryExecution qexec = ARQFactory.get().createQueryExecution(query, dataset, initialBinding)) { - ResultSet rs = qexec.execSelect(); - Node result = null; - if (rs.hasNext()) { - QuerySolution qs = rs.next(); - String firstVarName = rs.getResultVars().get(0); - RDFNode rdfNode = qs.get(firstVarName); - if (rdfNode != null) { - result = rdfNode.asNode(); - } - } - return result; - } - } - - - /** - * Calls a given SPARQL function with no arguments. - * - * @param function the URI resource of the function to call - * @param dataset the Dataset to operate on or null for default - * @return the result of the function call - */ - public static Node invokeFunction0(Resource function, Dataset dataset) { - ExprList args = new ExprList(); - return invokeFunction(function, args, dataset); - } - - - /** - * Calls a given SPARQL function with one argument. - * - * @param function the URI resource of the function to call - * @param argument the first argument - * @param dataset the Dataset to operate on or null for default - * @return the result of the function call - */ - public static Node invokeFunction1(Resource function, RDFNode argument, Dataset dataset) { - ExprList args = new ExprList(); - args.add(argument != null ? NodeValue.makeNode(argument.asNode()) : new ExprVar("arg1")); - return invokeFunction(function, args, dataset); - } - - - public static Node invokeFunction1(Resource function, Node argument, Dataset dataset) { - return invokeFunction1(function, toRDFNode(argument), dataset); - } - - - /** - * Calls a given SPARQL function with two arguments. - * - * @param function the URI resource of the function to call - * @param argument1 the first argument - * @param argument2 the second argument - * @param dataset the Dataset to operate on or null for default - * @return the result of the function call - */ - public static Node invokeFunction2(Resource function, RDFNode argument1, RDFNode argument2, Dataset dataset) { - ExprList args = new ExprList(); - args.add(argument1 != null ? NodeValue.makeNode(argument1.asNode()) : new ExprVar("arg1")); - args.add(argument2 != null ? NodeValue.makeNode(argument2.asNode()) : new ExprVar("arg2")); - return invokeFunction(function, args, dataset); - } - - - public static Node invokeFunction2(Resource function, Node argument1, Node argument2, Dataset dataset) { - return invokeFunction2(function, toRDFNode(argument1), toRDFNode(argument2), dataset); - } - - - public static Node invokeFunction3(Resource function, RDFNode argument1, RDFNode argument2, RDFNode argument3, Dataset dataset) { - ExprList args = new ExprList(); - args.add(argument1 != null ? NodeValue.makeNode(argument1.asNode()) : new ExprVar("arg1")); - args.add(argument2 != null ? NodeValue.makeNode(argument2.asNode()) : new ExprVar("arg2")); - args.add(argument3 != null ? 
NodeValue.makeNode(argument3.asNode()) : new ExprVar("arg3")); - return invokeFunction(function, args, dataset); - } - - - private static Node invokeFunction(Resource function, ExprList args, Dataset dataset) { - - if (dataset == null) { - dataset = ARQFactory.get().getDataset(ModelFactory.createDefaultModel()); - } - - E_Function expr = new E_Function(function.getURI(), args); - DatasetGraph dsg = dataset.asDatasetGraph(); - Context cxt = ARQ.getContext().copy(); - cxt.set(ARQConstants.sysCurrentTime, NodeFactoryExtra.nowAsDateTime()); - FunctionEnv env = new ExecutionContext(cxt, dsg.getDefaultGraph(), dsg, null); - try { - NodeValue r = expr.eval(BindingRoot.create(), env); - if(r != null) { - return r.asNode(); - } - } - catch(ExprEvalException ex) { - } - return null; - } - - - public static Node invokeFunction3(Resource function, Node argument1, Node argument2, Node argument3, Dataset dataset) { - return invokeFunction3(function, toRDFNode(argument1), toRDFNode(argument2), toRDFNode(argument3), dataset); - } - - - /** - * Temp patch for a bug in Jena's syntaxtransform, also applying substitutions on - * HAVING clauses. - * @param query the Query to transform - * @param substitutions the variable bindings - * @return a new Query with the bindings applied - */ - public static Query queryWithSubstitutions(Query query, final Map substitutions) { - Query result = QueryTransformOps.transform(query, substitutions); - - // TODO: Replace this hack once there is a Jena patch - if(result.hasHaving()) { - NodeTransform nodeTransform = new NodeTransform() { - @Override - public Node apply(Node node) { - Node n = substitutions.get(node) ; - if ( n == null ) { - return node ; - } - return n ; - } - }; - ElementTransform eltrans = new ElementTransformSubst(substitutions) ; - ExprTransform exprTrans = new ExprTransformNodeElement(nodeTransform, eltrans) ; - List havingExprs = result.getHavingExprs(); - for(int i = 0; i < havingExprs.size(); i++) { - Expr old = havingExprs.get(i); - Expr neo = ExprTransformer.transform(exprTrans, old) ; - if ( neo != old ) { - havingExprs.set(i, neo); - } - } - } - return result; - } - - - public static void sort(List nodes) { - Collections.sort(nodes, new Comparator() { - @Override - public int compare(Resource o1, Resource o2) { - return NodeUtils.compareRDFTerms(o1.asNode(), o2.asNode()); - } - }); - } - - - public static RDFNode toRDFNode(Node node) { - if(node != null) { - return dummyModel.asRDFNode(node); - } - else { - return null; - } - } - - - public static String withImports(String uri) { - if(!uri.startsWith(WITH_IMPORTS_PREFIX)) { - return WITH_IMPORTS_PREFIX + uri; - } - else { - return uri; - } - } - - - public static String withoutImports(String uri) { - if(uri.startsWith(WITH_IMPORTS_PREFIX)) { - return uri.substring(WITH_IMPORTS_PREFIX.length()); - } - else { - return uri; - } - } + // Unstable + private static JenaUtilHelper helper = new JenaUtilHelper(); + + // Leave this line under the helper line above! + private static Model dummyModel = JenaUtil.createDefaultModel(); + + public static final String WITH_IMPORTS_PREFIX = "http://rdfex.org/withImports?uri="; + + + /** + * Sets the helper which allows the behavior of some JenaUtil + * methods to be modified by the system using the SPIN library. + * Note: Should not be used outside of TopBraid - not stable. 
+ * + * @param h the JenaUtilHelper + * @return the old helper + */ + public static JenaUtilHelper setHelper(JenaUtilHelper h) { + JenaUtilHelper old = helper; + helper = h; + return old; + } + + + /** + * Gets the current helper object. + * Note: Should not be used outside of TopBraid - not stable. + * + * @return the helper + */ + public static JenaUtilHelper getHelper() { + return helper; + } + + + /** + * Populates a result set of resources reachable from a subject via zero or more steps with a given predicate. + * Implementation note: the results set need only implement {@link Collection#add(Object)}. + * + * @param results The transitive objects reached from subject via triples with the given predicate + * @param subject the subject to start traversal at + * @param predicate the predicate to walk + */ + public static void addTransitiveObjects(Set results, Resource subject, Property predicate) { + helper.setGraphReadOptimization(true); + try { + addTransitiveObjects(results, new HashSet(), subject, predicate); + } finally { + helper.setGraphReadOptimization(false); + } + } + + + private static void addTransitiveObjects(Set resources, Set reached, + Resource subject, Property predicate) { + resources.add(subject); + reached.add(subject); + StmtIterator it = subject.listProperties(predicate); + try { + while (it.hasNext()) { + RDFNode object = it.next().getObject(); + if (object instanceof Resource) { + if (!reached.contains(object)) { + addTransitiveObjects(resources, reached, (Resource) object, predicate); + } + } + } + } finally { + it.close(); + } + } + + + private static void addTransitiveSubjects(Set reached, Resource object, + Property predicate, ProgressMonitor monitor) { + if (object != null) { + reached.add(object); + StmtIterator it = object.getModel().listStatements(null, predicate, object); + try { + while (it.hasNext()) { + if (monitor != null && monitor.isCanceled()) { + it.close(); + return; + } + Resource subject = it.next().getSubject(); + if (!reached.contains(subject)) { + addTransitiveSubjects(reached, subject, predicate, monitor); + } + } + } finally { + it.close(); + } + } + } + + + /** + * Turns a QuerySolution into a Binding. + * + * @param map the input QuerySolution + * @return a Binding or null if the input is null + */ + public static Binding asBinding(final QuerySolution map) { + if (map != null) { + BindingBuilder builder = BindingBuilder.create(); + Iterator varNames = map.varNames(); + while (varNames.hasNext()) { + String varName = varNames.next(); + RDFNode node = map.get(varName); + if (node != null) { + builder.add(Var.alloc(varName), node.asNode()); + } + } + return builder.build(); + } else { + return null; + } + } + + + /** + * Turns a Binding into a QuerySolutionMap. + * + * @param binding the Binding to convert + * @return a QuerySolutionMap + */ + public static QuerySolutionMap asQuerySolutionMap(Binding binding) { + QuerySolutionMap map = new QuerySolutionMap(); + Iterator vars = binding.vars(); + while (vars.hasNext()) { + Var var = vars.next(); + Node node = binding.get(var); + if (node != null) { + map.add(var.getName(), dummyModel.asRDFNode(node)); + } + } + return map; + } + + + /** + * Returns a set of resources reachable from an object via one or more reversed steps with a given predicate. 
+ * + * @param object the object to start traversal at + * @param predicate the predicate to walk + * @param monitor an optional progress monitor to allow cancelation + * @return the reached resources + */ + public static Set getAllTransitiveSubjects(Resource object, Property predicate, ProgressMonitor monitor) { + Set set = new HashSet<>(); + helper.setGraphReadOptimization(true); + try { + addTransitiveSubjects(set, object, predicate, monitor); + } finally { + helper.setGraphReadOptimization(false); + } + set.remove(object); + return set; + } + + + /** + * Casts a Resource into a Property. + * + * @param resource the Resource to cast + * @return resource as an instance of Property + */ + public static Property asProperty(Resource resource) { + if (resource instanceof Property) { + return (Property) resource; + } else { + return new PropertyImpl(resource.asNode(), (EnhGraph) resource.getModel()); + } + } + + + public static void collectBaseGraphs(Graph graph, Set baseGraphs) { + if (graph instanceof MultiUnion) { + MultiUnion union = (MultiUnion) graph; + collectBaseGraphs(union.getBaseGraph(), baseGraphs); + for (Object subGraph : union.getSubGraphs()) { + collectBaseGraphs((Graph) subGraph, baseGraphs); + } + } else if (graph != null) { + baseGraphs.add(graph); + } + } + + + /** + * Creates a new Graph. By default, this will deliver a plain in-memory graph, + * but other implementations may deliver graphs with concurrency support and + * other features. + * + * @return a default graph + * @see #createDefaultModel() + */ + public static Graph createDefaultGraph() { + return helper.createDefaultGraph(); + } + + + /** + * Wraps the result of {@link #createDefaultGraph()} into a Model and initializes namespaces. + * + * @return a default Model + * @see #createDefaultGraph() + */ + public static Model createDefaultModel() { + Model m = ModelFactory.createModelForGraph(createDefaultGraph()); + initNamespaces(m); + return m; + } + + + /** + * Creates a memory Graph with no reification. + * + * @return a new memory graph + */ + public static Graph createMemoryGraph() { + return GraphMemFactory.createDefaultGraph(); + } + + + /** + * Creates a memory Model with no reification. + * + * @return a new memory Model + */ + public static Model createMemoryModel() { + return ModelFactory.createModelForGraph(createMemoryGraph()); + } + + + public static MultiUnion createMultiUnion() { + return helper.createMultiUnion(); + } + + + public static MultiUnion createMultiUnion(Graph[] graphs) { + return helper.createMultiUnion(graphs); + } + + + public static MultiUnion createMultiUnion(Iterator graphs) { + return helper.createMultiUnion(graphs); + } + + + /** + * Gets all instances of a given class and its subclasses. + * + * @param cls the class to get the instances of + * @return the instances + */ + public static Set getAllInstances(Resource cls) { + JenaUtil.setGraphReadOptimization(true); + try { + Model model = cls.getModel(); + Set classes = getAllSubClasses(cls); + classes.add(cls); + Set results = new HashSet<>(); + for (Resource subClass : classes) { + StmtIterator it = model.listStatements(null, RDF.type, subClass); + while (it.hasNext()) { + results.add(it.next().getSubject()); + } + } + return results; + } finally { + JenaUtil.setGraphReadOptimization(false); + } + } + + + public static Set getAllSubClasses(Resource cls) { + return getAllTransitiveSubjects(cls, RDFS.subClassOf); + } + + + /** + * Returns a set consisting of a given class and all its subclasses. + * Similar to rdfs:subClassOf*. 
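A small sketch of the naming convention used by these helpers (cls is a placeholder class resource):

    Set<Resource> subsOnly = JenaUtil.getAllSubClasses(cls);      // transitive subclasses, cls itself excluded
    Set<Resource> subsStar = JenaUtil.getAllSubClassesStar(cls);  // the same set plus cls itself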
+ * + * @param cls the class to return with its subclasses + * @return the Set of class resources + */ + public static Set getAllSubClassesStar(Resource cls) { + Set results = getAllTransitiveSubjects(cls, RDFS.subClassOf); + results.add(cls); + return results; + } + + + public static Set getAllSubProperties(Property superProperty) { + return getAllTransitiveSubjects(superProperty, RDFS.subPropertyOf); + } + + + public static Set getAllSuperClasses(Resource cls) { + return getAllTransitiveObjects(cls, RDFS.subClassOf); + } + + + /** + * Returns a set consisting of a given class and all its superclasses. + * Similar to rdfs:subClassOf*. + * + * @param cls the class to return with its superclasses + * @return the Set of class resources + */ + public static Set getAllSuperClassesStar(Resource cls) { + Set results = getAllTransitiveObjects(cls, RDFS.subClassOf); + results.add(cls); + return results; + } + + + public static Set getAllSuperProperties(Property subProperty) { + return getAllTransitiveObjects(subProperty, RDFS.subPropertyOf); + } + + + /** + * Returns a set of resources reachable from a subject via one or more steps with a given predicate. + * + * @param subject the subject to start at + * @param predicate the predicate to traverse + * @return the reached resources + */ + public static Set getAllTransitiveObjects(Resource subject, Property predicate) { + Set set = new HashSet<>(); + addTransitiveObjects(set, subject, predicate); + set.remove(subject); + return set; + } + + + private static Set getAllTransitiveSubjects(Resource object, Property predicate) { + return getAllTransitiveSubjects(object, predicate, null); + } + + + public static Set getAllTypes(Resource instance) { + Set types = new HashSet<>(); + StmtIterator it = instance.listProperties(RDF.type); + try { + while (it.hasNext()) { + Resource type = it.next().getResource(); + types.add(type); + types.addAll(getAllSuperClasses(type)); + } + } finally { + it.close(); + } + return types; + } + + + /** + * Gets the "base graph" of a Model, walking into MultiUnions if needed. + * + * @param model the Model to get the base graph of + * @return the base graph or null if the model contains a MultiUnion that doesn't declare one + */ + public static Graph getBaseGraph(final Model model) { + return getBaseGraph(model.getGraph()); + } + + + public static Graph getBaseGraph(Graph graph) { + Graph baseGraph = graph; + while (baseGraph instanceof MultiUnion) { + baseGraph = ((MultiUnion) baseGraph).getBaseGraph(); + } + return baseGraph; + } + + + public static Model getBaseModel(Model model) { + Graph baseGraph = getBaseGraph(model); + if (baseGraph == model.getGraph()) { + return model; + } else { + return ModelFactory.createModelForGraph(baseGraph); + } + } + + + /** + * For a given subject resource and a given collection of (label/comment) properties this finds the most + * suitable value of either property for a given list of languages (usually from the current user's preferences). + * For example, if the user's languages are [ "en-AU" ] then the function will prefer "mate"@en-AU over + * "friend"@en and never return "freund"@de. The function falls back to literals that have no language + * if no better literal has been found. 
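A compact sketch of those rules, with made-up data and assuming the usual Jena and JenaUtil imports:

    Model m = JenaUtil.createMemoryModel();
    Resource person = m.createResource("http://example.org/person");
    person.addProperty(RDFS.label, "mate", "en-AU");
    person.addProperty(RDFS.label, "friend", "en");
    person.addProperty(RDFS.label, "Freund", "de");

    // Returns "mate"@en-AU for these preferences; without the en-AU label it would
    // return "friend"@en, and it would never pick "Freund"@de.
    Literal best = JenaUtil.getBestStringLiteral(person,
            Arrays.asList("en-AU", "en"), Collections.singletonList(RDFS.label));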
+ * + * @param resource the subject resource + * @param langs the allowed languages + * @param properties the properties to check + * @return the best suitable value or null + */ + public static Literal getBestStringLiteral(Resource resource, List langs, Iterable properties) { + return getBestStringLiteral(resource, langs, properties, (r, p) -> r.listProperties(p)); + } + + + public static Literal getBestStringLiteral(Resource resource, List langs, Iterable properties, BiFunction> getter) { + String prefLang = langs.isEmpty() ? null : langs.get(0); + Literal label = null; + int bestLang = -1; + for (Property predicate : properties) { + ExtendedIterator it = getter.apply(resource, predicate); + while (it.hasNext()) { + RDFNode object = it.next().getObject(); + if (object.isLiteral()) { + Literal literal = (Literal) object; + String lang = literal.getLanguage(); + if (lang.length() == 0 && label == null) { + label = literal; + } else if (prefLang != null && prefLang.equalsIgnoreCase(lang)) { + it.close(); + return literal; + } else { + // 1) Never use a less suitable language + // 2) Never replace an already existing label (esp: skos:prefLabel) unless new lang is better + // 3) Fall back to more special languages if no other was found (e.g. use en-GB if only "en" is accepted) + int startLang = bestLang < 0 ? langs.size() - 1 : (label != null ? bestLang - 1 : bestLang); + for (int i = startLang; i > 0; i--) { + String langi = langs.get(i); + if (langi.equalsIgnoreCase(lang)) { + label = literal; + bestLang = i; + } else if (label == null && lang.contains("-") && NodeFunctions.langMatches(lang, langi)) { + label = literal; + } + } + } + } + } + } + return label; + } + + + /** + * Gets the "first" declared rdfs:range of a given property. + * If multiple ranges exist, the behavior is undefined. + * Note that this method does not consider ranges defined on + * super-properties. + * + * @param property the property to get the range of + * @return the "first" range Resource or null + */ + public static Resource getFirstDirectRange(Resource property) { + return property.getPropertyResourceValue(RDFS.range); + } + + + private static Resource getFirstRange(Resource property, Set reached) { + Resource directRange = getFirstDirectRange(property); + if (directRange != null) { + return directRange; + } + StmtIterator it = property.listProperties(RDFS.subPropertyOf); + while (it.hasNext()) { + Statement ss = it.next(); + if (ss.getObject().isURIResource()) { + Resource superProperty = ss.getResource(); + if (!reached.contains(superProperty)) { + reached.add(superProperty); + Resource r = getFirstRange(superProperty, reached); + if (r != null) { + it.close(); + return r; + } + } + } + } + return null; + } + + + /** + * Gets the "first" declared rdfs:range of a given property. + * If multiple ranges exist, the behavior is undefined. + * This method walks up to super-properties if no direct match exists. 
+ * + * @param property the property to get the range of + * @return the "first" range Resource or null + */ + public static Resource getFirstRange(Resource property) { + return getFirstRange(property, new HashSet<>()); + } + + + public static Set getImports(Resource graph) { + Set results = new HashSet<>(); + for (Property importProperty : ImportProperties.get().getImportProperties()) { + results.addAll(JenaUtil.getResourceProperties(graph, importProperty)); + } + return results; + } + + + public static Integer getIntegerProperty(Resource subject, Property predicate) { + Statement s = subject.getProperty(predicate); + if (s != null && s.getObject().isLiteral()) { + return s.getInt(); + } else { + return null; + } + } + + + public static RDFList getListProperty(Resource subject, Property predicate) { + Statement s = subject.getProperty(predicate); + if (s != null && s.getObject().canAs(RDFList.class)) { + return s.getResource().as(RDFList.class); + } else { + return null; + } + } + + + public static List getLiteralProperties(Resource subject, Property predicate) { + List results = new LinkedList<>(); + StmtIterator it = subject.listProperties(predicate); + while (it.hasNext()) { + Statement s = it.next(); + if (s.getObject().isLiteral()) { + results.add(s.getLiteral()); + } + } + return results; + } + + + /** + * Walks up the class hierarchy starting at a given class until one of them + * returns a value for a given Function. + * + * @param cls the class to start at + * @param function the Function to execute on each class + * @param the requested result type + * @return the "first" non-null value, or null + */ + public static T getNearest(Resource cls, java.util.function.Function function) { + T result = function.apply(cls); + if (result != null) { + return result; + } + return getNearest(cls, function, new HashSet<>()); + } + + + private static T getNearest(Resource cls, java.util.function.Function function, Set reached) { + reached.add(cls); + StmtIterator it = cls.listProperties(RDFS.subClassOf); + while (it.hasNext()) { + Statement s = it.next(); + if (s.getObject().isResource() && !reached.contains(s.getResource())) { + T result = function.apply(s.getResource()); + if (result == null) { + result = getNearest(s.getResource(), function, reached); + } + if (result != null) { + it.close(); + return result; + } + } + } + return null; + } + + + /** + * Overcomes a design mismatch with Jena: if the base model does not declare a default namespace then the + * default namespace of an import is returned - this is not desirable for TopBraid-like scenarios. 
+ * + * @param model the Model to operate on + * @param prefix the prefix to get the URI of + * @return the URI of prefix + */ + public static String getNsPrefixURI(Model model, String prefix) { + if ("".equals(prefix) && model.getGraph() instanceof MultiUnion) { + Graph baseGraph = ((MultiUnion) model.getGraph()).getBaseGraph(); + if (baseGraph != null) { + return baseGraph.getPrefixMapping().getNsPrefixURI(prefix); + } else { + return model.getNsPrefixURI(prefix); + } + } else { + return model.getNsPrefixURI(prefix); + } + } + + + public static RDFNode getProperty(Resource subject, Property predicate) { + Statement s = subject.getProperty(predicate); + if (s != null) { + return s.getObject(); + } else { + return null; + } + } + + + public static Resource getResourcePropertyWithType(Resource subject, Property predicate, Resource type) { + StmtIterator it = subject.listProperties(predicate); + while (it.hasNext()) { + Statement s = it.next(); + if (s.getObject().isResource() && JenaUtil.hasIndirectType(s.getResource(), type)) { + it.close(); + return s.getResource(); + } + } + return null; + } + + + public static List getResourceProperties(Resource subject, Property predicate) { + List results = new LinkedList<>(); + StmtIterator it = subject.listProperties(predicate); + while (it.hasNext()) { + Statement s = it.next(); + if (s.getObject().isResource()) { + results.add(s.getResource()); + } + } + return results; + } + + + public static Resource getURIResourceProperty(Resource subject, Property predicate) { + Statement s = subject.getProperty(predicate); + if (s != null && s.getObject().isURIResource()) { + return s.getResource(); + } else { + return null; + } + } + + + public static List getURIResourceProperties(Resource subject, Property predicate) { + List results = new LinkedList<>(); + StmtIterator it = subject.listProperties(predicate); + while (it.hasNext()) { + Statement s = it.next(); + if (s.getObject().isURIResource()) { + results.add(s.getResource()); + } + } + return results; + } + + + public static String getStringProperty(Resource subject, Property predicate) { + Statement s = subject.getProperty(predicate); + if (s != null && s.getObject().isLiteral()) { + return s.getString(); + } else { + return null; + } + } + + + public static boolean getBooleanProperty(Resource subject, Property predicate) { + Statement s = subject.getProperty(predicate); + if (s != null && s.getObject().isLiteral()) { + return s.getBoolean(); + } else { + return false; + } + } + + + public static Double getDoubleProperty(Resource subject, Property predicate) { + Statement s = subject.getProperty(predicate); + if (s != null && s.getObject().isLiteral()) { + return s.getDouble(); + } else { + return null; + } + } + + + public static double getDoubleProperty(Resource subject, Property predicate, double defaultValue) { + Double d = getDoubleProperty(subject, predicate); + if (d != null) { + return d; + } else { + return defaultValue; + } + } + + + public static List getSubGraphs(MultiUnion union) { + List results = new LinkedList<>(); + Graph baseGraph = union.getBaseGraph(); + if (baseGraph != null) { + results.add(baseGraph); + } + results.addAll(union.getSubGraphs()); + return results; + } + + + /** + * Gets a Set of all superclasses (rdfs:subClassOf) of a given Resource. 
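To contrast the direct and transitive variants, a quick sketch (cls is a placeholder class resource):

    Collection<Resource> direct = JenaUtil.getSuperClasses(cls);  // only directly asserted rdfs:subClassOf values
    Set<Resource> all = JenaUtil.getAllSuperClasses(cls);         // transitive closure, cls itself excluded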
+ * + * @param subClass the subClass Resource + * @return a Collection of class resources + */ + public static Collection getSuperClasses(Resource subClass) { + NodeIterator it = subClass.getModel().listObjectsOfProperty(subClass, RDFS.subClassOf); + Set results = new HashSet<>(); + while (it.hasNext()) { + RDFNode node = it.nextNode(); + if (node instanceof Resource) { + results.add((Resource) node); + } + } + return results; + } + + + /** + * Gets the "first" type of a given Resource. + * + * @param instance the instance to get the type of + * @return the type or null + */ + public static Resource getType(Resource instance) { + return instance.getPropertyResourceValue(RDF.type); + } + + + /** + * Gets a Set of all rdf:types of a given Resource. + * + * @param instance the instance Resource + * @return a Collection of type resources + */ + public static List getTypes(Resource instance) { + return JenaUtil.getResourceProperties(instance, RDF.type); + } + + + /** + * Checks whether a given Resource is an instance of a given type, or + * a subclass thereof. Make sure that the expectedType parameter is associated + * with the right Model, because the system will try to walk up the superclasses + * of expectedType. The expectedType may have no Model, in which case + * the method will use the instance's Model. + * + * @param instance the Resource to test + * @param expectedType the type that instance is expected to have + * @return true if resource has rdf:type expectedType + */ + public static boolean hasIndirectType(Resource instance, Resource expectedType) { + + if (expectedType.getModel() == null) { + expectedType = expectedType.inModel(instance.getModel()); + } + + StmtIterator it = instance.listProperties(RDF.type); + while (it.hasNext()) { + Statement s = it.next(); + if (s.getObject().isResource()) { + Resource actualType = s.getResource(); + if (actualType.equals(expectedType) || JenaUtil.hasSuperClass(actualType, expectedType)) { + it.close(); + return true; + } + } + } + return false; + } + + + /** + * Checks whether a given class has a given (transitive) super class. + * + * @param subClass the sub-class + * @param superClass the super-class + * @return true if subClass has superClass (somewhere up the tree) + */ + public static boolean hasSuperClass(Resource subClass, Resource superClass) { + return hasSuperClass(subClass, superClass, new HashSet<>()); + } + + + private static boolean hasSuperClass(Resource subClass, Resource superClass, Set reached) { + StmtIterator it = subClass.listProperties(RDFS.subClassOf); + while (it.hasNext()) { + Statement s = it.next(); + if (superClass.equals(s.getObject())) { + it.close(); + return true; + } else if (!reached.contains(s.getResource())) { + reached.add(s.getResource()); + if (hasSuperClass(s.getResource(), superClass, reached)) { + it.close(); + return true; + } + } + } + return false; + } + + + /** + * Checks whether a given property has a given (transitive) super property. + * + * @param subProperty the sub-property + * @param superProperty the super-property + * @return true if subProperty has superProperty (somewhere up the tree) + */ + public static boolean hasSuperProperty(Property subProperty, Property superProperty) { + return getAllSuperProperties(subProperty).contains(superProperty); + } + + + /** + * Sets the usual default namespaces for rdf, rdfs, owl and xsd. 
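For example, a sketch of pairing this with a plain memory model, which by itself registers no prefixes:

    Model m = JenaUtil.createMemoryModel();
    JenaUtil.initNamespaces(m.getGraph());
    // m now maps the rdf, rdfs, owl and xsd prefixes to their standard namespace URIs.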
+ * + * @param graph the Graph to modify + */ + public static void initNamespaces(Graph graph) { + PrefixMapping prefixMapping = graph.getPrefixMapping(); + initNamespaces(prefixMapping); + } + + + /** + * Sets the usual default namespaces for rdf, rdfs, owl and xsd. + * + * @param prefixMapping the Model to modify + */ + public static void initNamespaces(PrefixMapping prefixMapping) { + ensurePrefix(prefixMapping, "rdf", RDF.getURI()); + ensurePrefix(prefixMapping, "rdfs", RDFS.getURI()); + ensurePrefix(prefixMapping, "owl", OWL.getURI()); + ensurePrefix(prefixMapping, "xsd", XSD.getURI()); + } + + private static void ensurePrefix(PrefixMapping prefixMapping, String prefix, String uristr) { + // set if not present, or if different + if (!uristr.equals(prefixMapping.getNsPrefixURI(prefix))) { + prefixMapping.setNsPrefix(prefix, uristr); + } + } + + /** + * Checks whether a given graph (possibly a MultiUnion) only contains + * GraphMemBase instances. + * + * @param graph the Graph to test + * @return true if graph is a memory graph + */ + public static boolean isMemoryGraph(Graph graph) { + if (graph instanceof MultiUnion) { + for (Graph subGraph : JenaUtil.getSubGraphs((MultiUnion) graph)) { + if (!isMemoryGraph(subGraph)) { + return false; + } + } + return true; + } else { + return helper.isMemoryGraph(graph); + } + } + + + /** + * Gets an Iterator over all Statements of a given property or its sub-properties + * at a given subject instance. Note that the predicate and subject should be + * both attached to a Model to avoid NPEs. + * + * @param subject the subject (may be null) + * @param predicate the predicate + * @return a StmtIterator + */ + public static StmtIterator listAllProperties(Resource subject, Property predicate) { + List results = new LinkedList<>(); + helper.setGraphReadOptimization(true); + try { + listAllProperties(subject, predicate, new HashSet<>(), results); + } finally { + helper.setGraphReadOptimization(false); + } + return new StmtIteratorImpl(results.iterator()); + } + + + private static void listAllProperties(Resource subject, Property predicate, Set reached, + List results) { + reached.add(predicate); + StmtIterator sit; + Model model; + if (subject != null) { + sit = subject.listProperties(predicate); + model = subject.getModel(); + } else { + model = predicate.getModel(); + sit = model.listStatements(null, predicate, (RDFNode) null); + } + while (sit.hasNext()) { + results.add(sit.next()); + } + + // Iterate into direct subproperties + StmtIterator it = model.listStatements(null, RDFS.subPropertyOf, predicate); + while (it.hasNext()) { + Statement sps = it.next(); + if (!reached.contains(sps.getSubject())) { + Property subProperty = asProperty(sps.getSubject()); + listAllProperties(subject, subProperty, reached, results); + } + } + } + + + /** + * This indicates that no further changes to the model are needed. + * Some implementations may give runtime exceptions if this is violated. + * + * @param m the Model to get as a read-only variant + * @return A read-only model + */ + public static Model asReadOnlyModel(Model m) { + return helper.asReadOnlyModel(m); + } + + + /** + * This indicates that no further changes to the graph are needed. + * Some implementations may give runtime exceptions if this is violated. 
+ * + * @param g the Graph to get as a read-only variant + * @return a read-only graph + */ + public static Graph asReadOnlyGraph(Graph g) { + return helper.asReadOnlyGraph(g); + } + + + // Internal to TopBraid only + public static OntModel createOntologyModel(OntModelSpec spec, Model base) { + return helper.createOntologyModel(spec, base); + } + + + /** + * Allows some environments, e.g. TopBraid, to prioritize + * a block of code for reading graphs, with no update occurring. + * The top of the block should call this with true + * with a matching call with false in a finally + * block. + *
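Sketched, the intended pattern mirrors how the traversal methods above already use it:

    JenaUtil.setGraphReadOptimization(true);
    try {
        // read-heavy work on the graph, no updates
    } finally {
        JenaUtil.setGraphReadOptimization(false);
    }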
+ * Note: Unstable - don't use outside of TopBraid. + * + * @param onOrOff true to switch on + */ + public static void setGraphReadOptimization(boolean onOrOff) { + helper.setGraphReadOptimization(onOrOff); + } + + + /** + * Ensure that we there is a read-only, thread safe version of the + * graph. If the graph is not, then create a deep clone that is + * both. + *
+ * Note: Unstable - don't use outside of TopBraid. + * + * @param g The given graph + * @return A read-only, thread safe version of the given graph. + */ + public static Graph deepCloneForReadOnlyThreadSafe(Graph g) { + return helper.deepCloneReadOnlyGraph(g); + } + + + /** + * Calls a SPARQL expression and returns the result, using some initial bindings. + * + * @param expression the expression to execute (must contain absolute URIs) + * @param initialBinding the initial bindings for the unbound variables + * @param dataset the query Dataset or null for default + * @return the result or null + */ + public static Node invokeExpression(String expression, QuerySolution initialBinding, Dataset dataset) { + if (dataset == null) { + dataset = ARQFactory.get().getDataset(ModelFactory.createDefaultModel()); + } + Query query = ARQFactory.get().createExpressionQuery(expression); + try (QueryExecution qexec = ARQFactory.get().createQueryExecution(query, dataset, initialBinding)) { + ResultSet rs = qexec.execSelect(); + Node result = null; + if (rs.hasNext()) { + QuerySolution qs = rs.next(); + String firstVarName = rs.getResultVars().get(0); + RDFNode rdfNode = qs.get(firstVarName); + if (rdfNode != null) { + result = rdfNode.asNode(); + } + } + return result; + } + } + + + /** + * Calls a given SPARQL function with no arguments. + * + * @param function the URI resource of the function to call + * @param dataset the Dataset to operate on or null for default + * @return the result of the function call + */ + public static Node invokeFunction0(Resource function, Dataset dataset) { + ExprList args = new ExprList(); + return invokeFunction(function, args, dataset); + } + + + /** + * Calls a given SPARQL function with one argument. + * + * @param function the URI resource of the function to call + * @param argument the first argument + * @param dataset the Dataset to operate on or null for default + * @return the result of the function call + */ + public static Node invokeFunction1(Resource function, RDFNode argument, Dataset dataset) { + ExprList args = new ExprList(); + args.add(argument != null ? NodeValue.makeNode(argument.asNode()) : new ExprVar("arg1")); + return invokeFunction(function, args, dataset); + } + + + public static Node invokeFunction1(Resource function, Node argument, Dataset dataset) { + return invokeFunction1(function, toRDFNode(argument), dataset); + } + + + /** + * Calls a given SPARQL function with two arguments. + * + * @param function the URI resource of the function to call + * @param argument1 the first argument + * @param argument2 the second argument + * @param dataset the Dataset to operate on or null for default + * @return the result of the function call + */ + public static Node invokeFunction2(Resource function, RDFNode argument1, RDFNode argument2, Dataset dataset) { + ExprList args = new ExprList(); + args.add(argument1 != null ? NodeValue.makeNode(argument1.asNode()) : new ExprVar("arg1")); + args.add(argument2 != null ? NodeValue.makeNode(argument2.asNode()) : new ExprVar("arg2")); + return invokeFunction(function, args, dataset); + } + + + public static Node invokeFunction2(Resource function, Node argument1, Node argument2, Dataset dataset) { + return invokeFunction2(function, toRDFNode(argument1), toRDFNode(argument2), dataset); + } + + + public static Node invokeFunction3(Resource function, RDFNode argument1, RDFNode argument2, RDFNode argument3, Dataset dataset) { + ExprList args = new ExprList(); + args.add(argument1 != null ? 
NodeValue.makeNode(argument1.asNode()) : new ExprVar("arg1")); + args.add(argument2 != null ? NodeValue.makeNode(argument2.asNode()) : new ExprVar("arg2")); + args.add(argument3 != null ? NodeValue.makeNode(argument3.asNode()) : new ExprVar("arg3")); + return invokeFunction(function, args, dataset); + } + + + private static Node invokeFunction(Resource function, ExprList args, Dataset dataset) { + + if (dataset == null) { + dataset = ARQFactory.get().getDataset(ModelFactory.createDefaultModel()); + } + + E_Function expr = new E_Function(function.getURI(), args); + DatasetGraph dsg = dataset.asDatasetGraph(); + Context cxt = ARQ.getContext().copy(); + cxt.set(ARQConstants.sysCurrentTime, NodeFactoryExtra.nowAsDateTime()); + FunctionEnv env = new ExecutionContext(cxt, dsg.getDefaultGraph(), dsg, null); + try { + NodeValue r = expr.eval(BindingRoot.create(), env); + if (r != null) { + return r.asNode(); + } + } catch (ExprEvalException ex) { + } + return null; + } + + + public static Node invokeFunction3(Resource function, Node argument1, Node argument2, Node argument3, Dataset dataset) { + return invokeFunction3(function, toRDFNode(argument1), toRDFNode(argument2), toRDFNode(argument3), dataset); + } + + + /** + * Temp patch for a bug in Jena's syntaxtransform, also applying substitutions on + * HAVING clauses. + * + * @param query the Query to transform + * @param substitutions the variable bindings + * @return a new Query with the bindings applied + */ + public static Query queryWithSubstitutions(Query query, final Map substitutions) { + Query result = QueryTransformOps.transform(query, substitutions); + + // TODO: Replace this hack once there is a Jena patch + if (result.hasHaving()) { + NodeTransform nodeTransform = new NodeTransform() { + @Override + public Node apply(Node node) { + Node n = substitutions.get(node); + if (n == null) { + return node; + } + return n; + } + }; + ElementTransform eltrans = new ElementTransformSubst(substitutions); + ExprTransform exprTrans = new ExprTransformNodeElement(nodeTransform, eltrans); + List havingExprs = result.getHavingExprs(); + for (int i = 0; i < havingExprs.size(); i++) { + Expr old = havingExprs.get(i); + Expr neo = ExprTransformer.transform(exprTrans, old); + if (neo != old) { + havingExprs.set(i, neo); + } + } + } + return result; + } + + + public static void sort(List nodes) { + Collections.sort(nodes, new Comparator() { + @Override + public int compare(Resource o1, Resource o2) { + return NodeCmp.compareRDFTerms(o1.asNode(), o2.asNode()); + } + }); + } + + + public static RDFNode toRDFNode(Node node) { + if (node != null) { + return dummyModel.asRDFNode(node); + } else { + return null; + } + } + + + public static String withImports(String uri) { + if (!uri.startsWith(WITH_IMPORTS_PREFIX)) { + return WITH_IMPORTS_PREFIX + uri; + } else { + return uri; + } + } + + + public static String withoutImports(String uri) { + if (uri.startsWith(WITH_IMPORTS_PREFIX)) { + return uri.substring(WITH_IMPORTS_PREFIX.length()); + } else { + return uri; + } + } } diff --git a/src/main/java/org/topbraid/jenax/util/JenaUtilHelper.java b/src/main/java/org/topbraid/jenax/util/JenaUtilHelper.java index 801a8e92..472aad57 100644 --- a/src/main/java/org/topbraid/jenax/util/JenaUtilHelper.java +++ b/src/main/java/org/topbraid/jenax/util/JenaUtilHelper.java @@ -18,7 +18,6 @@ import java.util.Iterator; -import org.apache.jena.graph.Factory; import org.apache.jena.graph.Graph; import org.apache.jena.graph.compose.MultiUnion; import org.apache.jena.mem.GraphMemBase; 
@@ -26,6 +25,7 @@ import org.apache.jena.ontology.OntModelSpec; import org.apache.jena.rdf.model.Model; import org.apache.jena.rdf.model.ModelFactory; +import org.apache.jena.sparql.graph.GraphFactory; /** * This is an extension point for the SPIN library @@ -72,7 +72,7 @@ public MultiUnion createMultiUnion(Graph[] graphs) { * @return the default Graph */ public Graph createDefaultGraph() { - return Factory.createDefaultGraph(); + return GraphFactory.createDefaultGraph(); } diff --git a/src/main/java/org/topbraid/jenax/util/PrefixUtils.java b/src/main/java/org/topbraid/jenax/util/PrefixUtils.java index 48267f9b..3b4c4695 100644 --- a/src/main/java/org/topbraid/jenax/util/PrefixUtils.java +++ b/src/main/java/org/topbraid/jenax/util/PrefixUtils.java @@ -9,7 +9,7 @@ public class PrefixUtils { /** * Make the {@code dstGraph} prefix map the same {@code srcGraph} prefix map, - * only making chnages where necessary. + * only making changes where necessary. * @param dstGraph the destination graph * @param srcGraph the source graph * @return false if no changes where made. diff --git a/src/main/java/org/topbraid/jenax/util/QueryExecutionFactoryFilter.java b/src/main/java/org/topbraid/jenax/util/QueryExecutionFactoryFilter.java index 65da64b2..5cbeda87 100644 --- a/src/main/java/org/topbraid/jenax/util/QueryExecutionFactoryFilter.java +++ b/src/main/java/org/topbraid/jenax/util/QueryExecutionFactoryFilter.java @@ -16,151 +16,150 @@ */ package org.topbraid.jenax.util; -import java.net.http.HttpClient; -import java.time.format.DateTimeFormatter; -import java.util.List; - -import org.apache.jena.atlas.lib.DateTimeUtils ; -import org.apache.jena.query.Dataset; -import org.apache.jena.query.Query; -import org.apache.jena.query.QueryExecution; -import org.apache.jena.query.QueryExecutionFactory; -import org.apache.jena.query.QuerySolution; +import org.apache.jena.atlas.lib.DateTimeUtils; +import org.apache.jena.query.*; import org.apache.jena.rdf.model.Model; import org.apache.jena.sparql.exec.http.QueryExecutionHTTP; import org.apache.jena.sparql.exec.http.QueryExecutionHTTPBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.net.http.HttpClient; +import java.time.format.DateTimeFormatter; +import java.util.List; + public class QueryExecutionFactoryFilter { - static final String LOG_NAME = "QueryLog"; - private Logger logger; - private static QueryExecutionFactoryFilter singleton = new QueryExecutionFactoryFilter(); - - // ---- Support for controlling printing queries while running. See function "printQuery". - private static boolean PRINT = false; + static final String LOG_NAME = "QueryLog"; + private Logger logger; + private static QueryExecutionFactoryFilter singleton = new QueryExecutionFactoryFilter(); + + // ---- Support for controlling printing queries while running. See function "printQuery". + private static boolean PRINT = false; // ---- Support for controlling printing queries while running. - - /** - * Gets the singleton instance of this class. 
- * @return the singleton - */ - public static QueryExecutionFactoryFilter get() { - return singleton; - } - - private QueryExecutionFactoryFilter() { - logger = LoggerFactory.getLogger(LOG_NAME); - } - - public QueryExecution create(Query query, Model model) { - analyzeRequest(query, model, null); - return QueryExecutionFactory.create(query, model); - } - - public QueryExecution create(Query query, Model model, QuerySolution initialBinding) { - analyzeRequest(query, model, initialBinding); - return QueryExecution.create() - .query(query) - .model(model) - .initialBinding(initialBinding) - .build(); - } - - public QueryExecution create(Query query, Dataset dataset) { - analyzeRequest(query, dataset, null); - return QueryExecutionFactory.create(query, dataset); - } - - public QueryExecution create(Query query, Dataset dataset, QuerySolution initialBinding) { - analyzeRequest(query, dataset, initialBinding); - return QueryExecutionFactory.create(query, dataset, initialBinding); - } - - public QueryExecution sparqlService(String service, Query query) { - return sparqlServiceBuilder(service, query).build(); - } - + + /** + * Gets the singleton instance of this class. + * + * @return the singleton + */ + public static QueryExecutionFactoryFilter get() { + return singleton; + } + + private QueryExecutionFactoryFilter() { + logger = LoggerFactory.getLogger(LOG_NAME); + } + + public QueryExecution create(Query query, Model model) { + analyzeRequest(query, model, null); + return QueryExecutionFactory.create(query, model); + } + + public QueryExecution create(Query query, Model model, QuerySolution initialBinding) { + analyzeRequest(query, model, initialBinding); + return QueryExecution.create() + .query(query) + .model(model) + .initialBinding(initialBinding) + .build(); + } + + public QueryExecution create(Query query, Dataset dataset) { + analyzeRequest(query, dataset, null); + return QueryExecutionFactory.create(query, dataset); + } + + public QueryExecution create(Query query, Dataset dataset, QuerySolution querySolution) { + analyzeRequest(query, dataset, querySolution); + return QueryExecution.dataset(dataset).query(query).substitution(querySolution).build(); + } + + public QueryExecution sparqlService(String service, Query query) { + return sparqlServiceBuilder(service, query).build(); + } + public QueryExecution sparqlService(String service, Query query, HttpClient httpClient) { - return sparqlServiceBuilder(service, query, httpClient).build(); + return sparqlServiceBuilder(service, query, httpClient).build(); + } + + public QueryExecutionHTTP sparqlService(String service, Query query, HttpClient httpClient, List defaultGraphURIs, List namedGraphURIs) { + QueryExecutionHTTPBuilder builder = sparqlServiceBuilder(service, query, httpClient); + defaultGraphURIs.forEach(builder::addDefaultGraphURI); + namedGraphURIs.forEach(builder::addNamedGraphURI); + return builder.build(); } - public QueryExecutionHTTP sparqlService(String service, Query query, HttpClient httpClient, List defaultGraphURIs, List namedGraphURIs) { - QueryExecutionHTTPBuilder builder = sparqlServiceBuilder(service, query, httpClient); - defaultGraphURIs.forEach(uri -> builder.addDefaultGraphURI(uri)); - namedGraphURIs.forEach(uri -> builder.addNamedGraphURI(uri)); - return builder.build(); - } - private QueryExecutionHTTPBuilder sparqlServiceBuilder(String service, Query query, HttpClient httpClient) { - return sparqlServiceBuilder(service, query).httpClient(httpClient); - } - + return sparqlServiceBuilder(service, 
query).httpClient(httpClient); + } + private QueryExecutionHTTPBuilder sparqlServiceBuilder(String service, Query query) { - return QueryExecution.service(service).query(query); + return QueryExecution.service(service).query(query); } - - private void analyzeRequest(Query query, Model model, QuerySolution initialBinding) { + + private void analyzeRequest(Query query, Model model, QuerySolution initialBinding) { printQuery(query, initialBinding); - if(logger.isTraceEnabled()) { - logger.trace("QUERY[" + analyzeQuery(query) - + "]\nMODEL[" + analyzeModel(model) + "]" - + serializeBindings(initialBinding)); - } - } - - private void analyzeRequest(Query query, Dataset dataset, QuerySolution initialBinding) { - printQuery(query, initialBinding); - - if(logger.isTraceEnabled()) { - logger.trace("QUERY[" + analyzeQuery(query) - + "]\nDATASET[" + analyzeDataset(dataset) + "]" - + serializeBindings(initialBinding)); - } - } - - private static final DateTimeFormatter timestamp = DateTimeFormatter.ofPattern("HH:mm:ss.SSS"); - // Development support. Dynmically controlled print query. - private void printQuery(Query query, QuerySolution initialBinding) { - if ( PRINT ) { - String time = DateTimeUtils.nowAsString(timestamp); + if (logger.isTraceEnabled()) { + logger.trace("QUERY[" + analyzeQuery(query) + + "]\nMODEL[" + analyzeModel(model) + "]" + + serializeBindings(initialBinding)); + } + } + + private void analyzeRequest(Query query, Dataset dataset, QuerySolution initialBinding) { + printQuery(query, initialBinding); + + if (logger.isTraceEnabled()) { + logger.trace("QUERY[" + analyzeQuery(query) + + "]\nDATASET[" + analyzeDataset(dataset) + "]" + + serializeBindings(initialBinding)); + } + } + + private static final DateTimeFormatter timestamp = DateTimeFormatter.ofPattern("HH:mm:ss.SSS"); + + // Development support. Dynamically controlled print query. + private void printQuery(Query query, QuerySolution initialBinding) { + if (PRINT) { + String time = DateTimeUtils.nowAsString(timestamp); System.err.print("~~ "); System.err.print(time); System.err.println(" ~~"); System.err.println(initialBinding); System.err.print(query); } - } + } /** * Allow query printing to be switched on/off around specific sections of code that * are issuing queries. 
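A sketch of switching the printing on only around a suspect block of code:

    QueryExecutionFactoryFilter.enableQueryPrinting(true);
    try {
        // issue the queries under investigation; each one is printed to System.err
    } finally {
        QueryExecutionFactoryFilter.enableQueryPrinting(false);
    }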
- * @param value true to enable + * + * @param value true to enable */ public static void enableQueryPrinting(boolean value) { PRINT = value; } - - private String serializeBindings(QuerySolution bindings) { - if(bindings == null) return ""; - return "\nINITIAL BINDINGS[" + bindings.toString() + "]"; - } - - private String analyzeQuery(Query query) { - if(query == null) return "null query"; - return query.toString(); - } - - private String analyzeModel(Model model) { - if(model == null) return "null model"; - - return "this space for rent"; - } - - private String analyzeDataset(Dataset dataset) { - if(dataset == null) return "null dataset"; - - return "A Dataset"; - } + + private String serializeBindings(QuerySolution bindings) { + if (bindings == null) return ""; + return "\nINITIAL BINDINGS[" + bindings.toString() + "]"; + } + + private String analyzeQuery(Query query) { + if (query == null) return "null query"; + return query.toString(); + } + + private String analyzeModel(Model model) { + if (model == null) return "null model"; + + return "this space for rent"; + } + + private String analyzeDataset(Dataset dataset) { + if (dataset == null) return "null dataset"; + + return "A Dataset"; + } } diff --git a/src/main/java/org/topbraid/jenax/util/RDFLabels.java b/src/main/java/org/topbraid/jenax/util/RDFLabels.java index 161687b8..9a3a3de6 100644 --- a/src/main/java/org/topbraid/jenax/util/RDFLabels.java +++ b/src/main/java/org/topbraid/jenax/util/RDFLabels.java @@ -30,7 +30,7 @@ /** * A singleton that is used to render resources into strings. - * By default this displays qnames (if possible). + * By default, this displays qnames (if possible). * Can be changed, for example, to switch to displaying rdfs:labels * instead of qnames etc. * diff --git a/src/main/java/org/topbraid/jenax/util/SystemTriples.java b/src/main/java/org/topbraid/jenax/util/SystemTriples.java index 6dad3c2e..3b3401c6 100644 --- a/src/main/java/org/topbraid/jenax/util/SystemTriples.java +++ b/src/main/java/org/topbraid/jenax/util/SystemTriples.java @@ -17,11 +17,6 @@ package org.topbraid.jenax.util; -import java.io.InputStream; -import java.util.ArrayList; -import java.util.List; - -import org.apache.jena.datatypes.xsd.impl.XMLLiteralType; import org.apache.jena.rdf.model.Model; import org.apache.jena.rdf.model.Resource; import org.apache.jena.rdf.model.StmtIterator; @@ -31,92 +26,97 @@ import org.apache.jena.vocabulary.RDFS; import org.apache.jena.vocabulary.XSD; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.List; + /** * Provides access to the RDF/RDFS/OWL system triples. - * + *
* TopBraid and this API adds some extra triples (such as declaring * superclasses for each system class) that make life easier. - * + * * @author Holger Knublauch */ public class SystemTriples { - private static Model vocabulary; - - - private static void ensureSuperClasses(Resource metaClass, Resource superClass) { - List toAdd = collectMissingSuperClasses(metaClass, superClass); - for (Resource c: toAdd) { - vocabulary.add(c, RDFS.subClassOf, superClass); - } - } - - - private static List collectMissingSuperClasses(Resource metaClass, - Resource superClass) { - List toAdd = new ArrayList(); - StmtIterator it = vocabulary.listStatements(null, RDF.type, metaClass); - while (it.hasNext()) { - Resource c = it.nextStatement().getSubject(); - if (!c.equals(superClass)) { - if (c.getProperty(RDFS.subClassOf) == null) { - toAdd.add(c); - } - } - } - return toAdd; - } - - - /** - * Gets the system ontology (a shared copy). - * @return the system ontology - */ - public static synchronized Model getVocabularyModel() { - if (vocabulary == null) { - vocabulary = JenaUtil.createDefaultModel(); - org.topbraid.jenax.util.JenaUtil.initNamespaces(vocabulary.getGraph()); - vocabulary.setNsPrefix("xsd", XSD.getURI()); - InputStream ttl = SystemTriples.class.getResourceAsStream("/rdf/system-triples.ttl"); - vocabulary.read(ttl, "urn:x:dummy", FileUtils.langTurtle); - ensureSuperClasses(RDFS.Class, RDFS.Resource); - ensureSuperClasses(OWL.Class, OWL.Thing); - - // Remove owl imports rdfs which only causes trouble - vocabulary.removeAll(null, OWL.imports, null); - - vocabulary.add(OWL.Thing, RDFS.subClassOf, RDFS.Resource); - vocabulary.add(OWL.inverseOf, RDF.type, OWL.SymmetricProperty); - vocabulary.add(OWL.equivalentClass, RDF.type, OWL.SymmetricProperty); - vocabulary.add(OWL.equivalentProperty, RDF.type, OWL.SymmetricProperty); - vocabulary.add(OWL.equivalentProperty, RDFS.range, RDF.Property); - vocabulary.add(OWL.differentFrom, RDF.type, OWL.SymmetricProperty); - vocabulary.add(OWL.sameAs, RDF.type, OWL.SymmetricProperty); - vocabulary.add(OWL.disjointWith, RDF.type, OWL.SymmetricProperty); - Resource xml = vocabulary.getResource(XMLLiteralType.theXMLLiteralType.getURI()); - vocabulary.add(xml, RDFS.subClassOf, RDFS.Resource); - for(String uri : JenaDatatypes.getDatatypeURIs()) { - Resource r = vocabulary.getResource(uri); - if (r.getProperty(RDF.type) == null) { - vocabulary.add(r, RDF.type, RDFS.Datatype); - vocabulary.add(r, RDFS.subClassOf, RDFS.Literal); - } - } - - // vocabulary.add(RDF.HTML, RDFS.label, "HTML"); - - // Triples were formally in OWL 1, but dropped from OWL 2 - vocabulary.add(RDFS.comment, RDF.type, OWL.AnnotationProperty); - vocabulary.add(RDFS.label, RDF.type, OWL.AnnotationProperty); - vocabulary.add(RDFS.isDefinedBy, RDF.type, OWL.AnnotationProperty); - vocabulary.add(RDFS.seeAlso, RDF.type, OWL.AnnotationProperty); - - // Add rdfs:labels for XSD types - for(Resource datatype : vocabulary.listSubjectsWithProperty(RDF.type, RDFS.Datatype).toList()) { - datatype.addProperty(RDFS.label, datatype.getLocalName()); + private static Model vocabulary; + + + private static void ensureSuperClasses(Resource metaClass, Resource superClass) { + List toAdd = collectMissingSuperClasses(metaClass, superClass); + for (Resource c : toAdd) { + vocabulary.add(c, RDFS.subClassOf, superClass); + } + } + + + private static List collectMissingSuperClasses(Resource metaClass, + Resource superClass) { + List toAdd = new ArrayList<>(); + StmtIterator it = vocabulary.listStatements(null, RDF.type, 
metaClass); + while (it.hasNext()) { + Resource c = it.nextStatement().getSubject(); + if (!c.equals(superClass)) { + if (c.getProperty(RDFS.subClassOf) == null) { + toAdd.add(c); + } + } + } + return toAdd; + } + + + /** + * Gets the system ontology (a shared copy). + * + * @return the system ontology + */ + public static synchronized Model getVocabularyModel() { + if (vocabulary == null) { + vocabulary = JenaUtil.createDefaultModel(); + org.topbraid.jenax.util.JenaUtil.initNamespaces(vocabulary.getGraph()); + vocabulary.setNsPrefix("xsd", XSD.getURI()); + InputStream ttl = SystemTriples.class.getResourceAsStream("/rdf/system-triples.ttl"); + vocabulary.read(ttl, "urn:x:dummy", FileUtils.langTurtle); + ensureSuperClasses(RDFS.Class, RDFS.Resource); + ensureSuperClasses(OWL.Class, OWL.Thing); + + // Remove owl imports rdfs which only causes trouble + vocabulary.removeAll(null, OWL.imports, null); + + vocabulary.add(OWL.Thing, RDFS.subClassOf, RDFS.Resource); + vocabulary.add(OWL.inverseOf, RDF.type, OWL.SymmetricProperty); + vocabulary.add(OWL.equivalentClass, RDF.type, OWL.SymmetricProperty); + vocabulary.add(OWL.equivalentProperty, RDF.type, OWL.SymmetricProperty); + vocabulary.add(OWL.equivalentProperty, RDFS.range, RDF.Property); + vocabulary.add(OWL.differentFrom, RDF.type, OWL.SymmetricProperty); + vocabulary.add(OWL.sameAs, RDF.type, OWL.SymmetricProperty); + vocabulary.add(OWL.disjointWith, RDF.type, OWL.SymmetricProperty); + Resource xml = vocabulary.getResource(RDF.dtXMLLiteral.getURI()); + vocabulary.add(xml, RDFS.subClassOf, RDFS.Resource); + for (String uri : JenaDatatypes.getDatatypeURIs()) { + Resource r = vocabulary.getResource(uri); + if (r.getProperty(RDF.type) == null) { + vocabulary.add(r, RDF.type, RDFS.Datatype); + vocabulary.add(r, RDFS.subClassOf, RDFS.Literal); + } + } + + // vocabulary.add(RDF.HTML, RDFS.label, "HTML"); + + // Triples were formally in OWL 1, but dropped from OWL 2 + vocabulary.add(RDFS.comment, RDF.type, OWL.AnnotationProperty); + vocabulary.add(RDFS.label, RDF.type, OWL.AnnotationProperty); + vocabulary.add(RDFS.isDefinedBy, RDF.type, OWL.AnnotationProperty); + vocabulary.add(RDFS.seeAlso, RDF.type, OWL.AnnotationProperty); + + // Add rdfs:labels for XSD types + for (Resource datatype : vocabulary.listSubjectsWithProperty(RDF.type, RDFS.Datatype).toList()) { + datatype.addProperty(RDFS.label, datatype.getLocalName()); } - vocabulary = JenaUtil.asReadOnlyModel(vocabulary); - } - return vocabulary; - } + vocabulary = JenaUtil.asReadOnlyModel(vocabulary); + } + return vocabulary; + } } diff --git a/src/main/java/org/topbraid/shacl/arq/SHACLARQFunction.java b/src/main/java/org/topbraid/shacl/arq/SHACLARQFunction.java index 4be99e60..e5abc50c 100644 --- a/src/main/java/org/topbraid/shacl/arq/SHACLARQFunction.java +++ b/src/main/java/org/topbraid/shacl/arq/SHACLARQFunction.java @@ -17,11 +17,6 @@ package org.topbraid.shacl.arq; -import java.io.ByteArrayOutputStream; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - import org.apache.jena.atlas.io.IndentedWriter; import org.apache.jena.graph.Graph; import org.apache.jena.graph.Node; @@ -54,192 +49,184 @@ import org.topbraid.shacl.model.SHParameterizable; import org.topbraid.shacl.vocabulary.DASH; +import java.io.ByteArrayOutputStream; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + /** * An ARQ function that is based on a SHACL function definition. 
- * + * * @author Holger Knublauch */ public abstract class SHACLARQFunction implements org.apache.jena.sparql.function.Function, OptionalArgsFunction, DeclarativeFunctionFactory { - - private boolean cachable; - - protected List paramNames = new ArrayList(); - - private List optional = new ArrayList(); - - private SHFunction shFunction; - - - /** - * Constructs a new SHACLARQFunction based on a given sh:Function. - * The shaclFunction must be associated with the Model containing - * the triples of its definition. - * @param shaclFunction the SHACL function - */ - protected SHACLARQFunction(SHFunction shaclFunction) { - this.shFunction = shaclFunction; - if(shaclFunction != null) { - this.cachable = shaclFunction.hasProperty(DASH.cachable, JenaDatatypes.TRUE); - } - } - - - protected void addParameters(SHParameterizable parameterizable) { - JenaUtil.setGraphReadOptimization(true); - try { - for(SHParameter param : parameterizable.getOrderedParameters()) { - String varName = param.getVarName(); - if(varName == null) { - throw new IllegalStateException(param + " of " + parameterizable + " does not have a valid predicate"); - } - paramNames.add(varName); - optional.add(param.isOptional()); - } - } - finally { - JenaUtil.setGraphReadOptimization(false); - } - } - - - @Override - public void build(String uri, ExprList args) { - } - - - @Override + + private boolean cachable; + + protected List paramNames = new ArrayList(); + + private List optional = new ArrayList(); + + private SHFunction shFunction; + + + /** + * Constructs a new SHACLARQFunction based on a given sh:Function. + * The shaclFunction must be associated with the Model containing + * the triples of its definition. + * + * @param shaclFunction the SHACL function + */ + protected SHACLARQFunction(SHFunction shaclFunction) { + this.shFunction = shaclFunction; + if (shaclFunction != null) { + this.cachable = shaclFunction.hasProperty(DASH.cachable, JenaDatatypes.TRUE); + } + } + + + protected void addParameters(SHParameterizable parameterizable) { + JenaUtil.setGraphReadOptimization(true); + try { + for (SHParameter param : parameterizable.getOrderedParameters()) { + String varName = param.getVarName(); + if (varName == null) { + throw new IllegalStateException(param + " of " + parameterizable + " does not have a valid predicate"); + } + paramNames.add(varName); + optional.add(param.isOptional()); + } + } finally { + JenaUtil.setGraphReadOptimization(false); + } + } + + @Override public org.apache.jena.sparql.function.Function create(String uri) { - return this; - } + return this; + } + - - @Override + @Override public NodeValue exec(Binding binding, ExprList args, String uri, FunctionEnv env) { - - Graph activeGraph = env.getActiveGraph(); - Model model = activeGraph != null ? 
- ModelFactory.createModelForGraph(activeGraph) : - ModelFactory.createDefaultModel(); - - QuerySolutionMap bindings = new QuerySolutionMap(); - - Node[] paramsForCache; - if(cachable) { - paramsForCache = new Node[args.size()]; - } - else { - paramsForCache = null; - } - for(int i = 0; i < args.size(); i++) { - Expr expr = args.get(i); - if(expr != null && (!expr.isVariable() || binding.contains(expr.asVar()))) { - NodeValue x = expr.eval(binding, env); - if(x != null) { - String paramName; - if(i < paramNames.size()) { - paramName = paramNames.get(i); - } - else { - paramName = "arg" + (i + 1); - } - bindings.add(paramName, model.asRDFNode(x.asNode())); - if(cachable) { - paramsForCache[i] = x.asNode(); - } - } - else if(!optional.get(i)) { - throw new ExprEvalException("Missing SHACL function argument"); - } - } - } - - Dataset dataset = DatasetFactory.wrap(env.getDataset()); - - if(ExecStatisticsManager.get().isRecording() && ExecStatisticsManager.get().isRecordingDeclarativeFunctions()) { - StringBuffer sb = new StringBuffer(); - sb.append("SHACL Function "); - sb.append(SSE.str(NodeFactory.createURI(uri), model)); - sb.append("("); - for(int i = 0; i < args.size(); i++) { - if(i > 0) { - sb.append(", "); - } - Expr expr = args.get(i); - expr = Substitute.substitute(expr, binding); - if(expr == null) { - sb.append("?unbound"); - } - else { - ByteArrayOutputStream bos = new ByteArrayOutputStream(); - IndentedWriter iOut = new IndentedWriter(bos); - ExprUtils.fmtSPARQL(iOut, expr, new SerializationContext(model)); - iOut.flush(); - sb.append(bos.toString()); - } - } - sb.append(")"); - long startTime = System.currentTimeMillis(); - NodeValue result; - try { - if(cachable) { - result = SHACLFunctionsCache.get().execute(this, dataset, model, bindings, paramsForCache); - } - else { - result = executeBody(dataset, model, bindings); - } - sb.append(" = "); - sb.append(FmtUtils.stringForNode(result.asNode(), model)); - } - catch(ExprEvalException ex) { - sb.append(" : "); - sb.append(ex.getLocalizedMessage()); - throw ex; - } - finally { - long endTime = System.currentTimeMillis(); - ExecStatistics stats = new ExecStatistics(sb.toString(), getQueryString(), endTime - startTime, startTime, NodeFactory.createURI(uri)); - ExecStatisticsManager.get().addSilently(Collections.singleton(stats)); - } - return result; - } - else { - if(cachable) { - return SHACLFunctionsCache.get().execute(this, dataset, model, bindings, paramsForCache); - } - else { - return executeBody(dataset, model, bindings); - } - } - } - - - public abstract NodeValue executeBody(Dataset dataset, Model model, QuerySolution bindings); - - - protected abstract String getQueryString(); - - - /** - * Gets the underlying sh:Function Model object for this ARQ function. - * @return the sh:Function (may be null) - */ - public SHFunction getSHACLFunction() { - return shFunction; - } - - - /** - * Gets the names of the declared parameters, in order from left to right. - * @return the parameter names - */ - public String[] getParamNames() { - return paramNames.toArray(new String[0]); - } - - - @Override - public boolean isOptionalArg(int index) { - return optional.get(index); - } + + Graph activeGraph = env.getActiveGraph(); + Model model = activeGraph != null ? 
+ ModelFactory.createModelForGraph(activeGraph) : + ModelFactory.createDefaultModel(); + + QuerySolutionMap bindings = new QuerySolutionMap(); + + Node[] paramsForCache; + if (cachable) { + paramsForCache = new Node[args.size()]; + } else { + paramsForCache = null; + } + for (int i = 0; i < args.size(); i++) { + Expr expr = args.get(i); + if (expr != null && (!expr.isVariable() || binding.contains(expr.asVar()))) { + NodeValue x = expr.eval(binding, env); + if (x != null) { + String paramName; + if (i < paramNames.size()) { + paramName = paramNames.get(i); + } else { + paramName = "arg" + (i + 1); + } + bindings.add(paramName, model.asRDFNode(x.asNode())); + if (cachable) { + paramsForCache[i] = x.asNode(); + } + } else if (!optional.get(i)) { + throw new ExprEvalException("Missing SHACL function argument"); + } + } + } + + Dataset dataset = DatasetFactory.wrap(env.getDataset()); + + if (ExecStatisticsManager.get().isRecording() && ExecStatisticsManager.get().isRecordingDeclarativeFunctions()) { + StringBuffer sb = new StringBuffer(); + sb.append("SHACL Function "); + sb.append(SSE.str(NodeFactory.createURI(uri), model)); + sb.append("("); + for (int i = 0; i < args.size(); i++) { + if (i > 0) { + sb.append(", "); + } + Expr expr = args.get(i); + expr = Substitute.substitute(expr, binding); + if (expr == null) { + sb.append("?unbound"); + } else { + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + IndentedWriter iOut = new IndentedWriter(bos); + ExprUtils.fmtSPARQL(iOut, expr, new SerializationContext(model)); + iOut.flush(); + sb.append(bos); + } + } + sb.append(")"); + long startTime = System.currentTimeMillis(); + NodeValue result; + try { + if (cachable) { + result = SHACLFunctionsCache.get().execute(this, dataset, model, bindings, paramsForCache); + } else { + result = executeBody(dataset, model, bindings); + } + sb.append(" = "); + sb.append(FmtUtils.stringForNode(result.asNode(), model)); + } catch (ExprEvalException ex) { + sb.append(" : "); + sb.append(ex.getLocalizedMessage()); + throw ex; + } finally { + long endTime = System.currentTimeMillis(); + ExecStatistics stats = new ExecStatistics(sb.toString(), getQueryString(), endTime - startTime, startTime, NodeFactory.createURI(uri)); + ExecStatisticsManager.get().addSilently(Collections.singleton(stats)); + } + return result; + } else { + if (cachable) { + return SHACLFunctionsCache.get().execute(this, dataset, model, bindings, paramsForCache); + } else { + return executeBody(dataset, model, bindings); + } + } + } + + + public abstract NodeValue executeBody(Dataset dataset, Model model, QuerySolution bindings); + + + protected abstract String getQueryString(); + + + /** + * Gets the underlying sh:Function Model object for this ARQ function. + * + * @return the sh:Function (may be null) + */ + public SHFunction getSHACLFunction() { + return shFunction; + } + + + /** + * Gets the names of the declared parameters, in order from left to right. 
+ * + * @return the parameter names + */ + public String[] getParamNames() { + return paramNames.toArray(new String[0]); + } + + + @Override + public boolean isOptionalArg(int index) { + return optional.get(index); + } } diff --git a/src/main/java/org/topbraid/shacl/arq/SHACLFunctionsCache.java b/src/main/java/org/topbraid/shacl/arq/SHACLFunctionsCache.java index a15b8cd0..adaca999 100644 --- a/src/main/java/org/topbraid/shacl/arq/SHACLFunctionsCache.java +++ b/src/main/java/org/topbraid/shacl/arq/SHACLFunctionsCache.java @@ -16,11 +16,6 @@ */ package org.topbraid.shacl.arq; -import java.util.Collections; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.Map.Entry; - import org.apache.jena.graph.Node; import org.apache.jena.query.Dataset; import org.apache.jena.query.QuerySolution; @@ -28,145 +23,145 @@ import org.apache.jena.sparql.expr.ExprEvalException; import org.apache.jena.sparql.expr.NodeValue; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Map.Entry; + /** * A cache that remembers previous calls to SHACL functions marked with sh:cachable. - * + * * @author Holger Knublauch */ public class SHACLFunctionsCache { - private static SHACLFunctionsCache singleton = new SHACLFunctionsCache(); - - public static SHACLFunctionsCache get() { - return singleton; - } - - public static void set(SHACLFunctionsCache value) { - SHACLFunctionsCache.singleton = value; - } - - - private static final int capacity = 10000; - - @SuppressWarnings("serial") - private static class MyCache extends LinkedHashMap { - - MyCache() { - super(capacity + 1, 1.1f, true); - } - - @Override - protected boolean removeEldestEntry(Entry eldest) { - if(size() > capacity) { - return true; - } - else { - return false; - } - } - }; - - private Map cache = Collections.synchronizedMap(new MyCache()); - - - public void clear() { - cache.clear(); - } - - - public NodeValue execute(SHACLARQFunction function, Dataset dataset, Model defaultModel, QuerySolution bindings, Node[] args) { - Key key = new Key(function.getSHACLFunction().getURI(), args); - Result result = cache.get(key); - if(result == null) { - result = new Result(); - try { - result.nodeValue = function.executeBody(dataset, defaultModel, bindings); - } - catch(ExprEvalException ex) { - result.ex = ex; - } - cache.put(key, result); - } - if(result.ex != null) { - throw new ExprEvalException(result.ex.getMessage()); - } - else { - return result.nodeValue; - } - } - - - private static class Key { - - private int hashCode; - - private Node[] args; - - private String functionURI; - - - Key(String functionURI, Node[] args) { - this.args = args; - this.functionURI = functionURI; - hashCode = functionURI.hashCode(); - for(Node arg : args) { - if(arg != null) { - hashCode += arg.hashCode(); - } - } - } - - - private boolean argEquals(Node arg1, Node arg2) { - if(arg1 == null) { - return arg2 == null; - } - else if(arg2 == null) { - return false; - } - else { - return arg1.equals(arg2); - } - } - - - @Override - public boolean equals(Object obj) { - - if(!(obj instanceof Key)) { - return false; - } - - Key other = (Key) obj; - if(!functionURI.equals(other.functionURI)) { - return false; - } - - if(args.length != other.args.length) { - return false; - } - - for(int i = 0; i < args.length; i++) { - if(!argEquals(args[i], other.args[i])) { - return false; - } - } - - return true; - } - - - @Override - public int hashCode() { - return hashCode; - } - } - - - private static class Result { - - 
ExprEvalException ex; - - NodeValue nodeValue; - } + private static SHACLFunctionsCache singleton = new SHACLFunctionsCache(); + + public static SHACLFunctionsCache get() { + return singleton; + } + + public static void set(SHACLFunctionsCache value) { + SHACLFunctionsCache.singleton = value; + } + + + private static final int capacity = 10000; + + @SuppressWarnings("serial") + private static class MyCache extends LinkedHashMap { + + MyCache() { + super(capacity + 1, 1.1f, true); + } + + @Override + protected boolean removeEldestEntry(Entry eldest) { + if (size() > capacity) { + return true; + } else { + return false; + } + } + } + + private Map cache = Collections.synchronizedMap(new MyCache()); + + + public void clear() { + cache.clear(); + } + + + public NodeValue execute(SHACLARQFunction function, Dataset dataset, Model defaultModel, QuerySolution bindings, Node[] args) { + Key key = new Key(function.getSHACLFunction().getURI(), args); + Result result = cache.get(key); + if (result == null) { + result = new Result(); + try { + result.nodeValue = function.executeBody(dataset, defaultModel, bindings); + } catch (ExprEvalException ex) { + result.ex = ex; + } + cache.put(key, result); + } + if (result.ex != null) { + throw new ExprEvalException(result.ex.getMessage()); + } else { + return result.nodeValue; + } + } + + + private static class Key { + + private int hashCode; + + private Node[] args; + + private String functionURI; + + + Key(String functionURI, Node[] args) { + this.args = args; + this.functionURI = functionURI; + hashCode = functionURI.hashCode(); + for (Node arg : args) { + if (arg != null) { + hashCode += arg.hashCode(); + } + } + } + + + private boolean argEquals(Node arg1, Node arg2) { + if (arg1 == null) { + return arg2 == null; + } else if (arg2 == null) { + return false; + } else { + return arg1.equals(arg2); + } + } + + + @Override + public boolean equals(Object obj) { + + if (!(obj instanceof Key)) { + return false; + } + + Key other = (Key) obj; + if (!functionURI.equals(other.functionURI)) { + return false; + } + + if (args.length != other.args.length) { + return false; + } + + for (int i = 0; i < args.length; i++) { + if (!argEquals(args[i], other.args[i])) { + return false; + } + } + + return true; + } + + + @Override + public int hashCode() { + return hashCode; + } + } + + + private static class Result { + + ExprEvalException ex; + + NodeValue nodeValue; + } } diff --git a/src/main/java/org/topbraid/shacl/arq/SHACLPaths.java b/src/main/java/org/topbraid/shacl/arq/SHACLPaths.java index e70052d1..d4a1f070 100644 --- a/src/main/java/org/topbraid/shacl/arq/SHACLPaths.java +++ b/src/main/java/org/topbraid/shacl/arq/SHACLPaths.java @@ -57,7 +57,7 @@ import org.topbraid.shacl.vocabulary.SH; /** - * Utilties to manage the conversion between SHACL paths and SPARQL 1.1 property paths. + * Utilities to manage the conversion between SHACL paths and SPARQL 1.1 property paths. 
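The MyCache class in the SHACLFunctionsCache hunk above bounds the function-result cache by extending LinkedHashMap in access order and overriding removeEldestEntry. A minimal standalone sketch of the same eviction pattern follows; the class and field names are illustrative and not part of this codebase, and the body of removeEldestEntry shows that the if/else in the hunk can be collapsed to a single comparison.

import java.util.LinkedHashMap;
import java.util.Map;

// Illustrative LRU cache using the same LinkedHashMap hooks as MyCache above.
class SimpleLruCache<K, V> extends LinkedHashMap<K, V> {

    private final int capacity;

    SimpleLruCache(int capacity) {
        // accessOrder = true keeps recently used entries at the tail, as MyCache's constructor does
        super(capacity + 1, 1.1f, true);
        this.capacity = capacity;
    }

    @Override
    protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
        // Evict the least recently used entry once the configured capacity is exceeded
        return size() > capacity;
    }
}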
* * @author Holger Knublauch */ diff --git a/src/main/java/org/topbraid/shacl/arq/functions/CheckRegexSyntaxFunction.java b/src/main/java/org/topbraid/shacl/arq/functions/CheckRegexSyntaxFunction.java index dd96f9e9..e3efee2f 100644 --- a/src/main/java/org/topbraid/shacl/arq/functions/CheckRegexSyntaxFunction.java +++ b/src/main/java/org/topbraid/shacl/arq/functions/CheckRegexSyntaxFunction.java @@ -50,7 +50,7 @@ protected NodeValue exec(Node regexNode, FunctionEnv env) { /** * Convert the specified exception's message to a system-independent * format while preserving the message's embedded regex unchanged. - * This allows whomever catches the exception to inspect the original regex + * This allows whoever catches the exception to inspect the original regex * unchanged. * * @see PatternSyntaxException#getMessage() diff --git a/src/main/java/org/topbraid/shacl/arq/functions/SHACLSPARQLARQFunction.java b/src/main/java/org/topbraid/shacl/arq/functions/SHACLSPARQLARQFunction.java index cb076778..74d626c7 100644 --- a/src/main/java/org/topbraid/shacl/arq/functions/SHACLSPARQLARQFunction.java +++ b/src/main/java/org/topbraid/shacl/arq/functions/SHACLSPARQLARQFunction.java @@ -29,6 +29,7 @@ import org.apache.jena.sparql.expr.ExprEvalException; import org.apache.jena.sparql.expr.ExprList; import org.apache.jena.sparql.expr.NodeValue; +import org.apache.jena.sparql.util.Context; import org.topbraid.jenax.util.ARQFactory; import org.topbraid.jenax.util.DatasetWithDifferentDefaultModel; import org.topbraid.jenax.util.JenaUtil; @@ -54,7 +55,10 @@ public class SHACLSPARQLARQFunction extends SHACLARQFunction { private org.apache.jena.query.Query arqQuery; private String queryString; - + + @Override + public void build(String uri, ExprList args, Context context) { + } /** * Constructs a new SHACLSPARQLARQFunction based on a given sh:ConstraintComponent @@ -106,11 +110,6 @@ public SHACLSPARQLARQFunction(SHSPARQLFunction shaclFunction) { addParameters(shaclFunction); } - - - @Override - public void build(String uri, ExprList args) { - } @Override diff --git a/src/main/java/org/topbraid/shacl/engine/ShapesGraph.java b/src/main/java/org/topbraid/shacl/engine/ShapesGraph.java index 31c509d9..4e23b31b 100644 --- a/src/main/java/org/topbraid/shacl/engine/ShapesGraph.java +++ b/src/main/java/org/topbraid/shacl/engine/ShapesGraph.java @@ -16,21 +16,8 @@ */ package org.topbraid.shacl.engine; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.function.Predicate; - import org.apache.jena.graph.Node; -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.Property; -import org.apache.jena.rdf.model.RDFNode; -import org.apache.jena.rdf.model.Resource; -import org.apache.jena.rdf.model.StmtIterator; +import org.apache.jena.rdf.model.*; import org.apache.jena.shared.PrefixMapping; import org.apache.jena.sparql.graph.PrefixMappingMem; import org.apache.jena.sparql.util.FmtUtils; @@ -49,284 +36,291 @@ import org.topbraid.shacl.vocabulary.DASH; import org.topbraid.shacl.vocabulary.SH; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.function.Predicate; + /** * Represents a shapes graph as input to an engine (e.g. validation or inferencing). * This is basically a collection of Shapes with some data structures that avoid repetitive computation. 
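A brief usage sketch for the reworked ShapesGraph, under stated assumptions: the element type of getRootShapes() is taken to be Shape (type parameters are not rendered in this diff), the public constructor is called directly only to keep the sketch self-contained (its Javadoc recommends ShapesGraphFactory), and shapes.ttl is a hypothetical file name. Package locations follow the file paths in this diff.

import org.apache.jena.rdf.model.Model;
import org.apache.jena.riot.RDFDataMgr;
import org.topbraid.shacl.engine.Shape;
import org.topbraid.shacl.engine.ShapesGraph;
import org.topbraid.shacl.engine.filters.ExcludeMetaShapesFilter;

public class ShapesGraphSketch {

    public static void main(String[] args) {
        // Any Model containing SHACL shape definitions works; the file name is made up
        Model shapesModel = RDFDataMgr.loadModel("shapes.ttl");

        ShapesGraph shapesGraph = new ShapesGraph(shapesModel);
        // Filters must be installed immediately after construction (see the setter Javadocs below)
        shapesGraph.setShapeFilter(new ExcludeMetaShapesFilter());

        // Root shapes are the non-deactivated shapes that declare a target and pass the filter
        for (Shape shape : shapesGraph.getRootShapes()) {
            System.out.println(shape);
        }
    }
}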
- * + * * @author Holger Knublauch */ public class ShapesGraph { - - private final static Map EMPTY = new HashMap<>(); - - // May be defined to skip certain constraints (which are computed on demand) - private Predicate constraintFilter; - - // Map of sh:defaultValue expressions. Outer keys are sh:path predicates, inner keys are (node) shapes. - private Map> defaultValueMap = new ConcurrentHashMap<>(); - - // Can be used to bypass TDB's slow prefix mapping - private PrefixMapping fastPrefixMapping; - - // Cache of shapeFilter results - private Map ignoredShapes = new ConcurrentHashMap<>(); - - // Mapping of properties (e.g., sh:datatype) to their constraint components (e.g., sh:DatatypeConstraintComponent) - private Map parametersMap = new ConcurrentHashMap<>(); - - // The root shapes where whole-graph validation and inferencing would start - private List rootShapes; - - // Can be used to skip certain shapes - private Predicate shapeFilter; - - // Map of Jena Nodes to their Shape instances, computed on demand - private Map shapesMap = new ConcurrentHashMap<>(); - - // The Jena Model of the shape definitions - private Model shapesModel; - - // Map of sh:values expressions. Outer keys are sh:path predicates, inner keys are (node) shapes. - private Map> valuesMap = new ConcurrentHashMap<>(); - - - /** - * Constructs a new ShapesGraph. - * This should not be called directly, only from ShapesGraphFactory. - * @param shapesModel the Model containing the shape definitions - */ - public ShapesGraph(Model shapesModel) { - this.shapesModel = shapesModel; - } - - - public ShapesGraph clone() { - ShapesGraph clone = new ShapesGraph(shapesModel); - clone.constraintFilter = this.constraintFilter; - clone.shapeFilter = this.shapeFilter; - return clone; - } - - - public Constraint createConstraint(Shape shape, SHConstraintComponent component, List params, RDFNode parameterValue) { - return new Constraint(shape, component, params, parameterValue); - } - - - public SHConstraintComponent getComponentWithParameter(Property parameter) { - return parametersMap.computeIfAbsent(parameter, p -> { - StmtIterator it = shapesModel.listStatements(null, SH.path, parameter); - while(it.hasNext()) { - Resource param = it.next().getSubject(); - if(!param.hasProperty(SH.optional, JenaDatatypes.TRUE)) { - StmtIterator i2 = shapesModel.listStatements(null, SH.parameter, param); - while(i2.hasNext()) { - Resource r = i2.next().getSubject(); - if(JenaUtil.hasIndirectType(r, SH.ConstraintComponent)) { - i2.close(); - it.close(); - SHConstraintComponent cc = SHFactory.asConstraintComponent(r); - return cc; - } - } - } - } - return null; - }); - } - - - // Added for cases where repeated access to the prefixes causes many (TDB) loads, produces a faster in-memory PrefixMapping - public synchronized PrefixMapping getFastPrefixMapping() { - if(fastPrefixMapping == null) { - fastPrefixMapping = new PrefixMappingMem(); - Map pm = shapesModel.getNsPrefixMap(); - for(String prefix : pm.keySet()) { - fastPrefixMapping.setNsPrefix(prefix, pm.get(prefix)); - } - } - return fastPrefixMapping; - } - - - public String getPathString(Resource path) { - if(path.isURIResource()) { - return FmtUtils.stringForNode(path.asNode(), getFastPrefixMapping()); - } - else { - return SHACLPaths.getPathString(path); - } - } - - - /** - * Gets all non-deactivated shapes that declare a target and pass the provided filter. 
- * @return the root shapes - */ - public synchronized List getRootShapes() { - if(rootShapes == null) { - - // Collect all shapes, as identified by target and/or type - Set candidates = new HashSet<>(); - candidates.addAll(shapesModel.listSubjectsWithProperty(SH.target).toList()); - candidates.addAll(shapesModel.listSubjectsWithProperty(SH.targetClass).toList()); - candidates.addAll(shapesModel.listSubjectsWithProperty(SH.targetNode).toList()); - candidates.addAll(shapesModel.listSubjectsWithProperty(SH.targetObjectsOf).toList()); - candidates.addAll(shapesModel.listSubjectsWithProperty(SH.targetSubjectsOf).toList()); - for(Resource shape : JenaUtil.getAllInstances(shapesModel.getResource(SH.NodeShape.getURI()))) { - if(JenaUtil.hasIndirectType(shape, RDFS.Class)) { - candidates.add(shape); - } - } - for(Resource shape : JenaUtil.getAllInstances(shapesModel.getResource(SH.PropertyShape.getURI()))) { - if(JenaUtil.hasIndirectType(shape, RDFS.Class)) { - candidates.add(shape); - } - } - - // Turn the shape Resource objects into Shape instances - this.rootShapes = new LinkedList(); - for(Resource candidate : candidates) { - SHShape shape = SHFactory.asShape(candidate); - if(!shape.isDeactivated() && !isIgnored(shape.asNode())) { - this.rootShapes.add(getShape(shape.asNode())); - } - } - } - return rootShapes; - } - - - public Shape getShape(Node node) { - return shapesMap.computeIfAbsent(node, n -> new Shape(this, SHFactory.asShape(shapesModel.asRDFNode(node)))); - } - - - /** - * Gets a Map from (node) shapes to NodeExpressions derived from sh:defaultValue statements. - * @param predicate the predicate to infer - * @return a Map which is empty if the predicate is not mentioned in any inferences - */ - public Map getDefaultValueNodeExpressionsMap(Resource predicate) { - return getExpressionsMap(defaultValueMap, predicate, SH.defaultValue); - } - - - /** - * Gets a Map from (node) shapes to NodeExpressions derived from sh:values statements. - * Can be used to efficiently figure out how to infer the values of a given instance, based on the rdf:types - * of the instance. 
- * @param predicate the predicate to infer - * @return a Map which is empty if the predicate is not mentioned in any inferences - */ - public Map getValuesNodeExpressionsMap(Resource predicate) { - return getExpressionsMap(valuesMap, predicate, SH.values); - } - - - private Map getExpressionsMap(Map> valuesMap, Resource predicate, Property systemPredicate) { - return valuesMap.computeIfAbsent(predicate.asNode(), p -> { - - Map> map = new HashMap<>(); - StmtIterator it = shapesModel.listStatements(null, SH.path, predicate); - while(it.hasNext()) { - Resource ps = it.next().getSubject(); - if(ps.hasProperty(systemPredicate) && !ps.hasProperty(SH.deactivated, JenaDatatypes.TRUE)) { - StmtIterator nit = shapesModel.listStatements(null, SH.property, ps); - while(nit.hasNext()) { - Resource nodeShape = nit.next().getSubject(); - if(!nodeShape.hasProperty(SH.deactivated, JenaDatatypes.TRUE)) { - Node shapeNode = nodeShape.asNode(); - addExpressions(map, ps, shapeNode, systemPredicate); - for(Resource targetClass : JenaUtil.getResourceProperties(nodeShape, SH.targetClass)) { - addExpressions(map, ps, targetClass.asNode(), systemPredicate); - } - for(Resource targetClass : JenaUtil.getResourceProperties(nodeShape, DASH.applicableToClass)) { - addExpressions(map, ps, targetClass.asNode(), systemPredicate); - } - } - } - } - } - - if(map.isEmpty()) { - // Return a non-null but empty value to avoid re-computation (null not supported by ConcurrentHashMap) - return EMPTY; - } - else { - Map result = new HashMap<>(); - for(Node key : map.keySet()) { - List list = map.get(key); - if(list.size() > 1) { - RDFNode exprNode = shapesModel.asRDFNode(key); - result.put(key, new DistinctExpression(exprNode, new UnionExpression(exprNode, list))); - } - else { - result.put(key, list.get(0)); - } - } - return result; - } - }); - } - - - private void addExpressions(Map> map, Resource ps, Node shapeNode, Property systemPredicate) { - map.computeIfAbsent(shapeNode, n -> { - List exprs = new LinkedList<>(); - StmtIterator vit = ps.listProperties(systemPredicate); - while(vit.hasNext()) { - RDFNode expr = vit.next().getObject(); - NodeExpression nodeExpression = NodeExpressionFactory.get().create(expr); - exprs.add(nodeExpression); - } - return exprs; - }); - } - - - public Model getShapesModel() { - return shapesModel; - } - - - public boolean isIgnored(Node shapeNode) { - if(shapeFilter == null) { - return false; - } - else { - return ignoredShapes.computeIfAbsent(shapeNode, node -> { - SHShape shape = SHFactory.asShape(shapesModel.asRDFNode(shapeNode)); - return !shapeFilter.test(shape); - }); - } - } - - - public boolean isIgnoredConstraint(Constraint constraint) { - return constraintFilter != null && !constraintFilter.test(constraint); - } - - - /** - * Sets a filter Predicate that can be used to ignore certain constraints. - * See for example CoreConstraintFilter. - * Such filters must return true if the Constraint should be used, false to ignore. - * This method should be called immediately after the constructor only. - * @param value the new constraint filter - */ - public void setConstraintFilter(Predicate value) { - this.constraintFilter = value; - } - - - /** - * Sets a filter Predicate that can be used to ignore certain shapes. - * Such filters must return true if the shape should be used, false to ignore. - * This method should be called immediately after the constructor only. 
- * @param value the new shape filter - */ - public void setShapeFilter(Predicate value) { - this.shapeFilter = value; - } + + private final static Map EMPTY = new HashMap<>(); + + // May be defined to skip certain constraints (which are computed on demand) + private Predicate constraintFilter; + + // Map of sh:defaultValue expressions. Outer keys are sh:path predicates, inner keys are (node) shapes. + private Map> defaultValueMap = new ConcurrentHashMap<>(); + + // Can be used to bypass TDB's slow prefix mapping + private PrefixMapping fastPrefixMapping; + + // Cache of shapeFilter results + private Map ignoredShapes = new ConcurrentHashMap<>(); + + // Mapping of properties (e.g., sh:datatype) to their constraint components (e.g., sh:DatatypeConstraintComponent) + private Map parametersMap = new ConcurrentHashMap<>(); + + // The root shapes where whole-graph validation and inferencing would start + private List rootShapes; + + // Can be used to skip certain shapes + private Predicate shapeFilter; + + // Map of Jena Nodes to their Shape instances, computed on demand + private Map shapesMap = new ConcurrentHashMap<>(); + + // The Jena Model of the shape definitions + private Model shapesModel; + + // Map of sh:values expressions. Outer keys are sh:path predicates, inner keys are (node) shapes. + private Map> valuesMap = new ConcurrentHashMap<>(); + + + /** + * Constructs a new ShapesGraph. + * This should not be called directly, only from ShapesGraphFactory. + * + * @param shapesModel the Model containing the shape definitions + */ + public ShapesGraph(Model shapesModel) { + this.shapesModel = shapesModel; + } + + + @Override + public ShapesGraph clone() { + ShapesGraph clone = new ShapesGraph(shapesModel); + clone.constraintFilter = this.constraintFilter; + clone.shapeFilter = this.shapeFilter; + return clone; + } + + + public Constraint createConstraint(Shape shape, SHConstraintComponent component, List params, RDFNode parameterValue) { + return new Constraint(shape, component, params, parameterValue); + } + + + public SHConstraintComponent getComponentWithParameter(Property parameter) { + return parametersMap.computeIfAbsent(parameter, p -> { + StmtIterator it = shapesModel.listStatements(null, SH.path, parameter); + while (it.hasNext()) { + Resource param = it.next().getSubject(); + if (!param.hasProperty(SH.optional, JenaDatatypes.TRUE)) { + StmtIterator i2 = shapesModel.listStatements(null, SH.parameter, param); + while (i2.hasNext()) { + Resource r = i2.next().getSubject(); + if (JenaUtil.hasIndirectType(r, SH.ConstraintComponent)) { + i2.close(); + it.close(); + SHConstraintComponent cc = SHFactory.asConstraintComponent(r); + return cc; + } + } + } + } + return null; + }); + } + + + // Added for cases where repeated access to the prefixes causes many (TDB) loads, produces a faster in-memory PrefixMapping + public synchronized PrefixMapping getFastPrefixMapping() { + if (fastPrefixMapping == null) { + fastPrefixMapping = new PrefixMappingMem(); + Map pm = shapesModel.getNsPrefixMap(); + for (String prefix : pm.keySet()) { + fastPrefixMapping.setNsPrefix(prefix, pm.get(prefix)); + } + } + return fastPrefixMapping; + } + + + public String getPathString(Resource path) { + if (path.isURIResource()) { + return FmtUtils.stringForNode(path.asNode(), getFastPrefixMapping()); + } else { + return SHACLPaths.getPathString(path); + } + } + + + /** + * Gets all non-deactivated shapes that declare a target and pass the provided filter. 
+ * + * @return the root shapes + */ + public synchronized List getRootShapes() { + if (rootShapes == null) { + + // Collect all shapes, as identified by target and/or type + Set candidates = new HashSet<>(); + candidates.addAll(shapesModel.listSubjectsWithProperty(SH.target).toList()); + candidates.addAll(shapesModel.listSubjectsWithProperty(SH.targetClass).toList()); + candidates.addAll(shapesModel.listSubjectsWithProperty(SH.targetNode).toList()); + candidates.addAll(shapesModel.listSubjectsWithProperty(SH.targetObjectsOf).toList()); + candidates.addAll(shapesModel.listSubjectsWithProperty(SH.targetSubjectsOf).toList()); + for (Resource shape : JenaUtil.getAllInstances(shapesModel.getResource(SH.NodeShape.getURI()))) { + if (JenaUtil.hasIndirectType(shape, RDFS.Class)) { + candidates.add(shape); + } + } + for (Resource shape : JenaUtil.getAllInstances(shapesModel.getResource(SH.PropertyShape.getURI()))) { + if (JenaUtil.hasIndirectType(shape, RDFS.Class)) { + candidates.add(shape); + } + } + + // Turn the shape Resource objects into Shape instances + this.rootShapes = new LinkedList(); + for (Resource candidate : candidates) { + SHShape shape = SHFactory.asShape(candidate); + if (!shape.isDeactivated() && !isIgnored(shape.asNode())) { + this.rootShapes.add(getShape(shape.asNode())); + } + } + } + return rootShapes; + } + + + public Shape getShape(Node node) { + return shapesMap.computeIfAbsent(node, n -> new Shape(this, SHFactory.asShape(shapesModel.asRDFNode(node)))); + } + + + /** + * Gets a Map from (node) shapes to NodeExpressions derived from sh:defaultValue statements. + * + * @param predicate the predicate to infer + * @return a Map which is empty if the predicate is not mentioned in any inferences + */ + public Map getDefaultValueNodeExpressionsMap(Resource predicate) { + return getExpressionsMap(defaultValueMap, predicate, SH.defaultValue); + } + + + /** + * Gets a Map from (node) shapes to NodeExpressions derived from sh:values statements. + * Can be used to efficiently figure out how to infer the values of a given instance, based on the rdf:types + * of the instance. 
+ * + * @param predicate the predicate to infer + * @return a Map which is empty if the predicate is not mentioned in any inferences + */ + public Map getValuesNodeExpressionsMap(Resource predicate) { + return getExpressionsMap(valuesMap, predicate, SH.values); + } + + + private Map getExpressionsMap(Map> valuesMap, Resource predicate, Property systemPredicate) { + return valuesMap.computeIfAbsent(predicate.asNode(), p -> { + + Map> map = new HashMap<>(); + StmtIterator it = shapesModel.listStatements(null, SH.path, predicate); + while (it.hasNext()) { + Resource ps = it.next().getSubject(); + if (ps.hasProperty(systemPredicate) && !ps.hasProperty(SH.deactivated, JenaDatatypes.TRUE)) { + StmtIterator nit = shapesModel.listStatements(null, SH.property, ps); + while (nit.hasNext()) { + Resource nodeShape = nit.next().getSubject(); + if (!nodeShape.hasProperty(SH.deactivated, JenaDatatypes.TRUE)) { + Node shapeNode = nodeShape.asNode(); + addExpressions(map, ps, shapeNode, systemPredicate); + for (Resource targetClass : JenaUtil.getResourceProperties(nodeShape, SH.targetClass)) { + addExpressions(map, ps, targetClass.asNode(), systemPredicate); + } + for (Resource targetClass : JenaUtil.getResourceProperties(nodeShape, DASH.applicableToClass)) { + addExpressions(map, ps, targetClass.asNode(), systemPredicate); + } + } + } + } + } + + if (map.isEmpty()) { + // Return a non-null but empty value to avoid re-computation (null not supported by ConcurrentHashMap) + return EMPTY; + } else { + Map result = new HashMap<>(); + for (Node key : map.keySet()) { + List list = map.get(key); + if (list.size() > 1) { + RDFNode exprNode = shapesModel.asRDFNode(key); + result.put(key, new DistinctExpression(exprNode, new UnionExpression(exprNode, list))); + } else { + result.put(key, list.get(0)); + } + } + return result; + } + }); + } + + + private void addExpressions(Map> map, Resource ps, Node shapeNode, Property systemPredicate) { + map.computeIfAbsent(shapeNode, n -> { + List exprs = new LinkedList<>(); + StmtIterator vit = ps.listProperties(systemPredicate); + while (vit.hasNext()) { + RDFNode expr = vit.next().getObject(); + NodeExpression nodeExpression = NodeExpressionFactory.get().create(expr); + exprs.add(nodeExpression); + } + return exprs; + }); + } + + + public Model getShapesModel() { + return shapesModel; + } + + + public boolean isIgnored(Node shapeNode) { + if (shapeFilter == null) { + return false; + } else { + return ignoredShapes.computeIfAbsent(shapeNode, node -> { + SHShape shape = SHFactory.asShape(shapesModel.asRDFNode(shapeNode)); + return !shapeFilter.test(shape); + }); + } + } + + + public boolean isIgnoredConstraint(Constraint constraint) { + return constraintFilter != null && !constraintFilter.test(constraint); + } + + + /** + * Sets a filter Predicate that can be used to ignore certain constraints. + * See for example CoreConstraintFilter. + * Such filters must return true if the Constraint should be used, false to ignore. + * This method should be called immediately after the constructor only. + * + * @param value the new constraint filter + */ + public void setConstraintFilter(Predicate value) { + this.constraintFilter = value; + } + + + /** + * Sets a filter Predicate that can be used to ignore certain shapes. + * Such filters must return true if the shape should be used, false to ignore. + * This method should be called immediately after the constructor only. 
+ * + * @param value the new shape filter + */ + public void setShapeFilter(Predicate value) { + this.shapeFilter = value; + } } diff --git a/src/main/java/org/topbraid/shacl/engine/filters/ExcludeMetaShapesFilter.java b/src/main/java/org/topbraid/shacl/engine/filters/ExcludeMetaShapesFilter.java index 623dfca6..c49b030e 100644 --- a/src/main/java/org/topbraid/shacl/engine/filters/ExcludeMetaShapesFilter.java +++ b/src/main/java/org/topbraid/shacl/engine/filters/ExcludeMetaShapesFilter.java @@ -16,11 +16,6 @@ */ package org.topbraid.shacl.engine.filters; -import java.util.Collections; -import java.util.HashSet; -import java.util.Set; -import java.util.function.Predicate; - import org.apache.jena.rdf.model.Resource; import org.topbraid.jenax.util.JenaUtil; import org.topbraid.shacl.model.SHShape; @@ -28,30 +23,34 @@ import org.topbraid.shacl.vocabulary.SH; import org.topbraid.shacl.vocabulary.TOSH; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; +import java.util.function.Predicate; + /** * A Predicate that can be used to bypass any shapes that are also constraint components * and any shapes from the tosh namespace. - * + * * @author Holger Knublauch */ public class ExcludeMetaShapesFilter implements Predicate { - - private static Set systemShapes = new HashSet<>(); - static { - Collections.addAll(systemShapes, DASH.Editor, DASH.GraphStoreTestCase, DASH.InferencingTestCase, DASH.QueryTestCase, DASH.ValidationTestCase, DASH.Viewer, DASH.Widget); - } - - - public static void addSystemShapes(Resource... shapes) { - for(Resource shape : shapes) { - systemShapes.add(shape); - } - } - - - @Override - public boolean test(SHShape shape) { - return !JenaUtil.hasIndirectType(shape, SH.Parameter) && !systemShapes.contains(shape) && - (shape.isAnon() || !shape.getURI().startsWith(TOSH.NS)); - } + + private static Set systemShapes = new HashSet<>(); + + static { + Collections.addAll(systemShapes, DASH.Editor, DASH.GraphStoreTestCase, DASH.InferencingTestCase, DASH.QueryTestCase, DASH.ValidationTestCase, DASH.Viewer, DASH.Widget); + } + + + public static void addSystemShapes(Resource... shapes) { + Collections.addAll(systemShapes, shapes); + } + + + @Override + public boolean test(SHShape shape) { + return !JenaUtil.hasIndirectType(shape, SH.Parameter) && !systemShapes.contains(shape) && + (shape.isAnon() || !shape.getURI().startsWith(TOSH.NS)); + } } diff --git a/src/main/java/org/topbraid/shacl/entailment/SHACLEntailment.java b/src/main/java/org/topbraid/shacl/entailment/SHACLEntailment.java index ed04a062..ef592ae9 100644 --- a/src/main/java/org/topbraid/shacl/entailment/SHACLEntailment.java +++ b/src/main/java/org/topbraid/shacl/entailment/SHACLEntailment.java @@ -16,10 +16,6 @@ */ package org.topbraid.shacl.entailment; -import java.net.URI; -import java.util.HashMap; -import java.util.Map; - import org.apache.jena.query.Dataset; import org.apache.jena.rdf.model.Model; import org.apache.jena.rdf.model.ModelFactory; @@ -31,64 +27,65 @@ import org.topbraid.shacl.rules.RulesEntailment; import org.topbraid.shacl.vocabulary.SH; +import java.net.URI; +import java.util.HashMap; +import java.util.Map; + /** * Singleton to support sh:entailment. * Extensions may install their own Engines. 
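Extensions plug in their own entailment regime by implementing the Engine interface declared in the class body below and registering it on the singleton. A minimal sketch, assuming a hypothetical entailment URI; the identity engine here only returns the default model, where a real engine would return a model enriched with inferred triples. Once registered, withEntailment(...) resolves the URI to this engine and wraps the dataset with the engine's output model.

import org.topbraid.shacl.entailment.SHACLEntailment;

public class CustomEntailmentSetup {

    public static void register() {
        // Hypothetical entailment URI, for illustration only
        String uri = "http://example.org/entailment/custom";

        // Engine has a single abstract method, so a lambda suffices; this identity
        // engine returns the default model unchanged instead of computing inferences.
        SHACLEntailment.get().setEngine(uri, (dataset, shapesGraphURI, shapesGraph, monitor) ->
                dataset.getDefaultModel());
    }
}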
- * + * * @author Holger Knublauch */ public class SHACLEntailment { - - public final static Resource RDFS = ResourceFactory.createResource("http://www.w3.org/ns/entailment/RDFS"); - - public static interface Engine { - - Model createModelWithEntailment(Dataset dataset, URI shapesGraphURI, ShapesGraph shapesGraph, ProgressMonitor monitor) throws InterruptedException; - } - - private static SHACLEntailment singleton = new SHACLEntailment(); - - public static SHACLEntailment get() { - return singleton; - } - - private Map engines = new HashMap<>(); - - - protected SHACLEntailment() { - setEngine(RDFS.getURI(), new Engine() { - @Override - public Model createModelWithEntailment(Dataset dataset, URI shapesGraphURI, ShapesGraph shapesGraph, ProgressMonitor monitor) { - return ModelFactory.createRDFSModel(dataset.getDefaultModel()); - } - }); - setEngine(SH.Rules.getURI(), new RulesEntailment()); - } - - - public Engine getEngine(String uri) { - return engines.get(uri); - } - - - public void setEngine(String uri, Engine engine) { - engines.put(uri, engine); - } - - - public Dataset withEntailment(Dataset dataset, URI shapesGraphURI, ShapesGraph shapesGraph, Resource entailment, ProgressMonitor monitor) throws InterruptedException { - if(entailment == null || dataset.getDefaultModel() == null) { - return dataset; - } - else { - Engine engine = getEngine(entailment.getURI()); - if(engine != null) { - Model newDefaultModel = engine.createModelWithEntailment(dataset, shapesGraphURI, shapesGraph, monitor); - return new DatasetWithDifferentDefaultModel(newDefaultModel, dataset); - } - else { - return null; - } - } - } + + public final static Resource RDFS = ResourceFactory.createResource("http://www.w3.org/ns/entailment/RDFS"); + + public interface Engine { + Model createModelWithEntailment(Dataset dataset, URI shapesGraphURI, ShapesGraph shapesGraph, ProgressMonitor monitor) throws InterruptedException; + } + + private static final SHACLEntailment singleton = new SHACLEntailment(); + + public static SHACLEntailment get() { + return singleton; + } + + private Map engines = new HashMap<>(); + + + protected SHACLEntailment() { + setEngine(RDFS.getURI(), new Engine() { + @Override + public Model createModelWithEntailment(Dataset dataset, URI shapesGraphURI, ShapesGraph shapesGraph, ProgressMonitor monitor) { + return ModelFactory.createRDFSModel(dataset.getDefaultModel()); + } + }); + setEngine(SH.Rules.getURI(), new RulesEntailment()); + } + + + public Engine getEngine(String uri) { + return engines.get(uri); + } + + + public void setEngine(String uri, Engine engine) { + engines.put(uri, engine); + } + + + public Dataset withEntailment(Dataset dataset, URI shapesGraphURI, ShapesGraph shapesGraph, Resource entailment, ProgressMonitor monitor) throws InterruptedException { + if (entailment == null || dataset.getDefaultModel() == null) { + return dataset; + } else { + Engine engine = getEngine(entailment.getURI()); + if (engine != null) { + Model newDefaultModel = engine.createModelWithEntailment(dataset, shapesGraphURI, shapesGraph, monitor); + return new DatasetWithDifferentDefaultModel(newDefaultModel, dataset); + } else { + return null; + } + } + } } diff --git a/src/main/java/org/topbraid/shacl/expr/AbstractNodeExpression.java b/src/main/java/org/topbraid/shacl/expr/AbstractNodeExpression.java index 0302f16a..5aff66e2 100644 --- a/src/main/java/org/topbraid/shacl/expr/AbstractNodeExpression.java +++ b/src/main/java/org/topbraid/shacl/expr/AbstractNodeExpression.java @@ -1,56 +1,56 @@ package 
org.topbraid.shacl.expr; -import java.util.Collections; -import java.util.List; - import org.apache.jena.rdf.model.RDFNode; import org.apache.jena.rdf.model.Resource; import org.apache.jena.util.iterator.ExtendedIterator; +import java.util.Collections; +import java.util.List; + public abstract class AbstractNodeExpression implements NodeExpression { - - private final static List EMPTY = Collections.emptyList(); - private RDFNode expr; - - - protected AbstractNodeExpression(RDFNode expr) { - this.expr = expr; - } + private final static List EMPTY = Collections.emptyList(); + + private final RDFNode expr; + + + protected AbstractNodeExpression(RDFNode expr) { + this.expr = expr; + } - @Override - public ExtendedIterator evalReverse(RDFNode valueNode, NodeExpressionContext context) { - throw new IllegalStateException("Reverse evaluation is not supported for this node expression: " + toString()); - } + @Override + public ExtendedIterator evalReverse(RDFNode valueNode, NodeExpressionContext context) { + throw new IllegalStateException("Reverse evaluation is not supported for this node expression: " + this); + } - @Override - public List getInputExpressions() { - return EMPTY; - } + @Override + public List getInputExpressions() { + return EMPTY; + } - @Override - public Resource getOutputShape(Resource contextShape) { - return null; - } + @Override + public Resource getOutputShape(Resource contextShape) { + return null; + } - @Override - public RDFNode getRDFNode() { - return expr; - } + @Override + public RDFNode getRDFNode() { + return expr; + } - @Override - public boolean isReversible(NodeExpressionContext context) { - return false; - } + @Override + public boolean isReversible(NodeExpressionContext context) { + return false; + } - @Override - public String toString() { - return getFunctionalSyntax(); - } + @Override + public String toString() { + return getFunctionalSyntax(); + } } diff --git a/src/main/java/org/topbraid/shacl/expr/AbstractSPARQLExpression.java b/src/main/java/org/topbraid/shacl/expr/AbstractSPARQLExpression.java index ff858369..f65e089f 100644 --- a/src/main/java/org/topbraid/shacl/expr/AbstractSPARQLExpression.java +++ b/src/main/java/org/topbraid/shacl/expr/AbstractSPARQLExpression.java @@ -1,9 +1,5 @@ package org.topbraid.shacl.expr; -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; - import org.apache.jena.graph.NodeFactory; import org.apache.jena.query.Query; import org.apache.jena.query.QueryExecution; @@ -18,79 +14,81 @@ import org.topbraid.jenax.util.JenaDatatypes; import org.topbraid.shacl.vocabulary.SH; +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; + /** * Node expressions based on a SPARQL query, identified by sh:select or sh:ask. - * + *
+ *
* This node expression type is not part of the SHACL-AF 1.0 document, but a candidate for 1.1. - * + * * @author Holger Knublauch */ public abstract class AbstractSPARQLExpression extends AbstractInputExpression { - - private Query query; - - private String queryString; - - - protected AbstractSPARQLExpression(Resource expr, Query query, NodeExpression input, String queryString) { - super(expr, input); - this.query = query; - this.queryString = queryString; - } - - @Override - public ExtendedIterator eval(RDFNode focusNode, NodeExpressionContext context) { - List focusNodes; - NodeExpression input = getInput(); - if(input != null) { - focusNodes = input.eval(focusNode, context).toList(); - } - else { - focusNodes = Collections.singletonList(focusNode); - } - List results = new LinkedList<>(); - for(RDFNode f : focusNodes) { - QuerySolutionMap binding = new QuerySolutionMap(); - binding.add(SH.thisVar.getName(), f); - try(QueryExecution qexec = ARQFactory.get().createQueryExecution(query, context.getDataset(), binding)) { - if(query.isAskType()) { - results.add(qexec.execAsk() ? JenaDatatypes.TRUE : JenaDatatypes.FALSE); - } - else { - ResultSet rs = qexec.execSelect(); - String varName = rs.getResultVars().get(0); - while(rs.hasNext()) { - RDFNode node = rs.next().get(varName); - if(node != null) { - results.add(node); - } - } - } - } - } - return WrappedIterator.create(results.iterator()); - } + private Query query; + + private String queryString; + + + protected AbstractSPARQLExpression(Resource expr, Query query, NodeExpression input, String queryString) { + super(expr, input); + this.query = query; + this.queryString = queryString; + } + + + @Override + public ExtendedIterator eval(RDFNode focusNode, NodeExpressionContext context) { + List focusNodes; + NodeExpression input = getInput(); + if (input != null) { + focusNodes = input.eval(focusNode, context).toList(); + } else { + focusNodes = Collections.singletonList(focusNode); + } + List results = new LinkedList<>(); + for (RDFNode f : focusNodes) { + QuerySolutionMap binding = new QuerySolutionMap(); + binding.add(SH.thisVar.getName(), f); + try (QueryExecution qexec = ARQFactory.get().createQueryExecution(query, context.getDataset(), binding)) { + if (query.isAskType()) { + results.add(qexec.execAsk() ? 
JenaDatatypes.TRUE : JenaDatatypes.FALSE); + } else { + ResultSet rs = qexec.execSelect(); + String varName = rs.getResultVars().get(0); + while (rs.hasNext()) { + RDFNode node = rs.next().get(varName); + if (node != null) { + results.add(node); + } + } + } + } + } + return WrappedIterator.create(results.iterator()); + } + + + @Override + public List getFunctionalSyntaxArguments() { + List results = new LinkedList<>(); + results.add(FmtUtils.stringForNode(NodeFactory.createLiteralString(queryString))); + NodeExpression input = getInput(); + if (input != null) { + results.add(input.getFunctionalSyntax()); + } + return results; + } + + + public Query getQuery() { + return query; + } - @Override - public List getFunctionalSyntaxArguments() { - List results = new LinkedList<>(); - results.add(FmtUtils.stringForNode(NodeFactory.createLiteral(queryString))); - NodeExpression input = getInput(); - if(input != null) { - results.add(input.getFunctionalSyntax()); - } - return results; - } - - - public Query getQuery() { - return query; - } - - - public String getQueryString() { - return queryString; - } + public String getQueryString() { + return queryString; + } } diff --git a/src/main/java/org/topbraid/shacl/expr/ComplexNodeExpression.java b/src/main/java/org/topbraid/shacl/expr/ComplexNodeExpression.java index 3074aab9..5c25219c 100644 --- a/src/main/java/org/topbraid/shacl/expr/ComplexNodeExpression.java +++ b/src/main/java/org/topbraid/shacl/expr/ComplexNodeExpression.java @@ -16,40 +16,40 @@ */ package org.topbraid.shacl.expr; +import org.apache.jena.rdf.model.RDFNode; + import java.util.Iterator; import java.util.List; -import org.apache.jena.rdf.model.RDFNode; - public abstract class ComplexNodeExpression extends AbstractNodeExpression { - - protected ComplexNodeExpression(RDFNode expr) { - super(expr); - } - - - @Override - public String getFunctionalSyntax() { - String str = getFunctionalSyntaxName(); - str += "("; - List args = getFunctionalSyntaxArguments(); - Iterator it = args.iterator(); - while(it.hasNext()) { - String next = it.next(); - str += next; - if(it.hasNext()) { - str += ", "; - } - } - str += ")"; - return str; - } - - - protected String getFunctionalSyntaxName() { - return getTypeId().toString(); - } - - - public abstract List getFunctionalSyntaxArguments(); + + protected ComplexNodeExpression(RDFNode expr) { + super(expr); + } + + + @Override + public String getFunctionalSyntax() { + String str = getFunctionalSyntaxName(); + str += "("; + List args = getFunctionalSyntaxArguments(); + Iterator it = args.iterator(); + while (it.hasNext()) { + String next = it.next(); + str += next; + if (it.hasNext()) { + str += ", "; + } + } + str += ")"; + return str; + } + + + protected String getFunctionalSyntaxName() { + return getTypeId(); + } + + + public abstract List getFunctionalSyntaxArguments(); } diff --git a/src/main/java/org/topbraid/shacl/expr/PathEvaluator.java b/src/main/java/org/topbraid/shacl/expr/PathEvaluator.java index 907ea4bb..f99622bd 100644 --- a/src/main/java/org/topbraid/shacl/expr/PathEvaluator.java +++ b/src/main/java/org/topbraid/shacl/expr/PathEvaluator.java @@ -16,17 +16,8 @@ */ package org.topbraid.shacl.expr; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; - import org.apache.jena.graph.Node; -import org.apache.jena.rdf.model.Literal; -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.Property; -import org.apache.jena.rdf.model.RDFNode; -import 
org.apache.jena.rdf.model.Resource; +import org.apache.jena.rdf.model.*; import org.apache.jena.sparql.path.P_Inverse; import org.apache.jena.sparql.path.P_Link; import org.apache.jena.sparql.path.Path; @@ -38,220 +29,218 @@ import org.topbraid.shacl.engine.ShapesGraph; import org.topbraid.shacl.expr.lib.DistinctExpression; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; + /** * An object that computes the values of a sh:path node expression. * This implements consistent handling of inferred values. - * + *
+ *
* Inferences are limited to simple forward paths consisting of a single predicate. - * + * * @author Holger Knublauch */ public class PathEvaluator { - - private NodeExpression input; - - private boolean isInverse; - - private Path jenaPath; - - private Property predicate; - - - /** - * Constructs a PathEvaluator for a single "forward" property look-up. - * @param predicate the predicate - */ - public PathEvaluator(Property predicate) { - this.predicate = predicate; - } - - - /** - * Constructs a PathEvaluator for an arbitrary SPARQL path (except single forward properties). - * @param path the path - * @param shapesModel the shapes Model - */ - public PathEvaluator(Path path, Model shapesModel) { - this.jenaPath = path; - isInverse = jenaPath instanceof P_Inverse && ((P_Inverse)jenaPath).getSubPath() instanceof P_Link; - if(isInverse) { - P_Link link = (P_Link) ((P_Inverse)jenaPath).getSubPath(); - predicate = shapesModel.getProperty(link.getNode().getURI()); - } - } + + private NodeExpression input; + + private boolean isInverse; + + private Path jenaPath; + + private Property predicate; + + + /** + * Constructs a PathEvaluator for a single "forward" property look-up. + * + * @param predicate the predicate + */ + public PathEvaluator(Property predicate) { + this.predicate = predicate; + } + + + /** + * Constructs a PathEvaluator for an arbitrary SPARQL path (except single forward properties). + * + * @param path the path + * @param shapesModel the shapes Model + */ + public PathEvaluator(Path path, Model shapesModel) { + this.jenaPath = path; + isInverse = jenaPath instanceof P_Inverse && ((P_Inverse) jenaPath).getSubPath() instanceof P_Link; + if (isInverse) { + P_Link link = (P_Link) ((P_Inverse) jenaPath).getSubPath(); + predicate = shapesModel.getProperty(link.getNode().getURI()); + } + } + + + public ExtendedIterator eval(RDFNode focusNode, NodeExpressionContext context) { + if (input == null) { + ExtendedIterator asserted = evalFocusNode(focusNode, context); + return withDefaultValues(withInferences(asserted, focusNode, context), focusNode, context); + } else { + Iterator it = input.eval(focusNode, context); + if (it.hasNext()) { + RDFNode first = it.next(); + ExtendedIterator result = withDefaultValues(withInferences(evalFocusNode(first, context), first, context), first, context); + while (it.hasNext()) { + RDFNode n = it.next(); + result = result.andThen(withDefaultValues(withInferences(evalFocusNode(n, context), n, context), first, context)); + } + return result; + } else { + return WrappedIterator.emptyIterator(); + } + } + } + + + public ExtendedIterator evalReverse(RDFNode valueNode, NodeExpressionContext context) { + // See isReversible, this only supports trivial cases for now + if (isInverse) { + if (valueNode instanceof Literal) { + return WrappedIterator.emptyIterator(); + } else { + return context.getDataset().getDefaultModel().listObjectsOfProperty((Resource) valueNode, predicate); + } + } else { + return context.getDataset().getDefaultModel().listSubjectsWithProperty(predicate, valueNode).mapWith(r -> (RDFNode) r); + } + } + + + /** + * Gets the executed Jena Path or null if this is just a simple forward property. + * + * @return the executed Jena Path + */ + public Path getJenaPath() { + return jenaPath; + } + + + /** + * Gets the predicate if this is a simple forward property path. + * Returns null for inverse paths. 
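The forward and inverse single-predicate lookups this class performs (evalReverse above, evalFocusNode further down in this hunk) reduce to two plain Jena model calls. A self-contained sketch with made-up resources and data:

import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.Property;
import org.apache.jena.rdf.model.Resource;

public class PathLookupSketch {

    public static void main(String[] args) {
        Model model = ModelFactory.createDefaultModel();
        Resource alice = model.createResource("http://example.org/alice");
        Resource bob = model.createResource("http://example.org/bob");
        Property knows = model.createProperty("http://example.org/knows");
        alice.addProperty(knows, bob);

        // Forward path (sh:path ex:knows): objects of the predicate for the focus node
        model.listObjectsOfProperty(alice, knows).forEachRemaining(System.out::println);

        // Inverse path ([ sh:inversePath ex:knows ]): subjects pointing at the value node
        model.listSubjectsWithProperty(knows, bob).forEachRemaining(System.out::println);
    }
}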
+ * + * @return the predicate or null + */ + public Property getPredicate() { + if (predicate != null && !isInverse) { + return predicate; + } else { + return null; + } + } + + + /** + * Checks if the values of this may be inferred. + * This is the case if this uses a single forward property path and there are any sh:values or sh:defaultValue statements on + * that predicate in the provided shapes graph. + * The actual computation on whether the values are inferred depends on the actual focus node, which is why this is + * only a "maybe". + * This function may be used to exclude optimizations that are possible if we know that no inferences can exist. + * + * @param shapesGraph the ShapesGraph (which caches previous results) + * @return true if there may be sh:values statements + */ + public boolean isMaybeInferred(ShapesGraph shapesGraph) { + if (predicate != null && !isInverse) { + return !shapesGraph.getValuesNodeExpressionsMap(predicate).isEmpty() || !shapesGraph.getDefaultValueNodeExpressionsMap(predicate).isEmpty(); + } else { + return false; + } + } + + + public boolean isReversible(ShapesGraph shapesGraph) { + // Very conservative algorithm for now + return input == null && !isMaybeInferred(shapesGraph) && jenaPath == null; + } + + + public void setInput(NodeExpression input) { + this.input = input; + } + + + private ExtendedIterator evalFocusNode(RDFNode focusNode, NodeExpressionContext context) { + if (jenaPath == null) { + if (focusNode.isLiteral()) { + return WrappedIterator.emptyIterator(); + } else { + return context.getDataset().getDefaultModel().listObjectsOfProperty((Resource) focusNode, predicate); + } + } else if (isInverse) { + return context.getDataset().getDefaultModel().listSubjectsWithProperty(predicate, focusNode).mapWith(r -> (RDFNode) r); + } else { + // This ought to do lazy evaluation too + List results = new LinkedList<>(); + SHACLPaths.addValueNodes(focusNode.inModel(context.getDataset().getDefaultModel()), jenaPath, results); + return WrappedIterator.create(results.iterator()); + } + } - public ExtendedIterator eval(RDFNode focusNode, NodeExpressionContext context) { - if(input == null) { - ExtendedIterator asserted = evalFocusNode(focusNode, context); - return withDefaultValues(withInferences(asserted, focusNode, context), focusNode, context); - } - else { - Iterator it = input.eval(focusNode, context); - if(it.hasNext()) { - RDFNode first = it.next(); - ExtendedIterator result = withDefaultValues(withInferences(evalFocusNode(first, context), first, context), first, context); - while(it.hasNext()) { - RDFNode n = it.next(); - result = result.andThen(withDefaultValues(withInferences(evalFocusNode(n, context), n, context), first, context)); - } - return result; - } - else { - return WrappedIterator.emptyIterator(); - } - } - } - - - public ExtendedIterator evalReverse(RDFNode valueNode, NodeExpressionContext context) { - // See isReversible, this only supports trivial cases for now - if(isInverse) { - if(valueNode instanceof Literal) { - return WrappedIterator.emptyIterator(); - } - else { - return context.getDataset().getDefaultModel().listObjectsOfProperty((Resource)valueNode, predicate); - } - } - else { - return context.getDataset().getDefaultModel().listSubjectsWithProperty(predicate, valueNode).mapWith(r -> (RDFNode)r); - } - } - - - /** - * Gets the executed Jena Path or null if this is just a simple forward property. 
- * @return - */ - public Path getJenaPath() { - return jenaPath; - } - - - /** - * Gets the predicate if this is a simple forward property path. - * Returns null for inverse paths. - * @return the predicate or null - */ - public Property getPredicate() { - if(predicate != null && !isInverse) { - return predicate; - } - else { - return null; - } - } - - - /** - * Checks if the values of this may be inferred. - * This is the case if this uses a single forward property path and there are any sh:values or sh:defaultValue statements on - * that predicate in the provided shapes graph. - * The actual computation on whether the values are inferred depends on the actual focus node, which is why this is - * only a "maybe". - * This function may be used to exclude optimizations that are possible if we know that no inferences can exist. - * @param shapesGraph the ShapesGraph (which caches previous results) - * @return true if there may be sh:values statements - */ - public boolean isMaybeInferred(ShapesGraph shapesGraph) { - if(predicate != null && !isInverse) { - return !shapesGraph.getValuesNodeExpressionsMap(predicate).isEmpty() || !shapesGraph.getDefaultValueNodeExpressionsMap(predicate).isEmpty(); - } - else { - return false; - } - } - - - public boolean isReversible(ShapesGraph shapesGraph) { - // Very conservative algorithm for now - return input == null && !isMaybeInferred(shapesGraph) && jenaPath == null; - } - - - public void setInput(NodeExpression input) { - this.input = input; - } + private ExtendedIterator withDefaultValues(ExtendedIterator base, RDFNode focusNode, NodeExpressionContext context) { + if (isInverse || predicate == null || base.hasNext()) { + return base; + } else { + Map map = context.getShapesGraph().getDefaultValueNodeExpressionsMap(predicate); + if (map.isEmpty()) { + return base; + } else { + ExtendedIterator result = WrappedIterator.emptyIterator(); + int count = 0; + for (Resource type : JenaUtil.getAllTypes((Resource) focusNode)) { + NodeExpression expr = map.get(type.asNode()); + if (expr != null) { + result = result.andThen(expr.eval(focusNode, context)); + count++; + } + } + if (count > 1) { + // Filter out duplicates in case multiple sh:defaultValue expressions exist + return DistinctExpression.distinct(result); + } else { + return result; + } + } + } + } - private ExtendedIterator evalFocusNode(RDFNode focusNode, NodeExpressionContext context) { - if(jenaPath == null) { - if(focusNode.isLiteral()) { - return WrappedIterator.emptyIterator(); - } - else { - return context.getDataset().getDefaultModel().listObjectsOfProperty((Resource)focusNode, predicate); - } - } - else if(isInverse) { - return context.getDataset().getDefaultModel().listSubjectsWithProperty(predicate, focusNode).mapWith(r -> (RDFNode)r); - } - else { - // This ought to do lazy evaluation too - List results = new LinkedList<>(); - SHACLPaths.addValueNodes(focusNode.inModel(context.getDataset().getDefaultModel()), jenaPath, results); - return WrappedIterator.create(results.iterator()); - } - } - - - private ExtendedIterator withDefaultValues(ExtendedIterator base, RDFNode focusNode, NodeExpressionContext context) { - if(isInverse || predicate == null || base.hasNext()) { - return base; - } - else { - Map map = context.getShapesGraph().getDefaultValueNodeExpressionsMap(predicate); - if(map.isEmpty()) { - return base; - } - else { - ExtendedIterator result = WrappedIterator.emptyIterator(); - int count = 0; - for(Resource type : JenaUtil.getAllTypes((Resource)focusNode)) { - NodeExpression expr = 
map.get(type.asNode()); - if(expr != null) { - result = result.andThen(expr.eval(focusNode, context)); - count++; - } - } - if(count > 1) { - // Filter out duplicates in case multiple sh:defaultValue expressions exist - return DistinctExpression.distinct(result); - } - else { - return result; - } - } - } - } - - - private ExtendedIterator withInferences(ExtendedIterator base, RDFNode focusNode, NodeExpressionContext context) { - if(predicate != null && !isInverse && focusNode.isResource()) { - Map map = context.getShapesGraph().getValuesNodeExpressionsMap(predicate); - if(!map.isEmpty()) { - ExtendedIterator result = base; - boolean hasInferences = false; - // TODO: support cases like metash:Resource (if it had no target): if the type has a sh:node then the value rules should be found - // even if declared in the super-shape - for(Resource type : JenaUtil.getAllTypes((Resource)focusNode)) { - NodeExpression expr = map.get(type.asNode()); - if(expr != null) { - result = result.andThen(expr.eval(focusNode, context)); - hasInferences = true; - } - } - if(!hasInferences && map.get(RDFS.Resource.asNode()) != null) { - // This is to support cases like generic schema even if no rdf:type is present or it doesn't reach rdfs:Resource in the hierarchy - NodeExpression expr = map.get(RDFS.Resource.asNode()); - result = result.andThen(expr.eval(focusNode, context)); - hasInferences = true; - } - // Filter out duplicates in case the graph contains materialized inferences and because sh:values may return lists - return DistinctExpression.distinct(result); - } - } - return base; - } + private ExtendedIterator withInferences(ExtendedIterator base, RDFNode focusNode, NodeExpressionContext context) { + if (predicate != null && !isInverse && focusNode.isResource()) { + Map map = context.getShapesGraph().getValuesNodeExpressionsMap(predicate); + if (!map.isEmpty()) { + ExtendedIterator result = base; + boolean hasInferences = false; + // TODO: support cases like metash:Resource (if it had no target): if the type has a sh:node then the value rules should be found + // even if declared in the super-shape + for (Resource type : JenaUtil.getAllTypes((Resource) focusNode)) { + NodeExpression expr = map.get(type.asNode()); + if (expr != null) { + result = result.andThen(expr.eval(focusNode, context)); + hasInferences = true; + } + } + if (!hasInferences && map.get(RDFS.Resource.asNode()) != null) { + // This is to support cases like generic schema even if no rdf:type is present or it doesn't reach rdfs:Resource in the hierarchy + NodeExpression expr = map.get(RDFS.Resource.asNode()); + result = result.andThen(expr.eval(focusNode, context)); + hasInferences = true; + } + // Filter out duplicates in case the graph contains materialized inferences and because sh:values may return lists + return DistinctExpression.distinct(result); + } + } + return base; + } } diff --git a/src/main/java/org/topbraid/shacl/optimize/ClassMetadata.java b/src/main/java/org/topbraid/shacl/optimize/ClassMetadata.java index e300b9c7..cea1d293 100644 --- a/src/main/java/org/topbraid/shacl/optimize/ClassMetadata.java +++ b/src/main/java/org/topbraid/shacl/optimize/ClassMetadata.java @@ -16,16 +16,6 @@ */ package org.topbraid.shacl.optimize; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.function.Consumer; -import java.util.function.Function; -import java.util.function.Predicate; - import org.apache.jena.graph.Graph; import 
org.apache.jena.graph.Node; import org.apache.jena.graph.Triple; @@ -35,289 +25,292 @@ import org.topbraid.jenax.util.JenaNodeUtil; import org.topbraid.shacl.vocabulary.SH; +import java.util.*; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.function.Predicate; + /** * Represents some ontology-related data about a given class, so that it can be accessed * more efficiently than through the RDF triples. - * + *
* In particular this includes information about the properties attached to the class, * as well as utilities to walk up the superclass hierarchy. - * + * * @author Holger Knublauch */ public class ClassMetadata { - - public static Object createKey(Node classNode, String graphKey) { - return new Key(classNode, graphKey); - } - - - private Node classNode; - - private String graphKey; - - private Map> groupPaths; - - private Map properties = new HashMap<>(); - - private List superClasses; - - - public ClassMetadata(Node classNode, String graphKey) { - this.classNode = classNode; - this.graphKey = graphKey; - } - - - public synchronized Set getGroupPaths(Node group, Graph graph) { - if(groupPaths == null) { - groupPaths = new HashMap<>(); - if(JenaNodeUtil.isInstanceOf(classNode, SH.Shape.asNode(), graph)) { - addGroupProperties(classNode, graph, SH.parameter.asNode()); - addGroupProperties(classNode, graph, SH.property.asNode()); - } - ExtendedIterator it = graph.find(null, SH.targetClass.asNode(), classNode); - while(it.hasNext()) { - Node shape = it.next().getSubject(); - addGroupProperties(shape, graph, SH.parameter.asNode()); - addGroupProperties(shape, graph, SH.property.asNode()); - } - } - return groupPaths.get(group); - } - - - private void addGroupProperties(Node nodeShape, Graph graph, Node systemPredicate) { - ExtendedIterator it = graph.find(nodeShape, systemPredicate, Node.ANY); - while(it.hasNext()) { - Node propertyShape = it.next().getObject(); - if(!graph.contains(propertyShape, SH.deactivated.asNode(), JenaDatatypes.TRUE.asNode())) { - Node group = JenaNodeUtil.getObject(propertyShape, SH.group.asNode(), graph); - if(group != null) { - Node path = JenaNodeUtil.getObject(propertyShape, SH.path.asNode(), graph); - if(path != null) { - Set paths = groupPaths.get(group); - if(paths == null) { - paths = new HashSet<>(); - groupPaths.put(group, paths); - } - if(path.isURI()) { - paths.add(new PathMetadata(path, false)); - } - else { - Node inverse = JenaNodeUtil.getObject(path, SH.inversePath.asNode(), graph); - if(inverse != null && inverse.isURI()) { - paths.add(new PathMetadata(inverse, true)); - } - } - } - } - } - } - } - - - public Node getPropertyDescription(Node property, boolean inverse, Graph graph) { - return nearest(graph, new Function() { - @Override - public Node apply(ClassMetadata cm) { - return cm.getProperty(property, inverse, graph).getDescription(); - } - }, null); - } - - - public Node getPropertyEditWidget(Node property, boolean inverse, Graph graph) { - return nearest(graph, new Function() { - @Override - public Node apply(ClassMetadata cm) { - return cm.getProperty(property, inverse, graph).getEditWidget(); - } - }, null); - } - - - public Node getPropertyLocalRange(Node property, boolean inverse, Graph graph) { - return nearest(graph, new Function() { - @Override - public Node apply(ClassMetadata cm) { - return cm.getProperty(property, inverse, graph).getLocalRange(); - } - }, null); - } - - - public Integer getPropertyMaxCount(Node property, boolean inverse, Graph graph) { - return (Integer) nearestObject(graph, new Function() { - @Override - public Object apply(ClassMetadata cm) { - return cm.getProperty(property, inverse, graph).getMaxCount(); - } - }, new HashSet()); - } - - - public Node getPropertyName(Node property, boolean inverse, Graph graph) { - return nearest(graph, new Function() { - @Override - public Node apply(ClassMetadata cm) { - return cm.getProperty(property, inverse, graph).getName(); - } - }, null); - } - - - public Node 
getPropertyViewWidget(Node property, boolean inverse, Graph graph) { - return nearest(graph, new Function() { - @Override - public Node apply(ClassMetadata cm) { - return cm.getProperty(property, inverse, graph).getViewWidget(); - } - }, null); - } - - - public synchronized Iterable getSuperClasses(Graph graph) { - if(superClasses == null) { - superClasses = new LinkedList<>(); - ExtendedIterator it = graph.find(classNode, RDFS.subClassOf.asNode(), Node.ANY); - while(it.hasNext()) { - Node superClass = it.next().getObject(); - superClasses.add(OntologyOptimizations.get().getClassMetadata(superClass, graph, graphKey)); - } - } - return superClasses; - } - - - public synchronized ClassPropertyMetadata getProperty(Node predicate, boolean inverse, Graph graph) { - PathMetadata pm = new PathMetadata(predicate, inverse); - return getProperty(pm, graph); - } - - - public synchronized ClassPropertyMetadata getProperty(PathMetadata pm, Graph graph) { - ClassPropertyMetadata result = properties.get(pm); - if(result == null) { - result = new ClassPropertyMetadata(classNode, pm.getPredicate(), pm.isInverse(), graph); - properties.put(pm, result); - } - return result; - } - - - /** - * Walks this and its superclasses until it finds one where the given Supplier returns a value. - * @param property - * @param graph - * @param supplier - * @return the nearest supplied value - */ - private Node nearest(Graph graph, Function supplier, Set visited) { - Node result = supplier.apply(this); - if(result != null) { - return result; - } - if(visited == null) { - visited = new HashSet<>(); - } - visited.add(classNode); - for(ClassMetadata superClass : getSuperClasses(graph)) { - if(!visited.contains(superClass.classNode)) { - result = superClass.nearest(graph, supplier, visited); - if(result != null) { - return result; - } - } - } - return null; - } - - - private Object nearestObject(Graph graph, Function supplier, Set visited) { - if(!visited.contains(classNode)) { - Object result = supplier.apply(this); - if(result != null) { - return result; - } - visited.add(classNode); - for(ClassMetadata superClass : getSuperClasses(graph)) { - result = superClass.nearestObject(graph, supplier, visited); - if(result != null) { - return result; - } - } - } - return null; - } - - - public void walkSuperClasses(Graph graph, Consumer consumer, Set visited) { - if(!visited.contains(classNode)) { - consumer.accept(this); - visited.add(classNode); - for(ClassMetadata superClassMetadata : getSuperClasses(graph)) { - superClassMetadata.walkSuperClasses(graph, consumer, visited); - } - } - } - - - public boolean walkSuperClassesUntil(Graph graph, Predicate predicate, Set visited) { - if(!visited.contains(classNode)) { - if(predicate.test(this)) { - return true; - } - else { - visited.add(classNode); - for(ClassMetadata superClassMetadata : getSuperClasses(graph)) { - if(superClassMetadata.walkSuperClassesUntil(graph, predicate, visited)) { - return true; - } - } - } - } - return false; - } - - - @Override + + public static Object createKey(Node classNode, String graphKey) { + return new Key(classNode, graphKey); + } + + + private Node classNode; + + private String graphKey; + + private Map> groupPaths; + + private Map properties = new HashMap<>(); + + private List superClasses; + + + public ClassMetadata(Node classNode, String graphKey) { + this.classNode = classNode; + this.graphKey = graphKey; + } + + + public synchronized Set getGroupPaths(Node group, Graph graph) { + if (groupPaths == null) { + groupPaths = new HashMap<>(); + if 
(JenaNodeUtil.isInstanceOf(classNode, SH.Shape.asNode(), graph)) { + addGroupProperties(classNode, graph, SH.parameter.asNode()); + addGroupProperties(classNode, graph, SH.property.asNode()); + } + ExtendedIterator it = graph.find(null, SH.targetClass.asNode(), classNode); + while (it.hasNext()) { + Node shape = it.next().getSubject(); + addGroupProperties(shape, graph, SH.parameter.asNode()); + addGroupProperties(shape, graph, SH.property.asNode()); + } + } + return groupPaths.get(group); + } + + + private void addGroupProperties(Node nodeShape, Graph graph, Node systemPredicate) { + ExtendedIterator it = graph.find(nodeShape, systemPredicate, Node.ANY); + while (it.hasNext()) { + Node propertyShape = it.next().getObject(); + if (!graph.contains(propertyShape, SH.deactivated.asNode(), JenaDatatypes.TRUE.asNode())) { + Node group = JenaNodeUtil.getObject(propertyShape, SH.group.asNode(), graph); + if (group != null) { + Node path = JenaNodeUtil.getObject(propertyShape, SH.path.asNode(), graph); + if (path != null) { + Set paths = groupPaths.get(group); + if (paths == null) { + paths = new HashSet<>(); + groupPaths.put(group, paths); + } + if (path.isURI()) { + paths.add(new PathMetadata(path, false)); + } else { + Node inverse = JenaNodeUtil.getObject(path, SH.inversePath.asNode(), graph); + if (inverse != null && inverse.isURI()) { + paths.add(new PathMetadata(inverse, true)); + } + } + } + } + } + } + } + + + public Node getPropertyDescription(Node property, boolean inverse, Graph graph) { + return nearest(graph, new Function() { + @Override + public Node apply(ClassMetadata cm) { + return cm.getProperty(property, inverse, graph).getDescription(); + } + }, null); + } + + + public Node getPropertyEditWidget(Node property, boolean inverse, Graph graph) { + return nearest(graph, new Function() { + @Override + public Node apply(ClassMetadata cm) { + return cm.getProperty(property, inverse, graph).getEditWidget(); + } + }, null); + } + + + public Node getPropertyLocalRange(Node property, boolean inverse, Graph graph) { + return nearest(graph, new Function() { + @Override + public Node apply(ClassMetadata cm) { + return cm.getProperty(property, inverse, graph).getLocalRange(); + } + }, null); + } + + + public Integer getPropertyMaxCount(Node property, boolean inverse, Graph graph) { + return (Integer) nearestObject(graph, new Function() { + @Override + public Object apply(ClassMetadata cm) { + return cm.getProperty(property, inverse, graph).getMaxCount(); + } + }, new HashSet()); + } + + + public Node getPropertyName(Node property, boolean inverse, Graph graph) { + return nearest(graph, new Function() { + @Override + public Node apply(ClassMetadata cm) { + return cm.getProperty(property, inverse, graph).getName(); + } + }, null); + } + + + public Node getPropertyViewWidget(Node property, boolean inverse, Graph graph) { + return nearest(graph, new Function() { + @Override + public Node apply(ClassMetadata cm) { + return cm.getProperty(property, inverse, graph).getViewWidget(); + } + }, null); + } + + + public synchronized Iterable getSuperClasses(Graph graph) { + if (superClasses == null) { + superClasses = new LinkedList<>(); + ExtendedIterator it = graph.find(classNode, RDFS.subClassOf.asNode(), Node.ANY); + while (it.hasNext()) { + Node superClass = it.next().getObject(); + superClasses.add(OntologyOptimizations.get().getClassMetadata(superClass, graph, graphKey)); + } + } + return superClasses; + } + + + public synchronized ClassPropertyMetadata getProperty(Node predicate, boolean inverse, 
Graph graph) { + PathMetadata pm = new PathMetadata(predicate, inverse); + return getProperty(pm, graph); + } + + + public synchronized ClassPropertyMetadata getProperty(PathMetadata pm, Graph graph) { + ClassPropertyMetadata result = properties.get(pm); + if (result == null) { + result = new ClassPropertyMetadata(classNode, pm.getPredicate(), pm.isInverse(), graph); + properties.put(pm, result); + } + return result; + } + + + /** + * Walks this and its superclasses until it finds one where the given Supplier returns a value. + * + * @param graph + * @param graph + * @param visited + * @return the nearest supplied value + */ + private Node nearest(Graph graph, Function supplier, Set visited) { + Node result = supplier.apply(this); + if (result != null) { + return result; + } + if (visited == null) { + visited = new HashSet<>(); + } + visited.add(classNode); + for (ClassMetadata superClass : getSuperClasses(graph)) { + if (!visited.contains(superClass.classNode)) { + result = superClass.nearest(graph, supplier, visited); + if (result != null) { + return result; + } + } + } + return null; + } + + + private Object nearestObject(Graph graph, Function supplier, Set visited) { + if (!visited.contains(classNode)) { + Object result = supplier.apply(this); + if (result != null) { + return result; + } + visited.add(classNode); + for (ClassMetadata superClass : getSuperClasses(graph)) { + result = superClass.nearestObject(graph, supplier, visited); + if (result != null) { + return result; + } + } + } + return null; + } + + + public void walkSuperClasses(Graph graph, Consumer consumer, Set visited) { + if (!visited.contains(classNode)) { + consumer.accept(this); + visited.add(classNode); + for (ClassMetadata superClassMetadata : getSuperClasses(graph)) { + superClassMetadata.walkSuperClasses(graph, consumer, visited); + } + } + } + + + public boolean walkSuperClassesUntil(Graph graph, Predicate predicate, Set visited) { + if (!visited.contains(classNode)) { + if (predicate.test(this)) { + return true; + } else { + visited.add(classNode); + for (ClassMetadata superClassMetadata : getSuperClasses(graph)) { + if (superClassMetadata.walkSuperClassesUntil(graph, predicate, visited)) { + return true; + } + } + } + } + return false; + } + + + @Override public String toString() { - return "ClassMetadata for " + classNode + " with " + properties.size() + " properties"; - } - - - private static class Key { - - private Node classNode; - - private String graphKey; - - - Key(Node classNode, String graphKey) { - this.classNode = classNode; - this.graphKey = graphKey; - } - - - @Override - public boolean equals(Object obj) { - if(obj instanceof Key) { - return classNode.equals(((Key)obj).classNode) && graphKey.equals(((Key)obj).graphKey); - } - else { - return false; - } - } - - @Override - public int hashCode() { - return classNode.hashCode() + graphKey.hashCode(); - } - - - @Override - public String toString() { - return graphKey + ".classMetadata." 
+ classNode; - } - } + return "ClassMetadata for " + classNode + " with " + properties.size() + " properties"; + } + + + private static class Key { + + private Node classNode; + + private String graphKey; + + + Key(Node classNode, String graphKey) { + this.classNode = classNode; + this.graphKey = graphKey; + } + + + @Override + public boolean equals(Object obj) { + if (obj instanceof Key) { + return classNode.equals(((Key) obj).classNode) && graphKey.equals(((Key) obj).graphKey); + } else { + return false; + } + } + + @Override + public int hashCode() { + return classNode.hashCode() + graphKey.hashCode(); + } + + + @Override + public String toString() { + return graphKey + ".classMetadata." + classNode; + } + } } diff --git a/src/main/java/org/topbraid/shacl/optimize/ClassPropertyMetadata.java b/src/main/java/org/topbraid/shacl/optimize/ClassPropertyMetadata.java index d412c69a..c6301d86 100644 --- a/src/main/java/org/topbraid/shacl/optimize/ClassPropertyMetadata.java +++ b/src/main/java/org/topbraid/shacl/optimize/ClassPropertyMetadata.java @@ -16,9 +16,6 @@ */ package org.topbraid.shacl.optimize; -import java.util.LinkedList; -import java.util.List; - import org.apache.jena.graph.Graph; import org.apache.jena.graph.Node; import org.apache.jena.graph.Triple; @@ -29,202 +26,201 @@ import org.topbraid.shacl.vocabulary.SH; import org.topbraid.shacl.vocabulary.TOSH; +import java.util.LinkedList; +import java.util.List; + /** * Metadata about a property at a given class, possibly in the inverse direction. * Populated from SHACL constraints and plugins (currently including OWL restrictions * and - within the TopBraid ecosystem - SPIN constraints). - * + * * @author Holger Knublauch */ public class ClassPropertyMetadata { - - private Node description; - - private Node editWidget; - - private boolean inverse; - - private Node localRange; - - private Integer maxCount; - - private Node name; - - private Node order; - - private Node predicate; - - private Node viewWidget; - - - ClassPropertyMetadata(Node classNode, Node predicate, boolean inverse, Graph graph) { - - this.inverse = inverse; - this.predicate = predicate; - - // Init from SHACL shapes - if(SHACLUtil.exists(graph)) { - if(JenaNodeUtil.isInstanceOf(classNode, SH.Shape.asNode(), graph)) { - initFromShape(classNode, graph); - } - ExtendedIterator it = graph.find(null, SH.targetClass.asNode(), classNode); - while(it.hasNext()) { - Node shape = it.next().getSubject(); - initFromShape(shape, graph); - } - } - - if(!inverse) { - for(Plugin plugin : plugins) { - plugin.init(this, classNode, graph); - } - } - } - - - public Node getDescription() { - return description; - } - - - public Node getEditWidget() { - return editWidget; - } - - - // Currently not supported for inverse properties (not used yet) - public Node getLocalRange() { - return localRange; - } - - - public Integer getMaxCount() { - return maxCount; - } - - - public Node getName() { - return name; - } - - - public Node getOrder() { - return order; - } - - - public Node getPredicate() { - return predicate; - } - - - public Node getViewWidget() { - return viewWidget; - } - - - private void initFromShape(Node shape, Graph graph) { - if(!graph.contains(shape, SH.deactivated.asNode(), JenaDatatypes.TRUE.asNode())) { - initFromShape(shape, SH.property.asNode(), graph); - initFromShape(shape, SH.parameter.asNode(), graph); - } - } - - - private void initFromShape(Node shape, Node systemPredicate, Graph graph) { - ExtendedIterator it = graph.find(shape, systemPredicate, Node.ANY); - 
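For orientation, a minimal usage sketch (not part of this patch) of the ClassMetadata cache reformatted above: it looks up the effective sh:maxCount of a property at a class, falling back to superclasses through the nearest/nearestObject walk shown in the hunk. The example class and property URIs are invented, and the graph key is assumed to have been obtained from OntologyOptimizations (see the getKeyIfEnabledFor context line further down).

import org.apache.jena.graph.Graph;
import org.apache.jena.graph.Node;
import org.apache.jena.graph.NodeFactory;
import org.topbraid.shacl.optimize.ClassMetadata;
import org.topbraid.shacl.optimize.OntologyOptimizations;

public class ClassMetadataSketch {

    // Looks up the effective sh:maxCount for an (invented) ex:name property at ex:Person,
    // walking up rdfs:subClassOf until a property shape with sh:maxCount is found.
    static Integer effectiveMaxCount(Graph shapesGraph, String graphKey) {
        Node person = NodeFactory.createURI("http://example.org/Person"); // illustrative class
        Node name = NodeFactory.createURI("http://example.org/name");     // illustrative predicate
        ClassMetadata cm = OntologyOptimizations.get().getClassMetadata(person, shapesGraph, graphKey);
        return cm.getPropertyMaxCount(name, false, shapesGraph);          // null if no sh:maxCount is declared
    }
}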
while(it.hasNext()) { - Node propertyShape = it.next().getObject(); - if(!propertyShape.isLiteral()) { - if(hasMatchingPath(propertyShape, graph)) { - if(!graph.contains(propertyShape, SH.deactivated.asNode(), JenaDatatypes.TRUE.asNode())) { - if(description == null) { - description = JenaNodeUtil.getObject(propertyShape, SH.description.asNode(), graph); - } - if(localRange == null) { - if(inverse) { - // Maybe: support inverse ranges - } - else { - localRange = SHACLUtil.walkPropertyShapesHelper(propertyShape, graph); - } - } - if(maxCount == null) { - Node maxCountNode = JenaNodeUtil.getObject(propertyShape, SH.maxCount.asNode(), graph); - if(maxCountNode != null && maxCountNode.isLiteral()) { - Object value = maxCountNode.getLiteralValue(); - if(value instanceof Number) { - maxCount = ((Number) value).intValue(); - } - } - } - if(name == null) { - name = JenaNodeUtil.getObject(propertyShape, SH.name.asNode(), graph); - } - if(order == null) { - order = JenaNodeUtil.getObject(propertyShape, SH.order.asNode(), graph); - } - if(viewWidget == null) { - viewWidget = JenaNodeUtil.getObject(propertyShape, TOSH.viewWidget.asNode(), graph); - } - } - } - } - } - } - - - public boolean hasMatchingPath(Node propertyShape, Graph graph) { - if(inverse) { - Node path = JenaNodeUtil.getObject(propertyShape, SH.path.asNode(), graph); - if(path != null && path.isBlank()) { - return predicate.equals(JenaNodeUtil.getObject(path, SH.inversePath.asNode(), graph)); - } - else { - return false; - } - } - else { - return graph.contains(propertyShape, SH.path.asNode(), predicate); - } - } - - - public boolean isInverse() { - return inverse; - } - - - public void setLocalRange(Node value) { - this.localRange = value; - } - - - public void setMaxCount(int value) { - this.maxCount = value; - } - - - @Override - public String toString() { - return "ClassPropertyMetadata for " + (inverse ? 
"^" : "") + predicate; - } - - - // Abstraction layer for OWL and SPIN - - private static List plugins = new LinkedList<>(); - - public static void register(Plugin plugin) { - plugins.add(plugin); - } - - static { - register(new OWLClassPropertyMetadataPlugin()); - } - - public static interface Plugin { - - void init(ClassPropertyMetadata cpm, Node classNode, Graph graph); - } + + private Node description; + + private Node editWidget; + + private boolean inverse; + + private Node localRange; + + private Integer maxCount; + + private Node name; + + private Node order; + + private Node predicate; + + private Node viewWidget; + + + ClassPropertyMetadata(Node classNode, Node predicate, boolean inverse, Graph graph) { + + this.inverse = inverse; + this.predicate = predicate; + + // Init from SHACL shapes + if (SHACLUtil.exists(graph)) { + if (JenaNodeUtil.isInstanceOf(classNode, SH.Shape.asNode(), graph)) { + initFromShape(classNode, graph); + } + ExtendedIterator it = graph.find(null, SH.targetClass.asNode(), classNode); + while (it.hasNext()) { + Node shape = it.next().getSubject(); + initFromShape(shape, graph); + } + } + + if (!inverse) { + for (Plugin plugin : plugins) { + plugin.init(this, classNode, graph); + } + } + } + + + public Node getDescription() { + return description; + } + + + public Node getEditWidget() { + return editWidget; + } + + + // Currently not supported for inverse properties (not used yet) + public Node getLocalRange() { + return localRange; + } + + + public Integer getMaxCount() { + return maxCount; + } + + + public Node getName() { + return name; + } + + + public Node getOrder() { + return order; + } + + + public Node getPredicate() { + return predicate; + } + + + public Node getViewWidget() { + return viewWidget; + } + + + private void initFromShape(Node shape, Graph graph) { + if (!graph.contains(shape, SH.deactivated.asNode(), JenaDatatypes.TRUE.asNode())) { + initFromShape(shape, SH.property.asNode(), graph); + initFromShape(shape, SH.parameter.asNode(), graph); + } + } + + + private void initFromShape(Node shape, Node systemPredicate, Graph graph) { + ExtendedIterator it = graph.find(shape, systemPredicate, Node.ANY); + while (it.hasNext()) { + Node propertyShape = it.next().getObject(); + if (!propertyShape.isLiteral()) { + if (hasMatchingPath(propertyShape, graph)) { + if (!graph.contains(propertyShape, SH.deactivated.asNode(), JenaDatatypes.TRUE.asNode())) { + if (description == null) { + description = JenaNodeUtil.getObject(propertyShape, SH.description.asNode(), graph); + } + if (localRange == null) { + if (inverse) { + // Maybe: support inverse ranges + } else { + localRange = SHACLUtil.walkPropertyShapesHelper(propertyShape, graph); + } + } + if (maxCount == null) { + Node maxCountNode = JenaNodeUtil.getObject(propertyShape, SH.maxCount.asNode(), graph); + if (maxCountNode != null && maxCountNode.isLiteral()) { + Object value = maxCountNode.getLiteralValue(); + if (value instanceof Number) { + maxCount = ((Number) value).intValue(); + } + } + } + if (name == null) { + name = JenaNodeUtil.getObject(propertyShape, SH.name.asNode(), graph); + } + if (order == null) { + order = JenaNodeUtil.getObject(propertyShape, SH.order.asNode(), graph); + } + if (viewWidget == null) { + viewWidget = JenaNodeUtil.getObject(propertyShape, TOSH.viewWidget.asNode(), graph); + } + } + } + } + } + } + + + public boolean hasMatchingPath(Node propertyShape, Graph graph) { + if (inverse) { + Node path = JenaNodeUtil.getObject(propertyShape, SH.path.asNode(), graph); + if (path 
!= null && path.isBlank()) { + return predicate.equals(JenaNodeUtil.getObject(path, SH.inversePath.asNode(), graph)); + } else { + return false; + } + } else { + return graph.contains(propertyShape, SH.path.asNode(), predicate); + } + } + + + public boolean isInverse() { + return inverse; + } + + + public void setLocalRange(Node value) { + this.localRange = value; + } + + + public void setMaxCount(int value) { + this.maxCount = value; + } + + + @Override + public String toString() { + return "ClassPropertyMetadata for " + (inverse ? "^" : "") + predicate; + } + + + // Abstraction layer for OWL and SPIN + + private static List plugins = new LinkedList<>(); + + public static void register(Plugin plugin) { + plugins.add(plugin); + } + + static { + register(new OWLClassPropertyMetadataPlugin()); + } + + public interface Plugin { + void init(ClassPropertyMetadata cpm, Node classNode, Graph graph); + } } diff --git a/src/main/java/org/topbraid/shacl/optimize/OntologyOptimizations.java b/src/main/java/org/topbraid/shacl/optimize/OntologyOptimizations.java index a1a92e00..8a22c5a9 100644 --- a/src/main/java/org/topbraid/shacl/optimize/OntologyOptimizations.java +++ b/src/main/java/org/topbraid/shacl/optimize/OntologyOptimizations.java @@ -16,23 +16,22 @@ */ package org.topbraid.shacl.optimize; -import com.github.jsonldjava.shaded.com.google.common.cache.Cache; -import com.github.jsonldjava.shaded.com.google.common.cache.CacheBuilder; +import com.github.benmanes.caffeine.cache.Caffeine; +import com.github.benmanes.caffeine.cache.Cache; + import java.util.ArrayList; import java.util.List; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutionException; import java.util.function.Function; import org.apache.jena.graph.Graph; import org.apache.jena.graph.Node; import org.apache.jena.rdf.model.Model; import org.topbraid.jenax.util.ARQFactory; -import org.topbraid.jenax.util.ExceptionUtil; import org.topbraid.jenax.util.JenaUtil; import org.topbraid.shacl.engine.ShapesGraph; import org.topbraid.shacl.engine.ShapesGraphFactory; + /** * A singleton managing Ontology-based optimizations, to be used (for example) with OptimizedMultiUnions. * The contract is that such optimization Objects need to register themselves so that they can @@ -79,19 +78,25 @@ public String getKeyIfEnabledFor(Graph graph) { } private static final int capacity = 10000; - - private Cache cache = CacheBuilder. - newBuilder(). - maximumSize(capacity). 
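The ClassPropertyMetadata class above ends with a small Plugin abstraction (a static register method plus an init callback) used to fold in OWL and, within TopBraid, SPIN metadata. A hypothetical registration, shown only to illustrate the hook; the predicate URI and the fallback count are invented.

import org.topbraid.shacl.optimize.ClassPropertyMetadata;

public class MaxCountPluginSketch {

    // Registers a plugin that caps an (invented) ex:tag property at 10 values
    // whenever no SHACL-derived sh:maxCount was found for the class/property pair.
    static void install() {
        ClassPropertyMetadata.register((cpm, classNode, graph) -> {
            if (cpm.getMaxCount() == null && cpm.getPredicate().isURI()
                    && "http://example.org/tag".equals(cpm.getPredicate().getURI())) {
                cpm.setMaxCount(10);
            }
        });
    }
}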
- build(); - - + + private Cache cache = Caffeine + .newBuilder() + .maximumSize(capacity) + .build(); + + public ClassMetadata getClassMetadata(Node cls, Graph graph, String graphKey) { Object cacheKey = ClassMetadata.createKey(cls, graphKey); - return (ClassMetadata) getOrComputeObject(cacheKey, () -> new ClassMetadata(cls, graphKey)); + + ClassMetadata cachedMetadata = (ClassMetadata) getOrComputeObject(cacheKey, (key) -> { + return new ClassMetadata((Node) key, graphKey); + }); + + return cachedMetadata; } - - + + + public Object getObject(Object key) { return cache.getIfPresent(key); } @@ -99,17 +104,7 @@ public Object getObject(Object key) { // Legacy version with Function parameter public Object getOrComputeObject(Object key, Function function) { - return getOrComputeObject(key, () -> function.apply(key)); - } - - - public Object getOrComputeObject(Object key, Callable callable) { - try { - return cache.get(key, callable); - } catch (ExecutionException ex) { - log.error("Failed to populate OntologyOptimizations with key " + key, ex); - throw ExceptionUtil.throwUnchecked(ex); - } + return cache.get(key, function); } public ShapesGraph getCachableShapesGraph(String uri) { diff --git a/src/main/java/org/topbraid/shacl/rules/RuleEngine.java b/src/main/java/org/topbraid/shacl/rules/RuleEngine.java index 32d57197..b927dcc7 100644 --- a/src/main/java/org/topbraid/shacl/rules/RuleEngine.java +++ b/src/main/java/org/topbraid/shacl/rules/RuleEngine.java @@ -16,25 +16,10 @@ */ package org.topbraid.shacl.rules; -import java.net.URI; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; - import org.apache.jena.graph.Node; import org.apache.jena.graph.Triple; import org.apache.jena.query.Dataset; -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.Property; -import org.apache.jena.rdf.model.RDFNode; -import org.apache.jena.rdf.model.Resource; -import org.apache.jena.rdf.model.Statement; +import org.apache.jena.rdf.model.*; import org.apache.jena.util.iterator.ExtendedIterator; import org.apache.jena.vocabulary.RDF; import org.topbraid.jenax.progress.ProgressMonitor; @@ -54,307 +39,308 @@ import org.topbraid.shacl.vocabulary.DASH; import org.topbraid.shacl.vocabulary.SH; +import java.net.URI; +import java.util.*; + /** * A SHACL Rules engine with a pluggable architecture for different execution languages * including Triple rules, SPARQL rules and JavaScript rules. - * + *
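The OntologyOptimizations hunk above swaps the shaded Guava CacheBuilder for Caffeine. A self-contained sketch of the Caffeine calls it now relies on; everything outside the Caffeine API (class and method names, the looked-up value) is a placeholder.

import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;

public class CaffeineCacheSketch {

    // Size-bounded cache, mirroring the capacity constant used above.
    private final Cache<Object, Object> cache = Caffeine.newBuilder()
            .maximumSize(10_000)
            .build();

    // Caffeine's get(key, mappingFunction) computes and stores the value if absent and throws
    // no checked ExecutionException, which is why the Callable overload and the
    // ExceptionUtil handling could be dropped in the hunk above.
    Object getOrCompute(Object key) {
        return cache.get(key, k -> expensiveLookup(k));
    }

    private Object expensiveLookup(Object key) {
        return new Object(); // placeholder for something like new ClassMetadata(...)
    }
}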
* In preparation for inclusion into SHACL 1.1, this engine also supports sh:values rules, - * see https://www.topquadrant.com/graphql/values.html and treats sh:defaultValues as inferences. - * + * see Values and treats sh:defaultValues as inferences. + * * @author Holger Knublauch */ public class RuleEngine extends AbstractEngine { - - // true to skip sh:values rules from property shapes marked with dash:neverMaterialize true - private boolean excludeNeverMaterialize; - - // true to skip all sh:values rules - private boolean excludeValues; - - private Model inferences; - - private Set pending = new HashSet<>(); - - private Map> rule2Conditions = new HashMap<>(); - - private Map> shape2Rules = new HashMap<>(); - - - public RuleEngine(Dataset dataset, URI shapesGraphURI, ShapesGraph shapesGraph, Model inferences) { - super(dataset, shapesGraph, shapesGraphURI); - this.inferences = inferences; - } - - - public void executeAll() throws InterruptedException { - List ruleShapes = new ArrayList<>(); - for(Shape shape : shapesGraph.getRootShapes()) { - if(shape.getShapeResource().hasProperty(SH.rule)) { - ruleShapes.add(shape); - } - else { - for(Resource ps : JenaUtil.getResourceProperties(shape.getShapeResource(), SH.property)) { - if(ps.hasProperty(SH.values)) { - ruleShapes.add(shape); - break; - } - } - } - } - executeShapes(ruleShapes, null); - } - - - public void executeAllDefaultValues() throws InterruptedException { - // Add sh:defaultValues where applicable - Model shapesModel = this.getShapesModel(); - Set defaultValuePredicates = new HashSet<>(); - shapesModel.listSubjectsWithProperty(SH.defaultValue).forEachRemaining(ps -> { - Resource path = ps.getPropertyResourceValue(SH.path); - if(path != null && path.isURIResource() && !ps.hasProperty(SH.deactivated, JenaDatatypes.TRUE)) { - defaultValuePredicates.add(JenaUtil.asProperty(path)); - } - }); - for(Property predicate : defaultValuePredicates) { - Map map = shapesGraph.getDefaultValueNodeExpressionsMap(predicate); - for(Node shapeNode : map.keySet()) { - Shape shape = shapesGraph.getShape(shapeNode); - if(shape != null) { - NodeExpression expr = map.get(shapeNode); - List targetNodes = new ArrayList<>(shape.getTargetNodes(getDataset())); - for(RDFNode targetNode : targetNodes) { - if(targetNode.isResource() && !targetNode.asResource().hasProperty(predicate)) { - ExtendedIterator it = expr.eval(targetNode, this); - if(it.hasNext()) { - List list = it.toList(); - for(RDFNode value : list) { - inferences.add(targetNode.asResource(), predicate, value); - } - } - } - } - } - } - } - } - - - /** - * Executes the rules attached to a given list of shapes, either for a dedicated - * focus node or all target nodes of the shapes. 
- * @param ruleShapes the shapes to execute - * @param focusNode the (optional) focus node or null for all target nodes - * @throws InterruptedException if the monitor has canceled this - */ - public void executeShapes(List ruleShapes, RDFNode focusNode) throws InterruptedException { - - if(ruleShapes.isEmpty()) { - return; - } - - Collections.sort(ruleShapes, new Comparator() { - @Override - public int compare(Shape shape1, Shape shape2) { - return shape1.getOrder().compareTo(shape2.getOrder()); - } - }); - - String baseMessage = null; - if(monitor != null) { - int rules = 0; - for(Shape shape : ruleShapes) { - rules += getShapeRules(shape).size(); - } - baseMessage = "Executing " + rules + " SHACL rules from " + ruleShapes.size() + " shapes"; - monitor.beginTask(baseMessage, rules); - } - - Double oldOrder = ruleShapes.get(0).getOrder(); - for(Shape shape : ruleShapes) { - if(!oldOrder.equals(shape.getOrder())) { - oldOrder = shape.getOrder(); - flushPending(); - } - executeShape(shape, baseMessage, focusNode); - } - flushPending(); - } - - - public void executeShape(Shape shape, String baseMessage, RDFNode focusNode) throws InterruptedException { - - if(shape.isDeactivated()) { - return; - } - - List rules = getShapeRules(shape); - if(rules.isEmpty()) { - return; - } - - List targetNodes; - if(focusNode != null) { - targetNodes = Collections.singletonList(focusNode); - } - else { - targetNodes = new ArrayList<>(shape.getTargetNodes(dataset)); - } - - if(!targetNodes.isEmpty()) { - Number oldOrder = rules.get(0).getOrder(); - for(Rule rule : rules) { - if(monitor != null) { - if(monitor.isCanceled()) { - throw new InterruptedException(); - } - monitor.setTaskName(baseMessage + " (at " + RDFLabels.get().getLabel(shape.getShapeResource()) + " with " + targetNodes.size() + " target nodes)"); - monitor.subTask(rule.toString().replace("\n", " ")); - } - if(!oldOrder.equals(rule.getOrder())) { - oldOrder = rule.getOrder(); - // If new rdf:type triples have been inferred, recompute the target nodes (this is brute-force for now) - boolean recomputeTarget = focusNode == null && pending.stream().anyMatch(triple -> RDF.type.asNode().equals(triple.getPredicate())); - flushPending(); - if(recomputeTarget) { - targetNodes = new ArrayList<>(shape.getTargetNodes(dataset)); - } - } - List conditions = rule2Conditions.get(rule); - if(conditions != null && !conditions.isEmpty()) { - List filtered = new LinkedList<>(); - for(RDFNode targetNode : targetNodes) { - if(nodeConformsToAllShapes(targetNode, conditions)) { - filtered.add(targetNode); - } - } - executeRule(rule, filtered, shape); - } - else { - executeRule(rule, targetNodes, shape); - } - if(monitor != null) { - monitor.worked(1); - } - } - } - } - - - private void executeRule(Rule rule, List focusNodes, Shape shape) { - JenaUtil.setGraphReadOptimization(true); - try { - if(ExecStatisticsManager.get().isRecording()) { - long startTime = System.currentTimeMillis(); - rule.execute(this, focusNodes, shape); - long endTime = System.currentTimeMillis(); - long duration = (endTime - startTime); - String queryText = rule.toString(); - ExecStatisticsManager.get().add(Collections.singletonList( - new ExecStatistics(queryText, queryText, duration, startTime, rule.getContextNode()))); - } - else { - rule.execute(this, focusNodes, shape); - } - } - finally { - JenaUtil.setGraphReadOptimization(false); - } - } - - - private void flushPending() { - for(Triple triple : pending) { - inferences.add(inferences.asStatement(triple)); - } - pending.clear(); - } - - - 
private List getShapeRules(Shape shape) { - return shape2Rules.computeIfAbsent(shape, s2 -> { - List rules = new LinkedList<>(); - List raws = new LinkedList<>(); - for(Statement s : shape.getShapeResource().listProperties(SH.rule).toList()) { - if(s.getObject().isResource() && !s.getResource().hasProperty(SH.deactivated, JenaDatatypes.TRUE)) { - raws.add(s.getResource()); - } - } - Collections.sort(raws, OrderComparator.get()); - for(Resource raw : raws) { - RuleLanguage ruleLanguage = RuleLanguages.get().getRuleLanguage(raw); - if(ruleLanguage == null) { - throw new IllegalArgumentException("Unsupported SHACL rule type for " + raw); - } - Rule rule = ruleLanguage.createRule(raw); - rules.add(rule); - List conditions = JenaUtil.getResourceProperties(raw, SH.condition); - rule2Conditions.put(rule, conditions); - } - if(!excludeValues) { - for(Resource ps : JenaUtil.getResourceProperties(shape.getShapeResource(), SH.property)) { - if(!ps.hasProperty(SH.deactivated, JenaDatatypes.TRUE) && (!excludeNeverMaterialize || !ps.hasProperty(DASH.neverMaterialize, JenaDatatypes.TRUE))) { - Resource path = ps.getPropertyResourceValue(SH.path); - if(path != null && path.isURIResource()) { - for(Statement s : ps.listProperties(SH.values).toList()) { - NodeExpression expr = NodeExpressionFactory.get().create(s.getObject()); - rules.add(new ValuesRule(expr, path.asNode(), false)); - } - } - } - } - } - return rules; - }); - } - - - public Model getInferencesModel() { - return inferences; - } - - - @Override + + // true to skip sh:values rules from property shapes marked with dash:neverMaterialize true + private boolean excludeNeverMaterialize; + + // true to skip all sh:values rules + private boolean excludeValues; + + private Model inferences; + + private Set pending = new HashSet<>(); + + private Map> rule2Conditions = new HashMap<>(); + + private Map> shape2Rules = new HashMap<>(); + + + public RuleEngine(Dataset dataset, URI shapesGraphURI, ShapesGraph shapesGraph, Model inferences) { + super(dataset, shapesGraph, shapesGraphURI); + this.inferences = inferences; + } + + + public void executeAll() throws InterruptedException { + List ruleShapes = new ArrayList<>(); + for (Shape shape : shapesGraph.getRootShapes()) { + if (shape.getShapeResource().hasProperty(SH.rule)) { + ruleShapes.add(shape); + } else { + for (Resource ps : JenaUtil.getResourceProperties(shape.getShapeResource(), SH.property)) { + if (ps.hasProperty(SH.values)) { + ruleShapes.add(shape); + break; + } + } + } + } + executeShapes(ruleShapes, null); + } + + + public void executeAllDefaultValues() throws InterruptedException { + // Add sh:defaultValues where applicable + Model shapesModel = this.getShapesModel(); + Set defaultValuePredicates = new HashSet<>(); + shapesModel.listSubjectsWithProperty(SH.defaultValue).forEachRemaining(ps -> { + Resource path = ps.getPropertyResourceValue(SH.path); + if (path != null && path.isURIResource() && !ps.hasProperty(SH.deactivated, JenaDatatypes.TRUE)) { + defaultValuePredicates.add(JenaUtil.asProperty(path)); + } + }); + for (Property predicate : defaultValuePredicates) { + Map map = shapesGraph.getDefaultValueNodeExpressionsMap(predicate); + for (Node shapeNode : map.keySet()) { + Shape shape = shapesGraph.getShape(shapeNode); + if (shape != null) { + NodeExpression expr = map.get(shapeNode); + List targetNodes = new ArrayList<>(shape.getTargetNodes(getDataset())); + for (RDFNode targetNode : targetNodes) { + if (targetNode.isResource() && !targetNode.asResource().hasProperty(predicate)) { + 
ExtendedIterator it = expr.eval(targetNode, this); + if (it.hasNext()) { + List list = it.toList(); + for (RDFNode value : list) { + inferences.add(targetNode.asResource(), predicate, value); + } + } + } + } + } + } + } + } + + + /** + * Executes the rules attached to a given list of shapes, either for a dedicated + * focus node or all target nodes of the shapes. + * + * @param ruleShapes the shapes to execute + * @param focusNode the (optional) focus node or null for all target nodes + * @throws InterruptedException if the monitor has canceled this + */ + public void executeShapes(List ruleShapes, RDFNode focusNode) throws InterruptedException { + + if (ruleShapes.isEmpty()) { + return; + } + + Collections.sort(ruleShapes, new Comparator() { + @Override + public int compare(Shape shape1, Shape shape2) { + return shape1.getOrder().compareTo(shape2.getOrder()); + } + }); + + String baseMessage = null; + if (monitor != null) { + int rules = 0; + for (Shape shape : ruleShapes) { + rules += getShapeRules(shape).size(); + } + baseMessage = "Executing " + rules + " SHACL rules from " + ruleShapes.size() + " shapes"; + monitor.beginTask(baseMessage, rules); + } + + Double oldOrder = ruleShapes.get(0).getOrder(); + for (Shape shape : ruleShapes) { + if (!oldOrder.equals(shape.getOrder())) { + oldOrder = shape.getOrder(); + flushPending(); + } + executeShape(shape, baseMessage, focusNode); + } + flushPending(); + } + + + public void executeShape(Shape shape, String baseMessage, RDFNode focusNode) throws InterruptedException { + + if (shape.isDeactivated()) { + return; + } + + List rules = getShapeRules(shape); + if (rules.isEmpty()) { + return; + } + + List targetNodes; + if (focusNode != null) { + targetNodes = Collections.singletonList(focusNode); + } else { + targetNodes = new ArrayList<>(shape.getTargetNodes(dataset)); + } + + if (!targetNodes.isEmpty()) { + Number oldOrder = rules.get(0).getOrder(); + for (Rule rule : rules) { + if (monitor != null) { + if (monitor.isCanceled()) { + throw new InterruptedException(); + } + monitor.setTaskName(baseMessage + " (at " + RDFLabels.get().getLabel(shape.getShapeResource()) + " with " + targetNodes.size() + " target nodes)"); + monitor.subTask(rule.toString().replace("\n", " ")); + } + if (!oldOrder.equals(rule.getOrder())) { + oldOrder = rule.getOrder(); + // If new rdf:type triples have been inferred, recompute the target nodes (this is brute-force for now) + boolean recomputeTarget = focusNode == null && pending.stream().anyMatch(triple -> RDF.type.asNode().equals(triple.getPredicate())); + flushPending(); + if (recomputeTarget) { + targetNodes = new ArrayList<>(shape.getTargetNodes(dataset)); + } + } + List conditions = rule2Conditions.get(rule); + if (conditions != null && !conditions.isEmpty()) { + List filtered = new LinkedList<>(); + for (RDFNode targetNode : targetNodes) { + if (nodeConformsToAllShapes(targetNode, conditions)) { + filtered.add(targetNode); + } + } + executeRule(rule, filtered, shape); + } else { + executeRule(rule, targetNodes, shape); + } + if (monitor != null) { + monitor.worked(1); + } + } + } + } + + + private void executeRule(Rule rule, List focusNodes, Shape shape) { + JenaUtil.setGraphReadOptimization(true); + try { + if (ExecStatisticsManager.get().isRecording()) { + long startTime = System.currentTimeMillis(); + rule.execute(this, focusNodes, shape); + long endTime = System.currentTimeMillis(); + long duration = (endTime - startTime); + String queryText = rule.toString(); + 
ExecStatisticsManager.get().add(Collections.singletonList( + new ExecStatistics(queryText, queryText, duration, startTime, rule.getContextNode()))); + } else { + rule.execute(this, focusNodes, shape); + } + } finally { + JenaUtil.setGraphReadOptimization(false); + } + } + + + private void flushPending() { + for (Triple triple : pending) { + inferences.add(inferences.asStatement(triple)); + } + pending.clear(); + } + + + private List getShapeRules(Shape shape) { + return shape2Rules.computeIfAbsent(shape, s2 -> { + List rules = new LinkedList<>(); + List raws = new LinkedList<>(); + for (Statement s : shape.getShapeResource().listProperties(SH.rule).toList()) { + if (s.getObject().isResource() && !s.getResource().hasProperty(SH.deactivated, JenaDatatypes.TRUE)) { + raws.add(s.getResource()); + } + } + Collections.sort(raws, OrderComparator.get()); + for (Resource raw : raws) { + RuleLanguage ruleLanguage = RuleLanguages.get().getRuleLanguage(raw); + if (ruleLanguage == null) { + throw new IllegalArgumentException("Unsupported SHACL rule type for " + raw); + } + Rule rule = ruleLanguage.createRule(raw); + rules.add(rule); + List conditions = JenaUtil.getResourceProperties(raw, SH.condition); + rule2Conditions.put(rule, conditions); + } + if (!excludeValues) { + for (Resource ps : JenaUtil.getResourceProperties(shape.getShapeResource(), SH.property)) { + if (!ps.hasProperty(SH.deactivated, JenaDatatypes.TRUE) && (!excludeNeverMaterialize || !ps.hasProperty(DASH.neverMaterialize, JenaDatatypes.TRUE))) { + Resource path = ps.getPropertyResourceValue(SH.path); + if (path != null && path.isURIResource()) { + for (Statement s : ps.listProperties(SH.values).toList()) { + NodeExpression expr = NodeExpressionFactory.get().create(s.getObject()); + rules.add(new ValuesRule(expr, path.asNode(), false)); + } + } + } + } + } + return rules; + }); + } + + + public Model getInferencesModel() { + return inferences; + } + + + @Override public Model getShapesModel() { - return dataset.getNamedModel(shapesGraphURI.toString()); - } - - - public void infer(Triple triple, Rule rule, Shape shape) { - pending.add(triple); - } - - - private boolean nodeConformsToAllShapes(RDFNode focusNode, Iterable shapes) { - for(Resource shape : shapes) { - ValidationEngine engine = ValidationEngineFactory.get().create(dataset, shapesGraphURI, shapesGraph, null); - if(!engine.nodesConformToShape(Collections.singletonList(focusNode), shape.asNode())) { - return false; - } - } - return true; - } - - - /** - * If set to true then all sh:values rules in property shapes marked with dash:neverMaterialize will be skipped. - * @param value the new flag (defaults to false) - */ - public void setExcludeNeverMaterialize(boolean value) { - this.excludeNeverMaterialize = value; - } - - - /** - * If set to true then all sh:values rules will be skipped. 
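A sketch of driving the RuleEngine above end to end, assembled from the constructor and executeAll shown in this hunk and from the dataset wiring that W3CTestRunner uses later in the patch. The input models and the shapes graph URI scheme are assumptions, not prescribed by the API.

import java.net.URI;
import java.util.UUID;
import org.apache.jena.query.Dataset;
import org.apache.jena.rdf.model.Model;
import org.topbraid.jenax.util.ARQFactory;
import org.topbraid.jenax.util.JenaUtil;
import org.topbraid.shacl.engine.ShapesGraph;
import org.topbraid.shacl.engine.ShapesGraphFactory;
import org.topbraid.shacl.rules.RuleEngine;

public class RuleEngineSketch {

    // Runs all sh:rule and sh:values rules of the shapes model against the data model
    // and returns the collected inferences.
    static Model runRules(Model dataModel, Model shapesModel) throws InterruptedException {
        URI shapesGraphURI = URI.create("urn:x-shacl-shapes-graph:" + UUID.randomUUID());
        Dataset dataset = ARQFactory.get().getDataset(dataModel);
        dataset.addNamedModel(shapesGraphURI.toString(), shapesModel);
        ShapesGraph shapesGraph = ShapesGraphFactory.get().createShapesGraph(shapesModel);

        Model inferences = JenaUtil.createMemoryModel();
        RuleEngine engine = new RuleEngine(dataset, shapesGraphURI, shapesGraph, inferences);
        engine.setExcludeNeverMaterialize(true); // skip sh:values rules marked dash:neverMaterialize
        engine.executeAll();
        return engine.getInferencesModel();
    }
}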
- * @param value the new flag (defaults to false) - */ - public void setExcludeValues(boolean value) { - this.excludeValues = value; - } - - - @Override + return dataset.getNamedModel(shapesGraphURI.toString()); + } + + + public void infer(Triple triple, Rule rule, Shape shape) { + pending.add(triple); + } + + + private boolean nodeConformsToAllShapes(RDFNode focusNode, Iterable shapes) { + for (Resource shape : shapes) { + ValidationEngine engine = ValidationEngineFactory.get().create(dataset, shapesGraphURI, shapesGraph, null); + if (!engine.nodesConformToShape(Collections.singletonList(focusNode), shape.asNode())) { + return false; + } + } + return true; + } + + + /** + * If set to true then all sh:values rules in property shapes marked with dash:neverMaterialize will be skipped. + * + * @param value the new flag (defaults to false) + */ + public void setExcludeNeverMaterialize(boolean value) { + this.excludeNeverMaterialize = value; + } + + + /** + * If set to true then all sh:values rules will be skipped. + * + * @param value the new flag (defaults to false) + */ + public void setExcludeValues(boolean value) { + this.excludeValues = value; + } + + + @Override public void setProgressMonitor(ProgressMonitor value) { - this.monitor = value; - } + this.monitor = value; + } } diff --git a/src/main/java/org/topbraid/shacl/targets/Target.java b/src/main/java/org/topbraid/shacl/targets/Target.java index 0c8b64a0..d2351c4a 100644 --- a/src/main/java/org/topbraid/shacl/targets/Target.java +++ b/src/main/java/org/topbraid/shacl/targets/Target.java @@ -1,30 +1,32 @@ package org.topbraid.shacl.targets; -import java.util.Set; - import org.apache.jena.query.Dataset; import org.apache.jena.rdf.model.RDFNode; +import java.util.Set; + /** - * Interface for the various target types supported by SHACL, including https://www.w3.org/TR/shacl/#targets + * Interface for the various target types supported by SHACL, including SHACL Targets * but also SHACL-AF and SHACL-JS extensions. - * + * * @author Holger Knublauch */ public interface Target { - /** - * Adds target nodes to a given result collection. - * @param dataset the Dataset with the potential target nodes in the default graph - * @param results the collection to add the results to - */ - void addTargetNodes(Dataset dataset, Set results); + /** + * Adds target nodes to a given result collection. + * + * @param dataset the Dataset with the potential target nodes in the default graph + * @param results the collection to add the results to + */ + void addTargetNodes(Dataset dataset, Set results); - /** - * Checks whether a given node is in the target. - * @param dataset the Dataset with the potential target node in the default graph - * @param node the potential target node - * @return true if node is in this target - */ - boolean contains(Dataset dataset, RDFNode node); + /** + * Checks whether a given node is in the target. 
+ * + * @param dataset the Dataset with the potential target node in the default graph + * @param node the potential target node + * @return true if node is in this target + */ + boolean contains(Dataset dataset, RDFNode node); } diff --git a/src/main/java/org/topbraid/shacl/testcases/W3CTestRunner.java b/src/main/java/org/topbraid/shacl/testcases/W3CTestRunner.java index 93403279..e3545086 100644 --- a/src/main/java/org/topbraid/shacl/testcases/W3CTestRunner.java +++ b/src/main/java/org/topbraid/shacl/testcases/W3CTestRunner.java @@ -16,26 +16,10 @@ */ package org.topbraid.shacl.testcases; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.PrintStream; -import java.net.URI; -import java.util.LinkedList; -import java.util.List; -import java.util.UUID; - import org.apache.jena.graph.Graph; import org.apache.jena.graph.compose.MultiUnion; import org.apache.jena.query.Dataset; -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.ModelFactory; -import org.apache.jena.rdf.model.RDFList; -import org.apache.jena.rdf.model.RDFNode; -import org.apache.jena.rdf.model.Resource; -import org.apache.jena.rdf.model.ResourceFactory; -import org.apache.jena.rdf.model.Statement; +import org.apache.jena.rdf.model.*; import org.apache.jena.sparql.vocabulary.DOAP; import org.apache.jena.sparql.vocabulary.EARL; import org.apache.jena.util.FileUtils; @@ -52,260 +36,255 @@ import org.topbraid.shacl.validation.ValidationEngine; import org.topbraid.shacl.validation.ValidationEngineConfiguration; import org.topbraid.shacl.validation.ValidationEngineFactory; -import org.topbraid.shacl.vocabulary.DASH; -import org.topbraid.shacl.vocabulary.MF; -import org.topbraid.shacl.vocabulary.SH; -import org.topbraid.shacl.vocabulary.SHT; -import org.topbraid.shacl.vocabulary.TOSH; +import org.topbraid.shacl.vocabulary.*; + +import java.io.*; +import java.net.URI; +import java.util.LinkedList; +import java.util.List; +import java.util.UUID; /** * Helper object for executing the W3C test cases for SHACL. * The tests are assumed to be in a folder structure mirroring - * + *
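To illustrate the Target contract just shown, a hypothetical implementation that targets every instance of a fixed rdf:type in the default graph. The diff elides the generics of the results parameter, so Set<RDFNode> is assumed here.

import java.util.Set;
import org.apache.jena.query.Dataset;
import org.apache.jena.rdf.model.RDFNode;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.vocabulary.RDF;
import org.topbraid.shacl.targets.Target;

public class InstancesOfTarget implements Target {

    private final Resource type;

    public InstancesOfTarget(Resource type) {
        this.type = type;
    }

    @Override
    public void addTargetNodes(Dataset dataset, Set<RDFNode> results) {
        // Collects all subjects carrying the configured rdf:type in the default graph.
        dataset.getDefaultModel()
               .listSubjectsWithProperty(RDF.type, type)
               .forEachRemaining(results::add);
    }

    @Override
    public boolean contains(Dataset dataset, RDFNode node) {
        return node.isResource()
                && dataset.getDefaultModel().contains(node.asResource(), RDF.type, type);
    }
}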
* https://github.com/w3c/data-shapes/tree/gh-pages/data-shapes-test-suite/tests - * + * * @author Holger Knublauch */ public class W3CTestRunner { - - private final static Resource EARL_AUTHOR = ResourceFactory.createResource("http://knublauch.com"); - - private final static Resource EARL_SUBJECT = ResourceFactory.createResource("http://topquadrant.com/shacl/api"); - - private Model earl; - - private List items = new LinkedList<>(); - - - public W3CTestRunner(File rootManifest) throws IOException { - - earl = JenaUtil.createMemoryModel(); - JenaUtil.initNamespaces(earl.getGraph()); - earl.setNsPrefix("doap", DOAP.NS); - earl.setNsPrefix("earl", EARL.NS); - - earl.add(EARL_SUBJECT, RDF.type, DOAP.Project); - earl.add(EARL_SUBJECT, RDF.type, EARL.Software); - earl.add(EARL_SUBJECT, RDF.type, EARL.TestSubject); - earl.add(EARL_SUBJECT, DOAP.developer, EARL_AUTHOR); - earl.add(EARL_SUBJECT, DOAP.name, "TopBraid SHACL API"); - - collectItems(rootManifest, "urn:x:root/"); - } - - - private void collectItems(File manifestFile, String baseURI) throws IOException { - - String filePath = manifestFile.getAbsolutePath().replaceAll("\\\\", "/"); - int coreIndex = filePath.indexOf("core/"); - if(coreIndex > 0 && !filePath.contains("sparql/core")) { - filePath = filePath.substring(coreIndex); - } - else { - int sindex = filePath.indexOf("sparql/"); - if(sindex > 0) { - filePath = filePath.substring(sindex); - } - } - - Model model = JenaUtil.createMemoryModel(); - model.read(new FileInputStream(manifestFile), baseURI, FileUtils.langTurtle); - - for(Resource manifest : model.listSubjectsWithProperty(RDF.type, MF.Manifest).toList()) { - for(Resource include : JenaUtil.getResourceProperties(manifest, MF.include)) { - String path = include.getURI().substring(baseURI.length()); - File includeFile = new File(manifestFile.getParentFile(), path); - if(path.contains("/")) { - String addURI = path.substring(0, path.indexOf('/')); - collectItems(includeFile, baseURI + addURI + "/"); - } - else { - collectItems(includeFile, baseURI + path); - } - } - for(Resource entries : JenaUtil.getResourceProperties(manifest, MF.entries)) { - for(RDFNode entry : entries.as(RDFList.class).iterator().toList()) { - items.add(new Item(entry.asResource(), filePath, manifestFile)); - } - } - } - } - - - public Model getEARLModel() { - return earl; - } - - - public List getItems() { - return items; - } - - - public void run(PrintStream out) throws InterruptedException { - long startTime = System.currentTimeMillis(); - out.println("Running " + items.size() + " W3C Test Cases..."); - int count = 0; - for(Item item : items) { - if(!item.run(out)) { - count++; - } - } - out.println("Completed: " + count + " test failures (Duration: " + (System.currentTimeMillis() - startTime) + " ms)"); - } - - - public class Item { - - // The sht:Validate in its defining Model - Resource entry; - - String filePath; - - File manifestFile; - - - Item(Resource entry, String filePath, File manifestFile) { - this.entry = entry; - this.filePath = filePath; - this.manifestFile = manifestFile; - } - - - public Resource getEARLResource() { - return ResourceFactory.createResource("urn:x-shacl-test:" + entry.getURI().substring("urn:x:root".length())); - } - - - public String getFilePath() { - return filePath; - } - - - public String getLabel() { - return JenaUtil.getStringProperty(entry, RDFS.label); - } - - - public Resource getStatus() { - return entry.getPropertyResourceValue(MF.status); - } - - - public boolean run(PrintStream out) throws InterruptedException { 
- - Resource assertion = earl.createResource(EARL.Assertion); - assertion.addProperty(EARL.assertedBy, EARL_AUTHOR); - assertion.addProperty(EARL.subject, EARL_SUBJECT); - assertion.addProperty(EARL.test, getEARLResource()); - Resource result = earl.createResource(EARL.TestResult); - assertion.addProperty(EARL.result, result); - result.addProperty(EARL.mode, EARL.automatic); - - Resource action = entry.getPropertyResourceValue(MF.action); - Resource shapesGraphResource = action.getPropertyResourceValue(SHT.shapesGraph); - Graph shapesBaseGraph = entry.getModel().getGraph(); - if(!(entry.getURI() + ".ttl").equals(shapesGraphResource.getURI())) { - int last = shapesGraphResource.getURI().lastIndexOf('/'); - File shapesFile = new File(manifestFile.getParentFile(), shapesGraphResource.getURI().substring(last + 1)); - Model shapesModel = JenaUtil.createMemoryModel(); - try { - shapesModel.read(new FileInputStream(shapesFile), "urn:x:dummy", FileUtils.langTurtle); - shapesBaseGraph = shapesModel.getGraph(); - } catch (FileNotFoundException e) { - ExceptionUtil.throwUnchecked(e); - } - } - - MultiUnion multiUnion = new MultiUnion(new Graph[] { - shapesBaseGraph, - ARQFactory.getNamedModel(TOSH.BASE_URI).getGraph(), - ARQFactory.getNamedModel(DASH.BASE_URI).getGraph(), - ARQFactory.getNamedModel(SH.BASE_URI).getGraph() - }); - Model shapesModel = ModelFactory.createModelForGraph(multiUnion); - - Model dataModel = entry.getModel(); - Resource dataGraph = action.getPropertyResourceValue(SHT.dataGraph); - if(!(entry.getURI() + ".ttl").equals(dataGraph.getURI())) { - int last = dataGraph.getURI().lastIndexOf('/'); - File dataFile = new File(manifestFile.getParentFile(), dataGraph.getURI().substring(last + 1)); - dataModel = JenaUtil.createMemoryModel(); - try { - dataModel.read(new FileInputStream(dataFile), "urn:x:dummy", FileUtils.langTurtle); - } catch (FileNotFoundException e) { - ExceptionUtil.throwUnchecked(e); - } - } - - URI shapesGraphURI = URI.create("urn:x-shacl-shapes-graph:" + UUID.randomUUID().toString()); - Dataset dataset = ARQFactory.get().getDataset(dataModel); - dataset.addNamedModel(shapesGraphURI.toString(), shapesModel); - - ShapesGraph shapesGraph = ShapesGraphFactory.get().createShapesGraph(shapesModel); - ValidationEngineConfiguration configuration = new ValidationEngineConfiguration().setValidateShapes(false); - if(entry.hasProperty(ResourceFactory.createProperty(MF.NS + "requires"), SHT.CoreOnly)) { - shapesGraph.setConstraintFilter(new CoreConstraintFilter()); - } - ValidationEngine engine = ValidationEngineFactory.get().create(dataset, shapesGraphURI, shapesGraph, null); - engine.setConfiguration(configuration); - try { - Resource actualReport = engine.validateAll(); - Model actualResults = actualReport.getModel(); - actualResults.setNsPrefix(SH.PREFIX, SH.NS); - actualResults.setNsPrefix("rdf", RDF.getURI()); - actualResults.setNsPrefix("rdfs", RDFS.getURI()); - Model expectedModel = JenaUtil.createDefaultModel(); - Resource expectedReport = entry.getPropertyResourceValue(MF.result); - for(Statement s : expectedReport.listProperties().toList()) { - expectedModel.add(s); - } - for(Statement s : expectedReport.listProperties(SH.result).toList()) { - for(Statement t : s.getResource().listProperties().toList()) { - if(t.getPredicate().equals(DASH.suggestion)) { - GraphValidationTestCaseType.addStatements(expectedModel, t); - } - else if(SH.resultPath.equals(t.getPredicate())) { - expectedModel.add(t.getSubject(), t.getPredicate(), - SHACLPaths.clonePath(t.getResource(), 
expectedModel)); - } - else { - expectedModel.add(t); - } - } - } - actualResults.removeAll(null, SH.message, (RDFNode)null); - for(Statement s : actualResults.listStatements(null, SH.resultMessage, (RDFNode)null).toList()) { - if(!expectedModel.contains(null, SH.resultMessage, s.getObject())) { - actualResults.remove(s); - } - } - if(expectedModel.getGraph().isIsomorphicWith(actualResults.getGraph())) { - out.println("PASSED: " + entry); - result.addProperty(EARL.outcome, EARL.passed); - return true; - } - else { - out.println("FAILED: " + entry); - result.addProperty(EARL.outcome, EARL.failed); - expectedModel.setNsPrefixes(actualResults); - System.out.println("Expected\n" + ModelPrinter.get().print(expectedModel)); - System.out.println("Actual\n" + ModelPrinter.get().print(actualResults)); - return false; - } - } - catch(Exception ex) { - if(entry.hasProperty(MF.result, SHT.Failure)) { - out.println("PASSED: " + entry); - result.addProperty(EARL.outcome, EARL.passed); - return true; - } - else { - out.println("EXCEPTION: " + entry + " " + ex.getMessage()); - result.addProperty(EARL.outcome, EARL.failed); - return false; - } - } - } - } + + private final static Resource EARL_AUTHOR = ResourceFactory.createResource("http://knublauch.com"); + + private final static Resource EARL_SUBJECT = ResourceFactory.createResource("http://topquadrant.com/shacl/api"); + + private Model earl; + + private List items = new LinkedList<>(); + + + public W3CTestRunner(File rootManifest) throws IOException { + + earl = JenaUtil.createMemoryModel(); + JenaUtil.initNamespaces(earl.getGraph()); + earl.setNsPrefix("doap", DOAP.NS); + earl.setNsPrefix("earl", EARL.NS); + + earl.add(EARL_SUBJECT, RDF.type, DOAP.Project); + earl.add(EARL_SUBJECT, RDF.type, EARL.Software); + earl.add(EARL_SUBJECT, RDF.type, EARL.TestSubject); + earl.add(EARL_SUBJECT, DOAP.developer, EARL_AUTHOR); + earl.add(EARL_SUBJECT, DOAP.name, "TopBraid SHACL API"); + + collectItems(rootManifest, "urn:x:root/"); + } + + + private void collectItems(File manifestFile, String baseURI) throws IOException { + + String filePath = manifestFile.getAbsolutePath().replaceAll("\\\\", "/"); + int coreIndex = filePath.indexOf("core/"); + if (coreIndex > 0 && !filePath.contains("sparql/core")) { + filePath = filePath.substring(coreIndex); + } else { + int sindex = filePath.indexOf("sparql/"); + if (sindex > 0) { + filePath = filePath.substring(sindex); + } + } + + Model model = JenaUtil.createMemoryModel(); + model.read(new FileInputStream(manifestFile), baseURI, FileUtils.langTurtle); + + for (Resource manifest : model.listSubjectsWithProperty(RDF.type, MF.Manifest).toList()) { + for (Resource include : JenaUtil.getResourceProperties(manifest, MF.include)) { + String path = include.getURI().substring(baseURI.length()); + File includeFile = new File(manifestFile.getParentFile(), path); + if (path.contains("/")) { + String addURI = path.substring(0, path.indexOf('/')); + collectItems(includeFile, baseURI + addURI + "/"); + } else { + collectItems(includeFile, baseURI + path); + } + } + for (Resource entries : JenaUtil.getResourceProperties(manifest, MF.entries)) { + for (RDFNode entry : entries.as(RDFList.class).iterator().toList()) { + items.add(new Item(entry.asResource(), filePath, manifestFile)); + } + } + } + } + + + public Model getEARLModel() { + return earl; + } + + + public List getItems() { + return items; + } + + + public void run(PrintStream out) throws InterruptedException { + long startTime = System.currentTimeMillis(); + out.println("Running " + 
items.size() + " W3C Test Cases..."); + int count = 0; + for (Item item : items) { + if (!item.run(out)) { + count++; + } + } + out.println("Completed: " + count + " test failures (Duration: " + (System.currentTimeMillis() - startTime) + " ms)"); + } + + + public class Item { + + // The sht:Validate in its defining Model + Resource entry; + + String filePath; + + File manifestFile; + + + Item(Resource entry, String filePath, File manifestFile) { + this.entry = entry; + this.filePath = filePath; + this.manifestFile = manifestFile; + } + + + public Resource getEARLResource() { + return ResourceFactory.createResource("urn:x-shacl-test:" + entry.getURI().substring("urn:x:root".length())); + } + + + public String getFilePath() { + return filePath; + } + + + public String getLabel() { + return JenaUtil.getStringProperty(entry, RDFS.label); + } + + + public Resource getStatus() { + return entry.getPropertyResourceValue(MF.status); + } + + + public boolean run(PrintStream out) throws InterruptedException { + + Resource assertion = earl.createResource(EARL.Assertion); + assertion.addProperty(EARL.assertedBy, EARL_AUTHOR); + assertion.addProperty(EARL.subject, EARL_SUBJECT); + assertion.addProperty(EARL.test, getEARLResource()); + Resource result = earl.createResource(EARL.TestResult); + assertion.addProperty(EARL.result, result); + result.addProperty(EARL.mode, EARL.automatic); + + Resource action = entry.getPropertyResourceValue(MF.action); + Resource shapesGraphResource = action.getPropertyResourceValue(SHT.shapesGraph); + Graph shapesBaseGraph = entry.getModel().getGraph(); + if (!(entry.getURI() + ".ttl").equals(shapesGraphResource.getURI())) { + int last = shapesGraphResource.getURI().lastIndexOf('/'); + File shapesFile = new File(manifestFile.getParentFile(), shapesGraphResource.getURI().substring(last + 1)); + Model shapesModel = JenaUtil.createMemoryModel(); + try { + shapesModel.read(new FileInputStream(shapesFile), "urn:x:dummy", FileUtils.langTurtle); + shapesBaseGraph = shapesModel.getGraph(); + } catch (FileNotFoundException e) { + ExceptionUtil.throwUnchecked(e); + } + } + + MultiUnion multiUnion = new MultiUnion(new Graph[]{ + shapesBaseGraph, + ARQFactory.getNamedModel(TOSH.BASE_URI).getGraph(), + ARQFactory.getNamedModel(DASH.BASE_URI).getGraph(), + ARQFactory.getNamedModel(SH.BASE_URI).getGraph() + }); + Model shapesModel = ModelFactory.createModelForGraph(multiUnion); + + Model dataModel = entry.getModel(); + Resource dataGraph = action.getPropertyResourceValue(SHT.dataGraph); + if (!(entry.getURI() + ".ttl").equals(dataGraph.getURI())) { + int last = dataGraph.getURI().lastIndexOf('/'); + File dataFile = new File(manifestFile.getParentFile(), dataGraph.getURI().substring(last + 1)); + dataModel = JenaUtil.createMemoryModel(); + try { + dataModel.read(new FileInputStream(dataFile), "urn:x:dummy", FileUtils.langTurtle); + } catch (FileNotFoundException e) { + ExceptionUtil.throwUnchecked(e); + } + } + + URI shapesGraphURI = URI.create("urn:x-shacl-shapes-graph:" + UUID.randomUUID()); + Dataset dataset = ARQFactory.get().getDataset(dataModel); + dataset.addNamedModel(shapesGraphURI.toString(), shapesModel); + + ShapesGraph shapesGraph = ShapesGraphFactory.get().createShapesGraph(shapesModel); + ValidationEngineConfiguration configuration = new ValidationEngineConfiguration().setValidateShapes(false); + if (entry.hasProperty(ResourceFactory.createProperty(MF.NS + "requires"), SHT.CoreOnly)) { + shapesGraph.setConstraintFilter(new CoreConstraintFilter()); + } + ValidationEngine 
engine = ValidationEngineFactory.get().create(dataset, shapesGraphURI, shapesGraph, null); + engine.setConfiguration(configuration); + try { + Resource actualReport = engine.validateAll(); + Model actualResults = actualReport.getModel(); + actualResults.setNsPrefix(SH.PREFIX, SH.NS); + actualResults.setNsPrefix("rdf", RDF.getURI()); + actualResults.setNsPrefix("rdfs", RDFS.getURI()); + Model expectedModel = JenaUtil.createDefaultModel(); + Resource expectedReport = entry.getPropertyResourceValue(MF.result); + for (Statement s : expectedReport.listProperties().toList()) { + expectedModel.add(s); + } + for (Statement s : expectedReport.listProperties(SH.result).toList()) { + for (Statement t : s.getResource().listProperties().toList()) { + if (t.getPredicate().equals(DASH.suggestion)) { + GraphValidationTestCaseType.addStatements(expectedModel, t); + } else if (SH.resultPath.equals(t.getPredicate())) { + expectedModel.add(t.getSubject(), t.getPredicate(), + SHACLPaths.clonePath(t.getResource(), expectedModel)); + } else { + expectedModel.add(t); + } + } + } + actualResults.removeAll(null, SH.message, (RDFNode) null); + for (Statement s : actualResults.listStatements(null, SH.resultMessage, (RDFNode) null).toList()) { + if (!expectedModel.contains(null, SH.resultMessage, s.getObject())) { + actualResults.remove(s); + } + } + if (expectedModel.getGraph().isIsomorphicWith(actualResults.getGraph())) { + out.println("PASSED: " + entry); + result.addProperty(EARL.outcome, EARL.passed); + return true; + } else { + out.println("FAILED: " + entry); + result.addProperty(EARL.outcome, EARL.failed); + expectedModel.setNsPrefixes(actualResults); + System.out.println("Expected\n" + ModelPrinter.get().print(expectedModel)); + System.out.println("Actual\n" + ModelPrinter.get().print(actualResults)); + return false; + } + } catch (Exception ex) { + if (entry.hasProperty(MF.result, SHT.Failure)) { + out.println("PASSED: " + entry); + result.addProperty(EARL.outcome, EARL.passed); + return true; + } else { + out.println("EXCEPTION: " + entry + " " + ex.getMessage()); + result.addProperty(EARL.outcome, EARL.failed); + return false; + } + } + } + } } diff --git a/src/main/java/org/topbraid/shacl/util/ModelPrinter.java b/src/main/java/org/topbraid/shacl/util/ModelPrinter.java index 623064fc..4fccb179 100644 --- a/src/main/java/org/topbraid/shacl/util/ModelPrinter.java +++ b/src/main/java/org/topbraid/shacl/util/ModelPrinter.java @@ -16,41 +16,34 @@ */ package org.topbraid.shacl.util; -import java.io.StringWriter; - import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.RDFWriterI; -import org.apache.jena.util.FileUtils; +import org.apache.jena.riot.Lang; +import org.apache.jena.riot.RDFDataMgr; + +import java.io.StringWriter; /** * A singleton that takes a Jena Model and prints it into a string. * Used to create comparable renderings of Models produced by test cases. 
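/* Illustrative sketch, not part of the diff: driving the W3CTestRunner defined above.
   The manifest path is a placeholder, and checked exceptions (IOException, InterruptedException)
   are left unhandled for brevity. */
W3CTestRunner runner = new W3CTestRunner(new File("w3c/core/manifest.ttl")); // hypothetical manifest location
runner.run(System.out);                          // prints PASSED / FAILED / EXCEPTION per test entry
runner.getEARLModel().write(System.out, "TTL");  // EARL assertions collected during the run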
- * + * * @author Holger Knublauch */ public class ModelPrinter { - private static ModelPrinter singleton = new ModelPrinter(); - - - public static ModelPrinter get() { - return singleton; - } - - public static void set(ModelPrinter value) { - singleton = value; - } - - - protected RDFWriterI createRDFWriter(Model model) { - return model.getWriter(FileUtils.langTurtle); - } - - - public String print(Model model) { - StringWriter writer = new StringWriter(); - RDFWriterI w = createRDFWriter(model); - w.write(model, writer, null); - return writer.toString(); - } + private static ModelPrinter singleton = new ModelPrinter(); + + public static ModelPrinter get() { + return singleton; + } + + public static void set(ModelPrinter value) { + singleton = value; + } + + public String print(Model model) { + StringWriter writer = new StringWriter(); + RDFDataMgr.write(writer, model, Lang.TURTLE); + return writer.toString(); + } + } diff --git a/src/main/java/org/topbraid/shacl/util/SHACLUtil.java b/src/main/java/org/topbraid/shacl/util/SHACLUtil.java index f1a46505..7c76945a 100644 --- a/src/main/java/org/topbraid/shacl/util/SHACLUtil.java +++ b/src/main/java/org/topbraid/shacl/util/SHACLUtil.java @@ -503,7 +503,7 @@ public static List getAllShapesAtClassOrShape(Resource clsOrShape) String key = OntologyOptimizations.get().getKeyIfEnabledFor(clsOrShape.getModel().getGraph()); if(key != null) { key += ".getAllShapesAtClassOrShape(" + clsOrShape + ")"; - return (List) OntologyOptimizations.get().getOrComputeObject(key, () -> { + return (List) OntologyOptimizations.get().getOrComputeObject(key, (cacheKey) -> { List results = new LinkedList(); addAllShapesAtClassOrShape(clsOrShape, results, new HashSet()); return results; @@ -515,8 +515,9 @@ public static List getAllShapesAtClassOrShape(Resource clsOrShape) return results; } } - - + + + private static void addAllShapesAtClassOrShape(Resource clsOrShape, List results, Set reached) { addDirectShapesAtClassOrShape(clsOrShape, results); reached.add(clsOrShape); diff --git a/src/main/java/org/topbraid/shacl/validation/ExpressionConstraintExecutor.java b/src/main/java/org/topbraid/shacl/validation/ExpressionConstraintExecutor.java index 4fb09f5c..0bcafb0d 100644 --- a/src/main/java/org/topbraid/shacl/validation/ExpressionConstraintExecutor.java +++ b/src/main/java/org/topbraid/shacl/validation/ExpressionConstraintExecutor.java @@ -16,9 +16,6 @@ */ package org.topbraid.shacl.validation; -import java.util.Collection; -import java.util.List; - import org.apache.jena.rdf.model.RDFNode; import org.apache.jena.rdf.model.Resource; import org.apache.jena.rdf.model.Statement; @@ -28,31 +25,34 @@ import org.topbraid.shacl.expr.NodeExpressionFactory; import org.topbraid.shacl.vocabulary.SH; +import java.util.Collection; +import java.util.List; + /** - * Validator for sh:expression constraints, see https://w3c.github.io/shacl/shacl-af/#ExpressionConstraintComponent - * + * Validator for sh:expression constraints, see ExpressionConstraintComponent + * * @author Holger Knublauch */ public class ExpressionConstraintExecutor implements ConstraintExecutor { - @Override - public void executeConstraint(Constraint constraint, ValidationEngine engine, Collection focusNodes) { - // TODO: optimize, currently produces a new NodeExpression each time - NodeExpression expr = NodeExpressionFactory.get().create(constraint.getParameterValue()); - for(RDFNode focusNode : focusNodes) { - engine.checkCanceled(); - for(RDFNode valueNode : engine.getValueNodes(constraint, focusNode)) { - List 
results = expr.eval(valueNode, engine).toList(); - if(results.size() != 1 || !JenaDatatypes.TRUE.equals(results.get(0))) { - Resource result = engine.createValidationResult(constraint, focusNode, valueNode, () -> "Expression does not evaluate to true"); - result.addProperty(SH.sourceConstraint, constraint.getParameterValue()); - if(constraint.getParameterValue() instanceof Resource && ((Resource)constraint.getParameterValue()).hasProperty(SH.message)) { - for(Statement s : ((Resource)constraint.getParameterValue()).listProperties(SH.message).toList()) { - result.addProperty(SH.resultMessage, s.getObject()); - } - } - } - } - } - } + @Override + public void executeConstraint(Constraint constraint, ValidationEngine engine, Collection focusNodes) { + // TODO: optimize, currently produces a new NodeExpression each time + NodeExpression expr = NodeExpressionFactory.get().create(constraint.getParameterValue()); + for (RDFNode focusNode : focusNodes) { + engine.checkCanceled(); + for (RDFNode valueNode : engine.getValueNodes(constraint, focusNode)) { + List results = expr.eval(valueNode, engine).toList(); + if (results.size() != 1 || !JenaDatatypes.TRUE.equals(results.get(0))) { + Resource result = engine.createValidationResult(constraint, focusNode, valueNode, () -> "Expression does not evaluate to true"); + result.addProperty(SH.sourceConstraint, constraint.getParameterValue()); + if (constraint.getParameterValue() instanceof Resource && ((Resource) constraint.getParameterValue()).hasProperty(SH.message)) { + for (Statement s : ((Resource) constraint.getParameterValue()).listProperties(SH.message).toList()) { + result.addProperty(SH.resultMessage, s.getObject()); + } + } + } + } + } + } } diff --git a/src/main/java/org/topbraid/shacl/validation/MaximumNumberViolations.java b/src/main/java/org/topbraid/shacl/validation/MaximumNumberViolations.java index 79e00910..4cea4845 100644 --- a/src/main/java/org/topbraid/shacl/validation/MaximumNumberViolations.java +++ b/src/main/java/org/topbraid/shacl/validation/MaximumNumberViolations.java @@ -1,9 +1,8 @@ package org.topbraid.shacl.validation; -@SuppressWarnings("serial") public class MaximumNumberViolations extends RuntimeException { - - public MaximumNumberViolations(int violationCount) { - super("Maximum number of violations (" + violationCount + ") reached"); + + public MaximumNumberViolations(int violationCount) { + super("Maximum number of violations (" + violationCount + ") reached"); } } diff --git a/src/main/java/org/topbraid/shacl/validation/SHACLException.java b/src/main/java/org/topbraid/shacl/validation/SHACLException.java index 699d6a9e..325fe57a 100644 --- a/src/main/java/org/topbraid/shacl/validation/SHACLException.java +++ b/src/main/java/org/topbraid/shacl/validation/SHACLException.java @@ -18,13 +18,12 @@ /** * An Exception signaling invalid input to the constraint validation engine. 
- * + * * @author Holger Knublauch */ -@SuppressWarnings("serial") public class SHACLException extends RuntimeException { - public SHACLException(String message) { - super(message); - } + public SHACLException(String message) { + super(message); + } } diff --git a/src/main/java/org/topbraid/shacl/validation/ValidationEngine.java b/src/main/java/org/topbraid/shacl/validation/ValidationEngine.java index d5c4b992..bb890c57 100644 --- a/src/main/java/org/topbraid/shacl/validation/ValidationEngine.java +++ b/src/main/java/org/topbraid/shacl/validation/ValidationEngine.java @@ -16,37 +16,13 @@ */ package org.topbraid.shacl.validation; -import java.net.URI; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.WeakHashMap; -import java.util.concurrent.ConcurrentHashMap; -import java.util.function.Function; -import java.util.function.Predicate; -import java.util.function.Supplier; - import org.apache.jena.graph.Graph; import org.apache.jena.graph.Node; import org.apache.jena.graph.Triple; import org.apache.jena.graph.compose.MultiUnion; import org.apache.jena.query.Dataset; import org.apache.jena.query.QuerySolution; -import org.apache.jena.rdf.model.Literal; -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.ModelFactory; -import org.apache.jena.rdf.model.Property; -import org.apache.jena.rdf.model.RDFNode; -import org.apache.jena.rdf.model.Resource; -import org.apache.jena.rdf.model.ResourceFactory; -import org.apache.jena.rdf.model.Statement; -import org.apache.jena.rdf.model.StmtIterator; +import org.apache.jena.rdf.model.*; import org.apache.jena.sparql.path.P_Inverse; import org.apache.jena.sparql.path.P_Link; import org.apache.jena.sparql.path.Path; @@ -58,11 +34,7 @@ import org.topbraid.jenax.util.JenaUtil; import org.topbraid.jenax.util.RDFLabels; import org.topbraid.shacl.arq.SHACLPaths; -import org.topbraid.shacl.engine.AbstractEngine; -import org.topbraid.shacl.engine.Constraint; -import org.topbraid.shacl.engine.SHACLScriptEngineManager; -import org.topbraid.shacl.engine.Shape; -import org.topbraid.shacl.engine.ShapesGraph; +import org.topbraid.shacl.engine.*; import org.topbraid.shacl.engine.filters.ExcludeMetaShapesFilter; import org.topbraid.shacl.expr.NodeExpression; import org.topbraid.shacl.expr.NodeExpressionFactory; @@ -77,610 +49,607 @@ import org.topbraid.shacl.vocabulary.DASH; import org.topbraid.shacl.vocabulary.SH; +import java.net.URI; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.function.Function; +import java.util.function.Predicate; +import java.util.function.Supplier; + /** * A ValidationEngine uses a given shapes graph (represented via an instance of ShapesGraph) * and performs SHACL validation on a given Dataset. - * + *

* Instances of this class should be created via the ValidatorFactory. - * + * * @author Holger Knublauch */ public class ValidationEngine extends AbstractEngine { - - // The currently active ValidationEngine for cases where no direct pointer can be acquired, e.g. from HasShapeFunction - private static ThreadLocal current = new ThreadLocal<>(); - - public static ValidationEngine getCurrent() { - return current.get(); - } - - public static void setCurrent(ValidationEngine value) { - current.set(value); - } - - - // Avoids repeatedly walking up/down the class hierarchy for sh:class constraints - private ClassesCache classesCache; - - private ValidationEngineConfiguration configuration; - - // Can be used to drop certain focus nodes from validation - private Predicate focusNodeFilter; - - // The inferred triples if the shapes graph declares an entailment regime - private Model inferencesModel; - - // The label function for rendering nodes in validation results (message templates etc) - private Function labelFunction = (node -> RDFLabels.get().getNodeLabel(node)); - - // Avoids repeatedly fetching labels - private Map labelsCache = new ConcurrentHashMap<>(); - - // Can be used to collect statistical data about execution time of constraint components and shapes - private ValidationProfile profile; - - // The resulting validation report instance - private Resource report; - - // Number of created results, e.g. for progress monitor - private int resultsCount = 0; - - // Avoids repeatedly fetching the value nodes of a focus node / path combination - private Map> valueNodes = new WeakHashMap<>(); - - // Number of created violations, e.g. for progress monitor - private int violationsCount = 0; - - - /** - * Constructs a new ValidationEngine. - * @param dataset the Dataset to operate on - * @param shapesGraphURI the URI of the shapes graph (must be in the dataset) - * @param shapesGraph the ShapesGraph with the shapes to validate against - * @param report the sh:ValidationReport object in the results Model, or null to create a new one - */ - protected ValidationEngine(Dataset dataset, URI shapesGraphURI, ShapesGraph shapesGraph, Resource report) { - super(dataset, shapesGraph, shapesGraphURI); - setConfiguration(new ValidationEngineConfiguration()); - if(report == null) { - Model reportModel = JenaUtil.createMemoryModel(); - reportModel.setNsPrefixes(dataset.getDefaultModel()); // This can be very expensive in some databases - reportModel.withDefaultMappings(shapesGraph.getShapesModel()); - this.report = reportModel.createResource(SH.ValidationReport); - } - else { - this.report = report; - } - } - - - /** - * Checks if entailments are active for the current shapes graph and applies them for a given focus node. - * This will only work for the sh:Rules entailment, e.g. to compute sh:values and sh:defaultValue. - * If any inferred triples exist, the focus node will be returned attached to the model that includes those inferences. - * The dataset used internally will also be switched to use that new model as its default model, so that if - * a node gets validated it will "see" the inferred triples too. 
- * @param focusNode the focus node - * @return the focus node, possibly in a different Model than originally - */ - public RDFNode applyEntailments(Resource focusNode) { - Model shapesModel = dataset.getNamedModel(shapesGraphURI.toString()); - if(shapesModel.contains(null, SH.entailment, SH.Rules)) { - - // Create union of data model and inferences if called for the first time - if(inferencesModel == null) { - inferencesModel = JenaUtil.createDefaultModel(); - Model dataModel = dataset.getDefaultModel(); - MultiUnion multiUnion = new MultiUnion(new Graph[]{ - dataModel.getGraph(), - inferencesModel.getGraph() - }); - multiUnion.setBaseGraph(dataModel.getGraph()); - dataset.setDefaultModel(ModelFactory.createModelForGraph(multiUnion)); - } - - // Apply sh:values rules - Map defaultValueMap = new HashMap<>(); - for(SHNodeShape nodeShape : SHACLUtil.getAllShapesAtNode(focusNode)) { - if(!nodeShape.hasProperty(SH.deactivated, JenaDatatypes.TRUE)) { - for(SHPropertyShape ps : nodeShape.getPropertyShapes()) { - if(!ps.hasProperty(SH.deactivated, JenaDatatypes.TRUE)) { - Resource path = ps.getPath(); - if(path instanceof Resource) { - Statement values = ps.getProperty(SH.values); - if(values != null) { - NodeExpression ne = NodeExpressionFactory.get().create(values.getObject()); - ne.eval(focusNode, this).forEachRemaining(v -> inferencesModel.getGraph().add(Triple.create(focusNode.asNode(), path.asNode(), v.asNode()))); - } - Statement defaultValue = ps.getProperty(SH.defaultValue); - if(defaultValue != null) { - defaultValueMap.put(JenaUtil.asProperty(path), defaultValue.getObject()); - } - } - } - } - } - } - - // Add sh:defaultValue where needed - Model dataModel = dataset.getDefaultModel(); // This is now the union model - Resource newFocusNode = focusNode.inModel(dataModel); - for(Property predicate : defaultValueMap.keySet()) { - if(!newFocusNode.hasProperty(predicate)) { - NodeExpression ne = NodeExpressionFactory.get().create(defaultValueMap.get(predicate)); - ne.eval(focusNode, this).forEachRemaining(v -> inferencesModel.add(focusNode, predicate, v)); - } - } - return newFocusNode; - } - return focusNode; - } - - - public void addResultMessage(Resource result, Literal message, QuerySolution bindings) { - result.addProperty(SH.resultMessage, SPARQLSubstitutions.withSubstitutions(message, bindings, getLabelFunction())); - } - - - // Note: does not set sh:path - public Resource createResult(Resource type, Constraint constraint, RDFNode focusNode) { - Resource result = report.getModel().createResource(type); - report.addProperty(SH.result, result); - result.addProperty(SH.resultSeverity, constraint.getSeverity()); - result.addProperty(SH.sourceConstraintComponent, constraint.getComponent()); - result.addProperty(SH.sourceShape, constraint.getShapeResource()); - if(focusNode != null) { - result.addProperty(SH.focusNode, focusNode); - } - - checkMaximumNumberFailures(constraint); - - resultsCount++; - - return result; - } - - - public Resource createValidationResult(Constraint constraint, RDFNode focusNode, RDFNode value, Supplier defaultMessage) { - Resource result = createResult(SH.ValidationResult, constraint, focusNode); - if(value != null) { - result.addProperty(SH.value, value); - } - if(!constraint.getShape().isNodeShape()) { - result.addProperty(SH.resultPath, SHACLPaths.clonePath(constraint.getShapeResource().getPath(), result.getModel())); - } - Collection messages = constraint.getMessages(); - if(messages.size() > 0) { - messages.stream().forEach(message -> 
result.addProperty(SH.resultMessage, message)); - } - else if(defaultMessage != null) { - String m = defaultMessage.get(); - if(m != null) { - result.addProperty(SH.resultMessage, m); - } - } - return result; - } - - - private void checkMaximumNumberFailures(Constraint constraint) { - if (SH.Violation.equals(constraint.getShape().getSeverity())) { - this.violationsCount++; - if (configuration.getValidationErrorBatch() != -1 && violationsCount >= configuration.getValidationErrorBatch()) { - throw new MaximumNumberViolations(violationsCount); - } - } - } - - - public ClassesCache getClassesCache() { - return classesCache; - } - - - public ValidationEngineConfiguration getConfiguration() { - return configuration; - } - - - public String getLabel(RDFNode node) { - return labelsCache.computeIfAbsent(node, n -> getLabelFunction().apply(n)); - } - - - public Function getLabelFunction() { - return labelFunction; - } - - - public ValidationProfile getProfile() { - return profile; - } - - - /** - * Gets the validation report as a Resource in the report Model. - * @return the report Resource - */ - public Resource getReport() { - return report; - } - - - /** - * Gets a Set of all shapes that should be evaluated for a given resource. - * @param focusNode the focus node to get the shapes for - * @param dataset the Dataset containing the resource - * @param shapesModel the shapes Model - * @return a Set of shape resources - */ - private Set getShapesForNode(RDFNode focusNode, Dataset dataset, Model shapesModel) { - - Set shapes = new HashSet<>(); - - for(Shape rootShape : shapesGraph.getRootShapes()) { - for(Target target : rootShape.getTargets()) { - if(!(target instanceof InstancesTarget)) { - if(target.contains(dataset, focusNode)) { - shapes.add(rootShape.getShapeResource()); - } - } - } - } - - // rdf:type / sh:targetClass - if(focusNode instanceof Resource) { - for(Resource type : JenaUtil.getAllTypes((Resource)focusNode)) { - if(JenaUtil.hasIndirectType(type.inModel(shapesModel), SH.Shape)) { - shapes.add(type); - } - for(Statement s : shapesModel.listStatements(null, SH.targetClass, type).toList()) { - shapes.add(s.getSubject()); - } - } - } - - return shapes; - } - - - public ValidationReport getValidationReport() { - return new ResourceValidationReport(report); - } - - - public Collection getValueNodes(Constraint constraint, RDFNode focusNode) { - if(constraint.getShape().isNodeShape()) { - return Collections.singletonList(focusNode); - } - else { - // We use a cache here because many shapes contains for example both sh:datatype and sh:minCount, and fetching - // the value nodes each time may be expensive, esp for sh:minCount/maxCount constraints. 
- ValueNodesCacheKey key = new ValueNodesCacheKey(focusNode, constraint.getShape().getPath()); - return valueNodes.computeIfAbsent(key, k -> getValueNodesHelper(focusNode, constraint)); - } - } - - - private Collection getValueNodesHelper(RDFNode focusNode, Constraint constraint) { - Property predicate = constraint.getShape().getPredicate(); - if(predicate != null) { - List results = new LinkedList<>(); - if(focusNode instanceof Resource) { - Iterator it = ((Resource)focusNode).listProperties(predicate); - while(it.hasNext()) { - results.add(it.next().getObject()); - } - } - return results; - } - else { - Path jenaPath = constraint.getShape().getJenaPath(); - if(jenaPath instanceof P_Inverse && ((P_Inverse)jenaPath).getSubPath() instanceof P_Link) { - List results = new LinkedList<>(); - Property inversePredicate = ResourceFactory.createProperty(((P_Link)((P_Inverse)jenaPath).getSubPath()).getNode().getURI()); - Iterator it = focusNode.getModel().listStatements(null, inversePredicate, focusNode); - while(it.hasNext()) { - results.add(it.next().getSubject()); - } - return results; - } - Set results = new HashSet<>(); - Iterator it = PathEval.eval(focusNode.getModel().getGraph(), focusNode.asNode(), jenaPath, Context.emptyContext()); - while(it.hasNext()) { - Node node = it.next(); - results.add(focusNode.getModel().asRDFNode(node)); - } - return results; - } - } - - - /** - * Validates a given list of focus nodes against a given Shape, and stops as soon - * as one validation result is reported. No results are recorded. - * @param focusNodes the nodes to validate - * @param shape the sh:Shape to validate against - * @return true if there were no validation results, false for violations - */ - public boolean nodesConformToShape(List focusNodes, Node shape) { - if(!shapesGraph.isIgnored(shape)) { - Resource oldReport = report; - report = JenaUtil.createMemoryModel().createResource(); - try { - Shape vs = shapesGraph.getShape(shape); - if(!vs.isDeactivated()) { - boolean nested = SHACLScriptEngineManager.get().begin(); - try { - for(Constraint constraint : vs.getConstraints()) { - validateNodesAgainstConstraint(focusNodes, constraint); - if(report.hasProperty(SH.result)) { - return false; - } - } - } - finally { - SHACLScriptEngineManager.get().end(nested); - } - } - } - finally { - this.report = oldReport; - } - } - return true; - } - - - public void setClassesCache(ClassesCache value) { - this.classesCache = value; - } - - - /** - * Sets a filter that can be used to skip certain focus node from validation. - * The filter must return true if the given candidate focus node shall be validated, - * and false to skip it. - * @param value the new filter - */ - public void setFocusNodeFilter(Predicate value) { - this.focusNodeFilter = value; - } - - - public void setLabelFunction(Function value) { - this.labelFunction = value; - } - - - public void updateConforms() { - boolean conforms = true; - StmtIterator it = report.listProperties(SH.result); - while(it.hasNext()) { - Statement s = it.next(); - if(s.getResource().hasProperty(RDF.type, SH.ValidationResult)) { - conforms = false; - it.close(); - break; - } - } - if(report.hasProperty(SH.conforms)) { - report.removeAll(SH.conforms); - } - report.addProperty(SH.conforms, conforms ? JenaDatatypes.TRUE : JenaDatatypes.FALSE); - } - - - /** - * Validates all target nodes against all of their shapes. - * To further narrow down which nodes to validate, use {@link #setFocusNodeFilter(Predicate)}. 
- * @return an instance of sh:ValidationReport in the results Model - * @throws InterruptedException if the monitor has canceled this - */ - public Resource validateAll() throws InterruptedException { - List rootShapes = shapesGraph.getRootShapes(); - return validateShapes(rootShapes); - } - - - /** - * Validates a given focus node against all of the shapes that have matching targets. - * @param focusNode the node to validate - * @return an instance of sh:ValidationReport in the results Model - * @throws InterruptedException if the monitor has canceled this - */ - public Resource validateNode(Node focusNode) throws InterruptedException { - - Model shapesModel = dataset.getNamedModel(shapesGraphURI.toString()); - - RDFNode focusRDFNode = dataset.getDefaultModel().asRDFNode(focusNode); - Set shapes = getShapesForNode(focusRDFNode, dataset, shapesModel); - boolean nested = SHACLScriptEngineManager.get().begin(); - try { - for(Resource shape : shapes) { - if(monitor != null && monitor.isCanceled()) { - throw new InterruptedException(); - } - validateNodesAgainstShape(Collections.singletonList(focusRDFNode), shape.asNode()); - } - } - finally { - SHACLScriptEngineManager.get().end(nested); - } - - return report; - } - - - /** - * Validates a given list of focus node against a given Shape. - * @param focusNodes the nodes to validate - * @param shape the sh:Shape to validate against - * @return an instance of sh:ValidationReport in the results Model - */ - public Resource validateNodesAgainstShape(List focusNodes, Node shape) { - if(!shapesGraph.isIgnored(shape)) { - Shape vs = shapesGraph.getShape(shape); - if(!vs.isDeactivated()) { - boolean nested = SHACLScriptEngineManager.get().begin(); - ValidationEngine oldEngine = current.get(); - current.set(this); - try { - for(Constraint constraint : vs.getConstraints()) { - validateNodesAgainstConstraint(focusNodes, constraint); - } - } - finally { - current.set(oldEngine); - SHACLScriptEngineManager.get().end(nested); - } - } - } - return report; - } - - - /** - * Validates all target nodes of a given collection of shapes against these shapes. - * To further narrow down which nodes to validate, use {@link #setFocusNodeFilter(Predicate)}. - * @return an instance of sh:ValidationReport in the results Model - * @throws InterruptedException if the monitor has canceled this - */ - public Resource validateShapes(Collection shapes) throws InterruptedException { - boolean nested = SHACLScriptEngineManager.get().begin(); - try { - if(monitor != null) { - monitor.beginTask("Validating " + shapes.size() + " shapes", shapes.size()); - } - if(classesCache == null) { - // If we are doing everything then the cache should be used, but not for validation of individual focus nodes - classesCache = new ClassesCache(); - } - int i = 0; - for(Shape shape : shapes) { - - if(monitor != null) { - String label = "Shape " + (++i) + ": " + getLabelFunction().apply(shape.getShapeResource()); - if(resultsCount > 0) { - label = "" + resultsCount + " results. 
" + label; - } - monitor.subTask(label); - } - - Collection focusNodes = shape.getTargetNodes(dataset); - if(focusNodeFilter != null) { - List filteredFocusNodes = new LinkedList<>(); - for(RDFNode focusNode : focusNodes) { - if(focusNodeFilter.test(focusNode)) { - filteredFocusNodes.add(focusNode); - } - } - focusNodes = filteredFocusNodes; - } - if(!focusNodes.isEmpty()) { - for(Constraint constraint : shape.getConstraints()) { - validateNodesAgainstConstraint(focusNodes, constraint); - } - } - if(monitor != null) { - monitor.worked(1); - if(monitor.isCanceled()) { - throw new InterruptedException(); - } - } - } - } - catch(MaximumNumberViolations ex) { - // Ignore as this is just our way to stop validation when max number of violations is reached - } - finally { - SHACLScriptEngineManager.get().end(nested); - } - updateConforms(); - return report; - } - - - protected void validateNodesAgainstConstraint(Collection focusNodes, Constraint constraint) { - if(configuration != null && configuration.isSkippedConstraintComponent(constraint.getComponent())) { - return; - } - - ConstraintExecutor executor; - try { - executor = constraint.getExecutor(); - } - catch(Exception ex) { - Resource result = createResult(DASH.FailureResult, constraint, constraint.getShapeResource()); - result.addProperty(SH.resultMessage, "Failed to create validator: " + ExceptionUtil.getStackTrace(ex)); - return; - } - if(executor != null) { - if(SHACLPreferences.isProduceFailuresMode()) { - try { - executor.executeConstraint(constraint, this, focusNodes); - } - catch(Exception ex) { - Resource result = createResult(DASH.FailureResult, constraint, constraint.getShapeResource()); - result.addProperty(SH.resultMessage, "Exception during validation: " + ExceptionUtil.getStackTrace(ex)); - } - } - else { - executor.executeConstraint(constraint, this, focusNodes); - } - } - else { - FailureLog.get().logWarning("No suitable validator found for constraint " + constraint); - } - } - - - public void setConfiguration(ValidationEngineConfiguration configuration) { - this.configuration = configuration; - if(!configuration.getValidateShapes()) { - shapesGraph.setShapeFilter(new ExcludeMetaShapesFilter()); - } - } - - - public void setProfile(ValidationProfile profile) { - this.profile = profile; - } - - - // Used to avoid repeated computation of value nodes for a focus node / path combination - private static class ValueNodesCacheKey { - - Resource path; - - RDFNode focusNode; - - - ValueNodesCacheKey(RDFNode focusNode, Resource path) { - this.path = path; - this.focusNode = focusNode; - } - - - public boolean equals(Object o) { - if(o instanceof ValueNodesCacheKey) { - return path.equals(((ValueNodesCacheKey)o).path) && focusNode.equals(((ValueNodesCacheKey)o).focusNode); - } - else { - return false; - } - } - - - @Override - public int hashCode() { - return path.hashCode() + focusNode.hashCode(); - } - - - @Override - public String toString() { - return focusNode.toString() + " . " + path; - } - } -} + + // The currently active ValidationEngine for cases where no direct pointer can be acquired, e.g. 
from HasShapeFunction + private static ThreadLocal current = new ThreadLocal<>(); + + public static ValidationEngine getCurrent() { + return current.get(); + } + + public static void setCurrent(ValidationEngine value) { + current.set(value); + } + + + // Avoids repeatedly walking up/down the class hierarchy for sh:class constraints + private ClassesCache classesCache; + + private ValidationEngineConfiguration configuration; + + // Can be used to drop certain focus nodes from validation + private Predicate focusNodeFilter; + + // The inferred triples if the shapes graph declares an entailment regime + private Model inferencesModel; + + // The label function for rendering nodes in validation results (message templates etc) + private Function labelFunction = (node -> RDFLabels.get().getNodeLabel(node)); + + // Avoids repeatedly fetching labels + private Map labelsCache = new ConcurrentHashMap<>(); + + // Can be used to collect statistical data about execution time of constraint components and shapes + private ValidationProfile profile; + + // The resulting validation report instance + private Resource report; + + // Number of created results, e.g. for progress monitor + private int resultsCount = 0; + + // Avoids repeatedly fetching the value nodes of a focus node / path combination + private Map> valueNodes = new WeakHashMap<>(); + + // Number of created violations, e.g. for progress monitor + private int violationsCount = 0; + + + /** + * Constructs a new ValidationEngine. + * + * @param dataset the Dataset to operate on + * @param shapesGraphURI the URI of the shapes graph (must be in the dataset) + * @param shapesGraph the ShapesGraph with the shapes to validate against + * @param report the sh:ValidationReport object in the results Model, or null to create a new one + */ + protected ValidationEngine(Dataset dataset, URI shapesGraphURI, ShapesGraph shapesGraph, Resource report) { + super(dataset, shapesGraph, shapesGraphURI); + setConfiguration(new ValidationEngineConfiguration()); + if (report == null) { + Model reportModel = JenaUtil.createMemoryModel(); + reportModel.setNsPrefixes(dataset.getDefaultModel()); // This can be very expensive in some databases + reportModel.withDefaultMappings(shapesGraph.getShapesModel()); + this.report = reportModel.createResource(SH.ValidationReport); + } else { + this.report = report; + } + } + + + /** + * Checks if entailments are active for the current shapes graph and applies them for a given focus node. + * This will only work for the sh:Rules entailment, e.g. to compute sh:values and sh:defaultValue. + * If any inferred triples exist, the focus node will be returned attached to the model that includes those inferences. + * The dataset used internally will also be switched to use that new model as its default model, so that if + * a node gets validated it will "see" the inferred triples too. 
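/* Illustrative sketch with a hypothetical IRI: when the shapes graph declares sh:entailment sh:Rules,
   the method below hands back the focus node attached to a union model that also contains the
   sh:values / sh:defaultValue inferences, so later constraint checks on that node can see them. */
Resource alice = dataset.getDefaultModel().createResource("http://example.org/ns#Alice");
RDFNode enriched = engine.applyEntailments(alice);   // same node, possibly in the enriched union model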
+ * + * @param focusNode the focus node + * @return the focus node, possibly in a different Model than originally + */ + public RDFNode applyEntailments(Resource focusNode) { + Model shapesModel = dataset.getNamedModel(shapesGraphURI.toString()); + if (shapesModel.contains(null, SH.entailment, SH.Rules)) { + + // Create union of data model and inferences if called for the first time + if (inferencesModel == null) { + inferencesModel = JenaUtil.createDefaultModel(); + Model dataModel = dataset.getDefaultModel(); + MultiUnion multiUnion = new MultiUnion(new Graph[]{dataModel.getGraph(), inferencesModel.getGraph()}); + multiUnion.setBaseGraph(dataModel.getGraph()); + dataset.setDefaultModel(ModelFactory.createModelForGraph(multiUnion)); + } + + // Apply sh:values rules + Map defaultValueMap = new HashMap<>(); + for (SHNodeShape nodeShape : SHACLUtil.getAllShapesAtNode(focusNode)) { + if (!nodeShape.hasProperty(SH.deactivated, JenaDatatypes.TRUE)) { + for (SHPropertyShape ps : nodeShape.getPropertyShapes()) { + if (!ps.hasProperty(SH.deactivated, JenaDatatypes.TRUE)) { + Resource path = ps.getPath(); + Statement values = ps.getProperty(SH.values); + if (values != null) { + NodeExpression ne = NodeExpressionFactory.get().create(values.getObject()); + ne.eval(focusNode, this).forEachRemaining(v -> inferencesModel.getGraph().add(Triple.create(focusNode.asNode(), path.asNode(), v.asNode()))); + } + Statement defaultValue = ps.getProperty(SH.defaultValue); + if (defaultValue != null) { + defaultValueMap.put(JenaUtil.asProperty(path), defaultValue.getObject()); + } + } + } + } + } + + // Add sh:defaultValue where needed + Model dataModel = dataset.getDefaultModel(); // This is now the union model + Resource newFocusNode = focusNode.inModel(dataModel); + for (Property predicate : defaultValueMap.keySet()) { + if (!newFocusNode.hasProperty(predicate)) { + NodeExpression ne = NodeExpressionFactory.get().create(defaultValueMap.get(predicate)); + ne.eval(focusNode, this).forEachRemaining(v -> inferencesModel.add(focusNode, predicate, v)); + } + } + return newFocusNode; + } + return focusNode; + } + + + public void addResultMessage(Resource result, Literal message, QuerySolution bindings) { + result.addProperty(SH.resultMessage, SPARQLSubstitutions.withSubstitutions(message, bindings, getLabelFunction())); + } + + + // Note: does not set sh:path + public Resource createResult(Resource type, Constraint constraint, RDFNode focusNode) { + Resource result = report.getModel().createResource(type); + report.addProperty(SH.result, result); + result.addProperty(SH.resultSeverity, constraint.getSeverity()); + result.addProperty(SH.sourceConstraintComponent, constraint.getComponent()); + result.addProperty(SH.sourceShape, constraint.getShapeResource()); + if (focusNode != null) { + result.addProperty(SH.focusNode, focusNode); + } + + checkMaximumNumberFailures(constraint); + + resultsCount++; + + return result; + } + + + public Resource createValidationResult(Constraint constraint, RDFNode focusNode, RDFNode value, Supplier defaultMessage) { + Resource result = createResult(SH.ValidationResult, constraint, focusNode); + if (value != null) { + result.addProperty(SH.value, value); + } + if (!constraint.getShape().isNodeShape()) { + result.addProperty(SH.resultPath, SHACLPaths.clonePath(constraint.getShapeResource().getPath(), result.getModel())); + } + Collection messages = constraint.getMessages(); + if (messages.size() > 0) { + messages.stream().forEach(message -> result.addProperty(SH.resultMessage, message)); + } 
else if (defaultMessage != null) { + String m = defaultMessage.get(); + if (m != null) { + result.addProperty(SH.resultMessage, m); + } + } + return result; + } + + + private void checkMaximumNumberFailures(Constraint constraint) { + if (SH.Violation.equals(constraint.getShape().getSeverity())) { + this.violationsCount++; + if (configuration.getValidationErrorBatch() != -1 && violationsCount >= configuration.getValidationErrorBatch()) { + throw new MaximumNumberViolations(violationsCount); + } + } + } + + + public ClassesCache getClassesCache() { + return classesCache; + } + + + public ValidationEngineConfiguration getConfiguration() { + return configuration; + } + + + public String getLabel(RDFNode node) { + return labelsCache.computeIfAbsent(node, n -> getLabelFunction().apply(n)); + } + + + public Function getLabelFunction() { + return labelFunction; + } + + + public ValidationProfile getProfile() { + return profile; + } + + + /** + * Gets the validation report as a Resource in the report Model. + * + * @return the report Resource + */ + public Resource getReport() { + return report; + } + + + /** + * Gets a Set of all shapes that should be evaluated for a given resource. + * + * @param focusNode the focus node to get the shapes for + * @param dataset the Dataset containing the resource + * @param shapesModel the shapes Model + * @return a Set of shape resources + */ + private Set getShapesForNode(RDFNode focusNode, Dataset dataset, Model shapesModel) { + + Set shapes = new HashSet<>(); + + for (Shape rootShape : shapesGraph.getRootShapes()) { + for (Target target : rootShape.getTargets()) { + if (!(target instanceof InstancesTarget)) { + if (target.contains(dataset, focusNode)) { + shapes.add(rootShape.getShapeResource()); + } + } + } + } + + // rdf:type / sh:targetClass + if (focusNode instanceof Resource) { + for (Resource type : JenaUtil.getAllTypes((Resource) focusNode)) { + if (JenaUtil.hasIndirectType(type.inModel(shapesModel), SH.Shape)) { + shapes.add(type); + } + for (Statement s : shapesModel.listStatements(null, SH.targetClass, type).toList()) { + shapes.add(s.getSubject()); + } + } + } + + return shapes; + } + + + public ValidationReport getValidationReport() { + return new ResourceValidationReport(report); + } + + + public Collection getValueNodes(Constraint constraint, RDFNode focusNode) { + if (constraint.getShape().isNodeShape()) { + return Collections.singletonList(focusNode); + } else { + // We use a cache here because many shapes contains for example both sh:datatype and sh:minCount, and fetching + // the value nodes each time may be expensive, esp for sh:minCount/maxCount constraints. 
+ ValueNodesCacheKey key = new ValueNodesCacheKey(focusNode, constraint.getShape().getPath()); + return valueNodes.computeIfAbsent(key, k -> getValueNodesHelper(focusNode, constraint)); + } + } + + + private Collection getValueNodesHelper(RDFNode focusNode, Constraint constraint) { + Property predicate = constraint.getShape().getPredicate(); + if (predicate != null) { + List results = new LinkedList<>(); + if (focusNode instanceof Resource) { + Iterator it = ((Resource) focusNode).listProperties(predicate); + while (it.hasNext()) { + results.add(it.next().getObject()); + } + } + return results; + } else { + Path jenaPath = constraint.getShape().getJenaPath(); + if (jenaPath instanceof P_Inverse && ((P_Inverse) jenaPath).getSubPath() instanceof P_Link) { + List results = new LinkedList<>(); + Property inversePredicate = ResourceFactory.createProperty(((P_Link) ((P_Inverse) jenaPath).getSubPath()).getNode().getURI()); + Iterator it = focusNode.getModel().listStatements(null, inversePredicate, focusNode); + while (it.hasNext()) { + results.add(it.next().getSubject()); + } + return results; + } + Set results = new HashSet<>(); + Iterator it = PathEval.eval(focusNode.getModel().getGraph(), focusNode.asNode(), jenaPath, Context.emptyContext()); + while (it.hasNext()) { + Node node = it.next(); + results.add(focusNode.getModel().asRDFNode(node)); + } + return results; + } + } + + + /** + * Validates a given list of focus nodes against a given Shape, and stops as soon + * as one validation result is reported. No results are recorded. + * + * @param focusNodes the nodes to validate + * @param shape the sh:Shape to validate against + * @return true if there were no validation results, false for violations + */ + public boolean nodesConformToShape(List focusNodes, Node shape) { + if (!shapesGraph.isIgnored(shape)) { + Resource oldReport = report; + report = JenaUtil.createMemoryModel().createResource(); + try { + Shape vs = shapesGraph.getShape(shape); + if (!vs.isDeactivated()) { + boolean nested = SHACLScriptEngineManager.get().begin(); + try { + for (Constraint constraint : vs.getConstraints()) { + validateNodesAgainstConstraint(focusNodes, constraint); + if (report.hasProperty(SH.result)) { + return false; + } + } + } finally { + SHACLScriptEngineManager.get().end(nested); + } + } + } finally { + this.report = oldReport; + } + } + return true; + } + + + public void setClassesCache(ClassesCache value) { + this.classesCache = value; + } + + + /** + * Sets a filter that can be used to skip certain focus node from validation. + * The filter must return true if the given candidate focus node shall be validated, + * and false to skip it. + * + * @param value the new filter + */ + public void setFocusNodeFilter(Predicate value) { + this.focusNodeFilter = value; + } + + + public void setLabelFunction(Function value) { + this.labelFunction = value; + } + + + public void updateConforms() { + boolean conforms = true; + StmtIterator it = report.listProperties(SH.result); + while (it.hasNext()) { + Statement s = it.next(); + if (s.getResource().hasProperty(RDF.type, SH.ValidationResult)) { + conforms = false; + it.close(); + break; + } + } + if (report.hasProperty(SH.conforms)) { + report.removeAll(SH.conforms); + } + report.addProperty(SH.conforms, conforms ? JenaDatatypes.TRUE : JenaDatatypes.FALSE); + } + + + /** + * Validates all target nodes against all of their shapes. + * To further narrow down which nodes to validate, use {@link #setFocusNodeFilter(Predicate)}. 
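/* Illustrative sketch, assuming dataset, shapesGraphURI and shapesGraph are built as in the
   W3CTestRunner above; imports from org.topbraid.shacl.validation, org.topbraid.shacl.vocabulary.SH
   and org.topbraid.jenax.util.JenaDatatypes are assumed, and checked exceptions are left unhandled. */
ValidationEngine engine = ValidationEngineFactory.get().create(dataset, shapesGraphURI, shapesGraph, null);
engine.setConfiguration(new ValidationEngineConfiguration().setValidateShapes(false));
engine.setFocusNodeFilter(RDFNode::isURIResource);   // example filter: skip blank-node focus nodes
Resource report = engine.validateAll();              // throws InterruptedException if a monitor cancels
boolean conforms = report.hasProperty(SH.conforms, JenaDatatypes.TRUE);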
+ * + * @return an instance of sh:ValidationReport in the results Model + * @throws InterruptedException if the monitor has canceled this + */ + public Resource validateAll() throws InterruptedException { + List rootShapes = shapesGraph.getRootShapes(); + return validateShapes(rootShapes); + } + + + /** + * Validates a given focus node against all of the shapes that have matching targets. + * + * @param focusNode the node to validate + * @return an instance of sh:ValidationReport in the results Model + * @throws InterruptedException if the monitor has canceled this + */ + public Resource validateNode(Node focusNode) throws InterruptedException { + + Model shapesModel = dataset.getNamedModel(shapesGraphURI.toString()); + + RDFNode focusRDFNode = dataset.getDefaultModel().asRDFNode(focusNode); + Set shapes = getShapesForNode(focusRDFNode, dataset, shapesModel); + boolean nested = SHACLScriptEngineManager.get().begin(); + try { + for (Resource shape : shapes) { + if (monitor != null && monitor.isCanceled()) { + throw new InterruptedException(); + } + validateNodesAgainstShape(Collections.singletonList(focusRDFNode), shape.asNode()); + } + } finally { + SHACLScriptEngineManager.get().end(nested); + } + + return report; + } + + + /** + * Validates a given list of focus node against a given Shape. + * + * @param focusNodes the nodes to validate + * @param shape the sh:Shape to validate against + * @return an instance of sh:ValidationReport in the results Model + */ + public Resource validateNodesAgainstShape(List focusNodes, Node shape) { + if (!shapesGraph.isIgnored(shape)) { + Shape vs = shapesGraph.getShape(shape); + if (!vs.isDeactivated()) { + boolean nested = SHACLScriptEngineManager.get().begin(); + ValidationEngine oldEngine = current.get(); + current.set(this); + try { + for (Constraint constraint : vs.getConstraints()) { + validateNodesAgainstConstraint(focusNodes, constraint); + } + } finally { + current.set(oldEngine); + SHACLScriptEngineManager.get().end(nested); + } + } + } + return report; + } + + + /** + * Validates all target nodes of a given collection of shapes against these shapes. + * To further narrow down which nodes to validate, use {@link #setFocusNodeFilter(Predicate)}. + * + * @return an instance of sh:ValidationReport in the results Model + * @throws InterruptedException if the monitor has canceled this + */ + public Resource validateShapes(Collection shapes) throws InterruptedException { + boolean nested = SHACLScriptEngineManager.get().begin(); + try { + if (monitor != null) { + monitor.beginTask("Validating " + shapes.size() + " shapes", shapes.size()); + } + if (classesCache == null) { + // If we are doing everything then the cache should be used, but not for validation of individual focus nodes + classesCache = new ClassesCache(); + } + int i = 0; + for (Shape shape : shapes) { + + if (monitor != null) { + String label = "Shape " + (++i) + ": " + getLabelFunction().apply(shape.getShapeResource()); + if (resultsCount > 0) { + label = "" + resultsCount + " results. 
" + label; + } + monitor.subTask(label); + } + + Collection focusNodes = shape.getTargetNodes(dataset); + if (focusNodeFilter != null) { + List filteredFocusNodes = new LinkedList<>(); + for (RDFNode focusNode : focusNodes) { + if (focusNodeFilter.test(focusNode)) { + filteredFocusNodes.add(focusNode); + } + } + focusNodes = filteredFocusNodes; + } + if (!focusNodes.isEmpty()) { + for (Constraint constraint : shape.getConstraints()) { + validateNodesAgainstConstraint(focusNodes, constraint); + } + } + if (monitor != null) { + monitor.worked(1); + if (monitor.isCanceled()) { + throw new InterruptedException(); + } + } + } + } catch (MaximumNumberViolations ex) { + // Ignore as this is just our way to stop validation when max number of violations is reached + } finally { + SHACLScriptEngineManager.get().end(nested); + } + updateConforms(); + return report; + } + + + protected void validateNodesAgainstConstraint(Collection focusNodes, Constraint constraint) { + if (configuration != null && configuration.isSkippedConstraintComponent(constraint.getComponent())) { + return; + } + + ConstraintExecutor executor; + try { + executor = constraint.getExecutor(); + } catch (Exception ex) { + Resource result = createResult(DASH.FailureResult, constraint, constraint.getShapeResource()); + result.addProperty(SH.resultMessage, "Failed to create validator: " + ExceptionUtil.getStackTrace(ex)); + return; + } + if (executor != null) { + if (SHACLPreferences.isProduceFailuresMode()) { + try { + executor.executeConstraint(constraint, this, focusNodes); + } catch (Exception ex) { + Resource result = createResult(DASH.FailureResult, constraint, constraint.getShapeResource()); + result.addProperty(SH.resultMessage, "Exception during validation: " + ExceptionUtil.getStackTrace(ex)); + } + } else { + executor.executeConstraint(constraint, this, focusNodes); + } + } else { + FailureLog.get().logWarning("No suitable validator found for constraint " + constraint); + } + } + + + public void setConfiguration(ValidationEngineConfiguration configuration) { + this.configuration = configuration; + if (!configuration.getValidateShapes()) { + shapesGraph.setShapeFilter(new ExcludeMetaShapesFilter()); + } + } + + + public void setProfile(ValidationProfile profile) { + this.profile = profile; + } + + + // Used to avoid repeated computation of value nodes for a focus node / path combination + private static class ValueNodesCacheKey { + + Resource path; + + RDFNode focusNode; + + + ValueNodesCacheKey(RDFNode focusNode, Resource path) { + this.path = path; + this.focusNode = focusNode; + } + + @Override + public boolean equals(Object o) { + if (o instanceof ValueNodesCacheKey) { + return path.equals(((ValueNodesCacheKey) o).path) && focusNode.equals(((ValueNodesCacheKey) o).focusNode); + } else { + return false; + } + } + + + @Override + public int hashCode() { + return path.hashCode() + focusNode.hashCode(); + } + + + @Override + public String toString() { + return focusNode.toString() + " . 
" + path; + } + } +} \ No newline at end of file diff --git a/src/main/java/org/topbraid/shacl/validation/ValidationEngineConfiguration.java b/src/main/java/org/topbraid/shacl/validation/ValidationEngineConfiguration.java index 1fa3fc2e..f3ed7f64 100644 --- a/src/main/java/org/topbraid/shacl/validation/ValidationEngineConfiguration.java +++ b/src/main/java/org/topbraid/shacl/validation/ValidationEngineConfiguration.java @@ -1,61 +1,65 @@ package org.topbraid.shacl.validation; +import org.apache.jena.rdf.model.Resource; + import java.util.HashSet; import java.util.Set; -import org.apache.jena.rdf.model.Resource; - /** * Configures the behavior of the validation engine. */ public class ValidationEngineConfiguration { - - // By default don't produce sh:detail - private boolean reportDetails = false; - - // By default validate all constraints - private Set skippedConstraintComponents = new HashSet<>(); - - // By default validate shapes - private boolean validateShapes = true; + + // By default don't produce sh:detail + private boolean reportDetails = false; + + // By default validate all constraints + private Set skippedConstraintComponents = new HashSet<>(); + + // By default validate shapes + private boolean validateShapes = true; // By default collect all possible errors private int validationErrorBatch = -1; - - + + /** * Creates a clone of this, with exactly the same values. */ + @Override public ValidationEngineConfiguration clone() { - ValidationEngineConfiguration c = new ValidationEngineConfiguration(); - c.reportDetails = this.reportDetails; - c.skippedConstraintComponents = new HashSet<>(this.skippedConstraintComponents); - c.validateShapes = this.validateShapes; - c.validationErrorBatch = this.validationErrorBatch; - return c; + ValidationEngineConfiguration c = new ValidationEngineConfiguration(); + c.reportDetails = this.reportDetails; + c.skippedConstraintComponents = new HashSet<>(this.skippedConstraintComponents); + c.validateShapes = this.validateShapes; + c.validationErrorBatch = this.validationErrorBatch; + return c; } - - + + /** * Checks whether the report shall include sh:detail triples (for sh:node etc). + * * @return true to report details (false is default) */ public boolean getReportDetails() { - return reportDetails; + return reportDetails; } - + /** * Specifies whether the report shall include sh:detail triples where supported. - * @param reportDetails true to produce sh:details, false for the default + * + * @param reportDetails true to produce sh:details, false for the default * @return current configuration after modification */ public ValidationEngineConfiguration setReportDetails(boolean reportDetails) { - this.reportDetails = reportDetails; - return this; + this.reportDetails = reportDetails; + return this; } - + /** * Maximum number of validations before returning the report. + * * @return number of validations or -1 to mean all validations */ public int getValidationErrorBatch() { @@ -64,6 +68,7 @@ public int getValidationErrorBatch() { /** * Set the maximum number of validations before returning the report. 
+ * * @param validationErrorBatch maximum number of validations or -1 for all validations * @return current configuration after modification */ @@ -74,14 +79,16 @@ public ValidationEngineConfiguration setValidationErrorBatch(int validationError /** * Should the engine validates shapes + * * @return boolean flag for shapes validation */ - public boolean getValidateShapes() { - return validateShapes; + public boolean getValidateShapes() { + return validateShapes; } /** * Sets an option for the engine to validate shapes + * * @param validateShapes boolean flat indicating if shapes must be validated * @return current configuration after modification */ @@ -89,17 +96,18 @@ public ValidationEngineConfiguration setValidateShapes(boolean validateShapes) { this.validateShapes = validateShapes; return this; } - + /** * Checks whether the engine should skip constraints of a given constraint component. - * @param component the constraint component + * + * @param component the constraint component * @return true if component shall be skipped */ public boolean isSkippedConstraintComponent(Resource component) { - return skippedConstraintComponents.contains(component); + return skippedConstraintComponents.contains(component); } - + public void addSkippedConstraintComponent(Resource component) { - skippedConstraintComponents.add(component); + skippedConstraintComponents.add(component); } } diff --git a/src/main/java/org/topbraid/shacl/validation/ValidationProfile.java b/src/main/java/org/topbraid/shacl/validation/ValidationProfile.java index e86c4e4b..c74414e8 100644 --- a/src/main/java/org/topbraid/shacl/validation/ValidationProfile.java +++ b/src/main/java/org/topbraid/shacl/validation/ValidationProfile.java @@ -1,90 +1,90 @@ package org.topbraid.shacl.validation; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; - import org.apache.jena.atlas.json.JsonArray; import org.apache.jena.atlas.json.JsonObject; import org.apache.jena.graph.Node; import org.topbraid.shacl.engine.Constraint; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + /** * Statistical data about execution time collected during validation. 
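/* Illustrative sketch combining the configuration setters above with the profile class below.
   The values are arbitrary examples, the engine is assumed from the earlier sketch, and constraint
   executors are assumed to record timings once a profile is attached; checked exceptions unhandled. */
ValidationEngineConfiguration config = new ValidationEngineConfiguration()
        .setReportDetails(true)           // include sh:detail triples where supported
        .setValidationErrorBatch(100)     // stop after 100 violations; -1 (the default) means unlimited
        .setValidateShapes(false);        // do not validate the shape declarations themselves
engine.setConfiguration(config);
ValidationProfile profile = new ValidationProfile();
engine.setProfile(profile);
engine.validateAll();                     // throws InterruptedException if a monitor cancels
System.out.println(profile.toJson());     // per-component and per-shape call counts, ms, node counts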
- * + * * @author Holger Knublauch */ public class ValidationProfile { - - private Map componentCounts = new ConcurrentHashMap<>(); - - private Map componentDurations = new ConcurrentHashMap<>(); - - private Map componentFNCs = new ConcurrentHashMap<>(); - - private Map componentVNCs = new ConcurrentHashMap<>(); - - private Map shapeCounts = new ConcurrentHashMap<>(); - - private Map shapeDurations = new ConcurrentHashMap<>(); - - private Map shapeFNCs = new ConcurrentHashMap<>(); - - private Map shapeVNCs = new ConcurrentHashMap<>(); - - - public void record(long duration, int focusNodeCount, long valueNodeCount, Constraint constraint) { - record(componentCounts, componentDurations, componentFNCs, componentVNCs, constraint.getComponent().asNode(), duration, focusNodeCount, valueNodeCount); - record(shapeCounts, shapeDurations, shapeFNCs, shapeVNCs, constraint.getShape().getShapeResource().asNode(), duration, focusNodeCount, valueNodeCount); - } - - - private void record(Map counts, Map durations, Map focusNodeCounts, Map valueNodeCounts, Node key, long duration, int focusNodeCount, long valueNodeCount) { - - Long totalDuration = durations.computeIfAbsent(key, n -> 0L); - totalDuration += duration; - durations.put(key, totalDuration); - - Long count = counts.computeIfAbsent(key, n -> 0L); - count++; - counts.put(key, count); - - Integer fnc = focusNodeCounts.computeIfAbsent(key, n -> 0); - fnc += focusNodeCount; - focusNodeCounts.put(key, fnc); - - Long vnc = valueNodeCounts.computeIfAbsent(key, n -> 0L); - vnc += valueNodeCount; - valueNodeCounts.put(key, vnc); - } - - - public JsonObject toJson() { - JsonObject result = new JsonObject(); - - JsonArray components = new JsonArray(); - for(Node component : componentCounts.keySet()) { - JsonObject o = new JsonObject(); - o.put("uri", component.isURI() ? component.getURI() : "_:" + component.getBlankNodeLabel()); - o.put("calls", componentCounts.get(component)); - o.put("ms", componentDurations.get(component)); - o.put("focusNodes", componentFNCs.get(component)); - o.put("valueNodes", componentVNCs.get(component)); - components.add(o); - } - result.put("components", components); - - JsonArray shapes = new JsonArray(); - for(Node shape : shapeCounts.keySet()) { - JsonObject o = new JsonObject(); - o.put("uri", shape.isURI() ? 
shape.getURI() : "_:" + shape.getBlankNodeLabel()); - o.put("calls", shapeCounts.get(shape)); - o.put("ms", shapeDurations.get(shape)); - o.put("focusNodes", shapeFNCs.get(shape)); - o.put("valueNodes", shapeVNCs.get(shape)); - shapes.add(o); - } - result.put("shapes", shapes); - - return result; - } + + private Map componentCounts = new ConcurrentHashMap<>(); + + private Map componentDurations = new ConcurrentHashMap<>(); + + private Map componentFNCs = new ConcurrentHashMap<>(); + + private Map componentVNCs = new ConcurrentHashMap<>(); + + private Map shapeCounts = new ConcurrentHashMap<>(); + + private Map shapeDurations = new ConcurrentHashMap<>(); + + private Map shapeFNCs = new ConcurrentHashMap<>(); + + private Map shapeVNCs = new ConcurrentHashMap<>(); + + + public void record(long duration, int focusNodeCount, long valueNodeCount, Constraint constraint) { + record(componentCounts, componentDurations, componentFNCs, componentVNCs, constraint.getComponent().asNode(), duration, focusNodeCount, valueNodeCount); + record(shapeCounts, shapeDurations, shapeFNCs, shapeVNCs, constraint.getShape().getShapeResource().asNode(), duration, focusNodeCount, valueNodeCount); + } + + + private void record(Map counts, Map durations, Map focusNodeCounts, Map valueNodeCounts, Node key, long duration, int focusNodeCount, long valueNodeCount) { + + long totalDuration = durations.computeIfAbsent(key, n -> 0L); + totalDuration += duration; + durations.put(key, totalDuration); + + Long count = counts.computeIfAbsent(key, n -> 0L); + count++; + counts.put(key, count); + + int fnc = focusNodeCounts.computeIfAbsent(key, n -> 0); + fnc += focusNodeCount; + focusNodeCounts.put(key, fnc); + + long vnc = valueNodeCounts.computeIfAbsent(key, n -> 0L); + vnc += valueNodeCount; + valueNodeCounts.put(key, vnc); + } + + + public JsonObject toJson() { + JsonObject result = new JsonObject(); + + JsonArray components = new JsonArray(); + for (Node component : componentCounts.keySet()) { + JsonObject o = new JsonObject(); + o.put("uri", component.isURI() ? component.getURI() : "_:" + component.getBlankNodeLabel()); + o.put("calls", componentCounts.get(component)); + o.put("ms", componentDurations.get(component)); + o.put("focusNodes", componentFNCs.get(component)); + o.put("valueNodes", componentVNCs.get(component)); + components.add(o); + } + result.put("components", components); + + JsonArray shapes = new JsonArray(); + for (Node shape : shapeCounts.keySet()) { + JsonObject o = new JsonObject(); + o.put("uri", shape.isURI() ? shape.getURI() : "_:" + shape.getBlankNodeLabel()); + o.put("calls", shapeCounts.get(shape)); + o.put("ms", shapeDurations.get(shape)); + o.put("focusNodes", shapeFNCs.get(shape)); + o.put("valueNodes", shapeVNCs.get(shape)); + shapes.add(o); + } + result.put("shapes", shapes); + + return result; + } } diff --git a/src/main/java/org/topbraid/shacl/validation/ValidationResult.java b/src/main/java/org/topbraid/shacl/validation/ValidationResult.java index 595cfa4e..7dcc4f4d 100644 --- a/src/main/java/org/topbraid/shacl/validation/ValidationResult.java +++ b/src/main/java/org/topbraid/shacl/validation/ValidationResult.java @@ -1,79 +1,73 @@ package org.topbraid.shacl.validation; -import java.util.Collection; -import java.util.List; - import org.apache.jena.rdf.model.Property; import org.apache.jena.rdf.model.RDFNode; import org.apache.jena.rdf.model.Resource; +import java.util.Collection; +import java.util.List; + /** * A validation result, as produced by the validation engine. 
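The reformatted ValidationProfile above aggregates, per constraint component and per shape, the number of calls, total milliseconds, and focus-node and value-node counts; toJson() then flattens those maps into a "components" and a "shapes" array whose entries carry uri, calls, ms, focusNodes and valueNodes. A hedged sketch of consuming that output, assuming the caller has already obtained a populated profile from a validation run (how the profile is obtained is outside this diff):

    import org.apache.jena.atlas.json.JsonObject;
    import org.apache.jena.atlas.json.JsonValue;

    public class ProfileDump {
        // Prints one line per constraint component recorded by ValidationProfile.toJson();
        // the keys mirror the puts in toJson() above.
        public static void print(JsonObject stats) {
            for (JsonValue entry : stats.get("components").getAsArray()) {
                JsonObject o = entry.getAsObject();
                System.out.println(o.get("uri") + ": " + o.get("calls") + " calls, "
                        + o.get("ms") + " ms, " + o.get("focusNodes") + " focus nodes");
            }
        }
    }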
- * + * * @author Holger Knublauch */ public interface ValidationResult { - /** - * See sh:focusNode. - * @return - */ - RDFNode getFocusNode(); - - /** - * Gets the human-readable message attached to the result. - * Note that validation results can have multiple messages in different languages, getMessages() might be - * a better choice. - * @return a message or null - */ - String getMessage(); + /** + * See sh:focusNode. + */ + RDFNode getFocusNode(); + + /** + * Gets the human-readable message attached to the result. + * Note that validation results can have multiple messages in different languages, getMessages() might be + * a better choice. + * + * @return a message or null + */ + String getMessage(); + + /** + * Gets the human-readable message attached to the result (see sh:resultMessage). + */ + Collection getMessages(); + + /** + * See sh:resultPath. + */ + Resource getPath(); + + /** + * Provides access to other RDF values that may exist for the result instance, for any given property. + * + * @param predicate the property to get the values of + * @return the values, often empty + */ + List getPropertyValues(Property predicate); + + /** + * See sh:resultSeverity. + */ + Resource getSeverity(); - /** - * Gets the human-readable message attached to the result (see sh:resultMessage). - * @return - */ - Collection getMessages(); + /** + * See sh:sourceConstraint. + */ + Resource getSourceConstraint(); - /** - * See sh:resultPath. - * @return - */ - Resource getPath(); + /** + * See sh:sourceConstraintComponent. + */ + Resource getSourceConstraintComponent(); - /** - * Provides access to other RDF values that may exist for the result instance, for any given property. - * @param predicate the property to get the values of - * @return the values, often empty - */ - List getPropertyValues(Property predicate); + /** + * See sh:sourceShape. + */ + Resource getSourceShape(); - /** - * See sh:resultSeverity. - * @return - */ - Resource getSeverity(); - - /** - * See sh:sourceConstraint. - * @return - */ - Resource getSourceConstraint(); - - /** - * See sh:sourceConstraintComponent. - * @return - */ - Resource getSourceConstraintComponent(); - - /** - * See sh:sourceShape. - * @return - */ - Resource getSourceShape(); - - /** - * See sh:value. - * @return - */ - RDFNode getValue(); + /** + * See sh:value. 
+ */ + RDFNode getValue(); } diff --git a/src/main/java/org/topbraid/shacl/validation/sparql/SPARQLSyntaxChecker.java b/src/main/java/org/topbraid/shacl/validation/sparql/SPARQLSyntaxChecker.java index e11791b1..0209dceb 100644 --- a/src/main/java/org/topbraid/shacl/validation/sparql/SPARQLSyntaxChecker.java +++ b/src/main/java/org/topbraid/shacl/validation/sparql/SPARQLSyntaxChecker.java @@ -16,34 +16,18 @@ */ package org.topbraid.shacl.validation.sparql; -import java.util.LinkedHashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Set; - import org.apache.jena.query.Query; import org.apache.jena.sparql.core.Var; import org.apache.jena.sparql.core.VarExprList; -import org.apache.jena.sparql.expr.Expr; -import org.apache.jena.sparql.expr.ExprAggregator; -import org.apache.jena.sparql.expr.ExprFunction; -import org.apache.jena.sparql.expr.ExprFunctionOp; -import org.apache.jena.sparql.expr.ExprNone; -import org.apache.jena.sparql.expr.ExprTripleTerm; -import org.apache.jena.sparql.expr.ExprVar; -import org.apache.jena.sparql.expr.ExprVisitorFunction; -import org.apache.jena.sparql.expr.NodeValue; -import org.apache.jena.sparql.syntax.ElementBind; -import org.apache.jena.sparql.syntax.ElementData; -import org.apache.jena.sparql.syntax.ElementFilter; -import org.apache.jena.sparql.syntax.ElementMinus; -import org.apache.jena.sparql.syntax.ElementService; -import org.apache.jena.sparql.syntax.ElementSubQuery; -import org.apache.jena.sparql.syntax.ElementVisitor; -import org.apache.jena.sparql.syntax.ElementVisitorBase; -import org.apache.jena.sparql.syntax.PatternVars; +import org.apache.jena.sparql.expr.*; +import org.apache.jena.sparql.syntax.*; import org.topbraid.shacl.vocabulary.SH; +import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + /** * Can be used to check for the violation of any of the syntax rules in Appendix A of the SHACL * spec, to prevent certain pre-binding scenarios. @@ -55,7 +39,7 @@ public class SPARQLSyntaxChecker { /** * Checks whether a given Query violates any of the syntax rules in Appendix A. 
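ValidationResult, whose cleanup ends just above, is a read-only view over a single entry of the validation report; each getter maps to one SHACL result property (sh:focusNode, sh:resultPath, sh:resultSeverity and so on). A small sketch of how the accessors might be used for logging, assuming the caller already holds result instances produced by the engine:

    import org.topbraid.shacl.validation.ValidationResult;

    public class ResultLogger {
        // Dumps the core fields exposed by the ValidationResult interface shown above.
        public static void log(ValidationResult result) {
            System.out.println("focus node: " + result.getFocusNode());
            System.out.println("path:       " + result.getPath());
            System.out.println("severity:   " + result.getSeverity());
            System.out.println("component:  " + result.getSourceConstraintComponent());
            System.out.println("shape:      " + result.getSourceShape());
            System.out.println("messages:   " + result.getMessages());
            System.out.println("value:      " + result.getValue());
        }
    }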
* - * @param query the Query to check + * @param query the Query to check * @param preBoundVars the potentially pre-bound variables * @return an List of error messages (empty if OK) */ @@ -142,16 +126,20 @@ public void visit(ExprFunctionOp funcOp) { } @Override - public void visit(NodeValue nv) {} + public void visit(NodeValue nv) { + } @Override - public void visit(ExprVar nv) {} + public void visit(ExprVar nv) { + } @Override - public void visit(ExprAggregator eAgg) {} + public void visit(ExprAggregator eAgg) { + } @Override - public void visit(ExprNone exprNone) {} + public void visit(ExprNone exprNone) { + } @Override protected void visitExprFunction(ExprFunction func) { @@ -161,7 +149,8 @@ protected void visitExprFunction(ExprFunction func) { } @Override - public void visit(ExprTripleTerm tripleTerm) {} + public void visit(ExprTripleTerm tripleTerm) { + } }); } }; diff --git a/src/main/java/org/topbraid/shacl/vocabulary/DASH.java b/src/main/java/org/topbraid/shacl/vocabulary/DASH.java index 93512958..895ce090 100644 --- a/src/main/java/org/topbraid/shacl/vocabulary/DASH.java +++ b/src/main/java/org/topbraid/shacl/vocabulary/DASH.java @@ -21,35 +21,35 @@ import org.apache.jena.rdf.model.ResourceFactory; /** - * Vocabulary for http://datashapes.org/dash + * Vocabulary for DASH */ public class DASH { public final static String BASE_URI = "http://datashapes.org/dash"; - + public final static String NAME = "DASH Data Shapes Vocabulary"; public final static String NS = BASE_URI + "#"; public final static String PREFIX = "dash"; - + public final static Resource Action = ResourceFactory.createResource(NS + "Action"); - + public final static Resource ActionGroup = ResourceFactory.createResource(NS + "ActionGroup"); - + public final static Resource ActionTestCase = ResourceFactory.createResource(NS + "ActionTestCase"); - + public final static Resource addedGraph = ResourceFactory.createResource(NS + "addedGraph"); - public final static Resource all = ResourceFactory.createResource(NS + "all"); + public final static Resource all = ResourceFactory.createResource(NS + "all"); public final static Resource ChangeScript = ResourceFactory.createResource(NS + "ChangeScript"); public final static Resource CommitScript = ResourceFactory.createResource(NS + "CommitScript"); public final static Resource Constructor = ResourceFactory.createResource(NS + "Constructor"); - + public final static Resource deletedGraph = ResourceFactory.createResource(NS + "deletedGraph"); public final static Resource DepictionRole = ResourceFactory.createResource(NS + "DepictionRole"); @@ -63,11 +63,11 @@ public class DASH { public final static Resource ExecutionPlatform = ResourceFactory.createResource(NS + "ExecutionPlatform"); public final static Resource Experimental = ResourceFactory.createResource(NS + "Experimental"); - + public final static Resource ExploreAction = ResourceFactory.createResource(NS + "ExploreAction"); public final static Resource FailureResult = ResourceFactory.createResource(NS + "FailureResult"); - + public final static Resource FailureTestCaseResult = ResourceFactory.createResource(NS + "FailureTestCaseResult"); public final static Resource FunctionTestCase = ResourceFactory.createResource(NS + "FunctionTestCase"); @@ -99,11 +99,11 @@ public class DASH { public final static Resource LabelRole = ResourceFactory.createResource(NS + "LabelRole"); public final static Resource ListShape = ResourceFactory.createResource(NS + "ListShape"); - + public final static Resource ModifyAction = 
ResourceFactory.createResource(NS + "ModifyAction"); - + public final static Resource MultiFunction = ResourceFactory.createResource(NS + "MultiFunction"); - + public final static Resource None = ResourceFactory.createResource(NS + "None"); public final static Resource NonRecursiveConstraintComponent = ResourceFactory.createResource(NS + "NonRecursiveConstraintComponent"); @@ -115,9 +115,9 @@ public class DASH { public final static Resource RDFQueryJSLibrary = ResourceFactory.createResource(NS + "RDFQueryJSLibrary"); public final static Resource ReifiableByConstraintComponent = ResourceFactory.createResource(NS + "ReifiableByConstraintComponent"); - + public final static Resource ResourceAction = ResourceFactory.createResource(NS + "ResourceAction"); - + public final static Resource ResourceService = ResourceFactory.createResource(NS + "ResourceService"); public final static Resource Script = ResourceFactory.createResource(NS + "Script"); @@ -141,7 +141,7 @@ public class DASH { public final static Resource ShapeScript = ResourceFactory.createResource(NS + "ShapeScript"); public final static Resource SPARQLConstructTemplate = ResourceFactory.createResource(NS + "SPARQLConstructTemplate"); - + public final static Resource SPARQLMultiFunction = ResourceFactory.createResource(NS + "SPARQLMultiFunction"); public final static Resource SPARQLSelectTemplate = ResourceFactory.createResource(NS + "SPARQLSelectTemplate"); @@ -153,7 +153,7 @@ public class DASH { public final static Resource Stable = ResourceFactory.createResource(NS + "Stable"); public final static Resource SuccessResult = ResourceFactory.createResource(NS + "SuccessResult"); - + public final static Resource SuccessTestCaseResult = ResourceFactory.createResource(NS + "SuccessTestCaseResult"); public final static Resource SuggestionResult = ResourceFactory.createResource(NS + "SuggestionResult"); @@ -176,39 +176,39 @@ public class DASH { public final static Property actualResult = ResourceFactory.createProperty(NS + "actualResult"); public final static Property actionGroup = ResourceFactory.createProperty(NS + "actionGroup"); - + public final static Property addedTriple = ResourceFactory.createProperty(NS + "addedTriple"); - + public final static Property apiStatus = ResourceFactory.createProperty(NS + "apiStatus"); - + public final static Property applicableToClass = ResourceFactory.createProperty(NS + "applicableToClass"); public final static Property cachable = ResourceFactory.createProperty(NS + "cachable"); - + public final static Property canWrite = ResourceFactory.createProperty(NS + "canWrite"); public final static Property composite = ResourceFactory.createProperty(NS + "composite"); public final static Property constructor = ResourceFactory.createProperty(NS + "constructor"); - + public final static Property contextFree = ResourceFactory.createProperty(NS + "contextFree"); public final static Property defaultViewForRole = ResourceFactory.createProperty(NS + "defaultViewForRole"); - + public final static Property deletedTriple = ResourceFactory.createProperty(NS + "deletedTriple"); - + public final static Property dependencyPredicate = ResourceFactory.createProperty(NS + "dependencyPredicate"); - + public final static Property detailsEndpoint = ResourceFactory.createProperty(NS + "detailsEndpoint"); - + public final static Property detailsGraph = ResourceFactory.createProperty(NS + "detailsGraph"); - + public final static Property editor = ResourceFactory.createProperty(NS + "editor"); - + public final static Property 
expectedResult = ResourceFactory.createProperty(NS + "expectedResult"); - + public final static Property expectedResultIsJSON = ResourceFactory.createProperty(NS + "expectedResultIsJSON"); - + public final static Property expectedResultIsTTL = ResourceFactory.createProperty(NS + "expectedResultIsTTL"); public final static Property exports = ResourceFactory.createProperty(NS + "exports"); @@ -224,13 +224,13 @@ public class DASH { public final static Property generatePrefixConstants = ResourceFactory.createProperty(NS + "generatePrefixConstants"); public final static Property hidden = ResourceFactory.createProperty(NS + "hidden"); - + public final static Property includedExecutionPlatform = ResourceFactory.createProperty(NS + "includedExecutionPlatform"); public final static Property includeSuggestions = ResourceFactory.createProperty(NS + "includeSuggestions"); - + public final static Property index = ResourceFactory.createProperty(NS + "index"); - + public final static Property indexed = ResourceFactory.createProperty(NS + "indexed"); public final static Property js = ResourceFactory.createProperty(NS + "js"); @@ -240,17 +240,17 @@ public class DASH { public final static Property mimeTypes = ResourceFactory.createProperty(NS + "mimeTypes"); public final static Property neverMaterialize = ResourceFactory.createProperty(NS + "neverMaterialize"); - + public final static Property node = ResourceFactory.createProperty(NS + "node"); public final static Property onAllValues = ResourceFactory.createProperty(NS + "onAllValues"); - + public final static Property private_ = ResourceFactory.createProperty(NS + "private"); - + public final static Property propertyRole = ResourceFactory.createProperty(NS + "propertyRole"); - + public final static Property propertySuggestionGenerator = ResourceFactory.createProperty(NS + "propertySuggestionGenerator"); - + public final static Property requiredExecutionPlatform = ResourceFactory.createProperty(NS + "requiredExecutionPlatform"); public final static Property resultVariable = ResourceFactory.createProperty(NS + "resultVariable"); @@ -258,11 +258,11 @@ public class DASH { public final static Property rootClass = ResourceFactory.createProperty(NS + "rootClass"); public final static Property readOnly = ResourceFactory.createProperty(NS + "readOnly"); - + public final static Property reifiableBy = ResourceFactory.createProperty(NS + "reifiableBy"); - + public final static Property resourceAction = ResourceFactory.createProperty(NS + "resourceAction"); - + public final static Property resourceService = ResourceFactory.createProperty(NS + "resourceService"); public final static Property responseContentType = ResourceFactory.createProperty(NS + "responseContentType"); @@ -272,48 +272,49 @@ public class DASH { public final static Property shape = ResourceFactory.createProperty(NS + "shape"); public final static Property shapeScript = ResourceFactory.createProperty(NS + "shapeScript"); - + public final static Property singleLine = ResourceFactory.createProperty(NS + "singleLine"); - + public final static Property suggestion = ResourceFactory.createProperty(NS + "suggestion"); - + public final static Property suggestionConfidence = ResourceFactory.createProperty(NS + "suggestionConfidence"); - + public final static Property suggestionGenerator = ResourceFactory.createProperty(NS + "suggestionGenerator"); - + public final static Property suggestionGroup = ResourceFactory.createProperty(NS + "suggestionGroup"); public final static Property testCase = 
ResourceFactory.createProperty(NS + "testCase"); public final static Property testGraph = ResourceFactory.createProperty(NS + "testGraph"); - + public final static Property uri = ResourceFactory.createProperty(NS + "uri"); - + public final static Property uriStart = ResourceFactory.createProperty(NS + "uriStart"); - + public final static Property validateShapes = ResourceFactory.createProperty(NS + "validateShapes"); public final static Property variables = ResourceFactory.createProperty(NS + "variables"); - + public final static Property viewer = ResourceFactory.createProperty(NS + "viewer"); - + public final static Property x = ResourceFactory.createProperty(NS + "x"); - + public final static Property y = ResourceFactory.createProperty(NS + "y"); public static String getURI() { return NS; } - - + + /** * Checks whether a given feature shall be included into the generated APIs. - * @param feature the feature to check + * + * @param feature the feature to check * @return true currently if the feature has any value for dash:apiStatus but this may change in case we introduce - * additional stati in the future. + * additional stati in the future. */ public static boolean isAPI(Resource feature) { - return feature.hasProperty(DASH.apiStatus); + return feature.hasProperty(DASH.apiStatus); } } diff --git a/src/main/java/org/topbraid/shacl/vocabulary/SH.java b/src/main/java/org/topbraid/shacl/vocabulary/SH.java index 310f957b..868840d1 100644 --- a/src/main/java/org/topbraid/shacl/vocabulary/SH.java +++ b/src/main/java/org/topbraid/shacl/vocabulary/SH.java @@ -22,14 +22,14 @@ import org.apache.jena.sparql.core.Var; /** - * Vocabulary for http://www.w3.org/ns/shacl# - * + * Vocabulary for SHACL + * * @author Holger Knublauch */ public class SH { public final static String BASE_URI = "http://www.w3.org/ns/shacl#"; - + public final static String NAME = "SHACL"; public final static String NS = BASE_URI; @@ -50,7 +50,7 @@ public class SH { public final static Resource ClassConstraintComponent = ResourceFactory.createResource(NS + "ClassConstraintComponent"); public final static Resource ClosedConstraintComponent = ResourceFactory.createResource(NS + "ClosedConstraintComponent"); - + public final static Resource Constraint = ResourceFactory.createResource(NS + "Constraint"); public final static Resource ConstraintComponent = ResourceFactory.createResource(NS + "ConstraintComponent"); @@ -104,7 +104,7 @@ public class SH { public final static Resource NotConstraintComponent = ResourceFactory.createResource(NS + "NotConstraintComponent"); public final static Resource OrConstraintComponent = ResourceFactory.createResource(NS + "OrConstraintComponent"); - + public final static Resource Parameter = ResourceFactory.createResource(NS + "Parameter"); public final static Resource Parameterizable = ResourceFactory.createResource(NS + "Parameterizable"); @@ -126,31 +126,31 @@ public class SH { public final static Resource ResultAnnotation = ResourceFactory.createResource(NS + "ResultAnnotation"); public final static Resource Shape = ResourceFactory.createResource(NS + "Shape"); - + public final static Resource SPARQLAskValidator = ResourceFactory.createResource(NS + "SPARQLAskValidator"); - + public final static Resource SPARQLConstraint = ResourceFactory.createResource(NS + "SPARQLConstraint"); - + public final static Resource SPARQLConstraintComponent = ResourceFactory.createResource(NS + "SPARQLConstraintComponent"); - + public final static Resource SPARQLConstructRule = ResourceFactory.createResource(NS + 
"SPARQLConstructRule"); public final static Resource SPARQLExecutable = ResourceFactory.createResource(NS + "SPARQLExecutable"); public final static Resource SPARQLFunction = ResourceFactory.createResource(NS + "SPARQLFunction"); - + public final static Resource SPARQLSelectValidator = ResourceFactory.createResource(NS + "SPARQLSelectValidator"); - + public final static Resource SPARQLTarget = ResourceFactory.createResource(NS + "SPARQLTarget"); - + public final static Resource SPARQLValuesDeriver = ResourceFactory.createResource(NS + "SPARQLValuesDeriver"); public final static Resource UniqueLangConstraintComponent = ResourceFactory.createResource(NS + "UniqueLangConstraintComponent"); - + public final static Resource ValidationReport = ResourceFactory.createResource(NS + "ValidationReport"); - + public final static Resource ValidationResult = ResourceFactory.createResource(NS + "ValidationResult"); - + public final static Resource Validator = ResourceFactory.createResource(NS + "Validator"); public final static Resource Violation = ResourceFactory.createResource(NS + "Violation"); @@ -161,13 +161,13 @@ public class SH { public final static Property alternativePath = ResourceFactory.createProperty(NS + "alternativePath"); - + public final static Property and = ResourceFactory.createProperty(NS + "and"); public final static Property ask = ResourceFactory.createProperty(NS + "ask"); public final static Property class_ = ResourceFactory.createProperty(NS + "class"); - + public final static Property closed = ResourceFactory.createProperty(NS + "closed"); public final static Property condition = ResourceFactory.createProperty(NS + "condition"); @@ -177,7 +177,7 @@ public class SH { public final static Property construct = ResourceFactory.createProperty(NS + "construct"); public final static Property datatype = ResourceFactory.createProperty(NS + "datatype"); - + public final static Property deactivated = ResourceFactory.createProperty(NS + "deactivated"); public final static Property declare = ResourceFactory.createProperty(NS + "declare"); @@ -197,13 +197,13 @@ public class SH { public final static Property flags = ResourceFactory.createProperty(NS + "flags"); public final static Property focusNode = ResourceFactory.createProperty(NS + "focusNode"); - + public final static Property group = ResourceFactory.createProperty(NS + "group"); public final static Property hasValue = ResourceFactory.createProperty(NS + "hasValue"); - + public final static Property ignoredProperties = ResourceFactory.createProperty(NS + "ignoredProperties"); - + public final static Property in = ResourceFactory.createProperty(NS + "in"); public final static Property inversePath = ResourceFactory.createProperty(NS + "inversePath"); @@ -237,13 +237,13 @@ public class SH { public final static Property name = ResourceFactory.createProperty(NS + "name"); public final static Property namespace = ResourceFactory.createProperty(NS + "namespace"); - + public final static Property node = ResourceFactory.createProperty(NS + "node"); public final static Property nodeKind = ResourceFactory.createProperty(NS + "nodeKind"); public final static Property nodeValidator = ResourceFactory.createProperty(NS + "nodeValidator"); - + public final static Property not = ResourceFactory.createProperty(NS + "not"); public final static Property oneOrMorePath = ResourceFactory.createProperty(NS + "oneOrMorePath"); @@ -291,19 +291,19 @@ public class SH { public final static Property shapesGraph = ResourceFactory.createProperty(NS + "shapesGraph"); 
public final static Property sourceConstraint = ResourceFactory.createProperty(NS + "sourceConstraint"); - + public final static Property sourceConstraintComponent = ResourceFactory.createProperty(NS + "sourceConstraintComponent"); public final static Property sourceShape = ResourceFactory.createProperty(NS + "sourceShape"); public final static Property sparql = ResourceFactory.createProperty(NS + "sparql"); - + public final static Property targetClass = ResourceFactory.createProperty(NS + "targetClass"); public final static Property targetNode = ResourceFactory.createProperty(NS + "targetNode"); - + public final static Property targetObjectsOf = ResourceFactory.createProperty(NS + "targetObjectsOf"); - + public final static Property targetSubjectsOf = ResourceFactory.createProperty(NS + "targetSubjectsOf"); public final static Property uniqueLang = ResourceFactory.createProperty(NS + "uniqueLang"); @@ -317,8 +317,8 @@ public class SH { public final static Property zeroOrMorePath = ResourceFactory.createProperty(NS + "zeroOrMorePath"); public final static Property zeroOrOnePath = ResourceFactory.createProperty(NS + "zeroOrOnePath"); - - + + // Advanced features public final static Resource ExpressionConstraintComponent = ResourceFactory.createResource(NS + "ExpressionConstraintComponent"); @@ -334,11 +334,11 @@ public class SH { public final static Resource JSFunction = ResourceFactory.createResource(NS + "JSFunction"); public final static Resource JSLibrary = ResourceFactory.createResource(NS + "JSLibrary"); - + public final static Resource JSRule = ResourceFactory.createResource(NS + "JSRule"); - + public final static Resource JSTarget = ResourceFactory.createResource(NS + "JSTarget"); - + public final static Resource JSTargetType = ResourceFactory.createResource(NS + "JSTargetType"); public final static Resource JSValidator = ResourceFactory.createResource(NS + "JSValidator"); @@ -346,24 +346,24 @@ public class SH { public final static Resource Rule = ResourceFactory.createResource(NS + "Rule"); public final static Resource Rules = ResourceFactory.createResource(NS + "Rules"); - + public final static Resource SPARQLRule = ResourceFactory.createResource(NS + "SPARQLRule"); - + public final static Resource Target = ResourceFactory.createResource(NS + "Target"); - + public final static Resource this_ = ResourceFactory.createResource(NS + "this"); - + public final static Resource TripleRule = ResourceFactory.createResource(NS + "TripleRule"); - - + + public final static Property expression = ResourceFactory.createProperty(NS + "expression"); - + public final static Property filterShape = ResourceFactory.createProperty(NS + "filterShape"); - + public final static Property intersection = ResourceFactory.createProperty(NS + "intersection"); - + public final static Property js = ResourceFactory.createProperty(NS + "js"); - + public final static Property jsFunctionName = ResourceFactory.createProperty(NS + "jsFunctionName"); public final static Property jsLibrary = ResourceFactory.createProperty(NS + "jsLibrary"); @@ -371,79 +371,78 @@ public class SH { public final static Property jsLibraryURL = ResourceFactory.createProperty(NS + "jsLibraryURL"); public final static Property member = ResourceFactory.createProperty(NS + "member"); - + public final static Property nodes = ResourceFactory.createProperty(NS + "nodes"); - + public final static Property object = ResourceFactory.createProperty(NS + "object"); - + public final static Property predicate = ResourceFactory.createProperty(NS + 
"predicate"); - + public final static Property returnType = ResourceFactory.createProperty(NS + "returnType"); - + public final static Property rule = ResourceFactory.createProperty(NS + "rule"); - + public final static Property subject = ResourceFactory.createProperty(NS + "subject"); - + public final static Property target = ResourceFactory.createProperty(NS + "target"); - + public final static Property union = ResourceFactory.createProperty(NS + "union"); - + // Features not in SHACL 1.0 but candidates for next release - - public final static Property count = ResourceFactory.createProperty(NS + "count"); - - public final static Property desc = ResourceFactory.createProperty(NS + "desc"); - - public final static Property distinct = ResourceFactory.createProperty(NS + "distinct"); + + public final static Property count = ResourceFactory.createProperty(NS + "count"); + + public final static Property desc = ResourceFactory.createProperty(NS + "desc"); + + public final static Property distinct = ResourceFactory.createProperty(NS + "distinct"); public final static Property else_ = ResourceFactory.createProperty(NS + "else"); public final static Property exists = ResourceFactory.createProperty(NS + "exists"); - - public final static Property groupConcat = ResourceFactory.createProperty(NS + "groupConcat"); + + public final static Property groupConcat = ResourceFactory.createProperty(NS + "groupConcat"); public final static Property if_ = ResourceFactory.createProperty(NS + "if"); - - public final static Property limit = ResourceFactory.createProperty(NS + "limit"); - - public final static Property max = ResourceFactory.createProperty(NS + "max"); - - public final static Property min = ResourceFactory.createProperty(NS + "min"); - - public final static Property minus = ResourceFactory.createProperty(NS + "minus"); - - public final static Property offset = ResourceFactory.createProperty(NS + "offset"); - - public final static Property orderBy = ResourceFactory.createProperty(NS + "orderBy"); - - public final static Property separator = ResourceFactory.createProperty(NS + "separator"); - - public final static Property sum = ResourceFactory.createProperty(NS + "sum"); + + public final static Property limit = ResourceFactory.createProperty(NS + "limit"); + + public final static Property max = ResourceFactory.createProperty(NS + "max"); + + public final static Property min = ResourceFactory.createProperty(NS + "min"); + + public final static Property minus = ResourceFactory.createProperty(NS + "minus"); + + public final static Property offset = ResourceFactory.createProperty(NS + "offset"); + + public final static Property orderBy = ResourceFactory.createProperty(NS + "orderBy"); + + public final static Property separator = ResourceFactory.createProperty(NS + "separator"); + + public final static Property sum = ResourceFactory.createProperty(NS + "sum"); public final static Property then = ResourceFactory.createProperty(NS + "then"); - - public final static Property values = ResourceFactory.createProperty(NS + "values"); + + public final static Property values = ResourceFactory.createProperty(NS + "values"); - - public static final Var currentShapeVar = Var.alloc("currentShape"); + public static final Var currentShapeVar = Var.alloc("currentShape"); - public static final Var failureVar = Var.alloc("failure"); + public static final Var failureVar = Var.alloc("failure"); - public static final Var PATHVar = Var.alloc("PATH"); + public static final Var PATHVar = Var.alloc("PATH"); - public static final 
Var pathVar = Var.alloc(path.getLocalName()); + public static final Var pathVar = Var.alloc(path.getLocalName()); + + public static final Var shapesGraphVar = Var.alloc("shapesGraph"); + + public static final Var thisVar = Var.alloc("this"); + + public static final Var valueVar = Var.alloc("value"); - public static final Var shapesGraphVar = Var.alloc("shapesGraph"); - - public static final Var thisVar = Var.alloc("this"); - - public static final Var valueVar = Var.alloc("value"); - public final static String JS_DATA_VAR = "$data"; - + public final static String JS_SHAPES_VAR = "$shapes"; diff --git a/src/main/java/org/topbraid/shacl/vocabulary/SPARQL.java b/src/main/java/org/topbraid/shacl/vocabulary/SPARQL.java index 9085f783..39e9810a 100644 --- a/src/main/java/org/topbraid/shacl/vocabulary/SPARQL.java +++ b/src/main/java/org/topbraid/shacl/vocabulary/SPARQL.java @@ -5,8 +5,8 @@ import org.apache.jena.rdf.model.ResourceFactory; /** - * Vocabulary for http://datashapes.org/sparql - * + * Vocabulary for Datashapes SPARQL + *

* Automatically generated with TopBraid Composer. */ public class SPARQL { diff --git a/src/main/java/org/topbraid/shacl/vocabulary/TOSH.java b/src/main/java/org/topbraid/shacl/vocabulary/TOSH.java index ec284824..3523fdf8 100644 --- a/src/main/java/org/topbraid/shacl/vocabulary/TOSH.java +++ b/src/main/java/org/topbraid/shacl/vocabulary/TOSH.java @@ -21,12 +21,12 @@ import org.apache.jena.rdf.model.ResourceFactory; /** - * Vocabulary for http://topbraid.org/tosh + * Vocabulary for TOSH */ public class TOSH { public final static String BASE_URI = "http://topbraid.org/tosh"; - + public final static String NAME = "TopBraid Data Shapes Vocabulary"; public final static String NS = BASE_URI + "#"; @@ -37,15 +37,15 @@ public class TOSH { public final static Resource count = ResourceFactory.createResource(NS + "count"); public final static Resource DatatypePropertyShapeView = ResourceFactory.createResource(NS + "DatatypePropertyShapeView"); - + public final static Resource DeleteTripleSuggestionGenerator = ResourceFactory.createResource(NS + "DeleteTripleSuggestionGenerator"); public final static Resource NodeProcessor = ResourceFactory.createResource(NS + "NodeProcessor"); public final static Resource NodeShapeConstraintsShape = ResourceFactory.createResource(NS + "NodeShapeConstraintsShape"); - + public final static Resource ObjectPropertyShapeView = ResourceFactory.createResource(NS + "ObjectPropertyShapeView"); - + public final static Resource ResultsGenerators = ResourceFactory.createResource(NS + "ResultsGenerators"); public final static Resource TeamworkPlatform = ResourceFactory.createResource(NS + "TeamworkPlatform"); @@ -53,27 +53,27 @@ public class TOSH { public final static Resource TopBraidPlatform = ResourceFactory.createResource(NS + "TopBraidPlatform"); public final static Resource evalExpr = ResourceFactory.createResource(NS + "evalExpr"); - + public final static Resource hasShape = ResourceFactory.createResource(NS + "hasShape"); - + public final static Resource isInTargetOf = ResourceFactory.createResource(NS + "isInTargetOf"); - + public final static Resource targetContains = ResourceFactory.createResource(NS + "targetContains"); public final static Resource values = ResourceFactory.createResource(NS + "values"); - + public final static Property closed = ResourceFactory.createProperty(NS + "closed"); - + public final static Property editGroupDescription = ResourceFactory.createProperty(NS + "editGroupDescription"); - + public final static Property javaMethod = ResourceFactory.createProperty(NS + "javaMethod"); public final static Property useDeclaredDatatype = ResourceFactory.createProperty(NS + "useDeclaredDatatype"); // Note this property may be deleted in future versions public final static Property viewGadget = ResourceFactory.createProperty(NS + "viewGadget"); - + public final static Property viewGroupDescription = ResourceFactory.createProperty(NS + "viewGroupDescription"); public final static Property viewWidget = ResourceFactory.createProperty(NS + "viewWidget");
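The remaining files touched above (DASH, SH, SPARQL, TOSH) are pure vocabulary classes: constants wrapping the namespace URIs so callers never have to spell out the strings. A minimal sketch of using the SH constants from this diff to assemble a tiny shapes graph with Jena; the example URIs are hypothetical, and sh:Shape is used as the type only because the more usual sh:NodeShape constant does not appear in this hunk:

    import org.apache.jena.rdf.model.Model;
    import org.apache.jena.rdf.model.ModelFactory;
    import org.apache.jena.rdf.model.Resource;
    import org.topbraid.shacl.vocabulary.SH;

    public class ShapeSketch {
        public static void main(String[] args) {
            Model shapes = ModelFactory.createDefaultModel();
            shapes.setNsPrefix("sh", SH.NS);
            // ex:PersonShape a sh:Shape ; sh:targetClass ex:Person .
            Resource personShape = shapes.createResource("http://example.org/PersonShape", SH.Shape);
            personShape.addProperty(SH.targetClass, shapes.createResource("http://example.org/Person"));
            shapes.write(System.out, "TURTLE");
        }
    }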