http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/81b99327/sail/src/main/java/org/apache/rya/rdftriplestore/inference/InferenceEngine.java ---------------------------------------------------------------------- diff --git a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/InferenceEngine.java b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/InferenceEngine.java index 86359f0..8d6feb6 100644 --- a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/InferenceEngine.java +++ b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/InferenceEngine.java @@ -52,24 +52,22 @@ import org.apache.tinkerpop.gremlin.structure.Graph; import org.apache.tinkerpop.gremlin.structure.T; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerGraph; -import org.openrdf.model.Resource; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.Value; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.StatementImpl; -import org.openrdf.model.impl.ValueFactoryImpl; -import org.openrdf.model.vocabulary.OWL; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.model.vocabulary.RDFS; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.rio.RDFHandlerException; -import org.openrdf.rio.helpers.RDFHandlerBase; +import org.eclipse.rdf4j.common.iteration.CloseableIteration; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Resource; +import org.eclipse.rdf4j.model.Statement; +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.model.ValueFactory; +import org.eclipse.rdf4j.model.impl.SimpleValueFactory; +import org.eclipse.rdf4j.model.vocabulary.OWL; +import org.eclipse.rdf4j.model.vocabulary.RDF; +import org.eclipse.rdf4j.model.vocabulary.RDFS; +import org.eclipse.rdf4j.query.QueryEvaluationException; +import org.eclipse.rdf4j.rio.RDFHandlerException; +import org.eclipse.rdf4j.rio.helpers.AbstractRDFHandler; import com.google.common.collect.Sets; -import info.aduna.iteration.CloseableIteration; - /** * Will pull down inference relationships from dao every x seconds. <br> * Will infer extra relationships. 
<br> @@ -77,9 +75,9 @@ import info.aduna.iteration.CloseableIteration; */ public class InferenceEngine { private static final Logger log = Logger.getLogger(InferenceEngine.class); - private static final ValueFactory VF = ValueFactoryImpl.getInstance(); - private static final URI HAS_SELF = VF.createURI(OWL.NAMESPACE, "hasSelf"); - private static final URI REFLEXIVE_PROPERTY = VF.createURI(OWL.NAMESPACE, "ReflexiveProperty"); + private static final ValueFactory VF = SimpleValueFactory.getInstance(); + private static final IRI HAS_SELF = VF.createIRI(OWL.NAMESPACE, "hasSelf"); + private static final IRI REFLEXIVE_PROPERTY = VF.createIRI(OWL.NAMESPACE, "ReflexiveProperty"); public static final String URI_PROP = "uri"; private final ReentrantLock refreshLock = new ReentrantLock(); @@ -87,22 +85,22 @@ public class InferenceEngine { private final AtomicReference<Graph> subClassOfGraph = new AtomicReference<>(); private final AtomicReference<Graph> subPropertyOfGraph = new AtomicReference<>(); - private final Set<URI> symmetricPropertySet = ConcurrentHashMap.newKeySet();; - private final Map<URI, URI> inverseOfMap = new ConcurrentHashMap<>(); - private final Set<URI> transitivePropertySet = ConcurrentHashMap.newKeySet();; - private final Set<URI> reflexivePropertySet = ConcurrentHashMap.newKeySet();; - private final Map<URI, Set<URI>> domainByType = new ConcurrentHashMap<>(); - private final Map<URI, Set<URI>> rangeByType = new ConcurrentHashMap<>(); - private final Map<Resource, Map<URI, Value>> hasValueByType = new ConcurrentHashMap<>(); - private final Map<URI, Map<Resource, Value>> hasValueByProperty = new ConcurrentHashMap<>(); - private final Map<Resource, Map<Resource, URI>> someValuesFromByRestrictionType = new ConcurrentHashMap<>(); - private final Map<Resource, Map<Resource, URI>> allValuesFromByValueType = new ConcurrentHashMap<>(); + private final Set<IRI> symmetricPropertySet = ConcurrentHashMap.newKeySet();; + private final Map<IRI, IRI> inverseOfMap = new ConcurrentHashMap<>(); + private final Set<IRI> transitivePropertySet = ConcurrentHashMap.newKeySet();; + private final Set<IRI> reflexivePropertySet = ConcurrentHashMap.newKeySet();; + private final Map<IRI, Set<IRI>> domainByType = new ConcurrentHashMap<>(); + private final Map<IRI, Set<IRI>> rangeByType = new ConcurrentHashMap<>(); + private final Map<Resource, Map<IRI, Value>> hasValueByType = new ConcurrentHashMap<>(); + private final Map<IRI, Map<Resource, Value>> hasValueByProperty = new ConcurrentHashMap<>(); + private final Map<Resource, Map<Resource, IRI>> someValuesFromByRestrictionType = new ConcurrentHashMap<>(); + private final Map<Resource, Map<Resource, IRI>> allValuesFromByValueType = new ConcurrentHashMap<>(); private final Map<Resource, List<Set<Resource>>> intersections = new ConcurrentHashMap<>(); private final Map<Resource, Set<Resource>> enumerations = new ConcurrentHashMap<>(); - private final Map<URI, List<URI>> propertyChainPropertyToChain = new ConcurrentHashMap<>(); + private final Map<IRI, List<IRI>> propertyChainPropertyToChain = new ConcurrentHashMap<>(); // hasSelf maps. 
- private final Map<URI, Set<Resource>> hasSelfByProperty = new ConcurrentHashMap<>(); - private final Map<Resource, Set<URI>> hasSelfByType = new ConcurrentHashMap<>(); + private final Map<IRI, Set<Resource>> hasSelfByProperty = new ConcurrentHashMap<>(); + private final Map<Resource, Set<IRI>> hasSelfByType = new ConcurrentHashMap<>(); private RyaDAO<?> ryaDAO; private RdfCloudTripleStoreConfiguration conf; @@ -207,13 +205,13 @@ public class InferenceEngine { * to have few members, such as ontology vocabulary terms, as instances will be collected in * memory. */ - private Set<URI> fetchInstances(final URI type) throws QueryEvaluationException { - final Set<URI> instances = new HashSet<>(); - ryaDaoQueryWrapper.queryAll(null, RDF.TYPE, type, new RDFHandlerBase() { + private Set<IRI> fetchInstances(final IRI type) throws QueryEvaluationException { + final Set<IRI> instances = new HashSet<>(); + ryaDaoQueryWrapper.queryAll(null, RDF.TYPE, type, new AbstractRDFHandler() { @Override public void handleStatement(final Statement st) throws RDFHandlerException { - if (st.getSubject() instanceof URI) { - instances.add((URI) st.getSubject()); + if (st.getSubject() instanceof IRI) { + instances.add((IRI) st.getSubject()); } } }); @@ -231,7 +229,7 @@ public class InferenceEngine { * @param edgeName Label that will be given to all added edges * @throws QueryEvaluationException */ - private void addPredicateEdges(final URI predicate, final Direction dir, final Graph graph, final String edgeName) + private void addPredicateEdges(final IRI predicate, final Direction dir, final Graph graph, final String edgeName) throws QueryEvaluationException { final CloseableIteration<Statement, QueryEvaluationException> iter = RyaDAOHelper.query(ryaDAO, null, predicate, null, conf); @@ -242,7 +240,7 @@ public class InferenceEngine { addStatementEdge(graph, edgeName, st); } if (Direction.IN.equals(dir) || Direction.BOTH.equals(dir)) { - addStatementEdge(graph, edgeName, new StatementImpl((Resource) st.getObject(), + addStatementEdge(graph, edgeName, VF.createStatement((Resource) st.getObject(), st.getPredicate(), st.getSubject())); } } @@ -309,12 +307,12 @@ public class InferenceEngine { private void refreshInverseOf() throws QueryEvaluationException { final CloseableIteration<Statement, QueryEvaluationException> iter = RyaDAOHelper.query(ryaDAO, null, OWL.INVERSEOF, null, conf); - final Map<URI, URI> invProp = new HashMap<>(); + final Map<IRI, IRI> invProp = new HashMap<>(); try { while (iter.hasNext()) { final Statement st = iter.next(); - invProp.put((URI) st.getSubject(), (URI) st.getObject()); - invProp.put((URI) st.getObject(), (URI) st.getSubject()); + invProp.put((IRI) st.getSubject(), (IRI) st.getObject()); + invProp.put((IRI) st.getObject(), (IRI) st.getSubject()); } } finally { if (iter != null) { @@ -329,14 +327,14 @@ public class InferenceEngine { private void refreshPropertyChainPropertyToChain() throws QueryEvaluationException { CloseableIteration<Statement, QueryEvaluationException> iter = RyaDAOHelper.query(ryaDAO, null, - VF.createURI("http://www.w3.org/2002/07/owl#propertyChainAxiom"), + VF.createIRI("http://www.w3.org/2002/07/owl#propertyChainAxiom"), null, conf); - final Map<URI,URI> propertyChainPropertiesToBNodes = new HashMap<>(); - final Map<URI, List<URI>> tempPropertyChainPropertyToChain = new HashMap<>(); + final Map<IRI, IRI> propertyChainPropertiesToBNodes = new HashMap<>(); + final Map<IRI, List<IRI>> tempPropertyChainPropertyToChain = new HashMap<>(); try { while (iter.hasNext()){ final 
Statement st = iter.next(); - propertyChainPropertiesToBNodes.put((URI)st.getSubject(), (URI)st.getObject()); + propertyChainPropertiesToBNodes.put((IRI)st.getSubject(), (IRI)st.getObject()); } } finally { if (iter != null) { @@ -344,19 +342,19 @@ public class InferenceEngine { } } // now for each property chain bNode, get the indexed list of properties associated with that chain - for (final URI propertyChainProperty : propertyChainPropertiesToBNodes.keySet()){ - final URI bNode = propertyChainPropertiesToBNodes.get(propertyChainProperty); + for (final IRI propertyChainProperty : propertyChainPropertiesToBNodes.keySet()){ + final IRI bNode = propertyChainPropertiesToBNodes.get(propertyChainProperty); // query for the list of indexed properties - iter = RyaDAOHelper.query(ryaDAO, bNode, VF.createURI("http://www.w3.org/2000/10/swap/list#index"), + iter = RyaDAOHelper.query(ryaDAO, bNode, VF.createIRI("http://www.w3.org/2000/10/swap/list#index"), null, conf); - final TreeMap<Integer, URI> orderedProperties = new TreeMap<>(); + final TreeMap<Integer, IRI> orderedProperties = new TreeMap<>(); // TODO refactor this. Wish I could execute sparql try { while (iter.hasNext()){ final Statement st = iter.next(); final String indexedElement = st.getObject().stringValue(); log.info(indexedElement); - CloseableIteration<Statement, QueryEvaluationException> iter2 = RyaDAOHelper.query(ryaDAO, VF.createURI(st.getObject().stringValue()), RDF.FIRST, + CloseableIteration<Statement, QueryEvaluationException> iter2 = RyaDAOHelper.query(ryaDAO, VF.createIRI(st.getObject().stringValue()), RDF.FIRST, null, conf); String integerValue = ""; Value anonPropNode = null; @@ -369,7 +367,7 @@ public class InferenceEngine { } iter2.close(); } - iter2 = RyaDAOHelper.query(ryaDAO, VF.createURI(st.getObject().stringValue()), RDF.REST, + iter2 = RyaDAOHelper.query(ryaDAO, VF.createIRI(st.getObject().stringValue()), RDF.REST, null, conf); if (iter2 != null){ while (iter2.hasNext()){ @@ -379,7 +377,7 @@ public class InferenceEngine { } iter2.close(); if (anonPropNode != null){ - iter2 = RyaDAOHelper.query(ryaDAO, VF.createURI(anonPropNode.stringValue()), RDF.FIRST, + iter2 = RyaDAOHelper.query(ryaDAO, VF.createIRI(anonPropNode.stringValue()), RDF.FIRST, null, conf); while (iter2.hasNext()){ final Statement iter2Statement = iter2.next(); @@ -392,7 +390,7 @@ public class InferenceEngine { if (!integerValue.isEmpty() && propURI!=null) { try { final int indexValue = Integer.parseInt(integerValue); - final URI chainPropURI = VF.createURI(propURI.stringValue()); + final IRI chainPropURI = VF.createIRI(propURI.stringValue()); orderedProperties.put(indexValue, chainPropURI); } catch (final Exception e){ @@ -405,44 +403,44 @@ public class InferenceEngine { iter.close(); } } - final List<URI> properties = new ArrayList<>(); - for (final Map.Entry<Integer, URI> entry : orderedProperties.entrySet()){ + final List<IRI> properties = new ArrayList<>(); + for (final Map.Entry<Integer, IRI> entry : orderedProperties.entrySet()){ properties.add(entry.getValue()); } tempPropertyChainPropertyToChain.put(propertyChainProperty, properties); } // could also be represented as a list of properties (some of which may be blank nodes) - for (final URI propertyChainProperty : propertyChainPropertiesToBNodes.keySet()){ - final List<URI> existingChain = tempPropertyChainPropertyToChain.get(propertyChainProperty); + for (final IRI propertyChainProperty : propertyChainPropertiesToBNodes.keySet()){ + final List<IRI> existingChain = 
tempPropertyChainPropertyToChain.get(propertyChainProperty); // if we didn't get a chain, try to get it through following the collection if ((existingChain == null) || existingChain.isEmpty()) { CloseableIteration<Statement, QueryEvaluationException> iter2 = RyaDAOHelper.query(ryaDAO, propertyChainPropertiesToBNodes.get(propertyChainProperty), RDF.FIRST, null, conf); - final List<URI> properties = new ArrayList<>(); - URI previousBNode = propertyChainPropertiesToBNodes.get(propertyChainProperty); + final List<IRI> properties = new ArrayList<>(); + IRI previousBNode = propertyChainPropertiesToBNodes.get(propertyChainProperty); if (iter2.hasNext()) { Statement iter2Statement = iter2.next(); Value currentPropValue = iter2Statement.getObject(); while ((currentPropValue != null) && (!currentPropValue.stringValue().equalsIgnoreCase(RDF.NIL.stringValue()))){ - if (currentPropValue instanceof URI){ - iter2 = RyaDAOHelper.query(ryaDAO, VF.createURI(currentPropValue.stringValue()), RDF.FIRST, + if (currentPropValue instanceof IRI){ + iter2 = RyaDAOHelper.query(ryaDAO, VF.createIRI(currentPropValue.stringValue()), RDF.FIRST, null, conf); if (iter2.hasNext()){ iter2Statement = iter2.next(); - if (iter2Statement.getObject() instanceof URI){ - properties.add((URI)iter2Statement.getObject()); + if (iter2Statement.getObject() instanceof IRI){ + properties.add((IRI)iter2Statement.getObject()); } } // otherwise see if there is an inverse declaration else { - iter2 = RyaDAOHelper.query(ryaDAO, VF.createURI(currentPropValue.stringValue()), OWL.INVERSEOF, + iter2 = RyaDAOHelper.query(ryaDAO, VF.createIRI(currentPropValue.stringValue()), OWL.INVERSEOF, null, conf); if (iter2.hasNext()){ iter2Statement = iter2.next(); - if (iter2Statement.getObject() instanceof URI){ - properties.add(new InverseURI((URI)iter2Statement.getObject())); + if (iter2Statement.getObject() instanceof IRI){ + properties.add(new InverseURI((IRI)iter2Statement.getObject())); } } } @@ -451,7 +449,7 @@ public class InferenceEngine { null, conf); if (iter2.hasNext()){ iter2Statement = iter2.next(); - previousBNode = (URI)currentPropValue; + previousBNode = (IRI)currentPropValue; currentPropValue = iter2Statement.getObject(); } else { @@ -490,8 +488,8 @@ public class InferenceEngine { * @throws QueryEvaluationException */ private void refreshDomainRange() throws QueryEvaluationException { - final Map<URI, Set<URI>> domainByTypePartial = new ConcurrentHashMap<>(); - final Map<URI, Set<URI>> rangeByTypePartial = new ConcurrentHashMap<>(); + final Map<IRI, Set<IRI>> domainByTypePartial = new ConcurrentHashMap<>(); + final Map<IRI, Set<IRI>> rangeByTypePartial = new ConcurrentHashMap<>(); // First, populate domain and range based on direct domain/range triples. 
CloseableIteration<Statement, QueryEvaluationException> iter = RyaDAOHelper.query(ryaDAO, null, RDFS.DOMAIN, null, conf); try { @@ -499,11 +497,11 @@ public class InferenceEngine { final Statement st = iter.next(); final Resource property = st.getSubject(); final Value domainType = st.getObject(); - if (domainType instanceof URI && property instanceof URI) { + if (domainType instanceof IRI && property instanceof IRI) { if (!domainByTypePartial.containsKey(domainType)) { - domainByTypePartial.put((URI) domainType, new HashSet<>()); + domainByTypePartial.put((IRI) domainType, new HashSet<>()); } - domainByTypePartial.get(domainType).add((URI) property); + domainByTypePartial.get(domainType).add((IRI) property); } } } finally { @@ -517,11 +515,11 @@ public class InferenceEngine { final Statement st = iter.next(); final Resource property = st.getSubject(); final Value rangeType = st.getObject(); - if (rangeType instanceof URI && property instanceof URI) { + if (rangeType instanceof IRI && property instanceof IRI) { if (!rangeByTypePartial.containsKey(rangeType)) { - rangeByTypePartial.put((URI) rangeType, new HashSet<>()); + rangeByTypePartial.put((IRI) rangeType, new HashSet<>()); } - rangeByTypePartial.get(rangeType).add((URI) property); + rangeByTypePartial.get(rangeType).add((IRI) property); } } } finally { @@ -531,26 +529,26 @@ public class InferenceEngine { } // Then combine with the subclass/subproperty graphs and the inverse property map to compute // the closure of domain and range per class. - final Set<URI> domainRangeTypeSet = new HashSet<>(domainByTypePartial.keySet()); + final Set<IRI> domainRangeTypeSet = new HashSet<>(domainByTypePartial.keySet()); domainRangeTypeSet.addAll(rangeByTypePartial.keySet()); // Extend to subproperties: make sure that using a more specific form of a property // still triggers its domain/range inferences. // Mirror for inverse properties: make sure that using the inverse form of a property // triggers the inverse domain/range inferences. // These two rules can recursively trigger one another. - for (final URI domainRangeType : domainRangeTypeSet) { - final Set<URI> propertiesWithDomain = domainByTypePartial.getOrDefault(domainRangeType, new HashSet<>()); - final Set<URI> propertiesWithRange = rangeByTypePartial.getOrDefault(domainRangeType, new HashSet<>()); + for (final IRI domainRangeType : domainRangeTypeSet) { + final Set<IRI> propertiesWithDomain = domainByTypePartial.getOrDefault(domainRangeType, new HashSet<>()); + final Set<IRI> propertiesWithRange = rangeByTypePartial.getOrDefault(domainRangeType, new HashSet<>()); // Since findParents will traverse the subproperty graph and find all indirect // subproperties, the subproperty rule does not need to trigger itself directly. // And since no more than one inverseOf relationship is stored for any property, the // inverse property rule does not need to trigger itself directly. However, each rule // can trigger the other, so keep track of how the inferred domains/ranges were // discovered so we can apply only those rules that might yield new information. 
- final Stack<URI> domainViaSuperProperty = new Stack<>(); - final Stack<URI> rangeViaSuperProperty = new Stack<>(); - final Stack<URI> domainViaInverseProperty = new Stack<>(); - final Stack<URI> rangeViaInverseProperty = new Stack<>(); + final Stack<IRI> domainViaSuperProperty = new Stack<>(); + final Stack<IRI> rangeViaSuperProperty = new Stack<>(); + final Stack<IRI> domainViaInverseProperty = new Stack<>(); + final Stack<IRI> rangeViaInverseProperty = new Stack<>(); // Start with the direct domain/range assertions, which can trigger any rule. domainViaSuperProperty.addAll(propertiesWithDomain); domainViaInverseProperty.addAll(propertiesWithDomain); @@ -563,8 +561,8 @@ public class InferenceEngine { // For a type c and property p, if c is a domain of p, then c is the range of any // inverse of p. Would be redundant for properties discovered via inverseOf. while (!domainViaSuperProperty.isEmpty()) { - final URI property = domainViaSuperProperty.pop(); - final URI inverseProperty = findInverseOf(property); + final IRI property = domainViaSuperProperty.pop(); + final IRI inverseProperty = findInverseOf(property); if (inverseProperty != null && propertiesWithRange.add(inverseProperty)) { rangeViaInverseProperty.push(inverseProperty); } @@ -572,8 +570,8 @@ public class InferenceEngine { // For a type c and property p, if c is a range of p, then c is the domain of any // inverse of p. Would be redundant for properties discovered via inverseOf. while (!rangeViaSuperProperty.isEmpty()) { - final URI property = rangeViaSuperProperty.pop(); - final URI inverseProperty = findInverseOf(property); + final IRI property = rangeViaSuperProperty.pop(); + final IRI inverseProperty = findInverseOf(property); if (inverseProperty != null && propertiesWithDomain.add(inverseProperty)) { domainViaInverseProperty.push(inverseProperty); } @@ -581,8 +579,8 @@ public class InferenceEngine { // For a type c and property p, if c is a domain of p, then c is also a domain of // p's subproperties. Would be redundant for properties discovered via this rule. while (!domainViaInverseProperty.isEmpty()) { - final URI property = domainViaInverseProperty.pop(); - final Set<URI> subProperties = getSubProperties(property); + final IRI property = domainViaInverseProperty.pop(); + final Set<IRI> subProperties = getSubProperties(property); subProperties.removeAll(propertiesWithDomain); propertiesWithDomain.addAll(subProperties); domainViaSuperProperty.addAll(subProperties); @@ -590,8 +588,8 @@ public class InferenceEngine { // For a type c and property p, if c is a range of p, then c is also a range of // p's subproperties. Would be redundant for properties discovered via this rule. while (!rangeViaInverseProperty.isEmpty()) { - final URI property = rangeViaInverseProperty.pop(); - final Set<URI> subProperties = getSubProperties(property); + final IRI property = rangeViaInverseProperty.pop(); + final Set<IRI> subProperties = getSubProperties(property); subProperties.removeAll(propertiesWithRange); propertiesWithRange.addAll(subProperties); rangeViaSuperProperty.addAll(subProperties); @@ -607,23 +605,23 @@ public class InferenceEngine { // Once all properties have been found for each domain/range class, extend to superclasses: // make sure that the consequent of a domain/range inference goes on to apply any more // general classes as well. 
- for (final URI subtype : domainRangeTypeSet) { - final Set<URI> supertypes = getSuperClasses(subtype); - final Set<URI> propertiesWithDomain = domainByTypePartial.getOrDefault(subtype, new HashSet<>()); - final Set<URI> propertiesWithRange = rangeByTypePartial.getOrDefault(subtype, new HashSet<>()); - for (final URI supertype : supertypes) { + for (final IRI subtype : domainRangeTypeSet) { + final Set<IRI> supertypes = getSuperClasses(subtype); + final Set<IRI> propertiesWithDomain = domainByTypePartial.getOrDefault(subtype, new HashSet<>()); + final Set<IRI> propertiesWithRange = rangeByTypePartial.getOrDefault(subtype, new HashSet<>()); + for (final IRI supertype : supertypes) { // For a property p and its domain c: all of c's superclasses are also domains of p. if (!propertiesWithDomain.isEmpty() && !domainByTypePartial.containsKey(supertype)) { domainByTypePartial.put(supertype, new HashSet<>()); } - for (final URI property : propertiesWithDomain) { + for (final IRI property : propertiesWithDomain) { domainByTypePartial.get(supertype).add(property); } // For a property p and its range c: all of c's superclasses are also ranges of p. if (!propertiesWithRange.isEmpty() && !rangeByTypePartial.containsKey(supertype)) { rangeByTypePartial.put(supertype, new HashSet<>()); } - for (final URI property : propertiesWithRange) { + for (final IRI property : propertiesWithRange) { rangeByTypePartial.get(supertype).add(property); } } @@ -641,11 +639,11 @@ public class InferenceEngine { private void refreshPropertyRestrictions() throws QueryEvaluationException { // Get a set of all property restrictions of any type final CloseableIteration<Statement, QueryEvaluationException> iter = RyaDAOHelper.query(ryaDAO, null, OWL.ONPROPERTY, null, conf); - final Map<Resource, URI> restrictions = new HashMap<>(); + final Map<Resource, IRI> restrictions = new HashMap<>(); try { while (iter.hasNext()) { final Statement st = iter.next(); - restrictions.put(st.getSubject(), (URI) st.getObject()); + restrictions.put(st.getSubject(), (IRI) st.getObject()); } } finally { if (iter != null) { @@ -659,7 +657,7 @@ public class InferenceEngine { refreshHasSelfRestrictions(restrictions); } - private void refreshHasValueRestrictions(final Map<Resource, URI> restrictions) throws QueryEvaluationException { + private void refreshHasValueRestrictions(final Map<Resource, IRI> restrictions) throws QueryEvaluationException { hasValueByType.clear(); hasValueByProperty.clear(); final CloseableIteration<Statement, QueryEvaluationException> iter = RyaDAOHelper.query(ryaDAO, null, OWL.HASVALUE, null, conf); @@ -668,7 +666,7 @@ public class InferenceEngine { final Statement st = iter.next(); final Resource restrictionClass = st.getSubject(); if (restrictions.containsKey(restrictionClass)) { - final URI property = restrictions.get(restrictionClass); + final IRI property = restrictions.get(restrictionClass); final Value value = st.getObject(); if (!hasValueByType.containsKey(restrictionClass)) { hasValueByType.put(restrictionClass, new HashMap<>()); @@ -687,20 +685,20 @@ public class InferenceEngine { } } - private void refreshSomeValuesFromRestrictions(final Map<Resource, URI> restrictions) throws QueryEvaluationException { + private void refreshSomeValuesFromRestrictions(final Map<Resource, IRI> restrictions) throws QueryEvaluationException { someValuesFromByRestrictionType.clear(); - ryaDaoQueryWrapper.queryAll(null, OWL.SOMEVALUESFROM, null, new RDFHandlerBase() { + ryaDaoQueryWrapper.queryAll(null, OWL.SOMEVALUESFROM, null, new 
AbstractRDFHandler() { @Override public void handleStatement(final Statement statement) throws RDFHandlerException { final Resource restrictionClass = statement.getSubject(); if (restrictions.containsKey(restrictionClass) && statement.getObject() instanceof Resource) { - final URI property = restrictions.get(restrictionClass); + final IRI property = restrictions.get(restrictionClass); final Resource valueClass = (Resource) statement.getObject(); // Should also be triggered by subclasses of the value class final Set<Resource> valueClasses = new HashSet<>(); valueClasses.add(valueClass); - if (valueClass instanceof URI) { - valueClasses.addAll(getSubClasses((URI) valueClass)); + if (valueClass instanceof IRI) { + valueClasses.addAll(getSubClasses((IRI) valueClass)); } for (final Resource valueSubClass : valueClasses) { if (!someValuesFromByRestrictionType.containsKey(restrictionClass)) { @@ -713,20 +711,20 @@ public class InferenceEngine { }); } - private void refreshAllValuesFromRestrictions(final Map<Resource, URI> restrictions) throws QueryEvaluationException { + private void refreshAllValuesFromRestrictions(final Map<Resource, IRI> restrictions) throws QueryEvaluationException { allValuesFromByValueType.clear(); - ryaDaoQueryWrapper.queryAll(null, OWL.ALLVALUESFROM, null, new RDFHandlerBase() { + ryaDaoQueryWrapper.queryAll(null, OWL.ALLVALUESFROM, null, new AbstractRDFHandler() { @Override public void handleStatement(final Statement statement) throws RDFHandlerException { final Resource directRestrictionClass = statement.getSubject(); if (restrictions.containsKey(directRestrictionClass) && statement.getObject() instanceof Resource) { - final URI property = restrictions.get(directRestrictionClass); + final IRI property = restrictions.get(directRestrictionClass); final Resource valueClass = (Resource) statement.getObject(); // Should also be triggered by subclasses of the property restriction final Set<Resource> restrictionClasses = new HashSet<>(); restrictionClasses.add(directRestrictionClass); - if (directRestrictionClass instanceof URI) { - restrictionClasses.addAll(getSubClasses((URI) directRestrictionClass)); + if (directRestrictionClass instanceof IRI) { + restrictionClasses.addAll(getSubClasses((IRI) directRestrictionClass)); } for (final Resource restrictionClass : restrictionClasses) { if (!allValuesFromByValueType.containsKey(valueClass)) { @@ -739,16 +737,16 @@ public class InferenceEngine { }); } - private void refreshHasSelfRestrictions(final Map<Resource, URI> restrictions) throws QueryEvaluationException { + private void refreshHasSelfRestrictions(final Map<Resource, IRI> restrictions) throws QueryEvaluationException { hasSelfByType.clear(); hasSelfByProperty.clear(); for(final Resource type : restrictions.keySet()) { - final URI property = restrictions.get(type); + final IRI property = restrictions.get(type); final CloseableIteration<Statement, QueryEvaluationException> iter = RyaDAOHelper.query(ryaDAO, type, HAS_SELF, null, conf); try { if (iter.hasNext()) { - Set<URI> typeSet = hasSelfByType.get(type); + Set<IRI> typeSet = hasSelfByType.get(type); Set<Resource> propSet = hasSelfByProperty.get(property); if (typeSet == null) { @@ -784,12 +782,12 @@ public class InferenceEngine { // _:bnode1 rdf:rest _:bnode2 . // _:bnode2 rdf:first <:C> . // _:bnode2 rdf:rest rdf:nil . 
- ryaDaoQueryWrapper.queryAll(null, OWL.INTERSECTIONOF, null, new RDFHandlerBase() { + ryaDaoQueryWrapper.queryAll(null, OWL.INTERSECTIONOF, null, new AbstractRDFHandler() { @Override public void handleStatement(final Statement statement) throws RDFHandlerException { final Resource type = statement.getSubject(); // head will point to a type that is part of the intersection. - final URI head = (URI) statement.getObject(); + final IRI head = (IRI) statement.getObject(); if (!intersectionsProp.containsKey(type)) { intersectionsProp.put(type, new ArrayList<Set<Resource>>()); } @@ -840,8 +838,8 @@ public class InferenceEngine { final Resource type = entry.getKey(); final List<Set<Resource>> intersectionList = entry.getValue(); - final Set<URI> superClasses = getSuperClasses((URI) type); - for (final URI superClass : superClasses) { + final Set<IRI> superClasses = getSuperClasses((IRI) type); + for (final IRI superClass : superClasses) { // Add intersections to super classes if applicable. // IF: // :A intersectionOf[:B, :C] @@ -881,12 +879,12 @@ public class InferenceEngine { // _:bnode1 rdf:rest _:bnode2 . // _:bnode2 rdf:first <:C> . // _:bnode2 rdf:rest rdf:nil . - ryaDaoQueryWrapper.queryAll(null, OWL.ONEOF, null, new RDFHandlerBase() { + ryaDaoQueryWrapper.queryAll(null, OWL.ONEOF, null, new AbstractRDFHandler() { @Override public void handleStatement(final Statement statement) throws RDFHandlerException { final Resource enumType = statement.getSubject(); // listHead will point to a type class of the enumeration. - final URI listHead = (URI) statement.getObject(); + final IRI listHead = (IRI) statement.getObject(); if (!enumTypes.containsKey(enumType)) { enumTypes.put(enumType, new LinkedHashSet<Resource>()); } @@ -926,17 +924,17 @@ public class InferenceEngine { * resource has that value for that property, it is implied to * belong to the type. */ - public Set<URI> getHasSelfImplyingType(final Resource type){ + public Set<IRI> getHasSelfImplyingType(final Resource type){ // return properties that imply this type if reflexive - final Set<URI> properties = new HashSet<>(); - Set<URI> tempProperties = hasSelfByType.get(type); + final Set<IRI> properties = new HashSet<>(); + Set<IRI> tempProperties = hasSelfByType.get(type); if (tempProperties != null) { properties.addAll(tempProperties); } //findParent gets all subclasses, add self. - if (type instanceof URI) { - for (final URI subtype : findParents(subClassOfGraph.get(), (URI) type)) { + if (type instanceof IRI) { + for (final IRI subtype : findParents(subClassOfGraph.get(), (IRI) type)) { tempProperties = hasSelfByType.get(subtype); if (tempProperties != null) { properties.addAll(tempProperties); @@ -957,7 +955,7 @@ public class InferenceEngine { * @param property The property whose owl:hasSelf restrictions to return * @return A set of types that possess the implied property. */ - public Set<Resource> getHasSelfImplyingProperty(final URI property) { + public Set<Resource> getHasSelfImplyingProperty(final IRI property) { // return types that imply this type if reflexive final Set<Resource> types = new HashSet<>(); final Set<Resource> baseTypes = hasSelfByProperty.get(property); @@ -966,8 +964,8 @@ public class InferenceEngine { types.addAll(baseTypes); // findParent gets all subclasses, add self. 
for (final Resource baseType : baseTypes) { - if (baseType instanceof URI) { - types.addAll(findParents(subClassOfGraph.get(), (URI) baseType)); + if (baseType instanceof IRI) { + types.addAll(findParents(subClassOfGraph.get(), (IRI) baseType)); } } } @@ -992,28 +990,28 @@ public class InferenceEngine { * @return the {@link List} of {@link Resource}s. * @throws QueryEvaluationException */ - private List<Resource> getList(final URI firstItem) throws QueryEvaluationException { - URI head = firstItem; + private List<Resource> getList(final IRI firstItem) throws QueryEvaluationException { + IRI head = firstItem; final List<Resource> list = new ArrayList<>(); // Go through and find all bnodes that are part of the defined list. while (!RDF.NIL.equals(head)) { // rdf.first will point to a type item that is in the list. - ryaDaoQueryWrapper.queryFirst(head, RDF.FIRST, null, new RDFHandlerBase() { + ryaDaoQueryWrapper.queryFirst(head, RDF.FIRST, null, new AbstractRDFHandler() { @Override public void handleStatement(final Statement statement) throws RDFHandlerException { // The object found in the query represents a type // that should be included in the list. - final URI object = (URI) statement.getObject(); + final IRI object = (IRI) statement.getObject(); list.add(object); } }); - final MutableObject<URI> headHolder = new MutableObject<>(); + final MutableObject<IRI> headHolder = new MutableObject<>(); // rdf.rest will point to the next bnode that's part of the list. - ryaDaoQueryWrapper.queryFirst(head, RDF.REST, null, new RDFHandlerBase() { + ryaDaoQueryWrapper.queryFirst(head, RDF.REST, null, new AbstractRDFHandler() { @Override public void handleStatement(final Statement statement) throws RDFHandlerException { // This object is the next bnode head to look for. - final URI object = (URI) statement.getObject(); + final IRI object = (IRI) statement.getObject(); headHolder.setValue(object); } }); @@ -1029,7 +1027,7 @@ public class InferenceEngine { } private void addSubClassOf(final Resource s, final Resource o) { - final Statement statement = new StatementImpl(s, RDFS.SUBCLASSOF, o); + final Statement statement = VF.createStatement(s, RDFS.SUBCLASSOF, o); final String edgeName = RDFS.SUBCLASSOF.stringValue(); addStatementEdge(subClassOfGraph.get(), edgeName, statement); @@ -1075,48 +1073,48 @@ public class InferenceEngine { /** * Returns all super class types of the specified type based on the * internal subclass graph. - * @param type the type {@link URI} to find super classes for. - * @return the {@link Set} of {@link URI} types that are super classes types + * @param type the type {@link IRI} to find super classes for. + * @return the {@link Set} of {@link IRI} types that are super classes types * of the specified {@code type}. Returns an empty set if nothing was found, * or if either type or the subclass graph is {@code null}. */ - public Set<URI> getSuperClasses(final URI type) { + public Set<IRI> getSuperClasses(final IRI type) { return findChildren(subClassOfGraph.get(), type); } /** * Returns all sub class types of the specified type based on the * internal subclass graph. - * @param type the type {@link URI} to find sub classes for. - * @return the {@link Set} of {@link URI} types that are sub classes types + * @param type the type {@link IRI} to find sub classes for. + * @return the {@link Set} of {@link IRI} types that are sub classes types * of the specified {@code type}. Returns an empty set if nothing was found, * or if either type or the subclass graph is {@code null}. 
*/ - public Set<URI> getSubClasses(final URI type) { + public Set<IRI> getSubClasses(final IRI type) { return findParents(subClassOfGraph.get(), type); } /** * Returns all superproperties of the specified property based on the * internal subproperty graph. - * @param property the property {@link URI} to find superproperties for. - * @return the {@link Set} of {@link URI} properties that are superproperties + * @param property the property {@link IRI} to find superproperties for. + * @return the {@link Set} of {@link IRI} properties that are superproperties * of the specified {@code property}. Returns an empty set if nothing was found, * or if either property or the subproperty graph is {@code null}. */ - public Set<URI> getSuperProperties(final URI property) { + public Set<IRI> getSuperProperties(final IRI property) { return findChildren(subPropertyOfGraph.get(), property); } /** * Returns all subproperties of the specified property based on the * internal subproperty graph. - * @param property the property {@link URI} to find subproperties for. - * @return the {@link Set} of {@link URI} properties that are subproperties + * @param property the property {@link IRI} to find subproperties for. + * @return the {@link Set} of {@link IRI} properties that are subproperties * of the specified {@code property}. Returns an empty set if nothing was found, * or if either property or the subproperty graph is {@code null}. */ - public Set<URI> getSubProperties(final URI property) { + public Set<IRI> getSubProperties(final IRI property) { return findParents(subPropertyOfGraph.get(), property); } @@ -1128,7 +1126,7 @@ public class InferenceEngine { * @return The set of predecessors, or an empty set if none are found or if * either argument is {@code null} */ - public static Set<URI> findParents(final Graph graph, final URI vertexId) { + public static Set<IRI> findParents(final Graph graph, final IRI vertexId) { return findParents(graph, vertexId, true); } @@ -1141,7 +1139,7 @@ public class InferenceEngine { * @return The set of predecessors, or an empty set if none are found or if * either argument is {@code null} */ - public static Set<URI> findParents(final Graph graph, final URI vertexId, final boolean isRecursive) { + public static Set<IRI> findParents(final Graph graph, final IRI vertexId, final boolean isRecursive) { return findConnected(graph, vertexId, Direction.IN, isRecursive); } @@ -1153,7 +1151,7 @@ public class InferenceEngine { * @return The set of successors, or an empty set if none are found or if * either argument is {@code null} */ - public static Set<URI> findChildren(final Graph graph, final URI vertexId) { + public static Set<IRI> findChildren(final Graph graph, final IRI vertexId) { return findChildren(graph, vertexId, true); } @@ -1166,7 +1164,7 @@ public class InferenceEngine { * @return The set of successors, or an empty set if none are found or if * either argument is {@code null} */ - public static Set<URI> findChildren(final Graph graph, final URI vertexId, final boolean isRecursive) { + public static Set<IRI> findChildren(final Graph graph, final IRI vertexId, final boolean isRecursive) { return findConnected(graph, vertexId, Direction.OUT, isRecursive); } @@ -1182,8 +1180,8 @@ public class InferenceEngine { * @return The set of connected nodes, or an empty set if none are found, or * if either the graph or the starting vertex are {@code null}. 
*/ - private static Set<URI> findConnected(final Graph graph, final URI vertexId, final Direction traversal, final boolean isRecursive) { - final Set<URI> connected = new HashSet<>(); + private static Set<IRI> findConnected(final Graph graph, final IRI vertexId, final Direction traversal, final boolean isRecursive) { + final Set<IRI> connected = new HashSet<>(); if (graph == null || vertexId == null) { return connected; } @@ -1195,14 +1193,14 @@ public class InferenceEngine { return connected; } - private static void addConnected(final Vertex v, final Set<URI> connected, final Direction traversal, final boolean isRecursive) { + private static void addConnected(final Vertex v, final Set<IRI> connected, final Direction traversal, final boolean isRecursive) { v.edges(traversal).forEachRemaining(edge -> { final Vertex ov = edge.vertices(traversal.opposite()).next(); final Object o = ov.property(URI_PROP).value(); - if (o != null && o instanceof URI) { + if (o != null && o instanceof IRI) { final boolean contains = connected.contains(o); if (!contains) { - connected.add((URI) o); + connected.add((IRI) o); if (isRecursive) { addConnected(ov, connected, traversal, isRecursive); } @@ -1211,15 +1209,15 @@ public class InferenceEngine { }); } - public boolean isSymmetricProperty(final URI prop) { + public boolean isSymmetricProperty(final IRI prop) { return (symmetricPropertySet != null) && symmetricPropertySet.contains(prop); } - public URI findInverseOf(final URI prop) { + public IRI findInverseOf(final IRI prop) { return (inverseOfMap != null) ? inverseOfMap.get(prop) : (null); } - public boolean isTransitiveProperty(final URI prop) { + public boolean isTransitiveProperty(final IRI prop) { return (transitivePropertySet != null) && transitivePropertySet.contains(prop); } @@ -1228,14 +1226,14 @@ public class InferenceEngine { * @param prop A URI * @return True if the given URI corresponds to an owl:ReflexiveProperty */ - public boolean isReflexiveProperty(final URI prop) { + public boolean isReflexiveProperty(final IRI prop) { return (reflexivePropertySet != null) && reflexivePropertySet.contains(prop); } /** * TODO: This chaining can be slow at query execution. the other option is to perform this in the query itself, but that will be constrained to how many levels we decide to go */ - public Set<Statement> findTransitiveProperty(final Resource subj, final URI prop, final Value obj, final Resource... contxts) throws InferenceEngineException { + public Set<Statement> findTransitiveProperty(final Resource subj, final IRI prop, final Value obj, final Resource... contxts) throws InferenceEngineException { if (transitivePropertySet.contains(prop)) { final Set<Statement> sts = new HashSet<>(); final boolean goUp = subj == null; @@ -1256,7 +1254,7 @@ public class InferenceEngine { return sameAs; } - public CloseableIteration<Statement, QueryEvaluationException> queryDao(final Resource subject, final URI predicate, final Value object, final Resource... contexts) throws QueryEvaluationException { + public CloseableIteration<Statement, QueryEvaluationException> queryDao(final Resource subject, final IRI predicate, final Value object, final Resource... 
contexts) throws QueryEvaluationException { return RyaDAOHelper.query(ryaDAO, subject, predicate, object, conf, contexts); } @@ -1305,13 +1303,13 @@ public class InferenceEngine { } } - protected void chainTransitiveProperty(final Resource subj, final URI prop, final Value obj, final Value core, final Set<Statement> sts, final boolean goUp, final Resource[] contxts) throws InferenceEngineException { + protected void chainTransitiveProperty(final Resource subj, final IRI prop, final Value obj, final Value core, final Set<Statement> sts, final boolean goUp, final Resource[] contxts) throws InferenceEngineException { CloseableIteration<Statement, QueryEvaluationException> iter = null; try { iter = queryDao(subj, prop, obj, contxts); while (iter.hasNext()) { final Statement st = iter.next(); - sts.add(new StatementImpl((goUp) ? (st.getSubject()) : (Resource) (core), prop, (!goUp) ? (st.getObject()) : (core))); + sts.add(VF.createStatement((goUp) ? (st.getSubject()) : (Resource) (core), prop, (!goUp) ? (st.getObject()) : (core))); if (goUp) { chainTransitiveProperty(null, prop, st.getSubject(), core, sts, goUp, contxts); } else { @@ -1360,15 +1358,15 @@ public class InferenceEngine { return subClassOfGraph.get(); } - public Map<URI, List<URI>> getPropertyChainMap() { + public Map<IRI, List<IRI>> getPropertyChainMap() { return propertyChainPropertyToChain; } - public List<URI> getPropertyChain(final URI chainProp) { + public List<IRI> getPropertyChain(final IRI chainProp) { if (propertyChainPropertyToChain.containsKey(chainProp)){ return propertyChainPropertyToChain.get(chainProp); } - return new ArrayList<URI>(); + return new ArrayList<IRI>(); } public Graph getSubPropertyOfGraph() { @@ -1383,15 +1381,15 @@ public class InferenceEngine { this.refreshGraphSchedule.set(refreshGraphSchedule); } - public Set<URI> getSymmetricPropertySet() { + public Set<IRI> getSymmetricPropertySet() { return symmetricPropertySet; } - public Map<URI, URI> getInverseOfMap() { + public Map<IRI, IRI> getInverseOfMap() { return inverseOfMap; } - public Set<URI> getTransitivePropertySet() { + public Set<IRI> getTransitivePropertySet() { return transitivePropertySet; } @@ -1413,17 +1411,17 @@ public class InferenceEngine { * @return For each relevant property, a set of values such that whenever a resource has that * value for that property, it is implied to belong to the type. */ - public Map<URI, Set<Value>> getHasValueByType(final Resource type) { - final Map<URI, Set<Value>> implications = new HashMap<>(); + public Map<IRI, Set<Value>> getHasValueByType(final Resource type) { + final Map<IRI, Set<Value>> implications = new HashMap<>(); if (hasValueByType != null) { final Set<Resource> types = new HashSet<>(); types.add(type); - if (type instanceof URI) { - types.addAll(getSubClasses((URI) type)); + if (type instanceof IRI) { + types.addAll(getSubClasses((IRI) type)); } for (final Resource relevantType : types) { if (hasValueByType.containsKey(relevantType)) { - for (final Map.Entry<URI, Value> propertyToValue : hasValueByType.get(relevantType).entrySet()) { + for (final Map.Entry<IRI, Value> propertyToValue : hasValueByType.get(relevantType).entrySet()) { if (!implications.containsKey(propertyToValue.getKey())) { implications.put(propertyToValue.getKey(), new HashSet<>()); } @@ -1446,7 +1444,7 @@ public class InferenceEngine { * @return A mapping from type (URIs or bnodes) to the set of any values that belonging to that * type implies. 
*/ - public Map<Resource, Set<Value>> getHasValueByProperty(final URI property) { + public Map<Resource, Set<Value>> getHasValueByProperty(final IRI property) { final Map<Resource, Set<Value>> implications = new HashMap<>(); if (hasValueByProperty != null && hasValueByProperty.containsKey(property)) { for (final Map.Entry<Resource, Value> typeToValue : hasValueByProperty.get(property).entrySet()) { @@ -1455,8 +1453,8 @@ public class InferenceEngine { implications.put(type, new HashSet<>()); } implications.get(type).add(typeToValue.getValue()); - if (type instanceof URI) { - for (final URI subtype : getSubClasses((URI) type)) { + if (type instanceof IRI) { + for (final IRI subtype : getSubClasses((IRI) type)) { if (!implications.containsKey(subtype)) { implications.put(subtype, new HashSet<>()); } @@ -1478,8 +1476,8 @@ public class InferenceEngine { * @return The set of properties with domain of that type, meaning that any triple whose * predicate belongs to that set implies that the triple's subject belongs to the type. */ - public Set<URI> getPropertiesWithDomain(final URI domainType) { - final Set<URI> properties = new HashSet<>(); + public Set<IRI> getPropertiesWithDomain(final IRI domainType) { + final Set<IRI> properties = new HashSet<>(); if (domainByType.containsKey(domainType)) { properties.addAll(domainByType.get(domainType)); } @@ -1496,8 +1494,8 @@ public class InferenceEngine { * @return The set of properties with range of that type, meaning that any triple whose * predicate belongs to that set implies that the triple's object belongs to the type. */ - public Set<URI> getPropertiesWithRange(final URI rangeType) { - final Set<URI> properties = new HashSet<>(); + public Set<IRI> getPropertiesWithRange(final IRI rangeType) { + final Set<IRI> properties = new HashSet<>(); if (rangeByType.containsKey(rangeType)) { properties.addAll(rangeByType.get(rangeType)); } @@ -1520,23 +1518,23 @@ public class InferenceEngine { * individual type/property combination is sufficient. Returns an empty map if either * parameter is {@code null}. 
*/ - private Map<Resource, Set<URI>> getTypePropertyImplyingType(final Resource queryType, final Map<Resource, Map<Resource, URI>> schemaMap) { - final Map<Resource, Set<URI>> implications = new HashMap<>(); + private Map<Resource, Set<IRI>> getTypePropertyImplyingType(final Resource queryType, final Map<Resource, Map<Resource, IRI>> schemaMap) { + final Map<Resource, Set<IRI>> implications = new HashMap<>(); if (schemaMap != null && queryType != null) { // Check for any subtypes which would in turn imply the type being queried for final HashSet<Resource> queryTypes = new HashSet<>(); queryTypes.add(queryType); - if (queryType instanceof URI) { - queryTypes.addAll(getSubClasses((URI) queryType)); + if (queryType instanceof IRI) { + queryTypes.addAll(getSubClasses((IRI) queryType)); } for (final Resource querySubType : queryTypes) { if (schemaMap.containsKey(querySubType)) { - final Map<Resource, URI> otherTypeToProperty = schemaMap.get(querySubType); + final Map<Resource, IRI> otherTypeToProperty = schemaMap.get(querySubType); for (final Resource otherType : otherTypeToProperty.keySet()) { if (!implications.containsKey(otherType)) { implications.put(otherType, new HashSet<>()); } - final URI property = otherTypeToProperty.get(otherType); + final IRI property = otherTypeToProperty.get(otherType); if (property != null) { implications.get(otherType).add(property); // Also add subproperties that would in turn imply the property @@ -1569,7 +1567,7 @@ public class InferenceEngine { * to the restriction type. Empty map if the parameter is {@code null} or if the * someValuesFrom schema has not been populated. */ - public Map<Resource, Set<URI>> getSomeValuesFromByRestrictionType(final Resource restrictionType) { + public Map<Resource, Set<IRI>> getSomeValuesFromByRestrictionType(final Resource restrictionType) { return getTypePropertyImplyingType(restrictionType, someValuesFromByRestrictionType); } @@ -1590,7 +1588,7 @@ public class InferenceEngine { * values it has for any of those properties belong to the value type. Empty map if the * parameter is {@code null} or if the allValuesFrom schema has not been populated. */ - public Map<Resource, Set<URI>> getAllValuesFromByValueType(final Resource valueType) { + public Map<Resource, Set<IRI>> getAllValuesFromByValueType(final Resource valueType) { return getTypePropertyImplyingType(valueType, allValuesFromByValueType); }
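The InferenceEngine.java hunks above are representative of the whole patch: the OpenRDF Sesame API is swapped for the equivalent Eclipse RDF4J API, with org.openrdf.model.URI becoming org.eclipse.rdf4j.model.IRI, ValueFactoryImpl becoming SimpleValueFactory, new StatementImpl(...) becoming VF.createStatement(...), RDFHandlerBase becoming AbstractRDFHandler, and CloseableIteration moving from info.aduna.iteration to org.eclipse.rdf4j.common.iteration. The following is a minimal, self-contained sketch of those replacement idioms only; the class name Rdf4jMigrationSketch and its helper methods are illustrative and do not appear in the patch.

    import java.util.Set;

    import org.eclipse.rdf4j.model.IRI;                      // was org.openrdf.model.URI
    import org.eclipse.rdf4j.model.Resource;
    import org.eclipse.rdf4j.model.Statement;
    import org.eclipse.rdf4j.model.ValueFactory;
    import org.eclipse.rdf4j.model.impl.SimpleValueFactory;  // was org.openrdf.model.impl.ValueFactoryImpl
    import org.eclipse.rdf4j.model.vocabulary.OWL;
    import org.eclipse.rdf4j.rio.helpers.AbstractRDFHandler; // was org.openrdf.rio.helpers.RDFHandlerBase

    public class Rdf4jMigrationSketch {
        // ValueFactoryImpl.getInstance() is replaced by SimpleValueFactory.getInstance().
        private static final ValueFactory VF = SimpleValueFactory.getInstance();

        // createURI(namespace, localName) is replaced by createIRI(namespace, localName),
        // and fields or parameters typed as URI become IRI.
        private static final IRI HAS_SELF = VF.createIRI(OWL.NAMESPACE, "hasSelf");

        // new StatementImpl(s, p, o) is replaced by VF.createStatement(s, p, o);
        // shown here building a reversed statement, as in addPredicateEdges above.
        static Statement reverse(final Statement st) {
            return VF.createStatement((Resource) st.getObject(), st.getPredicate(), st.getSubject());
        }

        // RDFHandlerBase is replaced by AbstractRDFHandler; the handleStatement
        // callback itself is unchanged, as in fetchInstances above.
        static AbstractRDFHandler collectIriSubjects(final Set<IRI> instances) {
            return new AbstractRDFHandler() {
                @Override
                public void handleStatement(final Statement st) {
                    if (st.getSubject() instanceof IRI) {
                        instances.add((IRI) st.getSubject());
                    }
                }
            };
        }
    }

The same mechanical substitutions recur in the visitor classes below (IRI casts on Var values, RDF4J query algebra imports), so the diffs that follow are smaller variations on this pattern.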
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/81b99327/sail/src/main/java/org/apache/rya/rdftriplestore/inference/IntersectionOfVisitor.java ---------------------------------------------------------------------- diff --git a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/IntersectionOfVisitor.java b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/IntersectionOfVisitor.java index b4853c0..0208d68 100644 --- a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/IntersectionOfVisitor.java +++ b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/IntersectionOfVisitor.java @@ -26,13 +26,14 @@ import java.util.TreeSet; import org.apache.log4j.Logger; import org.apache.rya.api.RdfCloudTripleStoreConfiguration; -import org.openrdf.model.Resource; -import org.openrdf.model.URI; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.Union; -import org.openrdf.query.algebra.Var; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Resource; +import org.eclipse.rdf4j.model.vocabulary.RDF; +import org.eclipse.rdf4j.query.algebra.Join; +import org.eclipse.rdf4j.query.algebra.StatementPattern; +import org.eclipse.rdf4j.query.algebra.TupleExpr; +import org.eclipse.rdf4j.query.algebra.Union; +import org.eclipse.rdf4j.query.algebra.Var; /** * Visitor for handling owl:intersectionOf inferencing on a node. @@ -58,7 +59,7 @@ public class IntersectionOfVisitor extends AbstractInferVisitor { final Var objVar = node.getObjectVar(); final Var conVar = node.getContextVar(); if (predVar != null && objVar != null && objVar.getValue() != null && RDF.TYPE.equals(predVar.getValue()) && !EXPANDED.equals(conVar)) { - final List<Set<Resource>> intersections = inferenceEngine.getIntersectionsImplying((URI) objVar.getValue()); + final List<Set<Resource>> intersections = inferenceEngine.getIntersectionsImplying((IRI) objVar.getValue()); if (intersections != null && !intersections.isEmpty()) { final List<TupleExpr> joins = new ArrayList<>(); for (final Set<Resource> intersection : intersections) { http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/81b99327/sail/src/main/java/org/apache/rya/rdftriplestore/inference/InverseOfVisitor.java ---------------------------------------------------------------------- diff --git a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/InverseOfVisitor.java b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/InverseOfVisitor.java index 2f026fc..e3a74d5 100644 --- a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/InverseOfVisitor.java +++ b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/InverseOfVisitor.java @@ -19,16 +19,14 @@ package org.apache.rya.rdftriplestore.inference; * under the License. 
*/ - - import org.apache.rya.api.RdfCloudTripleStoreConfiguration; -import org.openrdf.model.URI; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.model.vocabulary.RDFS; -import org.openrdf.model.vocabulary.SESAME; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.Union; -import org.openrdf.query.algebra.Var; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.vocabulary.RDF; +import org.eclipse.rdf4j.model.vocabulary.RDFS; +import org.eclipse.rdf4j.model.vocabulary.SESAME; +import org.eclipse.rdf4j.query.algebra.StatementPattern; +import org.eclipse.rdf4j.query.algebra.Union; +import org.eclipse.rdf4j.query.algebra.Var; /** * All predicates are changed @@ -48,7 +46,7 @@ public class InverseOfVisitor extends AbstractInferVisitor { StatementPattern sp = node.clone(); final Var predVar = sp.getPredicateVar(); - URI pred = (URI) predVar.getValue(); + IRI pred = (IRI) predVar.getValue(); String predNamespace = pred.getNamespace(); final Var objVar = sp.getObjectVar(); @@ -65,8 +63,8 @@ public class InverseOfVisitor extends AbstractInferVisitor { " { ?b ?pred ?a } */ - URI predUri = (URI) predVar.getValue(); - URI invPropUri = inferenceEngine.findInverseOf(predUri); + IRI predUri = (IRI) predVar.getValue(); + IRI invPropUri = inferenceEngine.findInverseOf(predUri); if (invPropUri != null) { Var subjVar = sp.getSubjectVar(); Union union = new InferUnion(); http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/81b99327/sail/src/main/java/org/apache/rya/rdftriplestore/inference/InverseURI.java ---------------------------------------------------------------------- diff --git a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/InverseURI.java b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/InverseURI.java index 13c068a..2a96ae6 100644 --- a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/InverseURI.java +++ b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/InverseURI.java @@ -18,14 +18,14 @@ */ package org.apache.rya.rdftriplestore.inference; -import org.openrdf.model.URI; +import org.eclipse.rdf4j.model.IRI; -public class InverseURI implements URI { +public class InverseURI implements IRI { private static final long serialVersionUID = 1L; - private final URI impl; + private final IRI impl; - public InverseURI(final URI uri) { + public InverseURI(final IRI uri) { this.impl = uri; } http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/81b99327/sail/src/main/java/org/apache/rya/rdftriplestore/inference/OneOfVisitor.java ---------------------------------------------------------------------- diff --git a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/OneOfVisitor.java b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/OneOfVisitor.java index 004a4b0..5a5c2c7 100644 --- a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/OneOfVisitor.java +++ b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/OneOfVisitor.java @@ -23,13 +23,13 @@ import java.util.Set; import org.apache.log4j.Logger; import org.apache.rya.api.RdfCloudTripleStoreConfiguration; -import org.openrdf.model.Resource; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.query.BindingSet; -import org.openrdf.query.algebra.BindingSetAssignment; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.Var; -import org.openrdf.query.algebra.evaluation.QueryBindingSet; +import org.eclipse.rdf4j.model.Resource; +import 
org.eclipse.rdf4j.model.vocabulary.RDF; +import org.eclipse.rdf4j.query.BindingSet; +import org.eclipse.rdf4j.query.algebra.BindingSetAssignment; +import org.eclipse.rdf4j.query.algebra.StatementPattern; +import org.eclipse.rdf4j.query.algebra.Var; +import org.eclipse.rdf4j.query.algebra.evaluation.QueryBindingSet; /** * Visitor for handling owl:oneOf inferencing on a node. http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/81b99327/sail/src/main/java/org/apache/rya/rdftriplestore/inference/PropertyChainVisitor.java ---------------------------------------------------------------------- diff --git a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/PropertyChainVisitor.java b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/PropertyChainVisitor.java index ae7e059..2df3461 100644 --- a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/PropertyChainVisitor.java +++ b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/PropertyChainVisitor.java @@ -23,14 +23,14 @@ import java.util.List; import java.util.UUID; import org.apache.rya.api.RdfCloudTripleStoreConfiguration; -import org.openrdf.model.URI; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.model.vocabulary.RDFS; -import org.openrdf.model.vocabulary.SESAME; -import org.openrdf.query.algebra.Join; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.Var; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.vocabulary.RDF; +import org.eclipse.rdf4j.model.vocabulary.RDFS; +import org.eclipse.rdf4j.model.vocabulary.SESAME; +import org.eclipse.rdf4j.query.algebra.Join; +import org.eclipse.rdf4j.query.algebra.StatementPattern; +import org.eclipse.rdf4j.query.algebra.TupleExpr; +import org.eclipse.rdf4j.query.algebra.Var; /** * All predicates are changed @@ -51,7 +51,7 @@ public class PropertyChainVisitor extends AbstractInferVisitor { final StatementPattern sp = node.clone(); final Var predVar = sp.getPredicateVar(); - final URI pred = (URI) predVar.getValue(); + final IRI pred = (IRI) predVar.getValue(); final String predNamespace = pred.getNamespace(); final Var objVar = sp.getObjectVar(); @@ -62,8 +62,8 @@ public class PropertyChainVisitor extends AbstractInferVisitor { !RDFS.NAMESPACE.equals(predNamespace) && !EXPANDED.equals(cntxtVar)) { - final URI chainPropURI = (URI) predVar.getValue(); - final List<URI> chain = inferenceEngine.getPropertyChain(chainPropURI); + final IRI chainPropURI = (IRI) predVar.getValue(); + final List<IRI> chain = inferenceEngine.getPropertyChain(chainPropURI); final List<StatementPattern> expandedPatterns = new ArrayList<StatementPattern>(); if (chain.size() > 0) { final Var originalSubj = sp.getSubjectVar(); @@ -71,7 +71,7 @@ public class PropertyChainVisitor extends AbstractInferVisitor { Var nextSubj = originalSubj; StatementPattern lastStatementPatternAdded = null; - for (final URI chainElement : chain ){ + for (final IRI chainElement : chain ){ final String s = UUID.randomUUID().toString(); final Var currentObj = new Var("c-" + s); StatementPattern statementPattern = new StatementPattern(nextSubj, new Var(chainElement.stringValue()), currentObj, sp.getContextVar()); http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/81b99327/sail/src/main/java/org/apache/rya/rdftriplestore/inference/ReflexivePropertyVisitor.java ---------------------------------------------------------------------- diff --git 
a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/ReflexivePropertyVisitor.java b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/ReflexivePropertyVisitor.java index d515bcf..23df2a4 100644 --- a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/ReflexivePropertyVisitor.java +++ b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/ReflexivePropertyVisitor.java @@ -19,10 +19,10 @@ package org.apache.rya.rdftriplestore.inference; */ import org.apache.rya.api.RdfCloudTripleStoreConfiguration; -import org.openrdf.model.URI; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.Var; -import org.openrdf.query.algebra.ZeroLengthPath; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.query.algebra.StatementPattern; +import org.eclipse.rdf4j.query.algebra.Var; +import org.eclipse.rdf4j.query.algebra.ZeroLengthPath; /** * Expands the query tree to account for any relevant reflexive properties @@ -59,7 +59,7 @@ public class ReflexivePropertyVisitor extends AbstractInferVisitor { protected void meetSP(StatementPattern node) throws Exception { // Only applies when the predicate is defined and reflexive final Var predVar = node.getPredicateVar(); - if (predVar.getValue() != null && inferenceEngine.isReflexiveProperty((URI) predVar.getValue())) { + if (predVar.getValue() != null && inferenceEngine.isReflexiveProperty((IRI) predVar.getValue())) { final StatementPattern originalSP = node.clone(); // The reflexive solution is a ZeroLengthPath between subject and // object: they can be matched to one another, whether constants or http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/81b99327/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SameAsVisitor.java ---------------------------------------------------------------------- diff --git a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SameAsVisitor.java b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SameAsVisitor.java index c616419..a71aaaa 100644 --- a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SameAsVisitor.java +++ b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SameAsVisitor.java @@ -19,25 +19,23 @@ package org.apache.rya.rdftriplestore.inference; * under the License. 
*/ - +import java.util.HashSet; +import java.util.Set; +import java.util.UUID; import org.apache.rya.api.RdfCloudTripleStoreConfiguration; import org.apache.rya.api.utils.NullableStatementImpl; import org.apache.rya.rdftriplestore.utils.FixedStatementPattern; import org.apache.rya.rdftriplestore.utils.TransitivePropertySP; -import org.openrdf.model.Resource; -import org.openrdf.model.URI; -import org.openrdf.model.Value; -import org.openrdf.model.vocabulary.OWL; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.model.vocabulary.RDFS; -import org.openrdf.model.vocabulary.SESAME; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.Var; - -import java.util.HashSet; -import java.util.Set; -import java.util.UUID; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Resource; +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.model.vocabulary.OWL; +import org.eclipse.rdf4j.model.vocabulary.RDF; +import org.eclipse.rdf4j.model.vocabulary.RDFS; +import org.eclipse.rdf4j.model.vocabulary.SESAME; +import org.eclipse.rdf4j.query.algebra.StatementPattern; +import org.eclipse.rdf4j.query.algebra.Var; /** * All predicates are changed @@ -74,7 +72,7 @@ public class SameAsVisitor extends AbstractInferVisitor { boolean shouldExpand = true; if (predVar.hasValue()){ - URI pred = (URI) predVar.getValue(); + IRI pred = (IRI) predVar.getValue(); String predNamespace = pred.getNamespace(); shouldExpand = !pred.equals(OWL.SAMEAS) && !RDF.NAMESPACE.equals(predNamespace) && @@ -136,7 +134,8 @@ public class SameAsVisitor extends AbstractInferVisitor { StatementPattern origDummyStatement = new DoNotExpandSP(origStatement.getSubjectVar(), origStatement.getPredicateVar(), dummyVar, cntxtVar); FixedStatementPattern fsp = new FixedStatementPattern(dummyVar, new Var("c-" + s, OWL.SAMEAS), objVar, cntxtVar); for (Resource sameAs : objURIs){ - NullableStatementImpl newStatement = new NullableStatementImpl(sameAs, OWL.SAMEAS, (Resource)objVar.getValue(), getVarValue(cntxtVar)); + NullableStatementImpl newStatement = new NullableStatementImpl(sameAs, OWL.SAMEAS, + objVar.getValue(), getVarValue(cntxtVar)); fsp.statements.add(newStatement); } InferJoin interimJoin = new InferJoin(fsp, origDummyStatement); @@ -169,7 +168,8 @@ public class SameAsVisitor extends AbstractInferVisitor { } FixedStatementPattern fsp = new FixedStatementPattern(dummyVar, new Var("c-" + s, OWL.SAMEAS), subVar, cntxtVar); for (Resource sameAs : uris){ - NullableStatementImpl newStatement = new NullableStatementImpl(sameAs, OWL.SAMEAS, (Resource)subVar.getValue(), getVarValue(cntxtVar)); + NullableStatementImpl newStatement = new NullableStatementImpl(sameAs, OWL.SAMEAS, + subVar.getValue(), getVarValue(cntxtVar)); fsp.statements.add(newStatement); } InferJoin join = new InferJoin(fsp, origStatement); http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/81b99327/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SomeValuesFromVisitor.java ---------------------------------------------------------------------- diff --git a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SomeValuesFromVisitor.java b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SomeValuesFromVisitor.java index 16a315e..bd03f54 100644 --- a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SomeValuesFromVisitor.java +++ b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SomeValuesFromVisitor.java @@ -25,12 +25,12 @@ import java.util.UUID; import 
org.apache.rya.api.RdfCloudTripleStoreConfiguration; import org.apache.rya.api.utils.NullableStatementImpl; import org.apache.rya.rdftriplestore.utils.FixedStatementPattern; -import org.openrdf.model.Resource; -import org.openrdf.model.URI; -import org.openrdf.model.vocabulary.OWL; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.Var; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Resource; +import org.eclipse.rdf4j.model.vocabulary.OWL; +import org.eclipse.rdf4j.model.vocabulary.RDF; +import org.eclipse.rdf4j.query.algebra.StatementPattern; +import org.eclipse.rdf4j.query.algebra.Var; /** * Expands the query tree to account for any existential class expressions (property restrictions @@ -78,7 +78,7 @@ public class SomeValuesFromVisitor extends AbstractInferVisitor { // Only applies to type queries where the type is defined if (predVar != null && RDF.TYPE.equals(predVar.getValue()) && objVar != null && objVar.getValue() instanceof Resource) { final Resource typeToInfer = (Resource) objVar.getValue(); - Map<Resource, Set<URI>> relevantSvfRestrictions = inferenceEngine.getSomeValuesFromByRestrictionType(typeToInfer); + Map<Resource, Set<IRI>> relevantSvfRestrictions = inferenceEngine.getSomeValuesFromByRestrictionType(typeToInfer); if (!relevantSvfRestrictions.isEmpty()) { // We can infer the queried type if it is to a someValuesFrom restriction (or a // supertype of one), and the node in question (subjVar) is the subject of a triple @@ -97,7 +97,7 @@ public class SomeValuesFromVisitor extends AbstractInferVisitor { final FixedStatementPattern svfPropertyTypes = new FixedStatementPattern(svfPredVar, new Var(OWL.SOMEVALUESFROM.stringValue(), OWL.SOMEVALUESFROM), valueTypeVar); for (Resource svfValueType : relevantSvfRestrictions.keySet()) { - for (URI svfProperty : relevantSvfRestrictions.get(svfValueType)) { + for (IRI svfProperty : relevantSvfRestrictions.get(svfValueType)) { svfPropertyTypes.statements.add(new NullableStatementImpl(svfProperty, OWL.SOMEVALUESFROM, svfValueType)); } http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/81b99327/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SubClassOfVisitor.java ---------------------------------------------------------------------- diff --git a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SubClassOfVisitor.java b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SubClassOfVisitor.java index 1f2fd27..0d11918 100644 --- a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SubClassOfVisitor.java +++ b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SubClassOfVisitor.java @@ -24,11 +24,11 @@ import java.util.UUID; import org.apache.rya.api.RdfCloudTripleStoreConfiguration; import org.apache.rya.api.utils.NullableStatementImpl; import org.apache.rya.rdftriplestore.utils.FixedStatementPattern; -import org.openrdf.model.URI; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.model.vocabulary.RDFS; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.Var; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.vocabulary.RDF; +import org.eclipse.rdf4j.model.vocabulary.RDFS; +import org.eclipse.rdf4j.query.algebra.StatementPattern; +import org.eclipse.rdf4j.query.algebra.Var; /** * Class SubClassOfVisitor @@ -61,14 +61,14 @@ public class SubClassOfVisitor extends AbstractInferVisitor { // 
join.getProperties().put(InferConstants.INFERRED, InferConstants.TRUE); // node.replaceWith(join); - final URI subclassof_uri = (URI) objVar.getValue(); - final Collection<URI> parents = InferenceEngine.findParents(inferenceEngine.getSubClassOfGraph(), subclassof_uri); + final IRI subclassof_uri = (IRI) objVar.getValue(); + final Collection<IRI> parents = InferenceEngine.findParents(inferenceEngine.getSubClassOfGraph(), subclassof_uri); if (parents != null && parents.size() > 0) { final String s = UUID.randomUUID().toString(); final Var typeVar = new Var(s); final FixedStatementPattern fsp = new FixedStatementPattern(typeVar, new Var("c-" + s, RDFS.SUBCLASSOF), objVar, conVar); parents.add(subclassof_uri); - for (final URI u : parents) { + for (final IRI u : parents) { fsp.statements.add(new NullableStatementImpl(u, RDFS.SUBCLASSOF, subclassof_uri)); } http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/81b99327/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SubPropertyOfVisitor.java ---------------------------------------------------------------------- diff --git a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SubPropertyOfVisitor.java b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SubPropertyOfVisitor.java index f3a40ab..102c0b2 100644 --- a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SubPropertyOfVisitor.java +++ b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SubPropertyOfVisitor.java @@ -24,12 +24,12 @@ import java.util.UUID; import org.apache.rya.api.RdfCloudTripleStoreConfiguration; import org.apache.rya.api.utils.NullableStatementImpl; import org.apache.rya.rdftriplestore.utils.FixedStatementPattern; -import org.openrdf.model.URI; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.model.vocabulary.RDFS; -import org.openrdf.model.vocabulary.SESAME; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.Var; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.vocabulary.RDF; +import org.eclipse.rdf4j.model.vocabulary.RDFS; +import org.eclipse.rdf4j.model.vocabulary.SESAME; +import org.eclipse.rdf4j.query.algebra.StatementPattern; +import org.eclipse.rdf4j.query.algebra.Var; /** * All predicates are changed @@ -49,7 +49,7 @@ public class SubPropertyOfVisitor extends AbstractInferVisitor { final StatementPattern sp = node.clone(); final Var predVar = sp.getPredicateVar(); - final URI pred = (URI) predVar.getValue(); + final IRI pred = (IRI) predVar.getValue(); final String predNamespace = pred.getNamespace(); final Var objVar = sp.getObjectVar(); @@ -94,8 +94,8 @@ public class SubPropertyOfVisitor extends AbstractInferVisitor { // node.replaceWith(new StatementPattern(subjVar, vc, objVar, node.getContextVar())); // } - final URI subprop_uri = (URI) predVar.getValue(); - final Set<URI> parents = InferenceEngine.findParents(inferenceEngine.getSubPropertyOfGraph(), subprop_uri); + final IRI subprop_uri = (IRI) predVar.getValue(); + final Set<IRI> parents = InferenceEngine.findParents(inferenceEngine.getSubPropertyOfGraph(), subprop_uri); if (parents != null && parents.size() > 0) { final String s = UUID.randomUUID().toString(); final Var typeVar = new Var(s); @@ -103,7 +103,7 @@ public class SubPropertyOfVisitor extends AbstractInferVisitor { // fsp.statements.add(new NullableStatementImpl(subprop_uri, RDFS.SUBPROPERTYOF, subprop_uri)); //add self parents.add(subprop_uri); - for (final URI u : parents) { + for (final IRI u : parents) { fsp.statements.add(new 
NullableStatementImpl(u, RDFS.SUBPROPERTYOF, subprop_uri)); } http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/81b99327/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SymmetricPropertyVisitor.java ---------------------------------------------------------------------- diff --git a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SymmetricPropertyVisitor.java b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SymmetricPropertyVisitor.java index 7195ccd..c8e1a7d 100644 --- a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SymmetricPropertyVisitor.java +++ b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SymmetricPropertyVisitor.java @@ -19,16 +19,14 @@ package org.apache.rya.rdftriplestore.inference; * under the License. */ - - import org.apache.rya.api.RdfCloudTripleStoreConfiguration; -import org.openrdf.model.URI; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.model.vocabulary.RDFS; -import org.openrdf.model.vocabulary.SESAME; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.Union; -import org.openrdf.query.algebra.Var; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.vocabulary.RDF; +import org.eclipse.rdf4j.model.vocabulary.RDFS; +import org.eclipse.rdf4j.model.vocabulary.SESAME; +import org.eclipse.rdf4j.query.algebra.StatementPattern; +import org.eclipse.rdf4j.query.algebra.Union; +import org.eclipse.rdf4j.query.algebra.Var; /** * All predicates are changed @@ -48,7 +46,7 @@ public class SymmetricPropertyVisitor extends AbstractInferVisitor { StatementPattern sp = node.clone(); final Var predVar = sp.getPredicateVar(); - URI pred = (URI) predVar.getValue(); + IRI pred = (IRI) predVar.getValue(); String predNamespace = pred.getNamespace(); final Var objVar = sp.getObjectVar(); @@ -65,7 +63,7 @@ public class SymmetricPropertyVisitor extends AbstractInferVisitor { " { ?b ?pred ?a } */ - URI symmPropUri = (URI) predVar.getValue(); + IRI symmPropUri = (IRI) predVar.getValue(); if(inferenceEngine.isSymmetricProperty(symmPropUri)) { Var subjVar = sp.getSubjectVar(); Union union = new InferUnion();
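
The hunks above repeat one mechanical change: predicate values pulled out of a Var are now cast to org.eclipse.rdf4j.model.IRI instead of org.openrdf.model.URI, while the surrounding expansion logic (clone the StatementPattern, inspect the bound predicate, emit a Union of the original and the inferred branch) is left alone. Below is a minimal sketch of that post-migration idiom, not Rya's actual SymmetricPropertyVisitor: it substitutes a plain RDF4J Union for Rya's InferUnion and a caller-supplied Predicate<IRI> for InferenceEngine.isSymmetricProperty(...).

// Hedged sketch, not Rya code: plain RDF4J Union instead of InferUnion, and a
// caller-supplied check instead of InferenceEngine.isSymmetricProperty(...).
import java.util.function.Predicate;

import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.Value;
import org.eclipse.rdf4j.query.algebra.StatementPattern;
import org.eclipse.rdf4j.query.algebra.Union;
import org.eclipse.rdf4j.query.algebra.Var;

public final class SymmetricExpansionSketch {

    /** Rewrites { ?a ?pred ?b } into { ?a ?pred ?b } UNION { ?b ?pred ?a } when the
     *  bound predicate is symmetric. */
    public static void expand(final StatementPattern node, final Predicate<IRI> isSymmetric) {
        final StatementPattern sp = node.clone();
        final Var predVar = sp.getPredicateVar();
        final Value predValue = predVar.getValue();
        if (!(predValue instanceof IRI)) {
            return; // predicate is an unbound variable; nothing to expand
        }
        final IRI pred = (IRI) predValue; // was (URI) before the org.openrdf -> rdf4j switch
        if (isSymmetric.test(pred)) {
            // Clone the vars so the swapped branch does not re-parent sp's children.
            final Var ctx = sp.getContextVar() == null ? null : sp.getContextVar().clone();
            final StatementPattern swapped = new StatementPattern(
                    sp.getObjectVar().clone(), predVar.clone(), sp.getSubjectVar().clone(), ctx);
            node.replaceWith(new Union(sp, swapped));
        }
    }
}

InverseOfVisitor follows the same shape, except the swapped branch's predicate is the property returned by inferenceEngine.findInverseOf(pred) rather than pred itself.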

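SubClassOfVisitor and SubPropertyOfVisitor share a second idiom that the migration leaves structurally intact: seed a FixedStatementPattern with one NullableStatementImpl per candidate IRI (the queried class or property plus everything findParents returns), then join it with the original pattern; the join itself sits outside the hunks shown. A hedged sketch of just the seeding step, using only the constructors visible above -- the class name SubClassSeedSketch and the parameter names are illustrative, not Rya API:

// Hedged sketch of the seeding step only; mirrors the SubClassOfVisitor hunk above.
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import java.util.UUID;

import org.apache.rya.api.utils.NullableStatementImpl;
import org.apache.rya.rdftriplestore.utils.FixedStatementPattern;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.vocabulary.RDFS;
import org.eclipse.rdf4j.query.algebra.Var;

public final class SubClassSeedSketch {

    /**
     * @param queriedType the IRI bound to the object var of the original ?x rdf:type C pattern
     * @param candidates  the IRIs returned by InferenceEngine.findParents(...) for that type
     * @param objVar      the object var of the original pattern (holds queriedType)
     * @param conVar      the context var of the original pattern, possibly null
     */
    public static FixedStatementPattern seed(final IRI queriedType, final Collection<IRI> candidates,
            final Var objVar, final Var conVar) {
        final String s = UUID.randomUUID().toString();
        final Var typeVar = new Var(s); // fresh var that the rewritten type pattern will bind
        final FixedStatementPattern fsp =
                new FixedStatementPattern(typeVar, new Var("c-" + s, RDFS.SUBCLASSOF), objVar, conVar);
        final Set<IRI> all = new HashSet<>(candidates);
        all.add(queriedType); // the queried type trivially satisfies itself
        for (final IRI candidate : all) {
            fsp.statements.add(new NullableStatementImpl(candidate, RDFS.SUBCLASSOF, queriedType));
        }
        return fsp;
    }
}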