Modified: 
stanbol/trunk/enhancement-engines/nlp2rdf/src/main/java/org/apache/stanbol/enhancer/engines/nlp2rdf/engine/Nif20Helper.java
URL: 
http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/nlp2rdf/src/main/java/org/apache/stanbol/enhancer/engines/nlp2rdf/engine/Nif20Helper.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- 
stanbol/trunk/enhancement-engines/nlp2rdf/src/main/java/org/apache/stanbol/enhancer/engines/nlp2rdf/engine/Nif20Helper.java
 (original)
+++ 
stanbol/trunk/enhancement-engines/nlp2rdf/src/main/java/org/apache/stanbol/enhancer/engines/nlp2rdf/engine/Nif20Helper.java
 Tue May 17 22:20:49 2016
@@ -30,13 +30,13 @@ import java.util.EnumMap;
 import java.util.Iterator;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.Language;
+import org.apache.clerezza.commons.rdf.Language;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.commons.io.IOUtils;
 import org.apache.stanbol.enhancer.nlp.NlpAnnotations;
 import org.apache.stanbol.enhancer.nlp.model.AnalysedText;
@@ -62,9 +62,9 @@ public final class Nif20Helper {
 
     private Nif20Helper(){}
     
-    public static final Map<SpanTypeEnum,UriRef> SPAN_TYPE_TO_SSO_TYPE;
+    public static final Map<SpanTypeEnum,IRI> SPAN_TYPE_TO_SSO_TYPE;
     static {
-        Map<SpanTypeEnum,UriRef> mapping = new EnumMap<SpanTypeEnum,UriRef>(SpanTypeEnum.class);
+        Map<SpanTypeEnum,IRI> mapping = new EnumMap<SpanTypeEnum,IRI>(SpanTypeEnum.class);
         //mapping.put(SpanTypeEnum.Text, null);
         //mapping.put(SpanTypeEnum.TextSection, null);
         mapping.put(SpanTypeEnum.Sentence, Nif20.Sentence.getUri());
@@ -78,15 +78,15 @@ public final class Nif20Helper {
      * Concept representing the Phrase (e.g. {@link LexicalCategory#Noun} maps
      * to "<code>http://purl.org/olia/olia.owl#NounPhrase</code>").
      */
-    public static final Map<LexicalCategory,UriRef> LEXICAL_TYPE_TO_PHRASE_TYPE;
+    public static final Map<LexicalCategory,IRI> LEXICAL_TYPE_TO_PHRASE_TYPE;
     static {
        String olia = "http://purl.org/olia/olia.owl#";
-        Map<LexicalCategory,UriRef> mapping = new EnumMap<LexicalCategory,UriRef>(LexicalCategory.class);
-        mapping.put(LexicalCategory.Noun, new UriRef(olia+"NounPhrase"));
-        mapping.put(LexicalCategory.Verb, new UriRef(olia+"VerbPhrase"));
-        mapping.put(LexicalCategory.Adjective, new UriRef(olia+"AdjectivePhrase"));
-        mapping.put(LexicalCategory.Adverb, new UriRef(olia+"AdverbPhrase"));
-        mapping.put(LexicalCategory.Conjuction, new UriRef(olia+"ConjuctionPhrase"));
+        Map<LexicalCategory,IRI> mapping = new EnumMap<LexicalCategory,IRI>(LexicalCategory.class);
+        mapping.put(LexicalCategory.Noun, new IRI(olia+"NounPhrase"));
+        mapping.put(LexicalCategory.Verb, new IRI(olia+"VerbPhrase"));
+        mapping.put(LexicalCategory.Adjective, new IRI(olia+"AdjectivePhrase"));
+        mapping.put(LexicalCategory.Adverb, new IRI(olia+"AdverbPhrase"));
+        mapping.put(LexicalCategory.Conjuction, new IRI(olia+"ConjuctionPhrase"));
         LEXICAL_TYPE_TO_PHRASE_TYPE = Collections.unmodifiableMap(mapping);
     }    
     /**
@@ -97,10 +97,10 @@ public final class Nif20Helper {
      * @param end the end position or values &lt; 1 when open ended.
      * @return the NIF 2.0 Fragment URI
      * @throws IllegalArgumentException if <code>null</code> is parsed as base
-     * {@link UriRef} or the end position is &gt;=0 but &lt= the parsed start
+     * {@link IRI} or the end position is &gt;=0 but &lt= the parsed start
      * position.
      */
-    public static final UriRef getNifFragmentURI(UriRef base, int start,int end){
+    public static final IRI getNifFragmentURI(IRI base, int start,int end){
         if(base == null){
             throw new IllegalArgumentException("Base URI MUST NOT be NULL!");
         }
@@ -113,10 +113,10 @@ public final class Nif20Helper {
             }
             sb.append(end);
         } //else open ended ...
-        return new UriRef(sb.toString());
+        return new IRI(sb.toString());
     }
  
-    public static final UriRef getNifRFC5147URI(UriRef base, int start, int end){
+    public static final IRI getNifRFC5147URI(IRI base, int start, int end){
         if(base == null){
             throw new IllegalArgumentException("Base URI MUST NOT be NULL!");
         }
@@ -128,7 +128,7 @@ public final class Nif20Helper {
         if(end >= 0){
             sb.append(',').append(end);
         } //else select the whole string ...
-        return new UriRef(sb.toString());
+        return new IRI(sb.toString());
     }
     
     public static final int NIF_HASH_CONTEXT_LENGTH = 10;
@@ -136,7 +136,7 @@ public final class Nif20Helper {
     
     public static final Charset UTF8 = Charset.forName("UTF8");
     
-    public static final UriRef getNifHashURI(UriRef base, int start, int end, String text){
+    public static final IRI getNifHashURI(IRI base, int start, int end, String text){
         if(base == null){
             throw new IllegalArgumentException("Base URI MUST NOT be NULL!");
         }
@@ -161,7 +161,7 @@ public final class Nif20Helper {
         sb.append('_');
         sb.append(text.substring(start, 
             Math.min(end,start+NIF_HASH_MAX_STRING_LENGTH)));
-        return new UriRef(sb.toString());
+        return new IRI(sb.toString());
     }
 
     /**
@@ -212,7 +212,7 @@ public final class Nif20Helper {
      * @param segmentUri the URI of the resource representing the parsed 
      * annotated element in the graph
      */
-    public static void writePos(MGraph graph, Annotated annotated, UriRef segmentUri) {
+    public static void writePos(Graph graph, Annotated annotated, IRI segmentUri) {
         Value<PosTag> posTag = annotated.getAnnotation(NlpAnnotations.POS_ANNOTATION);
         if(posTag != null){
             if(posTag.value().isMapped()){
@@ -241,7 +241,7 @@ public final class Nif20Helper {
      * @param segmentUri
      * @param value
      */
-    private static void setOliaConf(MGraph graph, UriRef segmentUri,
+    private static void setOliaConf(Graph graph, IRI segmentUri,
             Value<?> value) {
         Iterator<Triple> existingConfValues = graph.filter(segmentUri, Nif20.oliaConf.getUri(), null);
         while(existingConfValues.hasNext()){
@@ -262,10 +262,10 @@ public final class Nif20Helper {
      * @param segmentUri the URI of the resource representing the parsed 
      * annotated element in the graph
      */
-    public static void writePhrase(MGraph graph, Annotated annotated, UriRef segmentUri) {
+    public static void writePhrase(Graph graph, Annotated annotated, IRI segmentUri) {
         Value<PhraseTag> phraseTag = annotated.getAnnotation(NlpAnnotations.PHRASE_ANNOTATION);
         if(phraseTag != null){
-            UriRef phraseTypeUri = LEXICAL_TYPE_TO_PHRASE_TYPE.get(phraseTag.value().getCategory());
+            IRI phraseTypeUri = LEXICAL_TYPE_TO_PHRASE_TYPE.get(phraseTag.value().getCategory());
             if(phraseTypeUri != null){ //add the oliaLink for the Phrase
                 graph.add(new TripleImpl(segmentUri, Nif20.oliaCategory.getUri(), phraseTypeUri));
                 setOliaConf(graph, segmentUri, phraseTag);
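Illustration (not part of the commit): a minimal usage sketch of the migrated helper. The base URI below is a made-up example, and the exact fragment string produced by getNifRFC5147URI is assembled inside the method (not visible in this hunk), so the printed values are only indicative.

    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.stanbol.enhancer.engines.nlp2rdf.engine.Nif20Helper;

    public class Nif20HelperUsageSketch {
        public static void main(String[] args) {
            // hypothetical base URI of a ContentItem
            IRI base = new IRI("urn:example:content-item");
            // RFC 5147 style fragment URI for the character span [0,26)
            IRI selection = Nif20Helper.getNifRFC5147URI(base, 0, 26);
            // passing end < 0 selects the whole string ("open ended")
            IRI context = Nif20Helper.getNifRFC5147URI(base, 0, -1);
            System.out.println(selection.getUnicodeString());
            System.out.println(context.getUnicodeString());
        }
    }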

Modified: 
stanbol/trunk/enhancement-engines/nlp2rdf/src/main/java/org/apache/stanbol/enhancer/engines/nlp2rdf/engine/Nif20MetadataEngine.java
URL: 
http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/nlp2rdf/src/main/java/org/apache/stanbol/enhancer/engines/nlp2rdf/engine/Nif20MetadataEngine.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- 
stanbol/trunk/enhancement-engines/nlp2rdf/src/main/java/org/apache/stanbol/enhancer/engines/nlp2rdf/engine/Nif20MetadataEngine.java
 (original)
+++ 
stanbol/trunk/enhancement-engines/nlp2rdf/src/main/java/org/apache/stanbol/enhancer/engines/nlp2rdf/engine/Nif20MetadataEngine.java
 Tue May 17 22:20:49 2016
@@ -26,12 +26,12 @@ import java.util.EnumSet;
 import java.util.Iterator;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.Language;
+import org.apache.clerezza.commons.rdf.Language;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.ConfigurationPolicy;
@@ -126,7 +126,7 @@ public class Nif20MetadataEngine extends
     
     private final Logger log = LoggerFactory.getLogger(Nif20MetadataEngine.class);
     //TODO: replace this with a reald ontology
-    private final static UriRef SENTIMENT_PROPERTY = new UriRef(NamespaceEnum.fise+"sentiment-value");
+    private final static IRI SENTIMENT_PROPERTY = new IRI(NamespaceEnum.fise+"sentiment-value");
     private final LiteralFactory lf = LiteralFactory.getInstance();
     
     /**
@@ -184,24 +184,24 @@ public class Nif20MetadataEngine extends
         if(words){
             activeTypes.add(SpanTypeEnum.Token);
         }
-        MGraph metadata = ci.getMetadata();
-        UriRef base = ci.getUri();
+        Graph metadata = ci.getMetadata();
+        IRI base = ci.getUri();
         ci.getLock().writeLock().lock();
         try {
             //write the context
-            UriRef text = writeSpan(metadata, base, at, language, at);
+            IRI text = writeSpan(metadata, base, at, language, at);
             metadata.add(new TripleImpl(text, Nif20.sourceUrl.getUri(), ci.getUri()));
             
             Iterator<Span> spans = at.getEnclosed(activeTypes);
-            UriRef sentence = null;
-            UriRef phrase = null;
-            UriRef word = null;
+            IRI sentence = null;
+            IRI phrase = null;
+            IRI word = null;
             boolean firstWordInSentence = true;
             while(spans.hasNext()){
                 Span span = spans.next();
                 //TODO: filter Spans based on additional requirements
                 //(1) write generic information about the span
-                UriRef current = writeSpan(metadata, base, at, language, span);
+                IRI current = writeSpan(metadata, base, at, language, span);
                 //write the context
                 metadata.add(new TripleImpl(current, Nif20.referenceContext.getUri(), text));
                 //(2) add the relations between the different spans
@@ -282,11 +282,11 @@ public class Nif20MetadataEngine extends
      * @param text the {@link AnalysedText}
      * @param language the {@link Language} or <code>null</code> if not known
      * @param span the {@link Span} to write.
-     * @return the {@link UriRef} representing the parsed {@link Span} in the
+     * @return the {@link IRI} representing the parsed {@link Span} in the
      * graph
      */
-    public UriRef writeSpan(MGraph graph, UriRef base, AnalysedText text, Language language, Span span){
-        UriRef segment = Nif20Helper.getNifRFC5147URI(base, span.getStart(), 
+    public IRI writeSpan(Graph graph, IRI base, AnalysedText text, Language language, Span span){
+        IRI segment = Nif20Helper.getNifRFC5147URI(base, span.getStart(), 
                 span.getType() == SpanTypeEnum.Text ? -1 : span.getEnd());
         if(!contextOnlyUriScheme || span.getType() == SpanTypeEnum.Text){
             graph.add(new TripleImpl(segment, RDF_TYPE, Nif20.RFC5147String.getUri()));
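Illustration (not part of the commit): the mutable graph type formerly called MGraph is now simply Graph, and triples are still added via TripleImpl. A minimal sketch using the renamed Clerezza classes; SimpleGraph stands in for whatever Graph implementation the ContentItem actually provides, and the subject/predicate URIs are placeholders.

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.Language;
    import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;

    public class GraphMigrationSketch {
        public static void main(String[] args) {
            Graph metadata = new SimpleGraph();                      // replaces SimpleMGraph/MGraph
            IRI segment = new IRI("urn:example:text#char=0,26");     // placeholder subject
            IRI isString = new IRI("urn:example:vocab#isString");    // placeholder predicate
            metadata.add(new TripleImpl(segment, isString,
                    new PlainLiteralImpl("This is a sample sentence.", new Language("en"))));
            System.out.println("triples in graph: " + metadata.size());
        }
    }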

Modified: 
stanbol/trunk/enhancement-engines/nlp2rdf/src/main/java/org/apache/stanbol/enhancer/engines/nlp2rdf/engine/Nlp2RdfMetadataEngine.java
URL: 
http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/nlp2rdf/src/main/java/org/apache/stanbol/enhancer/engines/nlp2rdf/engine/Nlp2RdfMetadataEngine.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- 
stanbol/trunk/enhancement-engines/nlp2rdf/src/main/java/org/apache/stanbol/enhancer/engines/nlp2rdf/engine/Nlp2RdfMetadataEngine.java
 (original)
+++ 
stanbol/trunk/enhancement-engines/nlp2rdf/src/main/java/org/apache/stanbol/enhancer/engines/nlp2rdf/engine/Nlp2RdfMetadataEngine.java
 Tue May 17 22:20:49 2016
@@ -28,11 +28,11 @@ import java.util.EnumSet;
 import java.util.Iterator;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.Language;
+import org.apache.clerezza.commons.rdf.Language;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.ConfigurationPolicy;
@@ -78,7 +78,7 @@ public class Nlp2RdfMetadataEngine exten
 
     private final Logger log = LoggerFactory.getLogger(Nlp2RdfMetadataEngine.class);
     //TODO: replace this with a reald ontology
-    private final static UriRef SENTIMENT_PROPERTY = new UriRef(NamespaceEnum.fise+"sentiment-value");
+    private final static IRI SENTIMENT_PROPERTY = new IRI(NamespaceEnum.fise+"sentiment-value");
     private final LiteralFactory lf = LiteralFactory.getInstance();
     
     /**
@@ -123,20 +123,20 @@ public class Nlp2RdfMetadataEngine exten
         if(words){
             activeTypes.add(SpanTypeEnum.Token);
         }
-        MGraph metadata = ci.getMetadata();
-        UriRef base = ci.getUri();
+        Graph metadata = ci.getMetadata();
+        IRI base = ci.getUri();
         ci.getLock().writeLock().lock();
         try {
             Iterator<Span> spans = at.getEnclosed(activeTypes);
-            UriRef sentence = null;
-            UriRef phrase = null;
-            UriRef word = null;
+            IRI sentence = null;
+            IRI phrase = null;
+            IRI word = null;
             boolean firstWordInSentence = true;
             while(spans.hasNext()){
                 Span span = spans.next();
                 //TODO: filter Spans based on additional requirements
                 //(1) write generic information about the span
-                UriRef current = writeSpan(metadata, base, at, language, span);
+                IRI current = writeSpan(metadata, base, at, language, span);
                 //(2) add the relations between the different spans
                 switch (span.getType()) {
                     case Sentence:

Modified: 
stanbol/trunk/enhancement-engines/opencalais/src/main/java/org/apache/stanbol/enhancer/engines/opencalais/impl/CalaisEntityOccurrence.java
URL: 
http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/opencalais/src/main/java/org/apache/stanbol/enhancer/engines/opencalais/impl/CalaisEntityOccurrence.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- 
stanbol/trunk/enhancement-engines/opencalais/src/main/java/org/apache/stanbol/enhancer/engines/opencalais/impl/CalaisEntityOccurrence.java
 (original)
+++ 
stanbol/trunk/enhancement-engines/opencalais/src/main/java/org/apache/stanbol/enhancer/engines/opencalais/impl/CalaisEntityOccurrence.java
 Tue May 17 22:20:49 2016
@@ -16,7 +16,7 @@
  */
 package org.apache.stanbol.enhancer.engines.opencalais.impl;
 
-import org.apache.clerezza.rdf.core.Resource;
+import org.apache.clerezza.commons.rdf.RDFTerm;
 
 /**
  * Stores the values extracted from the Calais entity data.
@@ -25,8 +25,8 @@ import org.apache.clerezza.rdf.core.Reso
  */
 public class CalaisEntityOccurrence {
 
-    public Resource id;
-    public Resource type;
+    public RDFTerm id;
+    public RDFTerm type;
     public String name;
     public Integer offset;
     public Integer length;
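Illustration (not part of the commit): Resource has been renamed to RDFTerm, the common supertype of IRIs, blank nodes and literals, so the id/type fields can still hold either a linked entity IRI or a literal. A small sketch of how such a field is typically narrowed; the OpenCalais type URI is only an example value.

    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.RDFTerm;
    import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;

    public class RdfTermSketch {
        // RDFTerm plays the role of the old Resource: it may be an IRI or a literal
        static String render(RDFTerm term) {
            if (term instanceof IRI) {
                return "<" + ((IRI) term).getUnicodeString() + ">";
            }
            return term.toString();
        }

        public static void main(String[] args) {
            System.out.println(render(new IRI("http://s.opencalais.com/1/type/em/e/Person"))); // example type IRI
            System.out.println(render(new PlainLiteralImpl("Barack Obama")));
        }
    }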

Modified: 
stanbol/trunk/enhancement-engines/opencalais/src/main/java/org/apache/stanbol/enhancer/engines/opencalais/impl/OpenCalaisEngine.java
URL: 
http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/opencalais/src/main/java/org/apache/stanbol/enhancer/engines/opencalais/impl/OpenCalaisEngine.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- 
stanbol/trunk/enhancement-engines/opencalais/src/main/java/org/apache/stanbol/enhancer/engines/opencalais/impl/OpenCalaisEngine.java
 (original)
+++ 
stanbol/trunk/enhancement-engines/opencalais/src/main/java/org/apache/stanbol/enhancer/engines/opencalais/impl/OpenCalaisEngine.java
 Tue May 17 22:20:49 2016
@@ -50,19 +50,19 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.access.TcManager;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.clerezza.rdf.core.serializedform.Serializer;
 import org.apache.clerezza.rdf.core.sparql.ParseException;
@@ -178,7 +178,7 @@ public class OpenCalaisEngine
     /**
      * a map for mapping Calais classes to other classes (e.g. from dbpedia)
      */
-    private Map<UriRef,UriRef> calaisTypeMap;
+    private Map<IRI,IRI> calaisTypeMap;
     
     /**
      * the default file containing type mappings. Key and value are separated by the regular expression ' ?= ?'.
@@ -213,11 +213,11 @@ public class OpenCalaisEngine
         this.calaisUrl = calaisUrl;
     }
 
-    public Map<UriRef,UriRef> getCalaisTypeMap() {
+    public Map<IRI,IRI> getCalaisTypeMap() {
       return calaisTypeMap;
     }
 
-    public void setCalaisTypeMap(Map<UriRef,UriRef> calaisTypeMap) {
+    public void setCalaisTypeMap(Map<IRI,IRI> calaisTypeMap) {
       this.calaisTypeMap = calaisTypeMap;
     }
 
@@ -245,7 +245,7 @@ public class OpenCalaisEngine
             continue;
           String[] entry = line.split("\\s*=\\s*");
           if (entry.length == 2) {
-            calaisTypeMap.put(new UriRef(entry[0]), new UriRef(entry[1]));
+            calaisTypeMap.put(new IRI(entry[0]), new IRI(entry[1]));
           }
         }
         reader.close();
@@ -271,7 +271,7 @@ public class OpenCalaisEngine
     }
 
     public void computeEnhancements(ContentItem ci) throws EngineException {
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
         if(contentPart == null){
             throw new IllegalStateException("No ContentPart with an supported 
Mimetype '"
                     + SUPPORTED_MIMETYPES+"' found for ContentItem 
"+ci.getUri()
@@ -286,7 +286,7 @@ public class OpenCalaisEngine
             throw new InvalidContentException(this, ci, e);
         }
 
-        MGraph calaisModel = getCalaisAnalysis(text, contentPart.getValue().getMimeType());
+        Graph calaisModel = getCalaisAnalysis(text, contentPart.getValue().getMimeType());
         if (calaisModel != null) {
             //Acquire a write lock on the ContentItem when adding the enhancements
             ci.getLock().writeLock().lock();
@@ -328,11 +328,11 @@ public class OpenCalaisEngine
             language = null;
         }
         //TODO create TextEnhancement (form, start, end, type?) and EntityAnnotation (id, name, type)
-        HashMap<Resource, UriRef> entityAnnotationMap = new HashMap<Resource, UriRef>();
+        HashMap<RDFTerm, IRI> entityAnnotationMap = new HashMap<RDFTerm, IRI>();
         for (CalaisEntityOccurrence occ : occs) {
-            UriRef textAnnotation = EnhancementEngineHelper.createTextEnhancement(
+            IRI textAnnotation = EnhancementEngineHelper.createTextEnhancement(
                     ci, this);
-            MGraph model = ci.getMetadata();
+            Graph model = ci.getMetadata();
             model.add(new TripleImpl(textAnnotation, DC_TYPE, occ.type));
             // for autotagger use the name instead of the matched term (that might be a pronoun!)
             if (onlyNERMode) {
@@ -360,7 +360,7 @@ public class OpenCalaisEngine
                 entityAnnotationMap.put(occ.id,textAnnotation);
                 }
                 else {
-//                UriRef entityAnnotation = EnhancementEngineHelper.createEntityEnhancement(ci, this);
+//                IRI entityAnnotation = EnhancementEngineHelper.createEntityEnhancement(ci, this);
 //                entityAnnotationMap.put(occ.id, entityAnnotation);
 //                model.add(new TripleImpl(entityAnnotation, DC_RELATION, textAnnotation));
 //                model.add(new TripleImpl(entityAnnotation, ENHANCER_ENTITY_LABEL, occ.name));
@@ -372,15 +372,15 @@ public class OpenCalaisEngine
     }
 
     /**
-     * Retrieves the annotations from OpenCalais as RDF/XML. From that an MGraph is created.
+     * Retrieves the annotations from OpenCalais as RDF/XML. From that a Graph is created.
      *
      * @param text the text to send to OpenCalais
      *
-     * @return an MGraph with all annotations
+     * @return a Graph with all annotations
      *
      * @throws EngineException
      */
-    public MGraph getCalaisAnalysis(String text, String mimeType) throws EngineException {
+    public Graph getCalaisAnalysis(String text, String mimeType) throws EngineException {
         if (mimeType.equals("text/plain")) {
             mimeType = "text/raw";
         }
@@ -395,7 +395,7 @@ public class OpenCalaisEngine
                 ">" +
                 "</c:processingDirectives>" +
                 "</c:params>";
-        MGraph model = null;
+        Graph model = null;
         try {
             StringBuilder postParams = new StringBuilder();
             postParams
@@ -426,18 +426,18 @@ public class OpenCalaisEngine
     }
 
     /**
-     * Parses an InputStream of RDF data and produces an MGraph from them
+     * Parses an InputStream of RDF data and produces a Graph from them
      *
      * @param in The InputStream of RDF data
      * @param format the format of the RDF data
      *
-     * @return the resulting MGraph or null if the RDF serialization format is not supported by the parser
+     * @return the resulting Graph or null if the RDF serialization format is not supported by the parser
      */
-    public MGraph readModel(InputStream in, String format) {
+    public Graph readModel(InputStream in, String format) {
         Parser parser = Parser.getInstance();
         if (parser.getSupportedFormats().contains(format)) {
-            Graph graph = parser.parse(in, format);
-            MGraph model = new SimpleMGraph(graph);
+            ImmutableGraph graph = parser.parse(in, format);
+            Graph model = new SimpleGraph(graph);
             return model;
         } else {
             log.warn("Unsupported RDF format: {}\nSupported RDF formats: {}",
@@ -450,13 +450,13 @@ public class OpenCalaisEngine
      * Extracts the relevant entity information from the Calais RDF data.
      * The entities and the relted information is extracted by a Sparql query.
      *
-     * @param model the MGraph representing the Calais data
+     * @param model the Graph representing the Calais data
      *
      * @return a Collection of entity information
      * @throws EngineException on a {@link ParseException} while processing the
      * Sparql query.
      */
-    public Collection<CalaisEntityOccurrence> queryModel(MGraph model) throws EngineException {
+    public Collection<CalaisEntityOccurrence> queryModel(Graph model) throws EngineException {
         //TODO extract also Geo info (latitude/longitude)?
         String query =
                 "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> " +
@@ -493,7 +493,7 @@ public class OpenCalaisEngine
             while (rs.hasNext()) {
                 SolutionMapping row = rs.next();
                 CalaisEntityOccurrence occ = new CalaisEntityOccurrence();
-                Resource disambiguated = row.get("did");
+                RDFTerm disambiguated = row.get("did");
                 occ.id = (disambiguated == null ? row.get("id") : disambiguated);
                 if (onlyNERMode) {
                     occ.type = row.get("type");
@@ -502,7 +502,7 @@ public class OpenCalaisEngine
                     occ.type = (disambiguated == null ? row.get("type") : row.get("dtype"));
                 }
                 if (calaisTypeMap != null) {
-                    UriRef mappedType = calaisTypeMap.get(occ.type);
+                    IRI mappedType = calaisTypeMap.get(occ.type);
                     if (mappedType != null) {
                         occ.type = mappedType;
                     }
@@ -618,7 +618,7 @@ public class OpenCalaisEngine
         String standAlone = (String)properties.get(CALAIS_NER_ONLY_MODE_KEY);
         setLicenseKey(license);
         setCalaisUrl(url);
-        calaisTypeMap = new HashMap<UriRef,UriRef>();
+        calaisTypeMap = new HashMap<IRI,IRI>();
         loadTypeMap(calaisTypeMapFile);
         onlyNERMode = Boolean.parseBoolean(standAlone);
         //      this.tcManager = TcManager.getInstance();
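Illustration (not part of the commit): the readModel() change above reflects the split between immutable and mutable graphs in the new Clerezza API. A minimal sketch of the parse-then-wrap pattern, assuming an RDF/XML parsing provider is available to Parser.getInstance(); the RDF/XML string is just an empty placeholder document.

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.ImmutableGraph;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
    import org.apache.clerezza.rdf.core.serializedform.Parser;

    public class ReadModelSketch {
        public static void main(String[] args) {
            String rdfXml = "<rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\"/>";
            InputStream in = new ByteArrayInputStream(rdfXml.getBytes());
            Parser parser = Parser.getInstance();
            // parse(..) now returns an ImmutableGraph ...
            ImmutableGraph parsed = parser.parse(in, "application/rdf+xml");
            // ... which is copied into a mutable Graph (the former MGraph/SimpleMGraph)
            Graph model = new SimpleGraph(parsed);
            System.out.println("parsed triples: " + model.size());
        }
    }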

Modified: 
stanbol/trunk/enhancement-engines/opencalais/src/test/java/org/apache/stanbol/enhancer/engines/opencalais/impl/TestOpenCalaisEngine.java
URL: 
http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/opencalais/src/test/java/org/apache/stanbol/enhancer/engines/opencalais/impl/TestOpenCalaisEngine.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- 
stanbol/trunk/enhancement-engines/opencalais/src/test/java/org/apache/stanbol/enhancer/engines/opencalais/impl/TestOpenCalaisEngine.java
 (original)
+++ 
stanbol/trunk/enhancement-engines/opencalais/src/test/java/org/apache/stanbol/enhancer/engines/opencalais/impl/TestOpenCalaisEngine.java
 Tue May 17 22:20:49 2016
@@ -26,11 +26,11 @@ import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.access.TcManager;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
 import org.apache.stanbol.enhancer.servicesapi.ContentItemFactory;
@@ -79,7 +79,7 @@ public class TestOpenCalaisEngine {
     @BeforeClass
     public static void oneTimeSetup() throws ConfigurationException {
         calaisExtractor = new OpenCalaisEngine();
-        calaisExtractor.setCalaisTypeMap(new HashMap<UriRef,UriRef>());
+        calaisExtractor.setCalaisTypeMap(new HashMap<IRI,IRI>());
         calaisExtractor.tcManager = TcManager.getInstance();
         if (TEST_LICENSE_KEY != null && TEST_LICENSE_KEY.matches("\\w+")) {
             calaisExtractor.setLicenseKey(TEST_LICENSE_KEY);
@@ -96,7 +96,7 @@ public class TestOpenCalaisEngine {
         String format = "application/rdf+xml";
         InputStream in = this.getClass().getClassLoader().getResourceAsStream(testFile);
         Assert.assertNotNull("failed to load resource " + testFile, in);
-        MGraph model = calaisExtractor.readModel(in, format);
+        Graph model = calaisExtractor.readModel(in, format);
         Assert.assertNotNull("model reader failed with format: " + format, 
model);
         Collection<CalaisEntityOccurrence> entities;
         try {
@@ -111,7 +111,7 @@ public class TestOpenCalaisEngine {
         //test the generation of the Enhancements
         ContentItem ci = wrapAsContentItem(TEST_TEXT);
         calaisExtractor.createEnhancements(entities, ci);
-        Map<UriRef,Resource> expectedValues = new HashMap<UriRef,Resource>();
+        Map<IRI,RDFTerm> expectedValues = new HashMap<IRI,RDFTerm>();
         expectedValues.put(Properties.ENHANCER_EXTRACTED_FROM, ci.getUri());
         expectedValues.put(Properties.DC_CREATOR, 
             LiteralFactory.getInstance().createTypedLiteral(
@@ -130,7 +130,7 @@ public class TestOpenCalaisEngine {
         ci.getMetadata().add(
             new TripleImpl(ci.getUri(), Properties.DC_LANGUAGE, LiteralFactory.getInstance()
                     .createTypedLiteral("en")));
-        MGraph model;
+        Graph model;
         try {
             model = calaisExtractor.getCalaisAnalysis(TEST_TEXT, "text/plain");
         } catch (EngineException e) {

Modified: 
stanbol/trunk/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/CustomNERModelEnhancementEngine.java
URL: 
http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/CustomNERModelEnhancementEngine.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- 
stanbol/trunk/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/CustomNERModelEnhancementEngine.java
 (original)
+++ 
stanbol/trunk/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/CustomNERModelEnhancementEngine.java
 Tue May 17 22:20:49 2016
@@ -32,7 +32,7 @@ import java.util.Set;
 
 import opennlp.tools.namefind.TokenNameFinderModel;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.ConfigurationPolicy;
 import org.apache.felix.scr.annotations.Property;
@@ -175,7 +175,7 @@ public class CustomNERModelEnhancementEn
                             dcTypeUri,o);
                         continue configs;
                     }
-                    this.config.setMappedType(namedEntityType,new UriRef(dcTypeUri));
+                    this.config.setMappedType(namedEntityType,new IRI(dcTypeUri));
                     log.info("  add mapping {} > {}",namedEntityType,dcTypeUri);
                 }
             }

Modified: 
stanbol/trunk/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/NEREngineConfig.java
URL: 
http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/NEREngineConfig.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- 
stanbol/trunk/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/NEREngineConfig.java
 (original)
+++ 
stanbol/trunk/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/NEREngineConfig.java
 Tue May 17 22:20:49 2016
@@ -27,7 +27,7 @@ import java.util.Set;
 import java.util.TreeMap;
 import java.util.concurrent.CopyOnWriteArrayList;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.commons.opennlp.OpenNLP;
 import org.apache.stanbol.enhancer.nlp.model.tag.TagSet;
 import org.apache.stanbol.enhancer.nlp.ner.NerTag;
@@ -39,10 +39,10 @@ public class NEREngineConfig {
      * Default mapping for Concept types to dc:type values added for
      * TextAnnotations.
      */
-    public static final Map<String,UriRef> DEFAULT_ENTITY_TYPE_MAPPINGS;
+    public static final Map<String,IRI> DEFAULT_ENTITY_TYPE_MAPPINGS;
     
     static { //the default mappings for the default NER types
-        Map<String,UriRef> mappings = new TreeMap<String,UriRef>();
+        Map<String,IRI> mappings = new TreeMap<String,IRI>();
         mappings.put("person", OntologicalClasses.DBPEDIA_PERSON);
         mappings.put("location", OntologicalClasses.DBPEDIA_PLACE);
         mappings.put("organization", OntologicalClasses.DBPEDIA_ORGANISATION);
@@ -51,7 +51,7 @@ public class NEREngineConfig {
     
     /**
      * Holds the configured {@link NerTag}s - the mappings from the
-     * named entity name to the {@link UriRef} type used for the
+     * named entity name to the {@link IRI} type used for the
      * <code>dc:type</code> value for <code>fise:TextAnnotation</code>s
      */
     private TagSet<NerTag> nerTagSet = new TagSet<NerTag>("NER TagSet");
@@ -70,7 +70,7 @@ public class NEREngineConfig {
     private String defaultLanguage;
     
     public NEREngineConfig(){
-        for(Entry<String,UriRef> mapping : DEFAULT_ENTITY_TYPE_MAPPINGS.entrySet()){
+        for(Entry<String,IRI> mapping : DEFAULT_ENTITY_TYPE_MAPPINGS.entrySet()){
             nerTagSet.addTag(new NerTag(mapping.getKey(), mapping.getValue()));
         }
     }
@@ -171,7 +171,7 @@ public class NEREngineConfig {
      * @throws IllegalArgumentException if the parsed NamedEntity
      * type is <code>null</code> or an empty String.
      */
-    public void setMappedType(String namedEntityType,UriRef dcType){
+    public void setMappedType(String namedEntityType,IRI dcType){
         if(namedEntityType != null && !namedEntityType.isEmpty()){
             nerTagSet.addTag(new NerTag(namedEntityType, dcType));
         } else {
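Illustration (not part of the commit): with this change the NER type mappings are keyed and valued by the commons-rdf IRI class. A short sketch of configuring a custom mapping, mirroring what the unit test further below sets up; the GRO#DNA URI is taken from that test.

    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.stanbol.enhancer.engines.opennlp.impl.NEREngineConfig;

    public class NerTypeMappingSketch {
        public static void main(String[] args) {
            NEREngineConfig config = new NEREngineConfig();
            // map the "DNA" named entity type of a custom model to a dc:type IRI
            config.setMappedType("DNA", new IRI("http://www.bootstrep.eu/ontology/GRO#DNA"));
        }
    }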

Modified: 
stanbol/trunk/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/NEREngineCore.java
URL: 
http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/NEREngineCore.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- 
stanbol/trunk/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/NEREngineCore.java
 (original)
+++ 
stanbol/trunk/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/NEREngineCore.java
 Tue May 17 22:20:49 2016
@@ -46,12 +46,12 @@ import opennlp.tools.tokenize.Tokenizer;
 import opennlp.tools.util.InvalidFormatException;
 import opennlp.tools.util.Span;
 
-import org.apache.clerezza.rdf.core.Language;
+import org.apache.clerezza.commons.rdf.Language;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.commons.lang.StringUtils;
 import org.apache.stanbol.commons.opennlp.OpenNLP;
 import org.apache.stanbol.commons.stanboltools.datafileprovider.DataFileProvider;
@@ -151,7 +151,7 @@ public abstract class NEREngineCore
             text = null;
         } else { //no AnalysedText with tokens ...
             //fallback to processing the plain text is still supported
-            Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
+            Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
             if(contentPart == null){
                 throw new IllegalStateException("No ContentPart with Mimetype 
'"
                     + TEXT_PLAIN_MIMETYPE+"' found for ContentItem 
"+ci.getUri()
@@ -240,7 +240,7 @@ public abstract class NEREngineCore
                                   StringUtils.abbreviate(at != null ? at.getSpan() : text, 100) });
         }
         LiteralFactory literalFactory = LiteralFactory.getInstance();
-        MGraph g = ci.getMetadata();
+        Graph g = ci.getMetadata();
         Map<String,List<NameOccurrence>> entityNames;
         if(at != null){
             entityNames = extractNameOccurrences(nameFinderModel, at, lang);
@@ -250,16 +250,16 @@ public abstract class NEREngineCore
         //lock the ContentItem while writing the RDF data for found Named Entities
         ci.getLock().writeLock().lock();
         try {
-            Map<String,UriRef> previousAnnotations = new LinkedHashMap<String,UriRef>();
+            Map<String,IRI> previousAnnotations = new LinkedHashMap<String,IRI>();
             for (Map.Entry<String,List<NameOccurrence>> nameInContext : entityNames.entrySet()) {
     
                 String name = nameInContext.getKey();
                 List<NameOccurrence> occurrences = nameInContext.getValue();
     
-                UriRef firstOccurrenceAnnotation = null;
+                IRI firstOccurrenceAnnotation = null;
     
                 for (NameOccurrence occurrence : occurrences) {
-                    UriRef textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
+                    IRI textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
                     g.add(new TripleImpl(textAnnotation, ENHANCER_SELECTED_TEXT, 
                         new PlainLiteralImpl(name, language)));
                     g.add(new TripleImpl(textAnnotation, ENHANCER_SELECTION_CONTEXT, 
@@ -283,7 +283,7 @@ public abstract class NEREngineCore
                     if (firstOccurrenceAnnotation == null) {
                         // check already extracted annotations to find a first most
                         // specific occurrence
-                        for (Map.Entry<String,UriRef> entry : previousAnnotations.entrySet()) {
+                        for (Map.Entry<String,IRI> entry : previousAnnotations.entrySet()) {
                             if (entry.getKey().contains(name)) {
                                 // we have found a most specific previous
                                 // occurrence, use it as subsumption target

Modified: 
stanbol/trunk/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/NameOccurrence.java
URL: 
http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/NameOccurrence.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- 
stanbol/trunk/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/NameOccurrence.java
 (original)
+++ 
stanbol/trunk/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/NameOccurrence.java
 Tue May 17 22:20:49 2016
@@ -16,13 +16,13 @@
  */
 package org.apache.stanbol.enhancer.engines.opennlp.impl;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 
 public class NameOccurrence {
 
     public final String name;
     
-    public final UriRef type;
+    public final IRI type;
     
     public final Integer start;
 
@@ -32,7 +32,7 @@ public class NameOccurrence {
 
     public final Double confidence;
 
-    public NameOccurrence(String name, Integer start, Integer end, UriRef type,
+    public NameOccurrence(String name, Integer start, Integer end, IRI type,
             String context, Double confidence) {
         this.name = name;
         this.type = type;

Modified: 
stanbol/trunk/enhancement-engines/opennlp/opennlp-ner/src/test/java/org/apache/stanbol/enhancer/engines/opennlp/impl/ClasspathDataFileProvider.java
URL: 
http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/opennlp/opennlp-ner/src/test/java/org/apache/stanbol/enhancer/engines/opennlp/impl/ClasspathDataFileProvider.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- 
stanbol/trunk/enhancement-engines/opennlp/opennlp-ner/src/test/java/org/apache/stanbol/enhancer/engines/opennlp/impl/ClasspathDataFileProvider.java
 (original)
+++ 
stanbol/trunk/enhancement-engines/opennlp/opennlp-ner/src/test/java/org/apache/stanbol/enhancer/engines/opennlp/impl/ClasspathDataFileProvider.java
 Tue May 17 22:20:49 2016
@@ -74,7 +74,7 @@ public class ClasspathDataFileProvider i
         // load default OpenNLP models from classpath (embedded in the defaultdata bundle)
         final String resourcePath = RESOURCE_BASE_PATH + filename;
         final URL dataFile = getClass().getClassLoader().getResource(resourcePath);
-        //log.debug("Resource {} found: {}", (in == null ? "NOT" : ""), resourcePath);
+        //log.debug("RDFTerm {} found: {}", (in == null ? "NOT" : ""), resourcePath);
         return dataFile;
     }
 }

Modified: 
stanbol/trunk/enhancement-engines/opennlp/opennlp-ner/src/test/java/org/apache/stanbol/enhancer/engines/opennlp/impl/TestNamedEntityExtractionEnhancementEngine.java
URL: 
http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/opennlp/opennlp-ner/src/test/java/org/apache/stanbol/enhancer/engines/opennlp/impl/TestNamedEntityExtractionEnhancementEngine.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- 
stanbol/trunk/enhancement-engines/opennlp/opennlp-ner/src/test/java/org/apache/stanbol/enhancer/engines/opennlp/impl/TestNamedEntityExtractionEnhancementEngine.java
 (original)
+++ 
stanbol/trunk/enhancement-engines/opennlp/opennlp-ner/src/test/java/org/apache/stanbol/enhancer/engines/opennlp/impl/TestNamedEntityExtractionEnhancementEngine.java
 Tue May 17 22:20:49 2016
@@ -27,11 +27,11 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.stanbol.commons.opennlp.OpenNLP;
 import org.apache.stanbol.commons.stanboltools.datafileprovider.DataFileProvider;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
@@ -87,7 +87,7 @@ public class TestNamedEntityExtractionEn
 
     public static ContentItem wrapAsContentItem(final String id,
             final String text, String language) throws IOException {
-       ContentItem ci =  ciFactory.createContentItem(new UriRef(id),new StringSource(text));
+       ContentItem ci =  ciFactory.createContentItem(new IRI(id),new StringSource(text));
        if(language != null){
            ci.getMetadata().add(new TripleImpl(ci.getUri(), DC_LANGUAGE, new PlainLiteralImpl(language)));
        }
@@ -151,12 +151,12 @@ public class TestNamedEntityExtractionEn
             throws EngineException, IOException {
         ContentItem ci = wrapAsContentItem("urn:test:content-item:single:sentence", SINGLE_SENTENCE,"en");
         nerEngine.computeEnhancements(ci);
-        Map<UriRef,Resource> expectedValues = new HashMap<UriRef,Resource>();
+        Map<IRI,RDFTerm> expectedValues = new HashMap<IRI,RDFTerm>();
         expectedValues.put(Properties.ENHANCER_EXTRACTED_FROM, ci.getUri());
         expectedValues.put(Properties.DC_CREATOR, LiteralFactory.getInstance().createTypedLiteral(nerEngine.getClass().getName()));
         //adding null as expected for confidence makes it a required property
         expectedValues.put(Properties.ENHANCER_CONFIDENCE, null);
-        MGraph g = ci.getMetadata();
+        Graph g = ci.getMetadata();
         int textAnnotationCount = validateAllTextAnnotations(g,SINGLE_SENTENCE,expectedValues);
         assertEquals(3, textAnnotationCount);
     }
@@ -167,16 +167,16 @@ public class TestNamedEntityExtractionEn
         nerEngine.config.getDefaultModelTypes().clear(); 
         //but instead a custom model provided by the test data
         nerEngine.config.addCustomNameFinderModel("en", "bionlp2004-DNA-en.bin");
-        nerEngine.config.setMappedType("DNA", new UriRef("http://www.bootstrep.eu/ontology/GRO#DNA"));
+        nerEngine.config.setMappedType("DNA", new IRI("http://www.bootstrep.eu/ontology/GRO#DNA"));
         nerEngine.computeEnhancements(ci);
-        Map<UriRef,Resource> expectedValues = new HashMap<UriRef,Resource>();
+        Map<IRI,RDFTerm> expectedValues = new HashMap<IRI,RDFTerm>();
         expectedValues.put(Properties.ENHANCER_EXTRACTED_FROM, ci.getUri());
         expectedValues.put(Properties.DC_CREATOR, LiteralFactory.getInstance().createTypedLiteral(nerEngine.getClass().getName()));
         //adding null as expected for confidence makes it a required property
         expectedValues.put(Properties.ENHANCER_CONFIDENCE, null);
         //and dc:type values MUST be the URI set as mapped type
-        expectedValues.put(Properties.DC_TYPE, new UriRef("http://www.bootstrep.eu/ontology/GRO#DNA"));
-        MGraph g = ci.getMetadata();
+        expectedValues.put(Properties.DC_TYPE, new IRI("http://www.bootstrep.eu/ontology/GRO#DNA"));
+        Graph g = ci.getMetadata();
         int textAnnotationCount = validateAllTextAnnotations(g,EHEALTH,expectedValues);
         assertEquals(7, textAnnotationCount);
     }
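Illustration (not part of the commit): the expected-value maps in these tests now pair property IRIs with arbitrary RDFTerm values (IRIs or literals). A minimal sketch; the dcterms URI is a stand-in for the Properties.DC_CREATOR constant used above, and the creator string is a made-up value.

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.RDFTerm;
    import org.apache.clerezza.rdf.core.LiteralFactory;

    public class ExpectedValuesSketch {
        public static void main(String[] args) {
            Map<IRI, RDFTerm> expectedValues = new HashMap<IRI, RDFTerm>();
            // typed literals implement RDFTerm, so they can be used as expected values
            expectedValues.put(new IRI("http://purl.org/dc/terms/creator"),
                    LiteralFactory.getInstance().createTypedLiteral("org.example.SomeEngine"));
            System.out.println(expectedValues);
        }
    }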

Modified: 
stanbol/trunk/enhancement-engines/opennlp/opennlp-pos/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/pos/services/OpenNlpPosTaggingEngine.java
URL: 
http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/opennlp/opennlp-pos/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/pos/services/OpenNlpPosTaggingEngine.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- 
stanbol/trunk/enhancement-engines/opennlp/opennlp-pos/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/pos/services/OpenNlpPosTaggingEngine.java
 (original)
+++ 
stanbol/trunk/enhancement-engines/opennlp/opennlp-pos/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/pos/services/OpenNlpPosTaggingEngine.java
 Tue May 17 22:20:49 2016
@@ -38,7 +38,7 @@ import opennlp.tools.sentdetect.Sentence
 import opennlp.tools.tokenize.Tokenizer;
 import opennlp.tools.util.Sequence;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.ConfigurationPolicy;
@@ -161,7 +161,7 @@ public class OpenNlpPosTaggingEngine ext
     @Override
     public int canEnhance(ContentItem ci) throws EngineException {
         // check if content is present
-        Map.Entry<UriRef,Blob> entry = NlpEngineHelper.getPlainText(this, ci, false);
+        Map.Entry<IRI,Blob> entry = NlpEngineHelper.getPlainText(this, ci, false);
         if(entry == null || entry.getValue() == null) {
             return CANNOT_ENHANCE;
         }

Modified: 
stanbol/trunk/enhancement-engines/opennlp/opennlp-sentence/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/sentence/impl/OpenNlpSentenceDetectionEngine.java
URL: 
http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/opennlp/opennlp-sentence/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/sentence/impl/OpenNlpSentenceDetectionEngine.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- 
stanbol/trunk/enhancement-engines/opennlp/opennlp-sentence/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/sentence/impl/OpenNlpSentenceDetectionEngine.java
 (original)
+++ 
stanbol/trunk/enhancement-engines/opennlp/opennlp-sentence/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/sentence/impl/OpenNlpSentenceDetectionEngine.java
 Tue May 17 22:20:49 2016
@@ -28,7 +28,7 @@ import opennlp.tools.sentdetect.Sentence
 import opennlp.tools.sentdetect.SentenceDetectorME;
 import opennlp.tools.sentdetect.SentenceModel;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.ConfigurationPolicy;
@@ -131,7 +131,7 @@ public class OpenNlpSentenceDetectionEng
     @Override
     public int canEnhance(ContentItem ci) throws EngineException {
         // check if content is present
-        Map.Entry<UriRef,Blob> entry = NlpEngineHelper.getPlainText(this, ci, false);
+        Map.Entry<IRI,Blob> entry = NlpEngineHelper.getPlainText(this, ci, false);
         if(entry == null || entry.getValue() == null) {
             return CANNOT_ENHANCE;
         }

Modified: 
stanbol/trunk/enhancement-engines/opennlp/opennlp-token/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/token/impl/OpenNlpTokenizerEngine.java
URL: 
http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/opennlp/opennlp-token/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/token/impl/OpenNlpTokenizerEngine.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- 
stanbol/trunk/enhancement-engines/opennlp/opennlp-token/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/token/impl/OpenNlpTokenizerEngine.java
 (original)
+++ 
stanbol/trunk/enhancement-engines/opennlp/opennlp-token/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/token/impl/OpenNlpTokenizerEngine.java
 Tue May 17 22:20:49 2016
@@ -30,7 +30,7 @@ import opennlp.tools.tokenize.Tokenizer;
 import opennlp.tools.tokenize.TokenizerME;
 import opennlp.tools.tokenize.TokenizerModel;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.ConfigurationPolicy;
@@ -137,7 +137,7 @@ public class OpenNlpTokenizerEngine exte
     @Override
     public int canEnhance(ContentItem ci) throws EngineException {
         // check if content is present
-        Map.Entry<UriRef,Blob> entry = NlpEngineHelper.getPlainText(this, ci, false);
+        Map.Entry<IRI,Blob> entry = NlpEngineHelper.getPlainText(this, ci, false);
         if(entry == null || entry.getValue() == null) {
             return CANNOT_ENHANCE;
         }

Modified: 
stanbol/trunk/enhancement-engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/RefactorEnhancementEngine.java
URL: 
http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/RefactorEnhancementEngine.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- 
stanbol/trunk/enhancement-engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/RefactorEnhancementEngine.java
 (original)
+++ 
stanbol/trunk/enhancement-engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/RefactorEnhancementEngine.java
 Tue May 17 22:20:49 2016
@@ -30,11 +30,11 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.access.TcProvider;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
@@ -44,7 +44,7 @@ import org.apache.felix.scr.annotations.
 import org.apache.felix.scr.annotations.Property;
 import org.apache.felix.scr.annotations.Reference;
 import org.apache.felix.scr.annotations.Service;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.commons.owl.transformation.OWLAPIToClerezzaConverter;
 import org.apache.stanbol.enhancer.engines.refactor.dereferencer.Dereferencer;
 import org.apache.stanbol.enhancer.engines.refactor.dereferencer.DereferencerImpl;
@@ -85,7 +85,6 @@ import org.osgi.service.component.Compon
 import org.osgi.service.component.ComponentFactory;
 import org.osgi.service.component.ComponentInstance;
 import org.semanticweb.owlapi.apibinding.OWLManager;
-import org.semanticweb.owlapi.model.IRI;
 import org.semanticweb.owlapi.model.OWLOntology;
 import org.semanticweb.owlapi.model.OWLOntologyCreationException;
 import org.semanticweb.owlapi.model.OWLOntologyID;
@@ -123,7 +122,7 @@ public class RefactorEnhancementEngine e
      */
     private class GraphContentSourceWithPhysicalIRI extends GraphContentInputSource {
 
-        public GraphContentSourceWithPhysicalIRI(InputStream content, IRI physicalIri) {
+        public GraphContentSourceWithPhysicalIRI(InputStream content, org.semanticweb.owlapi.model.IRI physicalIri) {
             super(content);
             bindPhysicalOrigin(Origin.create(physicalIri));
         }
@@ -238,31 +237,31 @@ public class RefactorEnhancementEngine e
         log.debug("Refactor enhancement job will run in session '{}'.", 
session.getID());
 
         // Retrieve and filter the metadata graph for entities recognized by the engines.
-        final MGraph metadataGraph = ci.getMetadata(), signaturesGraph = new IndexedMGraph();
+        final Graph metadataGraph = ci.getMetadata(), signaturesGraph = new IndexedGraph();
         // FIXME the Stanbol Enhancer vocabulary should be retrieved from somewhere in the enhancer API.
-        final UriRef ENHANCER_ENTITY_REFERENCE = new UriRef(
+        final IRI ENHANCER_ENTITY_REFERENCE = new IRI(
                 "http://fise.iks-project.eu/ontology/entity-reference";);
         Iterator<Triple> tripleIt = metadataGraph.filter(null, 
ENHANCER_ENTITY_REFERENCE, null);
         while (tripleIt.hasNext()) {
             // Get the entity URI
-            Resource obj = tripleIt.next().getObject();
-            if (!(obj instanceof UriRef)) {
-                log.warn("Invalid UriRef for entity reference {}. Skipping.", 
obj);
+            RDFTerm obj = tripleIt.next().getObject();
+            if (!(obj instanceof IRI)) {
+                log.warn("Invalid IRI for entity reference {}. Skipping.", 
obj);
                 continue;
             }
-            final String entityReference = ((UriRef) obj).getUnicodeString();
+            final String entityReference = ((IRI) obj).getUnicodeString();
             log.debug("Trying to resolve entity {}", entityReference);
 
             // Populate the entity signatures graph, by querying either the 
Entity Hub or the dereferencer.
             if (engineConfiguration.isEntityHubUsed()) {
-                MGraph result = populateWithEntity(entityReference, 
signaturesGraph);
+                Graph result = populateWithEntity(entityReference, 
signaturesGraph);
                 if (result != signaturesGraph && result != null) {
                     log.warn("Entity Hub query added triples to a new graph 
instead of populating the supplied one!"
                              + " New signatures will be discarded.");
                 }
             } else try {
-                OntologyInputSource<TripleCollection> source = new 
GraphContentSourceWithPhysicalIRI(
-                        dereferencer.resolve(entityReference), 
IRI.create(entityReference));
+                OntologyInputSource<Graph> source = new 
GraphContentSourceWithPhysicalIRI(
+                        dereferencer.resolve(entityReference), 
org.semanticweb.owlapi.model.IRI.create(entityReference));
                 signaturesGraph.addAll(source.getRootOntology());
             } catch (FileNotFoundException e) {
                 log.error("Failed to dereference entity " + entityReference + 
". Skipping.", e);
@@ -301,17 +300,17 @@ public class RefactorEnhancementEngine e
              * 
              * To perform the refactoring of the ontology to a given 
vocabulary we use the Stanbol Refactor.
              */
-            Recipe recipe = ruleStore.getRecipe(new 
UriRef(engineConfiguration.getRecipeId()));
+            Recipe recipe = ruleStore.getRecipe(new 
IRI(engineConfiguration.getRecipeId()));
 
             log.debug("Recipe {} contains {} rules.", recipe, 
recipe.getRuleList().size());
             log.debug("The ontology to be refactor is {}", ontology);
 
-            TripleCollection tc = refactorer.graphRefactoring(
-                
OWLAPIToClerezzaConverter.owlOntologyToClerezzaMGraph(ontology), recipe);
+            Graph tc = refactorer.graphRefactoring(
+                
OWLAPIToClerezzaConverter.owlOntologyToClerezzaGraph(ontology), recipe);
 
             /*
              * ontology = refactorer .ontologyRefactoring(ontology,
-             * IRI.create(engineConfiguration.getRecipeId()));
+             * 
org.semanticweb.owlapi.model.IRI.create(engineConfiguration.getRecipeId()));
              */
             /*
              * The newly generated ontology is converted to Clarezza format 
and then added os substitued to
@@ -346,7 +345,7 @@ public class RefactorEnhancementEngine e
             for (OWLOntologyID id : session.listManagedOntologies()) {
                 try {
                     String key = ontologyProvider.getKey(id.getOntologyIRI());
-                    ontologyProvider.getStore().deleteTripleCollection(new 
UriRef(key));
+                    ontologyProvider.getStore().deleteGraph(new IRI(key));
                 } catch (Exception ex) {
                     log.error("Failed to delete triple collection " + id, ex);
                     continue;
@@ -374,7 +373,7 @@ public class RefactorEnhancementEngine e
 
         // Deactivation clears all the rules and releases OntoNet resources.
 
-        UriRef recipeId = new UriRef(engineConfiguration.getRecipeId());
+        IRI recipeId = new IRI(engineConfiguration.getRecipeId());
         try {
             // step 1: get all the rules
             log.debug("Recipe {} and its associated rules will be removed from 
the rule store.", recipeId);
@@ -420,9 +419,9 @@ public class RefactorEnhancementEngine e
      *            {@link String}
      * @return the {@link OWLOntology} of the entity
      */
-    private MGraph populateWithEntity(String entityURI, MGraph target) {
+    private Graph populateWithEntity(String entityURI, Graph target) {
         log.debug("Requesting signature of entity {}", entityURI);
-        MGraph graph = target != null ? target : new IndexedMGraph();
+        Graph graph = target != null ? target : new IndexedGraph();
         // Query the Entity Hub
         Entity signature = referencedSiteManager.getEntity(entityURI);
         if (signature != null) {
@@ -451,7 +450,7 @@ public class RefactorEnhancementEngine e
      */
     private void initEngine(RefactorEnhancementEngineConf engineConfiguration) 
{
 
-        // IRI dulcifierScopeIRI = IRI.create((String) 
context.getProperties().get(SCOPE));
+        // IRI dulcifierScopeIRI = 
org.semanticweb.owlapi.model.IRI.create((String) 
context.getProperties().get(SCOPE));
         String scopeId = engineConfiguration.getScope();
 
         // Create or get the scope with the configured ID
@@ -470,11 +469,11 @@ public class RefactorEnhancementEngine e
         try {
             log.info("Will now load requested ontology into the core space of 
scope '{}'.", scopeId);
             OWLOntologyManager sharedManager = 
OWLManager.createOWLOntologyManager();
-            IRI physicalIRI = null;
+            org.semanticweb.owlapi.model.IRI physicalIRI = null;
             for (int o = 0; o < coreScopeOntologySet.length; o++) {
                 String url = coreScopeOntologySet[o];
                 try {
-                    physicalIRI = IRI.create(url);
+                    physicalIRI = org.semanticweb.owlapi.model.IRI.create(url);
                 } catch (Exception e) {
                     failed.add(url);
                 }
@@ -506,7 +505,7 @@ public class RefactorEnhancementEngine e
         String recipeId = engineConfiguration.getRecipeId();
         Recipe recipe = null;
         try {
-            recipe = ruleStore.createRecipe(new UriRef(recipeId), null);
+            recipe = ruleStore.createRecipe(new IRI(recipeId), null);
         } catch (AlreadyExistingRecipeException e1) {
             log.error("A recipe with ID {} already exists in the store.", 
recipeId);
         }
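
The hunks above keep only the Clerezza IRI imported and spell the OWL API
type out as org.semanticweb.owlapi.model.IRI wherever it is needed. A rough
sketch of how the two types can be bridged after this change (the helper
below is hypothetical, not part of this commit; it only assumes
getUnicodeString() on the Clerezza side, IRI.create(String) on the OWL API
side, and that the OWL API IRI prints its full form via toString()):

    import org.apache.clerezza.commons.rdf.IRI;

    // Hypothetical helper, not part of this commit.
    final class IriBridge {

        private IriBridge() {}

        // Clerezza IRI -> OWL API IRI, e.g. for Origin.create(..)
        static org.semanticweb.owlapi.model.IRI toOwlApi(IRI iri) {
            return org.semanticweb.owlapi.model.IRI.create(
                iri.getUnicodeString());
        }

        // OWL API IRI -> Clerezza IRI, e.g. for Graph#filter(..)
        static IRI toClerezza(org.semanticweb.owlapi.model.IRI iri) {
            return new IRI(iri.toString());
        }
    }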

Modified: 
stanbol/trunk/enhancement-engines/restful-langident/src/main/java/org/apache/stanbol/enhancer/engines/restful/langident/impl/RestfulLangidentEngine.java
URL: 
http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/restful-langident/src/main/java/org/apache/stanbol/enhancer/engines/restful/langident/impl/RestfulLangidentEngine.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- 
stanbol/trunk/enhancement-engines/restful-langident/src/main/java/org/apache/stanbol/enhancer/engines/restful/langident/impl/RestfulLangidentEngine.java
 (original)
+++ 
stanbol/trunk/enhancement-engines/restful-langident/src/main/java/org/apache/stanbol/enhancer/engines/restful/langident/impl/RestfulLangidentEngine.java
 Tue May 17 22:20:49 2016
@@ -40,10 +40,10 @@ import java.util.Map;
 import java.util.Map.Entry;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.commons.io.IOUtils;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
@@ -176,7 +176,7 @@ public class RestfulLangidentEngine exte
     @Override
     public int canEnhance(ContentItem ci) throws EngineException {
         // check if content is present
-        Map.Entry<UriRef,Blob> entry = getPlainText(this, ci, false);
+        Map.Entry<IRI,Blob> entry = getPlainText(this, ci, false);
         if(entry == null || entry.getValue() == null) {
             return CANNOT_ENHANCE;
         }
@@ -202,7 +202,7 @@ public class RestfulLangidentEngine exte
     @Override
     public void computeEnhancements(final ContentItem ci) throws 
EngineException {
         //get the plain text Blob
-        Map.Entry<UriRef,Blob> textBlob = getPlainText(this, ci, false);
+        Map.Entry<IRI,Blob> textBlob = getPlainText(this, ci, false);
         Blob blob = textBlob.getValue();
         //send the text to the server
         final HttpPost request = new HttpPost(serviceUrl);
@@ -230,7 +230,7 @@ public class RestfulLangidentEngine exte
                 throw RuntimeException.class.cast(e);
             }
         }
-        MGraph metadata = ci.getMetadata();
+        Graph metadata = ci.getMetadata();
         log.debug("Detected Languages for ContentItem {} and Blob {}");
         ci.getLock().writeLock().lock();
         try { //write TextAnnotations for the detected languages
@@ -238,7 +238,7 @@ public class RestfulLangidentEngine exte
                 // add a hypothesis
                 log.debug(" > {}@{}", suggestion.getLanguage(),
                     suggestion.hasProbability() ? suggestion.getProbability() 
: "-,--");
-                UriRef textEnhancement = 
EnhancementEngineHelper.createTextEnhancement(ci, this);
+                IRI textEnhancement = 
EnhancementEngineHelper.createTextEnhancement(ci, this);
                 metadata.add(new TripleImpl(textEnhancement, DC_LANGUAGE, new 
PlainLiteralImpl(suggestion.getLanguage())));
                 metadata.add(new TripleImpl(textEnhancement, DC_TYPE, 
DCTERMS_LINGUISTIC_SYSTEM));
                 if(suggestion.hasProbability()){
@@ -407,8 +407,8 @@ public class RestfulLangidentEngine exte
      * @throws IllegalStateException if exception is <code>true</code> and the
      * language could not be retrieved from the parsed {@link ContentItem}.
      */
-    public static Entry<UriRef,Blob> getPlainText(EnhancementEngine engine, 
ContentItem ci, boolean exception) {
-        Entry<UriRef,Blob> textBlob = ContentItemHelper.getBlob(
+    public static Entry<IRI,Blob> getPlainText(EnhancementEngine engine, 
ContentItem ci, boolean exception) {
+        Entry<IRI,Blob> textBlob = ContentItemHelper.getBlob(
             ci, singleton("text/plain"));
         if(textBlob != null) {
             return textBlob;
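
The getPlainText() change above only swaps the key type of the returned
entry from UriRef to IRI; reading the Blob itself is unchanged. A minimal
caller sketch, written as if it sat in the engine class above (the method
name plainText is a placeholder, and ContentItemHelper.getText(Blob) from
the servicesapi helpers is assumed):

    static String plainText(EnhancementEngine engine, ContentItem ci)
            throws IOException {
        Map.Entry<IRI,Blob> textBlob = getPlainText(engine, ci, false);
        if (textBlob == null) {
            return null;                         // no text/plain part
        }
        IRI contentPartUri = textBlob.getKey();  // now a Clerezza IRI
        Blob blob = textBlob.getValue();         // Blob handling unchanged
        return ContentItemHelper.getText(blob);
    }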

Modified: 
stanbol/trunk/enhancement-engines/restful-nlp/src/main/java/org/apache/stanbol/enhancer/engines/restful/nlp/impl/RestfulNlpAnalysisEngine.java
URL: 
http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/restful-nlp/src/main/java/org/apache/stanbol/enhancer/engines/restful/nlp/impl/RestfulNlpAnalysisEngine.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- 
stanbol/trunk/enhancement-engines/restful-nlp/src/main/java/org/apache/stanbol/enhancer/engines/restful/nlp/impl/RestfulNlpAnalysisEngine.java
 (original)
+++ 
stanbol/trunk/enhancement-engines/restful-nlp/src/main/java/org/apache/stanbol/enhancer/engines/restful/nlp/impl/RestfulNlpAnalysisEngine.java
 Tue May 17 22:20:49 2016
@@ -44,12 +44,12 @@ import java.util.Map;
 import java.util.Set;
 import java.util.StringTokenizer;
 
-import org.apache.clerezza.rdf.core.Language;
+import org.apache.clerezza.commons.rdf.Language;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.commons.io.IOUtils;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
@@ -179,23 +179,23 @@ public class RestfulNlpAnalysisEngine ex
     /**
      * The property used to write the sum of all positive classified words
      */
-    public static final UriRef POSITIVE_SENTIMENT_PROPERTY = new 
UriRef(NamespaceEnum.fise+"positive-sentiment");
+    public static final IRI POSITIVE_SENTIMENT_PROPERTY = new 
IRI(NamespaceEnum.fise+"positive-sentiment");
     /**
      * The property used to write the sum of all negative classified words
      */
-    public static final UriRef NEGATIVE_SENTIMENT_PROPERTY = new 
UriRef(NamespaceEnum.fise+"negative-sentiment");
+    public static final IRI NEGATIVE_SENTIMENT_PROPERTY = new 
IRI(NamespaceEnum.fise+"negative-sentiment");
     /**
      * The sentiment of the section (sum of positive and negative 
classifications)
      */
-    public static final UriRef SENTIMENT_PROPERTY = new 
UriRef(NamespaceEnum.fise+"sentiment");
+    public static final IRI SENTIMENT_PROPERTY = new 
IRI(NamespaceEnum.fise+"sentiment");
     /**
      * The dc:type value used for fise:TextAnnotations indicating a Sentiment
      */
-    public static final UriRef SENTIMENT_TYPE = new 
UriRef(NamespaceEnum.fise+"Sentiment");
+    public static final IRI SENTIMENT_TYPE = new 
IRI(NamespaceEnum.fise+"Sentiment");
     /**
      * The dc:Type value sued for the sentiment annotation of the whole 
document
      */
-    public static final UriRef DOCUMENT_SENTIMENT_TYPE = new 
UriRef(NamespaceEnum.fise+"DocumentSentiment");
+    public static final IRI DOCUMENT_SENTIMENT_TYPE = new 
IRI(NamespaceEnum.fise+"DocumentSentiment");
 
     private static final Map<String,Object> SERVICE_PROPERTIES;
     static {
@@ -254,7 +254,7 @@ public class RestfulNlpAnalysisEngine ex
     @Override
     public int canEnhance(ContentItem ci) throws EngineException {
         // check if content is present
-        Map.Entry<UriRef,Blob> entry = NlpEngineHelper.getPlainText(this, ci, 
false);
+        Map.Entry<IRI,Blob> entry = NlpEngineHelper.getPlainText(this, ci, 
false);
         if(entry == null || entry.getValue() == null) {
             return CANNOT_ENHANCE;
         }
@@ -340,7 +340,7 @@ public class RestfulNlpAnalysisEngine ex
 
             Iterator<Span> spans = 
at.getEnclosed(EnumSet.of(SpanTypeEnum.Sentence,SpanTypeEnum.Chunk));
             Sentence context = null;
-            MGraph metadata = ci.getMetadata();
+            Graph metadata = ci.getMetadata();
             Language lang = new Language(language);
             LiteralFactory lf = LiteralFactory.getInstance();
             ci.getLock().writeLock().lock();
@@ -354,7 +354,7 @@ public class RestfulNlpAnalysisEngine ex
                         default:
                             Value<NerTag> nerAnno = 
span.getAnnotation(NER_ANNOTATION);
                             if(nerAnno != null){
-                                UriRef ta = 
EnhancementEngineHelper.createTextEnhancement(ci, this);
+                                IRI ta = 
EnhancementEngineHelper.createTextEnhancement(ci, this);
                                 //add span related data
                                 metadata.add(new TripleImpl(ta, 
ENHANCER_SELECTED_TEXT, 
                                     new PlainLiteralImpl(span.getSpan(), 
lang)));
@@ -382,7 +382,7 @@ public class RestfulNlpAnalysisEngine ex
                                 Double sentiment = sentimentAnnotation.value();
 
                                                                //Create a 
fise:TextAnnotation for the sentiment
-                                UriRef ta = 
EnhancementEngineHelper.createTextEnhancement(ci, this);
+                                IRI ta = 
EnhancementEngineHelper.createTextEnhancement(ci, this);
                                 metadata.add(new TripleImpl(ta, ENHANCER_START,
                                         
lf.createTypedLiteral(span.getStart())));
                                 metadata.add(new TripleImpl(ta, ENHANCER_END,
@@ -393,7 +393,7 @@ public class RestfulNlpAnalysisEngine ex
                                 //add the generic dc:type used for all 
Sentiment annotation
                                 metadata.add(new TripleImpl(ta, DC_TYPE, 
SENTIMENT_TYPE));
                                                                //determine the 
specific dc:type for the sentiment annotation
-                                UriRef ssoType = 
NIFHelper.SPAN_TYPE_TO_SSO_TYPE.get(span.getType());
+                                IRI ssoType = 
NIFHelper.SPAN_TYPE_TO_SSO_TYPE.get(span.getType());
                                 if(ssoType != null){
                                     metadata.add(new TripleImpl(ta, DC_TYPE, 
ssoType));
                                 }
@@ -416,7 +416,7 @@ public class RestfulNlpAnalysisEngine ex
 
                 //Add the annotation for the overall sentiment of the document 
                 if ( sentimentCount > 0 ) {
-                UriRef ta = EnhancementEngineHelper.createTextEnhancement(ci, 
this);
+                IRI ta = EnhancementEngineHelper.createTextEnhancement(ci, 
this);
                     //calculate the average sentiment for a document
                     //TODO: Think on a better way to calculate a general 
sentiment value for a document.
                     metadata.add(new TripleImpl(ta, SENTIMENT_PROPERTY,
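
After the migration the write path above boils down to: a mutable Graph
from the ContentItem, IRI constants, and language-tagged PlainLiteralImpl
values added inside the write lock. A condensed sketch, assuming it runs
inside an engine method of the class above (span text and language tag are
placeholders):

    Graph metadata = ci.getMetadata();
    Language lang = new Language("en");          // placeholder language tag
    ci.getLock().writeLock().lock();
    try {
        IRI ta = EnhancementEngineHelper.createTextEnhancement(ci, this);
        metadata.add(new TripleImpl(ta, ENHANCER_SELECTED_TEXT,
            new PlainLiteralImpl("placeholder span text", lang)));
        metadata.add(new TripleImpl(ta, DC_TYPE, SENTIMENT_TYPE));
    } finally {
        ci.getLock().writeLock().unlock();
    }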

Modified: 
stanbol/trunk/enhancement-engines/sentiment-summarization/src/main/java/org/apache/stanbol/enhancer/engines/sentiment/summarize/SentimentSummarizationEngine.java
URL: 
http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/sentiment-summarization/src/main/java/org/apache/stanbol/enhancer/engines/sentiment/summarize/SentimentSummarizationEngine.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- 
stanbol/trunk/enhancement-engines/sentiment-summarization/src/main/java/org/apache/stanbol/enhancer/engines/sentiment/summarize/SentimentSummarizationEngine.java
 (original)
+++ 
stanbol/trunk/enhancement-engines/sentiment-summarization/src/main/java/org/apache/stanbol/enhancer/engines/sentiment/summarize/SentimentSummarizationEngine.java
 Tue May 17 22:20:49 2016
@@ -35,12 +35,12 @@ import java.util.Map.Entry;
 import java.util.NavigableMap;
 import java.util.TreeMap;
 
-import org.apache.clerezza.rdf.core.Language;
+import org.apache.clerezza.commons.rdf.Language;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.ConfigurationPolicy;
@@ -123,23 +123,23 @@ public class SentimentSummarizationEngin
     /**
      * The property used to write the sum of all positive classified words
      */
-    public static final UriRef POSITIVE_SENTIMENT_PROPERTY = new 
UriRef(NamespaceEnum.fise+"positive-sentiment");
+    public static final IRI POSITIVE_SENTIMENT_PROPERTY = new 
IRI(NamespaceEnum.fise+"positive-sentiment");
     /**
      * The property used to write the sum of all negative classified words
      */
-    public static final UriRef NEGATIVE_SENTIMENT_PROPERTY = new 
UriRef(NamespaceEnum.fise+"negative-sentiment");
+    public static final IRI NEGATIVE_SENTIMENT_PROPERTY = new 
IRI(NamespaceEnum.fise+"negative-sentiment");
     /**
      * The sentiment of the section (sum of positive and negative 
classifications)
      */
-    public static final UriRef SENTIMENT_PROPERTY = new 
UriRef(NamespaceEnum.fise+"sentiment");
+    public static final IRI SENTIMENT_PROPERTY = new 
IRI(NamespaceEnum.fise+"sentiment");
     /**
      * The dc:type value used for fise:TextAnnotations indicating a Sentiment
      */
-    public static final UriRef SENTIMENT_TYPE = new 
UriRef(NamespaceEnum.fise+"Sentiment");
+    public static final IRI SENTIMENT_TYPE = new 
IRI(NamespaceEnum.fise+"Sentiment");
     /**
      * The dc:Type value sued for the sentiment annotation of the whole 
document
      */
-    public static final UriRef DOCUMENT_SENTIMENT_TYPE = new 
UriRef(NamespaceEnum.fise+"DocumentSentiment");
+    public static final IRI DOCUMENT_SENTIMENT_TYPE = new 
IRI(NamespaceEnum.fise+"DocumentSentiment");
 
 
     private static final int DEFAULT_NEGATION_CONTEXT = 2;
@@ -590,7 +590,7 @@ public class SentimentSummarizationEngin
     
     private void writeSentimentEnhancements(ContentItem ci, 
List<SentimentPhrase> sentimentPhrases, AnalysedText at, Language lang) {
         // TODO Auto-generated method stub
-        MGraph metadata = ci.getMetadata();
+        Graph metadata = ci.getMetadata();
         Sentence currentSentence = null;
         final List<SentimentPhrase> sentencePhrases = new 
ArrayList<SentimentPhrase>();
         for(SentimentPhrase sentPhrase : sentimentPhrases){
@@ -606,7 +606,7 @@ public class SentimentSummarizationEngin
                 }
             }
             if(writeSentimentPhrases){
-                UriRef enh = createTextEnhancement(ci, this);
+                IRI enh = createTextEnhancement(ci, this);
                 String phraseText = 
at.getSpan().substring(sentPhrase.getStartIndex(), sentPhrase.getEndIndex());
                 metadata.add(new TripleImpl(enh, ENHANCER_SELECTED_TEXT, 
                     new PlainLiteralImpl(phraseText, lang)));
@@ -634,7 +634,7 @@ public class SentimentSummarizationEngin
                     lf.createTypedLiteral(sentPhrase.getSentiment())));        
       
                 //add the Sentiment type as well as the type of the SSO 
Ontology
                 metadata.add(new TripleImpl(enh, DC_TYPE, SENTIMENT_TYPE));
-                UriRef ssoType = 
NIFHelper.SPAN_TYPE_TO_SSO_TYPE.get(SpanTypeEnum.Chunk);
+                IRI ssoType = 
NIFHelper.SPAN_TYPE_TO_SSO_TYPE.get(SpanTypeEnum.Chunk);
                 if(ssoType != null){
                     metadata.add(new TripleImpl(enh, DC_TYPE, ssoType));
                 }
@@ -665,8 +665,8 @@ public class SentimentSummarizationEngin
         if(section == null || sectionPhrases == null || 
sectionPhrases.isEmpty()){
             return; //nothing to do
         }
-        UriRef enh = createTextEnhancement(ci, this);
-        MGraph metadata = ci.getMetadata();
+        IRI enh = createTextEnhancement(ci, this);
+        Graph metadata = ci.getMetadata();
         if(section.getType() == SpanTypeEnum.Sentence){
             //TODO use the fise:TextAnnotation new model for 
             //add start/end positions
@@ -708,7 +708,7 @@ public class SentimentSummarizationEngin
 
         //add the Sentiment type as well as the type of the SSO Ontology
         metadata.add(new TripleImpl(enh, DC_TYPE, SENTIMENT_TYPE));
-        UriRef ssoType = 
NIFHelper.SPAN_TYPE_TO_SSO_TYPE.get(section.getType());
+        IRI ssoType = NIFHelper.SPAN_TYPE_TO_SSO_TYPE.get(section.getType());
         if(ssoType != null){
             metadata.add(new TripleImpl(enh, DC_TYPE, ssoType));
         }
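
LiteralFactory stays in org.apache.clerezza.rdf.core, so numeric sentiment
values are still written as typed literals; only the node and graph types
change. A short sketch using the constants declared above (the sums are
placeholders, and the snippet is assumed to run inside an engine method of
this class):

    LiteralFactory lf = LiteralFactory.getInstance();
    Graph metadata = ci.getMetadata();
    IRI enh = createTextEnhancement(ci, this);
    double positive = 1.5, negative = -0.5;      // placeholder sums
    metadata.add(new TripleImpl(enh, POSITIVE_SENTIMENT_PROPERTY,
        lf.createTypedLiteral(positive)));
    metadata.add(new TripleImpl(enh, NEGATIVE_SENTIMENT_PROPERTY,
        lf.createTypedLiteral(negative)));
    metadata.add(new TripleImpl(enh, SENTIMENT_PROPERTY,
        lf.createTypedLiteral(positive + negative)));
    metadata.add(new TripleImpl(enh, DC_TYPE, SENTIMENT_TYPE));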

Modified: 
stanbol/trunk/enhancement-engines/smartcn-token/src/main/java/org/apache/stanbol/enhancer/engines/smartcn/impl/SmartcnSentenceEngine.java
URL: 
http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/smartcn-token/src/main/java/org/apache/stanbol/enhancer/engines/smartcn/impl/SmartcnSentenceEngine.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- 
stanbol/trunk/enhancement-engines/smartcn-token/src/main/java/org/apache/stanbol/enhancer/engines/smartcn/impl/SmartcnSentenceEngine.java
 (original)
+++ 
stanbol/trunk/enhancement-engines/smartcn-token/src/main/java/org/apache/stanbol/enhancer/engines/smartcn/impl/SmartcnSentenceEngine.java
 Tue May 17 22:20:49 2016
@@ -26,7 +26,7 @@ import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.commons.io.input.CharSequenceReader;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
@@ -110,7 +110,7 @@ public class SmartcnSentenceEngine exten
     @Override
     public int canEnhance(ContentItem ci) throws EngineException {
         // check if content is present
-        Map.Entry<UriRef,Blob> entry = NlpEngineHelper.getPlainText(this, ci, 
false);
+        Map.Entry<IRI,Blob> entry = NlpEngineHelper.getPlainText(this, ci, 
false);
         if(entry == null || entry.getValue() == null) {
             return CANNOT_ENHANCE;
         }

Modified: 
stanbol/trunk/enhancement-engines/smartcn-token/src/main/java/org/apache/stanbol/enhancer/engines/smartcn/impl/SmartcnTokenizerEngine.java
URL: 
http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/smartcn-token/src/main/java/org/apache/stanbol/enhancer/engines/smartcn/impl/SmartcnTokenizerEngine.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- 
stanbol/trunk/enhancement-engines/smartcn-token/src/main/java/org/apache/stanbol/enhancer/engines/smartcn/impl/SmartcnTokenizerEngine.java
 (original)
+++ 
stanbol/trunk/enhancement-engines/smartcn-token/src/main/java/org/apache/stanbol/enhancer/engines/smartcn/impl/SmartcnTokenizerEngine.java
 Tue May 17 22:20:49 2016
@@ -26,7 +26,7 @@ import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.commons.io.input.CharSequenceReader;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
@@ -110,7 +110,7 @@ public class SmartcnTokenizerEngine exte
     @Override
     public int canEnhance(ContentItem ci) throws EngineException {
         // check if content is present
-        Map.Entry<UriRef,Blob> entry = NlpEngineHelper.getPlainText(this, ci, 
false);
+        Map.Entry<IRI,Blob> entry = NlpEngineHelper.getPlainText(this, ci, 
false);
         if(entry == null || entry.getValue() == null) {
             return CANNOT_ENHANCE;
         }

Modified: 
stanbol/trunk/enhancement-engines/textannotationnewmodel/src/main/java/org/apache/stanbol/enhancer/engines/textannotationnewmodel/impl/TextAnnotationsNewModelEngine.java
URL: 
http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/textannotationnewmodel/src/main/java/org/apache/stanbol/enhancer/engines/textannotationnewmodel/impl/TextAnnotationsNewModelEngine.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- 
stanbol/trunk/enhancement-engines/textannotationnewmodel/src/main/java/org/apache/stanbol/enhancer/engines/textannotationnewmodel/impl/TextAnnotationsNewModelEngine.java
 (original)
+++ 
stanbol/trunk/enhancement-engines/textannotationnewmodel/src/main/java/org/apache/stanbol/enhancer/engines/textannotationnewmodel/impl/TextAnnotationsNewModelEngine.java
 Tue May 17 22:20:49 2016
@@ -35,15 +35,17 @@ import java.util.Iterator;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
+
 
-import org.apache.clerezza.rdf.core.Language;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.ConfigurationPolicy;
 import org.apache.felix.scr.annotations.Properties;
@@ -115,7 +117,7 @@ public class TextAnnotationsNewModelEngi
      */
     @Override
     public void computeEnhancements(ContentItem contentItem) throws 
EngineException {
-        Entry<UriRef,Blob> textBlob = getBlob(contentItem, supportedMimeTypes);
+        Entry<IRI,Blob> textBlob = getBlob(contentItem, supportedMimeTypes);
         if(textBlob == null){
             return;
         }
@@ -128,13 +130,13 @@ public class TextAnnotationsNewModelEngi
             throw new EngineException(this, contentItem, "Unable to read Plain 
Text Blob", e);
         }
         Set<Triple> addedTriples = new HashSet<Triple>();
-        MGraph metadata = contentItem.getMetadata();
+        Graph metadata = contentItem.getMetadata();
         //extract all the necessary information within a read lock
         contentItem.getLock().readLock().lock();
         try {
             Iterator<Triple> it = metadata.filter(null, RDF_TYPE, 
ENHANCER_TEXTANNOTATION);
             while(it.hasNext()){
-                NonLiteral ta = it.next().getSubject();
+                BlankNodeOrIRI ta = it.next().getSubject();
                 boolean hasPrefix = metadata.filter(ta, 
ENHANCER_SELECTION_PREFIX, null).hasNext();
                 boolean hasSuffix = metadata.filter(ta, 
ENHANCER_SELECTION_SUFFIX, null).hasNext();
                 boolean hasSelected = metadata.filter(ta, 
ENHANCER_SELECTED_TEXT, null).hasNext();

