Author: anuzzolese
Date: Thu Apr  7 16:03:52 2011
New Revision: 1089929

URL: http://svn.apache.org/viewvc?rev=1089929&view=rev
Log:
This code contains what was previously called the Dulcifier engine.
Now it is refactored to enhancer.engines.refactor and provides an engine which 
allows to transform the RDF graph returned by other engines to a target 
vocabulary.

Added:
    incubator/stanbol/trunk/enhancer/engines/refactor/README.txt
    incubator/stanbol/trunk/enhancer/engines/refactor/pom.xml
    incubator/stanbol/trunk/enhancer/engines/refactor/src/
    incubator/stanbol/trunk/enhancer/engines/refactor/src/main/
    incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/
    incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/
    incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/
    
incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/
    
incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/enhancer/
    
incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/
    
incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/
    
incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/RefactorEnhancementEngine.java
    
incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/dereferencer/
    
incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/dereferencer/Dereferencer.java
    
incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/dereferencer/IDereferencer.java
    incubator/stanbol/trunk/enhancer/engines/refactor/src/main/resources/
    
incubator/stanbol/trunk/enhancer/engines/refactor/src/main/resources/META-INF/
    
incubator/stanbol/trunk/enhancer/engines/refactor/src/main/resources/META-INF/default/
    
incubator/stanbol/trunk/enhancer/engines/refactor/src/main/resources/META-INF/default/seo_rules.sem
    incubator/stanbol/trunk/enhancer/engines/refactor/src/test/
    incubator/stanbol/trunk/enhancer/engines/refactor/src/test/java/
    incubator/stanbol/trunk/enhancer/engines/refactor/src/test/java/org/
    incubator/stanbol/trunk/enhancer/engines/refactor/src/test/java/org/apache/
    
incubator/stanbol/trunk/enhancer/engines/refactor/src/test/java/org/apache/stanbol/
    
incubator/stanbol/trunk/enhancer/engines/refactor/src/test/java/org/apache/stanbol/enhancer/
    
incubator/stanbol/trunk/enhancer/engines/refactor/src/test/java/org/apache/stanbol/enhancer/engines/
    
incubator/stanbol/trunk/enhancer/engines/refactor/src/test/java/org/apache/stanbol/enhancer/engines/refactor/
Modified:
    incubator/stanbol/trunk/enhancer/engines/refactor/   (props changed)

Propchange: incubator/stanbol/trunk/enhancer/engines/refactor/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Thu Apr  7 16:03:52 2011
@@ -0,0 +1,4 @@
+.classpath
+.settings
+.project
+target

Added: incubator/stanbol/trunk/enhancer/engines/refactor/README.txt
URL: 
http://svn.apache.org/viewvc/incubator/stanbol/trunk/enhancer/engines/refactor/README.txt?rev=1089929&view=auto
==============================================================================
--- incubator/stanbol/trunk/enhancer/engines/refactor/README.txt (added)
+++ incubator/stanbol/trunk/enhancer/engines/refactor/README.txt Thu Apr  7 
16:03:52 2011
@@ -0,0 +1,10 @@
+This enhancement engine requires:
+       - Stanbol Entityhub
+       - Stanbol Refactor
+       - Stanbol OntoNet
+
+It refactors the RDF graphs of recognized entities to a target vocabulary.
+The engine is provided with a default set of rules (a recipe) for the 
refactoring, which allows
+to produce an RDF graph according to the Google vocabulary. That default 
recipe allows to produce Google rich
+snippets. 
+

Added: incubator/stanbol/trunk/enhancer/engines/refactor/pom.xml
URL: 
http://svn.apache.org/viewvc/incubator/stanbol/trunk/enhancer/engines/refactor/pom.xml?rev=1089929&view=auto
==============================================================================
--- incubator/stanbol/trunk/enhancer/engines/refactor/pom.xml (added)
+++ incubator/stanbol/trunk/enhancer/engines/refactor/pom.xml Thu Apr  7 
16:03:52 2011
@@ -0,0 +1,220 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+        http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+  -->
+<project xmlns="http://maven.apache.org/POM/4.0.0"; 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
+    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/maven-v4_0_0.xsd";>
+
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+       <groupId>org.apache.stanbol</groupId>
+       <artifactId>stanbol-parent</artifactId>
+       <version>0.9-SNAPSHOT</version>
+       <relativePath>../../parent</relativePath>
+  </parent>
+
+  <groupId>org.apache.stanbol</groupId>
+  <artifactId>org.apache.stanbol.enhancer.engines.refactor</artifactId>
+  <packaging>bundle</packaging>
+
+  <name>Apache Stanbol Enhancer Enhancement Engine : Refactoring service</name>
+  <description>Implementation of an enhancement engine which refactors the 
output to a target vocabulary 
+  </description>
+
+  <inceptionYear>2010</inceptionYear>
+
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+  </properties>
+
+    <scm>
+        <connection>
+            
scm:svn:http://svn.apache.org/repos/asf/incubator/stanbol/trunk/enhancer/engines/geonames/
+        </connection>
+        <developerConnection>
+            
scm:svn:https://svn.apache.org/repos/asf/incubator/stanbol/trunk/enhancer/engines/geonames/
+        </developerConnection>
+        <url>http://incubator.apache.org/stanbol/</url>
+    </scm>
+
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.felix</groupId>
+        <artifactId>maven-bundle-plugin</artifactId>
+        <extensions>true</extensions>
+        <configuration>
+          <instructions>
+            <Export-Package>
+              
org.apache.stanbol.enhancer.engines.refactor;version=${pom.version}
+            </Export-Package>
+            <Import-Package>
+              !javax.xml.stream.*,
+              *
+            </Import-Package>
+            <!-- TODO those should be bundles! -->
+          </instructions>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.felix</groupId>
+        <artifactId>maven-scr-plugin</artifactId>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencies>
+    <!--  added temporary for the AutotaggingServlet -->
+    <dependency>
+      <groupId>javax.servlet</groupId>
+      <artifactId>servlet-api</artifactId>
+      <scope>provided</scope>
+    </dependency>
+       <dependency>
+         <groupId>org.json</groupId>
+         <artifactId>json</artifactId>
+         <version>20090211</version>
+      <scope>provided</scope>
+       </dependency>    
+    <!--  end -->
+    
+    <!-- Enhancer Service API -->
+    <dependency>
+      <groupId>org.apache.stanbol</groupId>
+      <artifactId>org.apache.stanbol.enhancer.servicesapi</artifactId>
+    </dependency>
+    
+    <!-- EntityHub -->
+    <dependency>
+      <groupId>org.apache.stanbol</groupId>
+      <artifactId>org.apache.stanbol.entityhub.servicesapi</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.stanbol</groupId>
+       <artifactId>org.apache.stanbol.entityhub.model.clerezza</artifactId>
+    </dependency>
+        
+    <dependency>
+      <groupId>org.apache.felix</groupId>
+      <artifactId>org.apache.felix.scr.annotations</artifactId>
+      <version>1.2.0</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.clerezza</groupId>
+      <artifactId>org.apache.clerezza.rdf.core</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+      <scope>provided</scope>
+    </dependency>
+
+    <!-- KReS dependencies -->
+    <dependency>
+      <groupId>org.apache.stanbol</groupId>
+      <artifactId>org.apache.stanbol.ontologymanager.ontonet</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.felix</groupId>
+          <artifactId>org.apache.felix.scr.annotations</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.hp.hpl.jena</groupId>
+          <artifactId>arq</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.hp.hpl.jena</groupId>
+          <artifactId>jena</artifactId>
+        </exclusion>
+      </exclusions>
+      <version>0.9-SNAPSHOT</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.stanbol</groupId>
+      <artifactId>org.apache.stanbol.owl</artifactId>
+      <scope>provided</scope>
+      <version>0.9-SNAPSHOT</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.stanbol</groupId>
+      <artifactId>org.apache.stanbol.reasoners.base</artifactId>
+      <scope>provided</scope>
+      <version>0.9-SNAPSHOT</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.stanbol</groupId>
+      <artifactId>org.apache.stanbol.rules.base</artifactId>
+      <scope>provided</scope>
+      <version>0.9-SNAPSHOT</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.stanbol</groupId>
+      <artifactId>org.apache.stanbol.rules.manager</artifactId>
+      <scope>provided</scope>
+      <version>0.9-SNAPSHOT</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.stanbol</groupId>
+      <artifactId>org.apache.stanbol.rules.refactor</artifactId>
+      <scope>provided</scope>
+      <version>0.9-SNAPSHOT</version>
+    </dependency>
+    <dependency>
+      <groupId>com.hp.hpl.jena</groupId>
+      <artifactId>jena</artifactId>
+      <version>2.6.2</version>
+      <scope>provided</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-log4j12</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>com.hp.hpl.jena</groupId>
+      <artifactId>arq</artifactId>
+      <version>2.8.2</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-log4j12</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jmx</groupId>
+          <artifactId>jmxri</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>javax.jms</groupId>
+          <artifactId>jms</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jdmk</groupId>
+          <artifactId>jmxtools</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+  </dependencies>
+
+</project>

Added: 
incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/RefactorEnhancementEngine.java
URL: 
http://svn.apache.org/viewvc/incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/RefactorEnhancementEngine.java?rev=1089929&view=auto
==============================================================================
--- 
incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/RefactorEnhancementEngine.java
 (added)
+++ 
incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/RefactorEnhancementEngine.java
 Thu Apr  7 16:03:52 2011
@@ -0,0 +1,693 @@
+package org.apache.stanbol.enhancer.engines.refactor;
+
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.clerezza.rdf.core.MGraph;
+import org.apache.clerezza.rdf.core.Resource;
+import org.apache.clerezza.rdf.core.Triple;
+import org.apache.clerezza.rdf.core.TripleCollection;
+import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
+import org.apache.felix.scr.annotations.Component;
+import org.apache.felix.scr.annotations.Property;
+import org.apache.felix.scr.annotations.Reference;
+import org.apache.felix.scr.annotations.Service;
+import org.apache.stanbol.enhancer.engines.refactor.dereferencer.Dereferencer;
+import org.apache.stanbol.enhancer.engines.refactor.dereferencer.IDereferencer;
+import org.apache.stanbol.enhancer.servicesapi.ContentItem;
+import org.apache.stanbol.enhancer.servicesapi.EngineException;
+import org.apache.stanbol.enhancer.servicesapi.EnhancementEngine;
+import org.apache.stanbol.enhancer.servicesapi.ServiceProperties;
+import org.apache.stanbol.entityhub.model.clerezza.RdfRepresentation;
+import org.apache.stanbol.entityhub.model.clerezza.RdfValueFactory;
+import org.apache.stanbol.entityhub.servicesapi.model.Representation;
+import org.apache.stanbol.entityhub.servicesapi.model.Sign;
+import org.apache.stanbol.entityhub.servicesapi.site.ReferencedSiteManager;
+import org.apache.stanbol.ontologymanager.ontonet.api.DuplicateIDException;
+import org.apache.stanbol.ontologymanager.ontonet.api.ONManager;
+import org.apache.stanbol.ontologymanager.ontonet.api.io.OntologyInputSource;
+import org.apache.stanbol.ontologymanager.ontonet.api.ontology.OntologyScope;
+import 
org.apache.stanbol.ontologymanager.ontonet.api.ontology.OntologyScopeFactory;
+import org.apache.stanbol.ontologymanager.ontonet.api.ontology.OntologySpace;
+import 
org.apache.stanbol.ontologymanager.ontonet.api.ontology.OntologySpaceFactory;
+import org.apache.stanbol.ontologymanager.ontonet.api.ontology.ScopeRegistry;
+import 
org.apache.stanbol.ontologymanager.ontonet.api.ontology.UnmodifiableOntologySpaceException;
+import org.apache.stanbol.ontologymanager.ontonet.api.session.Session;
+import org.apache.stanbol.ontologymanager.ontonet.api.session.SessionManager;
+import org.apache.stanbol.owl.trasformation.OWLAPIToClerezzaConverter;
+import org.apache.stanbol.reasoners.base.api.Reasoner;
+import org.apache.stanbol.rules.base.api.NoSuchRecipeException;
+import org.apache.stanbol.rules.base.api.Recipe;
+import org.apache.stanbol.rules.base.api.Rule;
+import org.apache.stanbol.rules.base.api.RuleStore;
+import org.apache.stanbol.rules.base.api.util.RuleList;
+import org.apache.stanbol.rules.refactor.api.Refactorer;
+import org.apache.stanbol.rules.refactor.api.RefactoringException;
+import org.osgi.service.component.ComponentContext;
+import org.semanticweb.owlapi.apibinding.OWLManager;
+import org.semanticweb.owlapi.model.IRI;
+import org.semanticweb.owlapi.model.OWLOntology;
+import org.semanticweb.owlapi.model.OWLOntologyCreationException;
+import org.semanticweb.owlapi.model.OWLOntologyManager;
+import org.semanticweb.owlapi.model.OWLOntologySetProvider;
+import org.semanticweb.owlapi.util.OWLOntologyMerger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * 
+ * This is an engine to post-process the enhancements. Its main goal is to
+ * refactor the RDF produced by the enhancement, applying some vocabulary 
related
+ * to a specific task.
+ * 
+ * To do that, it exploits a Refactor recipe and an ontology scope of OntoNet.
+ * 
+ * The first implementation is targeted at the SEO use case: it retrieves data by
+ * dereferencing the entities, includes the DBpedia ontology, and refactors the
+ * data using the Google rich snippets vocabulary.
+ * 
+ * @author andrea.nuzzolese
+ * 
+ */
+
+@Component(immediate = true, metatype = true)
+@Service(EnhancementEngine.class)
+public class RefactorEnhancementEngine implements EnhancementEngine, 
ServiceProperties {
+
+       /*
+        * TODO This are the scope and recipe IDs to be used by this 
implementation
+        * In future implementation this will be configurable
+        */
+    
+       @Property(value = 
"http://http://incubator.apache.org/stanbol/enhancer/engines/refactor/scope";)
+       public static final String SCOPE = "engine.refactor.scope";
+
+       @Property(value = "")
+       public static final String RECIPE_URI = "engine.refactor.recipe";
+
+    
@Property(value={"http://ontologydesignpatterns.org/ont/iks/kres/dbpedia_demo.owl",""},
 cardinality=1000, description="To fix a set of resolvable ontology URIs for 
the scope's ontologies.")
+    public static final String SCOPE_CORE_ONTOLOGY 
="engine.refactor.scope.core.ontology";
+
+    @Property(value="true",description="If true: the previously generated RDF 
is deleted and substituted with the new one. If false: the new one is appended 
to the old RDF. Possible value: true or false.")
+    public static final String APPEND_OTHER_ENHANCEMENT_GRAPHS = 
"engine.refactor.append.graphs";
+
+       @Reference
+       ONManager onManager;
+
+       @Reference
+       ReferencedSiteManager referencedSiteManager;
+
+       @Reference
+       RuleStore ruleStore;
+
+       @Reference
+       Reasoner reasoner;
+
+       @Reference
+       Refactorer refactorer;
+
+       private OntologyScope scope;
+       private IRI recipeIRI;
+        private boolean graph_append;
+
+       private final Logger log = LoggerFactory.getLogger(getClass());
+
+       @Override
+       public int canEnhance(ContentItem ci) throws EngineException {
+               /*
+                * This engine (formerly the Dulcifier) can enhance only content items that are previously 
enhanced
+                * by other enhancement engines, as it must be the last engine 
in the chain.
+                * 
+                * Works only if some enhancement has been produced.
+                */
+               MGraph mGraph = ci.getMetadata();
+               if (mGraph != null) {
+                       return ENHANCE_SYNCHRONOUS;
+               } else {
+                       return CANNOT_ENHANCE;
+               }
+       }
+
+       @Override
+       public void computeEnhancements(ContentItem ci) throws EngineException {
+               /*
+                * Retrieve the graph
+                */
+               final MGraph mGraph = ci.getMetadata();
+
+               /*
+                * We filter the entities recognized by the engines
+                */
+               UriRef fiseEntityReference = new 
UriRef("http://fise.iks-project.eu/ontology/entity-reference";);
+               Iterator<Triple> tripleIt = mGraph.filter(null, 
fiseEntityReference,null);
+
+               /*
+                * Now we prepare the OntoNet environment. First we create the 
OntoNet session
+                * in which run the whole
+                */
+               final IRI sessionIRI = createAndAddSessionSpaceToScope();
+
+
+               /*
+                * We retrieve the session space
+                */
+               OntologySpace sessionSpace = scope.getSessionSpace(sessionIRI);
+
+               while (tripleIt.hasNext()) {
+                       Triple triple = tripleIt.next();
+                       Resource entityReference = triple.getObject();
+                       /*
+                        * the entity uri
+                        */
+                       final String entityReferenceString = 
entityReference.toString()
+                                       .replace("<", "").replace(">", "");
+                       log.debug("Trying to resolve entity " + 
entityReferenceString);
+                       /**
+                        * We fetch the entity in the OntologyInputSource object
+                        */
+                       try {
+                               
+                               
+                               final IRI fetchedIri = 
IRI.create(entityReferenceString);
+                               
+                               /*
+                                * The RDF graph of an entity is fetched via 
the EntityHub.
+                                * The getEntityOntology is a method the do the 
job of asking
+                                * the entity to the EntityHub and wrap the RDF 
graph into
+                                * an OWLOntology.
+                                */
+                               final OWLOntology fetched = 
getEntityOntology(entityReferenceString);
+                                                               
+                               OntologyInputSource ontologySource = new 
OntologyInputSource() {
+
+                                       @Override
+                                       public boolean hasRootOntology() {
+                                               return (fetched != null);
+                                       }
+
+                                       @Override
+                                       public boolean hasPhysicalIRI() {
+                                               return true;
+                                       }
+
+                                       @Override
+                                       public OWLOntology getRootOntology() {
+                                               return fetched;
+                                       }
+
+                                       @Override
+                                       public IRI getPhysicalIRI() {
+                                               return fetchedIri;
+                                       }
+                               };
+                               sessionSpace.addOntology(ontologySource);
+
+                               log.debug("Added " + entityReferenceString
+                                               + " to the session space of 
scope "
+                                               + scope.getID().toString(), 
this);
+                       
+                               
+                       } catch (UnmodifiableOntologySpaceException e) {
+                               log.error("Cannot load the entity",e);
+                       }
+
+               }
+
+               /*
+                * Now we merge the RDF from the T-box - the ontologies - and 
the A-box
+                * - the RDF data fetched
+                * 
+                */
+
+               final OWLOntologyManager man = 
OWLManager.createOWLOntologyManager();
+
+               OWLOntologySetProvider provider = new OWLOntologySetProvider() {
+
+                       @Override
+                       public Set<OWLOntology> getOntologies() {
+                               
+                               Set<OWLOntology> ontologies = new 
HashSet<OWLOntology>();
+                               OntologySpace sessionSpace = 
scope.getSessionSpace(sessionIRI);
+                               ontologies.addAll(sessionSpace.getOntologies());
+                               
+                               /*
+                                * We add to the set the graph containing the 
metadata generated by previous
+                                * enhancement engines. It is important because 
we want to manage during the refactoring
+                                * also some information from that graph.
+                                * As the graph is provided as a Clerezza 
MGraph, we first need to convert it to an OWLAPI
+                                * OWLOntology.
+                                * There is no chance that the mGraph could be 
null as it was previously controlled by the JobManager
+                                * through the canEnhance method and the 
computeEnhancement is always called iff the former returns true.  
+                                */
+                               OWLOntology fiseMetadataOntology = 
OWLAPIToClerezzaConverter.clerezzaMGraphToOWLOntology(mGraph);
+                               ontologies.add(fiseMetadataOntology);
+                               return ontologies;
+                       }
+               };
+
+               /*
+                * We merge all the ontologies from the session space of the 
scope into
+                * a single ontology that will be used for the refactoring.
+                */
+               OWLOntologyMerger merger = new OWLOntologyMerger(provider);
+
+               OWLOntology ontology;
+               try {
+                       ontology = 
merger.createMergedOntology(man,IRI.create("http://fise.iks-project.eu/dulcifier/integrity-check";));
+
+                       /*
+                        * To perform the refactoring of the ontology to a given
+                        * vocabulary we use the Stanbol Refactor.
+                        */
+
+                       log.debug("Refactoring recipe IRI is : " + recipeIRI);
+
+                       /*
+                        * We pass the ontology and the recipe IRI to the 
Refactor that
+                        * returns the refactored graph expressed by using the 
given
+                        * vocabulary.
+                        */
+                       try {
+
+                               Recipe recipe = ruleStore.getRecipe(recipeIRI);
+
+                               log.debug("Rules in the recipe are : "+ 
recipe.getkReSRuleList().size(), this);
+
+                               log.debug("The ontology to be refactor is : " + 
ontology, this);
+                               
+                               ontology = 
refactorer.ontologyRefactoring(ontology, recipeIRI);
+                                
+                       } catch (RefactoringException e) {
+                               log.error("The refactoring engine failed the 
execution.", e);
+                       } catch (NoSuchRecipeException e) {
+                               log.error("The recipe with ID " + recipeIRI
+                                               + " does not exists", e);
+                       }
+
+                       log.debug("Merged ontologies in " + ontology);
+               
+                       /*
+                * The newly generated ontology is converted to Clerezza format 
and then added or substituted to the old mGraph.
+                */
+                       if(graph_append){
+                               
mGraph.addAll(OWLAPIToClerezzaConverter.owlOntologyToClerezzaTriples(ontology));
+                               log.debug("Metadata of the content passd have 
been substituted",this);
+                       }
+                       else{
+                               mGraph.removeAll(mGraph);
+                               
mGraph.addAll(OWLAPIToClerezzaConverter.owlOntologyToClerezzaTriples(ontology));
+                               log.debug("Metadata of the content is appended 
to the existent one",this);
+                       }
+
+                       /*
+                        * The session needs to be destroyed, as it is no more 
useful.
+                        */
+                       
onManager.getSessionManager().destroySession(sessionIRI);
+
+               } catch (OWLOntologyCreationException e) {
+                       log.error("Cannot create the ontology for the 
refactoring", e);
+               }
+       }
+
+       /**
+        * Setup the KReS session
+        * 
+        * @return
+        */
+       private IRI createAndAddSessionSpaceToScope() {
+               /*
+                * Retrieve the session manager
+                */
+               SessionManager sessionManager = onManager.getSessionManager();
+               log.debug("Starting create session for the dulcifier");
+               
+               /*
+                * Create and setup the session. TODO FIXME This is an 
operation that
+                * should be made easier for developers to do through the API
+                */
+               Session session = sessionManager.createSession();
+               OntologySpaceFactory ontologySpaceFactory = 
onManager.getOntologySpaceFactory();
+               OntologySpace sessionSpace = 
ontologySpaceFactory.createSessionOntologySpace(scope.getID());
+               scope.addSessionSpace(sessionSpace, session.getID());
+               
+               /*
+                * Finally, we return the session ID to be used by the caller
+                */
+               log.debug("Session " + session.getID() + " created", this);
+               return session.getID();
+       }
+
+    /**
+     * To create the input source necessary to load the ontology inside the 
scope.
+     * @param uri -- A resolvable string uri.
+     * @return An OntologyInputSource
+     */
+    private OntologyInputSource oisForScope(final String uri){
+        /*
+                * The scope factory needs an OntologyInputSource as input for 
the core
+                * ontology space. We want to use the dbpedia ontology as core 
ontology
+                * of our scope.
+                */
+               OntologyInputSource ois = new OntologyInputSource() {
+
+                       @Override
+                       public boolean hasRootOntology() {
+                               return true;
+                       }
+
+                       @Override
+                       public boolean hasPhysicalIRI() {
+                               return false;
+                       }
+
+                       @Override
+                       public OWLOntology getRootOntology() {
+
+                               InputStream inputStream;
+                               try {
+                                       /*
+                                        * The input stream for the dbpedia 
ontology is obtained
+                                        * through the dereferencer component.
+                                        */
+                                       
+                                       
+                                        //inputStream = 
dereferencer.resolve(uri);
+                                        OWLOntologyManager manager = 
OWLManager.createOWLOntologyManager();
+                                        return 
manager.loadOntologyFromOntologyDocument(IRI.create(uri));
+                                        
+                                       
+                                       //return getEntityOntology(uri);
+                               } catch (Exception e) {
+                                       log.error("Cannot load the ontology 
"+uri, e);
+                               }
+                               /** If some errors occur **/
+                               return null;
+                       }
+
+                       @Override
+                       public IRI getPhysicalIRI() {
+                               return null;
+                       }
+               };
+
+                return ois;
+        }
+
+
+    /**
+     * Activating the component
+     * @param context
+     */
+       protected void activate(ComponentContext context) {
+            
+       /*
+        * Read property to indicate if the the new eanchment metada must be 
append to the existing mGraph 
+        */
+               graph_append = 
Boolean.parseBoolean(((String)context.getProperties().get(APPEND_OTHER_ENHANCEMENT_GRAPHS)).toLowerCase());
+               
+               /*
+                * Get the Scope Factory from the ONM of KReS that allows to 
create new
+                * scopes
+                */
+               OntologyScopeFactory scopeFactory = 
onManager.getOntologyScopeFactory();
+       
+               
+               /*
+        * Adding ontologies to the scope core ontology.
+        * 1) Get all the ontologies from the property.
+        * 2) Create a base scope with an empity ontology.
+        * 3) Retrieve the ontology space from the scope.
+        * 4) Add the ontologies to the scope via ontology space.
+        */
+               //Step 1
+               Object obj = context.getProperties().get(SCOPE_CORE_ONTOLOGY);
+        String[] coreScopeOntologySet;
+        if(obj instanceof String[]){
+               coreScopeOntologySet = (String[]) obj;
+        }
+        else{
+               String[] aux = new String[1];
+               aux[0] = (String) obj;
+               coreScopeOntologySet =aux;
+        }
+                
+        //Step 2
+        OntologyInputSource oisbase = new OntologyInputSource() {
+
+               @Override
+                       public boolean hasRootOntology() {
+                               return true;
+                       }
+
+                       @Override
+                       public boolean hasPhysicalIRI() {
+                               return false;
+                       }
+
+                       @Override
+                       public OWLOntology getRootOntology() {
+
+                               try {
+                                       /*
+                                        * The input stream for the dbpedia 
ontology is obtained
+                                        * through the dereferencer component.
+                                        */
+                                       OWLOntologyManager manager = 
OWLManager.createOWLOntologyManager();
+                                       return manager.createOntology();
+                               } catch (OWLOntologyCreationException e) {
+                                       log.error("Cannot create the scope with 
empity ontology.", e);
+                               } catch (Exception e) {
+                                       log.error("Cannot create the scope with 
empity ontology.", e);
+                               }
+                               /** If some errors occur **/
+                               return null;
+                       }
+
+                       @Override
+                       public IRI getPhysicalIRI() {
+                               return null;
+                       }
+               };
+
+               IRI dulcifierScopeIRI = IRI.create((String) 
context.getProperties().get(SCOPE));
+
+               /*
+                * The scope is created by the ScopeFactory or loaded from the 
scope
+                * registry of KReS
+                */
+               try {
+                       scope = 
scopeFactory.createOntologyScope(dulcifierScopeIRI, oisbase);
+               } catch (DuplicateIDException e) {
+                       ScopeRegistry scopeRegistry = 
onManager.getScopeRegistry();
+                       scope = scopeRegistry.getScope(dulcifierScopeIRI);
+               }
+
+        /*
+         * Step 3
+         */
+         OntologySpace ontologySpace = scope.getCoreSpace();
+         
+        /*
+         * Step 4
+         */
+        ontologySpace.tearDown();
+        for(int o = 0; o<coreScopeOntologySet.length; o++){
+            OntologyInputSource ois = oisForScope(coreScopeOntologySet[o]);
+            try {
+                ontologySpace.addOntology(ois);
+            } catch (UnmodifiableOntologySpaceException ex) {
+                log.error("Unmodifiable Ontology SpaceException.",ex);
+            }
+        }
+        ontologySpace.setUp();
+
+        log.debug("The set of ontologies loaded in the core scope space is: 
"+ontologySpace.getOntologies()+
+                "\nN.B. The root.owl ontology is the first (on the list) 
ontology added when the scope is created.");
+        
+        /*
+         * The first thing to do is to create a recipe in the rule store that 
can be used
+         * by the engine to refactor the enhancement graphs. 
+         */
+               recipeIRI = IRI.create((String) 
context.getProperties().get(RECIPE_URI));
+
+               log.debug("Start create the Recipe", this);
+
+               ruleStore.addRecipe(recipeIRI, null);
+
+               log.debug("The recipe has been created", this);
+        
+               
+               /* 
+                * The set of rule to put in the recipe can be provided by the 
user.
+                * A default set of rules is provided in 
/META-INF/default/seo_rules.sem.
+                * Use the property engine.refactor in the felix console to 
pass to the engine
+                * your set of rules.  
+         * 
+         */
+               
+               String recipeURI = (String) 
context.getProperties().get(RECIPE_URI);
+               
+               InputStream recipeStream = null;
+               String recipeString = null;
+               
+               if(recipeURI != null && !recipeURI.isEmpty()){
+                       IDereferencer dereferencer = new Dereferencer();
+                       try {
+                               recipeStream = dereferencer.resolve(recipeURI);
+                       } catch (FileNotFoundException e) {
+                               // TODO Auto-generated catch block
+                               e.printStackTrace();
+                       }
+               }
+               else{
+                       recipeStream = 
RefactorEnhancementEngine.class.getResourceAsStream("/META-INF/default/seo_rules.sem");
+               }
+               
+               System.out.println("Refactorer engine recipe stream 
"+recipeStream);
+               
+               if(recipeStream != null){
+                       
+                       recipeString = "";
+                       
+                       BufferedReader reader = new BufferedReader(new 
InputStreamReader(recipeStream));
+                       
+                       String line = null;
+                       try {
+                               while((line=reader.readLine()) != null){
+                                       recipeString += line;
+                               }
+                       } catch (IOException e) {
+                               // TODO Auto-generated catch block
+                               e.printStackTrace();
+                       }
+               }
+        
+
+        /*
+         * step 3
+         */
+        try {
+               //ruleStore.addRuleToRecipe(recipeIRI.toString(), 
kReSRuleSyntax);
+               ruleStore.addRuleToRecipe(recipeIRI.toString(), recipeString);
+               log.debug("Added rules to recipe " + recipeIRI.toString());
+               } catch (NoSuchRecipeException e) {
+                       log.error("The recipe does not exists: ", e);
+               }
+               log.info("Activated Dulcifier engine");
+                
+       }
+
+       protected void deactivate(ComponentContext context) {
+
+               /* Deactivating the dulcifier. The procedure require:
+                * 1) get all the rules from the recipe
+                * 2) remove the recipe.
+                * 3) remove the single rule.
+                * 4) tear down the scope ontologySpace and the scope itself.
+                */
+
+               try {
+                       /*
+                        * step 1: get all the rule
+                        */
+                       RuleList recipeRuleList = 
ruleStore.getRecipe(recipeIRI).getkReSRuleList();
+
+                       /*
+                        * step 2: remove the recipe
+                        */
+                       if(ruleStore.removeRecipe(recipeIRI)){
+                               log.info("The recipe "+recipeIRI+" has been 
removed correctly");
+                       }
+               else{
+                       log.error("The recipe "+recipeIRI+" can not be 
removed");
+               }
+
+               /*
+                * step 3: remove the rules
+                */
+               for(Rule rule : recipeRuleList){
+                       if(ruleStore.removeRule(rule)){
+                               log.info("The rule "+rule.getRuleName()+" has 
been removed correctly");
+                       }
+                else{
+                       log.error("The rule "+rule.getRuleName()+" can not be 
removed");
+                }
+               }
+
+               /*
+                * step 4:
+                */
+               scope.getCoreSpace().tearDown();
+               scope.tearDown();
+               
+                
+            } catch (NoSuchRecipeException ex) {
+                log.error("The recipe "+recipeIRI+" doesn't exist",ex);
+            }
+
+            log.info("Deactivated Dulcifier engine");
+
+       }
+
+       @Override
+       public Map<String, Object> getServiceProperties() {
+               return Collections.unmodifiableMap(Collections.singletonMap(
+                               ServiceProperties.ENHANCEMENT_ENGINE_ORDERING,
+                               (Object) 
ServiceProperties.ORDERING_POST_PROCESSING));
+       }
+       
+       /**
+        * Fetch the OWLOntology containing the graph associated to an entity 
from Linked Data.
+        * It uses the Entity Hub for accessing LOD and fetching entities.
+        * 
+        * @param entityURI {@link String}
+        * @return the {@link OWLOntology} of the entity
+        */
+       private OWLOntology getEntityOntology(String entityURI){
+               
+               OWLOntology fetchedOntology = null; 
+               
+               
+               
+               log.debug("Asking entity: "+entityURI);
+               /*
+                * Ask to the entityhub the fetch the entity.
+                */
+               Sign entitySign = referencedSiteManager.getSign(entityURI);
+               
+               /*
+                * Wrap the entity graph into an owl ontology.
+                * 
+                */
+               MGraph entityMGraph = null;
+               
+               if(entitySign != null){
+                       Representation entityRepresentation = 
entitySign.getRepresentation();
+                       RdfRepresentation entityRdfRepresentation = 
RdfValueFactory.getInstance().toRdfRepresentation(entityRepresentation);
+                       TripleCollection tripleCollection = 
entityRdfRepresentation.getRdfGraph();
+                       entityMGraph = new SimpleMGraph();
+                       entityMGraph.addAll(tripleCollection);
+               }
+               
+               if(entityMGraph != null){
+                       /*
+                        * OWLOntologyManager manager = 
OWLManager.createOWLOntologyManager();
+                        * final OWLOntology fetched = 
manager.loadOntologyFromOntologyDocument(dereferencer.resolve(entityReferenceString));
+                        */
+                       
+                       fetchedOntology = 
OWLAPIToClerezzaConverter.clerezzaMGraphToOWLOntology(entityMGraph);
+               }
+               
+               return fetchedOntology;
+               
+       }
+}

Added: 
incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/dereferencer/Dereferencer.java
URL: 
http://svn.apache.org/viewvc/incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/dereferencer/Dereferencer.java?rev=1089929&view=auto
==============================================================================
--- 
incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/dereferencer/Dereferencer.java
 (added)
+++ 
incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/dereferencer/Dereferencer.java
 Thu Apr  7 16:03:52 2011
@@ -0,0 +1,95 @@
+package org.apache.stanbol.enhancer.engines.refactor.dereferencer;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.MalformedURLException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.net.URLConnection;
+
+import org.apache.felix.scr.annotations.Component;
+import org.apache.felix.scr.annotations.Service;
+
+
+/**
+ * 
+ * @author andrea.nuzzolese
+ *
+ */
+@Component(immediate = true, metatype = true)
+@Service(IDereferencer.class)
+public class Dereferencer implements IDereferencer {
+
+       @Override
+       public InputStream resolve(String location) throws 
FileNotFoundException {
+               InputStream inputStream = null;
+               try {
+                       URI uri = new URI(location);
+                       if(uri.isAbsolute()){
+                               System.out.println("URL : absolute");
+                               URL url = new URL(location);
+                               
+                               URLConnection connection= url.openConnection();
+                               inputStream = connection.getInputStream();
+                       }
+                       else{
+                               System.out.println("URL : not absolute 
"+location);
+                               inputStream = new FileInputStream(location);
+                       }
+                       
+               } catch (MalformedURLException e) {
+                       e.printStackTrace();
+                       throw new FileNotFoundException();
+               } catch (IOException e) {
+                       throw new FileNotFoundException();
+               } catch (URISyntaxException e) {
+                       e.printStackTrace();
+                       throw new FileNotFoundException();
+               }
+               
+               return inputStream;
+               
+       }
+       
+       public boolean isAbsoluteLocation(String location){
+               URI uri;
+               
+               try {
+                       uri = new URI(location);
+                       return uri.isAbsolute();
+               } catch (URISyntaxException e) {
+                       e.printStackTrace();
+               }
+               
+               return false;
+               
+       }
+       
+       
+       public String getLocalName(String location) throws 
FileNotFoundException {
+               String localName = null;
+               try {
+                       URI uri = new URI(location);
+                       if(uri.isAbsolute()){
+                               localName = location;
+                       }
+                       else{
+                               System.out.println("URL : not absolute 
"+location);
+                               File file = new File(location);
+                               localName = file.getName();
+                       }
+                       
+               } catch (URISyntaxException e) {
+                       e.printStackTrace();
+                       throw new FileNotFoundException();
+               }
+               
+               return localName;
+               
+       }
+
+}

Added: 
incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/dereferencer/IDereferencer.java
URL: 
http://svn.apache.org/viewvc/incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/dereferencer/IDereferencer.java?rev=1089929&view=auto
==============================================================================
--- 
incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/dereferencer/IDereferencer.java
 (added)
+++ 
incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/dereferencer/IDereferencer.java
 Thu Apr  7 16:03:52 2011
@@ -0,0 +1,23 @@
+package org.apache.stanbol.enhancer.engines.refactor.dereferencer;
+
+import java.io.FileNotFoundException;
+import java.io.InputStream;
+
+/**
+ * 
+ * @author andrea.nuzzolese
+ *
+ */
+public interface IDereferencer {
+
+       /**
+        * 
+        * The resolve method dereferences location and returns input streams.
+        * Locations can be local to the file system or remote URIs.
+        * 
+        * @param location
+        * @return {@link InputStream} if the location is resolved. Otherwise a 
{@link FileNotFoundException} is thrown.
+        * @throws FileNotFoundException
+        */
+    InputStream resolve(String location) throws FileNotFoundException;
+}

Added: 
incubator/stanbol/trunk/enhancer/engines/refactor/src/main/resources/META-INF/default/seo_rules.sem
URL: 
http://svn.apache.org/viewvc/incubator/stanbol/trunk/enhancer/engines/refactor/src/main/resources/META-INF/default/seo_rules.sem?rev=1089929&view=auto
==============================================================================
--- 
incubator/stanbol/trunk/enhancer/engines/refactor/src/main/resources/META-INF/default/seo_rules.sem
 (added)
+++ 
incubator/stanbol/trunk/enhancer/engines/refactor/src/main/resources/META-INF/default/seo_rules.sem
 Thu Apr  7 16:03:52 2011
@@ -0,0 +1,63 @@
+dbpedia = <http://dbpedia.org/ontology/> . 
+dbprop = <http://dbpedia.org/property/> . 
+google = <http://rdf.data-vocabulary.org/#> . 
+foaf = <http://xmlns.com/foaf/0.1/> . 
+rdf = <http://www.w3.org/1999/02/22-rdf-syntax-ns#> . 
+wgs84_pos = <http://www.w3.org/2003/01/geo/wgs84_pos#> . 
+skos = <http://www.w3.org/2004/02/skos/core#> . 
+gn = <http://www.geonames.org/ontology#> . 
+fise = <http://fise.iks-project.eu/ontology/> . 
+owl = <http://www.w3.org/2002/07/owl#> . 
+dc = <http://purl.org/dc/elements/1.1/> . 
+
+peopleTypeRule[is(dbpedia:Person, ?x) -> is(google:Person, ?x)] .  
+myRule[has(fise:entity-reference,  ?y,  ?x) . 
has(<http://purl.org/dc/terms/relation>,  ?y,  ?r) ->  
has(<http://purl.org/dc/terms/relation>,  ?x,  ?r)] . 
+fiseStartRul1[has(fise:entity-reference,  ?y,  ?x) . 
has(<http://purl.org/dc/terms/relation>,  ?y,  ?r) . values(fise:selected-text, 
 ?r,  ?t) . values(fise:start,  ?r,  ?start) -> is(fise:enhancementContext,  
?t) . values(fise:start-position,  ?r,  ?start) . 
has(fise:hasEnhancementContext,  ?x,  ?r)] . 
+fiseEndRule1[has(fise:entity-reference,  ?y,  ?x) . 
has(<http://purl.org/dc/terms/relation>,  ?y,  ?r) . values(fise:selected-text, 
 ?r,  ?t) . values(fise:end,  ?r,  ?end) -> is(fise:enhancementContext,  ?t) . 
values(fise:end-position,  ?r,  ?end) . has(fise:hasEnhancementContext,  ?x,  
?r) ] . 
+fiseContextRule1[has(fise:entity-reference,  ?y,  ?x) . 
has(<http://purl.org/dc/terms/relation>,  ?y,  ?r) . values(fise:selected-text, 
 ?r,  ?t) . values(fise:selection-context,  ?r,  ?context) -> 
is(fise:enhancementContext,  ?t) . values(fise:context,  ?r,  ?context) . 
has(fise:hasEnhancementContext,  ?x,  ?r)] . 
+peopleNameRule[is(dbpedia:Person, ?x) . values(foaf:name, ?x, ?y) -> 
values(google:name, ?x, ?y)] . 
+peopleNickRule[is(dbpedia:Person, ?x) . values(foaf:nick, ?x, ?y) -> 
values(google:nickname, ?x, ?y)] . 
+peoplePhotoRule[is(dbpedia:Person, ?x) . has(dbpedia:thumbnail, ?x, ?y) -> 
has(google:photo, ?x, ?y)] . 
+peopleProfessionRule[is(dbpedia:Person, ?x) . has(dbpedia:profession, ?x, ?y) 
-> has(google:title, ?x, ?y)] . 
+peopleOccupationRule[is(dbpedia:Person, ?x) . has(dbpedia:occupation, ?x, ?y) 
-> has(google:title, ?x, ?y)] . 
+peopleRoleRule[is(dbpedia:Person, ?x) . values(dbpedia:role, ?x, ?y) -> 
values(google:role, ?x, ?y)] . 
+peopleHomepageRule[is(dbpedia:Person, ?x) . has(foaf:homepage, ?x, ?y) -> 
has(google:url, ?x, ?y)] . 
+peopleAffiliationRule[is(dbpedia:Person, ?x) . has(dbpedia:employer, ?x, ?y) 
-> has(google:affiliation, ?x, ?y)] . 
+peopleKnowsRule[is(dbpedia:Person, ?x) . has(foaf:knows, ?x, ?y) -> 
has(google:friend, ?x, ?y)] . 
+peopleAddressRule[is(dbpedia:Person, ?x) . values(dbpedia:address, ?x, ?y) -> 
values(google:address, ?x, ?y)] . 
+peopleOccupationRule2[is(dbpedia:Person, ?x) . has(dc:description, ?x, ?y) -> 
has(google:title, ?x, ?y)] . 
+peopleOccupationRule3[is(dbpedia:Person, ?x) . has(skos:subject, ?x, ?y) -> 
has(google:affiliation, ?x, ?y)] . 
+productTypeRule[is(dbpedia:Organisation, ?x) . has(dbpedia:product, ?x, ?y) -> 
is(google:Product, ?y)] . 
+productNameRule1[is(dbpedia:Organisation, ?x) . has(dbpedia:product, ?x, ?y) . 
values(foaf:name, ?y, ?z) -> values(google:name, ?y, ?z)] . 
+productNameRule2[is(dbpedia:Organisation, ?x) . has(dbpedia:product, ?x, ?y) . 
values(dbprop:name, ?y, ?z) -> values(google:name, ?y, ?z)] . 
+productNameRule3[is(dbpedia:Organisation, ?x) . has(dbpedia:product, ?x, ?y) . 
values(rdf:label, ?y, ?z) -> values(google:name, ?y, ?z)] . 
+productImageRule[is(dbpedia:Organisation, ?x) . has(dbpedia:product, ?x, ?y) . 
has(dbpedia:thumbnail, ?y, ?z) -> has(google:photo, ?y, ?z)] . 
+productDescriptionRule[is(dbpedia:Organisation, ?x) . has(dbpedia:product, ?x, 
?y) . values(dbpedia:thumbnail, ?y, ?z) -> values(google:description, ?y, ?z)] 
. 
+productBrandRule[is(dbpedia:Organisation, ?x) . has(dbpedia:product, ?x, ?y) . 
values(rdf:label, ?y, ?z) -> values(google:brand, ?y, ?z)] . 
+productIdentifierRule[is(dbpedia:Organisation, ?x) . has(dbpedia:product, ?x, 
?y) . values(dbpedia:isbn, ?y, ?z) -> values(google:identifier, ?y, ?z)] . 
+productHomepageRule[is(dbpedia:Organisation, ?x) . has(dbpedia:product, ?x, 
?y) . values(foaf:homepage, ?y, ?z) -> values(google:url, ?y, ?z)] . 
+productCategoryRule[is(dbpedia:Organisation, ?x) . has(dbpedia:product, ?x, 
?y) . has(skos:currency, ?y, ?z) -> has(google:category, ?y, ?z)] . 
+organizationTypeRule[is(dbpedia:Organisation, ?x) -> is(google:Organization, 
?x)] . 
+organizationNameRule[is(dbpedia:Organisation, ?x) . values(foaf:name, ?x, ?y) 
-> values(google:name, ?x, ?y)] . 
+organizationHomepageRule[is(dbpedia:Organisation, ?x) . values(foaf:homepage, 
?x, ?y) -> values(google:url, ?x, ?y)] . 
+organizationRegionRule[is(dbpedia:Organisation, ?x) . has(dbpedia:region, ?x, 
?y) -> has(google:region, ?x, ?y)] . 
+organizationCountryRule[is(dbpedia:Organisation, ?x) . 
has(dbpedia:locationCountry, ?x, ?y) -> has(google:country-name, ?x, ?y)] . 
+organizationAddressRule[is(dbpedia:Organisation, ?x) . values(dbprop:address, 
?x, ?y) -> values(google:address, ?x, ?y)] . 
+organizationStreetAddressRule[is(dbpedia:Organisation, ?x) . 
values(dbprop:streetaddress, ?x, ?y) -> values(google:street-address, ?x, ?y)] 
. 
+organizationLocationRule[is(dbpedia:Organisation, ?x) . has(dbpedia:location, 
?x, ?y) -> has(google:locality, ?x, ?y)] . 
+organizationTelephoneRule[is(dbpedia:Organisation, ?x) . 
values(dbprop:telephon, ?x, ?y) -> values(google:tel, ?x, ?y)] . 
+organizationPostalCodeRule[is(dbpedia:Organisation, ?x) . 
values(dbpedia:postalCode, ?x, ?y) -> has(google:postal-code, ?x, ?y)] . 
+organizationGeoLatRule[is(dbpedia:Organisation, ?x) . values(gn:lat, ?x, ?y) 
-> values(google:latitude, ?x, ?y)] . 
+organizationGeoLongRule[is(dbpedia:Organisation, ?x) . values(gn:long, ?x, ?y) 
-> values(google:longitude, ?x, ?y)] . 
+organizationCategoryRule[is(dbpedia:Organisation, ?x) . has(skos:subject, ?x, 
?y) -> has(google:category, ?x, ?y)] . 
+eventTypeRule[is(dbpedia:Event, ?x) -> is(google:Event, ?x)] . 
+eventURLRule[is(dbpedia:Event, ?x) . has(foaf:page, ?x, ?y) -> has(google:url, 
?x, ?y)] . 
+eventLocationRule1[is(dbpedia:Event, ?x) . has(dbpedia:place, ?x, ?y) -> 
has(google:location, ?x, ?y)] . 
+eventLocationRule2[is(dbpedia:Event, ?x) . has(dbpedia:place, ?x, ?y) . 
has(owl:sameAs, ?y, ?z) . is(gn:Feature, ?z) . values(wgs84_pos:lat, ?z, ?lat) 
. values(wgs84_pos:long, ?z, ?long) -> is(google:geo, ?z) . 
has(google:location, ?x, ?y) . has(google:geo, ?y, ?z) . 
values(google:latitude, ?z, ?lat) . values(google:longitude, ?z, ?long)] . 
+eventDateRule1[is(dbpedia:Event, ?x) . values(dbpedia:date, ?x, ?y) -> 
values(google:startDate, ?x, ?y)] . 
+eventCategoryRule[is(dbpedia:Event, ?x) . has(skos:subject, ?x, ?y) -> 
has(google:eventType, ?x, ?y)] . 
+eventPhotoRule[is(dbpedia:Event, ?x) . has(dbpedia:thumbnail, ?x, ?y) -> 
has(google:photo, ?x, ?y)] . 
+recipeClassAssertionRule[has(skos:subject, ?x, 
<http://dbpedia.org/page/Category:World_cuisine>) -> is(google:Recipe, ?x)] . 
+recipeTypeRule[has(skos:subject, ?x, 
<http://dbpedia.org/page/Category:World_cuisine>) . has(skos:subject, ?x, ?y) 
-> has(google:recipeType, ?x, ?y)] . 
+recipePhotoRule1[has(skos:subject, ?x, 
<http://dbpedia.org/page/Category:World_cuisine>) . has(dbpedia:thumbnail, ?x, 
?y) -> has(google:photo, ?x, ?y)] . 
+recipePhotoRule2[has(skos:subject, ?x, 
<http://dbpedia.org/page/Category:World_cuisine>) . values(dbpedia:abstract, 
?x, ?y) -> values(google:summary, ?x, ?y)]
\ No newline at end of file


Reply via email to