GitHub user amihalik commented on a diff in the pull request:

    https://github.com/apache/incubator-rya/pull/219#discussion_r137567339
  
    --- Diff: extras/indexingExample/src/main/java/InferenceExamples.java ---
    @@ -0,0 +1,592 @@
    +import java.io.IOException;
    +import java.util.List;
    +
    +import org.apache.commons.lang.Validate;
    +import org.apache.hadoop.conf.Configuration;
    +import org.apache.log4j.ConsoleAppender;
    +import org.apache.log4j.Level;
    +import org.apache.log4j.LogManager;
    +import org.apache.log4j.Logger;
    +import org.apache.log4j.PatternLayout;
    +import org.apache.rya.indexing.accumulo.ConfigUtils;
    +import org.apache.rya.indexing.mongodb.MongoIndexingConfiguration;
    +import org.apache.rya.indexing.mongodb.MongoIndexingConfiguration.MongoDBIndexingConfigBuilder;
    +import org.apache.rya.mongodb.MockMongoFactory;
    +import org.apache.rya.mongodb.MongoConnectorFactory;
    +import org.apache.rya.rdftriplestore.RdfCloudTripleStore;
    +import org.apache.rya.rdftriplestore.inference.InferenceEngineException;
    +import org.apache.rya.sail.config.RyaSailFactory;
    +import org.apache.zookeeper.ClientCnxn;
    +import org.openrdf.model.Namespace;
    +import org.openrdf.model.URI;
    +import org.openrdf.model.ValueFactory;
    +import org.openrdf.model.vocabulary.OWL;
    +import org.openrdf.model.vocabulary.RDF;
    +import org.openrdf.model.vocabulary.RDFS;
    +import org.openrdf.query.BindingSet;
    +import org.openrdf.query.MalformedQueryException;
    +import org.openrdf.query.QueryEvaluationException;
    +import org.openrdf.query.QueryLanguage;
    +import org.openrdf.query.QueryResultHandlerException;
    +import org.openrdf.query.TupleQuery;
    +import org.openrdf.query.TupleQueryResultHandler;
    +import org.openrdf.query.TupleQueryResultHandlerException;
    +import org.openrdf.query.Update;
    +import org.openrdf.query.UpdateExecutionException;
    +import org.openrdf.repository.RepositoryException;
    +import org.openrdf.repository.RepositoryResult;
    +import org.openrdf.repository.sail.SailRepository;
    +import org.openrdf.repository.sail.SailRepositoryConnection;
    +import org.openrdf.sail.Sail;
    +
    +import com.mongodb.MongoClient;
    +import com.mongodb.ServerAddress;
    +
    +
    +//
    +//See notes in inferenceExamples_readme.txt
    +//
    +
    +public class InferenceExamples {
    +      private static final Logger log = Logger.getLogger(InferenceExamples.class);
    +
    +       private static final boolean IS_DETAILED_LOGGING_ENABLED = false;
    +
    +       //
    +       // Connection configuration parameters
    +       //
    +
    +       private static final boolean PRINT_QUERIES = true;
    +       private static final String MONGO_DB = "rya";
    +       private static final String MONGO_COLL_PREFIX = "rya_";
    +       private static final boolean USE_EMBEDDED_MONGO = true;
    +       private static final String MONGO_INSTANCE_URL = "localhost";
    +       private static final String MONGO_INSTANCE_PORT = "27017";
    +       private static final String MongoUserName="usern";
    +       private static final String MongoUserPassword="passwd";
    +
    +       public static void setupLogging() {
    +           final Logger rootLogger = LogManager.getRootLogger();
    +           rootLogger.setLevel(Level.OFF);
    +           final ConsoleAppender ca = (ConsoleAppender) rootLogger.getAppender("stdout");
    +           ca.setLayout(new PatternLayout("%d{MMM dd yyyy HH:mm:ss} %5p [%t] (%F:%L) - %m%n"));
    +           rootLogger.setLevel(Level.INFO);
    +           // Filter out noisy messages from the following classes.
    +           Logger.getLogger(ClientCnxn.class).setLevel(Level.OFF);
    +           Logger.getLogger(MockMongoFactory.class).setLevel(Level.OFF);
    +       }
    +
    +       public static void main(final String[] args) throws Exception {
    +           if (IS_DETAILED_LOGGING_ENABLED) {
    +               setupLogging();
    +           }
    +           final Configuration conf = getConf();
    +           conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES);
    +
    +           SailRepository repository = null;
    +           SailRepositoryConnection conn = null;
    +           try {
    +               log.info("Connecting to Indexing Sail Repository.");
    +               final Sail sail = RyaSailFactory.getInstance(conf);
    +               repository = new SailRepository(sail);
    +               conn = repository.getConnection();
    +
    +               
    +               final long start = System.currentTimeMillis();
    +
    +                   testInfer(conn, sail);
    +                   testPropertyChainInference(conn, sail);
    +                   testPropertyChainInferenceAltRepresentation(conn, sail);
    +                   testSomeValuesFromInference(conn, sail);
    +                   testAllValuesFromInference(conn, sail);
    +                   testIntersectionOfInference(conn, sail);
    +                   testOneOfInference(conn, sail);
    +
    +               log.info("TIME: " + (System.currentTimeMillis() - start) / 1000.);
    +           } finally {
    +               log.info("Shutting down");
    +               closeQuietly(conn);
    +               closeQuietly(repository);
    +               MongoConnectorFactory.closeMongoClient();
    +           }
    +       }
    +
    +       private static void closeQuietly(final SailRepository repository) {
    +           if (repository != null) {
    +               try {
    +                   repository.shutDown();
    +               } catch (final RepositoryException e) {
    +                   // quietly absorb this exception
    +               }
    +           }
    +       }
    +
    +       private static void closeQuietly(final SailRepositoryConnection conn) {
    +           if (conn != null) {
    +               try {
    +                   conn.close();
    +               } catch (final RepositoryException e) {
    +                   // quietly absorb this exception
    +               }
    +           }
    +       }
    +
    +       private static Configuration getConf() throws IOException {
    +
    +          // MongoDBIndexingConfigBuilder builder = MongoIndexingConfiguration.builder()
    +          //     .setUseMockMongo(USE_MOCK).setUseInference(USE_INFER).setAuths("U");
    +           MongoDBIndexingConfigBuilder builder = MongoIndexingConfiguration.builder()
    +                       .setUseMockMongo(USE_EMBEDDED_MONGO).setUseInference(true).setAuths("U");
    +           
    +           if (USE_EMBEDDED_MONGO) {
    +               final MongoClient c = MockMongoFactory.newFactory().newMongoClient();
    +               final ServerAddress address = c.getAddress();
    +               final String url = address.getHost();
    +               final String port = Integer.toString(address.getPort());
    +               c.close();
    +               builder.setMongoHost(url).setMongoPort(port);
    +           } else {
    +               // User name and password must be filled in:
    +               builder = builder.setMongoUser(MongoUserName)
    +                                .setMongoPassword(MongoUserPassword)
    +                                .setMongoHost(MONGO_INSTANCE_URL)
    +                                .setMongoPort(MONGO_INSTANCE_PORT);
    +           }
    +
    +           return builder.setMongoDBName(MONGO_DB)
    +                  .setMongoCollectionPrefix(MONGO_COLL_PREFIX)
    +                  .setUseMongoFreetextIndex(true)
    +                  .setMongoFreeTextPredicates(RDFS.LABEL.stringValue()).build();
    +
    +       }
    +
    +
    +       public static void testPropertyChainInferenceAltRepresentation(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException,
    +       UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
    +
    +           // Add data
    +           String query = "INSERT DATA\n"//
    +                   + "{ GRAPH <http://updated/test> {\n"//
    +                   + "  <urn:jenGreatGranMother> <urn:Motherof> <urn:jenGranMother> . "
    +                   + "  <urn:jenGranMother> <urn:isChildOf> <urn:jenGreatGranMother> . "
    +                   + "  <urn:jenGranMother> <urn:Motherof> <urn:jenMother> . "
    +                   + "  <urn:jenMother> <urn:isChildOf> <urn:jenGranMother> . "
    +                   + " <urn:jenMother> <urn:Motherof> <urn:jen> . "
    +                   + "  <urn:jen> <urn:isChildOf> <urn:jenMother> . "
    +                   + " <urn:jen> <urn:Motherof> <urn:jenDaughter> .  }}";
    +
    +           log.info("Performing Query");
    +
    +           Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    +           update.execute();
    +
    +           query = "select ?p { GRAPH <http://updated/test> {?s <urn:Motherof>/<urn:Motherof> ?p}}";
    +           CountingResultHandler resultHandler = new CountingResultHandler();
    +           TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    +           tupleQuery.evaluate(resultHandler);
    +           log.info("Result count : " + resultHandler.getCount());
    +
    +
    +           // try adding a property chain and querying for it
    +           query = "INSERT DATA\n"//
    +                   + "{ GRAPH <http://updated/test> {\n"//
    --- End diff --
    
    Is there a cleaner SPARQL syntax for this? You don't need to define the
    blank nodes explicitly, and there's a nice shorthand syntax for RDF lists.
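
    For example, the list shorthand should let you write the whole chain in one
    statement. Rough sketch only, not tested against this PR, and
    <urn:greatMotherOf> is just a placeholder for whatever chain property the
    elided part of the diff actually defines:

        PREFIX owl: <http://www.w3.org/2002/07/owl#>
        INSERT DATA { GRAPH <http://updated/test> {
          # ( ... ) is the standard collection shorthand; the parser generates
          # the rdf:first/rdf:rest blank nodes for you.
          # <urn:greatMotherOf> is a placeholder name for the chain property.
          <urn:greatMotherOf> owl:propertyChainAxiom
              ( [ owl:inverseOf <urn:isChildOf> ] <urn:Motherof> ) .
        }}

    The [ owl:inverseOf <urn:isChildOf> ] blank node property list and the
    collection both expand to the same triples as spelling the nodes out by
    hand, so the data the inference engine sees should be unchanged.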

