[2/4] incubator-rya git commit: Minor fixes in the docs

2018-10-01 Thread pujav65
Minor fixes in the docs


Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/0018afd5
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/0018afd5
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/0018afd5

Branch: refs/heads/master
Commit: 0018afd5368e8cc1315ea447ff2666fa2444aa16
Parents: 639b980
Author: Maxim Kolchin 
Authored: Thu Jul 5 12:03:48 2018 +0300
Committer: Maxim Kolchin 
Committed: Thu Jul 5 12:03:48 2018 +0300

--
 extras/rya.manual/src/site/markdown/loaddata.md   | 9 +
 extras/rya.manual/src/site/markdown/quickstart.md | 2 +-
 2 files changed, 6 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/0018afd5/extras/rya.manual/src/site/markdown/loaddata.md
--
diff --git a/extras/rya.manual/src/site/markdown/loaddata.md 
b/extras/rya.manual/src/site/markdown/loaddata.md
index 9d43edd..8894cbd 100644
--- a/extras/rya.manual/src/site/markdown/loaddata.md
+++ b/extras/rya.manual/src/site/markdown/loaddata.md
@@ -104,7 +104,7 @@ Here are the steps to prepare and run the job:
   * Run the following sample command:
 
 ```
-hadoop hdfs://volume/rya.mapreduce--shaded.jar 
org.apache.rya.accumulo.mr.tools.RdfFileInputTool -Dac.zk=localhost:2181 
-Dac.instance=accumulo -Dac.username=root -Dac.pwd=secret 
-Drdf.tablePrefix=triplestore_ -Drdf.format=N-Triples 
hdfs://volume/dir1,hdfs://volume/dir2,hdfs://volume/file1.nt
+hadoop hdfs://volume/rya.mapreduce--shaded.jar 
org.apache.rya.accumulo.mr.tools.RdfFileInputTool -Dac.zk=localhost:2181 
-Dac.instance=accumulo -Dac.username=root -Dac.pwd=secret 
-Drdf.tablePrefix=rya_ -Drdf.format=N-Triples 
hdfs://volume/dir1,hdfs://volume/dir2,hdfs://volume/file1.nt
 ```
 
 Options:
@@ -124,10 +124,11 @@ They need to be loaded into HDFS before running. If 
loading a directory,
 all files should have the same RDF format.
 
 Once the data is loaded, it is actually a good practice to compact your tables.
-You can do this by opening the accumulo shell shell and running the compact
+You can do this by opening the accumulo shell and running the compact
 command on the generated tables. Remember the generated tables will be
-prefixed by the rdf.tablePrefix property you assigned above.
-The default tablePrefix is `rts`.
+prefixed by the `rdf.tablePrefix` property you assigned above.
+The default tablePrefix is `rya_`.
+
 Here is a sample Accumulo Shell command:
 
 ```

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/0018afd5/extras/rya.manual/src/site/markdown/quickstart.md
--
diff --git a/extras/rya.manual/src/site/markdown/quickstart.md 
b/extras/rya.manual/src/site/markdown/quickstart.md
index 7a93cda..4483aa8 100644
--- a/extras/rya.manual/src/site/markdown/quickstart.md
+++ b/extras/rya.manual/src/site/markdown/quickstart.md
@@ -48,7 +48,7 @@ instance.name=accumulo  #Accumulo instance name
 instance.zk=localhost:2181  #Accumulo Zookeepers
 instance.username=root  #Accumulo username
 instance.password=secret  #Accumulo pwd
-rya.tableprefix=triplestore_  #Rya Table Prefix
+rya.tableprefix=rya_  #Rya Table Prefix
 rya.displayqueryplan=true  #To display the query plan
 ```
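
The block above is a plain key/value properties file. As a rough illustration only (the `environment.properties` file name and the reading code are assumptions, not part of the Rya docs being patched), the same settings could be read with `java.util.Properties`:

```
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;

// Sketch: read the connection settings shown in the quickstart snippet above.
public class RyaConnectionSettingsSketch {
    public static void main(final String[] args) throws IOException {
        final Properties props = new Properties();
        try (FileInputStream in = new FileInputStream("environment.properties")) { // assumed file name
            props.load(in);
        }
        final String instance = props.getProperty("instance.name");              // Accumulo instance name
        final String zookeepers = props.getProperty("instance.zk");              // Accumulo Zookeepers
        final String user = props.getProperty("instance.username");              // Accumulo username
        final String tablePrefix = props.getProperty("rya.tableprefix", "rya_"); // Rya table prefix
        System.out.printf("Rya on %s (%s) as %s, table prefix %s%n", instance, zookeepers, user, tablePrefix);
    }
}
```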
 



[4/4] incubator-rya git commit: Minor fix in loaddata.md; Closes #299

2018-10-01 Thread pujav65
Minor fix in loaddata.md; Closes #299


Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/5463da23
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/5463da23
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/5463da23

Branch: refs/heads/master
Commit: 5463da23c7cd5eaddf2ab13f0b42141211ab59f2
Parents: 6dd6882
Author: Maxim Kolchin 
Authored: Thu Jul 5 17:04:47 2018 +0300
Committer: Valiyil 
Committed: Mon Oct 1 09:07:45 2018 -0400

--
 extras/rya.manual/src/site/markdown/loaddata.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5463da23/extras/rya.manual/src/site/markdown/loaddata.md
--
diff --git a/extras/rya.manual/src/site/markdown/loaddata.md 
b/extras/rya.manual/src/site/markdown/loaddata.md
index 8894cbd..0770eed 100644
--- a/extras/rya.manual/src/site/markdown/loaddata.md
+++ b/extras/rya.manual/src/site/markdown/loaddata.md
@@ -99,7 +99,7 @@ Bulk loading data is done through Map Reduce jobs.
 This Map Reduce job will read files into memory and parse them into 
statements. The statements are saved into the triplestore. 
 Here are the steps to prepare and run the job:
 
-  * Load the RDF data to HDFS. It can be single of multiple volumes and 
directories in them.
+  * Load the RDF data to HDFS. It can be single of multiple volumes and can 
have directories in them.
   * Also load the `mapreduce/target/rya.mapreduce--shaded.jar` 
executable jar file to HDFS.
   * Run the following sample command:
 



[3/4] incubator-rya git commit: Remove the Typical First Steps section

2018-10-01 Thread pujav65
Remove the Typical First Steps section


Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/6dd68828
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/6dd68828
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/6dd68828

Branch: refs/heads/master
Commit: 6dd6882877c817dfd2308563b6cb479c8e7e52ba
Parents: 0018afd
Author: Maxim Kolchin 
Authored: Thu Jul 5 12:07:42 2018 +0300
Committer: Maxim Kolchin 
Committed: Thu Jul 5 12:07:42 2018 +0300

--
 extras/rya.manual/src/site/markdown/_index.md   |  3 +-
 extras/rya.manual/src/site/markdown/index.md|  3 +-
 .../src/site/markdown/sm-firststeps.md  | 80 
 3 files changed, 4 insertions(+), 82 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/6dd68828/extras/rya.manual/src/site/markdown/_index.md
--
diff --git a/extras/rya.manual/src/site/markdown/_index.md 
b/extras/rya.manual/src/site/markdown/_index.md
index 07dfe50..7a3aed9 100644
--- a/extras/rya.manual/src/site/markdown/_index.md
+++ b/extras/rya.manual/src/site/markdown/_index.md
@@ -36,7 +36,7 @@ This project contains documentation about Apache Rya, a 
scalable RDF triple stor
 - [Kafka Connect Integration](kafka-connect-integration.md)
 
 # Samples
-- [Typical First Steps](sm-firststeps.md)
+
 - [Simple Add/Query/Remove Statements](sm-simpleaqr.md)
 - [Sparql query](sm-sparqlquery.md)
 - [Adding Authentication](sm-addauth.md)
@@ -46,4 +46,5 @@ This project contains documentation about Apache Rya, a 
scalable RDF triple stor
 - [Alx](alx.md)
 
 # Development
+
 - [Building From Source](build-source.md)

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/6dd68828/extras/rya.manual/src/site/markdown/index.md
--
diff --git a/extras/rya.manual/src/site/markdown/index.md 
b/extras/rya.manual/src/site/markdown/index.md
index e686736..54f30e6 100644
--- a/extras/rya.manual/src/site/markdown/index.md
+++ b/extras/rya.manual/src/site/markdown/index.md
@@ -38,7 +38,7 @@ This project contains documentation about Apache Rya, a 
scalable RDF triple stor
 - [Kafka Connect Integration](kafka-connect-integration.md)
 
 # Samples
-- [Typical First Steps](sm-firststeps.md)
+
 - [Simple Add/Query/Remove Statements](sm-simpleaqr.md)
 - [Sparql query](sm-sparqlquery.md)
 - [Adding Authentication](sm-addauth.md)
@@ -48,4 +48,5 @@ This project contains documentation about Apache Rya, a 
scalable RDF triple stor
 - [Alx](alx.md)
 
 # Development
+
 - [Building From Source](build-source.md)

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/6dd68828/extras/rya.manual/src/site/markdown/sm-firststeps.md
--
diff --git a/extras/rya.manual/src/site/markdown/sm-firststeps.md 
b/extras/rya.manual/src/site/markdown/sm-firststeps.md
deleted file mode 100644
index 228bfb5..000
--- a/extras/rya.manual/src/site/markdown/sm-firststeps.md
+++ /dev/null
@@ -1,80 +0,0 @@
-
-
-# Typical First Steps
-
-In this tutorial, I will give you a quick overview of some of the first steps 
I perform to get data loaded and read for query.
-
-## Prerequisites
-
- We are assuming Accumulo 1.5+ usage here.
-
- * Apache Rya Source Code (`web.rya.war`)
- * Accumulo on top of Hadoop 0.20+
- * RDF Data (in N-Triples format, this format is the easiest to bulk load)
-
-## Building Source
-
-Skip this section if you already have the Map Reduce artifact and the WAR
-
-See the [Build From Source Section](build-source.md) to get the appropriate 
artifacts built
-
-## Load Data
-
-I find that the best way to load the data is through the Bulk Load Map Reduce 
job.
-
-* Save the RDF Data above onto HDFS. From now on we will refer to this 
location as ``
-* Move the `rya.mapreduce--job.jar` onto the hadoop cluster
-* Bulk load the data. Here is a sample command line:
-
-```
-hadoop jar ../rya.mapreduce-3.2.10-SNAPSHOT-job.jar 
org.apache.rya.accumulo.mr.RdfFileInputTool -Drdf.tablePrefix=lubm_ 
-Dcb.username=user -Dcb.pwd=cbpwd -Dcb.instance=instance 
-Dcb.zk=zookeeperLocation -Drdf.format=N-Triples 
-```
-
-Once the data is loaded, it is actually a good practice to compact your 
tables. You can do this by opening the accumulo shell `shell` and running the 
`compact` command on the generated tables. Remember the generated tables will 
be prefixed by the `rdf.tablePrefix` property you assigned above. The default 
tablePrefix is `rts`.
-
-Here is a sample accumulo shell command:
-
-```
-compact -p lubm_(.*)
-```
-
-See the [Load Data Section](loaddata.md) for more options on loading rdf data
-
-## Run the Statistics Optimizer
-
-For the best query performance, i

[1/4] incubator-rya git commit: Make RdfFileInputTool to accept multiple input paths. Doc improvements

2018-10-01 Thread pujav65
Repository: incubator-rya
Updated Branches:
  refs/heads/master 3c3ab0dfd -> 5463da23c


Make RdfFileInputTool to accept multiple input paths. Doc improvements


Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/639b980c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/639b980c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/639b980c

Branch: refs/heads/master
Commit: 639b980ce80677ec4703ba39e19cfd9e7943c506
Parents: 3c3ab0d
Author: Maxim Kolchin 
Authored: Wed Jul 4 13:04:30 2018 +0300
Committer: Maxim Kolchin 
Committed: Wed Jul 4 13:04:30 2018 +0300

--
 extras/rya.manual/src/site/markdown/loaddata.md | 48 +
 .../rya.manual/src/site/markdown/quickstart.md  |  4 +-
 mapreduce/pom.xml   | 56 ++--
 .../rya/accumulo/mr/AbstractAccumuloMRTool.java |  6 +--
 .../rya/accumulo/mr/tools/RdfFileInputTool.java |  2 +-
 .../accumulo/mr/tools/RdfFileInputToolTest.java | 40 +++---
 mapreduce/src/test/resources/test2.ntriples |  3 ++
 7 files changed, 108 insertions(+), 51 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/639b980c/extras/rya.manual/src/site/markdown/loaddata.md
--
diff --git a/extras/rya.manual/src/site/markdown/loaddata.md 
b/extras/rya.manual/src/site/markdown/loaddata.md
index e5c7bd2..9d43edd 100644
--- a/extras/rya.manual/src/site/markdown/loaddata.md
+++ b/extras/rya.manual/src/site/markdown/loaddata.md
@@ -21,7 +21,7 @@
 -->
 # Load Data
 
-There are a few mechanisms to load data
+There are a few mechanisms to load data.
 
 ## Web REST endpoint
 
@@ -92,29 +92,55 @@ The default "format" is RDF/XML, but these formats are 
supported : RDFXML, NTRIP
 
 ## Bulk Loading data
 
-Bulk loading data is done through Map Reduce jobs
+Bulk loading data is done through Map Reduce jobs.
 
 ### Bulk Load RDF data
 
-This Map Reduce job will read files into memory and parse them into 
statements. The statements are saved into the store. Here is an example for 
storing in Accumulo:
+This Map Reduce job will read files into memory and parse them into 
statements. The statements are saved into the triplestore. 
+Here are the steps to prepare and run the job:
+
+  * Load the RDF data to HDFS. It can be single of multiple volumes and 
directories in them.
+  * Also load the `mapreduce/target/rya.mapreduce--shaded.jar` 
executable jar file to HDFS.
+  * Run the following sample command:
 
 ```
-hadoop jar target/rya.mapreduce-3.2.10-SNAPSHOT-shaded.jar 
org.apache.rya.accumulo.mr.RdfFileInputTool -Dac.zk=localhost:2181 
-Dac.instance=accumulo -Dac.username=root -Dac.pwd=secret 
-Drdf.tablePrefix=triplestore_ -Drdf.format=N-Triples /tmp/temp.ntrips
+hadoop hdfs://volume/rya.mapreduce--shaded.jar 
org.apache.rya.accumulo.mr.tools.RdfFileInputTool -Dac.zk=localhost:2181 
-Dac.instance=accumulo -Dac.username=root -Dac.pwd=secret 
-Drdf.tablePrefix=triplestore_ -Drdf.format=N-Triples 
hdfs://volume/dir1,hdfs://volume/dir2,hdfs://volume/file1.nt
 ```
 
 Options:
 
-- rdf.tablePrefix : The tables (spo, po, osp) are prefixed with this 
qualifier. The tables become: 
(rdf.tablePrefix)spo,(rdf.tablePrefix)po,(rdf.tablePrefix)osp
-- ac.* : Accumulo connection parameters
-- rdf.format : See RDFFormat from RDF4J, samples include (Trig, N-Triples, 
RDF/XML)
-- sc.use_freetext, sc.use_geo, sc.use_temporal, sc.use_entity : If any of 
these are set to true, statements will also be
+- **rdf.tablePrefix** - The tables (spo, po, osp) are prefixed with this 
qualifier.
+The tables become: 
(rdf.tablePrefix)spo,(rdf.tablePrefix)po,(rdf.tablePrefix)osp
+- **ac.*** - Accumulo connection parameters
+- **rdf.format** - See RDFFormat from RDF4J, samples include (Trig, N-Triples, 
RDF/XML)
+- **sc.use_freetext, sc.use_geo, sc.use_temporal, sc.use_entity** - If any of 
these are set to true, statements will also be
 added to the enabled secondary indices.
-- sc.freetext.predicates, sc.geo.predicates, sc.temporal.predicates: If the 
associated indexer is enabled, these options specify
+- **sc.freetext.predicates, sc.geo.predicates, sc.temporal.predicates** - If 
the associated indexer is enabled, these options specify
 which statements should be sent to that indexer (based on the predicate). 
If not given, all indexers will attempt to index
 all statements.
 
-The argument is the directory/file to load. This file needs to be loaded into 
HDFS before running. If loading a directory, all files should have the same RDF
-format.
+The positional argument is a comma separated list of directories/files to load.
+They need to be loaded into HDFS before running. If loading a directory,
+all files should have the same RDF format.
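
As background for the commit title ("Make RdfFileInputTool to accept multiple input paths"): Hadoop's stock FileInputFormat already accepts a comma-separated path list, so a tool can hand the positional argument straight to it. A minimal sketch under that assumption (class name and job setup are illustrative, not Rya's actual RdfFileInputTool code):

```
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;

// Sketch: feed a comma-separated list of HDFS files/directories, e.g.
// "hdfs://volume/dir1,hdfs://volume/dir2,hdfs://volume/file1.nt", to a MapReduce job.
public class MultiPathInputSketch {
    public static void main(final String[] args) throws Exception {
        final Job job = Job.getInstance(new Configuration(), "rdf bulk load (sketch)");
        // addInputPaths splits the argument on commas and registers each path with the job.
        FileInputFormat.addInputPaths(job, args[0]);
        // Mapper, reducer, and output configuration for a real tool would follow here.
    }
}
```

If a directory is passed, every file inside it becomes job input, which is why the docs require all files in a directory to share one RDF format.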

[3/3] incubator-rya git commit: More code review; Closes #300

2018-10-01 Thread pujav65
More code review; Closes #300


Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/22d282b7
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/22d282b7
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/22d282b7

Branch: refs/heads/3.x/master
Commit: 22d282b7660f3fdd69352c1c8ba4a0b0d156fa78
Parents: 010f40d
Author: Andrew Smith 
Authored: Fri Sep 14 18:19:56 2018 -0400
Committer: Valiyil 
Committed: Mon Oct 1 09:13:59 2018 -0400

--
 .../AbstractMongoDBRdfConfigurationBuilder.java |  1 -
 .../apache/rya/mongodb/MongoDBQueryEngine.java  |  8 +++
 .../rya/mongodb/MongoDBRdfConfiguration.java| 21 +++--
 .../org/apache/rya/mongodb/MongoDBRyaDAO.java   |  6 ++---
 .../SparqlToPipelineTransformVisitor.java   |  2 +-
 .../mongodb/MongoDBRdfConfigurationTest.java|  2 --
 .../apache/rya/mongodb/MongoDBRyaDAO2IT.java| 10 
 .../org/apache/rya/mongodb/MongoDBRyaDAOIT.java | 12 +-
 .../org/apache/rya/mongodb/MongoITBase.java |  5 ++--
 .../client/mongo/MongoConnectionDetails.java|  1 -
 .../indexing/mongodb/AbstractMongoIndexer.java  | 12 +++---
 .../mongodb/freetext/MongoFreeTextIndexer.java  |  6 -
 .../mongodb/temporal/MongoTemporalIndexer.java  |  3 +--
 .../apache/rya/sail/config/RyaSailFactory.java  |  4 ++--
 .../rya/api/client/mongo/MongoInstallIT.java|  8 +++
 .../mongo/MongoIndexingConfigurationTest.java   |  2 --
 .../indexing/mongo/MongoPcjIntegrationTest.java | 24 ++--
 .../indexing/mongo/MongoTemporalIndexerIT.java  |  2 +-
 ...tatementMetadataExternalSetProviderTest.java |  2 +-
 .../StatementMetadataOptimizerTest.java |  2 +-
 .../client/conf/MergeConfigHadoopAdapter.java   |  2 +-
 .../org/apache/rya/indexing/export/ITBase.java  |  6 +
 .../strategy/MongoPipelineStrategy.java |  2 +-
 .../apache/rya/indexing/GeoRyaSailFactory.java  |  4 ++--
 .../indexing/mongo/MongoIndexerDeleteIT.java| 16 ++---
 25 files changed, 67 insertions(+), 96 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/22d282b7/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/AbstractMongoDBRdfConfigurationBuilder.java
--
diff --git 
a/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/AbstractMongoDBRdfConfigurationBuilder.java
 
b/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/AbstractMongoDBRdfConfigurationBuilder.java
index ee7cb61..02032b0 100644
--- 
a/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/AbstractMongoDBRdfConfigurationBuilder.java
+++ 
b/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/AbstractMongoDBRdfConfigurationBuilder.java
@@ -166,7 +166,6 @@ public abstract class AbstractMongoDBRdfConfigurationBuilder

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/22d282b7/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/MongoDBQueryEngine.java
--
diff --git 
a/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/MongoDBQueryEngine.java 
b/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/MongoDBQueryEngine.java
index 9ddb15a..a526437 100644
--- 
a/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/MongoDBQueryEngine.java
+++ 
b/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/MongoDBQueryEngine.java
@@ -78,8 +78,8 @@ public class MongoDBQueryEngine implements RyaQueryEngine<StatefulMongoDBRdfConfiguration> {
-Entry<RyaStatement, BindingSet> entry = new AbstractMap.SimpleEntry<>(stmt, new MapBindingSet());
-Collection<Entry<RyaStatement, BindingSet>> collection = Collections.singleton(entry);
+final Entry<RyaStatement, BindingSet> entry = new AbstractMap.SimpleEntry<>(stmt, new MapBindingSet());
+final Collection<Entry<RyaStatement, BindingSet>> collection = Collections.singleton(entry);
 
 return new RyaStatementCursorIterator(queryWithBindingSet(collection, conf));
 }
@@ -143,12 +143,12 @@ public class MongoDBQueryEngine implements RyaQueryEngine<StatefulMongoDBRdfConfiguration> {
-Iterator<RyaStatement> iterator = new RyaStatementCursorIterator(queryWithBindingSet(queries.entrySet(), getConf()));
+final Iterator<RyaStatement> iterator = new RyaStatementCursorIterator(queryWithBindingSet(queries.entrySet(), getConf()));
 return CloseableIterables.wrap((Iterable) () -> iterator);
 }
 
 private MongoCollection getCollection(final StatefulMongoDBRdfConfiguration conf) {
-final MongoDatabase db = conf.getMongoClient().getDatabase(conf.getMongoDBName());
+final MongoDatabase db = conf.getMongoClient().getDatabase(conf.getRyaInstanceName());
 return db.getCollection(conf.getTriplesCollectionName());
 }
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/22d282b7/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/MongoDBRdfConfiguration.java
---

[1/3] incubator-rya git commit: RYA-135 Hard code triples collection

2018-10-01 Thread pujav65
Repository: incubator-rya
Updated Branches:
  refs/heads/3.x/master 5c84df21d -> 22d282b76


RYA-135 Hard code triples collection

There is no need for the triples collection name to be configurable, as
only the Mongo database name distinguishes one Rya instance from another.
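
In practical terms (a rough sketch against the configuration class changed below; the no-arg constructor is assumed for illustration), the database name now identifies the Rya instance while the triples collection name is fixed:

```
import org.apache.rya.mongodb.MongoDBRdfConfiguration;

// Sketch: after this change only the database name varies per Rya instance.
public class TriplesCollectionSketch {
    public static void main(final String[] args) {
        final MongoDBRdfConfiguration conf = new MongoDBRdfConfiguration(); // no-arg constructor assumed
        conf.setRyaInstanceName("rya_demo");                  // stored as the MongoDB database name
        System.out.println(conf.getRyaInstanceName());        // "rya_demo" -> database to connect to
        System.out.println(conf.getTriplesCollectionName());  // "rya_triples" -> now hard coded
    }
}
```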


Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/942d9d62
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/942d9d62
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/942d9d62

Branch: refs/heads/3.x/master
Commit: 942d9d62350815a33f28a22f101cdb5c262c70ed
Parents: 5c84df2
Author: Andrew Smith 
Authored: Fri Sep 14 12:33:10 2018 -0400
Committer: Andrew Smith 
Committed: Fri Sep 14 12:44:51 2018 -0400

--
 .../rya/mongodb/MongoDBRdfConfiguration.java   | 17 +
 .../api/client/mongo/MongoConnectionDetails.java   |  4 ++--
 2 files changed, 11 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/942d9d62/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/MongoDBRdfConfiguration.java
--
diff --git 
a/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/MongoDBRdfConfiguration.java
 
b/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/MongoDBRdfConfiguration.java
index 44dc851..bdb802c 100644
--- 
a/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/MongoDBRdfConfiguration.java
+++ 
b/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/MongoDBRdfConfiguration.java
@@ -105,7 +105,7 @@ public class MongoDBRdfConfiguration extends 
RdfCloudTripleStoreConfiguration {
  * @param useMock - {@code true} to use an embedded Mongo DB instance; 
{@code false} to connect to a real server.
  */
 public void setUseMock(final boolean useMock) {
-this.setBoolean(USE_MOCK_MONGO, useMock);
+setBoolean(USE_MOCK_MONGO, useMock);
 }
 
 /**
@@ -197,7 +197,7 @@ public class MongoDBRdfConfiguration extends 
RdfCloudTripleStoreConfiguration {
  * @return The name of the Rya instance to connect to. (default: rya)
  */
 public String getRyaInstanceName() {
-return get(MONGO_COLLECTION_PREFIX, "rya");
+return get(MONGO_DB_NAME, "rya");
 }
 
 /**
@@ -205,14 +205,14 @@ public class MongoDBRdfConfiguration extends 
RdfCloudTripleStoreConfiguration {
  */
 public void setRyaInstanceName(final String name) {
 requireNonNull(name);
-set(MONGO_COLLECTION_PREFIX, name);
+set(MONGO_DB_NAME, name);
 }
 
 /**
- * @return The name of the MongoDB Collection that contains Rya 
statements. (default: rya_triples)
+ * @return The name of the MongoDB Collection that contains Rya 
statements. (rya_triples)
  */
 public String getTriplesCollectionName() {
-return getRyaInstanceName() + "_triples";
+return "rya_triples";
 }
 
 /**
@@ -274,16 +274,17 @@ public class MongoDBRdfConfiguration extends 
RdfCloudTripleStoreConfiguration {
  * on their child subtrees.
  * @param value whether to use aggregation pipeline optimization.
  */
-public void setUseAggregationPipeline(boolean value) {
+public void setUseAggregationPipeline(final boolean value) {
 setBoolean(USE_AGGREGATION_PIPELINE, value);
 }
 
 @Override
 public List<Class<QueryOptimizer>> getOptimizers() {
-List<Class<QueryOptimizer>> optimizers = super.getOptimizers();
+final List<Class<QueryOptimizer>> optimizers = super.getOptimizers();
 if (getUseAggregationPipeline()) {
-Class cl = AggregationPipelineQueryOptimizer.class;
+final Class cl = AggregationPipelineQueryOptimizer.class;
 @SuppressWarnings("unchecked")
+final
 Class optCl = (Class) cl;
 optimizers.add(optCl);
 }

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/942d9d62/extras/indexing/src/main/java/org/apache/rya/api/client/mongo/MongoConnectionDetails.java
--
diff --git 
a/extras/indexing/src/main/java/org/apache/rya/api/client/mongo/MongoConnectionDetails.java
 
b/extras/indexing/src/main/java/org/apache/rya/api/client/mongo/MongoConnectionDetails.java
index 192ea20..f4f969c 100644
--- 
a/extras/indexing/src/main/java/org/apache/rya/api/client/mongo/MongoConnectionDetails.java
+++ 
b/extras/indexing/src/main/java/org/apache/rya/api/client/mongo/MongoConnectionDetails.java
@@ -78,7 +78,7 @@ public class MongoConnectionDetails {
   * @return The username that was used to establish the connection when 
performing administrative operations.
   */
  public Optional getUsername() {
- return this.username;
+ return username;
  }
 
  /**
@@ -103,7 +103,7 @@ public class MongoCon

[2/3] incubator-rya git commit: Responded to code review

2018-10-01 Thread pujav65
Responded to code review

removed all references to mongo collection prefix


Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/010f40de
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/010f40de
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/010f40de

Branch: refs/heads/3.x/master
Commit: 010f40de276b2152451a42e1c830a2ad88b94860
Parents: 942d9d6
Author: Andrew Smith 
Authored: Fri Sep 14 17:06:27 2018 -0400
Committer: Andrew Smith 
Committed: Fri Sep 14 17:08:55 2018 -0400

--
 .../AbstractMongoDBRdfConfigurationBuilder.java | 34 -
 .../rya/mongodb/MongoDBRdfConfiguration.java|  9 ++--
 .../mongodb/MongoDBRdfConfigurationBuilder.java | 12 ++---
 .../mongodb/MongoDBRdfConfigurationTest.java| 12 ++---
 .../indexing/mongodb/AbstractMongoIndexer.java  |  9 ++--
 .../mongodb/MongoIndexingConfiguration.java |  8 ++-
 .../mongodb/freetext/MongoFreeTextIndexer.java  | 10 ++--
 .../mongodb/temporal/MongoTemporalIndexer.java  |  5 +-
 .../mongo/MongoIndexingConfigurationTest.java   | 44 
 .../indexing/mongo/MongoTemporalIndexerIT.java  |  4 +-
 .../src/main/java/InferenceExamples.java|  1 -
 .../src/main/java/MongoRyaDirectExample.java|  1 -
 .../org/apache/rya/indexing/export/ITBase.java  |  2 -
 .../geoExamples/RyaMongoGeoDirectExample.java   | 53 ++--
 14 files changed, 83 insertions(+), 121 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/010f40de/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/AbstractMongoDBRdfConfigurationBuilder.java
--
diff --git 
a/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/AbstractMongoDBRdfConfigurationBuilder.java
 
b/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/AbstractMongoDBRdfConfigurationBuilder.java
index 8afcb42..ee7cb61 100644
--- 
a/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/AbstractMongoDBRdfConfigurationBuilder.java
+++ 
b/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/AbstractMongoDBRdfConfigurationBuilder.java
@@ -41,14 +41,12 @@ public abstract class AbstractMongoDBRdfConfigurationBuilder

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/010f40de/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/MongoDBRdfConfiguration.java
--
diff --git 
a/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/MongoDBRdfConfiguration.java
 
b/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/MongoDBRdfConfiguration.java
index bdb802c..2c7a40f 100644
--- 
a/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/MongoDBRdfConfiguration.java
+++ 
b/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/MongoDBRdfConfiguration.java
@@ -35,6 +35,7 @@ import edu.umd.cs.findbugs.annotations.Nullable;
  * A {@link RdfCloudTripleStoreConfiguration} that configures how Rya connects 
to a MongoDB Rya triple store.
  */
 public class MongoDBRdfConfiguration extends RdfCloudTripleStoreConfiguration {
+public static final String RYA_TRIPLES_COLLECTION = "rya_triples";
 
 // MongoDB Server connection values.
 public static final String MONGO_HOSTNAME = "mongo.db.instance";
@@ -45,9 +46,6 @@ public class MongoDBRdfConfiguration extends 
RdfCloudTripleStoreConfiguration {
 public static final String MONGO_USER = "mongo.db.user";
 public static final String MONGO_USER_PASSWORD = "mongo.db.userpassword";
 
-// Rya Instance values.
-public static final String MONGO_COLLECTION_PREFIX = 
"mongo.db.collectionprefix";
-
 // Rya Sail configuration values.
 public static final String USE_MOCK_MONGO = ".useMockInstance";
 public static final String CONF_FLUSH_EACH_UPDATE = 
"rya.mongodb.dao.flusheachupdate";
@@ -212,7 +210,7 @@ public class MongoDBRdfConfiguration extends 
RdfCloudTripleStoreConfiguration {
  * @return The name of the MongoDB Collection that contains Rya 
statements. (rya_triples)
  */
 public String getTriplesCollectionName() {
-return "rya_triples";
+return RYA_TRIPLES_COLLECTION;
 }
 
 /**
@@ -284,8 +282,7 @@ public class MongoDBRdfConfiguration extends 
RdfCloudTripleStoreConfiguration {
 if (getUseAggregationPipeline()) {
 final Class cl = AggregationPipelineQueryOptimizer.class;
 @SuppressWarnings("unchecked")
-final
-Class optCl = (Class) cl;
+final Class optCl = (Class) cl;
 optimizers.add(optCl);
 }
 return optimizers;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/010f40de/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/MongoDBRdfConfigurationBuilder.java
---

incubator-rya git commit: RYA-501 Change guava Iterators/Collections; Closes #302

2018-10-01 Thread pujav65
Repository: incubator-rya
Updated Branches:
  refs/heads/3.x/master 22d282b76 -> 83aff37b5


RYA-501 Change guava Iterators/Collections; Closes #302

Google changed the Iterators object emptyIterator() visibility.
This can cause versioning issues with anything depending on
a newer version of guava.  Using Java's Collections.emptyIterator() instead.
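
The swap itself is mechanical; a minimal standalone sketch of the two APIs involved (not Rya's actual iterator class):

```
import java.util.Collections;
import java.util.Iterator;

// Sketch: replace Guava's Iterators.emptyIterator() with the JDK equivalent.
public class EmptyIteratorSketch {
    public static void main(final String[] args) {
        // Before (Guava; deprecated/restricted in newer releases):
        // Iterator<String> it = com.google.common.collect.Iterators.emptyIterator();

        // After (java.util; no dependency on the Guava version):
        final Iterator<String> it = Collections.emptyIterator();
        System.out.println(it.hasNext()); // prints "false"
    }
}
```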


Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/83aff37b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/83aff37b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/83aff37b

Branch: refs/heads/3.x/master
Commit: 83aff37b50c0ba505de3104f548638f015b39545
Parents: 22d282b
Author: Andrew Smith 
Authored: Fri Sep 14 12:37:16 2018 -0400
Committer: Valiyil 
Committed: Mon Oct 1 09:28:00 2018 -0400

--
 .../iter/RyaStatementBindingSetCursorIterator.java| 10 +-
 1 file changed, 5 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/83aff37b/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/iter/RyaStatementBindingSetCursorIterator.java
--
diff --git 
a/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/iter/RyaStatementBindingSetCursorIterator.java
 
b/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/iter/RyaStatementBindingSetCursorIterator.java
index 727538b..d66d1b6 100644
--- 
a/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/iter/RyaStatementBindingSetCursorIterator.java
+++ 
b/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/iter/RyaStatementBindingSetCursorIterator.java
@@ -19,6 +19,7 @@
 package org.apache.rya.mongodb.iter;
 
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
@@ -37,7 +38,6 @@ import org.bson.Document;
 import org.openrdf.query.BindingSet;
 
 import com.google.common.collect.HashMultimap;
-import com.google.common.collect.Iterators;
 import com.google.common.collect.Multimap;
 import com.mongodb.DBObject;
 import com.mongodb.client.AggregateIterable;
@@ -92,7 +92,7 @@ public class RyaStatementBindingSetCursorIterator implements 
CloseableIteration<
 }
 
 private boolean currentBindingSetIteratorIsValid() {
-return (currentBindingSetIterator != null) && 
currentBindingSetIterator.hasNext();
+return currentBindingSetIterator != null && 
currentBindingSetIterator.hasNext();
 }
 
 private void findNextResult() {
@@ -131,7 +131,7 @@ public class RyaStatementBindingSetCursorIterator 
implements CloseableIteration<
 }
 
 private static boolean isResult(final RyaType query, final RyaType result) 
{
-return (query == null) || query.equals(result);
+return query == null || query.equals(result);
 }
 
 private void submitBatchQuery() {
@@ -153,7 +153,7 @@ public class RyaStatementBindingSetCursorIterator 
implements CloseableIteration<
 } else if (match.size() == 1) {
 pipeline.add(new Document("$match", match.get(0)));
 } else {
-batchQueryResultsIterator = Iterators.emptyIterator();
+batchQueryResultsIterator = Collections.emptyIterator();
 return;
 }
 
@@ -167,7 +167,7 @@ public class RyaStatementBindingSetCursorIterator 
implements CloseableIteration<
 }
 
 private boolean currentBatchQueryResultCursorIsValid() {
-return (batchQueryResultsIterator != null) && 
batchQueryResultsIterator.hasNext();
+return batchQueryResultsIterator != null && 
batchQueryResultsIterator.hasNext();
 }