The unnecessary reuseObjects parameter has been removed.
Project: http://git-wip-us.apache.org/repos/asf/gora/repo Commit: http://git-wip-us.apache.org/repos/asf/gora/commit/92b71a6d Tree: http://git-wip-us.apache.org/repos/asf/gora/tree/92b71a6d Diff: http://git-wip-us.apache.org/repos/asf/gora/diff/92b71a6d Branch: refs/heads/master Commit: 92b71a6d2c95492621a56e48470e951e982cc34f Parents: 8584911 Author: Furkan KAMACI <[email protected]> Authored: Mon Aug 17 23:34:40 2015 +0300 Committer: Furkan KAMACI <[email protected]> Committed: Mon Aug 17 23:34:40 2015 +0300 ---------------------------------------------------------------------- .../java/org/apache/gora/spark/GoraSparkEngine.java | 13 +++++-------- .../apache/gora/tutorial/log/LogAnalyticsSpark.java | 2 +- 2 files changed, 6 insertions(+), 9 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/gora/blob/92b71a6d/gora-core/src/main/java/org/apache/gora/spark/GoraSparkEngine.java ---------------------------------------------------------------------- diff --git a/gora-core/src/main/java/org/apache/gora/spark/GoraSparkEngine.java b/gora-core/src/main/java/org/apache/gora/spark/GoraSparkEngine.java index 7a819fd..98dafea 100644 --- a/gora-core/src/main/java/org/apache/gora/spark/GoraSparkEngine.java +++ b/gora-core/src/main/java/org/apache/gora/spark/GoraSparkEngine.java @@ -99,17 +99,16 @@ public class GoraSparkEngine<K, V extends Persistent> { /** * Creates a job and sets the output parameters for the conf that Spark will use * @param dataStore the datastore as the output - * @param reuseObjects whether to reuse objects in serialization */ - public <K, V extends Persistent> Configuration generateOutputConf(DataStore<K, V> dataStore, - boolean reuseObjects) throws IOException { + public <K, V extends Persistent> Configuration generateOutputConf(DataStore<K, V> dataStore) + throws IOException { Configuration hadoopConf = new Configuration(); GoraMapReduceUtils.setIOSerializations(hadoopConf, true); 
Job job = Job.getInstance(hadoopConf); return generateOutputConf(job, dataStore.getClass(), dataStore.getKeyClass(), - dataStore.getPersistentClass(), reuseObjects); + dataStore.getPersistentClass()); } /** @@ -121,7 +120,7 @@ public class GoraSparkEngine<K, V extends Persistent> { public <K, V extends Persistent> Configuration generateOutputConf(Job job, DataStore<K, V> dataStore, boolean reuseObjects) { return generateOutputConf(job, dataStore.getClass(), dataStore.getKeyClass(), - dataStore.getPersistentClass(), reuseObjects); + dataStore.getPersistentClass()); } /** @@ -131,13 +130,11 @@ public class GoraSparkEngine<K, V extends Persistent> { * @param dataStoreClass the datastore class * @param keyClass output key class * @param persistentClass output value class - * @param reuseObjects whether to reuse objects in serialization */ @SuppressWarnings("rawtypes") public <K, V extends Persistent> Configuration generateOutputConf(Job job, Class<? extends DataStore> dataStoreClass, - Class<K> keyClass, Class<V> persistentClass, - boolean reuseObjects) { + Class<K> keyClass, Class<V> persistentClass) { job.setOutputFormatClass(GoraOutputFormat.class); job.setOutputKeyClass(keyClass); http://git-wip-us.apache.org/repos/asf/gora/blob/92b71a6d/gora-tutorial/src/main/java/org/apache/gora/tutorial/log/LogAnalyticsSpark.java ---------------------------------------------------------------------- diff --git a/gora-tutorial/src/main/java/org/apache/gora/tutorial/log/LogAnalyticsSpark.java b/gora-tutorial/src/main/java/org/apache/gora/tutorial/log/LogAnalyticsSpark.java index c66f7e4..8c37f8c 100644 --- a/gora-tutorial/src/main/java/org/apache/gora/tutorial/log/LogAnalyticsSpark.java +++ b/gora-tutorial/src/main/java/org/apache/gora/tutorial/log/LogAnalyticsSpark.java @@ -190,7 +190,7 @@ public class LogAnalyticsSpark { // //write output to datastore - Configuration sparkHadoopConf = goraSparkEngine.generateOutputConf(outStore, true); + Configuration sparkHadoopConf = 
goraSparkEngine.generateOutputConf(outStore); reducedGoraRdd.saveAsNewAPIHadoopDataset(sparkHadoopConf); //
