Format

Project: http://git-wip-us.apache.org/repos/asf/incubator-hivemall/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hivemall/commit/e88d74fa
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hivemall/tree/e88d74fa
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hivemall/diff/e88d74fa

Branch: refs/heads/master
Commit: e88d74fa40aae6a817e91ab47e149c0375ff5734
Parents: 739cde5
Author: Takuya Kitazawa <tak...@apache.org>
Authored: Tue Apr 17 11:28:53 2018 +0900
Committer: Takuya Kitazawa <tak...@apache.org>
Committed: Tue Apr 17 11:28:53 2018 +0900

----------------------------------------------------------------------
 core/src/main/java/hivemall/fm/Feature.java     |  6 +-
 .../hivemall/ftvec/ranking/BprSamplingUDTF.java | 12 +--
 .../hivemall/math/matrix/sparse/CSRMatrix.java  |  4 +-
 .../matrix/sparse/floats/CSRFloatMatrix.java    |  4 +-
 .../hivemall/optimizer/OptimizerOptions.java    |  3 +-
 .../java/hivemall/sketch/bloom/BloomAndUDF.java |  3 +-
 .../hivemall/sketch/bloom/BloomContainsUDF.java |  3 +-
 .../java/hivemall/sketch/bloom/BloomOrUDF.java  |  3 +-
 .../RandomForestClassifierUDTF.java             |  8 +-
 .../regression/RandomForestRegressionUDTF.java  |  5 +-
 .../hivemall/smile/tools/TreePredictUDF.java    | 14 ++--
 .../hivemall/smile/tools/TreePredictUDFv1.java  | 31 ++++----
 .../hivemall/statistics/MovingAverageUDTF.java  |  3 +-
 .../main/java/hivemall/tools/TryCastUDF.java    |  5 +-
 .../hivemall/tools/array/ArrayAppendUDF.java    |  6 +-
 .../hivemall/tools/array/ArrayConcatUDF.java    |  8 +-
 .../hivemall/tools/array/ArrayFlattenUDF.java   | 11 ++-
 .../hivemall/tools/array/ArraySliceUDF.java     |  7 +-
 .../hivemall/tools/array/ArrayUnionUDF.java     |  9 +--
 .../tools/array/ConditionalEmitUDTF.java        |  3 +-
 .../hivemall/tools/array/SelectKBestUDF.java    | 13 ++--
 .../java/hivemall/tools/json/FromJsonUDF.java   | 17 ++---
 .../java/hivemall/tools/json/ToJsonUDF.java     | 10 +--
 .../hivemall/tools/vector/VectorAddUDF.java     | 10 +--
 .../hivemall/tools/vector/VectorDotUDF.java     |  7 +-
 .../java/hivemall/utils/hadoop/HiveUtils.java   | 50 ++++++------
 .../hivemall/utils/hadoop/JsonSerdeUtils.java   | 64 ++++++++--------
 .../java/hivemall/utils/math/MatrixUtils.java   |  6 +-
 .../hivemall/sketch/bloom/BloomAndUDFTest.java  |  4 +-
 .../hivemall/sketch/bloom/BloomOrUDFTest.java   |  4 +-
 .../smile/tools/TreePredictUDFv1Test.java       | 63 ++++++++-------
 .../java/hivemall/tools/TryCastUDFTest.java     |  3 +-
 .../tools/array/ArrayAppendUDFTest.java         | 19 ++---
 .../tools/array/ArrayElementAtUDFTest.java      | 21 ++---
 .../tools/array/ArrayFlattenUDFTest.java        |  4 +-
 .../hivemall/tools/array/ArraySliceUDFTest.java | 22 ++----
 .../hivemall/tools/array/ArrayUnionUDFTest.java | 20 ++---
 .../tools/array/ConditionalEmitUDTFTest.java    | 14 ++--
 .../tools/array/FirstElementUDFTest.java        | 10 +--
 .../tools/array/LastElementUDFTest.java         | 10 +--
 .../tools/array/SelectKBestUDFTest.java         | 16 ++--
 .../hivemall/tools/json/FromJsonUDFTest.java    |  8 +-
 .../java/hivemall/tools/json/ToJsonUDFTest.java |  4 +-
 .../hivemall/tools/vector/VectorAddUDFTest.java | 19 ++---
 .../hivemall/tools/vector/VectorDotUDFTest.java | 18 ++---
 .../utils/hadoop/JsonSerdeUtilsTest.java        | 80 +++++++++-----------
 .../hivemall/nlp/tokenizer/KuromojiUDF.java     | 30 ++++----
 ...isticRegressionDataGeneratorUDTFWrapper.java |  3 +-
 .../tools/XGBoostMulticlassPredictUDTF.java     |  3 +-
 49 files changed, 322 insertions(+), 378 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/fm/Feature.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/fm/Feature.java b/core/src/main/java/hivemall/fm/Feature.java
index e6a03ac..2ccf7be 100644
--- a/core/src/main/java/hivemall/fm/Feature.java
+++ b/core/src/main/java/hivemall/fm/Feature.java
@@ -224,7 +224,8 @@ public abstract class Feature {
         final int pos2 = rest.indexOf(':');
         if (pos2 == -1) {
             throw new HiveException(
-                "Invalid FFM feature representation. Expected <field>:<index>:<value> but got " + fv);
+                "Invalid FFM feature representation. Expected <field>:<index>:<value> but got "
+                        + fv);
         }
 
         final short field;
@@ -295,7 +296,8 @@ public abstract class Feature {
         final int pos2 = rest.indexOf(':');
         if (pos2 == -1) {
             throw new HiveException(
-                "Invalid FFM feature representation. Expected <field>:<index>:<value> but got " + fv);
+                "Invalid FFM feature representation. Expected <field>:<index>:<value> but got "
+                        + fv);
         }
 
         final short field;

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/ftvec/ranking/BprSamplingUDTF.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/ftvec/ranking/BprSamplingUDTF.java b/core/src/main/java/hivemall/ftvec/ranking/BprSamplingUDTF.java
index 821c734..3967c82 100644
--- a/core/src/main/java/hivemall/ftvec/ranking/BprSamplingUDTF.java
+++ b/core/src/main/java/hivemall/ftvec/ranking/BprSamplingUDTF.java
@@ -210,8 +210,7 @@ public final class BprSamplingUDTF extends UDTFWithOptions {
         }
     }
 
-    private void forward(final int user, final int posItem, final int negItem)
-            throws HiveException {
+    private void forward(final int user, final int posItem, final int negItem) throws HiveException {
         assert (user >= 0) : user;
         assert (posItem >= 0) : posItem;
         assert (negItem >= 0) : negItem;
@@ -269,8 +268,9 @@ public final class BprSamplingUDTF extends UDTFWithOptions {
      * Caution: This is not a perfect 'without sampling' but it does 'without sampling' for positive
      * feedbacks.
      */
-    private void uniformUserSamplingWithoutReplacement(@Nonnull final PositiveOnlyFeedback feedback,
-            final int numSamples) throws HiveException {
+    private void uniformUserSamplingWithoutReplacement(
+            @Nonnull final PositiveOnlyFeedback feedback, final int numSamples)
+            throws HiveException {
         int numUsers = feedback.getNumUsers();
         if (numUsers == 0) {
             return;
@@ -288,8 +288,8 @@ public final class BprSamplingUDTF extends UDTFWithOptions {
             int nthUser = rand.nextInt(numUsers);
             int user = BitUtils.indexOfSetBit(userBits, nthUser);
             if (user == -1) {
-                throw new HiveException(
-                    "Cannot find " + nthUser + "-th user among " + numUsers + " users");
+                throw new HiveException("Cannot find " + nthUser + "-th user among " + numUsers
+                        + " users");
             }
 
             IntArrayList posItems = feedback.getItems(user, true);

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/math/matrix/sparse/CSRMatrix.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/math/matrix/sparse/CSRMatrix.java b/core/src/main/java/hivemall/math/matrix/sparse/CSRMatrix.java
index 5b63b68..9ce3054 100644
--- a/core/src/main/java/hivemall/math/matrix/sparse/CSRMatrix.java
+++ b/core/src/main/java/hivemall/math/matrix/sparse/CSRMatrix.java
@@ -53,8 +53,8 @@ public final class CSRMatrix extends RowMajorMatrix {
     public CSRMatrix(@Nonnull int[] rowPointers, @Nonnull int[] columnIndices,
             @Nonnull double[] values, @Nonnegative int numColumns) {
         super();
-        Preconditions.checkArgument(rowPointers.length >= 1,
-            "rowPointers must be greater than 0: " + rowPointers.length);
+        Preconditions.checkArgument(rowPointers.length >= 1, "rowPointers must be greater than 0: "
+                + rowPointers.length);
         Preconditions.checkArgument(columnIndices.length == values.length, "#columnIndices ("
                 + columnIndices.length + ") must be equals to #values (" + values.length + ")");
         this.rowPointers = rowPointers;

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/math/matrix/sparse/floats/CSRFloatMatrix.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/math/matrix/sparse/floats/CSRFloatMatrix.java b/core/src/main/java/hivemall/math/matrix/sparse/floats/CSRFloatMatrix.java
index 174e8e6..38e28a9 100644
--- a/core/src/main/java/hivemall/math/matrix/sparse/floats/CSRFloatMatrix.java
+++ b/core/src/main/java/hivemall/math/matrix/sparse/floats/CSRFloatMatrix.java
@@ -53,8 +53,8 @@ public final class CSRFloatMatrix extends RowMajorFloatMatrix {
     public CSRFloatMatrix(@Nonnull int[] rowPointers, @Nonnull int[] columnIndices,
             @Nonnull float[] values, @Nonnegative int numColumns) {
         super();
-        Preconditions.checkArgument(rowPointers.length >= 1,
-            "rowPointers must be greater than 0: " + rowPointers.length);
+        Preconditions.checkArgument(rowPointers.length >= 1, "rowPointers must be greater than 0: "
+                + rowPointers.length);
         Preconditions.checkArgument(columnIndices.length == values.length, "#columnIndices ("
                 + columnIndices.length + ") must be equals to #values (" + values.length + ")");
         this.rowPointers = rowPointers;

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/optimizer/OptimizerOptions.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/optimizer/OptimizerOptions.java b/core/src/main/java/hivemall/optimizer/OptimizerOptions.java
index 2fc838d..741c888 100644
--- a/core/src/main/java/hivemall/optimizer/OptimizerOptions.java
+++ b/core/src/main/java/hivemall/optimizer/OptimizerOptions.java
@@ -61,8 +61,7 @@ public final class OptimizerOptions {
         opts.addOption("scale", true, "Scaling factor for cumulative weights [100.0]");
     }
 
-    public static void processOptions(@Nullable CommandLine cl,
-            @Nonnull Map<String, String> options) {
+    public static void processOptions(@Nullable CommandLine cl, @Nonnull Map<String, String> options) {
         if (cl == null) {
             return;
         }

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/sketch/bloom/BloomAndUDF.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/sketch/bloom/BloomAndUDF.java b/core/src/main/java/hivemall/sketch/bloom/BloomAndUDF.java
index 87769da..9b029d4 100644
--- a/core/src/main/java/hivemall/sketch/bloom/BloomAndUDF.java
+++ b/core/src/main/java/hivemall/sketch/bloom/BloomAndUDF.java
@@ -30,7 +30,8 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.util.bloom.DynamicBloomFilter;
 import org.apache.hadoop.util.bloom.Filter;
 
-@Description(name = "bloom_and",
+@Description(
+        name = "bloom_and",
         value = "_FUNC_(string bloom1, string bloom2) - Returns the logical AND of two bloom filters")
 @UDFType(deterministic = true, stateful = false)
 public final class BloomAndUDF extends UDF {

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/sketch/bloom/BloomContainsUDF.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/sketch/bloom/BloomContainsUDF.java b/core/src/main/java/hivemall/sketch/bloom/BloomContainsUDF.java
index 2da65b3..2aa7510 100644
--- a/core/src/main/java/hivemall/sketch/bloom/BloomContainsUDF.java
+++ b/core/src/main/java/hivemall/sketch/bloom/BloomContainsUDF.java
@@ -32,7 +32,8 @@ import org.apache.hadoop.util.bloom.DynamicBloomFilter;
 import org.apache.hadoop.util.bloom.Filter;
 import org.apache.hadoop.util.bloom.Key;
 
-@Description(name = "bloom_contains",
+@Description(
+        name = "bloom_contains",
         value = "_FUNC_(string bloom, string key) - Returns true if the bloom filter contains the given key")
 @UDFType(deterministic = true, stateful = false)
 public final class BloomContainsUDF extends UDF {

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/sketch/bloom/BloomOrUDF.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/sketch/bloom/BloomOrUDF.java b/core/src/main/java/hivemall/sketch/bloom/BloomOrUDF.java
index 7d2980e..7f60be4 100644
--- a/core/src/main/java/hivemall/sketch/bloom/BloomOrUDF.java
+++ b/core/src/main/java/hivemall/sketch/bloom/BloomOrUDF.java
@@ -30,7 +30,8 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.util.bloom.DynamicBloomFilter;
 import org.apache.hadoop.util.bloom.Filter;
 
-@Description(name = "bloom_or",
+@Description(
+        name = "bloom_or",
         value = "_FUNC_(string bloom1, string bloom2) - Returns the logical OR of two bloom filters")
 @UDFType(deterministic = true, stateful = false)
 public final class BloomOrUDF extends UDF {

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/smile/classification/RandomForestClassifierUDTF.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/smile/classification/RandomForestClassifierUDTF.java b/core/src/main/java/hivemall/smile/classification/RandomForestClassifierUDTF.java
index 85420f3..71eeca2 100644
--- a/core/src/main/java/hivemall/smile/classification/RandomForestClassifierUDTF.java
+++ b/core/src/main/java/hivemall/smile/classification/RandomForestClassifierUDTF.java
@@ -133,7 +133,10 @@ public final class RandomForestClassifierUDTF extends UDTFWithOptions {
         Options opts = new Options();
         opts.addOption("trees", "num_trees", true,
             "The number of trees for each task [default: 50]");
-        opts.addOption("vars", "num_variables", true,
+        opts.addOption(
+            "vars",
+            "num_variables",
+            true,
             "The number of random selected features [default: ceil(sqrt(x[0].length))]."
                     + " int(num_variables * x[0].length) is considered if num_variable is (0.0,1.0]");
         opts.addOption("depth", "max_depth", true,
@@ -147,7 +150,8 @@ public final class RandomForestClassifierUDTF extends UDTFWithOptions {
         opts.addOption("seed", true, "seed value in long [default: -1 (random)]");
         opts.addOption("attrs", "attribute_types", true, "Comma separated attribute types "
                 + "(Q for quantitative variable and C for categorical variable. e.g., [Q,C,Q,C])");
-        opts.addOption("rule", "split_rule", true, "Split algorithm [default: GINI, ENTROPY, CLASSIFICATION_ERROR]");
+        opts.addOption("rule", "split_rule", true,
+            "Split algorithm [default: GINI, ENTROPY, CLASSIFICATION_ERROR]");
         opts.addOption("stratified", "stratified_sampling", false,
             "Enable Stratified sampling for unbalanced data");
         opts.addOption("subsample", true, "Sampling rate in range (0.0,1.0]. [default: 1.0]");

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/smile/regression/RandomForestRegressionUDTF.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/smile/regression/RandomForestRegressionUDTF.java b/core/src/main/java/hivemall/smile/regression/RandomForestRegressionUDTF.java
index ab0f72d..f3f4d4f 100644
--- a/core/src/main/java/hivemall/smile/regression/RandomForestRegressionUDTF.java
+++ b/core/src/main/java/hivemall/smile/regression/RandomForestRegressionUDTF.java
@@ -121,7 +121,10 @@ public final class RandomForestRegressionUDTF extends UDTFWithOptions {
         Options opts = new Options();
         opts.addOption("trees", "num_trees", true,
             "The number of trees for each task [default: 50]");
-        opts.addOption("vars", "num_variables", true,
+        opts.addOption(
+            "vars",
+            "num_variables",
+            true,
             "The number of random selected features [default: ceil(sqrt(x[0].length))]."
                     + " int(num_variables * x[0].length) is considered if num_variable is (0.0,1.0]");
         opts.addOption("depth", "max_depth", true,

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/smile/tools/TreePredictUDF.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/smile/tools/TreePredictUDF.java b/core/src/main/java/hivemall/smile/tools/TreePredictUDF.java
index 9b775bf..6e28935 100644
--- a/core/src/main/java/hivemall/smile/tools/TreePredictUDF.java
+++ b/core/src/main/java/hivemall/smile/tools/TreePredictUDF.java
@@ -55,7 +55,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspe
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
 
-@Description(name = "tree_predict",
+@Description(
+        name = "tree_predict",
         value = "_FUNC_(string modelId, string model, array<double|string> features [, const string options | const boolean classification=false])"
                 + " - Returns a prediction result of a random forest"
                 + " in <int value, array<double> a posteriori> for classification and <double> for regression")
@@ -133,8 +134,7 @@ public final class TreePredictUDF extends UDFWithOptions {
             fieldNames.add("value");
             fieldOIs.add(PrimitiveObjectInspectorFactory.writableIntObjectInspector);
             fieldNames.add("posteriori");
-            fieldOIs.add(ObjectInspectorFactory.getStandardListObjectInspector(
-                PrimitiveObjectInspectorFactory.writableDoubleObjectInspector));
+            fieldOIs.add(ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector));
             return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
         } else {
             return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
@@ -163,8 +163,8 @@ public final class TreePredictUDF extends UDFWithOptions {
         this.featuresProbe = parseFeatures(arg2, featuresProbe);
 
         if (evaluator == null) {
-            this.evaluator =
-                    classification ? new ClassificationEvaluator() : new RegressionEvaluator();
+            this.evaluator = classification ? new ClassificationEvaluator()
+                    : new RegressionEvaluator();
         }
         return evaluator.evaluate(modelId, model, featuresProbe);
     }
@@ -221,8 +221,8 @@ public final class TreePredictUDF extends UDFWithOptions {
                 }
 
                 if (feature.indexOf(':') != -1) {
-                    throw new UDFArgumentException(
-                        "Invalid feature format `<index>:<value>`: " + col);
+                    throw new UDFArgumentException("Invalid feature format `<index>:<value>`: "
+                            + col);
                 }
 
                 final int colIndex = Integer.parseInt(feature);

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/smile/tools/TreePredictUDFv1.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/smile/tools/TreePredictUDFv1.java b/core/src/main/java/hivemall/smile/tools/TreePredictUDFv1.java
index 5d16248..87c022d 100644
--- a/core/src/main/java/hivemall/smile/tools/TreePredictUDFv1.java
+++ b/core/src/main/java/hivemall/smile/tools/TreePredictUDFv1.java
@@ -63,7 +63,8 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.JobConf;
 
-@Description(name = "tree_predict_v1",
+@Description(
+        name = "tree_predict_v1",
         value = "_FUNC_(string modelId, int modelType, string script, array<double> features [, const boolean classification])"
                 + " - Returns a prediction result of a random forest")
 @UDFType(deterministic = true, stateful = false)
@@ -295,7 +296,7 @@ public final class TreePredictUDFv1 extends GenericUDF {
             return new IntWritable(result);
         }
 
-               @Nonnull
+        @Nonnull
         @VisibleForTesting
         static DtNodeV1 deserializeDecisionTree(@Nonnull final byte[] serializedObj,
                 final int length, final boolean compressed) throws HiveException {
@@ -307,8 +308,8 @@ public final class TreePredictUDFv1 extends GenericUDF {
                     ObjectUtils.readObject(serializedObj, length, root);
                 }
             } catch (IOException ioe) {
-                throw new HiveException("IOException cause while deserializing DecisionTree object",
-                    ioe);
+                throw new HiveException(
+                    "IOException cause while deserializing DecisionTree object", ioe);
             } catch (Exception e) {
                 throw new HiveException("Exception cause while deserializing DecisionTree object",
                     e);
@@ -332,8 +333,8 @@ public final class TreePredictUDFv1 extends GenericUDF {
 
         @Nonnull
         @VisibleForTesting
-         static RtNodeV1 deserializeRegressionTree(final byte[] serializedObj,
-                final int length, final boolean compressed) throws HiveException {
+        static RtNodeV1 deserializeRegressionTree(final byte[] serializedObj, final int length,
+                final boolean compressed) throws HiveException {
             final RtNodeV1 root = new RtNodeV1();
             try {
                 if (compressed) {
@@ -342,8 +343,8 @@ public final class TreePredictUDFv1 extends GenericUDF {
                     ObjectUtils.readObject(serializedObj, length, root);
                 }
             } catch (IOException ioe) {
-                throw new HiveException("IOException cause while deserializing DecisionTree object",
-                    ioe);
+                throw new HiveException(
+                    "IOException cause while deserializing DecisionTree object", ioe);
             } catch (Exception e) {
                 throw new HiveException("Exception cause while deserializing DecisionTree object",
                     e);
@@ -427,8 +428,8 @@ public final class TreePredictUDFv1 extends GenericUDF {
                         return falseChild.predict(x);
                     }
                 } else {
-                    throw new IllegalStateException(
-                        "Unsupported attribute type: " + splitFeatureType);
+                    throw new IllegalStateException("Unsupported attribute type: "
+                            + splitFeatureType);
                 }
             }
         }
@@ -530,8 +531,8 @@ public final class TreePredictUDFv1 extends GenericUDF {
                         return falseChild.predict(x);
                     }
                 } else {
-                    throw new IllegalStateException(
-                        "Unsupported attribute type: " + splitFeatureType);
+                    throw new IllegalStateException("Unsupported attribute type: "
+                            + splitFeatureType);
                 }
             }
         }
@@ -646,9 +647,9 @@ public final class TreePredictUDFv1 extends GenericUDF {
             ScriptEngineManager manager = new ScriptEngineManager();
             ScriptEngine engine = manager.getEngineByExtension("js");
             if (!(engine instanceof Compilable)) {
-                throw new UDFArgumentException(
-                    "ScriptEngine was not compilable: " + engine.getFactory().getEngineName()
-                            + " version " + engine.getFactory().getEngineVersion());
+                throw new UDFArgumentException("ScriptEngine was not compilable: "
+                        + engine.getFactory().getEngineName() + " version "
+                        + engine.getFactory().getEngineVersion());
             }
             this.scriptEngine = engine;
             this.compilableEngine = (Compilable) engine;

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/statistics/MovingAverageUDTF.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/statistics/MovingAverageUDTF.java b/core/src/main/java/hivemall/statistics/MovingAverageUDTF.java
index 112c47f..fd24ec0 100644
--- a/core/src/main/java/hivemall/statistics/MovingAverageUDTF.java
+++ b/core/src/main/java/hivemall/statistics/MovingAverageUDTF.java
@@ -62,8 +62,7 @@ public final class MovingAverageUDTF extends GenericUDTF {
         this.forwardObjs = new Object[] {result};
 
         List<String> fieldNames = Arrays.asList("avg");
-        List<ObjectInspector> fieldOIs = Arrays.<ObjectInspector>asList(
-            PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+        List<ObjectInspector> fieldOIs = Arrays.<ObjectInspector>asList(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
 
         return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
     }

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/tools/TryCastUDF.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/tools/TryCastUDF.java b/core/src/main/java/hivemall/tools/TryCastUDF.java
index a0f3257..69ddc2f 100644
--- a/core/src/main/java/hivemall/tools/TryCastUDF.java
+++ b/core/src/main/java/hivemall/tools/TryCastUDF.java
@@ -32,9 +32,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 
-@Description(name = "try_cast",
-        value = "_FUNC_(ANY src, const string typeName)"
-                + " - Explicitly cast a value as a type. Returns null if cast fails.",
+@Description(name = "try_cast", value = "_FUNC_(ANY src, const string typeName)"
+        + " - Explicitly cast a value as a type. Returns null if cast fails.",
         extended = "Usage: select try_cast(array(1.0,2.0,3.0), 'array<string>')\n"
                 + "     select try_cast(map('A',10,'B',20,'C',30), 'map<string,double>')")
 @UDFType(deterministic = true, stateful = false)

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/tools/array/ArrayAppendUDF.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/tools/array/ArrayAppendUDF.java b/core/src/main/java/hivemall/tools/array/ArrayAppendUDF.java
index 25d0f4c..8c715c4 100644
--- a/core/src/main/java/hivemall/tools/array/ArrayAppendUDF.java
+++ b/core/src/main/java/hivemall/tools/array/ArrayAppendUDF.java
@@ -52,16 +52,14 @@ public final class ArrayAppendUDF extends GenericUDF {
     @Override
     public ObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
         this.listInspector = HiveUtils.asListOI(argOIs[0]);
-        this.listElemInspector =
-                HiveUtils.asPrimitiveObjectInspector(listInspector.getListElementObjectInspector());
+        this.listElemInspector = HiveUtils.asPrimitiveObjectInspector(listInspector.getListElementObjectInspector());
         this.primInspector = HiveUtils.asPrimitiveObjectInspector(argOIs[1]);
         if (listElemInspector.getPrimitiveCategory() != primInspector.getPrimitiveCategory()) {
             throw new UDFArgumentException(
                 "array_append expects the list type to match the type of the value being appended");
         }
         this.returnWritables = listElemInspector.preferWritable();
-        return ObjectInspectorFactory.getStandardListObjectInspector(
-            ObjectInspectorUtils.getStandardObjectInspector(listElemInspector));
+        return ObjectInspectorFactory.getStandardListObjectInspector(ObjectInspectorUtils.getStandardObjectInspector(listElemInspector));
     }
 
     @Nullable

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/tools/array/ArrayConcatUDF.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/tools/array/ArrayConcatUDF.java b/core/src/main/java/hivemall/tools/array/ArrayConcatUDF.java
index d0f69b8..baeca60 100644
--- a/core/src/main/java/hivemall/tools/array/ArrayConcatUDF.java
+++ b/core/src/main/java/hivemall/tools/array/ArrayConcatUDF.java
@@ -64,10 +64,10 @@ public class ArrayConcatUDF extends GenericUDF {
                         break;
                     }
                 default:
-                    throw new UDFArgumentTypeException(0,
-                        "Argument " + i + " of function CONCAT_ARRAY must be " + LIST_TYPE_NAME
-                                + "<" + Category.PRIMITIVE + ">, but " + arguments[0].getTypeName()
-                                + " was found.");
+                    throw new UDFArgumentTypeException(0, "Argument " + i
+                            + " of function CONCAT_ARRAY must be " + LIST_TYPE_NAME + "<"
+                            + Category.PRIMITIVE + ">, but " + arguments[0].getTypeName()
+                            + " was found.");
             }
         }
 

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/tools/array/ArrayFlattenUDF.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/tools/array/ArrayFlattenUDF.java b/core/src/main/java/hivemall/tools/array/ArrayFlattenUDF.java
index 906d594..b35ad1e 100644
--- a/core/src/main/java/hivemall/tools/array/ArrayFlattenUDF.java
+++ b/core/src/main/java/hivemall/tools/array/ArrayFlattenUDF.java
@@ -48,21 +48,20 @@ public final class ArrayFlattenUDF extends GenericUDF {
     @Override
     public ObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
         if (argOIs.length != 1) {
-            throw new UDFArgumentException(
-                "array_flatten expects exactly one argument: " + argOIs.length);
+            throw new UDFArgumentException("array_flatten expects exactly one argument: "
+                    + argOIs.length);
         }
 
         this.listOI = HiveUtils.asListOI(argOIs[0]);
         ObjectInspector listElemOI = listOI.getListElementObjectInspector();
         if (listElemOI.getCategory() != Category.LIST) {
-            throw new UDFArgumentException(
-                "array_flatten takes array of array for the argument: " + listOI.toString());
+            throw new UDFArgumentException("array_flatten takes array of array for the argument: "
+                    + listOI.toString());
         }
         this.nextedListOI = HiveUtils.asListOI(listElemOI);
         this.elemOI = nextedListOI.getListElementObjectInspector();
 
-        return ObjectInspectorFactory.getStandardListObjectInspector(
-            ObjectInspectorUtils.getStandardObjectInspector(elemOI));
+        return ObjectInspectorFactory.getStandardListObjectInspector(ObjectInspectorUtils.getStandardObjectInspector(elemOI));
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/tools/array/ArraySliceUDF.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/tools/array/ArraySliceUDF.java b/core/src/main/java/hivemall/tools/array/ArraySliceUDF.java
index 4676acc..f4be2bc 100644
--- a/core/src/main/java/hivemall/tools/array/ArraySliceUDF.java
+++ b/core/src/main/java/hivemall/tools/array/ArraySliceUDF.java
@@ -39,7 +39,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 
-@Description(name = "array_slice",
+@Description(
+        name = "array_slice",
         value = "_FUNC_(array<ANY> values, int offset [, int length]) - Slices the given array by the given offset and length parameters.")
 @UDFType(deterministic = true, stateful = false)
 public final class ArraySliceUDF extends GenericUDF {
@@ -54,8 +55,8 @@ public final class ArraySliceUDF extends GenericUDF {
     @Override
     public ObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
         if (argOIs.length != 2 && argOIs.length != 3) {
-            throw new UDFArgumentLengthException(
-                "Expected 2 or 3 arguments, but got " + argOIs.length);
+            throw new UDFArgumentLengthException("Expected 2 or 3 arguments, but got "
+                    + argOIs.length);
         }
 
         this.valuesOI = HiveUtils.asListOI(argOIs[0]);

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/tools/array/ArrayUnionUDF.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/tools/array/ArrayUnionUDF.java b/core/src/main/java/hivemall/tools/array/ArrayUnionUDF.java
index 921bbfa..b215351 100644
--- a/core/src/main/java/hivemall/tools/array/ArrayUnionUDF.java
+++ b/core/src/main/java/hivemall/tools/array/ArrayUnionUDF.java
@@ -67,17 +67,16 @@ public final class ArrayUnionUDF extends GenericUDF {
             ListObjectInspector checkOI = HiveUtils.asListOI(argOIs[i]);
             if (!ObjectInspectorUtils.compareTypes(arg0ElemOI,
                 checkOI.getListElementObjectInspector())) {
-                throw new UDFArgumentException("Array types does not match: " + arg0OI.getTypeName()
-                        + " != " + checkOI.getTypeName());
+                throw new UDFArgumentException("Array types does not match: "
+                        + arg0OI.getTypeName() + " != " + checkOI.getTypeName());
             }
             listOIs[i] = checkOI;
         }
 
         this._listOIs = listOIs;
 
-        return ObjectInspectorFactory.getStandardListObjectInspector(
-            ObjectInspectorUtils.getStandardObjectInspector(arg0ElemOI,
-                ObjectInspectorCopyOption.WRITABLE));
+        return ObjectInspectorFactory.getStandardListObjectInspector(ObjectInspectorUtils.getStandardObjectInspector(
+            arg0ElemOI, ObjectInspectorCopyOption.WRITABLE));
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/tools/array/ConditionalEmitUDTF.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/tools/array/ConditionalEmitUDTF.java b/core/src/main/java/hivemall/tools/array/ConditionalEmitUDTF.java
index a73a06f..c7acde6 100644
--- a/core/src/main/java/hivemall/tools/array/ConditionalEmitUDTF.java
+++ b/core/src/main/java/hivemall/tools/array/ConditionalEmitUDTF.java
@@ -79,8 +79,7 @@ public final class ConditionalEmitUDTF extends GenericUDTF {
         this.condElemOI = HiveUtils.asBooleanOI(conditionsOI.getListElementObjectInspector());
 
         this.featuresOI = HiveUtils.asListOI(argOIs[1]);
-        this.featureElemOI =
-                HiveUtils.asPrimitiveObjectInspector(featuresOI.getListElementObjectInspector());
+        this.featureElemOI = HiveUtils.asPrimitiveObjectInspector(featuresOI.getListElementObjectInspector());
 
         List<String> fieldNames = Arrays.asList("feature");
         List<ObjectInspector> fieldOIs = Arrays.<ObjectInspector>asList(featureElemOI);

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/tools/array/SelectKBestUDF.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/tools/array/SelectKBestUDF.java b/core/src/main/java/hivemall/tools/array/SelectKBestUDF.java
index ff37217..527060c 100644
--- a/core/src/main/java/hivemall/tools/array/SelectKBestUDF.java
+++ b/core/src/main/java/hivemall/tools/array/SelectKBestUDF.java
@@ -82,8 +82,7 @@ public final class SelectKBestUDF extends GenericUDF {
         this.featuresOI = HiveUtils.asListOI(OIs[0]);
         this.featureOI = HiveUtils.asDoubleCompatibleOI(featuresOI.getListElementObjectInspector());
         this.importanceListOI = HiveUtils.asListOI(OIs[1]);
-        this.importanceElemOI =
-                HiveUtils.asDoubleCompatibleOI(importanceListOI.getListElementObjectInspector());
+        this.importanceElemOI = HiveUtils.asDoubleCompatibleOI(importanceListOI.getListElementObjectInspector());
 
         this._k = HiveUtils.getConstInt(OIs[2]);
         Preconditions.checkArgument(_k >= 1, UDFArgumentException.class);
@@ -93,15 +92,14 @@ public final class SelectKBestUDF extends GenericUDF {
         }
         this._result = result;
 
-        return ObjectInspectorFactory.getStandardListObjectInspector(
-            PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+        return ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
     }
 
     @Override
     public List<DoubleWritable> evaluate(DeferredObject[] dObj) throws HiveException {
         final double[] features = HiveUtils.asDoubleArray(dObj[0].get(), featuresOI, featureOI);
-        final double[] importanceList =
-                HiveUtils.asDoubleArray(dObj[1].get(), importanceListOI, importanceElemOI);
+        final double[] importanceList = HiveUtils.asDoubleArray(dObj[1].get(), importanceListOI,
+            importanceElemOI);
 
         Preconditions.checkNotNull(features, UDFArgumentException.class);
         Preconditions.checkNotNull(importanceList, UDFArgumentException.class);
@@ -111,8 +109,7 @@ public final class SelectKBestUDF extends GenericUDF {
 
         int[] topKIndices = _topKIndices;
         if (topKIndices == null) {
-            final List<Map.Entry<Integer, Double>> list =
-                    new ArrayList<Map.Entry<Integer, Double>>();
+            final List<Map.Entry<Integer, Double>> list = new ArrayList<Map.Entry<Integer, Double>>();
             for (int i = 0; i < importanceList.length; i++) {
                 list.add(new AbstractMap.SimpleEntry<Integer, Double>(i, importanceList[i]));
             }

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/tools/json/FromJsonUDF.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/tools/json/FromJsonUDF.java b/core/src/main/java/hivemall/tools/json/FromJsonUDF.java
index 36c29cc..8ee2a2d 100644
--- a/core/src/main/java/hivemall/tools/json/FromJsonUDF.java
+++ b/core/src/main/java/hivemall/tools/json/FromJsonUDF.java
@@ -43,7 +43,8 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hive.hcatalog.data.HCatRecordObjectInspectorFactory;
 
-@Description(name = "from_json",
+@Description(
+        name = "from_json",
         value = "_FUNC_(string jsonString, const string returnTypes [, const array<string>|const string columnNames])"
                 + " - Return Hive object.")
 @UDFType(deterministic = true, stateful = false)
@@ -58,8 +59,8 @@ public final class FromJsonUDF extends GenericUDF {
     @Override
     public ObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
         if (argOIs.length != 2 && argOIs.length != 3) {
-            throw new UDFArgumentException(
-                "from_json takes two or three arguments: " + argOIs.length);
+            throw new UDFArgumentException("from_json takes two or three arguments: "
+                    + argOIs.length);
         }
 
         this.jsonOI = HiveUtils.asStringOI(argOIs[0]);
@@ -94,8 +95,7 @@ public final class FromJsonUDF extends GenericUDF {
         final int numColumns = columnTypes.size();
         if (numColumns == 1) {
             TypeInfo type = columnTypes.get(0);
-            returnOI =
-                    HCatRecordObjectInspectorFactory.getStandardObjectInspectorFromTypeInfo(type);
+            returnOI = HCatRecordObjectInspectorFactory.getStandardObjectInspectorFromTypeInfo(type);
         } else {
             if (columnNames == null) {
                 columnNames = new ArrayList<>(numColumns);
@@ -111,9 +111,7 @@ public final class FromJsonUDF extends GenericUDF {
             final ObjectInspector[] fieldOIs = new ObjectInspector[numColumns];
             for (int i = 0; i < fieldOIs.length; i++) {
                 TypeInfo type = columnTypes.get(i);
-                fieldOIs[i] =
-                        HCatRecordObjectInspectorFactory.getStandardObjectInspectorFromTypeInfo(
-                            type);
+                fieldOIs[i] = HCatRecordObjectInspectorFactory.getStandardObjectInspectorFromTypeInfo(type);
             }
             returnOI = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames,
                 Arrays.asList(fieldOIs));
@@ -134,8 +132,7 @@ public final class FromJsonUDF extends GenericUDF {
             result = JsonSerdeUtils.deserialize(jsonString, columnNames, columnTypes);
         } catch (Throwable e) {
             throw new HiveException("Failed to deserialize Json: \n" + jsonString.toString() + '\n'
-                    + ExceptionUtils.prettyPrintStackTrace(e),
-                e);
+                    + ExceptionUtils.prettyPrintStackTrace(e), e);
         }
         return result;
     }

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/tools/json/ToJsonUDF.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/tools/json/ToJsonUDF.java b/core/src/main/java/hivemall/tools/json/ToJsonUDF.java
index 70c62b9..416d0c9 100644
--- a/core/src/main/java/hivemall/tools/json/ToJsonUDF.java
+++ b/core/src/main/java/hivemall/tools/json/ToJsonUDF.java
@@ -37,7 +37,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.Text;
 
-@Description(name = "to_json",
+@Description(
+        name = "to_json",
         value = "_FUNC_(ANY object [, const array<string>|const string columnNames]) - Returns Json string")
 @UDFType(deterministic = true, stateful = false)
 public final class ToJsonUDF extends GenericUDF {
@@ -50,8 +51,7 @@ public final class ToJsonUDF extends GenericUDF {
     @Override
     public ObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
         if (argOIs.length != 1 && argOIs.length != 2) {
-            throw new UDFArgumentException(
-                "from_json takes one or two arguments: " + argOIs.length);
+            throw new UDFArgumentException("from_json takes one or two arguments: " + argOIs.length);
         }
 
         this.objOI = argOIs[0];
@@ -81,8 +81,8 @@ public final class ToJsonUDF extends GenericUDF {
         try {
             return JsonSerdeUtils.serialize(obj, objOI, columnNames);
         } catch (Throwable e) {
-            throw new HiveException(
-                "Failed to serialize: " + obj + '\n' + ExceptionUtils.prettyPrintStackTrace(e), e);
+            throw new HiveException("Failed to serialize: " + obj + '\n'
+                    + ExceptionUtils.prettyPrintStackTrace(e), e);
         }
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/tools/vector/VectorAddUDF.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/tools/vector/VectorAddUDF.java b/core/src/main/java/hivemall/tools/vector/VectorAddUDF.java
index 8442ae3..ecff2f4 100644
--- a/core/src/main/java/hivemall/tools/vector/VectorAddUDF.java
+++ b/core/src/main/java/hivemall/tools/vector/VectorAddUDF.java
@@ -63,12 +63,10 @@ public final class VectorAddUDF extends GenericUDF {
 
         if (HiveUtils.isIntegerOI(xElemOI) && HiveUtils.isIntegerOI(yElemOI)) {
             this.floatingPoints = false;
-            return ObjectInspectorFactory.getStandardListObjectInspector(
-                PrimitiveObjectInspectorFactory.javaLongObjectInspector);
+            return ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaLongObjectInspector);
         } else {
             this.floatingPoints = true;
-            return ObjectInspectorFactory.getStandardListObjectInspector(
-                PrimitiveObjectInspectorFactory.javaDoubleObjectInspector);
+            return ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaDoubleObjectInspector);
         }
     }
 
@@ -84,8 +82,8 @@ public final class VectorAddUDF extends GenericUDF {
         final int xLen = xOI.getListLength(arg0);
         final int yLen = yOI.getListLength(arg1);
         if (xLen != yLen) {
-            throw new HiveException(
-                "vector lengths do not match. x=" + xOI.getList(arg0) + ", y=" + yOI.getList(arg1));
+            throw new HiveException("vector lengths do not match. x=" + xOI.getList(arg0) + ", y="
+                    + yOI.getList(arg1));
         }
 
         if (floatingPoints) {

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/tools/vector/VectorDotUDF.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/tools/vector/VectorDotUDF.java b/core/src/main/java/hivemall/tools/vector/VectorDotUDF.java
index 2aa3c03..15568f5 100644
--- a/core/src/main/java/hivemall/tools/vector/VectorDotUDF.java
+++ b/core/src/main/java/hivemall/tools/vector/VectorDotUDF.java
@@ -57,8 +57,8 @@ public final class VectorDotUDF extends GenericUDF {
 
         ObjectInspector argOI0 = argOIs[0];
         if (!HiveUtils.isNumberListOI(argOI0)) {
-            throw new UDFArgumentException(
-                "Expected array<number> for the first argument: " + argOI0.getTypeName());
+            throw new UDFArgumentException("Expected array<number> for the first argument: "
+                    + argOI0.getTypeName());
         }
         ListObjectInspector xListOI = HiveUtils.asListOI(argOI0);
 
@@ -72,8 +72,7 @@ public final class VectorDotUDF extends GenericUDF {
                 "Expected array<number> or number for the send argument: " + argOI1.getTypeName());
         }
 
-        return ObjectInspectorFactory.getStandardListObjectInspector(
-            PrimitiveObjectInspectorFactory.javaDoubleObjectInspector);
+        return ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaDoubleObjectInspector);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/utils/hadoop/HiveUtils.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/utils/hadoop/HiveUtils.java b/core/src/main/java/hivemall/utils/hadoop/HiveUtils.java
index f3fe703..cbb1989 100644
--- a/core/src/main/java/hivemall/utils/hadoop/HiveUtils.java
+++ b/core/src/main/java/hivemall/utils/hadoop/HiveUtils.java
@@ -105,8 +105,8 @@ public final class HiveUtils {
         if (o instanceof LongWritable) {
             long l = ((LongWritable) o).get();
             if (l > 0x7fffffffL) {
-                throw new IllegalArgumentException(
-                    "feature index must be less than " + Integer.MAX_VALUE + ", but was " + l);
+                throw new IllegalArgumentException("feature index must be less than "
+                        + Integer.MAX_VALUE + ", but was " + l);
             }
             return (int) l;
         }
@@ -503,8 +503,8 @@ public final class HiveUtils {
         }
         ConstantObjectInspector constOI = (ConstantObjectInspector) oi;
         if (constOI.getCategory() != Category.LIST) {
-            throw new UDFArgumentException(
-                "argument must be an array: " + TypeInfoUtils.getTypeInfoFromObjectInspector(oi));
+            throw new UDFArgumentException("argument must be an array: "
+                    + TypeInfoUtils.getTypeInfoFromObjectInspector(oi));
         }
         final List<?> lst = (List<?>) constOI.getWritableConstantValue();
         if (lst == null) {
@@ -530,12 +530,11 @@ public final class HiveUtils {
         }
         ConstantObjectInspector constOI = (ConstantObjectInspector) oi;
         if (constOI.getCategory() != Category.LIST) {
-            throw new UDFArgumentException(
-                "argument must be an array: " + TypeInfoUtils.getTypeInfoFromObjectInspector(oi));
+            throw new UDFArgumentException("argument must be an array: "
+                    + TypeInfoUtils.getTypeInfoFromObjectInspector(oi));
         }
         StandardConstantListObjectInspector listOI = (StandardConstantListObjectInspector) constOI;
-        PrimitiveObjectInspector elemOI =
-                HiveUtils.asDoubleCompatibleOI(listOI.getListElementObjectInspector());
+        PrimitiveObjectInspector elemOI = HiveUtils.asDoubleCompatibleOI(listOI.getListElementObjectInspector());
 
         final List<?> lst = listOI.getWritableConstantValue();
         if (lst == null) {
@@ -796,8 +795,8 @@ public final class HiveUtils {
         }
         final int length = listOI.getListLength(argObj);
         if (out.length != length) {
-            throw new UDFArgumentException(
-                "Dimension mismatched. Expected: " + out.length + ", Actual: " + length);
+            throw new UDFArgumentException("Dimension mismatched. Expected: " + out.length
+                    + ", Actual: " + length);
         }
         for (int i = 0; i < length; i++) {
             Object o = listOI.getListElement(argObj, i);
@@ -822,8 +821,8 @@ public final class HiveUtils {
         }
         final int length = listOI.getListLength(argObj);
         if (out.length != length) {
-            throw new UDFArgumentException(
-                "Dimension mismatched. Expected: " + out.length + ", Actual: " + length);
+            throw new UDFArgumentException("Dimension mismatched. Expected: " + out.length
+                    + ", Actual: " + length);
         }
         for (int i = 0; i < length; i++) {
             Object o = listOI.getListElement(argObj, i);
@@ -958,8 +957,8 @@ public final class HiveUtils {
             case STRING:
                 break;
             default:
-                throw new UDFArgumentTypeException(0,
-                    "Unexpected type '" + argOI.getTypeName() + "' is passed.");
+                throw new UDFArgumentTypeException(0, "Unexpected type '" + argOI.getTypeName()
+                        + "' is passed.");
         }
         return oi;
     }
@@ -985,8 +984,8 @@ public final class HiveUtils {
             case TIMESTAMP:
                 break;
             default:
-                throw new UDFArgumentTypeException(0,
-                    "Unexpected type '" + argOI.getTypeName() + "' is passed.");
+                throw new UDFArgumentTypeException(0, "Unexpected type '" + argOI.getTypeName()
+                        + "' is passed.");
         }
         return oi;
     }
@@ -1006,15 +1005,15 @@ public final class HiveUtils {
             case BYTE:
                 break;
             default:
-                throw new UDFArgumentTypeException(0,
-                    "Unexpected type '" + argOI.getTypeName() + "' is passed.");
+                throw new UDFArgumentTypeException(0, "Unexpected type '" + argOI.getTypeName()
+                        + "' is passed.");
         }
         return oi;
     }
 
     @Nonnull
-    public static PrimitiveObjectInspector asDoubleCompatibleOI(
-            @Nonnull final ObjectInspector argOI) throws UDFArgumentTypeException {
+    public static PrimitiveObjectInspector asDoubleCompatibleOI(@Nonnull final ObjectInspector argOI)
+            throws UDFArgumentTypeException {
         if (argOI.getCategory() != Category.PRIMITIVE) {
             throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
                     + argOI.getTypeName() + " is passed.");
@@ -1177,8 +1176,8 @@ public final class HiveUtils {
 
     @Nonnull
     public static LazyString lazyString(@Nonnull final String str, final byte escapeChar) {
-        LazyStringObjectInspector oi =
-                LazyPrimitiveObjectInspectorFactory.getLazyStringObjectInspector(false, escapeChar);
+        LazyStringObjectInspector oi = LazyPrimitiveObjectInspectorFactory.getLazyStringObjectInspector(
+            false, escapeChar);
         return lazyString(str, oi);
     }
 
@@ -1195,16 +1194,15 @@ public final class HiveUtils {
 
     @Nonnull
     public static LazyInteger lazyInteger(@Nonnull final int v) {
-        LazyInteger lazy =
-                new LazyInteger(LazyPrimitiveObjectInspectorFactory.LAZY_INT_OBJECT_INSPECTOR);
+        LazyInteger lazy = new LazyInteger(
+            LazyPrimitiveObjectInspectorFactory.LAZY_INT_OBJECT_INSPECTOR);
         lazy.getWritableObject().set(v);
         return lazy;
     }
 
     @Nonnull
     public static LazyLong lazyLong(@Nonnull final long v) {
-        LazyLong lazy =
-                new LazyLong(LazyPrimitiveObjectInspectorFactory.LAZY_LONG_OBJECT_INSPECTOR);
+        LazyLong lazy = new LazyLong(LazyPrimitiveObjectInspectorFactory.LAZY_LONG_OBJECT_INSPECTOR);
         lazy.getWritableObject().set(v);
         return lazy;
     }

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/utils/hadoop/JsonSerdeUtils.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/utils/hadoop/JsonSerdeUtils.java b/core/src/main/java/hivemall/utils/hadoop/JsonSerdeUtils.java
index 1315537..34932bd 100644
--- a/core/src/main/java/hivemall/utils/hadoop/JsonSerdeUtils.java
+++ b/core/src/main/java/hivemall/utils/hadoop/JsonSerdeUtils.java
@@ -127,9 +127,9 @@ public final class JsonSerdeUtils {
     /**
      * Serialize Hive objects as Text.
      */
-    private static void serializeStruct(@Nonnull final StringBuilder sb, @Nullable final Object obj,
-            @Nonnull final StructObjectInspector soi, @Nullable final List<String> columnNames)
-            throws SerDeException {
+    private static void serializeStruct(@Nonnull final StringBuilder sb,
+            @Nullable final Object obj, @Nonnull final StructObjectInspector soi,
+            @Nullable final List<String> columnNames) throws SerDeException {
         if (obj == null) {
             sb.append("null");
         } else {
@@ -273,8 +273,7 @@ public final class JsonSerdeUtils {
                     break;
                 }
                 case STRING: {
-                    String s = SerDeUtils.escapeString(
-                        ((StringObjectInspector) poi).getPrimitiveJavaObject(obj));
+                    String s = SerDeUtils.escapeString(((StringObjectInspector) poi).getPrimitiveJavaObject(obj));
                     appendWithQuotes(sb, s);
                     break;
                 }
@@ -297,28 +296,30 @@ public final class JsonSerdeUtils {
                     sb.append(((HiveDecimalObjectInspector) poi).getPrimitiveJavaObject(obj));
                     break;
                 case VARCHAR: {
-                    String s = SerDeUtils.escapeString(
-                        ((HiveVarcharObjectInspector) poi).getPrimitiveJavaObject(obj).toString());
+                    String s = SerDeUtils.escapeString(((HiveVarcharObjectInspector) poi).getPrimitiveJavaObject(
+                        obj)
+                                                                                         .toString());
                     appendWithQuotes(sb, s);
                     break;
                 }
                 case CHAR: {
                     //this should use HiveChar.getPaddedValue() but it's protected; currently (v0.13)
                     // HiveChar.toString() returns getPaddedValue()
-                    String s = SerDeUtils.escapeString(
-                        ((HiveCharObjectInspector) poi).getPrimitiveJavaObject(obj).toString());
+                    String s = SerDeUtils.escapeString(((HiveCharObjectInspector) poi).getPrimitiveJavaObject(
+                        obj)
+                                                                                      .toString());
                     appendWithQuotes(sb, s);
                     break;
                 }
                 default:
-                    throw new SerDeException(
-                        "Unknown primitive type: " + poi.getPrimitiveCategory());
+                    throw new SerDeException("Unknown primitive type: "
+                            + poi.getPrimitiveCategory());
             }
         }
     }
 
-    private static void buildJSONString(@Nonnull final StringBuilder sb, @Nullable final Object obj,
-            @Nonnull final ObjectInspector oi) throws SerDeException {
+    private static void buildJSONString(@Nonnull final StringBuilder sb,
+            @Nullable final Object obj, @Nonnull final ObjectInspector oi) throws SerDeException {
         switch (oi.getCategory()) {
             case PRIMITIVE: {
                 PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
@@ -375,12 +376,13 @@ public final class JsonSerdeUtils {
 
     @SuppressWarnings("unchecked")
     @Nonnull
-    public static <T> T deserialize(@Nonnull final Text t, @Nullable final List<String> columnNames,
-            @Nullable final List<TypeInfo> columnTypes) throws SerDeException {
+    public static <T> T deserialize(@Nonnull final Text t,
+            @Nullable final List<String> columnNames, @Nullable final List<TypeInfo> columnTypes)
+            throws SerDeException {
         final Object result;
         try {
-            JsonParser p =
-                    new JsonFactory().createJsonParser(new FastByteArrayInputStream(t.getBytes()));
+            JsonParser p = new JsonFactory().createJsonParser(new FastByteArrayInputStream(
+                t.getBytes()));
             final JsonToken token = p.nextToken();
             if (token == JsonToken.START_OBJECT) {
                 result = parseObject(p, columnNames, columnTypes);
@@ -400,8 +402,8 @@ public final class JsonSerdeUtils {
     @Nonnull
     private static Object parseObject(@Nonnull final JsonParser p,
             @CheckForNull final List<String> columnNames,
-            @CheckForNull final List<TypeInfo> columnTypes)
-            throws JsonParseException, IOException, SerDeException {
+            @CheckForNull final List<TypeInfo> columnTypes) throws JsonParseException, IOException,
+            SerDeException {
         Preconditions.checkNotNull(columnNames, "columnNames MUST NOT be null in parseObject",
             SerDeException.class);
         Preconditions.checkNotNull(columnTypes, "columnTypes MUST NOT be null in parseObject",
@@ -435,8 +437,8 @@ public final class JsonSerdeUtils {
 
     @Nonnull
     private static List<Object> parseArray(@Nonnull final JsonParser p,
-            @CheckForNull final List<TypeInfo> columnTypes)
-            throws HCatException, IOException, SerDeException {
+            @CheckForNull final List<TypeInfo> columnTypes) throws HCatException, IOException,
+            SerDeException {
         Preconditions.checkNotNull(columnTypes, "columnTypes MUST NOT be null",
             SerDeException.class);
         if (columnTypes.size() != 1) {
@@ -457,8 +459,8 @@ public final class JsonSerdeUtils {
     }
 
     @Nonnull
-    private static Object parseValue(@Nonnull final JsonParser p)
-            throws JsonParseException, IOException {
+    private static Object parseValue(@Nonnull final JsonParser p) throws JsonParseException,
+            IOException {
         final JsonToken t = p.getCurrentToken();
         switch (t) {
             case VALUE_FALSE:
@@ -479,8 +481,8 @@ public final class JsonSerdeUtils {
     }
 
     private static void populateRecord(@Nonnull final List<Object> r,
-            @Nonnull final JsonToken token, @Nonnull final JsonParser p,
-            @Nonnull final HCatSchema s) throws IOException {
+            @Nonnull final JsonToken token, @Nonnull final JsonParser p, @Nonnull final HCatSchema s)
+            throws IOException {
         if (token != JsonToken.FIELD_NAME) {
             throw new IOException("Field name expected");
         }
@@ -575,8 +577,8 @@ public final class JsonSerdeUtils {
                 break;
             case VARCHAR:
                 int vLen = ((BaseCharTypeInfo) hcatFieldSchema.getTypeInfo()).getLength();
-                val = (valueToken == JsonToken.VALUE_NULL) ? null
-                        : new HiveVarchar(p.getText(), vLen);
+                val = (valueToken == JsonToken.VALUE_NULL) ? null : new HiveVarchar(p.getText(),
+                    vLen);
                 break;
             case CHAR:
                 int cLen = ((BaseCharTypeInfo) hcatFieldSchema.getTypeInfo()).getLength();
@@ -676,8 +678,8 @@ public final class JsonSerdeUtils {
             case CHAR:
                 return new HiveChar(s, ((BaseCharTypeInfo) mapKeyType).getLength());
             default:
-                throw new IOException(
-                    "Could not convert from string to map type " + mapKeyType.getTypeName());
+                throw new IOException("Could not convert from string to map type "
+                        + mapKeyType.getTypeName());
         }
     }
 
@@ -691,8 +693,8 @@ public final class JsonSerdeUtils {
         }
     }
 
-    private static void skipValue(@Nonnull final JsonParser p)
-            throws JsonParseException, IOException {
+    private static void skipValue(@Nonnull final JsonParser p) throws JsonParseException,
+            IOException {
         JsonToken valueToken = p.nextToken();
         if ((valueToken == JsonToken.START_ARRAY) || (valueToken == JsonToken.START_OBJECT)) {
             // if the currently read token is a beginning of an array or object, move stream forward
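
Note: the JsonSerdeUtils changes above are line re-wrapping only. For orientation, a hypothetical caller of the public deserialize(Text, columnNames, columnTypes) entry point changed in the @@ -375 hunk might look like the sketch below (class name and sample JSON are made up for illustration):

    import java.util.Arrays;
    import java.util.List;

    import hivemall.utils.hadoop.JsonSerdeUtils;
    import org.apache.hadoop.hive.serde2.SerDeException;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
    import org.apache.hadoop.io.Text;

    public class JsonDeserializeExample {
        public static void main(String[] args) throws SerDeException {
            List<String> names = Arrays.asList("name", "age");
            List<TypeInfo> types = Arrays.<TypeInfo>asList(TypeInfoFactory.stringTypeInfo,
                TypeInfoFactory.intTypeInfo);
            // A top-level JSON object is routed to parseObject() and comes back as one row.
            Object row = JsonSerdeUtils.deserialize(new Text("{\"name\":\"makoto\",\"age\":37}"),
                names, types);
            System.out.println(row);
        }
    }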

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/main/java/hivemall/utils/math/MatrixUtils.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/hivemall/utils/math/MatrixUtils.java b/core/src/main/java/hivemall/utils/math/MatrixUtils.java
index dda77d3..36e378b 100644
--- a/core/src/main/java/hivemall/utils/math/MatrixUtils.java
+++ b/core/src/main/java/hivemall/utils/math/MatrixUtils.java
@@ -422,8 +422,8 @@ public final class MatrixUtils {
 
     @Nonnull
     public static RealMatrix combinedMatrices(@Nonnull final RealMatrix[] grid) {
-        Preconditions.checkArgument(grid.length >= 1,
-            "The number of rows must be greater than 0: " + grid.length);
+        Preconditions.checkArgument(grid.length >= 1, "The number of rows must be greater than 0: "
+                + grid.length);
 
         final int rows = grid.length;
         final int rowDims = grid[0].getRowDimension();
@@ -512,7 +512,7 @@ public final class MatrixUtils {
     /**
      * Find the first singular vector/value of a matrix A based on the Power method.
      *
-     * @see http
+     * @see http 
      *      ://www.cs.yale.edu/homes/el327/datamining2013aFiles/07_singular_value_decomposition.pdf
      * @param A target matrix
      * @param x0 initial vector
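
Note: the javadoc above describes finding the leading singular vector/value by the power method. As a rough, self-contained sketch of that idea (not the MatrixUtils implementation; the example matrix, class name, and iteration count are made up), using the same commons-math RealMatrix type:

    import org.apache.commons.math3.linear.Array2DRowRealMatrix;
    import org.apache.commons.math3.linear.RealMatrix;

    public class PowerMethodSketch {
        public static void main(String[] args) {
            RealMatrix A = new Array2DRowRealMatrix(new double[][] {{3d, 1d}, {1d, 3d}});
            RealMatrix AtA = A.transpose().multiply(A);
            double[] x = {1d, 1d}; // initial vector x0
            for (int iter = 0; iter < 100; iter++) {
                x = AtA.operate(x); // x <- A^T A x
                double norm = 0d;
                for (double v : x) {
                    norm += v * v;
                }
                norm = Math.sqrt(norm);
                for (int i = 0; i < x.length; i++) {
                    x[i] /= norm; // normalize; x converges to the first right singular vector
                }
            }
            double[] ax = A.operate(x);
            double sigma = 0d;
            for (double v : ax) {
                sigma += v * v;
            }
            System.out.println("sigma_1 ~= " + Math.sqrt(sigma)); // ~4 for this matrix
        }
    }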

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/test/java/hivemall/sketch/bloom/BloomAndUDFTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/hivemall/sketch/bloom/BloomAndUDFTest.java b/core/src/test/java/hivemall/sketch/bloom/BloomAndUDFTest.java
index 97ad7c6..76f32e8 100644
--- a/core/src/test/java/hivemall/sketch/bloom/BloomAndUDFTest.java
+++ b/core/src/test/java/hivemall/sketch/bloom/BloomAndUDFTest.java
@@ -50,8 +50,8 @@ public class BloomAndUDFTest {
 
         Assert.assertEquals(expected, actual);
 
-        DynamicBloomFilter deserialized =
-                BloomFilterUtils.deserialize(actual, new DynamicBloomFilter());
+        DynamicBloomFilter deserialized = BloomFilterUtils.deserialize(actual,
+            new DynamicBloomFilter());
         assertNotContains(bf1, deserialized, 1L, 10000);
         assertNotContains(bf1, deserialized, 2L, 10000);
     }

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/test/java/hivemall/sketch/bloom/BloomOrUDFTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/hivemall/sketch/bloom/BloomOrUDFTest.java b/core/src/test/java/hivemall/sketch/bloom/BloomOrUDFTest.java
index 64f95e0..0179a30 100644
--- a/core/src/test/java/hivemall/sketch/bloom/BloomOrUDFTest.java
+++ b/core/src/test/java/hivemall/sketch/bloom/BloomOrUDFTest.java
@@ -50,8 +50,8 @@ public class BloomOrUDFTest {
 
         Assert.assertEquals(expected, actual);
 
-        DynamicBloomFilter deserialized =
-                BloomFilterUtils.deserialize(actual, new DynamicBloomFilter());
+        DynamicBloomFilter deserialized = BloomFilterUtils.deserialize(actual,
+            new DynamicBloomFilter());
         assertEquals(bf1, deserialized, 1L, 10000);
         assertEquals(bf1, deserialized, 2L, 10000);
     }

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/test/java/hivemall/smile/tools/TreePredictUDFv1Test.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/hivemall/smile/tools/TreePredictUDFv1Test.java b/core/src/test/java/hivemall/smile/tools/TreePredictUDFv1Test.java
index 75bbe78..4251ca9 100644
--- a/core/src/test/java/hivemall/smile/tools/TreePredictUDFv1Test.java
+++ b/core/src/test/java/hivemall/smile/tools/TreePredictUDFv1Test.java
@@ -69,8 +69,8 @@ public class TreePredictUDFv1Test {
         byte[] serialized = IOUtils.toByteArray(gis);
 
         byte[] b = Base91.decode(serialized);
-        DtNodeV1 deserialized =
-                JavaSerializationEvaluator.deserializeDecisionTree(b, b.length, true);
+        DtNodeV1 deserialized = JavaSerializationEvaluator.deserializeDecisionTree(b, b.length,
+            true);
 
         Assert.assertNotNull(deserialized);
     }
@@ -97,8 +97,8 @@ public class TreePredictUDFv1Test {
             int[] trainy = Math.slice(y, loocv.train[i]);
 
             Attribute[] attrs = SmileExtUtils.convertAttributeTypes(iris.attributes());
-            DecisionTree tree = new DecisionTree(attrs,
-                new RowMajorDenseMatrix2d(trainx, x[0].length), trainy, 4);
+            DecisionTree tree = new DecisionTree(attrs, new RowMajorDenseMatrix2d(trainx,
+                x[0].length), trainy, 4);
             assertEquals(tree.predict(x[loocv.test[i]]), evalPredict(tree, x[loocv.test[i]]));
         }
     }
@@ -125,8 +125,8 @@ public class TreePredictUDFv1Test {
             double[][] testx = Math.slice(datax, cv.test[i]);
 
             Attribute[] attrs = SmileExtUtils.convertAttributeTypes(data.attributes());
-            RegressionTree tree = new RegressionTree(attrs,
-                new RowMajorDenseMatrix2d(trainx, trainx[0].length), trainy, 20);
+            RegressionTree tree = new RegressionTree(attrs, new RowMajorDenseMatrix2d(trainx,
+                trainx[0].length), trainy, 20);
 
             for (int j = 0; j < testx.length; j++) {
                 assertEquals(tree.predict(testx[j]), evalPredict(tree, testx[j]), 1.0);
@@ -165,8 +165,8 @@ public class TreePredictUDFv1Test {
         }
 
         Attribute[] attrs = SmileExtUtils.convertAttributeTypes(data.attributes());
-        RegressionTree tree = new RegressionTree(attrs,
-            new RowMajorDenseMatrix2d(trainx, trainx[0].length), trainy, 20);
+        RegressionTree tree = new RegressionTree(attrs, new RowMajorDenseMatrix2d(trainx,
+            trainx[0].length), trainy, 20);
         debugPrint(String.format("RMSE = %.4f\n", rmse(tree, testx, testy)));
 
         for (int i = m; i < n; i++) {
@@ -183,46 +183,45 @@ public class TreePredictUDFv1Test {
         return new RMSE().measure(y, predictions);
     }
 
-    private static int evalPredict(DecisionTree tree, double[] x)
-            throws HiveException, IOException {
+    private static int evalPredict(DecisionTree tree, double[] x) throws HiveException, IOException {
         String opScript = tree.predictOpCodegen(StackMachine.SEP);
         debugPrint(opScript);
 
         TreePredictUDFv1 udf = new TreePredictUDFv1();
-        udf.initialize(
-            new ObjectInspector[] {PrimitiveObjectInspectorFactory.javaStringObjectInspector,
-                    PrimitiveObjectInspectorFactory.javaIntObjectInspector,
-                    PrimitiveObjectInspectorFactory.javaStringObjectInspector,
-                    ObjectInspectorFactory.getStandardListObjectInspector(
-                        PrimitiveObjectInspectorFactory.javaDoubleObjectInspector),
-                    ObjectInspectorUtils.getConstantObjectInspector(
-                        PrimitiveObjectInspectorFactory.javaBooleanObjectInspector, true)});
+        udf.initialize(new ObjectInspector[] {
+                PrimitiveObjectInspectorFactory.javaStringObjectInspector,
+                PrimitiveObjectInspectorFactory.javaIntObjectInspector,
+                PrimitiveObjectInspectorFactory.javaStringObjectInspector,
+                ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaDoubleObjectInspector),
+                ObjectInspectorUtils.getConstantObjectInspector(
+                    PrimitiveObjectInspectorFactory.javaBooleanObjectInspector, true)});
         DeferredObject[] arguments = new DeferredObject[] {new DeferredJavaObject("model_id#1"),
-                new DeferredJavaObject(ModelType.opscode.getId()), new DeferredJavaObject(opScript),
-                new DeferredJavaObject(ArrayUtils.toList(x)), new DeferredJavaObject(true)};
+                new DeferredJavaObject(ModelType.opscode.getId()),
+                new DeferredJavaObject(opScript), new DeferredJavaObject(ArrayUtils.toList(x)),
+                new DeferredJavaObject(true)};
 
         IntWritable result = (IntWritable) udf.evaluate(arguments);
         udf.close();
         return result.get();
     }
 
-    private static double evalPredict(RegressionTree tree, double[] x)
-            throws HiveException, IOException {
+    private static double evalPredict(RegressionTree tree, double[] x) throws 
HiveException,
+            IOException {
         String opScript = tree.predictOpCodegen(StackMachine.SEP);
         debugPrint(opScript);
 
         TreePredictUDFv1 udf = new TreePredictUDFv1();
-        udf.initialize(
-            new ObjectInspector[] {PrimitiveObjectInspectorFactory.javaStringObjectInspector,
-                    PrimitiveObjectInspectorFactory.javaIntObjectInspector,
-                    PrimitiveObjectInspectorFactory.javaStringObjectInspector,
-                    ObjectInspectorFactory.getStandardListObjectInspector(
-                        PrimitiveObjectInspectorFactory.javaDoubleObjectInspector),
-                    ObjectInspectorUtils.getConstantObjectInspector(
-                        PrimitiveObjectInspectorFactory.javaBooleanObjectInspector, false)});
+        udf.initialize(new ObjectInspector[] {
+                PrimitiveObjectInspectorFactory.javaStringObjectInspector,
+                PrimitiveObjectInspectorFactory.javaIntObjectInspector,
+                PrimitiveObjectInspectorFactory.javaStringObjectInspector,
+                ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaDoubleObjectInspector),
+                ObjectInspectorUtils.getConstantObjectInspector(
+                    PrimitiveObjectInspectorFactory.javaBooleanObjectInspector, false)});
         DeferredObject[] arguments = new DeferredObject[] {new DeferredJavaObject("model_id#1"),
-                new DeferredJavaObject(ModelType.opscode.getId()), new DeferredJavaObject(opScript),
-                new DeferredJavaObject(ArrayUtils.toList(x)), new DeferredJavaObject(false)};
+                new DeferredJavaObject(ModelType.opscode.getId()),
+                new DeferredJavaObject(opScript), new DeferredJavaObject(ArrayUtils.toList(x)),
+                new DeferredJavaObject(false)};
 
         DoubleWritable result = (DoubleWritable) udf.evaluate(arguments);
         udf.close();

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/test/java/hivemall/tools/TryCastUDFTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/hivemall/tools/TryCastUDFTest.java b/core/src/test/java/hivemall/tools/TryCastUDFTest.java
index 7b8f6af..7cd75ba 100644
--- a/core/src/test/java/hivemall/tools/TryCastUDFTest.java
+++ b/core/src/test/java/hivemall/tools/TryCastUDFTest.java
@@ -41,8 +41,7 @@ public class TryCastUDFTest {
         TryCastUDF udf = new TryCastUDF();
 
         udf.initialize(new ObjectInspector[] {
-                ObjectInspectorFactory.getStandardListObjectInspector(
-                    PrimitiveObjectInspectorFactory.writableDoubleObjectInspector),
+                ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector),
                 PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
                     TypeInfoFactory.stringTypeInfo, new Text("array<string>"))});
 

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/test/java/hivemall/tools/array/ArrayAppendUDFTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/hivemall/tools/array/ArrayAppendUDFTest.java b/core/src/test/java/hivemall/tools/array/ArrayAppendUDFTest.java
index ef4b3e5..113e993 100644
--- a/core/src/test/java/hivemall/tools/array/ArrayAppendUDFTest.java
+++ b/core/src/test/java/hivemall/tools/array/ArrayAppendUDFTest.java
@@ -40,14 +40,12 @@ public class ArrayAppendUDFTest {
         ArrayAppendUDF udf = new ArrayAppendUDF();
 
         udf.initialize(new ObjectInspector[] {
-                ObjectInspectorFactory.getStandardListObjectInspector(
-                    PrimitiveObjectInspectorFactory.writableDoubleObjectInspector),
+                ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector),
                 PrimitiveObjectInspectorFactory.javaDoubleObjectInspector});
 
         DeferredObject[] args = new DeferredObject[] {
-                new GenericUDF.DeferredJavaObject(
-                    WritableUtils.toWritableList(new double[] {0, 1, 2})),
-                new GenericUDF.DeferredJavaObject(new Double(3))};
+                new GenericUDF.DeferredJavaObject(WritableUtils.toWritableList(new double[] {0, 1,
+                        2})), new GenericUDF.DeferredJavaObject(new Double(3))};
 
         List<Object> result = udf.evaluate(args);
 
@@ -64,14 +62,12 @@ public class ArrayAppendUDFTest {
         ArrayAppendUDF udf = new ArrayAppendUDF();
 
         udf.initialize(new ObjectInspector[] {
-                ObjectInspectorFactory.getStandardListObjectInspector(
-                    PrimitiveObjectInspectorFactory.writableDoubleObjectInspector),
+                ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector),
                 PrimitiveObjectInspectorFactory.javaDoubleObjectInspector});
 
         DeferredObject[] args = new DeferredObject[] {
-                new GenericUDF.DeferredJavaObject(
-                    WritableUtils.toWritableList(new double[] {0, 1, 2})),
-                new GenericUDF.DeferredJavaObject(null)};
+                new GenericUDF.DeferredJavaObject(WritableUtils.toWritableList(new double[] {0, 1,
+                        2})), new GenericUDF.DeferredJavaObject(null)};
 
         List<Object> result = udf.evaluate(args);
 
@@ -89,8 +85,7 @@ public class ArrayAppendUDFTest {
         ArrayAppendUDF udf = new ArrayAppendUDF();
 
         udf.initialize(new ObjectInspector[] {
-                ObjectInspectorFactory.getStandardListObjectInspector(
-                    PrimitiveObjectInspectorFactory.writableDoubleObjectInspector),
+                ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector),
                 PrimitiveObjectInspectorFactory.javaDoubleObjectInspector});
 
         DeferredObject[] args = new DeferredObject[] {new GenericUDF.DeferredJavaObject(null),

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/test/java/hivemall/tools/array/ArrayElementAtUDFTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/hivemall/tools/array/ArrayElementAtUDFTest.java b/core/src/test/java/hivemall/tools/array/ArrayElementAtUDFTest.java
index c22ea05..95ef1a2 100644
--- a/core/src/test/java/hivemall/tools/array/ArrayElementAtUDFTest.java
+++ b/core/src/test/java/hivemall/tools/array/ArrayElementAtUDFTest.java
@@ -39,27 +39,23 @@ public class ArrayElementAtUDFTest {
         ArrayElementAtUDF udf = new ArrayElementAtUDF();
 
         udf.initialize(new ObjectInspector[] {
-                ObjectInspectorFactory.getStandardListObjectInspector(
-                    PrimitiveObjectInspectorFactory.writableDoubleObjectInspector),
+                ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector),
                 PrimitiveObjectInspectorFactory.javaIntObjectInspector});
 
         DeferredObject[] args = new DeferredObject[] {
-                new GenericUDF.DeferredJavaObject(
-                    WritableUtils.toWritableList(new double[] {0, 1, 2})),
-                new GenericUDF.DeferredJavaObject(new Integer(1))};
+                new GenericUDF.DeferredJavaObject(WritableUtils.toWritableList(new double[] {0, 1,
+                        2})), new GenericUDF.DeferredJavaObject(new Integer(1))};
 
         Assert.assertEquals(new DoubleWritable(1), udf.evaluate(args));
 
         args = new DeferredObject[] {
-                new GenericUDF.DeferredJavaObject(
-                    WritableUtils.toWritableList(new double[] {0, 1, 2})),
-                new GenericUDF.DeferredJavaObject(new Integer(4))};
+                new GenericUDF.DeferredJavaObject(WritableUtils.toWritableList(new double[] {0, 1,
+                        2})), new GenericUDF.DeferredJavaObject(new Integer(4))};
         Assert.assertNull(udf.evaluate(args));
 
         args = new DeferredObject[] {
-                new GenericUDF.DeferredJavaObject(
-                    WritableUtils.toWritableList(new double[] {0, 1, 2})),
-                new GenericUDF.DeferredJavaObject(new Integer(-2))};
+                new GenericUDF.DeferredJavaObject(WritableUtils.toWritableList(new double[] {0, 1,
+                        2})), new GenericUDF.DeferredJavaObject(new Integer(-2))};
         Assert.assertEquals(new DoubleWritable(1), udf.evaluate(args));
 
         udf.close();
@@ -70,8 +66,7 @@ public class ArrayElementAtUDFTest {
         ArrayElementAtUDF udf = new ArrayElementAtUDF();
 
         udf.initialize(new ObjectInspector[] {
-                ObjectInspectorFactory.getStandardListObjectInspector(
-                    PrimitiveObjectInspectorFactory.writableStringObjectInspector),
+                ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableStringObjectInspector),
                 PrimitiveObjectInspectorFactory.javaIntObjectInspector});
 
         DeferredObject[] args = new DeferredObject[] {

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/test/java/hivemall/tools/array/ArrayFlattenUDFTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/hivemall/tools/array/ArrayFlattenUDFTest.java b/core/src/test/java/hivemall/tools/array/ArrayFlattenUDFTest.java
index ae8bc2c..3824fd5 100644
--- a/core/src/test/java/hivemall/tools/array/ArrayFlattenUDFTest.java
+++ b/core/src/test/java/hivemall/tools/array/ArrayFlattenUDFTest.java
@@ -37,9 +37,7 @@ public class ArrayFlattenUDFTest {
     public void testEvaluate() throws HiveException, IOException {
         ArrayFlattenUDF udf = new ArrayFlattenUDF();
 
-        udf.initialize(new ObjectInspector[] {ObjectInspectorFactory.getStandardListObjectInspector(
-            ObjectInspectorFactory.getStandardListObjectInspector(
-                PrimitiveObjectInspectorFactory.javaIntObjectInspector))});
+        udf.initialize(new ObjectInspector[] {ObjectInspectorFactory.getStandardListObjectInspector(ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaIntObjectInspector))});
 
         DeferredObject[] args = new DeferredObject[] {new GenericUDF.DeferredJavaObject(
             Arrays.asList(Arrays.asList(0, 1, 2, 3), Arrays.asList(4, 5), Arrays.asList(6, 7)))};

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/test/java/hivemall/tools/array/ArraySliceUDFTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/hivemall/tools/array/ArraySliceUDFTest.java b/core/src/test/java/hivemall/tools/array/ArraySliceUDFTest.java
index ca526c5..b18ae54 100644
--- a/core/src/test/java/hivemall/tools/array/ArraySliceUDFTest.java
+++ b/core/src/test/java/hivemall/tools/array/ArraySliceUDFTest.java
@@ -40,8 +40,7 @@ public class ArraySliceUDFTest {
         ArraySliceUDF udf = new ArraySliceUDF();
 
         udf.initialize(new ObjectInspector[] {
-                ObjectInspectorFactory.getStandardListObjectInspector(
-                    PrimitiveObjectInspectorFactory.javaStringObjectInspector),
+                ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaStringObjectInspector),
                 PrimitiveObjectInspectorFactory.writableIntObjectInspector,
                 PrimitiveObjectInspectorFactory.writableIntObjectInspector});
 
@@ -51,11 +50,9 @@ public class ArraySliceUDFTest {
         DeferredObject arg2 = new GenericUDF.DeferredJavaObject(length);
         DeferredObject nullarg = new GenericUDF.DeferredJavaObject(null);
 
-        DeferredObject[] args =
-                new DeferredObject[] {
-                        new GenericUDF.DeferredJavaObject(Arrays.asList("zero", "one", "two",
-                            "three", "four", "five", "six", "seven", "eight", "nine", "ten")),
-                        arg1, arg2};
+        DeferredObject[] args = new DeferredObject[] {
+                new GenericUDF.DeferredJavaObject(Arrays.asList("zero", "one", "two", "three",
+                    "four", "five", "six", "seven", "eight", "nine", "ten")), arg1, arg2};
 
         offset.set(0);
         length.set(3);
@@ -91,8 +88,7 @@ public class ArraySliceUDFTest {
         ArraySliceUDF udf = new ArraySliceUDF();
 
         udf.initialize(new ObjectInspector[] {
-                ObjectInspectorFactory.getStandardListObjectInspector(
-                    PrimitiveObjectInspectorFactory.javaStringObjectInspector),
+                ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaStringObjectInspector),
                 PrimitiveObjectInspectorFactory.writableIntObjectInspector,
                 PrimitiveObjectInspectorFactory.writableIntObjectInspector});
 
@@ -101,11 +97,9 @@ public class ArraySliceUDFTest {
         DeferredObject arg1 = new GenericUDF.DeferredJavaObject(offset);
         DeferredObject arg2 = new GenericUDF.DeferredJavaObject(length);
 
-        DeferredObject[] args =
-                new DeferredObject[] {
-                        new GenericUDF.DeferredJavaObject(Arrays.asList("zero", "one", "two",
-                            "three", "four", "five", "six", "seven", "eight", "nine", "ten")),
-                        arg1, arg2};
+        DeferredObject[] args = new DeferredObject[] {
+                new GenericUDF.DeferredJavaObject(Arrays.asList("zero", "one", "two", "three",
+                    "four", "five", "six", "seven", "eight", "nine", "ten")), arg1, arg2};
 
 
         offset.set(-12);

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/e88d74fa/core/src/test/java/hivemall/tools/array/ArrayUnionUDFTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/hivemall/tools/array/ArrayUnionUDFTest.java b/core/src/test/java/hivemall/tools/array/ArrayUnionUDFTest.java
index a65a182..1138e9a 100644
--- a/core/src/test/java/hivemall/tools/array/ArrayUnionUDFTest.java
+++ b/core/src/test/java/hivemall/tools/array/ArrayUnionUDFTest.java
@@ -39,20 +39,16 @@ public class ArrayUnionUDFTest {
         ArrayUnionUDF udf = new ArrayUnionUDF();
 
         udf.initialize(new ObjectInspector[] {
-                ObjectInspectorFactory.getStandardListObjectInspector(
-                    PrimitiveObjectInspectorFactory.writableDoubleObjectInspector),
-                ObjectInspectorFactory.getStandardListObjectInspector(
-                    PrimitiveObjectInspectorFactory.writableDoubleObjectInspector),
-                ObjectInspectorFactory.getStandardListObjectInspector(
-                    PrimitiveObjectInspectorFactory.writableDoubleObjectInspector)});
+                ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector),
+                ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector),
+                ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector)});
 
         DeferredObject[] args = new DeferredObject[] {
-                new GenericUDF.DeferredJavaObject(
-                    WritableUtils.toWritableList(new double[] {0, 1, 2})),
-                new GenericUDF.DeferredJavaObject(
-                    WritableUtils.toWritableList(new double[] {2, 3, 4})),
-                new GenericUDF.DeferredJavaObject(
-                    WritableUtils.toWritableList(new double[] {4, 5}))};
+                new GenericUDF.DeferredJavaObject(WritableUtils.toWritableList(new double[] {0, 1,
+                        2})),
+                new GenericUDF.DeferredJavaObject(WritableUtils.toWritableList(new double[] {2, 3,
+                        4})),
+                new GenericUDF.DeferredJavaObject(WritableUtils.toWritableList(new double[] {4, 5}))};
 
         List<Object> result = udf.evaluate(args);
 
