Repository: incubator-hivemall
Updated Branches:
  refs/heads/master ae0d3c410 -> c40366959


http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/core/src/test/java/hivemall/topicmodel/PLSAUDTFTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/hivemall/topicmodel/PLSAUDTFTest.java b/core/src/test/java/hivemall/topicmodel/PLSAUDTFTest.java
index 87b2f4c..7f344d1 100644
--- a/core/src/test/java/hivemall/topicmodel/PLSAUDTFTest.java
+++ b/core/src/test/java/hivemall/topicmodel/PLSAUDTFTest.java
@@ -41,7 +41,8 @@ public class PLSAUDTFTest {
         PLSAUDTF udtf = new PLSAUDTF();
 
         ObjectInspector[] argOIs = new ObjectInspector[] {
-                ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaStringObjectInspector),
+                ObjectInspectorFactory.getStandardListObjectInspector(
+                    PrimitiveObjectInspectorFactory.javaStringObjectInspector),
                 ObjectInspectorUtils.getConstantObjectInspector(
                     PrimitiveObjectInspectorFactory.javaStringObjectInspector,
                     "-topics 2 -alpha 0.1 -delta 0.00001 -iter 10000")};
@@ -49,8 +50,8 @@ public class PLSAUDTFTest {
         udtf.initialize(argOIs);
 
         String[] doc1 = new String[] {"fruits:1", "healthy:1", "vegetables:1"};
-        String[] doc2 = new String[] {"apples:1", "avocados:1", "colds:1", "flu:1", "like:2",
-                "oranges:1"};
+        String[] doc2 =
+                new String[] {"apples:1", "avocados:1", "colds:1", "flu:1", "like:2", "oranges:1"};
 
         udtf.process(new Object[] {Arrays.asList(doc1)});
         udtf.process(new Object[] {Arrays.asList(doc2)});
@@ -92,8 +93,9 @@ public class PLSAUDTFTest {
             k2 = 0;
         }
 
-        Assert.assertTrue("doc1 is in topic " + k1 + " (" + (topicDistr[k1] * 100) + "%), "
-                + "and `vegetables` SHOULD be more suitable topic word than `flu` in the topic",
+        Assert.assertTrue(
+            "doc1 is in topic " + k1 + " (" + (topicDistr[k1] * 100) + "%), "
+                    + "and `vegetables` SHOULD be more suitable topic word than `flu` in the topic",
             udtf.getWordScore("vegetables", k1) > udtf.getWordScore("flu", k1));
         Assert.assertTrue("doc2 is in topic " + k2 + " (" + (topicDistr[k2] * 100) + "%), "
                 + "and `avocados` SHOULD be more suitable topic word than `healthy` in the topic",
@@ -105,7 +107,8 @@ public class PLSAUDTFTest {
         PLSAUDTF udtf = new PLSAUDTF();
 
         ObjectInspector[] argOIs = new ObjectInspector[] {
-                ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaStringObjectInspector),
+                ObjectInspectorFactory.getStandardListObjectInspector(
+                    PrimitiveObjectInspectorFactory.javaStringObjectInspector),
                 ObjectInspectorUtils.getConstantObjectInspector(
                     PrimitiveObjectInspectorFactory.javaStringObjectInspector,
                     "-topics 2 -alpha 0.1 -delta 0.00001 -iter 10000 -mini_batch_size 1")};
@@ -155,25 +158,26 @@ public class PLSAUDTFTest {
             k2 = 0;
         }
 
-        Assert.assertTrue("doc1 is in topic " + k1 + " (" + (topicDistr[k1] * 100) + "%), "
-                + "and `野菜` SHOULD be more suitable topic word than `インフルエンザ` in the topic",
+        Assert.assertTrue(
+            "doc1 is in topic " + k1 + " (" + (topicDistr[k1] * 100) + "%), "
+                    + "and `野菜` SHOULD be more suitable topic word than `インフルエンザ` in the topic",
             udtf.getWordScore("野菜", k1) > udtf.getWordScore("インフルエンザ", k1));
-        Assert.assertTrue("doc2 is in topic " + k2 + " (" + (topicDistr[k2] * 100) + "%), "
-                + "and `アボカド` SHOULD be more suitable topic word than `健康` in the topic",
+        Assert.assertTrue(
+            "doc2 is in topic " + k2 + " (" + (topicDistr[k2] * 100) + "%), "
+                    + "and `アボカド` SHOULD be more suitable topic word than `健康` in the topic",
             udtf.getWordScore("アボカド", k2) > udtf.getWordScore("健康", k2));
     }
 
     @Test
     public void testSerialization() throws HiveException {
-        TestUtils.testGenericUDTFSerialization(
-            PLSAUDTF.class,
+        TestUtils.testGenericUDTFSerialization(PLSAUDTF.class,
             new ObjectInspector[] {
-                    ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaStringObjectInspector),
+                    ObjectInspectorFactory.getStandardListObjectInspector(
+                        PrimitiveObjectInspectorFactory.javaStringObjectInspector),
                     ObjectInspectorUtils.getConstantObjectInspector(
                         PrimitiveObjectInspectorFactory.javaStringObjectInspector,
                         "-topics 2 -alpha 0.1 -delta 0.00001 -iter 10000")},
-            new Object[][] {
-                    {Arrays.asList("fruits:1", "healthy:1", "vegetables:1")},
+            new Object[][] {{Arrays.asList("fruits:1", "healthy:1", "vegetables:1")},
                     {Arrays.asList("apples:1", "avocados:1", "colds:1", "flu:1", "like:2",
                         "oranges:1")}});
     }

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/core/src/test/java/hivemall/utils/codec/Base91Test.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/hivemall/utils/codec/Base91Test.java b/core/src/test/java/hivemall/utils/codec/Base91Test.java
index 809ba97..6ab3ebf 100644
--- a/core/src/test/java/hivemall/utils/codec/Base91Test.java
+++ b/core/src/test/java/hivemall/utils/codec/Base91Test.java
@@ -44,8 +44,9 @@ public class Base91Test {
 
     @Test
     public void testLongEncodeDecode() throws IOException {
-        byte[] expected1 = IOUtils.toString(
-            Base91Test.class.getResourceAsStream("Base91Test.class")).getBytes();
+        byte[] expected1 =
+                IOUtils.toString(Base91Test.class.getResourceAsStream("Base91Test.class"))
+                       .getBytes();
         Assert.assertTrue(expected1.length > 1000);
         byte[] actual1 = Base91.decode(Base91.encode(expected1));
         Assert.assertArrayEquals(expected1, actual1);

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/core/src/test/java/hivemall/utils/codec/DeflateCodecTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/hivemall/utils/codec/DeflateCodecTest.java b/core/src/test/java/hivemall/utils/codec/DeflateCodecTest.java
index 091644e..48c93de 100644
--- a/core/src/test/java/hivemall/utils/codec/DeflateCodecTest.java
+++ b/core/src/test/java/hivemall/utils/codec/DeflateCodecTest.java
@@ -43,7 +43,8 @@ public class DeflateCodecTest {
         byte[] compressed1 = codec.compress(original1);
         byte[] decompressed1 = codec.decompress(compressed1);
         Assert.assertTrue("compressed size (" + compressed1.length + " bytes) < original size ("
-                + original1.length + " bytes)", compressed1.length < original1.length);
+                + original1.length + " bytes)",
+            compressed1.length < original1.length);
         Assert.assertArrayEquals(original1, decompressed1);
         codec.close();
     }
@@ -51,12 +52,15 @@ public class DeflateCodecTest {
     @Test
     public void testNonString() throws IOException {
         DeflateCodec codec = new DeflateCodec();
-        byte[] original1 = IOUtils.toString(
-            DeflateCodecTest.class.getResourceAsStream("DeflateCodecTest.class")).getBytes();
+        byte[] original1 = IOUtils
+                                  .toString(DeflateCodecTest.class.getResourceAsStream(
+                                      "DeflateCodecTest.class"))
+                                  .getBytes();
         byte[] compressed1 = codec.compress(original1);
         byte[] decompressed1 = codec.decompress(compressed1);
         Assert.assertTrue("compressed size (" + compressed1.length + " bytes) < original size ("
-                + original1.length + " bytes)", compressed1.length < original1.length);
+                + original1.length + " bytes)",
+            compressed1.length < original1.length);
         Assert.assertArrayEquals(original1, decompressed1);
         codec.close();
     }

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/core/src/test/java/hivemall/utils/collections/BoundedPriorityQueueTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/hivemall/utils/collections/BoundedPriorityQueueTest.java b/core/src/test/java/hivemall/utils/collections/BoundedPriorityQueueTest.java
index 1220d76..b8aa559 100644
--- a/core/src/test/java/hivemall/utils/collections/BoundedPriorityQueueTest.java
+++ b/core/src/test/java/hivemall/utils/collections/BoundedPriorityQueueTest.java
@@ -31,13 +31,13 @@ public class BoundedPriorityQueueTest {
 
     @Test
     public void testTop3() {
-        BoundedPriorityQueue<Integer> queue = new BoundedPriorityQueue<Integer>(3,
-            new Comparator<Integer>() {
-                @Override
-                public int compare(Integer o1, Integer o2) {
-                    return Integer.compare(o1, o2);
-                }
-            });
+        BoundedPriorityQueue<Integer> queue =
+                new BoundedPriorityQueue<Integer>(3, new Comparator<Integer>() {
+                    @Override
+                    public int compare(Integer o1, Integer o2) {
+                        return Integer.compare(o1, o2);
+                    }
+                });
         Assert.assertTrue(queue.offer(1));
         Assert.assertTrue(queue.offer(4));
         Assert.assertTrue(queue.offer(3));
@@ -58,8 +58,8 @@ public class BoundedPriorityQueueTest {
 
     @Test
     public void testTail3() {
-        BoundedPriorityQueue<Integer> queue = new BoundedPriorityQueue<Integer>(3,
-            Collections.<Integer>reverseOrder());
+        BoundedPriorityQueue<Integer> queue =
+                new BoundedPriorityQueue<Integer>(3, Collections.<Integer>reverseOrder());
         Assert.assertTrue(queue.offer(1));
         Assert.assertTrue(queue.offer(4));
         Assert.assertTrue(queue.offer(3));
@@ -80,13 +80,13 @@ public class BoundedPriorityQueueTest {
 
     @Test
     public void testString1() {
-        BoundedPriorityQueue<String> queue = new BoundedPriorityQueue<>(3,
-            new Comparator<String>() {
-                @Override
-                public int compare(String o1, String o2) {
-                    return StringUtils.compare(o1, o2);
-                }
-            });
+        BoundedPriorityQueue<String> queue =
+                new BoundedPriorityQueue<>(3, new Comparator<String>() {
+                    @Override
+                    public int compare(String o1, String o2) {
+                        return StringUtils.compare(o1, o2);
+                    }
+                });
         queue.offer("B");
         queue.offer("A");
         queue.offer("C");
@@ -99,8 +99,8 @@ public class BoundedPriorityQueueTest {
 
     @Test
     public void testString2() {
-        BoundedPriorityQueue<String> queue = new BoundedPriorityQueue<>(3,
-            NaturalComparator.<String>getInstance());
+        BoundedPriorityQueue<String> queue =
+                new BoundedPriorityQueue<>(3, NaturalComparator.<String>getInstance());
         queue.offer("B");
         queue.offer("A");
         queue.offer("C");

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/core/src/test/java/hivemall/utils/geospatial/GeoSpatialUtilsTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/hivemall/utils/geospatial/GeoSpatialUtilsTest.java b/core/src/test/java/hivemall/utils/geospatial/GeoSpatialUtilsTest.java
index 6f551be..4600e2a 100644
--- a/core/src/test/java/hivemall/utils/geospatial/GeoSpatialUtilsTest.java
+++ b/core/src/test/java/hivemall/utils/geospatial/GeoSpatialUtilsTest.java
@@ -25,14 +25,13 @@ public class GeoSpatialUtilsTest {
 
     @Test
     public void testTile() {
-        double[] lat_array = new double[] {GeoSpatialUtils.MIN_LATITUDE, 0.d,
-                GeoSpatialUtils.MAX_LATITUDE};
+        double[] lat_array =
+                new double[] {GeoSpatialUtils.MIN_LATITUDE, 0.d, GeoSpatialUtils.MAX_LATITUDE};
         double[] lon_array = new double[] {-180.d, 0.d, 180.d};
         for (double lat : lat_array) {
             for (double lon : lon_array) {
-                Assert.assertTrue(
-                    String.format("lat=%s, lon=%s, tile=%s", lat, lon,
-                        GeoSpatialUtils.tile(lat, lon, 4)), GeoSpatialUtils.tile(lat, lon, 4) >= 0);
+                Assert.assertTrue(String.format("lat=%s, lon=%s, tile=%s", lat, lon,
+                    GeoSpatialUtils.tile(lat, lon, 4)), GeoSpatialUtils.tile(lat, lon, 4) >= 0);
             }
         }
     }

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/core/src/test/java/hivemall/utils/hadoop/JsonSerdeUtilsTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/hivemall/utils/hadoop/JsonSerdeUtilsTest.java b/core/src/test/java/hivemall/utils/hadoop/JsonSerdeUtilsTest.java
index 556cc71..a3e81d2 100644
--- a/core/src/test/java/hivemall/utils/hadoop/JsonSerdeUtilsTest.java
+++ b/core/src/test/java/hivemall/utils/hadoop/JsonSerdeUtilsTest.java
@@ -80,18 +80,17 @@ public class JsonSerdeUtilsTest {
     @Test
     public void testLooseJsonReadability() throws Exception {
         List<String> columnNames = Arrays.asList("s,k".split(","));
-        List<TypeInfo> columnTypes = TypeInfoUtils.getTypeInfosFromTypeString("struct<a:int,b:string>,int");
+        List<TypeInfo> columnTypes =
+                TypeInfoUtils.getTypeInfosFromTypeString("struct<a:int,b:string>,int");
 
-        Text jsonText1 = new Text(
-            "{ \"x\" : \"abc\" , "
-                    + " \"t\" : { \"a\":\"1\", \"b\":\"2\", \"c\":[ { \"x\":2 , \"y\":3 } , { \"x\":3 , \"y\":2 }] } ,"
-                    + "\"s\" : { \"a\" : 2 , \"b\" : \"blah\", \"c\": \"woo\" } }");
+        Text jsonText1 = new Text("{ \"x\" : \"abc\" , "
+                + " \"t\" : { \"a\":\"1\", \"b\":\"2\", \"c\":[ { \"x\":2 , \"y\":3 } , { \"x\":3 , \"y\":2 }] } ,"
+                + "\"s\" : { \"a\" : 2 , \"b\" : \"blah\", \"c\": \"woo\" } }");

-        Text jsonText2 = new Text(
-            "{ \"x\" : \"abc\" , "
-                    + " \"t\" : { \"a\":\"1\", \"b\":\"2\", \"c\":[ { \"x\":2 , \"y\":3 } , { \"x\":3 , \"y\":2 }] } ,"
-                    + "\"s\" : { \"a\" : 2 , \"b\" : \"blah\", \"c\": \"woo\" } , "
-                    + "\"k\" : 113 " + "}");
+        Text jsonText2 = new Text("{ \"x\" : \"abc\" , "
+                + " \"t\" : { \"a\":\"1\", \"b\":\"2\", \"c\":[ { \"x\":2 , \"y\":3 } , { \"x\":3 , \"y\":2 }] } ,"
+                + "\"s\" : { \"a\" : 2 , \"b\" : \"blah\", \"c\": \"woo\" } , " + "\"k\" : 113 "
+                + "}");
 
         List<Object> expected1 = Arrays.<Object>asList(Arrays.asList(2, "blah"), null);
         List<Object> expected2 = Arrays.<Object>asList(Arrays.asList(2, "blah"), 113);
@@ -105,7 +104,8 @@ public class JsonSerdeUtilsTest {
     @Test
     public void testMapValues() throws SerDeException {
         List<String> columnNames = Arrays.asList("a,b".split(","));
-        List<TypeInfo> columnTypes = TypeInfoUtils.getTypeInfosFromTypeString("array<string>,map<string,int>");
+        List<TypeInfo> columnTypes =
+                TypeInfoUtils.getTypeInfosFromTypeString("array<string>,map<string,int>");
 
         Text text1 = new Text("{ \"a\":[\"aaa\"],\"b\":{\"bbb\":1}} ");
         Text text2 = new Text("{\"a\":[\"yyy\"],\"b\":{\"zzz\":123}}");
@@ -193,18 +193,22 @@ public class JsonSerdeUtilsTest {
 
         DefaultHCatRecord r = new DefaultHCatRecord(rlist);
 
-        List<String> columnNames = Arrays.asList("ti,si,i,bi,d,f,s,n,r,l,m,b,c1,bd,hc,hvc,dt,ts,bin".split(","));
-        List<TypeInfo> columnTypes = TypeInfoUtils.getTypeInfosFromTypeString("tinyint,smallint,int,bigint,double,float,string,string,"
-                + "struct<a:string,b:string>,array<int>,map<smallint,string>,boolean,"
-                + "array<struct<i1:int,i2:struct<ii1:array<int>,ii2:map<string,struct<iii1:int>>>>>,"
-                + "decimal(5,2),char(10),varchar(20),date,timestamp,binary");
+        List<String> columnNames =
+                Arrays.asList("ti,si,i,bi,d,f,s,n,r,l,m,b,c1,bd,hc,hvc,dt,ts,bin".split(","));
+        List<TypeInfo> columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(
+            "tinyint,smallint,int,bigint,double,float,string,string,"
+                    + "struct<a:string,b:string>,array<int>,map<smallint,string>,boolean,"
+                    + "array<struct<i1:int,i2:struct<ii1:array<int>,ii2:map<string,struct<iii1:int>>>>>,"
+                    + "decimal(5,2),char(10),varchar(20),date,timestamp,binary");
 
-        StructTypeInfo rowTypeInfo = (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(
-            columnNames, columnTypes);
-        HCatRecordObjectInspector objInspector = HCatRecordObjectInspectorFactory.getHCatRecordObjectInspector(rowTypeInfo);
+        StructTypeInfo rowTypeInfo =
+                (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes);
+        HCatRecordObjectInspector objInspector =
+                HCatRecordObjectInspectorFactory.getHCatRecordObjectInspector(rowTypeInfo);
 
         Text serialized = JsonSerdeUtils.serialize(r, objInspector, columnNames);
-        List<Object> deserialized = JsonSerdeUtils.deserialize(serialized, columnNames, columnTypes);
+        List<Object> deserialized =
+                JsonSerdeUtils.deserialize(serialized, columnNames, columnTypes);
 
         assertRecordEquals(rlist, deserialized);
     }
@@ -236,18 +240,22 @@ public class JsonSerdeUtilsTest {
 
         DefaultHCatRecord r = new DefaultHCatRecord(nlist);
 
-        List<String> columnNames = Arrays.asList("ti,si,i,bi,d,f,s,n,r,l,m,b,c1,bd,hc,hvc,dt,ts,bin".split(","));
-        List<TypeInfo> columnTypes = TypeInfoUtils.getTypeInfosFromTypeString("tinyint,smallint,int,bigint,double,float,string,string,"
-                + "struct<a:string,b:string>,array<int>,map<smallint,string>,boolean,"
-                + "array<struct<i1:int,i2:struct<ii1:array<int>,ii2:map<string,struct<iii1:int>>>>>,"
-                + "decimal(5,2),char(10),varchar(20),date,timestamp,binary");
+        List<String> columnNames =
+                Arrays.asList("ti,si,i,bi,d,f,s,n,r,l,m,b,c1,bd,hc,hvc,dt,ts,bin".split(","));
+        List<TypeInfo> columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(
+            "tinyint,smallint,int,bigint,double,float,string,string,"
+                    + "struct<a:string,b:string>,array<int>,map<smallint,string>,boolean,"
+                    + "array<struct<i1:int,i2:struct<ii1:array<int>,ii2:map<string,struct<iii1:int>>>>>,"
+                    + "decimal(5,2),char(10),varchar(20),date,timestamp,binary");
 
-        StructTypeInfo rowTypeInfo = (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(
-            columnNames, columnTypes);
-        HCatRecordObjectInspector objInspector = HCatRecordObjectInspectorFactory.getHCatRecordObjectInspector(rowTypeInfo);
+        StructTypeInfo rowTypeInfo =
+                (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes);
+        HCatRecordObjectInspector objInspector =
+                HCatRecordObjectInspectorFactory.getHCatRecordObjectInspector(rowTypeInfo);
 
         Text serialized = JsonSerdeUtils.serialize(r, objInspector, columnNames);
-        List<Object> deserialized = JsonSerdeUtils.deserialize(serialized, columnNames, columnTypes);
+        List<Object> deserialized =
+                JsonSerdeUtils.deserialize(serialized, columnNames, columnTypes);
 
         assertRecordEquals(nlist, deserialized);
     }
@@ -258,8 +266,8 @@ public class JsonSerdeUtilsTest {
         TypeInfo type1 = TypeInfoUtils.getTypeInfoFromTypeString("struct<name:string,age:int>");
         List<Object> expected1 = Arrays.<Object>asList("makoto", 37);

-        List<Object> deserialized1 = JsonSerdeUtils.deserialize(json1, Arrays.asList("person"),
-            Arrays.asList(type1));
+        List<Object> deserialized1 =
+                JsonSerdeUtils.deserialize(json1, Arrays.asList("person"), Arrays.asList(type1));
 
         assertRecordEquals(expected1, deserialized1);
     }
@@ -329,15 +337,15 @@ public class JsonSerdeUtilsTest {
         int mySz = first.size();
         int urSz = second.size();
         if (mySz != urSz) {
-            throw new RuntimeException("#expected != #actual. #expected=" + mySz + ", #actual="
-                    + urSz);
+            throw new RuntimeException(
+                "#expected != #actual. #expected=" + mySz + ", #actual=" + urSz);
         } else {
             for (int i = 0; i < first.size(); i++) {
                 int c = DataType.compare(first.get(i), second.get(i));
                 if (c != 0) {
                 String msg = "first.get(" + i + "}='" + first.get(i) + "' second.get(" + i
-                            + ")='" + second.get(i) + "' compared as " + c + "\n"
-                            + "Types 1st/2nd=" + DataType.findType(first.get(i)) + "/"
+                            + ")='" + second.get(i) + "' compared as " + c + "\n" + "Types 1st/2nd="
+                            + DataType.findType(first.get(i)) + "/"
                             + DataType.findType(second.get(i)) + '\n' + "first='" + first.get(i)
                             + "' second='" + second.get(i) + "'";
                     if (first.get(i) instanceof Date) {

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/core/src/test/java/hivemall/utils/lang/HalfFloatTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/hivemall/utils/lang/HalfFloatTest.java b/core/src/test/java/hivemall/utils/lang/HalfFloatTest.java
index 42396c2..8b0c8c0 100644
--- a/core/src/test/java/hivemall/utils/lang/HalfFloatTest.java
+++ b/core/src/test/java/hivemall/utils/lang/HalfFloatTest.java
@@ -97,18 +97,18 @@ public class HalfFloatTest {
                 33, 1.966953E-6f, 83, 4.947186E-6f, 167, 9.953976E-6f, 335, 
1.996756E-5f, 838,
                 4.994869E-5f, 1677, 9.995699E-05f, 2701, 0.000199914f, 4120, 
0.0004997253f, 5144,
                 0.0009994507f, 6168, 0.001998901f, 7454, 0.004997253f, 8478, 
0.009994507f, 9502,
-                0.01998901f, 10854, 0.04998779f, 11878, 0.09997559f, 12902, 
0.1999512f, 14336,
-                0.5f, 15360, 1f, 16384, 2f, 17664, 5f, 18688, 10f, 19712, 20f, 
21056, 50f, 22080,
-                100f, 23104, 200f, 24528, 500f, 25552, 1000f, 26576, 2000f, 
27874, 5000f, 28898,
-                10000f, 29922, 20000f, 31258, 49984f, 32769, -5.960464E-08f, 
32771, -1.788139E-07f,
-                32776, -4.768372E-07f, 32784, -9.536743E-07f, 32801, 
-1.966953E-06f, 32851,
-                -4.947186E-06f, 32935, -9.953976E-06f, 33103, -1.996756E-05f, 
33606,
-                -4.994869E-05f, 34445, -9.995699E-05f, 35469, -0.000199914f, 
36888, -0.0004997253f,
-                37912, -0.0009994507f, 38936, -0.001998901f, 40222, 
-0.004997253f, 41246,
-                -0.009994507f, 42270, -0.01998901f, 43622, -0.04998779f, 
44646, -0.09997559f,
-                45670, -0.1999512f, 47104, -0.5f, 48128, -1f, 49152, -2f, 
50432, -5f, 51456, -10f,
-                52480, -20f, 53824, -50f, 54848, -100f, 55872, -200f, 57296, 
-500f, 58320, -1000f,
-                59344, -2000f, 60642, -5000f, 61666, -10000f, 62690, -20000f, 
64026, -49984f};
+                0.01998901f, 10854, 0.04998779f, 11878, 0.09997559f, 12902, 
0.1999512f, 14336, 0.5f,
+                15360, 1f, 16384, 2f, 17664, 5f, 18688, 10f, 19712, 20f, 
21056, 50f, 22080, 100f,
+                23104, 200f, 24528, 500f, 25552, 1000f, 26576, 2000f, 27874, 
5000f, 28898, 10000f,
+                29922, 20000f, 31258, 49984f, 32769, -5.960464E-08f, 32771, 
-1.788139E-07f, 32776,
+                -4.768372E-07f, 32784, -9.536743E-07f, 32801, -1.966953E-06f, 
32851, -4.947186E-06f,
+                32935, -9.953976E-06f, 33103, -1.996756E-05f, 33606, 
-4.994869E-05f, 34445,
+                -9.995699E-05f, 35469, -0.000199914f, 36888, -0.0004997253f, 
37912, -0.0009994507f,
+                38936, -0.001998901f, 40222, -0.004997253f, 41246, 
-0.009994507f, 42270,
+                -0.01998901f, 43622, -0.04998779f, 44646, -0.09997559f, 45670, 
-0.1999512f, 47104,
+                -0.5f, 48128, -1f, 49152, -2f, 50432, -5f, 51456, -10f, 52480, 
-20f, 53824, -50f,
+                54848, -100f, 55872, -200f, 57296, -500f, 58320, -1000f, 
59344, -2000f, 60642,
+                -5000f, 61666, -10000f, 62690, -20000f, 64026, -49984f};
         for (int i = 0; i < test.length; i += 2) {
             nonExactCheck((short) test[i], test[i + 1]);
         }

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/core/src/test/java/hivemall/utils/math/MatrixUtilsTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/hivemall/utils/math/MatrixUtilsTest.java b/core/src/test/java/hivemall/utils/math/MatrixUtilsTest.java
index 8b3053a..0623cca 100644
--- a/core/src/test/java/hivemall/utils/math/MatrixUtilsTest.java
+++ b/core/src/test/java/hivemall/utils/math/MatrixUtilsTest.java
@@ -82,13 +82,16 @@ public class MatrixUtilsTest {
         // 3  2  1
         Assert.assertArrayEquals(new RealMatrix[] {new Array2DRowRealMatrix(new double[] {1}),
                 new Array2DRowRealMatrix(new double[] {2}),
-                new Array2DRowRealMatrix(new double[] {3})}, A[0]);
+                new Array2DRowRealMatrix(new double[] {3})},
+            A[0]);
         Assert.assertArrayEquals(new RealMatrix[] {new Array2DRowRealMatrix(new double[] {2}),
                 new Array2DRowRealMatrix(new double[] {1}),
-                new Array2DRowRealMatrix(new double[] {2})}, A[1]);
+                new Array2DRowRealMatrix(new double[] {2})},
+            A[1]);
         Assert.assertArrayEquals(new RealMatrix[] {new Array2DRowRealMatrix(new double[] {3}),
                 new Array2DRowRealMatrix(new double[] {2}),
-                new Array2DRowRealMatrix(new double[] {1})}, A[2]);
+                new Array2DRowRealMatrix(new double[] {1})},
+            A[2]);
     }
 
     @Test
@@ -110,12 +113,12 @@ public class MatrixUtilsTest {
     @Test
     public void testCombinedMatrices2D() {
         RealMatrix[] m1 = new RealMatrix[] {
-                new Array2DRowRealMatrix(new double[][] {new double[] {1, 2, 
3},
-                        new double[] {4, 5, 6}}),
-                new Array2DRowRealMatrix(new double[][] {new double[] {7, 8, 
9},
-                        new double[] {10, 11, 12}}),
-                new Array2DRowRealMatrix(new double[][] {new double[] {13, 14, 
15},
-                        new double[] {16, 17, 18}})};
+                new Array2DRowRealMatrix(
+                    new double[][] {new double[] {1, 2, 3}, new double[] {4, 
5, 6}}),
+                new Array2DRowRealMatrix(
+                    new double[][] {new double[] {7, 8, 9}, new double[] {10, 
11, 12}}),
+                new Array2DRowRealMatrix(
+                    new double[][] {new double[] {13, 14, 15}, new double[] 
{16, 17, 18}})};
         RealMatrix flatten1 = MatrixUtils.combinedMatrices(m1);
         Assert.assertEquals(3, flatten1.getColumnDimension());
         Assert.assertEquals(6, flatten1.getRowDimension());
@@ -172,12 +175,12 @@ public class MatrixUtilsTest {
     @Test
     public void testFlatten2D() {
         RealMatrix[] m1 = new RealMatrix[] {
-                new Array2DRowRealMatrix(new double[][] {new double[] {1, 2, 
3},
-                        new double[] {4, 5, 6}}),
-                new Array2DRowRealMatrix(new double[][] {new double[] {7, 8, 
9},
-                        new double[] {10, 11, 12}}),
-                new Array2DRowRealMatrix(new double[][] {new double[] {13, 14, 
15},
-                        new double[] {16, 17, 18}})};
+                new Array2DRowRealMatrix(
+                    new double[][] {new double[] {1, 2, 3}, new double[] {4, 
5, 6}}),
+                new Array2DRowRealMatrix(
+                    new double[][] {new double[] {7, 8, 9}, new double[] {10, 
11, 12}}),
+                new Array2DRowRealMatrix(
+                    new double[][] {new double[] {13, 14, 15}, new double[] 
{16, 17, 18}})};
         double[] actual = MatrixUtils.flatten(m1);
         double[] expected = new double[18];
         for (int i = 0; i < expected.length; i++) {
@@ -195,22 +198,22 @@ public class MatrixUtilsTest {
         RealMatrix[] actual = MatrixUtils.unflatten(data, 2, 3, 4);
 
         RealMatrix[] expected = new RealMatrix[] {
-                new Array2DRowRealMatrix(new double[][] {new double[] {1, 2, 
3},
-                        new double[] {4, 5, 6}}),
-                new Array2DRowRealMatrix(new double[][] {new double[] {7, 8, 
9},
-                        new double[] {10, 11, 12}}),
-                new Array2DRowRealMatrix(new double[][] {new double[] {13, 14, 
15},
-                        new double[] {16, 17, 18}}),
-                new Array2DRowRealMatrix(new double[][] {new double[] {19, 20, 
21},
-                        new double[] {22, 23, 24}})};
+                new Array2DRowRealMatrix(
+                    new double[][] {new double[] {1, 2, 3}, new double[] {4, 
5, 6}}),
+                new Array2DRowRealMatrix(
+                    new double[][] {new double[] {7, 8, 9}, new double[] {10, 
11, 12}}),
+                new Array2DRowRealMatrix(
+                    new double[][] {new double[] {13, 14, 15}, new double[] 
{16, 17, 18}}),
+                new Array2DRowRealMatrix(
+                    new double[][] {new double[] {19, 20, 21}, new double[] 
{22, 23, 24}})};
 
         Assert.assertArrayEquals(expected, actual);
     }
 
     @Test
     public void testPower1() {
-        RealMatrix A = new Array2DRowRealMatrix(new double[][] {new double[] 
{1, 2, 3},
-                new double[] {4, 5, 6}});
+        RealMatrix A = new Array2DRowRealMatrix(
+            new double[][] {new double[] {1, 2, 3}, new double[] {4, 5, 6}});
 
         double[] x = new double[3];
         x[0] = Math.random();
@@ -241,9 +244,9 @@ public class MatrixUtilsTest {
         RealMatrix actual = new Array2DRowRealMatrix(new double[4][4]);
         MatrixUtils.lanczosTridiagonalization(C, a, actual);
 
-        RealMatrix expected = new Array2DRowRealMatrix(new double[][] {new 
double[] {40, 60, 0, 0},
-                new double[] {60, 10, 120, 0}, new double[] {0, 120, 10, 120},
-                new double[] {0, 0, 120, 10}});
+        RealMatrix expected = new Array2DRowRealMatrix(
+            new double[][] {new double[] {40, 60, 0, 0}, new double[] {60, 10, 
120, 0},
+                    new double[] {0, 120, 10, 120}, new double[] {0, 0, 120, 
10}});
 
         Assert.assertEquals(expected, actual);
     }
@@ -251,9 +254,9 @@ public class MatrixUtilsTest {
     @Test
     public void testTridiagonalEigen() {
         // Tridiagonal Matrix
-        RealMatrix T = new Array2DRowRealMatrix(new double[][] {new double[] 
{40, 60, 0, 0},
-                new double[] {60, 10, 120, 0}, new double[] {0, 120, 10, 120},
-                new double[] {0, 0, 120, 10}});
+        RealMatrix T = new Array2DRowRealMatrix(
+            new double[][] {new double[] {40, 60, 0, 0}, new double[] {60, 10, 
120, 0},
+                    new double[] {0, 120, 10, 120}, new double[] {0, 0, 120, 
10}});
 
         double[] eigvals = new double[4];
         RealMatrix eigvecs = new Array2DRowRealMatrix(new double[4][4]);

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/mixserv/src/main/java/hivemall/mix/server/MixServer.java
----------------------------------------------------------------------
diff --git a/mixserv/src/main/java/hivemall/mix/server/MixServer.java 
b/mixserv/src/main/java/hivemall/mix/server/MixServer.java
index 67ed25f..c6aa219 100644
--- a/mixserv/src/main/java/hivemall/mix/server/MixServer.java
+++ b/mixserv/src/main/java/hivemall/mix/server/MixServer.java
@@ -109,8 +109,8 @@ public final class MixServer implements Runnable {
     public String toString() {
         return "[port=" + port + ", numWorkers=" + numWorkers + ", ssl=" + ssl 
+ ", scale=" + scale
                 + ", syncThreshold=" + syncThreshold + ", sessionTTLinSec=" + 
sessionTTLinSec
-                + ", sweepIntervalInSec=" + sweepIntervalInSec + ", jmx=" + 
jmx + ", state="
-                + state + "]";
+                + ", sweepIntervalInSec=" + sweepIntervalInSec + ", jmx=" + 
jmx + ", state=" + state
+                + "]";
     }
 
     public ServerState getState() {
@@ -143,7 +143,8 @@ public final class MixServer implements Runnable {
         // configure metrics
         ScheduledExecutorService metricCollector = 
Executors.newScheduledThreadPool(1);
         MixServerMetrics metrics = new MixServerMetrics();
-        ThroughputCounter throughputCounter = new 
ThroughputCounter(metricCollector, 5000L, metrics);
+        ThroughputCounter throughputCounter =
+                new ThroughputCounter(metricCollector, 5000L, metrics);
         if (jmx) {// register mbean
             MetricsRegistry.registerMBeans(metrics, port);
         }
@@ -151,8 +152,8 @@ public final class MixServer implements Runnable {
         // configure initializer
         SessionStore sessionStore = new SessionStore();
         MixServerHandler msgHandler = new MixServerHandler(sessionStore, 
syncThreshold, scale);
-        MixServerInitializer initializer = new 
MixServerInitializer(msgHandler, throughputCounter,
-            sslCtx);
+        MixServerInitializer initializer =
+                new MixServerInitializer(msgHandler, throughputCounter, 
sslCtx);
 
         Runnable cleanSessionTask = new IdleSessionSweeper(sessionStore, 
sessionTTLinSec * 1000L);
         ScheduledExecutorService idleSessionChecker = 
Executors.newScheduledThreadPool(1);

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/mixserv/src/main/java/hivemall/mix/server/MixServerHandler.java
----------------------------------------------------------------------
diff --git a/mixserv/src/main/java/hivemall/mix/server/MixServerHandler.java 
b/mixserv/src/main/java/hivemall/mix/server/MixServerHandler.java
index ae5c27c..bf33c69 100644
--- a/mixserv/src/main/java/hivemall/mix/server/MixServerHandler.java
+++ b/mixserv/src/main/java/hivemall/mix/server/MixServerHandler.java
@@ -90,7 +90,8 @@ public final class MixServerHandler extends 
SimpleChannelInboundHandler<MixMessa
     }
 
     @Nonnull
-    private PartialResult getPartialResult(@Nonnull MixMessage msg, @Nonnull 
SessionObject session) {
+    private PartialResult getPartialResult(@Nonnull MixMessage msg,
+            @Nonnull SessionObject session) {
         final ConcurrentMap<Object, PartialResult> map = session.get();
 
         Object feature = msg.getFeature();

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/mixserv/src/main/java/hivemall/mix/store/PartialArgminKLD.java
----------------------------------------------------------------------
diff --git a/mixserv/src/main/java/hivemall/mix/store/PartialArgminKLD.java 
b/mixserv/src/main/java/hivemall/mix/store/PartialArgminKLD.java
index a6ffc4b..85d0936 100644
--- a/mixserv/src/main/java/hivemall/mix/store/PartialArgminKLD.java
+++ b/mixserv/src/main/java/hivemall/mix/store/PartialArgminKLD.java
@@ -52,7 +52,8 @@ public final class PartialArgminKLD extends PartialResult {
     }
 
     @Override
-    public void subtract(float localWeight, float covar, @Nonnegative int 
deltaUpdates, float scale) {
+    public void subtract(float localWeight, float covar, @Nonnegative int 
deltaUpdates,
+            float scale) {
         this.sum_mean_div_covar -= (localWeight / covar) / scale;
         this.sum_inv_covar -= (1.f / covar) / scale;
     }

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/mixserv/src/main/java/hivemall/mix/store/PartialAverage.java
----------------------------------------------------------------------
diff --git a/mixserv/src/main/java/hivemall/mix/store/PartialAverage.java 
b/mixserv/src/main/java/hivemall/mix/store/PartialAverage.java
index e03a6d5..0d1156b 100644
--- a/mixserv/src/main/java/hivemall/mix/store/PartialAverage.java
+++ b/mixserv/src/main/java/hivemall/mix/store/PartialAverage.java
@@ -54,7 +54,8 @@ public final class PartialAverage extends PartialResult {
     }
 
     @Override
-    public void subtract(float localWeight, float covar, @Nonnegative int 
deltaUpdates, float scale) {
+    public void subtract(float localWeight, float covar, @Nonnegative int 
deltaUpdates,
+            float scale) {
         assert (deltaUpdates > 0) : deltaUpdates;
         scaledSumWeights -= ((localWeight / scale) * deltaUpdates);
         totalUpdates -= deltaUpdates; // note deltaUpdates is in range (0,127]

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/mixserv/src/main/java/hivemall/mix/store/SessionStore.java
----------------------------------------------------------------------
diff --git a/mixserv/src/main/java/hivemall/mix/store/SessionStore.java 
b/mixserv/src/main/java/hivemall/mix/store/SessionStore.java
index 1a2892e..9ff0a53 100644
--- a/mixserv/src/main/java/hivemall/mix/store/SessionStore.java
+++ b/mixserv/src/main/java/hivemall/mix/store/SessionStore.java
@@ -51,8 +51,8 @@ public final class SessionStore {
     public SessionObject get(@Nonnull String groupID) {
         SessionObject sessionObj = sessions.get(groupID);
         if (sessionObj == null) {
-            ConcurrentMap<Object, PartialResult> map = new 
ConcurrentHashMap<Object, PartialResult>(
-                EXPECTED_MODEL_SIZE);
+            ConcurrentMap<Object, PartialResult> map =
+                    new ConcurrentHashMap<Object, 
PartialResult>(EXPECTED_MODEL_SIZE);
             sessionObj = new SessionObject(map);
             SessionObject existing = sessions.putIfAbsent(groupID, sessionObj);
             if (existing != null) {
@@ -76,7 +76,8 @@ public final class SessionStore {
         private final ConcurrentMap<String, SessionObject> sessions;
         private final long ttl;
 
-        public IdleSessionSweeper(@Nonnull SessionStore sessionStore, 
@Nonnegative long ttlInMillis) {
+        public IdleSessionSweeper(@Nonnull SessionStore sessionStore,
+                @Nonnegative long ttlInMillis) {
             this.sessions = sessionStore.getSessions();
             this.ttl = ttlInMillis;
         }

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/mixserv/src/test/java/hivemall/mix/server/MixServerHandlerTest.java
----------------------------------------------------------------------
diff --git 
a/mixserv/src/test/java/hivemall/mix/server/MixServerHandlerTest.java 
b/mixserv/src/test/java/hivemall/mix/server/MixServerHandlerTest.java
index fbb9cc1..7d4b2f3 100644
--- a/mixserv/src/test/java/hivemall/mix/server/MixServerHandlerTest.java
+++ b/mixserv/src/test/java/hivemall/mix/server/MixServerHandlerTest.java
@@ -52,8 +52,9 @@ public final class MixServerHandlerTest extends 
HivemallTestBase {
         SessionStore session = new SessionStore();
         MixServerHandler handler = new MixServerHandler(session, 4, 1.0f);
 
-        Method mixMethod = MixServerHandler.class.getDeclaredMethod("mix",
-            ChannelHandlerContext.class, MixMessage.class, 
PartialResult.class, SessionObject.class);
+        Method mixMethod =
+                MixServerHandler.class.getDeclaredMethod("mix", 
ChannelHandlerContext.class,
+                    MixMessage.class, PartialResult.class, 
SessionObject.class);
         mixMethod.setAccessible(true);
 
         SessionObject sessionObj = session.get("dummy");
@@ -77,8 +78,8 @@ public final class MixServerHandlerTest extends 
HivemallTestBase {
         Assert.assertEquals(5.0, acc.getWeight(1.0f), 0.001);
 
         // Check expected exceptions
-        exception.expectCause(new CauseMatcher(IllegalArgumentException.class,
-            "Illegal deltaUpdates received: 0"));
+        exception.expectCause(
+            new CauseMatcher(IllegalArgumentException.class, "Illegal 
deltaUpdates received: 0"));
         MixMessage msg4 = new MixMessage(MixEventName.average, dummyFeature, 
0.0f, (short) 0, 0);
         mixMethod.invoke(handler, ctx, msg4, acc, sessionObj);
     }

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/mixserv/src/test/java/hivemall/mix/server/MixServerTest.java
----------------------------------------------------------------------
diff --git a/mixserv/src/test/java/hivemall/mix/server/MixServerTest.java 
b/mixserv/src/test/java/hivemall/mix/server/MixServerTest.java
index f6db5f8..afa201a 100644
--- a/mixserv/src/test/java/hivemall/mix/server/MixServerTest.java
+++ b/mixserv/src/test/java/hivemall/mix/server/MixServerTest.java
@@ -298,8 +298,8 @@ public class MixServerTest extends HivemallTestBase {
 
     private static void invokeClient01(String groupId, int serverPort, boolean 
denseModel,
             boolean cancelMix) throws InterruptedException {
-        PredictionModel model = denseModel ? new NewDenseModel(100)
-                : new NewSparseModel(100, false);
+        PredictionModel model =
+                denseModel ? new NewDenseModel(100) : new NewSparseModel(100, 
false);
         model.configureClock();
         MixClient client = null;
         try {

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/mixserv/src/test/java/hivemall/test/HivemallTestBase.java
----------------------------------------------------------------------
diff --git a/mixserv/src/test/java/hivemall/test/HivemallTestBase.java 
b/mixserv/src/test/java/hivemall/test/HivemallTestBase.java
index ee80eba..5eb1879 100644
--- a/mixserv/src/test/java/hivemall/test/HivemallTestBase.java
+++ b/mixserv/src/test/java/hivemall/test/HivemallTestBase.java
@@ -39,7 +39,7 @@ public abstract class HivemallTestBase {
 
     @Before
     public void afterEachTest() {
-        logger.info("\n\n===== FINISHED " + packageName + ": '" + 
testName.getMethodName()
-                + "' =====\n");
+        logger.info(
+            "\n\n===== FINISHED " + packageName + ": '" + 
testName.getMethodName() + "' =====\n");
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/nlp/src/main/java/hivemall/nlp/tokenizer/KuromojiUDF.java
----------------------------------------------------------------------
diff --git a/nlp/src/main/java/hivemall/nlp/tokenizer/KuromojiUDF.java 
b/nlp/src/main/java/hivemall/nlp/tokenizer/KuromojiUDF.java
index cdf6dab..48b566f 100644
--- a/nlp/src/main/java/hivemall/nlp/tokenizer/KuromojiUDF.java
+++ b/nlp/src/main/java/hivemall/nlp/tokenizer/KuromojiUDF.java
@@ -59,8 +59,7 @@ import org.apache.lucene.analysis.ja.dict.UserDictionary;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.analysis.util.CharArraySet;
 
-@Description(
-        name = "tokenize_ja",
+@Description(name = "tokenize_ja",
         value = "_FUNC_(String line [, const string mode = \"normal\", const 
array<string> stopWords, const array<string> stopTags, const array<string> 
userDict (or string userDictURL)])"
                 + " - returns tokenized strings in array<string>",
         extended = "select 
tokenize_ja(\"kuromojiを使った分かち書きのテストです。第二引数にはnormal/search/extendedを指定できます。デフォルトではnormalモードです。\");\n"
@@ -86,8 +85,8 @@ public final class KuromojiUDF extends GenericUDF {
     public ObjectInspector initialize(ObjectInspector[] arguments) throws 
UDFArgumentException {
         final int arglen = arguments.length;
         if (arglen < 1 || arglen > 5) {
-            throw new UDFArgumentException("Invalid number of arguments for 
`tokenize_ja`: "
-                    + arglen);
+            throw new UDFArgumentException(
+                "Invalid number of arguments for `tokenize_ja`: " + arglen);
         }
 
         this._mode = (arglen >= 2) ? tokenizationMode(arguments[1]) : 
Mode.NORMAL;
@@ -96,8 +95,8 @@ public final class KuromojiUDF extends GenericUDF {
             this._stopWordsArray = HiveUtils.getConstStringArray(arguments[2]);
         }
 
-        this._stopTags = (arglen >= 4) ? stopTags(arguments[3])
-                : JapaneseAnalyzer.getDefaultStopTags();
+        this._stopTags =
+                (arglen >= 4) ? stopTags(arguments[3]) : 
JapaneseAnalyzer.getDefaultStopTags();
 
         if (arglen >= 5) {
             if (HiveUtils.isConstListOI(arguments[4])) {
@@ -112,7 +111,8 @@ public final class KuromojiUDF extends GenericUDF {
 
         this._analyzer = null;
 
-        return 
ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
+        return ObjectInspectorFactory.getStandardListObjectInspector(
+            PrimitiveObjectInspectorFactory.writableStringObjectInspector);
     }
 
     @Override
@@ -271,17 +271,17 @@ public final class KuromojiUDF extends GenericUDF {
 
         final InputStream is;
         try {
-            is = 
IOUtils.decodeInputStream(HttpUtils.getLimitedInputStream(conn,
-                MAX_INPUT_STREAM_SIZE));
+            is = IOUtils.decodeInputStream(
+                HttpUtils.getLimitedInputStream(conn, MAX_INPUT_STREAM_SIZE));
         } catch (NullPointerException | IOException e) {
             throw new UDFArgumentException("Failed to get input stream from 
the connection: "
                     + userDictURL + '\n' + 
ExceptionUtils.prettyPrintStackTrace(e));
         }
 
-        CharsetDecoder decoder = StandardCharsets.UTF_8.newDecoder()
-                                                       
.onMalformedInput(CodingErrorAction.REPORT)
-                                                       .onUnmappableCharacter(
-                                                           
CodingErrorAction.REPORT);
+        CharsetDecoder decoder =
+                StandardCharsets.UTF_8.newDecoder()
+                                      
.onMalformedInput(CodingErrorAction.REPORT)
+                                      
.onUnmappableCharacter(CodingErrorAction.REPORT);
         final Reader reader = new InputStreamReader(is, decoder);
         try {
             return UserDictionary.open(reader); // return null if empty

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/nlp/src/main/java/hivemall/nlp/tokenizer/SmartcnUDF.java
----------------------------------------------------------------------
diff --git a/nlp/src/main/java/hivemall/nlp/tokenizer/SmartcnUDF.java 
b/nlp/src/main/java/hivemall/nlp/tokenizer/SmartcnUDF.java
index d185b0d..cf6249f 100644
--- a/nlp/src/main/java/hivemall/nlp/tokenizer/SmartcnUDF.java
+++ b/nlp/src/main/java/hivemall/nlp/tokenizer/SmartcnUDF.java
@@ -56,14 +56,15 @@ public final class SmartcnUDF extends GenericUDF {
     public ObjectInspector initialize(ObjectInspector[] arguments) throws 
UDFArgumentException {
         final int arglen = arguments.length;
         if (arglen < 1 || arglen > 2) {
-            throw new UDFArgumentException("Invalid number of arguments for 
`tokenize_cn`: "
-                    + arglen);
+            throw new UDFArgumentException(
+                "Invalid number of arguments for `tokenize_cn`: " + arglen);
         }
 
         this._stopWordsArray = (arglen >= 2) ? 
HiveUtils.getConstStringArray(arguments[1]) : null;
         this._analyzer = null;
 
-        return 
ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
+        return ObjectInspectorFactory.getStandardListObjectInspector(
+            PrimitiveObjectInspectorFactory.writableStringObjectInspector);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/nlp/src/test/java/hivemall/nlp/tokenizer/KuromojiUDFTest.java
----------------------------------------------------------------------
diff --git a/nlp/src/test/java/hivemall/nlp/tokenizer/KuromojiUDFTest.java 
b/nlp/src/test/java/hivemall/nlp/tokenizer/KuromojiUDFTest.java
index 356507d..eb755e0 100644
--- a/nlp/src/test/java/hivemall/nlp/tokenizer/KuromojiUDFTest.java
+++ b/nlp/src/test/java/hivemall/nlp/tokenizer/KuromojiUDFTest.java
@@ -351,8 +351,7 @@ public class KuromojiUDFTest {
         // userDictUrl (Kuromoji official sample user defined dict on GitHub)
         // e.g., "日本経済新聞" will be "日本", "経済", and "新聞"
         argOIs[4] = 
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
-            stringType,
-            new Text(
+            stringType, new Text(
                 
"https://raw.githubusercontent.com/atilika/kuromoji/909fd6b32bf4e9dc86b7599de5c9b50ca8f004a1/kuromoji-core/src/test/resources/userdict.txt";));
         udf.initialize(argOIs);
 

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/spark/common/src/main/java/hivemall/dataset/LogisticRegressionDataGeneratorUDTFWrapper.java
----------------------------------------------------------------------
diff --git 
a/spark/common/src/main/java/hivemall/dataset/LogisticRegressionDataGeneratorUDTFWrapper.java
 
b/spark/common/src/main/java/hivemall/dataset/LogisticRegressionDataGeneratorUDTFWrapper.java
index a6d5468..cf10ed7 100644
--- 
a/spark/common/src/main/java/hivemall/dataset/LogisticRegressionDataGeneratorUDTFWrapper.java
+++ 
b/spark/common/src/main/java/hivemall/dataset/LogisticRegressionDataGeneratorUDTFWrapper.java
@@ -41,7 +41,8 @@ import 
org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 @Description(name = "lr_datagen",
         value = "_FUNC_(options string) - Generates a logistic regression 
dataset")
 public final class LogisticRegressionDataGeneratorUDTFWrapper extends 
UDTFWithOptions {
-    private transient LogisticRegressionDataGeneratorUDTF udtf = new 
LogisticRegressionDataGeneratorUDTF();
+    private transient LogisticRegressionDataGeneratorUDTF udtf =
+            new LogisticRegressionDataGeneratorUDTF();
 
     @Override
     protected Options getOptions() {

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/spark/common/src/main/java/hivemall/ftvec/AddBiasUDFWrapper.java
----------------------------------------------------------------------
diff --git a/spark/common/src/main/java/hivemall/ftvec/AddBiasUDFWrapper.java 
b/spark/common/src/main/java/hivemall/ftvec/AddBiasUDFWrapper.java
index b454fd9..e7da7cb 100644
--- a/spark/common/src/main/java/hivemall/ftvec/AddBiasUDFWrapper.java
+++ b/spark/common/src/main/java/hivemall/ftvec/AddBiasUDFWrapper.java
@@ -65,7 +65,8 @@ public class AddBiasUDFWrapper extends GenericUDF {
                 throw new UDFArgumentTypeException(0, "Type mismatch: 
features");
         }
 
-        return 
ObjectInspectorFactory.getStandardListObjectInspector(argumentOI.getListElementObjectInspector());
+        return ObjectInspectorFactory.getStandardListObjectInspector(
+            argumentOI.getListElementObjectInspector());
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/spark/common/src/main/java/hivemall/ftvec/AddFeatureIndexUDFWrapper.java
----------------------------------------------------------------------
diff --git 
a/spark/common/src/main/java/hivemall/ftvec/AddFeatureIndexUDFWrapper.java 
b/spark/common/src/main/java/hivemall/ftvec/AddFeatureIndexUDFWrapper.java
index 0b687db..6be3a9e 100644
--- a/spark/common/src/main/java/hivemall/ftvec/AddFeatureIndexUDFWrapper.java
+++ b/spark/common/src/main/java/hivemall/ftvec/AddFeatureIndexUDFWrapper.java
@@ -39,8 +39,7 @@ import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectIn
  * NOTE: This is needed to avoid the issue of Spark reflection. That is, spark 
cannot handle List<>
  * as a return type in Hive UDF. Therefore, the type must be passed via 
ObjectInspector.
  */
-@Description(
-        name = "add_feature_index",
+@Description(name = "add_feature_index",
         value = "_FUNC_(dense features in array<double>) - Returns a feature 
vector with feature indices")
 @UDFType(deterministic = true, stateful = false)
 public class AddFeatureIndexUDFWrapper extends GenericUDF {
@@ -67,7 +66,8 @@ public class AddFeatureIndexUDFWrapper extends GenericUDF {
                 throw new UDFArgumentTypeException(0, "Type mismatch: 
features");
         }
 
-        return 
ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
+        return ObjectInspectorFactory.getStandardListObjectInspector(
+            PrimitiveObjectInspectorFactory.javaStringObjectInspector);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/spark/common/src/main/java/hivemall/ftvec/ExtractWeightUDFWrapper.java
----------------------------------------------------------------------
diff --git 
a/spark/common/src/main/java/hivemall/ftvec/ExtractWeightUDFWrapper.java 
b/spark/common/src/main/java/hivemall/ftvec/ExtractWeightUDFWrapper.java
index 8580247..b5ef807 100644
--- a/spark/common/src/main/java/hivemall/ftvec/ExtractWeightUDFWrapper.java
+++ b/spark/common/src/main/java/hivemall/ftvec/ExtractWeightUDFWrapper.java
@@ -56,7 +56,8 @@ public class ExtractWeightUDFWrapper extends GenericUDF {
             throw new UDFArgumentTypeException(0, "Type mismatch: feature");
         }
 
-        return 
PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveCategory.DOUBLE);
+        return 
PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
+            PrimitiveCategory.DOUBLE);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/spark/common/src/main/java/hivemall/ftvec/SortByFeatureUDFWrapper.java
----------------------------------------------------------------------
diff --git 
a/spark/common/src/main/java/hivemall/ftvec/SortByFeatureUDFWrapper.java 
b/spark/common/src/main/java/hivemall/ftvec/SortByFeatureUDFWrapper.java
index 584be6c..e13e030 100644
--- a/spark/common/src/main/java/hivemall/ftvec/SortByFeatureUDFWrapper.java
+++ b/spark/common/src/main/java/hivemall/ftvec/SortByFeatureUDFWrapper.java
@@ -61,8 +61,10 @@ public class SortByFeatureUDFWrapper extends GenericUDF {
                 ObjectInspector valueOI = 
argumentOI.getMapValueObjectInspector();
                 if (keyOI.getCategory().equals(Category.PRIMITIVE)
                         && valueOI.getCategory().equals(Category.PRIMITIVE)) {
-                    final PrimitiveCategory keyCategory = 
((PrimitiveObjectInspector) keyOI).getPrimitiveCategory();
-                    final PrimitiveCategory valueCategory = 
((PrimitiveObjectInspector) valueOI).getPrimitiveCategory();
+                    final PrimitiveCategory keyCategory =
+                            ((PrimitiveObjectInspector) 
keyOI).getPrimitiveCategory();
+                    final PrimitiveCategory valueCategory =
+                            ((PrimitiveObjectInspector) 
valueOI).getPrimitiveCategory();
                     if (keyCategory == PrimitiveCategory.INT
                             && valueCategory == PrimitiveCategory.FLOAT) {
                         break;
@@ -81,7 +83,8 @@ public class SortByFeatureUDFWrapper extends GenericUDF {
     public Object evaluate(DeferredObject[] arguments) throws HiveException {
         assert (arguments.length == 1);
         @SuppressWarnings("unchecked")
-        final Map<IntWritable, FloatWritable> input = (Map<IntWritable, 
FloatWritable>) argumentOI.getMap(arguments[0].get());
+        final Map<IntWritable, FloatWritable> input =
+                (Map<IntWritable, FloatWritable>) 
argumentOI.getMap(arguments[0].get());
         return udf.evaluate(input);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/spark/common/src/main/java/hivemall/ftvec/scaling/L2NormalizationUDFWrapper.java
----------------------------------------------------------------------
diff --git 
a/spark/common/src/main/java/hivemall/ftvec/scaling/L2NormalizationUDFWrapper.java
 
b/spark/common/src/main/java/hivemall/ftvec/scaling/L2NormalizationUDFWrapper.java
index db533be..dcdba24 100644
--- 
a/spark/common/src/main/java/hivemall/ftvec/scaling/L2NormalizationUDFWrapper.java
+++ 
b/spark/common/src/main/java/hivemall/ftvec/scaling/L2NormalizationUDFWrapper.java
@@ -56,7 +56,8 @@ public class L2NormalizationUDFWrapper extends GenericUDF {
 
         switch (arguments[0].getCategory()) {
             case LIST:
-                ObjectInspector elmOI = ((ListObjectInspector) 
arguments[0]).getListElementObjectInspector();
+                ObjectInspector elmOI =
+                        ((ListObjectInspector) 
arguments[0]).getListElementObjectInspector();
                 if (elmOI.getCategory().equals(Category.PRIMITIVE)) {
                     if (((PrimitiveObjectInspector) 
elmOI).getPrimitiveCategory() == PrimitiveCategory.STRING) {
                         break;
@@ -71,7 +72,8 @@ public class L2NormalizationUDFWrapper extends GenericUDF {
         // Create a ObjectInspector converter for arguments
         ObjectInspector outputElemOI = 
ObjectInspectorFactory.getReflectionObjectInspector(
             Text.class, ObjectInspectorOptions.JAVA);
-        ObjectInspector outputOI = 
ObjectInspectorFactory.getStandardListObjectInspector(outputElemOI);
+        ObjectInspector outputOI =
+                
ObjectInspectorFactory.getStandardListObjectInspector(outputElemOI);
         toListText = ObjectInspectorConverters.getConverter(arguments[0], 
outputOI);
 
         ObjectInspector listElemOI = 
PrimitiveObjectInspectorFactory.javaStringObjectInspector;

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/spark/common/src/main/java/hivemall/knn/lsh/MinHashesUDFWrapper.java
----------------------------------------------------------------------
diff --git 
a/spark/common/src/main/java/hivemall/knn/lsh/MinHashesUDFWrapper.java 
b/spark/common/src/main/java/hivemall/knn/lsh/MinHashesUDFWrapper.java
index d3bcbe6..3c1fe9b 100644
--- a/spark/common/src/main/java/hivemall/knn/lsh/MinHashesUDFWrapper.java
+++ b/spark/common/src/main/java/hivemall/knn/lsh/MinHashesUDFWrapper.java
@@ -35,8 +35,7 @@ import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectIn
 import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 
 /** A wrapper of [[hivemall.knn.lsh.MinHashesUDF]]. */
-@Description(
-        name = "minhashes",
+@Description(name = "minhashes",
         value = "_FUNC_(features in array<string>, noWeight in boolean) - 
Returns hashed features as array<int>")
 @UDFType(deterministic = true, stateful = false)
 public class MinHashesUDFWrapper extends GenericUDF {
@@ -70,7 +69,9 @@ public class MinHashesUDFWrapper extends GenericUDF {
             throw new UDFArgumentException("Type mismatch: noWeight");
         }
 
-        return 
ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveCategory.INT));
+        return ObjectInspectorFactory.getStandardListObjectInspector(
+            
PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
+                PrimitiveCategory.INT));
     }
 
     @Override
@@ -78,8 +79,8 @@ public class MinHashesUDFWrapper extends GenericUDF {
         assert (arguments.length == 2);
         @SuppressWarnings("unchecked")
         final List<String> features = (List<String>) 
featuresOI.getList(arguments[0].get());
-        final Boolean noWeight = 
PrimitiveObjectInspectorUtils.getBoolean(arguments[1].get(),
-            noWeightOI);
+        final Boolean noWeight =
+                PrimitiveObjectInspectorUtils.getBoolean(arguments[1].get(), 
noWeightOI);
         return udf.evaluate(features, noWeight);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/spark/common/src/main/java/hivemall/tools/mapred/RowIdUDFWrapper.java
----------------------------------------------------------------------
diff --git 
a/spark/common/src/main/java/hivemall/tools/mapred/RowIdUDFWrapper.java 
b/spark/common/src/main/java/hivemall/tools/mapred/RowIdUDFWrapper.java
index f386223..e907c38 100644
--- a/spark/common/src/main/java/hivemall/tools/mapred/RowIdUDFWrapper.java
+++ b/spark/common/src/main/java/hivemall/tools/mapred/RowIdUDFWrapper.java
@@ -30,8 +30,7 @@ import 
org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 
 /** An alternative implementation of [[hivemall.tools.mapred.RowIdUDF]]. */
-@Description(
-        name = "rowid",
+@Description(name = "rowid",
         value = "_FUNC_() - Returns a generated row id of a form 
{TASK_ID}-{UUID}-{SEQUENCE_NUMBER}")
 @UDFType(deterministic = false, stateful = true)
 public class RowIdUDFWrapper extends GenericUDF {

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/tools/hivemall-docs/src/main/java/hivemall/docs/FuncsListGenerator.java
----------------------------------------------------------------------
diff --git 
a/tools/hivemall-docs/src/main/java/hivemall/docs/FuncsListGenerator.java 
b/tools/hivemall-docs/src/main/java/hivemall/docs/FuncsListGenerator.java
index 187c752..c940c1d 100644
--- a/tools/hivemall-docs/src/main/java/hivemall/docs/FuncsListGenerator.java
+++ b/tools/hivemall-docs/src/main/java/hivemall/docs/FuncsListGenerator.java
@@ -113,7 +113,8 @@ public class FuncsListGenerator extends AbstractMojo {
             Collections.singletonList("hivemall.ftvec.selection"));
         funcsHeaders.put("## Feature transformation and vectorization",
             Collections.singletonList("hivemall.ftvec.trans"));
-        funcsHeaders.put("# Geospatial functions", 
Collections.singletonList("hivemall.geospatial"));
+        funcsHeaders.put("# Geospatial functions",
+            Collections.singletonList("hivemall.geospatial"));
         funcsHeaders.put("# Distance measures", 
Collections.singletonList("hivemall.knn.distance"));
         funcsHeaders.put("# Locality-sensitive hashing",
             Collections.singletonList("hivemall.knn.lsh"));
@@ -138,12 +139,10 @@ public class FuncsListGenerator extends AbstractMojo {
             return;
         }
 
-        generate(
-            new File(basedir, pathToGenericFuncs),
+        generate(new File(basedir, pathToGenericFuncs),
             "This page describes a list of useful Hivemall generic functions. 
See also a [list of machine-learning-related functions](./funcs.md).",
             genericFuncsHeaders);
-        generate(
-            new File(basedir, pathToFuncs),
+        generate(new File(basedir, pathToFuncs),
             "This page describes a list of Hivemall functions. See also a 
[list of generic Hivemall functions](./generic_funcs.md) for more 
general-purpose functions such as array and map UDFs.",
             funcsHeaders);
     }

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/xgboost/src/main/java/hivemall/xgboost/NativeLibLoader.java
----------------------------------------------------------------------
diff --git a/xgboost/src/main/java/hivemall/xgboost/NativeLibLoader.java 
b/xgboost/src/main/java/hivemall/xgboost/NativeLibLoader.java
index 207376f..bf3ea1f 100644
--- a/xgboost/src/main/java/hivemall/xgboost/NativeLibLoader.java
+++ b/xgboost/src/main/java/hivemall/xgboost/NativeLibLoader.java
@@ -98,8 +98,8 @@ public final class NativeLibLoader {
             try {
                 File tempFile = 
createTempFileFromResource(userDefinedLibFile.getName(),
                     new FileInputStream(userDefinedLibFile.getAbsolutePath()));
-                logger.info("Copyed the user-defined native library as "
-                        + tempFile.getAbsolutePath());
+                logger.info(
+                    "Copyed the user-defined native library as " + 
tempFile.getAbsolutePath());
                 addLibraryPath(tempFile.getParent());
             } catch (Exception e) {
                 // Simply ignore it here
@@ -126,12 +126,12 @@ public final class NativeLibLoader {
      * @throws IOException
      * @throws IllegalArgumentException
      */
-    static File createTempFileFromResource(String libName, InputStream is) 
throws IOException,
-            IllegalArgumentException {
+    static File createTempFileFromResource(String libName, InputStream is)
+            throws IOException, IllegalArgumentException {
         // Create a temporary folder with a random number for the native lib
         final String uuid = UUID.randomUUID().toString();
-        final File tempFolder = new File(System.getProperty("java.io.tmpdir"), 
String.format(
-            "%s-%s", getPrefix(libName), uuid));
+        final File tempFolder = new File(System.getProperty("java.io.tmpdir"),
+            String.format("%s-%s", getPrefix(libName), uuid));
         if (!tempFolder.exists()) {
             boolean created = tempFolder.mkdirs();
             if (!created) {

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/xgboost/src/main/java/hivemall/xgboost/XGBoostPredictUDTF.java
----------------------------------------------------------------------
diff --git a/xgboost/src/main/java/hivemall/xgboost/XGBoostPredictUDTF.java 
b/xgboost/src/main/java/hivemall/xgboost/XGBoostPredictUDTF.java
index fd4c0b4..9c3b476 100644
--- a/xgboost/src/main/java/hivemall/xgboost/XGBoostPredictUDTF.java
+++ b/xgboost/src/main/java/hivemall/xgboost/XGBoostPredictUDTF.java
@@ -87,8 +87,8 @@ public abstract class XGBoostPredictUDTF extends 
UDTFWithOptions {
             cl = this.parseOptions(rawArgs);
             _batch_size = 
Primitives.parseInt(cl.getOptionValue("_batch_size"), _batch_size);
             if (_batch_size < 1) {
-                throw new IllegalArgumentException("batch_size must be greater 
than 0: "
-                        + _batch_size);
+                throw new IllegalArgumentException(
+                    "batch_size must be greater than 0: " + _batch_size);
             }
         }
         this.batch_size = _batch_size;
@@ -171,8 +171,8 @@ public abstract class XGBoostPredictUDTF extends 
UDTFWithOptions {
         }
         final String modelId = 
PrimitiveObjectInspectorUtils.getString(args[2], modelIdOI);
         if (!mapToModel.containsKey(modelId)) {
-            final byte[] predModel = 
PrimitiveObjectInspectorUtils.getBinary(args[3], modelOI)
-                                                                  .getBytes();
+            final byte[] predModel =
+                    PrimitiveObjectInspectorUtils.getBinary(args[3], 
modelOI).getBytes();
             mapToModel.put(modelId, initXgBooster(predModel));
         }
 

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/xgboost/src/main/java/hivemall/xgboost/XGBoostUDTF.java
----------------------------------------------------------------------
diff --git a/xgboost/src/main/java/hivemall/xgboost/XGBoostUDTF.java 
b/xgboost/src/main/java/hivemall/xgboost/XGBoostUDTF.java
index 3f3c08f..272614f 100644
--- a/xgboost/src/main/java/hivemall/xgboost/XGBoostUDTF.java
+++ b/xgboost/src/main/java/hivemall/xgboost/XGBoostUDTF.java
@@ -135,16 +135,15 @@ public abstract class XGBoostUDTF extends UDTFWithOptions 
{
         /** Parameters for Tree Booster */
         opts.addOption("eta", true,
             "Step size shrinkage used in update to prevents overfitting 
[default: 0.3]");
-        opts.addOption(
-            "gamma",
-            true,
+        opts.addOption("gamma", true,
             "Minimum loss reduction required to make a further partition on a 
leaf node of the tree [default: 0.0]");
         opts.addOption("max_depth", true, "Max depth of decision tree 
[default: 6]");
         opts.addOption("min_child_weight", true,
             "Minimum sum of instance weight(hessian) needed in a child 
[default: 1]");
         opts.addOption("max_delta_step", true,
             "Maximum delta step we allow each tree's weight estimation to be 
[default: 0]");
-        opts.addOption("subsample", true, "Subsample ratio of the training 
instance [default: 1.0]");
+        opts.addOption("subsample", true,
+            "Subsample ratio of the training instance [default: 1.0]");
         opts.addOption("colsample_bytree", true,
             "Subsample ratio of columns when constructing each tree [default: 
1.0]");
         opts.addOption("colsample_bylevel", true,
@@ -218,7 +217,8 @@ public abstract class XGBoostUDTF extends UDTFWithOptions {
                 params.put("subsample", 
Double.valueOf(cl.getOptionValue("subsample")));
             }
             if (cl.hasOption("colsample_bytree")) {
-                params.put("colsamle_bytree", 
Double.valueOf(cl.getOptionValue("colsample_bytree")));
+                params.put("colsamle_bytree",
+                    Double.valueOf(cl.getOptionValue("colsample_bytree")));
             }
             if (cl.hasOption("colsample_bylevel")) {
                 params.put("colsamle_bylevel",
@@ -309,8 +309,8 @@ public abstract class XGBoostUDTF extends UDTFWithOptions {
         Class<?>[] args = {Map.class, DMatrix[].class};
         Constructor<Booster> ctor = Booster.class.getDeclaredConstructor(args);
         ctor.setAccessible(true);
-        return ctor.newInstance(new Object[] {params,
-                new DMatrix[] {new DMatrix(input.iterator(), "")}});
+        return ctor.newInstance(
+            new Object[] {params, new DMatrix[] {new DMatrix(input.iterator(), 
"")}});
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/xgboost/src/main/java/hivemall/xgboost/XGBoostUtils.java
----------------------------------------------------------------------
diff --git a/xgboost/src/main/java/hivemall/xgboost/XGBoostUtils.java 
b/xgboost/src/main/java/hivemall/xgboost/XGBoostUtils.java
index 0472229..aa3d7fd 100644
--- a/xgboost/src/main/java/hivemall/xgboost/XGBoostUtils.java
+++ b/xgboost/src/main/java/hivemall/xgboost/XGBoostUtils.java
@@ -29,7 +29,8 @@ public final class XGBoostUtils {
 
     /** Transform List<String> inputs into a XGBoost input format */
     @Nullable
-    public static LabeledPoint parseFeatures(final double target, @Nonnull 
final String[] features) {
+    public static LabeledPoint parseFeatures(final double target,
+            @Nonnull final String[] features) {
         final int size = features.length;
         if (size == 0) {
             return null;

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/xgboost/src/main/java/hivemall/xgboost/classification/XGBoostBinaryClassifierUDTF.java
----------------------------------------------------------------------
diff --git 
a/xgboost/src/main/java/hivemall/xgboost/classification/XGBoostBinaryClassifierUDTF.java
 
b/xgboost/src/main/java/hivemall/xgboost/classification/XGBoostBinaryClassifierUDTF.java
index 6636bc1..1138e73 100644
--- 
a/xgboost/src/main/java/hivemall/xgboost/classification/XGBoostBinaryClassifierUDTF.java
+++ 
b/xgboost/src/main/java/hivemall/xgboost/classification/XGBoostBinaryClassifierUDTF.java
@@ -27,8 +27,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
  * A XGBoost binary classification and the document is as follows; -
  * https://github.com/dmlc/xgboost/tree/master/demo/binary_classification
  */
-@Description(
-        name = "train_xgboost_classifier",
+@Description(name = "train_xgboost_classifier",
         value = "_FUNC_(string[] features, double target [, string options]) - 
Returns a relation consisting of <string model_id, array<byte> pred_model>")
 public final class XGBoostBinaryClassifierUDTF extends XGBoostUDTF {
 

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/xgboost/src/main/java/hivemall/xgboost/classification/XGBoostMulticlassClassifierUDTF.java
----------------------------------------------------------------------
diff --git 
a/xgboost/src/main/java/hivemall/xgboost/classification/XGBoostMulticlassClassifierUDTF.java
 
b/xgboost/src/main/java/hivemall/xgboost/classification/XGBoostMulticlassClassifierUDTF.java
index 62ede2c..7bdb16f 100644
--- 
a/xgboost/src/main/java/hivemall/xgboost/classification/XGBoostMulticlassClassifierUDTF.java
+++ 
b/xgboost/src/main/java/hivemall/xgboost/classification/XGBoostMulticlassClassifierUDTF.java
@@ -31,8 +31,7 @@ import 
org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
  * A XGBoost multiclass classification and the document is as follows; -
  * https://github.com/dmlc/xgboost/tree/master/demo/multiclass_classification
  */
-@Description(
-        name = "train_multiclass_xgboost_classifier",
+@Description(name = "train_multiclass_xgboost_classifier",
         value = "_FUNC_(string[] features, double target [, string options]) - 
Returns a relation consisting of <string model_id, array<byte> pred_model>")
 public final class XGBoostMulticlassClassifierUDTF extends XGBoostUDTF {
 
@@ -61,8 +60,8 @@ public final class XGBoostMulticlassClassifierUDTF extends 
XGBoostUDTF {
             if (cli.hasOption("num_class")) {
                 int _num_class = 
Integer.valueOf(cli.getOptionValue("num_class"));
                 if (_num_class < 2) {
-                    throw new UDFArgumentException("num_class must be greater 
than 1: "
-                            + _num_class);
+                    throw new UDFArgumentException(
+                        "num_class must be greater than 1: " + _num_class);
                 }
                 params.put("num_class", _num_class);
             }

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/xgboost/src/main/java/hivemall/xgboost/regression/XGBoostRegressionUDTF.java
----------------------------------------------------------------------
diff --git 
a/xgboost/src/main/java/hivemall/xgboost/regression/XGBoostRegressionUDTF.java 
b/xgboost/src/main/java/hivemall/xgboost/regression/XGBoostRegressionUDTF.java
index 3a7aec6..5d38afe 100644
--- 
a/xgboost/src/main/java/hivemall/xgboost/regression/XGBoostRegressionUDTF.java
+++ 
b/xgboost/src/main/java/hivemall/xgboost/regression/XGBoostRegressionUDTF.java
@@ -27,8 +27,7 @@ import hivemall.xgboost.XGBoostUDTF;
  * A XGBoost regression and the document is as follows; -
  * https://github.com/dmlc/xgboost/tree/master/demo/regression
  */
-@Description(
-        name = "train_xgboost_regr",
+@Description(name = "train_xgboost_regr",
         value = "_FUNC_(string[] features, double target [, string options]) - 
Returns a relation consisting of <string model_id, array<byte> pred_model>")
 public final class XGBoostRegressionUDTF extends XGBoostUDTF {
 

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/xgboost/src/main/java/hivemall/xgboost/tools/XGBoostMulticlassPredictUDTF.java
----------------------------------------------------------------------
diff --git 
a/xgboost/src/main/java/hivemall/xgboost/tools/XGBoostMulticlassPredictUDTF.java
 
b/xgboost/src/main/java/hivemall/xgboost/tools/XGBoostMulticlassPredictUDTF.java
index d3e1bcb..b80f95a 100644
--- 
a/xgboost/src/main/java/hivemall/xgboost/tools/XGBoostMulticlassPredictUDTF.java
+++ 
b/xgboost/src/main/java/hivemall/xgboost/tools/XGBoostMulticlassPredictUDTF.java
@@ -32,8 +32,7 @@ import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectIn
 
 import hivemall.utils.lang.Preconditions;
 
-@Description(
-        name = "xgboost_multiclass_predict",
+@Description(name = "xgboost_multiclass_predict",
         value = "_FUNC_(string rowid, string[] features, string model_id, 
array<byte> pred_model [, string options]) "
                 + "- Returns a prediction result as (string rowid, string 
label, float probability)")
 public final class XGBoostMulticlassPredictUDTF extends 
hivemall.xgboost.XGBoostPredictUDTF {

http://git-wip-us.apache.org/repos/asf/incubator-hivemall/blob/c4036695/xgboost/src/main/java/hivemall/xgboost/tools/XGBoostPredictUDTF.java
----------------------------------------------------------------------
diff --git 
a/xgboost/src/main/java/hivemall/xgboost/tools/XGBoostPredictUDTF.java 
b/xgboost/src/main/java/hivemall/xgboost/tools/XGBoostPredictUDTF.java
index df5498d..85d0fb1 100644
--- a/xgboost/src/main/java/hivemall/xgboost/tools/XGBoostPredictUDTF.java
+++ b/xgboost/src/main/java/hivemall/xgboost/tools/XGBoostPredictUDTF.java
@@ -32,8 +32,7 @@ import 
org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 
-@Description(
-        name = "xgboost_predict",
+@Description(name = "xgboost_predict",
         value = "_FUNC_(string rowid, string[] features, string model_id, 
array<byte> pred_model [, string options]) "
                 + "- Returns a prediction result as (string rowid, float 
predicted)")
 public final class XGBoostPredictUDTF extends 
hivemall.xgboost.XGBoostPredictUDTF {

Reply via email to