Repository: incubator-griffin
Updated Branches:
  refs/heads/master 485c5cfc7 -> 18fc4cf4c


http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/18fc4cf4/measure/src/main/scala/org/apache/griffin/measure/step/write/RecordWriteStep.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/step/write/RecordWriteStep.scala b/measure/src/main/scala/org/apache/griffin/measure/step/write/RecordWriteStep.scala
index 2bc373c..1fef694 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/step/write/RecordWriteStep.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/step/write/RecordWriteStep.scala
@@ -18,12 +18,13 @@ under the License.
 */
 package org.apache.griffin.measure.step.write
 
+import org.apache.spark.rdd.RDD
+import org.apache.spark.sql._
+
 import org.apache.griffin.measure.configuration.enums._
 import org.apache.griffin.measure.context.DQContext
 import org.apache.griffin.measure.step.builder.ConstantColumns
 import org.apache.griffin.measure.utils.JsonUtil
-import org.apache.spark.rdd.RDD
-import org.apache.spark.sql._
 
 /**
   * write records needs to be sink
@@ -39,18 +40,16 @@ case class RecordWriteStep(name: String,
 
     val writeMode = writeTimestampOpt.map(_ => SimpleMode).getOrElse(context.writeMode)
     writeMode match {
-      case SimpleMode => {
+      case SimpleMode =>
         // batch records
         val recordsOpt = getBatchRecords(context)
         // write records
         recordsOpt match {
-          case Some(records) => {
+          case Some(records) =>
             context.getSink(timestamp).sinkRecords(records, name)
-          }
-          case _ => {}
+          case _ =>
         }
-      }
-      case TimestampMode => {
+      case TimestampMode =>
         // streaming records
         val (recordsOpt, emptyTimestamps) = getStreamingRecords(context)
         // write records
@@ -63,7 +62,6 @@ case class RecordWriteStep(name: String,
         emptyTimestamps.foreach { t =>
           context.getSink(t).sinkRecords(Nil, name)
         }
-      }
     }
     true
   }
@@ -81,28 +79,31 @@ case class RecordWriteStep(name: String,
       val df = context.sqlContext.table(s"`${name}`")
       Some(df)
     } catch {
-      case e: Throwable => {
+      case e: Throwable =>
         error(s"get data frame ${name} fails")
         None
-      }
     }
   }
 
-  private def getRecordDataFrame(context: DQContext): Option[DataFrame] = getDataFrame(context, inputName)
+  private def getRecordDataFrame(context: DQContext): Option[DataFrame]
+    = getDataFrame(context, inputName)
 
-  private def getFilterTableDataFrame(context: DQContext): Option[DataFrame] = filterTableNameOpt.flatMap(getDataFrame(context, _))
+  private def getFilterTableDataFrame(context: DQContext): Option[DataFrame]
+    = filterTableNameOpt.flatMap(getDataFrame(context, _))
 
   private def getBatchRecords(context: DQContext): Option[RDD[String]] = {
     getRecordDataFrame(context).map(_.toJSON.rdd);
   }
 
-  private def getStreamingRecords(context: DQContext): (Option[RDD[(Long, Iterable[String])]], Set[Long]) = {
+  private def getStreamingRecords(context: DQContext)
+    : (Option[RDD[(Long, Iterable[String])]], Set[Long])
+    = {
     implicit val encoder = Encoders.tuple(Encoders.scalaLong, Encoders.STRING)
     val defTimestamp = context.contextId.timestamp
     getRecordDataFrame(context) match {
-      case Some(df) => {
+      case Some(df) =>
         val (filterFuncOpt, emptyTimestamps) = getFilterTableDataFrame(context) match {
-          case Some(filterDf) => {
+          case Some(filterDf) =>
             // timestamps with empty flag
             val tmsts: Array[(Long, Boolean)] = (filterDf.collect.flatMap { row =>
               try {
@@ -120,13 +121,12 @@ case class RecordWriteStep(name: String,
             } else None
 
             (filterFuncOpt, emptyTmsts)
-          }
           case _ => (Some((t: Long) => true), Set[Long]())
         }
 
         // filter timestamps need to record
         filterFuncOpt match {
-          case Some(filterFunc) => {
+          case Some(filterFunc) =>
             val records = df.flatMap { row =>
               val tmst = getTmst(row, defTimestamp)
               if (filterFunc(tmst)) {
@@ -140,10 +140,8 @@ case class RecordWriteStep(name: String,
               } else None
             }
             (Some(records.rdd.groupByKey), emptyTimestamps)
-          }
           case _ => (None, emptyTimestamps)
         }
-      }
       case _ => (None, Set[Long]())
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/18fc4cf4/measure/src/main/scala/org/apache/griffin/measure/step/write/SparkRowFormatter.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/step/write/SparkRowFormatter.scala b/measure/src/main/scala/org/apache/griffin/measure/step/write/SparkRowFormatter.scala
index 592c1d4..220c46b 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/step/write/SparkRowFormatter.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/step/write/SparkRowFormatter.scala
@@ -18,10 +18,11 @@ under the License.
 */
 package org.apache.griffin.measure.step.write
 
+import scala.collection.mutable.ArrayBuffer
+
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.types.{ArrayType, DataType, StructField, StructType}
 
-import scala.collection.mutable.ArrayBuffer
 
 /**
   * spark row formatter
@@ -46,7 +47,8 @@ object SparkRowFormatter {
       case (sf, a) =>
         sf.dataType match {
           case ArrayType(et, _) =>
-            Map(sf.name -> (if (a == null) a else formatArray(et, a.asInstanceOf[ArrayBuffer[Any]])))
+            Map(sf.name ->
+              (if (a == null) a else formatArray(et, a.asInstanceOf[ArrayBuffer[Any]])))
           case StructType(s) =>
             Map(sf.name -> (if (a == null) a else formatStruct(s, a.asInstanceOf[Row])))
           case _ => Map(sf.name -> a)

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/18fc4cf4/measure/src/main/scala/org/apache/griffin/measure/utils/FSUtil.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/utils/FSUtil.scala b/measure/src/main/scala/org/apache/griffin/measure/utils/FSUtil.scala
index 023a138..7ac8b4f 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/utils/FSUtil.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/utils/FSUtil.scala
@@ -21,11 +21,12 @@ package org.apache.griffin.measure.utils
 import java.io.File
 import java.net.URI
 
-import org.apache.griffin.measure.Loggable
+import scala.collection.mutable.{Map => MutableMap}
+
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.FileSystem
 
-import scala.collection.mutable.{Map => MutableMap}
+import org.apache.griffin.measure.Loggable
 
 object FSUtil extends Loggable {
 
@@ -34,23 +35,20 @@ object FSUtil extends Loggable {
 
   def getFileSystem(path: String): FileSystem = {
     getUriOpt(path) match {
-      case Some(uri) => {
+      case Some(uri) =>
         fsMap.get(uri.getScheme) match {
           case Some(fs) => fs
-          case _ => {
+          case _ =>
             val fs = try {
               FileSystem.get(uri, getConfiguration)
             } catch {
-              case e: Throwable => {
+              case e: Throwable =>
                 error(s"get file system error: ${e.getMessage}")
                 throw e
-              }
             }
             fsMap += (uri.getScheme -> fs)
             fs
-          }
         }
-      }
       case _ => defaultFS
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/18fc4cf4/measure/src/main/scala/org/apache/griffin/measure/utils/HdfsUtil.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/utils/HdfsUtil.scala b/measure/src/main/scala/org/apache/griffin/measure/utils/HdfsUtil.scala
index 89f505a..0cae5bd 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/utils/HdfsUtil.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/utils/HdfsUtil.scala
@@ -18,9 +18,10 @@ under the License.
 */
 package org.apache.griffin.measure.utils
 
-import org.apache.griffin.measure.Loggable
 import org.apache.hadoop.fs.{FSDataInputStream, FSDataOutputStream, Path}
 
+import org.apache.griffin.measure.Loggable
+
 object HdfsUtil extends Loggable {
 
   private val seprator = "/"
@@ -90,24 +91,9 @@ object HdfsUtil extends Loggable {
     }
   }
 
-  //  def listPathFiles(dirPath: String): Iterable[String] = {
-  //    val path = new Path(dirPath)
-  //    try {
-  //      val fileStatusArray = dfs.listStatus(path)
-  //      fileStatusArray.flatMap { fileStatus =>
-  //        if (fileStatus.isFile) {
-  //          Some(fileStatus.getPath.getName)
-  //        } else None
-  //      }
-  //    } catch {
-  //      case e: Throwable => {
-  //        println(s"list path files error: ${e.getMessage}")
-  //        Nil
-  //      }
-  //    }
-  //  }
-
-  def listSubPathsByType(dirPath: String, subType: String, fullPath: Boolean = false): Iterable[String] = {
+
+  def listSubPathsByType(dirPath: String, subType: String, fullPath: Boolean = false)
+    : Iterable[String] = {
     if (existPath(dirPath)) {
       try {
         implicit val path = new Path(dirPath)
@@ -123,15 +109,15 @@ object HdfsUtil extends Loggable {
           if (fullPath) getHdfsFilePath(dirPath, fname) else fname
         }
       } catch {
-        case e: Throwable => {
+        case e: Throwable =>
           warn(s"list path [${dirPath}] warn: ${e.getMessage}")
           Nil
-        }
       }
     } else Nil
   }
 
-  def listSubPathsByTypes(dirPath: String, subTypes: Iterable[String], fullPath: Boolean = false): Iterable[String] = {
+  def listSubPathsByTypes(dirPath: String, subTypes: Iterable[String], fullPath: Boolean = false)
+    : Iterable[String] = {
     subTypes.flatMap { subType =>
       listSubPathsByType(dirPath, subType, fullPath)
     }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/18fc4cf4/measure/src/main/scala/org/apache/griffin/measure/utils/HttpUtil.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/utils/HttpUtil.scala b/measure/src/main/scala/org/apache/griffin/measure/utils/HttpUtil.scala
index e016b60..4949642 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/utils/HttpUtil.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/utils/HttpUtil.scala
@@ -27,22 +27,30 @@ object HttpUtil {
   val PUT_REGEX = """^(?i)put$""".r
   val DELETE_REGEX = """^(?i)delete$""".r
 
-  def postData(url: String, params: Map[String, Object], headers: Map[String, Object], data: String): Boolean = {
-    val response = Http(url).params(convertObjMap2StrMap(params)).headers(convertObjMap2StrMap(headers)).postData(data).asString
+  def postData(url: String,
+               params: Map[String, Object],
+               headers: Map[String, Object],
+               data: String): Boolean = {
+    val response = Http(url).params(convertObjMap2StrMap(params))
+      .headers(convertObjMap2StrMap(headers)).postData(data).asString
+
     response.isSuccess
   }
 
-  def httpRequest(url: String, method: String, params: Map[String, Object], headers: Map[String, Object], data: String): Boolean = {
-    val httpReq = Http(url).params(convertObjMap2StrMap(params)).headers(convertObjMap2StrMap(headers))
+  def httpRequest(url: String,
+                  method: String,
+                  params: Map[String, Object],
+                  headers: Map[String, Object],
+                  data: String): Boolean = {
+    val httpReq = Http(url).params(convertObjMap2StrMap(params))
+        .headers(convertObjMap2StrMap(headers))
     method match {
-      case POST_REGEX() => {
+      case POST_REGEX() =>
         val res = httpReq.postData(data).asString
         res.isSuccess
-      }
-      case PUT_REGEX() => {
+      case PUT_REGEX() =>
         val res = httpReq.put(data).asString
         res.isSuccess
-      }
       case _ => false
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/18fc4cf4/measure/src/main/scala/org/apache/griffin/measure/utils/JsonUtil.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/utils/JsonUtil.scala b/measure/src/main/scala/org/apache/griffin/measure/utils/JsonUtil.scala
index 175bbd8..cbb8734 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/utils/JsonUtil.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/utils/JsonUtil.scala
@@ -20,10 +20,11 @@ package org.apache.griffin.measure.utils
 
 import java.io.InputStream
 
+import scala.reflect._
+
 import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper}
 import com.fasterxml.jackson.module.scala.DefaultScalaModule
 
-import scala.reflect._
 
 object JsonUtil {
   val mapper = new ObjectMapper()
@@ -31,7 +32,7 @@ object JsonUtil {
   mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
 
   def toJson(value: Map[Symbol, Any]): String = {
-    toJson(value map { case (k,v) => k.name -> v})
+    toJson(value map { case (k, v) => k.name -> v})
   }
 
   def toJson(value: Any): String = {

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/18fc4cf4/measure/src/main/scala/org/apache/griffin/measure/utils/ParamUtil.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/utils/ParamUtil.scala b/measure/src/main/scala/org/apache/griffin/measure/utils/ParamUtil.scala
index fccbfb5..c4420ef 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/utils/ParamUtil.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/utils/ParamUtil.scala
@@ -177,8 +177,10 @@ object ParamUtil {
       }
     }
 
-    case class StringAnyMap(values:Map[String,Any])
-    def getParamMap(key: String, defValue: Map[String, Any] = Map[String, Any]()): Map[String, Any] = {
+    case class StringAnyMap(values: Map[String, Any])
+
+    def getParamMap(key: String, defValue: Map[String, Any]
+      = Map[String, Any]()): Map[String, Any] = {
       params.get(key) match {
         case Some(v: StringAnyMap) => v.values
         case _ => defValue
@@ -193,7 +195,7 @@ object ParamUtil {
     }
 
     def getArr[T](key: String): Seq[T] = {
-      case class TSeqs(values:Seq[T])
+      case class TSeqs(values: Seq[T])
       params.get(key) match {
         case Some(seq: TSeqs) => seq.values
         case _ => Nil

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/18fc4cf4/measure/src/main/scala/org/apache/griffin/measure/utils/TimeUtil.scala
----------------------------------------------------------------------
diff --git a/measure/src/main/scala/org/apache/griffin/measure/utils/TimeUtil.scala b/measure/src/main/scala/org/apache/griffin/measure/utils/TimeUtil.scala
index e96cbb1..9707c65 100644
--- a/measure/src/main/scala/org/apache/griffin/measure/utils/TimeUtil.scala
+++ b/measure/src/main/scala/org/apache/griffin/measure/utils/TimeUtil.scala
@@ -18,18 +18,18 @@ under the License.
 */
 package org.apache.griffin.measure.utils
 
-import org.apache.griffin.measure.Loggable
-
-import scala.util.matching.Regex
 import scala.util.{Failure, Success, Try}
+import scala.util.matching.Regex
+
+import org.apache.griffin.measure.Loggable
 
 object TimeUtil extends Loggable {
 
   private object Units {
     case class TimeUnit(name: String, shortName: String, ut: Long, regex: Regex) {
-      def toMs(t: Long) = t * ut
-      def fromMs(ms: Long) = ms / ut
-      def fitUnit(ms: Long) = (ms % ut == 0)
+      def toMs(t: Long) : Long = t * ut
+      def fromMs(ms: Long) : Long = ms / ut
+      def fitUnit(ms: Long) : Boolean = (ms % ut == 0)
     }
 
     val dayUnit = TimeUnit("day", "d", 24 * 60 * 60 * 1000, """^(?i)d(?:ay)?$""".r)
@@ -50,7 +50,7 @@ object TimeUtil extends Loggable {
     val value: Option[Long] = {
       Try {
         timeString match {
-          case TimeRegex(time, unit) => {
+          case TimeRegex(time, unit) =>
             val t = time.toLong
             unit match {
               case dayUnit.regex() => dayUnit.toMs(t)
@@ -60,11 +60,9 @@ object TimeUtil extends Loggable {
               case msUnit.regex() => msUnit.toMs(t)
               case _ => throw new Exception(s"${timeString} is invalid time format")
             }
-          }
-          case PureTimeRegex(time) => {
+          case PureTimeRegex(time) =>
             val t = time.toLong
             msUnit.toMs(t)
-          }
           case _ => throw new Exception(s"${timeString} is invalid time format")
         }
       } match {

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/18fc4cf4/scalastyle-config.xml
----------------------------------------------------------------------
diff --git a/scalastyle-config.xml b/scalastyle-config.xml
new file mode 100644
index 0000000..05877d8
--- /dev/null
+++ b/scalastyle-config.xml
@@ -0,0 +1,246 @@
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+<!--
+
+If you wish to turn off checking for a section of code, you can put a comment in the source
+before and after the section, with the following syntax:
+
+  // scalastyle:off
+  ...  // stuff that breaks the styles
+  // scalastyle:on
+
+You can also disable only one rule, by specifying its rule id, as specified in:
+  http://www.scalastyle.org/rules-0.7.0.html
+
+  // scalastyle:off no.finalize
+  override def finalize(): Unit = ...
+  // scalastyle:on no.finalize
+
+This file is divided into 3 sections:
+ (1) rules that we enforce.
+ (2) rules that we would like to enforce, but haven't cleaned up the codebase to turn on yet
+     (or we need to make the scalastyle rule more configurable).
+ (3) rules that we don't want to enforce.
+-->
+
+<!--
+Reference: Spark scalastyle-config.xml (https://github.com/apache/spark/blob/master/scalastyle-config.xml)
+-->
+<scalastyle>
+  <name>Scalastyle standard configuration</name>
+
+  <!-- ================================================================================ -->
+  <!--                               rules we enforce                                   -->
+  <!-- ================================================================================ -->
+
+  <check level="error" class="org.scalastyle.file.FileTabChecker" enabled="true"></check>
+
+  <check level="error" class="org.scalastyle.file.HeaderMatchesChecker" enabled="true">
+    <parameters>
+       <parameter name="header"><![CDATA[/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */]]></parameter>
+    </parameters>
+  </check>
+
+  <check level="error" class="org.scalastyle.scalariform.SpacesAfterPlusChecker" enabled="true"></check>
+
+  <check level="error" class="org.scalastyle.scalariform.SpacesBeforePlusChecker" enabled="true"></check>
+
+  <check level="error" class="org.scalastyle.file.WhitespaceEndOfLineChecker" enabled="true"></check>
+
+  <check level="error" class="org.scalastyle.file.FileLineLengthChecker" enabled="true">
+    <parameters>
+      <parameter name="maxLineLength"><![CDATA[120]]></parameter>
+      <parameter name="tabSize"><![CDATA[2]]></parameter>
+      <parameter name="ignoreImports">true</parameter>
+    </parameters>
+  </check>
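+
+  <!-- For example, the wrapped signature from HdfsUtil.scala in this commit stays within
+       the 120-character limit:
+         def listSubPathsByType(dirPath: String, subType: String, fullPath: Boolean = false)
+           : Iterable[String] = {
+  -->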
+
+  <check level="error" class="org.scalastyle.scalariform.ClassNamesChecker" enabled="true">
+    <parameters><parameter name="regex"><![CDATA[[A-Z][A-Za-z]*]]></parameter></parameters>
+  </check>
+
+  <check level="error" class="org.scalastyle.scalariform.ObjectNamesChecker" enabled="true">
+    <parameters><parameter name="regex"><![CDATA[[A-Z][A-Za-z]*]]></parameter></parameters>
+  </check>
+
+  <check level="error" class="org.scalastyle.scalariform.PackageObjectNamesChecker" enabled="true">
+    <parameters><parameter name="regex"><![CDATA[^[a-z][A-Za-z]*$]]></parameter></parameters>
+  </check>
+
+  <check level="error" class="org.scalastyle.scalariform.ParameterNumberChecker" enabled="true">
+    <parameters><parameter name="maxParameters"><![CDATA[10]]></parameter></parameters>
+  </check>
+
+  <check level="error" class="org.scalastyle.scalariform.NoFinalizeChecker" enabled="true"></check>
+
+  <check level="error" class="org.scalastyle.scalariform.CovariantEqualsChecker" enabled="true"></check>
+
+  <check level="error" class="org.scalastyle.scalariform.StructuralTypeChecker" enabled="true"></check>
+
+  <check level="error" class="org.scalastyle.scalariform.UppercaseLChecker" enabled="true"></check>
+
+  <check level="error" class="org.scalastyle.scalariform.IfBraceChecker" enabled="true">
+    <parameters>
+      <parameter name="singleLineAllowed"><![CDATA[true]]></parameter>
+      <parameter name="doubleLineAllowed"><![CDATA[true]]></parameter>
+    </parameters>
+  </check>
+
+  <check level="error" class="org.scalastyle.scalariform.PublicMethodsHaveTypeChecker" enabled="true"></check>
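+
+  <!-- For example (cf. TimeUtil.scala in this commit): `def toMs(t: Long): Long = t * ut`
+       satisfies this check, while `def toMs(t: Long) = t * ut` would not, since its
+       result type is inferred. -->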
+
+  <check level="error" class="org.scalastyle.file.NewLineAtEofChecker" enabled="true"></check>
+
+  <check customId="nonascii" level="error" class="org.scalastyle.scalariform.NonASCIICharacterChecker" enabled="true"></check>
+
+  <check level="error" class="org.scalastyle.scalariform.SpaceAfterCommentStartChecker" enabled="true"></check>
+
+  <check level="error" class="org.scalastyle.scalariform.EnsureSingleSpaceBeforeTokenChecker" enabled="true">
+   <parameters>
+     <parameter name="tokens">ARROW, EQUALS, ELSE, TRY, CATCH, FINALLY, LARROW, RARROW</parameter>
+   </parameters>
+  </check>
+
+  <check level="error" class="org.scalastyle.scalariform.EnsureSingleSpaceAfterTokenChecker" enabled="true">
+    <parameters>
+     <parameter name="tokens">ARROW, EQUALS, COMMA, COLON, IF, ELSE, DO, WHILE, FOR, MATCH, TRY, CATCH, FINALLY, LARROW, RARROW</parameter>
+    </parameters>
+  </check>
+
+  <!-- ??? usually shouldn't be checked into the code base. -->
+  <check level="error" class="org.scalastyle.scalariform.NotImplementedErrorUsage" enabled="true"></check>
+
+  <check level="error" class="org.scalastyle.scalariform.ImportOrderChecker" enabled="true">
+    <parameters>
+      <parameter name="groups">java,scala,3rdParty,griffin</parameter>
+      <parameter name="group.java">javax?\..*</parameter>
+      <parameter name="group.scala">scala\..*</parameter>
+      <parameter name="group.3rdParty">(?!org\.apache\.griffin\.).*</parameter>
+      <parameter name="group.griffin">org\.apache\.griffin\..*</parameter>
+    </parameters>
+  </check>
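+
+  <!-- For example, the reordered imports of FSUtil.scala in this commit follow these groups:
+         import java.io.File
+
+         import scala.collection.mutable.{Map => MutableMap}
+
+         import org.apache.hadoop.fs.FileSystem
+
+         import org.apache.griffin.measure.Loggable
+  -->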
+
+  <check level="error" class="org.scalastyle.scalariform.DisallowSpaceBeforeTokenChecker" enabled="true">
+    <parameters>
+      <parameter name="tokens">COMMA</parameter>
+    </parameters>
+  </check>
+
+  <!-- Single Space between ')' and '{' -->
+  <check customId="SingleSpaceBetweenRParenAndLCurlyBrace" level="error" class="org.scalastyle.file.RegexChecker" enabled="true">
+    <parameters><parameter name="regex">\)\{</parameter></parameters>
+    <customMessage><![CDATA[
+      Single Space between ')' and `{`.
+    ]]></customMessage>
+  </check>
+
+  <check customId="OmitBracesInCase" level="error" class="org.scalastyle.file.RegexChecker" enabled="true">
+    <parameters><parameter name="regex">case[^\n>]*=>\s*\{</parameter></parameters>
+    <customMessage>Omit braces in case clauses.</customMessage>
+  </check>
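+
+  <!-- For example (cf. RecordWriteStep.scala in this commit), write
+         case Some(records) =>
+           context.getSink(timestamp).sinkRecords(records, name)
+       rather than
+         case Some(records) => {
+           context.getSink(timestamp).sinkRecords(records, name)
+         }
+  -->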
+
+  <check level="error" class="org.scalastyle.scalariform.DeprecatedJavaChecker" enabled="true"></check>
+
+  <!-- ================================================================================ -->
+  <!--       rules we'd like to enforce, but haven't cleaned up the codebase yet        -->
+  <!-- ================================================================================ -->
+
+  <!-- We cannot turn the following two on, because it'd fail a lot of string interpolation use cases. -->
+  <!-- Ideally the following two rules should be configurable to rule out string interpolation. -->
+  <!--<check level="error" class="org.scalastyle.scalariform.NoWhitespaceBeforeLeftBracketChecker" enabled="false"></check>-->
+  <!--<check level="error" class="org.scalastyle.scalariform.NoWhitespaceAfterLeftBracketChecker" enabled="false"></check>-->
+
+  <!-- This breaks symbolic method names so we don't turn it on. -->
+  <!-- Maybe we should update it to allow basic symbolic names, and then we are good to go. -->
+  <!--<check level="error" class="org.scalastyle.scalariform.MethodNamesChecker" enabled="false">-->
+    <!--<parameters>-->
+    <!--<parameter name="regex"><![CDATA[^[a-z][A-Za-z0-9]*$]]></parameter>-->
+    <!--</parameters>-->
+  <!--</check>-->
+
+  <!-- Should turn this on, but we have a few places that need to be fixed first -->
+  <!--<check level="error" class="org.scalastyle.scalariform.EqualsHashCodeChecker" enabled="true"></check>-->
+
+  <!-- ================================================================================ -->
+  <!--                               rules we don't want                                -->
+  <!-- ================================================================================ -->
+
+  <!--<check level="error" class="org.scalastyle.scalariform.IllegalImportsChecker" enabled="false">-->
+    <!--<parameters><parameter name="illegalImports"><![CDATA[sun._,java.awt._]]></parameter></parameters>-->
+  <!--</check>-->
+
+  <!-- We want the opposite of this: NewLineAtEofChecker -->
+  <check level="error" class="org.scalastyle.file.NoNewLineAtEofChecker" enabled="false"></check>
+
+  <!-- This one complains about all kinds of random things. Disable. -->
+  <check level="error" class="org.scalastyle.scalariform.SimplifyBooleanExpressionChecker" enabled="false"></check>
+
+  <!-- We use return quite a bit for control flows and guards -->
+  <check level="error" class="org.scalastyle.scalariform.ReturnChecker" enabled="false"></check>
+
+  <!-- We use null a lot in low level code and to interface with 3rd party code -->
+  <check level="error" class="org.scalastyle.scalariform.NullChecker" enabled="false"></check>
+
+  <!-- Doesn't seem super big deal here ... -->
+  <check level="error" class="org.scalastyle.scalariform.NoCloneChecker" enabled="false"></check>
+
+  <!-- Doesn't seem super big deal here ... -->
+  <check level="error" class="org.scalastyle.file.FileLengthChecker" enabled="false">
+    <parameters><parameter name="maxFileLength">800</parameter></parameters>
+  </check>
+
+  <!-- Doesn't seem super big deal here ... -->
+  <check level="error" class="org.scalastyle.scalariform.NumberOfTypesChecker" enabled="false">
+    <parameters><parameter name="maxTypes">30</parameter></parameters>
+  </check>
+
+  <!-- Doesn't seem super big deal here ... -->
+  <check level="error" class="org.scalastyle.scalariform.CyclomaticComplexityChecker" enabled="false">
+    <parameters><parameter name="maximum">10</parameter></parameters>
+  </check>
+
+  <!-- Doesn't seem super big deal here ... -->
+  <check level="error" class="org.scalastyle.scalariform.MethodLengthChecker" enabled="false">
+    <parameters><parameter name="maxLength">50</parameter></parameters>
+  </check>
+
+  <!-- Not exactly feasible to enforce this right now. -->
+  <!-- It is also infrequent that somebody introduces a new class with a lot of methods. -->
+  <check level="error" class="org.scalastyle.scalariform.NumberOfMethodsInTypeChecker" enabled="false">
+    <parameters><parameter name="maxMethods"><![CDATA[30]]></parameter></parameters>
+  </check>
+
+  <!-- Doesn't seem super big deal here, and we have a lot of magic numbers ... -->
+  <check level="error" class="org.scalastyle.scalariform.MagicNumberChecker" enabled="false">
+    <parameters><parameter name="ignore">-1,0,1,2,3</parameter></parameters>
+  </check>
+
+</scalastyle>
