Repository: incubator-griffin
Updated Branches:
  refs/heads/master 0dd1d3599 -> 1f984da1a


http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/1f984da1/measure/src/test/scala/org/apache/griffin/measure/process/StreamingProcessTest.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/process/StreamingProcessTest.scala b/measure/src/test/scala/org/apache/griffin/measure/process/StreamingProcessTest.scala
deleted file mode 100644
index caecc9c..0000000
--- a/measure/src/test/scala/org/apache/griffin/measure/process/StreamingProcessTest.scala
+++ /dev/null
@@ -1,147 +0,0 @@
-///*
-//Licensed to the Apache Software Foundation (ASF) under one
-//or more contributor license agreements.  See the NOTICE file
-//distributed with this work for additional information
-//regarding copyright ownership.  The ASF licenses this file
-//to you under the Apache License, Version 2.0 (the
-//"License"); you may not use this file except in compliance
-//with the License.  You may obtain a copy of the License at
-//
-//  http://www.apache.org/licenses/LICENSE-2.0
-//
-//Unless required by applicable law or agreed to in writing,
-//software distributed under the License is distributed on an
-//"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-//KIND, either express or implied.  See the License for the
-//specific language governing permissions and limitations
-//under the License.
-//*/
-//package org.apache.griffin.measure.process
-//
-//import org.apache.griffin.measure.config.params._
-//import org.apache.griffin.measure.config.params.env._
-//import org.apache.griffin.measure.config.params.user._
-//import org.apache.griffin.measure.config.reader.ParamReaderFactory
-//import org.apache.griffin.measure.config.validator.AllParamValidator
-//import org.apache.griffin.measure.log.Loggable
-//import org.apache.griffin.measure.persist.PersistThreadPool
-//import org.junit.runner.RunWith
-//import org.scalatest.junit.JUnitRunner
-//import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
-//
-//import scala.util.{Failure, Success, Try}
-//
-//@RunWith(classOf[JUnitRunner])
-//class StreamingProcessTest extends FunSuite with Matchers with BeforeAndAfter with Loggable {
-//
-//  val envFile = "src/test/resources/env-streaming.json"
-////  val confFile = "src/test/resources/config-test-accuracy-streaming-multids.json"
-//  val confFile = "src/test/resources/config-test-accuracy-streaming.json"
-////  val confFile = "src/test/resources/config-test-profiling-streaming.json"
-//
-//  val envFsType = "local"
-//  val userFsType = "local"
-//
-//  val args = Array(envFile, confFile)
-//
-//  var allParam: AllParam = _
-//
-//  before {
-//    // read param files
-//    val envParam = readParamFile[EnvParam](envFile, envFsType) match {
-//      case Success(p) => p
-//      case Failure(ex) => {
-//        error(ex.getMessage)
-//        sys.exit(-2)
-//      }
-//    }
-//    val userParam = readParamFile[UserParam](confFile, userFsType) match {
-//      case Success(p) => p
-//      case Failure(ex) => {
-//        error(ex.getMessage)
-//        sys.exit(-2)
-//      }
-//    }
-//    allParam = AllParam(envParam, userParam)
-//
-//    // validate param files
-//    validateParams(allParam) match {
-//      case Failure(ex) => {
-//        error(ex.getMessage)
-//        sys.exit(-3)
-//      }
-//      case _ => {
-//        info("params validation pass")
-//      }
-//    }
-//  }
-//
-//  test ("streaming process") {
-//    val procType = ProcessType(allParam.userParam.procType)
-//    val proc: DqProcess = procType match {
-//      case BatchProcessType => BatchDqProcess(allParam)
-//      case StreamingProcessType => StreamingDqProcess(allParam)
-//      case _ => {
-//        error(s"${procType} is unsupported process type!")
-//        sys.exit(-4)
-//      }
-//    }
-//
-//    // process init
-//    proc.init match {
-//      case Success(_) => {
-//        info("process init success")
-//      }
-//      case Failure(ex) => {
-//        error(s"process init error: ${ex.getMessage}")
-//        shutdown
-//        sys.exit(-5)
-//      }
-//    }
-//
-//    // process run
-//    proc.run match {
-//      case Success(_) => {
-//        info("process run success")
-//      }
-//      case Failure(ex) => {
-//        error(s"process run error: ${ex.getMessage}")
-//
-//        if (proc.retriable) {
-//          throw ex
-//        } else {
-//          shutdown
-//          sys.exit(-5)
-//        }
-//      }
-//    }
-//
-//    // process end
-//    proc.end match {
-//      case Success(_) => {
-//        info("process end success")
-//      }
-//      case Failure(ex) => {
-//        error(s"process end error: ${ex.getMessage}")
-//        shutdown
-//        sys.exit(-5)
-//      }
-//    }
-//
-//    shutdown
-//  }
-//
-//  private def readParamFile[T <: Param](file: String, fsType: String)(implicit m : Manifest[T]): Try[T] = {
-//    val paramReader = ParamReaderFactory.getParamReader(file, fsType)
-//    paramReader.readConfig[T]
-//  }
-//
-//  private def validateParams(allParam: AllParam): Try[Boolean] = {
-//    val allParamValidator = AllParamValidator()
-//    allParamValidator.validate(allParam)
-//  }
-//
-//  private def shutdown(): Unit = {
-//    PersistThreadPool.shutdown
-//  }
-//}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/1f984da1/measure/src/test/scala/org/apache/griffin/measure/sql/SqlTest.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/sql/SqlTest.scala b/measure/src/test/scala/org/apache/griffin/measure/sql/SqlTest.scala
deleted file mode 100644
index 7b23062..0000000
--- a/measure/src/test/scala/org/apache/griffin/measure/sql/SqlTest.scala
+++ /dev/null
@@ -1,125 +0,0 @@
-//package org.apache.griffin.measure.sql
-//
-//import org.apache.griffin.measure.config.params.user.EvaluateRuleParam
-//import org.apache.griffin.measure.rule.expr.{Expr, StatementExpr}
-//import org.apache.spark.sql.{DataFrame, SQLContext}
-//import org.apache.spark.sql.types.{ArrayType, IntegerType, StructField, StructType}
-//import org.apache.spark.{SparkConf, SparkContext}
-//import org.junit.runner.RunWith
-//import org.scalatest.junit.JUnitRunner
-//import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
-//
-//@RunWith(classOf[JUnitRunner])
-//class SqlTest extends FunSuite with BeforeAndAfter with Matchers {
-//
-//  var sc: SparkContext = _
-//  var sqlContext: SQLContext = _
-//
-//  before {
-//    val conf = new SparkConf().setMaster("local[*]").setAppName("test")
-//    sc = new SparkContext(conf)
-//    sqlContext = new SQLContext(sc)
-//  }
-//
-//  test ("spark sql") {
-//
-//    val squared = (s: Int) => {
-//      s * s
-//    }
-//    sqlContext.udf.register("square", squared)
-//
-//    val a = sqlContext.range(1, 20)
-//    a.show
-//
-//    a.registerTempTable("test")
-//
-//    val table = sqlContext.sql("select * from test")
-//    table.show()
-//
-//    val result = sqlContext.sql("select id, square(id) as id_squared from test")
-//    result.show()
-//
-//  }
-//
-//  test ("json") {
-//    def jsonToDataFrame(json: String, schema: Option[StructType] = None): DataFrame = {
-//      val reader = sqlContext.read
-//      val rd = schema match {
-//        case Some(scm) => reader.schema(scm)
-//        case _ => reader
-//      }
-//      rd.json(sc.parallelize(json :: Nil))
-//    }
-//
-//    val json =
-//      """
-//        |{
-//        |  "a": [
-//        |     1, 2, 3
-//        |  ]
-//        |}
-//      """.stripMargin
-//
-////    val bt = StructField("b", IntegerType)
-////    val at = StructField("a", StructType(bt :: Nil))
-////    val schema = StructType(at :: Nil)
-//
-//    val at = StructField("a", ArrayType(IntegerType))
-//    val schema = StructType(at :: Nil)
-//
-//    val df = jsonToDataFrame(json, Some(schema))
-//
-//    df.registerTempTable("json")
-//
-//    val result = sqlContext.sql("select a[1] from json")
-//    result.show
-//
-//  }
-//
-//  test ("json file") {
-//
-//    // read json file directly
-////    val filePath = "src/test/resources/test-data.jsonFile"
-////    val reader = sqlContext.read
-////    val df = reader.json(filePath)
-////    df.show
-////
-////    df.registerTempTable("ttt")
-////    val result = sqlContext.sql("select * from ttt where list[0].c = 11")
-////    result.show
-//
-//    // whole json file
-////    val filePath = "src/test/resources/test-data0.json"
-//////    val filePath = "hdfs://localhost/test/file/t1.json"
-////    val jsonRDD = sc.wholeTextFiles(s"${filePath},${filePath}").map(x => x._2)
-////    val namesJson = sqlContext.read.json(jsonRDD)
-////    namesJson.printSchema
-////    namesJson.show
-//
-//    // read text file then convert to json
-//    val filePath = "src/test/resources/test-data.jsonFile"
-//    val rdd = sc.textFile(filePath)
-//    val reader = sqlContext.read
-//    val df = reader.json(rdd)
-//    df.show
-//    df.printSchema
-//
-//    df.registerTempTable("ttt")
-//    val result = sqlContext.sql("select * from ttt where list[0].c = 11")
-//    result.show
-//
-//    // udf
-//    val slice = (arr: Seq[Long], f: Int, e: Int) => arr.slice(f, e)
-////    val slice = (arr: Seq[Long]) => arr.slice(0, 1)
-//    sqlContext.udf.register("slice", slice)
-//
-//    val result1 = sqlContext.sql("select slice(t, 0, 2) from ttt")
-//    result1.show
-//
-//  }
-//
-//  test ("accu sql") {
-////    val file1 =
-//  }
-//
-//}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/1f984da1/measure/src/test/scala/org/apache/griffin/measure/utils/JsonUtilTest.scala
----------------------------------------------------------------------
diff --git a/measure/src/test/scala/org/apache/griffin/measure/utils/JsonUtilTest.scala b/measure/src/test/scala/org/apache/griffin/measure/utils/JsonUtilTest.scala
deleted file mode 100644
index 233d78c..0000000
--- a/measure/src/test/scala/org/apache/griffin/measure/utils/JsonUtilTest.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-///*
-//Licensed to the Apache Software Foundation (ASF) under one
-//or more contributor license agreements.  See the NOTICE file
-//distributed with this work for additional information
-//regarding copyright ownership.  The ASF licenses this file
-//to you under the Apache License, Version 2.0 (the
-//"License"); you may not use this file except in compliance
-//with the License.  You may obtain a copy of the License at
-//
-//  http://www.apache.org/licenses/LICENSE-2.0
-//
-//Unless required by applicable law or agreed to in writing,
-//software distributed under the License is distributed on an
-//"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-//KIND, either express or implied.  See the License for the
-//specific language governing permissions and limitations
-//under the License.
-//*/
-//package org.apache.griffin.measure.utils
-//
-//import org.junit.runner.RunWith
-//import org.scalatest.junit.JUnitRunner
-//import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
-//
-//
-//@RunWith(classOf[JUnitRunner])
-//class JsonUtilTest extends FunSuite with Matchers with BeforeAndAfter {
-//
-//  val map = Map[String, Any](("name" -> "test"), ("age" -> 15))
-//  val json = """{"name":"test","age":15}"""
-//
-//  val person = JsonUtilTest.Person("test", 15)
-//
-//  test ("toJson 1") {
-//    val symbolMap = map.map(p => (Symbol(p._1), p._2))
-//    JsonUtil.toJson(symbolMap) should equal (json)
-//  }
-//
-//  test ("toJson 2") {
-//    JsonUtil.toJson(map) should equal (json)
-//  }
-//
-//  test ("toMap") {
-//    JsonUtil.toMap(json) should equal (map)
-//  }
-//
-//  test ("fromJson 1") {
-//    JsonUtil.fromJson[JsonUtilTest.Person](json) should equal (person)
-//  }
-//
-//  test ("fromJson 2") {
-//    val is = new java.io.ByteArrayInputStream(json.getBytes("utf-8"));
-//    JsonUtil.fromJson[JsonUtilTest.Person](is) should equal (person)
-//  }
-//
-//}
-//
-//object JsonUtilTest {
-//  case class Person(name: String, age: Int){}
-//}

Reply via email to