Repository: incubator-s2graph
Updated Branches:
refs/heads/master 6841d3456 -> a35cf9496
[S2GRAPH-93]: Remove MySQL JDBC driver.
JIRA:
[S2GRAPH-93] https://issues.apache.org/jira/browse/S2GRAPH-93
Pull Request:
Closes #65
Authors:
DO YUNG YOON: [email protected]
Project: http://git-wip-us.apache.org/repos/asf/incubator-s2graph/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-s2graph/commit/a35cf949
Tree: http://git-wip-us.apache.org/repos/asf/incubator-s2graph/tree/a35cf949
Diff: http://git-wip-us.apache.org/repos/asf/incubator-s2graph/diff/a35cf949
Branch: refs/heads/master
Commit: a35cf94969dc5cc7bba589fb9d07b8b9fb8653c0
Parents: 6841d34
Author: DO YUNG YOON <[email protected]>
Authored: Mon Aug 29 03:48:25 2016 +0900
Committer: DO YUNG YOON <[email protected]>
Committed: Mon Aug 29 03:48:25 2016 +0900
----------------------------------------------------------------------
CHANGES | 2 +
s2core/build.sbt | 1 -
s2core/src/main/resources/reference.conf | 5 +-
.../scala/org/apache/s2graph/core/Graph.scala | 4 +-
.../s2graph/core/Integrate/QueryTest.scala | 2 +-
.../s2graph/core/mysqls/ExperimentSpec.scala | 6 +-
s2counter_core/build.sbt | 2 +-
s2rest_netty/conf/reference.conf | 2 +-
s2rest_netty/src/main/resources/reference.conf | 2 +-
s2rest_play/conf/reference.conf | 2 +-
.../rest/play/benchmark/BenchmarkCommon.scala | 5 +
.../rest/play/benchmark/GraphUtilSpec.scala | 110 +++++++++----------
.../play/benchmark/SamplingBenchmarkSpec.scala | 8 +-
13 files changed, 78 insertions(+), 73 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-s2graph/blob/a35cf949/CHANGES
----------------------------------------------------------------------
diff --git a/CHANGES b/CHANGES
index e68a0e5..3aab524 100644
--- a/CHANGES
+++ b/CHANGES
@@ -149,6 +149,8 @@ Release 0.12.1 - unreleased
S2GRAPH-89: Change version number on project. (Committed by DOYUNG YOON).
S2GRAPH-91: Remove custom repository on project's build.sbt (Committed by
DOYUNG YOON).
+
+ S2GRAPH-93: Remove MySQL JDBC driver (Committed by DOYUNG YOON).
TEST
http://git-wip-us.apache.org/repos/asf/incubator-s2graph/blob/a35cf949/s2core/build.sbt
----------------------------------------------------------------------
diff --git a/s2core/build.sbt b/s2core/build.sbt
index 46c2795..02e5a0e 100644
--- a/s2core/build.sbt
+++ b/s2core/build.sbt
@@ -36,7 +36,6 @@ libraryDependencies ++= Seq(
"commons-pool" % "commons-pool" % "1.6",
"org.scalatest" %% "scalatest" % "2.2.4" % "test",
"org.scalikejdbc" %% "scalikejdbc" % "2.1.+",
- "mysql" % "mysql-connector-java" % "5.1.28",
"com.h2database" % "h2" % "1.4.192",
"com.github.danielwegener" % "logback-kafka-appender" % "0.0.4"
)
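
Note: deployments that still want a MySQL-backed metastore can re-add the connector in their own build; a minimal sketch, reusing the exact coordinates removed above (a newer 5.1.x release may be preferable):

    libraryDependencies += "mysql" % "mysql-connector-java" % "5.1.28"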
http://git-wip-us.apache.org/repos/asf/incubator-s2graph/blob/a35cf949/s2core/src/main/resources/reference.conf
----------------------------------------------------------------------
diff --git a/s2core/src/main/resources/reference.conf b/s2core/src/main/resources/reference.conf
index 1840306..86dfa67 100644
--- a/s2core/src/main/resources/reference.conf
+++ b/s2core/src/main/resources/reference.conf
@@ -46,8 +46,9 @@ cache.max.size=100000
# DB
s2graph.models.table.name = "models-dev"
-db.default.driver = "com.mysql.jdbc.Driver"
-db.default.url = "jdbc:mysql://"${host}":3306/graph_dev"
+
+db.default.driver = "org.h2.Driver"
+db.default.url="jdbc:h2:file:./var/metastore;MODE=MYSQL"
db.default.user = "graph"
db.default.password = "graph"
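
With this change the packaged default is an embedded H2 metastore. The old MySQL settings can still be supplied per deployment by overriding the db.default.* keys, e.g. in an application.conf that takes precedence over this reference.conf; a sketch using the values removed above (host and database name are illustrative):

    db.default.driver = "com.mysql.jdbc.Driver"
    db.default.url = "jdbc:mysql://"${host}":3306/graph_dev"
    db.default.user = "graph"
    db.default.password = "graph"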
http://git-wip-us.apache.org/repos/asf/incubator-s2graph/blob/a35cf949/s2core/src/main/scala/org/apache/s2graph/core/Graph.scala
----------------------------------------------------------------------
diff --git a/s2core/src/main/scala/org/apache/s2graph/core/Graph.scala b/s2core/src/main/scala/org/apache/s2graph/core/Graph.scala
index f4ce7b4..aee0e95 100644
--- a/s2core/src/main/scala/org/apache/s2graph/core/Graph.scala
+++ b/s2core/src/main/scala/org/apache/s2graph/core/Graph.scala
@@ -43,8 +43,8 @@ object Graph {
"hbase.table.name" -> "s2graph",
"hbase.table.compression.algorithm" -> "gz",
"phase" -> "dev",
- "db.default.driver" -> "com.mysql.jdbc.Driver",
- "db.default.url" -> "jdbc:mysql://localhost:3306/graph_dev",
+ "db.default.driver" -> "org.h2.Driver",
+ "db.default.url" -> "jdbc:h2:file:./var/metastore;MODE=MYSQL",
"db.default.password" -> "graph",
"db.default.user" -> "graph",
"cache.max.size" -> java.lang.Integer.valueOf(10000),
http://git-wip-us.apache.org/repos/asf/incubator-s2graph/blob/a35cf949/s2core/src/test/scala/org/apache/s2graph/core/Integrate/QueryTest.scala
----------------------------------------------------------------------
diff --git a/s2core/src/test/scala/org/apache/s2graph/core/Integrate/QueryTest.scala b/s2core/src/test/scala/org/apache/s2graph/core/Integrate/QueryTest.scala
index 8bdc99b..c0071fa 100644
--- a/s2core/src/test/scala/org/apache/s2graph/core/Integrate/QueryTest.scala
+++ b/s2core/src/test/scala/org/apache/s2graph/core/Integrate/QueryTest.scala
@@ -307,7 +307,7 @@ class QueryTest extends IntegrateCommon with BeforeAndAfterEach {
"label": "$testLabelName",
"direction": "in",
"offset": 0,
- "limit": -1
+ "limit": 10
}
]]
}""".stripMargin)
http://git-wip-us.apache.org/repos/asf/incubator-s2graph/blob/a35cf949/s2core/src/test/scala/org/apache/s2graph/core/mysqls/ExperimentSpec.scala
----------------------------------------------------------------------
diff --git a/s2core/src/test/scala/org/apache/s2graph/core/mysqls/ExperimentSpec.scala b/s2core/src/test/scala/org/apache/s2graph/core/mysqls/ExperimentSpec.scala
index 4a46e0e..229d9bd 100644
--- a/s2core/src/test/scala/org/apache/s2graph/core/mysqls/ExperimentSpec.scala
+++ b/s2core/src/test/scala/org/apache/s2graph/core/mysqls/ExperimentSpec.scala
@@ -40,10 +40,10 @@ class ExperimentSpec extends FlatSpec with Matchers with BeforeAndAfterAll {
sql"""DELETE FROM buckets""".update().apply()
sql"""DELETE FROM experiments""".update().apply()
- val expId = sql"""INSERT INTO experiments(service_id, service_name, `name`, description) VALUES(1, "s1", "exp1", "")""".updateAndReturnGeneratedKey().apply()
+ val expId = sql"""INSERT INTO experiments(service_id, service_name, name, description) VALUES(1, 's1', 'exp1', '')""".updateAndReturnGeneratedKey().apply()
sql"""INSERT INTO
buckets(experiment_id, modular, http_verb, api_path, request_body, impression_id)
- VALUES($expId, "1~100", "POST", "/a/b/c", "None", "imp1")""".update().apply()
+ VALUES($expId, '1~100', 'POST', '/a/b/c', 'None', 'imp1')""".update().apply()
}
@@ -63,7 +63,7 @@ class ExperimentSpec extends FlatSpec with Matchers with BeforeAndAfterAll {
implicit val session = AutoSession
- sql"""UPDATE buckets SET impression_id = "imp2" WHERE id = ${bucket.id}""".update().apply()
+ sql"""UPDATE buckets SET impression_id = 'imp2' WHERE id = ${bucket.id}""".update().apply()
}
// sleep ttl time
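
The quoting changes above are what make these statements run on H2: H2 follows the SQL standard and reads double-quoted tokens as identifiers, so string literals have to use single quotes (which MySQL also accepts), and the backquoted `name` column is simply left unquoted. A short scalikejdbc sketch of the portable form (table and values as in the test above):

    import scalikejdbc._

    implicit val session = AutoSession
    // Single-quoted literals work on both H2 and MySQL.
    sql"""INSERT INTO experiments(service_id, service_name, name, description)
          VALUES(1, 's1', 'exp1', '')""".update().apply()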
http://git-wip-us.apache.org/repos/asf/incubator-s2graph/blob/a35cf949/s2counter_core/build.sbt
----------------------------------------------------------------------
diff --git a/s2counter_core/build.sbt b/s2counter_core/build.sbt
index d4c0bdf..9a75faf 100644
--- a/s2counter_core/build.sbt
+++ b/s2counter_core/build.sbt
@@ -38,7 +38,7 @@ libraryDependencies ++= Seq(
"org.apache.hadoop" % "hadoop-hdfs" % Common.hadoopVersion,
"redis.clients" % "jedis" % "2.6.0",
"org.apache.kafka" % "kafka-clients" % "0.8.2.0",
- "mysql" % "mysql-connector-java" % "5.1.28",
+ "com.h2database" % "h2" % "1.4.192",
"org.scalikejdbc" %% "scalikejdbc" % "2.1.+",
"org.specs2" %% "specs2-core" % "3.6" % "test",
"org.scalatest" %% "scalatest" % "2.2.1" % "test"
http://git-wip-us.apache.org/repos/asf/incubator-s2graph/blob/a35cf949/s2rest_netty/conf/reference.conf
----------------------------------------------------------------------
diff --git a/s2rest_netty/conf/reference.conf b/s2rest_netty/conf/reference.conf
index 9b37eda..4a5080f 100644
--- a/s2rest_netty/conf/reference.conf
+++ b/s2rest_netty/conf/reference.conf
@@ -86,7 +86,7 @@ host=localhost
# DB
s2graph.models.table.name="models-dev"
hbase.zookeeper.quorum=${host}
-db.default.url="jdbc:mysql://"${host}":3306/graph_dev"
+
# Query server
is.query.server=true
is.write.server=true
http://git-wip-us.apache.org/repos/asf/incubator-s2graph/blob/a35cf949/s2rest_netty/src/main/resources/reference.conf
----------------------------------------------------------------------
diff --git a/s2rest_netty/src/main/resources/reference.conf b/s2rest_netty/src/main/resources/reference.conf
index 9b37eda..4a5080f 100644
--- a/s2rest_netty/src/main/resources/reference.conf
+++ b/s2rest_netty/src/main/resources/reference.conf
@@ -86,7 +86,7 @@ host=localhost
# DB
s2graph.models.table.name="models-dev"
hbase.zookeeper.quorum=${host}
-db.default.url="jdbc:mysql://"${host}":3306/graph_dev"
+
# Query server
is.query.server=true
is.write.server=true
http://git-wip-us.apache.org/repos/asf/incubator-s2graph/blob/a35cf949/s2rest_play/conf/reference.conf
----------------------------------------------------------------------
diff --git a/s2rest_play/conf/reference.conf b/s2rest_play/conf/reference.conf
index 0a54fb5..bda503c 100644
--- a/s2rest_play/conf/reference.conf
+++ b/s2rest_play/conf/reference.conf
@@ -86,7 +86,7 @@ host=localhost
# DB
s2graph.models.table.name="models-dev"
hbase.zookeeper.quorum=${host}
-db.default.url="jdbc:mysql://"${host}":3306/graph_dev"
+
# Query server
is.query.server=true
is.write.server=true
http://git-wip-us.apache.org/repos/asf/incubator-s2graph/blob/a35cf949/s2rest_play/test/org/apache/s2graph/rest/play/benchmark/BenchmarkCommon.scala
----------------------------------------------------------------------
diff --git a/s2rest_play/test/org/apache/s2graph/rest/play/benchmark/BenchmarkCommon.scala b/s2rest_play/test/org/apache/s2graph/rest/play/benchmark/BenchmarkCommon.scala
index 4662e48..240421c 100644
--- a/s2rest_play/test/org/apache/s2graph/rest/play/benchmark/BenchmarkCommon.scala
+++ b/s2rest_play/test/org/apache/s2graph/rest/play/benchmark/BenchmarkCommon.scala
@@ -19,7 +19,12 @@
package org.apache.s2graph.rest.play.benchmark
+import com.typesafe.config.{ConfigFactory, Config}
+import org.apache.s2graph.core.{Management, Graph}
import org.specs2.mutable.Specification
+import scalikejdbc.AutoSession
+
+import scala.concurrent.ExecutionContext
trait BenchmarkCommon extends Specification {
val wrapStr = s"\n=================================================="
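
Only the new imports are visible in this hunk; presumably the trait now bootstraps the graph and its H2 metastore itself instead of relying on a running Play FakeApplication. A rough sketch of such wiring under that assumption (names and the exact s2graph constructors are not taken from this commit):

    // Sketch only: load the merged config (now defaulting to H2) and expose
    // an ExecutionContext plus a SQL session for the metastore-backed models.
    implicit val ec = ExecutionContext.Implicits.global
    implicit val session = AutoSession
    val config: Config = ConfigFactory.load()
    // A Graph and a Management instance would then be built from this config.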
http://git-wip-us.apache.org/repos/asf/incubator-s2graph/blob/a35cf949/s2rest_play/test/org/apache/s2graph/rest/play/benchmark/GraphUtilSpec.scala
----------------------------------------------------------------------
diff --git a/s2rest_play/test/org/apache/s2graph/rest/play/benchmark/GraphUtilSpec.scala b/s2rest_play/test/org/apache/s2graph/rest/play/benchmark/GraphUtilSpec.scala
index 75026fa..737b828 100644
--- a/s2rest_play/test/org/apache/s2graph/rest/play/benchmark/GraphUtilSpec.scala
+++ b/s2rest_play/test/org/apache/s2graph/rest/play/benchmark/GraphUtilSpec.scala
@@ -22,7 +22,7 @@ package org.apache.s2graph.rest.play.benchmark
import org.apache.hadoop.hbase.util.Bytes
import org.apache.s2graph.core.GraphUtil
import org.apache.s2graph.core.types.{HBaseType, InnerVal, SourceVertexId}
-import play.api.test.{FakeApplication, PlaySpecification}
+import play.api.test.PlaySpecification
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
@@ -65,68 +65,68 @@ class GraphUtilSpec extends BenchmarkCommon with PlaySpecification {
}
"test murmur hash skew2" in {
- running(FakeApplication()) {
- import HBaseType._
- val testNum = 1000000L
- val regionCount = 40
- val window = Int.MaxValue / regionCount
- val rangeBytes = new ListBuffer[(List[Byte], List[Byte])]()
- for {
- i <- (0 until regionCount)
- } yield {
- val startKey = Bytes.toBytes(i * window)
- val endKey = Bytes.toBytes((i + 1) * window)
- rangeBytes += (startKey.toList -> endKey.toList)
- }
+
+ import HBaseType._
+ val testNum = 1000000L
+ val regionCount = 40
+ val window = Int.MaxValue / regionCount
+ val rangeBytes = new ListBuffer[(List[Byte], List[Byte])]()
+ for {
+ i <- (0 until regionCount)
+ } yield {
+ val startKey = Bytes.toBytes(i * window)
+ val endKey = Bytes.toBytes((i + 1) * window)
+ rangeBytes += (startKey.toList -> endKey.toList)
+ }
- val stats = new collection.mutable.HashMap[Int, ((List[Byte], List[Byte]), Long)]()
- val counts = new collection.mutable.HashMap[Short, Long]()
- stats += (0 -> (rangeBytes.head -> 0L))
-
- for (i <- (0L until testNum)) {
- val vertexId = SourceVertexId(DEFAULT_COL_ID, InnerVal.withLong(i, HBaseType.DEFAULT_VERSION))
- val bytes = vertexId.bytes
- val shortKey = GraphUtil.murmur3(vertexId.innerId.toIdString())
- val shortVal = counts.getOrElse(shortKey, 0L) + 1L
- counts += (shortKey -> shortVal)
- var j = 0
- var found = false
- while (j < rangeBytes.size && !found) {
- val (start, end) = rangeBytes(j)
- if (between(bytes, start.toArray, end.toArray)) {
- found = true
- }
- j += 1
- }
- val head = rangeBytes(j - 1)
- val key = j - 1
- val value = stats.get(key) match {
- case None => 0L
- case Some(v) => v._2 + 1
+ val stats = new collection.mutable.HashMap[Int, ((List[Byte], List[Byte]), Long)]()
+ val counts = new collection.mutable.HashMap[Short, Long]()
+ stats += (0 -> (rangeBytes.head -> 0L))
+
+ for (i <- (0L until testNum)) {
+ val vertexId = SourceVertexId(DEFAULT_COL_ID, InnerVal.withLong(i, HBaseType.DEFAULT_VERSION))
+ val bytes = vertexId.bytes
+ val shortKey = GraphUtil.murmur3(vertexId.innerId.toIdString())
+ val shortVal = counts.getOrElse(shortKey, 0L) + 1L
+ counts += (shortKey -> shortVal)
+ var j = 0
+ var found = false
+ while (j < rangeBytes.size && !found) {
+ val (start, end) = rangeBytes(j)
+ if (between(bytes, start.toArray, end.toArray)) {
+ found = true
}
- stats += (key -> (head, value))
+ j += 1
}
- val sorted = stats.toList.sortBy(kv => kv._2._2).reverse
- println(s"Index: StartBytes ~ EndBytes\tStartShortBytes ~
EndShortBytes\tStartShort ~ EndShort\tCount\tShortCount")
- sorted.foreach { case (idx, ((start, end), cnt)) =>
- val startShort = Bytes.toShort(start.take(2).toArray)
- val endShort = Bytes.toShort(end.take(2).toArray)
- val count = counts.count(t => startShort <= t._1 && t._1 < endShort)
- println(s"$idx: $start ~ $end\t${start.take(2)} ~
${end.take(2)}\t$startShort ~ $endShort\t$cnt\t$count")
-
+ val head = rangeBytes(j - 1)
+ val key = j - 1
+ val value = stats.get(key) match {
+ case None => 0L
+ case Some(v) => v._2 + 1
}
- println("\n" * 10)
- println(s"Index: StartBytes ~ EndBytes\tStartShortBytes ~
EndShortBytes\tStartShort ~ EndShort\tCount\tShortCount")
- stats.toList.sortBy(kv => kv._1).reverse.foreach { case (idx, ((start, end), cnt)) =>
- val startShort = Bytes.toShort(start.take(2).toArray)
- val endShort = Bytes.toShort(end.take(2).toArray)
- val count = counts.count(t => startShort <= t._1 && t._1 < endShort)
- println(s"$idx: $start ~ $end\t${start.take(2)} ~
${end.take(2)}\t$startShort ~ $endShort\t$cnt\t$count")
+ stats += (key ->(head, value))
+ }
+ val sorted = stats.toList.sortBy(kv => kv._2._2).reverse
+ println(s"Index: StartBytes ~ EndBytes\tStartShortBytes ~
EndShortBytes\tStartShort ~ EndShort\tCount\tShortCount")
+ sorted.foreach { case (idx, ((start, end), cnt)) =>
+ val startShort = Bytes.toShort(start.take(2).toArray)
+ val endShort = Bytes.toShort(end.take(2).toArray)
+ val count = counts.count(t => startShort <= t._1 && t._1 < endShort)
+ println(s"$idx: $start ~ $end\t${start.take(2)} ~
${end.take(2)}\t$startShort ~ $endShort\t$cnt\t$count")
- }
}
+ println("\n" * 10)
+ println(s"Index: StartBytes ~ EndBytes\tStartShortBytes ~
EndShortBytes\tStartShort ~ EndShort\tCount\tShortCount")
+ stats.toList.sortBy(kv => kv._1).reverse.foreach { case (idx, ((start, end), cnt)) =>
+ val startShort = Bytes.toShort(start.take(2).toArray)
+ val endShort = Bytes.toShort(end.take(2).toArray)
+ val count = counts.count(t => startShort <= t._1 && t._1 < endShort)
+ println(s"$idx: $start ~ $end\t${start.take(2)} ~
${end.take(2)}\t$startShort ~ $endShort\t$cnt\t$count")
+
+ }
+
true
}
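
The loop in this hunk relies on a between helper (defined elsewhere in the spec, not shown here) that tests whether a serialized vertex id falls inside a region's [start, end) key range. A minimal sketch of such a check with HBase's Bytes comparator, for readers following the skew calculation (an illustration, not the commit's code):

    import org.apache.hadoop.hbase.util.Bytes

    def between(bytes: Array[Byte], start: Array[Byte], end: Array[Byte]): Boolean =
      Bytes.compareTo(start, bytes) <= 0 && Bytes.compareTo(bytes, end) < 0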
http://git-wip-us.apache.org/repos/asf/incubator-s2graph/blob/a35cf949/s2rest_play/test/org/apache/s2graph/rest/play/benchmark/SamplingBenchmarkSpec.scala
----------------------------------------------------------------------
diff --git a/s2rest_play/test/org/apache/s2graph/rest/play/benchmark/SamplingBenchmarkSpec.scala b/s2rest_play/test/org/apache/s2graph/rest/play/benchmark/SamplingBenchmarkSpec.scala
index 1cda277..b5d6691 100644
--- a/s2rest_play/test/org/apache/s2graph/rest/play/benchmark/SamplingBenchmarkSpec.scala
+++ b/s2rest_play/test/org/apache/s2graph/rest/play/benchmark/SamplingBenchmarkSpec.scala
@@ -26,9 +26,8 @@ import scala.util.Random
class SamplingBenchmarkSpec extends BenchmarkCommon with PlaySpecification {
"sample" should {
- implicit val app = FakeApplication()
- "sample benchmark" in new WithApplication(app) {
+ "sample benchmark" in {
@tailrec
def randomInt(n: Int, range: Int, set: Set[Int] = Set.empty[Int]): Set[Int] = {
if (set.size == n) set
@@ -61,7 +60,7 @@ class SamplingBenchmarkSpec extends BenchmarkCommon with PlaySpecification {
while (m < num) {
val u = Random.nextDouble()
- if ( (N - t)*u < num - m) {
+ if ((N - t) * u < num - m) {
sampled = ls(t) :: sampled
m += 1
}
@@ -99,8 +98,7 @@ class SamplingBenchmarkSpec extends BenchmarkCommon with PlaySpecification {
val sampled = rngSample(testNum, testData)
}
}
+ true
}
-
-
}
}
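
For reference, the condition reformatted above, (N - t) * u < num - m, is the acceptance test of classic selection sampling (Knuth's Algorithm S): scan the data once and keep each element with probability (still needed) / (still remaining). A self-contained sketch under assumed names (the spec's rngSample may differ in detail):

    import scala.util.Random

    def selectionSample[T](num: Int, ls: IndexedSeq[T]): List[T] = {
      val N = ls.size
      var t = 0                        // elements seen so far
      var m = 0                        // elements sampled so far
      var sampled = List.empty[T]
      while (m < num && t < N) {
        val u = Random.nextDouble()
        if ((N - t) * u < num - m) {   // accept with probability (num - m) / (N - t)
          sampled = ls(t) :: sampled
          m += 1
        }
        t += 1
      }
      sampled.reverse
    }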