This is an automated email from the ASF dual-hosted git repository. dongjoon pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push: new a6afd09f7f2f [SPARK-46205][CORE][TESTS][FOLLOWUP] Simplify PersistenceEngineBenchmark a6afd09f7f2f is described below commit a6afd09f7f2fca38b693eac243d5410708c1ef70 Author: Dongjoon Hyun <dh...@apple.com> AuthorDate: Sat Dec 2 11:27:34 2023 -0800 [SPARK-46205][CORE][TESTS][FOLLOWUP] Simplify PersistenceEngineBenchmark ### What changes were proposed in this pull request? This is a follow-up of #44113 to address a comment about simplifying the benchmark. ### Why are the changes needed? To simplify. ### Does this PR introduce _any_ user-facing change? No. ### How was this patch tested? Manually. ``` $ build/sbt "core/Test/runMain org.apache.spark.deploy.master.PersistenceEngineBenchmark" ... [info] OpenJDK 64-Bit Server VM 17.0.9+9-LTS on Mac OS X 14.2 [info] Apple M1 Max [info] 1000 Workers: Best Time(ms) Avg Time(ms) Stdev(ms) Rate(M/s) Per Row(ns) Relative [info] ------------------------------------------------------------------------------------------------------------------------------- [info] ZooKeeperPersistenceEngine with JavaSerializer 12602 12847 253 0.0 12601717.5 1.0X [info] ZooKeeperPersistenceEngine with KryoSerializer 12116 12130 13 0.0 12116373.8 1.0X [info] FileSystemPersistenceEngine with JavaSerializer 429 435 6 0.0 429374.9 29.3X [info] FileSystemPersistenceEngine with KryoSerializer 179 180 2 0.0 178795.7 70.5X [info] BlackHolePersistenceEngine 0 0 0 46.6 21.5 587273.6X [success] Total time: 126 s (02:06), completed Dec 1, 2023, 7:59:25 PM ``` ### Was this patch authored or co-authored using generative AI tooling? Closes #44118 from dongjoon-hyun/SPARK-46205-2. 
Authored-by: Dongjoon Hyun <dh...@apple.com> Signed-off-by: Dongjoon Hyun <dh...@apple.com> --- .../deploy/master/PersistenceEngineBenchmark.scala | 52 +++++++++------------- 1 file changed, 20 insertions(+), 32 deletions(-) diff --git a/core/src/test/scala/org/apache/spark/deploy/master/PersistenceEngineBenchmark.scala b/core/src/test/scala/org/apache/spark/deploy/master/PersistenceEngineBenchmark.scala index 730ae05fa146..2f8e9a8eff2d 100644 --- a/core/src/test/scala/org/apache/spark/deploy/master/PersistenceEngineBenchmark.scala +++ b/core/src/test/scala/org/apache/spark/deploy/master/PersistenceEngineBenchmark.scala @@ -46,8 +46,7 @@ import org.apache.spark.util.Utils object PersistenceEngineBenchmark extends BenchmarkBase { val conf = new SparkConf() - val serializerJava = new JavaSerializer(conf) - val serializerKryo = new KryoSerializer(conf) + val serializers = Seq(new JavaSerializer(conf), new KryoSerializer(conf)) val zkTestServer = new TestingServer(findFreePort(conf)) override def runBenchmarkSuite(mainArgs: Array[String]): Unit = { @@ -61,38 +60,27 @@ object PersistenceEngineBenchmark extends BenchmarkBase { runBenchmark("PersistenceEngineBenchmark") { val benchmark = new Benchmark(s"$numWorkers Workers", numWorkers, output = output) - benchmark.addCase("ZooKeeperPersistenceEngine with JavaSerializer", numIters) { _ => - val engine = new ZooKeeperPersistenceEngine(conf, serializerJava) - workers.foreach(engine.addWorker) - engine.read[WorkerInfo]("worker_") - workers.foreach(engine.removeWorker) - engine.close() - } - - benchmark.addCase("ZooKeeperPersistenceEngine with KryoSerializer", numIters) { _ => - val engine = new ZooKeeperPersistenceEngine(conf, serializerKryo) - workers.foreach(engine.addWorker) - engine.read[WorkerInfo]("worker_") - workers.foreach(engine.removeWorker) - engine.close() + serializers.foreach { serializer => + val serializerName = serializer.getClass.getSimpleName + benchmark.addCase(s"ZooKeeperPersistenceEngine with $serializerName", numIters) { _ =>
+ val engine = new ZooKeeperPersistenceEngine(conf, serializer) + workers.foreach(engine.addWorker) + engine.read[WorkerInfo]("worker_") + workers.foreach(engine.removeWorker) + engine.close() + } } - benchmark.addCase("FileSystemPersistenceEngine with JavaSerializer", numIters) { _ => - val dir = Utils.createTempDir().getAbsolutePath - val engine = new FileSystemPersistenceEngine(dir, serializerJava) - workers.foreach(engine.addWorker) - engine.read[WorkerInfo]("worker_") - workers.foreach(engine.removeWorker) - engine.close() - } - - benchmark.addCase("FileSystemPersistenceEngine with KryoSerializer", numIters) { _ => - val dir = Utils.createTempDir().getAbsolutePath - val engine = new FileSystemPersistenceEngine(dir, serializerKryo) - workers.foreach(engine.addWorker) - engine.read[WorkerInfo]("worker_") - workers.foreach(engine.removeWorker) - engine.close() + serializers.foreach { serializer => + val serializerName = serializer.getClass.getSimpleName + benchmark.addCase(s"FileSystemPersistenceEngine with $serializerName", numIters) { _ => + val dir = Utils.createTempDir().getAbsolutePath + val engine = new FileSystemPersistenceEngine(dir, serializer) + workers.foreach(engine.addWorker) + engine.read[WorkerInfo]("worker_") + workers.foreach(engine.removeWorker) + engine.close() + } } benchmark.addCase("BlackHolePersistenceEngine", numIters) { _ => --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org