Fix long lines
Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/94b5881e Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/94b5881e Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/94b5881e Branch: refs/heads/master Commit: 94b5881ee9d6c67f096ea9c2891a63978f256394 Parents: 5a864e3 Author: Aaron Davidson <[email protected]> Authored: Fri Dec 6 00:22:00 2013 -0800 Committer: Aaron Davidson <[email protected]> Committed: Fri Dec 6 00:22:00 2013 -0800 ---------------------------------------------------------------------- .../main/scala/org/apache/spark/deploy/client/Client.scala | 6 ++++-- .../main/scala/org/apache/spark/deploy/worker/Worker.scala | 6 ++++-- .../scala/org/apache/spark/storage/BlockObjectWriter.scala | 2 +- core/src/main/scala/org/apache/spark/util/AkkaUtils.scala | 8 +++----- 4 files changed, 12 insertions(+), 10 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/94b5881e/core/src/main/scala/org/apache/spark/deploy/client/Client.scala ---------------------------------------------------------------------- diff --git a/core/src/main/scala/org/apache/spark/deploy/client/Client.scala b/core/src/main/scala/org/apache/spark/deploy/client/Client.scala index f60e56d..d0d65ca 100644 --- a/core/src/main/scala/org/apache/spark/deploy/client/Client.scala +++ b/core/src/main/scala/org/apache/spark/deploy/client/Client.scala @@ -104,8 +104,10 @@ private[spark] class Client( activeMasterUrl = url master = context.actorSelection(Master.toAkkaUrl(activeMasterUrl)) masterAddress = activeMasterUrl match { - case Master.sparkUrlRegex(host, port) => Address("akka.tcp", Master.systemName, host, port.toInt) - case x => throw new SparkException("Invalid spark URL:"+x) + case Master.sparkUrlRegex(host, port) => + Address("akka.tcp", Master.systemName, host, port.toInt) + case x =>
+ throw new SparkException("Invalid spark URL: " + x) } } http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/94b5881e/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala ---------------------------------------------------------------------- diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala index 808b54c..87531b6 100644 --- a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala +++ b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala @@ -138,8 +138,10 @@ private[spark] class Worker( activeMasterWebUiUrl = uiUrl master = context.actorSelection(Master.toAkkaUrl(activeMasterUrl)) masterAddress = activeMasterUrl match { - case Master.sparkUrlRegex(_host, _port) => Address("akka.tcp", Master.systemName, _host, _port.toInt) - case x => throw new SparkException("Invalid spark URL:"+x) + case Master.sparkUrlRegex(_host, _port) => + Address("akka.tcp", Master.systemName, _host, _port.toInt) + case x => + throw new SparkException("Invalid spark URL: " + x) } connected = true } http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/94b5881e/core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala ---------------------------------------------------------------------- diff --git a/core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala b/core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala index df33f6b..b4451fc 100644 --- a/core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala +++ b/core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala @@ -44,7 +44,7 @@ abstract class BlockObjectWriter(val blockId: BlockId) { * Flush the partial writes and commit them as a single atomic block. Return the * number of bytes written for this commit. */ - def commit(): LongSpark + def commit(): Long /** * Reverts writes that haven't been flushed yet. 
Callers should invoke this function http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/94b5881e/core/src/main/scala/org/apache/spark/util/AkkaUtils.scala ---------------------------------------------------------------------- diff --git a/core/src/main/scala/org/apache/spark/util/AkkaUtils.scala b/core/src/main/scala/org/apache/spark/util/AkkaUtils.scala index 9f3f163..74133ce 100644 --- a/core/src/main/scala/org/apache/spark/util/AkkaUtils.scala +++ b/core/src/main/scala/org/apache/spark/util/AkkaUtils.scala @@ -17,11 +17,8 @@ package org.apache.spark.util -import akka.actor.{IndestructibleActorSystem, ActorSystem, ExtendedActorSystem} +import akka.actor.{ActorSystem, ExtendedActorSystem, IndestructibleActorSystem} import com.typesafe.config.ConfigFactory -import scala.concurrent.duration._ -import scala.concurrent.Await -import akka.remote.RemoteActorRefProvider /** * Various utility classes for working with Akka. @@ -47,7 +44,8 @@ private[spark] object AkkaUtils { val akkaTimeout = System.getProperty("spark.akka.timeout", "100").toInt val akkaFrameSize = System.getProperty("spark.akka.frameSize", "10").toInt - val lifecycleEvents = if (System.getProperty("spark.akka.logLifecycleEvents", "false").toBoolean) "on" else "off" + val lifecycleEvents = + if (System.getProperty("spark.akka.logLifecycleEvents", "false").toBoolean) "on" else "off" val akkaHeartBeatPauses = System.getProperty("spark.akka.heartbeat.pauses", "600").toInt val akkaFailureDetector =
