component.AbstractLifeCycle: FAILED SelectChannelConnector@0.0.0.0:4040: java.net.BindException: Address already in use

2015-01-14 Thread Jianguo Li
Hi,

I am using the sbt tool to build and run the scala tests related to spark.
In my /src/test/scala directory, there are two test classes (TestA, TestB),
both of which use the class in Spark for creating SparkContext, something
like

trait LocalTestSparkContext extends BeforeAndAfterAll { self: Suite =>
  @transient var sc: SparkContext = _

  override def beforeAll() {
super.beforeAll()
val conf = new SparkConf()
  .setMaster("local[2]")
  .setAppName("LocalSparkUnitTest")
sc = new SparkContext(conf)
  }

  override def afterAll() {
if (sc != null) {
  sc.stop()
}
super.afterAll()
  }
}

So, TestA and TestB are defined as

class TestA extends FunSuite with LocalTestSparkContext
class TestB extends FunSuite with LocalTestSparkContext

However, when I built the project using sbt and ran sbt test, I got the
following error. However, no error occurred if I only had one test. Is this
related to the SparkContext? Only one sc should be active? However, I
thought the LocalTestSparkContext should already take care of this since it
stops sc at the end of each class. I am totally lost, could someone let me
know what is the issue and how to resolve it? Thanks a lot.

15/01/14 14:12:43 WARN component.AbstractLifeCycle: FAILED
SelectChannelConnector@0.0.0.0:4040: java.net.BindException: Address
already in use: bind
java.net.BindException: Address already in use: bind
at sun.nio.ch.Net.bind0(Native Method)
at sun.nio.ch.Net.bind(Unknown Source)
at sun.nio.ch.Net.bind(Unknown Source)
at sun.nio.ch.ServerSocketChannelImpl.bind(Unknown Source)
at sun.nio.ch.ServerSocketAdaptor.bind(Unknown Source)
at
org.eclipse.jetty.server.nio.SelectChannelConnector.open(SelectChannelConnector.java:187)
at
org.eclipse.jetty.server.AbstractConnector.doStart(AbstractConnector.java:316)
at
org.eclipse.jetty.server.nio.SelectChannelConnector.doStart(SelectChannelConnector.java:265)
at
org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
at org.eclipse.jetty.server.Server.doStart(Server.java:293)
at
org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
at
org.apache.spark.ui.JettyUtils$.org$apache$spark$ui$JettyUtils$$connect$1(JettyUtils.scala:195)
at org.apache.spark.ui.JettyUtils$$anonfun$4.apply(JettyUtils.scala:205)
at org.apache.spark.ui.JettyUtils$$anonfun$4.apply(JettyUtils.scala:205)
at
org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1504)
at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1495)
at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:205)
at org.apache.spark.ui.WebUI.bind(WebUI.scala:102)
at org.apache.spark.SparkContext$$anonfun$10.apply(SparkContext.scala:234)
at org.apache.spark.SparkContext$$anonfun$10.apply(SparkContext.scala:234)
at scala.Option.foreach(Option.scala:236)
at org.apache.spark.SparkContext.init(SparkContext.scala:234)
at
com.unittest.LocalTestSparkContext$class.beforeAll(LocalTestSparkContext.scala:35)
at com.unittestt.TestB.beforeAll(TestB.scala:14)
at
org.scalatest.BeforeAndAfterAll$class.beforeAll(BeforeAndAfterAll.scala:187)
at com.unittest.TestB.beforeAll(TestB.scala:14)
at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:253)
at com.unittest.TestB.run(TestB.scala:14)
at org.scalatest.tools.Framework.org
$scalatest$tools$Framework$$runSuite(Framework.scala:444)
at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:651)
at sbt.TestRunner.runTest$1(TestFramework.scala:76)
at sbt.TestRunner.run(TestFramework.scala:85)
at
sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:202)
at
sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:202)
at
sbt.TestFramework$.sbt$TestFramework$$withContextLoader(TestFramework.scala:185)
at
sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:202)
at
sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:202)
at sbt.TestFunction.apply(TestFramework.scala:207)
at sbt.Tests$$anonfun$9.apply(Tests.scala:216)
at sbt.Tests$$anonfun$9.apply(Tests.scala:216)
at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:44)
at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:44)
at sbt.std.Transform$$anon$4.work(System.scala:63)
at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:226)
at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:226)
at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:17)
at sbt.Execute.work(Execute.scala:235)
at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:226)
at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:226)
at
sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:159)
at sbt.CompletionService$$anon$2.call(CompletionService.scala:28)
at java.util.concurrent.FutureTask.run(Unknown Source

Re: component.AbstractLifeCycle: FAILED SelectChannelConnector@0.0.0.0:4040: java.net.BindException: Address already in use

2015-01-14 Thread Jianguo Li
I solved the issue. In case anyone else is looking for an answer, by
default, scalatest executes all the tests in parallel. To disable this,
just put the following line in your build.sbt

parallelExecution in Test := false

Thanks

On Wed, Jan 14, 2015 at 2:30 PM, Jianguo Li flyingfromch...@gmail.com
wrote:

 Hi,

 I am using the sbt tool to build and run the scala tests related to spark.
 In my /src/test/scala directory, there are two test classes (TestA, TestB),
 both of which use the class in Spark for creating SparkContext, something
 like

 trait LocalTestSparkContext extends BeforeAndAfterAll { self: Suite =>
   @transient var sc: SparkContext = _

   override def beforeAll() {
 super.beforeAll()
 val conf = new SparkConf()
   .setMaster("local[2]")
   .setAppName("LocalSparkUnitTest")
 sc = new SparkContext(conf)
   }

   override def afterAll() {
 if (sc != null) {
   sc.stop()
 }
 super.afterAll()
   }
 }

 So, TestA and TestB are defined as

 class TestA extends FunSuite with LocalTestSparkContext
 class TestB extends FunSuite with LocalTestSparkContext

 However, when I built the project using sbt and ran sbt test, I got the
 following error. However, no error occurred if I only had one test. Is this
 related to the SparkContext? Only one sc should be active? However, I
 thought the LocalTestSparkContext should already take care of this since it
 stops sc at the end of each class. I am totally lost, could someone let me
 know what is the issue and how to resolve it? Thanks a lot.

 15/01/14 14:12:43 WARN component.AbstractLifeCycle: FAILED
 SelectChannelConnector@0.0.0.0:4040: java.net.BindException: Address
 already in use: bind
 java.net.BindException: Address already in use: bind
 at sun.nio.ch.Net.bind0(Native Method)
 at sun.nio.ch.Net.bind(Unknown Source)
 at sun.nio.ch.Net.bind(Unknown Source)
 at sun.nio.ch.ServerSocketChannelImpl.bind(Unknown Source)
 at sun.nio.ch.ServerSocketAdaptor.bind(Unknown Source)
 at
 org.eclipse.jetty.server.nio.SelectChannelConnector.open(SelectChannelConnector.java:187)
 at
 org.eclipse.jetty.server.AbstractConnector.doStart(AbstractConnector.java:316)
 at
 org.eclipse.jetty.server.nio.SelectChannelConnector.doStart(SelectChannelConnector.java:265)
 at
 org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
 at org.eclipse.jetty.server.Server.doStart(Server.java:293)
 at
 org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
 at
 org.apache.spark.ui.JettyUtils$.org$apache$spark$ui$JettyUtils$$connect$1(JettyUtils.scala:195)
 at org.apache.spark.ui.JettyUtils$$anonfun$4.apply(JettyUtils.scala:205)
 at org.apache.spark.ui.JettyUtils$$anonfun$4.apply(JettyUtils.scala:205)
 at
 org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1504)
 at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
 at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1495)
 at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:205)
 at org.apache.spark.ui.WebUI.bind(WebUI.scala:102)
 at org.apache.spark.SparkContext$$anonfun$10.apply(SparkContext.scala:234)
 at org.apache.spark.SparkContext$$anonfun$10.apply(SparkContext.scala:234)
 at scala.Option.foreach(Option.scala:236)
 at org.apache.spark.SparkContext.init(SparkContext.scala:234)
 at
 com.unittest.LocalTestSparkContext$class.beforeAll(LocalTestSparkContext.scala:35)
 at com.unittestt.TestB.beforeAll(TestB.scala:14)
 at
 org.scalatest.BeforeAndAfterAll$class.beforeAll(BeforeAndAfterAll.scala:187)
 at com.unittest.TestB.beforeAll(TestB.scala:14)
 at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:253)
 at com.unittest.TestB.run(TestB.scala:14)
 at org.scalatest.tools.Framework.org
 $scalatest$tools$Framework$$runSuite(Framework.scala:444)
 at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:651)
 at sbt.TestRunner.runTest$1(TestFramework.scala:76)
 at sbt.TestRunner.run(TestFramework.scala:85)
 at
 sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:202)
 at
 sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:202)
 at
 sbt.TestFramework$.sbt$TestFramework$$withContextLoader(TestFramework.scala:185)
 at
 sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:202)
 at
 sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:202)
 at sbt.TestFunction.apply(TestFramework.scala:207)
 at sbt.Tests$$anonfun$9.apply(Tests.scala:216)
 at sbt.Tests$$anonfun$9.apply(Tests.scala:216)
 at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:44)
 at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:44)
 at sbt.std.Transform$$anon$4.work(System.scala:63)
 at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:226)
 at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:226)
 at sbt.ErrorHandling