Github user viirya commented on a diff in the pull request:

    https://github.com/apache/spark/pull/22085#discussion_r209464679
  
    --- Diff: core/src/main/scala/org/apache/spark/api/python/PythonRunner.scala ---
    @@ -180,7 +183,42 @@ private[spark] abstract class BasePythonRunner[IN, OUT](
             dataOut.writeInt(partitionIndex)
             // Python version of driver
             PythonRDD.writeUTF(pythonVer, dataOut)
    +        // Init a GatewayServer to port current BarrierTaskContext to Python side.
    +        val isBarrier = context.isInstanceOf[BarrierTaskContext]
    +        val secret = if (isBarrier) {
    +          Utils.createSecret(env.conf)
    +        } else {
    +          ""
    +        }
    +        val gatewayServer: Option[GatewayServer] = if (isBarrier) {
    +          Some(new GatewayServer.GatewayServerBuilder()
    +            .entryPoint(context.asInstanceOf[BarrierTaskContext])
    +            .authToken(secret)
    +            .javaPort(0)
    +            .callbackClient(GatewayServer.DEFAULT_PYTHON_PORT, GatewayServer.defaultAddress(),
    +              secret)
    +            .build())
    +        } else {
    +          None
    +        }
    +        gatewayServer.map(_.start())
    +        gatewayServer.foreach { server =>
    +          context.addTaskCompletionListener(_ => server.shutdown())
    +        }
    +        val boundPort: Int = gatewayServer.map(_.getListeningPort).getOrElse(0)
    +        if (boundPort == -1) {
    +          val message = "GatewayServer to port BarrierTaskContext failed to bind to Java side."
    +          logError(message)
    +          throw new SparkException(message)
    +        } else {
    +          logDebug(s"Started GatewayServer to port BarrierTaskContext on port $boundPort.")
    +        }
             // Write out the TaskContextInfo
    --- End diff --
    
    This comment (`// Write out the TaskContextInfo`) should be moved too.
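
    For context on the block under discussion, here is a minimal, self-contained sketch of
    the Py4J GatewayServer lifecycle the diff follows. The `EntryPoint` object, the
    placeholder secret, and the explicit shutdown call are assumptions for illustration
    only; in the actual change the entry point is the BarrierTaskContext, the secret comes
    from Utils.createSecret(env.conf), and shutdown is tied to a task completion listener.

        import py4j.GatewayServer

        object GatewayServerSketch {
          // Any object can serve as the Py4J entry point; its public methods become
          // callable from the Python side once a client connects.
          object EntryPoint {
            def ping(): String = "pong"
          }

          def main(args: Array[String]): Unit = {
            val secret = "placeholder-secret"
            val server = new GatewayServer.GatewayServerBuilder()
              .entryPoint(EntryPoint)
              .authToken(secret)
              .javaPort(0) // 0 asks Py4J to bind an ephemeral port
              .callbackClient(GatewayServer.DEFAULT_PYTHON_PORT, GatewayServer.defaultAddress(),
                secret)
              .build()

            server.start()
            // getListeningPort returns -1 if the server failed to bind, which is what the
            // `boundPort == -1` check in the diff guards against.
            val boundPort = server.getListeningPort
            println(s"Py4J gateway bound to port $boundPort")

            // The diff performs this in a task completion listener instead.
            server.shutdown()
          }
        }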

