Github user rxin commented on a diff in the pull request:

    https://github.com/apache/spark/pull/4588#discussion_r25530015
  
    --- Diff: core/src/main/scala/org/apache/spark/rpc/RpcEnv.scala ---
    @@ -0,0 +1,343 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *    http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +
    +package org.apache.spark.rpc
    +
    +import java.net.URI
    +
    +import scala.concurrent.Future
    +import scala.concurrent.duration.FiniteDuration
    +import scala.reflect.ClassTag
    +
    +import org.apache.spark.{SecurityManager, SparkConf}
    +import org.apache.spark.util.Utils
    +
    +/**
    + * An RPC environment.
    + */
    +private[spark] trait RpcEnv {
    +
    +  private[rpc] def endpointRef(endpoint: RpcEndpoint): RpcEndpointRef
    +
    +  def scheduler: ActionScheduler
    +
    +  /**
    +   * Return the address that [[RpcEnv]] is listening to.
    +   */
    +  def address: RpcAddress
    +
    +  /**
    +   * Register a [[RpcEndpoint]] with a name and return its 
[[RpcEndpointRef]].
    +   */
    +  def setupEndpoint(name: String, endpoint: RpcEndpoint): RpcEndpointRef
    +
    +  /**
    +   * Retrieve a [[RpcEndpointRef]] which is located in the driver via its 
name.
    +   */
    +  def setupDriverEndpointRef(name: String): RpcEndpointRef
    +
    +  /**
    +   * Retrieve the [[RpcEndpointRef]] represented by `url`.
    +   */
    +  def setupEndpointRefByUrl(url: String): RpcEndpointRef
    +
    +  /**
    +   * Retrieve the [[RpcEndpointRef]] represented by `systemName`, 
`address` and `endpointName`
    +   */
    +  def setupEndpointRef(
    +       systemName: String, address: RpcAddress, endpointName: String): 
RpcEndpointRef
    +
    +  /**
    +   * Stop [[RpcEndpoint]] specified by `endpoint`.
    +   */
    +  def stop(endpoint: RpcEndpointRef): Unit
    +
    +  /**
    +   * Shutdown this [[RpcEnv]] asynchronously. If you need to make sure
[[RpcEnv]] exits successfully,
    +   * call [[awaitTermination()]] straight after [[shutdown()]].
    +   */
    +  def shutdown(): Unit
    +
    +  /**
    +   * Wait until [[RpcEnv]] exits.
    +   *
    +   * TODO do we need a timeout parameter?
    +   */
    +  def awaitTermination(): Unit
    +
    +  /**
    +   * Create a URI used to create a [[RpcEndpointRef]]
    +   */
    +  def uriOf(systemName: String, address: RpcAddress, endpointName: 
String): String
    +}
    +
    +private[spark] case class RpcEnvConfig(
    +    conf: SparkConf,
    +    name: String,
    +    host: String,
    +    port: Int,
    +    securityManager: SecurityManager)
    +
    +/**
    + * A RpcEnv implementation must have a companion object with an
    + * `apply(config: RpcEnvConfig): RpcEnv` method so that it can be created 
via Reflection.
    + *
    + * {{{
    + * object MyCustomRpcEnv {
    + *   def apply(config: RpcEnvConfig): RpcEnv = {
    + *     ...
    + *   }
    + * }
    + * }}}
    + */
    +private[spark] object RpcEnv {
    +
    +  private def getRpcEnvCompanion(conf: SparkConf): AnyRef = {
    +    // Add more RpcEnv implementations here
    +    val rpcEnvNames = Map("akka" -> "org.apache.spark.rpc.akka.AkkaRpcEnv")
    +    val rpcEnvName = conf.get("spark.rpc", "akka")
    +    val rpcEnvClassName = rpcEnvNames.getOrElse(rpcEnvName.toLowerCase, 
rpcEnvName)
    +    val companion = Class.forName(
    +      rpcEnvClassName + "$", true, 
Utils.getContextOrSparkClassLoader).getField("MODULE$").get(null)
    +    companion
    +  }
    +
    +  def create(
    +       name: String,
    +       host: String,
    +       port: Int,
    +       conf: SparkConf,
    +       securityManager: SecurityManager): RpcEnv = {
    +    // Using Reflection to create the RpcEnv to avoid to depend on Akka 
directly
    +    val config = RpcEnvConfig(conf, name, host, port, securityManager)
    +    val companion = getRpcEnvCompanion(conf)
    +    companion.getClass.getMethod("apply", classOf[RpcEnvConfig]).
    +      invoke(companion, config).asInstanceOf[RpcEnv]
    +  }
    +
    +}
    +
    +/**
    + * An end point for the RPC that defines what functions to trigger given a 
message.
    + *
    + * RpcEndpoint will be guaranteed that `onStart`, `receive` and `onStop` 
will
    + * be called in sequence.
    + *
    + * The life-cycle will be:
    + *
    + * constructor onStart receive* onStop
    + *
    + * If any error is thrown from one of RpcEndpoint methods except 
`onError`, [[RpcEndpoint.onError]]
    + * will be invoked with the cause. If onError throws an error, [[RpcEnv]] 
will ignore it.
    + */
    +private[spark] trait RpcEndpoint {
    +
    +  /**
    +   * The [[RpcEnv]] that this [[RpcEndpoint]] is registered to.
    +   */
    +  val rpcEnv: RpcEnv
    +
    +  /**
    +   * Provide the implicit sender. `self` will become valid when `onStart` 
is called.
    +   *
    +   * Note: before `onStart` is called, [[RpcEndpoint]] has not yet been
registered and there is no
    +   * valid [[RpcEndpointRef]] for it, so don't call `self` before
`onStart` is called. In other
    +   * words, don't call [[RpcEndpointRef.send]] in the constructor of
[[RpcEndpoint]].
    +   */
    +  implicit final def self: RpcEndpointRef = {
    --- End diff --
    
    is there a case in which we would ever want a different sender? if not, i'd
say just remove this and always use the self sender


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at [email protected] or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to