Github user JoshRosen commented on a diff in the pull request:

    https://github.com/apache/spark/pull/4324#discussion_r23985360
  
    --- Diff: core/src/main/scala/org/apache/spark/TaskContext.scala ---
    @@ -15,112 +15,116 @@
      * limitations under the License.
      */
     
    -package org.apache.spark;
    +package org.apache.spark
     
    -import java.io.Serializable;
    +import java.io.Serializable
     
    -import scala.Function0;
    -import scala.Function1;
    -import scala.Unit;
    +import org.apache.spark.annotation.DeveloperApi
    +import org.apache.spark.executor.TaskMetrics
    +import org.apache.spark.util.TaskCompletionListener
     
    -import org.apache.spark.annotation.DeveloperApi;
    -import org.apache.spark.executor.TaskMetrics;
    -import org.apache.spark.util.TaskCompletionListener;
     
    -/**
    - * Contextual information about a task which can be read or mutated during
    - * execution. To access the TaskContext for a running task use
    - * TaskContext.get().
    - */
    -public abstract class TaskContext implements Serializable {
    +object TaskContext {
       /**
        * Return the currently active TaskContext. This can be called inside of
        * user functions to access contextual information about running tasks.
        */
    -  public static TaskContext get() {
    -    return taskContext.get();
    -  }
    +  def get(): TaskContext = taskContext.get
    +
    +  private val taskContext: ThreadLocal[TaskContext] = new 
ThreadLocal[TaskContext]
    +
    +  private[spark] def setTaskContext(tc: TaskContext): Unit = 
taskContext.set(tc)
    +
    +  private[spark] def unset(): Unit = taskContext.remove()
    +}
     
    -  private static ThreadLocal<TaskContext> taskContext =
    -    new ThreadLocal<TaskContext>();
     
    -  static void setTaskContext(TaskContext tc) {
    -    taskContext.set(tc);
    -  }
    +/**
    + * Contextual information about a task which can be read or mutated during
    + * execution. To access the TaskContext for a running task, use:
    + * {{{
    + *   org.apache.spark.TaskContext.get()
    + * }}}
    + */
    +abstract class TaskContext extends Serializable {
    +  // Note: TaskContext must NOT define a get method. Otherwise it will 
prevent the Scala compiler
    +  // from generating a static get method (based on the companion object's 
get method).
     
    -  static void unset() {
    -    taskContext.remove();
    -  }
    +  // Note: getters in this class are defined with parentheses to maintain 
backward compatibility.
     
       /**
    -   * Whether the task has completed.
    +   * Returns true if the task has completed.
        */
    -  public abstract boolean isCompleted();
    +  def isCompleted(): Boolean
     
       /**
    -   * Whether the task has been killed.
    +   * Returns true if the task has been killed.
        */
    -  public abstract boolean isInterrupted();
    +  def isInterrupted(): Boolean
     
    -  /** @deprecated use {@link #isRunningLocally()} */
    -  @Deprecated
    -  public abstract boolean runningLocally();
    +  /** @deprecated use { @link #isRunningLocally()}*/
    --- End diff --
    
    Scalastyle didn't like the whitespace here.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and would like it to be, or if the feature is enabled but not working,
please contact infrastructure at [email protected] or file a JIRA
ticket with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to