allisonwang-db commented on a change in pull request #33536:
URL: https://github.com/apache/spark/pull/33536#discussion_r687084081



##########
File path: core/src/main/scala/org/apache/spark/errors/SparkCoreErrors.scala
##########
@@ -0,0 +1,125 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.errors
+
+import java.io.EOFException
+
+import org.apache.spark.{SparkException, TaskKilledException}
+import org.apache.spark.api.r.JVMObjectId
+
+/**
+ * Object for grouping error messages from (most) exceptions thrown during query execution.
+ */
+object SparkCoreErrors {
+  def juRemoveError(): Throwable = {
+    new UnsupportedOperationException("remove")
+  }
+
+  def unexpectedPy4JServerError(other: Object): Throwable = {
+    new RuntimeException(s"Unexpected Py4J server ${other.getClass}")
+  }
+
+  def cannotBeUsedDataOfTypeError(className: String): Throwable = {
+    new SparkException(s"Data of type $className cannot be used")
+  }
+
+  def unexpectedValuePairwiseRDDError(x: Seq[Array[Byte]]): Throwable = {
+    new SparkException("PairwiseRDD: unexpected value: " + x)
+  }
+
+  def unexpectedElementTypeError(other: Object): Throwable = {
+    new SparkException("Unexpected element type " + other.getClass)
+  }
+
+  def eofBeforePythonServerAcknowledgedError(): Throwable = {
+    new SparkException("EOF reached before Python server acknowledged")
+  }
+
+  def taskInterruptionError(reason: String): Throwable = {
+    new TaskKilledException(reason)
+  }
+
+  def crashedUnexpectedlyPythonWorkerError(eof: EOFException): Throwable = {

Review comment:
       pythonWorkerCrashedError

##########
File path: core/src/main/scala/org/apache/spark/errors/SparkCoreErrors.scala
##########
@@ -0,0 +1,125 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.errors
+
+import java.io.EOFException
+
+import org.apache.spark.{SparkException, TaskKilledException}
+import org.apache.spark.api.r.JVMObjectId
+
+/**
+ * Object for grouping error messages from (most) exceptions thrown during query execution.
+ */
+object SparkCoreErrors {
+  def juRemoveError(): Throwable = {

Review comment:
       removeUnsupportedError()

##########
File path: core/src/main/scala/org/apache/spark/errors/SparkCoreErrors.scala
##########
@@ -0,0 +1,125 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.errors
+
+import java.io.EOFException
+
+import org.apache.spark.{SparkException, TaskKilledException}
+import org.apache.spark.api.r.JVMObjectId
+
+/**
+ * Object for grouping error messages from (most) exceptions thrown during query execution.
+ */
+object SparkCoreErrors {
+  def juRemoveError(): Throwable = {
+    new UnsupportedOperationException("remove")
+  }
+
+  def unexpectedPy4JServerError(other: Object): Throwable = {
+    new RuntimeException(s"Unexpected Py4J server ${other.getClass}")
+  }
+
+  def cannotBeUsedDataOfTypeError(className: String): Throwable = {
+    new SparkException(s"Data of type $className cannot be used")
+  }
+
+  def unexpectedValuePairwiseRDDError(x: Seq[Array[Byte]]): Throwable = {
+    new SparkException("PairwiseRDD: unexpected value: " + x)
+  }
+
+  def unexpectedElementTypeError(other: Object): Throwable = {
+    new SparkException("Unexpected element type " + other.getClass)
+  }
+
+  def eofBeforePythonServerAcknowledgedError(): Throwable = {
+    new SparkException("EOF reached before Python server acknowledged")
+  }
+
+  def taskInterruptionError(reason: String): Throwable = {
+    new TaskKilledException(reason)
+  }
+
+  def crashedUnexpectedlyPythonWorkerError(eof: EOFException): Throwable = {
+    new SparkException("Python worker exited unexpectedly (crashed)", eof)
+  }
+
+  def serverSocketFailedError(message: String): Throwable = {
+    new SparkException(message)
+  }
+
+  def invalidPortNumberError(exceptionMessage: String): Throwable = {
+    new SparkException(exceptionMessage)
+  }
+
+  def failedToConnectBackPythonWorkerError(e: Exception): Throwable = {
+    new SparkException("Python worker failed to connect back.", e)
+  }
+
+  def eofExceptionWhileReadPortNumberFromAliveDaemonError(daemonModule: String): Throwable = {

Review comment:
       eofExceptionWhileReadingPortNumberFromDaemonOutputError

##########
File path: core/src/main/scala/org/apache/spark/errors/SparkCoreErrors.scala
##########
@@ -0,0 +1,125 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.errors
+
+import java.io.EOFException
+
+import org.apache.spark.{SparkException, TaskKilledException}
+import org.apache.spark.api.r.JVMObjectId
+
+/**
+ * Object for grouping error messages from (most) exceptions thrown during query execution.
+ */
+object SparkCoreErrors {
+  def juRemoveError(): Throwable = {
+    new UnsupportedOperationException("remove")
+  }
+
+  def unexpectedPy4JServerError(other: Object): Throwable = {
+    new RuntimeException(s"Unexpected Py4J server ${other.getClass}")
+  }
+
+  def cannotBeUsedDataOfTypeError(className: String): Throwable = {
+    new SparkException(s"Data of type $className cannot be used")
+  }
+
+  def unexpectedValuePairwiseRDDError(x: Seq[Array[Byte]]): Throwable = {
+    new SparkException("PairwiseRDD: unexpected value: " + x)
+  }
+
+  def unexpectedElementTypeError(other: Object): Throwable = {
+    new SparkException("Unexpected element type " + other.getClass)
+  }
+
+  def eofBeforePythonServerAcknowledgedError(): Throwable = {
+    new SparkException("EOF reached before Python server acknowledged")
+  }
+
+  def taskInterruptionError(reason: String): Throwable = {
+    new TaskKilledException(reason)
+  }
+
+  def crashedUnexpectedlyPythonWorkerError(eof: EOFException): Throwable = {
+    new SparkException("Python worker exited unexpectedly (crashed)", eof)
+  }
+
+  def serverSocketFailedError(message: String): Throwable = {
+    new SparkException(message)
+  }
+
+  def invalidPortNumberError(exceptionMessage: String): Throwable = {
+    new SparkException(exceptionMessage)
+  }
+
+  def failedToConnectBackPythonWorkerError(e: Exception): Throwable = {
+    new SparkException("Python worker failed to connect back.", e)
+  }
+
+  def eofExceptionWhileReadPortNumberFromAliveDaemonError(daemonModule: String): Throwable = {
+    new SparkException("EOFException occurred while reading the port number " +
+      s"from $daemonModule's stdout")
+  }
+
+  def eofExceptionWhileReadPortNumberError(
+      daemonModule: String,
+      daemonExitValue: Int): Throwable = {
+    new SparkException(
+      s"EOFException occurred while reading the port number from $daemonModule's" +
+        s" stdout and terminated with code: $daemonExitValue.")
+  }
+
+  def cannotBeUsedRDDElementError(otherName: String): Throwable = {

Review comment:
       RDDElementOfTypeCannotBeUsedError

##########
File path: core/src/main/scala/org/apache/spark/errors/SparkCoreErrors.scala
##########
@@ -0,0 +1,125 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.errors
+
+import java.io.EOFException
+
+import org.apache.spark.{SparkException, TaskKilledException}
+import org.apache.spark.api.r.JVMObjectId
+
+/**
+ * Object for grouping error messages from (most) exceptions thrown during query execution.
+ */
+object SparkCoreErrors {
+  def juRemoveError(): Throwable = {
+    new UnsupportedOperationException("remove")
+  }
+
+  def unexpectedPy4JServerError(other: Object): Throwable = {
+    new RuntimeException(s"Unexpected Py4J server ${other.getClass}")
+  }
+
+  def cannotBeUsedDataOfTypeError(className: String): Throwable = {

Review comment:
       dataOfTypeCannotBeUsedError

##########
File path: core/src/main/scala/org/apache/spark/errors/SparkCoreErrors.scala
##########
@@ -0,0 +1,125 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.errors
+
+import java.io.EOFException
+
+import org.apache.spark.{SparkException, TaskKilledException}
+import org.apache.spark.api.r.JVMObjectId
+
+/**
+ * Object for grouping error messages from (most) exceptions thrown during query execution.
+ */
+object SparkCoreErrors {
+  def juRemoveError(): Throwable = {
+    new UnsupportedOperationException("remove")
+  }
+
+  def unexpectedPy4JServerError(other: Object): Throwable = {
+    new RuntimeException(s"Unexpected Py4J server ${other.getClass}")
+  }
+
+  def cannotBeUsedDataOfTypeError(className: String): Throwable = {
+    new SparkException(s"Data of type $className cannot be used")
+  }
+
+  def unexpectedValuePairwiseRDDError(x: Seq[Array[Byte]]): Throwable = {
+    new SparkException("PairwiseRDD: unexpected value: " + x)
+  }
+
+  def unexpectedElementTypeError(other: Object): Throwable = {
+    new SparkException("Unexpected element type " + other.getClass)
+  }
+
+  def eofBeforePythonServerAcknowledgedError(): Throwable = {
+    new SparkException("EOF reached before Python server acknowledged")
+  }
+
+  def taskInterruptionError(reason: String): Throwable = {
+    new TaskKilledException(reason)
+  }
+
+  def crashedUnexpectedlyPythonWorkerError(eof: EOFException): Throwable = {
+    new SparkException("Python worker exited unexpectedly (crashed)", eof)
+  }
+
+  def serverSocketFailedError(message: String): Throwable = {
+    new SparkException(message)
+  }
+
+  def invalidPortNumberError(exceptionMessage: String): Throwable = {
+    new SparkException(exceptionMessage)
+  }
+
+  def failedToConnectBackPythonWorkerError(e: Exception): Throwable = {
+    new SparkException("Python worker failed to connect back.", e)
+  }
+
+  def eofExceptionWhileReadPortNumberFromAliveDaemonError(daemonModule: String): Throwable = {
+    new SparkException("EOFException occurred while reading the port number " +
+      s"from $daemonModule's stdout")
+  }
+
+  def eofExceptionWhileReadPortNumberError(
+      daemonModule: String,
+      daemonExitValue: Int): Throwable = {
+    new SparkException(
+      s"EOFException occurred while reading the port number from $daemonModule's" +
+        s" stdout and terminated with code: $daemonExitValue.")
+  }
+
+  def cannotBeUsedRDDElementError(otherName: String): Throwable = {
+    new SparkException(s"RDD element of type $otherName cannot be used")
+  }
+
+  def unsupportedDataTypeError(other: Any): Throwable = {
+    new SparkException(s"Data of type $other is not supported")
+  }
+
+  def workerProducedError(msg: String, e: Exception): Throwable = {

Review comment:
       RWorkerExitedError

##########
File path: core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
##########
@@ -241,7 +240,7 @@ private[spark] class PythonWorkerFactory(pythonExec: String, envVars: Map[String
             |  $pythonPath
            |Also, check if you have a sitecustomize.py module in your python path,
            |or in your python installation, that is printing to standard output"""
-          throw new SparkException(exceptionMessage.stripMargin)
+          throw SparkCoreErrors.invalidPortNumberError(exceptionMessage.stripMargin)

Review comment:
       Shall we move the exception message inside the error function? 




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]



---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to