Github user mridulm commented on a diff in the pull request:

    https://github.com/apache/spark/pull/15861#discussion_r90129155
  
    --- Diff: core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala ---
    @@ -1089,66 +1064,10 @@ class PairRDDFunctions[K, V](self: RDD[(K, V)])
        * MapReduce job.
        */
       def saveAsHadoopDataset(conf: JobConf): Unit = self.withScope {
     -    // Rename this as hadoopConf internally to avoid shadowing (see SPARK-2038).
    -    val hadoopConf = conf
    -    val outputFormatInstance = hadoopConf.getOutputFormat
    -    val keyClass = hadoopConf.getOutputKeyClass
    -    val valueClass = hadoopConf.getOutputValueClass
    -    if (outputFormatInstance == null) {
    -      throw new SparkException("Output format class not set")
    -    }
    -    if (keyClass == null) {
    -      throw new SparkException("Output key class not set")
    -    }
    -    if (valueClass == null) {
    -      throw new SparkException("Output value class not set")
    -    }
    -    SparkHadoopUtil.get.addCredentials(hadoopConf)
    -
     -    logDebug("Saving as hadoop file of type (" + keyClass.getSimpleName + ", " +
     -      valueClass.getSimpleName + ")")
    -
    -    if (SparkHadoopWriterUtils.isOutputSpecValidationEnabled(self.conf)) {
    -      // FileOutputFormat ignores the filesystem parameter
    -      val ignoredFs = FileSystem.get(hadoopConf)
    -      hadoopConf.getOutputFormat.checkOutputSpecs(ignoredFs, hadoopConf)
    -    }
    --- End diff --
    
    These validations should go into HadoopMapReduceWriteConfigUtil


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at [email protected] or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to