imback82 commented on a change in pull request #26593: [SPARK-29890][SQL] 
DataFrameNaFunctions.fill should handle duplicate columns
URL: https://github.com/apache/spark/pull/26593#discussion_r348170528
 
 

 ##########
 File path: 
sql/core/src/main/scala/org/apache/spark/sql/DataFrameNaFunctions.scala
 ##########
 @@ -468,37 +481,62 @@ final class DataFrameNaFunctions private[sql](df: 
DataFrame) {
       s"Unsupported value type ${v.getClass.getName} ($v).")
   }
 
/**
 * Resolves the abstract column type that a fill value of runtime type `T`
 * applies to: `Double`/`Long` target numeric columns, `String` targets string
 * columns, and `Boolean` targets boolean columns.
 *
 * @throws IllegalArgumentException if the value's runtime type is not one of
 *                                  the supported fill types.
 */
private def getTargetType[T](value: T): AbstractDataType = value match {
  case _: String => StringType
  case _: Boolean => BooleanType
  case _: Double | _: Long => NumericType
  case other =>
    throw new IllegalArgumentException(
      s"Unsupported value type for fill(): ${other.getClass.getName} ($other).")
}
+
/**
 * Checks whether a column of `sourceType` is targeted by a fill whose value
 * resolved (via `getTargetType`) to `targetType`: the `NumericType` target
 * covers any numeric column, while string and boolean targets require an
 * exact type match.
 *
 * @throws IllegalArgumentException if `targetType` is not one of the
 *                                  supported target types.
 */
private def typeMatches(targetType: AbstractDataType, sourceType: DataType): Boolean =
  targetType match {
    case NumericType => sourceType.isInstanceOf[NumericType]
    case StringType => sourceType == StringType
    case BooleanType => sourceType == BooleanType
    case _ =>
      throw new IllegalArgumentException(s"$targetType is not matched for fill().")
  }
+
   /**
    * Returns a new `DataFrame` that replaces null or NaN values in the specified
    * numeric, string or boolean columns. If a specified column is not a numeric,
    * string or boolean column, it is ignored.
    */
   private def fillValue[T](value: T, cols: Seq[String]): DataFrame = {
 
 Review comment:
   Thanks for the suggestion!

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to