Github user cloud-fan commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21585#discussion_r196506912
  
    --- Diff: sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/InsertIntoDataSourceCommand.scala ---
    @@ -38,9 +38,8 @@ case class InsertIntoDataSourceCommand(
       override def run(sparkSession: SparkSession): Seq[Row] = {
         val relation = logicalRelation.relation.asInstanceOf[InsertableRelation]
         val data = Dataset.ofRows(sparkSession, query)
    -    // Apply the schema of the existing table to the new data.
     -    val df = sparkSession.internalCreateDataFrame(data.queryExecution.toRdd, logicalRelation.schema)
    -    relation.insert(df, overwrite)
     +    // Data should have been cast to the schema of the insert relation.
    --- End diff --
    
    It's better to mention which rule did the cast.


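    For context, a minimal sketch of how the simplified `run` could read once the
    comment names the rule. The rule name (PreprocessTableInsertion) and the class
    skeleton below are assumptions for illustration, not the exact upstream source:

        import org.apache.spark.sql.{Dataset, Row, SparkSession}
        import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
        import org.apache.spark.sql.execution.command.RunnableCommand
        import org.apache.spark.sql.execution.datasources.LogicalRelation
        import org.apache.spark.sql.sources.InsertableRelation

        // Sketch only: field names follow the diff above. Assumes this lives under
        // org.apache.spark.sql (as the real command does), so private[sql] APIs
        // such as Dataset.ofRows are visible.
        case class InsertIntoDataSourceCommand(
            logicalRelation: LogicalRelation,
            query: LogicalPlan,
            overwrite: Boolean)
          extends RunnableCommand {

          override def run(sparkSession: SparkSession): Seq[Row] = {
            val relation = logicalRelation.relation.asInstanceOf[InsertableRelation]
            // By the time this command runs, the analyzer (assumed here to be the
            // PreprocessTableInsertion rule) should already have cast the query
            // output to the relation's schema, so re-applying the schema via
            // internalCreateDataFrame is no longer needed.
            val data = Dataset.ofRows(sparkSession, query)
            relation.insert(data, overwrite)
            Seq.empty[Row]
          }
        }

    Naming the rule in the code comment, as requested above, makes it easier to find
    the cast logic from this command when reading the insert path.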
---
