Repository: spark
Updated Branches:
  refs/heads/branch-1.6 0eb82133f -> 5da7d4130


[SPARK-10946][SQL] JDBC - Use Statement.executeUpdate instead of 
PreparedStatement.executeUpdate for DDLs

Updates the JDBC data source code paths (DataFrameWriter.jdbc and JdbcUtils.dropTable) so that DDL statements (CREATE TABLE, DROP TABLE) are executed via Statement.executeUpdate(sql) instead of PreparedStatement.executeUpdate(), since preparing a statement is unnecessary for DDL with no bind parameters.

Author: somideshmukh <somi...@us.ibm.com>

Closes #9733 from somideshmukh/SomilBranch-1.1.

(cherry picked from commit b8f4379ba1c5c1a8f3b4c88bd97031dc8ad2dfea)
Signed-off-by: Sean Owen <so...@cloudera.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/5da7d413
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/5da7d413
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/5da7d413

Branch: refs/heads/branch-1.6
Commit: 5da7d41307c02e0204a50a5c3d9a35e839d29910
Parents: 0eb8213
Author: somideshmukh <somi...@us.ibm.com>
Authored: Wed Nov 18 08:51:01 2015 +0000
Committer: Sean Owen <so...@cloudera.com>
Committed: Wed Nov 18 08:54:09 2015 +0000

----------------------------------------------------------------------
 sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala | 2 +-
 .../apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala    | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/5da7d413/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala 
b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
index e63a4d5..03867be 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
@@ -297,7 +297,7 @@ final class DataFrameWriter private[sql](df: DataFrame) {
       if (!tableExists) {
         val schema = JdbcUtils.schemaString(df, url)
         val sql = s"CREATE TABLE $table ($schema)"
-        conn.prepareStatement(sql).executeUpdate()
+        conn.createStatement.executeUpdate(sql)
       }
     } finally {
       conn.close()

http://git-wip-us.apache.org/repos/asf/spark/blob/5da7d413/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
----------------------------------------------------------------------
diff --git 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
index 32d28e5..7375a5c 100644
--- 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
+++ 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
@@ -55,7 +55,7 @@ object JdbcUtils extends Logging {
    * Drops a table from the JDBC database.
    */
   def dropTable(conn: Connection, table: String): Unit = {
-    conn.prepareStatement(s"DROP TABLE $table").executeUpdate()
+    conn.createStatement.executeUpdate(s"DROP TABLE $table")
   }
 
   /**


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to