This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch branch-2.3
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-2.3 by this push:
     new 6d063ee  [SPARK-26538][SQL] Set default precision and scale for elements of postgres numeric array
6d063ee is described below

commit 6d063ee07c3ee591131d2ad1debdb9540428b5ff
Author: Oleksii Shkarupin <a.shkaru...@gmail.com>
AuthorDate: Sat Jan 12 11:06:39 2019 -0800

    [SPARK-26538][SQL] Set default precision and scale for elements of postgres numeric array
    
    ## What changes were proposed in this pull request?
    
    When determining the CatalystType for postgres columns of type `numeric[]`, set the array element type to `DecimalType(38, 18)` instead of `DecimalType(0, 0)`.
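    
    For illustration, a minimal sketch of the new mapping, mirroring the `JDBCSuite` assertion added in this patch (`DecimalType.SYSTEM_DEFAULT` is `DecimalType(38, 18)`; the JDBC URL is only a placeholder):
    
    ```scala
    import org.apache.spark.sql.jdbc.JdbcDialects
    import org.apache.spark.sql.types.{ArrayType, DecimalType, MetadataBuilder}
    
    // A postgres numeric[] column declared without explicit precision/scale
    // surfaces through JDBC as typeName "_numeric" with precision 0; it now
    // maps to ArrayType(DecimalType.SYSTEM_DEFAULT) instead of an invalid
    // DecimalType(0, 0).
    val dialect = JdbcDialects.get("jdbc:postgresql://127.0.0.1/db")
    val md = new MetadataBuilder().putLong("scale", 0)
    assert(dialect.getCatalystType(java.sql.Types.ARRAY, "_numeric", 0, md) ==
      Some(ArrayType(DecimalType.SYSTEM_DEFAULT)))
    ```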
    
    ## How was this patch tested?
    
    Tested with modified `org.apache.spark.sql.jdbc.JDBCSuite`.
    Ran the `PostgresIntegrationSuite` manually.
    
    Closes #23456 from a-shkarupin/postgres_numeric_array.
    
    Lead-authored-by: Oleksii Shkarupin <a.shkaru...@gmail.com>
    Co-authored-by: Dongjoon Hyun <dongj...@apache.org>
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
    (cherry picked from commit 5b37092311bfc1255f1d4d81127ae4242ba1d1aa)
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
---
 .../org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala | 12 ++++++++----
 .../scala/org/apache/spark/sql/jdbc/PostgresDialect.scala    |  5 ++++-
 .../src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala |  3 +++
 3 files changed, 15 insertions(+), 5 deletions(-)

diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
index be32cb8..e8d5b46 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
@@ -46,14 +46,15 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
     conn.prepareStatement("CREATE TABLE bar (c0 text, c1 integer, c2 double 
precision, c3 bigint, "
       + "c4 bit(1), c5 bit(10), c6 bytea, c7 boolean, c8 inet, c9 cidr, "
       + "c10 integer[], c11 text[], c12 real[], c13 numeric(2,2)[], c14 
enum_type, "
-      + "c15 float4, c16 smallint)").executeUpdate()
+      + "c15 float4, c16 smallint, c17 numeric[])").executeUpdate()
     conn.prepareStatement("INSERT INTO bar VALUES ('hello', 42, 1.25, 
123456789012345, B'0', "
       + "B'1000100101', E'\\\\xDEADBEEF', true, '172.16.0.42', 
'192.168.0.0/16', "
-      + """'{1, 2}', '{"a", null, "b"}', '{0.11, 0.22}', '{0.11, 0.22}', 'd1', 
1.01, 1)"""
+      + """'{1, 2}', '{"a", null, "b"}', '{0.11, 0.22}', '{0.11, 0.22}', 'd1', 
1.01, 1, """
+      + "'{111.2222, 333.4444}')"
     ).executeUpdate()
     conn.prepareStatement("INSERT INTO bar VALUES (null, null, null, null, 
null, "
       + "null, null, null, null, null, "
-      + "null, null, null, null, null, null, null)"
+      + "null, null, null, null, null, null, null, null)"
     ).executeUpdate()
 
     conn.prepareStatement("CREATE TABLE ts_with_timezone " +
@@ -85,7 +86,7 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
     assert(rows.length == 2)
     // Test the types, and values using the first row.
     val types = rows(0).toSeq.map(x => x.getClass)
-    assert(types.length == 17)
+    assert(types.length == 18)
     assert(classOf[String].isAssignableFrom(types(0)))
     assert(classOf[java.lang.Integer].isAssignableFrom(types(1)))
     assert(classOf[java.lang.Double].isAssignableFrom(types(2)))
@@ -103,6 +104,7 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
     assert(classOf[String].isAssignableFrom(types(14)))
     assert(classOf[java.lang.Float].isAssignableFrom(types(15)))
     assert(classOf[java.lang.Short].isAssignableFrom(types(16)))
+    assert(classOf[Seq[BigDecimal]].isAssignableFrom(types(17)))
     assert(rows(0).getString(0).equals("hello"))
     assert(rows(0).getInt(1) == 42)
     assert(rows(0).getDouble(2) == 1.25)
@@ -123,6 +125,8 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
     assert(rows(0).getString(14) == "d1")
     assert(rows(0).getFloat(15) == 1.01f)
     assert(rows(0).getShort(16) == 1)
+    assert(rows(0).getSeq(17) ==
+      Seq("111.222200000000000000", 
"333.444400000000000000").map(BigDecimal(_).bigDecimal))
 
     // Test reading null values using the second row.
     assert(0.until(16).forall(rows(1).isNullAt(_)))
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/PostgresDialect.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/PostgresDialect.scala
index 13a2035..faaf20f 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/PostgresDialect.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/PostgresDialect.scala
@@ -60,7 +60,10 @@ private object PostgresDialect extends JdbcDialect {
     case "bytea" => Some(BinaryType)
     case "timestamp" | "timestamptz" | "time" | "timetz" => Some(TimestampType)
     case "date" => Some(DateType)
-    case "numeric" | "decimal" => Some(DecimalType.bounded(precision, scale))
+    case "numeric" | "decimal" if precision > 0 => 
Some(DecimalType.bounded(precision, scale))
+    case "numeric" | "decimal" =>
+      // SPARK-26538: handle numeric without explicit precision and scale.
+      Some(DecimalType.SYSTEM_DEFAULT)
     case _ => None
   }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
index 5238adc..6b2f5a0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
@@ -806,8 +806,11 @@ class JDBCSuite extends SparkFunSuite
 
   test("PostgresDialect type mapping") {
     val Postgres = JdbcDialects.get("jdbc:postgresql://127.0.0.1/db")
+    val md = new MetadataBuilder().putLong("scale", 0)
     assert(Postgres.getCatalystType(java.sql.Types.OTHER, "json", 1, null) === 
Some(StringType))
     assert(Postgres.getCatalystType(java.sql.Types.OTHER, "jsonb", 1, null) 
=== Some(StringType))
+    assert(Postgres.getCatalystType(java.sql.Types.ARRAY, "_numeric", 0, md) ==
+      Some(ArrayType(DecimalType.SYSTEM_DEFAULT)))
     assert(Postgres.getJDBCType(FloatType).map(_.databaseTypeDefinition).get == "FLOAT4")
     assert(Postgres.getJDBCType(DoubleType).map(_.databaseTypeDefinition).get == "FLOAT8")
     val errMsg = intercept[IllegalArgumentException] {
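
For context, a hedged sketch of how the fix surfaces to users reading the `bar` table created in the integration suite above (assumes an active SparkSession `spark`, the postgres JDBC driver on the classpath, and a reachable database; the URL is a placeholder):

```scala
// Schema inference over a postgres numeric[] column (c17 in table bar).
// Previously the element type came back as DecimalType(0, 0), which is
// invalid; with this patch it is inferred as decimal(38,18).
val df = spark.read
  .format("jdbc")
  .option("url", "jdbc:postgresql://127.0.0.1/db")  // placeholder URL
  .option("dbtable", "bar")
  .load()
df.printSchema()  // c17 should appear as an array with element type decimal(38,18)
```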


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
