This is an automated email from the ASF dual-hosted git repository.

imbruced pushed a commit to branch feature/geopackage-reader
in repository https://gitbox.apache.org/repos/asf/sedona.git

commit 323405eac547c425c8bb9d5b3a1e8058be937d55
Author: pawelkocinski <[email protected]>
AuthorDate: Mon Sep 23 19:04:31 2024 +0200

    Add other missing data types.
---
 .../geopackage/model/GeoPackageField.scala         | 38 +++++++++++++---------
 .../geopackage/model/GeoPackageType.scala          |  5 +++
 .../geopackage/transform/ValuesMapper.scala        |  7 +++-
 3 files changed, 34 insertions(+), 16 deletions(-)

diff --git a/spark/spark-3.3/src/main/scala/org/apache/sedona/sql/datasources/geopackage/model/GeoPackageField.scala b/spark/spark-3.3/src/main/scala/org/apache/sedona/sql/datasources/geopackage/model/GeoPackageField.scala
index fea83c5ee..9bac47182 100644
--- a/spark/spark-3.3/src/main/scala/org/apache/sedona/sql/datasources/geopackage/model/GeoPackageField.scala
+++ b/spark/spark-3.3/src/main/scala/org/apache/sedona/sql/datasources/geopackage/model/GeoPackageField.scala
@@ -11,27 +11,35 @@ case class GeoPackageField(name: String, dataType: String, isNullable: Boolean)
 
   def toStructField(tableType: TableType): StructField = {
     dataType match {
-      case startsWith: String if startsWith.startsWith("TEXT") => StructField(name, StringType)
-      case startsWith: String if startsWith.startsWith("BLOB") => {
+      case startsWith: String if startsWith.startsWith(GeoPackageType.TEXT) => StructField(name, StringType)
+      case startsWith: String if startsWith.startsWith(GeoPackageType.BLOB) => {
         if (tableType == TableType.TILES) {
           return StructField(name, RasterUDT)
         }
 
         StructField(name, BinaryType)
       }
-      case "INTEGER" => StructField(name, IntegerType)
-      case "POINT" => StructField(name, GeometryUDT)
-      case "LINESTRING" => StructField(name, GeometryUDT)
-      case "POLYGON" => StructField(name, GeometryUDT)
-      case "GEOMETRY" => StructField(name, GeometryUDT)
-      case "MULTIPOINT" => StructField(name, GeometryUDT)
-      case "MULTILINESTRING" => StructField(name, GeometryUDT)
-      case "MULTIPOLYGON" => StructField(name, GeometryUDT)
-      case "GEOMETRYCOLLECTION" => StructField(name, GeometryUDT)
-      case "REAL" => StructField(name, DoubleType)
-      case "BOOLEAN" => StructField(name, BooleanType)
-      case "DATE" => StructField(name, DateType)
-      case "DATETIME" => StructField(name, TimestampType)
+      case
+        GeoPackageType.INTEGER |
+        GeoPackageType.INT |
+        GeoPackageType.SMALLINT |
+        GeoPackageType.TINY_INT |
+        GeoPackageType.MEDIUMINT =>
+        StructField(name, IntegerType)
+      case GeoPackageType.POINT => StructField(name, GeometryUDT)
+      case GeoPackageType.LINESTRING => StructField(name, GeometryUDT)
+      case GeoPackageType.POLYGON => StructField(name, GeometryUDT)
+      case GeoPackageType.GEOMETRY => StructField(name, GeometryUDT)
+      case GeoPackageType.MULTIPOINT => StructField(name, GeometryUDT)
+      case GeoPackageType.MULTILINESTRING => StructField(name, GeometryUDT)
+      case GeoPackageType.MULTIPOLYGON => StructField(name, GeometryUDT)
+      case GeoPackageType.GEOMETRYCOLLECTION => StructField(name, GeometryUDT)
+      case GeoPackageType.REAL => StructField(name, DoubleType)
+      case GeoPackageType.BOOLEAN => StructField(name, BooleanType)
+      case GeoPackageType.DATE => StructField(name, DateType)
+      case GeoPackageType.DATETIME => StructField(name, TimestampType)
+      case GeoPackageType.FLOAT => StructField(name, FloatType)
+      case GeoPackageType.DOUBLE => StructField(name, DoubleType)
       case _ => StructField(name, StringType)
     }
   }
diff --git a/spark/spark-3.3/src/main/scala/org/apache/sedona/sql/datasources/geopackage/model/GeoPackageType.scala b/spark/spark-3.3/src/main/scala/org/apache/sedona/sql/datasources/geopackage/model/GeoPackageType.scala
index 5bd09432b..8ae862cb9 100644
--- a/spark/spark-3.3/src/main/scala/org/apache/sedona/sql/datasources/geopackage/model/GeoPackageType.scala
+++ b/spark/spark-3.3/src/main/scala/org/apache/sedona/sql/datasources/geopackage/model/GeoPackageType.scala
@@ -18,4 +18,9 @@ object GeoPackageType {
   val BOOLEAN = "BOOLEAN"
   val DATE = "DATE"
   val DATETIME = "DATETIME"
+  val TINY_INT = "TINY_INT"
+  val SMALLINT = "SMALLINT"
+  val MEDIUMINT = "MEDIUMINT"
+  val FLOAT = "FLOAT"
+  val DOUBLE = "DOUBLE"
 }
diff --git a/spark/spark-3.3/src/main/scala/org/apache/sedona/sql/datasources/geopackage/transform/ValuesMapper.scala b/spark/spark-3.3/src/main/scala/org/apache/sedona/sql/datasources/geopackage/transform/ValuesMapper.scala
index 56437b916..03b2be0a3 100644
--- a/spark/spark-3.3/src/main/scala/org/apache/sedona/sql/datasources/geopackage/transform/ValuesMapper.scala
+++ b/spark/spark-3.3/src/main/scala/org/apache/sedona/sql/datasources/geopackage/transform/ValuesMapper.scala
@@ -12,7 +12,12 @@ object ValuesMapper {
     metadata.columns.map(
       column => {
         (column.dataType, metadata.loadOptions.tableType) match {
-          case (GeoPackageType.INTEGER, _) => rs.getInt(column.name)
+          case (GeoPackageType.INTEGER | GeoPackageType.INT, _) => rs.getInt(column.name)
+          case (GeoPackageType.TINY_INT, _) => rs.getInt(column.name)
+          case (GeoPackageType.SMALLINT, _) => rs.getInt(column.name)
+          case (GeoPackageType.MEDIUMINT, _) => rs.getInt(column.name)
+          case (GeoPackageType.FLOAT, _) => rs.getFloat(column.name)
+          case (GeoPackageType.DOUBLE, _) => rs.getDouble(column.name)
           case (GeoPackageType.REAL, _) => rs.getDouble(column.name)
          case (startsWith: String, _) if startsWith.startsWith(GeoPackageType.TEXT) =>
             UTF8String.fromString(rs.getString(column.name))

Reply via email to