milastdbx commented on code in PR #53009:
URL: https://github.com/apache/spark/pull/53009#discussion_r2528225639


##########
sql/api/src/main/scala/org/apache/spark/sql/types/GeographyType.scala:
##########
@@ -163,19 +163,19 @@ object GeographyType extends SpatialType {
   * Default CRS value for GeographyType depends on storage specification. Parquet and Iceberg use
    * OGC:CRS84, which translates to SRID 4326 here.
    */
-  final val GEOGRAPHY_DEFAULT_SRID = 4326
-  final val GEOGRAPHY_DEFAULT_CRS = "OGC:CRS84"
+  final lazy val GEOGRAPHY_DEFAULT_SRID = 4326
+  final lazy val GEOGRAPHY_DEFAULT_CRS = "OGC:CRS84"
 
   // The default edge interpolation algorithm value for GeographyType.
-  final val GEOGRAPHY_DEFAULT_ALGORITHM = EdgeInterpolationAlgorithm.SPHERICAL
+  final lazy val GEOGRAPHY_DEFAULT_ALGORITHM = EdgeInterpolationAlgorithm.SPHERICAL
 
   // Another way to represent the default parquet crs value (OGC:CRS84).
-  final val GEOGRAPHY_DEFAULT_EPSG_CRS = s"EPSG:$GEOGRAPHY_DEFAULT_SRID"
+  final lazy val GEOGRAPHY_DEFAULT_EPSG_CRS = s"EPSG:$GEOGRAPHY_DEFAULT_SRID"

Review Comment:
   Not needed.
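   
   For context: a plain `final val` on a Scala object is already initialized only when the object itself is first referenced, so a per-field `lazy` buys nothing for constants except an extra initialization lock. A minimal sketch of the equivalent plain declarations (object body trimmed to just the constants touched in this diff):
   
   ```scala
   object GeographyType {
     // Plain vals on an object are evaluated once, at object initialization;
     // no per-field `lazy` (and its synchronization) is needed for constants.
     final val GEOGRAPHY_DEFAULT_SRID = 4326
     final val GEOGRAPHY_DEFAULT_CRS = "OGC:CRS84"
     final val GEOGRAPHY_DEFAULT_EPSG_CRS = s"EPSG:$GEOGRAPHY_DEFAULT_SRID"
   }
   ```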



##########
sql/api/src/main/scala/org/apache/spark/sql/catalyst/encoders/AgnosticEncoder.scala:
##########
@@ -279,9 +279,9 @@ object AgnosticEncoders {
     ScalaDecimalEncoder(DecimalType.SYSTEM_DEFAULT)
   val DEFAULT_JAVA_DECIMAL_ENCODER: JavaDecimalEncoder =
    JavaDecimalEncoder(DecimalType.SYSTEM_DEFAULT, lenientSerialization = false)
-  val DEFAULT_GEOMETRY_ENCODER: GeometryEncoder =
+  lazy val DEFAULT_GEOMETRY_ENCODER: GeometryEncoder =

Review Comment:
   I don't think this needs to be lazy.



##########
sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/arrow/ArrowEncoderSuite.scala:
##########
@@ -41,24 +41,30 @@ import org.apache.spark.sql.catalyst.util.DateTimeConstants.MICROS_PER_SECOND
 import org.apache.spark.sql.catalyst.util.IntervalStringStyles.ANSI_STYLE
 import org.apache.spark.sql.catalyst.util.SparkDateTimeUtils._
 import org.apache.spark.sql.catalyst.util.SparkIntervalUtils._
+import org.apache.spark.sql.connect.SparkSession
 import org.apache.spark.sql.connect.client.CloseableIterator
 import org.apache.spark.sql.connect.client.arrow.FooEnum.FooEnum
-import org.apache.spark.sql.connect.test.ConnectFunSuite
+import org.apache.spark.sql.connect.test.{ConnectFunSuite, RemoteSparkSession}
 import org.apache.spark.sql.types.{ArrayType, DataType, DayTimeIntervalType, Decimal, DecimalType, Geography, Geometry, IntegerType, Metadata, SQLUserDefinedType, StringType, StructType, UserDefinedType, YearMonthIntervalType}
 import org.apache.spark.unsafe.types.VariantVal
 import org.apache.spark.util.{MaybeNull, SparkStringUtils}
 
 /**
  * Tests for encoding external data to and from arrow.
  */
-class ArrowEncoderSuite extends ConnectFunSuite with BeforeAndAfterAll {
+class ArrowEncoderSuite extends ConnectFunSuite with RemoteSparkSession with BeforeAndAfterAll {
   private val allocator = new RootAllocator()
 
   private def newAllocator(name: String): BufferAllocator = {
     allocator.newChildAllocator(name, 0, allocator.getLimit)
   }
 
-  protected override def afterAll(): Unit = {
+  override def beforeAll(): Unit = {
+    super.beforeAll()
+    SparkSession.getActiveSession.get.conf.set("spark.sql.geospatial.enabled", "true")
+  }

Review Comment:
   Ideally this should not be needed. I encourage you to investigate why the config does not exist when it's not specified explicitly.
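   
   A likely cause worth checking: if the flag is read through a raw string key rather than a registered `ConfigEntry`, there is no default to fall back on when the user never sets the key. A minimal sketch of how such a flag is typically registered in `SQLConf` so it always resolves (the entry name, doc string, and default below are assumptions, not the PR's actual code):
   
   ```scala
   // Hypothetical entry in SQLConf.scala; `createWithDefault` guarantees the
   // config resolves to `false` even when the key is never set explicitly.
   val GEOSPATIAL_ENABLED = buildConf("spark.sql.geospatial.enabled")
     .doc("Enables geospatial types and ST expressions.")
     .booleanConf
     .createWithDefault(false)
   ```
   
   With a registered entry, the explicit `conf.set(...)` in this suite's `beforeAll` should be unnecessary.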



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/st/stExpressions.scala:
##########
@@ -17,12 +17,40 @@
 
 package org.apache.spark.sql.catalyst.expressions.st
 
+import org.apache.spark.sql.AnalysisException
+import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
 import org.apache.spark.sql.catalyst.expressions.objects._
 import org.apache.spark.sql.catalyst.trees._
 import org.apache.spark.sql.catalyst.util.{Geography, Geometry, STUtils}
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
 
+/**
+ * ST expressions are behind a feature flag while the geospatial module is under development.
+ */
+
+private[sql] case class GeospatialGuard(child: Expression)

Review Comment:
   This will be present in the analyzed plan, so I really think it's better to have an analysis rule perform this check rather than wrapping expressions in another expression.
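   
   A rough sketch of that alternative: a check that runs at analysis time and fails ST expressions while the flag is off, leaving no guard nodes behind in the analyzed plan. The rule name, flag key, `STExpression` marker trait, and error class below are illustrative, not taken from the PR:
   
   ```scala
   import org.apache.spark.sql.AnalysisException
   import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
   import org.apache.spark.sql.catalyst.rules.Rule
   
   // Illustrative analysis rule: reject ST expressions while the geospatial
   // flag is disabled, instead of wrapping each expression in a guard node.
   object CheckGeospatialEnabled extends Rule[LogicalPlan] {
     override def apply(plan: LogicalPlan): LogicalPlan = {
       val enabled =
         conf.getConfString("spark.sql.geospatial.enabled", "false").toBoolean
       if (!enabled) {
         plan.foreach { node =>
           node.expressions.foreach { expr =>
             expr.foreach {
               case _: STExpression => // hypothetical marker trait for ST expressions
                 throw new AnalysisException(
                   errorClass = "UNSUPPORTED_FEATURE.GEOSPATIAL", // illustrative error class
                   messageParameters = Map.empty)
               case _ =>
             }
           }
         }
       }
       plan
     }
   }
   ```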



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

