This is an automated email from the ASF dual-hosted git repository.
jiayu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/sedona.git
The following commit(s) were added to refs/heads/master by this push:
new 5cb7a210 [SEDONA-301] Add ST_Affine (#866)
5cb7a210 is described below
commit 5cb7a2100189b5a96035cdb4bbf530c8c570e3d0
Author: Nilesh Gajwani <[email protected]>
AuthorDate: Sun Jun 25 22:06:56 2023 -0700
[SEDONA-301] Add ST_Affine (#866)
---
.../java/org/apache/sedona/common/Functions.java | 15 ++
.../org/apache/sedona/common/utils/GeomUtils.java | 20 +-
.../org/apache/sedona/common/FunctionsTest.java | 89 ++++++++-
docs/api/flink/Function.md | 61 ++++++
docs/api/sql/Function.md | 62 ++++++
.../main/java/org/apache/sedona/flink/Catalog.java | 1 +
.../apache/sedona/flink/expressions/Functions.java | 21 ++-
python/sedona/sql/st_functions.py | 29 +++
python/tests/sql/test_dataframe_api.py | 1 +
python/tests/sql/test_function.py | 207 ++++++++++++---------
.../scala/org/apache/sedona/sql/UDF/Catalog.scala | 1 +
.../sql/sedona_sql/expressions/Functions.scala | 8 +-
.../expressions/InferredExpression.scala | 41 ++++
.../sql/sedona_sql/expressions/st_functions.scala | 11 ++
.../apache/sedona/sql/dataFrameAPITestScala.scala | 16 +-
.../org/apache/sedona/sql/functionTestScala.scala | 22 +++
16 files changed, 514 insertions(+), 91 deletions(-)
diff --git a/common/src/main/java/org/apache/sedona/common/Functions.java
b/common/src/main/java/org/apache/sedona/common/Functions.java
index 8997a508..13cfc619 100644
--- a/common/src/main/java/org/apache/sedona/common/Functions.java
+++ b/common/src/main/java/org/apache/sedona/common/Functions.java
@@ -905,6 +905,21 @@ public class Functions {
return geometry;
}
+ public static Geometry affine(Geometry geometry, Double a, Double b,
Double d, Double e, Double xOff, Double yOff, Double c,
+ Double f, Double g, Double h, Double i,
Double zOff) {
+ if (!geometry.isEmpty()) {
+ GeomUtils.affineGeom(geometry, a, b, d, e, xOff, yOff, c, f, g, h,
i, zOff);
+ }
+ return geometry;
+ }
+
+ public static Geometry affine(Geometry geometry, Double a, Double b,
Double d, Double e, Double xOff, Double yOff) {
+ if (!geometry.isEmpty()) {
+ GeomUtils.affineGeom(geometry, a, b, d, e, xOff, yOff, null, null,
null, null, null, null);
+ }
+ return geometry;
+ }
+
public static Geometry geometricMedian(Geometry geometry, double
tolerance, int maxIter, boolean failIfNotConverged) throws Exception {
String geometryType = geometry.getGeometryType();
if(!(Geometry.TYPENAME_POINT.equals(geometryType) ||
Geometry.TYPENAME_MULTIPOINT.equals(geometryType))) {
diff --git a/common/src/main/java/org/apache/sedona/common/utils/GeomUtils.java
b/common/src/main/java/org/apache/sedona/common/utils/GeomUtils.java
index a47dc510..2a210f56 100644
--- a/common/src/main/java/org/apache/sedona/common/utils/GeomUtils.java
+++ b/common/src/main/java/org/apache/sedona/common/utils/GeomUtils.java
@@ -28,7 +28,6 @@ import org.locationtech.jts.operation.polygonize.Polygonizer;
import org.locationtech.jts.operation.union.UnaryUnionOp;
import org.locationtech.jts.algorithm.distance.DiscreteHausdorffDistance;
-import java.awt.*;
import java.nio.ByteOrder;
import java.util.*;
import java.util.List;
@@ -462,6 +461,25 @@ public class GeomUtils {
geometry.geometryChanged();
}
}
+ public static void affineGeom(Geometry geometry, Double a, Double b,
Double d, Double e, Double xOff, Double yOff, Double c,
+ Double f, Double g, Double h, Double i,
Double zOff) {
+ Coordinate[] coordinates = geometry.getCoordinates();
+ for (Coordinate currCoordinate : coordinates) {
+ double x = currCoordinate.getX(), y = currCoordinate.getY(), z =
Double.isNaN(currCoordinate.getZ()) ? 0 : currCoordinate.getZ();
+ double newX = a * x + b * y + xOff;
+ if (c != null) newX += c * z;
+ double newY = d * x + e * y + yOff;
+ if (f != null) newY += f * z;
+ currCoordinate.setX(newX);
+ currCoordinate.setY(newY);
+
+ if (g != null && h != null && i != null &&
!Double.isNaN(currCoordinate.getZ())) {
+ double newZ = g * x + h * y + i * z + zOff;
+ currCoordinate.setZ(newZ);
+ }
+ }
+ geometry.geometryChanged();
+ }
public static Double getHausdorffDistance(Geometry g1, Geometry g2, double
densityFrac) throws Exception {
if (g1.isEmpty() || g2.isEmpty()) return 0.0;
diff --git a/common/src/test/java/org/apache/sedona/common/FunctionsTest.java
b/common/src/test/java/org/apache/sedona/common/FunctionsTest.java
index 6901cb7c..3dde5180 100644
--- a/common/src/test/java/org/apache/sedona/common/FunctionsTest.java
+++ b/common/src/test/java/org/apache/sedona/common/FunctionsTest.java
@@ -24,6 +24,7 @@ import org.junit.Test;
import org.locationtech.jts.geom.*;
import org.locationtech.jts.io.WKTReader;
import org.locationtech.jts.io.WKTWriter;
+
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
@@ -932,6 +933,7 @@ public class FunctionsTest {
assertEquals(wktWriter3D.write(expectedPoint3D),
wktWriter3D.write(actualGeometry.getGeometryN(0).getGeometryN(1)));
assertEquals(emptyLineString.toText(),
actualGeometry.getGeometryN(0).getGeometryN(2).toText());
}
+
@Test
public void boundingDiagonalGeom2D() {
Polygon polygon = GEOMETRY_FACTORY.createPolygon(coordArray(1, 0, 1,
1, 2, 1, 2, 2, 2, 0, 1, 0));
@@ -959,7 +961,6 @@ public class FunctionsTest {
@Test
public void boundingDiagonalGeomCollection2D() {
- // ("'GEOMETRYCOLLECTION (MULTIPOLYGON (((1 1, 1 -1, 2 2, 2 9, 9
1, 1 1)), ((5 5, 4 4, 2 2 , 5 5))), POINT (-1 0))'") -> "'LINESTRING (-1 -1, 9
9)'"
Polygon polygon1 = GEOMETRY_FACTORY.createPolygon(coordArray(1, 1, 1,
-1, 2, 2, 2, 9, 9, 1, 1, 1));
Polygon polygon2 = GEOMETRY_FACTORY.createPolygon(coordArray(5, 5, 4,
4, 2, 2, 5, 5));
MultiPolygon multiPolygon = GEOMETRY_FACTORY.createMultiPolygon(new
Polygon[] {polygon1, polygon2});
@@ -992,7 +993,91 @@ public class FunctionsTest {
String actual = Functions.boundingDiagonal(point).toText();
assertEquals(expected, actual);
}
-
+
+ @Test
+ public void affineEmpty3D() {
+ LineString emptyLineString = GEOMETRY_FACTORY.createLineString();
+ String expected = emptyLineString.toText();
+ String actual = Functions.affine(emptyLineString, 1.0, 1.0, 2.0, 3.0,
5.0, 6.0, 2.0, 3.0, 4.0, 4.0, 5.0, 6.0).toText();
+ assertEquals(expected, actual);
+ }
+
+ @Test
+ public void affineEmpty2D() {
+ LineString emptyLineString = GEOMETRY_FACTORY.createLineString();
+ String expected = emptyLineString.toText();
+ String actual = Functions.affine(emptyLineString, 1.0, 2.0, 3.0, 4.0,
1.0, 2.0).toText();
+ assertEquals(expected, actual);
+ }
+
+ @Test
+ public void affine3DGeom2D() {
+ LineString lineString =
GEOMETRY_FACTORY.createLineString(coordArray(1, 0, 1, 1, 1, 2));
+ String expected = GEOMETRY_FACTORY.createLineString(coordArray(6, 8,
7, 11, 8, 14)).toText();
+ String actual = Functions.affine(lineString, 1.0, 1.0, 2.0, 3.0, 5.0,
6.0, 2.0, 3.0, 4.0, 4.0, 5.0, 6.0).toText();
+ assertEquals(expected, actual);
+ }
+
+ @Test
+ public void affine3DGeom3D() {
+ WKTWriter wktWriter = new WKTWriter(3);
+ LineString lineString =
GEOMETRY_FACTORY.createLineString(coordArray3d(1, 0, 1, 1, 1, 2, 1, 2, 2));
+ String expected =
wktWriter.write(GEOMETRY_FACTORY.createLineString(coordArray3d(8, 11, 15, 11,
17, 24, 12, 20, 28)));
+ String actual = wktWriter.write(Functions.affine(lineString, 1.0, 1.0,
2.0, 3.0, 5.0, 6.0, 2.0, 3.0, 4.0, 4.0, 5.0, 6.0));
+ assertEquals(expected, actual);
+ }
+
+ @Test
+ public void affine3DHybridGeomCollection() {
+ Point point3D = GEOMETRY_FACTORY.createPoint(new Coordinate(1, 1, 1));
+ Polygon polygon1 = GEOMETRY_FACTORY.createPolygon(coordArray3d(1, 0,
2, 1, 1, 2, 2, 1, 2, 2, 0, 2, 1, 0, 2));
+ Polygon polygon2 = GEOMETRY_FACTORY.createPolygon(coordArray3d(1, 0,
1, 1, 1, 1, 2, 2, 2, 1, 0, 1));
+ MultiPolygon multiPolygon = GEOMETRY_FACTORY.createMultiPolygon(new
Polygon[] {polygon1, polygon2});
+ Geometry geomCollection =
GEOMETRY_FACTORY.createGeometryCollection(new Geometry[]
{GEOMETRY_FACTORY.createGeometryCollection(new Geometry[] {point3D,
multiPolygon})});
+ Geometry actualGeomCollection = Functions.affine(geomCollection, 1.0,
2.0, 1.0, 2.0, 1.0, 2.0, 3.0, 3.0, 1.0, 2.0, 3.0, 3.0);
+ WKTWriter wktWriter3D = new WKTWriter(3);
+ Point expectedPoint3D = GEOMETRY_FACTORY.createPoint(new Coordinate(7,
8, 9));
+ Polygon expectedPolygon1 =
GEOMETRY_FACTORY.createPolygon(coordArray3d(8, 9, 10, 10, 11, 12, 11, 12, 13,
9, 10, 11, 8, 9, 10));
+ Polygon expectedPolygon2 =
GEOMETRY_FACTORY.createPolygon(coordArray3d(5, 6, 7, 7, 8, 9, 13, 14, 15, 5, 6,
7));
+ assertEquals(wktWriter3D.write(expectedPoint3D),
wktWriter3D.write(actualGeomCollection.getGeometryN(0).getGeometryN(0)));
+ assertEquals(wktWriter3D.write(expectedPolygon1),
wktWriter3D.write(actualGeomCollection.getGeometryN(0).getGeometryN(1).getGeometryN(0)));
+ assertEquals(wktWriter3D.write(expectedPolygon2),
wktWriter3D.write(actualGeomCollection.getGeometryN(0).getGeometryN(1).getGeometryN(1)));
+ }
+
+ @Test
+ public void affine2DGeom3D() {
+ WKTWriter wktWriter = new WKTWriter(3);
+ LineString lineString =
GEOMETRY_FACTORY.createLineString(coordArray3d(1, 0, 1, 1, 1, 2, 1, 2, 2));
+ String expected =
wktWriter.write(GEOMETRY_FACTORY.createLineString(coordArray3d(6, 8, 1, 7, 11,
2, 8, 14, 2)));
+ String actual = wktWriter.write(Functions.affine(lineString, 1d, 1d,
2d, 3d, 5d, 6d));
+ assertEquals(expected, actual);
+ }
+
+
+ @Test
+ public void affine2DGeom2D() {
+ LineString lineString =
GEOMETRY_FACTORY.createLineString(coordArray(1, 0, 1, 1, 1, 2));
+ String expected = GEOMETRY_FACTORY.createLineString(coordArray(6, 8,
7, 11, 8, 14)).toText();
+ String actual = Functions.affine(lineString, 1.0, 1.0, 2.0, 3.0, 5.0,
6.0).toText();
+ assertEquals(expected, actual);
+ }
+
+ @Test
+ public void affine2DHybridGeomCollection() {
+ Point point3D = GEOMETRY_FACTORY.createPoint(new Coordinate(1, 1));
+ Polygon polygon1 = GEOMETRY_FACTORY.createPolygon(coordArray(1, 0, 1,
1, 2, 1, 2, 0, 1, 0));
+ Polygon polygon2 = GEOMETRY_FACTORY.createPolygon(coordArray(3, 4, 3,
5, 3, 7, 10, 7, 3, 4));
+ MultiPolygon multiPolygon = GEOMETRY_FACTORY.createMultiPolygon(new
Polygon[] {polygon1, polygon2});
+ Geometry geomCollection =
GEOMETRY_FACTORY.createGeometryCollection(new Geometry[]
{GEOMETRY_FACTORY.createGeometryCollection(new Geometry[] {point3D,
multiPolygon})});
+ Geometry actualGeomCollection = Functions.affine(geomCollection, 1.0,
2.0, 1.0, 2.0, 1.0, 2.0);
+ Point expectedPoint3D = GEOMETRY_FACTORY.createPoint(new Coordinate(4,
5));
+ Polygon expectedPolygon1 =
GEOMETRY_FACTORY.createPolygon(coordArray(2, 3, 4, 5, 5, 6, 3, 4, 2, 3));
+ Polygon expectedPolygon2 =
GEOMETRY_FACTORY.createPolygon(coordArray(12, 13, 14, 15, 18, 19, 25, 26, 12,
13));
+ assertEquals(expectedPoint3D.toText(),
actualGeomCollection.getGeometryN(0).getGeometryN(0).toText());
+ assertEquals(expectedPolygon1.toText(),
actualGeomCollection.getGeometryN(0).getGeometryN(1).getGeometryN(0).toText());
+ assertEquals(expectedPolygon2.toText(),
actualGeomCollection.getGeometryN(0).getGeometryN(1).getGeometryN(1).toText());
+ }
+
@Test
public void hausdorffDistanceDefaultGeom2D() throws Exception {
Polygon polygon1 = GEOMETRY_FACTORY.createPolygon(coordArray3d(1, 0,
1, 1, 1, 2, 2, 1, 5, 2, 0, 1, 1, 0, 1));
diff --git a/docs/api/flink/Function.md b/docs/api/flink/Function.md
index cc8ee0c0..9b172dd5 100644
--- a/docs/api/flink/Function.md
+++ b/docs/api/flink/Function.md
@@ -36,6 +36,67 @@ LINESTRING(0 0, 21 52, 1 1, 1 0)
LINESTRING(0 0, 1 1, 1 0, 21 52)
```
+## ST_Affine
+
+Introduction: Apply an affine transformation to the given geometry.
+
+ST_Affine has 2 overloaded signatures:
+
+`ST_Affine(geometry, a, b, d, e, xOff, yOff, c, f, g, h, i, zOff)`
+
+`ST_Affine(geometry, a, b, d, e, xOff, yOff)`
+
+
+Based on the invoked function, the following transformation is applied:
+
+`x = a * x + b * y + c * z + xOff OR x = a * x + b * y + xOff`
+
+`y = d * x + e * y + f * z + yOff OR y = d * x + e * y + yOff`
+
+`z = g * x + h * y + i * z + zOff OR z = z (z is unchanged)`
+
+If the given geometry is empty, the result is also empty.
+
+Format: `ST_Affine(geometry, a, b, d, e, xOff, yOff, c, f, g, h, i, zOff)`
+Format: `ST_Affine(geometry, a, b, d, e, xOff, yOff)`
+
+Since: `1.5.0`
+
+Example:
+
+```sql
+ST_Affine(geometry, 1, 2, 4, 1, 1, 2, 3, 2, 5, 4, 8, 3)
+```
+
+Input: `LINESTRING EMPTY`
+
+Output: `LINESTRING EMPTY`
+
+Input: `POLYGON ((1 0 1, 1 1 1, 2 2 2, 1 0 1))`
+
+Output: `POLYGON Z((5 8 16, 7 9 20, 13 16 37, 5 8 16))`
+
+Input: `POLYGON ((1 0, 1 1, 2 1, 2 0, 1 0), (1 0.5, 1 0.75, 1.5 0.75, 1.5 0.5,
1 0.5))`
+
+Output: `POLYGON ((2 6, 4 7, 5 11, 3 10, 2 6), (3 6.5, 3.5 6.75, 4 8.75, 3.5
8.5, 3 6.5))`
+
+
+```sql
+ST_Affine(geometry, 1, 2, 1, 2, 1, 2)
+```
+
+Input: `POLYGON EMPTY`
+
+Output: `POLYGON EMPTY`
+
+Input: `GEOMETRYCOLLECTION (MULTIPOLYGON (((1 0, 1 1, 2 1, 2 0, 1 0), (1 0.5,
1 0.75, 1.5 0.75, 1.5 0.5, 1 0.5)), ((5 0, 5 5, 7 5, 7 0, 5 0))), POINT (10
10))`
+
+Output: `GEOMETRYCOLLECTION (MULTIPOLYGON (((2 3, 4 5, 5 6, 3 4, 2 3), (3 4,
3.5 4.5, 4 5, 3.5 4.5, 3 4)), ((6 7, 16 17, 18 19, 8 9, 6 7))), POINT (31 32))`
+
+Input: `POLYGON ((1 0 1, 1 1 1, 2 2 2, 1 0 1))`
+
+Output: `POLYGON Z((2 3 1, 4 5 1, 7 8 2, 2 3 1))`
+
## ST_Area
Introduction: Return the area of A
diff --git a/docs/api/sql/Function.md b/docs/api/sql/Function.md
index a45f8b9f..2ea607b2 100644
--- a/docs/api/sql/Function.md
+++ b/docs/api/sql/Function.md
@@ -35,6 +35,68 @@ LINESTRING(0 0, 21 52, 1 1, 1 0)
LINESTRING(0 0, 1 1, 1 0, 21 52)
```
+## ST_Affine
+
+Introduction: Apply an affine transformation to the given geometry.
+
+ST_Affine has 2 overloaded signatures:
+
+`ST_Affine(geometry, a, b, d, e, xOff, yOff, c, f, g, h, i, zOff)`
+
+`ST_Affine(geometry, a, b, d, e, xOff, yOff)`
+
+
+Based on the invoked function, the following transformation is applied:
+
+`x = a * x + b * y + c * z + xOff OR x = a * x + b * y + xOff`
+
+`y = d * x + e * y + f * z + yOff OR y = d * x + e * y + yOff`
+
+`z = g * x + h * y + i * z + zOff OR z = z (z is unchanged)`
+
+If the given geometry is empty, the result is also empty.
+
+Format: `ST_Affine(geometry, a, b, d, e, xOff, yOff, c, f, g, h, i, zOff)`
+Format: `ST_Affine(geometry, a, b, d, e, xOff, yOff)`
+
+Since: `1.5.0`
+
+Example:
+
+```sql
+ST_Affine(geometry, 1, 2, 4, 1, 1, 2, 3, 2, 5, 4, 8, 3)
+```
+
+Input: `LINESTRING EMPTY`
+
+Output: `LINESTRING EMPTY`
+
+Input: `POLYGON ((1 0 1, 1 1 1, 2 2 2, 1 0 1))`
+
+Output: `POLYGON Z((5 8 16, 7 9 20, 13 16 37, 5 8 16))`
+
+Input: `POLYGON ((1 0, 1 1, 2 1, 2 0, 1 0), (1 0.5, 1 0.75, 1.5 0.75, 1.5 0.5,
1 0.5))`
+
+Output: `POLYGON ((2 6, 4 7, 5 11, 3 10, 2 6), (3 6.5, 3.5 6.75, 4 8.75, 3.5
8.5, 3 6.5))`
+
+
+```sql
+ST_Affine(geometry, 1, 2, 1, 2, 1, 2)
+```
+
+Input: `POLYGON EMPTY`
+
+Output: `POLYGON EMPTY`
+
+Input: `GEOMETRYCOLLECTION (MULTIPOLYGON (((1 0, 1 1, 2 1, 2 0, 1 0), (1 0.5,
1 0.75, 1.5 0.75, 1.5 0.5, 1 0.5)), ((5 0, 5 5, 7 5, 7 0, 5 0))), POINT (10
10))`
+
+Output: `GEOMETRYCOLLECTION (MULTIPOLYGON (((2 3, 4 5, 5 6, 3 4, 2 3), (3 4,
3.5 4.5, 4 5, 3.5 4.5, 3 4)), ((6 7, 16 17, 18 19, 8 9, 6 7))), POINT (31 32))`
+
+Input: `POLYGON ((1 0 1, 1 1 1, 2 2 2, 1 0 1))`
+
+Output: `POLYGON Z((2 3 1, 4 5 1, 7 8 2, 2 3 1))`
+
+
## ST_Area
Introduction: Return the area of A
diff --git a/flink/src/main/java/org/apache/sedona/flink/Catalog.java
b/flink/src/main/java/org/apache/sedona/flink/Catalog.java
index 1c654944..d9ee14e8 100644
--- a/flink/src/main/java/org/apache/sedona/flink/Catalog.java
+++ b/flink/src/main/java/org/apache/sedona/flink/Catalog.java
@@ -100,6 +100,7 @@ public class Catalog {
new Functions.ST_Force3D(),
new Functions.ST_NRings(),
new Functions.ST_Translate(),
+ new Functions.ST_Affine(),
new Functions.ST_BoundingDiagonal(),
new Functions.ST_HausdorffDistance(),
};
diff --git
a/flink/src/main/java/org/apache/sedona/flink/expressions/Functions.java
b/flink/src/main/java/org/apache/sedona/flink/expressions/Functions.java
index 2eb719a5..e1a22bb7 100644
--- a/flink/src/main/java/org/apache/sedona/flink/expressions/Functions.java
+++ b/flink/src/main/java/org/apache/sedona/flink/expressions/Functions.java
@@ -13,7 +13,6 @@
*/
package org.apache.sedona.flink.expressions;
-import org.apache.calcite.runtime.Geometries;
import org.apache.flink.table.annotation.DataTypeHint;
import org.apache.flink.table.functions.ScalarFunction;
import org.locationtech.jts.geom.Geometry;
@@ -633,6 +632,26 @@ public class Functions {
}
}
+ public static class ST_Affine extends ScalarFunction {
+ @DataTypeHint(value = "RAW", bridgedTo =
org.locationtech.jts.geom.Geometry.class)
+ public Geometry eval(@DataTypeHint(value = "RAW", bridgedTo =
Geometry.class) Object o, @DataTypeHint("Double") Double a,
+ @DataTypeHint("Double") Double b,
@DataTypeHint("Double") Double d, @DataTypeHint("Double") Double e,
@DataTypeHint("Double") Double xOff, @DataTypeHint("Double") Double yOff,
@DataTypeHint("Double") Double c,
+ @DataTypeHint("Double") Double f,
@DataTypeHint("Double") Double g, @DataTypeHint("Double") Double h,
@DataTypeHint("Double") Double i,
+ @DataTypeHint("Double") Double zOff) {
+ Geometry geometry = (Geometry) o;
+ return org.apache.sedona.common.Functions.affine(geometry, a, b,
d, e, xOff, yOff, c, f, g, h, i, zOff);
+ }
+
+ @DataTypeHint(value = "RAW", bridgedTo =
org.locationtech.jts.geom.Geometry.class)
+ public Geometry eval(@DataTypeHint(value = "RAW", bridgedTo =
org.locationtech.jts.geom.Geometry.class) Object o, @DataTypeHint("Double")
Double a,
+ @DataTypeHint("Double") Double b,
@DataTypeHint("Double") Double d, @DataTypeHint("Double") Double e,
+ @DataTypeHint("Double") Double xOff,
@DataTypeHint("Double") Double yOff) {
+ Geometry geometry = (Geometry) o;
+ return org.apache.sedona.common.Functions.affine(geometry, a, b,
d, e, xOff, yOff);
+ }
+
+ }
+
public static class ST_BoundingDiagonal extends ScalarFunction {
@DataTypeHint(value = "RAW", bridgedTo =
org.locationtech.jts.geom.Geometry.class)
public Geometry eval(@DataTypeHint(value = "RAW", bridgedTo =
org.locationtech.jts.geom.Geometry.class) Object o) {
diff --git a/python/sedona/sql/st_functions.py
b/python/sedona/sql/st_functions.py
index 96420d94..6ee6c64f 100644
--- a/python/sedona/sql/st_functions.py
+++ b/python/sedona/sql/st_functions.py
@@ -113,6 +113,7 @@ __all__ = [
"ST_Force3D",
"ST_NRings",
"ST_Translate",
+ "ST_Affine",
"ST_BoundingDiagonal"
]
@@ -1290,6 +1291,34 @@ def ST_Translate(geometry: ColumnOrName, deltaX:
Union[ColumnOrName, float], del
return _call_st_function("ST_Translate", args)
@validate_argument_types
+def ST_Affine(geometry: ColumnOrName, a: Union[ColumnOrName, float], b:
Union[ColumnOrName, float], d: Union[ColumnOrName, float],
+ e: Union[ColumnOrName, float], xOff: Union[ColumnOrName,
float], yOff: Union[ColumnOrName, float], c: Optional[Union[ColumnOrName,
float]] = None, f: Optional[Union[ColumnOrName, float]] = None,
+ g: Optional[Union[ColumnOrName, float]] = None, h:
Optional[Union[ColumnOrName, float]] = None,
+ i: Optional[Union[ColumnOrName, float]] = None, zOff:
Optional[Union[ColumnOrName, float]] = None) -> Column:
+ """
+    Apply a 3D/2D affine transformation to the given geometry
+ x = a * x + b * y + c * z + xOff | x = a * x + b * y + xOff
+ y = d * x + e * y + f * z + yOff | y = d * x + e * y + yOff
+ z = g * x + h * y + i * z + zOff
+ :param geometry: Geometry to apply affine transformation to
+ :param a:
+ :param b:
+ :param c: Default 0.0
+ :param d:
+ :param e:
+ :param f: Default 0.0
+ :param g: Default 0.0
+ :param h: Default 0.0
+ :param i: Default 0.0
+ :param xOff:
+ :param yOff:
+ :param zOff: Default 0.0
+ :return: Geometry with affine transformation applied
+ """
+ args = (geometry, a, b, d, e, xOff, yOff, c, f, g, h, i, zOff)
+ return _call_st_function("ST_Affine", args)
+
+
def ST_BoundingDiagonal(geometry: ColumnOrName) -> Column:
"""
Returns a LineString with the min/max values of each dimension of the
bounding box of the given geometry as its
diff --git a/python/tests/sql/test_dataframe_api.py
b/python/tests/sql/test_dataframe_api.py
index beacf71a..d61bc4c1 100644
--- a/python/tests/sql/test_dataframe_api.py
+++ b/python/tests/sql/test_dataframe_api.py
@@ -50,6 +50,7 @@ test_configurations = [
# functions
(stf.ST_3DDistance, ("a", "b"), "two_points", "", 5.0),
+ (stf.ST_Affine, ("geom", 1.0, 2.0, 1.0, 2.0, 1.0, 2.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0), "square_geom", "", "POLYGON ((2 3, 4 5, 5 6, 3 4, 2 3))"),
(stf.ST_AddPoint, ("line", lambda: f.expr("ST_Point(1.0, 1.0)")),
"linestring_geom", "", "LINESTRING (0 0, 1 0, 2 0, 3 0, 4 0, 5 0, 1 1)"),
(stf.ST_AddPoint, ("line", lambda: f.expr("ST_Point(1.0, 1.0)"), 1),
"linestring_geom", "", "LINESTRING (0 0, 1 1, 1 0, 2 0, 3 0, 4 0, 5 0)"),
(stf.ST_Area, ("geom",), "triangle_geom", "", 0.5),
diff --git a/python/tests/sql/test_function.py
b/python/tests/sql/test_function.py
index dd0fe079..a161a6dd 100644
--- a/python/tests/sql/test_function.py
+++ b/python/tests/sql/test_function.py
@@ -31,7 +31,6 @@ from typing import List
class TestPredicateJoin(TestBase):
-
geo_schema = StructType(
[StructField("geom", GeometryType(), False)]
)
@@ -184,29 +183,34 @@ class TestPredicateJoin(TestBase):
polygon_df = self.spark.sql("select ST_GeomFromWKT(polygontable._c0)
as countyshape from polygontable")
polygon_df.createOrReplaceTempView("polygondf")
polygon_df.show()
- function_df = self.spark.sql("select
ST_Transform(ST_FlipCoordinates(polygondf.countyshape),
'epsg:4326','epsg:3857', false) from polygondf")
+ function_df = self.spark.sql(
+ "select ST_Transform(ST_FlipCoordinates(polygondf.countyshape),
'epsg:4326','epsg:3857', false) from polygondf")
function_df.show()
def test_st_intersection_intersects_but_not_contains(self):
- test_table = self.spark.sql("select ST_GeomFromWKT('POLYGON((1 1, 8 1,
8 8, 1 8, 1 1))') as a,ST_GeomFromWKT('POLYGON((2 2, 9 2, 9 9, 2 9, 2 2))') as
b")
+ test_table = self.spark.sql(
+ "select ST_GeomFromWKT('POLYGON((1 1, 8 1, 8 8, 1 8, 1 1))') as
a,ST_GeomFromWKT('POLYGON((2 2, 9 2, 9 9, 2 9, 2 2))') as b")
test_table.createOrReplaceTempView("testtable")
intersect = self.spark.sql("select ST_Intersection(a,b) from
testtable")
assert intersect.take(1)[0][0].wkt == "POLYGON ((2 8, 8 8, 8 2, 2 2, 2
8))"
def test_st_intersection_intersects_but_left_contains_right(self):
- test_table = self.spark.sql("select ST_GeomFromWKT('POLYGON((1 1, 1 5,
5 5, 1 1))') as a,ST_GeomFromWKT('POLYGON((2 2, 2 3, 3 3, 2 2))') as b")
+ test_table = self.spark.sql(
+ "select ST_GeomFromWKT('POLYGON((1 1, 1 5, 5 5, 1 1))') as
a,ST_GeomFromWKT('POLYGON((2 2, 2 3, 3 3, 2 2))') as b")
test_table.createOrReplaceTempView("testtable")
intersects = self.spark.sql("select ST_Intersection(a,b) from
testtable")
assert intersects.take(1)[0][0].wkt == "POLYGON ((2 2, 2 3, 3 3, 2 2))"
def test_st_intersection_intersects_but_right_contains_left(self):
- test_table = self.spark.sql("select ST_GeomFromWKT('POLYGON((2 2, 2 3,
3 3, 2 2))') as a,ST_GeomFromWKT('POLYGON((1 1, 1 5, 5 5, 1 1))') as b")
+ test_table = self.spark.sql(
+ "select ST_GeomFromWKT('POLYGON((2 2, 2 3, 3 3, 2 2))') as
a,ST_GeomFromWKT('POLYGON((1 1, 1 5, 5 5, 1 1))') as b")
test_table.createOrReplaceTempView("testtable")
intersects = self.spark.sql("select ST_Intersection(a,b) from
testtable")
assert intersects.take(1)[0][0].wkt == "POLYGON ((2 2, 2 3, 3 3, 2 2))"
def test_st_intersection_not_intersects(self):
- test_table = self.spark.sql("select ST_GeomFromWKT('POLYGON((40 21, 40
22, 40 23, 40 21))') as a,ST_GeomFromWKT('POLYGON((2 2, 9 2, 9 9, 2 9, 2 2))')
as b")
+ test_table = self.spark.sql(
+ "select ST_GeomFromWKT('POLYGON((40 21, 40 22, 40 23, 40 21))') as
a,ST_GeomFromWKT('POLYGON((2 2, 9 2, 9 9, 2 9, 2 2))') as b")
test_table.createOrReplaceTempView("testtable")
intersects = self.spark.sql("select ST_Intersection(a,b) from
testtable")
assert intersects.take(1)[0][0].wkt == "POLYGON EMPTY"
@@ -255,16 +259,17 @@ class TestPredicateJoin(TestBase):
wkt_df = self.spark.sql("select ST_AsText(countyshape) as wkt from
polygondf")
assert polygon_df.take(1)[0]["countyshape"].wkt ==
loads(wkt_df.take(1)[0]["wkt"]).wkt
-
def test_st_astext_3d(self):
input_df = self.spark.createDataFrame([
("Point(21 52 87)",),
("Polygon((0 0 1, 0 1 1, 1 1 1, 1 0 1, 0 0 1))",),
("Linestring(0 0 1, 1 1 2, 1 0 3)",),
("MULTIPOINT ((10 40 66), (40 30 77), (20 20 88), (30 10 99))",),
- ("MULTIPOLYGON (((30 20 11, 45 40 11, 10 40 11, 30 20 11)), ((15 5
11, 40 10 11, 10 20 11, 5 10 11, 15 5 11)))",),
+ (
+ "MULTIPOLYGON (((30 20 11, 45 40 11, 10 40 11, 30 20 11)), ((15 5
11, 40 10 11, 10 20 11, 5 10 11, 15 5 11)))",),
("MULTILINESTRING ((10 10 11, 20 20 11, 10 40 11), (40 40 11, 30
30 11, 40 20 11, 30 10 11))",),
- ("MULTIPOLYGON (((40 40 11, 20 45 11, 45 30 11, 40 40 11)), ((20
35 11, 10 30 11, 10 10 11, 30 5 11, 45 20 11, 20 35 11), (30 20 11, 20 15 11,
20 25 11, 30 20 11)))",),
+ (
+ "MULTIPOLYGON (((40 40 11, 20 45 11, 45 30 11, 40 40 11)), ((20 35
11, 10 30 11, 10 10 11, 30 5 11, 45 20 11, 20 35 11), (30 20 11, 20 15 11, 20
25 11, 30 20 11)))",),
("POLYGON((0 0 11, 0 5 11, 5 5 11, 5 0 11, 0 0 11), (1 1 11, 2 1
11, 2 2 11, 1 2 11, 1 1 11))",),
], ["wkt"])
@@ -285,64 +290,78 @@ class TestPredicateJoin(TestBase):
assert polygon_df.take(1)[0]["countyshape"].wkt ==
loads(wkt_df.take(1)[0]["wkt"]).wkt
def test_st_n_points(self):
- test = self.spark.sql("SELECT
ST_NPoints(ST_GeomFromText('LINESTRING(77.29 29.07,77.42 29.26,77.27
29.31,77.29 29.07)'))")
+ test = self.spark.sql(
+ "SELECT ST_NPoints(ST_GeomFromText('LINESTRING(77.29 29.07,77.42
29.26,77.27 29.31,77.29 29.07)'))")
def test_st_geometry_type(self):
- test = self.spark.sql("SELECT
ST_GeometryType(ST_GeomFromText('LINESTRING(77.29 29.07,77.42 29.26,77.27
29.31,77.29 29.07)'))")
+ test = self.spark.sql(
+ "SELECT ST_GeometryType(ST_GeomFromText('LINESTRING(77.29
29.07,77.42 29.26,77.27 29.31,77.29 29.07)'))")
def test_st_difference_right_overlaps_left(self):
- test_table = self.spark.sql("select ST_GeomFromWKT('POLYGON ((-3 -3, 3
-3, 3 3, -3 3, -3 -3))') as a,ST_GeomFromWKT('POLYGON ((0 -4, 4 -4, 4 4, 0 4, 0
-4))') as b")
+ test_table = self.spark.sql(
+ "select ST_GeomFromWKT('POLYGON ((-3 -3, 3 -3, 3 3, -3 3, -3
-3))') as a,ST_GeomFromWKT('POLYGON ((0 -4, 4 -4, 4 4, 0 4, 0 -4))') as b")
test_table.createOrReplaceTempView("test_diff")
diff = self.spark.sql("select ST_Difference(a,b) from test_diff")
assert diff.take(1)[0][0].wkt == "POLYGON ((0 -3, -3 -3, -3 3, 0 3, 0
-3))"
def test_st_difference_right_not_overlaps_left(self):
- test_table = self.spark.sql("select ST_GeomFromWKT('POLYGON ((-3 -3, 3
-3, 3 3, -3 3, -3 -3))') as a,ST_GeomFromWKT('POLYGON ((5 -3, 7 -3, 7 -1, 5 -1,
5 -3))') as b")
+ test_table = self.spark.sql(
+ "select ST_GeomFromWKT('POLYGON ((-3 -3, 3 -3, 3 3, -3 3, -3
-3))') as a,ST_GeomFromWKT('POLYGON ((5 -3, 7 -3, 7 -1, 5 -1, 5 -3))') as b")
test_table.createOrReplaceTempView("test_diff")
diff = self.spark.sql("select ST_Difference(a,b) from test_diff")
assert diff.take(1)[0][0].wkt == "POLYGON ((-3 -3, 3 -3, 3 3, -3 3, -3
-3))"
def test_st_difference_left_contains_right(self):
- test_table = self.spark.sql("select ST_GeomFromWKT('POLYGON ((-3 -3, 3
-3, 3 3, -3 3, -3 -3))') as a,ST_GeomFromWKT('POLYGON ((-1 -1, 1 -1, 1 1, -1 1,
-1 -1))') as b")
+ test_table = self.spark.sql(
+ "select ST_GeomFromWKT('POLYGON ((-3 -3, 3 -3, 3 3, -3 3, -3
-3))') as a,ST_GeomFromWKT('POLYGON ((-1 -1, 1 -1, 1 1, -1 1, -1 -1))') as b")
test_table.createOrReplaceTempView("test_diff")
diff = self.spark.sql("select ST_Difference(a,b) from test_diff")
assert diff.take(1)[0][0].wkt == "POLYGON ((-3 -3, -3 3, 3 3, 3 -3, -3
-3), (-1 -1, 1 -1, 1 1, -1 1, -1 -1))"
def test_st_difference_right_not_overlaps_left(self):
- test_table = self.spark.sql("select ST_GeomFromWKT('POLYGON ((-1 -1, 1
-1, 1 1, -1 1, -1 -1))') as a,ST_GeomFromWKT('POLYGON ((-3 -3, 3 -3, 3 3, -3 3,
-3 -3))') as b")
+ test_table = self.spark.sql(
+ "select ST_GeomFromWKT('POLYGON ((-1 -1, 1 -1, 1 1, -1 1, -1
-1))') as a,ST_GeomFromWKT('POLYGON ((-3 -3, 3 -3, 3 3, -3 3, -3 -3))') as b")
test_table.createOrReplaceTempView("test_diff")
diff = self.spark.sql("select ST_Difference(a,b) from test_diff")
assert diff.take(1)[0][0].wkt == "POLYGON EMPTY"
def test_st_sym_difference_part_of_right_overlaps_left(self):
- test_table = self.spark.sql("select ST_GeomFromWKT('POLYGON ((-1 -1, 1
-1, 1 1, -1 1, -1 -1))') as a,ST_GeomFromWKT('POLYGON ((0 -2, 2 -2, 2 0, 0 0, 0
-2))') as b")
+ test_table = self.spark.sql(
+ "select ST_GeomFromWKT('POLYGON ((-1 -1, 1 -1, 1 1, -1 1, -1
-1))') as a,ST_GeomFromWKT('POLYGON ((0 -2, 2 -2, 2 0, 0 0, 0 -2))') as b")
test_table.createOrReplaceTempView("test_sym_diff")
diff = self.spark.sql("select ST_SymDifference(a,b) from
test_sym_diff")
- assert diff.take(1)[0][0].wkt == "MULTIPOLYGON (((0 -1, -1 -1, -1 1, 1
1, 1 0, 0 0, 0 -1)), ((0 -1, 1 -1, 1 0, 2 0, 2 -2, 0 -2, 0 -1)))"
+ assert diff.take(1)[0][
+ 0].wkt == "MULTIPOLYGON (((0 -1, -1 -1, -1 1, 1 1, 1 0, 0
0, 0 -1)), ((0 -1, 1 -1, 1 0, 2 0, 2 -2, 0 -2, 0 -1)))"
def test_st_sym_difference_not_overlaps_left(self):
- test_table = self.spark.sql("select ST_GeomFromWKT('POLYGON ((-3 -3, 3
-3, 3 3, -3 3, -3 -3))') as a,ST_GeomFromWKT('POLYGON ((5 -3, 7 -3, 7 -1, 5 -1,
5 -3))') as b")
+ test_table = self.spark.sql(
+ "select ST_GeomFromWKT('POLYGON ((-3 -3, 3 -3, 3 3, -3 3, -3
-3))') as a,ST_GeomFromWKT('POLYGON ((5 -3, 7 -3, 7 -1, 5 -1, 5 -3))') as b")
test_table.createOrReplaceTempView("test_sym_diff")
diff = self.spark.sql("select ST_SymDifference(a,b) from
test_sym_diff")
- assert diff.take(1)[0][0].wkt == "MULTIPOLYGON (((-3 -3, -3 3, 3 3, 3
-3, -3 -3)), ((5 -3, 5 -1, 7 -1, 7 -3, 5 -3)))"
+ assert diff.take(1)[0][
+ 0].wkt == "MULTIPOLYGON (((-3 -3, -3 3, 3 3, 3 -3, -3 -3)),
((5 -3, 5 -1, 7 -1, 7 -3, 5 -3)))"
def test_st_sym_difference_contains(self):
- test_table = self.spark.sql("select ST_GeomFromWKT('POLYGON ((-3 -3, 3
-3, 3 3, -3 3, -3 -3))') as a,ST_GeomFromWKT('POLYGON ((-1 -1, 1 -1, 1 1, -1 1,
-1 -1))') as b")
+ test_table = self.spark.sql(
+ "select ST_GeomFromWKT('POLYGON ((-3 -3, 3 -3, 3 3, -3 3, -3
-3))') as a,ST_GeomFromWKT('POLYGON ((-1 -1, 1 -1, 1 1, -1 1, -1 -1))') as b")
test_table.createOrReplaceTempView("test_sym_diff")
diff = self.spark.sql("select ST_SymDifference(a,b) from
test_sym_diff")
assert diff.take(1)[0][0].wkt == "POLYGON ((-3 -3, -3 3, 3 3, 3 -3, -3
-3), (-1 -1, 1 -1, 1 1, -1 1, -1 -1))"
def test_st_union_part_of_right_overlaps_left(self):
- test_table = self.spark.sql("select ST_GeomFromWKT('POLYGON ((-3 -3, 3
-3, 3 3, -3 3, -3 -3))') as a, ST_GeomFromWKT('POLYGON ((-2 1, 2 1, 2 4, -2 4,
-2 1))') as b")
+ test_table = self.spark.sql(
+ "select ST_GeomFromWKT('POLYGON ((-3 -3, 3 -3, 3 3, -3 3, -3
-3))') as a, ST_GeomFromWKT('POLYGON ((-2 1, 2 1, 2 4, -2 4, -2 1))') as b")
test_table.createOrReplaceTempView("test_union")
union = self.spark.sql("select ST_Union(a,b) from test_union")
assert union.take(1)[0][0].wkt == "POLYGON ((2 3, 3 3, 3 -3, -3 -3, -3
3, -2 3, -2 4, 2 4, 2 3))"
def test_st_union_not_overlaps_left(self):
- test_table = self.spark.sql("select ST_GeomFromWKT('POLYGON ((-3 -3, 3
-3, 3 3, -3 3, -3 -3))') as a,ST_GeomFromWKT('POLYGON ((5 -3, 7 -3, 7 -1, 5 -1,
5 -3))') as b")
+ test_table = self.spark.sql(
+ "select ST_GeomFromWKT('POLYGON ((-3 -3, 3 -3, 3 3, -3 3, -3
-3))') as a,ST_GeomFromWKT('POLYGON ((5 -3, 7 -3, 7 -1, 5 -1, 5 -3))') as b")
test_table.createOrReplaceTempView("test_union")
union = self.spark.sql("select ST_Union(a,b) from test_union")
- assert union.take(1)[0][0].wkt == "MULTIPOLYGON (((-3 -3, -3 3, 3 3, 3
-3, -3 -3)), ((5 -3, 5 -1, 7 -1, 7 -3, 5 -3)))"
+ assert union.take(1)[0][
+ 0].wkt == "MULTIPOLYGON (((-3 -3, -3 3, 3 3, 3 -3, -3 -3)),
((5 -3, 5 -1, 7 -1, 7 -3, 5 -3)))"
def test_st_azimuth(self):
sample_points = create_sample_points(20)
@@ -385,11 +404,11 @@ class TestPredicateJoin(TestBase):
linestrings = linestring_df.selectExpr("ST_X(geom) as x").filter("x IS
NOT NULL")
- assert([point[0] for point in points] == [-71.064544, -88.331492,
88.331492, 1.0453, 32.324142])
+ assert ([point[0] for point in points] == [-71.064544, -88.331492,
88.331492, 1.0453, 32.324142])
- assert(not linestrings.count())
+ assert (not linestrings.count())
- assert(not polygons.count())
+ assert (not polygons.count())
def test_st_y(self):
point_df = create_sample_points_df(self.spark, 5)
@@ -403,11 +422,11 @@ class TestPredicateJoin(TestBase):
linestrings = linestring_df.selectExpr("ST_Y(geom) as y").filter("y IS
NOT NULL")
- assert([point[0] for point in points] == [42.28787, 32.324142,
32.324142, 5.3324324, -88.331492])
+ assert ([point[0] for point in points] == [42.28787, 32.324142,
32.324142, 5.3324324, -88.331492])
- assert(not linestrings.count())
+ assert (not linestrings.count())
- assert(not polygons.count())
+ assert (not polygons.count())
def test_st_z(self):
point_df = self.spark.sql(
@@ -427,27 +446,27 @@ class TestPredicateJoin(TestBase):
linestrings = linestring_df.selectExpr("ST_Z(geom) as z").filter("z IS
NOT NULL")
- assert([point[0] for point in points] == [3.3])
+ assert ([point[0] for point in points] == [3.3])
- assert(not linestrings.count())
+ assert (not linestrings.count())
- assert(not polygons.count())
+ assert (not polygons.count())
def test_st_z_max(self):
linestring_df = self.spark.sql("SELECT ST_GeomFromWKT('LINESTRING Z (0
0 1, 0 1 2)') as geom")
linestring_row = [lnstr_row[0] for lnstr_row in
linestring_df.selectExpr("ST_ZMax(geom)").collect()]
- assert(linestring_row == [2.0])
+ assert (linestring_row == [2.0])
def test_st_z_min(self):
- linestring_df = self.spark.sql("SELECT ST_GeomFromWKT('POLYGON Z ((0 0
2, 0 1 1, 1 1 2, 1 0 2, 0 0 2))') as geom")
+ linestring_df = self.spark.sql(
+ "SELECT ST_GeomFromWKT('POLYGON Z ((0 0 2, 0 1 1, 1 1 2, 1 0 2, 0
0 2))') as geom")
linestring_row = [lnstr_row[0] for lnstr_row in
linestring_df.selectExpr("ST_ZMin(geom)").collect()]
- assert(linestring_row == [1.0])
+ assert (linestring_row == [1.0])
def test_st_n_dims(self):
point_df = self.spark.sql("SELECT ST_GeomFromWKT('POINT(1 1 2)') as
geom")
point_row = [pt_row[0] for pt_row in
point_df.selectExpr("ST_NDims(geom)").collect()]
- assert(point_row == [3])
-
+ assert (point_row == [3])
def test_st_start_point(self):
@@ -469,11 +488,11 @@ class TestPredicateJoin(TestBase):
linestrings = linestring_df.selectExpr("ST_StartPoint(geom) as
geom").filter("geom IS NOT NULL")
- assert([line[0] for line in linestrings.collect()] == [wkt.loads(el)
for el in expected_points])
+ assert ([line[0] for line in linestrings.collect()] == [wkt.loads(el)
for el in expected_points])
- assert(not points.count())
+ assert (not points.count())
- assert(not polygons.count())
+ assert (not polygons.count())
def test_st_end_point(self):
linestring_dataframe = create_sample_lines_df(self.spark, 5)
@@ -493,10 +512,10 @@ class TestPredicateJoin(TestBase):
empty_dataframe =
other_geometry_dataframe.selectExpr("ST_EndPoint(geom) as geom"). \
filter("geom IS NOT NULL")
- assert([wkt_row[0]
- for wkt_row in
point_data_frame.selectExpr("ST_AsText(geom)").collect()] ==
expected_ending_points)
+ assert ([wkt_row[0]
+ for wkt_row in
point_data_frame.selectExpr("ST_AsText(geom)").collect()] ==
expected_ending_points)
- assert(empty_dataframe.count() == 0)
+ assert (empty_dataframe.count() == 0)
def test_st_boundary(self):
wkt_list = [
@@ -519,7 +538,7 @@ class TestPredicateJoin(TestBase):
boundary_table = geometry_table.selectExpr("ST_Boundary(geom) as geom")
boundary_wkt = [wkt_row[0] for wkt_row in
boundary_table.selectExpr("ST_AsText(geom)").collect()]
- assert(boundary_wkt == [
+ assert (boundary_wkt == [
"MULTIPOINT ((1 1), (-1 1))",
"MULTIPOINT ((100 150), (160 170))",
"MULTILINESTRING ((10 130, 50 190, 110 190, 140 150, 150 80, 100
10, 20 40, 10 130), (70 40, 100 50, 120 80, 80 110, 50 90, 70 40))",
@@ -542,33 +561,34 @@ class TestPredicateJoin(TestBase):
linestring_wkt = [wkt_row[0] for wkt_row in
linestring_df.selectExpr("ST_AsText(geom)").collect()]
- assert(linestring_wkt == ["LINESTRING (0 0, 0 1, 1 1, 1 0, 0 0)",
"LINESTRING (0 0, 1 1, 1 2, 1 1, 0 0)"])
+ assert (linestring_wkt == ["LINESTRING (0 0, 0 1, 1 1, 1 0, 0 0)",
"LINESTRING (0 0, 1 1, 1 2, 1 1, 0 0)"])
- assert(not empty_df.count())
+ assert (not empty_df.count())
def test_st_geometry_n(self):
data_frame = self.__wkt_list_to_data_frame(["MULTIPOINT((1 2), (3 4),
(5 6), (8 9))"])
wkts = [data_frame.selectExpr(f"ST_GeometryN(geom, {i}) as
geom").selectExpr("st_asText(geom)").collect()[0][0]
for i in range(0, 4)]
- assert(wkts == ["POINT (1 2)", "POINT (3 4)", "POINT (5 6)", "POINT (8
9)"])
+ assert (wkts == ["POINT (1 2)", "POINT (3 4)", "POINT (5 6)", "POINT
(8 9)"])
def test_st_interior_ring_n(self):
polygon_df = self.__wkt_list_to_data_frame(
- ["POLYGON((0 0, 0 5, 5 5, 5 0, 0 0), (1 1, 2 1, 2 2, 1 2, 1 1), (1
3, 2 3, 2 4, 1 4, 1 3), (3 3, 4 3, 4 4, 3 4, 3 3))"]
+ [
+ "POLYGON((0 0, 0 5, 5 5, 5 0, 0 0), (1 1, 2 1, 2 2, 1 2, 1 1),
(1 3, 2 3, 2 4, 1 4, 1 3), (3 3, 4 3, 4 4, 3 4, 3 3))"]
)
other_geometry = create_sample_points_df(self.spark,
5).union(create_sample_lines_df(self.spark, 5))
wholes = [polygon_df.selectExpr(f"ST_InteriorRingN(geom, {i}) as
geom").
- selectExpr("ST_AsText(geom)").collect()[0][0]
+ selectExpr("ST_AsText(geom)").collect()[0][0]
for i in range(3)]
empty_df = other_geometry.selectExpr("ST_InteriorRingN(geom, 1) as
geom").filter("geom IS NOT NULL")
- assert(not empty_df.count())
- assert(wholes == ["LINESTRING (1 1, 2 1, 2 2, 1 2, 1 1)",
- "LINESTRING (1 3, 2 3, 2 4, 1 4, 1 3)",
- "LINESTRING (3 3, 4 3, 4 4, 3 4, 3 3)"])
+ assert (not empty_df.count())
+ assert (wholes == ["LINESTRING (1 1, 2 1, 2 2, 1 2, 1 1)",
+ "LINESTRING (1 3, 2 3, 2 4, 1 4, 1 3)",
+ "LINESTRING (3 3, 4 3, 4 4, 3 4, 3 3)"])
def test_st_dumps(self):
expected_geometries = [
@@ -598,14 +618,14 @@ class TestPredicateJoin(TestBase):
dumped_geometries = geometry_df.selectExpr("ST_Dump(geom) as geom")
- assert(dumped_geometries.select(explode(col("geom"))).count() == 14)
+ assert (dumped_geometries.select(explode(col("geom"))).count() == 14)
collected_geometries = dumped_geometries \
.select(explode(col("geom")).alias("geom")) \
.selectExpr("ST_AsText(geom) as geom") \
.collect()
- assert([geom_row[0] for geom_row in collected_geometries] ==
expected_geometries)
+ assert ([geom_row[0] for geom_row in collected_geometries] ==
expected_geometries)
def test_st_dump_points(self):
expected_points = [
@@ -625,10 +645,10 @@ class TestPredicateJoin(TestBase):
dumped_points = geometry_df.selectExpr("ST_DumpPoints(geom) as geom") \
.select(explode(col("geom")).alias("geom"))
- assert(dumped_points.count() == 10)
+ assert (dumped_points.count() == 10)
collected_points = [geom_row[0] for geom_row in
dumped_points.selectExpr("ST_AsText(geom)").collect()]
- assert(collected_points == expected_points)
+ assert (collected_points == expected_points)
def test_st_is_closed(self):
expected_result = [
@@ -653,13 +673,14 @@ class TestPredicateJoin(TestBase):
(7, "MULTILINESTRING ((10 10, 20 20, 10 40, 10 10), (40 40, 30 30,
40 20, 30 10, 40 40))"),
(8, "MULTILINESTRING ((10 10, 20 20, 10 40, 10 10), (40 40, 30 30,
40 20, 30 10))"),
(9, "MULTILINESTRING ((10 10, 20 20, 10 40), (40 40, 30 30, 40 20,
30 10))"),
- (10, "GEOMETRYCOLLECTION (POINT (40 10), LINESTRING (10 10, 20 20,
10 40), POLYGON ((40 40, 20 45, 45 30, 40 40)))")
+ (10,
+ "GEOMETRYCOLLECTION (POINT (40 10), LINESTRING (10 10, 20 20, 10
40), POLYGON ((40 40, 20 45, 45 30, 40 40)))")
]
geometry_df =
self.__wkt_pair_list_with_index_to_data_frame(geometry_list)
is_closed = geometry_df.selectExpr("index",
"ST_IsClosed(geom)").collect()
is_closed_collected = [[*row] for row in is_closed]
- assert(is_closed_collected == expected_result)
+ assert (is_closed_collected == expected_result)
def test_num_interior_ring(self):
geometries = [
@@ -672,14 +693,15 @@ class TestPredicateJoin(TestBase):
(7, "MULTILINESTRING ((10 10, 20 20, 10 40, 10 10), (40 40, 30 30,
40 20, 30 10, 40 40))"),
(8, "MULTILINESTRING ((10 10, 20 20, 10 40, 10 10), (40 40, 30 30,
40 20, 30 10))"),
(9, "MULTILINESTRING ((10 10, 20 20, 10 40), (40 40, 30 30, 40 20,
30 10))"),
- (10, "GEOMETRYCOLLECTION (POINT (40 10), LINESTRING (10 10, 20 20,
10 40), POLYGON ((40 40, 20 45, 45 30, 40 40)))"),
+ (10,
+ "GEOMETRYCOLLECTION (POINT (40 10), LINESTRING (10 10, 20 20, 10
40), POLYGON ((40 40, 20 45, 45 30, 40 40)))"),
(11, "POLYGON ((0 0, 0 5, 5 5, 5 0, 0 0), (1 1, 2 1, 2 2, 1 2, 1
1))")]
geometry_df = self.__wkt_pair_list_with_index_to_data_frame(geometries)
number_of_interior_rings = geometry_df.selectExpr("index",
"ST_NumInteriorRings(geom) as num")
collected_interior_rings = [[*row] for row in
number_of_interior_rings.filter("num is not null").collect()]
- assert(collected_interior_rings == [[2, 0], [11, 1]])
+ assert (collected_interior_rings == [[2, 0], [11, 1]])
def test_st_add_point(self):
geometry = [
@@ -692,7 +714,9 @@ class TestPredicateJoin(TestBase):
("MULTILINESTRING ((10 10, 20 20, 10 40, 10 10), (40 40, 30 30, 40
20, 30 10, 40 40))", "Point(21 52)"),
("MULTILINESTRING ((10 10, 20 20, 10 40, 10 10), (40 40, 30 30, 40
20, 30 10))", "Point(21 52)"),
("MULTILINESTRING ((10 10, 20 20, 10 40), (40 40, 30 30, 40 20, 30
10))", "Point(21 52)"),
- ("GEOMETRYCOLLECTION (POINT (40 10), LINESTRING (10 10, 20 20, 10
40), POLYGON ((40 40, 20 45, 45 30, 40 40)))", "Point(21 52)"),
+ (
+ "GEOMETRYCOLLECTION (POINT (40 10), LINESTRING (10 10, 20 20, 10
40), POLYGON ((40 40, 20 45, 45 30, 40 40)))",
+ "Point(21 52)"),
("POLYGON ((0 0, 0 5, 5 5, 5 0, 0 0), (1 1, 2 1, 2 2, 1 2, 1 1))",
"Point(21 52)")
]
geometry_df = self.__wkt_pairs_to_data_frame(geometry)
@@ -700,7 +724,7 @@ class TestPredicateJoin(TestBase):
collected_geometries = [
row[0] for row in modified_geometries.filter("geom is not
null").selectExpr("ST_AsText(geom)").collect()
]
- assert(collected_geometries[0] == "LINESTRING (0 0, 1 1, 1 0, 21 52)")
+ assert (collected_geometries[0] == "LINESTRING (0 0, 1 1, 1 0, 21 52)")
def test_st_remove_point(self):
result_and_expected = [
@@ -711,11 +735,13 @@ class TestPredicateJoin(TestBase):
[self.calculate_st_remove("POINT(0 1)", 3), None],
[self.calculate_st_remove("POLYGON ((0 0, 0 5, 5 5, 5 0, 0 0), (1
1, 2 1, 2 2, 1 2, 1 1))", 3), None],
[self.calculate_st_remove("GEOMETRYCOLLECTION (POINT (40 10),
LINESTRING (10 10, 20 20, 10 40))", 0), None],
- [self.calculate_st_remove("MULTIPOLYGON (((30 20, 45 40, 10 40, 30
20)), ((15 5, 40 10, 10 20, 5 10, 15 5)))", 3), None],
- [self.calculate_st_remove("MULTILINESTRING ((10 10, 20 20, 10 40,
10 10), (40 40, 30 30, 40 20, 30 10, 40 40))", 3), None]
+ [self.calculate_st_remove(
+ "MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20)), ((15 5, 40 10,
10 20, 5 10, 15 5)))", 3), None],
+ [self.calculate_st_remove(
+ "MULTILINESTRING ((10 10, 20 20, 10 40, 10 10), (40 40, 30 30,
40 20, 30 10, 40 40))", 3), None]
]
for actual, expected in result_and_expected:
- assert(actual == expected)
+ assert (actual == expected)
def test_st_is_ring(self):
result_and_expected = [
@@ -726,7 +752,7 @@ class TestPredicateJoin(TestBase):
[self.calculate_st_is_ring("POLYGON ((0 0, 0 5, 5 5, 5 0, 0 0), (1
1, 2 1, 2 2, 1 2, 1 1))"), None],
]
for actual, expected in result_and_expected:
- assert(actual == expected)
+ assert (actual == expected)
def test_st_subdivide(self):
# Given
@@ -797,12 +823,11 @@ class TestPredicateJoin(TestBase):
geom_poly = geometry_df.withColumn("polygon",
expr("ST_MakePolygon(geom)"))
# Then only based on closed linestring geom is created
- geom_poly.filter("polygon IS NOT
NULL").selectExpr("ST_AsText(polygon)", "expected").\
+ geom_poly.filter("polygon IS NOT
NULL").selectExpr("ST_AsText(polygon)", "expected"). \
show()
- result = geom_poly.filter("polygon IS NOT
NULL").selectExpr("ST_AsText(polygon)", "expected").\
+ result = geom_poly.filter("polygon IS NOT
NULL").selectExpr("ST_AsText(polygon)", "expected"). \
collect()
-
assert result.__len__() == 1
for actual, expected in result:
@@ -819,7 +844,7 @@ class TestPredicateJoin(TestBase):
).select(expr("St_GeomFromText(_1)").alias("geom"),
col("_2").alias("expected_hash"))
# When
- geohash_df = geometry_df.withColumn("geohash", expr("ST_GeoHash(geom,
10)")).\
+ geohash_df = geometry_df.withColumn("geohash", expr("ST_GeoHash(geom,
10)")). \
select("geohash", "expected_hash")
# Then
@@ -886,7 +911,7 @@ class TestPredicateJoin(TestBase):
geometry_df_collected = geometry_df.withColumn("collected",
expr("ST_Collect(geom)"))
# then result should be as expected
- assert(set([el[0] for el in
geometry_df_collected.selectExpr("ST_AsText(collected)").collect()]) == {
+ assert (set([el[0] for el in
geometry_df_collected.selectExpr("ST_AsText(collected)").collect()]) == {
"MULTILINESTRING ((1 2, 3 4), (3 4, 4 5))",
"MULTIPOINT ((1 2), (-2 3))",
"MULTIPOLYGON (((1 2, 1 4, 3 4, 3 2, 1 2)), ((0.5 0.5, 5 0, 5 5, 0
5, 0.5 0.5)))"
@@ -904,7 +929,7 @@ class TestPredicateJoin(TestBase):
geometry_df_collected = geometry_df.withColumn("collected",
expr("ST_Collect(geom_left, geom_right)"))
# then result should be calculated
- assert(set([el[0] for el in
geometry_df_collected.selectExpr("ST_AsText(collected)").collect()]) == {
+ assert (set([el[0] for el in
geometry_df_collected.selectExpr("ST_AsText(collected)").collect()]) == {
"MULTILINESTRING ((1 2, 3 4), (3 4, 4 5))",
"MULTIPOINT ((1 2), (-2 3))",
"MULTIPOLYGON (((1 2, 1 4, 3 4, 3 2, 1 2)), ((0.5 0.5, 5 0, 5 5, 0
5, 0.5 0.5)))"
@@ -959,30 +984,34 @@ class TestPredicateJoin(TestBase):
return geometry_collected[0][0] if geometry_collected.__len__() != 0
else None
def __wkt_pairs_to_data_frame(self, wkt_list: List) -> DataFrame:
- return self.spark.createDataFrame([[wkt.loads(wkt_a),
wkt.loads(wkt_b)] for wkt_a, wkt_b in wkt_list], self.geo_pair_schema)
+ return self.spark.createDataFrame([[wkt.loads(wkt_a),
wkt.loads(wkt_b)] for wkt_a, wkt_b in wkt_list],
+ self.geo_pair_schema)
def __wkt_list_to_data_frame(self, wkt_list: List) -> DataFrame:
return self.spark.createDataFrame([[wkt.loads(given_wkt)] for
given_wkt in wkt_list], self.geo_schema)
def __wkt_pair_list_with_index_to_data_frame(self, wkt_list: List) ->
DataFrame:
- return self.spark.createDataFrame([[index, wkt.loads(given_wkt)] for
index, given_wkt in wkt_list], self.geo_schema_with_index)
+ return self.spark.createDataFrame([[index, wkt.loads(given_wkt)] for
index, given_wkt in wkt_list],
+ self.geo_schema_with_index)
def test_st_pointonsurface(self):
tests1 = {
- "'POINT(0 5)'":"POINT (0 5)",
- "'LINESTRING(0 5, 0 10)'":"POINT (0 5)",
- "'POLYGON((0 0, 0 5, 5 5, 5 0, 0 0))'":"POINT (2.5 2.5)",
- "'LINESTRING(0 5 1, 0 0 1, 0 10 2)'":"POINT Z(0 0 1)"
+ "'POINT(0 5)'": "POINT (0 5)",
+ "'LINESTRING(0 5, 0 10)'": "POINT (0 5)",
+ "'POLYGON((0 0, 0 5, 5 5, 5 0, 0 0))'": "POINT (2.5 2.5)",
+ "'LINESTRING(0 5 1, 0 0 1, 0 10 2)'": "POINT Z(0 0 1)"
}
for input_geom, expected_geom in tests1.items():
- pointOnSurface = self.spark.sql("select
ST_AsText(ST_PointOnSurface(ST_GeomFromText({})))".format(input_geom))
+ pointOnSurface = self.spark.sql(
+ "select
ST_AsText(ST_PointOnSurface(ST_GeomFromText({})))".format(input_geom))
assert pointOnSurface.take(1)[0][0] == expected_geom
- tests2 = { "'LINESTRING(0 5 1, 0 0 1, 0 10 2)'":"POINT Z(0 0 1)" }
+ tests2 = {"'LINESTRING(0 5 1, 0 0 1, 0 10 2)'": "POINT Z(0 0 1)"}
for input_geom, expected_geom in tests2.items():
- pointOnSurface = self.spark.sql("select
ST_AsEWKT(ST_PointOnSurface(ST_GeomFromWKT({})))".format(input_geom))
+ pointOnSurface = self.spark.sql(
+ "select
ST_AsEWKT(ST_PointOnSurface(ST_GeomFromWKT({})))".format(input_geom))
assert pointOnSurface.take(1)[0][0] == expected_geom
def test_st_pointn(self):
@@ -1058,7 +1087,8 @@ class TestPredicateJoin(TestBase):
"LINESTRING Z(10 40 66, 40 30 77, 20 20 88, 30 10 99)"
}
for input_geom, expected_geom in test_cases.items():
- line_geometry = self.spark.sql("select
ST_AsText(ST_LineFromMultiPoint(ST_GeomFromText({})))".format(input_geom))
+ line_geometry = self.spark.sql(
+ "select
ST_AsText(ST_LineFromMultiPoint(ST_GeomFromText({})))".format(input_geom))
assert line_geometry.take(1)[0][0] == expected_geom
def test_st_s2_cell_ids(self):
@@ -1094,10 +1124,17 @@ class TestPredicateJoin(TestBase):
def test_translate(self):
expected = "POLYGON ((3 5, 3 6, 4 6, 4 5, 3 5))"
- actualDf = self.spark.sql("SELECT
ST_Translate(ST_GeomFromText('POLYGON ((1 0, 1 1, 2 1, 2 0, 1 0))'), 2, 5) AS
geom")
- actual = actualDf.selectExpr("ST_AsText(geom)").take(1)[0][0]
+ actual_df = self.spark.sql(
+ "SELECT ST_Translate(ST_GeomFromText('POLYGON ((1 0, 1 1, 2 1, 2
0, 1 0))'), 2, 5) AS geom")
+ actual = actual_df.selectExpr("ST_AsText(geom)").take(1)[0][0]
assert expected == actual
+ def test_affine(self):
+ expected = "POLYGON Z((2 3 1, 4 5 1, 7 8 2, 2 3 1))"
+ actual_df = self.spark.sql("SELECT ST_Affine(ST_GeomFromText('POLYGON
((1 0 1, 1 1 1, 2 2 2, 1 0 1))'), 1, 2, "
+ "1, 2, 1, 2) AS geom")
+ actual = actual_df.selectExpr("ST_AsText(geom)").take(1)[0][0]
+ assert expected == actual
def test_boundingDiagonal(self):
expected = "LINESTRING (1 0, 2 1)"
actual_df = self.spark.sql("SELECT
ST_BoundingDiagonal(ST_GeomFromText('POLYGON ((1 0, 1 1, 2 1, 2 0, "
diff --git a/sql/common/src/main/scala/org/apache/sedona/sql/UDF/Catalog.scala
b/sql/common/src/main/scala/org/apache/sedona/sql/UDF/Catalog.scala
index e14fb554..c74513bd 100644
--- a/sql/common/src/main/scala/org/apache/sedona/sql/UDF/Catalog.scala
+++ b/sql/common/src/main/scala/org/apache/sedona/sql/UDF/Catalog.scala
@@ -152,6 +152,7 @@ object Catalog {
function[ST_Force3D](0.0),
function[ST_NRings](),
function[ST_Translate](0.0),
+ function[ST_Affine](null, null, null, null, null, null),
function[ST_BoundingDiagonal](),
function[ST_HausdorffDistance](-1),
// Expression for rasters
diff --git
a/sql/common/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/Functions.scala
b/sql/common/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/Functions.scala
index f8eef8a8..6ea96af1 100644
---
a/sql/common/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/Functions.scala
+++
b/sql/common/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/Functions.scala
@@ -1005,13 +1005,19 @@ case class ST_Translate(inputExpressions:
Seq[Expression])
}
}
+case class ST_Affine(inputExpressions: Seq[Expression])
+ extends
InferredExpression(InferrableFunction.allowSixRightNull(Functions.affine _))
with FoldableExpression {
+ protected def withNewChildrenInternal(newChildren: IndexedSeq[Expression]) =
{
+ copy(inputExpressions = newChildren)
+ }
+}
+
case class ST_Dimension(inputExpressions: Seq[Expression])
extends InferredExpression(Functions.dimension _) with FoldableExpression {
protected def withNewChildrenInternal(newChildren: IndexedSeq[Expression]) =
{
copy(inputExpressions = newChildren)
}
}
-
case class ST_BoundingDiagonal(inputExpressions: Seq[Expression])
extends InferredExpression(Functions.boundingDiagonal _) {
protected def withNewChildrenInternal(newChildren: IndexedSeq[Expression]) =
{
diff --git
a/sql/common/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/InferredExpression.scala
b/sql/common/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/InferredExpression.scala
index a1ce24b9..a943765c 100644
---
a/sql/common/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/InferredExpression.scala
+++
b/sql/common/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/InferredExpression.scala
@@ -220,4 +220,45 @@ object InferrableFunction {
}
})
}
+
+ def allowSixRightNull[R, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12,
A13](f: (A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13) => R)
+
(implicit typeTag: TypeTag[(A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11,
A12, A13) => R]): InferrableFunction = {
+ apply(typeTag, extractors => {
+ val func = f.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any,
Any, Any, Any, Any) => Any]
+ val extractor1 = extractors(0)
+ val extractor2 = extractors(1)
+ val extractor3 = extractors(2)
+ val extractor4 = extractors(3)
+ val extractor5 = extractors(4)
+ val extractor6 = extractors(5)
+ val extractor7 = extractors(6)
+ val extractor8 = extractors(7)
+ val extractor9 = extractors(8)
+ val extractor10 = extractors(9)
+ val extractor11 = extractors(10)
+ val extractor12 = extractors(11)
+ val extractor13 = extractors(12)
+ input => {
+ val arg1 = extractor1(input)
+ val arg2 = extractor2(input)
+ val arg3 = extractor3(input)
+ val arg4 = extractor4(input)
+ val arg5 = extractor5(input)
+ val arg6 = extractor6(input)
+ val arg7 = extractor7(input)
+ val arg8 = extractor8(input)
+ val arg9 = extractor9(input)
+ val arg10 = extractor10(input)
+ val arg11 = extractor11(input)
+ val arg12 = extractor12(input)
+ val arg13 = extractor13(input)
+ if (arg1 != null && arg2 != null && arg3 != null && arg4 != null &&
arg5 != null && arg6 != null && arg7 != null) {
+ func(arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10,
arg11, arg12, arg13)
+ } else {
+ null
+ }
+ }
+ })
+ }
+
}
diff --git
a/sql/common/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/st_functions.scala
b/sql/common/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/st_functions.scala
index 2fc3985b..562f3629 100644
---
a/sql/common/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/st_functions.scala
+++
b/sql/common/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/st_functions.scala
@@ -329,6 +329,17 @@ object st_functions extends DataFrameAPI {
def ST_Translate(geometry: String, deltaX: Double, deltaY: Double): Column =
wrapExpression[ST_Translate](geometry, deltaX, deltaY, 0.0)
+ def ST_Affine(geometry: Column, a: Column, b: Column, d: Column, e: Column,
xOff: Column, yOff: Column, c: Column, f: Column, g: Column, h: Column, i:
Column, zOff: Column): Column =
+ wrapExpression[ST_Affine](geometry, a, b, d, e, xOff, yOff, c, f, g, h, i,
zOff)
+
+ def ST_Affine(geometry: String, a: Double, b: Double, d: Double, e: Double,
xOff: Double, yOff: Double, c: Double, f: Double, g: Double, h: Double, i:
Double, zOff: Double): Column =
+ wrapExpression[ST_Affine](geometry, a, b, d, e, xOff, yOff, c, f, g, h, i,
zOff)
+
+ def ST_Affine(geometry: Column, a: Column, b: Column, d: Column, e: Column,
xOff: Column, yOff: Column) =
+ wrapExpression[ST_Affine](geometry, a, b, d, e, xOff, yOff, null, null,
null, null, null, null)
+
+ def ST_Affine(geometry: String, a: Double, b: Double, d: Double, e: Double,
xOff: Double, yOff: Double) =
+ wrapExpression[ST_Affine](geometry, a, b, d, e, xOff, yOff, null, null,
null, null, null, null)
def ST_BoundingDiagonal(geometry: Column) =
wrapExpression[ST_BoundingDiagonal](geometry)
diff --git
a/sql/common/src/test/scala/org/apache/sedona/sql/dataFrameAPITestScala.scala
b/sql/common/src/test/scala/org/apache/sedona/sql/dataFrameAPITestScala.scala
index a1c36ade..62006f7b 100644
---
a/sql/common/src/test/scala/org/apache/sedona/sql/dataFrameAPITestScala.scala
+++
b/sql/common/src/test/scala/org/apache/sedona/sql/dataFrameAPITestScala.scala
@@ -1003,6 +1003,21 @@ class dataFrameAPITestScala extends TestBaseScala {
assert(expectedDefaultValue == actualDefaultValue)
}
+ it("Passed ST_Affine") {
+ val polyDf = sparkSession.sql("SELECT ST_GeomFromWKT('POLYGON ((2 3 1, 4
5 1, 7 8 2, 2 3 1))') AS geom")
+ val df = polyDf.select(ST_Affine("geom", 1, 2, 3, 4, 1, 2, 3, 4, 1, 4,
2, 1));
+ val dfDefaultValue = polyDf.select(ST_Affine("geom", 1, 2, 1, 2, 1, 2))
+ val wKTWriter3D = new WKTWriter(3);
+ val actualGeom = df.take(1)(0).get(0).asInstanceOf[Geometry]
+ val actualGeomDefaultValue =
dfDefaultValue.take(1)(0).get(0).asInstanceOf[Geometry]
+ val actual = wKTWriter3D.write(actualGeom)
+ val expected = "POLYGON Z((12 24 17, 18 38 27, 30 63 44, 12 24 17))"
+ val actualDefaultValue = wKTWriter3D.write(actualGeomDefaultValue)
+ val expectedDefaultValue = "POLYGON Z((9 10 1, 15 16 1, 24 25 2, 9 10
1))"
+ assertEquals(expected, actual)
+ assertEquals(expectedDefaultValue, actualDefaultValue)
+ }
+
it("Passed ST_BoundingDiagonal") {
val polyDf = sparkSession.sql("SELECT ST_GeomFromWKT('POLYGON ((1 0 1, 2
3 2, 5 0 1, 5 2 9, 1 0 1))') AS geom")
val df = polyDf.select(ST_BoundingDiagonal("geom"))
@@ -1023,6 +1038,5 @@ class dataFrameAPITestScala extends TestBaseScala {
assert(expected == actual)
assert(expected == actualDefaultValue)
}
-
}
}
diff --git
a/sql/common/src/test/scala/org/apache/sedona/sql/functionTestScala.scala
b/sql/common/src/test/scala/org/apache/sedona/sql/functionTestScala.scala
index 7b8f23eb..4200b3e6 100644
--- a/sql/common/src/test/scala/org/apache/sedona/sql/functionTestScala.scala
+++ b/sql/common/src/test/scala/org/apache/sedona/sql/functionTestScala.scala
@@ -2005,6 +2005,28 @@ class functionTestScala extends TestBaseScala with
Matchers with GeometrySample
}
}
+ it ("should pass ST_Affine") {
+ val geomTestCases = Map (
+ ("'POLYGON ((1 0 1, 1 1 1, 2 2 2, 1 0 1))'")-> ("'POLYGON Z((5 8 16, 7 9
20, 13 16 37, 5 8 16))'", "'POLYGON Z((2 3 1, 4 5 1, 7 8 2, 2 3 1))'"),
+ ("'LINESTRING EMPTY'") -> ("'LINESTRING EMPTY'", "'LINESTRING EMPTY'"),
+ ("'GEOMETRYCOLLECTION (MULTIPOLYGON (((1 0, 1 1, 2 1, 2 0, 1 0), (1 0.5,
1 0.75, 1.5 0.75, 1.5 0.5, 1 0.5)), ((5 0, 5 5, 7 5, 7 0, 5 0))), POINT (10
10))'")->
+ ("'GEOMETRYCOLLECTION
(MULTIPOLYGON (((2 6, 4 7, 5 11, 3 10, 2 6), (3 6.5, 3.5 6.75, 4 8.75, 3.5 8.5,
3 6.5)), ((6 22, 16 27, 18 35, 8 30, 6 22))), POINT (31 52))'",
+ "'GEOMETRYCOLLECTION
(MULTIPOLYGON (((2 3, 4 5, 5 6, 3 4, 2 3), (3 4, 3.5 4.5, 4 5, 3.5 4.5, 3 4)),
((6 7, 16 17, 18 19, 8 9, 6 7))), POINT (31 32))'")
+ )
+ for (((geom), expectedResult) <- geomTestCases) {
+ val df = sparkSession.sql(s"SELECT
ST_AsText(ST_Affine(ST_GeomFromWKT($geom), 1, 2, 4, 1, 1, 2, 3, 2, 5, 4, 8, 3))
AS geom, " + s"$expectedResult")
+ val actual = df.take(1)(0).get(0).asInstanceOf[String]
+ val expected =
df.take(1)(0).get(1).asInstanceOf[GenericRowWithSchema].get(0).asInstanceOf[String]
+
+ val dfDefaultValue = sparkSession.sql(s"SELECT
ST_AsText(ST_Affine(ST_GeomFromWKT($geom), 1, 2, 1, 2, 1, 2)) AS geom, " +
s"$expectedResult")
+ val actualDefaultValue =
dfDefaultValue.take(1)(0).get(0).asInstanceOf[String]
+ val expectedDefaultValue =
dfDefaultValue.take(1)(0).get(1).asInstanceOf[GenericRowWithSchema].get(1).asInstanceOf[String]
+
+ assertEquals(expected, actual)
+ assertEquals(expectedDefaultValue, actualDefaultValue)
+ }
+ }
+
it ("should pass ST_BoundingDiagonal") {
val geomTestCases = Map (
("'POINT (10 10)'")-> "'LINESTRING (10 10, 10 10)'",