This is an automated email from the ASF dual-hosted git repository.
jiayu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/sedona.git
The following commit(s) were added to refs/heads/master by this push:
new 495e83f52 [DOCS] Remove unneeded trailing semicolons from Scala files
(#1625)
495e83f52 is described below
commit 495e83f52ab3c9755d3ff3ff397da432ff42cf51
Author: John Bampton <[email protected]>
AuthorDate: Sun Oct 13 11:57:00 2024 +1000
[DOCS] Remove unneeded trailing semicolons from Scala files (#1625)
---
.../org/apache/sedona/sql/utils/Adapter.scala | 2 +-
.../apache/sedona/sql/utils/RasterSerializer.scala | 4 +-
.../sql/sedona_sql/expressions/st_functions.scala | 6 +--
.../join/BroadcastQuerySideKNNJoinExec.scala | 2 +-
.../org/apache/sedona/sql/SpatialJoinSuite.scala | 2 +-
.../apache/sedona/sql/constructorTestScala.scala | 4 +-
.../apache/sedona/sql/dataFrameAPITestScala.scala | 14 +++----
.../org/apache/sedona/sql/functionTestScala.scala | 44 +++++++++++-----------
.../org/apache/sedona/sql/predicateTestScala.scala | 2 +-
.../org/apache/sedona/sql/rasteralgebraTest.scala | 36 +++++++++---------
10 files changed, 58 insertions(+), 58 deletions(-)
diff --git
a/spark/common/src/main/scala/org/apache/sedona/sql/utils/Adapter.scala
b/spark/common/src/main/scala/org/apache/sedona/sql/utils/Adapter.scala
index 4a68b903f..9b1067a25 100644
--- a/spark/common/src/main/scala/org/apache/sedona/sql/utils/Adapter.scala
+++ b/spark/common/src/main/scala/org/apache/sedona/sql/utils/Adapter.scala
@@ -125,7 +125,7 @@ object Adapter {
import scala.jdk.CollectionConverters._
if (spatialRDD.fieldNames != null)
return toDf(spatialRDD, spatialRDD.fieldNames.asScala.toList,
sparkSession)
- toDf(spatialRDD = spatialRDD, fieldNames = null, sparkSession =
sparkSession);
+ toDf(spatialRDD = spatialRDD, fieldNames = null, sparkSession =
sparkSession)
}
def toDf[T <: Geometry](
diff --git
a/spark/common/src/main/scala/org/apache/sedona/sql/utils/RasterSerializer.scala
b/spark/common/src/main/scala/org/apache/sedona/sql/utils/RasterSerializer.scala
index fd87c9c7b..6facd9751 100644
---
a/spark/common/src/main/scala/org/apache/sedona/sql/utils/RasterSerializer.scala
+++
b/spark/common/src/main/scala/org/apache/sedona/sql/utils/RasterSerializer.scala
@@ -32,7 +32,7 @@ object RasterSerializer {
* Array of bytes represents this geometry
*/
def serialize(raster: GridCoverage2D): Array[Byte] = {
- Serde.serialize(raster);
+ Serde.serialize(raster)
}
/**
@@ -44,6 +44,6 @@ object RasterSerializer {
* GridCoverage2D
*/
def deserialize(value: Array[Byte]): GridCoverage2D = {
- Serde.deserialize(value);
+ Serde.deserialize(value)
}
}
diff --git
a/spark/common/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/st_functions.scala
b/spark/common/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/st_functions.scala
index da49f0143..33e6760e2 100644
---
a/spark/common/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/st_functions.scala
+++
b/spark/common/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/st_functions.scala
@@ -893,13 +893,13 @@ object st_functions extends DataFrameAPI {
wrapExpression[ST_HausdorffDistance](g1, g2, -1)
def ST_HausdorffDistance(g1: String, g2: String) =
- wrapExpression[ST_HausdorffDistance](g1, g2, -1);
+ wrapExpression[ST_HausdorffDistance](g1, g2, -1)
def ST_HausdorffDistance(g1: Column, g2: Column, densityFrac: Column) =
- wrapExpression[ST_HausdorffDistance](g1, g2, densityFrac);
+ wrapExpression[ST_HausdorffDistance](g1, g2, densityFrac)
def ST_HausdorffDistance(g1: String, g2: String, densityFrac: Double) =
- wrapExpression[ST_HausdorffDistance](g1, g2, densityFrac);
+ wrapExpression[ST_HausdorffDistance](g1, g2, densityFrac)
def ST_CoordDim(geometry: Column): Column =
wrapExpression[ST_CoordDim](geometry)
diff --git
a/spark/common/src/main/scala/org/apache/spark/sql/sedona_sql/strategy/join/BroadcastQuerySideKNNJoinExec.scala
b/spark/common/src/main/scala/org/apache/spark/sql/sedona_sql/strategy/join/BroadcastQuerySideKNNJoinExec.scala
index 5a3c59ef9..812bc6e6d 100644
---
a/spark/common/src/main/scala/org/apache/spark/sql/sedona_sql/strategy/join/BroadcastQuerySideKNNJoinExec.scala
+++
b/spark/common/src/main/scala/org/apache/spark/sql/sedona_sql/strategy/join/BroadcastQuerySideKNNJoinExec.scala
@@ -130,7 +130,7 @@ case class BroadcastQuerySideKNNJoinExec(
require(kValue > 0, "The number of neighbors must be greater than 0.")
objectsShapes.setNeighborSampleNumber(kValue)
- val joinPartitions: Integer = numPartitions;
+ val joinPartitions: Integer = numPartitions
broadcastJoin = false
// expand the boundary for partition to include both RDDs
diff --git
a/spark/common/src/test/scala/org/apache/sedona/sql/SpatialJoinSuite.scala
b/spark/common/src/test/scala/org/apache/sedona/sql/SpatialJoinSuite.scala
index 737eb41df..543cf5e91 100644
--- a/spark/common/src/test/scala/org/apache/sedona/sql/SpatialJoinSuite.scala
+++ b/spark/common/src/test/scala/org/apache/sedona/sql/SpatialJoinSuite.scala
@@ -258,7 +258,7 @@ class SpatialJoinSuite extends TestBaseScala with
TableDrivenPropertyChecks {
df.createOrReplaceTempView("df10parts")
val query =
- "SELECT * FROM df10parts JOIN dfEmpty WHERE
ST_Intersects(df10parts.geom, dfEmpty.geom)";
+ "SELECT * FROM df10parts JOIN dfEmpty WHERE
ST_Intersects(df10parts.geom, dfEmpty.geom)"
withConf(Map(spatialJoinPartitionSideConfKey -> "left")) {
val resultRows = sparkSession.sql(query).collect()
assert(resultRows.isEmpty)
diff --git
a/spark/common/src/test/scala/org/apache/sedona/sql/constructorTestScala.scala
b/spark/common/src/test/scala/org/apache/sedona/sql/constructorTestScala.scala
index 1eab00fe3..2dd9cdfed 100644
---
a/spark/common/src/test/scala/org/apache/sedona/sql/constructorTestScala.scala
+++
b/spark/common/src/test/scala/org/apache/sedona/sql/constructorTestScala.scala
@@ -470,7 +470,7 @@ class constructorTestScala extends TestBaseScala {
val geometries =
sparkSession.sql("SELECT ST_GeomFromWKB(rawWKBTable.wkb) as
countyshape from rawWKBTable")
val expectedGeom =
- "LINESTRING (-2.1047439575195312 -0.354827880859375, -1.49606454372406
-0.6676061153411865)";
+ "LINESTRING (-2.1047439575195312 -0.354827880859375, -1.49606454372406
-0.6676061153411865)"
assert(geometries.first().getAs[Geometry](0).toString.equals(expectedGeom))
// null input
val nullGeom = sparkSession.sql("SELECT ST_GeomFromWKB(null)")
@@ -501,7 +501,7 @@ class constructorTestScala extends TestBaseScala {
val geometries = sparkSession.sql(
"SELECT ST_GeomFromEWKB(rawWKBTable.wkb) as countyshape from
rawWKBTable")
val expectedGeom =
- "LINESTRING (-2.1047439575195312 -0.354827880859375, -1.49606454372406
-0.6676061153411865)";
+ "LINESTRING (-2.1047439575195312 -0.354827880859375, -1.49606454372406
-0.6676061153411865)"
assert(geometries.first().getAs[Geometry](0).toString.equals(expectedGeom))
// null input
val nullGeom = sparkSession.sql("SELECT ST_GeomFromEWKB(null)")
diff --git
a/spark/common/src/test/scala/org/apache/sedona/sql/dataFrameAPITestScala.scala
b/spark/common/src/test/scala/org/apache/sedona/sql/dataFrameAPITestScala.scala
index 6d78ebd86..7daee87f0 100644
---
a/spark/common/src/test/scala/org/apache/sedona/sql/dataFrameAPITestScala.scala
+++
b/spark/common/src/test/scala/org/apache/sedona/sql/dataFrameAPITestScala.scala
@@ -1349,7 +1349,7 @@ class dataFrameAPITestScala extends TestBaseScala {
.select(ST_MinimumBoundingCircle("geom").as("geom"))
.selectExpr("ST_ReducePrecision(geom, 2)")
val actualResult =
df.take(1)(0).get(0).asInstanceOf[Geometry].getCoordinates().length
- val expectedResult = BufferParameters.DEFAULT_QUADRANT_SEGMENTS * 6 * 4
+ 1;
+ val expectedResult = BufferParameters.DEFAULT_QUADRANT_SEGMENTS * 6 * 4
+ 1
assert(actualResult == expectedResult)
}
@@ -1723,7 +1723,7 @@ class dataFrameAPITestScala extends TestBaseScala {
"SELECT ST_GeomFromWKT('Polygon ((0 0, 1 2, 2 2, 3 2, 5 0, 4 0, 3 1, 2
1, 1 0, 0 0))') as geom")
val df =
baseDF.select(ST_MakeValid(ST_Collect(ST_H3ToGeom(ST_H3CellIDs("geom", 6,
true)))))
val actualResult = df.take(1)(0).getAs[Geometry](0)
- val targetShape = baseDF.take(1)(0).getAs[Polygon](0);
+ val targetShape = baseDF.take(1)(0).getAs[Polygon](0)
assert(actualResult.contains(targetShape))
}
@@ -1982,7 +1982,7 @@ class dataFrameAPITestScala extends TestBaseScala {
val polyDf = sparkSession.sql(
"SELECT ST_GeomFromWKT('POLYGON ((1 0 1, 1 1 1, 2 1 1, 2 0 1, 1 0
1))') AS geom")
val df = polyDf.select(ST_Translate("geom", 2, 3, 1))
- val wktWriter3D = new WKTWriter(3);
+ val wktWriter3D = new WKTWriter(3)
val actualGeom = df.take(1)(0).get(0).asInstanceOf[Geometry]
val actual = wktWriter3D.write(actualGeom)
val expected = "POLYGON Z((3 3 2, 3 4 2, 4 4 2, 4 3 2, 3 3 2))"
@@ -1999,7 +1999,7 @@ class dataFrameAPITestScala extends TestBaseScala {
val polyDf = sparkSession.sql(
"SELECT ST_GeomFromWKT('MULTIPOINT (0 0, 2 2)') AS geom,
ST_Buffer(ST_GeomFromWKT('POINT(1 1)'), 10.0) as buf")
val df = polyDf.select(ST_VoronoiPolygons("geom"))
- val wktWriter3D = new WKTWriter(3);
+ val wktWriter3D = new WKTWriter(3)
val actualGeom = df.take(1)(0).get(0).asInstanceOf[Geometry]
val actual = wktWriter3D.write(actualGeom)
val expected =
@@ -2019,9 +2019,9 @@ class dataFrameAPITestScala extends TestBaseScala {
it("Passed ST_Affine") {
val polyDf = sparkSession.sql(
"SELECT ST_GeomFromWKT('POLYGON ((2 3 1, 4 5 1, 7 8 2, 2 3 1))') AS
geom")
- val df = polyDf.select(ST_Affine("geom", 1, 2, 3, 3, 4, 4, 1, 4, 2, 1,
2, 1));
+ val df = polyDf.select(ST_Affine("geom", 1, 2, 3, 3, 4, 4, 1, 4, 2, 1,
2, 1))
val dfDefaultValue = polyDf.select(ST_Affine("geom", 1, 2, 1, 2, 1, 2))
- val wKTWriter3D = new WKTWriter(3);
+ val wKTWriter3D = new WKTWriter(3)
val actualGeom = df.take(1)(0).get(0).asInstanceOf[Geometry]
val actualGeomDefaultValue =
dfDefaultValue.take(1)(0).get(0).asInstanceOf[Geometry]
val actual = wKTWriter3D.write(actualGeom)
@@ -2036,7 +2036,7 @@ class dataFrameAPITestScala extends TestBaseScala {
val polyDf = sparkSession.sql(
"SELECT ST_GeomFromWKT('POLYGON ((1 0 1, 2 3 2, 5 0 1, 5 2 9, 1 0
1))') AS geom")
val df = polyDf.select(ST_BoundingDiagonal("geom"))
- val wKTWriter = new WKTWriter(3);
+ val wKTWriter = new WKTWriter(3)
val expected = "LINESTRING Z(1 0 1, 5 3 9)"
val actual = wKTWriter.write(df.take(1)(0).get(0).asInstanceOf[Geometry])
assertEquals(expected, actual)
diff --git
a/spark/common/src/test/scala/org/apache/sedona/sql/functionTestScala.scala
b/spark/common/src/test/scala/org/apache/sedona/sql/functionTestScala.scala
index f3c5796e3..f18c4581f 100644
--- a/spark/common/src/test/scala/org/apache/sedona/sql/functionTestScala.scala
+++ b/spark/common/src/test/scala/org/apache/sedona/sql/functionTestScala.scala
@@ -61,7 +61,7 @@ class functionTestScala
polygonDf.createOrReplaceTempView("polygondf")
var functionDf =
sparkSession.sql("select ST_ConcaveHull(polygondf.countyshape, 1,
true) from polygondf")
- assert(functionDf.count() > 0);
+ assert(functionDf.count() > 0)
}
it("Passed ST_ConvexHull") {
@@ -76,7 +76,7 @@ class functionTestScala
polygonDf.createOrReplaceTempView("polygondf")
var functionDf =
sparkSession.sql("select ST_ConvexHull(polygondf.countyshape) from
polygondf")
- assert(functionDf.count() > 0);
+ assert(functionDf.count() > 0)
}
it("Passed ST_CrossesDateLine") {
@@ -107,7 +107,7 @@ class functionTestScala
polygonDf.createOrReplaceTempView("polygondf")
val functionDf =
sparkSession.sql("select ST_Buffer(polygondf.countyshape, 1) from
polygondf")
- assert(functionDf.count() > 0);
+ assert(functionDf.count() > 0)
}
it("Passed ST_Buffer Spheroid") {
@@ -123,11 +123,11 @@ class functionTestScala
var functionDf =
sparkSession.sql("select ST_Buffer(polygondf.countyshape, 1, true)
from polygondf")
- assert(functionDf.count() > 0);
+ assert(functionDf.count() > 0)
functionDf = sparkSession.sql(
"select ST_Buffer(polygondf.countyshape, 1, true, 'quad_segs=2') from
polygondf")
- assert(functionDf.count() > 0);
+ assert(functionDf.count() > 0)
}
it("Passed ST_BestSRID") {
@@ -142,7 +142,7 @@ class functionTestScala
polygonDf.createOrReplaceTempView("polygondf")
val functionDf =
sparkSession.sql("select ST_BestSRID(polygondf.countyshape) from
polygondf")
- assert(functionDf.count() > 0);
+ assert(functionDf.count() > 0)
}
it("Passed ST_ShiftLongitude") {
@@ -157,7 +157,7 @@ class functionTestScala
polygonDf.createOrReplaceTempView("polygondf")
val functionDf =
sparkSession.sql("select ST_ShiftLongitude(polygondf.countyshape) from
polygondf")
- assert(functionDf.count() > 0);
+ assert(functionDf.count() > 0)
}
it("Passed ST_Envelope") {
@@ -172,7 +172,7 @@ class functionTestScala
polygonDf.createOrReplaceTempView("polygondf")
var functionDf =
sparkSession.sql("select ST_Envelope(polygondf.countyshape) from
polygondf")
- assert(functionDf.count() > 0);
+ assert(functionDf.count() > 0)
}
it("Passed ST_Expand") {
@@ -236,7 +236,7 @@ class functionTestScala
polygonDf.createOrReplaceTempView("polygondf")
var functionDf =
sparkSession.sql("select ST_Centroid(polygondf.countyshape) from
polygondf")
- assert(functionDf.count() > 0);
+ assert(functionDf.count() > 0)
}
it("Passed ST_Length") {
@@ -250,7 +250,7 @@ class functionTestScala
"select ST_GeomFromWKT(polygontable._c0) as countyshape from
polygontable")
polygonDf.createOrReplaceTempView("polygondf")
var functionDf = sparkSession.sql("select
ST_Length(polygondf.countyshape) from polygondf")
- assert(functionDf.count() > 0);
+ assert(functionDf.count() > 0)
}
it("Passed ST_Length2D") {
@@ -265,7 +265,7 @@ class functionTestScala
polygonDf.createOrReplaceTempView("polygondf")
var functionDf =
sparkSession.sql("select ST_Length2D(polygondf.countyshape) from
polygondf")
- assert(functionDf.count() > 0);
+ assert(functionDf.count() > 0)
}
it("Passed ST_Area") {
@@ -279,7 +279,7 @@ class functionTestScala
"select ST_GeomFromWKT(polygontable._c0) as countyshape from
polygontable")
polygonDf.createOrReplaceTempView("polygondf")
var functionDf = sparkSession.sql("select ST_Area(polygondf.countyshape)
from polygondf")
- assert(functionDf.count() > 0);
+ assert(functionDf.count() > 0)
}
it("Passed ST_Dimension with Geometry") {
@@ -325,7 +325,7 @@ class functionTestScala
polygonDf.createOrReplaceTempView("polygondf")
var functionDf = sparkSession.sql(
"select ST_Distance(polygondf.countyshape, polygondf.countyshape) from
polygondf")
- assert(functionDf.count() > 0);
+ assert(functionDf.count() > 0)
}
it("Passed ST_3DDistance") {
@@ -1849,7 +1849,7 @@ class functionTestScala
val actualDf = baseDf.selectExpr("ST_RemoveRepeatedPoints(geom, 1000) as
geom")
var actual = actualDf.selectExpr("ST_AsText(geom)").first().get(0)
var expected =
- "GEOMETRYCOLLECTION (POINT (10 10), LINESTRING (20 20, 30 30), POLYGON
((40 40, 70 70, 70 70, 40 40)), MULTIPOINT ((80 80)))";
+ "GEOMETRYCOLLECTION (POINT (10 10), LINESTRING (20 20, 30 30), POLYGON
((40 40, 70 70, 70 70, 40 40)), MULTIPOINT ((80 80)))"
assertEquals(expected, actual)
val actualSRID = actualDf.selectExpr("ST_SRID(geom)").first().get(0)
assertEquals(1000, actualSRID)
@@ -1857,21 +1857,21 @@ class functionTestScala
actual = sparkSession
.sql("SELECT
ST_AsText(ST_RemoveRepeatedPoints(ST_GeomFromWKT('MULTIPOINT ((1 1), (2 2), (3
3), (2 2))')))")
.first()
- .get(0);
+ .get(0)
expected = "MULTIPOINT ((1 1), (2 2), (3 3))"
assertEquals(expected, actual)
actual = sparkSession
.sql("SELECT
ST_AsText(ST_RemoveRepeatedPoints(ST_GeomFromWKT('LINESTRING (0 0, 0 0, 1 1, 0
0, 1 1, 2 2)')))")
.first()
- .get(0);
+ .get(0)
expected = "LINESTRING (0 0, 1 1, 0 0, 1 1, 2 2)"
assertEquals(expected, actual)
actual = sparkSession
.sql("SELECT
ST_AsText(ST_RemoveRepeatedPoints(ST_GeomFromWKT('GEOMETRYCOLLECTION
(LINESTRING (1 1, 2 2, 2 2, 3 3), POINT (4 4), POINT (4 4), POINT (5 5))')))")
.first()
- .get(0);
+ .get(0)
expected =
"GEOMETRYCOLLECTION (LINESTRING (1 1, 2 2, 3 3), POINT (4 4), POINT (4
4), POINT (5 5))"
assertEquals(expected, actual)
@@ -1879,7 +1879,7 @@ class functionTestScala
actual = sparkSession
.sql("SELECT
ST_AsText(ST_RemoveRepeatedPoints(ST_GeomFromWKT('LINESTRING (0 0, 0 0, 1 1, 5
5, 1 1, 2 2)'), 2))")
.first()
- .get(0);
+ .get(0)
expected = "LINESTRING (0 0, 5 5, 2 2)"
assertEquals(expected, actual)
}
@@ -2916,7 +2916,7 @@ class functionTestScala
.get(1)
.asInstanceOf[String]
assertEquals(expected, actual)
- assertEquals(expectedDefaultValue, actualDefaultValue);
+ assertEquals(expectedDefaultValue, actualDefaultValue)
}
}
@@ -2941,7 +2941,7 @@ class functionTestScala
.get(1)
.asInstanceOf[String]
assertEquals(expected, actual)
- assertEquals(expectedDefaultValue, actualDefaultValue);
+ assertEquals(expectedDefaultValue, actualDefaultValue)
}
}
@@ -2966,7 +2966,7 @@ class functionTestScala
.get(1)
.asInstanceOf[String]
assertEquals(expected, actual)
- assertEquals(expectedDefaultValue, actualDefaultValue);
+ assertEquals(expectedDefaultValue, actualDefaultValue)
}
}
@@ -2991,7 +2991,7 @@ class functionTestScala
.get(1)
.asInstanceOf[String]
assertEquals(expected, actual)
- assertEquals(expectedDefaultValue, actualDefaultValue);
+ assertEquals(expectedDefaultValue, actualDefaultValue)
}
}
diff --git
a/spark/common/src/test/scala/org/apache/sedona/sql/predicateTestScala.scala
b/spark/common/src/test/scala/org/apache/sedona/sql/predicateTestScala.scala
index eecea8a3f..482cb1459 100644
--- a/spark/common/src/test/scala/org/apache/sedona/sql/predicateTestScala.scala
+++ b/spark/common/src/test/scala/org/apache/sedona/sql/predicateTestScala.scala
@@ -258,7 +258,7 @@ class predicateTestScala extends TestBaseScala {
it("Passed ST_Relate") {
val baseDf = sparkSession.sql(
- "SELECT ST_GeomFromWKT('LINESTRING (1 1, 5 5)') AS g1,
ST_GeomFromWKT('POLYGON ((3 3, 3 7, 7 7, 7 3, 3 3))') as g2, '1010F0212' as
im");
+ "SELECT ST_GeomFromWKT('LINESTRING (1 1, 5 5)') AS g1,
ST_GeomFromWKT('POLYGON ((3 3, 3 7, 7 7, 7 3, 3 3))') as g2, '1010F0212' as im")
val actual = baseDf.selectExpr("ST_Relate(g1, g2)").first().get(0)
assert(actual.equals("1010F0212"))
diff --git
a/spark/common/src/test/scala/org/apache/sedona/sql/rasteralgebraTest.scala
b/spark/common/src/test/scala/org/apache/sedona/sql/rasteralgebraTest.scala
index b63953294..ecdf257de 100644
--- a/spark/common/src/test/scala/org/apache/sedona/sql/rasteralgebraTest.scala
+++ b/spark/common/src/test/scala/org/apache/sedona/sql/rasteralgebraTest.scala
@@ -1143,13 +1143,13 @@ class rasteralgebraTest extends TestBaseScala with
BeforeAndAfter with GivenWhen
it("Passed RS_GeoReference") {
val df = sparkSession.read.format("binaryFile").load(resourceFolder +
"raster/test1.tiff")
- var result =
df.selectExpr("RS_GeoReference(RS_FromGeoTiff(content))").first().getString(0);
+ var result =
df.selectExpr("RS_GeoReference(RS_FromGeoTiff(content))").first().getString(0)
var expected: String =
"72.328613 \n0.000000 \n0.000000 \n-72.328613 \n-13095817.809482
\n4021262.748793"
assertEquals(expected, result)
result =
- df.selectExpr("RS_GeoReference(RS_FromGeoTiff(content),
'ESRI')").first().getString(0);
+ df.selectExpr("RS_GeoReference(RS_FromGeoTiff(content),
'ESRI')").first().getString(0)
expected =
"72.328613 \n0.000000 \n0.000000 \n-72.328613 \n-13095781.645176
\n4021226.584486"
assertEquals(expected, result)
@@ -1337,7 +1337,7 @@ class rasteralgebraTest extends TestBaseScala with
BeforeAndAfter with GivenWhen
"RS_FromGeoTiff(content) as raster",
"RS_MultiplyFactor(RS_BandAsArray(RS_FromGeoTiff(content), 1), 2) as
band")
val raster =
- df.selectExpr("RS_AddBandFromArray(raster, band, 2,
2)").first().getAs[GridCoverage2D](0);
+ df.selectExpr("RS_AddBandFromArray(raster, band, 2,
2)").first().getAs[GridCoverage2D](0)
assertEquals(2,
RasterUtils.getNoDataValue(raster.getSampleDimension(1)), 1e-9)
}
@@ -1457,8 +1457,8 @@ class rasteralgebraTest extends TestBaseScala with
BeforeAndAfter with GivenWhen
it("Passed RS_ConvexHull with raster") {
val df = sparkSession.read.format("binaryFile").load(resourceFolder +
"raster/test1.tiff")
val result =
-
df.selectExpr("RS_ConvexHull(RS_FromGeoTiff(content))").first().getAs[Geometry](0);
- val coordinates = result.getCoordinates;
+
df.selectExpr("RS_ConvexHull(RS_FromGeoTiff(content))").first().getAs[Geometry](0)
+ val coordinates = result.getCoordinates
val expectedCoordOne = new Coordinate(-13095817.809482181,
4021262.7487925636)
val expectedCoordTwo = new Coordinate(-13058785.559768861,
4021262.7487925636)
@@ -1798,10 +1798,10 @@ class rasteralgebraTest extends TestBaseScala with
BeforeAndAfter with GivenWhen
.sql(
s"SELECT RS_PixelAsPoint(RS_MakeEmptyRaster($numBands,
$widthInPixel, $heightInPixel, $upperLeftX, $upperLeftY, $cellSize), 2, 1)")
.first()
- .getAs[Geometry](0);
+ .getAs[Geometry](0)
val expectedX = 127.19
val expectedY = -12
- val actualCoordinates = result.getCoordinate;
+ val actualCoordinates = result.getCoordinate
assertEquals(expectedX, actualCoordinates.x, 1e-5)
assertEquals(expectedY, actualCoordinates.y, 1e-5)
}
@@ -1817,7 +1817,7 @@ class rasteralgebraTest extends TestBaseScala with
BeforeAndAfter with GivenWhen
.sql(
s"SELECT RS_PixelAsPoints(RS_MakeEmptyRaster($numBands,
$widthInPixel, $heightInPixel, $upperLeftX, $upperLeftY, $cellSize), 1)")
.first()
- .getList(0);
+ .getList(0)
val expected = "[POINT (127.19000244140625 -12),0.0,2,1]"
assertEquals(expected, result.get(1).toString)
}
@@ -1848,7 +1848,7 @@ class rasteralgebraTest extends TestBaseScala with
BeforeAndAfter with GivenWhen
.sql(
s"SELECT ST_AsText(RS_PixelAsPolygon(RS_MakeEmptyRaster($numBands,
$widthInPixel, $heightInPixel, $upperLeftX, $upperLeftY, $cellSize), 2, 3))")
.first()
- .getString(0);
+ .getString(0)
val expected =
"POLYGON ((127.19000244140625 -20, 131.19000244140625 -20,
131.19000244140625 -24, 127.19000244140625 -24, 127.19000244140625 -20))"
assertEquals(expected, result)
@@ -1865,7 +1865,7 @@ class rasteralgebraTest extends TestBaseScala with
BeforeAndAfter with GivenWhen
.sql(
s"SELECT RS_PixelAsPolygons(RS_MakeEmptyRaster($numBands,
$widthInPixel, $heightInPixel, $upperLeftX, $upperLeftY, $cellSize), 1)")
.first()
- .getList(0);
+ .getList(0)
val expected =
"[POLYGON ((127.19000244140625 -20, 131.19000244140625 -20,
131.19000244140625 -24, 127.19000244140625 -24, 127.19000244140625
-20)),0.0,2,3]"
assertEquals(expected, result.get(11).toString)
@@ -1897,7 +1897,7 @@ class rasteralgebraTest extends TestBaseScala with
BeforeAndAfter with GivenWhen
.sql(
s"SELECT ST_AsText(RS_PixelAsCentroid(RS_MakeEmptyRaster($numBands,
$widthInPixel, $heightInPixel, $upperLeftX, $upperLeftY, $cellSize), 2, 3))")
.first()
- .getString(0);
+ .getString(0)
val expected = "POINT (253.5 -215.5)"
assertEquals(expected, result)
}
@@ -1913,7 +1913,7 @@ class rasteralgebraTest extends TestBaseScala with
BeforeAndAfter with GivenWhen
.sql(
s"SELECT RS_PixelAsCentroids(RS_MakeEmptyRaster($numBands,
$widthInPixel, $heightInPixel, $upperLeftX, $upperLeftY, $cellSize), 1)")
.first()
- .getList(0);
+ .getList(0)
val expected = "[POINT (253.5 -215.5),0.0,2,3]"
assertEquals(expected, result.get(25).toString)
}
@@ -1962,7 +1962,7 @@ class rasteralgebraTest extends TestBaseScala with
BeforeAndAfter with GivenWhen
.selectExpr(
"ST_AsText(RS_WorldToRasterCoord(raster, -13095817.809482181,
4021262.7487925636))")
.first()
- .getString(0);
+ .getString(0)
val expected = "POINT (1 1)"
assertEquals(expected, result)
}
@@ -1973,7 +1973,7 @@ class rasteralgebraTest extends TestBaseScala with
BeforeAndAfter with GivenWhen
val result = df
.selectExpr("RS_WorldToRasterCoordX(raster, -13095817.809482181,
4021262.7487925636)")
.first()
- .getInt(0);
+ .getInt(0)
val expected = 1
assertEquals(expected, result)
}
@@ -1984,7 +1984,7 @@ class rasteralgebraTest extends TestBaseScala with
BeforeAndAfter with GivenWhen
val result = df
.selectExpr("RS_WorldToRasterCoordY(raster, -13095817.809482181,
4021262.7487925636)")
.first()
- .getInt(0);
+ .getInt(0)
val expected = 1
assertEquals(expected, result)
}
@@ -2411,7 +2411,7 @@ class rasteralgebraTest extends TestBaseScala with
BeforeAndAfter with GivenWhen
val df = inputDf.selectExpr(
"RS_AddBandFromArray(RS_MakeEmptyRaster(1, 4, 3, 0, 0, 1, -1, 0, 0,
0), band, 1, 0d) as emptyRaster")
val resultDf = df.selectExpr("RS_AsMatrix(emptyRaster, 1, 5) as matrix")
- val actual = resultDf.first().getString(0);
+ val actual = resultDf.first().getString(0)
val expected =
"| 1.00000 3.33333 4.00000 0.00010|\n" + "| 2.22220 9.00000
10.00000 11.11111|\n" + "| 3.00000 4.00000 5.00000 6.00000|\n"
assertEquals(expected, actual)
@@ -2423,7 +2423,7 @@ class rasteralgebraTest extends TestBaseScala with
BeforeAndAfter with GivenWhen
val df = inputDf.selectExpr(
"RS_AddBandFromArray(RS_MakeEmptyRaster(1, 4, 3, 0, 0, 1, -1, 0, 0,
0), band, 1, 0d) as emptyRaster")
val resultDf = df.selectExpr("RS_AsMatrix(emptyRaster, 1) as matrix")
- val actual = resultDf.first().getString(0);
+ val actual = resultDf.first().getString(0)
val expected =
"| 1.000000 3.333333 4.000000 0.000100|\n| 2.222200 9.000000
10.000000 11.111111|\n| 3.000000 4.000000 5.000000 6.000000|\n"
.format()
@@ -2436,7 +2436,7 @@ class rasteralgebraTest extends TestBaseScala with
BeforeAndAfter with GivenWhen
val df = inputDf.selectExpr(
"RS_AddBandFromArray(RS_MakeEmptyRaster(1, 4, 3, 0, 0, 1, -1, 0, 0,
0), band, 1, 0d) as emptyRaster")
val resultDf = df.selectExpr("RS_AsMatrix(emptyRaster) as matrix")
- val actual = resultDf.first().getString(0);
+ val actual = resultDf.first().getString(0)
val expected =
"| 1.000000 3.333333 4.000000 0.000100|\n| 2.222200 9.000000
10.000000 11.111111|\n| 3.000000 4.000000 5.000000 6.000000|\n"
.format()