This is an automated email from the ASF dual-hosted git repository.

jiayu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/sedona.git


The following commit(s) were added to refs/heads/master by this push:
     new e72151640d [SEDONA-2474] New Java Spark Example (#2475)
e72151640d is described below

commit e72151640d5c7fbadb114a553026817126bcdbdf
Author: Alec Zoeller <[email protected]>
AuthorDate: Sun Nov 16 18:44:55 2025 +1300

    [SEDONA-2474] New Java Spark Example (#2475)
    
    Co-authored-by: Alec Zoeller <[email protected]>
    Co-authored-by: Jia Yu <[email protected]>
---
 examples/README.md                                 |   3 +
 examples/java-spark-sql/.gitignore                 |  20 +++
 examples/java-spark-sql/pom.xml                    | 184 +++++++++++++++++++++
 .../src/main/java/spark/GeoParquetAccessor.java    |  90 ++++++++++
 .../src/main/java/spark/SedonaGeoParquetMain.java  |  61 +++++++
 .../src/main/java/spark/SedonaSparkSession.java    |  51 ++++++
 .../src/main/resources/application.properties      |  18 ++
 .../src/test/java/spark/SedonaParquetTest.java     | 102 ++++++++++++
 .../src/test/resources/TestPoints.parquet          | Bin 0 -> 79834 bytes
 .../src/test/resources/application.properties      |  18 ++
 10 files changed, 547 insertions(+)

diff --git a/examples/README.md b/examples/README.md
index cde819ba3f..85bdd35bd1 100644
--- a/examples/README.md
+++ b/examples/README.md
@@ -29,6 +29,7 @@ The folder structure of this repository is as follows.
 
 * spark-sql: a Scala template shows how to use Sedona RDD, DataFrame and SQL 
API
 * flink-sql: a Java template shows how to use Sedona SQL via Flink Table APIs
+* java-spark-sql: a pure Java template implementation and use case for Sedona 
RDD, DataFrame and SQL API
 
 ## Compile and package
 
@@ -59,6 +60,8 @@ To run the jar in this way, you need to:
 
 We highly suggest you use IDEs to run template projects on your local machine. 
For Scala, we recommend IntelliJ IDEA with Scala plug-in. For Java, we 
recommend IntelliJ IDEA and Eclipse. With the help of IDEs, **you don't have to 
prepare anything** (even don't need to download and set up Spark!). As long as 
you have Scala and Java, everything works properly!
 
+* In the case of the java-spark-sql template, execute `mvn test` to run JUnit 
tests using a sample GeoParquet file.
+
 ### Scala
 
 Import the Scala template project as SBT project. Then run the Main file in 
this project.
diff --git a/examples/java-spark-sql/.gitignore 
b/examples/java-spark-sql/.gitignore
new file mode 100644
index 0000000000..b14e5ffe4d
--- /dev/null
+++ b/examples/java-spark-sql/.gitignore
@@ -0,0 +1,20 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+/.idea/
+/target/
+dependency-reduced-pom.xml
diff --git a/examples/java-spark-sql/pom.xml b/examples/java-spark-sql/pom.xml
new file mode 100644
index 0000000000..640cf400f2
--- /dev/null
+++ b/examples/java-spark-sql/pom.xml
@@ -0,0 +1,184 @@
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+       xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
https://maven.apache.org/xsd/maven-4.0.0.xsd">
+       <modelVersion>4.0.0</modelVersion>
+
+       <groupId>org.apache.sedona</groupId>
+       <artifactId>sedona-java-spark-example</artifactId>
+       <version>1.6.1</version>
+       <name>Sedona : Examples : Java Spark SQL</name>
+       <description>Example project for Apache Sedona with Java and 
Spark.</description>
+
+       <properties>
+        <!-- Set spark.scope to "compile" to be able to run locally with java 
-jar shaded.jar -->
+        <spark.scope>provided</spark.scope>
+        <javax.scope>test</javax.scope>
+
+               <sedona.version>1.6.1</sedona.version>
+        <geotools.version>1.8.0-33.1</geotools.version>
+        <spark.version>3.5.7</spark.version>
+        <javax.servlet.version>4.0.1</javax.servlet.version>
+        <spotless.version>3.0.0</spotless.version>
+       </properties>
+
+       <dependencies>
+        <dependency>
+               <groupId>org.datasyslab</groupId>
+               <artifactId>geotools-wrapper</artifactId>
+               <version>${geotools.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.sedona</groupId>
+            <artifactId>sedona-spark-shaded-3.5_2.13</artifactId>
+            <version>${sedona.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.spark</groupId>
+            <artifactId>spark-core_2.13</artifactId>
+            <version>${spark.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.spark</groupId>
+            <artifactId>spark-sql_2.13</artifactId>
+            <version>${spark.version}</version>
+            <scope>${spark.scope}</scope>
+        </dependency>
+        <dependency>
+            <groupId>javax.servlet</groupId>
+            <artifactId>javax.servlet-api</artifactId>
+            <version>${javax.servlet.version}</version>
+            <scope>${javax.scope}</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.junit.jupiter</groupId>
+            <artifactId>junit-jupiter-engine</artifactId>
+            <version>5.2.0-M1</version>
+        </dependency>
+
+       </dependencies>
+       <build>
+               <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-surefire-plugin</artifactId>
+                <version>3.2.5</version>
+                <configuration>
+                    <argLine>
+                        --add-opens=java.base/sun.nio.ch=ALL-UNNAMED
+                        --add-opens=java.base/java.nio=ALL-UNNAMED
+                        --add-opens=java.base/java.lang=ALL-UNNAMED
+                        --add-opens=java.base/java.lang.invoke=ALL-UNNAMED
+                        --add-opens=java.base/java.util=ALL-UNNAMED
+                    </argLine>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <version>2.1</version>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>shade</goal>
+                        </goals>
+                        <configuration>
+                            <transformers>
+                                <!--  use transformer to handle merge of 
META-INF/services - see 
http://java.net/jira/browse/JERSEY-440?focusedCommentId=14822&page=com.atlassian.jira.plugin.system.issuetabpanels%3Acomment-tabpanel#action_14822
 -->
+                                <transformer
+                                        
implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"
 />
+                                <transformer
+                                        
implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
+                                    <resource>reference.conf</resource>
+                                </transformer>
+                                <transformer
+                                        
implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                                    <manifestEntries>
+                                        <Specification-Title>Java Advanced 
Imaging Image I/O Tools</Specification-Title>
+                                        
<Specification-Version>1.1</Specification-Version>
+                                        <Specification-Vendor>Sun 
Microsystems, Inc.</Specification-Vendor>
+                                        
<Implementation-Title>com.sun.media.imageio</Implementation-Title>
+                                        
<Implementation-Version>1.1</Implementation-Version>
+                                        <Implementation-Vendor>Sun 
Microsystems, Inc.</Implementation-Vendor>
+                                        
<Extension-Name>com.sun.media.imageio</Extension-Name>
+                                    </manifestEntries>
+                                </transformer>
+                                <transformer 
implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                                    
<mainClass>spark.SedonaGeoParquetMain</mainClass>
+                                </transformer>
+                            </transformers>
+                            <filters>
+                                <!-- filter to address "Invalid signature 
file" issue - see http://stackoverflow.com/a/6743609/589215 -->
+                                <filter>
+                                    <artifact>*:*</artifact>
+                                    <excludes>
+                                        <exclude>META-INF/*.SF</exclude>
+                                        <exclude>META-INF/*.DSA</exclude>
+                                        <exclude>META-INF/*.RSA</exclude>
+                                    </excludes>
+                                </filter>
+                            </filters>
+                            <argLine>
+                                --add-opens=java.base/sun.nio.ch=ALL-UNNAMED
+                                --add-opens=java.base/java.nio=ALL-UNNAMED
+                                --add-opens=java.base/java.lang=ALL-UNNAMED
+                                --add-opens=java.base/java.util=ALL-UNNAMED
+                            </argLine>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>com.diffplug.spotless</groupId>
+                <artifactId>spotless-maven-plugin</artifactId>
+                <version>${spotless.version}</version>
+                <configuration>
+                    <formats>
+                    <!-- you can define as many formats as you want, each is 
independent -->
+                    <format>
+                        <!-- define the files to apply to -->
+                        <includes>
+                        <include>.gitattributes</include>
+                        <include>.gitignore</include>
+                        </includes>
+                        <!-- define the steps to apply to those files -->
+                        <trimTrailingWhitespace/>
+                        <endWithNewline/>
+                        <indent>
+                        <tabs>true</tabs>
+                        <spacesPerTab>4</spacesPerTab>
+                        </indent>
+                    </format>
+                    </formats>
+                    <!-- define a language-specific format -->
+                    <java>
+                    <googleJavaFormat>
+                        <version>1.10</version>
+                        <style>AOSP</style>
+                        <reflowLongStrings>true</reflowLongStrings>
+                        <formatJavadoc>false</formatJavadoc>
+                    </googleJavaFormat>
+                    </java>
+                </configuration>
+                </plugin>
+               </plugins>
+       </build>
+</project>
diff --git 
a/examples/java-spark-sql/src/main/java/spark/GeoParquetAccessor.java 
b/examples/java-spark-sql/src/main/java/spark/GeoParquetAccessor.java
new file mode 100644
index 0000000000..ba8e6a1f65
--- /dev/null
+++ b/examples/java-spark-sql/src/main/java/spark/GeoParquetAccessor.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package spark;
+
+import org.apache.sedona.core.spatialOperator.RangeQuery;
+import org.apache.sedona.core.spatialOperator.SpatialPredicate;
+import org.apache.sedona.core.spatialRDD.SpatialRDD;
+import org.apache.sedona.sql.utils.Adapter;
+import org.apache.spark.sql.Dataset;
+import org.apache.spark.sql.Row;
+import org.apache.spark.sql.SparkSession;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.Polygon;
+
+import java.util.List;
+
+
+public class GeoParquetAccessor {
+
+    private final SparkSession session;
+    private String parquetPath;
+
+    public GeoParquetAccessor() {
+        this.session = new SedonaSparkSession().getSession();
+        this.parquetPath = "";
+    }
+
+    //Overload with constructor that has Spark session provided
+    //Use to avoid error - can't have two SparkContext objects on one JVM
+    public GeoParquetAccessor(SparkSession session, String parquetPath) {
+        this.session = session;
+        this.parquetPath = parquetPath;
+    }
+
+    public List<Geometry> selectFeaturesByPolygon(double xmin, double ymax,
+                                                  double xmax, double ymin,
+                                                  String geometryColumn) {
+
+        //Read the GeoParquet file into a DataFrame
+        Dataset<Row> insarDF = 
session.read().format("geoparquet").load(parquetPath);
+
+        //Convert the DataFrame to a SpatialRDD
+        //The second argument to toSpatialRdd is the name of the geometry 
column.
+        SpatialRDD<Geometry> insarRDD = Adapter.toSpatialRdd(insarDF, 
geometryColumn);
+
+        // Define the polygon for the spatial query
+        GeometryFactory geometryFactory = new GeometryFactory();
+        Coordinate[] coordinates = new Coordinate[] {
+            new Coordinate(xmin, ymin),
+            new Coordinate(xmax, ymin),
+            new Coordinate(xmax, ymax),
+            new Coordinate(xmin, ymax),
+            new Coordinate(xmin, ymin) // A closed polygon has the same start 
and end coordinate
+        };
+        Polygon queryPolygon = geometryFactory.createPolygon(coordinates);
+
+        // Perform the spatial range query
+        // This will return all geometries that intersect with the query 
polygon.
+        // Alternatives are SpatialPredicate.CONTAINS or 
SpatialPredicate.WITHIN
+        SpatialRDD<Geometry> resultRDD = new SpatialRDD<>();
+        try {
+            resultRDD.rawSpatialRDD = RangeQuery.SpatialRangeQuery(insarRDD, 
queryPolygon, SpatialPredicate.INTERSECTS, false);
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Collect the results back to the driver
+        return resultRDD.getRawSpatialRDD().collect();
+    }
+
+}
diff --git 
a/examples/java-spark-sql/src/main/java/spark/SedonaGeoParquetMain.java 
b/examples/java-spark-sql/src/main/java/spark/SedonaGeoParquetMain.java
new file mode 100644
index 0000000000..4a11437283
--- /dev/null
+++ b/examples/java-spark-sql/src/main/java/spark/SedonaGeoParquetMain.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package spark;
+
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.Geometry;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.List;
+import java.util.Properties;
+
+public class SedonaGeoParquetMain {
+
+    protected static Properties properties;
+    protected static String parquetPath;
+    protected static SedonaSparkSession session;
+
+    public static void main(String args[]) {
+
+        session = new SedonaSparkSession();
+        //Get parquetPath and any other application.properties
+        try {
+            ClassLoader loader = 
Thread.currentThread().getContextClassLoader();
+            Properties properties = new Properties();
+            InputStream is = 
loader.getResourceAsStream("application.properties");
+            properties.load(is);
+            parquetPath = properties.getProperty("parquet.path");
+        } catch (IOException e) {
+            e.printStackTrace();
+            parquetPath = "";
+        }
+        GeoParquetAccessor accessor = new GeoParquetAccessor(session.session, 
parquetPath);
+        //Test parquet happens to be in New Zealand Transverse Mercator 
(EPSG:2193) (meters)
+        List<Geometry> geoms = accessor.selectFeaturesByPolygon(1155850, 
4819840, 1252000, 4748100, "geometry");
+        System.out.println("Coordinates of convex hull of points in 
boundary:");
+        for (Geometry geom : geoms) {
+            Coordinate[] convexHullCoordinates = 
geom.convexHull().getCoordinates();
+            for (Coordinate coord : convexHullCoordinates) {
+                System.out.println(String.format("\t%s", coord.toString()));
+            }
+        }
+    }
+}
diff --git 
a/examples/java-spark-sql/src/main/java/spark/SedonaSparkSession.java 
b/examples/java-spark-sql/src/main/java/spark/SedonaSparkSession.java
new file mode 100644
index 0000000000..6be6c99585
--- /dev/null
+++ b/examples/java-spark-sql/src/main/java/spark/SedonaSparkSession.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package spark;
+
+import org.apache.sedona.spark.SedonaContext;
+import org.apache.spark.sql.SparkSession;
+
+
+public class SedonaSparkSession {
+
+    public SparkSession session;
+
+    public SedonaSparkSession() {
+
+        //Set configuration for localhost spark cluster. Intended to be run 
from IDE or similar.
+        //Use SedonaContext builder to create SparkSession with Sedona 
extensions
+        SparkSession config = SedonaContext.builder()
+                                   .appName(this.getClass().getSimpleName())
+                                   .master("local[*]")
+                                   .config("spark.ui.enabled", "false")
+                                   .config("spark.driver.extraJavaOptions",
+                                        
"--add-opens=java.base/sun.nio.ch=ALL-UNNAMED 
--add-opens=java.base/java.lang.invoke=ALL-UNNAMED")
+                                   .getOrCreate();
+
+        //Create Sedona-enabled SparkSession
+        this.session = SedonaContext.create(config);
+    }
+
+    public SparkSession getSession() {
+        // Access SparkSession object
+        return this.session;
+    }
+
+}
diff --git a/examples/java-spark-sql/src/main/resources/application.properties 
b/examples/java-spark-sql/src/main/resources/application.properties
new file mode 100644
index 0000000000..2c96c734a6
--- /dev/null
+++ b/examples/java-spark-sql/src/main/resources/application.properties
@@ -0,0 +1,18 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+parquet.path=src/test/resources/TestPoints.parquet
diff --git a/examples/java-spark-sql/src/test/java/spark/SedonaParquetTest.java 
b/examples/java-spark-sql/src/test/java/spark/SedonaParquetTest.java
new file mode 100644
index 0000000000..036cdda956
--- /dev/null
+++ b/examples/java-spark-sql/src/test/java/spark/SedonaParquetTest.java
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package spark;
+
+import org.apache.spark.sql.Dataset;
+import org.apache.spark.sql.Row;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Properties;
+
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+
+public class SedonaParquetTest {
+
+
+    protected static Properties properties;
+    protected static String parquetPath;
+    protected static SedonaSparkSession session;
+
+    public SedonaParquetTest() {
+    }
+
+    @BeforeAll
+    public static void setUpClass() throws IOException {
+
+        session = new SedonaSparkSession();
+        //Get parquetPath and any other application.properties
+        try {
+            ClassLoader loader = 
Thread.currentThread().getContextClassLoader();
+            Properties properties = new Properties();
+            InputStream is = 
loader.getResourceAsStream("application.properties");
+            properties.load(is);
+            parquetPath = properties.getProperty("parquet.path");
+        } catch (IOException e) {
+            e.printStackTrace();
+            parquetPath = "";
+        }
+
+    }
+
+    @AfterAll
+    public static void tearDownClass() {
+    }
+
+    @BeforeEach
+    public void setUp() {
+    }
+
+    @AfterEach
+    public void tearDown() {
+    }
+
+    @Test
+    public void connects() {
+        assertNotNull(session, "SparkSedonaSession not initialized 
correctly.");
+        assertNotNull(session.session, "Spark session not initialized inside 
SparkSedonaSession.");
+    }
+
+    @Test
+    public void parquetAccessible() {
+        File file = new File(parquetPath);
+        assertTrue(file.exists(), "Parquet file does not exist.");
+        assertTrue(file.canRead(), "Can't read geoparquet file on record.");
+    }
+
+    @Test
+    public void canLoadRDD() {
+        assertNotNull(session, "Session is null.");
+        Dataset<Row> insarDF = session.session.read()
+                .format("geoparquet")
+                .load(parquetPath);
+        assertNotNull(insarDF, "Dataset was not created.");
+        assertTrue(insarDF.count() > 0, "Dataset is empty.");
+    }
+
+}
diff --git a/examples/java-spark-sql/src/test/resources/TestPoints.parquet 
b/examples/java-spark-sql/src/test/resources/TestPoints.parquet
new file mode 100644
index 0000000000..8038b545c7
Binary files /dev/null and 
b/examples/java-spark-sql/src/test/resources/TestPoints.parquet differ
diff --git a/examples/java-spark-sql/src/test/resources/application.properties 
b/examples/java-spark-sql/src/test/resources/application.properties
new file mode 100644
index 0000000000..2c96c734a6
--- /dev/null
+++ b/examples/java-spark-sql/src/test/resources/application.properties
@@ -0,0 +1,18 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+parquet.path=src/test/resources/TestPoints.parquet

Reply via email to