This is an automated email from the ASF dual-hosted git repository.
lzljs3620320 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/paimon.git
The following commit(s) were added to refs/heads/master by this push:
     new 67a1c0d285 [lance] Remove oss and jindo dependency from paimon-lance (#7286)
67a1c0d285 is described below
commit 67a1c0d28590bad6e01dd09cfe4187bfa629ac91
Author: Zouxxyy <[email protected]>
AuthorDate: Wed Feb 25 10:23:44 2026 +0800
[lance] Remove oss and jindo dependency from paimon-lance (#7286)
---
.../org/apache/paimon/fs/hadoop/HadoopFileIO.java | 4 ++++
paimon-lance/pom.xml | 21 ---------------------
.../org/apache/paimon/format/lance/LanceUtils.java | 20 +++++++++++++++-----
3 files changed, 19 insertions(+), 26 deletions(-)
diff --git a/paimon-common/src/main/java/org/apache/paimon/fs/hadoop/HadoopFileIO.java b/paimon-common/src/main/java/org/apache/paimon/fs/hadoop/HadoopFileIO.java
index 49ca2cdc87..6db49a919b 100644
--- a/paimon-common/src/main/java/org/apache/paimon/fs/hadoop/HadoopFileIO.java
+++ b/paimon-common/src/main/java/org/apache/paimon/fs/hadoop/HadoopFileIO.java
@@ -87,6 +87,10 @@ public class HadoopFileIO implements FileIO {
return hadoopConf.get();
}
+ public org.apache.paimon.options.Options hadoopOptions() {
+ return new org.apache.paimon.options.Options(hadoopConf.get());
+ }
+
@Override
public SeekableInputStream newInputStream(Path path) throws IOException {
org.apache.hadoop.fs.Path hadoopPath = path(path);
diff --git a/paimon-lance/pom.xml b/paimon-lance/pom.xml
index b2d1fe76d4..21bd05914e 100644
--- a/paimon-lance/pom.xml
+++ b/paimon-lance/pom.xml
@@ -61,20 +61,6 @@ under the License.
<scope>provided</scope>
</dependency>
- <dependency>
- <groupId>org.apache.paimon</groupId>
- <artifactId>paimon-oss</artifactId>
- <version>${project.version}</version>
- <scope>provided</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.paimon</groupId>
- <artifactId>paimon-oss-impl</artifactId>
- <version>${project.version}</version>
- <scope>provided</scope>
- </dependency>
-
<dependency>
<groupId>org.apache.paimon</groupId>
<artifactId>paimon-core</artifactId>
@@ -82,13 +68,6 @@ under the License.
<scope>provided</scope>
</dependency>
- <dependency>
- <groupId>org.apache.paimon</groupId>
- <artifactId>paimon-jindo</artifactId>
- <version>${project.version}</version>
- <scope>provided</scope>
- </dependency>
-
<!-- test dependencies -->
<dependency>
diff --git a/paimon-lance/src/main/java/org/apache/paimon/format/lance/LanceUtils.java b/paimon-lance/src/main/java/org/apache/paimon/format/lance/LanceUtils.java
index 3748d0403c..4edb803195 100644
--- a/paimon-lance/src/main/java/org/apache/paimon/format/lance/LanceUtils.java
+++ b/paimon-lance/src/main/java/org/apache/paimon/format/lance/LanceUtils.java
@@ -22,9 +22,7 @@ import org.apache.paimon.fs.FileIO;
import org.apache.paimon.fs.Path;
import org.apache.paimon.fs.PluginFileIO;
import org.apache.paimon.fs.hadoop.HadoopFileIO;
-import org.apache.paimon.jindo.JindoFileIO;
import org.apache.paimon.options.Options;
-import org.apache.paimon.oss.OSSFileIO;
import org.apache.paimon.rest.RESTTokenFileIO;
import org.apache.paimon.utils.Pair;
@@ -116,13 +114,25 @@ public class LanceUtils {
Options originOptions;
if (ossFileIOKlass != null && ossFileIOKlass.isInstance(fileIO)) {
- originOptions = ((OSSFileIO) fileIO).hadoopOptions();
+            try {
+                originOptions = (Options) ossFileIOKlass.getMethod("hadoopOptions").invoke(fileIO);
+            } catch (Exception e) {
+                throw new RuntimeException("Failed to invoke hadoopOptions on OSSFileIO", e);
+            }
         } else if (jindoFileIOKlass != null && jindoFileIOKlass.isInstance(fileIO)) {
-            originOptions = ((JindoFileIO) fileIO).hadoopOptions(path, isRead ? "read" : "write");
+            try {
+                originOptions =
+                        (Options)
+                                jindoFileIOKlass
+                                        .getMethod("hadoopOptions", Path.class, String.class)
+                                        .invoke(fileIO, path, isRead ? "read" : "write");
+            } catch (Exception e) {
+                throw new RuntimeException("Failed to invoke hadoopOptions on JindoFileIO", e);
+            }
         } else if (pluginFileIOKlass != null && pluginFileIOKlass.isInstance(fileIO)) {
             originOptions = ((PluginFileIO) fileIO).options();
         } else if (hadoopFileIOKlass != null && hadoopFileIOKlass.isInstance(fileIO)) {
-            originOptions = new Options(((HadoopFileIO) fileIO).hadoopConf());
+            originOptions = ((HadoopFileIO) fileIO).hadoopOptions();
} else {
originOptions = new Options();
}