This is an automated email from the ASF dual-hosted git repository.
wuchunfu pushed a commit to branch dev
in repository https://gitbox.apache.org/repos/asf/incubator-seatunnel.git
The following commit(s) were added to refs/heads/dev by this push:
new d564ef6 [Bug] [connector] Use Hudi source Serializable Exception (#1310)
d564ef6 is described below
commit d564ef6b0468cdea131b5f710cbec19d5a021940
Author: liujinhui <[email protected]>
AuthorDate: Thu Mar 3 18:42:44 2022 +0800
[Bug] [connector] Use Hudi source Serializable Exception (#1310)
* hudi_minor_bug
* MINOR
* MINOR CONTENT
Co-authored-by: liujh <[email protected]>
---
.../src/main/scala/org/apache/seatunnel/spark/sink/Hudi.scala | 2 +-
.../src/main/scala/org/apache/seatunnel/spark/source/Hudi.scala | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/seatunnel-connectors/seatunnel-connector-spark-hudi/src/main/scala/org/apache/seatunnel/spark/sink/Hudi.scala b/seatunnel-connectors/seatunnel-connector-spark-hudi/src/main/scala/org/apache/seatunnel/spark/sink/Hudi.scala
index e28573c..1ec4eb7 100644
--- a/seatunnel-connectors/seatunnel-connector-spark-hudi/src/main/scala/org/apache/seatunnel/spark/sink/Hudi.scala
+++ b/seatunnel-connectors/seatunnel-connector-spark-hudi/src/main/scala/org/apache/seatunnel/spark/sink/Hudi.scala
@@ -41,7 +41,7 @@ class Hudi extends SparkBatchSink {
  override def output(df: Dataset[Row], environment: SparkEnvironment): Unit = {
val writer = df.write.format("org.apache.hudi")
for (e <- config.entrySet()) {
- writer.option(e.getKey, e.getValue.toString)
+ writer.option(e.getKey, String.valueOf(e.getValue.unwrapped()))
}
writer.mode(config.getString("save_mode"))
.save(config.getString("hoodie.base.path"))
diff --git a/seatunnel-connectors/seatunnel-connector-spark-hudi/src/main/scala/org/apache/seatunnel/spark/source/Hudi.scala b/seatunnel-connectors/seatunnel-connector-spark-hudi/src/main/scala/org/apache/seatunnel/spark/source/Hudi.scala
index 18f64a2..ceeb49b 100644
--- a/seatunnel-connectors/seatunnel-connector-spark-hudi/src/main/scala/org/apache/seatunnel/spark/source/Hudi.scala
+++ b/seatunnel-connectors/seatunnel-connector-spark-hudi/src/main/scala/org/apache/seatunnel/spark/source/Hudi.scala
@@ -36,7 +36,7 @@ class Hudi extends SparkBatchSource {
val reader = env.getSparkSession.read.format("org.apache.hudi")
for (e <- config.entrySet()) {
- reader.option(e.getKey, e.getValue.toString)
+ reader.option(e.getKey, String.valueOf(e.getValue.unwrapped()))
}
reader.load(config.getString("hoodie.datasource.read.paths"))