This is an automated email from the ASF dual-hosted git repository.
jark pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/fluss.git
The following commit(s) were added to refs/heads/main by this push:
new 5d0a5843c [hotfix] Fix compile problem of InternalRow#getMap() interface in Spark connector
5d0a5843c is described below
commit 5d0a5843cadbf832d653cced57d8c4ba97658be9
Author: Jark Wu <[email protected]>
AuthorDate: Mon Jan 5 10:19:55 2026 +0800
[hotfix] Fix compile problem of InternalRow#getMap() interface in Spark connector
---
.../scala/org/apache/fluss/spark/row/SparkAsFlussArray.scala | 10 +++++++---
.../scala/org/apache/fluss/spark/row/SparkAsFlussRow.scala | 7 +++++--
2 files changed, 12 insertions(+), 5 deletions(-)
diff --git a/fluss-spark/fluss-spark-common/src/main/scala/org/apache/fluss/spark/row/SparkAsFlussArray.scala b/fluss-spark/fluss-spark-common/src/main/scala/org/apache/fluss/spark/row/SparkAsFlussArray.scala
index 00e82ea67..4ef4a32e7 100644
--- a/fluss-spark/fluss-spark-common/src/main/scala/org/apache/fluss/spark/row/SparkAsFlussArray.scala
+++ b/fluss-spark/fluss-spark-common/src/main/scala/org/apache/fluss/spark/row/SparkAsFlussArray.scala
@@ -17,10 +17,9 @@
package org.apache.fluss.spark.row
-import org.apache.fluss.row.{BinaryString, Decimal, InternalArray => FlussInternalArray, InternalRow => FlussInternalRow, TimestampLtz, TimestampNtz}
-
+import org.apache.fluss.row.{BinaryString, Decimal, InternalMap, TimestampLtz, TimestampNtz, InternalArray => FlussInternalArray, InternalRow => FlussInternalRow}
import org.apache.spark.sql.catalyst.util.{ArrayData => SparkArrayData}
-import org.apache.spark.sql.types.{ArrayType => SparkArrayType, DataType => SparkDataType, StructType}
+import org.apache.spark.sql.types.{StructType, ArrayType => SparkArrayType, DataType => SparkDataType}
/** Wraps a Spark [[SparkArrayData]] as a Fluss [[FlussInternalArray]]. */
class SparkAsFlussArray(arrayData: SparkArrayData, elementType: SparkDataType)
@@ -129,4 +128,9 @@ class SparkAsFlussArray(arrayData: SparkArrayData, elementType: SparkDataType)
override def getRow(pos: Int, numFields: Int): FlussInternalRow =
new SparkAsFlussRow(elementType.asInstanceOf[StructType])
.replace(arrayData.getStruct(pos, numFields))
+
+ /** Returns the map value at the given position. */
+ override def getMap(pos: Int): InternalMap = {
+ throw new UnsupportedOperationException()
+ }
}
diff --git a/fluss-spark/fluss-spark-common/src/main/scala/org/apache/fluss/spark/row/SparkAsFlussRow.scala b/fluss-spark/fluss-spark-common/src/main/scala/org/apache/fluss/spark/row/SparkAsFlussRow.scala
index ad8440dc7..8248692f3 100644
--- a/fluss-spark/fluss-spark-common/src/main/scala/org/apache/fluss/spark/row/SparkAsFlussRow.scala
+++ b/fluss-spark/fluss-spark-common/src/main/scala/org/apache/fluss/spark/row/SparkAsFlussRow.scala
@@ -17,8 +17,7 @@
package org.apache.fluss.spark.row
-import org.apache.fluss.row.{BinaryString, Decimal, InternalRow => FlussInternalRow, TimestampLtz, TimestampNtz}
-
+import org.apache.fluss.row.{BinaryString, Decimal, InternalMap, TimestampLtz, TimestampNtz, InternalRow => FlussInternalRow}
import org.apache.spark.sql.catalyst.{InternalRow => SparkInternalRow}
import org.apache.spark.sql.types.StructType
@@ -125,4 +124,8 @@ class SparkAsFlussRow(schema: StructType) extends FlussInternalRow with Serializ
new SparkAsFlussRow(schema.fields(pos).dataType.asInstanceOf[StructType])
.replace(row.getStruct(pos, numFields))
+ /** Returns the map value at the given position. */
+ override def getMap(pos: Int): InternalMap = {
+ throw new UnsupportedOperationException()
+ }
}