This is an automated email from the ASF dual-hosted git repository.
zhangzc pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-gluten.git
The following commit(s) were added to refs/heads/main by this push:
new 7b0caf42c [GLUTEN-6067][CH] Support Spark3.5 with Scala2.13 for CH backend (#6311)
7b0caf42c is described below
commit 7b0caf42cd86a257d3592acffcf131e53e275464
Author: Zhichao Zhang <[email protected]>
AuthorDate: Wed Jul 3 09:48:17 2024 +0800
[GLUTEN-6067][CH] Support Spark3.5 with Scala2.13 for CH backend (#6311)
Support Spark3.5 with Scala2.13 for CH backend:
1. Add a profile for Scala 2.13
2. Add `toSeq` for all the ArrayBuffer usages
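Background on item 2 (context added here, not part of the original commit message): in Scala 2.13 the default `scala.Seq` alias changed from `scala.collection.Seq` to `scala.collection.immutable.Seq`, so a `mutable.ArrayBuffer` no longer satisfies a `Seq` parameter and needs an explicit `.toSeq`. A minimal illustrative sketch of the cross-version issue:

    import scala.collection.mutable.ArrayBuffer

    object ToSeqSketch {
      // On 2.13, `Seq[Int]` here means `immutable.Seq[Int]`.
      def consume(xs: Seq[Int]): Int = xs.sum

      def main(args: Array[String]): Unit = {
        val buffer = ArrayBuffer(1, 2, 3)
        // consume(buffer)              // compiles on 2.12, fails on 2.13
        println(consume(buffer.toSeq))  // compiles on both; copies on 2.13
      }
    }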
---
backends-clickhouse/pom.xml | 6 +-
.../delta/ClickhouseOptimisticTransaction.scala | 2 +-
.../org/apache/spark/sql/delta/Snapshot.scala | 24 ++++-
.../spark/sql/delta/commands/VacuumCommand.scala | 2 +-
.../source/DeltaMergeTreeFileFormat.scala | 2 +-
.../backendsapi/clickhouse/CHIteratorApi.scala | 4 +-
.../org/apache/gluten/metrics/MetricsUtil.scala | 10 +-
.../utils/MergeTreePartsPartitionsUtil.scala | 2 +-
.../clickhouse/MergeTreeFileFormatDataWriter.scala | 10 +-
...enClickHouseTPCHParquetAQEConcurrentSuite.scala | 3 +-
backends-velox/pom.xml | 6 +-
gluten-celeborn/clickhouse/pom.xml | 4 +-
gluten-core/pom.xml | 6 +-
.../scala/org/apache/gluten/GlutenPlugin.scala | 2 +-
.../gluten/execution/WholeStageTransformer.scala | 2 +-
.../apache/gluten/expression/ConverterUtils.scala | 6 +-
.../org/apache/gluten/expression/UDFMappings.scala | 6 +-
.../columnar/enumerated/EnumeratedApplier.scala | 7 +-
.../columnar/heuristic/HeuristicApplier.scala | 7 +-
.../extension/columnar/validator/Validators.scala | 2 +-
.../apache/gluten/planner/GlutenOptimization.scala | 2 +-
.../gluten/softaffinity/SoftAffinityManager.scala | 2 +-
.../spark/sql/execution/GlutenImplicits.scala | 4 +-
.../sql/execution/ShuffledColumnarBatchRDD.scala | 2 +-
.../spark/sql/hive/HivePartitionConverter.scala | 7 +-
.../apache/gluten/ras/memo/ForwardMemoTable.scala | 2 +-
.../org/apache/gluten/ras/path/PathMask.scala | 4 +-
.../org/apache/gluten/ras/mock/MockMemoState.scala | 4 +-
gluten-ras/pom.xml | 6 +-
.../org/apache/spark/sql/GlutenTestsTrait.scala | 2 +-
gluten-ut/pom.xml | 6 +-
.../parquet/GlutenParquetFilterSuite.scala | 1 -
.../parquet/GlutenParquetRowIndexSuite.scala | 1 +
pom.xml | 112 ++++++++++++++++++++-
shims/common/pom.xml | 2 +
shims/pom.xml | 2 +-
.../gluten/sql/shims/spark34/Spark34Shims.scala | 5 +-
shims/spark35/pom.xml | 6 +-
.../gluten/sql/shims/spark35/Spark35Shims.scala | 7 +-
39 files changed, 223 insertions(+), 67 deletions(-)
diff --git a/backends-clickhouse/pom.xml b/backends-clickhouse/pom.xml
index 27ebd75f2..5672056b4 100644
--- a/backends-clickhouse/pom.xml
+++ b/backends-clickhouse/pom.xml
@@ -100,7 +100,7 @@
<dependency>
<groupId>org.scalacheck</groupId>
<artifactId>scalacheck_${scala.binary.version}</artifactId>
- <version>1.13.5</version>
+ <version>1.17.0</version>
<scope>test</scope>
</dependency>
<dependency>
@@ -126,13 +126,13 @@
</dependency>
<dependency>
<groupId>org.scalatestplus</groupId>
- <artifactId>scalatestplus-mockito_2.12</artifactId>
+ <artifactId>scalatestplus-mockito_${scala.binary.version}</artifactId>
<version>1.0.0-M2</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.scalatestplus</groupId>
- <artifactId>scalatestplus-scalacheck_2.12</artifactId>
+ <artifactId>scalatestplus-scalacheck_${scala.binary.version}</artifactId>
<version>3.1.0.0-RC2</version>
<scope>test</scope>
</dependency>
diff --git a/backends-clickhouse/src/main/delta-32/org/apache/spark/sql/delta/ClickhouseOptimisticTransaction.scala b/backends-clickhouse/src/main/delta-32/org/apache/spark/sql/delta/ClickhouseOptimisticTransaction.scala
index d8ab2c1d0..9e79c4f2e 100644
--- a/backends-clickhouse/src/main/delta-32/org/apache/spark/sql/delta/ClickhouseOptimisticTransaction.scala
+++ b/backends-clickhouse/src/main/delta-32/org/apache/spark/sql/delta/ClickhouseOptimisticTransaction.scala
@@ -50,7 +50,7 @@ class ClickhouseOptimisticTransaction(
def this(
deltaLog: DeltaLog,
catalogTable: Option[CatalogTable],
- snapshotOpt: Option[Snapshot] = None) {
+ snapshotOpt: Option[Snapshot] = None) = {
this(
deltaLog,
catalogTable,
diff --git a/backends-clickhouse/src/main/delta-32/org/apache/spark/sql/delta/Snapshot.scala b/backends-clickhouse/src/main/delta-32/org/apache/spark/sql/delta/Snapshot.scala
index 13a91f051..8836f7c88 100644
--- a/backends-clickhouse/src/main/delta-32/org/apache/spark/sql/delta/Snapshot.scala
+++ b/backends-clickhouse/src/main/delta-32/org/apache/spark/sql/delta/Snapshot.scala
@@ -33,7 +33,6 @@ import org.apache.spark.sql.delta.stats.StatisticsCollection
import org.apache.spark.sql.delta.util.DeltaCommitFileProvider
import org.apache.spark.sql.delta.util.FileNames
import org.apache.spark.sql.delta.util.StateCache
-import org.apache.spark.sql.util.ScalaExtensions._
import org.apache.hadoop.fs.{FileStatus, Path}
import org.apache.spark.sql._
@@ -126,7 +125,27 @@ class Snapshot(
* This potentially triggers an IO operation to read the inCommitTimestamp.
* This is a lazy val, so repeated calls will not trigger multiple IO operations.
*/
- protected lazy val getInCommitTimestampOpt: Option[Long] =
+ protected lazy val getInCommitTimestampOpt: Option[Long] = {
+ // --- modified start
+ // This implicit is for scala 2.12, copy from scala 2.13
+ implicit class OptionExtCompanion(opt: Option.type) {
+ /**
+ * When a given condition is true, evaluates the a argument and returns Some(a).
+ * When the condition is false, a is not evaluated and None is returned.
+ */
+ def when[A](cond: Boolean)(a: => A): Option[A] = if (cond) Some(a) else None
+
+ /**
+ * When a given condition is false, evaluates the a argument and returns Some(a).
+ * When the condition is true, a is not evaluated and None is returned.
+ */
+ def whenNot[A](cond: Boolean)(a: => A): Option[A] = if (!cond) Some(a) else None
+
+ /** Sum up all the `options`, substituting `default` for each `None`. */
+ def sum[N: Numeric](default: N)(options: Option[N]*): N = options.map(_.getOrElse(default)).sum
+ }
+ // --- modified end
Option.when(DeltaConfigs.IN_COMMIT_TIMESTAMPS_ENABLED.fromMetaData(metadata)) {
_reconstructedProtocolMetadataAndICT.inCommitTimestamp
.getOrElse {
@@ -158,6 +177,7 @@ class Snapshot(
}
}
}
+ }
private[delta] lazy val nonFileActions: Seq[Action] = {
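Note on the hunk above: `Option.when` exists only in the Scala 2.13 standard library, so the commit inlines a 2.12 backport as an implicit class on the `Option` companion. A standalone sketch of the expected semantics (assumes a 2.13 compiler, where the real `Option.when` is available):

    object OptionWhenSketch {
      def main(args: Array[String]): Unit = {
        var evaluated = false
        def sideEffect(): Int = { evaluated = true; 42 }

        // cond = false: the by-name argument is not evaluated.
        assert(Option.when(false)(sideEffect()).isEmpty && !evaluated)
        // cond = true: the argument is evaluated and wrapped in Some.
        assert(Option.when(true)(sideEffect()).contains(42) && evaluated)
        println("Option.when behaves as the backport documents")
      }
    }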
diff --git a/backends-clickhouse/src/main/delta-32/org/apache/spark/sql/delta/commands/VacuumCommand.scala b/backends-clickhouse/src/main/delta-32/org/apache/spark/sql/delta/commands/VacuumCommand.scala
index 987a7c35f..9f455fb27 100644
--- a/backends-clickhouse/src/main/delta-32/org/apache/spark/sql/delta/commands/VacuumCommand.scala
+++ b/backends-clickhouse/src/main/delta-32/org/apache/spark/sql/delta/commands/VacuumCommand.scala
@@ -712,7 +712,7 @@ trait VacuumCommandImpl extends DeltaCommand {
// This is never going to be a path relative to `basePath` for DVs.
None
}
- case None => None
+ case _ => None
}
}
}
diff --git a/backends-clickhouse/src/main/delta-32/org/apache/spark/sql/execution/datasources/v2/clickhouse/source/DeltaMergeTreeFileFormat.scala b/backends-clickhouse/src/main/delta-32/org/apache/spark/sql/execution/datasources/v2/clickhouse/source/DeltaMergeTreeFileFormat.scala
index dc1d10728..2f71a0a0e 100644
--- a/backends-clickhouse/src/main/delta-32/org/apache/spark/sql/execution/datasources/v2/clickhouse/source/DeltaMergeTreeFileFormat.scala
+++ b/backends-clickhouse/src/main/delta-32/org/apache/spark/sql/execution/datasources/v2/clickhouse/source/DeltaMergeTreeFileFormat.scala
@@ -55,7 +55,7 @@ class DeltaMergeTreeFileFormat(protocol: Protocol, metadata: Metadata)
setIndexKeyOption: Option[Seq[String]],
primaryKeyOption: Option[Seq[String]],
clickhouseTableConfigs: Map[String, String],
- partitionColumns: Seq[String]) {
+ partitionColumns: Seq[String]) = {
this(protocol, metadata)
this.database = database
this.tableName = tableName
diff --git a/backends-clickhouse/src/main/scala/org/apache/gluten/backendsapi/clickhouse/CHIteratorApi.scala b/backends-clickhouse/src/main/scala/org/apache/gluten/backendsapi/clickhouse/CHIteratorApi.scala
index 376e46ebe..4b9ec7390 100644
--- a/backends-clickhouse/src/main/scala/org/apache/gluten/backendsapi/clickhouse/CHIteratorApi.scala
+++ b/backends-clickhouse/src/main/scala/org/apache/gluten/backendsapi/clickhouse/CHIteratorApi.scala
@@ -58,7 +58,7 @@ class CHIteratorApi extends IteratorApi with Logging with LogLevelUtil {
}
dataSchema += newField
}
- StructType(dataSchema)
+ StructType(dataSchema.toSeq)
}
private def createNativeIterator(
@@ -114,7 +114,7 @@ class CHIteratorApi extends IteratorApi with Logging with LogLevelUtil {
if (scan.fileFormat == ReadFileFormat.TextReadFormat) {
val names =
ConverterUtils.collectAttributeNamesWithoutExprId(scan.outputAttributes())
- localFilesNode.setFileSchema(getFileSchema(scan.getDataSchema, names.asScala))
+ localFilesNode.setFileSchema(getFileSchema(scan.getDataSchema, names.asScala.toSeq))
}
}
diff --git a/backends-clickhouse/src/main/scala/org/apache/gluten/metrics/MetricsUtil.scala b/backends-clickhouse/src/main/scala/org/apache/gluten/metrics/MetricsUtil.scala
index 1376dc6a8..e1e0f7c11 100644
--- a/backends-clickhouse/src/main/scala/org/apache/gluten/metrics/MetricsUtil.scala
+++ b/backends-clickhouse/src/main/scala/org/apache/gluten/metrics/MetricsUtil.scala
@@ -177,10 +177,12 @@ object MetricsUtil extends Logging {
/** Get all processors */
def getAllProcessorList(metricData: MetricsData): Seq[MetricsProcessor] = {
- metricData.steps.asScala.flatMap(
- step => {
- step.processors.asScala
- })
+ metricData.steps.asScala
+ .flatMap(
+ step => {
+ step.processors.asScala
+ })
+ .toSeq
}
/** Update extra time metric by the processors */
diff --git a/backends-clickhouse/src/main/scala/org/apache/spark/sql/execution/datasources/utils/MergeTreePartsPartitionsUtil.scala b/backends-clickhouse/src/main/scala/org/apache/spark/sql/execution/datasources/utils/MergeTreePartsPartitionsUtil.scala
index 80257c3b5..ac6ac959f 100644
--- a/backends-clickhouse/src/main/scala/org/apache/spark/sql/execution/datasources/utils/MergeTreePartsPartitionsUtil.scala
+++ b/backends-clickhouse/src/main/scala/org/apache/spark/sql/execution/datasources/utils/MergeTreePartsPartitionsUtil.scala
@@ -127,7 +127,7 @@ object MergeTreePartsPartitionsUtil extends Logging {
sparkSession
)
}
- partitions
+ partitions.toSeq
}
def genInputPartitionSeq(
diff --git a/backends-clickhouse/src/main/scala/org/apache/spark/sql/execution/datasources/v1/clickhouse/MergeTreeFileFormatDataWriter.scala b/backends-clickhouse/src/main/scala/org/apache/spark/sql/execution/datasources/v1/clickhouse/MergeTreeFileFormatDataWriter.scala
index 3a68ac16d..712afb378 100644
--- a/backends-clickhouse/src/main/scala/org/apache/spark/sql/execution/datasources/v1/clickhouse/MergeTreeFileFormatDataWriter.scala
+++ b/backends-clickhouse/src/main/scala/org/apache/spark/sql/execution/datasources/v1/clickhouse/MergeTreeFileFormatDataWriter.scala
@@ -117,10 +117,12 @@ abstract class MergeTreeFileFormatDataWriter(
releaseResources()
val (taskCommitMessage, taskCommitTime) = Utils.timeTakenMs {
// committer.commitTask(taskAttemptContext)
- val statuses = returnedMetrics.map(
- v => {
- v._2
- })
+ val statuses = returnedMetrics
+ .map(
+ v => {
+ v._2
+ })
+ .toSeq
new TaskCommitMessage(statuses)
}
diff --git a/backends-clickhouse/src/test/scala/org/apache/gluten/execution/GlutenClickHouseTPCHParquetAQEConcurrentSuite.scala b/backends-clickhouse/src/test/scala/org/apache/gluten/execution/GlutenClickHouseTPCHParquetAQEConcurrentSuite.scala
index 34e9658fb..9f4befbb0 100644
--- a/backends-clickhouse/src/test/scala/org/apache/gluten/execution/GlutenClickHouseTPCHParquetAQEConcurrentSuite.scala
+++ b/backends-clickhouse/src/test/scala/org/apache/gluten/execution/GlutenClickHouseTPCHParquetAQEConcurrentSuite.scala
@@ -24,6 +24,7 @@ import org.apache.spark.sql.types.DoubleType
import java.util.concurrent.ForkJoinPool
import scala.collection.parallel.ForkJoinTaskSupport
+import scala.collection.parallel.immutable.ParVector
class GlutenClickHouseTPCHParquetAQEConcurrentSuite
extends GlutenClickHouseTPCHAbstractSuite
@@ -74,7 +75,7 @@ class GlutenClickHouseTPCHParquetAQEConcurrentSuite
test("fix race condition at the global variable of
ColumnarOverrideRules::isAdaptiveContext") {
- val queries = ((1 to 22) ++ (1 to 22) ++ (1 to 22) ++ (1 to 22)).par
+ val queries = ParVector((1 to 22) ++ (1 to 22) ++ (1 to 22) ++ (1 to 22): _*)
queries.tasksupport = new ForkJoinTaskSupport(new ForkJoinPool(22))
queries.map(queryId => runTPCHQuery(queryId) { df => })
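Note on the hunk above: calling `.par` on a standard collection was dropped in Scala 2.13, where parallel collections live in the separate org.scala-lang.modules:scala-parallel-collections module. Constructing a `ParVector` explicitly works on both binary versions once that module is on the classpath. A minimal sketch (assumes the scala-parallel-collections dependency):

    import java.util.concurrent.ForkJoinPool
    import scala.collection.parallel.ForkJoinTaskSupport
    import scala.collection.parallel.immutable.ParVector

    object ParVectorSketch {
      def main(args: Array[String]): Unit = {
        // Build the parallel collection directly instead of calling `.par`.
        val tasks = ParVector(1 to 8: _*)
        tasks.tasksupport = new ForkJoinTaskSupport(new ForkJoinPool(4))
        println(tasks.map(_ * 2).sum) // the mapping runs on the ForkJoinPool
      }
    }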
diff --git a/backends-velox/pom.xml b/backends-velox/pom.xml
index 70b8b901b..de529a34d 100755
--- a/backends-velox/pom.xml
+++ b/backends-velox/pom.xml
@@ -87,7 +87,7 @@
<dependency>
<groupId>org.scalacheck</groupId>
<artifactId>scalacheck_${scala.binary.version}</artifactId>
- <version>1.13.5</version>
+ <version>1.17.0</version>
<scope>test</scope>
</dependency>
<dependency>
@@ -113,13 +113,13 @@
</dependency>
<dependency>
<groupId>org.scalatestplus</groupId>
- <artifactId>scalatestplus-mockito_2.12</artifactId>
+ <artifactId>scalatestplus-mockito_${scala.binary.version}</artifactId>
<version>1.0.0-M2</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.scalatestplus</groupId>
- <artifactId>scalatestplus-scalacheck_2.12</artifactId>
+ <artifactId>scalatestplus-scalacheck_${scala.binary.version}</artifactId>
<version>3.1.0.0-RC2</version>
<scope>test</scope>
</dependency>
diff --git a/gluten-celeborn/clickhouse/pom.xml b/gluten-celeborn/clickhouse/pom.xml
index 74b81031f..f17f5968d 100755
--- a/gluten-celeborn/clickhouse/pom.xml
+++ b/gluten-celeborn/clickhouse/pom.xml
@@ -127,7 +127,7 @@
</dependency>
<dependency>
<groupId>org.scalatestplus</groupId>
- <artifactId>scalatestplus-mockito_2.12</artifactId>
+ <artifactId>scalatestplus-mockito_${scala.binary.version}</artifactId>
<version>1.0.0-M2</version>
<scope>test</scope>
</dependency>
@@ -138,7 +138,7 @@
</dependency>
<dependency>
<groupId>org.scalatestplus</groupId>
- <artifactId>scalatestplus-scalacheck_2.12</artifactId>
+ <artifactId>scalatestplus-scalacheck_${scala.binary.version}</artifactId>
<version>3.1.0.0-RC2</version>
<scope>test</scope>
</dependency>
diff --git a/gluten-core/pom.xml b/gluten-core/pom.xml
index 740de5928..880eddb4e 100644
--- a/gluten-core/pom.xml
+++ b/gluten-core/pom.xml
@@ -84,7 +84,7 @@
<dependency>
<groupId>org.scalacheck</groupId>
<artifactId>scalacheck_${scala.binary.version}</artifactId>
- <version>1.13.5</version>
+ <version>1.17.0</version>
<scope>test</scope>
</dependency>
<dependency>
@@ -111,13 +111,13 @@
</dependency>
<dependency>
<groupId>org.scalatestplus</groupId>
- <artifactId>scalatestplus-mockito_2.12</artifactId>
+ <artifactId>scalatestplus-mockito_${scala.binary.version}</artifactId>
<version>1.0.0-M2</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.scalatestplus</groupId>
- <artifactId>scalatestplus-scalacheck_2.12</artifactId>
+ <artifactId>scalatestplus-scalacheck_${scala.binary.version}</artifactId>
<version>3.1.0.0-RC2</version>
<scope>test</scope>
</dependency>
diff --git a/gluten-core/src/main/scala/org/apache/gluten/GlutenPlugin.scala b/gluten-core/src/main/scala/org/apache/gluten/GlutenPlugin.scala
index 0f397c692..16929ca4b 100644
--- a/gluten-core/src/main/scala/org/apache/gluten/GlutenPlugin.scala
+++ b/gluten-core/src/main/scala/org/apache/gluten/GlutenPlugin.scala
@@ -300,7 +300,7 @@ private[gluten] class GlutenSessionExtensions extends (SparkSessionExtensions =>
}
private[gluten] trait GlutenSparkExtensionsInjector {
- def inject(extensions: SparkSessionExtensions)
+ def inject(extensions: SparkSessionExtensions): Unit
}
private[gluten] object GlutenPlugin {
diff --git a/gluten-core/src/main/scala/org/apache/gluten/execution/WholeStageTransformer.scala b/gluten-core/src/main/scala/org/apache/gluten/execution/WholeStageTransformer.scala
index a49e8aa51..78132c08c 100644
--- a/gluten-core/src/main/scala/org/apache/gluten/execution/WholeStageTransformer.scala
+++ b/gluten-core/src/main/scala/org/apache/gluten/execution/WholeStageTransformer.scala
@@ -265,7 +265,7 @@ case class WholeStageTransformer(child: SparkPlan, materializeInput: Boolean = f
}
transformChildren(child, basicScanExecTransformers)
- basicScanExecTransformers
+ basicScanExecTransformers.toSeq
}
override def doExecuteColumnar(): RDD[ColumnarBatch] = {
diff --git a/gluten-core/src/main/scala/org/apache/gluten/expression/ConverterUtils.scala b/gluten-core/src/main/scala/org/apache/gluten/expression/ConverterUtils.scala
index a944de3d3..473ee7f9d 100644
--- a/gluten-core/src/main/scala/org/apache/gluten/expression/ConverterUtils.scala
+++ b/gluten-core/src/main/scala/org/apache/gluten/expression/ConverterUtils.scala
@@ -73,7 +73,7 @@ object ConverterUtils extends Logging {
}
def collectAttributeTypeNodes(attributes: JList[Attribute]): JList[TypeNode] = {
- collectAttributeTypeNodes(attributes.asScala)
+ collectAttributeTypeNodes(attributes.asScala.toSeq)
}
def collectAttributeTypeNodes(attributes: Seq[Attribute]): JList[TypeNode] = {
@@ -85,7 +85,7 @@ object ConverterUtils extends Logging {
}
def collectAttributeNamesWithExprId(attributes: JList[Attribute]): JList[String] = {
- collectAttributeNamesWithExprId(attributes.asScala)
+ collectAttributeNamesWithExprId(attributes.asScala.toSeq)
}
def collectAttributeNamesWithExprId(attributes: Seq[Attribute]): JList[String] = {
@@ -197,7 +197,7 @@ object ConverterUtils extends Logging {
val (field, nullable) = parseFromSubstraitType(typ)
StructField("", field, nullable)
}
- (StructType(fields), isNullable(substraitType.getStruct.getNullability))
+ (StructType(fields.toSeq), isNullable(substraitType.getStruct.getNullability))
case Type.KindCase.LIST =>
val list = substraitType.getList
val (elementType, containsNull) = parseFromSubstraitType(list.getType)
diff --git a/gluten-core/src/main/scala/org/apache/gluten/expression/UDFMappings.scala b/gluten-core/src/main/scala/org/apache/gluten/expression/UDFMappings.scala
index 7c836252b..3b64c5117 100644
--- a/gluten-core/src/main/scala/org/apache/gluten/expression/UDFMappings.scala
+++ b/gluten-core/src/main/scala/org/apache/gluten/expression/UDFMappings.scala
@@ -32,7 +32,7 @@ object UDFMappings extends Logging {
val pythonUDFMap: Map[String, String] = Map()
val scalaUDFMap: Map[String, String] = Map()
- private def appendKVToMap(key: String, value: String, res: Map[String, String]) {
+ private def appendKVToMap(key: String, value: String, res: Map[String, String]): Unit = {
if (key.isEmpty || value.isEmpty()) {
throw new IllegalArgumentException(s"key:$key or value:$value is empty")
}
@@ -46,7 +46,7 @@ object UDFMappings extends Logging {
res.put(key.toLowerCase(Locale.ROOT), value)
}
- private def parseStringToMap(input: String, res: Map[String, String]) {
+ private def parseStringToMap(input: String, res: Map[String, String]): Unit = {
input.split(",").map {
item =>
val keyValue = item.split(":")
@@ -57,7 +57,7 @@ object UDFMappings extends Logging {
}
}
- def loadFromSparkConf(conf: SparkConf) {
+ def loadFromSparkConf(conf: SparkConf): Unit = {
val strHiveUDFs = conf.get(GlutenConfig.GLUTEN_SUPPORTED_HIVE_UDFS, "")
if (!StringUtils.isBlank(strHiveUDFs)) {
parseStringToMap(strHiveUDFs, hiveUDFMap)
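Note on the hunk above: the `: Unit =` additions remove Scala's procedure syntax, which is deprecated under 2.13 (and escalated to an error by the `-Wconf:...procedure syntax is deprecated:e` flag the root pom adds below). A minimal sketch of the two forms:

    object ProcedureSyntaxSketch {
      // Procedure syntax, deprecated under Scala 2.13:
      //   def log(msg: String) { println(msg) }

      // Explicit result type plus `=`, valid on both 2.12 and 2.13:
      def log(msg: String): Unit = println(msg)

      def main(args: Array[String]): Unit = log("hello")
    }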
diff --git a/gluten-core/src/main/scala/org/apache/gluten/extension/columnar/enumerated/EnumeratedApplier.scala b/gluten-core/src/main/scala/org/apache/gluten/extension/columnar/enumerated/EnumeratedApplier.scala
index d5260f66a..3d7509abc 100644
--- a/gluten-core/src/main/scala/org/apache/gluten/extension/columnar/enumerated/EnumeratedApplier.scala
+++ b/gluten-core/src/main/scala/org/apache/gluten/extension/columnar/enumerated/EnumeratedApplier.scala
@@ -45,7 +45,12 @@ class EnumeratedApplier(session: SparkSession)
with Logging
with LogLevelUtil {
// An empirical value.
- private val aqeStackTraceIndex = 16
+ private val aqeStackTraceIndex =
+ if (scala.util.Properties.releaseVersion.exists(_.startsWith("2.12"))) {
+ 16
+ } else {
+ 14
+ }
private val adaptiveContext = AdaptiveContext(session, aqeStackTraceIndex)
override def apply(plan: SparkPlan, outputsColumnar: Boolean): SparkPlan =
diff --git a/gluten-core/src/main/scala/org/apache/gluten/extension/columnar/heuristic/HeuristicApplier.scala b/gluten-core/src/main/scala/org/apache/gluten/extension/columnar/heuristic/HeuristicApplier.scala
index d925bc231..34bcf3220 100644
--- a/gluten-core/src/main/scala/org/apache/gluten/extension/columnar/heuristic/HeuristicApplier.scala
+++ b/gluten-core/src/main/scala/org/apache/gluten/extension/columnar/heuristic/HeuristicApplier.scala
@@ -40,7 +40,12 @@ class HeuristicApplier(session: SparkSession)
with Logging
with LogLevelUtil {
// This is an empirical value, may need to be changed for supporting other versions of spark.
- private val aqeStackTraceIndex = 19
+ private val aqeStackTraceIndex =
+ if (scala.util.Properties.releaseVersion.exists(_.startsWith("2.12"))) {
+ 19
+ } else {
+ 17
+ }
private val adaptiveContext = AdaptiveContext(session, aqeStackTraceIndex)
override def apply(plan: SparkPlan, outputsColumnar: Boolean): SparkPlan = {
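Note on the two hunks above: the empirical AQE stack-trace index differs between Scala versions (call depths changed in the 2.13 library), so both appliers now branch on the running standard-library version via `scala.util.Properties.releaseVersion`, an `Option[String]` such as Some("2.12.15"). A minimal sketch of the check; the constants are the patch's empirical values, not derived here:

    object ScalaVersionSketch {
      // Same test EnumeratedApplier/HeuristicApplier use.
      val index: Int =
        if (scala.util.Properties.releaseVersion.exists(_.startsWith("2.12"))) 16
        else 14

      def main(args: Array[String]): Unit =
        println(s"Scala ${scala.util.Properties.versionNumberString} -> index $index")
    }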
diff --git a/gluten-core/src/main/scala/org/apache/gluten/extension/columnar/validator/Validators.scala b/gluten-core/src/main/scala/org/apache/gluten/extension/columnar/validator/Validators.scala
index 56b63ef84..210353750 100644
--- a/gluten-core/src/main/scala/org/apache/gluten/extension/columnar/validator/Validators.scala
+++ b/gluten-core/src/main/scala/org/apache/gluten/extension/columnar/validator/Validators.scala
@@ -97,7 +97,7 @@ object Validators {
if (buffer.isEmpty) {
NoopValidator
} else {
- new ValidatorPipeline(buffer)
+ new ValidatorPipeline(buffer.toSeq)
}
}
}
diff --git a/gluten-core/src/main/scala/org/apache/gluten/planner/GlutenOptimization.scala b/gluten-core/src/main/scala/org/apache/gluten/planner/GlutenOptimization.scala
index 555e7d614..5b24f5963 100644
--- a/gluten-core/src/main/scala/org/apache/gluten/planner/GlutenOptimization.scala
+++ b/gluten-core/src/main/scala/org/apache/gluten/planner/GlutenOptimization.scala
@@ -61,7 +61,7 @@ object GlutenOptimization {
GlutenMetadataModel(),
GlutenPropertyModel(),
GlutenExplain,
- RasRule.Factory.reuse(rules))
+ RasRule.Factory.reuse(rules.toSeq))
}
}
}
diff --git a/gluten-core/src/main/scala/org/apache/gluten/softaffinity/SoftAffinityManager.scala b/gluten-core/src/main/scala/org/apache/gluten/softaffinity/SoftAffinityManager.scala
index 72d590d04..278e1b550 100644
--- a/gluten-core/src/main/scala/org/apache/gluten/softaffinity/SoftAffinityManager.scala
+++ b/gluten-core/src/main/scala/org/apache/gluten/softaffinity/SoftAffinityManager.scala
@@ -263,7 +263,7 @@ abstract class AffinityManager extends LogLevelUtil with Logging {
rand.shuffle(hosts)
logOnLevel(logLevel, s"get host for $f: ${hosts.distinct.mkString(",")}")
}
- hosts.distinct
+ hosts.distinct.toSeq
}
def updatePartitionMap(f: FilePartition, rddId: Int): Unit = {
diff --git a/gluten-core/src/main/scala/org/apache/spark/sql/execution/GlutenImplicits.scala b/gluten-core/src/main/scala/org/apache/spark/sql/execution/GlutenImplicits.scala
index b0dc3a958..eb42f0a88 100644
--- a/gluten-core/src/main/scala/org/apache/spark/sql/execution/GlutenImplicits.scala
+++ b/gluten-core/src/main/scala/org/apache/spark/sql/execution/GlutenImplicits.scala
@@ -205,8 +205,8 @@ object GlutenImplicits {
FallbackSummary(
totalNumGlutenNodes,
totalNumFallbackNodes,
- totalPhysicalPlanDescription,
- totalFallbackNodeToReason
+ totalPhysicalPlanDescription.toSeq,
+ totalFallbackNodeToReason.toSeq
)
}
diff --git a/gluten-core/src/main/scala/org/apache/spark/sql/execution/ShuffledColumnarBatchRDD.scala b/gluten-core/src/main/scala/org/apache/spark/sql/execution/ShuffledColumnarBatchRDD.scala
index 42db17b6c..0642c3a24 100644
--- a/gluten-core/src/main/scala/org/apache/spark/sql/execution/ShuffledColumnarBatchRDD.scala
+++ b/gluten-core/src/main/scala/org/apache/spark/sql/execution/ShuffledColumnarBatchRDD.scala
@@ -139,7 +139,7 @@ class ShuffledColumnarBatchRDD(
}
}
- override def clearDependencies() {
+ override def clearDependencies(): Unit = {
super.clearDependencies()
dependency = null
}
diff --git a/gluten-core/src/main/scala/org/apache/spark/sql/hive/HivePartitionConverter.scala b/gluten-core/src/main/scala/org/apache/spark/sql/hive/HivePartitionConverter.scala
index d76eca3d3..3a65d6f55 100644
--- a/gluten-core/src/main/scala/org/apache/spark/sql/hive/HivePartitionConverter.scala
+++ b/gluten-core/src/main/scala/org/apache/spark/sql/hive/HivePartitionConverter.scala
@@ -80,9 +80,10 @@ class HivePartitionConverter(hadoopConf: Configuration, session: SparkSession)
// just like for Apache Spark.
val uri = p.getDataLocation.toUri
val partValues: Seq[Any] = {
- p.getValues.asScala.zip(partitionColTypes).map {
- case (value, dataType) => castFromString(value, dataType)
- }
+ p.getValues.asScala
+ .zip(partitionColTypes)
+ .map { case (value, dataType) => castFromString(value, dataType) }
+ .toSeq
}
val partValuesAsInternalRow = InternalRow.fromSeq(partValues)
diff --git a/gluten-ras/common/src/main/scala/org/apache/gluten/ras/memo/ForwardMemoTable.scala b/gluten-ras/common/src/main/scala/org/apache/gluten/ras/memo/ForwardMemoTable.scala
index dd4033866..b99fb280f 100644
--- a/gluten-ras/common/src/main/scala/org/apache/gluten/ras/memo/ForwardMemoTable.scala
+++ b/gluten-ras/common/src/main/scala/org/apache/gluten/ras/memo/ForwardMemoTable.scala
@@ -155,7 +155,7 @@ class ForwardMemoTable[T <: AnyRef] private (override val ras: Ras[T])
groupBuffer(id)
}
- override def allClusterKeys(): Seq[RasClusterKey] = clusterKeyBuffer
+ override def allClusterKeys(): Seq[RasClusterKey] = clusterKeyBuffer.toSeq
override def allGroupIds(): Seq[Int] = {
val from = -dummyGroupBuffer.size
diff --git a/gluten-ras/common/src/main/scala/org/apache/gluten/ras/path/PathMask.scala b/gluten-ras/common/src/main/scala/org/apache/gluten/ras/path/PathMask.scala
index a8caded40..c7dd3d2c0 100644
--- a/gluten-ras/common/src/main/scala/org/apache/gluten/ras/path/PathMask.scala
+++ b/gluten-ras/common/src/main/scala/org/apache/gluten/ras/path/PathMask.scala
@@ -96,7 +96,7 @@ object PathMask {
return None
}
- PathMask(buffer)
+ PathMask(buffer.toSeq)
}
Some(out)
@@ -168,7 +168,7 @@ object PathMask {
dfs(0, 0)
- PathMask(buffer)
+ PathMask(buffer.toSeq)
}
// Return the sub-mask whose root node is the node at the input index
diff --git a/gluten-ras/common/src/test/scala/org/apache/gluten/ras/mock/MockMemoState.scala b/gluten-ras/common/src/test/scala/org/apache/gluten/ras/mock/MockMemoState.scala
index 37d66e2bd..1c8458af3 100644
--- a/gluten-ras/common/src/test/scala/org/apache/gluten/ras/mock/MockMemoState.scala
+++ b/gluten-ras/common/src/test/scala/org/apache/gluten/ras/mock/MockMemoState.scala
@@ -102,7 +102,7 @@ object MockMemoState {
nodeBuffer ++= nodes
}
- override def nodes(): Seq[CanonicalNode[T]] = nodeBuffer
+ override def nodes(): Seq[CanonicalNode[T]] = nodeBuffer.toSeq
}
object MockMutableCluster {
@@ -153,7 +153,7 @@ object MockMemoState {
group
}
- def allGroups(): Seq[MockMutableGroup[T]] = groupBuffer
+ def allGroups(): Seq[MockMutableGroup[T]] = groupBuffer.toSeq
}
object Factory {
diff --git a/gluten-ras/pom.xml b/gluten-ras/pom.xml
index e2e8fccb2..973af760f 100644
--- a/gluten-ras/pom.xml
+++ b/gluten-ras/pom.xml
@@ -32,7 +32,7 @@
<dependency>
<groupId>org.scalacheck</groupId>
<artifactId>scalacheck_${scala.binary.version}</artifactId>
- <version>1.13.5</version>
+ <version>1.17.0</version>
<scope>test</scope>
</dependency>
<dependency>
@@ -48,13 +48,13 @@
</dependency>
<dependency>
<groupId>org.scalatestplus</groupId>
- <artifactId>scalatestplus-mockito_2.12</artifactId>
+ <artifactId>scalatestplus-mockito_${scala.binary.version}</artifactId>
<version>1.0.0-M2</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.scalatestplus</groupId>
- <artifactId>scalatestplus-scalacheck_2.12</artifactId>
+ <artifactId>scalatestplus-scalacheck_${scala.binary.version}</artifactId>
<version>3.1.0.0-RC2</version>
<scope>test</scope>
</dependency>
diff --git a/gluten-ut/common/src/test/scala/org/apache/spark/sql/GlutenTestsTrait.scala b/gluten-ut/common/src/test/scala/org/apache/spark/sql/GlutenTestsTrait.scala
index ee765ed36..5df9d0071 100644
--- a/gluten-ut/common/src/test/scala/org/apache/spark/sql/GlutenTestsTrait.scala
+++ b/gluten-ut/common/src/test/scala/org/apache/spark/sql/GlutenTestsTrait.scala
@@ -360,6 +360,6 @@ trait GlutenTestsTrait extends GlutenTestsCommonTrait {
}
_spark.internalCreateDataFrame(
_spark.sparkContext.parallelize(Seq(inputRow)),
- StructType(structFileSeq))
+ StructType(structFileSeq.toSeq))
}
}
diff --git a/gluten-ut/pom.xml b/gluten-ut/pom.xml
index 79afa94c8..1e474042d 100644
--- a/gluten-ut/pom.xml
+++ b/gluten-ut/pom.xml
@@ -98,7 +98,7 @@
<dependency>
<groupId>org.scalacheck</groupId>
<artifactId>scalacheck_${scala.binary.version}</artifactId>
- <version>1.13.5</version>
+ <version>1.17.0</version>
<scope>test</scope>
</dependency>
<dependency>
@@ -145,13 +145,13 @@
</dependency>
<dependency>
<groupId>org.scalatestplus</groupId>
- <artifactId>scalatestplus-mockito_2.12</artifactId>
+ <artifactId>scalatestplus-mockito_${scala.binary.version}</artifactId>
<version>1.0.0-M2</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.scalatestplus</groupId>
- <artifactId>scalatestplus-scalacheck_2.12</artifactId>
+ <artifactId>scalatestplus-scalacheck_${scala.binary.version}</artifactId>
<version>3.1.0.0-RC2</version>
<scope>test</scope>
</dependency>
diff --git a/gluten-ut/spark35/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/GlutenParquetFilterSuite.scala b/gluten-ut/spark35/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/GlutenParquetFilterSuite.scala
index bb4a78a82..4141acee3 100644
--- a/gluten-ut/spark35/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/GlutenParquetFilterSuite.scala
+++ b/gluten-ut/spark35/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/GlutenParquetFilterSuite.scala
@@ -37,7 +37,6 @@ import org.apache.spark.util.Utils
import org.apache.hadoop.fs.Path
import org.apache.parquet.filter2.predicate.{FilterApi, FilterPredicate, Operators}
import org.apache.parquet.filter2.predicate.FilterApi._
-import org.apache.parquet.filter2.predicate.Operators
import org.apache.parquet.filter2.predicate.Operators.{Column => _, Eq, Gt, GtEq, Lt, LtEq, NotEq}
import org.apache.parquet.hadoop.{ParquetFileReader, ParquetInputFormat, ParquetOutputFormat}
import org.apache.parquet.hadoop.util.HadoopInputFile
diff --git a/gluten-ut/spark35/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/GlutenParquetRowIndexSuite.scala b/gluten-ut/spark35/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/GlutenParquetRowIndexSuite.scala
index ad41a8395..4c5339679 100644
--- a/gluten-ut/spark35/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/GlutenParquetRowIndexSuite.scala
+++ b/gluten-ut/spark35/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/GlutenParquetRowIndexSuite.scala
@@ -49,6 +49,7 @@ class GlutenParquetRowIndexSuite extends ParquetRowIndexSuite with GlutenSQLTest
.getBlocks
.asScala
.map(_.getRowCount)
+ .toSeq
}
private def readRowGroupRowCounts(dir: File): Seq[Seq[Long]] = {
diff --git a/pom.xml b/pom.xml
index 887839ce5..f12469317 100644
--- a/pom.xml
+++ b/pom.xml
@@ -59,6 +59,8 @@
<arrow-gluten.version>15.0.0-gluten</arrow-gluten.version>
<arrow-memory.artifact>arrow-memory-unsafe</arrow-memory.artifact>
<hadoop.version>2.7.4</hadoop.version>
+ <slf4j.version>2.0.7</slf4j.version>
+ <log4j.version>2.20.0</log4j.version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<project.prefix>spark-sql-columnar</project.prefix>
@@ -113,6 +115,100 @@
</properties>
<profiles>
+ <profile>
+ <id>scala-2.12</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <properties>
+ <!--
+ SPARK-34774 Add this property to ensure change-scala-version.sh can replace the public `scala.version`
+ property correctly.
+ -->
+ <scala.version>2.12.15</scala.version>
+ <scala.binary.version>2.12</scala.binary.version>
+ </properties>
+ </profile>
+ <profile>
+ <id>scala-2.13</id>
+ <properties>
+ <scala.version>2.13.8</scala.version>
+ <scala.binary.version>2.13</scala.binary.version>
+ </properties>
+ <build>
+ <pluginManagement>
+ <plugins>
+ <plugin>
+ <groupId>net.alchim31.maven</groupId>
+ <artifactId>scala-maven-plugin</artifactId>
+ <configuration>
+ <!-- TODO: Fix the plugin scalawarts to support scala 2.13 in IDEA
+ <compilerPlugins>
+ <compilerPlugin>
+ <groupId>org.wartremover</groupId>
+ <artifactId>wartremover_${scala.binary.version}</artifactId>
+ <version>3.1.6</version>
+ </compilerPlugin>
+ </compilerPlugins>
+ <dependencies>
+ <dependency>
+ <groupId>io.github.zhztheplayer.scalawarts</groupId>
+ <artifactId>scalawarts_${scala.binary.version}</artifactId>
+ <version>0.1.2</version>
+ </dependency>
+ </dependencies> -->
+ <args>
+ <arg>-unchecked</arg>
+ <arg>-deprecation</arg>
+ <arg>-feature</arg>
+ <arg>-explaintypes</arg>
+ <arg>-target:jvm-1.8</arg>
+ <arg>-Wconf:cat=deprecation:wv,any:e</arg>
+ <arg>-Wunused:imports</arg>
+ <!--
+ TODO(SPARK-33805): Undo the corresponding deprecated usage suppression rule after fixed
+ -->
+ <arg>-Wconf:cat=scaladoc:wv</arg>
+ <arg>-Wconf:cat=lint-multiarg-infix:wv</arg>
+ <arg>-Wconf:cat=other-nullary-override:wv</arg>
+ <!--
+ SPARK-33775 Suppress compilation warnings that contain the following contents.
+ TODO(SPARK-33805): Undo the corresponding deprecated usage suppression rule after fixed.
+ -->
+ <arg>-Wconf:msg=^(?=.*?method|value|type|object|trait|inheritance)(?=.*?deprecated)(?=.*?since 2.13).+$:s</arg>
+ <arg>-Wconf:msg=^(?=.*?Widening conversion from)(?=.*?is deprecated because it loses precision).+$:s</arg>
+ <arg>-Wconf:msg=Auto-application to \`\(\)\` is deprecated:s</arg>
+ <arg>-Wconf:msg=method with a single empty parameter list overrides method without any parameter list:s</arg>
+ <arg>-Wconf:msg=method without a parameter list overrides a method with a single empty one:s</arg>
+ <!--
+ SPARK-35574 Prevent the recurrence of compilation warnings related to
+ `procedure syntax is deprecated`
+ -->
+ <arg>-Wconf:cat=deprecation&msg=procedure syntax is deprecated:e</arg>
+ <!--
+ SPARK-35496 Upgrade Scala to 2.13.7 and suppress:
+ 1. `The outer reference in this type test cannot be checked at run time`
+ 2. `the type test for pattern TypeA cannot be checked at runtime because it
+ has type parameters eliminated by erasure`
+ 3. `abstract type TypeA in type pattern Seq[TypeA] (the underlying of
+ Seq[TypeA]) is unchecked since it is eliminated by erasure`
+ 4. `fruitless type test: a value of TypeA cannot also be a TypeB`
+ -->
+ <arg>-Wconf:cat=unchecked&msg=outer reference:s</arg>
+ <arg>-Wconf:cat=unchecked&msg=eliminated by erasure:s</arg>
+ <arg>-Wconf:msg=^(?=.*?a value of type)(?=.*?cannot also be).+$:s</arg>
+ <!--
+ <arg>-P:wartremover:traverser:io.github.zhztheplayer.scalawarts.InheritFromCaseClass</arg>
+ -->
+ </args>
+ <compilerPlugins combine.self="override">
+ </compilerPlugins>
+ </configuration>
+ </plugin>
+ </plugins>
+ </pluginManagement>
+ </build>
+ </profile>
<profile>
<id>java-8</id>
<activation>
@@ -196,6 +292,20 @@
<fasterxml.version>2.15.1</fasterxml.version>
<hadoop.version>3.3.4</hadoop.version>
</properties>
+ <dependencies>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ <version>${slf4j.version}</version>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-slf4j2-impl</artifactId>
+ <version>${log4j.version}</version>
+ <scope>provided</scope>
+ </dependency>
+ </dependencies>
</profile>
<profile>
<id>hadoop-2.7.4</id>
@@ -521,7 +631,7 @@
<dependency>
<groupId>org.scalatest</groupId>
<artifactId>scalatest_${scala.binary.version}</artifactId>
- <version>3.2.3</version>
+ <version>3.2.16</version>
<scope>test</scope>
</dependency>
<!-- Fasterxml -->
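Note on the pom.xml hunks above: `scala-2.12` remains the active-by-default profile, while the new `scala-2.13` profile supplies the 2.13 compiler version and the extra `-Wconf` flags. As with any Maven profile, a 2.13 build would presumably be selected with `-Pscala-2.13` on the `mvn` command line, possibly after running a change-scala-version script as the SPARK-34774 comment suggests (the exact invocation is not part of this commit).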
diff --git a/shims/common/pom.xml b/shims/common/pom.xml
index adf9da7c6..959a95fc0 100644
--- a/shims/common/pom.xml
+++ b/shims/common/pom.xml
@@ -61,7 +61,9 @@
<configuration>
<args>
<arg>-Wconf:cat=deprecation:silent</arg>
+ <!--
<arg>-P:wartremover:traverser:io.github.zhztheplayer.scalawarts.InheritFromCaseClass</arg>
+ -->
</args>
</configuration>
</plugin>
diff --git a/shims/pom.xml b/shims/pom.xml
index 5c17c3ec3..61bea7040 100644
--- a/shims/pom.xml
+++ b/shims/pom.xml
@@ -37,7 +37,7 @@
<dependency>
<groupId>org.scalatest</groupId>
<artifactId>scalatest_${scala.binary.version}</artifactId>
- <version>3.2.3</version>
+ <version>3.2.16</version>
<scope>test</scope>
</dependency>
<!--The parent POM excluded these jars. Add back for testing-->
diff --git a/shims/spark34/src/main/scala/org/apache/gluten/sql/shims/spark34/Spark34Shims.scala b/shims/spark34/src/main/scala/org/apache/gluten/sql/shims/spark34/Spark34Shims.scala
index 420be8511..203256cf5 100644
--- a/shims/spark34/src/main/scala/org/apache/gluten/sql/shims/spark34/Spark34Shims.scala
+++ b/shims/spark34/src/main/scala/org/apache/gluten/sql/shims/spark34/Spark34Shims.scala
@@ -77,7 +77,10 @@ class Spark34Shims extends SparkShims {
Sig[Sec](ExpressionNames.SEC),
Sig[Csc](ExpressionNames.CSC),
Sig[KnownNullable](KNOWN_NULLABLE),
- Sig[Empty2Null](ExpressionNames.EMPTY2NULL)
+ Sig[Empty2Null](ExpressionNames.EMPTY2NULL),
+ Sig[TimestampAdd](ExpressionNames.TIMESTAMP_ADD),
+ Sig[RoundFloor](ExpressionNames.FLOOR),
+ Sig[RoundCeil](ExpressionNames.CEIL)
)
}
diff --git a/shims/spark35/pom.xml b/shims/spark35/pom.xml
index 27cd011ac..1c79b882b 100644
--- a/shims/spark35/pom.xml
+++ b/shims/spark35/pom.xml
@@ -43,13 +43,13 @@
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
- <artifactId>spark-catalyst_2.12</artifactId>
+ <artifactId>spark-catalyst_${scala.binary.version}</artifactId>
<scope>provided</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
- <artifactId>spark-core_2.12</artifactId>
+ <artifactId>spark-core_${scala.binary.version}</artifactId>
<scope>provided</scope>
<optional>true</optional>
</dependency>
@@ -109,7 +109,9 @@
<configuration>
<args>
<arg>-Wconf:cat=deprecation:silent</arg>
+ <!--
<arg>-P:wartremover:traverser:io.github.zhztheplayer.scalawarts.InheritFromCaseClass</arg>
+ -->
</args>
</configuration>
</plugin>
diff --git a/shims/spark35/src/main/scala/org/apache/gluten/sql/shims/spark35/Spark35Shims.scala b/shims/spark35/src/main/scala/org/apache/gluten/sql/shims/spark35/Spark35Shims.scala
index 8ac8d323e..821e0f583 100644
--- a/shims/spark35/src/main/scala/org/apache/gluten/sql/shims/spark35/Spark35Shims.scala
+++ b/shims/spark35/src/main/scala/org/apache/gluten/sql/shims/spark35/Spark35Shims.scala
@@ -43,7 +43,7 @@ import org.apache.spark.sql.connector.read.{HasPartitionKey, InputPartition, Sca
import org.apache.spark.sql.execution._
import org.apache.spark.sql.execution.command.DataWritingCommandExec
import org.apache.spark.sql.execution.datasources._
-import org.apache.spark.sql.execution.datasources.parquet.{ParquetFileFormat, ParquetFilters, ParquetRowIndexUtil}
+import org.apache.spark.sql.execution.datasources.parquet.{ParquetFileFormat, ParquetFilters}
import org.apache.spark.sql.execution.datasources.v2.BatchScanExec
import org.apache.spark.sql.execution.datasources.v2.text.TextScan
import org.apache.spark.sql.execution.datasources.v2.utils.CatalogUtil
@@ -77,7 +77,10 @@ class Spark35Shims extends SparkShims {
Sig[Sec](ExpressionNames.SEC),
Sig[Csc](ExpressionNames.CSC),
Sig[KnownNullable](ExpressionNames.KNOWN_NULLABLE),
- Sig[Empty2Null](ExpressionNames.EMPTY2NULL)
+ Sig[Empty2Null](ExpressionNames.EMPTY2NULL),
+ Sig[TimestampAdd](ExpressionNames.TIMESTAMP_ADD),
+ Sig[RoundFloor](ExpressionNames.FLOOR),
+ Sig[RoundCeil](ExpressionNames.CEIL)
)
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]