This is an automated email from the ASF dual-hosted git repository.
yangjie01 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 0b5b0d56a431 [SPARK-51293][CORE][SQL][SS][MLLIB][TESTS] Cleanup unused private functions from test suites
0b5b0d56a431 is described below
commit 0b5b0d56a43106e12d93fde0ec3bbdfe1870b1ae
Author: yangjie01 <[email protected]>
AuthorDate: Sun Feb 23 17:51:26 2025 +0800
[SPARK-51293][CORE][SQL][SS][MLLIB][TESTS] Cleanup unused private functions from test suites
### What changes were proposed in this pull request?
This PR aims to clean up unused private functions from test suites.
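For background, dead private helpers like these can be flagged mechanically by the compiler. Below is a minimal sketch, assuming a Scala 2.13 toolchain with the `-Wunused:privates` warning enabled; the `Example` object is hypothetical and this is illustrative only, not necessarily how the candidates in this PR were found:

```scala
// Illustrative sketch (hypothetical Example object, assumes Scala 2.13):
// compiling with `scalac -Wunused:privates Example.scala` makes the compiler
// report private members that no code references, e.g.
//   warning: private method withOrWithout in object Example is never used
object Example {
  // Never called anywhere in this file, so -Wunused:privates flags it.
  private def withOrWithout(isDistinct: Boolean): String =
    if (isDistinct) "with" else "without"

  def main(args: Array[String]): Unit =
    println("no caller references withOrWithout")
}
```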
### Why are the changes needed?
Code cleanup
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
Pass GitHub Actions.
### Was this patch authored or co-authored using generative AI tooling?
No
Closes #50049 from LuciferYang/SPARK-51293.
Lead-authored-by: yangjie01 <[email protected]>
Co-authored-by: YangJie <[email protected]>
Signed-off-by: yangjie01 <[email protected]>
---
.../org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala | 2 --
.../spark/ExecutorAllocationManagerSuite.scala | 15 --------------
.../spark/deploy/client/AppClientSuite.scala | 8 --------
.../org/apache/spark/storage/StorageSuite.scala | 23 ----------------------
.../org/apache/spark/util/JsonProtocolSuite.scala | 16 ---------------
.../mllib/regression/IsotonicRegressionSuite.scala | 10 ----------
.../catalyst/analysis/AnsiTypeCoercionSuite.scala | 21 --------------------
.../analysis/NamedParameterFunctionSuite.scala | 10 +---------
.../encoders/EncoderErrorMessageSuite.scala | 3 ---
.../catalyst/expressions/CastWithAnsiOnSuite.scala | 4 ----
.../sql/catalyst/util/IntervalUtilsSuite.scala | 12 -----------
.../spark/sql/connector/DataSourceV2SQLSuite.scala | 10 ----------
.../apache/spark/sql/execution/SQLViewSuite.scala | 7 -------
.../StateStoreBasicOperationsBenchmark.scala | 4 ----
.../sql/streaming/FileStreamSourceSuite.scala | 8 --------
.../test/DataStreamReaderWriterSuite.scala | 2 --
.../spark/sql/hive/orc/HiveOrcQuerySuite.scala | 13 +-----------
17 files changed, 2 insertions(+), 166 deletions(-)
diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala
index f97b6a6eb183..47e1c8c06dd4 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala
@@ -806,8 +806,6 @@ private[v2] trait V2JDBCTest extends SharedSparkSession with DockerIntegrationFu
protected def caseConvert(tableName: String): String = tableName
- private def withOrWithout(isDistinct: Boolean): String = if (isDistinct) "with" else "without"
-
Seq(true, false).foreach { isDistinct =>
val distinct = if (isDistinct) "DISTINCT " else ""
val withOrWithout = if (isDistinct) "with" else "without"
diff --git a/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala b/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala
index f034e5da0a69..8644608edee3 100644
--- a/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala
@@ -1924,8 +1924,6 @@ private object ExecutorAllocationManagerSuite extends PrivateMethodTester {
PrivateMethod[mutable.HashMap[Int, Int]](Symbol("numLocalityAwareTasksPerResourceProfileId"))
private val _rpIdToHostToLocalTaskCount =
PrivateMethod[Map[Int, Map[String, Int]]](Symbol("rpIdToHostToLocalTaskCount"))
- private val _onSpeculativeTaskSubmitted =
- PrivateMethod[Unit](Symbol("onSpeculativeTaskSubmitted"))
private val _totalRunningTasksPerResourceProfile =
PrivateMethod[Int](Symbol("totalRunningTasksPerResourceProfile"))
@@ -1942,12 +1940,6 @@ private object ExecutorAllocationManagerSuite extends PrivateMethodTester {
nmap(rp.id)
}
- private def updateAndSyncNumExecutorsTarget(
- manager: ExecutorAllocationManager,
- now: Long): Unit = {
- manager invokePrivate _updateAndSyncNumExecutorsTarget(now)
- }
-
private def numExecutorsTargetForDefaultProfileId(manager: ExecutorAllocationManager): Int = {
numExecutorsTarget(manager, defaultProfile.id)
}
@@ -2025,10 +2017,6 @@ private object ExecutorAllocationManagerSuite extends PrivateMethodTester {
manager invokePrivate _onSchedulerQueueEmpty()
}
- private def onSpeculativeTaskSubmitted(manager: ExecutorAllocationManager, id: String) : Unit = {
- manager invokePrivate _onSpeculativeTaskSubmitted(id)
- }
-
private def localityAwareTasksForDefaultProfile(manager: ExecutorAllocationManager): Int = {
val localMap = manager invokePrivate _localityAwareTasksPerResourceProfileId()
localMap(defaultProfile.id)
@@ -2044,7 +2032,4 @@ private object ExecutorAllocationManagerSuite extends PrivateMethodTester {
rpIdToHostLocal(defaultProfile.id)
}
- private def getResourceProfileIdOfExecutor(manager: ExecutorAllocationManager): Int = {
- defaultProfile.id
- }
}
diff --git a/core/src/test/scala/org/apache/spark/deploy/client/AppClientSuite.scala b/core/src/test/scala/org/apache/spark/deploy/client/AppClientSuite.scala
index 3555faf5c2cb..eb6b4b23c61c 100644
--- a/core/src/test/scala/org/apache/spark/deploy/client/AppClientSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/client/AppClientSuite.scala
@@ -233,14 +233,6 @@ class AppClientSuite
// | Utility methods for testing |
// ===============================
- /** Return a SparkConf for applications that want to talk to our Master. */
- private def appConf: SparkConf = {
- new SparkConf()
- .setMaster(masterRpcEnv.address.toSparkURL)
- .setAppName("test")
- .set("spark.executor.memory", "256m")
- }
-
/** Make a master to which our application will send executor requests. */
private def makeMaster(): Master = {
val master = new Master(masterRpcEnv, masterRpcEnv.address, 0, securityManager, conf)
diff --git a/core/src/test/scala/org/apache/spark/storage/StorageSuite.scala b/core/src/test/scala/org/apache/spark/storage/StorageSuite.scala
index 5f2abb47413f..ec436d9cd31c 100644
--- a/core/src/test/scala/org/apache/spark/storage/StorageSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/StorageSuite.scala
@@ -105,29 +105,6 @@ class StorageSuite extends SparkFunSuite {
assert(status.diskUsed === actualDiskUsed)
}
- // For testing StorageUtils.updateRddInfo and StorageUtils.getRddBlockLocations
- private def stockStorageStatuses: Seq[StorageStatus] = {
- val status1 = new StorageStatus(BlockManagerId("big", "dog", 1), 1000L, Some(1000L), Some(0L))
- val status2 = new StorageStatus(BlockManagerId("fat", "duck", 2), 2000L, Some(2000L), Some(0L))
- val status3 = new StorageStatus(BlockManagerId("fat", "cat", 3), 3000L, Some(3000L), Some(0L))
- status1.addBlock(RDDBlockId(0, 0), BlockStatus(memAndDisk, 1L, 2L))
- status1.addBlock(RDDBlockId(0, 1), BlockStatus(memAndDisk, 1L, 2L))
- status2.addBlock(RDDBlockId(0, 2), BlockStatus(memAndDisk, 1L, 2L))
- status2.addBlock(RDDBlockId(0, 3), BlockStatus(memAndDisk, 1L, 2L))
- status2.addBlock(RDDBlockId(1, 0), BlockStatus(memAndDisk, 1L, 2L))
- status2.addBlock(RDDBlockId(1, 1), BlockStatus(memAndDisk, 1L, 2L))
- status3.addBlock(RDDBlockId(0, 4), BlockStatus(memAndDisk, 1L, 2L))
- status3.addBlock(RDDBlockId(1, 2), BlockStatus(memAndDisk, 1L, 2L))
- Seq(status1, status2, status3)
- }
-
- // For testing StorageUtils.updateRddInfo
- private def stockRDDInfos: Seq[RDDInfo] = {
- val info0 = new RDDInfo(0, "0", 10, memAndDisk, false, Seq(3))
- val info1 = new RDDInfo(1, "1", 3, memAndDisk, false, Seq(4))
- Seq(info0, info1)
- }
-
private val offheap = StorageLevel.OFF_HEAP
// For testing add, update, remove, get, and contains etc. for both RDD and non-RDD onheap
// and offheap blocks
diff --git a/core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala b/core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala
index 30c9693e6dee..89e3d8371be4 100644
--- a/core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala
@@ -1370,18 +1370,6 @@ private[spark] object JsonProtocolSuite extends Assertions {
}
}
- private def assertOptionEquals[T](
- opt1: Option[T],
- opt2: Option[T],
- assertEquals: (T, T) => Unit): Unit = {
- if (opt1.isDefined) {
- assert(opt2.isDefined)
- assertEquals(opt1.get, opt2.get)
- } else {
- assert(opt2.isEmpty)
- }
- }
-
/**
* Use different names for methods we pass in to assertSeqEquals or assertOptionEquals
*/
@@ -1407,10 +1395,6 @@ private[spark] object JsonProtocolSuite extends Assertions {
assert(ste1.getFileName === ste2.getFileName)
}
- private def assertEquals(rp1: ResourceProfile, rp2: ResourceProfile): Unit = {
- assert(rp1 === rp2)
- }
-
/** ----------------------------------- *
| Util methods for constructing events |
* ------------------------------------ */
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/regression/IsotonicRegressionSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/regression/IsotonicRegressionSuite.scala
index aa06b70307f5..0a4f81bd7aa9 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/regression/IsotonicRegressionSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/regression/IsotonicRegressionSuite.scala
@@ -72,16 +72,6 @@ class IsotonicRegressionSuite extends SparkFunSuite with MLlibTestSparkContext w
runIsotonicRegression(labels, Array.fill(labels.size)(1d).toImmutableArraySeq, isotonic)
}
- private def runIsotonicRegression(
- labels: Seq[Double],
- features: Seq[Double],
- weights: Seq[Double],
- isotonic: Boolean): IsotonicRegressionModel = {
- runIsotonicRegressionOnInput(
- labels.indices.map(i => (labels(i), features(i), weights(i))),
- isotonic)
- }
-
private def runIsotonicRegressionOnInput(
input: Seq[(Double, Double, Double)],
isotonic: Boolean,
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnsiTypeCoercionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnsiTypeCoercionSuite.scala
index 139e89828f8e..0792c1657456 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnsiTypeCoercionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnsiTypeCoercionSuite.scala
@@ -24,7 +24,6 @@ import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.logical._
-import org.apache.spark.sql.catalyst.types.DataTypeUtils
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.types.{AbstractArrayType, StringTypeWithCollation}
import org.apache.spark.sql.types._
@@ -64,26 +63,6 @@ class AnsiTypeCoercionSuite extends TypeCoercionSuiteBase {
override def dateTimeOperationsRule: TypeCoercionRule = AnsiTypeCoercion.DateTimeOperations
- private def shouldCastStringLiteral(to: AbstractDataType, expected: DataType): Unit = {
- val input = Literal("123")
- val castResult = AnsiTypeCoercion.implicitCast(input, to)
- assert(DataTypeUtils.equalsIgnoreCaseAndNullability(
- castResult.map(_.dataType).orNull, expected), s"Failed to cast String literal to $to")
- }
-
- private def shouldNotCastStringLiteral(to: AbstractDataType): Unit = {
- val input = Literal("123")
- val castResult = AnsiTypeCoercion.implicitCast(input, to)
- assert(castResult.isEmpty, s"Should not be able to cast String literal to $to")
- }
-
- private def shouldNotCastStringInput(to: AbstractDataType): Unit = {
- val input = AttributeReference("s", StringType)()
- val castResult = AnsiTypeCoercion.implicitCast(input, to)
- assert(castResult.isEmpty, s"Should not be able to cast non-foldable String input to $to")
- }
-
private def checkWidenType(
widenFunc: (DataType, DataType) => Option[DataType],
t1: DataType,
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/NamedParameterFunctionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/NamedParameterFunctionSuite.scala
index 02543c9fba53..0715e27403bc 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/NamedParameterFunctionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/NamedParameterFunctionSuite.scala
@@ -20,7 +20,7 @@ import org.apache.spark.SparkThrowable
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{Expression, Literal, NamedArgumentExpression}
import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, ExprCode}
-import org.apache.spark.sql.catalyst.plans.logical.{FunctionBuilderBase, FunctionSignature, InputParameter, NamedParametersSupport}
+import org.apache.spark.sql.catalyst.plans.logical.{FunctionSignature, InputParameter, NamedParametersSupport}
import org.apache.spark.sql.catalyst.util.TypeUtils.toSQLId
import org.apache.spark.sql.types.DataType
@@ -89,14 +89,6 @@ class NamedParameterFunctionSuite extends AnalysisTest {
NamedParametersSupport.defaultRearrange(functionSignature, expressions, functionName))
}
- private def parseExternalException[T <: FunctionBuilderBase[_]](
- functionName: String,
- builder: T,
- expressions: Seq[Expression]) : SparkThrowable = {
- intercept[SparkThrowable](
- FunctionRegistry.rearrangeExpressions[T](functionName, builder, expressions))
- }
-
test("DUPLICATE_ROUTINE_PARAMETER_ASSIGNMENT") {
val condition = "DUPLICATE_ROUTINE_PARAMETER_ASSIGNMENT.BOTH_POSITIONAL_AND_NAMED"
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderErrorMessageSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderErrorMessageSuite.scala
index b7309923ac20..c80466aec035 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderErrorMessageSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderErrorMessageSuite.scala
@@ -17,8 +17,6 @@
package org.apache.spark.sql.catalyst.encoders
-import scala.reflect.ClassTag
-
import org.apache.spark.{SPARK_DOC_ROOT, SparkFunSuite, SparkUnsupportedOperationException}
import org.apache.spark.sql.Encoders
@@ -98,5 +96,4 @@ class EncoderErrorMessageSuite extends SparkFunSuite {
)
}
- private def clsName[T : ClassTag]: String = implicitly[ClassTag[T]].runtimeClass.getName
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastWithAnsiOnSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastWithAnsiOnSuite.scala
index 5916e0501f8b..674d306dbabb 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastWithAnsiOnSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastWithAnsiOnSuite.scala
@@ -303,10 +303,6 @@ class CastWithAnsiOnSuite extends CastSuiteBase with QueryErrorsBase {
s"cannot be cast to ${toSQLType(to)} because it is malformed."
}
- private def castErrMsg(l: Literal, to: DataType): String = {
- castErrMsg(l, to, l.dataType)
- }
-
test("cast from invalid string to numeric should throw
NumberFormatException") {
def check(value: String, dataType: DataType): Unit = {
checkExceptionInExpression[NumberFormatException](cast(value, dataType),
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
index 700dfe30a238..cbaebfa12238 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
@@ -168,18 +168,6 @@ class IntervalUtilsSuite extends SparkFunSuite with SQLHelper {
assert(safeStringToInterval(UTF8String.fromString(input)) === null)
}
- private def checkFromInvalidStringUnknownError(input: String, word: String): Unit = {
- checkError(
- exception = intercept[SparkIllegalArgumentException] {
- stringToInterval(UTF8String.fromString(input))
- },
- condition = "INVALID_INTERVAL_FORMAT.UNKNOWN_PARSING_ERROR",
- parameters = Map(
- "input" -> Option(input).map(_.toString).getOrElse("null"),
- "word" -> word))
- assert(safeStringToInterval(UTF8String.fromString(input)) === null)
- }
-
private def failFuncWithInvalidInput(
input: String, errorMsg: String, converter: String => CalendarInterval): Unit = {
withClue("Expected to throw an exception for the invalid input") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
index 8d255e9efda5..301c072abbbb 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
@@ -1388,16 +1388,6 @@ class DataSourceV2SQLSuiteV1Filter
}
}
- private def testShowNamespaces(
- sqlText: String,
- expected: Seq[String]): Unit = {
- val schema = new StructType().add("namespace", StringType, nullable = false)
-
- val df = spark.sql(sqlText)
- assert(df.schema === schema)
- assert(df.collect().map(_.getAs[String](0)).sorted === expected.sorted)
- }
-
test("Use: basic tests with USE statements") {
val catalogManager = spark.sessionState.catalogManager
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala
index b26cdfaeb756..4fd96eadfac7 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala
@@ -453,13 +453,6 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils {
}
}
- private def assertRelationNotFound(query: String, relation: String): Unit = {
- val e = intercept[AnalysisException] {
- sql(query)
- }
- checkErrorTableNotFound(e, relation)
- }
-
private def assertRelationNotFound(query: String, relation: String, context: ExpectedContext):
Unit = {
val e = intercept[AnalysisException] {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/benchmark/StateStoreBasicOperationsBenchmark.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/benchmark/StateStoreBasicOperationsBenchmark.scala
index 36035e35ee25..ff4bd41409af 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/benchmark/StateStoreBasicOperationsBenchmark.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/benchmark/StateStoreBasicOperationsBenchmark.scala
@@ -353,10 +353,6 @@ object StateStoreBasicOperationsBenchmark extends SqlBasedBenchmark {
}
}
- private def getRows(store: StateStore, keys: Seq[UnsafeRow]): Seq[UnsafeRow] = {
- keys.map(key => store.get(key))
- }
-
private def loadInitialData(
provider: StateStoreProvider,
data: Seq[(UnsafeRow, UnsafeRow)]): Long = {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSourceSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSourceSuite.scala
index 773be0cc08e3..a753da116924 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSourceSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSourceSuite.scala
@@ -235,14 +235,6 @@ class FileStreamSourceSuite extends FileStreamSourceTest {
override val streamingTimeout = 80.seconds
- /** Use `format` and `path` to create FileStreamSource via DataFrameReader */
- private def createFileStreamSource(
- format: String,
- path: String,
- schema: Option[StructType] = None): FileStreamSource = {
- getSourceFromFileStream(createFileStream(format, path, schema))
- }
-
private def createFileStreamSourceAndGetSchema(
format: Option[String],
path: Option[String],
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala
index 224dec72c79b..200603cae586 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala
@@ -482,8 +482,6 @@ class DataStreamReaderWriterSuite extends StreamTest with BeforeAndAfter {
meq(Map.empty))
}
- private def newTextInput = Utils.createTempDir(namePrefix = "text").getCanonicalPath
-
test("check foreach() catches null writers") {
val df = spark.readStream
.format("org.apache.spark.sql.streaming.test")
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/HiveOrcQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/HiveOrcQuerySuite.scala
index 0c63cbb950a7..cc82b36ed74e 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/HiveOrcQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/HiveOrcQuerySuite.scala
@@ -26,7 +26,7 @@ import org.apache.orc.OrcConf
import org.apache.spark.sql.{AnalysisException, Row}
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.HiveTableRelation
-import org.apache.spark.sql.execution.datasources.{HadoopFsRelation, LogicalRelation, LogicalRelationWithTable}
+import org.apache.spark.sql.execution.datasources.{HadoopFsRelation, LogicalRelation}
import org.apache.spark.sql.execution.datasources.orc.OrcQueryTest
import org.apache.spark.sql.hive.{HiveSessionCatalog, HiveUtils}
import org.apache.spark.sql.hive.test.TestHiveSingleton
@@ -231,17 +231,6 @@ class HiveOrcQuerySuite extends OrcQueryTest with TestHiveSingleton {
.getCachedDataSourceTable(table)
}
- private def checkCached(tableIdentifier: TableIdentifier): Unit = {
- getCachedDataSourceTable(tableIdentifier) match {
- case null => fail(s"Converted ${tableIdentifier.table} should be cached in the cache.")
- case LogicalRelationWithTable(_: HadoopFsRelation, _) => // OK
- case other =>
- fail(
- s"The cached ${tableIdentifier.table} should be a HadoopFsRelation.
" +
- s"However, $other is returned form the cache.")
- }
- }
-
test("SPARK-28573 ORC conversation could be applied for partitioned table
insertion") {
withTempView("single") {
val singleRowDF = Seq((0, "foo")).toDF("key", "value")
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]