This is an automated email from the ASF dual-hosted git repository.
gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 72c278a [MINOR][SQL] Fix the typo in function names: crete
72c278a is described below
commit 72c278a4bb906cd7c500d223f80bc83e0f5c1ef0
Author: Max Gekk <[email protected]>
AuthorDate: Wed Dec 22 09:29:02 2021 +0900
[MINOR][SQL] Fix the typo in function names: crete
### What changes were proposed in this pull request?
Fix the typo: crete -> create.
### Why are the changes needed?
To improve code maintenance. Finding the functions by name should be easier
after the changes.
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
By compiling and running related test suites:
```
$ build/sbt "test:testOnly *ParquetRebaseDatetimeV2Suite"
$ build/sbt "test:testOnly *AvroV1Suite"
```
Closes #34978 from MaxGekk/fix-typo-crete.
Authored-by: Max Gekk <[email protected]>
Signed-off-by: Hyukjin Kwon <[email protected]>
---
.../main/scala/org/apache/spark/sql/avro/AvroDeserializer.scala | 4 ++--
.../src/main/scala/org/apache/spark/sql/avro/AvroSerializer.scala | 4 ++--
.../apache/spark/sql/execution/datasources/DataSourceUtils.scala | 8 ++++----
.../sql/execution/datasources/parquet/ParquetRowConverter.scala | 6 +++---
.../sql/execution/datasources/parquet/ParquetWriteSupport.scala | 6 +++---
5 files changed, 14 insertions(+), 14 deletions(-)
diff --git
a/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroDeserializer.scala
b/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroDeserializer.scala
index d7f2fa8..54cd3ba 100644
---
a/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroDeserializer.scala
+++
b/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroDeserializer.scala
@@ -62,10 +62,10 @@ private[sql] class AvroDeserializer(
private lazy val decimalConversions = new DecimalConversion()
- private val dateRebaseFunc = DataSourceUtils.creteDateRebaseFuncInRead(
+ private val dateRebaseFunc = DataSourceUtils.createDateRebaseFuncInRead(
datetimeRebaseMode, "Avro")
- private val timestampRebaseFunc =
DataSourceUtils.creteTimestampRebaseFuncInRead(
+ private val timestampRebaseFunc =
DataSourceUtils.createTimestampRebaseFuncInRead(
datetimeRebaseMode, "Avro")
private val converter: Any => Option[Any] = try {
diff --git
a/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroSerializer.scala
b/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroSerializer.scala
index 32a84d0..f2f754a 100644
---
a/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroSerializer.scala
+++
b/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroSerializer.scala
@@ -60,10 +60,10 @@ private[sql] class AvroSerializer(
converter.apply(catalystData)
}
- private val dateRebaseFunc = DataSourceUtils.creteDateRebaseFuncInWrite(
+ private val dateRebaseFunc = DataSourceUtils.createDateRebaseFuncInWrite(
datetimeRebaseMode, "Avro")
- private val timestampRebaseFunc =
DataSourceUtils.creteTimestampRebaseFuncInWrite(
+ private val timestampRebaseFunc =
DataSourceUtils.createTimestampRebaseFuncInWrite(
datetimeRebaseMode, "Avro")
private val converter: Any => Any = {
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceUtils.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceUtils.scala
index 67d0399..76bc03a 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceUtils.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceUtils.scala
@@ -172,7 +172,7 @@ object DataSourceUtils extends PredicateHelper {
QueryExecutionErrors.sparkUpgradeInWritingDatesError(format, config)
}
- def creteDateRebaseFuncInRead(
+ def createDateRebaseFuncInRead(
rebaseMode: LegacyBehaviorPolicy.Value,
format: String): Int => Int = rebaseMode match {
case LegacyBehaviorPolicy.EXCEPTION => days: Int =>
@@ -184,7 +184,7 @@ object DataSourceUtils extends PredicateHelper {
case LegacyBehaviorPolicy.CORRECTED => identity[Int]
}
- def creteDateRebaseFuncInWrite(
+ def createDateRebaseFuncInWrite(
rebaseMode: LegacyBehaviorPolicy.Value,
format: String): Int => Int = rebaseMode match {
case LegacyBehaviorPolicy.EXCEPTION => days: Int =>
@@ -196,7 +196,7 @@ object DataSourceUtils extends PredicateHelper {
case LegacyBehaviorPolicy.CORRECTED => identity[Int]
}
- def creteTimestampRebaseFuncInRead(
+ def createTimestampRebaseFuncInRead(
rebaseMode: LegacyBehaviorPolicy.Value,
format: String): Long => Long = rebaseMode match {
case LegacyBehaviorPolicy.EXCEPTION => micros: Long =>
@@ -208,7 +208,7 @@ object DataSourceUtils extends PredicateHelper {
case LegacyBehaviorPolicy.CORRECTED => identity[Long]
}
- def creteTimestampRebaseFuncInWrite(
+ def createTimestampRebaseFuncInWrite(
rebaseMode: LegacyBehaviorPolicy.Value,
format: String): Long => Long = rebaseMode match {
case LegacyBehaviorPolicy.EXCEPTION => micros: Long =>
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowConverter.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowConverter.scala
index 8635510..243bc16 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowConverter.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowConverter.scala
@@ -190,13 +190,13 @@ private[parquet] class ParquetRowConverter(
*/
def currentRecord: InternalRow = currentRow
- private val dateRebaseFunc = DataSourceUtils.creteDateRebaseFuncInRead(
+ private val dateRebaseFunc = DataSourceUtils.createDateRebaseFuncInRead(
datetimeRebaseMode, "Parquet")
- private val timestampRebaseFunc =
DataSourceUtils.creteTimestampRebaseFuncInRead(
+ private val timestampRebaseFunc =
DataSourceUtils.createTimestampRebaseFuncInRead(
datetimeRebaseMode, "Parquet")
- private val int96RebaseFunc = DataSourceUtils.creteTimestampRebaseFuncInRead(
+ private val int96RebaseFunc =
DataSourceUtils.createTimestampRebaseFuncInRead(
int96RebaseMode, "Parquet INT96")
// Converters for each field.
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetWriteSupport.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetWriteSupport.scala
index 933f79d..183ea39 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetWriteSupport.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetWriteSupport.scala
@@ -82,16 +82,16 @@ class ParquetWriteSupport extends WriteSupport[InternalRow]
with Logging {
private val datetimeRebaseMode = LegacyBehaviorPolicy.withName(
SQLConf.get.getConf(SQLConf.PARQUET_REBASE_MODE_IN_WRITE))
- private val dateRebaseFunc = DataSourceUtils.creteDateRebaseFuncInWrite(
+ private val dateRebaseFunc = DataSourceUtils.createDateRebaseFuncInWrite(
datetimeRebaseMode, "Parquet")
- private val timestampRebaseFunc =
DataSourceUtils.creteTimestampRebaseFuncInWrite(
+ private val timestampRebaseFunc =
DataSourceUtils.createTimestampRebaseFuncInWrite(
datetimeRebaseMode, "Parquet")
private val int96RebaseMode = LegacyBehaviorPolicy.withName(
SQLConf.get.getConf(SQLConf.PARQUET_INT96_REBASE_MODE_IN_WRITE))
- private val int96RebaseFunc =
DataSourceUtils.creteTimestampRebaseFuncInWrite(
+ private val int96RebaseFunc =
DataSourceUtils.createTimestampRebaseFuncInWrite(
int96RebaseMode, "Parquet INT96")
override def init(configuration: Configuration): WriteContext = {
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]