This is an automated email from the ASF dual-hosted git repository.
gurwls223 pushed a commit to branch branch-3.1
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.1 by this push:
new 0f6cafe [MINOR][SQL] Fix the typo in function names: crete
0f6cafe is described below
commit 0f6cafe410f55ccc1d2106ff2b66efcbbc1b1c0b
Author: Max Gekk <[email protected]>
AuthorDate: Wed Dec 22 09:29:02 2021 +0900
[MINOR][SQL] Fix the typo in function names: crete
### What changes were proposed in this pull request?
Fix the typo: crete -> create.
### Why are the changes needed?
To improve code maintenance. Finding the functions by name should be easier
after the changes.
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
By compiling and running related test suites:
```
$ build/sbt "test:testOnly *ParquetRebaseDatetimeV2Suite"
$ build/sbt "test:testOnly *AvroV1Suite"
```
Closes #34978 from MaxGekk/fix-typo-crete.
Authored-by: Max Gekk <[email protected]>
Signed-off-by: Hyukjin Kwon <[email protected]>
(cherry picked from commit 72c278a4bb906cd7c500d223f80bc83e0f5c1ef0)
Signed-off-by: Hyukjin Kwon <[email protected]>
---
.../main/scala/org/apache/spark/sql/avro/AvroDeserializer.scala | 4 ++--
.../src/main/scala/org/apache/spark/sql/avro/AvroSerializer.scala | 4 ++--
.../apache/spark/sql/execution/datasources/DataSourceUtils.scala | 8 ++++----
.../sql/execution/datasources/parquet/ParquetRowConverter.scala | 6 +++---
.../sql/execution/datasources/parquet/ParquetWriteSupport.scala | 6 +++---
5 files changed, 14 insertions(+), 14 deletions(-)
diff --git
a/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroDeserializer.scala
b/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroDeserializer.scala
index a6c43ac..c87d35b 100644
---
a/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroDeserializer.scala
+++
b/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroDeserializer.scala
@@ -58,10 +58,10 @@ private[sql] class AvroDeserializer(
private lazy val decimalConversions = new DecimalConversion()
- private val dateRebaseFunc = DataSourceUtils.creteDateRebaseFuncInRead(
+ private val dateRebaseFunc = DataSourceUtils.createDateRebaseFuncInRead(
datetimeRebaseMode, "Avro")
- private val timestampRebaseFunc =
DataSourceUtils.creteTimestampRebaseFuncInRead(
+ private val timestampRebaseFunc =
DataSourceUtils.createTimestampRebaseFuncInRead(
datetimeRebaseMode, "Avro")
private val converter: Any => Option[Any] = rootCatalystType match {
diff --git
a/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroSerializer.scala
b/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroSerializer.scala
index e3bd6ca..9a72d90 100644
---
a/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroSerializer.scala
+++
b/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroSerializer.scala
@@ -59,10 +59,10 @@ private[sql] class AvroSerializer(
converter.apply(catalystData)
}
- private val dateRebaseFunc = DataSourceUtils.creteDateRebaseFuncInWrite(
+ private val dateRebaseFunc = DataSourceUtils.createDateRebaseFuncInWrite(
datetimeRebaseMode, "Avro")
- private val timestampRebaseFunc =
DataSourceUtils.creteTimestampRebaseFuncInWrite(
+ private val timestampRebaseFunc =
DataSourceUtils.createTimestampRebaseFuncInWrite(
datetimeRebaseMode, "Avro")
private val converter: Any => Any = {
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceUtils.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceUtils.scala
index b54747a..146e5a8 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceUtils.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceUtils.scala
@@ -165,7 +165,7 @@ object DataSourceUtils {
"Gregorian calendar.", null)
}
- def creteDateRebaseFuncInRead(
+ def createDateRebaseFuncInRead(
rebaseMode: LegacyBehaviorPolicy.Value,
format: String): Int => Int = rebaseMode match {
case LegacyBehaviorPolicy.EXCEPTION => days: Int =>
@@ -177,7 +177,7 @@ object DataSourceUtils {
case LegacyBehaviorPolicy.CORRECTED => identity[Int]
}
- def creteDateRebaseFuncInWrite(
+ def createDateRebaseFuncInWrite(
rebaseMode: LegacyBehaviorPolicy.Value,
format: String): Int => Int = rebaseMode match {
case LegacyBehaviorPolicy.EXCEPTION => days: Int =>
@@ -189,7 +189,7 @@ object DataSourceUtils {
case LegacyBehaviorPolicy.CORRECTED => identity[Int]
}
- def creteTimestampRebaseFuncInRead(
+ def createTimestampRebaseFuncInRead(
rebaseMode: LegacyBehaviorPolicy.Value,
format: String): Long => Long = rebaseMode match {
case LegacyBehaviorPolicy.EXCEPTION => micros: Long =>
@@ -201,7 +201,7 @@ object DataSourceUtils {
case LegacyBehaviorPolicy.CORRECTED => identity[Long]
}
- def creteTimestampRebaseFuncInWrite(
+ def createTimestampRebaseFuncInWrite(
rebaseMode: LegacyBehaviorPolicy.Value,
format: String): Long => Long = rebaseMode match {
case LegacyBehaviorPolicy.EXCEPTION => micros: Long =>
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowConverter.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowConverter.scala
index 12a71b5..3c1fc68 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowConverter.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowConverter.scala
@@ -188,13 +188,13 @@ private[parquet] class ParquetRowConverter(
*/
def currentRecord: InternalRow = currentRow
- private val dateRebaseFunc = DataSourceUtils.creteDateRebaseFuncInRead(
+ private val dateRebaseFunc = DataSourceUtils.createDateRebaseFuncInRead(
datetimeRebaseMode, "Parquet")
- private val timestampRebaseFunc =
DataSourceUtils.creteTimestampRebaseFuncInRead(
+ private val timestampRebaseFunc =
DataSourceUtils.createTimestampRebaseFuncInRead(
datetimeRebaseMode, "Parquet")
- private val int96RebaseFunc = DataSourceUtils.creteTimestampRebaseFuncInRead(
+ private val int96RebaseFunc =
DataSourceUtils.createTimestampRebaseFuncInRead(
int96RebaseMode, "Parquet INT96")
// Converters for each field.
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetWriteSupport.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetWriteSupport.scala
index 2607471..892546a 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetWriteSupport.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetWriteSupport.scala
@@ -82,16 +82,16 @@ class ParquetWriteSupport extends WriteSupport[InternalRow]
with Logging {
private val datetimeRebaseMode = LegacyBehaviorPolicy.withName(
SQLConf.get.getConf(SQLConf.LEGACY_PARQUET_REBASE_MODE_IN_WRITE))
- private val dateRebaseFunc = DataSourceUtils.creteDateRebaseFuncInWrite(
+ private val dateRebaseFunc = DataSourceUtils.createDateRebaseFuncInWrite(
datetimeRebaseMode, "Parquet")
- private val timestampRebaseFunc =
DataSourceUtils.creteTimestampRebaseFuncInWrite(
+ private val timestampRebaseFunc =
DataSourceUtils.createTimestampRebaseFuncInWrite(
datetimeRebaseMode, "Parquet")
private val int96RebaseMode = LegacyBehaviorPolicy.withName(
SQLConf.get.getConf(SQLConf.LEGACY_PARQUET_INT96_REBASE_MODE_IN_WRITE))
- private val int96RebaseFunc =
DataSourceUtils.creteTimestampRebaseFuncInWrite(
+ private val int96RebaseFunc =
DataSourceUtils.createTimestampRebaseFuncInWrite(
int96RebaseMode, "Parquet INT96")
override def init(configuration: Configuration): WriteContext = {
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]