This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 93f98c0a61dd [SPARK-47523][SQL] Replace deprecated
`JsonParser#getCurrentName` with `JsonParser#currentName`
93f98c0a61dd is described below
commit 93f98c0a61ddb66eb777c3940fbf29fc58e2d79b
Author: yangjie01 <[email protected]>
AuthorDate: Fri Mar 22 08:37:09 2024 -0700
[SPARK-47523][SQL] Replace deprecated `JsonParser#getCurrentName` with
`JsonParser#currentName`
### What changes were proposed in this pull request?
This PR replaces the use of `JsonParser#getCurrentName` with
`JsonParser#currentName` in Spark code, as `JsonParser#getCurrentName` has been
deprecated since Jackson 2.17.
https://github.com/FasterXML/jackson-core/blob/8fba680579885bf9cdae72e93f16de557056d6e3/src/main/java/com/fasterxml/jackson/core/JsonParser.java#L1521-L1551
```java
/**
* Deprecated alias of {@link #currentName()}.
*
* @return Name of the current field in the parsing context
*
* @throws IOException for low-level read issues, or
* {@link JsonParseException} for decoding problems
*
* @deprecated Since 2.17 use {@link #currentName} instead.
*/
@Deprecated
public abstract String getCurrentName() throws IOException;
/**
* Method that can be called to get the name associated with
* the current token: for {@link JsonToken#FIELD_NAME}s it will
* be the same as what {@link #getText} returns;
* for field values it will be preceding field name;
* and for others (array values, root-level values) null.
*
* @return Name of the current field in the parsing context
*
* @throws IOException for low-level read issues, or
* {@link JsonParseException} for decoding problems
*
* @since 2.10
*/
public String currentName() throws IOException {
// !!! TODO: switch direction in 2.18 or later
return getCurrentName();
}
```
### Why are the changes needed?
Clean up deprecated API usage.
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
Pass GitHub Actions
### Was this patch authored or co-authored using generative AI tooling?
No
Closes #45668 from LuciferYang/SPARK-47523.
Authored-by: yangjie01 <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
---
.../spark/sql/catalyst/expressions/jsonExpressions.scala | 6 +++---
.../org/apache/spark/sql/catalyst/json/JacksonParser.scala | 10 +++++-----
.../org/apache/spark/sql/catalyst/json/JsonInferSchema.scala | 2 +-
.../org/apache/spark/sql/errors/QueryExecutionErrors.scala | 2 +-
4 files changed, 10 insertions(+), 10 deletions(-)
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala
index 9fca09b46a99..b155987242b3 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala
@@ -411,7 +411,7 @@ class GetJsonObjectEvaluator(cachedPath: UTF8String) {
p.nextToken()
arrayIndex(p, () => evaluatePath(p, g, style, xs))(idx)
- case (FIELD_NAME, Named(name) :: xs) if p.getCurrentName == name =>
+ case (FIELD_NAME, Named(name) :: xs) if p.currentName == name =>
// exact field match
if (p.nextToken() != JsonToken.VALUE_NULL) {
evaluatePath(p, g, style, xs)
@@ -546,7 +546,7 @@ case class JsonTuple(children: Seq[Expression])
while (parser.nextToken() != JsonToken.END_OBJECT) {
if (parser.getCurrentToken == JsonToken.FIELD_NAME) {
// check to see if this field is desired in the output
- val jsonField = parser.getCurrentName
+ val jsonField = parser.currentName
var idx = fieldNames.indexOf(jsonField)
if (idx >= 0) {
// it is, copy the child tree to the correct location in the output
row
@@ -1056,7 +1056,7 @@ case class JsonObjectKeys(child: Expression) extends
UnaryExpression with Codege
// traverse until the end of input and ensure it returns valid key
while(parser.nextValue() != null && parser.currentName() != null) {
// add current fieldName to the ArrayBuffer
- arrayBufferOfKeys += UTF8String.fromString(parser.getCurrentName)
+ arrayBufferOfKeys += UTF8String.fromString(parser.currentName)
// skip all the children of inner object or array
parser.skipChildren()
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonParser.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonParser.scala
index 36f37888b084..a16a23cf0049 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonParser.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonParser.scala
@@ -231,7 +231,7 @@ class JacksonParser(
Float.PositiveInfinity
case "-INF" | "-Infinity" if options.allowNonNumericNumbers =>
Float.NegativeInfinity
- case _ => throw StringAsDataTypeException(parser.getCurrentName,
parser.getText,
+ case _ => throw StringAsDataTypeException(parser.currentName,
parser.getText,
FloatType)
}
}
@@ -250,7 +250,7 @@ class JacksonParser(
Double.PositiveInfinity
case "-INF" | "-Infinity" if options.allowNonNumericNumbers =>
Double.NegativeInfinity
- case _ => throw StringAsDataTypeException(parser.getCurrentName,
parser.getText,
+ case _ => throw StringAsDataTypeException(parser.currentName,
parser.getText,
DoubleType)
}
}
@@ -430,7 +430,7 @@ class JacksonParser(
case token =>
// We cannot parse this token based on the given data type. So, we throw
a
// RuntimeException and this exception will be caught by `parse` method.
- throw CannotParseJSONFieldException(parser.getCurrentName,
parser.getText, token, dataType)
+ throw CannotParseJSONFieldException(parser.currentName, parser.getText,
token, dataType)
}
/**
@@ -451,7 +451,7 @@ class JacksonParser(
lazy val bitmask = ResolveDefaultColumns.existenceDefaultsBitmask(schema)
resetExistenceDefaultsBitmask(schema, bitmask)
while (!skipRow && nextUntil(parser, JsonToken.END_OBJECT)) {
- schema.getFieldIndex(parser.getCurrentName) match {
+ schema.getFieldIndex(parser.currentName) match {
case Some(index) =>
try {
row.update(index, fieldConverters(index).apply(parser))
@@ -493,7 +493,7 @@ class JacksonParser(
var badRecordException: Option[Throwable] = None
while (nextUntil(parser, JsonToken.END_OBJECT)) {
- keys += UTF8String.fromString(parser.getCurrentName)
+ keys += UTF8String.fromString(parser.currentName)
try {
values += fieldConverter.apply(parser)
} catch {
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JsonInferSchema.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JsonInferSchema.scala
index bc7038fc71d4..12c1be7c0de7 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JsonInferSchema.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JsonInferSchema.scala
@@ -194,7 +194,7 @@ class JsonInferSchema(options: JSONOptions) extends
Serializable with Logging {
val builder = Array.newBuilder[StructField]
while (nextUntil(parser, END_OBJECT)) {
builder += StructField(
- parser.getCurrentName,
+ parser.currentName,
inferField(parser),
nullable = true)
}
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 6d30606b232b..7506ff8df0b5 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -1238,7 +1238,7 @@ private[sql] object QueryExecutionErrors extends
QueryErrorsBase with ExecutionE
def cannotParseJSONFieldError(parser: JsonParser, jsonType: JsonToken,
dataType: DataType)
: SparkRuntimeException = {
- cannotParseJSONFieldError(parser.getCurrentName, parser.getText, jsonType,
dataType)
+ cannotParseJSONFieldError(parser.currentName, parser.getText, jsonType,
dataType)
}
def cannotParseJSONFieldError(
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]