This is an automated email from the ASF dual-hosted git repository.
wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 91c4e54f88f1 [SPARK-55823][SQL] Rename `_LEGACY_ERROR_TEMP_1134` to
`UNABLE_TO_INFER_SCHEMA_FOR_DATA_SOURCE`
91c4e54f88f1 is described below
commit 91c4e54f88f118af40c7a3d917e4d4d9cb158b97
Author: ilicmarkodb <[email protected]>
AuthorDate: Thu Mar 5 00:11:52 2026 +0800
[SPARK-55823][SQL] Rename `_LEGACY_ERROR_TEMP_1134` to
`UNABLE_TO_INFER_SCHEMA_FOR_DATA_SOURCE`
### What changes were proposed in this pull request?
This PR removes the temporary legacy error class `_LEGACY_ERROR_TEMP_1134` and
replaces it with a properly named error class
`UNABLE_TO_INFER_SCHEMA_FOR_DATA_SOURCE` carrying the SQL state `42KD9`.
### Why are the changes needed?
`_LEGACY_ERROR_TEMP_*` classes are temporary placeholders without a SQL state;
migrating this one to a named error class gives users proper, documented error
messaging.
### Does this PR introduce _any_ user-facing change?
Yes. The error class name changes from `_LEGACY_ERROR_TEMP_1134` to
`UNABLE_TO_INFER_SCHEMA_FOR_DATA_SOURCE`, and the SQL state `42KD9` is now
included. The error message text remains identical.
### How was this patch tested?
New unit test.
### Was this patch authored or co-authored using generative AI tooling?
Yes, co-authored with Claude Code.
Closes #54607 from ilicmarkodb/rename_LEGACY_ERROR_TEMP_1134.
Authored-by: ilicmarkodb <[email protected]>
Signed-off-by: Wenchen Fan <[email protected]>
---
.../src/main/resources/error/error-conditions.json | 11 ++++++-----
.../spark/sql/errors/QueryCompilationErrors.scala | 2 +-
.../sql/errors/QueryCompilationErrorsSuite.scala | 20 ++++++++++++++++++++
3 files changed, 27 insertions(+), 6 deletions(-)
diff --git a/common/utils/src/main/resources/error/error-conditions.json
b/common/utils/src/main/resources/error/error-conditions.json
index 1c1f0a9eedf5..dcf7042a7470 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -6544,6 +6544,12 @@
],
"sqlState" : "42KD9"
},
+ "UNABLE_TO_INFER_SCHEMA_FOR_DATA_SOURCE" : {
+ "message" : [
+ "Unable to infer schema for <format> at <fileCatalog>. It must be
specified manually."
+ ],
+ "sqlState" : "42KD9"
+ },
"UNBOUND_SQL_PARAMETER" : {
"message" : [
"Found the unbound parameter: <name>. Please, fix `args` and provide a
mapping of the parameter to either a SQL literal or collection constructor
functions such as `map()`, `array()`, `struct()`."
@@ -8346,11 +8352,6 @@
"A schema needs to be specified when using <className>."
]
},
- "_LEGACY_ERROR_TEMP_1134" : {
- "message" : [
- "Unable to infer schema for <format> at <fileCatalog>. It must be
specified manually."
- ]
- },
"_LEGACY_ERROR_TEMP_1135" : {
"message" : [
"<className> is not a valid Spark SQL Data Source."
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index f59bb9ac1207..9413dabfc508 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -1772,7 +1772,7 @@ private[sql] object QueryCompilationErrors extends
QueryErrorsBase with Compilat
def dataSchemaNotSpecifiedError(format: String, fileCatalog: String):
Throwable = {
new AnalysisException(
- errorClass = "_LEGACY_ERROR_TEMP_1134",
+ errorClass = "UNABLE_TO_INFER_SCHEMA_FOR_DATA_SOURCE",
messageParameters = Map(
"format" -> format,
"fileCatalog" -> fileCatalog))
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
index e035a4c4119e..225d1642bb4a 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
@@ -1112,6 +1112,26 @@ class QueryCompilationErrorsSuite
ExpectedContext(fragment = "aggregate(array(1,2,3), x -> x + 1, 0)",
start = 7, stop = 44)
)
}
+
+ test("UNABLE_TO_INFER_SCHEMA_FOR_DATA_SOURCE: empty data source at path") {
+ withTempDir { dir =>
+ // Create _spark_metadata with a valid empty log entry (version header
only, no files)
+ val metadataDir = new java.io.File(dir, "_spark_metadata")
+ metadataDir.mkdir()
+ java.nio.file.Files.write(
+ new java.io.File(metadataDir, "0").toPath, "v1".getBytes)
+
+ checkError(
+ exception = intercept[AnalysisException] {
+ spark.read.format("json").load(dir.getCanonicalPath).collect()
+ },
+ condition = "UNABLE_TO_INFER_SCHEMA_FOR_DATA_SOURCE",
+ parameters = Map(
+ "format" -> "JSON",
+ "fileCatalog" -> "")
+ )
+ }
+ }
}
class MyCastToString extends SparkUserDefinedFunction(
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]