This is an automated email from the ASF dual-hosted git repository.

yihua pushed a commit to branch release-0.13.0
in repository https://gitbox.apache.org/repos/asf/hudi.git

commit 9c21118e146ed591259d46d5291822c80674e946
Author: Ming Wei <[email protected]>
AuthorDate: Tue Jan 31 16:26:10 2023 +0800

    [HUDI-5567] Make the bootstrapping exception message more clear (#7684)
    
    Co-authored-by: jameswei <[email protected]>
---
 .../hudi/client/bootstrap/HoodieSparkBootstrapSchemaProvider.java     | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/hudi-client/hudi-spark-client/src/main/java/org/apache/hudi/client/bootstrap/HoodieSparkBootstrapSchemaProvider.java b/hudi-client/hudi-spark-client/src/main/java/org/apache/hudi/client/bootstrap/HoodieSparkBootstrapSchemaProvider.java
index b161182b83a..bc0a1663c4b 100644
--- a/hudi-client/hudi-spark-client/src/main/java/org/apache/hudi/client/bootstrap/HoodieSparkBootstrapSchemaProvider.java
+++ b/hudi-client/hudi-spark-client/src/main/java/org/apache/hudi/client/bootstrap/HoodieSparkBootstrapSchemaProvider.java
@@ -60,7 +60,7 @@ public class HoodieSparkBootstrapSchemaProvider extends HoodieBootstrapSchemaPro
           } else if (ORC.getFileExtension().equals(extension)) {
             return getBootstrapSourceSchemaOrc(writeConfig, context, filePath);
           } else {
-            throw new HoodieException("Could not determine schema from the data files.");
+            throw new HoodieException("Could not determine schema from the data files, supported file formats: [ORC, PARQUET].");
           }
         }
     ).filter(Objects::nonNull).findAny()
@@ -92,7 +92,7 @@ public class HoodieSparkBootstrapSchemaProvider extends HoodieBootstrapSchemaPro
     try {
       orcReader = OrcFile.createReader(filePath, OrcFile.readerOptions(context.getHadoopConf().get()));
     } catch (IOException e) {
-      throw new HoodieException("Could not determine schema from the data files.");
+      throw new HoodieException("Could not determine schema from the ORC data files.");
     }
     TypeDescription orcSchema = orcReader.getSchema();
     String tableName = HoodieAvroUtils.sanitizeName(writeConfig.getTableName());

Reply via email to