This is an automated email from the ASF dual-hosted git repository.
kgyrtkirk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new ebb1e2f HIVE-25791: Improve SFS exception messages (#2859) (Zoltan
Haindrich reviewed by Krisztian Kasa)
ebb1e2f is described below
commit ebb1e2fa9914bcccecad261d53338933b699ccb1
Author: Zoltan Haindrich <[email protected]>
AuthorDate: Wed Dec 15 08:49:18 2021 +0100
HIVE-25791: Improve SFS exception messages (#2859) (Zoltan Haindrich
reviewed by Krisztian Kasa)
---
.../org/apache/hadoop/hive/ql/QOutProcessor.java | 14 +++----
.../hive/ql/qoption/QTestReplaceHandler.java | 2 +-
.../apache/hadoop/hive/ql/io/SingleFileSystem.java | 43 +++++++++++++++++-----
.../hadoop/hive/ql/io/TestSingleFileSystem.java | 32 ++++++++++++++++
.../test/queries/clientnegative/sfs_nonexistent.q | 3 ++
.../results/clientnegative/sfs_nonexistent.q.out | 6 +++
6 files changed, 82 insertions(+), 18 deletions(-)
diff --git
a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QOutProcessor.java
b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QOutProcessor.java
index 22aad8e..cdf599b 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QOutProcessor.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QOutProcessor.java
@@ -51,7 +51,7 @@ public class QOutProcessor {
public static final String HDFS_DATE_MASK = "### HDFS DATE ###";
public static final String HDFS_USER_MASK = "### USER ###";
public static final String HDFS_GROUP_MASK = "### GROUP ###";
-
+
public static final String MASK_PATTERN = "#### A masked pattern was here
####";
public static final String PARTIAL_MASK_PATTERN = "#### A PARTIAL masked
pattern was here ####";
private static final PatternReplacementPair MASK_STATS = new
PatternReplacementPair(
@@ -69,7 +69,7 @@ public class QOutProcessor {
public static class LineProcessingResult {
private String line;
private boolean partialMaskWasMatched = false;
-
+
public LineProcessingResult(String line) {
this.line = line;
}
@@ -78,7 +78,7 @@ public class QOutProcessor {
return line;
}
}
-
+
private final Pattern[] planMask = toPattern(new String[] {
".*[.][.][.] [0-9]* more.*",
"pk_-?[0-9]*_[0-9]*_[0-9]*",
@@ -211,9 +211,11 @@ public class QOutProcessor {
LineProcessingResult processLine(String line) {
LineProcessingResult result = new LineProcessingResult(line);
-
+
Matcher matcher = null;
+ result.line = replaceHandler.processLine(result.line);
+
if (fsType == FsType.ENCRYPTED_HDFS) {
for (Pattern pattern : partialReservedPlanMask) {
matcher = pattern.matcher(result.line);
@@ -305,8 +307,6 @@ public class QOutProcessor {
}
}
- result.line = replaceHandler.processLine(result.line);
-
return result;
}
@@ -332,7 +332,7 @@ public class QOutProcessor {
ArrayList<PatternReplacementPair> ppm = new ArrayList<>();
ppm.add(new
PatternReplacementPair(Pattern.compile("\\{\"writeid\":[1-9][0-9]*,\"bucketid\":"),
"{\"writeid\":### Masked writeid ###,\"bucketid\":"));
-
+
ppm.add(new PatternReplacementPair(Pattern.compile("attempt_[0-9_]+"),
"attempt_#ID#"));
ppm.add(new PatternReplacementPair(Pattern.compile("vertex_[0-9_]+"),
"vertex_#ID#"));
ppm.add(new PatternReplacementPair(Pattern.compile("task_[0-9_]+"),
"task_#ID#"));
diff --git
a/itests/util/src/main/java/org/apache/hadoop/hive/ql/qoption/QTestReplaceHandler.java
b/itests/util/src/main/java/org/apache/hadoop/hive/ql/qoption/QTestReplaceHandler.java
index aa2e3fd..06abe15 100644
---
a/itests/util/src/main/java/org/apache/hadoop/hive/ql/qoption/QTestReplaceHandler.java
+++
b/itests/util/src/main/java/org/apache/hadoop/hive/ql/qoption/QTestReplaceHandler.java
@@ -55,7 +55,7 @@ public class QTestReplaceHandler implements
QTestOptionHandler {
throw new RuntimeException("illegal replacement expr: " + arguments + "
; expected something like /this/that/");
}
String sep = arguments.substring(0, 1);
- String[] parts = arguments.split(sep);
+ String[] parts = arguments.split(Pattern.quote(sep));
if (parts.length != 3) {
throw new RuntimeException(
"unexpected replacement expr: " + arguments + " ; expected something
like /this/that/");
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/SingleFileSystem.java
b/ql/src/java/org/apache/hadoop/hive/ql/io/SingleFileSystem.java
index e0e9bff..e073622 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/SingleFileSystem.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/SingleFileSystem.java
@@ -114,6 +114,8 @@ public abstract class SingleFileSystem extends FileSystem {
switch (info.type) {
case LEAF_FILE:
return
info.lowerTargetPath.getFileSystem(conf).open(info.lowerTargetPath, bufferSize);
+ case NONEXISTENT:
+ throw newFileNotFoundException(upperPath.toString());
default:
throw unsupported("open:" + upperPath);
}
@@ -129,6 +131,8 @@ public abstract class SingleFileSystem extends FileSystem {
return makeDirFileStatus(upperPath, removeSfsScheme(upperPath));
case SINGLEFILE_DIR:
return makeDirFileStatus(upperPath, info.lowerTargetPath);
+ case NONEXISTENT:
+ throw newFileNotFoundException(upperPath.toString());
default:
throw unsupported("fileStatus:" + upperPath);
}
@@ -143,6 +147,8 @@ public abstract class SingleFileSystem extends FileSystem {
case LEAF_FILE:
case SINGLEFILE_DIR:
return new FileStatus[] { makeFileStatus(info.upperTargetPath,
info.lowerTargetPath) };
+ case NONEXISTENT:
+ throw newFileNotFoundException(upperPath.toString());
default:
throw unsupported("listStatus: " + upperPath);
}
@@ -159,30 +165,30 @@ public abstract class SingleFileSystem extends FileSystem
{
}
@Override
- public FSDataOutputStream create(Path f, FsPermission permission, boolean
overwrite, int bufferSize,
+ public FSDataOutputStream create(Path upperPath, FsPermission permission,
boolean overwrite, int bufferSize,
short replication, long blockSize, Progressable progress) throws
IOException {
- throw unsupported("create: " + f);
+ throw unsupportedReadOnly("create", upperPath);
}
@Override
- public FSDataOutputStream append(Path f, int bufferSize, Progressable
progress) throws IOException {
- throw unsupported("append: " + f);
+ public FSDataOutputStream append(Path upperPath, int bufferSize,
Progressable progress) throws IOException {
+ throw unsupportedReadOnly("append", upperPath);
}
@Override
public boolean rename(Path src, Path dst) throws IOException {
- throw unsupported("rename: " + src + " to " + dst);
+ throw unsupportedReadOnly("rename", src);
}
@Override
- public boolean delete(Path f, boolean recursive) throws IOException {
- throw unsupported("delete: " + f);
+ public boolean delete(Path upperPath, boolean recursive) throws IOException {
+ throw unsupportedReadOnly("delete", upperPath);
}
@Override
- public boolean mkdirs(Path f, FsPermission permission) throws IOException {
- throw unsupported("mkdirs: " + f);
+ public boolean mkdirs(Path upperPath, FsPermission permission) throws
IOException {
+ throw unsupportedReadOnly("mkdirs", upperPath);
}
@Override
@@ -337,10 +343,27 @@ public abstract class SingleFileSystem extends FileSystem
{
}
private static FsPermission addExecute(FsPermission permission) {
- return new FsPermission(permission.toShort() | 1 | (1 << 3) | (1 << 6));
+ short mode = (short) (permission.toShort() | 1 | (1 << 3) | (1 << 6));
+ return new FsPermission(mode);
+ }
+
+ private IOException unsupportedReadOnly(String opName, Path path) throws
IOException {
+ SfsInfo sfsInfo = new SfsInfo(path);
+ if (sfsInfo.type == SfsInodeType.SINGLEFILE_DIR || sfsInfo.type ==
SfsInodeType.LEAF_FILE) {
+ // Try to access the underlying file if possible; as the lower fs
may provide a more
+ // specific exception (like: FileNotFoundException)
+ FileSystem fs = sfsInfo.lowerTargetPath.getFileSystem(conf);
+ fs.getFileStatus(sfsInfo.lowerTargetPath);
+ }
+ return new IOException("SFS is readonly hence " + opName + " is not
supported! (" + path + ")");
}
private IOException unsupported(String str) {
return new IOException("Unsupported SFS filesystem operation! (" + str +
")");
}
+
+ private IOException newFileNotFoundException(String path) {
+ return new FileNotFoundException("File " + path + " does not exist!");
+ }
+
}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/TestSingleFileSystem.java
b/ql/src/test/org/apache/hadoop/hive/ql/io/TestSingleFileSystem.java
index c7de37f..2045878 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/TestSingleFileSystem.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/TestSingleFileSystem.java
@@ -23,6 +23,7 @@ import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
+import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.HashSet;
import java.util.Scanner;
@@ -85,6 +86,11 @@ public class TestSingleFileSystem {
assertSfsFile(fs.getFileStatus(new Path("sfs+" + f1path +
"/#SINGLEFILE#/f1")));
}
+ @Test(expected = FileNotFoundException.class)
+ public void testGetFileStatusOfUnknown() throws Exception {
+ assertSfsFile(fs.getFileStatus(new Path("sfs+" + f1path +
"/#SINGLEFILE#/unknown")));
+ }
+
@Test
public void testListStatusSingleFileDir() throws Exception {
String targetSfsPath = "sfs+" + f1path + "/#SINGLEFILE#";
@@ -93,6 +99,7 @@ public class TestSingleFileSystem {
assertEquals(targetSfsPath + "/f1", list[0].getPath().toString());
}
+
@Test
public void testListStatusSingleFileDirEndingInSlash() throws Exception {
String targetSfsPath = "sfs+" + f1path + "/#SINGLEFILE#/";
@@ -101,6 +108,26 @@ public class TestSingleFileSystem {
assertEquals(targetSfsPath + "f1", list[0].getPath().toString());
}
+ @Test(expected = FileNotFoundException.class)
+ public void testListSingleFileDirOfNonExistentFile() throws Exception {
+ String targetSfsPath = "sfs+" + f1path + "nonExistent/#SINGLEFILE#/";
+ fs.listStatus(new Path(targetSfsPath));
+ }
+
+ @Test
+ public void testListStatusTargetFile() throws Exception {
+ String targetSfsPath = "sfs+" + f1path + "/#SINGLEFILE#/f1";
+ FileStatus[] list = fs.listStatus(new Path(targetSfsPath));
+ assertEquals(1, list.length);
+ assertEquals(targetSfsPath, list[0].getPath().toString());
+ }
+
+ @Test(expected = FileNotFoundException.class)
+ public void testListStatusNonTargetFile() throws Exception {
+ String targetSfsPath = "sfs+" + f1path + "/#SINGLEFILE#/unknown";
+ fs.listStatus(new Path(targetSfsPath));
+ }
+
@Test
public void testListStatusFile() throws Exception {
String targetSfsPath = "sfs+" + f1path;
@@ -136,6 +163,11 @@ public class TestSingleFileSystem {
}
}
+ @Test(expected = FileNotFoundException.class)
+ public void testOpenUnknownFileInSingleFileDir() throws Exception {
+ fs.open(new Path("sfs+" + f1path + "/#SINGLEFILE#/unknown"));
+ }
+
@Test(expected = IOException.class)
public void testOpenSinglefileDir() throws Exception {
fs.open(new Path("sfs+" + f1path + "/#SINGLEFILE#/"));
diff --git a/ql/src/test/queries/clientnegative/sfs_nonexistent.q
b/ql/src/test/queries/clientnegative/sfs_nonexistent.q
new file mode 100644
index 0000000..6e05c14
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/sfs_nonexistent.q
@@ -0,0 +1,3 @@
+--! qt:replace:|file:/.*/nonexistent|FILE:///.../nonexistent|
+
+create external table t1s (a string,b string,c string) location
'sfs+file://${system:test.tmp.dir}/nonexistent/path/f1.txt/#SINGLEFILE#';
diff --git a/ql/src/test/results/clientnegative/sfs_nonexistent.q.out
b/ql/src/test/results/clientnegative/sfs_nonexistent.q.out
new file mode 100644
index 0000000..9451842
--- /dev/null
+++ b/ql/src/test/results/clientnegative/sfs_nonexistent.q.out
@@ -0,0 +1,6 @@
+PREHOOK: query: create external table t1s (a string,b string,c string)
location 'sfs+FILE:///.../nonexistent/path/f1.txt/#SINGLEFILE#'
+PREHOOK: type: CREATETABLE
+PREHOOK: Input: sfs+FILE:///.../nonexistent/path/f1.txt/#SINGLEFILE#
+PREHOOK: Output: database:default
+PREHOOK: Output: default@t1s
+FAILED: Execution Error, return code 40000 from
org.apache.hadoop.hive.ql.ddl.DDLTask. MetaException(message:Got exception:
java.io.FileNotFoundException File FILE:///.../nonexistent/path/f1.txt does not
exist)