[
https://issues.apache.org/jira/browse/HDFS-15643?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17223748#comment-17223748
]
Hadoop QA commented on HDFS-15643:
----------------------------------
| (x) *{color:red}-1 overall{color}* |
\\
\\
|| Vote || Subsystem || Runtime || Logfile || Comment ||
| {color:blue}0{color} | {color:blue} reexec {color} | {color:blue} 0m
56s{color} | | {color:blue} Docker mode activated. {color} |
|| || || || {color:brown} Prechecks {color} || ||
| {color:green}+1{color} | {color:green} dupname {color} | {color:green} 0m
0s{color} | | {color:green} No case conflicting files found. {color} |
| {color:green}+1{color} | {color:green} @author {color} | {color:green} 0m
0s{color} | | {color:green} The patch does not contain any @author tags.
{color} |
| {color:red}-1{color} | {color:red} test4tests {color} | {color:red} 0m
0s{color} | | {color:red} The patch doesn't appear to include any new or
modified tests. Please justify why no new tests are needed for this patch. Also
please list what manual steps were performed to verify this patch. {color} |
|| || || || {color:brown} trunk Compile Tests {color} || ||
| {color:green}+1{color} | {color:green} mvninstall {color} | {color:green} 24m
15s{color} | | {color:green} trunk passed {color} |
| {color:green}+1{color} | {color:green} compile {color} | {color:green} 1m
31s{color} | | {color:green} trunk passed with JDK
Ubuntu-11.0.9+11-Ubuntu-0ubuntu1.18.04.1 {color} |
| {color:green}+1{color} | {color:green} compile {color} | {color:green} 1m
16s{color} | | {color:green} trunk passed with JDK Private
Build-1.8.0_272-8u272-b10-0ubuntu1~18.04-b10 {color} |
| {color:green}+1{color} | {color:green} checkstyle {color} | {color:green} 0m
44s{color} | | {color:green} trunk passed {color} |
| {color:green}+1{color} | {color:green} mvnsite {color} | {color:green} 1m
19s{color} | | {color:green} trunk passed {color} |
| {color:green}+1{color} | {color:green} shadedclient {color} | {color:green}
20m 48s{color} | | {color:green} branch has no errors when building and
testing our client artifacts. {color} |
| {color:red}-1{color} | {color:red} javadoc {color} | {color:red} 0m
21s{color} |
[/branch-javadoc-hadoop-hdfs-project_hadoop-hdfs-jdkUbuntu-11.0.9+11-Ubuntu-0ubuntu1.18.04.1.txt|https://ci-hadoop.apache.org/job/PreCommit-HDFS-Build/282/artifact/out/branch-javadoc-hadoop-hdfs-project_hadoop-hdfs-jdkUbuntu-11.0.9+11-Ubuntu-0ubuntu1.18.04.1.txt]
| {color:red} hadoop-hdfs in trunk failed with JDK
Ubuntu-11.0.9+11-Ubuntu-0ubuntu1.18.04.1. {color} |
| {color:red}-1{color} | {color:red} javadoc {color} | {color:red} 0m
29s{color} |
[/branch-javadoc-hadoop-hdfs-project_hadoop-hdfs-jdkPrivateBuild-1.8.0_272-8u272-b10-0ubuntu1~18.04-b10.txt|https://ci-hadoop.apache.org/job/PreCommit-HDFS-Build/282/artifact/out/branch-javadoc-hadoop-hdfs-project_hadoop-hdfs-jdkPrivateBuild-1.8.0_272-8u272-b10-0ubuntu1~18.04-b10.txt]
| {color:red} hadoop-hdfs in trunk failed with JDK Private
Build-1.8.0_272-8u272-b10-0ubuntu1~18.04-b10. {color} |
| {color:blue}0{color} | {color:blue} spotbugs {color} | {color:blue} 22m
10s{color} | | {color:blue} Used deprecated FindBugs config; consider switching to SpotBugs. {color} |
| {color:red}-1{color} | {color:red} findbugs {color} | {color:red} 0m
30s{color} |
[/branch-findbugs-hadoop-hdfs-project_hadoop-hdfs.txt|https://ci-hadoop.apache.org/job/PreCommit-HDFS-Build/282/artifact/out/branch-findbugs-hadoop-hdfs-project_hadoop-hdfs.txt]
| {color:red} hadoop-hdfs in trunk failed. {color} |
|| || || || {color:brown} Patch Compile Tests {color} || ||
| {color:red}-1{color} | {color:red} mvninstall {color} | {color:red} 0m
23s{color} |
[/patch-mvninstall-hadoop-hdfs-project_hadoop-hdfs.txt|https://ci-hadoop.apache.org/job/PreCommit-HDFS-Build/282/artifact/out/patch-mvninstall-hadoop-hdfs-project_hadoop-hdfs.txt]
| {color:red} hadoop-hdfs in the patch failed. {color} |
| {color:red}-1{color} | {color:red} compile {color} | {color:red} 0m
23s{color} |
[/patch-compile-hadoop-hdfs-project_hadoop-hdfs-jdkUbuntu-11.0.9+11-Ubuntu-0ubuntu1.18.04.1.txt|https://ci-hadoop.apache.org/job/PreCommit-HDFS-Build/282/artifact/out/patch-compile-hadoop-hdfs-project_hadoop-hdfs-jdkUbuntu-11.0.9+11-Ubuntu-0ubuntu1.18.04.1.txt]
| {color:red} hadoop-hdfs in the patch failed with JDK
Ubuntu-11.0.9+11-Ubuntu-0ubuntu1.18.04.1. {color} |
| {color:red}-1{color} | {color:red} javac {color} | {color:red} 0m 23s{color}
|
[/patch-compile-hadoop-hdfs-project_hadoop-hdfs-jdkUbuntu-11.0.9+11-Ubuntu-0ubuntu1.18.04.1.txt|https://ci-hadoop.apache.org/job/PreCommit-HDFS-Build/282/artifact/out/patch-compile-hadoop-hdfs-project_hadoop-hdfs-jdkUbuntu-11.0.9+11-Ubuntu-0ubuntu1.18.04.1.txt]
| {color:red} hadoop-hdfs in the patch failed with JDK
Ubuntu-11.0.9+11-Ubuntu-0ubuntu1.18.04.1. {color} |
| {color:red}-1{color} | {color:red} compile {color} | {color:red} 0m
23s{color} |
[/patch-compile-hadoop-hdfs-project_hadoop-hdfs-jdkPrivateBuild-1.8.0_272-8u272-b10-0ubuntu1~18.04-b10.txt|https://ci-hadoop.apache.org/job/PreCommit-HDFS-Build/282/artifact/out/patch-compile-hadoop-hdfs-project_hadoop-hdfs-jdkPrivateBuild-1.8.0_272-8u272-b10-0ubuntu1~18.04-b10.txt]
| {color:red} hadoop-hdfs in the patch failed with JDK Private
Build-1.8.0_272-8u272-b10-0ubuntu1~18.04-b10. {color} |
| {color:red}-1{color} | {color:red} javac {color} | {color:red} 0m 23s{color}
|
[/patch-compile-hadoop-hdfs-project_hadoop-hdfs-jdkPrivateBuild-1.8.0_272-8u272-b10-0ubuntu1~18.04-b10.txt|https://ci-hadoop.apache.org/job/PreCommit-HDFS-Build/282/artifact/out/patch-compile-hadoop-hdfs-project_hadoop-hdfs-jdkPrivateBuild-1.8.0_272-8u272-b10-0ubuntu1~18.04-b10.txt]
| {color:red} hadoop-hdfs in the patch failed with JDK Private
Build-1.8.0_272-8u272-b10-0ubuntu1~18.04-b10. {color} |
| {color:green}+1{color} | {color:green} blanks {color} | {color:green} 0m
0s{color} | | {color:green} The patch has no blanks issues. {color} |
| {color:orange}-0{color} | {color:orange} checkstyle {color} | {color:orange}
0m 21s{color} |
[/buildtool-patch-checkstyle-hadoop-hdfs-project_hadoop-hdfs.txt|https://ci-hadoop.apache.org/job/PreCommit-HDFS-Build/282/artifact/out/buildtool-patch-checkstyle-hadoop-hdfs-project_hadoop-hdfs.txt]
| {color:orange} The patch fails to run checkstyle in hadoop-hdfs {color} |
| {color:red}-1{color} | {color:red} mvnsite {color} | {color:red} 0m
23s{color} |
[/patch-mvnsite-hadoop-hdfs-project_hadoop-hdfs.txt|https://ci-hadoop.apache.org/job/PreCommit-HDFS-Build/282/artifact/out/patch-mvnsite-hadoop-hdfs-project_hadoop-hdfs.txt]
| {color:red} hadoop-hdfs in the patch failed. {color} |
| {color:green}+1{color} | {color:green} shadedclient {color} | {color:green}
1m 53s{color} | | {color:green} patch has no errors when building and testing
our client artifacts. {color} |
| {color:red}-1{color} | {color:red} javadoc {color} | {color:red} 1m
10s{color} |
[/results-javadoc-javadoc-hadoop-hdfs-project_hadoop-hdfs-jdkUbuntu-11.0.9+11-Ubuntu-0ubuntu1.18.04.1.txt|https://ci-hadoop.apache.org/job/PreCommit-HDFS-Build/282/artifact/out/results-javadoc-javadoc-hadoop-hdfs-project_hadoop-hdfs-jdkUbuntu-11.0.9+11-Ubuntu-0ubuntu1.18.04.1.txt]
| {color:red}
hadoop-hdfs-project_hadoop-hdfs-jdkUbuntu-11.0.9+11-Ubuntu-0ubuntu1.18.04.1
with JDK Ubuntu-11.0.9+11-Ubuntu-0ubuntu1.18.04.1 generated 99 new + 0
unchanged - 0 fixed = 99 total (was 0) {color} |
| {color:red}-1{color} | {color:red} javadoc {color} | {color:red} 1m
34s{color} |
[/results-javadoc-javadoc-hadoop-hdfs-project_hadoop-hdfs-jdkPrivateBuild-1.8.0_272-8u272-b10-0ubuntu1~18.04-b10.txt|https://ci-hadoop.apache.org/job/PreCommit-HDFS-Build/282/artifact/out/results-javadoc-javadoc-hadoop-hdfs-project_hadoop-hdfs-jdkPrivateBuild-1.8.0_272-8u272-b10-0ubuntu1~18.04-b10.txt]
| {color:red}
hadoop-hdfs-project_hadoop-hdfs-jdkPrivateBuild-1.8.0_272-8u272-b10-0ubuntu1~18.04-b10
with JDK Private Build-1.8.0_272-8u272-b10-0ubuntu1~18.04-b10 generated 1 new
+ 0 unchanged - 0 fixed = 1 total (was 0) {color} |
| {color:green}+1{color} | {color:green} findbugs {color} | {color:green} 3m
46s{color} | | {color:green} the patch passed {color} |
|| || || || {color:brown} Other Tests {color} || ||
| {color:red}-1{color} | {color:red} unit {color} | {color:red}122m 0s{color}
|
[/patch-unit-hadoop-hdfs-project_hadoop-hdfs.txt|https://ci-hadoop.apache.org/job/PreCommit-HDFS-Build/282/artifact/out/patch-unit-hadoop-hdfs-project_hadoop-hdfs.txt]
| {color:red} hadoop-hdfs in the patch failed. {color} |
| {color:red}-1{color} | {color:red} asflicense {color} | {color:red} 0m
34s{color} |
[/results-asflicense.txt|https://ci-hadoop.apache.org/job/PreCommit-HDFS-Build/282/artifact/out/results-asflicense.txt]
| {color:red} The patch generated 19 ASF License warnings. {color} |
| {color:black}{color} | {color:black} {color} | {color:black}184m 42s{color} |
| {color:black}{color} |
\\
\\
|| Reason || Tests ||
| Failed junit tests | hadoop.fs.viewfs.TestViewFileSystemLinkFallback |
| | hadoop.hdfs.web.TestWebHDFSAcl |
| | hadoop.hdfs.server.namenode.TestDiskspaceQuotaUpdate |
| | hadoop.hdfs.TestDFSStartupVersions |
| | hadoop.hdfs.TestGetFileChecksum |
| | hadoop.hdfs.TestDFSStripedOutputStreamWithFailure |
| | hadoop.fs.viewfs.TestViewFileSystemAtHdfsRoot |
| | hadoop.fs.viewfs.TestNNStartupWhenViewFSOverloadSchemeEnabled |
| | hadoop.hdfs.tools.TestDFSZKFailoverController |
| | hadoop.fs.viewfs.TestViewFileSystemWithXAttrs |
| | hadoop.fs.viewfs.TestViewFileSystemHdfs |
| | hadoop.hdfs.TestMultipleNNPortQOP |
| | hadoop.hdfs.server.datanode.fsdataset.impl.TestFsDatasetImpl |
| | hadoop.fs.viewfs.TestViewFSOverloadSchemeWithMountTableConfigInHDFS |
| | hadoop.fs.viewfs.TestViewFsLinkFallback |
| | hadoop.fs.viewfs.TestViewFileSystemLinkRegex |
| | hadoop.hdfs.TestClientProtocolForPipelineRecovery |
| | hadoop.hdfs.server.namenode.ha.TestDFSUpgradeWithHA |
| | hadoop.hdfs.web.TestWebHDFS |
| | hadoop.fs.viewfs.TestViewFsAtHdfsRoot |
| | hadoop.hdfs.web.TestWebHdfsWithMultipleNameNodes |
| | hadoop.hdfs.server.datanode.TestDataNodeUUID |
| | hadoop.hdfs.TestStripedFileAppend |
| | hadoop.fs.viewfs.TestViewFsWithAcls |
| | hadoop.fs.viewfs.TestViewFileSystemLinkMergeSlash |
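To chase these locally, here is a minimal sketch (an illustration, not part of the patch) that re-runs one of the failed suites with JUnit 4's programmatic runner; it assumes the hadoop-hdfs test classes are on the classpath:
{code:java}
import org.junit.runner.JUnitCore;
import org.junit.runner.Result;
import org.junit.runner.notification.Failure;

public class RerunFailedSuite {
  public static void main(String[] args) {
    // Re-run one suite from the failure table above; any of the
    // listed classes can be substituted here.
    Result result = JUnitCore.runClasses(
        org.apache.hadoop.hdfs.TestGetFileChecksum.class);
    for (Failure failure : result.getFailures()) {
      System.out.println(failure.getTestHeader());
      System.out.println(failure.getTrace());
    }
    System.out.println(result.wasSuccessful() ? "PASSED" : "FAILED");
  }
}
{code}
The equivalent Maven invocation would be {{mvn test -Dtest=TestGetFileChecksum}} run from {{hadoop-hdfs-project/hadoop-hdfs}}.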
\\
\\
|| Subsystem || Report/Notes ||
| Docker | ClientAPI=1.40 ServerAPI=1.40 base:
https://ci-hadoop.apache.org/job/PreCommit-HDFS-Build/282/artifact/out/Dockerfile
|
| JIRA Issue | HDFS-15643 |
| JIRA Patch URL |
https://issues.apache.org/jira/secure/attachment/13014427/HDFS-15643-01.patch |
| Optional Tests | dupname asflicense compile javac javadoc mvninstall mvnsite
unit shadedclient findbugs checkstyle |
| uname | Linux ccdc929a514f 4.15.0-112-generic #113-Ubuntu SMP Thu Jul 9
23:41:39 UTC 2020 x86_64 x86_64 x86_64 GNU/Linux |
| Build tool | maven |
| Personality | personality/hadoop.sh |
| git revision | trunk / 8ee6bc2518bfdf7ad257cc1cf3c73f4208c49fc0 |
| Default Java | Private Build-1.8.0_272-8u272-b10-0ubuntu1~18.04-b10 |
| Multi-JDK versions |
/usr/lib/jvm/java-11-openjdk-amd64:Ubuntu-11.0.9+11-Ubuntu-0ubuntu1.18.04.1
/usr/lib/jvm/java-8-openjdk-amd64:Private
Build-1.8.0_272-8u272-b10-0ubuntu1~18.04-b10 |
| Test Results |
https://ci-hadoop.apache.org/job/PreCommit-HDFS-Build/282/testReport/ |
| Max. process+thread count | 4210 (vs. ulimit of 5500) |
| modules | C: hadoop-hdfs-project/hadoop-hdfs U:
hadoop-hdfs-project/hadoop-hdfs |
| Console output |
https://ci-hadoop.apache.org/job/PreCommit-HDFS-Build/282/console |
| versions | git=2.17.1 maven=3.6.0 |
| Powered by | Apache Yetus 0.14.0-SNAPSHOT https://yetus.apache.org |
This message was automatically generated.
> EC: Fix checksum computation in case of native encoders
> -------------------------------------------------------
>
> Key: HDFS-15643
> URL: https://issues.apache.org/jira/browse/HDFS-15643
> Project: Hadoop HDFS
> Issue Type: Sub-task
> Reporter: Ahmed Hussein
> Assignee: Ahmed Hussein
> Priority: Blocker
> Labels: pull-request-available
> Attachments: HDFS-15643-01.patch, Test-Fix-01.patch,
> TestFileChecksum.testStripedFileChecksumWithMissedDataBlocksRangeQuery17.log,
> org.apache.hadoop.hdfs.TestFileChecksum-output.txt,
> org.apache.hadoop.hdfs.TestFileChecksum.txt
>
> Time Spent: 3h 50m
> Remaining Estimate: 0h
>
> There are many failures in {{TestFileChecksumCompositeCrc}}: the
> {{testStripedFileChecksumWithMissedDataBlocksRangeQueryXX}} test cases fail.
> The following are sample stack traces from two of them, Query7 and Query8.
> {code:java}
> org.apache.hadoop.fs.PathIOException: `/striped/stripedFileChecksum1': Fail to get block checksum for
> LocatedStripedBlock{BP-1812707539-172.17.0.3-1602771351154:blk_-9223372036854775792_1001;
> getBlockSize()=37748736; corrupt=false; offset=0;
> locs=[DatanodeInfoWithStorage[127.0.0.1:36687,DS-b00139f0-4f28-4870-8f72-b726bd339e23,DISK],
> DatanodeInfoWithStorage[127.0.0.1:36303,DS-49a3c58e-da4a-4256-b1f9-893e4003ec94,DISK],
> DatanodeInfoWithStorage[127.0.0.1:43975,DS-ac278858-b6c8-424f-9e20-58d718dabe31,DISK],
> DatanodeInfoWithStorage[127.0.0.1:37507,DS-17f9d8d8-f8d3-443b-8df7-29416a2f5cb0,DISK],
> DatanodeInfoWithStorage[127.0.0.1:36441,DS-7e9d19b5-6220-465f-b33e-f8ed0e60fb07,DISK],
> DatanodeInfoWithStorage[127.0.0.1:42555,DS-ce679f5e-19fe-45b0-a0cd-8d8bec2f4735,DISK],
> DatanodeInfoWithStorage[127.0.0.1:39093,DS-4a7f54bb-dd39-4b5b-8dee-31a1b565cd7f,DISK],
> DatanodeInfoWithStorage[127.0.0.1:41699,DS-e1f939f3-37e7-413e-a522-934243477d81,DISK]];
> indices=[1, 2, 3, 4, 5, 6, 7, 8]}
>     at org.apache.hadoop.hdfs.FileChecksumHelper$StripedFileNonStripedChecksumComputer.checksumBlocks(FileChecksumHelper.java:640)
>     at org.apache.hadoop.hdfs.FileChecksumHelper$FileChecksumComputer.compute(FileChecksumHelper.java:252)
>     at org.apache.hadoop.hdfs.DFSClient.getFileChecksumInternal(DFSClient.java:1851)
>     at org.apache.hadoop.hdfs.DFSClient.getFileChecksumWithCombineMode(DFSClient.java:1871)
>     at org.apache.hadoop.hdfs.DistributedFileSystem$34.doCall(DistributedFileSystem.java:1902)
>     at org.apache.hadoop.hdfs.DistributedFileSystem$34.doCall(DistributedFileSystem.java:1899)
>     at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
>     at org.apache.hadoop.hdfs.DistributedFileSystem.getFileChecksum(DistributedFileSystem.java:1916)
>     at org.apache.hadoop.hdfs.TestFileChecksum.getFileChecksum(TestFileChecksum.java:584)
>     at org.apache.hadoop.hdfs.TestFileChecksum.testStripedFileChecksumWithMissedDataBlocksRangeQuery(TestFileChecksum.java:295)
>     at org.apache.hadoop.hdfs.TestFileChecksum.testStripedFileChecksumWithMissedDataBlocksRangeQuery7(TestFileChecksum.java:377)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:498)
>     at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
>     at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
>     at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
>     at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
>     at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:298)
>     at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:292)
>     at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>     at java.lang.Thread.run(Thread.java:748)
> {code}
>
> {code:java}
> Error Message
> `/striped/stripedFileChecksum1': Fail to get block checksum for
> LocatedStripedBlock{BP-1299291876-172.17.0.3-1602771356932:blk_-9223372036854775792_1001;
> getBlockSize()=37748736; corrupt=false; offset=0;
> locs=[DatanodeInfoWithStorage[127.0.0.1:42217,DS-6c29e4b7-e4f1-4302-ad23-fb078f37d783,DISK],
> DatanodeInfoWithStorage[127.0.0.1:41307,DS-3d824f14-3cd0-46b1-bef1-caa808bf278d,DISK],
> DatanodeInfoWithStorage[127.0.0.1:37193,DS-eeb44ff5-fdf1-4774-b6cf-5be7c40147a9,DISK],
> DatanodeInfoWithStorage[127.0.0.1:39897,DS-36d2fbfc-64bc-405c-8360-735f1ad92e30,DISK],
> DatanodeInfoWithStorage[127.0.0.1:35545,DS-6fd42817-efea-416e-92fb-3e9034705142,DISK],
> DatanodeInfoWithStorage[127.0.0.1:39945,DS-501deff8-b6df-4cf0-9ac1-154a4253eec8,DISK],
> DatanodeInfoWithStorage[127.0.0.1:41359,DS-9b0449f5-377b-4a76-9eb6-0bcf2984b4bb,DISK],
> DatanodeInfoWithStorage[127.0.0.1:36123,DS-4184ab4a-079d-4b1c-a8cb-2ba22b0baafb,DISK]];
> indices=[0, 1, 2, 3, 4, 6, 7, 8]}
>
> Stacktrace
> org.apache.hadoop.fs.PathIOException: `/striped/stripedFileChecksum1': Fail to get block checksum for
> LocatedStripedBlock{BP-1299291876-172.17.0.3-1602771356932:blk_-9223372036854775792_1001;
> getBlockSize()=37748736; corrupt=false; offset=0;
> locs=[DatanodeInfoWithStorage[127.0.0.1:42217,DS-6c29e4b7-e4f1-4302-ad23-fb078f37d783,DISK],
> DatanodeInfoWithStorage[127.0.0.1:41307,DS-3d824f14-3cd0-46b1-bef1-caa808bf278d,DISK],
> DatanodeInfoWithStorage[127.0.0.1:37193,DS-eeb44ff5-fdf1-4774-b6cf-5be7c40147a9,DISK],
> DatanodeInfoWithStorage[127.0.0.1:39897,DS-36d2fbfc-64bc-405c-8360-735f1ad92e30,DISK],
> DatanodeInfoWithStorage[127.0.0.1:35545,DS-6fd42817-efea-416e-92fb-3e9034705142,DISK],
> DatanodeInfoWithStorage[127.0.0.1:39945,DS-501deff8-b6df-4cf0-9ac1-154a4253eec8,DISK],
> DatanodeInfoWithStorage[127.0.0.1:41359,DS-9b0449f5-377b-4a76-9eb6-0bcf2984b4bb,DISK],
> DatanodeInfoWithStorage[127.0.0.1:36123,DS-4184ab4a-079d-4b1c-a8cb-2ba22b0baafb,DISK]];
> indices=[0, 1, 2, 3, 4, 6, 7, 8]}
>     at org.apache.hadoop.hdfs.FileChecksumHelper$StripedFileNonStripedChecksumComputer.checksumBlocks(FileChecksumHelper.java:640)
>     at org.apache.hadoop.hdfs.FileChecksumHelper$FileChecksumComputer.compute(FileChecksumHelper.java:252)
>     at org.apache.hadoop.hdfs.DFSClient.getFileChecksumInternal(DFSClient.java:1851)
>     at org.apache.hadoop.hdfs.DFSClient.getFileChecksumWithCombineMode(DFSClient.java:1871)
>     at org.apache.hadoop.hdfs.DistributedFileSystem$34.doCall(DistributedFileSystem.java:1902)
>     at org.apache.hadoop.hdfs.DistributedFileSystem$34.doCall(DistributedFileSystem.java:1899)
>     at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
>     at org.apache.hadoop.hdfs.DistributedFileSystem.getFileChecksum(DistributedFileSystem.java:1916)
>     at org.apache.hadoop.hdfs.TestFileChecksum.getFileChecksum(TestFileChecksum.java:584)
>     at org.apache.hadoop.hdfs.TestFileChecksum.testStripedFileChecksumWithMissedDataBlocksRangeQuery(TestFileChecksum.java:295)
>     at org.apache.hadoop.hdfs.TestFileChecksum.testStripedFileChecksumWithMissedDataBlocksRangeQuery8(TestFileChecksum.java:388)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:498)
>     at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
>     at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
>     at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
>     at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
>     at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:298)
>     at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:292)
>     at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>     at java.lang.Thread.run(Thread.java:748)
> {code}
>
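> For context, here is a minimal sketch of the client-side call path that fails above. It is an illustration only, not part of the patch: the class name is hypothetical, it assumes {{fs.defaultFS}} points at a cluster with an erasure-coded {{/striped}} directory and enough datanodes stopped to force block reconstruction, and the file name and range length are taken from the traces.
> {code:java}
> import org.apache.hadoop.conf.Configuration;
> import org.apache.hadoop.fs.FileChecksum;
> import org.apache.hadoop.fs.FileSystem;
> import org.apache.hadoop.fs.Path;
>
> public class StripedChecksumRepro {
>   public static void main(String[] args) throws Exception {
>     Configuration conf = new Configuration();
>     try (FileSystem fs = FileSystem.get(conf)) {
>       // File name and range length taken from the stack traces above.
>       Path file = new Path("/striped/stripedFileChecksum1");
>       // DistributedFileSystem.getFileChecksum(Path, long) drives
>       // FileChecksumHelper$StripedFileNonStripedChecksumComputer.checksumBlocks,
>       // which raises the PathIOException above when a striped block's
>       // checksum cannot be recomputed via the native EC encoders.
>       FileChecksum checksum = fs.getFileChecksum(file, 37748736L);
>       System.out.println("checksum: " + checksum);
>     }
>   }
> }
> {code}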