[ 
https://issues.apache.org/jira/browse/HADOOP-10589?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14012440#comment-14012440
 ] 

Steve Loughran commented on HADOOP-10589:
-----------------------------------------

Full stack trace:
{code}

testSeekZeroByteFile(org.apache.hadoop.fs.contract.s3n.TestNativeS3SeekContract)
  Time elapsed: 0.364 sec  <<< ERROR!
java.io.IOException: org.jets3t.service.ServiceException: Request Error. -- 
ResponseCode: 404, ResponseStatus: Not Found, RequestId: F32A9995B752DBB3, 
HostId: 
4Fh0bJL1jBwBczb1Di3gsJ7NJR6NcZ8Pt/5/R5YbCsLZ4hT+1N+6NxZ7Ta6QAsOzbKdYvpeHSD4=
        at 
org.apache.hadoop.fs.s3native.Jets3tNativeFileSystemStore.handleServiceException(Jets3tNativeFileSystemStore.java:444)
        at 
org.apache.hadoop.fs.s3native.Jets3tNativeFileSystemStore.handleServiceException(Jets3tNativeFileSystemStore.java:406)
        at 
org.apache.hadoop.fs.s3native.Jets3tNativeFileSystemStore.retrieveMetadata(Jets3tNativeFileSystemStore.java:184)
        at sun.reflect.GeneratedMethodAccessor7.invoke(Unknown Source)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at 
org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:189)
        at 
org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
        at org.apache.hadoop.fs.s3native.$Proxy12.retrieveMetadata(Unknown 
Source)
        at 
org.apache.hadoop.fs.s3native.NativeS3FileSystem.getFileStatus(NativeS3FileSystem.java:425)
        at 
org.apache.hadoop.fs.s3native.NativeS3FileSystem.mkdir(NativeS3FileSystem.java:550)
        at 
org.apache.hadoop.fs.s3native.NativeS3FileSystem.mkdirs(NativeS3FileSystem.java:543)
        at org.apache.hadoop.fs.FileSystem.mkdirs(FileSystem.java:1814)
        at 
org.apache.hadoop.fs.contract.AbstractFSContractTestBase.mkdirs(AbstractFSContractTestBase.java:299)
        at 
org.apache.hadoop.fs.contract.AbstractFSContractTestBase.setup(AbstractFSContractTestBase.java:155)
        at 
org.apache.hadoop.fs.contract.AbstractSeekContractTest.setup(AbstractSeekContractTest.java:56)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at 
org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
        at 
org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
        at 
org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
        at 
org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:24)
        at 
org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
        at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
        at 
org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
        at 
org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
        at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
        at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
        at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
        at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
        at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
        at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
        at 
org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:264)
        at 
org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:153)
        at 
org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:124)
        at 
org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:200)
        at 
org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:153)
        at 
org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:103)
Caused by: org.jets3t.service.ServiceException: Request Error.
        at 
org.jets3t.service.impl.rest.httpclient.RestStorageService.performRequest(RestStorageService.java:522)
        at 
org.jets3t.service.impl.rest.httpclient.RestStorageService.performRequest(RestStorageService.java:281)
        at 
org.jets3t.service.impl.rest.httpclient.RestStorageService.performRestHead(RestStorageService.java:942)
        at 
org.jets3t.service.impl.rest.httpclient.RestStorageService.getObjectImpl(RestStorageService.java:2148)
        at 
org.jets3t.service.impl.rest.httpclient.RestStorageService.getObjectDetailsImpl(RestStorageService.java:2075)
        at 
org.jets3t.service.StorageService.getObjectDetails(StorageService.java:1093)
        at 
org.jets3t.service.StorageService.getObjectDetails(StorageService.java:548)
        at 
org.apache.hadoop.fs.s3native.Jets3tNativeFileSystemStore.retrieveMetadata(Jets3tNativeFileSystemStore.java:179)
        at sun.reflect.GeneratedMethodAccessor7.invoke(Unknown Source)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at 
org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:189)
        at 
org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
        at org.apache.hadoop.fs.s3native.$Proxy12.retrieveMetadata(Unknown 
Source)
        at 
org.apache.hadoop.fs.s3native.NativeS3FileSystem.getFileStatus(NativeS3FileSystem.java:425)
        at 
org.apache.hadoop.fs.s3native.NativeS3FileSystem.mkdir(NativeS3FileSystem.java:550)
        at 
org.apache.hadoop.fs.s3native.NativeS3FileSystem.mkdirs(NativeS3FileSystem.java:543)
        at org.apache.hadoop.fs.FileSystem.mkdirs(FileSystem.java:1814)
        at 
org.apache.hadoop.fs.contract.AbstractFSContractTestBase.mkdirs(AbstractFSContractTestBase.java:299)
        at 
org.apache.hadoop.fs.contract.AbstractFSContractTestBase.setup(AbstractFSContractTestBase.java:155)
        at 
org.apache.hadoop.fs.contract.AbstractSeekContractTest.setup(AbstractSeekContractTest.java:56)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at 
org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
        at 
org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
        at 
org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
        at org.junit.internal.runners.statements.RunBefores.evaluate(
{code}

> NativeS3FileSystem throw NullPointerException when the file is empty
> --------------------------------------------------------------------
>
>                 Key: HADOOP-10589
>                 URL: https://issues.apache.org/jira/browse/HADOOP-10589
>             Project: Hadoop Common
>          Issue Type: Bug
>          Components: fs/s3
>    Affects Versions: 2.2.0
>            Reporter: shuisheng wei
>            Assignee: Steve Loughran
>
> An empty file in the s3 path.
> NativeS3FsInputStream does not check the InputStream.
> 2014-05-06 20:29:26,961 INFO [main] 
> org.apache.hadoop.hive.ql.exec.ReduceSinkOperator: 4 forwarded 0 rows
> 2014-05-06 20:29:26,961 INFO [main] 
> org.apache.hadoop.hive.ql.exec.GroupByOperator: 3 Close done
> 2014-05-06 20:29:26,961 INFO [main] 
> org.apache.hadoop.hive.ql.exec.SelectOperator: 2 Close done
> 2014-05-06 20:29:26,961 INFO [main] 
> org.apache.hadoop.hive.ql.exec.FilterOperator: 1 Close done
> 2014-05-06 20:29:26,961 INFO [main] 
> org.apache.hadoop.hive.ql.exec.TableScanOperator: 0 Close done
> 2014-05-06 20:29:26,961 INFO [main] 
> org.apache.hadoop.hive.ql.exec.MapOperator: 5 Close done
> 2014-05-06 20:29:26,961 INFO [main] 
> org.apache.hadoop.hive.ql.exec.mr.ExecMapper: ExecMapper: processed 0 rows: 
> used memory = 602221488
> 2014-05-06 20:29:26,964 WARN [main] org.apache.hadoop.mapred.YarnChild: 
> Exception running child : java.lang.NullPointerException
>       at 
> org.apache.hadoop.fs.s3native.NativeS3FileSystem$NativeS3FsInputStream.close(NativeS3FileSystem.java:147)
>       at java.io.BufferedInputStream.close(BufferedInputStream.java:472)
>       at java.io.FilterInputStream.close(FilterInputStream.java:181)
>       at org.apache.hadoop.util.LineReader.close(LineReader.java:150)
>       at 
> org.apache.hadoop.mapred.LineRecordReader.close(LineRecordReader.java:244)
>       at 
> org.apache.hadoop.hive.ql.io.CombineHiveRecordReader.doClose(CombineHiveRecordReader.java:72)
>       at 
> org.apache.hadoop.hive.ql.io.HiveContextAwareRecordReader.close(HiveContextAwareRecordReader.java:96)
>       at 
> org.apache.hadoop.hive.shims.HadoopShimsSecure$CombineFileRecordReader.close(HadoopShimsSecure.java:248)
>       at 
> org.apache.hadoop.mapred.MapTask$TrackedRecordReader.close(MapTask.java:209)
>       at org.apache.hadoop.mapred.MapTask.closeQuietly(MapTask.java:1950)
>       at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:445)
>       at org.apache.hadoop.mapred.MapTask.run(MapTask.java:342)
>       at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:168)
>       at java.security.AccessController.doPrivileged(Native Method)
>       at javax.security.auth.Subject.doAs(Subject.java:415)
>       at 
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1548)
>       at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:163)
> 2014-05-06 20:29:26,970 INFO [main] org.apache.hadoop.mapred.Task: Runnning 
> cleanup for the task



--
This message was sent by Atlassian JIRA
(v6.2#6252)

Reply via email to