[ https://issues.apache.org/jira/browse/HDFS-7605?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
Allen Wittenauer resolved HDFS-7605. ------------------------------------ Resolution: Cannot Reproduce > hadoop distcp hftp://192.168.80.31:50070/user/wp hdfs://192.168.210.10:8020/ > ---------------------------------------------------------------------------- > > Key: HDFS-7605 > URL: https://issues.apache.org/jira/browse/HDFS-7605 > Project: Hadoop HDFS > Issue Type: Bug > Components: distcp > Affects Versions: 2.6.0 > Environment: between hadoop 1.1.2 and hadoop 2.6.0, distcp on centos 6.4 > Reporter: weipan > > Error: java.io.IOException: File copy failed: > hftp://192.168.80.31:50070/user/wp/test.txt --> > hdfs://192.168.210.10:8020/wp/test.txt > at > org.apache.hadoop.tools.mapred.CopyMapper.copyFileWithRetry(CopyMapper.java:284) > at org.apache.hadoop.tools.mapred.CopyMapper.map(CopyMapper.java:252) > at org.apache.hadoop.tools.mapred.CopyMapper.map(CopyMapper.java:50) > at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145) > at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:784) > at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341) > at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:163) > at java.security.AccessController.doPrivileged(Native Method) > at javax.security.auth.Subject.doAs(Subject.java:396) > at > org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628) > at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158) > Caused by: java.io.IOException: Couldn't run retriable-command: Copying > hftp://192.168.80.31:50070/user/wp/test.txt to > hdfs://192.168.210.10:8020/wp/test.txt > at > org.apache.hadoop.tools.util.RetriableCommand.execute(RetriableCommand.java:101) > at > org.apache.hadoop.tools.mapred.CopyMapper.copyFileWithRetry(CopyMapper.java:280) > ... 
10 more > Caused by: > org.apache.hadoop.tools.mapred.RetriableFileCopyCommand$CopyReadException: > java.net.SocketTimeoutException: connect timed out > at > org.apache.hadoop.tools.mapred.RetriableFileCopyCommand.getInputStream(RetriableFileCopyCommand.java:303) > at > org.apache.hadoop.tools.mapred.RetriableFileCopyCommand.copyBytes(RetriableFileCopyCommand.java:248) > at > org.apache.hadoop.tools.mapred.RetriableFileCopyCommand.copyToFile(RetriableFileCopyCommand.java:184) > at > org.apache.hadoop.tools.mapred.RetriableFileCopyCommand.doCopy(RetriableFileCopyCommand.java:124) > at > org.apache.hadoop.tools.mapred.RetriableFileCopyCommand.doExecute(RetriableFileCopyCommand.java:100) > at > org.apache.hadoop.tools.util.RetriableCommand.execute(RetriableCommand.java:87) > ... 11 more > Caused by: java.net.SocketTimeoutException: connect timed out > at java.net.PlainSocketImpl.socketConnect(Native Method) > at java.net.PlainSocketImpl.doConnect(PlainSocketImpl.java:351) > at java.net.PlainSocketImpl.connectToAddress(PlainSocketImpl.java:213) > at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:200) > at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:366) > at java.net.Socket.connect(Socket.java:529) > at sun.net.NetworkClient.doConnect(NetworkClient.java:158) > at sun.net.www.http.HttpClient.openServer(HttpClient.java:411) > at sun.net.www.http.HttpClient.openServer(HttpClient.java:525) > at sun.net.www.http.HttpClient.<init>(HttpClient.java:208) > at sun.net.www.http.HttpClient.New(HttpClient.java:291) > at sun.net.www.http.HttpClient.New(HttpClient.java:310) > at > sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:987) > at > sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:923) > at > sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:841) > at > sun.net.www.protocol.http.HttpURLConnection.followRedirect(HttpURLConnection.java:2156) > at > 
sun.net.www.protocol.http.HttpURLConnection.getInputStream(HttpURLConnection.java:1390) > at > java.net.HttpURLConnection.getResponseCode(HttpURLConnection.java:379) > at > org.apache.hadoop.hdfs.web.HftpFileSystem$RangeHeaderUrlOpener.connect(HftpFileSystem.java:370) > at > org.apache.hadoop.hdfs.web.ByteRangeInputStream.openInputStream(ByteRangeInputStream.java:120) > at > org.apache.hadoop.hdfs.web.ByteRangeInputStream.getInputStream(ByteRangeInputStream.java:104) > at > org.apache.hadoop.hdfs.web.ByteRangeInputStream.<init>(ByteRangeInputStream.java:89) > at > org.apache.hadoop.hdfs.web.HftpFileSystem$RangeHeaderInputStream.<init>(HftpFileSystem.java:383) > at > org.apache.hadoop.hdfs.web.HftpFileSystem$RangeHeaderInputStream.<init>(HftpFileSystem.java:388) > at > org.apache.hadoop.hdfs.web.HftpFileSystem.open(HftpFileSystem.java:404) > at org.apache.hadoop.fs.FileSystem.open(FileSystem.java:766) > at > org.apache.hadoop.tools.mapred.RetriableFileCopyCommand.getInputStream(RetriableFileCopyCommand.java:299) > ... 
16 more > 15/01/12 18:04:06 INFO mapreduce.Job: map 67% reduce 0% > 15/01/12 18:04:06 INFO mapreduce.Job: Task Id : > attempt_1420685403662_0029_m_000001_0, Status : FAILED > Error: java.io.IOException: File copy failed: > hftp://192.168.80.31:50070/user/wp/t.txt --> > hdfs://192.168.210.10:8020/wp/t.txt > at > org.apache.hadoop.tools.mapred.CopyMapper.copyFileWithRetry(CopyMapper.java:284) > at org.apache.hadoop.tools.mapred.CopyMapper.map(CopyMapper.java:252) > at org.apache.hadoop.tools.mapred.CopyMapper.map(CopyMapper.java:50) > at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145) > at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:784) > at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341) > at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:163) > at java.security.AccessController.doPrivileged(Native Method) > at javax.security.auth.Subject.doAs(Subject.java:396) > at > org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628) > at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158) > Caused by: java.io.IOException: Couldn't run retriable-command: Copying > hftp://192.168.80.31:50070/user/wp/t.txt to > hdfs://192.168.210.10:8020/wp/t.txt > at > org.apache.hadoop.tools.util.RetriableCommand.execute(RetriableCommand.java:101) > at > org.apache.hadoop.tools.mapred.CopyMapper.copyFileWithRetry(CopyMapper.java:280) > ... 
10 more > Caused by: > org.apache.hadoop.tools.mapred.RetriableFileCopyCommand$CopyReadException: > java.net.SocketTimeoutException: connect timed out > at > org.apache.hadoop.tools.mapred.RetriableFileCopyCommand.getInputStream(RetriableFileCopyCommand.java:303) > at > org.apache.hadoop.tools.mapred.RetriableFileCopyCommand.copyBytes(RetriableFileCopyCommand.java:248) > at > org.apache.hadoop.tools.mapred.RetriableFileCopyCommand.copyToFile(RetriableFileCopyCommand.java:184) > at > org.apache.hadoop.tools.mapred.RetriableFileCopyCommand.doCopy(RetriableFileCopyCommand.java:124) > at > org.apache.hadoop.tools.mapred.RetriableFileCopyCommand.doExecute(RetriableFileCopyCommand.java:100) > at > org.apache.hadoop.tools.util.RetriableCommand.execute(RetriableCommand.java:87) > ... 11 more > Caused by: java.net.SocketTimeoutException: connect timed out > at java.net.PlainSocketImpl.socketConnect(Native Method) > at java.net.PlainSocketImpl.doConnect(PlainSocketImpl.java:351) > at java.net.PlainSocketImpl.connectToAddress(PlainSocketImpl.java:213) > at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:200) > at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:366) > at java.net.Socket.connect(Socket.java:529) > at sun.net.NetworkClient.doConnect(NetworkClient.java:158) > at sun.net.www.http.HttpClient.openServer(HttpClient.java:411) > at sun.net.www.http.HttpClient.openServer(HttpClient.java:525) > at sun.net.www.http.HttpClient.<init>(HttpClient.java:208) > at sun.net.www.http.HttpClient.New(HttpClient.java:291) > at sun.net.www.http.HttpClient.New(HttpClient.java:310) > at > sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:987) > at > sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:923) > at > sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:841) > at > sun.net.www.protocol.http.HttpURLConnection.followRedirect(HttpURLConnection.java:2156) > at > 
sun.net.www.protocol.http.HttpURLConnection.getInputStream(HttpURLConnection.java:1390) > at > java.net.HttpURLConnection.getResponseCode(HttpURLConnection.java:379) > at > org.apache.hadoop.hdfs.web.HftpFileSystem$RangeHeaderUrlOpener.connect(HftpFileSystem.java:370) > at > org.apache.hadoop.hdfs.web.ByteRangeInputStream.openInputStream(ByteRangeInputStream.java:120) > at > org.apache.hadoop.hdfs.web.ByteRangeInputStream.getInputStream(ByteRangeInputStream.java:104) > at > org.apache.hadoop.hdfs.web.ByteRangeInputStream.<init>(ByteRangeInputStream.java:89) > at > org.apache.hadoop.hdfs.web.HftpFileSystem$RangeHeaderInputStream.<init>(HftpFileSystem.java:383) > at > org.apache.hadoop.hdfs.web.HftpFileSystem$RangeHeaderInputStream.<init>(HftpFileSystem.java:388) > at > org.apache.hadoop.hdfs.web.HftpFileSystem.open(HftpFileSystem.java:404) > at org.apache.hadoop.fs.FileSystem.open(FileSystem.java:766) > at > org.apache.hadoop.tools.mapred.RetriableFileCopyCommand.getInputStream(RetriableFileCopyCommand.java:299) > ... 16 more -- This message was sent by Atlassian JIRA (v6.3.4#6332)