Also, I'm now seeing this ERROR in the datanode log:
2011-10-31 11:19:54,728 ERROR
org.apache.hadoop.hdfs.server.datanode.DataNode:
DatanodeRegistration(192.168.35.117:50010,
storageID=DS-1350419318-127.0.1.1-50010-1317048473534, infoPort=50075,
ipcPort=50020):DataXceiver
org.apache.hadoop.hdfs.server.datanode.BlockAlreadyExistsException:
Block blk_-5143210290039847658_37294862 is valid, and cannot be
written to.
        at org.apache.hadoop.hdfs.server.datanode.FSDataset.writeToBlock(FSDataset.java:1314)
        at org.apache.hadoop.hdfs.server.datanode.BlockReceiver.<init>(BlockReceiver.java:99)
        at org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:318)
        at org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:122)

Is this a permissions issue?
And why is 127.0.1.1 listed in the storageID there?
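
In case it's relevant, here is a quick check of what the JVM resolves the
local hostname to. My guess (and it is only a guess) is that the 127.0.1.1
comes from the usual Ubuntu/Debian /etc/hosts entry that maps the hostname
to 127.0.1.1, so the datanode picked that address up when the storageID was
first generated:

    import java.net.InetAddress;

    // Prints what Java resolves the local hostname to.
    // If this prints 127.0.1.1, the datanode most likely registered with
    // the loopback alias from /etc/hosts rather than the real interface.
    public class ResolveCheck {
        public static void main(String[] args) throws Exception {
            InetAddress local = InetAddress.getLocalHost();
            System.out.println("hostname:    " + local.getHostName());
            System.out.println("resolves to: " + local.getHostAddress());
        }
    }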

Cheers,
Tom


On Mon, Oct 31, 2011 at 10:57 AM, Tom Hall <thattommyh...@gmail.com> wrote:
> My 2 new datanodes keep getting blacklisted; most of the results on
> Google suggest setting the following (full hdfs-site.xml sketch below):
>  <property>
>    <name>dfs.datanode.max.xcievers</name>
>    <value>4096</value>
>  </property>
>
>  <property>
>    <name>dfs.datanode.handler.count</name>
>    <value>10</value>
>  </property>
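>
> For context, this is roughly how the full file looks on the new nodes;
> I'm assuming conf/hdfs-site.xml on each datanode is the right place for
> these two properties, and the values are just what those posts suggested:
>
> <?xml version="1.0"?>
> <configuration>
>   <property>
>     <name>dfs.datanode.max.xcievers</name>
>     <value>4096</value>
>   </property>
>   <property>
>     <name>dfs.datanode.handler.count</name>
>     <value>10</value>
>   </property>
> </configuration>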
> Can anyone help?
>
>
> 2011-10-31 01:17:22,609 WARN
> org.apache.hadoop.hdfs.server.datanode.DataNode:
> DatanodeRegistration(192.168.35.117:50010,
> storageID=DS-1350419318-127.0.1.1-50010-1317048473534, infoPort=50075,
> ipcPort=50020):Failed to transfer blk_-5759512173538110526_48294550 to
> 192.168.35.118:50010 got java.net.SocketTimeoutException: 485000
> millis timeout while waiting for channel to be ready for write. ch :
> java.nio.channels.SocketChannel[connected local=/192.168.35.117:50368
> remote=/192.168.35.118:50010]
>        at org.apache.hadoop.net.SocketIOWithTimeout.waitForIO(SocketIOWithTimeout.java:246)
>        at org.apache.hadoop.net.SocketOutputStream.waitForWritable(SocketOutputStream.java:159)
>        at org.apache.hadoop.net.SocketOutputStream.transferToFully(SocketOutputStream.java:198)
>        at org.apache.hadoop.hdfs.server.datanode.BlockSender.sendChunks(BlockSender.java:350)
>        at org.apache.hadoop.hdfs.server.datanode.BlockSender.sendBlock(BlockSender.java:436)
>        at org.apache.hadoop.hdfs.server.datanode.DataNode$DataTransfer.run(DataNode.java:1370)
>        at java.lang.Thread.run(Thread.java:662)
>
> 2011-10-31 01:44:29,541 WARN
> org.apache.hadoop.hdfs.server.datanode.DataNode: IOException in
> BlockReceiver.lastNodeRun:
> java.io.IOException: Connection reset by peer
>        at sun.nio.ch.FileDispatcher.write0(Native Method)
>        at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:29)
>        at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:72)
>        at sun.nio.ch.IOUtil.write(IOUtil.java:43)
>        at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:334)
>        at org.apache.hadoop.net.SocketOutputStream$Writer.performIO(SocketOutputStream.java:55)
>        at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:142)
>        at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:146)
>        at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:107)
>        at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:65)
>        at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:123)
>        at java.io.DataOutputStream.flush(DataOutputStream.java:106)
>        at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.lastDataNodeRun(BlockReceiver.java:793)
>        at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:870)
>        at java.lang.Thread.run(Thread.java:662)
> 2011-10-31 01:44:29,541 WARN
> org.apache.hadoop.hdfs.server.datanode.DataNode: checkDiskError:
> exception:
> java.io.IOException: Connection reset by peer
>        at sun.nio.ch.FileDispatcher.write0(Native Method)
>        at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:29)
>        at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:72)
>        at sun.nio.ch.IOUtil.write(IOUtil.java:43)
>        at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:334)
>        at org.apache.hadoop.net.SocketOutputStream$Writer.performIO(SocketOutputStream.java:55)
>        at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:142)
>        at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:146)
>        at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:107)
>        at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:65)
>        at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:123)
>        at java.io.DataOutputStream.flush(DataOutputStream.java:106)
>        at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.lastDataNodeRun(BlockReceiver.java:793)
>        at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:870)
>        at java.lang.Thread.run(Thread.java:662)
>
> java.io.EOFException: while trying to read 65557 bytes
>        at org.apache.hadoop.hdfs.server.datanode.BlockReceiver.readToBuf(BlockReceiver.java:270)
>        at org.apache.hadoop.hdfs.server.datanode.BlockReceiver.readNextPacket(BlockReceiver.java:314)
>        at org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:378)
>        at org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:534)
>        at org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:417)
>        at org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:122)
>
