dttlgotv opened a new issue #3482: org.apache.hadoop.ipc.RemoteException: 
Server IPC version 9 cannot communicate with client version 3  
URL: https://github.com/apache/incubator-heron/issues/3482
 
 
   My simple HDFS code is:
   private String hdfsuri = "hdfs://localhost:9000";
       FilesystemSink(String name) {
         this.name = name;
       }
   
       /**
        * The setup function is called before the sink is used. Any complex
        * instantiation logic for the sink should go here.
        */
       public void setup(Context context) {
           // ====== Init HDFS File System Object
           Configuration conf = new Configuration();
     // Set FileSystem URI
           conf.set("fs.defaultFS", hdfsuri);
     // Because of Maven
           conf.set("fs.hdfs.impl", 
org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
           conf.set("fs.file.impl", 
org.apache.hadoop.fs.LocalFileSystem.class.getName());
     // Set HADOOP user
           System.setProperty("HADOOP_USER_NAME", "hdfs");
           System.setProperty("hadoop.home.dir", "/");
     //Get the filesystem - HDFS
           try{
             fs = FileSystem.get(URI.create(hdfsuri), conf);
           }catch (Exception e){
             e.printStackTrace();
           }
         }
   
   
   I wrote a simple topology that uses HDFS, but setting up HDFS fails with the error below:
   [2020-03-09 19:07:45 +0800] [STDERR] stderr: 
org.apache.hadoop.ipc.RemoteException: Server IPC version 9 cannot communicate 
with client version 3  
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
org.apache.hadoop.ipc.Client.call(Client.java:740)  
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:220)  
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
com.sun.proxy.$Proxy0.getProtocolVersion(Unknown Source)  
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
org.apache.hadoop.ipc.RPC.getProxy(RPC.java:359)  
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
org.apache.hadoop.hdfs.DFSClient.createRPCNamenode(DFSClient.java:106)  
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:207)  
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:170)  
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:82)
  
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:1378)  
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:66)  
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:1390)  
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
org.apache.hadoop.fs.FileSystem.get(FileSystem.java:196)  
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
org.apache.heron.examples.streamlet.KafkaHdfsTopology$FilesystemSink.setup(KafkaHdfsTopology.java:215)
  
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
org.apache.heron.streamlet.impl.sinks.ComplexSink.prepare(ComplexSink.java:65)  
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
org.apache.heron.instance.bolt.BoltInstance.init(BoltInstance.java:209)  
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
org.apache.heron.instance.Slave.startInstanceIfNeeded(Slave.java:243)  
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
org.apache.heron.instance.Slave.handleNewAssignment(Slave.java:194)  
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
org.apache.heron.instance.Slave.handleNewPhysicalPlan(Slave.java:384)  
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
org.apache.heron.instance.Slave.access$300(Slave.java:55)  
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
org.apache.heron.instance.Slave$1.run(Slave.java:124)  
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
org.apache.heron.common.basics.WakeableLooper.executeTasksOnWakeup(WakeableLooper.java:191)
  
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
org.apache.heron.common.basics.WakeableLooper.runOnce(WakeableLooper.java:110)  
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
org.apache.heron.common.basics.WakeableLooper.loop(WakeableLooper.java:100)  
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
org.apache.heron.instance.Slave.run(Slave.java:201)  
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) 
 
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) 
 
   [2020-03-09 19:07:45 +0800] [STDERR] stderr:         at 
java.lang.Thread.run(Thread.java:748)  
   [2020-03-09 19:07:45 +0800] [信息] org.apache.heron.instance.Slave: Instance 
is started for non-stateful topology  
   [2020-03-09 19:07:49 +0800] [信息] 
org.apache.heron.common.network.HeronClient: Connecting to endpoint: 
/127.0.0.1:51352  
   [2020-03-09 19:07:49 +0800] [信息] 
org.apache.heron.network.MetricsManagerClient: Connected to Metrics Manager. 
Ready to send register request  
   [2020-03-09 19:07:49 +0800] [信息] 
org.apache.heron.network.MetricsManagerClient: We registered ourselves to the 
Metrics Manager  
   
   
   

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

Reply via email to