Author: brandonwilliams
Date: Fri Oct 14 20:27:43 2011
New Revision: 1183489

URL: http://svn.apache.org/viewvc?rev=1183489&view=rev
Log:
Make CFIF use rpc_endpoint prior to trying endpoint.
Patch by Eldon Stegall and Scott Fines, reviewed by brandonwilliams for
CASSANDRA-3214

Modified:
    cassandra/branches/cassandra-0.8/CHANGES.txt
    
cassandra/branches/cassandra-0.8/src/java/org/apache/cassandra/hadoop/ColumnFamilyInputFormat.java

Modified: cassandra/branches/cassandra-0.8/CHANGES.txt
URL: http://svn.apache.org/viewvc/cassandra/branches/cassandra-0.8/CHANGES.txt?rev=1183489&r1=1183488&r2=1183489&view=diff
==============================================================================
--- cassandra/branches/cassandra-0.8/CHANGES.txt (original)
+++ cassandra/branches/cassandra-0.8/CHANGES.txt Fri Oct 14 20:27:43 2011
@@ -10,6 +10,8 @@
  * Display CLI version string on startup (CASSANDRA-3196)
  * Only count compaction as active (for throttling) when they have
    successfully acquired the compaction lock (CASSANDRA-3344)
+ * (Hadoop) make CFIF try rpc_address or fallback to listen_address
+   (CASSANDRA-3214)
 
 
 0.8.7

Modified: cassandra/branches/cassandra-0.8/src/java/org/apache/cassandra/hadoop/ColumnFamilyInputFormat.java
URL: http://svn.apache.org/viewvc/cassandra/branches/cassandra-0.8/src/java/org/apache/cassandra/hadoop/ColumnFamilyInputFormat.java?rev=1183489&r1=1183488&r2=1183489&view=diff
==============================================================================
--- cassandra/branches/cassandra-0.8/src/java/org/apache/cassandra/hadoop/ColumnFamilyInputFormat.java (original)
+++ cassandra/branches/cassandra-0.8/src/java/org/apache/cassandra/hadoop/ColumnFamilyInputFormat.java Fri Oct 14 20:27:43 2011
@@ -188,13 +188,17 @@ public class ColumnFamilyInputFormat ext
         {
             ArrayList<InputSplit> splits = new ArrayList<InputSplit>();
             List<String> tokens = getSubSplits(keyspace, cfName, range, conf);
-
+            assert range.rpc_endpoints.size() == range.endpoints.size() : "rpc_endpoints size must match endpoints size";
             // turn the sub-ranges into InputSplits
             String[] endpoints = range.endpoints.toArray(new String[range.endpoints.size()]);
             // hadoop needs hostname, not ip
-            for (int i = 0; i < endpoints.length; i++)
+            int endpointIndex = 0;
+            for (String endpoint: range.rpc_endpoints)
             {
-                endpoints[i] = InetAddress.getByName(endpoints[i]).getHostName();
+                String endpoint_address = endpoint;
+                       if(endpoint_address == null || endpoint_address == "0.0.0.0")
+                               endpoint_address = range.endpoints.get(endpointIndex);
+                       endpoints[endpointIndex++] = InetAddress.getByName(endpoint_address).getHostName();
             }
 
             for (int i = 1; i < tokens.size(); i++)
@@ -210,7 +214,7 @@ public class ColumnFamilyInputFormat ext
     private List<String> getSubSplits(String keyspace, String cfName, TokenRange range, Configuration conf) throws IOException
     {
         int splitsize = ConfigHelper.getInputSplitSize(conf);
-        for (String host : range.endpoints)
+        for (String host : range.rpc_endpoints)
         {
             try
             {


Reply via email to