[ https://issues.apache.org/jira/browse/DRILL-5775?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16497401#comment-16497401 ]

ASF GitHub Bot commented on DRILL-5775:
---------------------------------------

ilooner closed pull request #941: DRILL-5775: Expand * into list of column/column family for MapR-DB b…
URL: https://github.com/apache/drill/pull/941
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:


diff --git a/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/binary/BinaryTableGroupScan.java b/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/binary/BinaryTableGroupScan.java
index 282b84852f..9b64f86c72 100644
--- a/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/binary/BinaryTableGroupScan.java
+++ b/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/binary/BinaryTableGroupScan.java
@@ -35,6 +35,7 @@
 import org.apache.drill.exec.store.dfs.FileSystemPlugin;
 import org.apache.drill.exec.store.hbase.DrillHBaseConstants;
 import org.apache.drill.exec.store.hbase.HBaseScanSpec;
+import org.apache.drill.exec.store.hbase.HBaseUtils;
 import org.apache.drill.exec.store.mapr.db.MapRDBFormatPlugin;
 import org.apache.drill.exec.store.mapr.db.MapRDBFormatPluginConfig;
 import org.apache.drill.exec.store.mapr.db.MapRDBGroupScan;
@@ -113,8 +114,7 @@ private BinaryTableGroupScan(BinaryTableGroupScan that) {
   @Override
   public GroupScan clone(List<SchemaPath> columns) {
     BinaryTableGroupScan newScan = new BinaryTableGroupScan(this);
-    newScan.columns = columns;
-    newScan.verifyColumns();
+    newScan.columns = HBaseUtils.verifyColumnsAndConvertStar(columns, hTableDesc);
     return newScan;
   }
 
@@ -145,20 +145,7 @@ private void init() {
     } catch (Exception e) {
      throw new DrillRuntimeException("Error getting region info for table: " + hbaseScanSpec.getTableName(), e);
     }
-    verifyColumns();
-  }
-
-  private void verifyColumns() {
-    /*
-    if (columns != null) {
-      for (SchemaPath column : columns) {
-        if (!(column.equals(ROW_KEY_PATH) || hTableDesc.hasFamily(HBaseUtils.getBytes(column.getRootSegment().getPath())))) {
-          DrillRuntimeException.format("The column family '%s' does not exist in HBase table: %s .",
-              column.getRootSegment().getPath(), hTableDesc.getNameAsString());
-        }
-      }
-    }
-    */
+    columns = HBaseUtils.verifyColumnsAndConvertStar(columns, hTableDesc);
   }
 
   protected MapRDBSubScanSpec getSubScanSpec(TabletFragmentInfo tfi) {
diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseGroupScan.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseGroupScan.java
index 1ee1da812b..fb0a3e604f 100644
--- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseGroupScan.java
+++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseGroupScan.java
@@ -40,7 +40,6 @@
 import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
 import org.apache.drill.exec.store.StoragePluginRegistry;
 import org.apache.drill.exec.store.hbase.HBaseSubScan.HBaseSubScanSpec;
-import org.apache.drill.exec.util.Utilities;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
@@ -50,7 +49,6 @@
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.RegionLocator;
-import org.apache.hadoop.hbase.util.Bytes;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -58,7 +56,6 @@
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.Iterator;
-import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -144,8 +141,7 @@ private HBaseGroupScan(HBaseGroupScan that) {
   @Override
   public GroupScan clone(List<SchemaPath> columns) {
     HBaseGroupScan newScan = new HBaseGroupScan(this);
-    newScan.columns = columns == null ? ALL_COLUMNS : columns;;
-    newScan.verifyColumnsAndConvertStar();
+    newScan.columns = HBaseUtils.verifyColumnsAndConvertStar(columns == null ? ALL_COLUMNS : columns, hTableDesc);
     return newScan;
   }
 
@@ -177,37 +173,7 @@ private void init() {
     } catch (IOException e) {
      throw new DrillRuntimeException("Error getting region info for table: " + hbaseScanSpec.getTableName(), e);
     }
-    verifyColumnsAndConvertStar();
-  }
-
-  private void verifyColumnsAndConvertStar() {
-    boolean hasStarCol = false;
-    LinkedHashSet<SchemaPath> requestedColumns = new LinkedHashSet<>();
-
-    for (SchemaPath column : columns) {
-      // convert * into [row_key, cf1, cf2, ..., cf_n].
-      if (column.equals(Utilities.STAR_COLUMN)) {
-        hasStarCol = true;
-        Set<byte[]> families = hTableDesc.getFamiliesKeys();
-        requestedColumns.add(ROW_KEY_PATH);
-        for (byte[] family : families) {
-          SchemaPath colFamily = SchemaPath.getSimplePath(Bytes.toString(family));
-          requestedColumns.add(colFamily);
-        }
-      } else {
-        if (!(column.equals(ROW_KEY_PATH) ||
-            hTableDesc.hasFamily(HBaseUtils.getBytes(column.getRootSegment().getPath())))) {
-          DrillRuntimeException.format("The column family '%s' does not exist in HBase table: %s .",
-              column.getRootSegment().getPath(), hTableDesc.getNameAsString());
-        }
-        requestedColumns.add(column);
-      }
-    }
-
-    // since star column has been converted, reset this.cloumns.
-    if (hasStarCol) {
-      this.columns = new ArrayList<>(requestedColumns);
-    }
+    columns = HBaseUtils.verifyColumnsAndConvertStar(columns, hTableDesc);
   }
 
   @Override
diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseRecordReader.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseRecordReader.java
index d6c02b5aaf..fe1fb770d4 100644
--- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseRecordReader.java
+++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseRecordReader.java
@@ -126,7 +126,7 @@ public HBaseRecordReader(Connection connection, HBaseSubScan.HBaseSubScanSpec su
            HBaseUtils.andFilterAtIndex(hbaseScan.getFilter(), HBaseUtils.LAST_FILTER, new FirstKeyOnlyFilter()));
       }
     } else {
-      throw new IllegalArgumentException("HBaseRecordReader does not allow column *. Column * should have been converted to list of <row_key, column family1, column family2, ..., column family_n");
+      throw new IllegalArgumentException("HBaseRecordReader does not allow column *. Column * should have been converted to list of <row_key, column family1, column family2, ..., column family_n>");
 //      rowKeyOnly = false;
 //      transformed.add(ROW_KEY_PATH);
     }
diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseUtils.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseUtils.java
index 0d804cde72..d7e48ad586 100644
--- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseUtils.java
+++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseUtils.java
@@ -19,10 +19,16 @@
 
 import java.io.IOException;
 import java.nio.charset.CharacterCodingException;
+import java.util.ArrayList;
+import java.util.LinkedHashSet;
 import java.util.List;
+import java.util.Set;
 
 import org.apache.drill.common.exceptions.DrillRuntimeException;
+import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.exec.util.Utilities;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.ParseFilter;
@@ -139,4 +145,46 @@ public static Filter orFilterAtIndex(Filter currentFilter, int index, Filter new
     return Bytes.compareTo(left, right) < 0 ? left : right;
   }
 
+
+  /**
+   * Verify column family in schema path exists in the hbase table, or schema path refers to a row_key column.
+   * Convert * column into list of column family defined in HTableDescriptor. Return converted list if
+   * star column conversion happens.
+   *
+   * @param columns
+   * @param hTableDesc
+   * @return
+   * @throws DrillRuntimeException if column family does not exist, or is not row_key column.
+   */
+  public static List<SchemaPath> verifyColumnsAndConvertStar(List<SchemaPath> columns, HTableDescriptor hTableDesc) {
+    boolean hasStarCol = false;
+    LinkedHashSet<SchemaPath> requestedColumns = new LinkedHashSet<>();
+
+    for (SchemaPath column : columns) {
+      // convert * into [row_key, cf1, cf2, ..., cf_n].
+      if (column.equals(Utilities.STAR_COLUMN)) {
+        hasStarCol = true;
+        Set<byte[]> families = hTableDesc.getFamiliesKeys();
+        requestedColumns.add(DrillHBaseConstants.ROW_KEY_PATH);
+        for (byte[] family : families) {
+          SchemaPath colFamily = SchemaPath.getSimplePath(Bytes.toString(family));
+          requestedColumns.add(colFamily);
+        }
+      } else {
+        if (!(column.equals(DrillHBaseConstants.ROW_KEY_PATH) ||
+            hTableDesc.hasFamily(getBytes(column.getRootSegment().getPath())))) {
+          DrillRuntimeException.format("The column family '%s' does not exist in HBase table: %s .",
+              column.getRootSegment().getPath(), hTableDesc.getNameAsString());
+        }
+        requestedColumns.add(column);
+      }
+    }
+
+    // since star column has been converted, reset.
+    if (hasStarCol) {
+      return new ArrayList<>(requestedColumns);
+    } else {
+      return columns;
+    }
+  }
 }
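
For readers who want the extracted logic outside diff form, the following is a minimal, self-contained sketch of the same star-expansion idea. Plain Strings stand in for Drill's SchemaPath, and a Set<String> stands in for the column families held by HBase's HTableDescriptor; these substitutions (and the class name StarColumnExpansion) are illustrative assumptions, not the actual Drill types.

import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

// Minimal sketch of the star-expansion logic in the patch above. Strings
// stand in for SchemaPath and Set<String> for HTableDescriptor's families;
// both are illustrative assumptions, not Drill's actual types.
public class StarColumnExpansion {

  static final String STAR = "*";
  static final String ROW_KEY = "row_key";

  static List<String> verifyColumnsAndConvertStar(
      List<String> columns, Set<String> families, String tableName) {
    boolean hasStarCol = false;
    // LinkedHashSet preserves insertion order and drops duplicates, as in
    // the patched HBaseUtils method.
    LinkedHashSet<String> requested = new LinkedHashSet<>();
    for (String column : columns) {
      if (STAR.equals(column)) {
        // Expand * into [row_key, cf1, cf2, ..., cf_n].
        hasStarCol = true;
        requested.add(ROW_KEY);
        requested.addAll(families);
      } else {
        // A non-star column must be the row key or an existing family
        // (the real code reports this through DrillRuntimeException).
        if (!ROW_KEY.equals(column) && !families.contains(column)) {
          throw new IllegalArgumentException(String.format(
              "The column family '%s' does not exist in HBase table: %s.",
              column, tableName));
        }
        requested.add(column);
      }
    }
    // Only materialize a new list when * was actually converted.
    return hasStarCol ? new ArrayList<>(requested) : columns;
  }

  public static void main(String[] args) {
    Set<String> families = new LinkedHashSet<>(List.of("cf1", "cf2"));
    // Prints [row_key, cf1, cf2]
    System.out.println(
        verifyColumnsAndConvertStar(List.of("*"), families, "t"));
  }
}

The LinkedHashSet mirrors the patched HBaseUtils method: the row key comes first, family order is preserved, and duplicates are dropped when * is combined with explicit columns.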


 



> Select * query on a maprdb binary table fails
> ---------------------------------------------
>
>                 Key: DRILL-5775
>                 URL: https://issues.apache.org/jira/browse/DRILL-5775
>             Project: Apache Drill
>          Issue Type: Bug
>          Components: Storage - MapRDB
>    Affects Versions: 1.11.0
>            Reporter: Prasad Nagaraj Subramanya
>            Assignee: Vitalii Diravka
>            Priority: Major
>              Labels: ready-to-commit
>             Fix For: 1.12.0
>
>
> Select * query on a maprdb binary table fails with the below exception
> Failed with exception
> java.sql.SQLException: SYSTEM ERROR: IllegalArgumentException: 
> HBaseRecordReader does not allow column *. Column * should have been 
> converted to list of <row_key, column family1, column family2, ..., column 
> family_n
> Commit ID - fde0a1df1734e0742b49aabdd28b02202ee2b044
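
For context, a hedged sketch of how the reported failure could be reproduced through Drill's JDBC driver is shown below. The connection URL targets an embedded Drillbit, and the table path /tables/bin_table and the maprdb plugin name are assumptions for illustration; any MapR-DB binary table would exercise the same code path.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Hypothetical repro of DRILL-5775; table path and plugin name are
// placeholders.
public class SelectStarRepro {
  public static void main(String[] args) throws Exception {
    try (Connection conn = DriverManager.getConnection("jdbc:drill:zk=local");
         Statement stmt = conn.createStatement();
         // Before the fix, this SELECT * surfaced:
         // SYSTEM ERROR: IllegalArgumentException: HBaseRecordReader does
         // not allow column *. ...
         ResultSet rs = stmt.executeQuery(
             "SELECT * FROM maprdb.`/tables/bin_table`")) {
      while (rs.next()) {
        System.out.println(rs.getString(1));
      }
    }
  }
}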


