This is an automated email from the ASF dual-hosted git repository.

kunalkapoor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git


The following commit(s) were added to refs/heads/master by this push:
     new d6faf29  [CARBONDATA-4004] [CARBONDATA-4012] Issue with select after update command
d6faf29 is described below

commit d6faf29d819471df5d5e5c594a0b69fcf53d3725
Author: akkio-97 <[email protected]>
AuthorDate: Tue Oct 6 00:10:43 2020 +0530

    [CARBONDATA-4004] [CARBONDATA-4012] Issue with select after update command
    
    Why is this PR needed?
    During vector filling, because SliceStreamReader.java did not override the
    putAllByteArray() method, the implementation in its parent class
    CarbonColumnVectorImpl was invoked instead.
    As a result, in Presto, a select query on a string column did not return
    all rows, and a select query on multiple columns threw exceptions.
    Similarly, an implementation of getColumnVector() was missing, affecting
    reads of struct datatype columns.
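
    To illustrate (a minimal sketch using the real class names, but with
    hypothetical bodies): when a subclass does not override a method, Java
    dispatches the call to the parent's version, which here never reached
    the Presto block builder.

        // Minimal sketch of the fallback described above; the bodies are
        // illustrative, not the actual CarbonData implementations.
        class CarbonColumnVectorImpl {
          public void putAllByteArray(byte[] data, int offset, int length) {
            // Generic fallback: fills only the generic vector state, so
            // nothing is appended to the Presto block builder.
          }
        }

        class SliceStreamReader extends CarbonColumnVectorImpl {
          // Before this fix there was no putAllByteArray() override here,
          // so calls fell through to CarbonColumnVectorImpl above and
          // string rows were never materialized in Presto.
        }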
    
    What changes were proposed in this PR?
    Implemented the missing methods.
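
    The new putAllByteArray() override walks the per-row offset and length
    arrays and copies each non-empty value into the Presto builder. A
    commented sketch of the method added below (the zero-offset-means-empty
    convention is an assumption inferred from the check in the code):

        @Override
        public void putAllByteArray(byte[] data, int offset, int length) {
          // Per-row offsets and lengths into the backing byte array.
          int[] lengths = getLengths();
          int[] offsets = getOffsets();
          for (int i = 0; i < lengths.length; i++) {
            // Assumption: an offset of 0 marks a row without data (e.g. a
            // null), so only rows with real data are copied into the
            // Presto builder via putByteArray().
            if (offsets[i] != 0) {
              putByteArray(i, offsets[i], lengths[i], data);
            }
          }
        }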
    
    This closes #3967
---
 .../datastore/page/encoding/compress/DirectCompressCodec.java |  5 +++++
 .../ColumnarVectorWrapperDirectWithDeleteDelta.java           |  4 ++++
 docs/prestosql-guide.md                                       |  2 ++
 .../apache/carbondata/presto/readers/SliceStreamReader.java   | 11 +++++++++++
 .../apache/carbondata/presto/readers/SliceStreamReader.java   | 11 +++++++++++
 5 files changed, 33 insertions(+)

diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/compress/DirectCompressCodec.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/compress/DirectCompressCodec.java
index fbf1d73..9cd12b3 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/compress/DirectCompressCodec.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/compress/DirectCompressCodec.java
@@ -277,6 +277,11 @@ public class DirectCompressCodec implements ColumnPageCodec {
         vector = ColumnarVectorWrapperDirectFactory
             .getDirectVectorWrapperFactory(vectorInfo, parentVector, vectorInfo.invertedIndex,
                 nullBits, vectorInfo.deletedRows, true, false);
+        // In case of update, there will be two wrappers enclosing the columnVector
+        if (vector.getColumnVector() != null
+            && vector.getColumnVector().getColumnVector() != null) {
+          vector = vector.getColumnVector();
+        }
         fillVectorBasedOnType(pageData, vector, vectorDataType, pageDataType, pageSize,
             vectorInfo, nullBits);
       } else {
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/ColumnarVectorWrapperDirectWithDeleteDelta.java b/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/ColumnarVectorWrapperDirectWithDeleteDelta.java
index b3111c8..23aa19d 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/ColumnarVectorWrapperDirectWithDeleteDelta.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/ColumnarVectorWrapperDirectWithDeleteDelta.java
@@ -242,4 +242,8 @@ class ColumnarVectorWrapperDirectWithDeleteDelta extends AbstractCarbonColumnarV
       columnVector.putArray(counter++, offset, length);
     }
   }
+
+  public CarbonColumnVector getColumnVector() {
+    return this.columnVector;
+  }
 }
diff --git a/docs/prestosql-guide.md b/docs/prestosql-guide.md
index 5fc83d8..617d995 100644
--- a/docs/prestosql-guide.md
+++ b/docs/prestosql-guide.md
@@ -300,6 +300,8 @@ carbondata files.
 
 ### Supported features of presto carbon
 Presto carbon only supports reading the carbon table which is written by spark carbon or carbon SDK.
+Regarding complex datatypes: currently only reading of Array and Struct datatypes is supported,
+while the Map datatype is not yet supported.
 During reading, it supports the non-distributed index like block index and bloom index.
 It doesn't support Materialized View as it needs query plan to be changed and presto does not allow it.
 Also, Presto carbon supports streaming segment read from streaming table created by spark.
diff --git a/integration/presto/src/main/prestodb/org/apache/carbondata/presto/readers/SliceStreamReader.java b/integration/presto/src/main/prestodb/org/apache/carbondata/presto/readers/SliceStreamReader.java
index 7cadb38..f46e483 100644
--- a/integration/presto/src/main/prestodb/org/apache/carbondata/presto/readers/SliceStreamReader.java
+++ b/integration/presto/src/main/prestodb/org/apache/carbondata/presto/readers/SliceStreamReader.java
@@ -125,6 +125,17 @@ public class SliceStreamReader extends CarbonColumnVectorImpl implements PrestoV
   }
 
   @Override
+  public void putAllByteArray(byte[] data, int offset, int length) {
+    int[] lengths = getLengths();
+    int[] offsets = getOffsets();
+    for (int i = 0; i < lengths.length; i++) {
+      if (offsets[i] != 0) {
+        putByteArray(i, offsets[i], lengths[i], data);
+      }
+    }
+  }
+
+  @Override
   public void putNull(int rowId) {
     if (dictionaryBlock == null) {
       builder.appendNull();
diff --git a/integration/presto/src/main/prestosql/org/apache/carbondata/presto/readers/SliceStreamReader.java b/integration/presto/src/main/prestosql/org/apache/carbondata/presto/readers/SliceStreamReader.java
index a2d3319..d40fa6f 100644
--- a/integration/presto/src/main/prestosql/org/apache/carbondata/presto/readers/SliceStreamReader.java
+++ b/integration/presto/src/main/prestosql/org/apache/carbondata/presto/readers/SliceStreamReader.java
@@ -125,6 +125,17 @@ public class SliceStreamReader extends CarbonColumnVectorImpl implements PrestoV
   }
 
   @Override
+  public void putAllByteArray(byte[] data, int offset, int length) {
+    int[] lengths = getLengths();
+    int[] offsets = getOffsets();
+    for (int i = 0; i < lengths.length; i++) {
+      if (offsets[i] != 0) {
+        putByteArray(i, offsets[i], lengths[i], data);
+      }
+    }
+  }
+
+  @Override
   public void putNull(int rowId) {
     if (dictionaryBlock == null) {
       builder.appendNull();
