[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-13 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r469863077



##
File path: 
core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/FillVector.java
##
@@ -0,0 +1,347 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.page.encoding;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.BitSet;
+
+import org.apache.carbondata.core.metadata.datatype.DataType;
+import org.apache.carbondata.core.metadata.datatype.DataTypes;
+import org.apache.carbondata.core.metadata.datatype.DecimalConverterFactory;
+import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector;
+import org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo;
+import 
org.apache.carbondata.core.scan.result.vector.impl.CarbonColumnVectorImpl;
+import org.apache.carbondata.core.util.ByteUtil;
+
+public class FillVector {
+  private byte[] pageData;
+  private float floatFactor = 0;
+  private double factor = 0;
+  private ColumnVectorInfo vectorInfo;
+  private BitSet nullBits;
+
+  public FillVector(byte[] pageData, ColumnVectorInfo vectorInfo, BitSet 
nullBits) {
+this.pageData = pageData;
+this.vectorInfo = vectorInfo;
+this.nullBits = nullBits;
+  }
+
+  public void setFactor(double factor) {
+this.factor = factor;
+  }
+
+  public void setFloatFactor(float floatFactor) {
+this.floatFactor = floatFactor;
+  }
+
+  public void basedOnType(CarbonColumnVector vector, DataType vectorDataType, 
int pageSize,
+  DataType pageDataType) {
+if (vectorInfo.vector.getColumnVector() != null && 
((CarbonColumnVectorImpl) vectorInfo.vector
+.getColumnVector()).isComplex()) {
+  fillComplexType(vector.getColumnVector(), pageDataType);
+} else {
+  fillPrimitiveType(vector, vectorDataType, pageSize, pageDataType);
+  vector.setIndex(0);
+}
+  }
+
+  private void fillComplexType(CarbonColumnVector vector, DataType 
pageDataType) {
+CarbonColumnVectorImpl vectorImpl = (CarbonColumnVectorImpl) vector;
+if (vector != null && vector.getChildrenVector() != null) {
+  ArrayList childElements = ((CarbonColumnVectorImpl) 
vector).getChildrenElements();
+  for (int i = 0; i < childElements.size(); i++) {
+int count = childElements.get(i);
+typeComplexObject(vectorImpl.getChildrenVector().get(0), count, 
pageDataType);
+vector.putArrayObject();
+  }
+  vectorImpl.getChildrenVector().get(0).setIndex(0);
+}
+  }
+
+  private void fillPrimitiveType(CarbonColumnVector vector, DataType 
vectorDataType, int pageSize,
+  DataType pageDataType) {
+// offset which denotes the start index for pageData
+int pageIndex = vector.getIndex();
+int rowId = 0;
+
+// Filling into vector is done based on page data type
+
+if (vectorDataType == DataTypes.FLOAT && floatFactor != 0.0) {
+  if (pageDataType == DataTypes.BOOLEAN || pageDataType == DataTypes.BYTE) 
{
+for (int i = 0; i < pageSize; i++) {
+  vector.putFloat(i, (pageData[pageIndex++] / floatFactor));
+}
+  } else if (pageDataType == DataTypes.SHORT) {
+int size = pageSize * DataTypes.SHORT.getSizeInBytes();
+for (int i = 0; i < size; i += DataTypes.SHORT.getSizeInBytes()) {
+  vector.putFloat(rowId++,
+  (ByteUtil.toShortLittleEndian(pageData, pageIndex + i) / 
floatFactor));
+}
+pageIndex += size;
+  } else if (pageDataType == DataTypes.SHORT_INT) {
+int size = pageSize * DataTypes.SHORT_INT.getSizeInBytes();
+for (int i = 0; i < size; i += DataTypes.SHORT_INT.getSizeInBytes()) {
+  vector.putFloat(rowId++, (ByteUtil.valueOf3Bytes(pageData, pageIndex 
+ i) / floatFactor));
+}
+pageIndex += size;
+  } else if (pageDataType == DataTypes.INT) {
+int size = pageSize * DataTypes.INT.getSizeInBytes();
+for (int i = 0; i < size; i += DataTypes.INT.getSizeInBytes()) {
+  vector.putFloat(rowId++,
+  (ByteUtil.toIntLittleEndian(pageData, pageIndex + i) / 
floatFactor));
+}
+pageIndex += size;

[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-13 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r469858716



##
File path: 
core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/FillVector.java
##
@@ -0,0 +1,347 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.page.encoding;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.BitSet;
+
+import org.apache.carbondata.core.metadata.datatype.DataType;
+import org.apache.carbondata.core.metadata.datatype.DataTypes;
+import org.apache.carbondata.core.metadata.datatype.DecimalConverterFactory;
+import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector;
+import org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo;
+import 
org.apache.carbondata.core.scan.result.vector.impl.CarbonColumnVectorImpl;
+import org.apache.carbondata.core.util.ByteUtil;
+
+public class FillVector {
+  private byte[] pageData;
+  private float floatFactor = 0;
+  private double factor = 0;
+  private ColumnVectorInfo vectorInfo;
+  private BitSet nullBits;
+
+  public FillVector(byte[] pageData, ColumnVectorInfo vectorInfo, BitSet 
nullBits) {
+this.pageData = pageData;
+this.vectorInfo = vectorInfo;
+this.nullBits = nullBits;
+  }
+
+  public void setFactor(double factor) {
+this.factor = factor;
+  }
+
+  public void setFloatFactor(float floatFactor) {
+this.floatFactor = floatFactor;
+  }
+
+  public void basedOnType(CarbonColumnVector vector, DataType vectorDataType, 
int pageSize,
+  DataType pageDataType) {
+if (vectorInfo.vector.getColumnVector() != null && 
((CarbonColumnVectorImpl) vectorInfo.vector
+.getColumnVector()).isComplex()) {
+  fillComplexType(vector.getColumnVector(), pageDataType);
+} else {
+  fillPrimitiveType(vector, vectorDataType, pageSize, pageDataType);
+  vector.setIndex(0);
+}
+  }
+
+  private void fillComplexType(CarbonColumnVector vector, DataType 
pageDataType) {
+CarbonColumnVectorImpl vectorImpl = (CarbonColumnVectorImpl) vector;
+if (vector != null && vector.getChildrenVector() != null) {
+  ArrayList childElements = ((CarbonColumnVectorImpl) 
vector).getChildrenElements();
+  for (int i = 0; i < childElements.size(); i++) {
+int count = childElements.get(i);
+typeComplexObject(vectorImpl.getChildrenVector().get(0), count, 
pageDataType);
+vector.putArrayObject();
+  }
+  vectorImpl.getChildrenVector().get(0).setIndex(0);
+}
+  }
+
+  private void fillPrimitiveType(CarbonColumnVector vector, DataType 
vectorDataType, int pageSize,
+  DataType pageDataType) {
+// offset which denotes the start index for pageData
+int pageIndex = vector.getIndex();
+int rowId = 0;
+
+// Filling into vector is done based on page data type
+
+if (vectorDataType == DataTypes.FLOAT && floatFactor != 0.0) {
+  if (pageDataType == DataTypes.BOOLEAN || pageDataType == DataTypes.BYTE) 
{
+for (int i = 0; i < pageSize; i++) {
+  vector.putFloat(i, (pageData[pageIndex++] / floatFactor));
+}
+  } else if (pageDataType == DataTypes.SHORT) {
+int size = pageSize * DataTypes.SHORT.getSizeInBytes();
+for (int i = 0; i < size; i += DataTypes.SHORT.getSizeInBytes()) {
+  vector.putFloat(rowId++,
+  (ByteUtil.toShortLittleEndian(pageData, pageIndex + i) / 
floatFactor));
+}
+pageIndex += size;
+  } else if (pageDataType == DataTypes.SHORT_INT) {
+int size = pageSize * DataTypes.SHORT_INT.getSizeInBytes();
+for (int i = 0; i < size; i += DataTypes.SHORT_INT.getSizeInBytes()) {
+  vector.putFloat(rowId++, (ByteUtil.valueOf3Bytes(pageData, pageIndex 
+ i) / floatFactor));
+}
+pageIndex += size;
+  } else if (pageDataType == DataTypes.INT) {
+int size = pageSize * DataTypes.INT.getSizeInBytes();
+for (int i = 0; i < size; i += DataTypes.INT.getSizeInBytes()) {
+  vector.putFloat(rowId++,
+  (ByteUtil.toIntLittleEndian(pageData, pageIndex + i) / 
floatFactor));
+}
+pageIndex += size;

[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-13 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r469858716



##
File path: 
core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/FillVector.java
##
@@ -0,0 +1,347 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.page.encoding;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.BitSet;
+
+import org.apache.carbondata.core.metadata.datatype.DataType;
+import org.apache.carbondata.core.metadata.datatype.DataTypes;
+import org.apache.carbondata.core.metadata.datatype.DecimalConverterFactory;
+import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector;
+import org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo;
+import 
org.apache.carbondata.core.scan.result.vector.impl.CarbonColumnVectorImpl;
+import org.apache.carbondata.core.util.ByteUtil;
+
+public class FillVector {
+  private byte[] pageData;
+  private float floatFactor = 0;
+  private double factor = 0;
+  private ColumnVectorInfo vectorInfo;
+  private BitSet nullBits;
+
+  public FillVector(byte[] pageData, ColumnVectorInfo vectorInfo, BitSet 
nullBits) {
+this.pageData = pageData;
+this.vectorInfo = vectorInfo;
+this.nullBits = nullBits;
+  }
+
+  public void setFactor(double factor) {
+this.factor = factor;
+  }
+
+  public void setFloatFactor(float floatFactor) {
+this.floatFactor = floatFactor;
+  }
+
+  public void basedOnType(CarbonColumnVector vector, DataType vectorDataType, 
int pageSize,
+  DataType pageDataType) {
+if (vectorInfo.vector.getColumnVector() != null && 
((CarbonColumnVectorImpl) vectorInfo.vector
+.getColumnVector()).isComplex()) {
+  fillComplexType(vector.getColumnVector(), pageDataType);
+} else {
+  fillPrimitiveType(vector, vectorDataType, pageSize, pageDataType);
+  vector.setIndex(0);
+}
+  }
+
+  private void fillComplexType(CarbonColumnVector vector, DataType 
pageDataType) {
+CarbonColumnVectorImpl vectorImpl = (CarbonColumnVectorImpl) vector;
+if (vector != null && vector.getChildrenVector() != null) {
+  ArrayList childElements = ((CarbonColumnVectorImpl) 
vector).getChildrenElements();
+  for (int i = 0; i < childElements.size(); i++) {
+int count = childElements.get(i);
+typeComplexObject(vectorImpl.getChildrenVector().get(0), count, 
pageDataType);
+vector.putArrayObject();
+  }
+  vectorImpl.getChildrenVector().get(0).setIndex(0);
+}
+  }
+
+  private void fillPrimitiveType(CarbonColumnVector vector, DataType 
vectorDataType, int pageSize,
+  DataType pageDataType) {
+// offset which denotes the start index for pageData
+int pageIndex = vector.getIndex();
+int rowId = 0;
+
+// Filling into vector is done based on page data type
+
+if (vectorDataType == DataTypes.FLOAT && floatFactor != 0.0) {
+  if (pageDataType == DataTypes.BOOLEAN || pageDataType == DataTypes.BYTE) 
{
+for (int i = 0; i < pageSize; i++) {
+  vector.putFloat(i, (pageData[pageIndex++] / floatFactor));
+}
+  } else if (pageDataType == DataTypes.SHORT) {
+int size = pageSize * DataTypes.SHORT.getSizeInBytes();
+for (int i = 0; i < size; i += DataTypes.SHORT.getSizeInBytes()) {
+  vector.putFloat(rowId++,
+  (ByteUtil.toShortLittleEndian(pageData, pageIndex + i) / 
floatFactor));
+}
+pageIndex += size;
+  } else if (pageDataType == DataTypes.SHORT_INT) {
+int size = pageSize * DataTypes.SHORT_INT.getSizeInBytes();
+for (int i = 0; i < size; i += DataTypes.SHORT_INT.getSizeInBytes()) {
+  vector.putFloat(rowId++, (ByteUtil.valueOf3Bytes(pageData, pageIndex 
+ i) / floatFactor));
+}
+pageIndex += size;
+  } else if (pageDataType == DataTypes.INT) {
+int size = pageSize * DataTypes.INT.getSizeInBytes();
+for (int i = 0; i < size; i += DataTypes.INT.getSizeInBytes()) {
+  vector.putFloat(rowId++,
+  (ByteUtil.toIntLittleEndian(pageData, pageIndex + i) / 
floatFactor));
+}
+pageIndex += size;

[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-13 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r469858716



##
File path: 
core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/FillVector.java
##
@@ -0,0 +1,347 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.page.encoding;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.BitSet;
+
+import org.apache.carbondata.core.metadata.datatype.DataType;
+import org.apache.carbondata.core.metadata.datatype.DataTypes;
+import org.apache.carbondata.core.metadata.datatype.DecimalConverterFactory;
+import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector;
+import org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo;
+import 
org.apache.carbondata.core.scan.result.vector.impl.CarbonColumnVectorImpl;
+import org.apache.carbondata.core.util.ByteUtil;
+
+public class FillVector {
+  private byte[] pageData;
+  private float floatFactor = 0;
+  private double factor = 0;
+  private ColumnVectorInfo vectorInfo;
+  private BitSet nullBits;
+
+  public FillVector(byte[] pageData, ColumnVectorInfo vectorInfo, BitSet 
nullBits) {
+this.pageData = pageData;
+this.vectorInfo = vectorInfo;
+this.nullBits = nullBits;
+  }
+
+  public void setFactor(double factor) {
+this.factor = factor;
+  }
+
+  public void setFloatFactor(float floatFactor) {
+this.floatFactor = floatFactor;
+  }
+
+  public void basedOnType(CarbonColumnVector vector, DataType vectorDataType, 
int pageSize,
+  DataType pageDataType) {
+if (vectorInfo.vector.getColumnVector() != null && 
((CarbonColumnVectorImpl) vectorInfo.vector
+.getColumnVector()).isComplex()) {
+  fillComplexType(vector.getColumnVector(), pageDataType);
+} else {
+  fillPrimitiveType(vector, vectorDataType, pageSize, pageDataType);
+  vector.setIndex(0);
+}
+  }
+
+  private void fillComplexType(CarbonColumnVector vector, DataType 
pageDataType) {
+CarbonColumnVectorImpl vectorImpl = (CarbonColumnVectorImpl) vector;
+if (vector != null && vector.getChildrenVector() != null) {
+  ArrayList childElements = ((CarbonColumnVectorImpl) 
vector).getChildrenElements();
+  for (int i = 0; i < childElements.size(); i++) {
+int count = childElements.get(i);
+typeComplexObject(vectorImpl.getChildrenVector().get(0), count, 
pageDataType);
+vector.putArrayObject();
+  }
+  vectorImpl.getChildrenVector().get(0).setIndex(0);
+}
+  }
+
+  private void fillPrimitiveType(CarbonColumnVector vector, DataType 
vectorDataType, int pageSize,
+  DataType pageDataType) {
+// offset which denotes the start index for pageData
+int pageIndex = vector.getIndex();
+int rowId = 0;
+
+// Filling into vector is done based on page data type
+
+if (vectorDataType == DataTypes.FLOAT && floatFactor != 0.0) {
+  if (pageDataType == DataTypes.BOOLEAN || pageDataType == DataTypes.BYTE) 
{
+for (int i = 0; i < pageSize; i++) {
+  vector.putFloat(i, (pageData[pageIndex++] / floatFactor));
+}
+  } else if (pageDataType == DataTypes.SHORT) {
+int size = pageSize * DataTypes.SHORT.getSizeInBytes();
+for (int i = 0; i < size; i += DataTypes.SHORT.getSizeInBytes()) {
+  vector.putFloat(rowId++,
+  (ByteUtil.toShortLittleEndian(pageData, pageIndex + i) / 
floatFactor));
+}
+pageIndex += size;
+  } else if (pageDataType == DataTypes.SHORT_INT) {
+int size = pageSize * DataTypes.SHORT_INT.getSizeInBytes();
+for (int i = 0; i < size; i += DataTypes.SHORT_INT.getSizeInBytes()) {
+  vector.putFloat(rowId++, (ByteUtil.valueOf3Bytes(pageData, pageIndex 
+ i) / floatFactor));
+}
+pageIndex += size;
+  } else if (pageDataType == DataTypes.INT) {
+int size = pageSize * DataTypes.INT.getSizeInBytes();
+for (int i = 0; i < size; i += DataTypes.INT.getSizeInBytes()) {
+  vector.putFloat(rowId++,
+  (ByteUtil.toIntLittleEndian(pageData, pageIndex + i) / 
floatFactor));
+}
+pageIndex += size;

[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-13 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r469858716



##
File path: 
core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/FillVector.java
##
@@ -0,0 +1,347 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.page.encoding;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.BitSet;
+
+import org.apache.carbondata.core.metadata.datatype.DataType;
+import org.apache.carbondata.core.metadata.datatype.DataTypes;
+import org.apache.carbondata.core.metadata.datatype.DecimalConverterFactory;
+import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector;
+import org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo;
+import 
org.apache.carbondata.core.scan.result.vector.impl.CarbonColumnVectorImpl;
+import org.apache.carbondata.core.util.ByteUtil;
+
+public class FillVector {
+  private byte[] pageData;
+  private float floatFactor = 0;
+  private double factor = 0;
+  private ColumnVectorInfo vectorInfo;
+  private BitSet nullBits;
+
+  public FillVector(byte[] pageData, ColumnVectorInfo vectorInfo, BitSet 
nullBits) {
+this.pageData = pageData;
+this.vectorInfo = vectorInfo;
+this.nullBits = nullBits;
+  }
+
+  public void setFactor(double factor) {
+this.factor = factor;
+  }
+
+  public void setFloatFactor(float floatFactor) {
+this.floatFactor = floatFactor;
+  }
+
+  public void basedOnType(CarbonColumnVector vector, DataType vectorDataType, 
int pageSize,
+  DataType pageDataType) {
+if (vectorInfo.vector.getColumnVector() != null && 
((CarbonColumnVectorImpl) vectorInfo.vector
+.getColumnVector()).isComplex()) {
+  fillComplexType(vector.getColumnVector(), pageDataType);
+} else {
+  fillPrimitiveType(vector, vectorDataType, pageSize, pageDataType);
+  vector.setIndex(0);
+}
+  }
+
+  private void fillComplexType(CarbonColumnVector vector, DataType 
pageDataType) {
+CarbonColumnVectorImpl vectorImpl = (CarbonColumnVectorImpl) vector;
+if (vector != null && vector.getChildrenVector() != null) {
+  ArrayList childElements = ((CarbonColumnVectorImpl) 
vector).getChildrenElements();
+  for (int i = 0; i < childElements.size(); i++) {
+int count = childElements.get(i);
+typeComplexObject(vectorImpl.getChildrenVector().get(0), count, 
pageDataType);
+vector.putArrayObject();
+  }
+  vectorImpl.getChildrenVector().get(0).setIndex(0);
+}
+  }
+
+  private void fillPrimitiveType(CarbonColumnVector vector, DataType 
vectorDataType, int pageSize,
+  DataType pageDataType) {
+// offset which denotes the start index for pageData
+int pageIndex = vector.getIndex();
+int rowId = 0;
+
+// Filling into vector is done based on page data type
+
+if (vectorDataType == DataTypes.FLOAT && floatFactor != 0.0) {
+  if (pageDataType == DataTypes.BOOLEAN || pageDataType == DataTypes.BYTE) 
{
+for (int i = 0; i < pageSize; i++) {
+  vector.putFloat(i, (pageData[pageIndex++] / floatFactor));
+}
+  } else if (pageDataType == DataTypes.SHORT) {
+int size = pageSize * DataTypes.SHORT.getSizeInBytes();
+for (int i = 0; i < size; i += DataTypes.SHORT.getSizeInBytes()) {
+  vector.putFloat(rowId++,
+  (ByteUtil.toShortLittleEndian(pageData, pageIndex + i) / 
floatFactor));
+}
+pageIndex += size;
+  } else if (pageDataType == DataTypes.SHORT_INT) {
+int size = pageSize * DataTypes.SHORT_INT.getSizeInBytes();
+for (int i = 0; i < size; i += DataTypes.SHORT_INT.getSizeInBytes()) {
+  vector.putFloat(rowId++, (ByteUtil.valueOf3Bytes(pageData, pageIndex 
+ i) / floatFactor));
+}
+pageIndex += size;
+  } else if (pageDataType == DataTypes.INT) {
+int size = pageSize * DataTypes.INT.getSizeInBytes();
+for (int i = 0; i < size; i += DataTypes.INT.getSizeInBytes()) {
+  vector.putFloat(rowId++,
+  (ByteUtil.toIntLittleEndian(pageData, pageIndex + i) / 
floatFactor));
+}
+pageIndex += size;

[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-12 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r469722186



##
File path: 
core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/FillVector.java
##
@@ -0,0 +1,347 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.page.encoding;

Review comment:
   please remove this class and handle this in the original class. It's not 
common code to keep in one place





This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org




[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-12 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r469704722



##
File path: 
core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/adaptive/AdaptiveIntegralCodec.java
##
@@ -23,6 +23,7 @@
 import java.util.BitSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Stack;

Review comment:
   You have missed handling the adaptive **delta** flows:
   AdaptiveDeltaIntegralCodec
   AdaptiveDeltaFloatingCodec
   





This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org




[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-12 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r469161474



##
File path: 
integration/presto/src/main/prestosql/org/apache/carbondata/presto/readers/ArrayStreamReader.java
##
@@ -0,0 +1,163 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.presto.readers;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import io.prestosql.spi.type.*;
+
+import org.apache.carbondata.core.metadata.datatype.DataType;
+import org.apache.carbondata.core.metadata.datatype.DataTypes;
+import org.apache.carbondata.core.metadata.datatype.StructField;
+import 
org.apache.carbondata.core.scan.result.vector.impl.CarbonColumnVectorImpl;
+
+import io.prestosql.spi.block.Block;
+import io.prestosql.spi.block.BlockBuilder;
+
+import org.apache.carbondata.presto.CarbonVectorBatch;
+
+/**
+ * Class to read the Array Stream
+ */
+
+public class ArrayStreamReader extends CarbonColumnVectorImpl implements 
PrestoVectorBlockBuilder {
+
+  protected int batchSize;
+
+  protected Type type;
+  protected BlockBuilder builder;
+  Block childBlock = null;
+  private int index = 0;
+
+  public ArrayStreamReader(int batchSize, DataType dataType, StructField 
field) {
+super(batchSize, dataType);
+this.batchSize = batchSize;
+this.type = getArrayOfType(field, dataType);
+ArrayList childrenList= new ArrayList<>();
+
childrenList.add(CarbonVectorBatch.createDirectStreamReader(this.batchSize, 
field.getDataType(), field));
+setChildrenVector(childrenList);
+this.builder = type.createBlockBuilder(null, batchSize);
+  }
+
+  public int getIndex() {
+return index;
+  }
+
+  public void setIndex(int index) {
+this.index = index;
+  }
+
+  public String getDataTypeName() {
+return "ARRAY";
+  }
+
+  Type getArrayOfType(StructField field, DataType dataType) {
+if (dataType == DataTypes.STRING) {
+  return new ArrayType(VarcharType.VARCHAR);
+} else if (dataType == DataTypes.BYTE) {
+  return new ArrayType(TinyintType.TINYINT);
+} else if (dataType == DataTypes.SHORT) {
+  return new ArrayType(SmallintType.SMALLINT);
+} else if (dataType == DataTypes.INT) {

Review comment:
   Also, VARCHAR is missing; please rebase the PR to handle binary as well





This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org




[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-11 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r469026786



##
File path: 
integration/presto/src/main/prestosql/org/apache/carbondata/presto/readers/ArrayStreamReader.java
##
@@ -0,0 +1,163 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.presto.readers;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import io.prestosql.spi.type.*;
+
+import org.apache.carbondata.core.metadata.datatype.DataType;
+import org.apache.carbondata.core.metadata.datatype.DataTypes;
+import org.apache.carbondata.core.metadata.datatype.StructField;
+import 
org.apache.carbondata.core.scan.result.vector.impl.CarbonColumnVectorImpl;
+
+import io.prestosql.spi.block.Block;
+import io.prestosql.spi.block.BlockBuilder;
+
+import org.apache.carbondata.presto.CarbonVectorBatch;
+
+/**
+ * Class to read the Array Stream
+ */
+
+public class ArrayStreamReader extends CarbonColumnVectorImpl implements 
PrestoVectorBlockBuilder {
+
+  protected int batchSize;
+
+  protected Type type;
+  protected BlockBuilder builder;
+  Block childBlock = null;
+  private int index = 0;
+
+  public ArrayStreamReader(int batchSize, DataType dataType, StructField 
field) {
+super(batchSize, dataType);
+this.batchSize = batchSize;
+this.type = getArrayOfType(field, dataType);
+ArrayList childrenList= new ArrayList<>();
+
childrenList.add(CarbonVectorBatch.createDirectStreamReader(this.batchSize, 
field.getDataType(), field));
+setChildrenVector(childrenList);
+this.builder = type.createBlockBuilder(null, batchSize);
+  }
+
+  public int getIndex() {
+return index;
+  }
+
+  public void setIndex(int index) {
+this.index = index;
+  }
+
+  public String getDataTypeName() {
+return "ARRAY";
+  }
+
+  Type getArrayOfType(StructField field, DataType dataType) {
+if (dataType == DataTypes.STRING) {
+  return new ArrayType(VarcharType.VARCHAR);
+} else if (dataType == DataTypes.BYTE) {
+  return new ArrayType(TinyintType.TINYINT);
+} else if (dataType == DataTypes.SHORT) {
+  return new ArrayType(SmallintType.SMALLINT);
+} else if (dataType == DataTypes.INT) {

Review comment:
   Decimal datatype handling is also missing.





This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org




[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-07 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r467095602



##
File path: 
core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/compress/DirectCompressCodec.java
##
@@ -246,7 +239,29 @@ public void decodeAndFillVector(byte[] pageData, 
ColumnVectorInfo vectorInfo, Bi
   vector = ColumnarVectorWrapperDirectFactory
   .getDirectVectorWrapperFactory(vector, vectorInfo.invertedIndex, 
nullBits, deletedRows,
   true, false);
-  fillVector(pageData, vector, vectorDataType, pageDataType, pageSize, 
vectorInfo, nullBits);
+  Deque vectorStack = vectorInfo.getVectorStack();
+  // Only if vectorStack is null, it is initialized with the parent vector
+  if (vectorStack == null && vectorInfo.vector.getColumnVector() != null) {
+vectorStack = new ArrayDeque<>();
+// pushing the parent vector
+vectorStack.push((CarbonColumnVectorImpl) 
vectorInfo.vector.getColumnVector());
+vectorInfo.setVectorStack(vectorStack);
+  }
+  /*
+   * if top of vector stack is a complex vector then
+   * add their children into the stack and load them too.
+   * TODO: If there are multiple children push them into stack and load 
them iteratively
+   */
+  if (vectorStack != null && vectorStack.peek().isComplex()) {
+vectorStack.peek().setChildrenElements(pageData);

Review comment:
   Here, please pass pageSize as an argument and break out once the element count reaches pageSize, because this buffer is reusable and its capacity can be much larger than the actual data size.





This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org




[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-07 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r467095038



##
File path: 
core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/compress/DirectCompressCodec.java
##
@@ -246,7 +239,29 @@ public void decodeAndFillVector(byte[] pageData, 
ColumnVectorInfo vectorInfo, Bi
   vector = ColumnarVectorWrapperDirectFactory
   .getDirectVectorWrapperFactory(vector, vectorInfo.invertedIndex, 
nullBits, deletedRows,
   true, false);
-  fillVector(pageData, vector, vectorDataType, pageDataType, pageSize, 
vectorInfo, nullBits);
+  Deque vectorStack = vectorInfo.getVectorStack();
+  // Only if vectorStack is null, it is initialized with the parent vector
+  if (vectorStack == null && vectorInfo.vector.getColumnVector() != null) {
+vectorStack = new ArrayDeque<>();
+// pushing the parent vector
+vectorStack.push((CarbonColumnVectorImpl) 
vectorInfo.vector.getColumnVector());
+vectorInfo.setVectorStack(vectorStack);
+  }
+  /*
+   * if top of vector stack is a complex vector then
+   * add their children into the stack and load them too.
+   * TODO: If there are multiple children push them into stack and load 
them iteratively
+   */
+  if (vectorStack != null && vectorStack.peek().isComplex()) {
+vectorStack.peek().setChildrenElements(pageData);
+vectorStack.push(vectorStack.peek().getChildrenVector().get(0));
+vectorStack.peek().loadPage();
+return;
+  }
+
+  FillVector fill = new FillVector(pageData, vectorInfo, nullBits);
+  fill.basedOnType(vector, vectorDataType, pageSize, pageDataType);
+

Review comment:
   Pop from the stack once the child has been processed.





This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org




[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-07 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r467090464



##
File path: 
core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/FillVector.java
##
@@ -0,0 +1,346 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.page.encoding;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.BitSet;
+
+import org.apache.carbondata.core.metadata.datatype.DataType;
+import org.apache.carbondata.core.metadata.datatype.DataTypes;
+import org.apache.carbondata.core.metadata.datatype.DecimalConverterFactory;
+import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector;
+import org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo;
+import 
org.apache.carbondata.core.scan.result.vector.impl.CarbonColumnVectorImpl;
+import org.apache.carbondata.core.util.ByteUtil;
+
+public class FillVector {
+  private byte[] pageData;
+  private float floatFactor = 0;
+  private double factor = 0;
+  private ColumnVectorInfo vectorInfo;
+  private BitSet nullBits;
+
+  public FillVector(byte[] pageData, ColumnVectorInfo vectorInfo, BitSet 
nullBits) {
+this.pageData = pageData;
+this.vectorInfo = vectorInfo;
+this.nullBits = nullBits;
+  }
+
+  public void setFactor(double factor) {
+this.factor = factor;
+  }
+
+  public void setFloatFactor(float floatFactor) {
+this.floatFactor = floatFactor;
+  }
+
+  public void basedOnType(CarbonColumnVector vector, DataType vectorDataType, 
int pageSize,
+  DataType pageDataType) {
+if (vectorInfo.vector.getColumnVector() != null && 
((CarbonColumnVectorImpl) vectorInfo.vector
+.getColumnVector()).isComplex()) {
+  fillComplexType(vector.getColumnVector(), pageDataType);
+} else {
+  fillPrimitiveType(vector, vectorDataType, pageSize, pageDataType);
+  vector.setIndex(0);
+}
+  }
+
+  private void fillComplexType(CarbonColumnVector vector, DataType 
pageDataType) {
+CarbonColumnVectorImpl vectorImpl = (CarbonColumnVectorImpl) vector;
+if (vector != null && vector.getChildrenVector() != null) {
+  ArrayList childElements = ((CarbonColumnVectorImpl) 
vector).getChildrenElements();
+  for (int i = 0; i < childElements.size(); i++) {
+int count = childElements.get(i);
+typeComplexObject(vectorImpl.getChildrenVector().get(0), count, 
pageDataType);
+vector.putArrayObject();
+  }
+}

Review comment:
   Reset the index of the child vector here, since this page has been fully processed.





This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org




[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-06 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r466417493



##
File path: 
integration/presto/src/main/prestosql/org/apache/carbondata/presto/readers/ArrayStreamReader.java
##
@@ -0,0 +1,163 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.presto.readers;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import io.prestosql.spi.type.*;
+
+import org.apache.carbondata.core.metadata.datatype.DataType;
+import org.apache.carbondata.core.metadata.datatype.DataTypes;
+import org.apache.carbondata.core.metadata.datatype.StructField;
+import 
org.apache.carbondata.core.scan.result.vector.impl.CarbonColumnVectorImpl;
+
+import io.prestosql.spi.block.Block;
+import io.prestosql.spi.block.BlockBuilder;
+
+import org.apache.carbondata.presto.CarbonVectorBatch;
+
+/**
+ * Class to read the Array Stream
+ */
+
+public class ArrayStreamReader extends CarbonColumnVectorImpl implements 
PrestoVectorBlockBuilder {
+
+  protected int batchSize;
+
+  protected Type type;
+  protected BlockBuilder builder;
+  Block childBlock = null;
+  private int index = 0;
+
+  public ArrayStreamReader(int batchSize, DataType dataType, StructField 
field) {
+super(batchSize, dataType);
+this.batchSize = batchSize;
+this.type = getArrayOfType(field, dataType);
+ArrayList childrenList= new ArrayList<>();
+
childrenList.add(CarbonVectorBatch.createDirectStreamReader(this.batchSize, 
field.getDataType(), field));
+setChildrenVector(childrenList);
+this.builder = type.createBlockBuilder(null, batchSize);
+  }
+
+  public int getIndex() {
+return index;
+  }
+
+  public void setIndex(int index) {
+this.index = index;
+  }
+
+  public String getDataTypeName() {
+return "ARRAY";
+  }
+
+  Type getArrayOfType(StructField field, DataType dataType) {
+if (dataType == DataTypes.STRING) {
+  return new ArrayType(VarcharType.VARCHAR);
+} else if (dataType == DataTypes.BYTE) {
+  return new ArrayType(TinyintType.TINYINT);
+} else if (dataType == DataTypes.SHORT) {
+  return new ArrayType(SmallintType.SMALLINT);
+} else if (dataType == DataTypes.INT) {
+  return new ArrayType(IntegerType.INTEGER);
+} else if (dataType == DataTypes.LONG) {
+  return new ArrayType(BigintType.BIGINT);
+} else if (dataType == DataTypes.DOUBLE) {
+  return new ArrayType(DoubleType.DOUBLE);
+} else if (dataType == DataTypes.FLOAT) {
+  return new ArrayType(RealType.REAL);
+} else if (dataType == DataTypes.BOOLEAN) {
+  return new ArrayType(BooleanType.BOOLEAN);
+} else if (dataType == DataTypes.TIMESTAMP) {
+  return new ArrayType(TimestampType.TIMESTAMP);
+} else if (DataTypes.isArrayType(dataType)) {
+  StructField childField = field.getChildren().get(0);
+  return new ArrayType(getArrayOfType(childField, 
childField.getDataType()));
+} else {
+  throw new UnsupportedOperationException("Unsupported type: " + dataType);
+}
+  }
+
+  @Override
+  public Block buildBlock() {
+return builder.build();
+  }
+
+  public boolean isComplex() {
+return true;
+  }
+
+  @Override
+  public void setBatchSize(int batchSize) {
+this.batchSize = batchSize;
+  }
+
+  @Override
+  public void putObject(int rowId, Object value) {
+if (value == null) {
+  putNull(rowId);
+} else {
+  getChildrenVector().get(0).putObject(rowId, value);
+}
+  }
+
+  public void putArrayObject() {
+if (DataTypes.isArrayType(this.getType())) {
+  childBlock = ((ArrayStreamReader) 
getChildrenVector().get(0)).buildBlock();
+} else if (this.getType() == DataTypes.STRING) {
+  childBlock = ((SliceStreamReader) 
getChildrenVector().get(0)).buildBlock();
+} else if (this.getType() == DataTypes.INT) {
+  childBlock = ((IntegerStreamReader) 
getChildrenVector().get(0)).buildBlock();
+} else if (this.getType() == DataTypes.LONG) {
+  childBlock = ((LongStreamReader) 
getChildrenVector().get(0)).buildBlock();
+} else if (this.getType() == DataTypes.DOUBLE) {
+  childBlock = ((DoubleStreamReader) 
getChildrenVector().get(0)).buildBloc

[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-06 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r466313217



##
File path: 
integration/presto/src/test/scala/org/apache/carbondata/presto/integrationtest/GenerateFiles.scala
##
@@ -0,0 +1,667 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.presto.integrationtest
+
+import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, 
File, InputStream}
+import java.util
+
+import scala.collection.JavaConverters._
+
+import org.apache.avro
+import org.apache.avro.file.DataFileWriter
+import org.apache.avro.generic.{GenericDatumReader, GenericDatumWriter, 
GenericRecord}
+import org.apache.avro.io.{DecoderFactory, Encoder}
+import org.junit.Assert
+
+import org.apache.carbondata.core.cache.dictionary.DictionaryByteArrayWrapper
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.datastore.block.TableBlockInfo
+import org.apache.carbondata.core.datastore.chunk.impl.DimensionRawColumnChunk
+import 
org.apache.carbondata.core.datastore.chunk.reader.CarbonDataReaderFactory
+import 
org.apache.carbondata.core.datastore.chunk.reader.dimension.v3.DimensionChunkReaderV3
+import org.apache.carbondata.core.datastore.compression.CompressorFactory
+import org.apache.carbondata.core.datastore.filesystem.{CarbonFile, 
CarbonFileFilter}
+import org.apache.carbondata.core.datastore.impl.FileFactory
+import 
org.apache.carbondata.core.datastore.page.encoding.DefaultEncodingFactory
+import org.apache.carbondata.core.metadata.ColumnarFormatVersion
+import org.apache.carbondata.core.util.{CarbonMetadataUtil, 
DataFileFooterConverterV3}
+import org.apache.carbondata.sdk.file.CarbonWriter
+
+class GenerateFiles {
+
+  def singleLevelArrayFile() = {
+val json1: String =
+  """ {"stringCol": "bob","intCol": 14,"doubleCol": 10.5,"realCol": 12.7,
+|"boolCol": true,"arrayStringCol1":["Street1"],"arrayStringCol2": 
["India", "Egypt"],
+|"arrayIntCol": 
[1,2,3],"arrayBigIntCol":[7,6],"arrayRealCol":[1.111,2.2],
+|"arrayDoubleCol":[1.1,2.2,3.3], "arrayBooleanCol": [true, false, 
true]} """.stripMargin
+val json2: String =
+  """ {"stringCol": "Alex","intCol": 15,"doubleCol": 11.5,"realCol": 13.7,
+|"boolCol": true, "arrayStringCol1": ["Street1", 
"Street2"],"arrayStringCol2": ["Japan",
+|"China", "India"],"arrayIntCol": 
[1,2,3,4],"arrayBigIntCol":[7,6,8000],
+|"arrayRealCol":[1.1,2.2,3.3],"arrayDoubleCol":[1.1,2.2,4.45,3.3],
+|"arrayBooleanCol": [true, true, true]} """.stripMargin
+val json3: String =
+  """ {"stringCol": "Rio","intCol": 16,"doubleCol": 12.5,"realCol": 14.7,
+|"boolCol": true, "arrayStringCol1": ["Street1", "Street2","Street3"],
+|"arrayStringCol2": ["China", "Brazil", "Paris", 
"France"],"arrayIntCol": [1,2,3,4,5],
+
|"arrayBigIntCol":[7,6,8000,91],"arrayRealCol":[1.1,2.2,3.3,4.45],
+|"arrayDoubleCol":[1.1,2.2,4.45,5.5,3.3], "arrayBooleanCol": [true, 
false, true]} """
+.stripMargin
+val json4: String =
+  """ {"stringCol": "bob","intCol": 14,"doubleCol": 10.5,"realCol": 12.7,
+|"boolCol": true, "arrayStringCol1":["Street1"],"arrayStringCol2": 
["India", "Egypt"],
+|"arrayIntCol": 
[1,2,3],"arrayBigIntCol":[7,6],"arrayRealCol":[1.1,2.2],
+|"arrayDoubleCol":[1.1,2.2,3.3], "arrayBooleanCol": [true, false, 
true]} """.stripMargin
+val json5: String =
+  """ {"stringCol": "Alex","intCol": 15,"doubleCol": 11.5,"realCol": 13.7,
+|"boolCol": true, "arrayStringCol1": ["Street1", 
"Street2"],"arrayStringCol2": ["Japan",
+|"China", "India"],"arrayIntCol": 
[1,2,3,4],"arrayBigIntCol":[7,6,8000],
+|"arrayRealCol":[1.1,2.2,3.3],"arrayDoubleCol":[4,1,21.222,15.231],
+|"arrayBooleanCol": [false, false, false]} """.stripMargin
+
+
+val mySchema =
+  """ {
+|  "name": "address",
+|  "type": "record",
+|  "fields": [
+|  {
+|  "name": "stringCol",
+|  "type": "strin

[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-06 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r466313146



##
File path: 
integration/presto/src/test/scala/org/apache/carbondata/presto/integrationtest/GenerateFiles.scala
##
@@ -0,0 +1,667 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.presto.integrationtest
+
+import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, 
File, InputStream}
+import java.util
+
+import scala.collection.JavaConverters._
+
+import org.apache.avro
+import org.apache.avro.file.DataFileWriter
+import org.apache.avro.generic.{GenericDatumReader, GenericDatumWriter, 
GenericRecord}
+import org.apache.avro.io.{DecoderFactory, Encoder}
+import org.junit.Assert
+
+import org.apache.carbondata.core.cache.dictionary.DictionaryByteArrayWrapper
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.datastore.block.TableBlockInfo
+import org.apache.carbondata.core.datastore.chunk.impl.DimensionRawColumnChunk
+import 
org.apache.carbondata.core.datastore.chunk.reader.CarbonDataReaderFactory
+import 
org.apache.carbondata.core.datastore.chunk.reader.dimension.v3.DimensionChunkReaderV3
+import org.apache.carbondata.core.datastore.compression.CompressorFactory
+import org.apache.carbondata.core.datastore.filesystem.{CarbonFile, 
CarbonFileFilter}
+import org.apache.carbondata.core.datastore.impl.FileFactory
+import 
org.apache.carbondata.core.datastore.page.encoding.DefaultEncodingFactory
+import org.apache.carbondata.core.metadata.ColumnarFormatVersion
+import org.apache.carbondata.core.util.{CarbonMetadataUtil, 
DataFileFooterConverterV3}
+import org.apache.carbondata.sdk.file.CarbonWriter
+
+class GenerateFiles {
+
+  def singleLevelArrayFile() = {
+val json1: String =
+  """ {"stringCol": "bob","intCol": 14,"doubleCol": 10.5,"realCol": 12.7,
+|"boolCol": true,"arrayStringCol1":["Street1"],"arrayStringCol2": 
["India", "Egypt"],
+|"arrayIntCol": 
[1,2,3],"arrayBigIntCol":[7,6],"arrayRealCol":[1.111,2.2],
+|"arrayDoubleCol":[1.1,2.2,3.3], "arrayBooleanCol": [true, false, 
true]} """.stripMargin
+val json2: String =
+  """ {"stringCol": "Alex","intCol": 15,"doubleCol": 11.5,"realCol": 13.7,
+|"boolCol": true, "arrayStringCol1": ["Street1", 
"Street2"],"arrayStringCol2": ["Japan",
+|"China", "India"],"arrayIntCol": 
[1,2,3,4],"arrayBigIntCol":[7,6,8000],
+|"arrayRealCol":[1.1,2.2,3.3],"arrayDoubleCol":[1.1,2.2,4.45,3.3],
+|"arrayBooleanCol": [true, true, true]} """.stripMargin
+val json3: String =
+  """ {"stringCol": "Rio","intCol": 16,"doubleCol": 12.5,"realCol": 14.7,
+|"boolCol": true, "arrayStringCol1": ["Street1", "Street2","Street3"],
+|"arrayStringCol2": ["China", "Brazil", "Paris", 
"France"],"arrayIntCol": [1,2,3,4,5],
+
|"arrayBigIntCol":[7,6,8000,91],"arrayRealCol":[1.1,2.2,3.3,4.45],
+|"arrayDoubleCol":[1.1,2.2,4.45,5.5,3.3], "arrayBooleanCol": [true, 
false, true]} """
+.stripMargin
+val json4: String =
+  """ {"stringCol": "bob","intCol": 14,"doubleCol": 10.5,"realCol": 12.7,
+|"boolCol": true, "arrayStringCol1":["Street1"],"arrayStringCol2": 
["India", "Egypt"],
+|"arrayIntCol": 
[1,2,3],"arrayBigIntCol":[7,6],"arrayRealCol":[1.1,2.2],
+|"arrayDoubleCol":[1.1,2.2,3.3], "arrayBooleanCol": [true, false, 
true]} """.stripMargin
+val json5: String =
+  """ {"stringCol": "Alex","intCol": 15,"doubleCol": 11.5,"realCol": 13.7,
+|"boolCol": true, "arrayStringCol1": ["Street1", 
"Street2"],"arrayStringCol2": ["Japan",
+|"China", "India"],"arrayIntCol": 
[1,2,3,4],"arrayBigIntCol":[7,6,8000],
+|"arrayRealCol":[1.1,2.2,3.3],"arrayDoubleCol":[4,1,21.222,15.231],
+|"arrayBooleanCol": [false, false, false]} """.stripMargin
+
+
+val mySchema =
+  """ {
+|  "name": "address",
+|  "type": "record",
+|  "fields": [
+|  {
+|  "name": "stringCol",
+|  "type": "strin

[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-06 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r466312957



##
File path: 
integration/presto/src/test/scala/org/apache/carbondata/presto/integrationtest/GenerateFiles.scala
##
@@ -0,0 +1,667 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.presto.integrationtest
+
+import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, 
File, InputStream}
+import java.util
+
+import scala.collection.JavaConverters._
+
+import org.apache.avro
+import org.apache.avro.file.DataFileWriter
+import org.apache.avro.generic.{GenericDatumReader, GenericDatumWriter, 
GenericRecord}
+import org.apache.avro.io.{DecoderFactory, Encoder}
+import org.junit.Assert
+
+import org.apache.carbondata.core.cache.dictionary.DictionaryByteArrayWrapper
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.datastore.block.TableBlockInfo
+import org.apache.carbondata.core.datastore.chunk.impl.DimensionRawColumnChunk
+import 
org.apache.carbondata.core.datastore.chunk.reader.CarbonDataReaderFactory
+import 
org.apache.carbondata.core.datastore.chunk.reader.dimension.v3.DimensionChunkReaderV3
+import org.apache.carbondata.core.datastore.compression.CompressorFactory
+import org.apache.carbondata.core.datastore.filesystem.{CarbonFile, 
CarbonFileFilter}
+import org.apache.carbondata.core.datastore.impl.FileFactory
+import 
org.apache.carbondata.core.datastore.page.encoding.DefaultEncodingFactory
+import org.apache.carbondata.core.metadata.ColumnarFormatVersion
+import org.apache.carbondata.core.util.{CarbonMetadataUtil, 
DataFileFooterConverterV3}
+import org.apache.carbondata.sdk.file.CarbonWriter
+
+class GenerateFiles {
+
+  def singleLevelArrayFile() = {
+val json1: String =
+  """ {"stringCol": "bob","intCol": 14,"doubleCol": 10.5,"realCol": 12.7,
+|"boolCol": true,"arrayStringCol1":["Street1"],"arrayStringCol2": 
["India", "Egypt"],
+|"arrayIntCol": 
[1,2,3],"arrayBigIntCol":[7,6],"arrayRealCol":[1.111,2.2],
+|"arrayDoubleCol":[1.1,2.2,3.3], "arrayBooleanCol": [true, false, 
true]} """.stripMargin
+val json2: String =
+  """ {"stringCol": "Alex","intCol": 15,"doubleCol": 11.5,"realCol": 13.7,
+|"boolCol": true, "arrayStringCol1": ["Street1", 
"Street2"],"arrayStringCol2": ["Japan",
+|"China", "India"],"arrayIntCol": 
[1,2,3,4],"arrayBigIntCol":[7,6,8000],
+|"arrayRealCol":[1.1,2.2,3.3],"arrayDoubleCol":[1.1,2.2,4.45,3.3],
+|"arrayBooleanCol": [true, true, true]} """.stripMargin
+val json3: String =
+  """ {"stringCol": "Rio","intCol": 16,"doubleCol": 12.5,"realCol": 14.7,
+|"boolCol": true, "arrayStringCol1": ["Street1", "Street2","Street3"],
+|"arrayStringCol2": ["China", "Brazil", "Paris", 
"France"],"arrayIntCol": [1,2,3,4,5],
+
|"arrayBigIntCol":[7,6,8000,91],"arrayRealCol":[1.1,2.2,3.3,4.45],
+|"arrayDoubleCol":[1.1,2.2,4.45,5.5,3.3], "arrayBooleanCol": [true, 
false, true]} """
+.stripMargin
+val json4: String =
+  """ {"stringCol": "bob","intCol": 14,"doubleCol": 10.5,"realCol": 12.7,
+|"boolCol": true, "arrayStringCol1":["Street1"],"arrayStringCol2": 
["India", "Egypt"],
+|"arrayIntCol": 
[1,2,3],"arrayBigIntCol":[7,6],"arrayRealCol":[1.1,2.2],
+|"arrayDoubleCol":[1.1,2.2,3.3], "arrayBooleanCol": [true, false, 
true]} """.stripMargin
+val json5: String =
+  """ {"stringCol": "Alex","intCol": 15,"doubleCol": 11.5,"realCol": 13.7,
+|"boolCol": true, "arrayStringCol1": ["Street1", 
"Street2"],"arrayStringCol2": ["Japan",
+|"China", "India"],"arrayIntCol": 
[1,2,3,4],"arrayBigIntCol":[7,6,8000],
+|"arrayRealCol":[1.1,2.2,3.3],"arrayDoubleCol":[4,1,21.222,15.231],
+|"arrayBooleanCol": [false, false, false]} """.stripMargin
+
+
+val mySchema =
+  """ {
+|  "name": "address",
+|  "type": "record",
+|  "fields": [
+|  {
+|  "name": "stringCol",
+|  "type": "strin

[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-06 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r466312027



##
File path: 
integration/presto/src/test/scala/org/apache/carbondata/presto/integrationtest/GenerateFiles.scala
##
@@ -0,0 +1,667 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.presto.integrationtest
+
+import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, 
File, InputStream}
+import java.util
+
+import scala.collection.JavaConverters._
+
+import org.apache.avro
+import org.apache.avro.file.DataFileWriter
+import org.apache.avro.generic.{GenericDatumReader, GenericDatumWriter, 
GenericRecord}
+import org.apache.avro.io.{DecoderFactory, Encoder}
+import org.junit.Assert
+
+import org.apache.carbondata.core.cache.dictionary.DictionaryByteArrayWrapper
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.datastore.block.TableBlockInfo
+import org.apache.carbondata.core.datastore.chunk.impl.DimensionRawColumnChunk
+import 
org.apache.carbondata.core.datastore.chunk.reader.CarbonDataReaderFactory
+import 
org.apache.carbondata.core.datastore.chunk.reader.dimension.v3.DimensionChunkReaderV3
+import org.apache.carbondata.core.datastore.compression.CompressorFactory
+import org.apache.carbondata.core.datastore.filesystem.{CarbonFile, 
CarbonFileFilter}
+import org.apache.carbondata.core.datastore.impl.FileFactory
+import 
org.apache.carbondata.core.datastore.page.encoding.DefaultEncodingFactory
+import org.apache.carbondata.core.metadata.ColumnarFormatVersion
+import org.apache.carbondata.core.util.{CarbonMetadataUtil, 
DataFileFooterConverterV3}
+import org.apache.carbondata.sdk.file.CarbonWriter
+
+class GenerateFiles {
+
+  def singleLevelArrayFile() = {
+val json1: String =
+  """ {"stringCol": "bob","intCol": 14,"doubleCol": 10.5,"realCol": 12.7,
+|"boolCol": true,"arrayStringCol1":["Street1"],"arrayStringCol2": 
["India", "Egypt"],
+|"arrayIntCol": 
[1,2,3],"arrayBigIntCol":[7,6],"arrayRealCol":[1.111,2.2],
+|"arrayDoubleCol":[1.1,2.2,3.3], "arrayBooleanCol": [true, false, 
true]} """.stripMargin
+val json2: String =
+  """ {"stringCol": "Alex","intCol": 15,"doubleCol": 11.5,"realCol": 13.7,
+|"boolCol": true, "arrayStringCol1": ["Street1", 
"Street2"],"arrayStringCol2": ["Japan",
+|"China", "India"],"arrayIntCol": 
[1,2,3,4],"arrayBigIntCol":[7,6,8000],
+|"arrayRealCol":[1.1,2.2,3.3],"arrayDoubleCol":[1.1,2.2,4.45,3.3],
+|"arrayBooleanCol": [true, true, true]} """.stripMargin
+val json3: String =
+  """ {"stringCol": "Rio","intCol": 16,"doubleCol": 12.5,"realCol": 14.7,
+|"boolCol": true, "arrayStringCol1": ["Street1", "Street2","Street3"],
+|"arrayStringCol2": ["China", "Brazil", "Paris", 
"France"],"arrayIntCol": [1,2,3,4,5],
+
|"arrayBigIntCol":[7,6,8000,91],"arrayRealCol":[1.1,2.2,3.3,4.45],
+|"arrayDoubleCol":[1.1,2.2,4.45,5.5,3.3], "arrayBooleanCol": [true, 
false, true]} """
+.stripMargin
+val json4: String =
+  """ {"stringCol": "bob","intCol": 14,"doubleCol": 10.5,"realCol": 12.7,
+|"boolCol": true, "arrayStringCol1":["Street1"],"arrayStringCol2": 
["India", "Egypt"],
+|"arrayIntCol": 
[1,2,3],"arrayBigIntCol":[7,6],"arrayRealCol":[1.1,2.2],
+|"arrayDoubleCol":[1.1,2.2,3.3], "arrayBooleanCol": [true, false, 
true]} """.stripMargin
+val json5: String =
+  """ {"stringCol": "Alex","intCol": 15,"doubleCol": 11.5,"realCol": 13.7,
+|"boolCol": true, "arrayStringCol1": ["Street1", 
"Street2"],"arrayStringCol2": ["Japan",
+|"China", "India"],"arrayIntCol": 
[1,2,3,4],"arrayBigIntCol":[7,6,8000],
+|"arrayRealCol":[1.1,2.2,3.3],"arrayDoubleCol":[4,1,21.222,15.231],
+|"arrayBooleanCol": [false, false, false]} """.stripMargin
+
+
+val mySchema =
+  """ {
+|  "name": "address",
+|  "type": "record",
+|  "fields": [
+|  {
+|  "name": "stringCol",
+|  "type": "strin

[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-06 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r466311639



##
File path: 
integration/presto/src/test/scala/org/apache/carbondata/presto/integrationtest/GenerateFiles.scala
##
@@ -0,0 +1,667 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.presto.integrationtest
+
+import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, 
File, InputStream}
+import java.util
+
+import scala.collection.JavaConverters._
+
+import org.apache.avro
+import org.apache.avro.file.DataFileWriter
+import org.apache.avro.generic.{GenericDatumReader, GenericDatumWriter, 
GenericRecord}
+import org.apache.avro.io.{DecoderFactory, Encoder}
+import org.junit.Assert
+
+import org.apache.carbondata.core.cache.dictionary.DictionaryByteArrayWrapper
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.datastore.block.TableBlockInfo
+import org.apache.carbondata.core.datastore.chunk.impl.DimensionRawColumnChunk
+import 
org.apache.carbondata.core.datastore.chunk.reader.CarbonDataReaderFactory
+import 
org.apache.carbondata.core.datastore.chunk.reader.dimension.v3.DimensionChunkReaderV3
+import org.apache.carbondata.core.datastore.compression.CompressorFactory
+import org.apache.carbondata.core.datastore.filesystem.{CarbonFile, 
CarbonFileFilter}
+import org.apache.carbondata.core.datastore.impl.FileFactory
+import 
org.apache.carbondata.core.datastore.page.encoding.DefaultEncodingFactory
+import org.apache.carbondata.core.metadata.ColumnarFormatVersion
+import org.apache.carbondata.core.util.{CarbonMetadataUtil, 
DataFileFooterConverterV3}
+import org.apache.carbondata.sdk.file.CarbonWriter
+
+class GenerateFiles {
+
+  def singleLevelArrayFile() = {
+val json1: String =
+  """ {"stringCol": "bob","intCol": 14,"doubleCol": 10.5,"realCol": 12.7,
+|"boolCol": true,"arrayStringCol1":["Street1"],"arrayStringCol2": 
["India", "Egypt"],
+|"arrayIntCol": 
[1,2,3],"arrayBigIntCol":[7,6],"arrayRealCol":[1.111,2.2],
+|"arrayDoubleCol":[1.1,2.2,3.3], "arrayBooleanCol": [true, false, 
true]} """.stripMargin
+val json2: String =
+  """ {"stringCol": "Alex","intCol": 15,"doubleCol": 11.5,"realCol": 13.7,
+|"boolCol": true, "arrayStringCol1": ["Street1", 
"Street2"],"arrayStringCol2": ["Japan",
+|"China", "India"],"arrayIntCol": 
[1,2,3,4],"arrayBigIntCol":[7,6,8000],
+|"arrayRealCol":[1.1,2.2,3.3],"arrayDoubleCol":[1.1,2.2,4.45,3.3],
+|"arrayBooleanCol": [true, true, true]} """.stripMargin
+val json3: String =
+  """ {"stringCol": "Rio","intCol": 16,"doubleCol": 12.5,"realCol": 14.7,
+|"boolCol": true, "arrayStringCol1": ["Street1", "Street2","Street3"],
+|"arrayStringCol2": ["China", "Brazil", "Paris", 
"France"],"arrayIntCol": [1,2,3,4,5],
+
|"arrayBigIntCol":[7,6,8000,91],"arrayRealCol":[1.1,2.2,3.3,4.45],
+|"arrayDoubleCol":[1.1,2.2,4.45,5.5,3.3], "arrayBooleanCol": [true, 
false, true]} """
+.stripMargin
+val json4: String =
+  """ {"stringCol": "bob","intCol": 14,"doubleCol": 10.5,"realCol": 12.7,
+|"boolCol": true, "arrayStringCol1":["Street1"],"arrayStringCol2": 
["India", "Egypt"],
+|"arrayIntCol": 
[1,2,3],"arrayBigIntCol":[7,6],"arrayRealCol":[1.1,2.2],
+|"arrayDoubleCol":[1.1,2.2,3.3], "arrayBooleanCol": [true, false, 
true]} """.stripMargin
+val json5: String =
+  """ {"stringCol": "Alex","intCol": 15,"doubleCol": 11.5,"realCol": 13.7,
+|"boolCol": true, "arrayStringCol1": ["Street1", 
"Street2"],"arrayStringCol2": ["Japan",
+|"China", "India"],"arrayIntCol": 
[1,2,3,4],"arrayBigIntCol":[7,6,8000],
+|"arrayRealCol":[1.1,2.2,3.3],"arrayDoubleCol":[4,1,21.222,15.231],
+|"arrayBooleanCol": [false, false, false]} """.stripMargin
+
+
+val mySchema =
+  """ {
+|  "name": "address",
+|  "type": "record",
+|  "fields": [
+|  {
+|  "name": "stringCol",
+|  "type": "strin

[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-06 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r466310201



##
File path: 
integration/presto/src/main/java/org/apache/carbondata/presto/CarbonVectorBatch.java
##
@@ -102,6 +89,12 @@ public static CarbonColumnVectorImpl 
createDirectStreamReader(int batchSize, Dat
   } else {
 return null;
   }
+} else if (DataTypes.isArrayType(field.getDataType())) {
+  if (field.getChildren().size() > 1) {

Review comment:
   Remove this assert; an array can never have more than one child.





This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org




[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-06 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r466309537



##
File path: 
core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/CarbonColumnVectorImpl.java
##
@@ -102,6 +126,58 @@ public CarbonColumnVectorImpl(int batchSize, DataType 
dataType) {
 
   }
 
+  @Override
+  public CarbonColumnVector getColumnVector() {
+return null;
+  }
+
+  @Override
+  public List getChildrenVector() {
+return childrenVector;
+  }
+
+  @Override
+  public void putArrayObject() {
+return;
+  }
+
+  public void setChildrenVector(ArrayList 
childrenVector) {
+this.childrenVector = childrenVector;
+  }
+
+  public ArrayList getChildrenElements() {
+return childrenElements;
+  }
+
+  public void setChildrenElements(ArrayList childrenElements) {
+this.childrenElements = childrenElements;
+  }
+
+  public ArrayList getChildrenOffset() {
+return childrenOffset;
+  }
+
+  public void setChildrenOffset(ArrayList childrenOffset) {
+this.childrenOffset = childrenOffset;
+  }
+
+  public void setChildrenElementsAndOffset(byte[] childPageData) {
+ByteBuffer childInfoBuffer = ByteBuffer.wrap(childPageData);
+ArrayList childElements = new ArrayList<>();
+ArrayList childOffset = new ArrayList<>();

Review comment:
   The offset is not required, even for the struct type, so please remove it.





This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org




[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-04 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r465477662



##
File path: 
integration/presto/src/test/scala/org/apache/carbondata/presto/integrationtest/GenerateFiles.scala
##
@@ -0,0 +1,650 @@
+package org.apache.carbondata.presto.integrationtest
+
+import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, 
File, InputStream}
+import java.util
+
+import scala.collection.JavaConverters._
+
+import org.apache.avro
+import org.apache.avro.file.DataFileWriter
+import org.apache.avro.generic.{GenericDatumReader, GenericDatumWriter, 
GenericRecord}
+import org.apache.avro.io.{DecoderFactory, Encoder}
+import org.junit.Assert
+
+import org.apache.carbondata.core.cache.dictionary.DictionaryByteArrayWrapper
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.datastore.block.TableBlockInfo
+import org.apache.carbondata.core.datastore.chunk.impl.DimensionRawColumnChunk
+import 
org.apache.carbondata.core.datastore.chunk.reader.CarbonDataReaderFactory
+import 
org.apache.carbondata.core.datastore.chunk.reader.dimension.v3.DimensionChunkReaderV3
+import org.apache.carbondata.core.datastore.compression.CompressorFactory
+import org.apache.carbondata.core.datastore.filesystem.{CarbonFile, 
CarbonFileFilter}
+import org.apache.carbondata.core.datastore.impl.FileFactory
+import 
org.apache.carbondata.core.datastore.page.encoding.DefaultEncodingFactory
+import org.apache.carbondata.core.metadata.ColumnarFormatVersion
+import org.apache.carbondata.core.util.{CarbonMetadataUtil, 
DataFileFooterConverterV3}
+import org.apache.carbondata.sdk.file.CarbonWriter
+
+class GenerateFiles {
+
+  def singleLevelArrayFile() = {
+val json1: String =
+  """ {"stringCol": "bob","intCol": 14,"doubleCol": 10.5,"realCol": 12.7,
+|"boolCol": true,"arrayStringCol1":["Street1"],"arrayStringCol2": 
["India", "Egypt"],
+|"arrayIntCol": 
[1,2,3],"arrayBigIntCol":[7,6],"arrayRealCol":[1.111,2.2],
+|"arrayDoubleCol":[1.1,2.2,3.3], "arrayBooleanCol": [true, false, 
true]} """.stripMargin
+val json2: String =
+  """ {"stringCol": "Alex","intCol": 15,"doubleCol": 11.5,"realCol": 13.7,
+|"boolCol": true, "arrayStringCol1": ["Street1", 
"Street2"],"arrayStringCol2": ["Japan",
+|"China", "India"],"arrayIntCol": 
[1,2,3,4],"arrayBigIntCol":[7,6,8000],
+|"arrayRealCol":[1.1,2.2,3.3],"arrayDoubleCol":[1.1,2.2,4.45,3.3],
+|"arrayBooleanCol": [true, true, true]} """.stripMargin
+val json3: String =
+  """ {"stringCol": "Rio","intCol": 16,"doubleCol": 12.5,"realCol": 14.7,
+|"boolCol": true, "arrayStringCol1": ["Street1", "Street2","Street3"],
+|"arrayStringCol2": ["China", "Brazil", "Paris", 
"France"],"arrayIntCol": [1,2,3,4,5],
+
|"arrayBigIntCol":[7,6,8000,91],"arrayRealCol":[1.1,2.2,3.3,4.45],
+|"arrayDoubleCol":[1.1,2.2,4.45,5.5,3.3], "arrayBooleanCol": [true, 
false, true]} """
+.stripMargin
+val json4: String =
+  """ {"stringCol": "bob","intCol": 14,"doubleCol": 10.5,"realCol": 12.7,
+|"boolCol": true, "arrayStringCol1":["Street1"],"arrayStringCol2": 
["India", "Egypt"],
+|"arrayIntCol": 
[1,2,3],"arrayBigIntCol":[7,6],"arrayRealCol":[1.1,2.2],
+|"arrayDoubleCol":[1.1,2.2,3.3], "arrayBooleanCol": [true, false, 
true]} """.stripMargin
+val json5: String =
+  """ {"stringCol": "Alex","intCol": 15,"doubleCol": 11.5,"realCol": 13.7,
+|"boolCol": true, "arrayStringCol1": ["Street1", 
"Street2"],"arrayStringCol2": ["Japan",
+|"China", "India"],"arrayIntCol": 
[1,2,3,4],"arrayBigIntCol":[7,6,8000],
+|"arrayRealCol":[1.1,2.2,3.3],"arrayDoubleCol":[4,1,21.222,15.231],
+|"arrayBooleanCol": [false, false, false]} """.stripMargin
+
+
+val mySchema =

Review comment:
   This code is similar to what is present in TestNonTransactionalCarbonTable; 
it is better to extract it to a common module [sdk] and reuse it instead of 
duplicating it.  





This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org




[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-04 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r465477333



##
File path: 
integration/presto/src/test/scala/org/apache/carbondata/presto/integrationtest/GenerateFiles.scala
##
@@ -0,0 +1,650 @@
+package org.apache.carbondata.presto.integrationtest

Review comment:
   add license header





This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org




[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-04 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r465477228



##
File path: 
integration/presto/src/test/scala/org/apache/carbondata/presto/integrationtest/PrestoReadTableFilesTest.scala
##
@@ -0,0 +1,398 @@
+package org.apache.carbondata.presto.integrationtest
+
+import java.io.File
+import java.util
+import java.util.Arrays.asList
+
+import io.prestosql.jdbc.PrestoArray
+import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.datastore.impl.FileFactory
+import org.apache.carbondata.core.util.{CarbonProperties, CarbonUtil}
+import org.apache.carbondata.presto.server.PrestoServer
+import org.apache.commons.io.FileUtils
+import org.scalatest.{BeforeAndAfterAll, FunSuiteLike, BeforeAndAfterEach}
+
+class PrestoReadTableFilesTest extends FunSuiteLike with BeforeAndAfterAll 
with BeforeAndAfterEach{
+  private val logger = LogServiceFactory
+
.getLogService(classOf[PrestoTestNonTransactionalTableFiles].getCanonicalName)

Review comment:
   The class name referenced in the logger declaration is wrong.





This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org




[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-04 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r465477005



##
File path: 
integration/presto/src/test/scala/org/apache/carbondata/presto/integrationtest/PrestoReadTableFilesTest.scala
##
@@ -0,0 +1,398 @@
+package org.apache.carbondata.presto.integrationtest

Review comment:
   add license header 





This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org




[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-04 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r465142026



##
File path: 
integration/presto/src/main/prestosql/org/apache/carbondata/presto/readers/ArrayStreamReader.java
##
@@ -0,0 +1,163 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.presto.readers;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import io.prestosql.spi.type.*;
+
+import org.apache.carbondata.core.metadata.datatype.DataType;
+import org.apache.carbondata.core.metadata.datatype.DataTypes;
+import org.apache.carbondata.core.metadata.datatype.StructField;
+import 
org.apache.carbondata.core.scan.result.vector.impl.CarbonColumnVectorImpl;
+
+import io.prestosql.spi.block.Block;
+import io.prestosql.spi.block.BlockBuilder;
+
+import org.apache.carbondata.presto.CarbonVectorBatch;
+
+/**
+ * Class to read the Array Stream
+ */
+
+public class ArrayStreamReader extends CarbonColumnVectorImpl implements 
PrestoVectorBlockBuilder {
+
+  protected int batchSize;
+
+  protected Type type;
+  protected BlockBuilder builder;
+  Block childBlock = null;
+  private int index = 0;
+
+  public ArrayStreamReader(int batchSize, DataType dataType, StructField 
field) {
+super(batchSize, dataType);
+this.batchSize = batchSize;
+this.type = getArrayOfType(field, dataType);
+ArrayList childrenList= new ArrayList<>();
+
childrenList.add(CarbonVectorBatch.createDirectStreamReader(this.batchSize, 
field.getDataType(), field));
+setChildrenVector(childrenList);
+this.builder = type.createBlockBuilder(null, batchSize);
+  }
+
+  public int getIndex() {
+return index;
+  }
+
+  public void setIndex(int index) {
+this.index = index;
+  }
+
+  public String getDataTypeName() {
+return "ARRAY";
+  }
+
+  Type getArrayOfType(StructField field, DataType dataType) {
+if (dataType == DataTypes.STRING) {
+  return new ArrayType(VarcharType.VARCHAR);
+} else if (dataType == DataTypes.BYTE) {
+  return new ArrayType(TinyintType.TINYINT);
+} else if (dataType == DataTypes.SHORT) {
+  return new ArrayType(SmallintType.SMALLINT);
+} else if (dataType == DataTypes.INT) {
+  return new ArrayType(IntegerType.INTEGER);
+} else if (dataType == DataTypes.LONG) {
+  return new ArrayType(BigintType.BIGINT);
+} else if (dataType == DataTypes.DOUBLE) {
+  return new ArrayType(DoubleType.DOUBLE);
+} else if (dataType == DataTypes.FLOAT) {
+  return new ArrayType(RealType.REAL);
+} else if (dataType == DataTypes.BOOLEAN) {
+  return new ArrayType(BooleanType.BOOLEAN);
+} else if (dataType == DataTypes.TIMESTAMP) {
+  return new ArrayType(TimestampType.TIMESTAMP);
+} else if (DataTypes.isArrayType(dataType)) {
+  StructField childField = field.getChildren().get(0);
+  return new ArrayType(getArrayOfType(childField, 
childField.getDataType()));
+} else {
+  throw new UnsupportedOperationException("Unsupported type: " + dataType);
+}
+  }
+
+  @Override
+  public Block buildBlock() {
+return builder.build();
+  }
+
+  public boolean isComplex() {
+return true;
+  }
+
+  @Override
+  public void setBatchSize(int batchSize) {
+this.batchSize = batchSize;
+  }
+
+  @Override
+  public void putObject(int rowId, Object value) {
+if (value == null) {

Review comment:
   putObject is never used? Instead of putComplexObject, maybe we need to 
use the same interface. 





This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org




[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #3773: [CARBONDATA-3830]Presto array columns read support

2020-08-04 Thread GitBox


ajantha-bhat commented on a change in pull request #3773:
URL: https://github.com/apache/carbondata/pull/3773#discussion_r465142026



##
File path: 
integration/presto/src/main/prestosql/org/apache/carbondata/presto/readers/ArrayStreamReader.java
##
@@ -0,0 +1,163 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.presto.readers;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import io.prestosql.spi.type.*;
+
+import org.apache.carbondata.core.metadata.datatype.DataType;
+import org.apache.carbondata.core.metadata.datatype.DataTypes;
+import org.apache.carbondata.core.metadata.datatype.StructField;
+import 
org.apache.carbondata.core.scan.result.vector.impl.CarbonColumnVectorImpl;
+
+import io.prestosql.spi.block.Block;
+import io.prestosql.spi.block.BlockBuilder;
+
+import org.apache.carbondata.presto.CarbonVectorBatch;
+
+/**
+ * Class to read the Array Stream
+ */
+
+public class ArrayStreamReader extends CarbonColumnVectorImpl implements 
PrestoVectorBlockBuilder {
+
+  protected int batchSize;
+
+  protected Type type;
+  protected BlockBuilder builder;
+  Block childBlock = null;
+  private int index = 0;
+
+  public ArrayStreamReader(int batchSize, DataType dataType, StructField 
field) {
+super(batchSize, dataType);
+this.batchSize = batchSize;
+this.type = getArrayOfType(field, dataType);
+ArrayList childrenList= new ArrayList<>();
+
childrenList.add(CarbonVectorBatch.createDirectStreamReader(this.batchSize, 
field.getDataType(), field));
+setChildrenVector(childrenList);
+this.builder = type.createBlockBuilder(null, batchSize);
+  }
+
+  public int getIndex() {
+return index;
+  }
+
+  public void setIndex(int index) {
+this.index = index;
+  }
+
+  public String getDataTypeName() {
+return "ARRAY";
+  }
+
+  Type getArrayOfType(StructField field, DataType dataType) {
+if (dataType == DataTypes.STRING) {
+  return new ArrayType(VarcharType.VARCHAR);
+} else if (dataType == DataTypes.BYTE) {
+  return new ArrayType(TinyintType.TINYINT);
+} else if (dataType == DataTypes.SHORT) {
+  return new ArrayType(SmallintType.SMALLINT);
+} else if (dataType == DataTypes.INT) {
+  return new ArrayType(IntegerType.INTEGER);
+} else if (dataType == DataTypes.LONG) {
+  return new ArrayType(BigintType.BIGINT);
+} else if (dataType == DataTypes.DOUBLE) {
+  return new ArrayType(DoubleType.DOUBLE);
+} else if (dataType == DataTypes.FLOAT) {
+  return new ArrayType(RealType.REAL);
+} else if (dataType == DataTypes.BOOLEAN) {
+  return new ArrayType(BooleanType.BOOLEAN);
+} else if (dataType == DataTypes.TIMESTAMP) {
+  return new ArrayType(TimestampType.TIMESTAMP);
+} else if (DataTypes.isArrayType(dataType)) {
+  StructField childField = field.getChildren().get(0);
+  return new ArrayType(getArrayOfType(childField, 
childField.getDataType()));
+} else {
+  throw new UnsupportedOperationException("Unsupported type: " + dataType);
+}
+  }
+
+  @Override
+  public Block buildBlock() {
+return builder.build();
+  }
+
+  public boolean isComplex() {
+return true;
+  }
+
+  @Override
+  public void setBatchSize(int batchSize) {
+this.batchSize = batchSize;
+  }
+
+  @Override
+  public void putObject(int rowId, Object value) {
+if (value == null) {

Review comment:
   putObject is never used? Instead of putArrayObject, maybe we need to 
use the same interface. 





This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org