This is an automated email from the ASF dual-hosted git repository.
rongr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/pinot.git
The following commit(s) were added to refs/heads/master by this push:
new e0ea3e40f6 make data block util return actual object type (#9618)
e0ea3e40f6 is described below
commit e0ea3e40f69aff4b5c55f6b273a0933d41e67bdf
Author: Rong Rong <[email protected]>
AuthorDate: Tue Oct 18 19:33:05 2022 -0700
make data block util return actual object type (#9618)
Co-authored-by: Rong Rong <[email protected]>
---
.../pinot/common/datablock/DataBlockUtils.java | 22 ++++--
.../core/common/datablock/DataBlockBuilder.java | 79 ++++++++++++++++++++--
.../pinot/core/common/datablock/DataBlockTest.java | 67 ++++++++++++++++--
.../core/common/datablock/DataBlockTestUtils.java | 27 +++++++-
.../query/runtime/TransferableBlockUtilsTest.java | 13 ++--
.../org/apache/pinot/spi/utils/ArrayCopyUtils.java | 12 ++++
6 files changed, 192 insertions(+), 28 deletions(-)
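For context, here is a minimal, hypothetical sketch (not part of the commit) of what the change means for a caller of DataBlockUtils.extractRows: before this patch the extracted rows carried the stored representations (e.g. int for BOOLEAN, long for TIMESTAMP); with it, they carry the actual object types declared in the DataSchema. The class name below is made up for illustration.

import java.util.List;
import org.apache.pinot.common.datablock.BaseDataBlock;
import org.apache.pinot.common.datablock.DataBlockUtils;

public class ExtractRowsSketch {
  // Prints the runtime type of every extracted value. After this commit a
  // BOOLEAN column yields java.lang.Boolean and a TIMESTAMP column yields
  // java.sql.Timestamp, instead of their stored Integer/Long forms.
  static void printValueTypes(BaseDataBlock dataBlock) {
    List<Object[]> rows = DataBlockUtils.extractRows(dataBlock);
    for (Object[] row : rows) {
      for (Object value : row) {
        System.out.println(value == null ? "null" : value.getClass().getName());
      }
    }
  }
}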
diff --git a/pinot-common/src/main/java/org/apache/pinot/common/datablock/DataBlockUtils.java b/pinot-common/src/main/java/org/apache/pinot/common/datablock/DataBlockUtils.java
index ed29c3b46d..f3cf116142 100644
--- a/pinot-common/src/main/java/org/apache/pinot/common/datablock/DataBlockUtils.java
+++ b/pinot-common/src/main/java/org/apache/pinot/common/datablock/DataBlockUtils.java
@@ -20,6 +20,7 @@ package org.apache.pinot.common.datablock;
import java.io.IOException;
import java.nio.ByteBuffer;
+import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
@@ -77,15 +78,15 @@ public final class DataBlockUtils {
public static List<Object[]> extractRows(BaseDataBlock dataBlock) {
DataSchema dataSchema = dataBlock.getDataSchema();
- DataSchema.ColumnDataType[] storedColumnDataTypes = dataSchema.getStoredColumnDataTypes();
+ DataSchema.ColumnDataType[] columnDataTypes = dataSchema.getColumnDataTypes();
int numRows = dataBlock.getNumberOfRows();
- int numColumns = storedColumnDataTypes.length;
+ int numColumns = columnDataTypes.length;
List<Object[]> rows = new ArrayList<>(numRows);
for (int i = 0; i < numRows; i++) {
Object[] row = new Object[numColumns];
for (int j = 0; j < numColumns; j++) {
- switch (storedColumnDataTypes[j]) {
+ switch (columnDataTypes[j]) {
// Single-value column
case INT:
row[j] = dataBlock.getInt(i, j);
@@ -102,6 +103,12 @@ public final class DataBlockUtils {
case BIG_DECIMAL:
row[j] = dataBlock.getBigDecimal(i, j);
break;
+ case BOOLEAN:
+ row[j] = DataSchema.ColumnDataType.BOOLEAN.convert(dataBlock.getInt(i, j));
+ break;
+ case TIMESTAMP:
+ row[j] = new Timestamp(dataBlock.getLong(i, j));
+ break;
case STRING:
row[j] = dataBlock.getString(i, j);
break;
@@ -125,10 +132,15 @@ public final class DataBlockUtils {
case STRING_ARRAY:
row[j] = dataBlock.getStringArray(i, j);
break;
-
+ case BOOLEAN_ARRAY:
+ row[j] = DataSchema.ColumnDataType.BOOLEAN_ARRAY.convert(dataBlock.getIntArray(i, j));
+ break;
+ case TIMESTAMP_ARRAY:
+ row[j] = DataSchema.ColumnDataType.TIMESTAMP_ARRAY.convert(dataBlock.getLongArray(i, j));
+ break;
default:
throw new IllegalStateException(
- String.format("Unsupported data type: %s for column: %s", storedColumnDataTypes[j],
+ String.format("Unsupported data type: %s for column: %s", columnDataTypes[j],
dataSchema.getColumnName(j)));
}
}
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/common/datablock/DataBlockBuilder.java b/pinot-core/src/main/java/org/apache/pinot/core/common/datablock/DataBlockBuilder.java
index 72541bd02d..abc0f64d0c 100644
--- a/pinot-core/src/main/java/org/apache/pinot/core/common/datablock/DataBlockBuilder.java
+++ b/pinot-core/src/main/java/org/apache/pinot/core/common/datablock/DataBlockBuilder.java
@@ -25,6 +25,7 @@ import java.io.DataOutputStream;
import java.io.IOException;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
+import java.sql.Timestamp;
import java.util.List;
import javax.annotation.Nullable;
import org.apache.pinot.common.datablock.BaseDataBlock;
@@ -63,7 +64,7 @@ public class DataBlockBuilder {
private DataBlockBuilder(DataSchema dataSchema, BaseDataBlock.Type blockType) {
_dataSchema = dataSchema;
- _columnDataTypes = dataSchema.getStoredColumnDataTypes();
+ _columnDataTypes = dataSchema.getColumnDataTypes();
_blockType = blockType;
_numColumns = dataSchema.size();
if (_blockType == BaseDataBlock.Type.COLUMNAR) {
@@ -100,11 +101,12 @@ public class DataBlockBuilder {
// Selection / Agg / Distinct all have similar code.
int numColumns = rowBuilder._numColumns;
RoaringBitmap[] nullBitmaps = new RoaringBitmap[numColumns];
+ DataSchema.ColumnDataType[] columnDataTypes = dataSchema.getColumnDataTypes();
DataSchema.ColumnDataType[] storedColumnDataTypes = dataSchema.getStoredColumnDataTypes();
Object[] nullPlaceholders = new Object[numColumns];
for (int colId = 0; colId < numColumns; colId++) {
nullBitmaps[colId] = new RoaringBitmap();
- nullPlaceholders[colId] = storedColumnDataTypes[colId].getNullPlaceholder();
+ nullPlaceholders[colId] = columnDataTypes[colId].convert(storedColumnDataTypes[colId].getNullPlaceholder());
}
rowBuilder._numRows = rows.size();
for (int rowId = 0; rowId < rows.size(); rowId++) {
@@ -133,6 +135,12 @@ public class DataBlockBuilder {
case BIG_DECIMAL:
setColumn(rowBuilder, byteBuffer, (BigDecimal) value);
break;
+ case BOOLEAN:
+ byteBuffer.putInt(((Boolean) value) ? 1 : 0);
+ break;
+ case TIMESTAMP:
+ byteBuffer.putLong(((Timestamp) value).getTime());
+ break;
case STRING:
setColumn(rowBuilder, byteBuffer, (String) value);
break;
@@ -143,11 +151,9 @@ public class DataBlockBuilder {
setColumn(rowBuilder, byteBuffer, value);
break;
// Multi-value column
- case BOOLEAN_ARRAY:
case INT_ARRAY:
setColumn(rowBuilder, byteBuffer, (int[]) value);
break;
- case TIMESTAMP_ARRAY:
case LONG_ARRAY:
// LONG_ARRAY type covers INT_ARRAY and LONG_ARRAY
if (value instanceof int[]) {
@@ -193,6 +199,20 @@ public class DataBlockBuilder {
case STRING_ARRAY:
setColumn(rowBuilder, byteBuffer, (String[]) value);
break;
+ case BOOLEAN_ARRAY:
+ boolean[] booleans = (boolean[]) value;
+ int length = booleans.length;
+ int[] ints = new int[length];
+ ArrayCopyUtils.copy(booleans, ints, length);
+ setColumn(rowBuilder, byteBuffer, ints);
+ break;
+ case TIMESTAMP_ARRAY:
+ Timestamp[] timestamps = (Timestamp[]) value;
+ length = timestamps.length;
+ long[] longs = new long[length];
+ ArrayCopyUtils.copy(timestamps, longs, length);
+ setColumn(rowBuilder, byteBuffer, longs);
+ break;
default:
throw new IllegalStateException(
String.format("Unsupported data type: %s for column: %s", rowBuilder._columnDataTypes[colId],
@@ -216,11 +236,12 @@ public class DataBlockBuilder {
// Selection / Agg / Distinct all have similar code.
int numColumns = columnarBuilder._numColumns;
RoaringBitmap[] nullBitmaps = new RoaringBitmap[numColumns];
+ DataSchema.ColumnDataType[] columnDataTypes = dataSchema.getColumnDataTypes();
DataSchema.ColumnDataType[] storedColumnDataTypes = dataSchema.getStoredColumnDataTypes();
Object[] nullPlaceholders = new Object[numColumns];
for (int colId = 0; colId < numColumns; colId++) {
nullBitmaps[colId] = new RoaringBitmap();
- nullPlaceholders[colId] = storedColumnDataTypes[colId].getNullPlaceholder();
+ nullPlaceholders[colId] = columnDataTypes[colId].convert(storedColumnDataTypes[colId].getNullPlaceholder());
}
for (int colId = 0; colId < columns.size(); colId++) {
Object[] column = columns.get(colId);
@@ -279,6 +300,26 @@ public class DataBlockBuilder {
setColumn(columnarBuilder, byteBuffer, (BigDecimal) value);
}
break;
+ case BOOLEAN:
+ for (int rowId = 0; rowId < columnarBuilder._numRows; rowId++) {
+ value = column[rowId];
+ if (value == null) {
+ nullBitmaps[colId].add(rowId);
+ value = nullPlaceholders[colId];
+ }
+ byteBuffer.putInt(((Boolean) value) ? 1 : 0);
+ }
+ break;
+ case TIMESTAMP:
+ for (int rowId = 0; rowId < columnarBuilder._numRows; rowId++) {
+ value = column[rowId];
+ if (value == null) {
+ nullBitmaps[colId].add(rowId);
+ value = nullPlaceholders[colId];
+ }
+ byteBuffer.putLong(((Timestamp) value).getTime());
+ }
+ break;
case STRING:
for (int rowId = 0; rowId < columnarBuilder._numRows; rowId++) {
value = column[rowId];
@@ -310,7 +351,6 @@ public class DataBlockBuilder {
}
break;
// Multi-value column
- case BOOLEAN_ARRAY:
case INT_ARRAY:
for (int rowId = 0; rowId < columnarBuilder._numRows; rowId++) {
value = column[rowId];
@@ -321,7 +361,6 @@ public class DataBlockBuilder {
setColumn(columnarBuilder, byteBuffer, (int[]) value);
}
break;
- case TIMESTAMP_ARRAY:
case LONG_ARRAY:
for (int rowId = 0; rowId < columnarBuilder._numRows; rowId++) {
value = column[rowId];
@@ -382,6 +421,32 @@ public class DataBlockBuilder {
}
}
break;
+ case BOOLEAN_ARRAY:
+ for (int rowId = 0; rowId < columnarBuilder._numRows; rowId++) {
+ value = column[rowId];
+ if (value == null) {
+ nullBitmaps[colId].add(rowId);
+ value = nullPlaceholders[colId];
+ }
+ int length = ((boolean[]) value).length;
+ int[] ints = new int[length];
+ ArrayCopyUtils.copy((boolean[]) value, ints, length);
+ setColumn(columnarBuilder, byteBuffer, ints);
+ }
+ break;
+ case TIMESTAMP_ARRAY:
+ for (int rowId = 0; rowId < columnarBuilder._numRows; rowId++) {
+ value = column[rowId];
+ if (value == null) {
+ nullBitmaps[colId].add(rowId);
+ value = nullPlaceholders[colId];
+ }
+ int length = ((Timestamp[]) value).length;
+ long[] longs = new long[length];
+ ArrayCopyUtils.copy((Timestamp[]) value, longs, length);
+ setColumn(columnarBuilder, byteBuffer, longs);
+ }
+ break;
case BYTES_ARRAY:
case STRING_ARRAY:
for (int rowId = 0; rowId < columnarBuilder._numRows; rowId++) {
diff --git a/pinot-core/src/test/java/org/apache/pinot/core/common/datablock/DataBlockTest.java b/pinot-core/src/test/java/org/apache/pinot/core/common/datablock/DataBlockTest.java
index a359bcd629..73c47e1b76 100644
--- a/pinot-core/src/test/java/org/apache/pinot/core/common/datablock/DataBlockTest.java
+++ b/pinot-core/src/test/java/org/apache/pinot/core/common/datablock/DataBlockTest.java
@@ -21,6 +21,7 @@ package org.apache.pinot.core.common.datablock;
import com.google.common.collect.ImmutableList;
import java.io.IOException;
import java.nio.ByteBuffer;
+import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
import org.apache.pinot.common.datablock.BaseDataBlock;
@@ -34,6 +35,7 @@ import org.apache.pinot.common.response.ProcessingException;
import org.apache.pinot.common.utils.DataSchema;
import org.apache.pinot.core.common.datatable.DataTableBuilderFactory;
import org.apache.pinot.core.query.selection.SelectionOperatorUtils;
+import org.apache.pinot.spi.utils.ArrayCopyUtils;
import org.roaringbitmap.RoaringBitmap;
import org.testng.Assert;
import org.testng.annotations.DataProvider;
@@ -42,7 +44,8 @@ import org.testng.annotations.Test;
public class DataBlockTest {
private static final List<DataSchema.ColumnDataType> EXCLUDE_DATA_TYPES = ImmutableList.of(
- DataSchema.ColumnDataType.OBJECT, DataSchema.ColumnDataType.BYTES_ARRAY);
+ DataSchema.ColumnDataType.OBJECT, DataSchema.ColumnDataType.JSON, DataSchema.ColumnDataType.BYTES,
+ DataSchema.ColumnDataType.BYTES_ARRAY);
private static final int TEST_ROW_COUNT = 5;
@Test
@@ -86,18 +89,30 @@ public class DataBlockTest {
DataSchema dataSchema = new DataSchema(columnNames.toArray(new String[0]),
columnDataTypes.toArray(new DataSchema.ColumnDataType[0]));
+ int numColumns = dataSchema.getColumnDataTypes().length;
List<Object[]> rows = DataBlockTestUtils.getRandomRows(dataSchema, TEST_ROW_COUNT, nullPercentile);
DataTableBuilderFactory.setDataTableVersion(DataTableFactory.VERSION_4);
+ convertToDataTableCompatibleRows(rows, dataSchema);
DataTable dataTableImpl = SelectionOperatorUtils.getDataTableFromRows(rows, dataSchema, true);
- DataTable dataBlockFromDataTable = DataBlockUtils.getDataBlock(ByteBuffer.wrap(dataTableImpl.toBytes()));
+ BaseDataBlock dataBlockFromDataTable = DataBlockUtils.getDataBlock(ByteBuffer.wrap(dataTableImpl.toBytes()));
+ RoaringBitmap[] nullBitmaps = new RoaringBitmap[numColumns];
+ for (int coldId = 0; coldId < numColumns; coldId++) {
+ nullBitmaps[coldId] = dataTableImpl.getNullRowIds(coldId);
+ }
+
+ List<Object[]> rowsFromBlock = DataBlockUtils.extractRows(dataBlockFromDataTable);
for (int rowId = 0; rowId < TEST_ROW_COUNT; rowId++) {
- Object[] rowFromDataTable = SelectionOperatorUtils.extractRowFromDataTable(dataTableImpl, rowId);
- Object[] rowFromBlock = SelectionOperatorUtils.extractRowFromDataTable(dataBlockFromDataTable, rowId);
- for (int colId = 0; colId < dataSchema.getColumnNames().length; colId++) {
- Assert.assertEquals(rowFromBlock[colId], rowFromDataTable[colId], "Error comparing Row/Column Block "
+ Object[] rowFromDataTable = SelectionOperatorUtils.extractRowFromDataTableWithNullHandling(dataTableImpl, rowId,
+ nullBitmaps);
+ Object[] rowFromBlock = rowsFromBlock.get(rowId);
+ for (int colId = 0; colId < numColumns; colId++) {
+ Object dataTableObj = rowFromDataTable[colId] == null ? null
+ : dataSchema.getColumnDataType(colId).convert(rowFromDataTable[colId]);
+ Object dataBlockObj = rowFromBlock[colId];
+ Assert.assertEquals(dataBlockObj, dataTableObj, "Error comparing Row/Column Block "
+ " at (" + rowId + "," + colId + ") of Type: " + dataSchema.getColumnDataType(colId) + "! "
- + " from DataBlock: [" + rowFromBlock[rowId] + "], from DataTable: [" + rowFromDataTable[colId] + "]");
+ + " from DataBlock: [" + dataBlockObj + "], from DataTable: [" + dataTableObj + "]");
}
}
@@ -108,6 +123,44 @@ public class DataBlockTest {
}
}
+ private static void convertToDataTableCompatibleRows(List<Object[]> rows, DataSchema dataSchema) {
+ int numColumns = dataSchema.getColumnNames().length;
+ for (int rowId = 0; rowId < rows.size(); rowId++) {
+ for (int colId = 0; colId < numColumns; colId++) {
+ switch (dataSchema.getColumnDataType(colId)) {
+ case BOOLEAN:
+ if (rows.get(rowId)[colId] != null) {
+ rows.get(rowId)[colId] = ((boolean) rows.get(rowId)[colId]) ? 1 : 0;
+ }
+ break;
+ case TIMESTAMP:
+ if (rows.get(rowId)[colId] != null) {
+ rows.get(rowId)[colId] = ((Timestamp) rows.get(rowId)[colId]).getTime();
+ }
+ break;
+ case BOOLEAN_ARRAY:
+ if (rows.get(rowId)[colId] != null) {
+ boolean[] booleans = (boolean[]) rows.get(rowId)[colId];
+ int[] ints = new int[booleans.length];
+ ArrayCopyUtils.copy(booleans, ints, booleans.length);
+ rows.get(rowId)[colId] = ints;
+ }
+ break;
+ case TIMESTAMP_ARRAY:
+ if (rows.get(rowId)[colId] != null) {
+ Timestamp[] timestamps = (Timestamp[]) rows.get(rowId)[colId];
+ long[] longs = new long[timestamps.length];
+ ArrayCopyUtils.copy(timestamps, longs, timestamps.length);
+ rows.get(rowId)[colId] = longs;
+ }
+ break;
+ default:
+ break;
+ }
+ }
+ }
+ }
+
@Test(dataProvider = "testTypeNullPercentile")
public void testAllDataTypes(int nullPercentile)
throws Exception {
diff --git a/pinot-core/src/test/java/org/apache/pinot/core/common/datablock/DataBlockTestUtils.java b/pinot-core/src/test/java/org/apache/pinot/core/common/datablock/DataBlockTestUtils.java
index 9c0dd4786f..d2231d7aad 100644
--- a/pinot-core/src/test/java/org/apache/pinot/core/common/datablock/DataBlockTestUtils.java
+++ b/pinot-core/src/test/java/org/apache/pinot/core/common/datablock/DataBlockTestUtils.java
@@ -19,6 +19,7 @@
package org.apache.pinot.core.common.datablock;
import java.math.BigDecimal;
+import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
@@ -42,7 +43,7 @@ public class DataBlockTestUtils {
DataSchema.ColumnDataType[] columnDataTypes = dataSchema.getColumnDataTypes();
Object[] row = new Object[numColumns];
for (int colId = 0; colId < numColumns; colId++) {
- switch (columnDataTypes[colId].getStoredType()) {
+ switch (columnDataTypes[colId]) {
case INT:
row[colId] = RANDOM.nextInt();
break;
@@ -58,6 +59,12 @@ public class DataBlockTestUtils {
case BIG_DECIMAL:
row[colId] = BigDecimal.valueOf(RANDOM.nextDouble());
break;
+ case BOOLEAN:
+ row[colId] = RANDOM.nextInt(2) % 2 == 1;
+ break;
+ case TIMESTAMP:
+ row[colId] = new Timestamp(RANDOM.nextLong());
+ break;
case STRING:
row[colId] = RandomStringUtils.random(RANDOM.nextInt(20));
break;
@@ -68,7 +75,6 @@ public class DataBlockTestUtils {
case OBJECT:
row[colId] = RANDOM.nextDouble();
break;
- case BOOLEAN_ARRAY:
case INT_ARRAY:
int length = RANDOM.nextInt(ARRAY_SIZE);
int[] intArray = new int[length];
@@ -77,7 +83,6 @@ public class DataBlockTestUtils {
}
row[colId] = intArray;
break;
- case TIMESTAMP_ARRAY:
case LONG_ARRAY:
length = RANDOM.nextInt(ARRAY_SIZE);
long[] longArray = new long[length];
@@ -110,6 +115,22 @@ public class DataBlockTestUtils {
}
row[colId] = stringArray;
break;
+ case BOOLEAN_ARRAY:
+ length = RANDOM.nextInt(ARRAY_SIZE);
+ boolean[] booleanArray = new boolean[length];
+ for (int i = 0; i < length; i++) {
+ booleanArray[i] = RANDOM.nextInt(2) % 2 == 1;
+ }
+ row[colId] = booleanArray;
+ break;
+ case TIMESTAMP_ARRAY:
+ length = RANDOM.nextInt(ARRAY_SIZE);
+ Timestamp[] timestampArray = new Timestamp[length];
+ for (int i = 0; i < length; i++) {
+ timestampArray[i] = new Timestamp(RANDOM.nextLong());
+ }
+ row[colId] = timestampArray;
+ break;
default:
throw new UnsupportedOperationException("Can't fill random data for column type: " + columnDataTypes[colId]);
}
diff --git a/pinot-query-runtime/src/test/java/org/apache/pinot/query/runtime/TransferableBlockUtilsTest.java b/pinot-query-runtime/src/test/java/org/apache/pinot/query/runtime/TransferableBlockUtilsTest.java
index 435eb5f614..4fa2c8aa9c 100644
--- a/pinot-query-runtime/src/test/java/org/apache/pinot/query/runtime/TransferableBlockUtilsTest.java
+++ b/pinot-query-runtime/src/test/java/org/apache/pinot/query/runtime/TransferableBlockUtilsTest.java
@@ -37,8 +37,9 @@ import org.testng.annotations.Test;
public class TransferableBlockUtilsTest {
private static final int TOTAL_ROW_COUNT = 50;
- private static final List<DataSchema.ColumnDataType> EXCLUDE_DATA_TYPES =
- ImmutableList.of(DataSchema.ColumnDataType.OBJECT, DataSchema.ColumnDataType.BYTES_ARRAY);
+ private static final List<DataSchema.ColumnDataType> EXCLUDE_DATA_TYPES = ImmutableList.of(
+ DataSchema.ColumnDataType.OBJECT, DataSchema.ColumnDataType.JSON, DataSchema.ColumnDataType.BYTES,
+ DataSchema.ColumnDataType.BYTES_ARRAY);
private static DataSchema getDataSchema() {
DataSchema.ColumnDataType[] allDataTypes =
DataSchema.ColumnDataType.values();
@@ -110,10 +111,10 @@ public class TransferableBlockUtilsTest {
continue;
}
DataSchema.ColumnDataType columnDataType = dataSchema.getColumnDataType(colId);
- Object actualVal = columnDataType.getStoredType().convert(row[colId]);
- Object expectedVal = columnDataType.getStoredType().convert(rows.get(rowId)[colId]);
- Assert.assertEquals(actualVal, expectedVal, "Error comparing split Block at (" + rowId + "," + colId + ")"
- + " of Type: " + columnDataType + "! expected: [" + expectedVal + "], actual: [" + actualVal + "]");
+ Object actualVal = row[colId];
+ Object expectedVal = rows.get(rowId)[colId];
+ Assert.assertEquals(actualVal, expectedVal, "Error comparing split Block at (" + rowId + "," + colId + ")"
+ + " of Type: " + columnDataType + "! expected: [" + expectedVal + "], actual: [" + actualVal + "]");
}
rowId++;
}
diff --git a/pinot-spi/src/main/java/org/apache/pinot/spi/utils/ArrayCopyUtils.java b/pinot-spi/src/main/java/org/apache/pinot/spi/utils/ArrayCopyUtils.java
index ac463617b4..c4b8f35498 100644
--- a/pinot-spi/src/main/java/org/apache/pinot/spi/utils/ArrayCopyUtils.java
+++ b/pinot-spi/src/main/java/org/apache/pinot/spi/utils/ArrayCopyUtils.java
@@ -563,4 +563,16 @@ public class ArrayCopyUtils {
copyToTimestamp(src[i], dest[i], rowLength);
}
}
+
+ public static void copy(boolean[] src, int[] dest, int length) {
+ for (int i = 0; i < length; i++) {
+ dest[i] = src[i] ? 1 : 0;
+ }
+ }
+
+ public static void copy(Timestamp[] src, long[] dest, int length) {
+ for (int i = 0; i < length; i++) {
+ dest[i] = src[i].getTime();
+ }
+ }
}
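As a quick, hypothetical usage sketch (not part of the patch), the two ArrayCopyUtils.copy overloads added above can be exercised like this; the sample arrays are made up for illustration.

import java.sql.Timestamp;
import org.apache.pinot.spi.utils.ArrayCopyUtils;

public class ArrayCopySketch {
  public static void main(String[] args) {
    boolean[] booleans = {true, false, true};
    int[] ints = new int[booleans.length];
    // New overload: true -> 1, false -> 0.
    ArrayCopyUtils.copy(booleans, ints, booleans.length);

    Timestamp[] timestamps = {new Timestamp(0L), new Timestamp(1_000L)};
    long[] longs = new long[timestamps.length];
    // New overload: Timestamp -> epoch milliseconds via getTime().
    ArrayCopyUtils.copy(timestamps, longs, timestamps.length);
  }
}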
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]