Repository: phoenix
Updated Branches:
refs/heads/4.x-HBase-1.4 a00949914 -> 17f00f1b8
Revert "PHOENIX-4825 Replace usage of HBase Base64 implementation with
java.util.Base64"
This reverts commit 22934e5af7af79580bf54feeb7667eccafaafc71 in order to
support JDK 1.7 for 4.x releases.
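
For context (not part of the original notification): the revert is mechanical. Each call site swaps the JDK 8-only java.util.Base64 codec back to the org.apache.hadoop.hbase.util.Base64 utility, which still runs on JDK 1.7. A minimal sketch of the two equivalent forms; the class and method names here are hypothetical, for illustration only:

    // Illustrative sketch; Base64Compat and its method names are hypothetical.
    import org.apache.hadoop.hbase.util.Bytes;

    public class Base64Compat {
        // JDK 8+ form removed by this revert:
        static String encodeJdk8(byte[] data) {
            return Bytes.toString(java.util.Base64.getEncoder().encode(data));
        }
        static byte[] decodeJdk8(String encoded) {
            return java.util.Base64.getDecoder().decode(encoded);
        }

        // JDK 1.7-compatible form restored by this revert:
        static String encodeHBase(byte[] data) {
            return org.apache.hadoop.hbase.util.Base64.encodeBytes(data);
        }
        static byte[] decodeHBase(String encoded) {
            return org.apache.hadoop.hbase.util.Base64.decode(encoded);
        }
    }

One behavioral difference worth noting: the null checks restored in PVarbinary and the upsert executors below suggest the HBase codec signals malformed input by returning null, whereas java.util.Base64.getDecoder().decode throws IllegalArgumentException.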
Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/17f00f1b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/17f00f1b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/17f00f1b
Branch: refs/heads/4.x-HBase-1.4
Commit: 17f00f1b89a88d8632ab2f4aac1608d0b2d0d209
Parents: a009499
Author: Ankit Singhal <[email protected]>
Authored: Fri Oct 5 16:49:56 2018 -0700
Committer: Ankit Singhal <[email protected]>
Committed: Fri Oct 5 16:49:56 2018 -0700
----------------------------------------------------------------------
.../org/apache/phoenix/end2end/QueryMoreIT.java | 7 ++---
.../phoenix/mapreduce/CsvBulkImportUtil.java | 8 ++----
.../util/PhoenixConfigurationUtil.java | 7 ++---
.../apache/phoenix/schema/types/PVarbinary.java | 4 +--
.../phoenix/util/csv/CsvUpsertExecutor.java | 4 +--
.../phoenix/util/json/JsonUpsertExecutor.java | 4 +--
.../util/AbstractUpsertExecutorTest.java | 12 ++++----
.../util/TenantIdByteConversionTest.java | 30 ++++----------------
8 files changed, 26 insertions(+), 50 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/phoenix/blob/17f00f1b/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryMoreIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryMoreIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryMoreIT.java
index 528fe7f..04272fa 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryMoreIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryMoreIT.java
@@ -31,13 +31,12 @@ import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
-import java.util.Base64;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
-import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Base64;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.query.QueryServices;
@@ -279,7 +278,7 @@ public class QueryMoreIT extends ParallelStatsDisabledIT {
values[i] = rs.getObject(i + 1);
}
conn = getTenantSpecificConnection(tenantId);
-            pkIds.add(Bytes.toString(Base64.getEncoder().encode(PhoenixRuntime.encodeColumnValues(conn, tableOrViewName.toUpperCase(), values, columns))));
+            pkIds.add(Base64.encodeBytes(PhoenixRuntime.encodeColumnValues(conn, tableOrViewName.toUpperCase(), values, columns)));
}
return pkIds.toArray(new String[pkIds.size()]);
}
@@ -297,7 +296,7 @@ public class QueryMoreIT extends ParallelStatsDisabledIT {
PreparedStatement stmt = conn.prepareStatement(query);
int bindCounter = 1;
for (int i = 0; i < cursorIds.length; i++) {
-            Object[] pkParts = PhoenixRuntime.decodeColumnValues(conn, tableName.toUpperCase(), Base64.getDecoder().decode(cursorIds[i]), columns);
+            Object[] pkParts = PhoenixRuntime.decodeColumnValues(conn, tableName.toUpperCase(), Base64.decode(cursorIds[i]), columns);
for (int j = 0; j < pkParts.length; j++) {
stmt.setObject(bindCounter++, pkParts[j]);
}
http://git-wip-us.apache.org/repos/asf/phoenix/blob/17f00f1b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
index bf5a538..ff9ff72 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
@@ -17,11 +17,9 @@
*/
package org.apache.phoenix.mapreduce;
-import java.util.Base64;
-
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Base64;
import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
import org.apache.phoenix.query.QueryConstants;
import org.apache.phoenix.query.QueryServices;
@@ -70,7 +68,7 @@ public class CsvBulkImportUtil {
@VisibleForTesting
static void setChar(Configuration conf, String confKey, char charValue) {
-        conf.set(confKey, Bytes.toString(Base64.getEncoder().encode(Character.toString(charValue).getBytes())));
+        conf.set(confKey, Base64.encodeBytes(Character.toString(charValue).getBytes()));
}
@VisibleForTesting
@@ -79,7 +77,7 @@ public class CsvBulkImportUtil {
if (strValue == null) {
return null;
}
- return new String(Base64.getDecoder().decode(strValue)).charAt(0);
+ return new String(Base64.decode(strValue)).charAt(0);
}
public static Path getOutputPath(Path outputdir, String tableName) {
http://git-wip-us.apache.org/repos/asf/phoenix/blob/17f00f1b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixConfigurationUtil.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixConfigurationUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixConfigurationUtil.java
index 2f552ea..b0ea17b 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixConfigurationUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixConfigurationUtil.java
@@ -24,7 +24,6 @@ import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_USE_STATS_FO
import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;
-import java.util.Base64;
import java.util.List;
import java.util.Map;
import java.util.Properties;
@@ -36,7 +35,7 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
-import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Base64;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat.NullDBWritable;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;
@@ -509,14 +508,14 @@ public final class PhoenixConfigurationUtil {
    public static byte[] getIndexMaintainers(final Configuration configuration){
        Preconditions.checkNotNull(configuration);
-        return Base64.getDecoder().decode(configuration.get(INDEX_MAINTAINERS));
+ return Base64.decode(configuration.get(INDEX_MAINTAINERS));
}
public static void setIndexMaintainers(final Configuration configuration,
final ImmutableBytesWritable indexMetaDataPtr) {
Preconditions.checkNotNull(configuration);
Preconditions.checkNotNull(indexMetaDataPtr);
-        configuration.set(INDEX_MAINTAINERS,Bytes.toString(Base64.getEncoder().encode(indexMetaDataPtr.get())));
+        configuration.set(INDEX_MAINTAINERS, Base64.encodeBytes(indexMetaDataPtr.get()));
}
    public static void setDisableIndexes(Configuration configuration, String indexName) {
http://git-wip-us.apache.org/repos/asf/phoenix/blob/17f00f1b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PVarbinary.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PVarbinary.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PVarbinary.java
index e165a9c..b3ce57a 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PVarbinary.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PVarbinary.java
@@ -19,8 +19,8 @@ package org.apache.phoenix.schema.types;
import java.sql.Types;
import java.text.Format;
-import java.util.Base64;
+import org.apache.hadoop.hbase.util.Base64;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.schema.SortOrder;
import org.apache.phoenix.util.ByteUtil;
@@ -131,7 +131,7 @@ public class PVarbinary extends PBinaryBase {
if (value == null || value.length() == 0) {
return null;
}
- Object object = Base64.getDecoder().decode(value);
+ Object object = Base64.decode(value);
if (object == null) { throw newIllegalDataException(
"Input: [" + value + "] is not base64 encoded"); }
return object;
http://git-wip-us.apache.org/repos/asf/phoenix/blob/17f00f1b/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java b/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java
index 0b5881f..d2529f7 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java
@@ -22,13 +22,13 @@ import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.sql.Types;
-import java.util.Base64;
import java.util.List;
import java.util.Properties;
import javax.annotation.Nullable;
import org.apache.commons.csv.CSVRecord;
+import org.apache.hadoop.hbase.util.Base64;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.expression.function.EncodeFormat;
import org.apache.phoenix.jdbc.PhoenixConnection;
@@ -195,7 +195,7 @@ public class CsvUpsertExecutor extends UpsertExecutor<CSVRecord, String> {
Object object = null;
switch (format) {
case BASE64:
- object = Base64.getDecoder().decode(input);
+ object = Base64.decode(input);
if (object == null) { throw new IllegalDataException(
"Input: [" + input + "] is not base64
encoded"); }
break;
http://git-wip-us.apache.org/repos/asf/phoenix/blob/17f00f1b/phoenix-core/src/main/java/org/apache/phoenix/util/json/JsonUpsertExecutor.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/json/JsonUpsertExecutor.java b/phoenix-core/src/main/java/org/apache/phoenix/util/json/JsonUpsertExecutor.java
index 9a6fef0..fa14079 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/json/JsonUpsertExecutor.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/json/JsonUpsertExecutor.java
@@ -22,13 +22,13 @@ import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
-import java.util.Base64;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import javax.annotation.Nullable;
+import org.apache.hadoop.hbase.util.Base64;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.expression.function.EncodeFormat;
import org.apache.phoenix.query.QueryServices;
@@ -215,7 +215,7 @@ public class JsonUpsertExecutor extends UpsertExecutor<Map<?, ?>, Object> {
Object object = null;
switch (format) {
case BASE64:
- object = Base64.getDecoder().decode(input.toString());
+ object = Base64.decode(input.toString());
if (object == null) { throw new IllegalDataException(
"Input: [" + input + "] is not base64 encoded"); }
break;
http://git-wip-us.apache.org/repos/asf/phoenix/blob/17f00f1b/phoenix-core/src/test/java/org/apache/phoenix/util/AbstractUpsertExecutorTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/util/AbstractUpsertExecutorTest.java b/phoenix-core/src/test/java/org/apache/phoenix/util/AbstractUpsertExecutorTest.java
index aacbd8a..3ea997b 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/util/AbstractUpsertExecutorTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/util/AbstractUpsertExecutorTest.java
@@ -36,11 +36,11 @@ import java.time.LocalTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatterBuilder;
import java.util.Arrays;
-import java.util.Base64;
import java.util.List;
import java.util.Properties;
import java.util.TimeZone;
+import org.apache.hadoop.hbase.util.Base64;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.query.BaseConnectionlessQueryTest;
import org.apache.phoenix.query.QueryServices;
@@ -94,7 +94,7 @@ public abstract class AbstractUpsertExecutorTest<R, F> extends BaseConnectionles
@Test
public void testExecute() throws Exception {
byte[] binaryData=(byte[])PBinary.INSTANCE.getSampleValue();
-        String encodedBinaryData = Bytes.toString(Base64.getEncoder().encode(binaryData));
+ String encodedBinaryData = Base64.encodeBytes(binaryData);
        getUpsertExecutor().execute(
                createRecord(123L, "NameValue", 42, Arrays.asList(1, 2, 3), true, encodedBinaryData,
                        Timestamp.valueOf(TIMESTAMP_WITH_NANOS)));
@@ -125,7 +125,7 @@ public abstract class AbstractUpsertExecutorTest<R, F> extends BaseConnectionles
@Test
public void testExecute_TooManyFields() throws Exception {
byte[] binaryData=(byte[])PBinary.INSTANCE.getSampleValue();
-        String encodedBinaryData = Bytes.toString(Base64.getEncoder().encode(binaryData));
+ String encodedBinaryData = Base64.encodeBytes(binaryData);
        R recordWithTooManyFields = createRecord(123L, "NameValue", 42, Arrays.asList(1, 2, 3),
                true, encodedBinaryData, Timestamp.valueOf(TIMESTAMP_WITH_NANOS), "garbage");
getUpsertExecutor().execute(recordWithTooManyFields);
@@ -147,7 +147,7 @@ public abstract class AbstractUpsertExecutorTest<R, F> extends BaseConnectionles
@Test
public void testExecute_NullField() throws Exception {
byte[] binaryData=(byte[])PBinary.INSTANCE.getSampleValue();
-        String encodedBinaryData = Bytes.toString(Base64.getEncoder().encode(binaryData));
+ String encodedBinaryData = Base64.encodeBytes(binaryData);
        getUpsertExecutor().execute(
                createRecord(123L, "NameValue", null, Arrays.asList(1, 2, 3), false, encodedBinaryData,
                        Timestamp.valueOf(TIMESTAMP_WITH_NANOS)));
@@ -169,7 +169,7 @@ public abstract class AbstractUpsertExecutorTest<R, F> extends BaseConnectionles
@Test
public void testExecute_InvalidType() throws Exception {
byte[] binaryData=(byte[])PBinary.INSTANCE.getSampleValue();
-        String encodedBinaryData = Bytes.toString(Base64.getEncoder().encode(binaryData));
+ String encodedBinaryData = Base64.encodeBytes(binaryData);
        R recordWithInvalidType =
                createRecord(123L, "NameValue", "ThisIsNotANumber", Arrays.asList(1, 2, 3), true,
                        encodedBinaryData, Timestamp.valueOf(TIMESTAMP_WITH_NANOS));
@@ -182,7 +182,7 @@ public abstract class AbstractUpsertExecutorTest<R, F> extends BaseConnectionles
@Test
public void testExecute_InvalidBoolean() throws Exception {
byte[] binaryData=(byte[])PBinary.INSTANCE.getSampleValue();
-        String encodedBinaryData = Bytes.toString(Base64.getEncoder().encode(binaryData));
+ String encodedBinaryData = Base64.encodeBytes(binaryData);
        R csvRecordWithInvalidType =
                createRecord("123,NameValue,42,1:2:3,NotABoolean," + encodedBinaryData + ","
                        + TIMESTAMP_WITH_NANOS);
http://git-wip-us.apache.org/repos/asf/phoenix/blob/17f00f1b/phoenix-core/src/test/java/org/apache/phoenix/util/TenantIdByteConversionTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/util/TenantIdByteConversionTest.java b/phoenix-core/src/test/java/org/apache/phoenix/util/TenantIdByteConversionTest.java
index 3ef9230..fb70d22 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/util/TenantIdByteConversionTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/util/TenantIdByteConversionTest.java
@@ -22,40 +22,20 @@ import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.fail;
import java.sql.SQLException;
-import java.util.Base64;
+import org.apache.hadoop.hbase.util.Base64;
import java.util.Collection;
import java.util.List;
-import org.apache.phoenix.schema.PDatum;
-import org.apache.phoenix.schema.PName;
-import org.apache.phoenix.schema.PNameFactory;
-import org.apache.phoenix.schema.RowKeySchema;
+import org.apache.phoenix.schema.*;
+import org.apache.phoenix.schema.types.*;
import org.apache.phoenix.schema.RowKeySchema.RowKeySchemaBuilder;
-import org.apache.phoenix.schema.SortOrder;
-import org.apache.phoenix.schema.types.PBinary;
-import org.apache.phoenix.schema.types.PBoolean;
-import org.apache.phoenix.schema.types.PChar;
-import org.apache.phoenix.schema.types.PDataType;
-import org.apache.phoenix.schema.types.PDecimal;
-import org.apache.phoenix.schema.types.PDouble;
-import org.apache.phoenix.schema.types.PFloat;
-import org.apache.phoenix.schema.types.PInteger;
-import org.apache.phoenix.schema.types.PLong;
-import org.apache.phoenix.schema.types.PSmallint;
-import org.apache.phoenix.schema.types.PTinyint;
-import org.apache.phoenix.schema.types.PUnsignedDouble;
-import org.apache.phoenix.schema.types.PUnsignedFloat;
-import org.apache.phoenix.schema.types.PUnsignedInt;
-import org.apache.phoenix.schema.types.PUnsignedLong;
-import org.apache.phoenix.schema.types.PUnsignedSmallint;
-import org.apache.phoenix.schema.types.PUnsignedTinyint;
-import org.apache.phoenix.schema.types.PVarchar;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import com.google.common.collect.Lists;
+import org.mockito.Mockito;
/*Test the getTenantIdBytes method in ScanUtil*/
@RunWith(Parameterized.class)
@@ -221,7 +201,7 @@ public class TenantIdByteConversionTest {
//Binary
byte[] bytes = new byte[] {0, 1, 2, 3};
- String byteString = new String( Base64.getEncoder().encode(bytes) );
+ String byteString = new String( Base64.encodeBytes(bytes) );
testCases.add(new Object[] {
getDataSchema(PBinary.INSTANCE, SortOrder.getDefault()),
false,