Repository: phoenix
Updated Branches:
  refs/heads/4.8-HBase-1.2 5f7682bb9 -> 16795aef6

PHOENIX-3494 ArrayIndexOutOfBoundsException with decimal desc key

Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/16795aef
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/16795aef
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/16795aef

Branch: refs/heads/4.8-HBase-1.2
Commit: 16795aef6d0f22e96bf787ec50b9043d61c0c379
Parents: 5f7682b
Author: James Taylor <[email protected]>
Authored: Thu Nov 17 16:45:51 2016 -0800
Committer: James Taylor <[email protected]>
Committed: Thu Nov 17 18:15:29 2016 -0800

----------------------------------------------------------------------
 .../apache/phoenix/end2end/UpsertValuesIT.java  | 22 ++++++++++++++++++++
 .../apache/phoenix/compile/UpsertCompiler.java  | 15 +++++--------
 .../UngroupedAggregateRegionObserver.java       | 11 ++++------
 .../function/ArrayConcatFunction.java           |  5 ++++-
 .../function/ArrayModifierFunction.java         | 10 ++++-----
 .../phoenix/schema/types/PArrayDataType.java    |  8 +++----
 .../apache/phoenix/schema/types/PBinary.java    |  4 ++--
 .../org/apache/phoenix/schema/types/PChar.java  |  4 ++--
 .../apache/phoenix/schema/types/PDataType.java  | 22 +++++++++++---------
 .../apache/phoenix/schema/types/PDecimal.java   |  6 +++---
 .../apache/phoenix/schema/types/PVarbinary.java |  4 ++--
 .../apache/phoenix/schema/types/PVarchar.java   |  4 ++--
 .../phoenix/schema/types/PDataTypeTest.java     | 22 ++++++++++++--------
 13 files changed, 80 insertions(+), 57 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/16795aef/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
index 9bbe23e..cb41b81 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
@@ -164,6 +164,28 @@ public class UpsertValuesIT extends BaseClientManagedTimeIT {
     }
 
     @Test
+    public void testUpsertValuesWithDescDecimal() throws Exception {
+        long ts = nextTimestamp();
+        Properties props = new Properties();
+        props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts));
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+        conn.createStatement().execute("create table UpsertDecimalDescTest (k DECIMAL(12,3) NOT NULL PRIMARY KEY DESC)");
+        conn.close();
+
+        props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts+5));
+        conn = DriverManager.getConnection(getUrl(), props);
+        conn.createStatement().execute("upsert into UpsertDecimalDescTest values (0.0)");
+        conn.commit();
+        conn.close();
+
+        props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts+10));
+        conn = DriverManager.getConnection(getUrl(), props);
+        ResultSet rs = conn.createStatement().executeQuery("select k from UpsertDecimalDescTest");
+        assertTrue(rs.next());
+        assertEquals(0.0, rs.getDouble(1), 0.001);
+    }
+
+    @Test
     public void testUpsertRandomValues() throws Exception {
         long ts = nextTimestamp();
         Properties props = new Properties();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/16795aef/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
index 26855aa..8512ec4 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
@@ -33,10 +33,6 @@ import java.util.Set;
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.client.HRegionLocator;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -87,14 +83,13 @@ import org.apache.phoenix.schema.PColumn;
 import org.apache.phoenix.schema.PColumnImpl;
 import org.apache.phoenix.schema.PName;
 import org.apache.phoenix.schema.PTable;
-import org.apache.phoenix.schema.PTableKey;
 import org.apache.phoenix.schema.PTable.IndexType;
 import org.apache.phoenix.schema.PTable.ViewType;
 import org.apache.phoenix.schema.PTableImpl;
+import org.apache.phoenix.schema.PTableKey;
 import org.apache.phoenix.schema.PTableType;
 import org.apache.phoenix.schema.ReadOnlyTableException;
 import org.apache.phoenix.schema.SortOrder;
-import org.apache.phoenix.schema.TableNotFoundException;
 import org.apache.phoenix.schema.TableRef;
 import org.apache.phoenix.schema.TypeMismatchException;
 import org.apache.phoenix.schema.tuple.Tuple;
@@ -203,8 +198,8 @@ public class UpsertCompiler {
                     Integer scale = rsScale == 0 ? null : rsScale;
                     // We are guaranteed that the two column will have compatible types,
                     // as we checked that before.
-                    if (!column.getDataType().isSizeCompatible(ptr, value, column.getDataType(), precision, scale,
-                            column.getMaxLength(), column.getScale())) { throw new SQLExceptionInfo.Builder(
+                    if (!column.getDataType().isSizeCompatible(ptr, value, column.getDataType(), SortOrder.getDefault(), precision,
+                            scale, column.getMaxLength(), column.getScale())) { throw new SQLExceptionInfo.Builder(
                             SQLExceptionCode.DATA_EXCEEDS_MAX_CAPACITY).setColumnName(column.getName().getString())
                             .setMessage("value=" + column.getDataType().toStringLiteral(ptr, null)).build()
                             .buildException(); }
@@ -927,8 +922,8 @@ public class UpsertCompiler {
                                     + constantExpression.toString() + " in column " + column);
                         }
                         if (!column.getDataType().isSizeCompatible(ptr, value, constantExpression.getDataType(),
-                                constantExpression.getMaxLength(), constantExpression.getScale(),
-                                column.getMaxLength(), column.getScale())) {
+                                constantExpression.getSortOrder(), constantExpression.getMaxLength(),
+                                constantExpression.getScale(), column.getMaxLength(), column.getScale())) {
                             throw new SQLExceptionInfo.Builder(
                                 SQLExceptionCode.DATA_EXCEEDS_MAX_CAPACITY).setColumnName(column.getName().getString())
                                 .setMessage("value=" + constantExpression.toString()).build().buildException(); }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/16795aef/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
index a7c6fde..1b2fbb4 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
@@ -49,10 +49,8 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.RegionTooBusyException;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
@@ -113,6 +111,7 @@ import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.ServerUtil;
 import org.apache.phoenix.util.StringUtil;
 import org.apache.phoenix.util.TimeKeeper;
+import org.apache.tephra.TxConstants;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -120,8 +119,6 @@ import com.google.common.base.Throwables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 
-import org.apache.tephra.TxConstants;
-
 /**
  * Region observer that aggregates ungrouped rows(i.e. SQL query with aggregation function and no GROUP BY).
@@ -537,9 +534,9 @@ public class UngroupedAggregateRegionObserver extends BaseScannerRegionObserver
                                     // We are guaranteed that the two column will have the
                                     // same type.
                                     if (!column.getDataType().isSizeCompatible(ptr, value,
-                                        column.getDataType(), expression.getMaxLength(),
-                                        expression.getScale(), column.getMaxLength(),
-                                        column.getScale())) {
+                                        column.getDataType(), expression.getSortOrder(),
+                                        expression.getMaxLength(), expression.getScale(),
+                                        column.getMaxLength(), column.getScale())) {
                                         throw new DataExceedsCapacityException(
                                             column.getDataType(), column.getMaxLength(),
                                             column.getScale());

http://git-wip-us.apache.org/repos/asf/phoenix/blob/16795aef/phoenix-core/src/main/java/org/apache/phoenix/expression/function/ArrayConcatFunction.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/ArrayConcatFunction.java b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/ArrayConcatFunction.java
index 77790b9..85655c6 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/ArrayConcatFunction.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/ArrayConcatFunction.java
@@ -22,6 +22,7 @@ import java.util.List;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.phoenix.expression.Expression;
 import org.apache.phoenix.parse.FunctionParseNode;
+import org.apache.phoenix.schema.SortOrder;
 import org.apache.phoenix.schema.TypeMismatchException;
 import org.apache.phoenix.schema.tuple.Tuple;
 import org.apache.phoenix.schema.types.PArrayDataType;
@@ -52,16 +53,18 @@ public class ArrayConcatFunction extends ArrayModifierFunction {
         }
         boolean isLHSRowKeyOrderOptimized = PArrayDataType.isRowKeyOrderOptimized(getLHSExpr().getDataType(), getLHSExpr().getSortOrder(), ptr);
+        SortOrder sortOrder = getRHSExpr().getSortOrder();
 
         int actualLengthOfArray1 = Math.abs(PArrayDataType.getArrayLength(ptr, getLHSBaseType(), getLHSExpr().getMaxLength()));
         int lengthArray1 = ptr.getLength();
         int offsetArray1 = ptr.getOffset();
         byte[] array1Bytes = ptr.get();
         if (!getRHSExpr().evaluate(tuple, ptr)|| ptr.getLength() == 0){
+            sortOrder = getLHSExpr().getSortOrder();
             ptr.set(array1Bytes, offsetArray1, lengthArray1);
             return true;
         }
-        checkSizeCompatibility(ptr, getLHSExpr(), getLHSExpr().getDataType(), getRHSExpr(),getRHSExpr().getDataType());
+        checkSizeCompatibility(ptr, sortOrder, getLHSExpr(), getLHSExpr().getDataType(), getRHSExpr(),getRHSExpr().getDataType());
 
         // FIXME: calling version of coerceBytes that takes into account the separator used by LHS
         // If the RHS does not have the same separator, it'll be coerced to use it. It's unclear

http://git-wip-us.apache.org/repos/asf/phoenix/blob/16795aef/phoenix-core/src/main/java/org/apache/phoenix/expression/function/ArrayModifierFunction.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/ArrayModifierFunction.java b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/ArrayModifierFunction.java
index 9bd7372..14796e4 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/ArrayModifierFunction.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/ArrayModifierFunction.java
@@ -104,7 +104,7 @@ public abstract class ArrayModifierFunction extends ScalarFunction {
 
         otherExpr.evaluate(tuple, ptr);
 
-        checkSizeCompatibility(ptr, arrayExpr, baseDataType, otherExpr, otherExpressionType);
+        checkSizeCompatibility(ptr, otherExpr.getSortOrder(), arrayExpr, baseDataType, otherExpr, otherExpressionType);
         coerceBytes(ptr, arrayExpr, baseDataType, otherExpr, otherExpressionType);
         return modifierFunction(ptr, length, offset, arrayBytes, baseDataType, arrayLength, getMaxLength(),
                 arrayExpr);
@@ -117,11 +117,11 @@ public abstract class ArrayModifierFunction extends ScalarFunction {
         return false;
     }
 
-    protected void checkSizeCompatibility(ImmutableBytesWritable ptr, Expression arrayExpr,
-            PDataType baseDataType, Expression otherExpr, PDataType otherExpressionType) {
+    protected void checkSizeCompatibility(ImmutableBytesWritable ptr, SortOrder sortOrder,
+            Expression arrayExpr, PDataType baseDataType, Expression otherExpr, PDataType otherExpressionType) {
         if (!baseDataType.isSizeCompatible(ptr, null, otherExpressionType,
-                otherExpr.getMaxLength(), otherExpr.getScale(), arrayExpr.getMaxLength(),
-                arrayExpr.getScale())) {
+                sortOrder, otherExpr.getMaxLength(), otherExpr.getScale(),
+                arrayExpr.getMaxLength(), arrayExpr.getScale())) {
             throw new DataExceedsCapacityException("Values are not size compatible");
         }
     }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/16795aef/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PArrayDataType.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PArrayDataType.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PArrayDataType.java
index eb1a7ff..c856c28 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PArrayDataType.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PArrayDataType.java
@@ -256,15 +256,15 @@ public abstract class PArrayDataType<T> extends PDataType<T> {
     }
 
     @Override
-    public boolean isSizeCompatible(ImmutableBytesWritable ptr, Object value, PDataType srcType, Integer maxLength,
-            Integer scale, Integer desiredMaxLength, Integer desiredScale) {
+    public boolean isSizeCompatible(ImmutableBytesWritable ptr, Object value, PDataType srcType, SortOrder sortOrder,
+            Integer maxLength, Integer scale, Integer desiredMaxLength, Integer desiredScale) {
         if (value == null) return true;
         PhoenixArray pArr = (PhoenixArray)value;
         PDataType baseType = PDataType.fromTypeId(srcType.getSqlType() - PDataType.ARRAY_TYPE_BASE);
         for (int i = 0; i < pArr.numElements; i++) {
             Object val = pArr.getElement(i);
-            if (!baseType.isSizeCompatible(ptr, val, baseType, srcType.getMaxLength(val), scale, desiredMaxLength,
-                    desiredScale)) { return false; }
+            if (!baseType.isSizeCompatible(ptr, val, baseType, sortOrder, srcType.getMaxLength(val), scale,
+                    desiredMaxLength, desiredScale)) { return false; }
         }
         return true;
     }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/16795aef/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PBinary.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PBinary.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PBinary.java
index 9aa3f42..e09b830 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PBinary.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PBinary.java
@@ -136,8 +136,8 @@ public class PBinary extends PBinaryBase {
 
   @Override
   public boolean isSizeCompatible(ImmutableBytesWritable ptr, Object value, PDataType srcType,
-      Integer maxLength, Integer scale, Integer desiredMaxLength,
-      Integer desiredScale) {
+      SortOrder sortOrder, Integer maxLength, Integer scale,
+      Integer desiredMaxLength, Integer desiredScale) {
     if (ptr.getLength() != 0 && (
         (srcType.equals(PVarbinary.INSTANCE) && ((String) value).length() != ptr.getLength()) ||
         (maxLength != null && desiredMaxLength != null && maxLength > desiredMaxLength))) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/16795aef/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PChar.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PChar.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PChar.java
index 2853bc4..58d8b01 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PChar.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PChar.java
@@ -153,8 +153,8 @@ public class PChar extends PDataType<String> {
 
   @Override
   public boolean isSizeCompatible(ImmutableBytesWritable ptr, Object value, PDataType srcType,
-      Integer maxLength, Integer scale, Integer desiredMaxLength, Integer desiredScale) {
-    return PVarchar.INSTANCE.isSizeCompatible(ptr, value, srcType, maxLength, scale, desiredMaxLength, desiredScale);
+      SortOrder sortOrder, Integer maxLength, Integer scale, Integer desiredMaxLength, Integer desiredScale) {
+    return PVarchar.INSTANCE.isSizeCompatible(ptr, value, srcType, sortOrder, maxLength, scale, desiredMaxLength, desiredScale);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/phoenix/blob/16795aef/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
index 5d611e9..a8470db 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
@@ -689,32 +689,32 @@ public abstract class PDataType<T> implements DataType<T>, Comparable<PDataType<
     // Calculate the precision and scale of a raw decimal bytes. Returns the values as an int
    // array. The first value is precision, the second value is scale.
     // Default scope for testing
-    protected static int[] getDecimalPrecisionAndScale(byte[] bytes, int offset, int length) {
+    protected static int[] getDecimalPrecisionAndScale(byte[] bytes, int offset, int length, SortOrder sortOrder) {
         // 0, which should have no precision nor scale.
-        if (length == 1 && bytes[offset] == ZERO_BYTE) { return new int[] { 0, 0 }; }
-        int signum = ((bytes[offset] & 0x80) == 0) ? -1 : 1;
+        if (length == 1 && sortOrder.normalize(bytes[offset]) == ZERO_BYTE) { return new int[] { 0, 0 }; }
+        int signum = ((sortOrder.normalize(bytes[offset]) & 0x80) == 0) ? -1 : 1;
         int scale;
         int index;
         int digitOffset;
         if (signum == 1) {
-            scale = (byte)(((bytes[offset] & 0x7F) - 65) * -2);
+            scale = (byte)(((sortOrder.normalize(bytes[offset]) & 0x7F) - 65) * -2);
             index = offset + length;
             digitOffset = POS_DIGIT_OFFSET;
         } else {
-            scale = (byte)((~bytes[offset] - 65 - 128) * -2);
-            index = offset + length - (bytes[offset + length - 1] == NEG_TERMINAL_BYTE ? 1 : 0);
+            scale = (byte)((~sortOrder.normalize(bytes[offset]) - 65 - 128) * -2);
+            index = offset + length - (sortOrder.normalize(bytes[offset + length - 1]) == NEG_TERMINAL_BYTE ? 1 : 0);
             digitOffset = -NEG_DIGIT_OFFSET;
         }
         length = index - offset;
         int precision = 2 * (length - 1);
-        int d = signum * bytes[--index] - digitOffset;
+        int d = signum * sortOrder.normalize(bytes[--index]) - digitOffset;
         if (d % 10 == 0) { // trailing zero
             // drop trailing zero and compensate in the scale and precision.
             d /= 10;
             scale--;
             precision -= 1;
         }
-        d = signum * bytes[offset + 1] - digitOffset;
+        d = signum * sortOrder.normalize(bytes[offset + 1]) - digitOffset;
         if (d < 10) { // Leading single digit
             // Compensate in the precision.
             precision -= 1;
@@ -728,6 +728,7 @@ public abstract class PDataType<T> implements DataType<T>, Comparable<PDataType<
         return new int[] { precision, scale };
     }
 
+
     public boolean isCoercibleTo(PDataType targetType) {
         return this.equals(targetType) || targetType.equals(PVarbinary.INSTANCE);
     }
@@ -747,14 +748,15 @@ public abstract class PDataType<T> implements DataType<T>, Comparable<PDataType<
      * @param ptr bytes pointer for the value
      * @param value object representation of the value. May be null in which case ptr will be used
      * @param srcType the type of the value
+     * @param sortOrder TODO
      * @param maxLength the max length of the source value or null if not applicable
     * @param scale the scale of the source value or null if not applicable
     * @param desiredMaxLength the desired max length for the value to be coerced
     * @param desiredScale the desired scale for the value to be coerced
     * @return true if the value may be coerced without losing precision and false otherwise.
     */
-    public boolean isSizeCompatible(ImmutableBytesWritable ptr, Object value, PDataType srcType, Integer maxLength,
-            Integer scale, Integer desiredMaxLength, Integer desiredScale) {
+    public boolean isSizeCompatible(ImmutableBytesWritable ptr, Object value, PDataType srcType, SortOrder sortOrder,
+            Integer maxLength, Integer scale, Integer desiredMaxLength, Integer desiredScale) {
         return true;
     }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/16795aef/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDecimal.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDecimal.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDecimal.java
index 17910de..be8612e 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDecimal.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDecimal.java
@@ -301,7 +301,7 @@ public class PDecimal extends PRealNumber<BigDecimal> {
 
     @Override
     public boolean isSizeCompatible(ImmutableBytesWritable ptr, Object value, PDataType srcType,
-            Integer maxLength, Integer scale, Integer desiredMaxLength, Integer desiredScale) {
+            SortOrder sortOrder, Integer maxLength, Integer scale, Integer desiredMaxLength, Integer desiredScale) {
         if (ptr.getLength() == 0) {
             return true;
         }
@@ -321,7 +321,7 @@ public class PDecimal extends PRealNumber<BigDecimal> {
             maxLength = v.precision();
             scale = v.scale();
         } else {
-            int[] v = getDecimalPrecisionAndScale(ptr.get(), ptr.getOffset(), ptr.getLength());
+            int[] v = getDecimalPrecisionAndScale(ptr.get(), ptr.getOffset(), ptr.getLength(), sortOrder);
             maxLength = v[0];
             scale = v[1];
         }
@@ -351,7 +351,7 @@ public class PDecimal extends PRealNumber<BigDecimal> {
             BigDecimal v = (BigDecimal) object;
             scale = v.scale();
         } else {
-            int[] v = getDecimalPrecisionAndScale(ptr.get(), ptr.getOffset(), ptr.getLength());
+            int[] v = getDecimalPrecisionAndScale(ptr.get(), ptr.getOffset(), ptr.getLength(), actualModifier);
             scale = v[1];
         }
     }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/16795aef/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PVarbinary.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PVarbinary.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PVarbinary.java
index aafa1c6..0c54224 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PVarbinary.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PVarbinary.java
@@ -112,8 +112,8 @@ public class PVarbinary extends PBinaryBase {
 
   @Override
   public boolean isSizeCompatible(ImmutableBytesWritable ptr, Object value, PDataType srcType,
-      Integer maxLength, Integer scale, Integer desiredMaxLength,
-      Integer desiredScale) {
+      SortOrder sortOrder, Integer maxLength, Integer scale,
+      Integer desiredMaxLength, Integer desiredScale) {
     if (ptr.getLength() != 0 && srcType.equals(PBinary.INSTANCE)
         && maxLength != null && desiredMaxLength != null) {
       return maxLength <= desiredMaxLength;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/16795aef/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PVarchar.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PVarchar.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PVarchar.java
index 2575115..a77903f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PVarchar.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PVarchar.java
@@ -99,8 +99,8 @@ public class PVarchar extends PDataType<String> {
 
  @Override
  public boolean isSizeCompatible(ImmutableBytesWritable ptr, Object value, PDataType srcType,
-      Integer maxLength, Integer scale, Integer desiredMaxLength,
-      Integer desiredScale) {
+      SortOrder sortOrder, Integer maxLength, Integer scale,
+      Integer desiredMaxLength, Integer desiredScale) {
    if (ptr.getLength() != 0 && maxLength != null && desiredMaxLength != null) {
      return maxLength <= desiredMaxLength;
    }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/16795aef/phoenix-core/src/test/java/org/apache/phoenix/schema/types/PDataTypeTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/schema/types/PDataTypeTest.java b/phoenix-core/src/test/java/org/apache/phoenix/schema/types/PDataTypeTest.java
index ba48a8a..b04ae1a 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/schema/types/PDataTypeTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/schema/types/PDataTypeTest.java
@@ -1536,10 +1536,12 @@ public class PDataTypeTest {
     public void testGetDeicmalPrecisionAndScaleFromRawBytes() throws Exception {
         // Special case for 0.
         BigDecimal bd = new BigDecimal("0");
-        byte[] b = PDecimal.INSTANCE.toBytes(bd);
-        int[] v = PDataType.getDecimalPrecisionAndScale(b, 0, b.length);
-        assertEquals(0, v[0]);
-        assertEquals(0, v[1]);
+        for (SortOrder sortOrder : SortOrder.values()) {
+            byte[] b = PDecimal.INSTANCE.toBytes(bd, sortOrder);
+            int[] v = PDataType.getDecimalPrecisionAndScale(b, 0, b.length, sortOrder);
+            assertEquals(0, v[0]);
+            assertEquals(0, v[1]);
+        }
 
         BigDecimal[] bds = new BigDecimal[] {
             new BigDecimal("1"),
@@ -1572,8 +1574,10 @@ public class PDataTypeTest {
         };
 
         for (int i=0; i<bds.length; i++) {
-            testReadDecimalPrecisionAndScaleFromRawBytes(bds[i]);
-            testReadDecimalPrecisionAndScaleFromRawBytes(bds[i].negate());
+            for (SortOrder sortOrder : SortOrder.values()) {
+                testReadDecimalPrecisionAndScaleFromRawBytes(bds[i], sortOrder);
+                testReadDecimalPrecisionAndScaleFromRawBytes(bds[i].negate(), sortOrder);
+            }
         }
 
         assertTrue(new BigDecimal("5").remainder(BigDecimal.ONE).equals(BigDecimal.ZERO));
@@ -1662,9 +1666,9 @@ public class PDataTypeTest {
         }
     }
 
-    private void testReadDecimalPrecisionAndScaleFromRawBytes(BigDecimal bd) {
-        byte[] b = PDecimal.INSTANCE.toBytes(bd);
-        int[] v = PDataType.getDecimalPrecisionAndScale(b, 0, b.length);
+    private void testReadDecimalPrecisionAndScaleFromRawBytes(BigDecimal bd, SortOrder sortOrder) {
+        byte[] b = PDecimal.INSTANCE.toBytes(bd, sortOrder);
+        int[] v = PDataType.getDecimalPrecisionAndScale(b, 0, b.length, sortOrder);
         assertEquals(bd.toString(), bd.precision(), v[0]);
         assertEquals(bd.toString(), bd.scale(), v[1]);
     }
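
For readers following this change, the sketch below (illustrative only, not part of the commit; the class name is made up) shows the idea behind the fix. A DECIMAL key stored under a DESC sort order is written with its bytes inverted, so the patched getDecimalPrecisionAndScale() now runs each byte through sortOrder.normalize() before parsing precision and scale; reading the inverted bytes as if they were ASC-encoded is what could push the old parser past the end of the array (the ArrayIndexOutOfBoundsException of PHOENIX-3494). Assuming a Phoenix client on the classpath, and assuming DESC encoding is a byte-wise inversion of the ASC form (which is what the normalize() calls in the patch imply):

import java.math.BigDecimal;

import org.apache.phoenix.schema.SortOrder;
import org.apache.phoenix.schema.types.PDecimal;

public class DescDecimalNormalizeSketch {
    public static void main(String[] args) {
        BigDecimal value = new BigDecimal("0.0");
        // The same value, encoded once for an ASC and once for a DESC sorted column.
        byte[] ascBytes = PDecimal.INSTANCE.toBytes(value, SortOrder.ASC);
        byte[] descBytes = PDecimal.INSTANCE.toBytes(value, SortOrder.DESC);

        // Normalizing each DESC byte should recover the ASC encoding, which is what the
        // patched getDecimalPrecisionAndScale() does before walking the digit bytes.
        for (int i = 0; i < descBytes.length; i++) {
            if (SortOrder.DESC.normalize(descBytes[i]) != ascBytes[i]) {
                throw new AssertionError("byte " + i + " did not normalize back to the ASC encoding");
            }
        }
        System.out.println("DESC-encoded decimal bytes normalize back to the ASC encoding");
    }
}

This is also why isSizeCompatible() now threads a SortOrder through all implementations: callers pass the source expression's sort order (or SortOrder.getDefault() where the value is known to be ASC-encoded), so PDecimal can hand it to getDecimalPrecisionAndScale() when it has to derive precision and scale from raw bytes.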
