Repository: sqoop Updated Branches: refs/heads/trunk e13dd2120 -> f37832842
SQOOP-3014: Sqoop with HCatalog import lose precision for large numbers that do not fit into double (Zoltan Toth via Boglarka Egyed) Project: http://git-wip-us.apache.org/repos/asf/sqoop/repo Commit: http://git-wip-us.apache.org/repos/asf/sqoop/commit/f3783284 Tree: http://git-wip-us.apache.org/repos/asf/sqoop/tree/f3783284 Diff: http://git-wip-us.apache.org/repos/asf/sqoop/diff/f3783284 Branch: refs/heads/trunk Commit: f3783284217a75b5313e91b73bd183df4cdddff4 Parents: e13dd21 Author: Boglarka Egyed <[email protected]> Authored: Tue Sep 12 16:12:56 2017 +0200 Committer: Boglarka Egyed <[email protected]> Committed: Tue Sep 12 16:12:56 2017 +0200 ---------------------------------------------------------------------- .../mapreduce/hcat/SqoopHCatImportHelper.java | 24 +++++++- .../apache/sqoop/hcat/HCatalogImportTest.java | 31 ++++++++++ .../hcat/TestSqoopHCatImportHelper.java | 59 ++++++++++++++++++++ 3 files changed, 111 insertions(+), 3 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/sqoop/blob/f3783284/src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatImportHelper.java ---------------------------------------------------------------------- diff --git a/src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatImportHelper.java b/src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatImportHelper.java index aba2458..1c1ed1e 100644 --- a/src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatImportHelper.java +++ b/src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatImportHelper.java @@ -149,6 +149,13 @@ public class SqoopHCatImportHelper { LOG.debug("Static partition key used : " + partKeysString); } + /* This construct is only for testing and avoiding static method + * usage + */ + SqoopHCatImportHelper() { + + } + public HCatRecord convertToHCatRecord(SqoopRecord sqr) throws IOException, InterruptedException { try { @@ -441,13 +448,24 @@ public class SqoopHCatImportHelper { HiveChar hChar = new 
HiveChar(val.toString(), cti.getLength()); return hChar; } else if (hfsType == HCatFieldSchema.Type.DECIMAL) { - BigDecimal bd = new BigDecimal(n.doubleValue(), - MathContext.DECIMAL128); - return HiveDecimal.create(bd); + return convertNumberIntoHiveDecimal(n); } return null; } + HiveDecimal convertNumberIntoHiveDecimal(Number number) { + BigDecimal bigDecimal = null; + + if(number instanceof BigDecimal) { + bigDecimal = (BigDecimal) number; + } else if(number instanceof Long) { + bigDecimal = BigDecimal.valueOf((Long)number); + } else if (number instanceof Double) { + bigDecimal = BigDecimal.valueOf((Double) number); + } + return HiveDecimal.create(bigDecimal); + } + public void cleanup() throws IOException { if (null != lobLoader) { lobLoader.close(); http://git-wip-us.apache.org/repos/asf/sqoop/blob/f3783284/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java ---------------------------------------------------------------------- diff --git a/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java b/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java index d784a20..4686493 100644 --- a/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java +++ b/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java @@ -42,6 +42,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hive.hcatalog.data.HCatRecord; import org.apache.hive.hcatalog.data.schema.HCatFieldSchema; import org.apache.hive.hcatalog.data.schema.HCatSchema; @@ -401,6 +402,36 @@ public class HCatalogImportTest extends ImportJobTestCase { } @Test + public void testDecimalTypes() throws Exception{ + final int TOTAL_RECORDS = 1 * 10; + String hcatTable = getTableName().toUpperCase(); + boolean allowRoundig = false; + String dbTypeNumeric = "numeric"; + String dbTypeDecimal = "decimal"; + 
int sqlTypeNumeric = Types.NUMERIC; + int sqlTypeDecimal = Types.DECIMAL; + HCatFieldSchema.Type hcatTypeDecimal = HCatFieldSchema.Type.DECIMAL; + + BigDecimal inputValue1 = new BigDecimal("454018528782.42006329"); + HiveDecimal expectedValue1 = HiveDecimal.create(new BigDecimal("454018528782.42006"), allowRoundig); + BigDecimal inputValue2 = new BigDecimal("87658675864540185.123456789123456789"); + HiveDecimal expectedValue2 = HiveDecimal.create(new BigDecimal("87658675864540185.12346"), allowRoundig); + int precision = 22; + int scale = 5; + + ColumnGenerator[] hcatColumns = new ColumnGenerator[] { + HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0), dbTypeNumeric, sqlTypeNumeric, + hcatTypeDecimal, precision, scale, expectedValue1, inputValue1, KeyType.NOT_A_KEY), + + HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1), dbTypeDecimal, sqlTypeDecimal, + hcatTypeDecimal, precision, scale, expectedValue2, inputValue2, KeyType.NOT_A_KEY) + }; + List<String> addlArgsArray = new ArrayList<String>(); + setExtraArgs(addlArgsArray); + runHCatImport(addlArgsArray, TOTAL_RECORDS, hcatTable, hcatColumns, null); + } + + @Test public void testNumberTypes() throws Exception { final int TOTAL_RECORDS = 1 * 10; String table = getTableName().toUpperCase(); http://git-wip-us.apache.org/repos/asf/sqoop/blob/f3783284/src/test/org/apache/sqoop/mapreduce/hcat/TestSqoopHCatImportHelper.java ---------------------------------------------------------------------- diff --git a/src/test/org/apache/sqoop/mapreduce/hcat/TestSqoopHCatImportHelper.java b/src/test/org/apache/sqoop/mapreduce/hcat/TestSqoopHCatImportHelper.java new file mode 100644 index 0000000..3f734ea --- /dev/null +++ b/src/test/org/apache/sqoop/mapreduce/hcat/TestSqoopHCatImportHelper.java @@ -0,0 +1,59 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.sqoop.mapreduce.hcat; + +import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.junit.Before; +import org.junit.Test; + +import java.math.BigDecimal; + +import static org.junit.Assert.assertEquals; + +public class TestSqoopHCatImportHelper { + + private SqoopHCatImportHelper importHelper; + + @Before + public void init() { + importHelper = new SqoopHCatImportHelper(); + } + + @Test + public void convertLongNumberIntoBigDecimalWithoutRounding() { + Long input = new Long("20160523112914897"); + HiveDecimal actual = importHelper.convertNumberIntoHiveDecimal(input); + assertEquals(new BigDecimal("20160523112914897"), actual.bigDecimalValue()); + + } + @Test + public void convertDoubleNumberIntoBigDecimalWithoutRounding() { + Double input = new Double("0.12345678912345678"); + HiveDecimal actual = importHelper.convertNumberIntoHiveDecimal(input); + assertEquals(new BigDecimal("0.12345678912345678"), actual.bigDecimalValue()); + } + + @Test + public void keepBigDecimalNumberIfInputIsBigDecimal() { + BigDecimal input = new BigDecimal("87658675864540185.123456789123456789"); + HiveDecimal actual = importHelper.convertNumberIntoHiveDecimal(input); + assertEquals(new BigDecimal("87658675864540185.123456789123456789"), actual.bigDecimalValue()); + } + 
+}
