Repository: sqoop Updated Branches: refs/heads/trunk 65b51b959 -> 0a7407613
SQOOP-3405: Refactor: break up Parameterized tests on a per database basis (Fero Szabo via Szabolcs Vasas) Project: http://git-wip-us.apache.org/repos/asf/sqoop/repo Commit: http://git-wip-us.apache.org/repos/asf/sqoop/commit/0a740761 Tree: http://git-wip-us.apache.org/repos/asf/sqoop/tree/0a740761 Diff: http://git-wip-us.apache.org/repos/asf/sqoop/diff/0a740761 Branch: refs/heads/trunk Commit: 0a7407613b12a4bb25e737506ef0f091d3a7dae1 Parents: 65b51b9 Author: root <[email protected]> Authored: Thu Nov 22 05:43:21 2018 -0800 Committer: Szabolcs Vasas <[email protected]> Committed: Thu Nov 22 15:41:14 2018 +0100 ---------------------------------------------------------------------- .../sqoop/importjob/DatabaseAdapterFactory.java | 26 ++ .../sqoop/importjob/NumericTypesImportTest.java | 332 ------------------- .../sqoop/importjob/SplitByImportTest.java | 160 --------- .../MSSQLServerImportJobTestConfiguration.java | 68 ---- .../MySQLImportJobTestConfiguration.java | 68 ---- .../MysqlImportJobTestConfiguration.java | 68 ++++ .../SqlServerImportJobTestConfiguration.java | 68 ++++ .../MysqlNumericTypesImportTest.java | 38 +++ .../NumericTypesImportTestBase.java | 304 +++++++++++++++++ .../OracleNumericTypesImportTest.java | 56 ++++ .../PostgresNumericTypesImportTest.java | 56 ++++ .../SqlServerNumericTypesImportTest.java | 38 +++ .../splitby/MysqlSplitByImportTest.java | 33 ++ .../splitby/OracleSplitByImportTest.java | 33 ++ .../splitby/PostgresSplitByImportTest.java | 33 ++ .../splitby/SplitByImportTestBase.java | 136 ++++++++ .../splitby/SqlServerSplitByImportTest.java | 33 ++ .../adapter/MSSQLServerDatabaseAdapter.java | 52 --- .../testutil/adapter/MySqlDatabaseAdapter.java | 48 --- .../testutil/adapter/MysqlDatabaseAdapter.java | 48 +++ .../adapter/SqlServerDatabaseAdapter.java | 52 +++ 21 files changed, 1022 insertions(+), 728 deletions(-) ---------------------------------------------------------------------- 
http://git-wip-us.apache.org/repos/asf/sqoop/blob/0a740761/src/test/org/apache/sqoop/importjob/DatabaseAdapterFactory.java ---------------------------------------------------------------------- diff --git a/src/test/org/apache/sqoop/importjob/DatabaseAdapterFactory.java b/src/test/org/apache/sqoop/importjob/DatabaseAdapterFactory.java new file mode 100644 index 0000000..0e9cdc3 --- /dev/null +++ b/src/test/org/apache/sqoop/importjob/DatabaseAdapterFactory.java @@ -0,0 +1,26 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.sqoop.importjob; + +import org.apache.sqoop.testutil.adapter.DatabaseAdapter; + +public interface DatabaseAdapterFactory { + + DatabaseAdapter createAdapter(); +} http://git-wip-us.apache.org/repos/asf/sqoop/blob/0a740761/src/test/org/apache/sqoop/importjob/NumericTypesImportTest.java ---------------------------------------------------------------------- diff --git a/src/test/org/apache/sqoop/importjob/NumericTypesImportTest.java b/src/test/org/apache/sqoop/importjob/NumericTypesImportTest.java deleted file mode 100644 index af310cb..0000000 --- a/src/test/org/apache/sqoop/importjob/NumericTypesImportTest.java +++ /dev/null @@ -1,332 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * <p> - * http://www.apache.org/licenses/LICENSE-2.0 - * <p> - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.sqoop.importjob; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; -import org.apache.parquet.schema.MessageType; -import org.apache.parquet.schema.OriginalType; -import org.apache.sqoop.SqoopOptions; -import org.apache.sqoop.importjob.configuration.AvroTestConfiguration; -import org.apache.sqoop.importjob.configuration.MSSQLServerImportJobTestConfiguration; -import org.apache.sqoop.importjob.configuration.MySQLImportJobTestConfiguration; -import org.apache.sqoop.importjob.configuration.OracleImportJobTestConfiguration; -import org.apache.sqoop.importjob.configuration.ParquetTestConfiguration; -import org.apache.sqoop.importjob.configuration.PostgresqlImportJobTestConfigurationForNumeric; -import org.apache.sqoop.testcategories.thirdpartytest.ThirdPartyTest; -import org.apache.sqoop.testutil.ArgumentArrayBuilder; -import org.apache.sqoop.testutil.AvroTestUtils; -import org.apache.sqoop.testutil.ImportJobTestCase; -import org.apache.sqoop.testutil.adapter.DatabaseAdapter; -import org.apache.sqoop.testutil.adapter.MSSQLServerDatabaseAdapter; -import org.apache.sqoop.testutil.adapter.MySqlDatabaseAdapter; -import org.apache.sqoop.testutil.adapter.OracleDatabaseAdapter; -import org.apache.sqoop.testutil.adapter.PostgresDatabaseAdapter; -import org.apache.sqoop.importjob.configuration.OracleImportJobTestConfigurationForNumber; -import org.apache.sqoop.importjob.configuration.PostgresqlImportJobTestConfigurationPaddingShouldSucceed; -import org.apache.sqoop.util.ParquetReader; -import org.apache.sqoop.util.BlockJUnit4ClassRunnerWithParametersFactory; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import 
org.junit.runners.Parameterized.Parameters; - -import java.io.IOException; -import java.sql.SQLException; -import java.util.Arrays; -import java.util.List; - -import static org.junit.Assert.assertEquals; -import static org.apache.sqoop.SqoopOptions.FileLayout.AvroDataFile; -import static org.apache.sqoop.SqoopOptions.FileLayout.ParquetFile; - -@RunWith(Parameterized.class) [email protected](BlockJUnit4ClassRunnerWithParametersFactory.class) -@Category(ThirdPartyTest.class) -/** - * This test covers the behavior of the Avro import for fixed point decimal types, i.e. NUMBER, NUMERIC - * and DECIMAL. - * - * Oracle and Postgres store numbers without padding, while other DBs store them padded with 0s. - * - * The features tested here affect two phases in Sqoop: - * 1. Avro schema generation during avro and parquet import - * Default precision and scale are used here to avoid issues with Oracle and Postgres, as these - * don't return valid precision and scale if they weren't specified in the table DDL. - * - * 2. Decimal padding during avro or parquet import - * In case of Oracle and Postgres, Sqoop has to pad the values with 0s to avoid errors. - */ -public class NumericTypesImportTest<T extends AvroTestConfiguration & ParquetTestConfiguration> extends ImportJobTestCase { - - public static final Log LOG = LogFactory.getLog(NumericTypesImportTest.class.getName()); - - private Configuration conf = new Configuration(); - - private final T configuration; - private final DatabaseAdapter adapter; - private final boolean failWithoutExtraArgs; - private final boolean failWithPadding; - - // Constants for the basic test case, that doesn't use extra arguments - // that are required to avoid errors, i.e. padding and default precision and scale. - private final static boolean SUCCEED_WITHOUT_EXTRA_ARGS = false; - private final static boolean FAIL_WITHOUT_EXTRA_ARGS = true; - - // Constants for the test case that has padding specified but not default precision and scale. 
- private final static boolean SUCCEED_WITH_PADDING_ONLY = false; - private final static boolean FAIL_WITH_PADDING_ONLY = true; - private Path tableDirPath; - - @Parameters(name = "Adapter: {0}| Config: {1}| failWithoutExtraArgs: {2}| failWithPadding: {3}") - public static Iterable<? extends Object> testConfigurations() { - DatabaseAdapter postgresAdapter = new PostgresDatabaseAdapter(); - OracleDatabaseAdapter oracleDatabaseAdapter = new OracleDatabaseAdapter(); - return Arrays.asList( - new Object[] {oracleDatabaseAdapter, new OracleImportJobTestConfigurationForNumber(), FAIL_WITHOUT_EXTRA_ARGS, FAIL_WITH_PADDING_ONLY}, - new Object[] {oracleDatabaseAdapter, new OracleImportJobTestConfiguration(), FAIL_WITHOUT_EXTRA_ARGS, SUCCEED_WITH_PADDING_ONLY}, - new Object[] { new MySqlDatabaseAdapter(), new MySQLImportJobTestConfiguration(), SUCCEED_WITHOUT_EXTRA_ARGS, SUCCEED_WITH_PADDING_ONLY}, - new Object[] { new MSSQLServerDatabaseAdapter(), new MSSQLServerImportJobTestConfiguration(), SUCCEED_WITHOUT_EXTRA_ARGS, SUCCEED_WITH_PADDING_ONLY}, - new Object[] { postgresAdapter, new PostgresqlImportJobTestConfigurationForNumeric(), FAIL_WITHOUT_EXTRA_ARGS, FAIL_WITH_PADDING_ONLY}, - new Object[] { postgresAdapter, new PostgresqlImportJobTestConfigurationPaddingShouldSucceed(), SUCCEED_WITHOUT_EXTRA_ARGS, SUCCEED_WITH_PADDING_ONLY} - ); - } - - public NumericTypesImportTest(DatabaseAdapter adapter, T configuration, boolean failWithoutExtraArgs, boolean failWithPaddingOnly) { - this.adapter = adapter; - this.configuration = configuration; - this.failWithoutExtraArgs = failWithoutExtraArgs; - this.failWithPadding = failWithPaddingOnly; - } - - @Rule - public ExpectedException thrown = ExpectedException.none(); - - @Override - protected Configuration getConf() { - return conf; - } - - @Override - protected boolean useHsqldbTestServer() { - return false; - } - - @Override - protected String getConnectString() { - return adapter.getConnectionString(); - } - - @Override - 
protected SqoopOptions getSqoopOptions(Configuration conf) { - SqoopOptions opts = new SqoopOptions(conf); - adapter.injectConnectionParameters(opts); - return opts; - } - - @Override - protected void dropTableIfExists(String table) throws SQLException { - adapter.dropTableIfExists(table, getManager()); - } - - @Before - public void setUp() { - super.setUp(); - String[] names = configuration.getNames(); - String[] types = configuration.getTypes(); - createTableWithColTypesAndNames(names, types, new String[0]); - List<String[]> inputData = configuration.getSampleData(); - for (String[] input : inputData) { - insertIntoTable(names, types, input); - } - tableDirPath = new Path(getWarehouseDir() + "/" + getTableName()); - } - - @After - public void tearDown() { - try { - dropTableIfExists(getTableName()); - } catch (SQLException e) { - LOG.warn("Error trying to drop table on tearDown: " + e); - } - super.tearDown(); - } - - private ArgumentArrayBuilder getArgsBuilder(SqoopOptions.FileLayout fileLayout) { - ArgumentArrayBuilder builder = new ArgumentArrayBuilder(); - if (AvroDataFile.equals(fileLayout)) { - builder.withOption("as-avrodatafile"); - } - else if (ParquetFile.equals(fileLayout)) { - builder.withOption("as-parquetfile"); - } - - return builder.withCommonHadoopFlags(true) - .withOption("warehouse-dir", getWarehouseDir()) - .withOption("num-mappers", "1") - .withOption("table", getTableName()) - .withOption("connect", getConnectString()); - } - - /** - * Adds properties to the given arg builder for decimal precision and scale. - * @param builder - */ - private void addPrecisionAndScale(ArgumentArrayBuilder builder) { - builder.withProperty("sqoop.avro.logical_types.decimal.default.precision", "38"); - builder.withProperty("sqoop.avro.logical_types.decimal.default.scale", "3"); - } - - /** - * Enables padding for decimals in avro and parquet import. 
- * @param builder - */ - private void addPadding(ArgumentArrayBuilder builder) { - builder.withProperty("sqoop.avro.decimal_padding.enable", "true"); - } - - private void addEnableAvroDecimal(ArgumentArrayBuilder builder) { - builder.withProperty("sqoop.avro.logical_types.decimal.enable", "true"); - } - - private void addEnableParquetDecimal(ArgumentArrayBuilder builder) { - builder.withProperty("sqoop.parquet.logical_types.decimal.enable", "true"); - } - - private void configureJunitToExpectFailure(boolean failWithPadding) { - if (failWithPadding) { - thrown.expect(IOException.class); - thrown.expectMessage("Failure during job; return status 1"); - } - } - - @Test - public void testAvroImportWithoutPadding() throws IOException { - configureJunitToExpectFailure(failWithoutExtraArgs); - ArgumentArrayBuilder builder = getArgsBuilder(AvroDataFile); - addEnableAvroDecimal(builder); - String[] args = builder.build(); - runImport(args); - if (!failWithoutExtraArgs) { - verify(AvroDataFile); - } - } - - @Test - public void testAvroImportWithPadding() throws IOException { - configureJunitToExpectFailure(failWithPadding); - ArgumentArrayBuilder builder = getArgsBuilder(AvroDataFile); - addEnableAvroDecimal(builder); - addPadding(builder); - runImport(builder.build()); - if (!failWithPadding) { - verify(AvroDataFile); - } - } - - @Test - public void testAvroImportWithDefaultPrecisionAndScale() throws IOException { - ArgumentArrayBuilder builder = getArgsBuilder(AvroDataFile); - addEnableAvroDecimal(builder); - addPadding(builder); - addPrecisionAndScale(builder); - runImport(builder.build()); - verify(AvroDataFile); - } - - @Test - public void testParquetImportWithoutPadding() throws IOException { - configureJunitToExpectFailure(failWithoutExtraArgs); - ArgumentArrayBuilder builder = getArgsBuilder(ParquetFile); - addEnableParquetDecimal(builder); - String[] args = builder.build(); - runImport(args); - if (!failWithoutExtraArgs) { - verify(ParquetFile); - } - } - - @Test - 
public void testParquetImportWithPadding() throws IOException { - configureJunitToExpectFailure(failWithPadding); - ArgumentArrayBuilder builder = getArgsBuilder(ParquetFile); - addEnableParquetDecimal(builder); - addPadding(builder); - runImport(builder.build()); - if (!failWithPadding) { - verify(ParquetFile); - } - } - - @Test - public void testParquetImportWithDefaultPrecisionAndScale() throws IOException { - ArgumentArrayBuilder builder = getArgsBuilder(ParquetFile); - addEnableParquetDecimal(builder); - addPadding(builder); - addPrecisionAndScale(builder); - runImport(builder.build()); - verify(ParquetFile); - } - - private void verify(SqoopOptions.FileLayout fileLayout) { - if (AvroDataFile.equals(fileLayout)) { - AvroTestUtils.registerDecimalConversionUsageForVerification(); - AvroTestUtils.verify(configuration.getExpectedResultsForAvro(), getConf(), getTablePath()); - } else if (ParquetFile.equals(fileLayout)) { - verifyParquetFile(); - } - } - - private void verifyParquetFile() { - verifyParquetSchema(); - verifyParquetContent(); - } - - private void verifyParquetContent() { - ParquetReader reader = new ParquetReader(tableDirPath); - assertEquals(Arrays.asList(configuration.getExpectedResultsForParquet()), reader.readAllInCsvSorted()); - } - - private void verifyParquetSchema() { - ParquetReader reader = new ParquetReader(tableDirPath); - MessageType parquetSchema = reader.readParquetSchema(); - - String[] types = configuration.getTypes(); - for (int i = 0; i < types.length; i ++) { - String type = types[i]; - if (isNumericSqlType(type)) { - OriginalType parquetFieldType = parquetSchema.getFields().get(i).getOriginalType(); - assertEquals(OriginalType.DECIMAL, parquetFieldType); - } - } - } - - private boolean isNumericSqlType(String type) { - return type.toUpperCase().startsWith("DECIMAL") - || type.toUpperCase().startsWith("NUMBER") - || type.toUpperCase().startsWith("NUMERIC"); - } -} 
http://git-wip-us.apache.org/repos/asf/sqoop/blob/0a740761/src/test/org/apache/sqoop/importjob/SplitByImportTest.java ---------------------------------------------------------------------- diff --git a/src/test/org/apache/sqoop/importjob/SplitByImportTest.java b/src/test/org/apache/sqoop/importjob/SplitByImportTest.java deleted file mode 100644 index 90b7cbb..0000000 --- a/src/test/org/apache/sqoop/importjob/SplitByImportTest.java +++ /dev/null @@ -1,160 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * <p> - * http://www.apache.org/licenses/LICENSE-2.0 - * <p> - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.sqoop.importjob; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; -import org.apache.sqoop.SqoopOptions; -import org.apache.sqoop.importjob.configuration.GenericImportJobSplitByTestConfiguration; -import org.apache.sqoop.testcategories.thirdpartytest.ThirdPartyTest; -import org.apache.sqoop.importjob.configuration.ImportJobTestConfiguration; -import org.apache.sqoop.importjob.configuration.ParquetTestConfiguration; -import org.apache.sqoop.testutil.ArgumentArrayBuilder; -import org.apache.sqoop.testutil.ImportJobTestCase; -import org.apache.sqoop.testutil.adapter.DatabaseAdapter; -import org.apache.sqoop.testutil.adapter.MSSQLServerDatabaseAdapter; -import org.apache.sqoop.testutil.adapter.MySqlDatabaseAdapter; -import org.apache.sqoop.testutil.adapter.OracleDatabaseAdapter; -import org.apache.sqoop.testutil.adapter.PostgresDatabaseAdapter; -import org.apache.sqoop.util.BlockJUnit4ClassRunnerWithParametersFactory; -import org.apache.sqoop.util.ParquetReader; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; - -import java.io.IOException; -import java.sql.SQLException; -import java.util.List; - -import static java.util.Arrays.asList; -import static org.junit.Assert.assertEquals; - -@RunWith(Parameterized.class) -@Category(ThirdPartyTest.class) [email protected](BlockJUnit4ClassRunnerWithParametersFactory.class) -public class SplitByImportTest extends ImportJobTestCase { - - public static final Log LOG = LogFactory.getLog(SplitByImportTest.class.getName()); - - private Configuration conf = new Configuration(); - - private final ParquetTestConfiguration 
configuration; - private final DatabaseAdapter adapter; - - @Parameters(name = "Adapter: {0}| Config: {1}") - public static Iterable<? extends Object> testConfigurations() { - GenericImportJobSplitByTestConfiguration testConfiguration = new GenericImportJobSplitByTestConfiguration(); - return asList( - new Object[] {new OracleDatabaseAdapter(), testConfiguration}, - new Object[] {new PostgresDatabaseAdapter(), testConfiguration}, - new Object[] {new MSSQLServerDatabaseAdapter(), testConfiguration}, - new Object[] {new MySqlDatabaseAdapter(), testConfiguration} - ); - } - - public SplitByImportTest(DatabaseAdapter adapter, ParquetTestConfiguration configuration) { - this.adapter = adapter; - this.configuration = configuration; - } - - @Rule - public ExpectedException thrown = ExpectedException.none(); - - @Override - protected Configuration getConf() { - return conf; - } - - @Override - protected boolean useHsqldbTestServer() { - return false; - } - - @Override - protected String getConnectString() { - return adapter.getConnectionString(); - } - - @Override - protected SqoopOptions getSqoopOptions(Configuration conf) { - SqoopOptions opts = new SqoopOptions(conf); - adapter.injectConnectionParameters(opts); - return opts; - } - - @Override - protected void dropTableIfExists(String table) throws SQLException { - adapter.dropTableIfExists(table, getManager()); - } - - @Before - public void setUp() { - super.setUp(); - String[] names = configuration.getNames(); - String[] types = configuration.getTypes(); - createTableWithColTypesAndNames(names, types, new String[0]); - List<String[]> inputData = configuration.getSampleData(); - for (String[] input : inputData) { - insertIntoTable(names, types, input); - } - } - - @After - public void tearDown() { - try { - dropTableIfExists(getTableName()); - } catch (SQLException e) { - LOG.warn("Error trying to drop table on tearDown: " + e); - } - super.tearDown(); - } - - private ArgumentArrayBuilder getArgsBuilder() { - return 
new ArgumentArrayBuilder() - .withCommonHadoopFlags(true) - .withProperty("org.apache.sqoop.splitter.allow_text_splitter","true") - .withOption("warehouse-dir", getWarehouseDir()) - .withOption("num-mappers", "2") - .withOption("table", getTableName()) - .withOption("connect", getConnectString()) - .withOption("split-by", GenericImportJobSplitByTestConfiguration.NAME_COLUMN) - .withOption("as-parquetfile"); - } - - @Test - public void testSplitBy() throws IOException { - ArgumentArrayBuilder builder = getArgsBuilder(); - String[] args = builder.build(); - runImport(args); - verifyParquetFile(); - } - - private void verifyParquetFile() { - ParquetReader reader = new ParquetReader(new Path(getWarehouseDir() + "/" + getTableName()), getConf()); - assertEquals(asList(configuration.getExpectedResultsForParquet()), reader.readAllInCsvSorted()); - } -} http://git-wip-us.apache.org/repos/asf/sqoop/blob/0a740761/src/test/org/apache/sqoop/importjob/configuration/MSSQLServerImportJobTestConfiguration.java ---------------------------------------------------------------------- diff --git a/src/test/org/apache/sqoop/importjob/configuration/MSSQLServerImportJobTestConfiguration.java b/src/test/org/apache/sqoop/importjob/configuration/MSSQLServerImportJobTestConfiguration.java deleted file mode 100644 index 4ad7def..0000000 --- a/src/test/org/apache/sqoop/importjob/configuration/MSSQLServerImportJobTestConfiguration.java +++ /dev/null @@ -1,68 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.sqoop.importjob.configuration; - -import java.util.ArrayList; -import java.util.List; - -public class MSSQLServerImportJobTestConfiguration implements ImportJobTestConfiguration, AvroTestConfiguration, ParquetTestConfiguration { - - @Override - public String[] getTypes() { - String[] columnTypes = {"INT", "NUMERIC", "NUMERIC(20)", "NUMERIC(20,5)", "NUMERIC(20,0)", "NUMERIC(38,5)", - "DECIMAL", "DECIMAL(20)", "DECIMAL(20,5)", "DECIMAL(20,0)", "DECIMAL(38,5)"}; - return columnTypes; - } - - @Override - public String[] getNames() { - String[] columnNames = {"ID", "N1", "N2", "N3", "N4", "N5", "D1", "D2", "D3", "D4", "D5"}; - return columnNames; - } - - @Override - public List<String[]> getSampleData() { - List<String[]> inputData = new ArrayList<>(); - inputData.add(new String[]{"1", "100.050", "1000000.05", "1000000.05", "1000000.05", "1000000.05", - "100.060", "1000000.05", "1000000.05", "1000000.05", "1000000.05"}); - return inputData; - } - - @Override - public String[] getExpectedResultsForAvro() { - String expectedRecord = "{\"ID\": 1, \"N1\": 100, \"N2\": 1000000, \"N3\": 1000000.05000, \"N4\": 1000000, \"N5\": 1000000.05000, " + - "\"D1\": 100, \"D2\": 1000000, \"D3\": 1000000.05000, \"D4\": 1000000, \"D5\": 1000000.05000}"; - String[] expectedResult = new String[1]; - expectedResult[0] = expectedRecord; - return expectedResult; - } - - @Override - public String[] getExpectedResultsForParquet() { - String expectedRecord = "1,100,1000000,1000000.05000,1000000,1000000.05000,100,1000000,1000000.05000,1000000,1000000.05000"; 
- String[] expectedResult = new String[1]; - expectedResult[0] = expectedRecord; - return expectedResult; - } - - @Override - public String toString() { - return getClass().getSimpleName(); - } -} http://git-wip-us.apache.org/repos/asf/sqoop/blob/0a740761/src/test/org/apache/sqoop/importjob/configuration/MySQLImportJobTestConfiguration.java ---------------------------------------------------------------------- diff --git a/src/test/org/apache/sqoop/importjob/configuration/MySQLImportJobTestConfiguration.java b/src/test/org/apache/sqoop/importjob/configuration/MySQLImportJobTestConfiguration.java deleted file mode 100644 index fbcbdeb..0000000 --- a/src/test/org/apache/sqoop/importjob/configuration/MySQLImportJobTestConfiguration.java +++ /dev/null @@ -1,68 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.sqoop.importjob.configuration; - -import java.util.ArrayList; -import java.util.List; - -public class MySQLImportJobTestConfiguration implements ImportJobTestConfiguration, AvroTestConfiguration, ParquetTestConfiguration { - - @Override - public String[] getTypes() { - String[] columnTypes = {"INT", "NUMERIC", "NUMERIC(20)", "NUMERIC(20,5)", "NUMERIC(20,0)", "NUMERIC(65,5)", - "DECIMAL", "DECIMAL(20)", "DECIMAL(20,5)", "DECIMAL(20,0)", "DECIMAL(65,5)"}; - return columnTypes; - } - - @Override - public String[] getNames() { - String[] columnNames = {"ID", "N1", "N2", "N3", "N4", "N5", "D1", "D2", "D3", "D4", "D5"}; - return columnNames; - } - - @Override - public List<String[]> getSampleData() { - List<String[]> inputData = new ArrayList<>(); - inputData.add(new String[]{"1", "100.030", "1000000.05", "1000000.05", "1000000.05", "1000000.05", - "100.040", "1000000.05", "1000000.05", "1000000.05", "1000000.05"}); - return inputData; - } - - @Override - public String[] getExpectedResultsForAvro() { - String expectedRecord = "{\"ID\": 1, \"N1\": 100, \"N2\": 1000000, \"N3\": 1000000.05000, \"N4\": 1000000, \"N5\": 1000000.05000, " + - "\"D1\": 100, \"D2\": 1000000, \"D3\": 1000000.05000, \"D4\": 1000000, \"D5\": 1000000.05000}"; - String[] expectedResult = new String[1]; - expectedResult[0] = expectedRecord; - return expectedResult; - } - - @Override - public String[] getExpectedResultsForParquet() { - String expectedRecord = "1,100,1000000,1000000.05000,1000000,1000000.05000,100,1000000,1000000.05000,1000000,1000000.05000"; - String[] expectedResult = new String[1]; - expectedResult[0] = expectedRecord; - return expectedResult; - } - - @Override - public String toString() { - return getClass().getSimpleName(); - } -} http://git-wip-us.apache.org/repos/asf/sqoop/blob/0a740761/src/test/org/apache/sqoop/importjob/configuration/MysqlImportJobTestConfiguration.java ---------------------------------------------------------------------- diff --git 
a/src/test/org/apache/sqoop/importjob/configuration/MysqlImportJobTestConfiguration.java b/src/test/org/apache/sqoop/importjob/configuration/MysqlImportJobTestConfiguration.java new file mode 100644 index 0000000..4936504 --- /dev/null +++ b/src/test/org/apache/sqoop/importjob/configuration/MysqlImportJobTestConfiguration.java @@ -0,0 +1,68 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.sqoop.importjob.configuration; + +import java.util.ArrayList; +import java.util.List; + +public class MysqlImportJobTestConfiguration implements ImportJobTestConfiguration, AvroTestConfiguration, ParquetTestConfiguration { + + @Override + public String[] getTypes() { + String[] columnTypes = {"INT", "NUMERIC", "NUMERIC(20)", "NUMERIC(20,5)", "NUMERIC(20,0)", "NUMERIC(65,5)", + "DECIMAL", "DECIMAL(20)", "DECIMAL(20,5)", "DECIMAL(20,0)", "DECIMAL(65,5)"}; + return columnTypes; + } + + @Override + public String[] getNames() { + String[] columnNames = {"ID", "N1", "N2", "N3", "N4", "N5", "D1", "D2", "D3", "D4", "D5"}; + return columnNames; + } + + @Override + public List<String[]> getSampleData() { + List<String[]> inputData = new ArrayList<>(); + inputData.add(new String[]{"1", "100.030", "1000000.05", "1000000.05", "1000000.05", "1000000.05", + "100.040", "1000000.05", "1000000.05", "1000000.05", "1000000.05"}); + return inputData; + } + + @Override + public String[] getExpectedResultsForAvro() { + String expectedRecord = "{\"ID\": 1, \"N1\": 100, \"N2\": 1000000, \"N3\": 1000000.05000, \"N4\": 1000000, \"N5\": 1000000.05000, " + + "\"D1\": 100, \"D2\": 1000000, \"D3\": 1000000.05000, \"D4\": 1000000, \"D5\": 1000000.05000}"; + String[] expectedResult = new String[1]; + expectedResult[0] = expectedRecord; + return expectedResult; + } + + @Override + public String[] getExpectedResultsForParquet() { + String expectedRecord = "1,100,1000000,1000000.05000,1000000,1000000.05000,100,1000000,1000000.05000,1000000,1000000.05000"; + String[] expectedResult = new String[1]; + expectedResult[0] = expectedRecord; + return expectedResult; + } + + @Override + public String toString() { + return getClass().getSimpleName(); + } +} http://git-wip-us.apache.org/repos/asf/sqoop/blob/0a740761/src/test/org/apache/sqoop/importjob/configuration/SqlServerImportJobTestConfiguration.java ---------------------------------------------------------------------- diff 
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.sqoop.importjob.configuration;

import java.util.ArrayList;
import java.util.List;

/**
 * Test configuration covering SQL Server fixed point numeric types: NUMERIC
 * and DECIMAL columns up to precision 38 (the SQL Server maximum), plus an
 * INT id. Expected Avro and Parquet results carry zero-padded scale digits
 * (e.g. 1000000.05000), since SQL Server stores decimals padded with 0s.
 */
public class SqlServerImportJobTestConfiguration implements ImportJobTestConfiguration, AvroTestConfiguration, ParquetTestConfiguration {

  @Override
  public String[] getTypes() {
    // One INT id column followed by five NUMERIC and five DECIMAL variants.
    return new String[] {
        "INT",
        "NUMERIC", "NUMERIC(20)", "NUMERIC(20,5)", "NUMERIC(20,0)", "NUMERIC(38,5)",
        "DECIMAL", "DECIMAL(20)", "DECIMAL(20,5)", "DECIMAL(20,0)", "DECIMAL(38,5)"
    };
  }

  @Override
  public String[] getNames() {
    return new String[] {"ID", "N1", "N2", "N3", "N4", "N5", "D1", "D2", "D3", "D4", "D5"};
  }

  @Override
  public List<String[]> getSampleData() {
    // A single row; values are positionally aligned with getNames()/getTypes().
    List<String[]> rows = new ArrayList<>();
    rows.add(new String[] {
        "1",
        "100.050", "1000000.05", "1000000.05", "1000000.05", "1000000.05",
        "100.060", "1000000.05", "1000000.05", "1000000.05", "1000000.05"
    });
    return rows;
  }

  @Override
  public String[] getExpectedResultsForAvro() {
    // Scale-0 columns are truncated to integers; scaled columns are padded to 5 digits.
    return new String[] {
        "{\"ID\": 1, \"N1\": 100, \"N2\": 1000000, \"N3\": 1000000.05000, \"N4\": 1000000, \"N5\": 1000000.05000, "
            + "\"D1\": 100, \"D2\": 1000000, \"D3\": 1000000.05000, \"D4\": 1000000, \"D5\": 1000000.05000}"
    };
  }

  @Override
  public String[] getExpectedResultsForParquet() {
    return new String[] {
        "1,100,1000000,1000000.05000,1000000,1000000.05000,100,1000000,1000000.05000,1000000,1000000.05000"
    };
  }

  @Override
  public String toString() {
    // Used in parameterized test names for readable reporting.
    return getClass().getSimpleName();
  }
}
a/src/test/org/apache/sqoop/importjob/numerictypes/MysqlNumericTypesImportTest.java b/src/test/org/apache/sqoop/importjob/numerictypes/MysqlNumericTypesImportTest.java new file mode 100644 index 0000000..e39be38 --- /dev/null +++ b/src/test/org/apache/sqoop/importjob/numerictypes/MysqlNumericTypesImportTest.java @@ -0,0 +1,38 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.sqoop.importjob.numerictypes; + +import org.apache.sqoop.importjob.configuration.MysqlImportJobTestConfiguration; +import org.apache.sqoop.testcategories.thirdpartytest.MysqlTest; +import org.apache.sqoop.testutil.adapter.DatabaseAdapter; +import org.apache.sqoop.testutil.adapter.MysqlDatabaseAdapter; +import org.junit.experimental.categories.Category; + +@Category(MysqlTest.class) +public class MysqlNumericTypesImportTest extends NumericTypesImportTestBase { + + @Override + public DatabaseAdapter createAdapter() { + return new MysqlDatabaseAdapter(); + } + + public MysqlNumericTypesImportTest() { + super(new MysqlImportJobTestConfiguration(), SUCCEED_WITHOUT_EXTRA_ARGS, SUCCEED_WITH_PADDING_ONLY); + } +} http://git-wip-us.apache.org/repos/asf/sqoop/blob/0a740761/src/test/org/apache/sqoop/importjob/numerictypes/NumericTypesImportTestBase.java ---------------------------------------------------------------------- diff --git a/src/test/org/apache/sqoop/importjob/numerictypes/NumericTypesImportTestBase.java b/src/test/org/apache/sqoop/importjob/numerictypes/NumericTypesImportTestBase.java new file mode 100644 index 0000000..320adb3 --- /dev/null +++ b/src/test/org/apache/sqoop/importjob/numerictypes/NumericTypesImportTestBase.java @@ -0,0 +1,304 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.sqoop.importjob.numerictypes;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.OriginalType;
import org.apache.sqoop.SqoopOptions;
import org.apache.sqoop.importjob.DatabaseAdapterFactory;
import org.apache.sqoop.importjob.configuration.AvroTestConfiguration;
import org.apache.sqoop.importjob.configuration.ParquetTestConfiguration;
import org.apache.sqoop.testcategories.thirdpartytest.ThirdPartyTest;
import org.apache.sqoop.testutil.ArgumentArrayBuilder;
import org.apache.sqoop.testutil.AvroTestUtils;
import org.apache.sqoop.testutil.ImportJobTestCase;
import org.apache.sqoop.testutil.adapter.DatabaseAdapter;
import org.apache.sqoop.util.ParquetReader;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;

import java.io.IOException;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.List;

import static org.apache.sqoop.SqoopOptions.FileLayout.AvroDataFile;
import static org.apache.sqoop.SqoopOptions.FileLayout.ParquetFile;
import static org.junit.Assert.assertEquals;

/**
 * This test covers the behavior of the Avro import for fixed point decimal types, i.e. NUMBER,
 * NUMERIC and DECIMAL.
 *
 * Oracle and Postgres store numbers without padding, while other DBs store them padded with 0s.
 *
 * The features tested here affect two phases in Sqoop:
 * 1. Avro schema generation during avro and parquet import
 *    Default precision and scale are used here to avoid issues with Oracle and Postgres, as these
 *    don't return valid precision and scale if they weren't specified in the table DDL.
 *
 * 2. Decimal padding during avro or parquet import
 *    In case of Oracle and Postgres, Sqoop has to pad the values with 0s to avoid errors.
 */
// NOTE: the javadoc above was previously placed between @Category and the class
// declaration, where the javadoc tool does not pick it up.
@Category(ThirdPartyTest.class)
public abstract class NumericTypesImportTestBase<T extends AvroTestConfiguration & ParquetTestConfiguration> extends ImportJobTestCase implements DatabaseAdapterFactory {

  public static final Log LOG = LogFactory.getLog(NumericTypesImportTestBase.class.getName());

  private final Configuration conf = new Configuration();

  // Per-database test inputs and expectations, injected by the subclass constructor.
  private final T configuration;
  private final DatabaseAdapter adapter;
  // Whether the import is expected to fail without any extra decimal arguments.
  private final boolean failWithoutExtraArgs;
  // Whether the import is expected to fail when only padding is enabled.
  private final boolean failWithPadding;

  // Constants for the basic test case, that doesn't use extra arguments
  // that are required to avoid errors, i.e. padding and default precision and scale.
  protected final static boolean SUCCEED_WITHOUT_EXTRA_ARGS = false;
  protected final static boolean FAIL_WITHOUT_EXTRA_ARGS = true;

  // Constants for the test case that has padding specified but not default precision and scale.
  protected final static boolean SUCCEED_WITH_PADDING_ONLY = false;
  protected final static boolean FAIL_WITH_PADDING_ONLY = true;

  private Path tableDirPath;

  public NumericTypesImportTestBase(T configuration, boolean failWithoutExtraArgs, boolean failWithPaddingOnly) {
    // NOTE(review): createAdapter() is an overridable method invoked from the
    // constructor; this is safe only while subclass implementations do not
    // depend on subclass state (all current ones just instantiate an adapter).
    this.adapter = createAdapter();
    this.configuration = configuration;
    this.failWithoutExtraArgs = failWithoutExtraArgs;
    this.failWithPadding = failWithPaddingOnly;
  }

  @Rule
  public ExpectedException thrown = ExpectedException.none();

  @Override
  protected Configuration getConf() {
    return conf;
  }

  @Override
  protected boolean useHsqldbTestServer() {
    // These tests run against real third-party databases, not the embedded HSQLDB.
    return false;
  }

  @Override
  protected String getConnectString() {
    return adapter.getConnectionString();
  }

  @Override
  protected SqoopOptions getSqoopOptions(Configuration conf) {
    SqoopOptions opts = new SqoopOptions(conf);
    adapter.injectConnectionParameters(opts);
    return opts;
  }

  @Override
  protected void dropTableIfExists(String table) throws SQLException {
    adapter.dropTableIfExists(table, getManager());
  }

  /** Creates the test table and loads the configuration's sample rows into it. */
  @Before
  public void setUp() {
    super.setUp();
    String[] names = configuration.getNames();
    String[] types = configuration.getTypes();
    createTableWithColTypesAndNames(names, types, new String[0]);
    List<String[]> inputData = configuration.getSampleData();
    for (String[] input : inputData) {
      insertIntoTable(names, types, input);
    }
    tableDirPath = new Path(getWarehouseDir() + "/" + getTableName());
  }

  @After
  public void tearDown() {
    try {
      dropTableIfExists(getTableName());
    } catch (SQLException e) {
      // Best-effort cleanup; a failed drop must not mask the test result.
      LOG.warn("Error trying to drop table on tearDown: " + e);
    }
    super.tearDown();
  }

  /**
   * Builds the common import arguments for the given file layout
   * (avro or parquet), targeting the test table with a single mapper.
   */
  private ArgumentArrayBuilder getArgsBuilder(SqoopOptions.FileLayout fileLayout) {
    ArgumentArrayBuilder builder = new ArgumentArrayBuilder();
    if (AvroDataFile.equals(fileLayout)) {
      builder.withOption("as-avrodatafile");
    }
    else if (ParquetFile.equals(fileLayout)) {
      builder.withOption("as-parquetfile");
    }

    return builder.withCommonHadoopFlags(true)
        .withOption("warehouse-dir", getWarehouseDir())
        .withOption("num-mappers", "1")
        .withOption("table", getTableName())
        .withOption("connect", getConnectString());
  }

  /**
   * Adds properties to the given arg builder for decimal precision and scale.
   * @param builder
   */
  private void addPrecisionAndScale(ArgumentArrayBuilder builder) {
    builder.withProperty("sqoop.avro.logical_types.decimal.default.precision", "38");
    builder.withProperty("sqoop.avro.logical_types.decimal.default.scale", "3");
  }

  /**
   * Enables padding for decimals in avro and parquet import.
   * @param builder
   */
  private void addPadding(ArgumentArrayBuilder builder) {
    builder.withProperty("sqoop.avro.decimal_padding.enable", "true");
  }

  private void addEnableAvroDecimal(ArgumentArrayBuilder builder) {
    builder.withProperty("sqoop.avro.logical_types.decimal.enable", "true");
  }

  private void addEnableParquetDecimal(ArgumentArrayBuilder builder) {
    builder.withProperty("sqoop.parquet.logical_types.decimal.enable", "true");
  }

  /**
   * Arms the ExpectedException rule when the current scenario is expected to
   * fail with a job error. (Parameter renamed from the misleading
   * "failWithPadding": callers pass either expectation flag.)
   */
  private void configureJunitToExpectFailure(boolean expectFailure) {
    if (expectFailure) {
      thrown.expect(IOException.class);
      thrown.expectMessage("Failure during job; return status 1");
    }
  }

  @Test
  public void testAvroImportWithoutPadding() throws IOException {
    configureJunitToExpectFailure(failWithoutExtraArgs);
    ArgumentArrayBuilder builder = getArgsBuilder(AvroDataFile);
    addEnableAvroDecimal(builder);
    String[] args = builder.build();
    runImport(args);
    if (!failWithoutExtraArgs) {
      verify(AvroDataFile);
    }
  }

  @Test
  public void testAvroImportWithPadding() throws IOException {
    configureJunitToExpectFailure(failWithPadding);
    ArgumentArrayBuilder builder = getArgsBuilder(AvroDataFile);
    addEnableAvroDecimal(builder);
    addPadding(builder);
    runImport(builder.build());
    if (!failWithPadding) {
      verify(AvroDataFile);
    }
  }

  @Test
  public void testAvroImportWithDefaultPrecisionAndScale() throws IOException {
    // With padding AND default precision/scale every database is expected to succeed.
    ArgumentArrayBuilder builder = getArgsBuilder(AvroDataFile);
    addEnableAvroDecimal(builder);
    addPadding(builder);
    addPrecisionAndScale(builder);
    runImport(builder.build());
    verify(AvroDataFile);
  }

  @Test
  public void testParquetImportWithoutPadding() throws IOException {
    configureJunitToExpectFailure(failWithoutExtraArgs);
    ArgumentArrayBuilder builder = getArgsBuilder(ParquetFile);
    addEnableParquetDecimal(builder);
    String[] args = builder.build();
    runImport(args);
    if (!failWithoutExtraArgs) {
      verify(ParquetFile);
    }
  }

  @Test
  public void testParquetImportWithPadding() throws IOException {
    configureJunitToExpectFailure(failWithPadding);
    ArgumentArrayBuilder builder = getArgsBuilder(ParquetFile);
    addEnableParquetDecimal(builder);
    addPadding(builder);
    runImport(builder.build());
    if (!failWithPadding) {
      verify(ParquetFile);
    }
  }

  @Test
  public void testParquetImportWithDefaultPrecisionAndScale() throws IOException {
    // With padding AND default precision/scale every database is expected to succeed.
    ArgumentArrayBuilder builder = getArgsBuilder(ParquetFile);
    addEnableParquetDecimal(builder);
    addPadding(builder);
    addPrecisionAndScale(builder);
    runImport(builder.build());
    verify(ParquetFile);
  }

  /** Dispatches result verification based on the imported file layout. */
  private void verify(SqoopOptions.FileLayout fileLayout) {
    if (AvroDataFile.equals(fileLayout)) {
      AvroTestUtils.registerDecimalConversionUsageForVerification();
      AvroTestUtils.verify(configuration.getExpectedResultsForAvro(), getConf(), getTablePath());
    } else if (ParquetFile.equals(fileLayout)) {
      verifyParquetFile();
    }
  }

  private void verifyParquetFile() {
    verifyParquetSchema();
    verifyParquetContent();
  }

  private void verifyParquetContent() {
    ParquetReader reader = new ParquetReader(tableDirPath);
    assertEquals(Arrays.asList(configuration.getExpectedResultsForParquet()), reader.readAllInCsvSorted());
  }

  /** Asserts that every numeric SQL column was written as a parquet DECIMAL field. */
  private void verifyParquetSchema() {
    ParquetReader reader = new ParquetReader(tableDirPath);
    MessageType parquetSchema = reader.readParquetSchema();

    String[] types = configuration.getTypes();
    for (int i = 0; i < types.length; i++) {
      String type = types[i];
      if (isNumericSqlType(type)) {
        OriginalType parquetFieldType = parquetSchema.getFields().get(i).getOriginalType();
        assertEquals(OriginalType.DECIMAL, parquetFieldType);
      }
    }
  }

  private boolean isNumericSqlType(String type) {
    return type.toUpperCase().startsWith("DECIMAL")
        || type.toUpperCase().startsWith("NUMBER")
        || type.toUpperCase().startsWith("NUMERIC");
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.sqoop.importjob.numerictypes;

import org.apache.sqoop.importjob.configuration.AvroTestConfiguration;
import org.apache.sqoop.importjob.configuration.OracleImportJobTestConfiguration;
import org.apache.sqoop.importjob.configuration.OracleImportJobTestConfigurationForNumber;
import org.apache.sqoop.importjob.configuration.ParquetTestConfiguration;
import org.apache.sqoop.testcategories.thirdpartytest.OracleTest;
import org.apache.sqoop.testutil.adapter.DatabaseAdapter;
import org.apache.sqoop.testutil.adapter.OracleDatabaseAdapter;
import org.apache.sqoop.util.BlockJUnit4ClassRunnerWithParametersFactory;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.util.Arrays;

/**
 * Numeric type import tests run against Oracle.
 *
 * <p>Oracle stores numbers without padding, so the NUMBER configuration fails
 * both without extra arguments and with padding only, while the general
 * configuration succeeds once padding is enabled.
 */
@Category(OracleTest.class)
@RunWith(Parameterized.class)
@Parameterized.UseParametersRunnerFactory(BlockJUnit4ClassRunnerWithParametersFactory.class)
public class OracleNumericTypesImportTest<T extends AvroTestConfiguration & ParquetTestConfiguration> extends NumericTypesImportTestBase<T> {

  @Parameterized.Parameters(name = "Config: {0}| failWithoutExtraArgs: {1}| failWithPadding: {2}")
  public static Iterable<Object[]> testConfigurations() {
    // Iterable<Object[]> is the idiomatic Parameterized return type
    // (previously declared as Iterable<? extends Object>).
    return Arrays.asList(
        new Object[]{new OracleImportJobTestConfigurationForNumber(), FAIL_WITHOUT_EXTRA_ARGS, FAIL_WITH_PADDING_ONLY},
        new Object[]{new OracleImportJobTestConfiguration(), FAIL_WITHOUT_EXTRA_ARGS, SUCCEED_WITH_PADDING_ONLY}
    );
  }

  public OracleNumericTypesImportTest(T configuration, boolean failWithoutExtraArgs, boolean failWithPaddingOnly) {
    super(configuration, failWithoutExtraArgs, failWithPaddingOnly);
  }

  @Override
  public DatabaseAdapter createAdapter() {
    return new OracleDatabaseAdapter();
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.sqoop.importjob.numerictypes;

import org.apache.sqoop.importjob.configuration.AvroTestConfiguration;
import org.apache.sqoop.importjob.configuration.ParquetTestConfiguration;
import org.apache.sqoop.importjob.configuration.PostgresqlImportJobTestConfigurationForNumeric;
import org.apache.sqoop.importjob.configuration.PostgresqlImportJobTestConfigurationPaddingShouldSucceed;
import org.apache.sqoop.testcategories.thirdpartytest.PostgresqlTest;
import org.apache.sqoop.testutil.adapter.DatabaseAdapter;
import org.apache.sqoop.testutil.adapter.PostgresDatabaseAdapter;
import org.apache.sqoop.util.BlockJUnit4ClassRunnerWithParametersFactory;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.util.Arrays;

/**
 * Numeric type import tests run against PostgreSQL.
 *
 * <p>Postgres stores numbers without padding, so the NUMERIC configuration
 * fails without extra arguments, while the padding-should-succeed
 * configuration passes in every scenario.
 */
@Category(PostgresqlTest.class)
@RunWith(Parameterized.class)
@Parameterized.UseParametersRunnerFactory(BlockJUnit4ClassRunnerWithParametersFactory.class)
public class PostgresNumericTypesImportTest<T extends AvroTestConfiguration & ParquetTestConfiguration> extends NumericTypesImportTestBase<T> {

  @Parameterized.Parameters(name = "Config: {0}| failWithoutExtraArgs: {1}| failWithPadding: {2}")
  public static Iterable<Object[]> testConfigurations() {
    // Iterable<Object[]> is the idiomatic Parameterized return type
    // (previously declared as Iterable<? extends Object>).
    return Arrays.asList(
        new Object[]{new PostgresqlImportJobTestConfigurationForNumeric(), FAIL_WITHOUT_EXTRA_ARGS, FAIL_WITH_PADDING_ONLY},
        new Object[]{new PostgresqlImportJobTestConfigurationPaddingShouldSucceed(), SUCCEED_WITHOUT_EXTRA_ARGS, SUCCEED_WITH_PADDING_ONLY}
    );
  }

  public PostgresNumericTypesImportTest(T configuration, boolean failWithoutExtraArgs, boolean failWithPaddingOnly) {
    super(configuration, failWithoutExtraArgs, failWithPaddingOnly);
  }

  @Override
  public DatabaseAdapter createAdapter() {
    return new PostgresDatabaseAdapter();
  }
}
+ */ + +package org.apache.sqoop.importjob.numerictypes; + +import org.apache.sqoop.importjob.configuration.SqlServerImportJobTestConfiguration; +import org.apache.sqoop.testcategories.thirdpartytest.SqlServerTest; +import org.apache.sqoop.testutil.adapter.DatabaseAdapter; +import org.apache.sqoop.testutil.adapter.SqlServerDatabaseAdapter; +import org.junit.experimental.categories.Category; + +@Category(SqlServerTest.class) +public class SqlServerNumericTypesImportTest extends NumericTypesImportTestBase { + + @Override + public DatabaseAdapter createAdapter() { + return new SqlServerDatabaseAdapter(); + } + + public SqlServerNumericTypesImportTest() { + super(new SqlServerImportJobTestConfiguration(), SUCCEED_WITHOUT_EXTRA_ARGS, SUCCEED_WITH_PADDING_ONLY); + } +} http://git-wip-us.apache.org/repos/asf/sqoop/blob/0a740761/src/test/org/apache/sqoop/importjob/splitby/MysqlSplitByImportTest.java ---------------------------------------------------------------------- diff --git a/src/test/org/apache/sqoop/importjob/splitby/MysqlSplitByImportTest.java b/src/test/org/apache/sqoop/importjob/splitby/MysqlSplitByImportTest.java new file mode 100644 index 0000000..daba08b --- /dev/null +++ b/src/test/org/apache/sqoop/importjob/splitby/MysqlSplitByImportTest.java @@ -0,0 +1,33 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.sqoop.importjob.splitby; + +import org.apache.sqoop.testcategories.thirdpartytest.MysqlTest; +import org.apache.sqoop.testutil.adapter.DatabaseAdapter; +import org.apache.sqoop.testutil.adapter.MysqlDatabaseAdapter; +import org.junit.experimental.categories.Category; + +@Category(MysqlTest.class) +public class MysqlSplitByImportTest extends SplitByImportTestBase { + + @Override + public DatabaseAdapter createAdapter() { + return new MysqlDatabaseAdapter(); + } +} http://git-wip-us.apache.org/repos/asf/sqoop/blob/0a740761/src/test/org/apache/sqoop/importjob/splitby/OracleSplitByImportTest.java ---------------------------------------------------------------------- diff --git a/src/test/org/apache/sqoop/importjob/splitby/OracleSplitByImportTest.java b/src/test/org/apache/sqoop/importjob/splitby/OracleSplitByImportTest.java new file mode 100644 index 0000000..dff2800 --- /dev/null +++ b/src/test/org/apache/sqoop/importjob/splitby/OracleSplitByImportTest.java @@ -0,0 +1,33 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.sqoop.importjob.splitby; + +import org.apache.sqoop.testcategories.thirdpartytest.OracleTest; +import org.apache.sqoop.testutil.adapter.DatabaseAdapter; +import org.apache.sqoop.testutil.adapter.OracleDatabaseAdapter; +import org.junit.experimental.categories.Category; + +@Category(OracleTest.class) +public class OracleSplitByImportTest extends SplitByImportTestBase { + + @Override + public DatabaseAdapter createAdapter() { + return new OracleDatabaseAdapter(); + } +} http://git-wip-us.apache.org/repos/asf/sqoop/blob/0a740761/src/test/org/apache/sqoop/importjob/splitby/PostgresSplitByImportTest.java ---------------------------------------------------------------------- diff --git a/src/test/org/apache/sqoop/importjob/splitby/PostgresSplitByImportTest.java b/src/test/org/apache/sqoop/importjob/splitby/PostgresSplitByImportTest.java new file mode 100644 index 0000000..369770c --- /dev/null +++ b/src/test/org/apache/sqoop/importjob/splitby/PostgresSplitByImportTest.java @@ -0,0 +1,33 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.sqoop.importjob.splitby; + +import org.apache.sqoop.testcategories.thirdpartytest.PostgresqlTest; +import org.apache.sqoop.testutil.adapter.DatabaseAdapter; +import org.apache.sqoop.testutil.adapter.PostgresDatabaseAdapter; +import org.junit.experimental.categories.Category; + +@Category(PostgresqlTest.class) +public class PostgresSplitByImportTest extends SplitByImportTestBase { + + @Override + public DatabaseAdapter createAdapter() { + return new PostgresDatabaseAdapter(); + } +} http://git-wip-us.apache.org/repos/asf/sqoop/blob/0a740761/src/test/org/apache/sqoop/importjob/splitby/SplitByImportTestBase.java ---------------------------------------------------------------------- diff --git a/src/test/org/apache/sqoop/importjob/splitby/SplitByImportTestBase.java b/src/test/org/apache/sqoop/importjob/splitby/SplitByImportTestBase.java new file mode 100644 index 0000000..2fc6b95 --- /dev/null +++ b/src/test/org/apache/sqoop/importjob/splitby/SplitByImportTestBase.java @@ -0,0 +1,136 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * <p> + * http://www.apache.org/licenses/LICENSE-2.0 + * <p> + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.sqoop.importjob.splitby;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.sqoop.SqoopOptions;
import org.apache.sqoop.importjob.DatabaseAdapterFactory;
import org.apache.sqoop.importjob.configuration.GenericImportJobSplitByTestConfiguration;
import org.apache.sqoop.importjob.configuration.ParquetTestConfiguration;
import org.apache.sqoop.testutil.ArgumentArrayBuilder;
import org.apache.sqoop.testutil.ImportJobTestCase;
import org.apache.sqoop.testutil.adapter.DatabaseAdapter;
import org.apache.sqoop.util.ParquetReader;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

import java.io.IOException;
import java.sql.SQLException;
import java.util.List;

import static java.util.Arrays.asList;
import static org.junit.Assert.assertEquals;

/**
 * Base class for per-database split-by import tests: imports the generic
 * split-by table as parquet with two mappers, splitting on a text column,
 * and verifies the parquet output against the configuration's expectations.
 * Subclasses only supply the {@link DatabaseAdapter} via
 * {@link DatabaseAdapterFactory#createAdapter()}.
 */
public abstract class SplitByImportTestBase extends ImportJobTestCase implements DatabaseAdapterFactory {

  public static final Log LOG = LogFactory.getLog(SplitByImportTestBase.class.getName());

  private final Configuration conf = new Configuration();

  private final ParquetTestConfiguration configuration;
  private final DatabaseAdapter adapter;

  public SplitByImportTestBase() {
    // NOTE(review): createAdapter() is an overridable method invoked from the
    // constructor; safe only while subclass implementations are stateless.
    this.adapter = createAdapter();
    this.configuration = new GenericImportJobSplitByTestConfiguration();
  }

  // NOTE(review): this rule is not referenced anywhere in this class; kept for
  // potential use by subclasses — confirm before removing.
  @Rule
  public ExpectedException thrown = ExpectedException.none();

  @Override
  protected Configuration getConf() {
    return conf;
  }

  @Override
  protected boolean useHsqldbTestServer() {
    // These tests run against real third-party databases, not the embedded HSQLDB.
    return false;
  }

  @Override
  protected String getConnectString() {
    return adapter.getConnectionString();
  }

  @Override
  protected SqoopOptions getSqoopOptions(Configuration conf) {
    SqoopOptions opts = new SqoopOptions(conf);
    adapter.injectConnectionParameters(opts);
    return opts;
  }

  @Override
  protected void dropTableIfExists(String table) throws SQLException {
    adapter.dropTableIfExists(table, getManager());
  }

  /** Creates the test table and loads the configuration's sample rows into it. */
  @Before
  public void setUp() {
    super.setUp();
    String[] names = configuration.getNames();
    String[] types = configuration.getTypes();
    createTableWithColTypesAndNames(names, types, new String[0]);
    List<String[]> inputData = configuration.getSampleData();
    for (String[] input : inputData) {
      insertIntoTable(names, types, input);
    }
  }

  @After
  public void tearDown() {
    try {
      dropTableIfExists(getTableName());
    } catch (SQLException e) {
      // Best-effort cleanup; a failed drop must not mask the test result.
      LOG.warn("Error trying to drop table on tearDown: " + e);
    }
    super.tearDown();
  }

  /**
   * Builds the parquet import arguments: two mappers splitting on the text
   * name column, with the text splitter explicitly allowed.
   */
  private ArgumentArrayBuilder getArgsBuilder() {
    return new ArgumentArrayBuilder()
        .withCommonHadoopFlags(true)
        .withProperty("org.apache.sqoop.splitter.allow_text_splitter", "true")
        .withOption("warehouse-dir", getWarehouseDir())
        .withOption("num-mappers", "2")
        .withOption("table", getTableName())
        .withOption("connect", getConnectString())
        .withOption("split-by", GenericImportJobSplitByTestConfiguration.NAME_COLUMN)
        .withOption("as-parquetfile");
  }

  @Test
  public void testSplitBy() throws IOException {
    ArgumentArrayBuilder builder = getArgsBuilder();
    String[] args = builder.build();
    runImport(args);
    verifyParquetFile();
  }

  private void verifyParquetFile() {
    ParquetReader reader = new ParquetReader(new Path(getWarehouseDir() + "/" + getTableName()), getConf());
    assertEquals(asList(configuration.getExpectedResultsForParquet()), reader.readAllInCsvSorted());
  }
}
b/src/test/org/apache/sqoop/importjob/splitby/SqlServerSplitByImportTest.java new file mode 100644 index 0000000..c1e9c88 --- /dev/null +++ b/src/test/org/apache/sqoop/importjob/splitby/SqlServerSplitByImportTest.java @@ -0,0 +1,33 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.sqoop.importjob.splitby; + +import org.apache.sqoop.testcategories.thirdpartytest.SqlServerTest; +import org.apache.sqoop.testutil.adapter.DatabaseAdapter; +import org.apache.sqoop.testutil.adapter.SqlServerDatabaseAdapter; +import org.junit.experimental.categories.Category; + +@Category(SqlServerTest.class) +public class SqlServerSplitByImportTest extends SplitByImportTestBase { + + @Override + public DatabaseAdapter createAdapter() { + return new SqlServerDatabaseAdapter(); + } +} http://git-wip-us.apache.org/repos/asf/sqoop/blob/0a740761/src/test/org/apache/sqoop/testutil/adapter/MSSQLServerDatabaseAdapter.java ---------------------------------------------------------------------- diff --git a/src/test/org/apache/sqoop/testutil/adapter/MSSQLServerDatabaseAdapter.java b/src/test/org/apache/sqoop/testutil/adapter/MSSQLServerDatabaseAdapter.java deleted file mode 100644 index 2256716..0000000 --- a/src/test/org/apache/sqoop/testutil/adapter/MSSQLServerDatabaseAdapter.java +++ /dev/null @@ -1,52 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.sqoop.testutil.adapter; - -import org.apache.sqoop.SqoopOptions; -import org.apache.sqoop.manager.ConnManager; -import org.apache.sqoop.manager.sqlserver.MSSQLTestUtils; - -import java.sql.SQLException; - -public class MSSQLServerDatabaseAdapter implements DatabaseAdapter { - - @Override - public String getConnectionString() { - return MSSQLTestUtils.CONNECT_STRING; - } - - @Override - public SqoopOptions injectConnectionParameters(SqoopOptions options) { - options.setConnectString(MSSQLTestUtils.CONNECT_STRING); - options.setUsername(MSSQLTestUtils.DATABASE_USER); - options.setPassword(MSSQLTestUtils.DATABASE_PASSWORD); - return options; - } - - @Override - public void dropTableIfExists(String tableName, ConnManager manager) throws SQLException { - String dropTableStatement = "DROP TABLE IF EXISTS " + manager.escapeTableName(tableName); - manager.execAndPrint(dropTableStatement); - } - - @Override - public String toString() { - return getClass().getSimpleName(); - } -} http://git-wip-us.apache.org/repos/asf/sqoop/blob/0a740761/src/test/org/apache/sqoop/testutil/adapter/MySqlDatabaseAdapter.java ---------------------------------------------------------------------- diff --git a/src/test/org/apache/sqoop/testutil/adapter/MySqlDatabaseAdapter.java b/src/test/org/apache/sqoop/testutil/adapter/MySqlDatabaseAdapter.java deleted file mode 100644 index ebd0146..0000000 --- a/src/test/org/apache/sqoop/testutil/adapter/MySqlDatabaseAdapter.java +++ /dev/null @@ -1,48 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.sqoop.testutil.adapter; - -import org.apache.sqoop.SqoopOptions; -import org.apache.sqoop.manager.ConnManager; -import org.apache.sqoop.manager.mysql.MySQLTestUtils; - -import java.sql.SQLException; - -public class MySqlDatabaseAdapter implements DatabaseAdapter { - private MySQLTestUtils mySQLTestUtils = new MySQLTestUtils(); - - public SqoopOptions injectConnectionParameters(SqoopOptions options) { - options.setUsername(mySQLTestUtils.getUserName()); - mySQLTestUtils.addPasswordIfIsSet(options); - return options; - } - - public void dropTableIfExists(String tableName, ConnManager manager) throws SQLException { - mySQLTestUtils.dropTableIfExists(tableName, manager); - } - - public String getConnectionString() { - return mySQLTestUtils.getMySqlConnectString(); - } - - @Override - public String toString() { - return getClass().getSimpleName(); - } -} http://git-wip-us.apache.org/repos/asf/sqoop/blob/0a740761/src/test/org/apache/sqoop/testutil/adapter/MysqlDatabaseAdapter.java ---------------------------------------------------------------------- diff --git a/src/test/org/apache/sqoop/testutil/adapter/MysqlDatabaseAdapter.java b/src/test/org/apache/sqoop/testutil/adapter/MysqlDatabaseAdapter.java new file mode 100644 index 0000000..ff2466c --- /dev/null +++ b/src/test/org/apache/sqoop/testutil/adapter/MysqlDatabaseAdapter.java @@ -0,0 +1,48 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.sqoop.testutil.adapter;

import org.apache.sqoop.SqoopOptions;
import org.apache.sqoop.manager.ConnManager;
import org.apache.sqoop.manager.mysql.MySQLTestUtils;

import java.sql.SQLException;

/**
 * {@link DatabaseAdapter} implementation for MySQL, delegating connection
 * details and cleanup to {@link MySQLTestUtils}.
 */
public class MysqlDatabaseAdapter implements DatabaseAdapter {

  // Helper that reads MySQL connection settings (host, user, password) from
  // the test environment.
  private final MySQLTestUtils mySQLTestUtils = new MySQLTestUtils();

  /** Sets the configured MySQL user name and, if one is set, the password. */
  @Override
  public SqoopOptions injectConnectionParameters(SqoopOptions options) {
    options.setUsername(mySQLTestUtils.getUserName());
    mySQLTestUtils.addPasswordIfIsSet(options);
    return options;
  }

  @Override
  public void dropTableIfExists(String tableName, ConnManager manager) throws SQLException {
    mySQLTestUtils.dropTableIfExists(tableName, manager);
  }

  @Override
  public String getConnectionString() {
    return mySQLTestUtils.getMySqlConnectString();
  }

  @Override
  public String toString() {
    return getClass().getSimpleName();
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.sqoop.testutil.adapter;

import org.apache.sqoop.SqoopOptions;
import org.apache.sqoop.manager.ConnManager;
import org.apache.sqoop.manager.sqlserver.MSSQLTestUtils;

import java.sql.SQLException;

/**
 * {@link DatabaseAdapter} implementation for Microsoft SQL Server, backed by
 * the constants exposed in {@link MSSQLTestUtils}.
 */
public class SqlServerDatabaseAdapter implements DatabaseAdapter {

  @Override
  public String getConnectionString() {
    return MSSQLTestUtils.CONNECT_STRING;
  }

  /** Injects the SQL Server connect string, user and password into the options. */
  @Override
  public SqoopOptions injectConnectionParameters(SqoopOptions options) {
    options.setConnectString(MSSQLTestUtils.CONNECT_STRING);
    options.setUsername(MSSQLTestUtils.DATABASE_USER);
    options.setPassword(MSSQLTestUtils.DATABASE_PASSWORD);
    return options;
  }

  @Override
  public void dropTableIfExists(String tableName, ConnManager manager) throws SQLException {
    String dropStatement = "DROP TABLE IF EXISTS " + manager.escapeTableName(tableName);
    manager.execAndPrint(dropStatement);
  }

  @Override
  public String toString() {
    return getClass().getSimpleName();
  }
}
