http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/TestWhere.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/TestWhere.java b/src/test/com/cloudera/sqoop/TestWhere.java
deleted file mode 100644
index 340be9c..0000000
--- a/src/test/com/cloudera/sqoop/TestWhere.java
+++ /dev/null
@@ -1,183 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop;
-
-import java.io.IOException;
-import java.util.ArrayList;
-
-import org.apache.commons.cli.ParseException;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.util.ReflectionUtils;
-
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.orm.CompilationManager;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.testutil.SeqFileReader;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
-/**
- * Test that --where works in Sqoop.
- * Methods essentially copied out of the other Test* classes.
- * TODO(kevin or aaron): Factor out these common test methods
- * so that every new Test* class doesn't need to copy the code.
- */
-public class TestWhere extends ImportJobTestCase {
-
-  /**
-   * Create the argv to pass to Sqoop.
-   * @return the argv as an array of strings.
-   */
-  protected String [] getArgv(boolean includeHadoopFlags, String [] colNames,
-      String whereClause) {
-    String columnsString = "";
-    for (String col : colNames) {
-      columnsString += col + ",";
-    }
-
-    ArrayList<String> args = new ArrayList<String>();
-
-    if (includeHadoopFlags) {
-      CommonArgs.addHadoopFlags(args);
-    }
-
-    args.add("--table");
-    args.add(HsqldbTestServer.getTableName());
-    args.add("--columns");
-    args.add(columnsString);
-    args.add("--where");
-    args.add(whereClause);
-    args.add("--split-by");
-    args.add("INTFIELD1");
-    args.add("--warehouse-dir");
-    args.add(getWarehouseDir());
-    args.add("--connect");
-    args.add(HsqldbTestServer.getUrl());
-    args.add("--as-sequencefile");
-    args.add("--num-mappers");
-    args.add("1");
-
-    return args.toArray(new String[0]);
-  }
-
-  // this test just uses the two int table.
-  protected String getTableName() {
-    return HsqldbTestServer.getTableName();
-  }
-
-
-  /**
-   * Given a comma-delimited list of integers, grab and parse the first int.
-   * @param str a comma-delimited list of values, the first of which is an int.
-   * @return the first field in the string, cast to int
-   */
-  private int getFirstInt(String str) {
-    String [] parts = str.split(",");
-    return Integer.parseInt(parts[0]);
-  }
-
-  public void runWhereTest(String whereClause, String firstValStr,
-      int numExpectedResults, int expectedSum) throws IOException {
-
-    String [] columns = HsqldbTestServer.getFieldNames();
-    ClassLoader prevClassLoader = null;
-    SequenceFile.Reader reader = null;
-
-    String [] argv = getArgv(true, columns, whereClause);
-    runImport(argv);
-    try {
-      SqoopOptions opts = new ImportTool().parseArguments(
-          getArgv(false, columns, whereClause),
-          null, null, true);
-
-      CompilationManager compileMgr = new CompilationManager(opts);
-      String jarFileName = compileMgr.getJarFilename();
-
-      prevClassLoader = ClassLoaderStack.addJarFile(jarFileName,
-          getTableName());
-
-      reader = SeqFileReader.getSeqFileReader(getDataFilePath().toString());
-
-      // here we can actually instantiate (k, v) pairs.
-      Configuration conf = new Configuration();
-      Object key = ReflectionUtils.newInstance(reader.getKeyClass(), conf);
-      Object val = ReflectionUtils.newInstance(reader.getValueClass(), conf);
-
-      if (reader.next(key) == null) {
-        fail("Empty SequenceFile during import");
-      }
-
-      // make sure that the value we think should be at the top, is.
-      reader.getCurrentValue(val);
-      assertEquals("Invalid ordering within sorted SeqFile", firstValStr,
-          val.toString());
-
-      // We know that these values are two ints separated by a ',' character.
-      // Since this is all dynamic, though, we don't want to actually link
-      // against the class and use its methods. So we just parse this back
-      // into int fields manually.  Sum them up and ensure that we get the
-      // expected total for the first column, to verify that we got all the
-      // results from the db into the file.
-      int curSum = getFirstInt(val.toString());
-      int totalResults = 1;
-
-      // now sum up everything else in the file.
-      while (reader.next(key) != null) {
-        reader.getCurrentValue(val);
-        curSum += getFirstInt(val.toString());
-        totalResults++;
-      }
-
-      assertEquals("Total sum of first db column mismatch", expectedSum,
-          curSum);
-      assertEquals("Incorrect number of results for query", numExpectedResults,
-          totalResults);
-    } catch (InvalidOptionsException ioe) {
-      fail(ioe.toString());
-    } catch (ParseException pe) {
-      fail(pe.toString());
-    } finally {
-      IOUtils.closeStream(reader);
-
-      if (null != prevClassLoader) {
-        ClassLoaderStack.setCurrentClassLoader(prevClassLoader);
-      }
-    }
-  }
-
-  @Test
-  public void testSingleClauseWhere() throws IOException {
-    String whereClause = "INTFIELD2 > 4";
-    runWhereTest(whereClause, "1,8\n", 2, 4);
-  }
-
-  @Test
-  public void testMultiClauseWhere() throws IOException {
-    String whereClause = "INTFIELD1 > 4 AND INTFIELD2 < 3";
-    runWhereTest(whereClause, "7,2\n", 1, 7);
-  }
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/hbase/HBaseImportAddRowKeyTest.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseImportAddRowKeyTest.java b/src/test/com/cloudera/sqoop/hbase/HBaseImportAddRowKeyTest.java
deleted file mode 100644
index fd00498..0000000
--- a/src/test/com/cloudera/sqoop/hbase/HBaseImportAddRowKeyTest.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.hbase;
-
-import org.apache.commons.lang.StringUtils;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import static java.util.Arrays.asList;
-import static org.apache.commons.lang.StringUtils.join;
-
-@RunWith(Parameterized.class)
-public class HBaseImportAddRowKeyTest extends HBaseTestCase {
-
-  @Parameterized.Parameters(name = "bulkLoad = {0}")
-  public static Iterable<? extends Object> bulkLoadParameters() {
-    return Arrays.asList(new Boolean[] { false } , new Boolean[] { true } );
-  }
-
-  private String[] columnTypes;
-
-  private String[] columnValues;
-
-  private String hbaseTableName;
-
-  private String hbaseColumnFamily;
-
-  private String hbaseTmpDir;
-
-  private String hbaseBulkLoadDir;
-
-  private boolean bulkLoad;
-
-  public HBaseImportAddRowKeyTest(boolean bulkLoad) {
-    this.bulkLoad = bulkLoad;
-  }
-
-  @Before
-  public void setUp() {
-    super.setUp();
-    columnTypes = new String[] { "INT", "INT" };
-    columnValues = new String[] { "0", "1" };
-    hbaseTableName = "addRowKeyTable";
-    hbaseColumnFamily = "addRowKeyFamily";
-    hbaseTmpDir = TEMP_BASE_DIR + "hbaseTmpDir";
-    hbaseBulkLoadDir = TEMP_BASE_DIR + "hbaseBulkLoadDir";
-    createTableWithColTypes(columnTypes, columnValues);
-  }
-
-  @Test
-  public void testAddRowKey() throws IOException {
-    String[] argv = getImportArguments(true, hbaseTableName, hbaseColumnFamily);
-
-    runImport(argv);
-
-    // Row key should have been added
-    verifyHBaseCell(hbaseTableName, columnValues[0], hbaseColumnFamily, getColName(0), columnValues[0]);
-    verifyHBaseCell(hbaseTableName, columnValues[0], hbaseColumnFamily, getColName(1), columnValues[1]);
-  }
-
-  @Test
-  public void testAddRowKeyDefault() throws IOException {
-    String[] argv = getImportArguments(false, hbaseTableName, hbaseColumnFamily);
-
-    runImport(argv);
-
-    // Row key should not be added by default
-    verifyHBaseCell(hbaseTableName, columnValues[0], hbaseColumnFamily, getColName(0), null);
-    verifyHBaseCell(hbaseTableName, columnValues[0], hbaseColumnFamily, getColName(1), columnValues[1]);
-  }
-
-  @Test
-  public void testAddCompositeKey() throws IOException {
-    String rowKey = getColName(0)+","+getColName(1);
-
-    String[] argv = getImportArguments(true, hbaseTableName, hbaseColumnFamily, rowKey);
-
-    runImport(argv);
-
-    // Row key should have been added
-    verifyHBaseCell(hbaseTableName, join(columnValues, '_'), hbaseColumnFamily, getColName(0), columnValues[0]);
-    verifyHBaseCell(hbaseTableName, join(columnValues, '_'), hbaseColumnFamily, getColName(1), columnValues[1]);
-  }
-
-  private String[] getImportArguments(boolean addRowKey, String hbaseTableName, String hbaseColumnFamily) {
-    return getImportArguments(addRowKey, hbaseTableName, hbaseColumnFamily, null);
-  }
-
-  private String[] getImportArguments(boolean addRowKey, String hbaseTableName, String hbaseColumnFamily, String rowKey) {
-    List<String> result = new ArrayList<>();
-
-    if (addRowKey) {
-      result.add("-D");
-      result.add("sqoop.hbase.add.row.key=true");
-    }
-    result.add("-D");
-    result.add("hbase.fs.tmp.dir=" + hbaseTmpDir);
-
-    result.addAll(asList(getArgv(true, hbaseTableName, hbaseColumnFamily, true, null)));
-
-    if(bulkLoad) {
-      result.add("--target-dir");
-      result.add(hbaseBulkLoadDir);
-      result.add("--hbase-bulkload");
-    }
-
-    if (!StringUtils.isBlank(rowKey)) {
-      result.add("--hbase-row-key");
-      result.add(rowKey);
-    }
-
-    return result.toArray(new String[result.size()]);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/hbase/HBaseImportNullTest.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseImportNullTest.java b/src/test/com/cloudera/sqoop/hbase/HBaseImportNullTest.java
deleted file mode 100644
index dd88fe7..0000000
--- a/src/test/com/cloudera/sqoop/hbase/HBaseImportNullTest.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.hbase;
-
-import java.io.IOException;
-
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-
-/**
- *
- */
-public class HBaseImportNullTest extends HBaseTestCase {
-
-  @Test
-  public void testNullRow() throws IOException {
-    String [] argv = getArgv(true, "nullRowT", "nullRowF", true, null);
-    String [] types = { "INT", "INT" };
-    String [] vals = { "0", "null" };
-    createTableWithColTypes(types, vals);
-    runImport(argv);
-
-    // This cell should not be placed in the results..
-    verifyHBaseCell("nullRowT", "0", "nullRowF", getColName(1), null);
-
-    int rowCount = countHBaseTable("nullRowT", "nullRowF");
-    assertEquals(0, rowCount);
-  }
-
-  @Test
-  public void testNulls() throws IOException {
-    String [] argv = getArgv(true, "nullT", "nullF", true, null);
-    String [] types = { "INT", "INT", "INT" };
-    String [] vals = { "0", "42", "null" };
-    createTableWithColTypes(types, vals);
-    runImport(argv);
-
-    // This cell should import correctly.
-    verifyHBaseCell("nullT", "0", "nullF", getColName(1), "42");
-
-    // This cell should not be placed in the results..
-    verifyHBaseCell("nullT", "0", "nullF", getColName(2), null);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/hbase/HBaseImportTest.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseImportTest.java b/src/test/com/cloudera/sqoop/hbase/HBaseImportTest.java
deleted file mode 100644
index 4d79341..0000000
--- a/src/test/com/cloudera/sqoop/hbase/HBaseImportTest.java
+++ /dev/null
@@ -1,147 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.hbase;
-
-import java.io.IOException;
-
-import org.junit.Test;
-
-import static org.junit.Assert.fail;
-
-/**
- * Test imports of tables into HBase.
- */
-public class HBaseImportTest extends HBaseTestCase {
-
-  @Test
-  public void testBasicUsage() throws IOException {
-    // Create the HBase table in Sqoop as we run the job.
-    String [] argv = getArgv(true, "BasicUsage", "BasicColFam", true, null);
-    String [] types = { "INT", "INT" };
-    String [] vals = { "0", "1" };
-    createTableWithColTypes(types, vals);
-    runImport(argv);
-    verifyHBaseCell("BasicUsage", "0", "BasicColFam", getColName(1), "1");
-  }
-
-  @Test
-  public void testMissingTableFails() throws IOException {
-    // Test that if the table doesn't exist, we fail unless we
-    // explicitly create the table.
-    String [] argv = getArgv(true, "MissingTable", "MissingFam", false, null);
-    String [] types = { "INT", "INT" };
-    String [] vals = { "0", "1" };
-    createTableWithColTypes(types, vals);
-    try {
-      runImport(argv);
-      fail("Expected IOException");
-    } catch (IOException ioe) {
-      LOG.info("Got exception -- ok; we expected that job to fail.");
-    }
-  }
-
-  @Test
-  public void testOverwriteSucceeds() throws IOException {
-    // Test that we can create a table and then import immediately
-    // back on top of it without problem.
-    String [] argv = getArgv(true, "OverwriteT", "OverwriteF", true, null);
-    String [] types = { "INT", "INT" };
-    String [] vals = { "0", "1" };
-    createTableWithColTypes(types, vals);
-    runImport(argv);
-    verifyHBaseCell("OverwriteT", "0", "OverwriteF", getColName(1), "1");
-    // Run a second time.
-    runImport(argv);
-    verifyHBaseCell("OverwriteT", "0", "OverwriteF", getColName(1), "1");
-  }
-
-  @Test
-  public void testOverwriteNullColumnsSucceeds() throws IOException {
-    // Test that we can create a table and then import immediately
-    // back on top of it without problem and then update with null to validate
-    String [] argv = getArgv(true, "OverwriteTable", "OverwriteColumnFamily", 
true, null);
-    String [] types = { "INT", "INT", "INT", "DATETIME" };
-    String [] vals = { "0", "1", "1", "'2017-03-20'" };
-    createTableWithColTypes(types, vals);
-    runImport(argv);
-    verifyHBaseCell("OverwriteTable", "0", "OverwriteColumnFamily", 
getColName(2), "1");
-    // Run a second time.
-    argv = getIncrementalArgv(true, "OverwriteTable", "OverwriteColumnFamily", 
true, null, false, false, "DATA_COL3", "2017-03-24 01:01:01.0", null);
-    vals = new String[] { "0", "1", null, "'2017-03-25'" };
-    updateTable(types, vals);
-    runImport(argv);
-    verifyHBaseCell("OverwriteTable", "0", "OverwriteColumnFamily", 
getColName(2), null);
-  }
-
-  @Test
-  public void testAppendWithTimestampSucceeds() throws IOException {
-    // Test that we can create a table and then import multiple rows
-    // validate for append scenario with time stamp
-    String [] argv = getArgv(true, "AppendTable", "AppendColumnFamily", true, 
null);
-    String [] types = { "INT", "INT", "INT", "DATETIME" };
-    String [] vals = { "0", "1", "1", "'2017-03-20'" };
-    createTableWithColTypes(types, vals);
-    runImport(argv);
-    verifyHBaseCell("AppendTable", "0", "AppendColumnFamily", getColName(2), 
"1");
-    // Run a second time.
-    argv = getIncrementalArgv(true, "AppendTable", "AppendColumnFamily", true, 
null, true, false, "DATA_COL1", "2017-03-24 01:01:01.0", null);
-    vals = new String[] { "1", "2", "3", "'2017-06-15'" };
-    insertIntoTable(types, vals);
-    runImport(argv);
-    verifyHBaseCell("AppendTable", "1", "AppendColumnFamily", getColName(2), 
"3");
-  }
-
-  @Test
-  public void testAppendSucceeds() throws IOException {
-       // Test that we can create a table and then import multiple rows
-       // validate for append scenario with ID column(DATA_COL3)
-    String [] argv = getArgv(true, "AppendTable", "AppendColumnFamily", true, 
null);
-    String [] types = { "INT", "INT", "INT", "DATETIME" };
-    String [] vals = { "0", "1", "1", "'2017-03-20'" };
-    createTableWithColTypes(types, vals);
-    runImport(argv);
-    verifyHBaseCell("AppendTable", "0", "AppendColumnFamily", getColName(2), 
"1");
-    // Run a second time.
-    argv = getIncrementalArgv(true, "AppendTable", "AppendColumnFamily", true, 
null, true, true, "DATA_COL1", null, "DATA_COL3");
-    vals = new String[] { "1", "2", "3", "'2017-06-15'" };
-    insertIntoTable(types, vals);
-    runImport(argv);
-    verifyHBaseCell("AppendTable", "1", "AppendColumnFamily", getColName(2), 
"3");
-  }
-
-  @Test
-  public void testExitFailure() throws IOException {
-    String [] types = { "INT", "INT", "INT" };
-    String [] vals = { "0", "42", "43" };
-    createTableWithColTypes(types, vals);
-
-    String [] argv = getArgv(true, "NoHBaseT", "NoHBaseF", true, null);
-    try {
-      HBaseUtil.setAlwaysNoHBaseJarMode(true);
-      runImport(argv);
-    } catch (IOException e)  {
-      return;
-    } finally {
-      HBaseUtil.setAlwaysNoHBaseJarMode(false);
-    }
-
-    fail("should have gotten exception");
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/hbase/HBaseImportTypesTest.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseImportTypesTest.java b/src/test/com/cloudera/sqoop/hbase/HBaseImportTypesTest.java
deleted file mode 100644
index ae08a66..0000000
--- a/src/test/com/cloudera/sqoop/hbase/HBaseImportTypesTest.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.hbase;
-
-import java.io.IOException;
-
-import org.junit.Test;
-
-/**
- *
- */
-public class HBaseImportTypesTest extends HBaseTestCase {
-
-  @Test
-  public void testStrings() throws IOException {
-    String [] argv = getArgv(true, "stringT", "stringF", true, null);
-    String [] types = { "INT", "VARCHAR(32)" };
-    String [] vals = { "0", "'abc'" };
-    createTableWithColTypes(types, vals);
-    runImport(argv);
-    verifyHBaseCell("stringT", "0", "stringF", getColName(1), "abc");
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/hbase/HBaseKerberizedConnectivityTest.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseKerberizedConnectivityTest.java b/src/test/com/cloudera/sqoop/hbase/HBaseKerberizedConnectivityTest.java
deleted file mode 100644
index 3c027ad..0000000
--- a/src/test/com/cloudera/sqoop/hbase/HBaseKerberizedConnectivityTest.java
+++ /dev/null
@@ -1,33 +0,0 @@
-package com.cloudera.sqoop.hbase;
-
-import org.apache.sqoop.infrastructure.kerberos.MiniKdcInfrastructureRule;
-import org.junit.ClassRule;
-import org.junit.Test;
-
-import java.io.IOException;
-
-public class HBaseKerberizedConnectivityTest extends HBaseTestCase {
-
-  private static final String HBASE_TABLE_NAME = "KerberosTest";
-  private static final String HBASE_COLUMN_FAMILY = "TestColumnFamily";
-  private static final String TEST_ROW_KEY = "0";
-  private static final String TEST_ROW_VALUE = "1";
-  private static final String[] COLUMN_TYPES = { "INT", "INT" };
-
-  @ClassRule
-  public static MiniKdcInfrastructureRule miniKdcInfrastructure = new MiniKdcInfrastructureRule();
-
-  public HBaseKerberizedConnectivityTest() {
-    super(miniKdcInfrastructure);
-  }
-
-  @Test
-  public void testSqoopImportWithKerberizedHBaseConnectivitySucceeds() throws IOException {
-    String[] argv = getArgv(true, HBASE_TABLE_NAME, HBASE_COLUMN_FAMILY, true, null);
-    createTableWithColTypes(COLUMN_TYPES, new String[] { TEST_ROW_KEY, TEST_ROW_VALUE });
-
-    runImport(argv);
-
-    verifyHBaseCell(HBASE_TABLE_NAME, TEST_ROW_KEY, HBASE_COLUMN_FAMILY, getColName(1), TEST_ROW_VALUE);
-  }
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/hbase/HBaseQueryImportTest.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseQueryImportTest.java b/src/test/com/cloudera/sqoop/hbase/HBaseQueryImportTest.java
deleted file mode 100644
index d71d4e3..0000000
--- a/src/test/com/cloudera/sqoop/hbase/HBaseQueryImportTest.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.hbase;
-
-import java.io.IOException;
-
-import org.junit.Test;
-
-import static org.junit.Assert.fail;
-
-/**
- * Test import of free-form query into HBase.
- */
-public class HBaseQueryImportTest extends HBaseTestCase {
-
-  @Test
-  public void testImportFromQuery() throws IOException {
-    String [] types = { "INT", "INT", "INT" };
-    String [] vals = { "0", "42", "43" };
-    createTableWithColTypes(types, vals);
-
-    String [] argv = getArgv(true, "queryT", "queryF", true,
-        "SELECT " + getColName(0) + ", " + getColName(1) + " FROM "
-        + getTableName() + " WHERE $CONDITIONS");
-    runImport(argv);
-
-    // This cell should import correctly.
-    verifyHBaseCell("queryT", "0", "queryF", getColName(1), "42");
-
-    // This cell should not be placed in the results..
-    verifyHBaseCell("queryT", "0", "queryF", getColName(2), null);
-  }
-
-  @Test
-  public void testExitFailure() throws IOException {
-    String [] types = { "INT", "INT", "INT" };
-    String [] vals = { "0", "42", "43" };
-    createTableWithColTypes(types, vals);
-
-    String [] argv = getArgv(true, "queryT", "queryF", true,
-        "SELECT " + getColName(0) + ", " + getColName(1) + " FROM "
-        + getTableName() + " WHERE $CONDITIONS");
-    try {
-      HBaseUtil.setAlwaysNoHBaseJarMode(true);
-      runImport(argv);
-    } catch (Exception e)  {
-      return;
-    } finally {
-      HBaseUtil.setAlwaysNoHBaseJarMode(false);
-    }
-    fail("should have gotten exception");
-  }
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java b/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java
deleted file mode 100644
index 99fcd70..0000000
--- a/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java
+++ /dev/null
@@ -1,304 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.hbase;
-
-import static org.apache.hadoop.hbase.HConstants.MASTER_INFO_PORT;
-import static org.apache.hadoop.hbase.HConstants.ZOOKEEPER_CLIENT_PORT;
-import static org.apache.hadoop.hbase.coprocessor.CoprocessorHost.REGION_COPROCESSOR_CONF_KEY;
-import static org.apache.hadoop.hbase.security.HBaseKerberosUtils.KRB_PRINCIPAL;
-import static org.apache.hadoop.hbase.security.HBaseKerberosUtils.MASTER_KRB_PRINCIPAL;
-import static org.apache.hadoop.hbase.security.User.HBASE_SECURITY_CONF_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HTTP_POLICY_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.IGNORE_SECURE_PORTS_FOR_TESTING_KEY;
-import static org.apache.hadoop.yarn.conf.YarnConfiguration.RM_PRINCIPAL;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
-import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
-import org.apache.hadoop.hbase.security.token.TokenProvider;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.http.HttpConfig;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.StringUtils;
-import org.apache.sqoop.infrastructure.kerberos.KerberosConfigurationProvider;
-import org.junit.After;
-import org.junit.Before;
-
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-
-/**
- * Utility methods that facilitate HBase import tests.
- */
-public abstract class HBaseTestCase extends ImportJobTestCase {
-
-  public static final Log LOG = LogFactory.getLog(
-      HBaseTestCase.class.getName());
-  private static final String MASTER_INFO_PORT_DISABLE_WEB_UI = "-1";
-  private static final String DEFAULT_DFS_HTTPS_ADDRESS = "localhost:0";
-
-  private final KerberosConfigurationProvider kerberosConfigurationProvider;
-  private HBaseTestingUtility hbaseTestUtil;
-
-  public HBaseTestCase() {
-    this(null);
-  }
-
-  public HBaseTestCase(KerberosConfigurationProvider kerberosConfigurationProvider) {
-    this.kerberosConfigurationProvider = kerberosConfigurationProvider;
-  }
-
-  /**
-   * Create the argv to pass to Sqoop.
-   * @return the argv as an array of strings.
-   */
-  protected String [] getArgv(boolean includeHadoopFlags,
-      String hbaseTable, String hbaseColFam, boolean hbaseCreate,
-      String queryStr) {
-
-    ArrayList<String> args = new ArrayList<String>();
-
-    if (includeHadoopFlags) {
-      CommonArgs.addHadoopFlags(args);
-      String zookeeperPort = hbaseTestUtil.getConfiguration().get(ZOOKEEPER_CLIENT_PORT);
-      args.add("-D");
-      args.add("hbase.zookeeper.property.clientPort=" + zookeeperPort);
-      args.addAll(getKerberosFlags());
-    }
-
-    if (null != queryStr) {
-      args.add("--query");
-      args.add(queryStr);
-    } else {
-      args.add("--table");
-      args.add(getTableName());
-    }
-    args.add("--split-by");
-    args.add(getColName(0));
-    args.add("--connect");
-    args.add(HsqldbTestServer.getUrl());
-    args.add("--num-mappers");
-    args.add("1");
-    args.add("--column-family");
-    args.add(hbaseColFam);
-    args.add("--hbase-table");
-    args.add(hbaseTable);
-    if (hbaseCreate) {
-      args.add("--hbase-create-table");
-    }
-    return args.toArray(new String[0]);
-  }
-
-  /**
-   * Create the argv to pass to Sqoop as incremental options.
-   * @return the argv as an array of strings.
-   */
-  protected String [] getIncrementalArgv(boolean includeHadoopFlags,
-      String hbaseTable, String hbaseColFam, boolean hbaseCreate,
-      String queryStr, boolean isAppend, boolean appendTimestamp, String checkColumn, String checkValue, String lastModifiedColumn) {
-
-    String[] argsStrArray = getArgv(includeHadoopFlags, hbaseTable, hbaseColFam, hbaseCreate, queryStr);
-    List<String> args = new ArrayList<String>(Arrays.asList(argsStrArray));
-
-    if (isAppend) {
-      args.add("--incremental");
-      args.add("append");
-      if (!appendTimestamp) {
-        args.add("--check-column");
-        args.add(checkColumn);//"ID");
-      } else {
-        args.add("--check-column");
-        args.add(lastModifiedColumn);//LAST_MODIFIED");
-      }
-    } else {
-      args.add("--incremental");
-      args.add("lastmodified");
-      args.add("--check-column");
-      args.add(checkColumn);
-      args.add("--last-value");
-      args.add(checkValue);
-    }
-    return args.toArray(new String[0]);
-  }
-
-  @Override
-  @Before
-  public void setUp() {
-    try {
-      hbaseTestUtil = new HBaseTestingUtility();
-      // We set the port for the hbase master web UI to -1 because we do not want the info server to run.
-      hbaseTestUtil.getConfiguration().set(MASTER_INFO_PORT, MASTER_INFO_PORT_DISABLE_WEB_UI);
-      setupKerberos();
-
-      hbaseTestUtil.startMiniCluster();
-      super.setUp();
-    } catch (Throwable e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  private void setupKerberos() {
-    if (!isKerberized()){
-      return;
-    }
-
-    String servicePrincipal = kerberosConfigurationProvider.getTestPrincipal() + "@" + kerberosConfigurationProvider.getRealm();
-    HBaseKerberosUtils.setPrincipalForTesting(servicePrincipal);
-    HBaseKerberosUtils.setKeytabFileForTesting(kerberosConfigurationProvider.getKeytabFilePath());
-
-    Configuration configuration = hbaseTestUtil.getConfiguration();
-    HBaseKerberosUtils.setSecuredConfiguration(configuration);
-    UserGroupInformation.setConfiguration(configuration);
-    configuration.setStrings(REGION_COPROCESSOR_CONF_KEY, TokenProvider.class.getName());
-
-    setupKerberosForHdfs(servicePrincipal, configuration);
-  }
-
-  private void setupKerberosForHdfs(String servicePrincipal, Configuration configuration) {
-    configuration.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, servicePrincipal);
-    configuration.set(DFS_NAMENODE_KEYTAB_FILE_KEY, kerberosConfigurationProvider.getKeytabFilePath());
-    configuration.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, servicePrincipal);
-    configuration.set(DFS_DATANODE_KEYTAB_FILE_KEY, kerberosConfigurationProvider.getKeytabFilePath());
-    configuration.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
-    configuration.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, servicePrincipal);
-    configuration.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTP_ONLY.name());
-    configuration.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, DEFAULT_DFS_HTTPS_ADDRESS);
-    configuration.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, DEFAULT_DFS_HTTPS_ADDRESS);
-    configuration.setBoolean(IGNORE_SECURE_PORTS_FOR_TESTING_KEY, true);
-  }
-
-  public void shutdown() throws Exception {
-    LOG.info("In shutdown() method");
-    LOG.info("Shutting down HBase cluster");
-    hbaseTestUtil.shutdownMiniCluster();
-    hbaseTestUtil = null;
-    LOG.info("shutdown() method returning.");
-  }
-
-  @Override
-  @After
-  public void tearDown() {
-    try {
-      shutdown();
-    } catch (Exception e) {
-      LOG.warn("Error shutting down HBase minicluster: "
-              + StringUtils.stringifyException(e));
-    }
-    super.tearDown();
-  }
-
-  protected void verifyHBaseCell(String tableName, String rowKey,
-      String colFamily, String colName, String val) throws IOException {
-    Get get = new Get(Bytes.toBytes(rowKey));
-    get.addColumn(Bytes.toBytes(colFamily), Bytes.toBytes(colName));
-    try (
-        Connection hbaseConnection = createHBaseConnection();
-        Table table = getHBaseTable(hbaseConnection, tableName)
-    ) {
-      Result r = table.get(get);
-      byte [] actualVal = r.getValue(Bytes.toBytes(colFamily),
-          Bytes.toBytes(colName));
-      if (null == val) {
-        assertNull("Got a result when expected null", actualVal);
-      } else {
-        assertNotNull("No result, but we expected one", actualVal);
-        assertEquals(val, Bytes.toString(actualVal));
-      }
-    }
-  }
-
-  protected int countHBaseTable(String tableName, String colFamily)
-      throws IOException {
-    int count = 0;
-    try (
-        Connection hbaseConnection = createHBaseConnection();
-        Table table = getHBaseTable(hbaseConnection, tableName)
-    ) {
-      ResultScanner scanner = table.getScanner(Bytes.toBytes(colFamily));
-      for(Result result = scanner.next();
-          result != null;
-          result = scanner.next()) {
-        count++;
-      }
-    }
-    return count;
-  }
-
-  private Connection createHBaseConnection() throws IOException {
-    return ConnectionFactory.createConnection(new Configuration(hbaseTestUtil.getConfiguration()));
-  }
-
-  private Table getHBaseTable(Connection connection, String tableName) throws IOException {
-    return connection.getTable(TableName.valueOf(tableName));
-  }
-
-  protected boolean isKerberized() {
-    return kerberosConfigurationProvider != null;
-  }
-
-  private String createFlagWithValue(String flag, String value) {
-    return String.format("%s=%s", flag, value);
-  }
-
-  private List<String> getKerberosFlags() {
-    if (!isKerberized()) {
-      return Collections.emptyList();
-    }
-    List<String> result = new ArrayList<>();
-
-    String principalForTesting = HBaseKerberosUtils.getPrincipalForTesting();
-    result.add("-D");
-    result.add(createFlagWithValue(HBASE_SECURITY_CONF_KEY, "kerberos"));
-    result.add("-D");
-    result.add(createFlagWithValue(MASTER_KRB_PRINCIPAL, principalForTesting));
-    result.add("-D");
-    result.add(createFlagWithValue(KRB_PRINCIPAL, principalForTesting));
-    result.add("-D");
-    result.add(createFlagWithValue(RM_PRINCIPAL, principalForTesting));
-
-    return result;
-  }
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/hbase/HBaseUtilTest.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseUtilTest.java b/src/test/com/cloudera/sqoop/hbase/HBaseUtilTest.java
deleted file mode 100644
index 4201139..0000000
--- a/src/test/com/cloudera/sqoop/hbase/HBaseUtilTest.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.hbase;
-
-import org.junit.Test;
-
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-
-/**
- * This tests to verify that HBase is present (default when running test cases)
- * and that when in fake not present mode, the method return false.
- */
-public class HBaseUtilTest {
-
-  @Test
-  public void testHBasePresent() {
-    assertTrue(HBaseUtil.isHBaseJarPresent());
-  }
-
-  @Test
-  public void testHBaseNotPresent() {
-    HBaseUtil.setAlwaysNoHBaseJarMode(true);
-    boolean present = HBaseUtil.isHBaseJarPresent();
-    HBaseUtil.setAlwaysNoHBaseJarMode(false);
-    assertFalse(present);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/hive/TestHiveImport.java b/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
deleted file mode 100644
index a624f52..0000000
--- a/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
+++ /dev/null
@@ -1,743 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.hive;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import com.cloudera.sqoop.Sqoop;
-
-import org.apache.avro.Schema;
-import org.apache.avro.SchemaBuilder;
-import org.apache.avro.generic.GenericRecord;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.sqoop.avro.AvroSchemaMismatchException;
-import org.apache.sqoop.mapreduce.ParquetJob;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.tool.BaseSqoopTool;
-import com.cloudera.sqoop.tool.CodeGenTool;
-import com.cloudera.sqoop.tool.CreateHiveTableTool;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.tool.SqoopTool;
-import org.apache.commons.cli.ParseException;
-import org.junit.rules.ExpectedException;
-import org.kitesdk.data.Dataset;
-import org.kitesdk.data.DatasetReader;
-import org.kitesdk.data.Datasets;
-import org.kitesdk.data.Formats;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-/**
- * Test HiveImport capability after an import to HDFS.
- */
-
-public class TestHiveImport extends ImportJobTestCase {
-
-  public static final Log LOG = LogFactory.getLog(
-      TestHiveImport.class.getName());
-
-  @Rule
-  public ExpectedException thrown = ExpectedException.none();
-
-  @Before
-  public void setUp() {
-    super.setUp();
-    HiveImport.setTestMode(true);
-  }
-
-  @After
-  public void tearDown() {
-    super.tearDown();
-    HiveImport.setTestMode(false);
-  }
-
-  /**
-   * Sets the expected number of columns in the table being manipulated
-   * by the test. Under the hood, this sets the expected column names
-   * to DATA_COLi for 0 &lt;= i &lt; numCols.
-   * @param numCols the number of columns to be created.
-   */
-  protected void setNumCols(int numCols) {
-    String [] cols = new String[numCols];
-    for (int i = 0; i < numCols; i++) {
-      cols[i] = "DATA_COL" + i;
-    }
-
-    setColNames(cols);
-  }
-
-  protected String[] getTypes() {
-    String[] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
-    return types;
-  }
-
-  /**
-   * Create the argv to pass to Sqoop.
-   * @return the argv as an array of strings.
-   */
-  protected String [] getArgv(boolean includeHadoopFlags, String [] moreArgs) {
-    ArrayList<String> args = new ArrayList<String>();
-
-    if (includeHadoopFlags) {
-      CommonArgs.addHadoopFlags(args);
-    }
-
-    if (null != moreArgs) {
-      for (String arg: moreArgs) {
-        args.add(arg);
-      }
-    }
-
-    args.add("--table");
-    args.add(getTableName());
-    args.add("--warehouse-dir");
-    args.add(getWarehouseDir());
-    args.add("--connect");
-    args.add(getConnectString());
-    args.add("--hive-import");
-    String [] colNames = getColNames();
-    if (null != colNames) {
-      args.add("--split-by");
-      args.add(colNames[0]);
-    } else {
-      fail("Could not determine column names.");
-    }
-
-    args.add("--num-mappers");
-    args.add("1");
-
-    for (String a : args) {
-      LOG.debug("ARG : "+ a);
-    }
-
-    return args.toArray(new String[0]);
-  }
-
-  /**
-   * @return the argv to supply to a create-table only job for Hive imports.
-   */
-  protected String [] getCreateTableArgv(boolean includeHadoopFlags,
-      String [] moreArgs) {
-
-    ArrayList<String> args = new ArrayList<String>();
-
-    if (null != moreArgs) {
-      for (String arg: moreArgs) {
-        args.add(arg);
-      }
-    }
-
-    args.add("--table");
-    args.add(getTableName());
-    args.add("--connect");
-    args.add(getConnectString());
-
-    return args.toArray(new String[0]);
-  }
-
-  /**
-   * @return the argv to supply to a code-gen only job for Hive imports.
-   */
-  protected String [] getCodeGenArgs() {
-    ArrayList<String> args = new ArrayList<String>();
-
-    args.add("--table");
-    args.add(getTableName());
-    args.add("--connect");
-    args.add(getConnectString());
-    args.add("--hive-import");
-
-    return args.toArray(new String[0]);
-  }
-
-  /**
-   * @return the argv to supply to a ddl-executing-only job for Hive imports.
-   */
-  protected String [] getCreateHiveTableArgs(String [] extraArgs) {
-    ArrayList<String> args = new ArrayList<String>();
-
-    args.add("--table");
-    args.add(getTableName());
-    args.add("--connect");
-    args.add(getConnectString());
-
-    if (null != extraArgs) {
-      for (String arg : extraArgs) {
-        args.add(arg);
-      }
-    }
-
-    return args.toArray(new String[0]);
-  }
-
-  private SqoopOptions getSqoopOptions(String [] args, SqoopTool tool) {
-    SqoopOptions opts = null;
-    try {
-      opts = tool.parseArguments(args, null, null, true);
-    } catch (Exception e) {
-      fail("Invalid options: " + e.toString());
-    }
-
-    return opts;
-  }
-
-  private void runImportTest(String tableName, String [] types,
-      String [] values, String verificationScript, String [] args,
-      SqoopTool tool) throws IOException {
-
-    // create a table and populate it with a row...
-    createTableWithColTypes(types, values);
-
-    // set up our mock hive shell to compare our generated script
-    // against the correct expected one.
-    SqoopOptions options = getSqoopOptions(args, tool);
-    String hiveHome = options.getHiveHome();
-    assertNotNull("hive.home was not set", hiveHome);
-    String testDataPath = new Path(new Path(hiveHome),
-        "scripts/" + verificationScript).toString();
-    System.setProperty("expected.script",
-        new File(testDataPath).getAbsolutePath());
-
-    // verify that we can import it correctly into hive.
-    runImport(tool, args);
-  }
-
-  /** Test that we can generate a file containing the DDL and not import. */
-  @Test
-  public void testGenerateOnly() throws IOException {
-    final String TABLE_NAME = "GenerateOnly";
-    setCurTableName(TABLE_NAME);
-    setNumCols(1);
-
-    // Figure out where our target generated .q file is going to be.
-    SqoopOptions options = getSqoopOptions(getArgv(false, null),
-        new ImportTool());
-    Path ddlFile = new Path(new Path(options.getCodeOutputDir()),
-        TABLE_NAME + ".q");
-    FileSystem fs = FileSystem.getLocal(new Configuration());
-
-    // If it's already there, remove it before running the test to ensure
-    // that it's the current test that generated the file.
-    if (fs.exists(ddlFile)) {
-      if (!fs.delete(ddlFile, false)) {
-        LOG.warn("Could not delete previous ddl file: " + ddlFile);
-      }
-    }
-
-    // Run a basic import, but specify that we're just generating definitions.
-    String [] types = { "INTEGER" };
-    String [] vals = { "42" };
-    runImportTest(TABLE_NAME, types, vals, null, getCodeGenArgs(),
-        new CodeGenTool());
-
-    // Test that the generated definition file exists.
-    assertTrue("Couldn't find expected ddl file", fs.exists(ddlFile));
-
-    Path hiveImportPath = new Path(new Path(options.getWarehouseDir()),
-        TABLE_NAME);
-    assertFalse("Import actually happened!", fs.exists(hiveImportPath));
-  }
-
-  /** Test that strings and ints are handled in the normal fashion. */
-  @Test
-  public void testNormalHiveImport() throws IOException {
-    final String TABLE_NAME = "NORMAL_HIVE_IMPORT";
-    setCurTableName(TABLE_NAME);
-    setNumCols(3);
-    String [] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
-    String [] vals = { "'test'", "42", "'somestring'" };
-    runImportTest(TABLE_NAME, types, vals, "normalImport.q",
-        getArgv(false, null), new ImportTool());
-  }
-
-  /** Test that strings and ints are handled in the normal fashion as parquet
-   * file. */
-  @Test
-  public void testNormalHiveImportAsParquet() throws IOException {
-    final String TABLE_NAME = "NORMAL_HIVE_IMPORT_AS_PARQUET";
-    setCurTableName(TABLE_NAME);
-    setNumCols(3);
-    String [] types = getTypes();
-    String [] vals = { "'test'", "42", "'somestring'" };
-    String [] extraArgs = {"--as-parquetfile"};
-
-    runImportTest(TABLE_NAME, types, vals, "", getArgv(false, extraArgs),
-        new ImportTool());
-    verifyHiveDataset(TABLE_NAME, new Object[][]{{"test", 42, "somestring"}});
-  }
-
-  private void verifyHiveDataset(String tableName, Object[][] valsArray) {
-    String datasetUri = String.format("dataset:hive:default/%s",
-        tableName.toLowerCase());
-    assertTrue(Datasets.exists(datasetUri));
-    Dataset dataset = Datasets.load(datasetUri);
-    assertFalse(dataset.isEmpty());
-
-    DatasetReader<GenericRecord> reader = dataset.newReader();
-    try {
-      List<String> expectations = new ArrayList<String>();
-      if (valsArray != null) {
-        for (Object[] vals : valsArray) {
-          expectations.add(Arrays.toString(vals));
-        }
-      }
-
-      while (reader.hasNext() && expectations.size() > 0) {
-        String actual = Arrays.toString(
-            convertGenericRecordToArray(reader.next()));
-        assertTrue("Expect record: " + actual, expectations.remove(actual));
-      }
-      assertFalse(reader.hasNext());
-      assertEquals(0, expectations.size());
-    } finally {
-      reader.close();
-    }
-  }
-
-  private static Object[] convertGenericRecordToArray(GenericRecord record) {
-    Object[] result = new Object[record.getSchema().getFields().size()];
-    for (int i = 0; i < result.length; i++) {
-      result[i] = record.get(i);
-    }
-    return result;
-  }
-
-  /** Test that table is created in hive with no data import. */
-  @Test
-  public void testCreateOnlyHiveImport() throws IOException {
-    final String TABLE_NAME = "CREATE_ONLY_HIVE_IMPORT";
-    setCurTableName(TABLE_NAME);
-    setNumCols(3);
-    String [] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
-    String [] vals = { "'test'", "42", "'somestring'" };
-    runImportTest(TABLE_NAME, types, vals,
-        "createOnlyImport.q", getCreateHiveTableArgs(null),
-        new CreateHiveTableTool());
-  }
-
-  /**
-   * Test that table is created in hive and replaces the existing table if
-   * any.
-   */
-  @Test
-  public void testCreateOverwriteHiveImport() throws IOException {
-    final String TABLE_NAME = "CREATE_OVERWRITE_HIVE_IMPORT";
-    setCurTableName(TABLE_NAME);
-    setNumCols(3);
-    String [] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
-    String [] vals = { "'test'", "42", "'somestring'" };
-    String [] extraArgs = {"--hive-overwrite", "--create-hive-table"};
-    runImportTest(TABLE_NAME, types, vals,
-        "createOverwriteImport.q", getCreateHiveTableArgs(extraArgs),
-        new CreateHiveTableTool());
-    runImportTest(TABLE_NAME, types, vals,
-        "createOverwriteImport.q", getCreateHiveTableArgs(extraArgs),
-        new CreateHiveTableTool());
-  }
-
-  /**
-   * Test that table is created in hive and replaces the existing table if
-   * any.
-   */
-  @Test
-  public void testCreateOverwriteHiveImportAsParquet() throws IOException {
-    final String TABLE_NAME = "CREATE_OVERWRITE_HIVE_IMPORT_AS_PARQUET";
-    setCurTableName(TABLE_NAME);
-    setNumCols(3);
-    String [] types = getTypes();
-    String [] vals = { "'test'", "42", "'somestring'" };
-    String [] extraArgs = {"--as-parquetfile"};
-    ImportTool tool = new ImportTool();
-
-    runImportTest(TABLE_NAME, types, vals, "", getArgv(false, extraArgs), tool);
-    verifyHiveDataset(TABLE_NAME, new Object[][]{{"test", 42, "somestring"}});
-
-    String [] valsToOverwrite = { "'test2'", "24", "'somestring2'" };
-    String [] extraArgsForOverwrite = {"--as-parquetfile", "--hive-overwrite"};
-    runImportTest(TABLE_NAME, types, valsToOverwrite, "",
-        getArgv(false, extraArgsForOverwrite), tool);
-    verifyHiveDataset(TABLE_NAME, new Object[][] {{"test2", 24, "somestring2"}});
-  }
-
-  @Test
-  public void testHiveImportAsParquetWhenTableExistsWithIncompatibleSchema() throws Exception {
-    final String TABLE_NAME = "HIVE_IMPORT_AS_PARQUET_EXISTING_TABLE";
-    setCurTableName(TABLE_NAME);
-    setNumCols(3);
-
-    String [] types = { "VARCHAR(32)", "INTEGER", "DATE" };
-    String [] vals = { "'test'", "42", "'2009-12-31'" };
-    String [] extraArgs = {"--as-parquetfile"};
-
-    createHiveDataSet(TABLE_NAME);
-
-    createTableWithColTypes(types, vals);
-
-    thrown.expect(AvroSchemaMismatchException.class);
-    thrown.expectMessage(ParquetJob.INCOMPATIBLE_AVRO_SCHEMA_MSG + ParquetJob.HIVE_INCOMPATIBLE_AVRO_SCHEMA_MSG);
-
-    SqoopOptions sqoopOptions = getSqoopOptions(getConf());
-    sqoopOptions.setThrowOnError(true);
-    Sqoop sqoop = new Sqoop(new ImportTool(), getConf(), sqoopOptions);
-    sqoop.run(getArgv(false, extraArgs));
-
-  }
-
-  private void createHiveDataSet(String tableName) {
-    Schema dataSetSchema = SchemaBuilder
-        .record(tableName)
-            .fields()
-            .name(getColName(0)).type().nullable().stringType().noDefault()
-            .name(getColName(1)).type().nullable().stringType().noDefault()
-            .name(getColName(2)).type().nullable().stringType().noDefault()
-            .endRecord();
-    String dataSetUri = "dataset:hive:/default/" + tableName;
-    ParquetJob.createDataset(dataSetSchema, Formats.PARQUET.getDefaultCompressionType(), dataSetUri);
-  }
-
-  /**
-   * Test that records are appended to an existing table.
-   */
-  @Test
-  public void testAppendHiveImportAsParquet() throws IOException {
-    final String TABLE_NAME = "APPEND_HIVE_IMPORT_AS_PARQUET";
-    setCurTableName(TABLE_NAME);
-    setNumCols(3);
-    String [] types = getTypes();
-    String [] vals = { "'test'", "42", "'somestring'" };
-    String [] extraArgs = {"--as-parquetfile"};
-    String [] args = getArgv(false, extraArgs);
-    ImportTool tool = new ImportTool();
-
-    runImportTest(TABLE_NAME, types, vals, "", args, tool);
-    verifyHiveDataset(TABLE_NAME, new Object[][]{{"test", 42, "somestring"}});
-
-    String [] valsToAppend = { "'test2'", "4242", "'somestring2'" };
-    runImportTest(TABLE_NAME, types, valsToAppend, "", args, tool);
-    verifyHiveDataset(TABLE_NAME, new Object[][] {
-        {"test2", 4242, "somestring2"}, {"test", 42, "somestring"}});
-  }
-
-  /**
-   * Test hive create and --as-parquetfile options validation.
-   */
-  @Test
-  public void testCreateHiveImportAsParquet() throws ParseException, InvalidOptionsException {
-    final String TABLE_NAME = "CREATE_HIVE_IMPORT_AS_PARQUET";
-    setCurTableName(TABLE_NAME);
-    setNumCols(3);
-    String [] extraArgs = {"--as-parquetfile", "--create-hive-table"};
-    ImportTool tool = new ImportTool();
-
-    thrown.expect(InvalidOptionsException.class);
-    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException 
during Hive table creation with " +
-        "--as-parquetfile");
-    tool.validateOptions(tool.parseArguments(getArgv(false, extraArgs), null,
-        null, true));
-  }
-
-
-  /** Test that dates are coerced properly to strings. */
-  @Test
-  public void testDate() throws IOException {
-    final String TABLE_NAME = "DATE_HIVE_IMPORT";
-    setCurTableName(TABLE_NAME);
-    setNumCols(2);
-    String [] types = { "VARCHAR(32)", "DATE" };
-    String [] vals = { "'test'", "'2009-05-12'" };
-    runImportTest(TABLE_NAME, types, vals, "dateImport.q",
-        getArgv(false, null), new ImportTool());
-  }
-
-  /** Test that NUMERICs are coerced to doubles. */
-  @Test
-  public void testNumeric() throws IOException {
-    final String TABLE_NAME = "NUMERIC_HIVE_IMPORT";
-    setCurTableName(TABLE_NAME);
-    setNumCols(2);
-    String [] types = { "NUMERIC", "CHAR(64)" };
-    String [] vals = { "3.14159", "'foo'" };
-    runImportTest(TABLE_NAME, types, vals, "numericImport.q",
-        getArgv(false, null), new ImportTool());
-  }
-
-  /** Test that DECIMALS using --map-column-hive option maps can run without issues. */
-  @Test
-  public void testDecimalMapColumnHive() throws IOException {
-    final String TABLE_NAME = "DECIMAL_MAP_HIVE_IMPORT";
-    setCurTableName(TABLE_NAME);
-    setNumCols(2);
-    String [] types = { "NUMERIC", "CHAR(64)" };
-    String [] vals = { "12343.14159", "'foo'" };
-
-    ArrayList<String> args = new ArrayList<String>();
-    args.add("--map-column-hive");
-    args.add(BASE_COL_NAME  + "0=DECIMAL(10,10)");
-
-    runImportTest(TABLE_NAME, types, vals, "decimalMapImport.q",
-        getArgv(false, args.toArray(new String[args.size()])), new ImportTool());
-  }
-
-  /** If bin/hive returns an error exit status, we should get an IOException. */
-  @Test
-  public void testHiveExitFails() throws IOException {
-    // The expected script is different than the one which would be generated
-    // by this, so we expect an IOException out.
-    final String TABLE_NAME = "FAILING_HIVE_IMPORT";
-    setCurTableName(TABLE_NAME);
-    setNumCols(2);
-    String [] types = { "NUMERIC", "CHAR(64)" };
-    String [] vals = { "3.14159", "'foo'" };
-
-    thrown.expect(IOException.class);
-    thrown.reportMissingExceptionWithMessage("Expected IOException on 
erroneous Hive exit status");
-    runImportTest(TABLE_NAME, types, vals, "failingImport.q",
-        getArgv(false, null), new ImportTool());
-  }
-
-  /** Test that we can set delimiters how we want them. */
-  @Test
-  public void testCustomDelimiters() throws IOException {
-    final String TABLE_NAME = "CUSTOM_DELIM_IMPORT";
-    setCurTableName(TABLE_NAME);
-    setNumCols(3);
-    String [] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
-    String [] vals = { "'test'", "42", "'somestring'" };
-    String [] extraArgs = {
-      "--fields-terminated-by", ",",
-      "--lines-terminated-by", "|",
-    };
-    runImportTest(TABLE_NAME, types, vals, "customDelimImport.q",
-        getArgv(false, extraArgs), new ImportTool());
-  }
-
-  /**
-   * Test hive import with row that has new line in it.
-   */
-  @Test
-  public void testFieldWithHiveDelims() throws IOException,
-    InterruptedException {
-    final String TABLE_NAME = "FIELD_WITH_NL_HIVE_IMPORT";
-
-    LOG.info("Doing import of single row into FIELD_WITH_NL_HIVE_IMPORT 
table");
-    setCurTableName(TABLE_NAME);
-    setNumCols(3);
-    String[] types = getTypes();
-    String[] vals = { "'test with \n new lines \n'", "42",
-        "'oh no " + '\01' + " field delims " + '\01' + "'", };
-    String[] moreArgs = { "--"+ BaseSqoopTool.HIVE_DROP_DELIMS_ARG };
-
-    runImportTest(TABLE_NAME, types, vals, "fieldWithNewlineImport.q",
-        getArgv(false, moreArgs), new ImportTool());
-
-    LOG.info("Validating data in single row is present in: "
-          + "FIELD_WITH_NL_HIVE_IMPORT table");
-
-    // Ideally, we would actually invoke hive code to verify that record with
-    // record and field delimiters have values replaced and that we have the
-    // proper number of hive records. Unfortunately, this is a non-trivial task,
-    // and better dealt with at an integration test level
-    //
-    // Instead, this assumes the path of the generated table and just validate
-    // map job output.
-
-    // Get and read the raw output file
-    String whDir = getWarehouseDir();
-    File p = new File(new File(whDir, TABLE_NAME), "part-m-00000");
-    File f = new File(p.toString());
-    FileReader fr = new FileReader(f);
-    BufferedReader br = new BufferedReader(fr);
-    try {
-      // verify the output
-      assertEquals(br.readLine(), "test with  new lines " + '\01' + "42"
-          + '\01' + "oh no  field delims ");
-      assertEquals(br.readLine(), null); // should only be one line
-    } catch (IOException ioe) {
-      fail("Unable to read files generated from hive");
-    } finally {
-      br.close();
-    }
-  }
-
-  /**
-   * Test hive import with row that has new line in it.
-   */
-  @Test
-  public void testFieldWithHiveDelimsReplacement() throws IOException,
-    InterruptedException {
-    final String TABLE_NAME = "FIELD_WITH_NL_REPLACEMENT_HIVE_IMPORT";
-
-    LOG.info("Doing import of single row into "
-        + "FIELD_WITH_NL_REPLACEMENT_HIVE_IMPORT table");
-    setCurTableName(TABLE_NAME);
-    setNumCols(3);
-    String[] types = getTypes();
-    String[] vals = { "'test with\nnew lines\n'", "42",
-        "'oh no " + '\01' + " field delims " + '\01' + "'", };
-    String[] moreArgs = { "--"+BaseSqoopTool.HIVE_DELIMS_REPLACEMENT_ARG, " "};
-
-    runImportTest(TABLE_NAME, types, vals,
-        "fieldWithNewlineReplacementImport.q", getArgv(false, moreArgs),
-        new ImportTool());
-
-    LOG.info("Validating data in single row is present in: "
-          + "FIELD_WITH_NL_REPLACEMENT_HIVE_IMPORT table");
-
-    // Ideally, we would actually invoke hive code to verify that record with
-    // record and field delimiters have values replaced and that we have the
-    // proper number of hive records. Unfortunately, this is a non-trivial task,
-    // and better dealt with at an integration test level
-    //
-    // Instead, this assumes the path of the generated table and just validate
-    // map job output.
-
-    // Get and read the raw output file
-    String whDir = getWarehouseDir();
-    File p = new File(new File(whDir, TABLE_NAME), "part-m-00000");
-    File f = new File(p.toString());
-    FileReader fr = new FileReader(f);
-    BufferedReader br = new BufferedReader(fr);
-    try {
-      // verify the output
-      assertEquals(br.readLine(), "test with new lines " + '\01' + "42"
-          + '\01' + "oh no   field delims  ");
-      assertEquals(br.readLine(), null); // should only be one line
-    } catch (IOException ioe) {
-      fail("Unable to read files generated from hive");
-    } finally {
-      br.close();
-    }
-  }
-
-  /**
-   * Test hive drop and replace option validation.
-   */
-  @Test
-  public void testHiveDropAndReplaceOptionValidation() throws ParseException, InvalidOptionsException {
-    LOG.info("Testing conflicting Hive delimiter drop/replace options");
-
-    setNumCols(3);
-    String[] moreArgs = { "--"+BaseSqoopTool.HIVE_DELIMS_REPLACEMENT_ARG, " ",
-      "--"+BaseSqoopTool.HIVE_DROP_DELIMS_ARG, };
-
-    ImportTool tool = new ImportTool();
-
-    thrown.expect(InvalidOptionsException.class);
-    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException 
with conflicting Hive delimiter " +
-        "drop/replace options");
-    tool.validateOptions(tool.parseArguments(getArgv(false, moreArgs), null,
-        null, true));
-  }
-
-  /**
-   * Test hive import into a partitioned Hive table.
-   */
-  @Test
-  public void testImportHiveWithPartitions() throws IOException,
-      InterruptedException {
-    final String TABLE_NAME = "PARTITION_HIVE_IMPORT";
-
-    LOG.info("Doing import of single row into PARTITION_HIVE_IMPORT table");
-    setCurTableName(TABLE_NAME);
-    setNumCols(3);
-    String[] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)", };
-    String[] vals = { "'whoop'", "42", "'I am a row in a partition'", };
-    String[] moreArgs = { "--" + BaseSqoopTool.HIVE_PARTITION_KEY_ARG, "ds",
-        "--" + BaseSqoopTool.HIVE_PARTITION_VALUE_ARG, "20110413", };
-
-    runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
-        getArgv(false, moreArgs), new ImportTool());
-  }
-
-  /**
-   * If partition key is set to one of importing columns, we should get an
-   * IOException.
-   * */
-  @Test
-  public void testImportWithBadPartitionKey() throws IOException {
-    final String TABLE_NAME = "FAILING_PARTITION_HIVE_IMPORT";
-
-    LOG.info("Doing import of single row into " + TABLE_NAME + " table");
-    setCurTableName(TABLE_NAME);
-    setNumCols(3);
-    String[] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)", };
-    String[] vals = { "'key'", "42", "'I am a row in a partition'", };
-
-    String partitionKey = getColNames()[0];
-
-    // Specify 1st column as partition key and import every column of the
-    // table by default (i.e. no --columns option).
-    String[] moreArgs1 = {
-        "--" + BaseSqoopTool.HIVE_PARTITION_KEY_ARG,
-        partitionKey,
-    };
-
-    // Specify 1st column as both partition key and importing column.
-    String[] moreArgs2 = {
-        "--" + BaseSqoopTool.HIVE_PARTITION_KEY_ARG,
-        partitionKey,
-        "--" + BaseSqoopTool.COLUMNS_ARG,
-        partitionKey,
-    };
-
-    // Test hive-import with the 1st args.
-    thrown.expect(IOException.class);
-    thrown.reportMissingExceptionWithMessage("Expected IOException during Hive 
import with partition key " +
-        "as importing column");
-    runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
-        getArgv(false, moreArgs1), new ImportTool());
-
-    // Test hive-import with the 2nd args.
-    thrown.expect(IOException.class);
-    thrown.reportMissingExceptionWithMessage("Expected IOException during Hive 
import with partition key " +
-        "as importing column");
-    runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
-        getArgv(false, moreArgs2), new ImportTool());
-
-    // Test create-hive-table with the 1st args.
-    thrown.expect(IOException.class);
-    thrown.reportMissingExceptionWithMessage("Expected IOException during Hive 
table creation with partition key " +
-        "as importing column");
-    runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
-        getCreateTableArgv(false, moreArgs1), new CreateHiveTableTool());
-  }
-
-}
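
The two delimiter tests above assert only on the raw map output in part-m-00000, so the expected transformation is easy to state on its own. The sketch below is a hypothetical illustration of that behaviour, not Sqoop's actual field-formatting code: --hive-drop-import-delims is expected to strip newlines, carriage returns and the Ctrl-A (\001) field delimiter from string values, while --hive-delims-replacement substitutes them with the given string, which is exactly what the assertions on the generated file check.

/**
 * Minimal sketch (assumption, not Sqoop's implementation) of the field
 * transformations asserted in testFieldWithHiveDelims and
 * testFieldWithHiveDelimsReplacement above.
 */
public final class HiveDelimExample {

  // Characters Hive treats as record/field delimiters by default.
  private static final String HIVE_DELIMS = "[\\n\\r\\u0001]";

  /** Behaviour expected with --hive-drop-import-delims. */
  static String dropDelims(String field) {
    return field.replaceAll(HIVE_DELIMS, "");
  }

  /** Behaviour expected with --hive-delims-replacement <replacement>. */
  static String replaceDelims(String field, String replacement) {
    return field.replaceAll(HIVE_DELIMS, replacement);
  }

  public static void main(String[] args) {
    // Mirrors the row used in testFieldWithHiveDelims above.
    System.out.println(dropDelims("test with \n new lines \n"));
    // -> "test with  new lines "
    System.out.println(replaceDelims("oh no \u0001 field delims \u0001", " "));
    // -> "oh no   field delims  "
  }
}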

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java b/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java
deleted file mode 100644
index dbf0dde..0000000
--- a/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java
+++ /dev/null
@@ -1,289 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.hive;
-
-import java.util.Map;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.sqoop.util.SqlTypeMap;
-
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
-
-import java.sql.Types;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-
-
-/**
- * Test Hive DDL statement generation.
- */
-public class TestTableDefWriter {
-
-  public static final Log LOG = LogFactory.getLog(
-      TestTableDefWriter.class.getName());
-
-  @Rule
-  public ExpectedException thrown = ExpectedException.none();
-
-  // Test getHiveOctalCharCode and expect an IllegalArgumentException.
-  private void expectExceptionInCharCode(int charCode) {
-    thrown.expect(IllegalArgumentException.class);
-    thrown.reportMissingExceptionWithMessage("Expected 
IllegalArgumentException with out-of-range Hive delimiter");
-    TableDefWriter.getHiveOctalCharCode(charCode);
-  }
-
-  @Test
-  public void testHiveOctalCharCode() {
-    assertEquals("\\000", TableDefWriter.getHiveOctalCharCode(0));
-    assertEquals("\\001", TableDefWriter.getHiveOctalCharCode(1));
-    assertEquals("\\012", TableDefWriter.getHiveOctalCharCode((int) '\n'));
-    assertEquals("\\177", TableDefWriter.getHiveOctalCharCode(0177));
-
-    expectExceptionInCharCode(4096);
-    expectExceptionInCharCode(0200);
-    expectExceptionInCharCode(254);
-  }
-
-  @Test
-  public void testDifferentTableNames() throws Exception {
-    Configuration conf = new Configuration();
-    SqoopOptions options = new SqoopOptions();
-    TableDefWriter writer = new TableDefWriter(options, null,
-        "inputTable", "outputTable", conf, false);
-
-    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
-    writer.setColumnTypes(colTypes);
-
-    String createTable = writer.getCreateTableStmt();
-    String loadData = writer.getLoadDataStmt();
-
-    LOG.debug("Create table stmt: " + createTable);
-    LOG.debug("Load data stmt: " + loadData);
-
-    // Assert that the statements generated have the form we expect.
-    assertTrue(createTable.indexOf(
-        "CREATE TABLE IF NOT EXISTS `outputTable`") != -1);
-    assertTrue(loadData.indexOf("INTO TABLE `outputTable`") != -1);
-    assertTrue(loadData.indexOf("/inputTable'") != -1);
-  }
-
-  @Test
-  public void testDifferentTargetDirs() throws Exception {
-    String targetDir = "targetDir";
-    String inputTable = "inputTable";
-    String outputTable = "outputTable";
-
-    Configuration conf = new Configuration();
-    SqoopOptions options = new SqoopOptions();
-    // Specify a different target dir from input table name
-    options.setTargetDir(targetDir);
-    TableDefWriter writer = new TableDefWriter(options, null,
-        inputTable, outputTable, conf, false);
-
-    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
-    writer.setColumnTypes(colTypes);
-
-    String createTable = writer.getCreateTableStmt();
-    String loadData = writer.getLoadDataStmt();
-
-    LOG.debug("Create table stmt: " + createTable);
-    LOG.debug("Load data stmt: " + loadData);
-
-    // Assert that the statements generated have the form we expect.
-    assertTrue(createTable.indexOf(
-        "CREATE TABLE IF NOT EXISTS `" + outputTable + "`") != -1);
-    assertTrue(loadData.indexOf("INTO TABLE `" + outputTable + "`") != -1);
-    assertTrue(loadData.indexOf("/" + targetDir + "'") != -1);
-  }
-
-  @Test
-  public void testPartitions() throws Exception {
-    String[] args = {
-        "--hive-partition-key", "ds",
-        "--hive-partition-value", "20110413",
-    };
-    Configuration conf = new Configuration();
-    SqoopOptions options =
-      new ImportTool().parseArguments(args, null, null, false);
-    TableDefWriter writer = new TableDefWriter(options,
-        null, "inputTable", "outputTable", conf, false);
-
-    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
-    writer.setColumnTypes(colTypes);
-
-    String createTable = writer.getCreateTableStmt();
-    String loadData = writer.getLoadDataStmt();
-
-    assertNotNull(createTable);
-    assertNotNull(loadData);
-    assertEquals("CREATE TABLE IF NOT EXISTS `outputTable` ( ) "
-        + "PARTITIONED BY (ds STRING) "
-        + "ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\054' "
-        + "LINES TERMINATED BY '\\012' STORED AS TEXTFILE", createTable);
-    assertTrue(loadData.endsWith(" PARTITION (ds='20110413')"));
-  }
-
-  @Test
-  public void testLzoSplitting() throws Exception {
-    String[] args = {
-        "--compress",
-        "--compression-codec", "lzop",
-    };
-    Configuration conf = new Configuration();
-    SqoopOptions options =
-      new ImportTool().parseArguments(args, null, null, false);
-    TableDefWriter writer = new TableDefWriter(options,
-        null, "inputTable", "outputTable", conf, false);
-
-    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
-    writer.setColumnTypes(colTypes);
-
-    String createTable = writer.getCreateTableStmt();
-    String loadData = writer.getLoadDataStmt();
-
-    assertNotNull(createTable);
-    assertNotNull(loadData);
-    assertEquals("CREATE TABLE IF NOT EXISTS `outputTable` ( ) "
-        + "ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\054' "
-        + "LINES TERMINATED BY '\\012' STORED AS "
-        + "INPUTFORMAT 'com.hadoop.mapred.DeprecatedLzoTextInputFormat' "
-        + "OUTPUTFORMAT "
-        + "'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'",
-        createTable);
-  }
-
-  @Test
-  public void testUserMappingNoDecimal() throws Exception {
-    String[] args = {
-        "--map-column-hive", "id=STRING,value=INTEGER",
-    };
-    Configuration conf = new Configuration();
-    SqoopOptions options =
-      new ImportTool().parseArguments(args, null, null, false);
-    TableDefWriter writer = new TableDefWriter(options,
-        null, HsqldbTestServer.getTableName(), "outputTable", conf, false);
-
-    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
-    colTypes.put("id", Types.INTEGER);
-    colTypes.put("value", Types.VARCHAR);
-    writer.setColumnTypes(colTypes);
-
-    String createTable = writer.getCreateTableStmt();
-
-    assertNotNull(createTable);
-
-    assertTrue(createTable.contains("`id` STRING"));
-    assertTrue(createTable.contains("`value` INTEGER"));
-
-    assertFalse(createTable.contains("`id` INTEGER"));
-    assertFalse(createTable.contains("`value` STRING"));
-  }
-
-  @Test
-  public void testUserMappingWithDecimal() throws Exception {
-    String[] args = {
-        "--map-column-hive", "id=STRING,value2=DECIMAL(13,5),value1=INTEGER," +
-                             "value3=DECIMAL(4,5),value4=VARCHAR(255)",
-    };
-    Configuration conf = new Configuration();
-    SqoopOptions options =
-        new ImportTool().parseArguments(args, null, null, false);
-    TableDefWriter writer = new TableDefWriter(options,
-        null, HsqldbTestServer.getTableName(), "outputTable", conf, false);
-
-    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
-    colTypes.put("id", Types.INTEGER);
-    colTypes.put("value1", Types.VARCHAR);
-    colTypes.put("value2", Types.DOUBLE);
-    colTypes.put("value3", Types.FLOAT);
-    colTypes.put("value4", Types.CHAR);
-    writer.setColumnTypes(colTypes);
-
-    String createTable = writer.getCreateTableStmt();
-
-    assertNotNull(createTable);
-
-    assertTrue(createTable.contains("`id` STRING"));
-    assertTrue(createTable.contains("`value1` INTEGER"));
-    assertTrue(createTable.contains("`value2` DECIMAL(13,5)"));
-    assertTrue(createTable.contains("`value3` DECIMAL(4,5)"));
-    assertTrue(createTable.contains("`value4` VARCHAR(255)"));
-
-    assertFalse(createTable.contains("`id` INTEGER"));
-    assertFalse(createTable.contains("`value1` STRING"));
-    assertFalse(createTable.contains("`value2` DOUBLE"));
-    assertFalse(createTable.contains("`value3` FLOAT"));
-    assertFalse(createTable.contains("`value4` CHAR"));
-  }
-
-  @Test
-  public void testUserMappingFailWhenCantBeApplied() throws Exception {
-    String[] args = {
-        "--map-column-hive", "id=STRING,value=INTEGER",
-    };
-    Configuration conf = new Configuration();
-    SqoopOptions options =
-      new ImportTool().parseArguments(args, null, null, false);
-    TableDefWriter writer = new TableDefWriter(options,
-        null, HsqldbTestServer.getTableName(), "outputTable", conf, false);
-
-    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
-    colTypes.put("id", Types.INTEGER);
-    writer.setColumnTypes(colTypes);
-
-    thrown.expect(IllegalArgumentException.class);
-    thrown.reportMissingExceptionWithMessage("Expected 
IllegalArgumentException on non applied Hive type mapping");
-    String createTable = writer.getCreateTableStmt();
-  }
-
-  @Test
-  public void testHiveDatabase() throws Exception {
-    String[] args = {
-        "--hive-database", "db",
-    };
-    Configuration conf = new Configuration();
-    SqoopOptions options =
-      new ImportTool().parseArguments(args, null, null, false);
-    TableDefWriter writer = new TableDefWriter(options,
-        null, HsqldbTestServer.getTableName(), "outputTable", conf, false);
-
-    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
-    writer.setColumnTypes(colTypes);
-
-    String createTable = writer.getCreateTableStmt();
-    assertNotNull(createTable);
-    assertTrue(createTable.contains("`db`.`outputTable`"));
-
-    String loadStmt = writer.getLoadDataStmt();
-    assertNotNull(loadStmt);
-    assertTrue(createTable.contains("`db`.`outputTable`"));
-  }
-
-}
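
The octal escape behaviour asserted in testHiveOctalCharCode above can be restated in isolation. The following is an illustrative sketch, not the actual TableDefWriter source: delimiter codes in the range 0..0177 are rendered as a backslash followed by three octal digits, and anything larger is rejected with an IllegalArgumentException, matching the assertions and expected failures in that test.

/**
 * Sketch (assumption) of the behaviour exercised by testHiveOctalCharCode:
 * a delimiter such as '\n' (10) becomes the Hive escape "\012".
 */
public final class HiveOctalCharCodeExample {

  static String getHiveOctalCharCode(int charNum) {
    if (charNum > 0177) {
      throw new IllegalArgumentException(
          "Character " + charNum + " is an out-of-range delimiter");
    }
    // Zero-pad to three octal digits and prefix with a backslash.
    return String.format("\\%03o", charNum);
  }

  public static void main(String[] args) {
    System.out.println(getHiveOctalCharCode(0));          // \000
    System.out.println(getHiveOctalCharCode((int) '\n')); // \012
    System.out.println(getHiveOctalCharCode(0177));       // \177
  }
}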

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/io/TestCodecMap.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/io/TestCodecMap.java b/src/test/com/cloudera/sqoop/io/TestCodecMap.java
deleted file mode 100644
index c78a5ae..0000000
--- a/src/test/com/cloudera/sqoop/io/TestCodecMap.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.io;
-
-import java.io.IOException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.compress.CompressionCodec;
-import org.apache.hadoop.io.compress.GzipCodec;
-
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import org.junit.Rule;
-
-import org.junit.rules.ExpectedException;
-
-/**
- * Test looking up codecs by name.
- */
-public class TestCodecMap  {
-
-
-  @Rule
-  public ExpectedException thrown = ExpectedException.none();
-
-  private void verifyCodec(Class<?> c, String codecName)
-      throws UnsupportedCodecException {
-    CompressionCodec codec = CodecMap.getCodec(codecName, new Configuration());
-    assertEquals(codec.getClass(), c);
-  }
-
-  @Test
-  public void testGetCodecNames() {
-    // gzip is picked up from Hadoop defaults
-    assertTrue(CodecMap.getCodecNames().contains("gzip"));
-  }
-
-  @Test
-  public void testGetCodec() throws IOException {
-    verifyCodec(GzipCodec.class, "gzip");
-    verifyCodec(GzipCodec.class, "Gzip");
-    verifyCodec(GzipCodec.class, "GZIP");
-    verifyCodec(GzipCodec.class, "gzipcodec");
-    verifyCodec(GzipCodec.class, "GzipCodec");
-    verifyCodec(GzipCodec.class, "GZIPCODEC");
-    verifyCodec(GzipCodec.class, "org.apache.hadoop.io.compress.GzipCodec");
-  }
-
-  @Test
-  public void testGetShortName() throws UnsupportedCodecException {
-    verifyShortName("gzip", "org.apache.hadoop.io.compress.GzipCodec");
-    verifyShortName("default", "org.apache.hadoop.io.compress.DefaultCodec");
-
-    thrown.expect(UnsupportedCodecException.class);
-    thrown.reportMissingExceptionWithMessage("Expected 
UnsupportedCodecException with invalid codec name during getting " +
-        "short codec name");
-    verifyShortName("NONE", "bogus");
-  }
-
-  private void verifyShortName(String expected, String codecName)
-    throws UnsupportedCodecException {
-    assertEquals(expected,
-      CodecMap.getCodecShortNameByName(codecName, new Configuration()));
-  }
-
-  @Test
-  public void testUnrecognizedCodec() throws UnsupportedCodecException {
-    thrown.expect(UnsupportedCodecException.class);
-    thrown.reportMissingExceptionWithMessage("Expected 
UnsupportedCodecException with invalid codec name");
-    CodecMap.getCodec("bogus", new Configuration());
-  }
-
-}
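
The codec lookups exercised above (the short name "gzip", the simple class name "GzipCodec", and the fully qualified class name) can be illustrated with a small stand-alone sketch. This is an assumption-laden illustration rather than the real CodecMap: it scans the codec classes Hadoop knows about via CompressionCodecFactory.getCodecClasses(conf), matches the requested name case-insensitively, and throws a plain IllegalArgumentException instead of Sqoop's UnsupportedCodecException.

import java.util.List;
import java.util.Locale;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.util.ReflectionUtils;

/** Sketch (assumption) of a case-insensitive codec lookup by name. */
public final class CodecLookupExample {

  static CompressionCodec getCodec(String name, Configuration conf) {
    String wanted = name.toLowerCase(Locale.ROOT);
    List<Class<? extends CompressionCodec>> codecClasses =
        CompressionCodecFactory.getCodecClasses(conf);
    for (Class<? extends CompressionCodec> codecClass : codecClasses) {
      String fullName = codecClass.getName().toLowerCase(Locale.ROOT);
      String simpleName = codecClass.getSimpleName().toLowerCase(Locale.ROOT);
      // "gzipcodec" also answers to the short name "gzip".
      String shortName = simpleName.endsWith("codec")
          ? simpleName.substring(0, simpleName.length() - "codec".length())
          : simpleName;
      if (wanted.equals(fullName) || wanted.equals(simpleName)
          || wanted.equals(shortName)) {
        return ReflectionUtils.newInstance(codecClass, conf);
      }
    }
    throw new IllegalArgumentException("Unknown codec: " + name);
  }

  public static void main(String[] args) {
    Configuration conf = new Configuration();
    System.out.println(getCodec("GZIP", conf).getClass().getName());
    // -> org.apache.hadoop.io.compress.GzipCodec
  }
}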
