http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/TestMultiCols.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/TestMultiCols.java 
b/src/test/com/cloudera/sqoop/TestMultiCols.java
deleted file mode 100644
index 94721b8..0000000
--- a/src/test/com/cloudera/sqoop/TestMultiCols.java
+++ /dev/null
@@ -1,241 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop;
-
-import java.io.IOException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import org.junit.Test;
-
-/**
- * Test cases that import rows containing multiple columns,
- * some of which may contain null values.
- *
- * Also test loading only selected columns from the db.
- */
-public class TestMultiCols extends ImportJobTestCase {
-
-  public static final Log LOG = LogFactory.getLog(
-      TestMultiCols.class.getName());
-
-  /**
-   * Do a full import verification test on a table containing one row.
-   * @param types the types of the columns to insert
-   * @param insertVals the SQL text to use to insert each value
-   * @param validateLine the text to expect as a toString() of the entire row,
-   * as imported by the tool
-   */
-  private void verifyTypes(String [] types , String [] insertVals,
-      String validateLine) {
-    verifyTypes(types, insertVals, validateLine, null);
-  }
-
-  private void verifyTypes(String [] types , String [] insertVals,
-      String validateLine, String [] importColumns) {
-
-    createTableWithColTypes(types, insertVals);
-    verifyImport(validateLine, importColumns);
-    LOG.debug("Verified input line as " + validateLine + " -- ok!");
-  }
-
-  @Test
-  public void testThreeStrings() {
-    String [] types = { "VARCHAR(32)", "VARCHAR(32)", "VARCHAR(32)" };
-    String [] insertVals = { "'foo'", "'bar'", "'baz'" };
-    String validateLine = "foo,bar,baz";
-
-    verifyTypes(types, insertVals, validateLine);
-  }
-
-  @Test
-  public void testStringsWithNull1() {
-    String [] types = { "VARCHAR(32)", "VARCHAR(32)", "VARCHAR(32)" };
-    String [] insertVals = { "'foo'", "null", "'baz'" };
-    String validateLine = "foo,null,baz";
-
-    verifyTypes(types, insertVals, validateLine);
-  }
-
-  @Test
-  public void testStringsWithNull2() {
-    String [] types = { "VARCHAR(32)", "VARCHAR(32)", "VARCHAR(32)" };
-    String [] insertVals = { "null", "'foo'", "'baz'" };
-    String validateLine = "null,foo,baz";
-
-    verifyTypes(types, insertVals, validateLine);
-  }
-
-  @Test
-  public void testStringsWithNull3() {
-    String [] types = { "VARCHAR(32)", "VARCHAR(32)", "VARCHAR(32)" };
-    String [] insertVals = { "'foo'", "'baz'", "null"};
-    String validateLine = "foo,baz,null";
-
-    verifyTypes(types, insertVals, validateLine);
-  }
-
-  @Test
-  public void testThreeInts() {
-    String [] types = { "INTEGER", "INTEGER", "INTEGER" };
-    String [] insertVals = { "1", "2", "3" };
-    String validateLine = "1,2,3";
-
-    verifyTypes(types, insertVals, validateLine);
-  }
-
-  @Test
-  public void testIntsWithNulls() {
-    String [] types = { "INTEGER", "INTEGER", "INTEGER" };
-    String [] insertVals = { "1", "null", "3" };
-    String validateLine = "1,null,3";
-
-    verifyTypes(types, insertVals, validateLine);
-  }
-
-  @Test
-  public void testMixed1() {
-    String [] types = { "INTEGER", "VARCHAR(32)", "DATE" };
-    String [] insertVals = { "1", "'meep'", "'2009-12-31'" };
-    String validateLine = "1,meep,2009-12-31";
-
-    verifyTypes(types, insertVals, validateLine);
-  }
-
-  @Test
-  public void testMixed2() {
-    String [] types = { "INTEGER", "VARCHAR(32)", "DATE" };
-    String [] insertVals = { "null", "'meep'", "'2009-12-31'" };
-    String validateLine = "null,meep,2009-12-31";
-
-    verifyTypes(types, insertVals, validateLine);
-  }
-
-  @Test
-  public void testMixed3() {
-    String [] types = { "INTEGER", "VARCHAR(32)", "DATE" };
-    String [] insertVals = { "1", "'meep'", "null" };
-    String validateLine = "1,meep,null";
-
-    verifyTypes(types, insertVals, validateLine);
-  }
-
-  @Test
-  public void testMixed4() {
-    String [] types = { "NUMERIC", "INTEGER", "NUMERIC" };
-    String [] insertVals = { "-42", "17", "33333333333333333333333.1714" };
-    String validateLine = "-42,17,33333333333333333333333.1714";
-
-    verifyTypes(types, insertVals, validateLine);
-  }
-
-  @Test
-  public void testMixed5() {
-    String [] types = { "NUMERIC", "INTEGER", "NUMERIC" };
-    String [] insertVals = { "null", "17", "33333333333333333333333.0" };
-    String validateLine = "null,17,33333333333333333333333.0";
-
-    verifyTypes(types, insertVals, validateLine);
-  }
-
-  @Test
-  public void testMixed6() {
-    String [] types = { "NUMERIC", "INTEGER", "NUMERIC" };
-    String [] insertVals = { "33333333333333333333333", "17", "-42"};
-    String validateLine = "33333333333333333333333,17,-42";
-
-    verifyTypes(types, insertVals, validateLine);
-  }
-
-  //////////////////////////////////////////////////////////////////////////
-  // the tests below here test the --columns parameter and ensure that
-  // we can selectively import only certain columns.
-  //////////////////////////////////////////////////////////////////////////
-
-  @Test
-  public void testSkipFirstCol() {
-    String [] types = { "NUMERIC", "INTEGER", "NUMERIC" };
-    String [] insertVals = { "33333333333333333333333", "17", "-42"};
-    String validateLine = "17,-42";
-
-    String [] loadCols = {"DATA_COL1", "DATA_COL2"};
-
-    verifyTypes(types, insertVals, validateLine, loadCols);
-  }
-
-  @Test
-  public void testSkipSecondCol() {
-    String [] types = { "NUMERIC", "INTEGER", "NUMERIC" };
-    String [] insertVals = { "33333333333333333333333", "17", "-42"};
-    String validateLine = "33333333333333333333333,-42";
-
-    String [] loadCols = {"DATA_COL0", "DATA_COL2"};
-
-    verifyTypes(types, insertVals, validateLine, loadCols);
-  }
-
-  @Test
-  public void testSkipThirdCol() {
-    String [] types = { "NUMERIC", "INTEGER", "NUMERIC" };
-    String [] insertVals = { "33333333333333333333333", "17", "-42"};
-    String validateLine = "33333333333333333333333,17";
-
-    String [] loadCols = {"DATA_COL0", "DATA_COL1"};
-
-    verifyTypes(types, insertVals, validateLine, loadCols);
-  }
-
-  /**
-   * This tests that the columns argument can handle comma-separated column
-   * names.  So this is like having:
-   *   --columns "DATA_COL0,DATA_COL1,DATA_COL2"
-   * as two args on a sqoop command line
-   *
-   * @throws IOException
-   */
-  @Test
-  public void testSingleColumnsArg() throws IOException {
-    String [] types = { "VARCHAR(32)", "VARCHAR(32)", "VARCHAR(32)" };
-    String [] insertVals = { "'foo'", "'bar'", "'baz'" };
-    String validateLine = "foo,bar,baz";
-    String [] loadCols = {"DATA_COL0,DATA_COL1,DATA_COL2"};
-
-    verifyTypes(types, insertVals, validateLine, loadCols);
-  }
-
-  /**
-   * This tests that the columns argument can handle spaces between column
-   * names.  So this is like having:
-   *   --columns "DATA_COL0, DATA_COL1, DATA_COL2"
-   * as two args on a sqoop command line
-   *
-   * @throws IOException
-   */
-  @Test
-  public void testColumnsWithSpaces() throws IOException {
-    String [] types = { "VARCHAR(32)", "VARCHAR(32)", "VARCHAR(32)" };
-    String [] insertVals = { "'foo'", "'bar'", "'baz'" };
-    String validateLine = "foo,bar,baz";
-    String [] loadCols = {"DATA_COL0, DATA_COL1, DATA_COL2"};
-
-    verifyTypes(types, insertVals, validateLine, loadCols);
-  }
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/TestMultiMaps.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/TestMultiMaps.java 
b/src/test/com/cloudera/sqoop/TestMultiMaps.java
deleted file mode 100644
index c5f487b..0000000
--- a/src/test/com/cloudera/sqoop/TestMultiMaps.java
+++ /dev/null
@@ -1,188 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import com.cloudera.sqoop.testutil.*;
-import org.apache.commons.cli.ParseException;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.mapred.Utils;
-import org.apache.hadoop.util.ReflectionUtils;
-
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.orm.CompilationManager;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
-/**
- * Test that using multiple mapper splits works.
- */
-public class TestMultiMaps extends ImportJobTestCase {
-
-  /**
-   * Create the argv to pass to Sqoop.
-   * @return the argv as an array of strings.
-   */
-  protected String [] getArgv(boolean includeHadoopFlags, String [] colNames,
-      String splitByCol) {
-    String columnsString = "";
-    for (String col : colNames) {
-      columnsString += col + ",";
-    }
-
-    ArrayList<String> args = new ArrayList<String>();
-
-    if (includeHadoopFlags) {
-      CommonArgs.addHadoopFlags(args);
-    }
-
-    args.add("--table");
-    args.add(HsqldbTestServer.getTableName());
-    args.add("--columns");
-    args.add(columnsString);
-    args.add("--split-by");
-    args.add(splitByCol);
-    args.add("--warehouse-dir");
-    args.add(getWarehouseDir());
-    args.add("--connect");
-    args.add(HsqldbTestServer.getUrl());
-    args.add("--as-sequencefile");
-    args.add("--num-mappers");
-    args.add("2");
-
-    return args.toArray(new String[0]);
-  }
-
-  // this test just uses the two int table.
-  protected String getTableName() {
-    return HsqldbTestServer.getTableName();
-  }
-
-  /** @return a list of Path objects for each data file */
-  protected List<Path> getDataFilePaths() throws IOException {
-    List<Path> paths = new ArrayList<Path>();
-    Configuration conf = new Configuration();
-    if (!BaseSqoopTestCase.isOnPhysicalCluster()) {
-      conf.set(CommonArgs.FS_DEFAULT_NAME, CommonArgs.LOCAL_FS);
-    }
-    FileSystem fs = FileSystem.get(conf);
-
-    FileStatus [] stats = fs.listStatus(getTablePath(),
-        new Utils.OutputFileUtils.OutputFilesFilter());
-
-    for (FileStatus stat : stats) {
-      paths.add(stat.getPath());
-    }
-
-    return paths;
-  }
-
-  /**
-   * Given a comma-delimited list of integers, grab and parse the first int.
-   * @param str a comma-delimited list of values, the first of which is an int.
-   * @return the first field in the string, cast to int
-   */
-  private int getFirstInt(String str) {
-    String [] parts = str.split(",");
-    return Integer.parseInt(parts[0]);
-  }
-
-  public void runMultiMapTest(String splitByCol, int expectedSum)
-      throws IOException {
-
-    String [] columns = HsqldbTestServer.getFieldNames();
-    ClassLoader prevClassLoader = null;
-    SequenceFile.Reader reader = null;
-
-    String [] argv = getArgv(true, columns, splitByCol);
-    runImport(argv);
-    try {
-      ImportTool importTool = new ImportTool();
-      SqoopOptions opts = importTool.parseArguments(
-          getArgv(false, columns, splitByCol),
-          null, null, true);
-
-      CompilationManager compileMgr = new CompilationManager(opts);
-      String jarFileName = compileMgr.getJarFilename();
-
-      prevClassLoader = ClassLoaderStack.addJarFile(jarFileName,
-          getTableName());
-
-      List<Path> paths = getDataFilePaths();
-      Configuration conf = new Configuration();
-      int curSum = 0;
-
-      // We expect multiple files. We need to open all the files and sum up the
-      // first column across all of them.
-      for (Path p : paths) {
-        reader = SeqFileReader.getSeqFileReader(p.toString());
-
-        // here we can actually instantiate (k, v) pairs.
-        Object key = ReflectionUtils.newInstance(reader.getKeyClass(), conf);
-        Object val = ReflectionUtils.newInstance(reader.getValueClass(), conf);
-
-        // We know that these values are two ints separated by a ','
-        // character.  Since this is all dynamic, though, we don't want to
-        // actually link against the class and use its methods. So we just
-        // parse this back into int fields manually.  Sum them up and ensure
-        // that we get the expected total for the first column, to verify that
-        // we got all the results from the db into the file.
-
-        // now sum up everything in the file.
-        while (reader.next(key) != null) {
-          reader.getCurrentValue(val);
-          curSum += getFirstInt(val.toString());
-        }
-
-        IOUtils.closeStream(reader);
-        reader = null;
-      }
-
-      assertEquals("Total sum of first db column mismatch", expectedSum,
-          curSum);
-    } catch (InvalidOptionsException ioe) {
-      fail(ioe.toString());
-    } catch (ParseException pe) {
-      fail(pe.toString());
-    } finally {
-      IOUtils.closeStream(reader);
-
-      if (null != prevClassLoader) {
-        ClassLoaderStack.setCurrentClassLoader(prevClassLoader);
-      }
-    }
-  }
-
-  @Test
-  public void testSplitByFirstCol() throws IOException {
-    runMultiMapTest("INTFIELD1", HsqldbTestServer.getFirstColSum());
-  }
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/TestParquetExport.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/TestParquetExport.java 
b/src/test/com/cloudera/sqoop/TestParquetExport.java
deleted file mode 100644
index 680fd73..0000000
--- a/src/test/com/cloudera/sqoop/TestParquetExport.java
+++ /dev/null
@@ -1,459 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop;
-
-import com.cloudera.sqoop.testutil.ExportJobTestCase;
-import com.google.common.collect.Lists;
-import org.apache.avro.Schema;
-import org.apache.avro.Schema.Field;
-import org.apache.avro.generic.GenericData;
-import org.apache.avro.generic.GenericRecord;
-import org.junit.Rule;
-
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
-import org.kitesdk.data.*;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.List;
-
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-
-
-/**
- * Test that we can export Parquet Data Files from HDFS into databases.
- */
-public class TestParquetExport extends ExportJobTestCase {
-
-  @Rule
-  public ExpectedException thrown = ExpectedException.none();
-
-  /**
-   * @return an argv for the CodeGenTool to use when creating tables to export.
-   */
-  protected String [] getCodeGenArgv(String... extraArgs) {
-    List<String> codeGenArgv = new ArrayList<String>();
-
-    if (null != extraArgs) {
-      for (String arg : extraArgs) {
-        codeGenArgv.add(arg);
-      }
-    }
-
-    codeGenArgv.add("--table");
-    codeGenArgv.add(getTableName());
-    codeGenArgv.add("--connect");
-    codeGenArgv.add(getConnectString());
-
-    return codeGenArgv.toArray(new String[0]);
-  }
-
-  /** When generating data for export tests, each column is generated
-      according to a ColumnGenerator. Methods exist for determining
-      what to put into Parquet objects in the files to export, as well
-      as what the object representation of the column as returned by
-      the database should look like.
-    */
-  public interface ColumnGenerator {
-    /** For a row with id rowNum, what should we write into that
-        Parquet record to export?
-      */
-    Object getExportValue(int rowNum);
-
-    /** Return the Parquet schema for the field. */
-    Schema getColumnParquetSchema();
-
-    /** For a row with id rowNum, what should the database return
-        for the given column's value?
-      */
-    Object getVerifyValue(int rowNum);
-
-    /** Return the column type to put in the CREATE TABLE statement. */
-    String getColumnType();
-  }
-
-  private ColumnGenerator colGenerator(final Object exportValue,
-      final Schema schema, final Object verifyValue,
-      final String columnType) {
-    return new ColumnGenerator() {
-      @Override
-      public Object getVerifyValue(int rowNum) {
-        return verifyValue;
-      }
-      @Override
-      public Object getExportValue(int rowNum) {
-        return exportValue;
-      }
-      @Override
-      public String getColumnType() {
-        return columnType;
-      }
-      @Override
-      public Schema getColumnParquetSchema() {
-        return schema;
-      }
-    };
-  }
-
-  /**
-   * Create a data file that gets exported to the db.
-   * @param fileNum the number of the file (for multi-file export)
-   * @param numRecords how many records to write to the file.
-   */
-  protected void createParquetFile(int fileNum, int numRecords,
-      ColumnGenerator... extraCols) throws IOException {
-
-    String uri = "dataset:file:" + getTablePath();
-    Schema schema = buildSchema(extraCols);
-    DatasetDescriptor descriptor = new DatasetDescriptor.Builder()
-      .schema(schema)
-      .format(Formats.PARQUET)
-      .build();
-    Dataset dataset = Datasets.create(uri, descriptor);
-    DatasetWriter writer = dataset.newWriter();
-    try {
-      for (int i = 0; i < numRecords; i++) {
-        GenericRecord record = new GenericData.Record(schema);
-        record.put("id", i);
-        record.put("msg", getMsgPrefix() + i);
-        addExtraColumns(record, i, extraCols);
-        writer.write(record);
-      }
-    } finally {
-      writer.close();
-    }
-  }
-
-  private Schema buildSchema(ColumnGenerator... extraCols) {
-    List<Field> fields = new ArrayList<Field>();
-    fields.add(buildField("id", Schema.Type.INT));
-    fields.add(buildField("msg", Schema.Type.STRING));
-    int colNum = 0;
-    if (null != extraCols) {
-      for (ColumnGenerator gen : extraCols) {
-        if (gen.getColumnParquetSchema() != null) {
-          fields.add(buildParquetField(forIdx(colNum++), 
gen.getColumnParquetSchema()));
-        }
-      }
-    }
-    Schema schema = Schema.createRecord("myschema", null, null, false);
-    schema.setFields(fields);
-    return schema;
-  }
-
-  private void addExtraColumns(GenericRecord record, int rowNum,
-      ColumnGenerator[] extraCols) {
-    int colNum = 0;
-    if (null != extraCols) {
-      for (ColumnGenerator gen : extraCols) {
-        if (gen.getColumnParquetSchema() != null) {
-          record.put(forIdx(colNum++), gen.getExportValue(rowNum));
-        }
-      }
-    }
-  }
-
-  private Field buildField(String name, Schema.Type type) {
-    return new Field(name, Schema.create(type), null, null);
-  }
-
-  private Field buildParquetField(String name, Schema schema) {
-    return new Field(name, schema, null, null);
-  }
-
-  /** Return the column name for a column index.
-   *  Each table contains two columns named 'id' and 'msg', and then an
-   *  arbitrary number of additional columns defined by ColumnGenerators.
-   *  These columns are referenced by idx 0, 1, 2...
-   *  @param idx the index of the ColumnGenerator in the array passed to
-   *   createTable().
-   *  @return the name of the column
-   */
-  protected String forIdx(int idx) {
-    return "col" + idx;
-  }
-
-  /**
-   * Return a SQL statement that drops a table, if it exists.
-   * @param tableName the table to drop.
-   * @return the SQL statement to drop that table.
-   */
-  protected String getDropTableStatement(String tableName) {
-    return "DROP TABLE " + tableName + " IF EXISTS";
-  }
-
-  /** Create the table definition to export to, removing any prior table.
-      By specifying ColumnGenerator arguments, you can add extra columns
-      to the table of arbitrary type.
-   */
-  private void createTable(ColumnGenerator... extraColumns)
-      throws SQLException {
-    Connection conn = getConnection();
-    PreparedStatement statement = conn.prepareStatement(
-        getDropTableStatement(getTableName()),
-        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
-    try {
-      statement.executeUpdate();
-      conn.commit();
-    } finally {
-      statement.close();
-    }
-
-    StringBuilder sb = new StringBuilder();
-    sb.append("CREATE TABLE ");
-    sb.append(getTableName());
-    sb.append(" (\"ID\" INT NOT NULL PRIMARY KEY, \"MSG\" VARCHAR(64)");
-    int colNum = 0;
-    for (ColumnGenerator gen : extraColumns) {
-      if (gen.getColumnType() != null) {
-        sb.append(", \"" + forIdx(colNum++) + "\" " + gen.getColumnType());
-      }
-    }
-    sb.append(")");
-
-    statement = conn.prepareStatement(sb.toString(),
-        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
-    try {
-      statement.executeUpdate();
-      conn.commit();
-    } finally {
-      statement.close();
-    }
-  }
-
-  /**
-   * Create the table definition to export and also inserting one records for
-   * identifying the updates. Issue [SQOOP-2846]
-   */
-  private void createTableWithInsert() throws SQLException {
-    Connection conn = getConnection();
-    PreparedStatement statement = 
conn.prepareStatement(getDropTableStatement(getTableName()),
-        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
-    try {
-      statement.executeUpdate();
-      conn.commit();
-    } finally {
-      statement.close();
-    }
-
-    StringBuilder sb = new StringBuilder();
-    sb.append("CREATE TABLE ");
-    sb.append(getTableName());
-    sb.append(" (id INT NOT NULL PRIMARY KEY, msg VARCHAR(64)");
-    sb.append(")");
-    statement = conn.prepareStatement(sb.toString(), 
ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
-    try {
-      statement.executeUpdate();
-      Statement statement2 = conn.createStatement();
-      String insertCmd = "INSERT INTO " + getTableName() + " (ID,MSG) VALUES(" 
+ 0 + ",'testMsg');";
-      statement2.execute(insertCmd);
-      conn.commit();
-    } finally {
-      statement.close();
-    }
-  }
-
-  /** Verify that on a given row, a column has a given value.
-   * @param id the id column specifying the row to test.
-   */
-  private void assertColValForRowId(int id, String colName, Object expectedVal)
-      throws SQLException {
-    Connection conn = getConnection();
-    LOG.info("Verifying column " + colName + " has value " + expectedVal);
-
-    PreparedStatement statement = conn.prepareStatement(
-        "SELECT \"" + colName + "\" FROM " + getTableName() + " WHERE \"ID\" = 
" + id,
-        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
-    Object actualVal = null;
-    try {
-      ResultSet rs = statement.executeQuery();
-      try {
-        rs.next();
-        actualVal = rs.getObject(1);
-      } finally {
-        rs.close();
-      }
-    } finally {
-      statement.close();
-    }
-
-    if (expectedVal != null && expectedVal instanceof byte[]) {
-      assertArrayEquals((byte[]) expectedVal, (byte[]) actualVal);
-    } else {
-      assertEquals("Got unexpected column value", expectedVal, actualVal);
-    }
-  }
-
-  /** Verify that for the max and min values of the 'id' column, the values
-      for a given column meet the expected values.
-   */
-  protected void assertColMinAndMax(String colName, ColumnGenerator generator)
-      throws SQLException {
-    Connection conn = getConnection();
-    int minId = getMinRowId(conn);
-    int maxId = getMaxRowId(conn);
-
-    LOG.info("Checking min/max for column " + colName + " with type "
-        + generator.getColumnType());
-
-    Object expectedMin = generator.getVerifyValue(minId);
-    Object expectedMax = generator.getVerifyValue(maxId);
-
-    assertColValForRowId(minId, colName, expectedMin);
-    assertColValForRowId(maxId, colName, expectedMax);
-  }
-
-  @Test
-  public void testSupportedParquetTypes() throws IOException, SQLException {
-    String[] argv = {};
-    final int TOTAL_RECORDS = 1 * 10;
-
-    byte[] b = new byte[] { (byte) 1, (byte) 2 };
-    Schema fixed = Schema.createFixed("myfixed", null, null, 2);
-    Schema enumeration = Schema.createEnum("myenum", null, null,
-        Lists.newArrayList("a", "b"));
-
-    ColumnGenerator[] gens = new ColumnGenerator[] {
-      colGenerator(true, Schema.create(Schema.Type.BOOLEAN), true, "BIT"),
-      colGenerator(100, Schema.create(Schema.Type.INT), 100, "INTEGER"),
-      colGenerator(200L, Schema.create(Schema.Type.LONG), 200L, "BIGINT"),
-      // HSQLDB maps REAL to double, not float:
-      colGenerator(1.0f, Schema.create(Schema.Type.FLOAT), 1.0d, "REAL"),
-      colGenerator(2.0d, Schema.create(Schema.Type.DOUBLE), 2.0d, "DOUBLE"),
-      colGenerator("s", Schema.create(Schema.Type.STRING), "s", "VARCHAR(8)"),
-      colGenerator(ByteBuffer.wrap(b), Schema.create(Schema.Type.BYTES),
-          b, "VARBINARY(8)"),
-      colGenerator(new GenericData.Fixed(fixed, b), fixed,
-          b, "BINARY(2)"),
-      colGenerator(new GenericData.EnumSymbol(enumeration, "a"), enumeration,
-          "a", "VARCHAR(8)"),
-    };
-    createParquetFile(0, TOTAL_RECORDS, gens);
-    createTable(gens);
-    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
-    verifyExport(TOTAL_RECORDS);
-    for (int i = 0; i < gens.length; i++) {
-      assertColMinAndMax(forIdx(i), gens[i]);
-    }
-  }
-
-  @Test
-  public void testNullableField() throws IOException, SQLException {
-    String[] argv = {};
-    final int TOTAL_RECORDS = 1 * 10;
-
-    List<Schema> childSchemas = new ArrayList<Schema>();
-    childSchemas.add(Schema.create(Schema.Type.NULL));
-    childSchemas.add(Schema.create(Schema.Type.STRING));
-    Schema schema =  Schema.createUnion(childSchemas);
-    ColumnGenerator gen0 = colGenerator(null, schema, null, "VARCHAR(64)");
-    ColumnGenerator gen1 = colGenerator("s", schema, "s", "VARCHAR(64)");
-    createParquetFile(0, TOTAL_RECORDS, gen0, gen1);
-    createTable(gen0, gen1);
-    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
-    verifyExport(TOTAL_RECORDS);
-    assertColMinAndMax(forIdx(0), gen0);
-    assertColMinAndMax(forIdx(1), gen1);
-  }
-
-  @Test
-  public void testParquetRecordsNotSupported() throws IOException, 
SQLException {
-    String[] argv = {};
-    final int TOTAL_RECORDS = 1;
-
-    Schema schema =  Schema.createRecord("nestedrecord", null, null, false);
-    schema.setFields(Lists.newArrayList(buildField("myint",
-        Schema.Type.INT)));
-    GenericRecord record = new GenericData.Record(schema);
-    record.put("myint", 100);
-    // DB type is not used so can be anything:
-    ColumnGenerator gen = colGenerator(record, schema, null, "VARCHAR(64)");
-    createParquetFile(0, TOTAL_RECORDS,  gen);
-    createTable(gen);
-
-    thrown.expect(Exception.class);
-    thrown.reportMissingExceptionWithMessage("Expected Exception as Parquet 
records are not supported");
-    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
-  }
-
-  @Test
-  public void testMissingDatabaseFields() throws IOException, SQLException {
-    String[] argv = {};
-    final int TOTAL_RECORDS = 1;
-
-    // null column type means don't create a database column
-    // the Parquet value will not be exported
-    ColumnGenerator gen = colGenerator(100, Schema.create(Schema.Type.INT),
-        null, null);
-    createParquetFile(0, TOTAL_RECORDS, gen);
-    createTable(gen);
-    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
-    verifyExport(TOTAL_RECORDS);
-  }
-
-  @Test
-  public void testParquetWithUpdateKey() throws IOException, SQLException {
-    String[] argv = { "--update-key", "ID" };
-    final int TOTAL_RECORDS = 1;
-    createParquetFile(0, TOTAL_RECORDS, null);
-    createTableWithInsert();
-    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
-    verifyExport(getMsgPrefix() + "0");
-  }
-
-  // Test Case for Issue [SQOOP-2846]
-  @Test
-  public void testParquetWithUpsert() throws IOException, SQLException {
-    String[] argv = { "--update-key", "ID", "--update-mode", "allowinsert" };
-    final int TOTAL_RECORDS = 2;
-    // ColumnGenerator gen = colGenerator("100",
-    // Schema.create(Schema.Type.STRING), null, "VARCHAR(64)");
-    createParquetFile(0, TOTAL_RECORDS, null);
-    createTableWithInsert();
-
-    thrown.expect(Exception.class);
-    thrown.reportMissingExceptionWithMessage("Expected Exception during 
Parquet export with --update-mode");
-    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
-  }
-
-  @Test
-  public void testMissingParquetFields()  throws IOException, SQLException {
-    String[] argv = {};
-    final int TOTAL_RECORDS = 1;
-
-    // null Parquet schema means don't create an Parquet field
-    ColumnGenerator gen = colGenerator(null, null, null, "VARCHAR(64)");
-    createParquetFile(0, TOTAL_RECORDS, gen);
-    createTable(gen);
-
-    thrown.expect(Exception.class);
-    thrown.reportMissingExceptionWithMessage("Expected Exception on missing 
Parquet fields");
-    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
-  }
-
-
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/TestParquetImport.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/TestParquetImport.java 
b/src/test/com/cloudera/sqoop/TestParquetImport.java
deleted file mode 100644
index 4ff025b..0000000
--- a/src/test/com/cloudera/sqoop/TestParquetImport.java
+++ /dev/null
@@ -1,357 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop;
-
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-
-import org.apache.avro.Schema;
-import org.apache.avro.Schema.Field;
-import org.apache.avro.Schema.Type;
-import org.apache.avro.file.DataFileReader;
-import org.apache.avro.generic.GenericRecord;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.fs.Path;
-import org.junit.Test;
-import org.kitesdk.data.CompressionType;
-import org.kitesdk.data.Dataset;
-import org.kitesdk.data.DatasetReader;
-import org.kitesdk.data.Datasets;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-/**
- * Tests --as-parquetfile.
- */
-public class TestParquetImport extends ImportJobTestCase {
-
-  public static final Log LOG = LogFactory
-      .getLog(TestParquetImport.class.getName());
-
-  /**
-   * Create the argv to pass to Sqoop.
-   *
-   * @return the argv as an array of strings.
-   */
-  protected String[] getOutputArgv(boolean includeHadoopFlags,
-          String[] extraArgs) {
-    ArrayList<String> args = new ArrayList<String>();
-
-    if (includeHadoopFlags) {
-      CommonArgs.addHadoopFlags(args);
-    }
-
-    args.add("--table");
-    args.add(getTableName());
-    args.add("--connect");
-    args.add(HsqldbTestServer.getUrl());
-    args.add("--warehouse-dir");
-    args.add(getWarehouseDir());
-    args.add("--m");
-    args.add("1");
-    args.add("--split-by");
-    args.add("INTFIELD1");
-    args.add("--as-parquetfile");
-    if (extraArgs != null) {
-      args.addAll(Arrays.asList(extraArgs));
-    }
-
-    return args.toArray(new String[args.size()]);
-  }
-
-  protected String[] getOutputQueryArgv(boolean includeHadoopFlags, String[] extraArgs) {
-    ArrayList<String> args = new ArrayList<String>();
-
-    if (includeHadoopFlags) {
-      CommonArgs.addHadoopFlags(args);
-    }
-
-    args.add("--query");
-    args.add("SELECT * FROM " + getTableName() + " WHERE $CONDITIONS");
-    args.add("--connect");
-    args.add(HsqldbTestServer.getUrl());
-    args.add("--target-dir");
-    args.add(getWarehouseDir() + "/" + getTableName());
-    args.add("--m");
-    args.add("1");
-    args.add("--split-by");
-    args.add("INTFIELD1");
-    args.add("--as-parquetfile");
-    if (extraArgs != null) {
-      args.addAll(Arrays.asList(extraArgs));
-    }
-
-    return args.toArray(new String[args.size()]);
-  }
-
-  @Test
-  public void testSnappyCompression() throws IOException {
-    runParquetImportTest("snappy");
-  }
-
-  @Test
-  public void testDeflateCompression() throws IOException {
-    runParquetImportTest("deflate");
-  }
-
-  private void runParquetImportTest(String codec) throws IOException {
-    String[] types = {"BIT", "INTEGER", "BIGINT", "REAL", "DOUBLE", "VARCHAR(6)",
-        "VARBINARY(2)",};
-    String[] vals = {"true", "100", "200", "1.0", "2.0", "'s'", "'0102'", };
-    createTableWithColTypes(types, vals);
-
-    String [] extraArgs = { "--compression-codec", codec};
-    runImport(getOutputArgv(true, extraArgs));
-
-    assertEquals(CompressionType.forName(codec), getCompressionType());
-
-    Schema schema = getSchema();
-    assertEquals(Type.RECORD, schema.getType());
-    List<Field> fields = schema.getFields();
-    assertEquals(types.length, fields.size());
-    checkField(fields.get(0), "DATA_COL0", Type.BOOLEAN);
-    checkField(fields.get(1), "DATA_COL1", Type.INT);
-    checkField(fields.get(2), "DATA_COL2", Type.LONG);
-    checkField(fields.get(3), "DATA_COL3", Type.FLOAT);
-    checkField(fields.get(4), "DATA_COL4", Type.DOUBLE);
-    checkField(fields.get(5), "DATA_COL5", Type.STRING);
-    checkField(fields.get(6), "DATA_COL6", Type.BYTES);
-
-    DatasetReader<GenericRecord> reader = getReader();
-    try {
-      GenericRecord record1 = reader.next();
-      assertNotNull(record1);
-      assertEquals("DATA_COL0", true, record1.get("DATA_COL0"));
-      assertEquals("DATA_COL1", 100, record1.get("DATA_COL1"));
-      assertEquals("DATA_COL2", 200L, record1.get("DATA_COL2"));
-      assertEquals("DATA_COL3", 1.0f, record1.get("DATA_COL3"));
-      assertEquals("DATA_COL4", 2.0, record1.get("DATA_COL4"));
-      assertEquals("DATA_COL5", "s", record1.get("DATA_COL5"));
-      Object object = record1.get("DATA_COL6");
-      assertTrue(object instanceof ByteBuffer);
-      ByteBuffer b = ((ByteBuffer) object);
-      assertEquals((byte) 1, b.get(0));
-      assertEquals((byte) 2, b.get(1));
-      assertFalse(reader.hasNext());
-    } finally {
-      reader.close();
-    }
-  }
-
-  @Test
-  public void testOverrideTypeMapping() throws IOException {
-    String [] types = { "INT" };
-    String [] vals = { "10" };
-    createTableWithColTypes(types, vals);
-
-    String [] extraArgs = { "--map-column-java", "DATA_COL0=String"};
-    runImport(getOutputArgv(true, extraArgs));
-
-    Schema schema = getSchema();
-    assertEquals(Type.RECORD, schema.getType());
-    List<Field> fields = schema.getFields();
-    assertEquals(types.length, fields.size());
-    checkField(fields.get(0), "DATA_COL0", Type.STRING);
-
-    DatasetReader<GenericRecord> reader = getReader();
-    try {
-      assertTrue(reader.hasNext());
-      GenericRecord record1 = reader.next();
-      assertEquals("DATA_COL0", "10", record1.get("DATA_COL0"));
-      assertFalse(reader.hasNext());
-    } finally {
-      reader.close();
-    }
-  }
-
-  @Test
-  public void testFirstUnderscoreInColumnName() throws IOException {
-    String [] names = { "_NAME" };
-    String [] types = { "INT" };
-    String [] vals = { "1987" };
-    createTableWithColTypesAndNames(names, types, vals);
-
-    runImport(getOutputArgv(true, null));
-
-    Schema schema = getSchema();
-    assertEquals(Type.RECORD, schema.getType());
-    List<Field> fields = schema.getFields();
-    assertEquals(types.length, fields.size());
-    checkField(fields.get(0), "__NAME", Type.INT);
-
-    DatasetReader<GenericRecord> reader = getReader();
-    try {
-      assertTrue(reader.hasNext());
-      GenericRecord record1 = reader.next();
-      assertEquals("__NAME", 1987, record1.get("__NAME"));
-      assertFalse(reader.hasNext());
-    } finally {
-      reader.close();
-    }
-  }
-
-  @Test
-  public void testNonIdentCharactersInColumnName() throws IOException {
-    String [] names = { "test_p-a+r/quet" };
-    String [] types = { "INT" };
-    String [] vals = { "2015" };
-    createTableWithColTypesAndNames(names, types, vals);
-
-    runImport(getOutputArgv(true, null));
-
-    Schema schema = getSchema();
-    assertEquals(Type.RECORD, schema.getType());
-    List<Field> fields = schema.getFields();
-    assertEquals(types.length, fields.size());
-    checkField(fields.get(0), "TEST_P_A_R_QUET", Type.INT);
-
-    DatasetReader<GenericRecord> reader = getReader();
-    try {
-      assertTrue(reader.hasNext());
-      GenericRecord record1 = reader.next();
-      assertEquals("TEST_P_A_R_QUET", 2015, record1.get("TEST_P_A_R_QUET"));
-      assertFalse(reader.hasNext());
-    } finally {
-      reader.close();
-    }
-  }
-
-  @Test
-  public void testNullableParquetImport() throws IOException, SQLException {
-    String [] types = { "INT" };
-    String [] vals = { null };
-    createTableWithColTypes(types, vals);
-
-    runImport(getOutputArgv(true, null));
-
-    DatasetReader<GenericRecord> reader = getReader();
-    try {
-      assertTrue(reader.hasNext());
-      GenericRecord record1 = reader.next();
-      assertNull(record1.get("DATA_COL0"));
-      assertFalse(reader.hasNext());
-    } finally {
-      reader.close();
-    }
-  }
-
-  @Test
-  public void testQueryImport() throws IOException, SQLException {
-    String [] types = { "INT" };
-    String [] vals = { "1" };
-    createTableWithColTypes(types, vals);
-
-    runImport(getOutputQueryArgv(true, null));
-
-    DatasetReader<GenericRecord> reader = getReader();
-    try {
-      assertTrue(reader.hasNext());
-      GenericRecord record1 = reader.next();
-      assertEquals(1, record1.get("DATA_COL0"));
-      assertFalse(reader.hasNext());
-    } finally {
-      reader.close();
-    }
-  }
-
-  @Test
-  public void testIncrementalParquetImport() throws IOException, SQLException {
-    String [] types = { "INT" };
-    String [] vals = { "1" };
-    createTableWithColTypes(types, vals);
-
-    runImport(getOutputArgv(true, null));
-    runImport(getOutputArgv(true, new String[]{"--append"}));
-
-    DatasetReader<GenericRecord> reader = getReader();
-    try {
-      assertTrue(reader.hasNext());
-      GenericRecord record1 = reader.next();
-      assertEquals(1, record1.get("DATA_COL0"));
-      record1 = reader.next();
-      assertEquals(1, record1.get("DATA_COL0"));
-      assertFalse(reader.hasNext());
-    } finally {
-      reader.close();
-    }
-  }
-
-  @Test
-  public void testOverwriteParquetDatasetFail() throws IOException, SQLException {
-    String [] types = { "INT" };
-    String [] vals = {};
-    createTableWithColTypes(types, vals);
-
-    runImport(getOutputArgv(true, null));
-    try {
-      runImport(getOutputArgv(true, null));
-      fail("");
-    } catch (IOException ex) {
-      // ok
-    }
-  }
-
-  private CompressionType getCompressionType() {
-    return getDataset().getDescriptor().getCompressionType();
-  }
-
-  private Schema getSchema() {
-    return getDataset().getDescriptor().getSchema();
-  }
-
-  private DatasetReader<GenericRecord> getReader() {
-    return getDataset().newReader();
-  }
-
-  private Dataset<GenericRecord> getDataset() {
-    String uri = "dataset:file:" + getTablePath();
-    return Datasets.load(uri, GenericRecord.class);
-  }
-
-  @Override
-  public void tearDown() {
-    super.tearDown();
-    String uri = "dataset:file:" + getTablePath();
-    if (Datasets.exists(uri)) {
-      Datasets.delete(uri);
-    }
-  }
-
-  private void checkField(Field field, String name, Type type) {
-    assertEquals(name, field.name());
-    assertEquals(Type.UNION, field.schema().getType());
-    assertEquals(Type.NULL, field.schema().getTypes().get(0).getType());
-    assertEquals(type, field.schema().getTypes().get(1).getType());
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/TestQuery.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/TestQuery.java b/src/test/com/cloudera/sqoop/TestQuery.java
deleted file mode 100644
index 0836b8d..0000000
--- a/src/test/com/cloudera/sqoop/TestQuery.java
+++ /dev/null
@@ -1,193 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop;
-
-import java.io.IOException;
-import java.util.ArrayList;
-
-import org.apache.commons.cli.ParseException;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.util.ReflectionUtils;
-
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.orm.CompilationManager;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.testutil.SeqFileReader;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
-/**
- * Test that --query works in Sqoop.
- */
-public class TestQuery extends ImportJobTestCase {
-
-  /**
-   * Create the argv to pass to Sqoop.
-   * @return the argv as an array of strings.
-   */
-  protected String [] getArgv(boolean includeHadoopFlags, String query,
-      String targetDir, boolean allowParallel) {
-
-    ArrayList<String> args = new ArrayList<String>();
-
-    if (includeHadoopFlags) {
-      CommonArgs.addHadoopFlags(args);
-    }
-
-    args.add("--query");
-    args.add(query);
-    args.add("--split-by");
-    args.add("INTFIELD1");
-    args.add("--connect");
-    args.add(HsqldbTestServer.getUrl());
-    args.add("--as-sequencefile");
-    args.add("--target-dir");
-    args.add(targetDir);
-    args.add("--class-name");
-    args.add(getTableName());
-    if (allowParallel) {
-      args.add("--num-mappers");
-      args.add("2");
-    } else {
-      args.add("--num-mappers");
-      args.add("1");
-    }
-
-    return args.toArray(new String[0]);
-  }
-
-  // this test just uses the two int table.
-  protected String getTableName() {
-    return HsqldbTestServer.getTableName();
-  }
-
-
-  /**
-   * Given a comma-delimited list of integers, grab and parse the first int.
-   * @param str a comma-delimited list of values, the first of which is an int.
-   * @return the first field in the string, cast to int
-   */
-  private int getFirstInt(String str) {
-    String [] parts = str.split(",");
-    return Integer.parseInt(parts[0]);
-  }
-
-  public void runQueryTest(String query, String firstValStr,
-      int numExpectedResults, int expectedSum, String targetDir)
-      throws IOException {
-
-    ClassLoader prevClassLoader = null;
-    SequenceFile.Reader reader = null;
-
-    String [] argv = getArgv(true, query, targetDir, false);
-    runImport(argv);
-    try {
-      SqoopOptions opts = new ImportTool().parseArguments(
-          getArgv(false, query, targetDir, false),
-          null, null, true);
-
-      CompilationManager compileMgr = new CompilationManager(opts);
-      String jarFileName = compileMgr.getJarFilename();
-
-      prevClassLoader = ClassLoaderStack.addJarFile(jarFileName,
-          getTableName());
-
-      reader = SeqFileReader.getSeqFileReader(getDataFilePath().toString());
-
-      // here we can actually instantiate (k, v) pairs.
-      Configuration conf = new Configuration();
-      Object key = ReflectionUtils.newInstance(reader.getKeyClass(), conf);
-      Object val = ReflectionUtils.newInstance(reader.getValueClass(), conf);
-
-      if (reader.next(key) == null) {
-        fail("Empty SequenceFile during import");
-      }
-
-      // make sure that the value we think should be at the top, is.
-      reader.getCurrentValue(val);
-      assertEquals("Invalid ordering within sorted SeqFile", firstValStr,
-          val.toString());
-
-      // We know that these values are two ints separated by a ',' character.
-      // Since this is all dynamic, though, we don't want to actually link
-      // against the class and use its methods. So we just parse this back
-      // into int fields manually.  Sum them up and ensure that we get the
-      // expected total for the first column, to verify that we got all the
-      // results from the db into the file.
-      int curSum = getFirstInt(val.toString());
-      int totalResults = 1;
-
-      // now sum up everything else in the file.
-      while (reader.next(key) != null) {
-        reader.getCurrentValue(val);
-        curSum += getFirstInt(val.toString());
-        totalResults++;
-      }
-
-      assertEquals("Total sum of first db column mismatch", expectedSum,
-          curSum);
-      assertEquals("Incorrect number of results for query", numExpectedResults,
-          totalResults);
-    } catch (InvalidOptionsException ioe) {
-      fail(ioe.toString());
-    } catch (ParseException pe) {
-      fail(pe.toString());
-    } finally {
-      IOUtils.closeStream(reader);
-
-      if (null != prevClassLoader) {
-        ClassLoaderStack.setCurrentClassLoader(prevClassLoader);
-      }
-    }
-  }
-
-  @Test
-  public void testSelectStar() throws IOException {
-    runQueryTest("SELECT * FROM " + getTableName()
-        + " WHERE INTFIELD2 > 4 AND $CONDITIONS",
-        "1,8\n", 2, 4, getTablePath().toString());
-  }
-
-  @Test
-  public void testCompoundWhere() throws IOException {
-    runQueryTest("SELECT * FROM " + getTableName()
-        + " WHERE INTFIELD1 > 4 AND INTFIELD2 < 3 AND $CONDITIONS",
-        "7,2\n", 1, 7, getTablePath().toString());
-  }
-
-  @Test
-  public void testFailNoConditions() throws IOException {
-    String [] argv = getArgv(true, "SELECT * FROM " + getTableName(),
-        getTablePath().toString(), true);
-    try {
-      runImport(argv);
-      fail("Expected exception running import without $CONDITIONS");
-    } catch (Exception e) {
-      LOG.info("Got exception " + e + " running job (expected; ok)");
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/TestSplitBy.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/TestSplitBy.java b/src/test/com/cloudera/sqoop/TestSplitBy.java
deleted file mode 100644
index c13fbcc..0000000
--- a/src/test/com/cloudera/sqoop/TestSplitBy.java
+++ /dev/null
@@ -1,166 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop;
-
-import java.io.IOException;
-import java.util.ArrayList;
-
-import org.apache.commons.cli.ParseException;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.util.ReflectionUtils;
-
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.orm.CompilationManager;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.testutil.SeqFileReader;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
-/**
- * Test that --split-by works.
- */
-public class TestSplitBy extends ImportJobTestCase {
-
-  /**
-   * Create the argv to pass to Sqoop.
-   * @return the argv as an array of strings.
-   */
-  protected String [] getArgv(boolean includeHadoopFlags, String [] colNames,
-      String splitByCol) {
-    String columnsString = "";
-    for (String col : colNames) {
-      columnsString += col + ",";
-    }
-
-    ArrayList<String> args = new ArrayList<String>();
-
-    if (includeHadoopFlags) {
-      CommonArgs.addHadoopFlags(args);
-    }
-
-    args.add("--table");
-    args.add(HsqldbTestServer.getTableName());
-    args.add("--columns");
-    args.add(columnsString);
-    args.add("--split-by");
-    args.add(splitByCol);
-    args.add("--warehouse-dir");
-    args.add(getWarehouseDir());
-    args.add("--connect");
-    args.add(HsqldbTestServer.getUrl());
-    args.add("--as-sequencefile");
-    args.add("--num-mappers");
-    args.add("1");
-
-    return args.toArray(new String[0]);
-  }
-
-  // this test just uses the two int table.
-  protected String getTableName() {
-    return HsqldbTestServer.getTableName();
-  }
-
-
-  /**
-   * Given a comma-delimited list of integers, grab and parse the first int.
-   * @param str a comma-delimited list of values, the first of which is an int.
-   * @return the first field in the string, cast to int
-   */
-  private int getFirstInt(String str) {
-    String [] parts = str.split(",");
-    return Integer.parseInt(parts[0]);
-  }
-
-  public void runSplitByTest(String splitByCol, int expectedSum)
-      throws IOException {
-
-    String [] columns = HsqldbTestServer.getFieldNames();
-    ClassLoader prevClassLoader = null;
-    SequenceFile.Reader reader = null;
-
-    String [] argv = getArgv(true, columns, splitByCol);
-    runImport(argv);
-    try {
-      SqoopOptions opts = new ImportTool().parseArguments(
-          getArgv(false, columns, splitByCol),
-          null, null, true);
-
-      CompilationManager compileMgr = new CompilationManager(opts);
-      String jarFileName = compileMgr.getJarFilename();
-      LOG.debug("Got jar from import job: " + jarFileName);
-
-      prevClassLoader = ClassLoaderStack.addJarFile(jarFileName,
-          getTableName());
-
-      reader = SeqFileReader.getSeqFileReader(getDataFilePath().toString());
-
-      // here we can actually instantiate (k, v) pairs.
-      Configuration conf = new Configuration();
-      Object key = ReflectionUtils.newInstance(reader.getKeyClass(), conf);
-      Object val = ReflectionUtils.newInstance(reader.getValueClass(), conf);
-
-      // We know that these values are two ints separated by a ',' character.
-      // Since this is all dynamic, though, we don't want to actually link
-      // against the class and use its methods. So we just parse this back
-      // into int fields manually.  Sum them up and ensure that we get the
-      // expected total for the first column, to verify that we got all the
-      // results from the db into the file.
-
-      // Sum up everything in the file.
-      int curSum = 0;
-      while (reader.next(key) != null) {
-        reader.getCurrentValue(val);
-        curSum += getFirstInt(val.toString());
-      }
-
-      assertEquals("Total sum of first db column mismatch", expectedSum,
-          curSum);
-    } catch (InvalidOptionsException ioe) {
-      fail(ioe.toString());
-    } catch (ParseException pe) {
-      fail(pe.toString());
-    } finally {
-      IOUtils.closeStream(reader);
-
-      if (null != prevClassLoader) {
-        ClassLoaderStack.setCurrentClassLoader(prevClassLoader);
-      }
-    }
-  }
-
-  @Test
-  public void testSplitByFirstCol() throws IOException {
-    String splitByCol = "INTFIELD1";
-    runSplitByTest(splitByCol, HsqldbTestServer.getFirstColSum());
-  }
-
-  @Test
-  public void testSplitBySecondCol() throws IOException {
-    String splitByCol = "INTFIELD2";
-    runSplitByTest(splitByCol, HsqldbTestServer.getFirstColSum());
-  }
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/TestSqoopOptions.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/TestSqoopOptions.java b/src/test/com/cloudera/sqoop/TestSqoopOptions.java
deleted file mode 100644
index dbdd2f1..0000000
--- a/src/test/com/cloudera/sqoop/TestSqoopOptions.java
+++ /dev/null
@@ -1,797 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop;
-
-import java.util.Properties;
-
-import org.apache.commons.lang.ArrayUtils;
-import org.apache.sqoop.manager.oracle.OracleUtils;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
-
-import com.cloudera.sqoop.lib.DelimiterSet;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.tool.BaseSqoopTool;
-import com.cloudera.sqoop.tool.ImportTool;
-
-import static org.apache.sqoop.Sqoop.SQOOP_RETHROW_PROPERTY;
-import static org.hamcrest.CoreMatchers.equalTo;
-import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-/**
- * Test aspects of the SqoopOptions class.
- */
-public class TestSqoopOptions {
-
-  private Properties originalSystemProperties;
-
-  @Rule
-  public ExpectedException thrown = ExpectedException.none();
-
-  @Before
-  public void setup() {
-   originalSystemProperties = System.getProperties();
-  }
-
-  @After
-  public void tearDown() {
-    System.setProperties(originalSystemProperties);
-  }
-
-  // tests for the toChar() parser
-  @Test
-  public void testNormalChar() throws Exception {
-    assertEquals('a', SqoopOptions.toChar("a"));
-  }
-
-  @Test
-  public void testEmptyString() throws Exception {
-    thrown.expect(SqoopOptions.InvalidOptionsException.class);
-    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException on empty string");
-    SqoopOptions.toChar("");
-  }
-
-  @Test
-  public void testNullString() throws Exception {
-    thrown.expect(SqoopOptions.InvalidOptionsException.class);
-    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException on null string");
-    SqoopOptions.toChar(null);
-  }
-
-  @Test
-  public void testTooLong() throws Exception {
-    // Should just use the first character and log a warning.
-    assertEquals('x', SqoopOptions.toChar("xyz"));
-  }
-
-  @Test
-  public void testHexChar1() throws Exception {
-    assertEquals(0xF, SqoopOptions.toChar("\\0xf"));
-  }
-
-  @Test
-  public void testHexChar2() throws Exception {
-    assertEquals(0xF, SqoopOptions.toChar("\\0xF"));
-  }
-
-  @Test
-  public void testHexChar3() throws Exception {
-    assertEquals(0xF0, SqoopOptions.toChar("\\0xf0"));
-  }
-
-  @Test
-  public void testHexChar4() throws Exception {
-    assertEquals(0xF0, SqoopOptions.toChar("\\0Xf0"));
-  }
-
-  @Test
-  public void testEscapeChar1() throws Exception {
-    assertEquals('\n', SqoopOptions.toChar("\\n"));
-  }
-
-  @Test
-  public void testEscapeChar2() throws Exception {
-    assertEquals('\\', SqoopOptions.toChar("\\\\"));
-  }
-
-  @Test
-  public void testEscapeChar3() throws Exception {
-    assertEquals('\\', SqoopOptions.toChar("\\"));
-  }
-
-  @Test
-  public void testWhitespaceToChar() throws Exception {
-    assertEquals(' ', SqoopOptions.toChar(" "));
-    assertEquals(' ', SqoopOptions.toChar("   "));
-    assertEquals('\t', SqoopOptions.toChar("\t"));
-  }
-
-  @Test
-  public void testUnknownEscape1() throws Exception {
-    thrown.expect(SqoopOptions.InvalidOptionsException.class);
-    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException on unknown escaping");
-    SqoopOptions.toChar("\\Q");
-  }
-
-  @Test
-  public void testUnknownEscape2() throws Exception {
-    thrown.expect(SqoopOptions.InvalidOptionsException.class);
-    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException on unknown escaping");
-    SqoopOptions.toChar("\\nn");
-  }
-
-  @Test
-  public void testEscapeNul1() throws Exception {
-    assertEquals(DelimiterSet.NULL_CHAR, SqoopOptions.toChar("\\0"));
-  }
-
-  @Test
-  public void testEscapeNul2() throws Exception {
-    assertEquals(DelimiterSet.NULL_CHAR, SqoopOptions.toChar("\\00"));
-  }
-
-  @Test
-  public void testEscapeNul3() throws Exception {
-    assertEquals(DelimiterSet.NULL_CHAR, SqoopOptions.toChar("\\0000"));
-  }
-
-  @Test
-  public void testEscapeNul4() throws Exception {
-    assertEquals(DelimiterSet.NULL_CHAR, SqoopOptions.toChar("\\0x0"));
-  }
-
-  @Test
-  public void testOctalChar1() throws Exception {
-    assertEquals(04, SqoopOptions.toChar("\\04"));
-  }
-
-  @Test
-  public void testOctalChar2() throws Exception {
-    assertEquals(045, SqoopOptions.toChar("\\045"));
-  }
-
-  @Test
-  public void testErrOctalChar() throws Exception {
-    thrown.expect(NumberFormatException.class);
-    thrown.reportMissingExceptionWithMessage("Expected NumberFormatException on erroneous octal char");
-    SqoopOptions.toChar("\\095");
-  }
-
-  @Test
-  public void testErrHexChar() throws Exception {
-    thrown.expect(NumberFormatException.class);
-    thrown.reportMissingExceptionWithMessage("Expected NumberFormatException on erroneous hex char");
-    SqoopOptions.toChar("\\0x9K5");
-  }
-
-  private SqoopOptions parse(String [] argv) throws Exception {
-    ImportTool importTool = new ImportTool();
-    return importTool.parseArguments(argv, null, null, false);
-  }
-
-  // test that setting output delimiters also sets input delimiters
-  @Test
-  public void testDelimitersInherit() throws Exception {
-    String [] args = {
-      "--fields-terminated-by",
-      "|",
-    };
-
-    SqoopOptions opts = parse(args);
-    assertEquals('|', opts.getInputFieldDelim());
-    assertEquals('|', opts.getOutputFieldDelim());
-  }
-
-  // Test that setting output delimiters and setting input delims
-  // separately works.
-  @Test
-  public void testDelimOverride1() throws Exception {
-    String [] args = {
-      "--fields-terminated-by",
-      "|",
-      "--input-fields-terminated-by",
-      "*",
-    };
-
-    SqoopOptions opts = parse(args);
-    assertEquals('*', opts.getInputFieldDelim());
-    assertEquals('|', opts.getOutputFieldDelim());
-  }
-
-  // test that the order in which delims are specified doesn't matter
-  @Test
-  public void testDelimOverride2() throws Exception {
-    String [] args = {
-      "--input-fields-terminated-by",
-      "*",
-      "--fields-terminated-by",
-      "|",
-    };
-
-    SqoopOptions opts = parse(args);
-    assertEquals('*', opts.getInputFieldDelim());
-    assertEquals('|', opts.getOutputFieldDelim());
-  }
-
-  @Test
-  public void testBadNumMappers1() throws Exception {
-    String [] args = {
-      "--num-mappers",
-      "x",
-    };
-
-    thrown.expect(SqoopOptions.InvalidOptionsException.class);
-    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException 
on invalid --num-mappers argument");
-    parse(args);
-  }
-
-  @Test
-  public void testBadNumMappers2() throws Exception {
-    String [] args = {
-      "-m",
-      "x",
-    };
-
-    thrown.expect(SqoopOptions.InvalidOptionsException.class);
-    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException 
on invalid -m argument");
-    parse(args);
-  }
-
-  @Test
-  public void testGoodNumMappers() throws Exception {
-    String [] args = {
-      "-m",
-      "4",
-    };
-
-    SqoopOptions opts = parse(args);
-    assertEquals(4, opts.getNumMappers());
-  }
-
-  @Test
-  public void testHivePartitionParams() throws Exception {
-    String[] args = {
-        "--hive-partition-key", "ds",
-        "--hive-partition-value", "20110413",
-    };
-    SqoopOptions opts = parse(args);
-    assertEquals("ds", opts.getHivePartitionKey());
-    assertEquals("20110413", opts.getHivePartitionValue());
-  }
-
-  @Test
-  public void testBoundaryQueryParams() throws Exception {
-    String[] args = {
-      "--boundary-query", "select 1, 2",
-    };
-
-    SqoopOptions opts = parse(args);
-    assertEquals("select 1, 2", opts.getBoundaryQuery());
-  }
-
-  @Test
-  public void testMapColumnHiveParams() throws Exception {
-    String[] args = {
-      "--map-column-hive", "id=STRING",
-    };
-
-    SqoopOptions opts = parse(args);
-    Properties mapping = opts.getMapColumnHive();
-    assertTrue(mapping.containsKey("id"));
-    assertEquals("STRING", mapping.get("id"));
-  }
-
-  @Test
-  public void testMalformedMapColumnHiveParams() throws Exception {
-    String[] args = {
-      "--map-column-hive", "id",
-    };
-    try {
-      SqoopOptions opts = parse(args);
-      fail("Malformed hive mapping does not throw exception");
-    } catch (Exception e) {
-      // Caught exception as expected
-    }
-  }
-
-  @Test
-  public void testMapColumnJavaParams() throws Exception {
-    String[] args = {
-      "--map-column-java", "id=String",
-    };
-
-    SqoopOptions opts = parse(args);
-    Properties mapping = opts.getMapColumnJava();
-    assertTrue(mapping.containsKey("id"));
-    assertEquals("String", mapping.get("id"));
-  }
-
-  @Test
-  public void testMalfromedMapColumnJavaParams() throws Exception {
-    String[] args = {
-      "--map-column-java", "id",
-    };
-    try {
-      SqoopOptions opts = parse(args);
-      fail("Malformed java mapping does not throw exception");
-    } catch (Exception e) {
-      // Caught exception as expected
-    }
-  }
-
-  @Test
-  public void testSkipDistCacheOption() throws Exception {
-    String[] args = {"--skip-dist-cache"};
-    SqoopOptions opts = parse(args);
-    assertTrue(opts.isSkipDistCache());
-  }
-
-  @Test
-  public void testPropertySerialization1() {
-    // Test that if we write a SqoopOptions out to a Properties,
-    // and then read it back in, we get all the same results.
-    SqoopOptions out = new SqoopOptions();
-    out.setUsername("user");
-    out.setConnectString("bla");
-    out.setNumMappers(4);
-    out.setAppendMode(true);
-    out.setHBaseTable("hbasetable");
-    out.setWarehouseDir("Warehouse");
-    out.setClassName("someclass");
-    out.setSplitByCol("somecol");
-    out.setSqlQuery("the query");
-    out.setPackageName("a.package");
-    out.setHiveImport(true);
-    out.setFetchSize(null);
-
-    Properties connParams = new Properties();
-    connParams.put("conn.timeout", "3000");
-    connParams.put("conn.buffer_size", "256");
-    connParams.put("conn.dummy", "dummy");
-    connParams.put("conn.foo", "bar");
-
-    out.setConnectionParams(connParams);
-
-    Properties outProps = out.writeProperties();
-
-    SqoopOptions in = new SqoopOptions();
-    in.loadProperties(outProps);
-
-    Properties inProps = in.writeProperties();
-
-    assertEquals("properties don't match", outProps, inProps);
-
-    assertEquals("connection params don't match",
-            connParams, out.getConnectionParams());
-    assertEquals("connection params don't match",
-            connParams, in.getConnectionParams());
-  }
-
-  @Test
-  public void testPropertySerialization2() {
-    // Test that if we write a SqoopOptions out to a Properties,
-    // and then read it back in, we get all the same results.
-    SqoopOptions out = new SqoopOptions();
-    out.setUsername("user");
-    out.setConnectString("bla");
-    out.setNumMappers(4);
-    out.setAppendMode(true);
-    out.setHBaseTable("hbasetable");
-    out.setWarehouseDir("Warehouse");
-    out.setClassName("someclass");
-    out.setSplitByCol("somecol");
-    out.setSqlQuery("the query");
-    out.setPackageName("a.package");
-    out.setHiveImport(true);
-    out.setFetchSize(42);
-
-    Properties connParams = new Properties();
-    connParams.setProperty("a", "value-a");
-    connParams.setProperty("b", "value-b");
-    connParams.setProperty("a.b", "value-a.b");
-    connParams.setProperty("a.b.c", "value-a.b.c");
-    connParams.setProperty("aaaaaaaaaa.bbbbbbb.cccccccc", "value-abc");
-
-    out.setConnectionParams(connParams);
-
-    Properties outProps = out.writeProperties();
-
-    SqoopOptions in = new SqoopOptions();
-    in.loadProperties(outProps);
-
-    Properties inProps = in.writeProperties();
-
-    assertEquals("properties don't match", outProps, inProps);
-    assertEquals("connection params don't match",
-            connParams, out.getConnectionParams());
-    assertEquals("connection params don't match",
-            connParams, in.getConnectionParams());
-  }
-
-  @Test
-  public void testDefaultTempRootDir() {
-    SqoopOptions opts = new SqoopOptions();
-
-    assertEquals("_sqoop", opts.getTempRootDir());
-  }
-
-  @Test
-  public void testDefaultLoadedTempRootDir() {
-    SqoopOptions out = new SqoopOptions();
-    Properties props = out.writeProperties();
-    SqoopOptions opts = new SqoopOptions();
-    opts.loadProperties(props);
-
-    assertEquals("_sqoop", opts.getTempRootDir());
-  }
-
-  @Test
-  public void testLoadedTempRootDir() {
-    SqoopOptions out = new SqoopOptions();
-    final String tempRootDir = "customRoot";
-    out.setTempRootDir(tempRootDir);
-    Properties props = out.writeProperties();
-    SqoopOptions opts = new SqoopOptions();
-    opts.loadProperties(props);
-
-    assertEquals(tempRootDir, opts.getTempRootDir());
-  }
-
-  @Test
-  public void testNulledTempRootDir() {
-    SqoopOptions out = new SqoopOptions();
-    out.setTempRootDir(null);
-    Properties props = out.writeProperties();
-    SqoopOptions opts = new SqoopOptions();
-    opts.loadProperties(props);
-
-    assertEquals("_sqoop", opts.getTempRootDir());
-  }
-
-  @Test
-  public void testDefaultThrowOnErrorWithNotSetSystemProperty() {
-    System.clearProperty(SQOOP_RETHROW_PROPERTY);
-    SqoopOptions opts = new SqoopOptions();
-    assertFalse(opts.isThrowOnError());
-  }
-
-  @Test
-  public void testDefaultThrowOnErrorWithSetSystemProperty() {
-    String testSqoopRethrowProperty = "";
-    System.setProperty(SQOOP_RETHROW_PROPERTY, testSqoopRethrowProperty);
-    SqoopOptions opts = new SqoopOptions();
-
-    assertTrue(opts.isThrowOnError());
-  }
-
-  @Test
-  public void testDefaultLoadedThrowOnErrorWithNotSetSystemProperty() {
-    System.clearProperty(SQOOP_RETHROW_PROPERTY);
-    SqoopOptions out = new SqoopOptions();
-    Properties props = out.writeProperties();
-    SqoopOptions opts = new SqoopOptions();
-    opts.loadProperties(props);
-
-    assertFalse(opts.isThrowOnError());
-  }
-
-  @Test
-  public void testDefaultLoadedThrowOnErrorWithSetSystemProperty() {
-    String testSqoopRethrowProperty = "";
-    System.setProperty(SQOOP_RETHROW_PROPERTY, testSqoopRethrowProperty);
-    SqoopOptions out = new SqoopOptions();
-    Properties props = out.writeProperties();
-    SqoopOptions opts = new SqoopOptions();
-    opts.loadProperties(props);
-
-    assertTrue(opts.isThrowOnError());
-  }
-
-  @Test
-  public void testThrowOnErrorWithNotSetSystemProperty() throws Exception {
-    System.clearProperty(SQOOP_RETHROW_PROPERTY);
-    String[] args = {"--throw-on-error"};
-    SqoopOptions opts = parse(args);
-
-    assertTrue(opts.isThrowOnError());
-  }
-
-  @Test
-  public void testThrowOnErrorWithSetSystemProperty() throws Exception {
-    String testSqoopRethrowProperty = "";
-    System.setProperty(SQOOP_RETHROW_PROPERTY, testSqoopRethrowProperty);
-    String[] args = {"--throw-on-error"};
-    SqoopOptions opts = parse(args);
-
-    assertTrue(opts.isThrowOnError());
-  }
-
-  @Test
-  public void defaultValueOfOracleEscapingDisabledShouldBeFalse() {
-    System.clearProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED);
-    SqoopOptions opts = new SqoopOptions();
-
-    assertThat(opts.isOracleEscapingDisabled(), is(equalTo(true)));
-  }
-
-  @Test
-  public void 
valueOfOracleEscapingDisabledShouldBeFalseIfTheValueOfTheRelatedEnvironmentVariableIsSetToFalse()
 {
-    System.setProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED, "false");
-    SqoopOptions opts = new SqoopOptions();
-
-    assertThat(opts.isOracleEscapingDisabled(), is(equalTo(false)));
-  }
-
-  @Test
-  public void 
valueOfOracleEscapingDisabledShouldBeTrueIfTheValueOfTheRelatedEnvironmentVariableIsSetToTrue()
 {
-    System.setProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED, "true");
-    SqoopOptions opts = new SqoopOptions();
-
-    assertThat(opts.isOracleEscapingDisabled(), is(equalTo(true)));
-  }
-
-  @Test
-  public void 
valueOfOracleEscapingDisabledShouldBeFalseIfTheValueOfTheRelatedEnvironmentVariableIsSetToAnyNonBooleanValue()
 {
-    System.setProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED, "falsetrue");
-    SqoopOptions opts = new SqoopOptions();
-
-    assertThat(opts.isOracleEscapingDisabled(), is(equalTo(false)));
-  }
-
-  @Test
-  public void 
hadoopConfigurationInstanceOfSqoopOptionsShouldContainTheSameValueForOracleEscapingDisabledAsSqoopOptionsProperty()
 {
-    SqoopOptions opts = new SqoopOptions();
-
-    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
-        is(equalTo(opts.isOracleEscapingDisabled())));
-  }
-
-  @Test
-  public void 
hadoopConfigurationInstanceOfSqoopOptionsShouldContainTrueForOracleEscapingDisabledAsTheValueDirectlyHasBeenSetToSqoopOptions()
 {
-    System.clearProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED);
-    SqoopOptions opts = new SqoopOptions();
-    opts.setOracleEscapingDisabled(true);
-
-    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
-        is(equalTo(true)));
-  }
-
-  @Test
-  public void 
hadoopConfigurationInstanceOfSqoopOptionsShouldContainFalseForOracleEscapingDisabledAsTheValueDirectlyHasBeenSetToSqoopOptions()
 {
-    System.clearProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED);
-    SqoopOptions opts = new SqoopOptions();
-    opts.setOracleEscapingDisabled(false);
-
-    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
-        is(equalTo(false)));
-  }
-
-  @Test
-  public void 
valueOfOracleEscapingDisabledInHadoopConfigurationInstanceOfSqoopOptionsShouldBeFalseIfTheValueOfTheRelatedEnvironmentVariableIsSetToFalse()
 {
-    System.setProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED, "false");
-    SqoopOptions opts = new SqoopOptions();
-
-    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
-        is(equalTo(false)));
-  }
-
-  @Test
-  public void 
valueOfOracleEscapingDisabledInHadoopConfigurationInstanceOfSqoopOptionsShouldBeTrueIfTheValueOfTheRelatedEnvironmentVariableIsSetToTrue()
 {
-    System.setProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED, "true");
-    SqoopOptions opts = new SqoopOptions();
-
-    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
-        is(equalTo(true)));
-  }
-
-  @Test
-  public void 
valueOfOracleEscapingDisabledInHadoopConfigurationInstanceOfSqoopOptionsShouldBeFalseIfTheValueOfTheRelatedEnvironmentVariableIsSetToAnyNonBooleanValue()
 {
-    System.setProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED, "falsetrue");
-    SqoopOptions opts = new SqoopOptions();
-
-    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
-        is(equalTo(false)));
-  }
-
-  @Test
-  public void 
valueOfOracleEscapingDisabledShouldBeAbleToSavedAndLoadedBackWithTheSameValue() 
{
-    System.clearProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED);
-    SqoopOptions opts = new SqoopOptions();
-    opts.setOracleEscapingDisabled(false);
-    Properties out = opts.writeProperties();
-    opts = new SqoopOptions();
-    opts.loadProperties(out);
-
-    assertThat(opts.isOracleEscapingDisabled(), is(equalTo(false)));
-    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
-        is(equalTo(false)));
-  }
-
-  @Test
-  public void 
valueOfOracleEscapingDisabledShouldBeEqualToNullIfASqoopOptionsInstanceWasLoadedWhichDidntContainASavedValueForIt()
 {
-    System.clearProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED);
-    SqoopOptions opts = new SqoopOptions();
-    Properties out = opts.writeProperties();
-    opts = new SqoopOptions();
-    opts.loadProperties(out);
-
-    assertThat(opts.isOracleEscapingDisabled(), is(equalTo(true)));
-    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
-        is(equalTo(true)));
-  }
-
-  // test that hadoop-home is accepted as an option
-  @Test
-  public void testHadoopHome() throws Exception {
-    String [] args = {
-      "--hadoop-home",
-      "/usr/lib/hadoop",
-    };
-
-    SqoopOptions opts = parse(args);
-    assertEquals("/usr/lib/hadoop", opts.getHadoopMapRedHome());
-  }
-
-  // test that hadoop-home is accepted as an option
-  @Test
-  public void testHadoopMapRedOverridesHadoopHome() throws Exception {
-    String[] args = { "--hadoop-home", "/usr/lib/hadoop-ignored", 
"--hadoop-mapred-home", "/usr/lib/hadoop", };
-
-    SqoopOptions opts = parse(args);
-    assertEquals("/usr/lib/hadoop", opts.getHadoopMapRedHome());
-  }
-
-
-  //helper method to validate given import options
-  private void validateImportOptions(String[] extraArgs) throws Exception {
-    String [] args = {
-      "--connect", HsqldbTestServer.getUrl(),
-      "--table", "test",
-      "-m", "1",
-    };
-    ImportTool importTool = new ImportTool();
-    SqoopOptions opts = importTool.parseArguments(
-        (String []) ArrayUtils.addAll(args, extraArgs), null, null, false);
-    importTool.validateOptions(opts);
-  }
-
-  //test compatibility of --delete-target-dir with import
-  @Test
-  public void testDeteleTargetDir() throws Exception {
-    String [] extraArgs = {
-      "--delete-target-dir",
-    };
-    try {
-      validateImportOptions(extraArgs);
-    } catch(SqoopOptions.InvalidOptionsException ioe) {
-      fail("Unexpected InvalidOptionsException" + ioe);
-    }
-  }
-
-  //test incompatibility of --delete-target-dir & --append with import
-  @Test
-  public void testDeleteTargetDirWithAppend() throws Exception {
-    String [] extraArgs = {
-      "--append",
-      "--delete-target-dir",
-    };
-
-    thrown.expect(SqoopOptions.InvalidOptionsException.class);
-    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException 
on incompatibility of " +
-        "--delete-target-dir and --append");
-    validateImportOptions(extraArgs);
-  }
-
-  //test incompatibility of --delete-target-dir with incremental import
-  @Test
-  public void testDeleteWithIncrementalImport() throws Exception {
-    String [] extraArgs = {
-      "--incremental", "append",
-      "--delete-target-dir",
-    };
-
-    thrown.expect(SqoopOptions.InvalidOptionsException.class);
-    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException 
on incompatibility of " +
-        "--delete-target-dir and --incremental");
-    validateImportOptions(extraArgs);
-  }
-
-  // test that hbase bulk load import with table name and target dir
-  // passes validation
-  @Test
-  public void testHBaseBulkLoad() throws Exception {
-    String [] extraArgs = {
-        longArgument(BaseSqoopTool.HBASE_BULK_LOAD_ENABLED_ARG),
-        longArgument(BaseSqoopTool.TARGET_DIR_ARG), "./test",
-        longArgument(BaseSqoopTool.HBASE_TABLE_ARG), "test_table",
-        longArgument(BaseSqoopTool.HBASE_COL_FAM_ARG), "d"};
-
-    validateImportOptions(extraArgs);
-  }
-
-  // test that hbase bulk load import with a missing --hbase-table fails
-  @Test
-  public void testHBaseBulkLoadMissingHbaseTable() throws Exception {
-    String [] extraArgs = {
-        longArgument(BaseSqoopTool.HBASE_BULK_LOAD_ENABLED_ARG),
-        longArgument(BaseSqoopTool.TARGET_DIR_ARG), "./test"};
-
-    thrown.expect(SqoopOptions.InvalidOptionsException.class);
-    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException 
because of missing --hbase-table");
-    validateImportOptions(extraArgs);
-  }
-
-  private static String longArgument(String argument) {
-    return String.format("--%s", argument);
-  }
-
-  @Test
-  public void testRelaxedIsolation() throws Exception {
-    String extraArgs[] = {
-      "--relaxed-isolation",
-    };
-    validateImportOptions(extraArgs);
-  }
-
-  @Test
-  public void testResetToOneMapper() throws Exception {
-    String extraArgs[] = {
-      "--autoreset-to-one-mapper",
-    };
-    validateImportOptions(extraArgs);
-  }
-
-  @Test
-  public void testResetToOneMapperAndSplitBy() throws Exception {
-    String extraArgs[] = {
-      "--autoreset-to-one-mapper",
-      "--split-by",
-      "col0",
-    };
-
-    thrown.expect(SqoopOptions.InvalidOptionsException.class);
-    thrown.reportMissingExceptionWithMessage("Expected Exception on 
incompatibility of " +
-        "--autoreset-to-one-mapper and --split-by");
-    validateImportOptions(extraArgs);
-  }
-
-  @Test
-  public void testEscapeMapingColumnNames() throws Exception {
-    SqoopOptions opts = new SqoopOptions();
-    // enabled by default
-    assertTrue(opts.getEscapeMappingColumnNamesEnabled());
-
-    String [] args = {
-        "--" + 
org.apache.sqoop.tool.BaseSqoopTool.ESCAPE_MAPPING_COLUMN_NAMES_ENABLED,
-        "false",
-    };
-
-    opts = parse(args);
-    assertFalse(opts.getEscapeMappingColumnNamesEnabled());
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/TestTargetDir.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/TestTargetDir.java 
b/src/test/com/cloudera/sqoop/TestTargetDir.java
deleted file mode 100644
index d7ebd34..0000000
--- a/src/test/com/cloudera/sqoop/TestTargetDir.java
+++ /dev/null
@@ -1,156 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop;
-
-import java.io.IOException;
-import java.util.ArrayList;
-
-import org.apache.hadoop.fs.ContentSummary;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.util.StringUtils;
-
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
-
-
-/**
- * Test that --target-dir works.
- */
-public class TestTargetDir extends ImportJobTestCase {
-
-  public static final Log LOG = LogFactory
-      .getLog(TestTargetDir.class.getName());
-
-  @Rule
-  public ExpectedException thrown = ExpectedException.none();
-
-  /**
-   * Create the argv to pass to Sqoop.
-   *
-   * @return the argv as an array of strings.
-   */
-  protected ArrayList getOutputArgv(boolean includeHadoopFlags) {
-    ArrayList<String> args = new ArrayList<String>();
-
-    if (includeHadoopFlags) {
-      CommonArgs.addHadoopFlags(args);
-    }
-
-    args.add("--table");
-    args.add(HsqldbTestServer.getTableName());
-    args.add("--connect");
-    args.add(HsqldbTestServer.getUrl());
-    args.add("--split-by");
-    args.add("INTFIELD1");
-    args.add("--as-sequencefile");
-
-    return args;
-  }
-
-  // this test just uses the two int table.
-  protected String getTableName() {
-    return HsqldbTestServer.getTableName();
-  }
-
-  /** test invalid argument exception if several output options. */
-  @Test
-  public void testSeveralOutputsIOException() throws IOException {
-    ArrayList args = getOutputArgv(true);
-    args.add("--warehouse-dir");
-    args.add(getWarehouseDir());
-    args.add("--target-dir");
-    args.add(getWarehouseDir());
-
-    String[] argv = (String[]) args.toArray(new String[0]);
-
-    thrown.expect(IOException.class);
-    thrown.reportMissingExceptionWithMessage("Expected IOException on several 
output options");
-    runImport(argv);
-  }
-
-  /** test target-dir contains imported files. */
-  @Test
-  public void testTargetDir() throws IOException {
-
-    try {
-      String targetDir = getWarehouseDir() + "/tempTargetDir";
-
-      ArrayList args = getOutputArgv(true);
-      args.add("--target-dir");
-      args.add(targetDir);
-
-      // delete target-dir if exists and recreate it
-      FileSystem fs = FileSystem.get(getConf());
-      Path outputPath = new Path(targetDir);
-      if (fs.exists(outputPath)) {
-        fs.delete(outputPath, true);
-      }
-
-      String[] argv = (String[]) args.toArray(new String[0]);
-      runImport(argv);
-
-      ContentSummary summ = fs.getContentSummary(outputPath);
-
-      assertTrue("There's no new imported files in target-dir",
-          summ.getFileCount() > 0);
-
-    } catch (Exception e) {
-      LOG.error("Got Exception: " + StringUtils.stringifyException(e));
-      fail(e.toString());
-    }
-  }
-
-  /** test target-dir breaks if already existing
-   * (only allowed in append mode). */
-  @Test
-  public void testExistingTargetDir() throws IOException {
-    String targetDir = getWarehouseDir() + "/tempTargetDir";
-
-    ArrayList args = getOutputArgv(true);
-    args.add("--target-dir");
-    args.add(targetDir);
-
-    // delete target-dir if exists and recreate it
-    FileSystem fs = FileSystem.get(getConf());
-    Path outputPath = new Path(targetDir);
-    if (!fs.exists(outputPath)) {
-      fs.mkdirs(outputPath);
-    }
-
-    String[] argv = (String[]) args.toArray(new String[0]);
-
-    thrown.expect(IOException.class);
-    thrown.reportMissingExceptionWithMessage("Expected IOException on 
--target-dir if target dir already exists");
-    runImport(argv);
-  }
-
-}

Reply via email to