http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/TestParquetExport.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/TestParquetExport.java b/src/test/org/apache/sqoop/TestParquetExport.java
new file mode 100644
index 0000000..fdaa5d8
--- /dev/null
+++ b/src/test/org/apache/sqoop/TestParquetExport.java
@@ -0,0 +1,459 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop;
+
+import org.apache.sqoop.testutil.ExportJobTestCase;
+import com.google.common.collect.Lists;
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericRecord;
+import org.junit.Rule;
+
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.kitesdk.data.*;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+
+
+/**
+ * Test that we can export Parquet Data Files from HDFS into databases.
+ */
+public class TestParquetExport extends ExportJobTestCase {
+
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  /**
+   * @return an argv for the CodeGenTool to use when creating tables to export.
+   */
+  protected String [] getCodeGenArgv(String... extraArgs) {
+    List<String> codeGenArgv = new ArrayList<String>();
+
+    if (null != extraArgs) {
+      for (String arg : extraArgs) {
+        codeGenArgv.add(arg);
+      }
+    }
+
+    codeGenArgv.add("--table");
+    codeGenArgv.add(getTableName());
+    codeGenArgv.add("--connect");
+    codeGenArgv.add(getConnectString());
+
+    return codeGenArgv.toArray(new String[0]);
+  }
+
+  /** When generating data for export tests, each column is generated
+      according to a ColumnGenerator. Methods exist for determining
+      what to put into Parquet objects in the files to export, as well
+      as what the object representation of the column as returned by
+      the database should look like.
+    */
+  public interface ColumnGenerator {
+    /** For a row with id rowNum, what should we write into that
+        Parquet record to export?
+      */
+    Object getExportValue(int rowNum);
+
+    /** Return the Parquet schema for the field. */
+    Schema getColumnParquetSchema();
+
+    /** For a row with id rowNum, what should the database return
+        for the given column's value?
+      */
+    Object getVerifyValue(int rowNum);
+
+    /** Return the column type to put in the CREATE TABLE statement. */
+    String getColumnType();
+  }
+
+  private ColumnGenerator colGenerator(final Object exportValue,
+      final Schema schema, final Object verifyValue,
+      final String columnType) {
+    return new ColumnGenerator() {
+      @Override
+      public Object getVerifyValue(int rowNum) {
+        return verifyValue;
+      }
+      @Override
+      public Object getExportValue(int rowNum) {
+        return exportValue;
+      }
+      @Override
+      public String getColumnType() {
+        return columnType;
+      }
+      @Override
+      public Schema getColumnParquetSchema() {
+        return schema;
+      }
+    };
+  }
+
+  /**
+   * Create a data file that gets exported to the db.
+   * @param fileNum the number of the file (for multi-file export)
+   * @param numRecords how many records to write to the file.
+   */
+  protected void createParquetFile(int fileNum, int numRecords,
+      ColumnGenerator... extraCols) throws IOException {
+
+    String uri = "dataset:file:" + getTablePath();
+    Schema schema = buildSchema(extraCols);
+    DatasetDescriptor descriptor = new DatasetDescriptor.Builder()
+      .schema(schema)
+      .format(Formats.PARQUET)
+      .build();
+    Dataset dataset = Datasets.create(uri, descriptor);
+    DatasetWriter writer = dataset.newWriter();
+    try {
+      for (int i = 0; i < numRecords; i++) {
+        GenericRecord record = new GenericData.Record(schema);
+        record.put("id", i);
+        record.put("msg", getMsgPrefix() + i);
+        addExtraColumns(record, i, extraCols);
+        writer.write(record);
+      }
+    } finally {
+      writer.close();
+    }
+  }
+
+  private Schema buildSchema(ColumnGenerator... extraCols) {
+    List<Field> fields = new ArrayList<Field>();
+    fields.add(buildField("id", Schema.Type.INT));
+    fields.add(buildField("msg", Schema.Type.STRING));
+    int colNum = 0;
+    if (null != extraCols) {
+      for (ColumnGenerator gen : extraCols) {
+        if (gen.getColumnParquetSchema() != null) {
+          fields.add(buildParquetField(forIdx(colNum++), gen.getColumnParquetSchema()));
+        }
+      }
+    }
+    Schema schema = Schema.createRecord("myschema", null, null, false);
+    schema.setFields(fields);
+    return schema;
+  }
+
+  private void addExtraColumns(GenericRecord record, int rowNum,
+      ColumnGenerator[] extraCols) {
+    int colNum = 0;
+    if (null != extraCols) {
+      for (ColumnGenerator gen : extraCols) {
+        if (gen.getColumnParquetSchema() != null) {
+          record.put(forIdx(colNum++), gen.getExportValue(rowNum));
+        }
+      }
+    }
+  }
+
+  private Field buildField(String name, Schema.Type type) {
+    return new Field(name, Schema.create(type), null, null);
+  }
+
+  private Field buildParquetField(String name, Schema schema) {
+    return new Field(name, schema, null, null);
+  }
+
+  /** Return the column name for a column index.
+   *  Each table contains two columns named 'id' and 'msg', and then an
+   *  arbitrary number of additional columns defined by ColumnGenerators.
+   *  These columns are referenced by idx 0, 1, 2...
+   *  @param idx the index of the ColumnGenerator in the array passed to
+   *   createTable().
+   *  @return the name of the column
+   */
+  protected String forIdx(int idx) {
+    return "col" + idx;
+  }
+
+  /**
+   * Return a SQL statement that drops a table, if it exists.
+   * @param tableName the table to drop.
+   * @return the SQL statement to drop that table.
+   */
+  protected String getDropTableStatement(String tableName) {
+    return "DROP TABLE " + tableName + " IF EXISTS";
+  }
+
+  /** Create the table definition to export to, removing any prior table.
+      By specifying ColumnGenerator arguments, you can add extra columns
+      to the table of arbitrary type.
+   */
+  private void createTable(ColumnGenerator... extraColumns)
+      throws SQLException {
+    Connection conn = getConnection();
+    PreparedStatement statement = conn.prepareStatement(
+        getDropTableStatement(getTableName()),
+        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    try {
+      statement.executeUpdate();
+      conn.commit();
+    } finally {
+      statement.close();
+    }
+
+    StringBuilder sb = new StringBuilder();
+    sb.append("CREATE TABLE ");
+    sb.append(getTableName());
+    sb.append(" (\"ID\" INT NOT NULL PRIMARY KEY, \"MSG\" VARCHAR(64)");
+    int colNum = 0;
+    for (ColumnGenerator gen : extraColumns) {
+      if (gen.getColumnType() != null) {
+        sb.append(", \"" + forIdx(colNum++) + "\" " + gen.getColumnType());
+      }
+    }
+    sb.append(")");
+
+    statement = conn.prepareStatement(sb.toString(),
+        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    try {
+      statement.executeUpdate();
+      conn.commit();
+    } finally {
+      statement.close();
+    }
+  }
+
+  /**
+   * Create the table definition to export and also inserting one records for
+   * identifying the updates. Issue [SQOOP-2846]
+   */
+  private void createTableWithInsert() throws SQLException {
+    Connection conn = getConnection();
+    PreparedStatement statement = conn.prepareStatement(getDropTableStatement(getTableName()),
+        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    try {
+      statement.executeUpdate();
+      conn.commit();
+    } finally {
+      statement.close();
+    }
+
+    StringBuilder sb = new StringBuilder();
+    sb.append("CREATE TABLE ");
+    sb.append(getTableName());
+    sb.append(" (id INT NOT NULL PRIMARY KEY, msg VARCHAR(64)");
+    sb.append(")");
+    statement = conn.prepareStatement(sb.toString(), ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    try {
+      statement.executeUpdate();
+      Statement statement2 = conn.createStatement();
+      String insertCmd = "INSERT INTO " + getTableName() + " (ID,MSG) VALUES(" + 0 + ",'testMsg');";
+      statement2.execute(insertCmd);
+      conn.commit();
+    } finally {
+      statement.close();
+    }
+  }
+
+  /** Verify that on a given row, a column has a given value.
+   * @param id the id column specifying the row to test.
+   */
+  private void assertColValForRowId(int id, String colName, Object expectedVal)
+      throws SQLException {
+    Connection conn = getConnection();
+    LOG.info("Verifying column " + colName + " has value " + expectedVal);
+
+    PreparedStatement statement = conn.prepareStatement(
+        "SELECT \"" + colName + "\" FROM " + getTableName() + " WHERE \"ID\" = " + id,
+        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    Object actualVal = null;
+    try {
+      ResultSet rs = statement.executeQuery();
+      try {
+        rs.next();
+        actualVal = rs.getObject(1);
+      } finally {
+        rs.close();
+      }
+    } finally {
+      statement.close();
+    }
+
+    if (expectedVal != null && expectedVal instanceof byte[]) {
+      assertArrayEquals((byte[]) expectedVal, (byte[]) actualVal);
+    } else {
+      assertEquals("Got unexpected column value", expectedVal, actualVal);
+    }
+  }
+
+  /** Verify that for the max and min values of the 'id' column, the values
+      for a given column meet the expected values.
+   */
+  protected void assertColMinAndMax(String colName, ColumnGenerator generator)
+      throws SQLException {
+    Connection conn = getConnection();
+    int minId = getMinRowId(conn);
+    int maxId = getMaxRowId(conn);
+
+    LOG.info("Checking min/max for column " + colName + " with type "
+        + generator.getColumnType());
+
+    Object expectedMin = generator.getVerifyValue(minId);
+    Object expectedMax = generator.getVerifyValue(maxId);
+
+    assertColValForRowId(minId, colName, expectedMin);
+    assertColValForRowId(maxId, colName, expectedMax);
+  }
+
+  @Test
+  public void testSupportedParquetTypes() throws IOException, SQLException {
+    String[] argv = {};
+    final int TOTAL_RECORDS = 1 * 10;
+
+    byte[] b = new byte[] { (byte) 1, (byte) 2 };
+    Schema fixed = Schema.createFixed("myfixed", null, null, 2);
+    Schema enumeration = Schema.createEnum("myenum", null, null,
+        Lists.newArrayList("a", "b"));
+
+    ColumnGenerator[] gens = new ColumnGenerator[] {
+      colGenerator(true, Schema.create(Schema.Type.BOOLEAN), true, "BIT"),
+      colGenerator(100, Schema.create(Schema.Type.INT), 100, "INTEGER"),
+      colGenerator(200L, Schema.create(Schema.Type.LONG), 200L, "BIGINT"),
+      // HSQLDB maps REAL to double, not float:
+      colGenerator(1.0f, Schema.create(Schema.Type.FLOAT), 1.0d, "REAL"),
+      colGenerator(2.0d, Schema.create(Schema.Type.DOUBLE), 2.0d, "DOUBLE"),
+      colGenerator("s", Schema.create(Schema.Type.STRING), "s", "VARCHAR(8)"),
+      colGenerator(ByteBuffer.wrap(b), Schema.create(Schema.Type.BYTES),
+          b, "VARBINARY(8)"),
+      colGenerator(new GenericData.Fixed(fixed, b), fixed,
+          b, "BINARY(2)"),
+      colGenerator(new GenericData.EnumSymbol(enumeration, "a"), enumeration,
+          "a", "VARCHAR(8)"),
+    };
+    createParquetFile(0, TOTAL_RECORDS, gens);
+    createTable(gens);
+    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
+    verifyExport(TOTAL_RECORDS);
+    for (int i = 0; i < gens.length; i++) {
+      assertColMinAndMax(forIdx(i), gens[i]);
+    }
+  }
+
+  @Test
+  public void testNullableField() throws IOException, SQLException {
+    String[] argv = {};
+    final int TOTAL_RECORDS = 1 * 10;
+
+    List<Schema> childSchemas = new ArrayList<Schema>();
+    childSchemas.add(Schema.create(Schema.Type.NULL));
+    childSchemas.add(Schema.create(Schema.Type.STRING));
+    Schema schema =  Schema.createUnion(childSchemas);
+    ColumnGenerator gen0 = colGenerator(null, schema, null, "VARCHAR(64)");
+    ColumnGenerator gen1 = colGenerator("s", schema, "s", "VARCHAR(64)");
+    createParquetFile(0, TOTAL_RECORDS, gen0, gen1);
+    createTable(gen0, gen1);
+    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
+    verifyExport(TOTAL_RECORDS);
+    assertColMinAndMax(forIdx(0), gen0);
+    assertColMinAndMax(forIdx(1), gen1);
+  }
+
+  @Test
+  public void testParquetRecordsNotSupported() throws IOException, SQLException {
+    String[] argv = {};
+    final int TOTAL_RECORDS = 1;
+
+    Schema schema =  Schema.createRecord("nestedrecord", null, null, false);
+    schema.setFields(Lists.newArrayList(buildField("myint",
+        Schema.Type.INT)));
+    GenericRecord record = new GenericData.Record(schema);
+    record.put("myint", 100);
+    // DB type is not used so can be anything:
+    ColumnGenerator gen = colGenerator(record, schema, null, "VARCHAR(64)");
+    createParquetFile(0, TOTAL_RECORDS,  gen);
+    createTable(gen);
+
+    thrown.expect(Exception.class);
+    thrown.reportMissingExceptionWithMessage("Expected Exception as Parquet records are not supported");
+    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
+  }
+
+  @Test
+  public void testMissingDatabaseFields() throws IOException, SQLException {
+    String[] argv = {};
+    final int TOTAL_RECORDS = 1;
+
+    // null column type means don't create a database column
+    // the Parquet value will not be exported
+    ColumnGenerator gen = colGenerator(100, Schema.create(Schema.Type.INT),
+        null, null);
+    createParquetFile(0, TOTAL_RECORDS, gen);
+    createTable(gen);
+    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
+    verifyExport(TOTAL_RECORDS);
+  }
+
+  @Test
+  public void testParquetWithUpdateKey() throws IOException, SQLException {
+    String[] argv = { "--update-key", "ID" };
+    final int TOTAL_RECORDS = 1;
+    createParquetFile(0, TOTAL_RECORDS, null);
+    createTableWithInsert();
+    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
+    verifyExport(getMsgPrefix() + "0");
+  }
+
+  // Test Case for Issue [SQOOP-2846]
+  @Test
+  public void testParquetWithUpsert() throws IOException, SQLException {
+    String[] argv = { "--update-key", "ID", "--update-mode", "allowinsert" };
+    final int TOTAL_RECORDS = 2;
+    // ColumnGenerator gen = colGenerator("100",
+    // Schema.create(Schema.Type.STRING), null, "VARCHAR(64)");
+    createParquetFile(0, TOTAL_RECORDS, null);
+    createTableWithInsert();
+
+    thrown.expect(Exception.class);
+    thrown.reportMissingExceptionWithMessage("Expected Exception during Parquet export with --update-mode");
+    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
+  }
+
+  @Test
+  public void testMissingParquetFields()  throws IOException, SQLException {
+    String[] argv = {};
+    final int TOTAL_RECORDS = 1;
+
+    // null Parquet schema means don't create an Parquet field
+    ColumnGenerator gen = colGenerator(null, null, null, "VARCHAR(64)");
+    createParquetFile(0, TOTAL_RECORDS, gen);
+    createTable(gen);
+
+    thrown.expect(Exception.class);
+    thrown.reportMissingExceptionWithMessage("Expected Exception on missing Parquet fields");
+    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/TestParquetImport.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/TestParquetImport.java b/src/test/org/apache/sqoop/TestParquetImport.java
new file mode 100644
index 0000000..379529a
--- /dev/null
+++ b/src/test/org/apache/sqoop/TestParquetImport.java
@@ -0,0 +1,355 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop;
+
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.Schema.Type;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.junit.Test;
+import org.kitesdk.data.CompressionType;
+import org.kitesdk.data.Dataset;
+import org.kitesdk.data.DatasetReader;
+import org.kitesdk.data.Datasets;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+/**
+ * Tests --as-parquetfile.
+ */
+public class TestParquetImport extends ImportJobTestCase {
+
+  public static final Log LOG = LogFactory
+      .getLog(TestParquetImport.class.getName());
+
+  /**
+   * Create the argv to pass to Sqoop.
+   *
+   * @return the argv as an array of strings.
+   */
+  protected String[] getOutputArgv(boolean includeHadoopFlags,
+          String[] extraArgs) {
+    ArrayList<String> args = new ArrayList<String>();
+
+    if (includeHadoopFlags) {
+      CommonArgs.addHadoopFlags(args);
+    }
+
+    args.add("--table");
+    args.add(getTableName());
+    args.add("--connect");
+    args.add(HsqldbTestServer.getUrl());
+    args.add("--warehouse-dir");
+    args.add(getWarehouseDir());
+    args.add("--m");
+    args.add("1");
+    args.add("--split-by");
+    args.add("INTFIELD1");
+    args.add("--as-parquetfile");
+    if (extraArgs != null) {
+      args.addAll(Arrays.asList(extraArgs));
+    }
+
+    return args.toArray(new String[args.size()]);
+  }
+
+  protected String[] getOutputQueryArgv(boolean includeHadoopFlags, String[] extraArgs) {
+    ArrayList<String> args = new ArrayList<String>();
+
+    if (includeHadoopFlags) {
+      CommonArgs.addHadoopFlags(args);
+    }
+
+    args.add("--query");
+    args.add("SELECT * FROM " + getTableName() + " WHERE $CONDITIONS");
+    args.add("--connect");
+    args.add(HsqldbTestServer.getUrl());
+    args.add("--target-dir");
+    args.add(getWarehouseDir() + "/" + getTableName());
+    args.add("--m");
+    args.add("1");
+    args.add("--split-by");
+    args.add("INTFIELD1");
+    args.add("--as-parquetfile");
+    if (extraArgs != null) {
+      args.addAll(Arrays.asList(extraArgs));
+    }
+
+    return args.toArray(new String[args.size()]);
+  }
+
+  @Test
+  public void testSnappyCompression() throws IOException {
+    runParquetImportTest("snappy");
+  }
+
+  @Test
+  public void testDeflateCompression() throws IOException {
+    runParquetImportTest("deflate");
+  }
+
+  private void runParquetImportTest(String codec) throws IOException {
+    String[] types = {"BIT", "INTEGER", "BIGINT", "REAL", "DOUBLE", "VARCHAR(6)",
+        "VARBINARY(2)",};
+    String[] vals = {"true", "100", "200", "1.0", "2.0", "'s'", "'0102'", };
+    createTableWithColTypes(types, vals);
+
+    String [] extraArgs = { "--compression-codec", codec};
+    runImport(getOutputArgv(true, extraArgs));
+
+    assertEquals(CompressionType.forName(codec), getCompressionType());
+
+    Schema schema = getSchema();
+    assertEquals(Type.RECORD, schema.getType());
+    List<Field> fields = schema.getFields();
+    assertEquals(types.length, fields.size());
+    checkField(fields.get(0), "DATA_COL0", Type.BOOLEAN);
+    checkField(fields.get(1), "DATA_COL1", Type.INT);
+    checkField(fields.get(2), "DATA_COL2", Type.LONG);
+    checkField(fields.get(3), "DATA_COL3", Type.FLOAT);
+    checkField(fields.get(4), "DATA_COL4", Type.DOUBLE);
+    checkField(fields.get(5), "DATA_COL5", Type.STRING);
+    checkField(fields.get(6), "DATA_COL6", Type.BYTES);
+
+    DatasetReader<GenericRecord> reader = getReader();
+    try {
+      GenericRecord record1 = reader.next();
+      assertNotNull(record1);
+      assertEquals("DATA_COL0", true, record1.get("DATA_COL0"));
+      assertEquals("DATA_COL1", 100, record1.get("DATA_COL1"));
+      assertEquals("DATA_COL2", 200L, record1.get("DATA_COL2"));
+      assertEquals("DATA_COL3", 1.0f, record1.get("DATA_COL3"));
+      assertEquals("DATA_COL4", 2.0, record1.get("DATA_COL4"));
+      assertEquals("DATA_COL5", "s", record1.get("DATA_COL5"));
+      Object object = record1.get("DATA_COL6");
+      assertTrue(object instanceof ByteBuffer);
+      ByteBuffer b = ((ByteBuffer) object);
+      assertEquals((byte) 1, b.get(0));
+      assertEquals((byte) 2, b.get(1));
+      assertFalse(reader.hasNext());
+    } finally {
+      reader.close();
+    }
+  }
+
+  @Test
+  public void testOverrideTypeMapping() throws IOException {
+    String [] types = { "INT" };
+    String [] vals = { "10" };
+    createTableWithColTypes(types, vals);
+
+    String [] extraArgs = { "--map-column-java", "DATA_COL0=String"};
+    runImport(getOutputArgv(true, extraArgs));
+
+    Schema schema = getSchema();
+    assertEquals(Type.RECORD, schema.getType());
+    List<Field> fields = schema.getFields();
+    assertEquals(types.length, fields.size());
+    checkField(fields.get(0), "DATA_COL0", Type.STRING);
+
+    DatasetReader<GenericRecord> reader = getReader();
+    try {
+      assertTrue(reader.hasNext());
+      GenericRecord record1 = reader.next();
+      assertEquals("DATA_COL0", "10", record1.get("DATA_COL0"));
+      assertFalse(reader.hasNext());
+    } finally {
+      reader.close();
+    }
+  }
+
+  @Test
+  public void testFirstUnderscoreInColumnName() throws IOException {
+    String [] names = { "_NAME" };
+    String [] types = { "INT" };
+    String [] vals = { "1987" };
+    createTableWithColTypesAndNames(names, types, vals);
+
+    runImport(getOutputArgv(true, null));
+
+    Schema schema = getSchema();
+    assertEquals(Type.RECORD, schema.getType());
+    List<Field> fields = schema.getFields();
+    assertEquals(types.length, fields.size());
+    checkField(fields.get(0), "__NAME", Type.INT);
+
+    DatasetReader<GenericRecord> reader = getReader();
+    try {
+      assertTrue(reader.hasNext());
+      GenericRecord record1 = reader.next();
+      assertEquals("__NAME", 1987, record1.get("__NAME"));
+      assertFalse(reader.hasNext());
+    } finally {
+      reader.close();
+    }
+  }
+
+  @Test
+  public void testNonIdentCharactersInColumnName() throws IOException {
+    String [] names = { "test_p-a+r/quet" };
+    String [] types = { "INT" };
+    String [] vals = { "2015" };
+    createTableWithColTypesAndNames(names, types, vals);
+
+    runImport(getOutputArgv(true, null));
+
+    Schema schema = getSchema();
+    assertEquals(Type.RECORD, schema.getType());
+    List<Field> fields = schema.getFields();
+    assertEquals(types.length, fields.size());
+    checkField(fields.get(0), "TEST_P_A_R_QUET", Type.INT);
+
+    DatasetReader<GenericRecord> reader = getReader();
+    try {
+      assertTrue(reader.hasNext());
+      GenericRecord record1 = reader.next();
+      assertEquals("TEST_P_A_R_QUET", 2015, record1.get("TEST_P_A_R_QUET"));
+      assertFalse(reader.hasNext());
+    } finally {
+      reader.close();
+    }
+  }
+
+  @Test
+  public void testNullableParquetImport() throws IOException, SQLException {
+    String [] types = { "INT" };
+    String [] vals = { null };
+    createTableWithColTypes(types, vals);
+
+    runImport(getOutputArgv(true, null));
+
+    DatasetReader<GenericRecord> reader = getReader();
+    try {
+      assertTrue(reader.hasNext());
+      GenericRecord record1 = reader.next();
+      assertNull(record1.get("DATA_COL0"));
+      assertFalse(reader.hasNext());
+    } finally {
+      reader.close();
+    }
+  }
+
+  @Test
+  public void testQueryImport() throws IOException, SQLException {
+    String [] types = { "INT" };
+    String [] vals = { "1" };
+    createTableWithColTypes(types, vals);
+
+    runImport(getOutputQueryArgv(true, null));
+
+    DatasetReader<GenericRecord> reader = getReader();
+    try {
+      assertTrue(reader.hasNext());
+      GenericRecord record1 = reader.next();
+      assertEquals(1, record1.get("DATA_COL0"));
+      assertFalse(reader.hasNext());
+    } finally {
+      reader.close();
+    }
+  }
+
+  @Test
+  public void testIncrementalParquetImport() throws IOException, SQLException {
+    String [] types = { "INT" };
+    String [] vals = { "1" };
+    createTableWithColTypes(types, vals);
+
+    runImport(getOutputArgv(true, null));
+    runImport(getOutputArgv(true, new String[]{"--append"}));
+
+    DatasetReader<GenericRecord> reader = getReader();
+    try {
+      assertTrue(reader.hasNext());
+      GenericRecord record1 = reader.next();
+      assertEquals(1, record1.get("DATA_COL0"));
+      record1 = reader.next();
+      assertEquals(1, record1.get("DATA_COL0"));
+      assertFalse(reader.hasNext());
+    } finally {
+      reader.close();
+    }
+  }
+
+  @Test
+  public void testOverwriteParquetDatasetFail() throws IOException, SQLException {
+    String [] types = { "INT" };
+    String [] vals = {};
+    createTableWithColTypes(types, vals);
+
+    runImport(getOutputArgv(true, null));
+    try {
+      runImport(getOutputArgv(true, null));
+      fail("");
+    } catch (IOException ex) {
+      // ok
+    }
+  }
+
+  private CompressionType getCompressionType() {
+    return getDataset().getDescriptor().getCompressionType();
+  }
+
+  private Schema getSchema() {
+    return getDataset().getDescriptor().getSchema();
+  }
+
+  private DatasetReader<GenericRecord> getReader() {
+    return getDataset().newReader();
+  }
+
+  private Dataset<GenericRecord> getDataset() {
+    String uri = "dataset:file:" + getTablePath();
+    return Datasets.load(uri, GenericRecord.class);
+  }
+
+  @Override
+  public void tearDown() {
+    super.tearDown();
+    String uri = "dataset:file:" + getTablePath();
+    if (Datasets.exists(uri)) {
+      Datasets.delete(uri);
+    }
+  }
+
+  private void checkField(Field field, String name, Type type) {
+    assertEquals(name, field.name());
+    assertEquals(Type.UNION, field.schema().getType());
+    assertEquals(Type.NULL, field.schema().getTypes().get(0).getType());
+    assertEquals(type, field.schema().getTypes().get(1).getType());
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/TestQuery.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/TestQuery.java b/src/test/org/apache/sqoop/TestQuery.java
new file mode 100644
index 0000000..9dfad07
--- /dev/null
+++ b/src/test/org/apache/sqoop/TestQuery.java
@@ -0,0 +1,194 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.commons.cli.ParseException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.util.ReflectionUtils;
+
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.orm.CompilationManager;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.SeqFileReader;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.util.ClassLoaderStack;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+/**
+ * Test that --query works in Sqoop.
+ */
+public class TestQuery extends ImportJobTestCase {
+
+  /**
+   * Create the argv to pass to Sqoop.
+   * @return the argv as an array of strings.
+   */
+  protected String [] getArgv(boolean includeHadoopFlags, String query,
+      String targetDir, boolean allowParallel) {
+
+    ArrayList<String> args = new ArrayList<String>();
+
+    if (includeHadoopFlags) {
+      CommonArgs.addHadoopFlags(args);
+    }
+
+    args.add("--query");
+    args.add(query);
+    args.add("--split-by");
+    args.add("INTFIELD1");
+    args.add("--connect");
+    args.add(HsqldbTestServer.getUrl());
+    args.add("--as-sequencefile");
+    args.add("--target-dir");
+    args.add(targetDir);
+    args.add("--class-name");
+    args.add(getTableName());
+    if (allowParallel) {
+      args.add("--num-mappers");
+      args.add("2");
+    } else {
+      args.add("--num-mappers");
+      args.add("1");
+    }
+
+    return args.toArray(new String[0]);
+  }
+
+  // this test just uses the two int table.
+  protected String getTableName() {
+    return HsqldbTestServer.getTableName();
+  }
+
+
+  /**
+   * Given a comma-delimited list of integers, grab and parse the first int.
+   * @param str a comma-delimited list of values, the first of which is an int.
+   * @return the first field in the string, cast to int
+   */
+  private int getFirstInt(String str) {
+    String [] parts = str.split(",");
+    return Integer.parseInt(parts[0]);
+  }
+
+  /**
+   * Runs an import for the given free-form query and verifies the contents
+   * of the resulting SequenceFile.
+   * @param query free-form SQL containing the $CONDITIONS placeholder
+   * @param firstValStr expected first record in the (ordered) output file
+   * @param numExpectedResults expected number of imported rows
+   * @param expectedSum expected sum of the first column over all rows
+   * @param targetDir directory the import writes into
+   */
+  public void runQueryTest(String query, String firstValStr,
+      int numExpectedResults, int expectedSum, String targetDir)
+      throws IOException {
+
+    ClassLoader prevClassLoader = null;
+    SequenceFile.Reader reader = null;
+
+    String [] argv = getArgv(true, query, targetDir, false);
+    runImport(argv);
+    try {
+      SqoopOptions opts = new ImportTool().parseArguments(
+          getArgv(false, query, targetDir, false),
+          null, null, true);
+
+      CompilationManager compileMgr = new CompilationManager(opts);
+      String jarFileName = compileMgr.getJarFilename();
+
+      // Load the generated record class so the SequenceFile values can be
+      // instantiated reflectively below.
+      prevClassLoader = ClassLoaderStack.addJarFile(jarFileName,
+          getTableName());
+
+      reader = SeqFileReader.getSeqFileReader(getDataFilePath().toString());
+
+      // here we can actually instantiate (k, v) pairs.
+      Configuration conf = new Configuration();
+      Object key = ReflectionUtils.newInstance(reader.getKeyClass(), conf);
+      Object val = ReflectionUtils.newInstance(reader.getValueClass(), conf);
+
+      // NOTE(review): relies on the deprecated Object-returning
+      // SequenceFile.Reader.next(Object), which yields null at EOF.
+      if (reader.next(key) == null) {
+        fail("Empty SequenceFile during import");
+      }
+
+      // make sure that the value we think should be at the top, is.
+      reader.getCurrentValue(val);
+      assertEquals("Invalid ordering within sorted SeqFile", firstValStr,
+          val.toString());
+
+      // We know that these values are two ints separated by a ',' character.
+      // Since this is all dynamic, though, we don't want to actually link
+      // against the class and use its methods. So we just parse this back
+      // into int fields manually.  Sum them up and ensure that we get the
+      // expected total for the first column, to verify that we got all the
+      // results from the db into the file.
+      int curSum = getFirstInt(val.toString());
+      int totalResults = 1;
+
+      // now sum up everything else in the file.
+      while (reader.next(key) != null) {
+        reader.getCurrentValue(val);
+        curSum += getFirstInt(val.toString());
+        totalResults++;
+      }
+
+      assertEquals("Total sum of first db column mismatch", expectedSum,
+          curSum);
+      assertEquals("Incorrect number of results for query", numExpectedResults,
+          totalResults);
+    } catch (InvalidOptionsException ioe) {
+      fail(ioe.toString());
+    } catch (ParseException pe) {
+      fail(pe.toString());
+    } finally {
+      IOUtils.closeStream(reader);
+
+      if (null != prevClassLoader) {
+        ClassLoaderStack.setCurrentClassLoader(prevClassLoader);
+      }
+    }
+  }
+
+  @Test
+  public void testSelectStar() throws IOException {
+    runQueryTest("SELECT * FROM " + getTableName()
+        + " WHERE INTFIELD2 > 4 AND $CONDITIONS",
+        "1,8\n", 2, 4, getTablePath().toString());
+  }
+
+  @Test
+  public void testCompoundWhere() throws IOException {
+    runQueryTest("SELECT * FROM " + getTableName()
+        + " WHERE INTFIELD1 > 4 AND INTFIELD2 < 3 AND $CONDITIONS",
+        "7,2\n", 1, 7, getTablePath().toString());
+  }
+
+  @Test
+  public void testFailNoConditions() throws IOException {
+    // A parallel import of a free-form query must carry $CONDITIONS.
+    String [] argv = getArgv(true, "SELECT * FROM " + getTableName(),
+        getTablePath().toString(), true);
+    try {
+      runImport(argv);
+      fail("Expected exception running import without $CONDITIONS");
+    } catch (Exception e) {
+      LOG.info("Got exception " + e + " running job (expected; ok)");
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/TestSplitBy.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/TestSplitBy.java 
b/src/test/org/apache/sqoop/TestSplitBy.java
new file mode 100644
index 0000000..2518935
--- /dev/null
+++ b/src/test/org/apache/sqoop/TestSplitBy.java
@@ -0,0 +1,167 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.commons.cli.ParseException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.util.ReflectionUtils;
+
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.orm.CompilationManager;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.SeqFileReader;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.util.ClassLoaderStack;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+/**
+ * Test that --split-by works.
+ */
+public class TestSplitBy extends ImportJobTestCase {
+
+  /**
+   * Create the argv to pass to Sqoop.
+   * @return the argv as an array of strings.
+   */
+  protected String [] getArgv(boolean includeHadoopFlags, String [] colNames,
+      String splitByCol) {
+    String columnsString = "";
+    for (String col : colNames) {
+      columnsString += col + ",";
+    }
+
+    ArrayList<String> args = new ArrayList<String>();
+
+    if (includeHadoopFlags) {
+      CommonArgs.addHadoopFlags(args);
+    }
+
+    args.add("--table");
+    args.add(HsqldbTestServer.getTableName());
+    args.add("--columns");
+    args.add(columnsString);
+    args.add("--split-by");
+    args.add(splitByCol);
+    args.add("--warehouse-dir");
+    args.add(getWarehouseDir());
+    args.add("--connect");
+    args.add(HsqldbTestServer.getUrl());
+    args.add("--as-sequencefile");
+    args.add("--num-mappers");
+    args.add("1");
+
+    return args.toArray(new String[0]);
+  }
+
+  // this test just uses the two int table.
+  protected String getTableName() {
+    return HsqldbTestServer.getTableName();
+  }
+
+
+  /**
+   * Given a comma-delimited list of integers, grab and parse the first int.
+   * @param str a comma-delimited list of values, the first of which is an int.
+   * @return the first field in the string, cast to int
+   */
+  private int getFirstInt(String str) {
+    String [] parts = str.split(",");
+    return Integer.parseInt(parts[0]);
+  }
+
+  /**
+   * Imports the two-int table split by the given column and asserts that
+   * the sum of the first column over all imported rows matches.
+   * @param splitByCol column handed to --split-by
+   * @param expectedSum expected sum of the first column of every row
+   */
+  public void runSplitByTest(String splitByCol, int expectedSum)
+      throws IOException {
+
+    String [] columns = HsqldbTestServer.getFieldNames();
+    ClassLoader prevClassLoader = null;
+    SequenceFile.Reader reader = null;
+
+    String [] argv = getArgv(true, columns, splitByCol);
+    runImport(argv);
+    try {
+      SqoopOptions opts = new ImportTool().parseArguments(
+          getArgv(false, columns, splitByCol),
+          null, null, true);
+
+      CompilationManager compileMgr = new CompilationManager(opts);
+      String jarFileName = compileMgr.getJarFilename();
+      LOG.debug("Got jar from import job: " + jarFileName);
+
+      prevClassLoader = ClassLoaderStack.addJarFile(jarFileName,
+          getTableName());
+
+      reader = SeqFileReader.getSeqFileReader(getDataFilePath().toString());
+
+      // here we can actually instantiate (k, v) pairs.
+      Configuration conf = new Configuration();
+      Object key = ReflectionUtils.newInstance(reader.getKeyClass(), conf);
+      Object val = ReflectionUtils.newInstance(reader.getValueClass(), conf);
+
+      // We know that these values are two ints separated by a ',' character.
+      // Since this is all dynamic, though, we don't want to actually link
+      // against the class and use its methods. So we just parse this back
+      // into int fields manually.  Sum them up and ensure that we get the
+      // expected total for the first column, to verify that we got all the
+      // results from the db into the file.
+
+      // Sum up everything in the file.
+      int curSum = 0;
+      while (reader.next(key) != null) {
+        reader.getCurrentValue(val);
+        curSum += getFirstInt(val.toString());
+      }
+
+      assertEquals("Total sum of first db column mismatch", expectedSum,
+          curSum);
+    } catch (InvalidOptionsException ioe) {
+      fail(ioe.toString());
+    } catch (ParseException pe) {
+      fail(pe.toString());
+    } finally {
+      IOUtils.closeStream(reader);
+
+      if (null != prevClassLoader) {
+        ClassLoaderStack.setCurrentClassLoader(prevClassLoader);
+      }
+    }
+  }
+
+  @Test
+  public void testSplitByFirstCol() throws IOException {
+    String splitByCol = "INTFIELD1";
+    runSplitByTest(splitByCol, HsqldbTestServer.getFirstColSum());
+  }
+
+  @Test
+  public void testSplitBySecondCol() throws IOException {
+    // The check sums the FIRST column in both tests: whichever column we
+    // split by, every row must still be imported, so the sum is unchanged.
+    String splitByCol = "INTFIELD2";
+    runSplitByTest(splitByCol, HsqldbTestServer.getFirstColSum());
+  }
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/TestSqoopJobDataPublisher.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/TestSqoopJobDataPublisher.java 
b/src/test/org/apache/sqoop/TestSqoopJobDataPublisher.java
index fb89a0b..b3579ac 100644
--- a/src/test/org/apache/sqoop/TestSqoopJobDataPublisher.java
+++ b/src/test/org/apache/sqoop/TestSqoopJobDataPublisher.java
@@ -18,11 +18,11 @@
 
 package org.apache.sqoop;
 
-import com.cloudera.sqoop.hive.HiveImport;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.tool.SqoopTool;
+import org.apache.sqoop.hive.HiveImport;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.tool.SqoopTool;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
@@ -107,7 +107,7 @@ public class TestSqoopJobDataPublisher extends 
ImportJobTestCase {
 
         // set up our mock hive shell to compare our generated script
         // against the correct expected one.
-        com.cloudera.sqoop.SqoopOptions options = getSqoopOptions(args, tool);
+        SqoopOptions options = getSqoopOptions(args, tool);
         String hiveHome = options.getHiveHome();
         assertNotNull("hive.home was not set", hiveHome);
         String testDataPath = new Path(new Path(hiveHome),
@@ -119,8 +119,8 @@ public class TestSqoopJobDataPublisher extends 
ImportJobTestCase {
         runImport(tool, args);
     }
 
-    private com.cloudera.sqoop.SqoopOptions getSqoopOptions(String [] args, 
SqoopTool tool) {
-        com.cloudera.sqoop.SqoopOptions opts = null;
+    private SqoopOptions getSqoopOptions(String [] args, SqoopTool tool) {
+        SqoopOptions opts = null;
         try {
             opts = tool.parseArguments(args, null, null, true);
         } catch (Exception e) {

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/TestSqoopOptions.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/TestSqoopOptions.java 
b/src/test/org/apache/sqoop/TestSqoopOptions.java
index 94d9089..16901ca 100644
--- a/src/test/org/apache/sqoop/TestSqoopOptions.java
+++ b/src/test/org/apache/sqoop/TestSqoopOptions.java
@@ -31,19 +31,42 @@ import java.util.Random;
 import java.util.Set;
 import java.util.UUID;
 
+import org.apache.commons.lang.ArrayUtils;
+import org.apache.sqoop.manager.oracle.OracleUtils;
+import org.junit.After;
 import org.apache.commons.lang3.reflect.FieldUtils;
 import org.apache.sqoop.tool.ImportAllTablesTool;
-import com.cloudera.sqoop.tool.SqoopTool;
+import org.apache.sqoop.tool.SqoopTool;
 import org.apache.sqoop.validation.AbsoluteValidationThreshold;
 import org.assertj.core.api.SoftAssertions;
 import org.junit.Before;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.ExpectedException;
 
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.lib.DelimiterSet;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.tool.BaseSqoopTool;
+import org.apache.sqoop.tool.ImportTool;
+
+import static org.apache.sqoop.Sqoop.SQOOP_RETHROW_PROPERTY;
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.is;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
+/**
+ * Test aspects of the SqoopOptions class.
+ */
 public class TestSqoopOptions {
 
+  private Properties originalSystemProperties;
+
   private Random random = new Random();
 
   public static final String COLUMN_MAPPING = 
"test=INTEGER,test1=DECIMAL(1%2C1),test2=NUMERIC(1%2C%202)";
@@ -51,8 +74,12 @@ public class TestSqoopOptions {
   private Set<Class> excludedClassesFromClone = new HashSet<>();
   private Set<String> excludedFieldsFromClone = new HashSet<>();
 
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
   @Before
-  public void setUp() {
+  public void setup() {
+    originalSystemProperties = System.getProperties();
     excludedClassesFromClone.add(String.class);
     excludedClassesFromClone.add(Class.class);
     excludedClassesFromClone.add(Integer.class);
@@ -64,10 +91,747 @@ public class TestSqoopOptions {
     excludedFieldsFromClone.add("activeSqoopTool");
   }
 
+  @After
+  public void tearDown() {
+    // Restore the System properties mutated by individual tests.
+    // NOTE(review): System.getProperties() returns the LIVE Properties
+    // object, so unless setup() stored a clone, this restore is likely a
+    // no-op for properties the tests set/clear — confirm setup snapshots
+    // a copy (e.g. (Properties) System.getProperties().clone()).
+    System.setProperties(originalSystemProperties);
+  }
+
+  // tests for the toChar() parser
+  // Covers plain chars, hex (\0xNN), octal (\0NN), C-style escapes (\n, \\),
+  // whitespace, NUL forms, and the error paths for malformed escapes.
+  @Test
+  public void testNormalChar() throws Exception {
+    assertEquals('a', SqoopOptions.toChar("a"));
+  }
+
+  @Test
+  public void testEmptyString() throws Exception {
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException 
on empty string");
+    SqoopOptions.toChar("");
+  }
+
+  @Test
+  public void testNullString() throws Exception {
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException 
on null string");
+    SqoopOptions.toChar(null);
+  }
+
+  @Test
+  public void testTooLong() throws Exception {
+    // Should just use the first character and log a warning.
+    assertEquals('x', SqoopOptions.toChar("xyz"));
+  }
+
+  @Test
+  public void testHexChar1() throws Exception {
+    assertEquals(0xF, SqoopOptions.toChar("\\0xf"));
+  }
+
+  @Test
+  public void testHexChar2() throws Exception {
+    assertEquals(0xF, SqoopOptions.toChar("\\0xF"));
+  }
+
+  @Test
+  public void testHexChar3() throws Exception {
+    assertEquals(0xF0, SqoopOptions.toChar("\\0xf0"));
+  }
+
+  @Test
+  public void testHexChar4() throws Exception {
+    // The hex marker is case-insensitive (\0X as well as \0x).
+    assertEquals(0xF0, SqoopOptions.toChar("\\0Xf0"));
+  }
+
+  @Test
+  public void testEscapeChar1() throws Exception {
+    assertEquals('\n', SqoopOptions.toChar("\\n"));
+  }
+
+  @Test
+  public void testEscapeChar2() throws Exception {
+    assertEquals('\\', SqoopOptions.toChar("\\\\"));
+  }
+
+  @Test
+  public void testEscapeChar3() throws Exception {
+    // A lone backslash is accepted as a literal backslash.
+    assertEquals('\\', SqoopOptions.toChar("\\"));
+  }
+
+  @Test
+  public void testWhitespaceToChar() throws Exception {
+    assertEquals(' ', SqoopOptions.toChar(" "));
+    assertEquals(' ', SqoopOptions.toChar("   "));
+    assertEquals('\t', SqoopOptions.toChar("\t"));
+  }
+
+  @Test
+  public void testUnknownEscape1() throws Exception {
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException 
on unknown escaping");
+    SqoopOptions.toChar("\\Q");
+  }
+
+  @Test
+  public void testUnknownEscape2() throws Exception {
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException 
on unknown escaping");
+    SqoopOptions.toChar("\\nn");
+  }
+
+  @Test
+  public void testEscapeNul1() throws Exception {
+    assertEquals(DelimiterSet.NULL_CHAR, SqoopOptions.toChar("\\0"));
+  }
+
+  @Test
+  public void testEscapeNul2() throws Exception {
+    assertEquals(DelimiterSet.NULL_CHAR, SqoopOptions.toChar("\\00"));
+  }
+
+  @Test
+  public void testEscapeNul3() throws Exception {
+    assertEquals(DelimiterSet.NULL_CHAR, SqoopOptions.toChar("\\0000"));
+  }
+
+  @Test
+  public void testEscapeNul4() throws Exception {
+    assertEquals(DelimiterSet.NULL_CHAR, SqoopOptions.toChar("\\0x0"));
+  }
+
+  @Test
+  public void testOctalChar1() throws Exception {
+    assertEquals(04, SqoopOptions.toChar("\\04"));
+  }
+
+  @Test
+  public void testOctalChar2() throws Exception {
+    assertEquals(045, SqoopOptions.toChar("\\045"));
+  }
+
+  @Test
+  public void testErrOctalChar() throws Exception {
+    // '9' is not an octal digit.
+    thrown.expect(NumberFormatException.class);
+    thrown.reportMissingExceptionWithMessage("Expected NumberFormatException 
on erroneous octal char");
+    SqoopOptions.toChar("\\095");
+  }
+
+  @Test
+  public void testErrHexChar() throws Exception {
+    // 'K' is not a hex digit.
+    thrown.expect(NumberFormatException.class);
+    thrown.reportMissingExceptionWithMessage("Expected NumberFormatException 
on erroneous hex char");
+    SqoopOptions.toChar("\\0x9K5");
+  }
+
+  /**
+   * Parses the given argv with a fresh ImportTool.
+   * @return the resulting SqoopOptions
+   */
+  private SqoopOptions parse(String [] argv) throws Exception {
+    ImportTool importTool = new ImportTool();
+    return importTool.parseArguments(argv, null, null, false);
+  }
+
+  // test that setting output delimiters also sets input delimiters
+  @Test
+  public void testDelimitersInherit() throws Exception {
+    String [] args = {
+        "--fields-terminated-by",
+        "|",
+    };
+
+    SqoopOptions opts = parse(args);
+    assertEquals('|', opts.getInputFieldDelim());
+    assertEquals('|', opts.getOutputFieldDelim());
+  }
+
+  // Test that setting output delimiters and setting input delims
+  // separately works.
+  @Test
+  public void testDelimOverride1() throws Exception {
+    String [] args = {
+        "--fields-terminated-by",
+        "|",
+        "--input-fields-terminated-by",
+        "*",
+    };
+
+    SqoopOptions opts = parse(args);
+    assertEquals('*', opts.getInputFieldDelim());
+    assertEquals('|', opts.getOutputFieldDelim());
+  }
+
+  // test that the order in which delims are specified doesn't matter
+  @Test
+  public void testDelimOverride2() throws Exception {
+    String [] args = {
+        "--input-fields-terminated-by",
+        "*",
+        "--fields-terminated-by",
+        "|",
+    };
+
+    SqoopOptions opts = parse(args);
+    assertEquals('*', opts.getInputFieldDelim());
+    assertEquals('|', opts.getOutputFieldDelim());
+  }
+
+  // A non-numeric mapper count must be rejected at parse time.
+  @Test
+  public void testBadNumMappers1() throws Exception {
+    String [] args = {
+        "--num-mappers",
+        "x",
+    };
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException 
on invalid --num-mappers argument");
+    parse(args);
+  }
+
+  @Test
+  public void testBadNumMappers2() throws Exception {
+    String [] args = {
+        "-m",
+        "x",
+    };
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException 
on invalid -m argument");
+    parse(args);
+  }
+
+  @Test
+  public void testGoodNumMappers() throws Exception {
+    String [] args = {
+        "-m",
+        "4",
+    };
+
+    SqoopOptions opts = parse(args);
+    assertEquals(4, opts.getNumMappers());
+  }
+
+  @Test
+  public void testHivePartitionParams() throws Exception {
+    String[] args = {
+        "--hive-partition-key", "ds",
+        "--hive-partition-value", "20110413",
+    };
+    SqoopOptions opts = parse(args);
+    assertEquals("ds", opts.getHivePartitionKey());
+    assertEquals("20110413", opts.getHivePartitionValue());
+  }
+
+  @Test
+  public void testBoundaryQueryParams() throws Exception {
+    String[] args = {
+        "--boundary-query", "select 1, 2",
+    };
+
+    SqoopOptions opts = parse(args);
+    assertEquals("select 1, 2", opts.getBoundaryQuery());
+  }
+
+  @Test
+  public void testMapColumnHiveParams() throws Exception {
+    String[] args = {
+        "--map-column-hive", "id=STRING",
+    };
+
+    SqoopOptions opts = parse(args);
+    Properties mapping = opts.getMapColumnHive();
+    assertTrue(mapping.containsKey("id"));
+    assertEquals("STRING", mapping.get("id"));
+  }
+
+  // A mapping without '=' must be rejected.
+  // NOTE(review): the broad catch (Exception) and the unused 'opts' local
+  // could hide unrelated failures; consider the 'thrown' rule used by the
+  // other negative tests in this class.
+  @Test
+  public void testMalformedMapColumnHiveParams() throws Exception {
+    String[] args = {
+        "--map-column-hive", "id",
+    };
+    try {
+      SqoopOptions opts = parse(args);
+      fail("Malformed hive mapping does not throw exception");
+    } catch (Exception e) {
+      // Caught exception as expected
+    }
+  }
+
+  @Test
+  public void testMapColumnJavaParams() throws Exception {
+    String[] args = {
+        "--map-column-java", "id=String",
+    };
+
+    SqoopOptions opts = parse(args);
+    Properties mapping = opts.getMapColumnJava();
+    assertTrue(mapping.containsKey("id"));
+    assertEquals("String", mapping.get("id"));
+  }
+
+  // Same malformed-mapping check for the Java type mapping.
+  @Test
+  public void testMalfromedMapColumnJavaParams() throws Exception {
+    String[] args = {
+        "--map-column-java", "id",
+    };
+    try {
+      SqoopOptions opts = parse(args);
+      fail("Malformed java mapping does not throw exception");
+    } catch (Exception e) {
+      // Caught exception as expected
+    }
+  }
+
+  @Test
+  public void testSkipDistCacheOption() throws Exception {
+    String[] args = {"--skip-dist-cache"};
+    SqoopOptions opts = parse(args);
+    assertTrue(opts.isSkipDistCache());
+  }
+
+  // Round-trip tests: writeProperties() -> loadProperties() must preserve
+  // every option, including custom connection parameters.
+  @Test
+  public void testPropertySerialization1() {
+    // Test that if we write a SqoopOptions out to a Properties,
+    // and then read it back in, we get all the same results.
+    SqoopOptions out = new SqoopOptions();
+    out.setUsername("user");
+    out.setConnectString("bla");
+    out.setNumMappers(4);
+    out.setAppendMode(true);
+    out.setHBaseTable("hbasetable");
+    out.setWarehouseDir("Warehouse");
+    out.setClassName("someclass");
+    out.setSplitByCol("somecol");
+    out.setSqlQuery("the query");
+    out.setPackageName("a.package");
+    out.setHiveImport(true);
+    out.setFetchSize(null);
+
+    Properties connParams = new Properties();
+    connParams.put("conn.timeout", "3000");
+    connParams.put("conn.buffer_size", "256");
+    connParams.put("conn.dummy", "dummy");
+    connParams.put("conn.foo", "bar");
+
+    out.setConnectionParams(connParams);
+
+    Properties outProps = out.writeProperties();
+
+    SqoopOptions in = new SqoopOptions();
+    in.loadProperties(outProps);
+
+    Properties inProps = in.writeProperties();
+
+    assertEquals("properties don't match", outProps, inProps);
+
+    assertEquals("connection params don't match",
+        connParams, out.getConnectionParams());
+    assertEquals("connection params don't match",
+        connParams, in.getConnectionParams());
+  }
+
+  // Same round trip, with a non-null fetch size and dotted param keys.
+  @Test
+  public void testPropertySerialization2() {
+    // Test that if we write a SqoopOptions out to a Properties,
+    // and then read it back in, we get all the same results.
+    SqoopOptions out = new SqoopOptions();
+    out.setUsername("user");
+    out.setConnectString("bla");
+    out.setNumMappers(4);
+    out.setAppendMode(true);
+    out.setHBaseTable("hbasetable");
+    out.setWarehouseDir("Warehouse");
+    out.setClassName("someclass");
+    out.setSplitByCol("somecol");
+    out.setSqlQuery("the query");
+    out.setPackageName("a.package");
+    out.setHiveImport(true);
+    out.setFetchSize(42);
+
+    Properties connParams = new Properties();
+    connParams.setProperty("a", "value-a");
+    connParams.setProperty("b", "value-b");
+    connParams.setProperty("a.b", "value-a.b");
+    connParams.setProperty("a.b.c", "value-a.b.c");
+    connParams.setProperty("aaaaaaaaaa.bbbbbbb.cccccccc", "value-abc");
+
+    out.setConnectionParams(connParams);
+
+    Properties outProps = out.writeProperties();
+
+    SqoopOptions in = new SqoopOptions();
+    in.loadProperties(outProps);
+
+    Properties inProps = in.writeProperties();
+
+    assertEquals("properties don't match", outProps, inProps);
+    assertEquals("connection params don't match",
+        connParams, out.getConnectionParams());
+    assertEquals("connection params don't match",
+        connParams, in.getConnectionParams());
+  }
+
+  // The temp root dir defaults to "_sqoop", survives a properties round
+  // trip, and falls back to the default when explicitly nulled.
+  @Test
+  public void testDefaultTempRootDir() {
+    SqoopOptions opts = new SqoopOptions();
+
+    assertEquals("_sqoop", opts.getTempRootDir());
+  }
+
+  @Test
+  public void testDefaultLoadedTempRootDir() {
+    SqoopOptions out = new SqoopOptions();
+    Properties props = out.writeProperties();
+    SqoopOptions opts = new SqoopOptions();
+    opts.loadProperties(props);
+
+    assertEquals("_sqoop", opts.getTempRootDir());
+  }
+
+  @Test
+  public void testLoadedTempRootDir() {
+    SqoopOptions out = new SqoopOptions();
+    final String tempRootDir = "customRoot";
+    out.setTempRootDir(tempRootDir);
+    Properties props = out.writeProperties();
+    SqoopOptions opts = new SqoopOptions();
+    opts.loadProperties(props);
+
+    assertEquals(tempRootDir, opts.getTempRootDir());
+  }
+
+  @Test
+  public void testNulledTempRootDir() {
+    SqoopOptions out = new SqoopOptions();
+    out.setTempRootDir(null);
+    Properties props = out.writeProperties();
+    SqoopOptions opts = new SqoopOptions();
+    opts.loadProperties(props);
+
+    assertEquals("_sqoop", opts.getTempRootDir());
+  }
+
+  // These tests pin that merely defining the sqoop.throwOnError system
+  // property (even with an empty value) enables rethrow behavior, and that
+  // the flag survives a properties round trip and the --throw-on-error arg.
+  @Test
+  public void testDefaultThrowOnErrorWithNotSetSystemProperty() {
+    System.clearProperty(SQOOP_RETHROW_PROPERTY);
+    SqoopOptions opts = new SqoopOptions();
+    assertFalse(opts.isThrowOnError());
+  }
+
+  @Test
+  public void testDefaultThrowOnErrorWithSetSystemProperty() {
+    String testSqoopRethrowProperty = "";
+    System.setProperty(SQOOP_RETHROW_PROPERTY, testSqoopRethrowProperty);
+    SqoopOptions opts = new SqoopOptions();
+
+    assertTrue(opts.isThrowOnError());
+  }
+
+  @Test
+  public void testDefaultLoadedThrowOnErrorWithNotSetSystemProperty() {
+    System.clearProperty(SQOOP_RETHROW_PROPERTY);
+    SqoopOptions out = new SqoopOptions();
+    Properties props = out.writeProperties();
+    SqoopOptions opts = new SqoopOptions();
+    opts.loadProperties(props);
+
+    assertFalse(opts.isThrowOnError());
+  }
+
+  @Test
+  public void testDefaultLoadedThrowOnErrorWithSetSystemProperty() {
+    String testSqoopRethrowProperty = "";
+    System.setProperty(SQOOP_RETHROW_PROPERTY, testSqoopRethrowProperty);
+    SqoopOptions out = new SqoopOptions();
+    Properties props = out.writeProperties();
+    SqoopOptions opts = new SqoopOptions();
+    opts.loadProperties(props);
+
+    assertTrue(opts.isThrowOnError());
+  }
+
+  @Test
+  public void testThrowOnErrorWithNotSetSystemProperty() throws Exception {
+    System.clearProperty(SQOOP_RETHROW_PROPERTY);
+    String[] args = {"--throw-on-error"};
+    SqoopOptions opts = parse(args);
+
+    assertTrue(opts.isThrowOnError());
+  }
+
+  @Test
+  public void testThrowOnErrorWithSetSystemProperty() throws Exception {
+    String testSqoopRethrowProperty = "";
+    System.setProperty(SQOOP_RETHROW_PROPERTY, testSqoopRethrowProperty);
+    String[] args = {"--throw-on-error"};
+    SqoopOptions opts = parse(args);
+
+    assertTrue(opts.isThrowOnError());
+  }
+
+  @Test
+  public void defaultValueOfOracleEscapingDisabledShouldBeTrue() {
+    // With the system property unset, isOracleEscapingDisabled() defaults
+    // to true, as asserted below. The previous method name
+    // ("...ShouldBeFalse") contradicted its own assertion; the name now
+    // matches the pinned behavior.
+    System.clearProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED);
+    SqoopOptions opts = new SqoopOptions();
+
+    assertThat(opts.isOracleEscapingDisabled(), is(equalTo(true)));
+  }
+
+  // Explicit "false"/"true" values of the system property are honored; any
+  // other value yields false (NOTE(review): consistent with
+  // Boolean.parseBoolean semantics — confirm in SqoopOptions).
+  @Test
+  public void 
valueOfOracleEscapingDisabledShouldBeFalseIfTheValueOfTheRelatedEnvironmentVariableIsSetToFalse()
 {
+    System.setProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED, "false");
+    SqoopOptions opts = new SqoopOptions();
+
+    assertThat(opts.isOracleEscapingDisabled(), is(equalTo(false)));
+  }
+
+  @Test
+  public void 
valueOfOracleEscapingDisabledShouldBeTrueIfTheValueOfTheRelatedEnvironmentVariableIsSetToTrue()
 {
+    System.setProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED, "true");
+    SqoopOptions opts = new SqoopOptions();
+
+    assertThat(opts.isOracleEscapingDisabled(), is(equalTo(true)));
+  }
+
+  @Test
+  public void 
valueOfOracleEscapingDisabledShouldBeFalseIfTheValueOfTheRelatedEnvironmentVariableIsSetToAnyNonBooleanValue()
 {
+    System.setProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED, "falsetrue");
+    SqoopOptions opts = new SqoopOptions();
+
+    assertThat(opts.isOracleEscapingDisabled(), is(equalTo(false)));
+  }
+
+  @Test
+  public void 
hadoopConfigurationInstanceOfSqoopOptionsShouldContainTheSameValueForOracleEscapingDisabledAsSqoopOptionsProperty()
 {
+    SqoopOptions opts = new SqoopOptions();
+
+    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
+        is(equalTo(opts.isOracleEscapingDisabled())));
+  }
+
+  @Test
+  public void 
hadoopConfigurationInstanceOfSqoopOptionsShouldContainTrueForOracleEscapingDisabledAsTheValueDirectlyHasBeenSetToSqoopOptions()
 {
+    System.clearProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED);
+    SqoopOptions opts = new SqoopOptions();
+    opts.setOracleEscapingDisabled(true);
+
+    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
+        is(equalTo(true)));
+  }
+
+  @Test
+  public void 
hadoopConfigurationInstanceOfSqoopOptionsShouldContainFalseForOracleEscapingDisabledAsTheValueDirectlyHasBeenSetToSqoopOptions()
 {
+    System.clearProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED);
+    SqoopOptions opts = new SqoopOptions();
+    opts.setOracleEscapingDisabled(false);
+
+    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
+        is(equalTo(false)));
+  }
+
+  @Test
+  public void 
valueOfOracleEscapingDisabledInHadoopConfigurationInstanceOfSqoopOptionsShouldBeFalseIfTheValueOfTheRelatedEnvironmentVariableIsSetToFalse()
 {
+    System.setProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED, "false");
+    SqoopOptions opts = new SqoopOptions();
+
+    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
+        is(equalTo(false)));
+  }
+
+  @Test
+  public void 
valueOfOracleEscapingDisabledInHadoopConfigurationInstanceOfSqoopOptionsShouldBeTrueIfTheValueOfTheRelatedEnvironmentVariableIsSetToTrue()
 {
+    System.setProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED, "true");
+    SqoopOptions opts = new SqoopOptions();
+
+    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
+        is(equalTo(true)));
+  }
+
+  @Test
+  public void 
valueOfOracleEscapingDisabledInHadoopConfigurationInstanceOfSqoopOptionsShouldBeFalseIfTheValueOfTheRelatedEnvironmentVariableIsSetToAnyNonBooleanValue()
 {
+    System.setProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED, "falsetrue");
+    SqoopOptions opts = new SqoopOptions();
+
+    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
+        is(equalTo(false)));
+  }
+
+  @Test
+  public void 
valueOfOracleEscapingDisabledShouldBeAbleToSavedAndLoadedBackWithTheSameValue() 
{
+    System.clearProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED);
+    SqoopOptions opts = new SqoopOptions();
+    opts.setOracleEscapingDisabled(false);
+    Properties out = opts.writeProperties();
+    opts = new SqoopOptions();
+    opts.loadProperties(out);
+
+    assertThat(opts.isOracleEscapingDisabled(), is(equalTo(false)));
+    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
+        is(equalTo(false)));
+  }
+
  @Test
  public void valueOfOracleEscapingDisabledShouldBeEqualToNullIfASqoopOptionsInstanceWasLoadedWhichDidntContainASavedValueForIt() {
    // NOTE(review): the method name promises a null value, but the assertions
    // below expect true; consider renaming the test so the name matches the
    // asserted behaviour.
    System.clearProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED);
    SqoopOptions opts = new SqoopOptions();
    Properties out = opts.writeProperties();
    opts = new SqoopOptions();
    opts.loadProperties(out);

    assertThat(opts.isOracleEscapingDisabled(), is(equalTo(true)));
    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
        is(equalTo(true)));
  }
+
+  // test that hadoop-home is accepted as an option
+  @Test
+  public void testHadoopHome() throws Exception {
+    String [] args = {
+        "--hadoop-home",
+        "/usr/lib/hadoop",
+    };
+
+    SqoopOptions opts = parse(args);
+    assertEquals("/usr/lib/hadoop", opts.getHadoopMapRedHome());
+  }
+
  // test that --hadoop-mapred-home takes precedence over --hadoop-home
  @Test
  public void testHadoopMapRedOverridesHadoopHome() throws Exception {
    String[] args = { "--hadoop-home", "/usr/lib/hadoop-ignored",
        "--hadoop-mapred-home", "/usr/lib/hadoop", };

    SqoopOptions opts = parse(args);
    assertEquals("/usr/lib/hadoop", opts.getHadoopMapRedHome());
  }
+
+
+  //helper method to validate given import options
+  private void validateImportOptions(String[] extraArgs) throws Exception {
+    String [] args = {
+        "--connect", HsqldbTestServer.getUrl(),
+        "--table", "test",
+        "-m", "1",
+    };
+    ImportTool importTool = new ImportTool();
+    SqoopOptions opts = importTool.parseArguments(
+        (String []) ArrayUtils.addAll(args, extraArgs), null, null, false);
+    importTool.validateOptions(opts);
+  }
+
+  //test compatability of --detele-target-dir with import
+  @Test
+  public void testDeteleTargetDir() throws Exception {
+    String [] extraArgs = {
+        "--delete-target-dir",
+    };
+    try {
+      validateImportOptions(extraArgs);
+    } catch(SqoopOptions.InvalidOptionsException ioe) {
+      fail("Unexpected InvalidOptionsException" + ioe);
+    }
+  }
+
+  //test incompatability of --delete-target-dir & --append with import
+  @Test
+  public void testDeleteTargetDirWithAppend() throws Exception {
+    String [] extraArgs = {
+        "--append",
+        "--delete-target-dir",
+    };
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException 
on incompatibility of " +
+        "--delete-target-dir and --append");
+    validateImportOptions(extraArgs);
+  }
+
+  //test incompatability of --delete-target-dir with incremental import
+  @Test
+  public void testDeleteWithIncrementalImport() throws Exception {
+    String [] extraArgs = {
+        "--incremental", "append",
+        "--delete-target-dir",
+    };
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException 
on incompatibility of " +
+        "--delete-target-dir and --incremental");
+    validateImportOptions(extraArgs);
+  }
+
+  // test that hbase bulk load import with table name and target dir
+  // passes validation
+  @Test
+  public void testHBaseBulkLoad() throws Exception {
+    String [] extraArgs = {
+        longArgument(BaseSqoopTool.HBASE_BULK_LOAD_ENABLED_ARG),
+        longArgument(BaseSqoopTool.TARGET_DIR_ARG), "./test",
+        longArgument(BaseSqoopTool.HBASE_TABLE_ARG), "test_table",
+        longArgument(BaseSqoopTool.HBASE_COL_FAM_ARG), "d"};
+
+    validateImportOptions(extraArgs);
+  }
+
+  // test that hbase bulk load import with a missing --hbase-table fails
+  @Test
+  public void testHBaseBulkLoadMissingHbaseTable() throws Exception {
+    String [] extraArgs = {
+        longArgument(BaseSqoopTool.HBASE_BULK_LOAD_ENABLED_ARG),
+        longArgument(BaseSqoopTool.TARGET_DIR_ARG), "./test"};
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException 
because of missing --hbase-table");
+    validateImportOptions(extraArgs);
+  }
+
+  private static String longArgument(String argument) {
+    return String.format("--%s", argument);
+  }
+
+  @Test
+  public void testRelaxedIsolation() throws Exception {
+    String extraArgs[] = {
+        "--relaxed-isolation",
+    };
+    validateImportOptions(extraArgs);
+  }
+
+  @Test
+  public void testResetToOneMapper() throws Exception {
+    String extraArgs[] = {
+        "--autoreset-to-one-mapper",
+    };
+    validateImportOptions(extraArgs);
+  }
+
+  @Test
+  public void testResetToOneMapperAndSplitBy() throws Exception {
+    String extraArgs[] = {
+        "--autoreset-to-one-mapper",
+        "--split-by",
+        "col0",
+    };
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected Exception on 
incompatibility of " +
+        "--autoreset-to-one-mapper and --split-by");
+    validateImportOptions(extraArgs);
+  }
+
+  @Test
+  public void testEscapeMapingColumnNames() throws Exception {
+    SqoopOptions opts = new SqoopOptions();
+    // enabled by default
+    assertTrue(opts.getEscapeMappingColumnNamesEnabled());
+
+    String [] args = {
+        "--" + 
org.apache.sqoop.tool.BaseSqoopTool.ESCAPE_MAPPING_COLUMN_NAMES_ENABLED,
+        "false",
+    };
+
+    opts = parse(args);
+    assertFalse(opts.getEscapeMappingColumnNamesEnabled());
+  }
+
   @Test
   public void testParseColumnParsing() {
     new SqoopOptions() {
-  @Test
+      @Test
       public void testParseColumnMapping() {
         Properties result = new Properties();
         parseColumnMapping(COLUMN_MAPPING, result);
@@ -183,6 +947,9 @@ public class TestSqoopOptions {
     else if(type.equals(String.class)) {
       return UUID.randomUUID().toString();
     }
+    else if(type.equals(Character.TYPE) || type.equals(Character.class)) {
+      return UUID.randomUUID().toString().charAt(0);
+    }
     else if(type.equals(BigInteger.class)){
       return BigInteger.valueOf(random.nextInt());
     }

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/TestTargetDir.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/TestTargetDir.java 
b/src/test/org/apache/sqoop/TestTargetDir.java
new file mode 100644
index 0000000..c2cacd4
--- /dev/null
+++ b/src/test/org/apache/sqoop/TestTargetDir.java
@@ -0,0 +1,156 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.hadoop.fs.ContentSummary;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.StringUtils;
+
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+
+/**
+ * Test that --target-dir works.
+ */
+public class TestTargetDir extends ImportJobTestCase {
+
+  public static final Log LOG = LogFactory
+      .getLog(TestTargetDir.class.getName());
+
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  /**
+   * Create the argv to pass to Sqoop.
+   *
+   * @return the argv as an array of strings.
+   */
+  protected ArrayList getOutputArgv(boolean includeHadoopFlags) {
+    ArrayList<String> args = new ArrayList<String>();
+
+    if (includeHadoopFlags) {
+      CommonArgs.addHadoopFlags(args);
+    }
+
+    args.add("--table");
+    args.add(HsqldbTestServer.getTableName());
+    args.add("--connect");
+    args.add(HsqldbTestServer.getUrl());
+    args.add("--split-by");
+    args.add("INTFIELD1");
+    args.add("--as-sequencefile");
+
+    return args;
+  }
+
+  // this test just uses the two int table.
+  protected String getTableName() {
+    return HsqldbTestServer.getTableName();
+  }
+
+  /** test invalid argument exception if several output options. */
+  @Test
+  public void testSeveralOutputsIOException() throws IOException {
+    ArrayList args = getOutputArgv(true);
+    args.add("--warehouse-dir");
+    args.add(getWarehouseDir());
+    args.add("--target-dir");
+    args.add(getWarehouseDir());
+
+    String[] argv = (String[]) args.toArray(new String[0]);
+
+    thrown.expect(IOException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IOException on several 
output options");
+    runImport(argv);
+  }
+
+  /** test target-dir contains imported files. */
+  @Test
+  public void testTargetDir() throws IOException {
+
+    try {
+      String targetDir = getWarehouseDir() + "/tempTargetDir";
+
+      ArrayList args = getOutputArgv(true);
+      args.add("--target-dir");
+      args.add(targetDir);
+
+      // delete target-dir if exists and recreate it
+      FileSystem fs = FileSystem.get(getConf());
+      Path outputPath = new Path(targetDir);
+      if (fs.exists(outputPath)) {
+        fs.delete(outputPath, true);
+      }
+
+      String[] argv = (String[]) args.toArray(new String[0]);
+      runImport(argv);
+
+      ContentSummary summ = fs.getContentSummary(outputPath);
+
+      assertTrue("There's no new imported files in target-dir",
+          summ.getFileCount() > 0);
+
+    } catch (Exception e) {
+      LOG.error("Got Exception: " + StringUtils.stringifyException(e));
+      fail(e.toString());
+    }
+  }
+
+  /** test target-dir breaks if already existing
+   * (only allowed in append mode). */
+  @Test
+  public void testExistingTargetDir() throws IOException {
+    String targetDir = getWarehouseDir() + "/tempTargetDir";
+
+    ArrayList args = getOutputArgv(true);
+    args.add("--target-dir");
+    args.add(targetDir);
+
+    // delete target-dir if exists and recreate it
+    FileSystem fs = FileSystem.get(getConf());
+    Path outputPath = new Path(targetDir);
+    if (!fs.exists(outputPath)) {
+      fs.mkdirs(outputPath);
+    }
+
+    String[] argv = (String[]) args.toArray(new String[0]);
+
+    thrown.expect(IOException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IOException on 
--target-dir if target dir already exists");
+    runImport(argv);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/TestWhere.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/TestWhere.java 
b/src/test/org/apache/sqoop/TestWhere.java
new file mode 100644
index 0000000..ed8199b
--- /dev/null
+++ b/src/test/org/apache/sqoop/TestWhere.java
@@ -0,0 +1,184 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.commons.cli.ParseException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.util.ReflectionUtils;
+
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.orm.CompilationManager;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.SeqFileReader;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.util.ClassLoaderStack;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+/**
+ * Test that --where works in Sqoop.
+ * Methods essentially copied out of the other Test* classes.
+ * TODO(kevin or aaron): Factor out these common test methods
+ * so that every new Test* class doesn't need to copy the code.
+ */
public class TestWhere extends ImportJobTestCase {

  /**
   * Create the argv to pass to Sqoop.
   * @param includeHadoopFlags whether to prepend the common Hadoop flags
   * @param colNames column names joined into the --columns argument
   * @param whereClause SQL predicate passed through --where
   * @return the argv as an array of strings.
   */
  protected String [] getArgv(boolean includeHadoopFlags, String [] colNames,
      String whereClause) {
    // Builds "col1,col2,..." with a trailing comma; downstream parsing
    // apparently tolerates (or expects) the trailing separator.
    String columnsString = "";
    for (String col : colNames) {
      columnsString += col + ",";
    }

    ArrayList<String> args = new ArrayList<String>();

    if (includeHadoopFlags) {
      CommonArgs.addHadoopFlags(args);
    }

    args.add("--table");
    args.add(HsqldbTestServer.getTableName());
    args.add("--columns");
    args.add(columnsString);
    args.add("--where");
    args.add(whereClause);
    args.add("--split-by");
    args.add("INTFIELD1");
    args.add("--warehouse-dir");
    args.add(getWarehouseDir());
    args.add("--connect");
    args.add(HsqldbTestServer.getUrl());
    args.add("--as-sequencefile");
    args.add("--num-mappers");
    args.add("1");

    return args.toArray(new String[0]);
  }

  // this test just uses the two int table.
  protected String getTableName() {
    return HsqldbTestServer.getTableName();
  }


  /**
   * Given a comma-delimited list of integers, grab and parse the first int.
   * @param str a comma-delimited list of values, the first of which is an int.
   * @return the first field in the string, cast to int
   */
  private int getFirstInt(String str) {
    String [] parts = str.split(",");
    return Integer.parseInt(parts[0]);
  }

  /**
   * Runs an import restricted by {@code whereClause} and verifies the
   * resulting SequenceFile: first record value, record count, and the sum of
   * the first column across all records.
   *
   * @param whereClause SQL predicate for --where
   * @param firstValStr expected textual value of the first record
   * @param numExpectedResults expected number of imported rows
   * @param expectedSum expected sum of the first int column over all rows
   */
  public void runWhereTest(String whereClause, String firstValStr,
      int numExpectedResults, int expectedSum) throws IOException {

    String [] columns = HsqldbTestServer.getFieldNames();
    ClassLoader prevClassLoader = null;
    SequenceFile.Reader reader = null;

    String [] argv = getArgv(true, columns, whereClause);
    runImport(argv);
    try {
      SqoopOptions opts = new ImportTool().parseArguments(
          getArgv(false, columns, whereClause),
          null, null, true);

      // Load the generated record class so the SequenceFile values can be
      // deserialized reflectively below.
      CompilationManager compileMgr = new CompilationManager(opts);
      String jarFileName = compileMgr.getJarFilename();

      prevClassLoader = ClassLoaderStack.addJarFile(jarFileName,
          getTableName());

      reader = SeqFileReader.getSeqFileReader(getDataFilePath().toString());

      // here we can actually instantiate (k, v) pairs.
      Configuration conf = new Configuration();
      Object key = ReflectionUtils.newInstance(reader.getKeyClass(), conf);
      Object val = ReflectionUtils.newInstance(reader.getValueClass(), conf);

      if (reader.next(key) == null) {
        fail("Empty SequenceFile during import");
      }

      // make sure that the value we think should be at the top, is.
      reader.getCurrentValue(val);
      assertEquals("Invalid ordering within sorted SeqFile", firstValStr,
          val.toString());

      // We know that these values are two ints separated by a ',' character.
      // Since this is all dynamic, though, we don't want to actually link
      // against the class and use its methods. So we just parse this back
      // into int fields manually.  Sum them up and ensure that we get the
      // expected total for the first column, to verify that we got all the
      // results from the db into the file.
      int curSum = getFirstInt(val.toString());
      int totalResults = 1;

      // now sum up everything else in the file.
      while (reader.next(key) != null) {
        reader.getCurrentValue(val);
        curSum += getFirstInt(val.toString());
        totalResults++;
      }

      assertEquals("Total sum of first db column mismatch", expectedSum,
          curSum);
      assertEquals("Incorrect number of results for query", numExpectedResults,
          totalResults);
    } catch (InvalidOptionsException ioe) {
      fail(ioe.toString());
    } catch (ParseException pe) {
      fail(pe.toString());
    } finally {
      // Always close the reader and restore the class loader, even on failure.
      IOUtils.closeStream(reader);

      if (null != prevClassLoader) {
        ClassLoaderStack.setCurrentClassLoader(prevClassLoader);
      }
    }
  }

  @Test
  public void testSingleClauseWhere() throws IOException {
    String whereClause = "INTFIELD2 > 4";
    runWhereTest(whereClause, "1,8\n", 2, 4);
  }

  @Test
  public void testMultiClauseWhere() throws IOException {
    String whereClause = "INTFIELD1 > 4 AND INTFIELD2 < 3";
    runWhereTest(whereClause, "7,2\n", 1, 7);
  }
}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/accumulo/AccumuloTestCase.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/accumulo/AccumuloTestCase.java 
b/src/test/org/apache/sqoop/accumulo/AccumuloTestCase.java
index 95c9b56..14413b1 100644
--- a/src/test/org/apache/sqoop/accumulo/AccumuloTestCase.java
+++ b/src/test/org/apache/sqoop/accumulo/AccumuloTestCase.java
@@ -49,8 +49,8 @@ import org.apache.commons.logging.LogFactory;
 import org.junit.After;
 import org.junit.Before;
 
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.testutil.ImportJobTestCase;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/credentials/TestPassingSecurePassword.java
----------------------------------------------------------------------
diff --git 
a/src/test/org/apache/sqoop/credentials/TestPassingSecurePassword.java 
b/src/test/org/apache/sqoop/credentials/TestPassingSecurePassword.java
index 244c744..9c1e9f9 100644
--- a/src/test/org/apache/sqoop/credentials/TestPassingSecurePassword.java
+++ b/src/test/org/apache/sqoop/credentials/TestPassingSecurePassword.java
@@ -18,9 +18,9 @@
 
 package org.apache.sqoop.credentials;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
-import com.cloudera.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.BaseSqoopTestCase;
+import org.apache.sqoop.testutil.CommonArgs;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -28,12 +28,10 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.sqoop.mapreduce.db.DBConfiguration;
-import org.apache.sqoop.tool.BaseSqoopTool;
 import org.apache.sqoop.tool.ImportTool;
 import org.apache.sqoop.util.password.CredentialProviderHelper;
 import org.apache.sqoop.util.password.CredentialProviderPasswordLoader;
 import org.apache.sqoop.util.password.CryptoFileLoader;
-import org.apache.sqoop.util.password.PasswordLoader;
 import org.junit.Test;
 
 import javax.crypto.Cipher;
@@ -45,7 +43,6 @@ import javax.crypto.spec.SecretKeySpec;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
-import java.security.NoSuchAlgorithmException;
 import java.sql.Connection;
 import java.util.ArrayList;
 import java.util.Collections;

Reply via email to