http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/lib/SqoopRecord.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/lib/SqoopRecord.java b/src/java/org/apache/sqoop/lib/SqoopRecord.java
index 9badc71..1ee8ac5 100644
--- a/src/java/org/apache/sqoop/lib/SqoopRecord.java
+++ b/src/java/org/apache/sqoop/lib/SqoopRecord.java
@@ -32,26 +32,26 @@ import org.apache.sqoop.mapreduce.DBWritable;
  * Interface implemented by the classes generated by sqoop's orm.ClassWriter.
  */
 public abstract class SqoopRecord implements Cloneable, DBWritable,
-    com.cloudera.sqoop.lib.FieldMappable, Writable  {
+    org.apache.sqoop.lib.FieldMappable, Writable  {
 
   public SqoopRecord() {
   }
 
 
   public abstract void parse(CharSequence s)
-      throws com.cloudera.sqoop.lib.RecordParser.ParseError;
+      throws org.apache.sqoop.lib.RecordParser.ParseError;
   public abstract void parse(Text s)
-      throws com.cloudera.sqoop.lib.RecordParser.ParseError;
+      throws org.apache.sqoop.lib.RecordParser.ParseError;
   public abstract void parse(byte [] s)
-      throws com.cloudera.sqoop.lib.RecordParser.ParseError;
+      throws org.apache.sqoop.lib.RecordParser.ParseError;
   public abstract void parse(char [] s)
-      throws com.cloudera.sqoop.lib.RecordParser.ParseError;
+      throws org.apache.sqoop.lib.RecordParser.ParseError;
   public abstract void parse(ByteBuffer s)
-      throws com.cloudera.sqoop.lib.RecordParser.ParseError;
+      throws org.apache.sqoop.lib.RecordParser.ParseError;
   public abstract void parse(CharBuffer s)
-      throws com.cloudera.sqoop.lib.RecordParser.ParseError;
+      throws org.apache.sqoop.lib.RecordParser.ParseError;
   public abstract void loadLargeObjects(
-      com.cloudera.sqoop.lib.LargeObjectLoader objLoader)
+      org.apache.sqoop.lib.LargeObjectLoader objLoader)
       throws SQLException, IOException, InterruptedException;
 
   /**
@@ -65,8 +65,7 @@ public abstract class SqoopRecord implements Cloneable, DBWritable,
   /**
    * Format output data according to the specified delimiters.
    */
-  public abstract String toString(
-      com.cloudera.sqoop.lib.DelimiterSet delimiters);
+  public abstract String toString(DelimiterSet delimiters);
 
   /**
    * Use the default delimiters, but only append an end-of-record delimiter
@@ -91,8 +90,7 @@ public abstract class SqoopRecord implements Cloneable, DBWritable,
    * use with TextOutputFormat, calling this with useRecordDelim=false may
    * make more sense.
    */
-  public String toString(
-      com.cloudera.sqoop.lib.DelimiterSet delimiters, boolean useRecordDelim) {
+  public String toString(DelimiterSet delimiters, boolean useRecordDelim) {
     if (useRecordDelim) {
       return toString(delimiters);
     } else {
@@ -123,11 +121,11 @@ public abstract class SqoopRecord implements Cloneable, DBWritable,
    * @param processor A delegate that operates on this object.
    * @throws IOException if the processor encounters an IO error when
    * operating on this object.
-   * @throws com.cloudera.sqoop.lib.ProcessingException if the FieldMapProcessor
+   * @throws org.apache.sqoop.lib.ProcessingException if the FieldMapProcessor
    * encounters a general processing error when operating on this object.
    */
-  public void delegate(com.cloudera.sqoop.lib.FieldMapProcessor processor)
-      throws IOException, com.cloudera.sqoop.lib.ProcessingException {
+  public void delegate(org.apache.sqoop.lib.FieldMapProcessor processor)
+      throws IOException, org.apache.sqoop.lib.ProcessingException {
     processor.accept(this);
   }
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/CatalogQueryManager.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/CatalogQueryManager.java b/src/java/org/apache/sqoop/manager/CatalogQueryManager.java
index 7b2ee78..03500bb 100644
--- a/src/java/org/apache/sqoop/manager/CatalogQueryManager.java
+++ b/src/java/org/apache/sqoop/manager/CatalogQueryManager.java
@@ -28,7 +28,7 @@ import java.util.List;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 import org.apache.sqoop.util.LoggingUtils;
 
 /**
@@ -36,7 +36,7 @@ import org.apache.sqoop.util.LoggingUtils;
  * (instead of metadata calls) to retrieve information.
  */
 public abstract class CatalogQueryManager
-    extends com.cloudera.sqoop.manager.GenericJdbcManager {
+    extends org.apache.sqoop.manager.GenericJdbcManager {
 
   public static final Log LOG = LogFactory.getLog(
     CatalogQueryManager.class.getName());

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/ConnManager.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/ConnManager.java b/src/java/org/apache/sqoop/manager/ConnManager.java
index 1811ce0..d88b59b 100644
--- a/src/java/org/apache/sqoop/manager/ConnManager.java
+++ b/src/java/org/apache/sqoop/manager/ConnManager.java
@@ -40,13 +40,13 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.hive.HiveTypes;
-import com.cloudera.sqoop.lib.BlobRef;
-import com.cloudera.sqoop.lib.ClobRef;
-import com.cloudera.sqoop.manager.SqlManager;
-import com.cloudera.sqoop.util.ExportException;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.hive.HiveTypes;
+import org.apache.sqoop.lib.BlobRef;
+import org.apache.sqoop.lib.ClobRef;
+import org.apache.sqoop.manager.SqlManager;
+import org.apache.sqoop.util.ExportException;
+import org.apache.sqoop.util.ImportException;
 
 /**
  * Abstract interface that manages connections to a database.
@@ -579,13 +579,13 @@ public abstract class ConnManager {
    * Perform an import of a table from the database into HDFS.
    */
   public abstract void importTable(
-          com.cloudera.sqoop.manager.ImportJobContext context)
+      org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException;
 
   /**
    * Perform an import of a free-form query from the database into HDFS.
    */
-  public void importQuery(com.cloudera.sqoop.manager.ImportJobContext context)
+  public void importQuery(org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
     throw new ImportException(
         "This database only supports table-based imports.");
@@ -649,7 +649,7 @@ public abstract class ConnManager {
    * Export data stored in HDFS into a table in a database.
    * This inserts new rows into the target table.
    */
-  public void exportTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void exportTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     throw new ExportException("This database does not support exports");
   }
@@ -658,7 +658,7 @@ public abstract class ConnManager {
    * Export data stored in HDFS into a table in a database. This calls a stored
    * procedure to insert rows into the target table.
    */
-  public void callTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void callTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     throw new ExportException("This database does not support exports "
         + "using stored procedures");
@@ -669,7 +669,7 @@ public abstract class ConnManager {
    * This updates existing rows in the target table, based on the
    * updateKeyCol specified in the context's SqoopOptions.
    */
-  public void updateTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void updateTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     throw new ExportException("This database does not support updates");
   }
@@ -679,7 +679,7 @@ public abstract class ConnManager {
    * This may update or insert rows into the target table depending on
    * whether rows already exist in the target table or not.
    */
-  public void upsertTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void upsertTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     throw new ExportException("Mixed update/insert is not supported"
         + " against the target database yet");

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/CubridManager.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/CubridManager.java b/src/java/org/apache/sqoop/manager/CubridManager.java
index 73b91d0..e27f616 100644
--- a/src/java/org/apache/sqoop/manager/CubridManager.java
+++ b/src/java/org/apache/sqoop/manager/CubridManager.java
@@ -28,18 +28,18 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.sqoop.mapreduce.cubrid.CubridUpsertOutputFormat;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.mapreduce.ExportBatchOutputFormat;
-import com.cloudera.sqoop.mapreduce.JdbcExportJob;
-import com.cloudera.sqoop.mapreduce.JdbcUpsertExportJob;
-import com.cloudera.sqoop.util.ExportException;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.mapreduce.ExportBatchOutputFormat;
+import org.apache.sqoop.mapreduce.JdbcExportJob;
+import org.apache.sqoop.mapreduce.JdbcUpsertExportJob;
+import org.apache.sqoop.util.ExportException;
+import org.apache.sqoop.util.ImportException;
 
 /**
  * Manages connections to CUBRID databases.
  */
 public class CubridManager extends
-    com.cloudera.sqoop.manager.CatalogQueryManager {
+    CatalogQueryManager {
 
   public static final Log LOG = LogFactory
       .getLog(CubridManager.class.getName());
@@ -50,7 +50,7 @@ public class CubridManager extends
 
   @Override
   public void importTable(
-      com.cloudera.sqoop.manager.ImportJobContext context)
+      org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
 
     // Then run the normal importTable() method.
@@ -61,7 +61,7 @@ public class CubridManager extends
    * Export data stored in HDFS into a table in a database.
    */
   public void exportTable(
-      com.cloudera.sqoop.manager.ExportJobContext context)
+      org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     context.setConnManager(this);
     JdbcExportJob exportJob = new JdbcExportJob(context, null, null,
@@ -75,7 +75,7 @@ public class CubridManager extends
    */
   @Override
   public void upsertTable(
-      com.cloudera.sqoop.manager.ExportJobContext context)
+      org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     context.setConnManager(this);
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/Db2Manager.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/Db2Manager.java b/src/java/org/apache/sqoop/manager/Db2Manager.java
index 7525521..7ff68ce 100644
--- a/src/java/org/apache/sqoop/manager/Db2Manager.java
+++ b/src/java/org/apache/sqoop/manager/Db2Manager.java
@@ -37,18 +37,18 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.sqoop.mapreduce.db.Db2DataDrivenDBInputFormat;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.mapreduce.ExportBatchOutputFormat;
-import com.cloudera.sqoop.mapreduce.JdbcExportJob;
-import com.cloudera.sqoop.util.ExportException;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.mapreduce.ExportBatchOutputFormat;
+import org.apache.sqoop.mapreduce.JdbcExportJob;
+import org.apache.sqoop.util.ExportException;
+import org.apache.sqoop.util.ImportException;
 import org.apache.sqoop.util.LoggingUtils;
 
 /**
  * Manages connections to DB2 databases. Requires the DB2 JDBC driver.
  */
 public class Db2Manager
-    extends com.cloudera.sqoop.manager.GenericJdbcManager {
+    extends org.apache.sqoop.manager.GenericJdbcManager {
 
   public static final Log LOG = LogFactory.getLog(
       Db2Manager.class.getName());
@@ -95,7 +95,7 @@ public class Db2Manager
    */
   @Override
   public void importTable(
-          com.cloudera.sqoop.manager.ImportJobContext context)
+      org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
     context.setConnManager(this);
     // Specify the DB2-specific DBInputFormat for import.
@@ -107,7 +107,7 @@ public class Db2Manager
    * Export data stored in HDFS into a table in a database.
    */
   @Override
-  public void exportTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void exportTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     context.setConnManager(this);
     JdbcExportJob exportJob = new JdbcExportJob(context, null, null,

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/DefaultManagerFactory.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/DefaultManagerFactory.java b/src/java/org/apache/sqoop/manager/DefaultManagerFactory.java
index 8cc9285..b075e0d 100644
--- a/src/java/org/apache/sqoop/manager/DefaultManagerFactory.java
+++ b/src/java/org/apache/sqoop/manager/DefaultManagerFactory.java
@@ -21,9 +21,8 @@ package org.apache.sqoop.manager;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.metastore.JobData;
-import com.cloudera.sqoop.manager.ConnManager;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.metastore.JobData;
 
 import static org.apache.sqoop.manager.SupportedManagers.CUBRID;
 import static org.apache.sqoop.manager.SupportedManagers.DB2;
@@ -41,7 +40,7 @@ import static org.apache.sqoop.manager.SupportedManagers.SQLSERVER;
  * shipped and enabled by default in Sqoop.
  */
 public class DefaultManagerFactory
-    extends com.cloudera.sqoop.manager.ManagerFactory {
+    extends org.apache.sqoop.manager.ManagerFactory {
 
   public static final Log LOG = LogFactory.getLog(
       DefaultManagerFactory.class.getName());

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/DirectMySQLManager.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/DirectMySQLManager.java b/src/java/org/apache/sqoop/manager/DirectMySQLManager.java
index c984a32..c3280af 100644
--- a/src/java/org/apache/sqoop/manager/DirectMySQLManager.java
+++ b/src/java/org/apache/sqoop/manager/DirectMySQLManager.java
@@ -22,18 +22,18 @@ import java.io.IOException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.mapreduce.MySQLDumpImportJob;
-import com.cloudera.sqoop.mapreduce.MySQLExportJob;
-import com.cloudera.sqoop.util.ImportException;
-import com.cloudera.sqoop.util.ExportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.mapreduce.MySQLDumpImportJob;
+import org.apache.sqoop.mapreduce.MySQLExportJob;
+import org.apache.sqoop.util.ImportException;
+import org.apache.sqoop.util.ExportException;
 
 /**
  * Manages direct connections to MySQL databases
  * so we can use mysqldump to get really fast dumps.
  */
 public class DirectMySQLManager
-    extends com.cloudera.sqoop.manager.MySQLManager {
+    extends MySQLManager {
 
   public static final Log LOG = LogFactory.getLog(
       DirectMySQLManager.class.getName());
@@ -47,7 +47,7 @@ public class DirectMySQLManager
    * the database and upload the files directly to HDFS.
    */
   @Override
-  public void importTable(com.cloudera.sqoop.manager.ImportJobContext context)
+  public void importTable(org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
 
     context.setConnManager(this);
@@ -97,14 +97,14 @@ public class DirectMySQLManager
    * back into the database.
    */
   @Override
-  public void exportTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void exportTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     context.setConnManager(this);
     MySQLExportJob exportJob = new MySQLExportJob(context);
     exportJob.runExport();
   }
 
-  public void upsertTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void upsertTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     throw new ExportException("MySQL direct connector does not support upsert"
       + " mode. Please use JDBC based connector (remove --direct parameter)");

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/DirectNetezzaManager.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/DirectNetezzaManager.java b/src/java/org/apache/sqoop/manager/DirectNetezzaManager.java
index af15824..39a14f3 100644
--- a/src/java/org/apache/sqoop/manager/DirectNetezzaManager.java
+++ b/src/java/org/apache/sqoop/manager/DirectNetezzaManager.java
@@ -37,10 +37,10 @@ import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.sqoop.mapreduce.netezza.NetezzaExternalTableExportJob;
 import org.apache.sqoop.mapreduce.netezza.NetezzaExternalTableImportJob;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.cli.RelatedOptions;
-import com.cloudera.sqoop.util.ExportException;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.cli.RelatedOptions;
+import org.apache.sqoop.util.ExportException;
+import org.apache.sqoop.util.ImportException;
 
 /**
  * Manages direct mode transfers from Netezza databases using the external table
@@ -184,7 +184,7 @@ public class DirectNetezzaManager extends NetezzaManager {
   /**
    * Export data stored in HDFS into a table in a database.
    */
-  public void exportTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void exportTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     options = context.getOptions();
     context.setConnManager(this);
@@ -214,7 +214,7 @@ public class DirectNetezzaManager extends NetezzaManager {
    * data from the database and upload the files directly to HDFS.
    */
   @Override
-  public void importTable(com.cloudera.sqoop.manager.ImportJobContext context)
+  public void importTable(org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
 
     context.setConnManager(this);

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/DirectPostgresqlManager.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/DirectPostgresqlManager.java b/src/java/org/apache/sqoop/manager/DirectPostgresqlManager.java
index 63b0704..c05e1c1 100644
--- a/src/java/org/apache/sqoop/manager/DirectPostgresqlManager.java
+++ b/src/java/org/apache/sqoop/manager/DirectPostgresqlManager.java
@@ -42,19 +42,19 @@ import org.apache.sqoop.mapreduce.postgresql.PostgreSQLCopyExportJob;
 import org.apache.sqoop.util.PostgreSQLUtils;
 import org.apache.sqoop.util.SubstitutionUtils;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.io.SplittableBufferedWriter;
-import com.cloudera.sqoop.manager.ExportJobContext;
-import com.cloudera.sqoop.util.AsyncSink;
-import com.cloudera.sqoop.util.DirectImportUtils;
-import com.cloudera.sqoop.util.ErrorableAsyncSink;
-import com.cloudera.sqoop.util.ErrorableThread;
-import com.cloudera.sqoop.util.ExportException;
-import com.cloudera.sqoop.util.Executor;
-import com.cloudera.sqoop.util.ImportException;
-import com.cloudera.sqoop.util.JdbcUrl;
-import com.cloudera.sqoop.util.LoggingAsyncSink;
-import com.cloudera.sqoop.util.PerfCounters;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.io.SplittableBufferedWriter;
+import org.apache.sqoop.manager.ExportJobContext;
+import org.apache.sqoop.util.AsyncSink;
+import org.apache.sqoop.util.DirectImportUtils;
+import org.apache.sqoop.util.ErrorableAsyncSink;
+import org.apache.sqoop.util.ErrorableThread;
+import org.apache.sqoop.util.ExportException;
+import org.apache.sqoop.util.Executor;
+import org.apache.sqoop.util.ImportException;
+import org.apache.sqoop.util.JdbcUrl;
+import org.apache.sqoop.util.LoggingAsyncSink;
+import org.apache.sqoop.util.PerfCounters;
 
 
 /**
@@ -62,7 +62,7 @@ import com.cloudera.sqoop.util.PerfCounters;
  * commands.
  */
 public class DirectPostgresqlManager
-    extends com.cloudera.sqoop.manager.PostgresqlManager {
+    extends PostgresqlManager {
 
   public static final Log LOG = LogFactory.getLog(
       DirectPostgresqlManager.class.getName());
@@ -345,7 +345,7 @@ public class DirectPostgresqlManager
    * Import the table into HDFS by using psql to pull the data out of the db
    * via COPY FILE TO STDOUT.
    */
-  public void importTable(com.cloudera.sqoop.manager.ImportJobContext context)
+  public void importTable(org.apache.sqoop.manager.ImportJobContext context)
     throws IOException, ImportException {
 
     context.setConnManager(this);

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/ExportJobContext.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/ExportJobContext.java b/src/java/org/apache/sqoop/manager/ExportJobContext.java
index 2a6f2b5..773cf74 100644
--- a/src/java/org/apache/sqoop/manager/ExportJobContext.java
+++ b/src/java/org/apache/sqoop/manager/ExportJobContext.java
@@ -21,7 +21,7 @@ package org.apache.sqoop.manager;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.sqoop.util.Jars;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 
 /**
  * A set of parameters describing an export operation; this is passed to

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/GenericJdbcManager.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/GenericJdbcManager.java b/src/java/org/apache/sqoop/manager/GenericJdbcManager.java
index f38bcc5..b88f0b8 100644
--- a/src/java/org/apache/sqoop/manager/GenericJdbcManager.java
+++ b/src/java/org/apache/sqoop/manager/GenericJdbcManager.java
@@ -29,7 +29,7 @@ import org.apache.commons.cli.ParseException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 import org.apache.sqoop.cli.RelatedOptions;
 
 /**
@@ -38,7 +38,7 @@ import org.apache.sqoop.cli.RelatedOptions;
  * class to load.
  */
 public class GenericJdbcManager
-    extends com.cloudera.sqoop.manager.SqlManager {
+    extends SqlManager {
 
   public static final Log LOG = LogFactory.getLog(
       GenericJdbcManager.class.getName());

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/HsqldbManager.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/HsqldbManager.java b/src/java/org/apache/sqoop/manager/HsqldbManager.java
index 92b7d53..1fa58ee 100644
--- a/src/java/org/apache/sqoop/manager/HsqldbManager.java
+++ b/src/java/org/apache/sqoop/manager/HsqldbManager.java
@@ -23,9 +23,9 @@ import static org.apache.sqoop.manager.JdbcDrivers.HSQLDB;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.mapreduce.AsyncSqlOutputFormat;
-import com.cloudera.sqoop.util.ExportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.mapreduce.AsyncSqlOutputFormat;
+import org.apache.sqoop.util.ExportException;
 import java.io.IOException;
 
 /**
@@ -33,7 +33,7 @@ import java.io.IOException;
  * Extends generic SQL manager.
  */
 public class HsqldbManager
-    extends com.cloudera.sqoop.manager.GenericJdbcManager {
+    extends org.apache.sqoop.manager.GenericJdbcManager {
 
   public static final Log LOG = LogFactory.getLog(
       HsqldbManager.class.getName());
@@ -83,7 +83,7 @@ public class HsqldbManager
 
   @Override
   /** {@inheritDoc} */
-  public void exportTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void exportTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     // HSQLDB does not support multi-row inserts; disable that before export.
     context.getOptions().getConf().setInt(

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/ImportJobContext.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/ImportJobContext.java b/src/java/org/apache/sqoop/manager/ImportJobContext.java
index 354cd15..4337865 100644
--- a/src/java/org/apache/sqoop/manager/ImportJobContext.java
+++ b/src/java/org/apache/sqoop/manager/ImportJobContext.java
@@ -19,8 +19,8 @@
 package org.apache.sqoop.manager;
 
 import org.apache.hadoop.mapreduce.InputFormat;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat;
+import org.apache.sqoop.SqoopOptions;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/InformationSchemaManager.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/InformationSchemaManager.java b/src/java/org/apache/sqoop/manager/InformationSchemaManager.java
index 29be2e3..d582b27 100644
--- a/src/java/org/apache/sqoop/manager/InformationSchemaManager.java
+++ b/src/java/org/apache/sqoop/manager/InformationSchemaManager.java
@@ -21,14 +21,14 @@ package org.apache.sqoop.manager;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 
 /**
  * Database manager that queries "information schema" directly
  * (instead of metadata calls) to retrieve information.
  */
 public abstract class InformationSchemaManager
-    extends com.cloudera.sqoop.manager.CatalogQueryManager {
+    extends CatalogQueryManager {
 
   public static final Log LOG = LogFactory.getLog(
     InformationSchemaManager.class.getName());

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/MainframeManager.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/MainframeManager.java b/src/java/org/apache/sqoop/manager/MainframeManager.java
index 101f3ce..a6002ef 100644
--- a/src/java/org/apache/sqoop/manager/MainframeManager.java
+++ b/src/java/org/apache/sqoop/manager/MainframeManager.java
@@ -37,14 +37,14 @@ import org.apache.sqoop.mapreduce.ImportJobBase;
 import org.apache.sqoop.mapreduce.mainframe.MainframeDatasetInputFormat;
 import org.apache.sqoop.mapreduce.mainframe.MainframeImportJob;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.util.ImportException;
 
 
 /**
  * ConnManager implementation for mainframe datasets.
  */
-public class MainframeManager extends com.cloudera.sqoop.manager.ConnManager {
+public class MainframeManager extends org.apache.sqoop.manager.ConnManager {
   public static final String DEFAULT_DATASET_COLUMN_NAME = "DEFAULT_COLUMN";
   protected SqoopOptions options;
   private static final Log LOG
@@ -63,7 +63,7 @@ public class MainframeManager extends com.cloudera.sqoop.manager.ConnManager {
    * partitioned dataset with MainframeDatasetInputFormat.
    */
   @Override
-  public void importTable(com.cloudera.sqoop.manager.ImportJobContext context)
+  public void importTable(org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
     String pdsName = context.getTableName();
     String jarFile = context.getJarFile();

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/ManagerFactory.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/ManagerFactory.java b/src/java/org/apache/sqoop/manager/ManagerFactory.java
index 23a2a10..693d3d2 100644
--- a/src/java/org/apache/sqoop/manager/ManagerFactory.java
+++ b/src/java/org/apache/sqoop/manager/ManagerFactory.java
@@ -18,9 +18,8 @@
 
 package org.apache.sqoop.manager;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.metastore.JobData;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.metastore.JobData;
 
 /**
  * Interface for factory classes for ConnManager implementations.

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/MySQLManager.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/MySQLManager.java b/src/java/org/apache/sqoop/manager/MySQLManager.java
index ba612e2..2d17707 100644
--- a/src/java/org/apache/sqoop/manager/MySQLManager.java
+++ b/src/java/org/apache/sqoop/manager/MySQLManager.java
@@ -38,10 +38,10 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.util.ImportException;
-import com.cloudera.sqoop.util.ExportException;
-import com.cloudera.sqoop.mapreduce.JdbcUpsertExportJob;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.util.ImportException;
+import org.apache.sqoop.util.ExportException;
+import org.apache.sqoop.mapreduce.JdbcUpsertExportJob;
 import org.apache.sqoop.mapreduce.mysql.MySQLUpsertOutputFormat;
 import org.apache.sqoop.util.LoggingUtils;
 
@@ -49,7 +49,7 @@ import org.apache.sqoop.util.LoggingUtils;
  * Manages connections to MySQL databases.
  */
 public class MySQLManager
-    extends com.cloudera.sqoop.manager.InformationSchemaManager {
+    extends InformationSchemaManager {
 
  public static final Log LOG = LogFactory.getLog(MySQLManager.class.getName());
 
@@ -100,7 +100,7 @@ public class MySQLManager
   }
 
   @Override
-  public void importTable(com.cloudera.sqoop.manager.ImportJobContext context)
+  public void importTable(org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
 
     // Check that we're not doing a MapReduce from localhost. If we are, point
@@ -128,7 +128,7 @@ public class MySQLManager
    * {@inheritDoc}
    */
   @Override
-  public void upsertTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void upsertTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     context.setConnManager(this);
     LOG.warn("MySQL Connector upsert functionality is using INSERT ON");

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/MySQLUtils.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/MySQLUtils.java 
b/src/java/org/apache/sqoop/manager/MySQLUtils.java
index ee22f17..b005c79 100644
--- a/src/java/org/apache/sqoop/manager/MySQLUtils.java
+++ b/src/java/org/apache/sqoop/manager/MySQLUtils.java
@@ -18,7 +18,7 @@
 
 package org.apache.sqoop.manager;
 
-import static com.cloudera.sqoop.lib.DelimiterSet.NULL_CHAR;
+import static org.apache.sqoop.lib.DelimiterSet.NULL_CHAR;
 
 import java.io.BufferedWriter;
 import java.io.File;
@@ -31,9 +31,9 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.JobConf;
 
-import com.cloudera.sqoop.config.ConfigurationConstants;
-import com.cloudera.sqoop.config.ConfigurationHelper;
-import com.cloudera.sqoop.util.DirectImportUtils;
+import org.apache.sqoop.config.ConfigurationConstants;
+import org.apache.sqoop.config.ConfigurationHelper;
+import org.apache.sqoop.util.DirectImportUtils;
 import org.apache.sqoop.lib.DelimiterSet;
 import org.apache.sqoop.mapreduce.db.DBConfiguration;
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/NetezzaManager.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/NetezzaManager.java 
b/src/java/org/apache/sqoop/manager/NetezzaManager.java
index 8c21073..5f0f13f 100644
--- a/src/java/org/apache/sqoop/manager/NetezzaManager.java
+++ b/src/java/org/apache/sqoop/manager/NetezzaManager.java
@@ -33,11 +33,11 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.sqoop.mapreduce.AsyncSqlOutputFormat;
 import org.apache.sqoop.mapreduce.netezza.NetezzaDataDrivenDBInputFormat;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.cli.RelatedOptions;
-import com.cloudera.sqoop.config.ConfigurationHelper;
-import com.cloudera.sqoop.util.ExportException;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.cli.RelatedOptions;
+import org.apache.sqoop.config.ConfigurationHelper;
+import org.apache.sqoop.util.ExportException;
+import org.apache.sqoop.util.ImportException;
 
 /**
  * Manages connections to Netezza databases.
@@ -93,7 +93,7 @@ public class NetezzaManager extends GenericJdbcManager {
   }
 
   @Override
-  public void importTable(com.cloudera.sqoop.manager.ImportJobContext context)
+  public void importTable(org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
     context.setConnManager(this);
     // The user probably should have requested --direct to invoke external
@@ -117,7 +117,7 @@ public class NetezzaManager extends GenericJdbcManager {
   }
 
   @Override
-  public void exportTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void exportTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     // The user probably should have requested --direct to invoke external
     // table option.
@@ -152,7 +152,7 @@ public class NetezzaManager extends GenericJdbcManager {
   }
 
   @Override
-  public void updateTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void updateTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     if (options.getNumMappers() > 1) {
       String msg = "Netezza update with multiple mappers can lead to "

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/OracleManager.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/OracleManager.java 
b/src/java/org/apache/sqoop/manager/OracleManager.java
index c0f5114..12613e3 100644
--- a/src/java/org/apache/sqoop/manager/OracleManager.java
+++ b/src/java/org/apache/sqoop/manager/OracleManager.java
@@ -49,22 +49,22 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.sqoop.manager.oracle.OracleUtils;
 import org.apache.sqoop.util.LoggingUtils;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.UpdateMode;
-import com.cloudera.sqoop.mapreduce.ExportBatchOutputFormat;
-import com.cloudera.sqoop.mapreduce.JdbcExportJob;
-import com.cloudera.sqoop.mapreduce.JdbcUpsertExportJob;
-import com.cloudera.sqoop.mapreduce.OracleUpsertOutputFormat;
-import com.cloudera.sqoop.mapreduce.db.OracleDataDrivenDBInputFormat;
-import com.cloudera.sqoop.util.ExportException;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.UpdateMode;
+import org.apache.sqoop.mapreduce.ExportBatchOutputFormat;
+import org.apache.sqoop.mapreduce.JdbcExportJob;
+import org.apache.sqoop.mapreduce.JdbcUpsertExportJob;
+import org.apache.sqoop.mapreduce.OracleUpsertOutputFormat;
+import org.apache.sqoop.mapreduce.db.OracleDataDrivenDBInputFormat;
+import org.apache.sqoop.util.ExportException;
+import org.apache.sqoop.util.ImportException;
 
 /**
  * Manages connections to Oracle databases.
  * Requires the Oracle JDBC driver.
  */
 public class OracleManager
-    extends com.cloudera.sqoop.manager.GenericJdbcManager {
+    extends org.apache.sqoop.manager.GenericJdbcManager {
 
   public static final Log LOG = LogFactory.getLog(
       OracleManager.class.getName());
@@ -445,7 +445,7 @@ public class OracleManager
 
   @Override
   public void importTable(
-          com.cloudera.sqoop.manager.ImportJobContext context)
+      org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
     context.setConnManager(this);
     // Specify the Oracle-specific DBInputFormat for import.
@@ -456,7 +456,7 @@ public class OracleManager
   /**
    * Export data stored in HDFS into a table in a database.
    */
-  public void exportTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void exportTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     context.setConnManager(this);
     JdbcExportJob exportJob = new JdbcExportJob(context,
@@ -468,7 +468,7 @@ public class OracleManager
   /**
    * {@inheritDoc}
    */
-  public void upsertTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void upsertTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     context.setConnManager(this);
     JdbcUpsertExportJob exportJob =

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/PGBulkloadManager.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/PGBulkloadManager.java 
b/src/java/org/apache/sqoop/manager/PGBulkloadManager.java
index 04e1443..1d50597 100644
--- a/src/java/org/apache/sqoop/manager/PGBulkloadManager.java
+++ b/src/java/org/apache/sqoop/manager/PGBulkloadManager.java
@@ -19,9 +19,8 @@
 package org.apache.sqoop.manager;
 
 import java.io.IOException;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ExportJobContext;
-import com.cloudera.sqoop.util.ExportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.util.ExportException;
 import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/PostgresqlManager.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/PostgresqlManager.java 
b/src/java/org/apache/sqoop/manager/PostgresqlManager.java
index 29f7c7c..8c810cc 100644
--- a/src/java/org/apache/sqoop/manager/PostgresqlManager.java
+++ b/src/java/org/apache/sqoop/manager/PostgresqlManager.java
@@ -31,15 +31,15 @@ import org.apache.commons.cli.ParseException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.util.ImportException;
 import org.apache.sqoop.cli.RelatedOptions;
 
 /**
  * Manages connections to Postgresql databases.
  */
 public class PostgresqlManager
-    extends com.cloudera.sqoop.manager.CatalogQueryManager {
+    extends CatalogQueryManager {
 
   public static final String SCHEMA = "schema";
 
@@ -109,7 +109,7 @@ public class PostgresqlManager
 
   @Override
   public void importTable(
-          com.cloudera.sqoop.manager.ImportJobContext context)
+      org.apache.sqoop.manager.ImportJobContext context)
         throws IOException, ImportException {
 
     // The user probably should have requested --direct to invoke pg_dump.

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/SQLServerManager.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/SQLServerManager.java 
b/src/java/org/apache/sqoop/manager/SQLServerManager.java
index cc5a1b4..d57a493 100644
--- a/src/java/org/apache/sqoop/manager/SQLServerManager.java
+++ b/src/java/org/apache/sqoop/manager/SQLServerManager.java
@@ -36,11 +36,11 @@ import 
org.apache.sqoop.mapreduce.SQLServerResilientUpdateOutputFormat;
 import org.apache.sqoop.mapreduce.db.SQLServerDBInputFormat;
 import org.apache.sqoop.mapreduce.db.SQLServerConnectionFailureHandler;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.mapreduce.JdbcExportJob;
-import com.cloudera.sqoop.mapreduce.JdbcUpdateExportJob;
-import com.cloudera.sqoop.util.ExportException;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.mapreduce.JdbcExportJob;
+import org.apache.sqoop.mapreduce.JdbcUpdateExportJob;
+import org.apache.sqoop.util.ExportException;
+import org.apache.sqoop.util.ImportException;
 
 import org.apache.sqoop.cli.RelatedOptions;
 import org.apache.sqoop.mapreduce.sqlserver.SqlServerExportBatchOutputFormat;
@@ -52,7 +52,7 @@ import 
org.apache.sqoop.mapreduce.sqlserver.SqlServerUpsertOutputFormat;
  * driver.
  */
 public class SQLServerManager
-    extends com.cloudera.sqoop.manager.InformationSchemaManager {
+    extends InformationSchemaManager {
 
   public static final String SCHEMA = "schema";
   public static final String TABLE_HINTS = "table-hints";
@@ -134,7 +134,7 @@ public class SQLServerManager
    */
   @Override
   public void importTable(
-          com.cloudera.sqoop.manager.ImportJobContext context)
+      org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
     // We're the correct connection manager
     context.setConnManager(this);
@@ -165,7 +165,7 @@ public class SQLServerManager
    * Export data stored in HDFS into a table in a database.
    */
   @Override
-  public void exportTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void exportTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     context.setConnManager(this);
 
@@ -195,7 +195,7 @@ public class SQLServerManager
    * {@inheritDoc}
    */
   public void updateTable(
-          com.cloudera.sqoop.manager.ExportJobContext context)
+      org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     if (isNonResilientOperation()) {
       super.updateTable(context);
@@ -212,7 +212,7 @@ public class SQLServerManager
   /**
    * {@inheritDoc}
    */
-  public void upsertTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void upsertTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     context.setConnManager(this);
 
@@ -389,7 +389,7 @@ public class SQLServerManager
    * SQLServerDBInputFormat which handles connection failures while
    * using free-form query importer.
    */
-  public void importQuery(com.cloudera.sqoop.manager.ImportJobContext context)
+  public void importQuery(org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
     if (!isNonResilientOperation()) {
       // Enable connection recovery only if split column is provided
@@ -408,7 +408,7 @@ public class SQLServerManager
    * SQLServerConnectionFailureHandler by default.
    */
   protected void configureConnectionRecoveryForImport(
-      com.cloudera.sqoop.manager.ImportJobContext context) {
+      org.apache.sqoop.manager.ImportJobContext context) {
 
     Configuration conf = context.getOptions().getConf();
 
@@ -430,7 +430,7 @@ public class SQLServerManager
    * using SQLServerConnectionFailureHandler by default.
    */
   protected void configureConnectionRecoveryForExport(
-      com.cloudera.sqoop.manager.ExportJobContext context) {
+      org.apache.sqoop.manager.ExportJobContext context) {
 
     Configuration conf = context.getOptions().getConf();
 
@@ -451,7 +451,7 @@ public class SQLServerManager
    * using SQLServerConnectionFailureHandler by default.
    */
   protected void configureConnectionRecoveryForUpdate(
-      com.cloudera.sqoop.manager.ExportJobContext context) {
+      org.apache.sqoop.manager.ExportJobContext context) {
 
     Configuration conf = context.getOptions().getConf();
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/SqlManager.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/SqlManager.java 
b/src/java/org/apache/sqoop/manager/SqlManager.java
index 808e330..fe997c5 100644
--- a/src/java/org/apache/sqoop/manager/SqlManager.java
+++ b/src/java/org/apache/sqoop/manager/SqlManager.java
@@ -47,17 +47,17 @@ import org.apache.sqoop.mapreduce.JdbcCallExportJob;
 import org.apache.sqoop.util.LoggingUtils;
 import org.apache.sqoop.util.SqlTypeMap;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.hbase.HBaseUtil;
-import com.cloudera.sqoop.mapreduce.DataDrivenImportJob;
-import com.cloudera.sqoop.mapreduce.HBaseImportJob;
-import com.cloudera.sqoop.mapreduce.ImportJobBase;
-import com.cloudera.sqoop.mapreduce.JdbcExportJob;
-import com.cloudera.sqoop.mapreduce.JdbcUpdateExportJob;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
-import com.cloudera.sqoop.util.ExportException;
-import com.cloudera.sqoop.util.ImportException;
-import com.cloudera.sqoop.util.ResultSetPrinter;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.hbase.HBaseUtil;
+import org.apache.sqoop.mapreduce.DataDrivenImportJob;
+import org.apache.sqoop.mapreduce.HBaseImportJob;
+import org.apache.sqoop.mapreduce.ImportJobBase;
+import org.apache.sqoop.mapreduce.JdbcExportJob;
+import org.apache.sqoop.mapreduce.JdbcUpdateExportJob;
+import org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat;
+import org.apache.sqoop.util.ExportException;
+import org.apache.sqoop.util.ImportException;
+import org.apache.sqoop.util.ResultSetPrinter;
 
 /**
  * ConnManager implementation for generic SQL-compliant database.
@@ -65,7 +65,7 @@ import com.cloudera.sqoop.util.ResultSetPrinter;
  * ConnManager implementation to actually create the connection.
  */
 public abstract class SqlManager
-    extends com.cloudera.sqoop.manager.ConnManager {
+    extends org.apache.sqoop.manager.ConnManager {
 
   public static final Log LOG = LogFactory.getLog(SqlManager.class.getName());
 
@@ -628,7 +628,7 @@ public abstract class SqlManager
    * @throws ImportException if the import is misconfigured.
    */
   protected void checkTableImportOptions(
-          com.cloudera.sqoop.manager.ImportJobContext context)
+      org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
     String tableName = context.getTableName();
     SqoopOptions opts = context.getOptions();
@@ -653,7 +653,7 @@ public abstract class SqlManager
    * Default implementation of importTable() is to launch a MapReduce job
    * via DataDrivenImportJob to read the table with DataDrivenDBInputFormat.
    */
-  public void importTable(com.cloudera.sqoop.manager.ImportJobContext context)
+  public void importTable(org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
     String tableName = context.getTableName();
     String jarFile = context.getJarFile();
@@ -697,7 +697,7 @@ public abstract class SqlManager
    * via DataDrivenImportJob to read the table with DataDrivenDBInputFormat,
    * using its free-form query importer.
    */
-  public void importQuery(com.cloudera.sqoop.manager.ImportJobContext context)
+  public void importQuery(org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
     String jarFile = context.getJarFile();
     SqoopOptions opts = context.getOptions();
@@ -924,7 +924,7 @@ public abstract class SqlManager
   /**
    * Export data stored in HDFS into a table in a database.
    */
-  public void exportTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void exportTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     context.setConnManager(this);
     JdbcExportJob exportJob = new JdbcExportJob(context);
@@ -932,7 +932,7 @@ public abstract class SqlManager
   }
 
   @Override
-  public void callTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void callTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException,
       ExportException {
     context.setConnManager(this);
@@ -958,7 +958,7 @@ public abstract class SqlManager
    * {@inheritDoc}
    */
   public void updateTable(
-          com.cloudera.sqoop.manager.ExportJobContext context)
+      org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     context.setConnManager(this);
     JdbcUpdateExportJob exportJob = new JdbcUpdateExportJob(context);

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/SupportedManagers.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/SupportedManagers.java 
b/src/java/org/apache/sqoop/manager/SupportedManagers.java
index 1b65a9a..ed9c594 100644
--- a/src/java/org/apache/sqoop/manager/SupportedManagers.java
+++ b/src/java/org/apache/sqoop/manager/SupportedManagers.java
@@ -18,7 +18,7 @@
 
 package org.apache.sqoop.manager;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/oracle/OraOopConnManager.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/oracle/OraOopConnManager.java 
b/src/java/org/apache/sqoop/manager/oracle/OraOopConnManager.java
index 2026c43..09207bb 100644
--- a/src/java/org/apache/sqoop/manager/oracle/OraOopConnManager.java
+++ b/src/java/org/apache/sqoop/manager/oracle/OraOopConnManager.java
@@ -33,14 +33,14 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.sqoop.manager.OracleManager;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ExportJobContext;
-import com.cloudera.sqoop.manager.GenericJdbcManager;
-import com.cloudera.sqoop.manager.ImportJobContext;
-import com.cloudera.sqoop.mapreduce.JdbcExportJob;
-import com.cloudera.sqoop.mapreduce.JdbcUpdateExportJob;
-import com.cloudera.sqoop.util.ExportException;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ExportJobContext;
+import org.apache.sqoop.manager.GenericJdbcManager;
+import org.apache.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.mapreduce.JdbcExportJob;
+import org.apache.sqoop.mapreduce.JdbcUpdateExportJob;
+import org.apache.sqoop.util.ExportException;
+import org.apache.sqoop.util.ImportException;
 
 /**
  * OraOop manager for high performance Oracle import / export.

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/oracle/OraOopDBInputSplit.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/oracle/OraOopDBInputSplit.java 
b/src/java/org/apache/sqoop/manager/oracle/OraOopDBInputSplit.java
index 93efa76..948bdbb 100644
--- a/src/java/org/apache/sqoop/manager/oracle/OraOopDBInputSplit.java
+++ b/src/java/org/apache/sqoop/manager/oracle/OraOopDBInputSplit.java
@@ -24,7 +24,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import org.apache.hadoop.io.Text;
-import com.cloudera.sqoop.mapreduce.db.DBInputFormat;
+import org.apache.sqoop.mapreduce.db.DBInputFormat;
 
 class OraOopDBInputSplit extends DBInputFormat.DBInputSplit {
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/oracle/OraOopDBRecordReader.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/oracle/OraOopDBRecordReader.java 
b/src/java/org/apache/sqoop/manager/oracle/OraOopDBRecordReader.java
index f7d1889..d720cb8 100644
--- a/src/java/org/apache/sqoop/manager/oracle/OraOopDBRecordReader.java
+++ b/src/java/org/apache/sqoop/manager/oracle/OraOopDBRecordReader.java
@@ -26,11 +26,11 @@ import java.util.ArrayList;
 
 import org.apache.hadoop.conf.Configuration;
 
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DBInputFormat;
-import com.cloudera.sqoop.mapreduce.db.DBInputFormat.DBInputSplit;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBRecordReader;
+import org.apache.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.mapreduce.db.DBInputFormat;
+import org.apache.sqoop.mapreduce.db.DBInputFormat.DBInputSplit;
+import org.apache.sqoop.mapreduce.db.DataDrivenDBRecordReader;
 
 import org.apache.sqoop.SqoopOptions;
 import org.apache.sqoop.manager.oracle.OraOopConstants.

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/oracle/OraOopDataDrivenDBInputFormat.java
----------------------------------------------------------------------
diff --git 
a/src/java/org/apache/sqoop/manager/oracle/OraOopDataDrivenDBInputFormat.java 
b/src/java/org/apache/sqoop/manager/oracle/OraOopDataDrivenDBInputFormat.java
index 3e88d04..c07a34c 100644
--- 
a/src/java/org/apache/sqoop/manager/oracle/OraOopDataDrivenDBInputFormat.java
+++ 
b/src/java/org/apache/sqoop/manager/oracle/OraOopDataDrivenDBInputFormat.java
@@ -32,9 +32,9 @@ import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.RecordReader;
 
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
+import org.apache.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat;
 
 /**
  * Reads data from Oracle table - data is divided between mappers based on 
ROWID

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/oracle/OraOopManagerFactory.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/oracle/OraOopManagerFactory.java 
b/src/java/org/apache/sqoop/manager/oracle/OraOopManagerFactory.java
index 1cc8a04..a0ab604 100644
--- a/src/java/org/apache/sqoop/manager/oracle/OraOopManagerFactory.java
+++ b/src/java/org/apache/sqoop/manager/oracle/OraOopManagerFactory.java
@@ -29,11 +29,11 @@ import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.IncrementalMode;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ManagerFactory;
-import com.cloudera.sqoop.metastore.JobData;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.IncrementalMode;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ManagerFactory;
+import org.apache.sqoop.metastore.JobData;
 
 import org.apache.sqoop.manager.OracleManager;
 import org.apache.sqoop.manager.oracle.OraOopOutputFormatUpdate.UpdateMode;

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatBase.java
----------------------------------------------------------------------
diff --git 
a/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatBase.java 
b/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatBase.java
index c6b7098..e8f5a26 100644
--- a/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatBase.java
+++ b/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatBase.java
@@ -33,10 +33,10 @@ import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.sqoop.SqoopOptions;
 
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.AsyncSqlOutputFormat;
-import com.cloudera.sqoop.mapreduce.ExportOutputFormat;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.mapreduce.AsyncSqlOutputFormat;
+import org.apache.sqoop.mapreduce.ExportOutputFormat;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
 
 abstract class OraOopOutputFormatBase<K extends SqoopRecord, V> extends
     ExportOutputFormat<K, V> {
@@ -199,7 +199,7 @@ abstract class OraOopOutputFormatBase<K extends 
SqoopRecord, V> extends
   }
 
   abstract class OraOopDBRecordWriterBase extends
-      ExportOutputFormat<K, V>.ExportRecordWriter<K, V> {
+      ExportOutputFormat<K, V>.ExportRecordWriter {
 
     protected OracleTable oracleTable; // <- If exporting into a partitioned
                                        // table, this table will be unique for

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatInsert.java
----------------------------------------------------------------------
diff --git 
a/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatInsert.java 
b/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatInsert.java
index 1874b9f..940e4ff 100644
--- a/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatInsert.java
+++ b/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatInsert.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.sqoop.SqoopOptions;
 
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * Insert into an Oracle table based on emitted keys.

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatUpdate.java
----------------------------------------------------------------------
diff --git 
a/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatUpdate.java 
b/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatUpdate.java
index 33bcb84..4971f38 100644
--- a/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatUpdate.java
+++ b/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatUpdate.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.lib.SqoopRecord;
 
 import org.apache.sqoop.SqoopOptions;
 import org.apache.sqoop.manager.oracle.OraOopOracleQueries.

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/manager/oracle/OraOopUtilities.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/manager/oracle/OraOopUtilities.java 
b/src/java/org/apache/sqoop/manager/oracle/OraOopUtilities.java
index e73fd68..0910e95 100644
--- a/src/java/org/apache/sqoop/manager/oracle/OraOopUtilities.java
+++ b/src/java/org/apache/sqoop/manager/oracle/OraOopUtilities.java
@@ -41,9 +41,9 @@ import org.apache.log4j.Logger;
 import org.apache.sqoop.manager.oracle.OraOopOutputFormatInsert.InsertMode;
 import org.apache.sqoop.manager.oracle.OraOopOutputFormatUpdate.UpdateMode;
 
-import com.cloudera.sqoop.Sqoop;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.mapreduce.ExportJobBase;
+import org.apache.sqoop.Sqoop;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.mapreduce.ExportJobBase;
 
 /**
  * Utilities used by OraOop.

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/mapreduce/AccumuloImportJob.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/mapreduce/AccumuloImportJob.java 
b/src/java/org/apache/sqoop/mapreduce/AccumuloImportJob.java
index cb2145f..acd3201 100644
--- a/src/java/org/apache/sqoop/mapreduce/AccumuloImportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/AccumuloImportJob.java
@@ -38,13 +38,12 @@ import org.apache.sqoop.accumulo.AccumuloConstants;
 import org.apache.sqoop.accumulo.AccumuloMutationProcessor;
 import org.apache.sqoop.accumulo.AccumuloUtil;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.lib.FieldMapProcessor;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ImportJobContext;
-import com.cloudera.sqoop.mapreduce.DataDrivenImportJob;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.lib.FieldMapProcessor;
+import org.apache.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.util.ImportException;
 
 /**
  * Runs an Accumulo import via DataDrivenDBInputFormat to the

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/mapreduce/AccumuloImportMapper.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/mapreduce/AccumuloImportMapper.java 
b/src/java/org/apache/sqoop/mapreduce/AccumuloImportMapper.java
index e196099..a27225b 100644
--- a/src/java/org/apache/sqoop/mapreduce/AccumuloImportMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/AccumuloImportMapper.java
@@ -21,8 +21,7 @@ package org.apache.sqoop.mapreduce;
 import java.io.IOException;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.NullWritable;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.AutoProgressMapper;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * Imports records by writing them to Accumulo via the DelegatingOutputFormat

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/mapreduce/AsyncSqlOutputFormat.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/mapreduce/AsyncSqlOutputFormat.java 
b/src/java/org/apache/sqoop/mapreduce/AsyncSqlOutputFormat.java
index ce11f84..422653e 100644
--- a/src/java/org/apache/sqoop/mapreduce/AsyncSqlOutputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/AsyncSqlOutputFormat.java
@@ -33,7 +33,7 @@ import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.util.StringUtils;
 
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * Abstract OutputFormat class that allows the RecordWriter to buffer

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/mapreduce/AsyncSqlRecordWriter.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/mapreduce/AsyncSqlRecordWriter.java 
b/src/java/org/apache/sqoop/mapreduce/AsyncSqlRecordWriter.java
index 15a62a6..e6e3efb 100644
--- a/src/java/org/apache/sqoop/mapreduce/AsyncSqlRecordWriter.java
+++ b/src/java/org/apache/sqoop/mapreduce/AsyncSqlRecordWriter.java
@@ -30,8 +30,8 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.sqoop.util.LoggingUtils;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * Abstract RecordWriter base class that buffers SqoopRecords to be injected

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/mapreduce/AvroImportMapper.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/mapreduce/AvroImportMapper.java 
b/src/java/org/apache/sqoop/mapreduce/AvroImportMapper.java
index 450f947..a5e5bf5 100644
--- a/src/java/org/apache/sqoop/mapreduce/AvroImportMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/AvroImportMapper.java
@@ -18,9 +18,8 @@
 
 package org.apache.sqoop.mapreduce;
 
-import com.cloudera.sqoop.lib.LargeObjectLoader;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.AutoProgressMapper;
+import org.apache.sqoop.lib.LargeObjectLoader;
+import org.apache.sqoop.lib.SqoopRecord;
 import org.apache.avro.Schema;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.avro.mapred.AvroWrapper;

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/mapreduce/DataDrivenImportJob.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/mapreduce/DataDrivenImportJob.java 
b/src/java/org/apache/sqoop/mapreduce/DataDrivenImportJob.java
index dc49282..a5962ba 100644
--- a/src/java/org/apache/sqoop/mapreduce/DataDrivenImportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/DataDrivenImportJob.java
@@ -40,15 +40,15 @@ import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
 import org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.config.ConfigurationHelper;
-import com.cloudera.sqoop.lib.LargeObjectLoader;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ImportJobContext;
-import com.cloudera.sqoop.mapreduce.ImportJobBase;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
-import com.cloudera.sqoop.orm.AvroSchemaGenerator;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.config.ConfigurationHelper;
+import org.apache.sqoop.lib.LargeObjectLoader;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.mapreduce.ImportJobBase;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat;
+import org.apache.sqoop.orm.AvroSchemaGenerator;
 import org.apache.sqoop.util.FileSystemUtil;
 import org.kitesdk.data.Datasets;
 import org.kitesdk.data.mapreduce.DatasetKeyOutputFormat;

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/mapreduce/DelegatingOutputFormat.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/mapreduce/DelegatingOutputFormat.java 
b/src/java/org/apache/sqoop/mapreduce/DelegatingOutputFormat.java
index 2dd9be2..771c8a6 100644
--- a/src/java/org/apache/sqoop/mapreduce/DelegatingOutputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/DelegatingOutputFormat.java
@@ -27,9 +27,9 @@ import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.util.ReflectionUtils;
-import com.cloudera.sqoop.lib.FieldMappable;
-import com.cloudera.sqoop.lib.FieldMapProcessor;
-import com.cloudera.sqoop.lib.ProcessingException;
+import org.apache.sqoop.lib.FieldMappable;
+import org.apache.sqoop.lib.FieldMapProcessor;
+import org.apache.sqoop.lib.ProcessingException;
 
 /**
  * OutputFormat that produces a RecordReader which instantiates

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/mapreduce/ExportBatchOutputFormat.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/mapreduce/ExportBatchOutputFormat.java 
b/src/java/org/apache/sqoop/mapreduce/ExportBatchOutputFormat.java
index bc59eb9..c56fb33 100644
--- a/src/java/org/apache/sqoop/mapreduce/ExportBatchOutputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/ExportBatchOutputFormat.java
@@ -27,8 +27,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.ExportOutputFormat;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * This class uses batch mode to execute underlying statements instead of
@@ -56,7 +55,7 @@ public class ExportBatchOutputFormat<K extends SqoopRecord, V>
    * The actual database updates are executed in a second thread.
    */
   public class ExportBatchRecordWriter<K extends SqoopRecord, V>
-    extends ExportRecordWriter<K, V> {
+    extends ExportRecordWriter {
 
     public ExportBatchRecordWriter(TaskAttemptContext context)
         throws ClassNotFoundException, SQLException {

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/mapreduce/ExportCallOutputFormat.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/mapreduce/ExportCallOutputFormat.java 
b/src/java/org/apache/sqoop/mapreduce/ExportCallOutputFormat.java
index 7dc3453..e53a846 100644
--- a/src/java/org/apache/sqoop/mapreduce/ExportCallOutputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/ExportCallOutputFormat.java
@@ -32,7 +32,7 @@ import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.sqoop.mapreduce.db.DBConfiguration;
 
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * Insert the emitted keys as records into a database table.

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/mapreduce/ExportJobBase.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/mapreduce/ExportJobBase.java 
b/src/java/org/apache/sqoop/mapreduce/ExportJobBase.java
index 9946f14..6529bd2 100644
--- a/src/java/org/apache/sqoop/mapreduce/ExportJobBase.java
+++ b/src/java/org/apache/sqoop/mapreduce/ExportJobBase.java
@@ -18,14 +18,13 @@
 
 package org.apache.sqoop.mapreduce;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.config.ConfigurationHelper;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ExportJobContext;
-import com.cloudera.sqoop.mapreduce.JobBase;
-import com.cloudera.sqoop.orm.TableClassName;
-import com.cloudera.sqoop.util.ExportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.config.ConfigurationHelper;
+import org.apache.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ExportJobContext;
+import org.apache.sqoop.orm.TableClassName;
+import org.apache.sqoop.util.ExportException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/mapreduce/ExportOutputFormat.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/mapreduce/ExportOutputFormat.java 
b/src/java/org/apache/sqoop/mapreduce/ExportOutputFormat.java
index c2e39b1..cb21e1f 100644
--- a/src/java/org/apache/sqoop/mapreduce/ExportOutputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/ExportOutputFormat.java
@@ -30,9 +30,8 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.AsyncSqlOutputFormat;
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * Insert the emitted keys as records into a database table.

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/mapreduce/GenericRecordExportMapper.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/mapreduce/GenericRecordExportMapper.java 
b/src/java/org/apache/sqoop/mapreduce/GenericRecordExportMapper.java
index b60ee42..cec373a 100644
--- a/src/java/org/apache/sqoop/mapreduce/GenericRecordExportMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/GenericRecordExportMapper.java
@@ -18,9 +18,8 @@
 
 package org.apache.sqoop.mapreduce;
 
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.AutoProgressMapper;
-import com.cloudera.sqoop.orm.ClassWriter;
+import org.apache.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.orm.ClassWriter;
 import org.apache.avro.Conversions;
 import org.apache.avro.Schema;
 import org.apache.avro.generic.GenericData;

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/mapreduce/HBaseBulkImportJob.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/mapreduce/HBaseBulkImportJob.java 
b/src/java/org/apache/sqoop/mapreduce/HBaseBulkImportJob.java
index ed89aeb..8d0c99f 100644
--- a/src/java/org/apache/sqoop/mapreduce/HBaseBulkImportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/HBaseBulkImportJob.java
@@ -40,9 +40,9 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 
-import com.cloudera.sqoop.manager.ImportJobContext;
-import com.cloudera.sqoop.util.ImportException;
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
 import com.google.common.base.Preconditions;
 
 /**

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/mapreduce/HBaseBulkImportMapper.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/mapreduce/HBaseBulkImportMapper.java 
b/src/java/org/apache/sqoop/mapreduce/HBaseBulkImportMapper.java
index 4b583dd..9eb27bd 100644
--- a/src/java/org/apache/sqoop/mapreduce/HBaseBulkImportMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/HBaseBulkImportMapper.java
@@ -33,9 +33,8 @@ import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.sqoop.hbase.PutTransformer;
 import org.apache.sqoop.hbase.ToStringPutTransformer;
 
-import com.cloudera.sqoop.lib.LargeObjectLoader;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.AutoProgressMapper;
+import org.apache.sqoop.lib.LargeObjectLoader;
+import org.apache.sqoop.lib.SqoopRecord;
 import static org.apache.sqoop.hbase.HBasePutProcessor.*;
 
 /**

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/mapreduce/HBaseImportJob.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/mapreduce/HBaseImportJob.java 
b/src/java/org/apache/sqoop/mapreduce/HBaseImportJob.java
index 5adb788..33da487 100644
--- a/src/java/org/apache/sqoop/mapreduce/HBaseImportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/HBaseImportJob.java
@@ -40,14 +40,13 @@ import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.OutputFormat;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.hbase.HBasePutProcessor;
-import com.cloudera.sqoop.lib.FieldMapProcessor;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ImportJobContext;
-import com.cloudera.sqoop.mapreduce.DataDrivenImportJob;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.hbase.HBasePutProcessor;
+import org.apache.sqoop.lib.FieldMapProcessor;
+import org.apache.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.util.ImportException;
 
 /**
  * Runs an HBase import via DataDrivenDBInputFormat to the HBasePutProcessor

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/mapreduce/HBaseImportMapper.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/mapreduce/HBaseImportMapper.java 
b/src/java/org/apache/sqoop/mapreduce/HBaseImportMapper.java
index 63e6cd3..039658d 100644
--- a/src/java/org/apache/sqoop/mapreduce/HBaseImportMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/HBaseImportMapper.java
@@ -21,8 +21,7 @@ package org.apache.sqoop.mapreduce;
 import java.io.IOException;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.NullWritable;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.AutoProgressMapper;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * Imports records by writing them to HBase via the DelegatingOutputFormat

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/mapreduce/ImportJobBase.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/mapreduce/ImportJobBase.java 
b/src/java/org/apache/sqoop/mapreduce/ImportJobBase.java
index 105917c..fb5d054 100644
--- a/src/java/org/apache/sqoop/mapreduce/ImportJobBase.java
+++ b/src/java/org/apache/sqoop/mapreduce/ImportJobBase.java
@@ -18,13 +18,12 @@
 
 package org.apache.sqoop.mapreduce;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.config.ConfigurationHelper;
-import com.cloudera.sqoop.io.CodecMap;
-import com.cloudera.sqoop.manager.ImportJobContext;
-import com.cloudera.sqoop.mapreduce.JobBase;
-import com.cloudera.sqoop.orm.TableClassName;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.config.ConfigurationHelper;
+import org.apache.sqoop.io.CodecMap;
+import org.apache.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.orm.TableClassName;
+import org.apache.sqoop.util.ImportException;
 import org.apache.avro.file.DataFileConstants;
 import org.apache.avro.mapred.AvroJob;
 import org.apache.commons.logging.Log;

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/mapreduce/JdbcCallExportJob.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/mapreduce/JdbcCallExportJob.java 
b/src/java/org/apache/sqoop/mapreduce/JdbcCallExportJob.java
index f8594c7..b7eea93 100644
--- a/src/java/org/apache/sqoop/mapreduce/JdbcCallExportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/JdbcCallExportJob.java
@@ -29,8 +29,8 @@ import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.sqoop.mapreduce.db.DBConfiguration;
 import org.apache.sqoop.mapreduce.db.DBOutputFormat;
 
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ExportJobContext;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ExportJobContext;
 import com.google.common.base.Strings;
 
 /**

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/mapreduce/JdbcExportJob.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/mapreduce/JdbcExportJob.java 
b/src/java/org/apache/sqoop/mapreduce/JdbcExportJob.java
index 6f9afaf..3719836 100644
--- a/src/java/org/apache/sqoop/mapreduce/JdbcExportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/JdbcExportJob.java
@@ -18,11 +18,10 @@
 
 package org.apache.sqoop.mapreduce;
 
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ExportJobContext;
-import com.cloudera.sqoop.mapreduce.ExportJobBase;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DBOutputFormat;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ExportJobContext;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.mapreduce.db.DBOutputFormat;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.DefaultStringifier;

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/mapreduce/JdbcUpdateExportJob.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/mapreduce/JdbcUpdateExportJob.java 
b/src/java/org/apache/sqoop/mapreduce/JdbcUpdateExportJob.java
index d13b560..86069c4 100644
--- a/src/java/org/apache/sqoop/mapreduce/JdbcUpdateExportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/JdbcUpdateExportJob.java
@@ -37,11 +37,10 @@ import org.apache.sqoop.mapreduce.ExportJobBase.FileType;
 import org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities;
 import org.kitesdk.data.mapreduce.DatasetKeyInputFormat;
 
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ExportJobContext;
-import com.cloudera.sqoop.mapreduce.ExportJobBase;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DBOutputFormat;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ExportJobContext;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.mapreduce.db.DBOutputFormat;
 import org.apache.sqoop.util.FileSystemUtil;
 
 /**

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/mapreduce/JdbcUpsertExportJob.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/mapreduce/JdbcUpsertExportJob.java 
b/src/java/org/apache/sqoop/mapreduce/JdbcUpsertExportJob.java
index 8e9d1b5..9a8c17a 100644
--- a/src/java/org/apache/sqoop/mapreduce/JdbcUpsertExportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/JdbcUpsertExportJob.java
@@ -26,11 +26,10 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.OutputFormat;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ExportJobContext;
-import com.cloudera.sqoop.mapreduce.JdbcUpdateExportJob;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DBOutputFormat;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ExportJobContext;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.mapreduce.db.DBOutputFormat;
 
 /**
  * Run an update/insert export using JDBC (JDBC-based UpsertOutputFormat).

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/mapreduce/JobBase.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/mapreduce/JobBase.java 
b/src/java/org/apache/sqoop/mapreduce/JobBase.java
index 62aa3a9..6d1e049 100644
--- a/src/java/org/apache/sqoop/mapreduce/JobBase.java
+++ b/src/java/org/apache/sqoop/mapreduce/JobBase.java
@@ -20,6 +20,7 @@ package org.apache.sqoop.mapreduce;
 
 import java.io.File;
 import java.io.IOException;
+import java.sql.SQLException;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
@@ -33,14 +34,16 @@ import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.OutputFormat;
+import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.sqoop.config.ConfigurationConstants;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.config.ConfigurationHelper;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.tool.SqoopTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
-import com.cloudera.sqoop.util.Jars;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.config.ConfigurationHelper;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.tool.SqoopTool;
+import org.apache.sqoop.util.ClassLoaderStack;
+import org.apache.sqoop.util.Jars;
+import org.apache.sqoop.validation.*;
 
 /**
  * Base class for configuring and running a MapReduce job.
@@ -436,4 +439,40 @@ public class JobBase {
     // So far, propagate only verbose flag
     configuration.setBoolean(PROPERTY_VERBOSE, options.getVerbose());
   }
+
+  protected long getRowCountFromDB(ConnManager connManager, String tableName)
+      throws SQLException {
+    return connManager.getTableRowCount(tableName);
+  }
+
+  protected long getRowCountFromHadoop(Job job)
+      throws IOException, InterruptedException {
+    return ConfigurationHelper.getNumMapOutputRecords(job);
+  }
+
+  protected void doValidate(SqoopOptions options, Configuration conf,
+                            ValidationContext validationContext)
+      throws ValidationException {
+    Validator validator = (Validator) ReflectionUtils.newInstance(
+        options.getValidatorClass(), conf);
+    ValidationThreshold threshold = (ValidationThreshold)
+        ReflectionUtils.newInstance(options.getValidationThresholdClass(),
+            conf);
+    ValidationFailureHandler failureHandler = (ValidationFailureHandler)
+        ReflectionUtils.newInstance(options.getValidationFailureHandlerClass(),
+            conf);
+
+    StringBuilder sb = new StringBuilder();
+    sb.append("Validating the integrity of the import using the "
+        + "following configuration\n");
+    sb.append("\tValidator : ").append(validator.getClass().getName())
+        .append('\n');
+    sb.append("\tThreshold Specifier : ")
+        .append(threshold.getClass().getName()).append('\n');
+    sb.append("\tFailure Handler : ")
+        .append(failureHandler.getClass().getName()).append('\n');
+    LOG.info(sb.toString());
+    validator.validate(validationContext, threshold, failureHandler);
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/mapreduce/MergeAvroMapper.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/mapreduce/MergeAvroMapper.java 
b/src/java/org/apache/sqoop/mapreduce/MergeAvroMapper.java
index a2277bf..3976c29 100644
--- a/src/java/org/apache/sqoop/mapreduce/MergeAvroMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/MergeAvroMapper.java
@@ -31,7 +31,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.sqoop.avro.AvroUtil;
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * Mapper for the merge program which operates on AVRO data files.

Reply via email to