Repository: sqoop
Updated Branches:
  refs/heads/SQOOP-1367 eddd4dd03 -> fcb77b671


http://git-wip-us.apache.org/repos/asf/sqoop/blob/fcb77b67/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/SqoopMapper.java
----------------------------------------------------------------------
diff --git 
a/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/SqoopMapper.java 
b/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/SqoopMapper.java
index 1d60ba3..3065680 100644
--- a/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/SqoopMapper.java
+++ b/execution/mapreduce/src/main/java/org/apache/sqoop/job/mr/SqoopMapper.java
@@ -144,6 +144,9 @@ public class SqoopMapper extends Mapper<SqoopSplit, 
NullWritable, SqoopWritable,
 
     private void writeContent() {
       try {
+        if (LOG.isDebugEnabled()) {
+          LOG.debug("Extracted data: " + data.getTextData());
+        }
         dataOut.setString(data.getTextData());
         context.write(dataOut, NullWritable.get());
       } catch (Exception e) {

http://git-wip-us.apache.org/repos/asf/sqoop/blob/fcb77b67/execution/mapreduce/src/test/java/org/apache/sqoop/execution/mapreduce/MapreduceExecutionEngineTest.java
----------------------------------------------------------------------
diff --git 
a/execution/mapreduce/src/test/java/org/apache/sqoop/execution/mapreduce/MapreduceExecutionEngineTest.java
 
b/execution/mapreduce/src/test/java/org/apache/sqoop/execution/mapreduce/MapreduceExecutionEngineTest.java
deleted file mode 100644
index f70e9bd..0000000
--- 
a/execution/mapreduce/src/test/java/org/apache/sqoop/execution/mapreduce/MapreduceExecutionEngineTest.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sqoop.execution.mapreduce;
-
-//import org.apache.sqoop.connector.idf.CSVIntermediateDataFormat;
-//import org.apache.sqoop.framework.configuration.OutputCompression;
-//import org.apache.sqoop.framework.configuration.OutputFormat;
-
-import static junit.framework.TestCase.assertEquals;
-
-public class MapreduceExecutionEngineTest {
-
-//  @Test
-//  public void testImportCompression() throws Exception {
-//    testImportCompressionInner(OutputCompression.NONE,
-//      null, false);
-//
-//    testImportCompressionInner(OutputCompression.DEFAULT,
-//      "org.apache.hadoop.io.compress.DefaultCodec", true);
-//
-//    testImportCompressionInner(OutputCompression.GZIP,
-//      "org.apache.hadoop.io.compress.GzipCodec", true);
-//
-//    testImportCompressionInner(OutputCompression.BZIP2,
-//      "org.apache.hadoop.io.compress.BZip2Codec", true);
-//
-//    testImportCompressionInner(OutputCompression.LZO,
-//      "com.hadoop.compression.lzo.LzoCodec", true);
-//
-//    testImportCompressionInner(OutputCompression.LZ4,
-//      "org.apache.hadoop.io.compress.Lz4Codec", true);
-//
-//    testImportCompressionInner(OutputCompression.SNAPPY,
-//      "org.apache.hadoop.io.compress.SnappyCodec", true);
-//
-//    testImportCompressionInner(null,
-//      null, false);
-//  }
-//
-//  private void testImportCompressionInner(OutputCompression comprssionFormat,
-//    String expectedCodecName, boolean expectedCompressionFlag) {
-//    MapreduceExecutionEngine executionEngine = new 
MapreduceExecutionEngine();
-//    SubmissionRequest request = executionEngine.createSubmissionRequest();
-//    ImportJobConfiguration jobConf = new ImportJobConfiguration();
-//    jobConf.output.outputFormat = OutputFormat.TEXT_FILE;
-//    jobConf.output.compression = comprssionFormat;
-//    request.setFrameworkJobConfig(jobConf);
-//    request.setConnectorCallbacks(new Importer(Initializer.class,
-//      Partitioner.class, Extractor.class, Destroyer.class) {
-//    });
-//    request.setIntermediateDataFormat(CSVIntermediateDataFormat.class);
-//    executionEngine.prepareImportSubmission(request);
-//
-//    MutableMapContext context = request.getFrameworkContext();
-//    final String obtainedCodecName = context.getString(
-//      JobConstants.HADOOP_COMPRESS_CODEC);
-//    final boolean obtainedCodecFlag =
-//      context.getBoolean(JobConstants.HADOOP_COMPRESS, false);
-//    assertEquals("Unexpected codec name was returned", obtainedCodecName,
-//      expectedCodecName);
-//    assertEquals("Unexpected codec flag was returned", obtainedCodecFlag,
-//      expectedCompressionFlag);
-//  }
-//
-//  @Test
-//  public void testCustomCompression() {
-//    MapreduceExecutionEngine executionEngine = new 
MapreduceExecutionEngine();
-//    final String customCodecName = "custom.compression";
-//    SubmissionRequest request = executionEngine.createSubmissionRequest();
-//    ImportJobConfiguration jobConf = new ImportJobConfiguration();
-//    jobConf.output.outputFormat = OutputFormat.TEXT_FILE;
-//    jobConf.output.compression = OutputCompression.CUSTOM;
-//    jobConf.output.customCompression = customCodecName;
-//    request.setFrameworkJobConfig(jobConf);
-//    request.setConnectorCallbacks(new Importer(Initializer.class,
-//      Partitioner.class, Extractor.class, Destroyer.class) {
-//    });
-//    request.setIntermediateDataFormat(CSVIntermediateDataFormat.class);
-//    executionEngine.prepareImportSubmission(request);
-//
-//    MutableMapContext context = request.getFrameworkContext();
-//    final String obtainedCodecName = context.getString(
-//      JobConstants.HADOOP_COMPRESS_CODEC);
-//    final boolean obtainedCodecFlag =
-//      context.getBoolean(JobConstants.HADOOP_COMPRESS, false);
-//    assertEquals("Unexpected codec name was returned", obtainedCodecName,
-//      customCodecName);
-//    assertEquals("Unexpected codec flag was returned", obtainedCodecFlag, 
true);
-//  }
-
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/fcb77b67/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index fae9fe8..9d9b508 100644
--- a/pom.xml
+++ b/pom.xml
@@ -110,9 +110,6 @@ limitations under the License.
     <jdbc.sqlserver.version>4.0</jdbc.sqlserver.version>
     <jdbc.teradata.version>14.00.00.21</jdbc.teradata.version>
     <jdbc.netezza.version>6.0</jdbc.netezza.version>
-
-    <!-- To remove -->
-    <skipTests>true</skipTests>
   </properties>
 
   <dependencies>

http://git-wip-us.apache.org/repos/asf/sqoop/blob/fcb77b67/test/src/main/java/org/apache/sqoop/test/minicluster/TomcatSqoopMiniCluster.java
----------------------------------------------------------------------
diff --git 
a/test/src/main/java/org/apache/sqoop/test/minicluster/TomcatSqoopMiniCluster.java
 
b/test/src/main/java/org/apache/sqoop/test/minicluster/TomcatSqoopMiniCluster.java
index e2f1675..9ecc9da 100644
--- 
a/test/src/main/java/org/apache/sqoop/test/minicluster/TomcatSqoopMiniCluster.java
+++ 
b/test/src/main/java/org/apache/sqoop/test/minicluster/TomcatSqoopMiniCluster.java
@@ -25,6 +25,7 @@ import 
org.codehaus.cargo.container.configuration.LocalConfiguration;
 import org.codehaus.cargo.container.deployable.WAR;
 import org.codehaus.cargo.container.installer.Installer;
 import org.codehaus.cargo.container.installer.ZipURLInstaller;
+import org.codehaus.cargo.container.property.GeneralPropertySet;
 import org.codehaus.cargo.generic.DefaultContainerFactory;
 import org.codehaus.cargo.generic.configuration.DefaultConfigurationFactory;
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/fcb77b67/test/src/main/java/org/apache/sqoop/test/testcases/ConnectorTestCase.java
----------------------------------------------------------------------
diff --git 
a/test/src/main/java/org/apache/sqoop/test/testcases/ConnectorTestCase.java 
b/test/src/main/java/org/apache/sqoop/test/testcases/ConnectorTestCase.java
index af0f299..8a699f5 100644
--- a/test/src/main/java/org/apache/sqoop/test/testcases/ConnectorTestCase.java
+++ b/test/src/main/java/org/apache/sqoop/test/testcases/ConnectorTestCase.java
@@ -21,8 +21,8 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.log4j.Logger;
 import org.apache.sqoop.client.SubmissionCallback;
+import org.apache.sqoop.common.Direction;
 import org.apache.sqoop.connector.hdfs.configuration.OutputFormat;
-import org.apache.sqoop.connector.hdfs.configuration.StorageType;
 import org.apache.sqoop.model.MConnection;
 import org.apache.sqoop.model.MFormList;
 import org.apache.sqoop.model.MJob;
@@ -125,7 +125,7 @@ abstract public class ConnectorTestCase extends 
TomcatTestCase {
    *
    * @param connection MConnection object to fill
    */
-  protected void fillConnectionForm(MConnection connection) {
+  protected void fillRdbmsConnectionForm(MConnection connection) {
     MFormList forms = connection.getConnectorPart();
     
forms.getStringInput("connection.jdbcDriver").setValue(provider.getJdbcDriver());
     
forms.getStringInput("connection.connectionString").setValue(provider.getConnectionUrl());
@@ -138,12 +138,10 @@ abstract public class ConnectorTestCase extends 
TomcatTestCase {
    * will be set to default test value.
    *
    * @param job MJOb object to fill
-   * @param storage Storage type that should be set
    * @param output Output type that should be set
    */
-  protected void fillOutputForm(MJob job, StorageType storage, OutputFormat 
output) {
-    MFormList forms = job.getFrameworkPart();
-    forms.getEnumInput("output.storageType").setValue(storage);
+  protected void fillOutputForm(MJob job, OutputFormat output) {
+    MFormList forms = job.getConnectorPart(Direction.TO);
     forms.getEnumInput("output.outputFormat").setValue(output);
     
forms.getStringInput("output.outputDirectory").setValue(getMapreduceDirectory());
   }
@@ -154,7 +152,7 @@ abstract public class ConnectorTestCase extends 
TomcatTestCase {
    * @param job MJOb object to fill
    */
   protected void fillInputForm(MJob job) {
-    MFormList forms = job.getFrameworkPart();
+    MFormList forms = job.getConnectorPart(Direction.FROM);
     
forms.getStringInput("input.inputDirectory").setValue(getMapreduceDirectory());
   }
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/fcb77b67/test/src/main/java/org/apache/sqoop/test/utils/HdfsUtils.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/sqoop/test/utils/HdfsUtils.java 
b/test/src/main/java/org/apache/sqoop/test/utils/HdfsUtils.java
index 59c5f15..ec650e1 100644
--- a/test/src/main/java/org/apache/sqoop/test/utils/HdfsUtils.java
+++ b/test/src/main/java/org/apache/sqoop/test/utils/HdfsUtils.java
@@ -49,9 +49,7 @@ public class HdfsUtils {
   public static Path [] getOutputMapreduceFiles(FileSystem fs, String 
directory) throws FileNotFoundException, IOException {
     LinkedList<Path> files = new LinkedList<Path>();
     for (FileStatus fileStatus : fs.listStatus(new Path(directory))) {
-      if (fileStatus.getPath().getName().startsWith("part-")) {
-        files.add(fileStatus.getPath());
-      }
+      files.add(fileStatus.getPath());
     }
     return files.toArray(new Path[files.size()]);
   }

http://git-wip-us.apache.org/repos/asf/sqoop/blob/fcb77b67/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromHDFSToRDBMSTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromHDFSToRDBMSTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromHDFSToRDBMSTest.java
new file mode 100644
index 0000000..c01aa80
--- /dev/null
+++ 
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromHDFSToRDBMSTest.java
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.integration.connector.jdbc.generic;
+
+import org.apache.log4j.Logger;
+import org.apache.sqoop.common.Direction;
+import org.apache.sqoop.test.testcases.ConnectorTestCase;
+import org.apache.sqoop.model.MConnection;
+import org.apache.sqoop.model.MFormList;
+import org.apache.sqoop.model.MJob;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+/**
+ *
+ */
+public class FromHDFSToRDBMSTest extends ConnectorTestCase {
+
+  private static final Logger LOG = 
Logger.getLogger(FromHDFSToRDBMSTest.class);
+
+  @Test
+  public void testBasic() throws Exception {
+    createTableCities();
+    createInputMapreduceFile("input-0001",
+      "1,'USA','San Francisco'",
+      "2,'USA','Sunnyvale'",
+      "3,'Czech Republic','Brno'",
+      "4,'USA','Palo Alto'"
+    );
+
+    // RDBMS connection
+    MConnection rdbmsConnection = 
getClient().newConnection("generic-jdbc-connector");
+    fillRdbmsConnectionForm(rdbmsConnection);
+    createConnection(rdbmsConnection);
+
+    // HDFS connection
+    MConnection hdfsConnection = getClient().newConnection("hdfs-connector");
+    createConnection(hdfsConnection);
+
+    // Job creation
+    MJob job = getClient().newJob(hdfsConnection.getPersistenceId(), 
rdbmsConnection.getPersistenceId());
+
+    // Connector values
+    MFormList fromForms = job.getConnectorPart(Direction.FROM);
+    MFormList toForms = job.getConnectorPart(Direction.TO);
+    
toForms.getStringInput("toTable.tableName").setValue(provider.escapeTableName(getTableName()));
+    fillInputForm(job);
+    createJob(job);
+
+    runJob(job);
+
+    assertEquals(4L, rowCount());
+    assertRowInCities(1, "USA", "San Francisco");
+    assertRowInCities(2, "USA", "Sunnyvale");
+    assertRowInCities(3, "Czech Republic", "Brno");
+    assertRowInCities(4, "USA", "Palo Alto");
+
+    // Clean up testing table
+    dropTable();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/fcb77b67/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromRDBMSToHDFSTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromRDBMSToHDFSTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromRDBMSToHDFSTest.java
new file mode 100644
index 0000000..f976f29
--- /dev/null
+++ 
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromRDBMSToHDFSTest.java
@@ -0,0 +1,121 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.integration.connector.jdbc.generic;
+
+import org.apache.log4j.Logger;
+import org.apache.sqoop.common.Direction;
+import org.apache.sqoop.connector.hdfs.configuration.OutputFormat;
+import org.apache.sqoop.model.MConnection;
+import org.apache.sqoop.model.MFormList;
+import org.apache.sqoop.model.MJob;
+import org.apache.sqoop.model.MSubmission;
+import org.apache.sqoop.test.testcases.ConnectorTestCase;
+import org.junit.Test;
+
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Import simple table with various configurations.
+ */
+public class FromRDBMSToHDFSTest extends ConnectorTestCase {
+
+  private static final Logger LOG = 
Logger.getLogger(FromRDBMSToHDFSTest.class);
+
+  @Test
+  public void testBasic() throws Exception {
+    createAndLoadTableCities();
+
+    // RDBMS connection
+    MConnection rdbmsConnection = 
getClient().newConnection("generic-jdbc-connector");
+    fillRdbmsConnectionForm(rdbmsConnection);
+    createConnection(rdbmsConnection);
+
+    // HDFS connection
+    MConnection hdfsConnection = getClient().newConnection("hdfs-connector");
+    createConnection(hdfsConnection);
+
+    // Job creation
+    MJob job = getClient().newJob(rdbmsConnection.getPersistenceId(), 
hdfsConnection.getPersistenceId());
+
+    // Connector values
+    MFormList forms = job.getConnectorPart(Direction.FROM);
+    
forms.getStringInput("fromTable.tableName").setValue(provider.escapeTableName(getTableName()));
+    
forms.getStringInput("fromTable.partitionColumn").setValue(provider.escapeColumnName("id"));
+    fillOutputForm(job, OutputFormat.TEXT_FILE);
+    createJob(job);
+
+    runJob(job);
+
+    // Assert correct output
+    assertMapreduceOutput(
+      "1,'USA','San Francisco'",
+      "2,'USA','Sunnyvale'",
+      "3,'Czech Republic','Brno'",
+      "4,'USA','Palo Alto'"
+    );
+
+    // Clean up testing table
+    dropTable();
+  }
+
+  @Test
+  public void testColumns() throws Exception {
+    createAndLoadTableCities();
+
+    // RDBMS connection
+    MConnection rdbmsConnection = 
getClient().newConnection("generic-jdbc-connector");
+    fillRdbmsConnectionForm(rdbmsConnection);
+    createConnection(rdbmsConnection);
+
+    // HDFS connection
+    MConnection hdfsConnection = getClient().newConnection("hdfs-connector");
+    createConnection(hdfsConnection);
+
+    // Job creation
+    MJob job = getClient().newJob(rdbmsConnection.getPersistenceId(), 
hdfsConnection.getPersistenceId());
+
+    // Connector values
+    MFormList forms = job.getConnectorPart(Direction.FROM);
+    
forms.getStringInput("fromTable.tableName").setValue(provider.escapeTableName(getTableName()));
+    
forms.getStringInput("fromTable.partitionColumn").setValue(provider.escapeColumnName("id"));
+    
forms.getStringInput("fromTable.columns").setValue(provider.escapeColumnName("id")
 + "," + provider.escapeColumnName("country"));
+    fillOutputForm(job, OutputFormat.TEXT_FILE);
+    createJob(job);
+
+    MSubmission submission = 
getClient().startSubmission(job.getPersistenceId());
+    assertTrue(submission.getStatus().isRunning());
+
+    // Wait until the job finish - this active waiting will be removed once
+    // Sqoop client API will get blocking support.
+    do {
+      Thread.sleep(5000);
+      submission = getClient().getSubmissionStatus(job.getPersistenceId());
+    } while(submission.getStatus().isRunning());
+
+    // Assert correct output
+    assertMapreduceOutput(
+      "1,'USA'",
+      "2,'USA'",
+      "3,'Czech Republic'",
+      "4,'USA'"
+    );
+
+    // Clean up testing table
+    dropTable();
+  }
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/fcb77b67/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/PartitionerTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/PartitionerTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/PartitionerTest.java
new file mode 100644
index 0000000..fac7e8b
--- /dev/null
+++ 
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/PartitionerTest.java
@@ -0,0 +1,128 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.integration.connector.jdbc.generic;
+
+import org.apache.log4j.Logger;
+import org.apache.sqoop.common.Direction;
+import org.apache.sqoop.connector.hdfs.configuration.OutputFormat;
+import org.apache.sqoop.model.MConnection;
+import org.apache.sqoop.model.MFormList;
+import org.apache.sqoop.model.MJob;
+import org.apache.sqoop.test.testcases.ConnectorTestCase;
+import org.apache.sqoop.test.utils.ParametrizedUtils;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.Test;
+
+/**
+ *
+ */
+@RunWith(Parameterized.class)
+public class PartitionerTest extends ConnectorTestCase {
+
+  private static final Logger LOG = Logger.getLogger(PartitionerTest.class);
+
+  /**
+   * Columns that we will use as partition column with maximal number of
+   * partitions that can be created for such column.
+   */
+  public static Object[] COLUMNS = new Object [][] {
+    {"id",           13},
+    {"code_name",    13},
+    {"version",      13},
+    {"release_date", 13},
+    {"lts",           2},
+  };
+
+  /**
+   * Number of extractors that we will use to transfer the table.
+   */
+  public static Object [] EXTRACTORS = new Object[] {
+    3, 5, 10, 13,
+  };
+
+  @Parameterized.Parameters(name = "{0}-{1}-{2}")
+  public static Iterable<Object[]> data() {
+    return ParametrizedUtils.crossProduct(COLUMNS, EXTRACTORS);
+  }
+
+  private String partitionColumn;
+  private int extractors;
+  private int maxOutputFiles;
+
+  public PartitionerTest(String partitionColumn, int expectedOutputFiles, int 
extractors) {
+    this.partitionColumn = partitionColumn;
+    this.maxOutputFiles = expectedOutputFiles;
+    this.extractors = extractors;
+  }
+
+  @Test
+  public void testSplitter() throws Exception {
+    createAndLoadTableUbuntuReleases();
+
+    // RDBMS connection
+    MConnection rdbmsConnection = 
getClient().newConnection("generic-jdbc-connector");
+    fillRdbmsConnectionForm(rdbmsConnection);
+    createConnection(rdbmsConnection);
+
+    // HDFS connection
+    MConnection hdfsConnection = getClient().newConnection("hdfs-connector");
+    createConnection(hdfsConnection);
+
+    // Job creation
+    MJob job = getClient().newJob(rdbmsConnection.getPersistenceId(), 
hdfsConnection.getPersistenceId());
+
+    // Connector values
+    MFormList forms = job.getConnectorPart(Direction.FROM);
+    
forms.getStringInput("fromTable.tableName").setValue(provider.escapeTableName(getTableName()));
+    
forms.getStringInput("fromTable.partitionColumn").setValue(provider.escapeColumnName(partitionColumn));
+    fillOutputForm(job, OutputFormat.TEXT_FILE);
+    forms = job.getFrameworkPart();
+    forms.getIntegerInput("throttling.extractors").setValue(extractors);
+    createJob(job);
+
+    runJob(job);
+
+    // Assert correct output
+    assertMapreduceOutputFiles((extractors > maxOutputFiles) ? maxOutputFiles 
: extractors);
+    assertMapreduceOutput(
+      "1,'Warty Warthog',4.10,2004-10-20,false",
+      "2,'Hoary Hedgehog',5.04,2005-04-08,false",
+      "3,'Breezy Badger',5.10,2005-10-13,false",
+      "4,'Dapper Drake',6.06,2006-06-01,true",
+      "5,'Edgy Eft',6.10,2006-10-26,false",
+      "6,'Feisty Fawn',7.04,2007-04-19,false",
+      "7,'Gutsy Gibbon',7.10,2007-10-18,false",
+      "8,'Hardy Heron',8.04,2008-04-24,true",
+      "9,'Intrepid Ibex',8.10,2008-10-18,false",
+      "10,'Jaunty Jackalope',9.04,2009-04-23,false",
+      "11,'Karmic Koala',9.10,2009-10-29,false",
+      "12,'Lucid Lynx',10.04,2010-04-29,true",
+      "13,'Maverick Meerkat',10.10,2010-10-10,false",
+      "14,'Natty Narwhal',11.04,2011-04-28,false",
+      "15,'Oneiric Ocelot',11.10,2011-10-10,false",
+      "16,'Precise Pangolin',12.04,2012-04-26,true",
+      "17,'Quantal Quetzal',12.10,2012-10-18,false",
+      "18,'Raring Ringtail',13.04,2013-04-25,false",
+      "19,'Saucy Salamander',13.10,2013-10-17,false"
+    );
+
+    // Clean up testing table
+    dropTable();
+  }
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/fcb77b67/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableExportTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableExportTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableExportTest.java
deleted file mode 100644
index 39b48d8..0000000
--- 
a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableExportTest.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sqoop.integration.connector.jdbc.generic;
-
-import org.apache.log4j.Logger;
-import org.apache.sqoop.test.testcases.ConnectorTestCase;
-import org.apache.sqoop.model.MConnection;
-import org.apache.sqoop.model.MFormList;
-import org.apache.sqoop.model.MJob;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-/**
- *
- */
-public class TableExportTest extends ConnectorTestCase {
-
-//  private static final Logger LOG = Logger.getLogger(TableExportTest.class);
-//
-//  @Test
-//  public void testBasicImport() throws Exception {
-//    createTableCities();
-//    createInputMapreduceFile("input-0001",
-//      "1,'USA','San Francisco'",
-//      "2,'USA','Sunnyvale'",
-//      "3,'Czech Republic','Brno'",
-//      "4,'USA','Palo Alto'"
-//    );
-//
-//    // Connection creation
-//    MConnection connection = 
getClient().newConnection("generic-jdbc-connector");
-//    fillConnectionForm(connection);
-//    createConnection(connection);
-//
-//    // Job creation
-//    MJob job = getClient().newJob(connection.getPersistenceId(), 
MJob.Type.EXPORT);
-//
-//    // Connector values
-//    MFormList forms = job.getFromPart();
-//    
forms.getStringInput("table.tableName").setValue(provider.escapeTableName(getTableName()));
-//    fillInputForm(job);
-//    createJob(job);
-//
-//    runJob(job);
-//
-//    assertEquals(4L, rowCount());
-//    assertRowInCities(1, "USA", "San Francisco");
-//    assertRowInCities(2, "USA", "Sunnyvale");
-//    assertRowInCities(3, "Czech Republic", "Brno");
-//    assertRowInCities(4, "USA", "Palo Alto");
-//
-//    // Clean up testing table
-//    dropTable();
-//  }
-
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/fcb77b67/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableImportTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableImportTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableImportTest.java
deleted file mode 100644
index 9171b8e..0000000
--- 
a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableImportTest.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sqoop.integration.connector.jdbc.generic;
-
-import org.apache.sqoop.test.testcases.ConnectorTestCase;
-
-import static org.junit.Assert.assertTrue;
-
-/**
- * Import simple table with various configurations.
- */
-public class TableImportTest extends ConnectorTestCase {
-
-//  private static final Logger LOG = Logger.getLogger(TableImportTest.class);
-//
-//  @Test
-//  public void testBasicImport() throws Exception {
-//    createAndLoadTableCities();
-//
-//    // Connection creation
-//    MConnection connection = 
getClient().newConnection("generic-jdbc-connector");
-//    fillConnectionForm(connection);
-//    createConnection(connection);
-//
-//    // Job creation
-//    MJob job = getClient().newJob(connection.getPersistenceId(), 
MJob.Type.IMPORT);
-//
-//    // Connector values
-//    MFormList forms = job.getFromPart();
-//    
forms.getStringInput("table.tableName").setValue(provider.escapeTableName(getTableName()));
-//    
forms.getStringInput("table.partitionColumn").setValue(provider.escapeColumnName("id"));
-//    // Framework values
-//    fillOutputForm(job, StorageType.HDFS, OutputFormat.TEXT_FILE);
-//    createJob(job);
-//
-//    runJob(job);
-//
-//    // Assert correct output
-//    assertMapreduceOutput(
-//      "1,'USA','San Francisco'",
-//      "2,'USA','Sunnyvale'",
-//      "3,'Czech Republic','Brno'",
-//      "4,'USA','Palo Alto'"
-//    );
-//
-//    // Clean up testing table
-//    dropTable();
-//  }
-//
-//  @Test
-//  public void testColumns() throws Exception {
-//    createAndLoadTableCities();
-//
-//    // Connection creation
-//    MConnection connection = getClient().newConnection(1L);
-//    fillConnectionForm(connection);
-//
-//    createConnection(connection);
-//
-//    // Job creation
-//    MJob job = getClient().newJob(connection.getPersistenceId(), 
MJob.Type.IMPORT);
-//
-//    // Connector values
-//    MFormList forms = job.getFromPart();
-//    
forms.getStringInput("table.tableName").setValue(provider.escapeTableName(getTableName()));
-//    
forms.getStringInput("table.partitionColumn").setValue(provider.escapeColumnName("id"));
-//    
forms.getStringInput("table.columns").setValue(provider.escapeColumnName("id") 
+ "," + provider.escapeColumnName("country"));
-//    // Framework values
-//    fillOutputForm(job, StorageType.HDFS, OutputFormat.TEXT_FILE);
-//    createJob(job);
-//
-//    MSubmission submission = 
getClient().startSubmission(job.getPersistenceId());
-//    assertTrue(submission.getStatus().isRunning());
-//
-//    // Wait until the job finish - this active waiting will be removed once
-//    // Sqoop client API will get blocking support.
-//    do {
-//      Thread.sleep(5000);
-//      submission = getClient().getSubmissionStatus(job.getPersistenceId());
-//    } while(submission.getStatus().isRunning());
-//
-//    // Assert correct output
-//    assertMapreduceOutput(
-//      "1,'USA'",
-//      "2,'USA'",
-//      "3,'Czech Republic'",
-//      "4,'USA'"
-//    );
-//
-//    // Clean up testing table
-//    dropTable();
-//  }
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/fcb77b67/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableStagedRDBMSTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableStagedRDBMSTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableStagedRDBMSTest.java
new file mode 100644
index 0000000..cb782c7
--- /dev/null
+++ 
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableStagedRDBMSTest.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.integration.connector.jdbc.generic;
+
+import org.apache.sqoop.common.Direction;
+import org.apache.sqoop.model.MConnection;
+import org.apache.sqoop.model.MFormList;
+import org.apache.sqoop.model.MJob;
+import org.apache.sqoop.test.data.Cities;
+import org.apache.sqoop.test.testcases.ConnectorTestCase;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Tests a data transfer into an RDBMS table through an intermediate staging table.
+ */
+public class TableStagedRDBMSTest extends ConnectorTestCase {
+
+  @Test
+  public void testStagedTransfer() throws Exception {
+    final String stageTableName = "STAGE_" + getTableName();
+    createTableCities();
+    createInputMapreduceFile("input-0001",
+      "1,'USA','San Francisco'",
+      "2,'USA','Sunnyvale'",
+      "3,'Czech Republic','Brno'",
+      "4,'USA','Palo Alto'"
+    );
+    new Cities(provider, stageTableName).createTables();
+
+    // RDBMS connection
+    MConnection rdbmsConnection = 
getClient().newConnection("generic-jdbc-connector");
+    fillRdbmsConnectionForm(rdbmsConnection);
+    createConnection(rdbmsConnection);
+
+    // HDFS connection
+    MConnection hdfsConnection = getClient().newConnection("hdfs-connector");
+    createConnection(hdfsConnection);
+
+    // Job creation
+    MJob job = getClient().newJob(hdfsConnection.getPersistenceId(),
+        rdbmsConnection.getPersistenceId());
+
+    // Connector values
+    MFormList forms = job.getConnectorPart(Direction.TO);
+    forms.getStringInput("toTable.tableName").setValue(
+      provider.escapeTableName(getTableName()));
+    forms.getStringInput("toTable.stageTableName").setValue(
+      provider.escapeTableName(stageTableName));
+    fillInputForm(job);
+    createJob(job);
+
+    runJob(job);
+
+    // @TODO(Abe): Change back after SQOOP-1488
+//    assertEquals(0L, provider.rowCount(stageTableName));
+//    assertEquals(4L, rowCount());
+//    assertRowInCities(1, "USA", "San Francisco");
+//    assertRowInCities(2, "USA", "Sunnyvale");
+//    assertRowInCities(3, "Czech Republic", "Brno");
+//    assertRowInCities(4, "USA", "Palo Alto");
+    assertEquals(4L, provider.rowCount(stageTableName));
+    assertEquals(0L, rowCount());
+
+    // Clean up testing table
+    provider.dropTable(stageTableName);
+    dropTable();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/fcb77b67/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/exports/TableStagedExportTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/exports/TableStagedExportTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/exports/TableStagedExportTest.java
deleted file mode 100644
index cb028bb..0000000
--- 
a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/exports/TableStagedExportTest.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sqoop.integration.connector.jdbc.generic.exports;
-
-import org.apache.sqoop.model.MConnection;
-import org.apache.sqoop.model.MFormList;
-import org.apache.sqoop.model.MJob;
-import org.apache.sqoop.test.data.Cities;
-import org.apache.sqoop.test.testcases.ConnectorTestCase;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-
-/**
- *
- */
-public class TableStagedExportTest extends ConnectorTestCase {
-
-//  @Test
-//  public void testStagedExport() throws Exception {
-//    final String stageTableName = "STAGE_" + getTableName();
-//    createTableCities();
-//    createInputMapreduceFile("input-0001",
-//      "1,'USA','San Francisco'",
-//      "2,'USA','Sunnyvale'",
-//      "3,'Czech Republic','Brno'",
-//      "4,'USA','Palo Alto'"
-//    );
-//    new Cities(provider, stageTableName).createTables();
-//    // Connection creation
-//    MConnection connection = 
getClient().newConnection("generic-jdbc-connector");
-//    fillConnectionForm(connection);
-//    createConnection(connection);
-//
-//    // Job creation
-//    MJob job = getClient().newJob(connection.getPersistenceId(),
-//      MJob.Type.EXPORT);
-//
-//    // Connector values
-//    MFormList forms = job.getFromPart();
-//    forms.getStringInput("table.tableName").setValue(
-//      provider.escapeTableName(getTableName()));
-//    forms.getStringInput("table.stageTableName").setValue(
-//      provider.escapeTableName(stageTableName));
-//    fillInputForm(job);
-//    createJob(job);
-//
-//    runJob(job);
-//
-//    assertEquals(0L, provider.rowCount(stageTableName));
-//    assertEquals(4L, rowCount());
-//    assertRowInCities(1, "USA", "San Francisco");
-//    assertRowInCities(2, "USA", "Sunnyvale");
-//    assertRowInCities(3, "Czech Republic", "Brno");
-//    assertRowInCities(4, "USA", "Palo Alto");
-//
-//    // Clean up testing table
-//    provider.dropTable(stageTableName);
-//    dropTable();
-//  }
-
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/fcb77b67/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/imports/PartitionerTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/imports/PartitionerTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/imports/PartitionerTest.java
deleted file mode 100644
index a0a4022..0000000
--- 
a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/imports/PartitionerTest.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sqoop.integration.connector.jdbc.generic.imports;
-
-import org.apache.sqoop.test.testcases.ConnectorTestCase;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-
-/**
- *
- */
-@RunWith(Parameterized.class)
-public class PartitionerTest extends ConnectorTestCase {
-
-//  private static final Logger LOG = Logger.getLogger(PartitionerTest.class);
-//
-//  /**
-//   * Columns that we will use as partition column with maximal number of
-//   * partitions that can be created for such column.
-//   */
-//  public static Object[] COLUMNS = new Object [][] {
-//    {"id",           13},
-//    {"code_name",    13},
-//    {"version",      13},
-//    {"release_date", 13},
-//    {"lts",           2},
-//  };
-//
-//  /**
-//   * Number of extractors that we will use to transfer the table.
-//   */
-//  public static Object [] EXTRACTORS = new Object[] {
-//    3, 5, 10, 13,
-//  };
-//
-//  @Parameterized.Parameters(name = "{0}-{1}-{2}")
-//  public static Iterable<Object[]> data() {
-//    return ParametrizedUtils.crossProduct(COLUMNS, EXTRACTORS);
-//  }
-//
-//  private String partitionColumn;
-//  private int extractors;
-//  private int maxOutputFiles;
-//
-//  public PartitionerTest(String partitionColumn, int expectedOutputFiles, 
int extractors) {
-//    this.partitionColumn = partitionColumn;
-//    this.maxOutputFiles = expectedOutputFiles;
-//    this.extractors = extractors;
-//  }
-//
-//  @Test
-//  public void testSplitter() throws Exception {
-//    createAndLoadTableUbuntuReleases();
-//
-//    // Connection creation
-//    MConnection connection = 
getClient().newConnection("generic-jdbc-connector");
-//    fillConnectionForm(connection);
-//    createConnection(connection);
-//
-//    // Job creation
-//    MJob job = getClient().newJob(connection.getPersistenceId(), 
MJob.Type.IMPORT);
-//
-//    // Connector values
-//    MFormList forms = job.getFromPart();
-//    
forms.getStringInput("table.tableName").setValue(provider.escapeTableName(getTableName()));
-//    
forms.getStringInput("table.partitionColumn").setValue(provider.escapeColumnName(partitionColumn));
-//    // Framework values
-//    fillOutputForm(job, StorageType.HDFS, OutputFormat.TEXT_FILE);
-//    forms = job.getFrameworkPart();
-//    forms.getIntegerInput("throttling.extractors").setValue(extractors);
-//    createJob(job);
-//
-//    runJob(job);
-//
-//    // Assert correct output
-//    assertMapreduceOutputFiles((extractors > maxOutputFiles) ? 
maxOutputFiles : extractors);
-//    assertMapreduceOutput(
-//      "1,'Warty Warthog',4.10,2004-10-20,false",
-//      "2,'Hoary Hedgehog',5.04,2005-04-08,false",
-//      "3,'Breezy Badger',5.10,2005-10-13,false",
-//      "4,'Dapper Drake',6.06,2006-06-01,true",
-//      "5,'Edgy Eft',6.10,2006-10-26,false",
-//      "6,'Feisty Fawn',7.04,2007-04-19,false",
-//      "7,'Gutsy Gibbon',7.10,2007-10-18,false",
-//      "8,'Hardy Heron',8.04,2008-04-24,true",
-//      "9,'Intrepid Ibex',8.10,2008-10-18,false",
-//      "10,'Jaunty Jackalope',9.04,2009-04-23,false",
-//      "11,'Karmic Koala',9.10,2009-10-29,false",
-//      "12,'Lucid Lynx',10.04,2010-04-29,true",
-//      "13,'Maverick Meerkat',10.10,2010-10-10,false",
-//      "14,'Natty Narwhal',11.04,2011-04-28,false",
-//      "15,'Oneiric Ocelot',11.10,2011-10-10,false",
-//      "16,'Precise Pangolin',12.04,2012-04-26,true",
-//      "17,'Quantal Quetzal',12.10,2012-10-18,false",
-//      "18,'Raring Ringtail',13.04,2013-04-25,false",
-//      "19,'Saucy Salamander',13.10,2013-10-17,false"
-//    );
-//
-//    // Clean up testing table
-//    dropTable();
-//  }
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/fcb77b67/test/src/test/java/org/apache/sqoop/integration/server/SubmissionWithDisabledModelObjectsTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/server/SubmissionWithDisabledModelObjectsTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/server/SubmissionWithDisabledModelObjectsTest.java
index 1a7a3a8..3c01cb0 100644
--- 
a/test/src/test/java/org/apache/sqoop/integration/server/SubmissionWithDisabledModelObjectsTest.java
+++ 
b/test/src/test/java/org/apache/sqoop/integration/server/SubmissionWithDisabledModelObjectsTest.java
@@ -17,9 +17,20 @@
  */
 package org.apache.sqoop.integration.server;
 
+import org.apache.sqoop.client.ClientError;
+import org.apache.sqoop.common.Direction;
+import org.apache.sqoop.common.SqoopException;
+import org.apache.sqoop.connector.hdfs.configuration.OutputFormat;
+import org.apache.sqoop.framework.FrameworkError;
+import org.apache.sqoop.model.MConnection;
+import org.apache.sqoop.model.MFormList;
+import org.apache.sqoop.model.MJob;
 import org.apache.sqoop.test.testcases.ConnectorTestCase;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
+import org.junit.Test;
+
+import java.util.Arrays;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
@@ -33,70 +44,74 @@ import static org.junit.Assert.fail;
 @RunWith(Parameterized.class)
 public class SubmissionWithDisabledModelObjectsTest extends ConnectorTestCase {
 
-//  @Parameterized.Parameters(name = "con({0}) job({1})")
-//  public static Iterable<Object[]> data() {
-//    return Arrays.asList(new Object[][] {
-//      { true, false },
-//      { false, true },
-//      { false, false },
-//    });
-//  }
-//
-//  private boolean enabledConnection;
-//  private boolean enabledJob;
-//
-//  public SubmissionWithDisabledModelObjectsTest(boolean enabledConnection, 
boolean enabledJob) {
-//    this.enabledConnection = enabledConnection;
-//    this.enabledJob = enabledJob;
-//  }
-//
-//  @Test
-//  public void testWithDisabledObjects() throws Exception {
-//    createAndLoadTableCities();
-//
-//    // Connection creation
-//    MConnection connection = 
getClient().newConnection("generic-jdbc-connector");
-//    fillConnectionForm(connection);
-//    createConnection(connection);
-//
-//    // Job creation
-//    MJob job = getClient().newJob(connection.getPersistenceId(), 
MJob.Type.IMPORT);
-//
-//    // Connector values
-//    MFormList forms = job.getFromPart();
-//    
forms.getStringInput("table.tableName").setValue(provider.escapeTableName(getTableName()));
-//    
forms.getStringInput("table.partitionColumn").setValue(provider.escapeColumnName("id"));
-//    // Framework values
-//    fillOutputForm(job, StorageType.HDFS, OutputFormat.TEXT_FILE);
-//    createJob(job);
-//
-//    // Disable model entities as per parametrized run
-//    getClient().enableConnection(connection.getPersistenceId(), 
enabledConnection);
-//    getClient().enableJob(job.getPersistenceId(), enabledJob);
-//
-//    // Try to run the job and verify that the it was not executed
-//    try {
-//      runJob(job);
-//      fail("Expected exception as the model classes are disabled.");
-//    } catch(SqoopException ex) {
-//      // Top level exception should be CLIENT_0001
-//      assertEquals(ClientError.CLIENT_0001, ex.getErrorCode());
-//
-//      // We can directly verify the ErrorCode from SqoopException as client 
side
-//      // is not rebuilding SqoopExceptions per missing ErrorCodes. E.g. the 
cause
-//      // will be generic Throwable and not SqoopException instance.
-//      Throwable cause = ex.getCause();
-//      assertNotNull(cause);
-//
-//      if(!enabledJob) {
-//        
assertTrue(cause.getMessage().startsWith(FrameworkError.FRAMEWORK_0009.toString()));
-//      } else if(!enabledConnection) {
-//        
assertTrue(cause.getMessage().startsWith(FrameworkError.FRAMEWORK_0010.toString()));
-//      } else {
-//        fail("Unexpected expception retrieved from server " + cause);
-//      }
-//    } finally {
-//      dropTable();
-//    }
-//  }
+  @Parameterized.Parameters(name = "con({0}) job({1})")
+  public static Iterable<Object[]> data() {
+    return Arrays.asList(new Object[][]{
+        {true, false},
+        {false, true},
+        {false, false},
+    });
+  }
+
+  private boolean enabledConnection;
+  private boolean enabledJob;
+
+  public SubmissionWithDisabledModelObjectsTest(boolean enabledConnection, 
boolean enabledJob) {
+    this.enabledConnection = enabledConnection;
+    this.enabledJob = enabledJob;
+  }
+
+  @Test
+  public void testWithDisabledObjects() throws Exception {
+    createAndLoadTableCities();
+
+    // RDBMS connection
+    MConnection rdbmsConnection = 
getClient().newConnection("generic-jdbc-connector");
+    fillRdbmsConnectionForm(rdbmsConnection);
+    createConnection(rdbmsConnection);
+
+    // HDFS connection
+    MConnection hdfsConnection = getClient().newConnection("hdfs-connector");
+    createConnection(hdfsConnection);
+
+    // Job creation
+    MJob job = getClient().newJob(rdbmsConnection.getPersistenceId(), 
hdfsConnection.getPersistenceId());
+
+    // Connector values
+    MFormList forms = job.getConnectorPart(Direction.FROM);
+    
forms.getStringInput("fromTable.tableName").setValue(provider.escapeTableName(getTableName()));
+    
forms.getStringInput("fromTable.partitionColumn").setValue(provider.escapeColumnName("id"));
+    // Framework values
+    fillOutputForm(job, OutputFormat.TEXT_FILE);
+    createJob(job);
+
+    // Disable model entities as per parametrized run
+    getClient().enableConnection(rdbmsConnection.getPersistenceId(), 
enabledConnection);
+    getClient().enableJob(job.getPersistenceId(), enabledJob);
+
+    // Try to run the job and verify that it was not executed
+    try {
+      runJob(job);
+      fail("Expected exception as the model classes are disabled.");
+    } catch(SqoopException ex) {
+      // Top level exception should be CLIENT_0001
+      assertEquals(ClientError.CLIENT_0001, ex.getErrorCode());
+
+      // We can directly verify the ErrorCode from SqoopException as client 
side
+      // is not rebuilding SqoopExceptions per missing ErrorCodes. E.g. the 
cause
+      // will be generic Throwable and not SqoopException instance.
+      Throwable cause = ex.getCause();
+      assertNotNull(cause);
+
+      if(!enabledJob) {
+        
assertTrue(cause.getMessage().startsWith(FrameworkError.FRAMEWORK_0009.toString()));
+      } else if(!enabledConnection) {
+        
assertTrue(cause.getMessage().startsWith(FrameworkError.FRAMEWORK_0010.toString()));
+      } else {
+        fail("Unexpected exception retrieved from server " + cause);
+      }
+    } finally {
+      dropTable();
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/sqoop/blob/fcb77b67/test/src/test/java/org/apache/sqoop/integration/server/VersionTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/server/VersionTest.java 
b/test/src/test/java/org/apache/sqoop/integration/server/VersionTest.java
index 5ebe95f..cea24b9 100644
--- a/test/src/test/java/org/apache/sqoop/integration/server/VersionTest.java
+++ b/test/src/test/java/org/apache/sqoop/integration/server/VersionTest.java
@@ -32,14 +32,14 @@ public class VersionTest extends TomcatTestCase {
 
   @Test
   public void testVersion() {
-//    VersionRequest versionRequest = new VersionRequest();
-//    VersionBean versionBean = versionRequest.doGet(getServerUrl());
-//
-//    assertEquals(versionBean.getVersion(), VersionInfo.getVersion());
-//    assertEquals(versionBean.getDate(), VersionInfo.getDate());
-//    assertEquals(versionBean.getRevision(), VersionInfo.getRevision());
-//    assertEquals(versionBean.getUser(), VersionInfo.getUser());
-//    assertEquals(versionBean.getRevision(), VersionInfo.getRevision());
+    VersionRequest versionRequest = new VersionRequest();
+    VersionBean versionBean = versionRequest.doGet(getServerUrl());
+
+    assertEquals(versionBean.getVersion(), VersionInfo.getVersion());
+    assertEquals(versionBean.getDate(), VersionInfo.getDate());
+    assertEquals(versionBean.getRevision(), VersionInfo.getRevision());
+    assertEquals(versionBean.getUser(), VersionInfo.getUser());
+    assertEquals(versionBean.getRevision(), VersionInfo.getRevision());
   }
 
 }

Reply via email to