Repository: sqoop
Updated Branches:
  refs/heads/sqoop2 ec5dfc42f -> 5297bf552


SQOOP-2321: Sqoop2: Clean up FromRDBMSToHDFS test

(Abraham Elmahrek via Jarek Jarcec Cecho)


Project: http://git-wip-us.apache.org/repos/asf/sqoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/sqoop/commit/5297bf55
Tree: http://git-wip-us.apache.org/repos/asf/sqoop/tree/5297bf55
Diff: http://git-wip-us.apache.org/repos/asf/sqoop/diff/5297bf55

Branch: refs/heads/sqoop2
Commit: 5297bf552a8d1fd0e2d36a871b197a80a1830ddd
Parents: ec5dfc4
Author: Jarek Jarcec Cecho <[email protected]>
Authored: Tue Apr 21 19:26:45 2015 -0700
Committer: Jarek Jarcec Cecho <[email protected]>
Committed: Tue Apr 21 19:26:45 2015 -0700

----------------------------------------------------------------------
 .../jdbc/generic/FromRDBMSToHDFSTest.java       | 20 ++++++++++++--------
 1 file changed, 12 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/sqoop/blob/5297bf55/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromRDBMSToHDFSTest.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromRDBMSToHDFSTest.java b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromRDBMSToHDFSTest.java
index 65da40c..52d62a6 100644
--- a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromRDBMSToHDFSTest.java
+++ b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromRDBMSToHDFSTest.java
@@ -98,13 +98,14 @@ public class FromRDBMSToHDFSTest extends ConnectorTestCase {
     MConfigList configs = job.getFromJobConfig();
     
configs.getStringInput("fromJobConfig.columns").setValue(provider.escapeColumnName("id")
 + "," + provider.escapeColumnName("name") + "," + 
provider.escapeColumnName("story"));
     fillHdfsToConfig(job, ToFormat.TEXT_FILE);
+
     saveJob(job);
 
-    MSubmission submission = getClient().startJob(job.getPersistenceId());
-    assertTrue(submission.getStatus().isRunning());
+    executeJob(job);
 
     // Wait until the job finish - this active waiting will be removed once
     // Sqoop client API will get blocking support.
+    MSubmission submission;
     do {
       Thread.sleep(5000);
       submission = getClient().getJobStatus(job.getPersistenceId());
@@ -143,13 +144,14 @@ public class FromRDBMSToHDFSTest extends ConnectorTestCase {
     MConfigList configs = job.getFromJobConfig();
     
configs.getStringInput("fromJobConfig.columns").setValue(provider.escapeColumnName("id")
 + "," + provider.escapeColumnName("country"));
     fillHdfsToConfig(job, ToFormat.TEXT_FILE);
+
     saveJob(job);
 
-    MSubmission submission = getClient().startJob(job.getPersistenceId());
-    assertTrue(submission.getStatus().isRunning());
+    executeJob(job);
 
     // Wait until the job finish - this active waiting will be removed once
     // Sqoop client API will get blocking support.
+    MSubmission submission;
     do {
       Thread.sleep(5000);
       submission = getClient().getJobStatus(job.getPersistenceId());
@@ -190,13 +192,14 @@ public class FromRDBMSToHDFSTest extends ConnectorTestCase {
         + " FROM " + provider.escapeTableName(getTableName().getTableName()) + " WHERE ${CONDITIONS}");
     configs.getStringInput("fromJobConfig.partitionColumn").setValue(provider.escapeColumnName("id"));
     fillHdfsToConfig(job, ToFormat.TEXT_FILE);
+
     saveJob(job);
 
-    MSubmission submission = getClient().startJob(job.getPersistenceId());
-    assertTrue(submission.getStatus().isRunning());
+    executeJob(job);
 
     // Wait until the job finish - this active waiting will be removed once
     // Sqoop client API will get blocking support.
+    MSubmission submission;
     do {
       Thread.sleep(5000);
       submission = getClient().getJobStatus(job.getPersistenceId());
@@ -243,13 +246,14 @@ public class FromRDBMSToHDFSTest extends ConnectorTestCase {
         "SELECT MIN(" + partitionColumn + "), MAX(" + partitionColumn + ") FROM "
             + provider.escapeTableName(getTableName().getTableName()));
     fillHdfsToConfig(job, ToFormat.TEXT_FILE);
+
     saveJob(job);
 
-    MSubmission submission = getClient().startJob(job.getPersistenceId());
-    assertTrue(submission.getStatus().isRunning());
+    executeJob(job);
 
     // Wait until the job finish - this active waiting will be removed once
     // Sqoop client API will get blocking support.
+    MSubmission submission;
     do {
       Thread.sleep(5000);
       submission = getClient().getJobStatus(job.getPersistenceId());

Reply via email to