Repository: sqoop
Updated Branches:
  refs/heads/sqoop2 7112964e7 -> aaee89dc6


SQOOP-2678: Sqoop2: Remove the id from public interface for Link

(Colin Ma via Jarek Jarcec Cecho)


Project: http://git-wip-us.apache.org/repos/asf/sqoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/sqoop/commit/aaee89dc
Tree: http://git-wip-us.apache.org/repos/asf/sqoop/tree/aaee89dc
Diff: http://git-wip-us.apache.org/repos/asf/sqoop/diff/aaee89dc

Branch: refs/heads/sqoop2
Commit: aaee89dc6c02a975ab22626e0638208793be5673
Parents: 7112964
Author: Jarek Jarcec Cecho <[email protected]>
Authored: Fri Nov 13 11:54:05 2015 -0800
Committer: Jarek Jarcec Cecho <[email protected]>
Committed: Fri Nov 13 11:54:05 2015 -0800

----------------------------------------------------------------------
 .../org/apache/sqoop/handler/HandlerUtils.java  | 51 +++++---------------
 .../apache/sqoop/handler/JobRequestHandler.java | 20 ++------
 .../sqoop/handler/LinkRequestHandler.java       | 38 +++++++--------
 .../connector/hdfs/AppendModeTest.java          |  2 +-
 .../connector/hdfs/FromHDFSToHDFSTest.java      |  2 +-
 .../connector/hdfs/HdfsIncrementalReadTest.java |  2 +-
 .../connector/hdfs/OutputDirectoryTest.java     |  6 +--
 .../connector/hive/FromRDBMSToKiteHiveTest.java |  2 +-
 .../connector/jdbc/generic/AllTypesTest.java    |  4 +-
 .../jdbc/generic/FromHDFSToRDBMSTest.java       |  2 +-
 .../jdbc/generic/FromRDBMSToHDFSTest.java       | 12 ++---
 .../jdbc/generic/IncrementalReadTest.java       |  4 +-
 .../connector/jdbc/generic/PartitionerTest.java |  2 +-
 .../jdbc/generic/TableStagedRDBMSTest.java      |  4 +-
 .../connector/kafka/FromHDFSToKafkaTest.java    |  2 +-
 .../connector/kafka/FromRDBMSToKafkaTest.java   |  2 +-
 .../connector/kite/FromRDBMSToKiteTest.java     |  2 +-
 .../connectorloading/ClasspathTest.java         |  2 +-
 .../derby/upgrade/Derby1_99_3UpgradeTest.java   |  9 ++--
 .../derby/upgrade/Derby1_99_4UpgradeTest.java   |  9 ++--
 .../derby/upgrade/Derby1_99_5UpgradeTest.java   | 11 +++--
 .../derby/upgrade/Derby1_99_6UpgradeTest.java   | 11 +++--
 .../upgrade/DerbyRepositoryUpgradeTest.java     | 19 +++++---
 .../server/InformalObjectNameTest.java          |  4 +-
 .../server/InvalidRESTCallsTest.java            |  4 +-
 .../integration/server/ShowJobInOrderTest.java  |  8 +--
 .../SubmissionWithDisabledModelObjectsTest.java |  4 +-
 27 files changed, 104 insertions(+), 134 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/server/src/main/java/org/apache/sqoop/handler/HandlerUtils.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/sqoop/handler/HandlerUtils.java 
b/server/src/main/java/org/apache/sqoop/handler/HandlerUtils.java
index c23a704..864b0be 100644
--- a/server/src/main/java/org/apache/sqoop/handler/HandlerUtils.java
+++ b/server/src/main/java/org/apache/sqoop/handler/HandlerUtils.java
@@ -37,53 +37,24 @@ public class HandlerUtils {
     return job;
   }
 
-  public static long getLinkIdFromIdentifier(String identifier) {
-    // support linkName or linkId for the api
-    // NOTE: linkId is a fallback for older sqoop clients if any, since we want
-    // to primarily use unique linkNames
-    long linkId;
+  public static MLink getLinkFromLinkName(String linkName) {
     Repository repository = RepositoryManager.getInstance().getRepository();
-    MLink link = repository.findLink(identifier);
-    if (link != null) {
-      linkId = link.getPersistenceId();
-    } else {
-      try {
-        linkId = Long.parseLong(identifier);
-      } catch (NumberFormatException ex) {
-        // this means name nor Id existed and we want to throw a user friendly
-        // message than a number format exception
-        throw new SqoopException(ServerError.SERVER_0005, "Invalid link: " + 
identifier
-            + " requested");
-      }
+    MLink link = repository.findLink(linkName);
+    if (link == null) {
+      throw new SqoopException(ServerError.SERVER_0006, "Invalid link name: " 
+ linkName
+              + " doesn't exist");
     }
-    return linkId;
+    return link;
   }
 
-  public static String getLinkNameFromIdentifier(String identifier) {
-    // support linkName or linkId for the api
-    // NOTE: linkId is a fallback for older sqoop clients if any, since we want
-    // to primarily use unique linkNames
+  public static MLink getLinkFromLinkId(Long linkId) {
     Repository repository = RepositoryManager.getInstance().getRepository();
-    MLink link = repository.findLink(identifier);
+    MLink link = repository.findLink(linkId);
     if (link == null) {
-      long linkId;
-      try {
-        linkId = Long.parseLong(identifier);
-      } catch (NumberFormatException ex) {
-        // this means name nor Id existed and we want to throw a user friendly
-        // message than a number format exception
-        throw new SqoopException(ServerError.SERVER_0005, "Invalid link: " + 
identifier
-            + " requested");
-      }
-
-      link = repository.findLink(linkId);
-      if (link == null) {
-        throw new SqoopException(ServerError.SERVER_0006, "Link: " + identifier
-            + " doesn't exist");
-      }
+      throw new SqoopException(ServerError.SERVER_0006, "Invalid link id: " + 
linkId
+              + " doesn't exist");
     }
-
-    return link.getName();
+    return link;
   }
 
   public static long getConnectorIdFromIdentifier(String identifier) {

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/server/src/main/java/org/apache/sqoop/handler/JobRequestHandler.java
----------------------------------------------------------------------
diff --git 
a/server/src/main/java/org/apache/sqoop/handler/JobRequestHandler.java 
b/server/src/main/java/org/apache/sqoop/handler/JobRequestHandler.java
index 094bf2b..2f6edc1 100644
--- a/server/src/main/java/org/apache/sqoop/handler/JobRequestHandler.java
+++ b/server/src/main/java/org/apache/sqoop/handler/JobRequestHandler.java
@@ -37,14 +37,7 @@ import org.apache.sqoop.json.JobsBean;
 import org.apache.sqoop.json.JsonBean;
 import org.apache.sqoop.json.SubmissionBean;
 import org.apache.sqoop.json.ValidationResultBean;
-import org.apache.sqoop.model.ConfigUtils;
-import org.apache.sqoop.model.MDriverConfig;
-import org.apache.sqoop.model.MFromConfig;
-import org.apache.sqoop.model.MJob;
-import org.apache.sqoop.model.MPersistableEntity;
-import org.apache.sqoop.model.MResource;
-import org.apache.sqoop.model.MSubmission;
-import org.apache.sqoop.model.MToConfig;
+import org.apache.sqoop.model.*;
 import org.apache.sqoop.repository.Repository;
 import org.apache.sqoop.repository.RepositoryManager;
 import org.apache.sqoop.request.HttpEventContext;
@@ -186,17 +179,14 @@ public class JobRequestHandler implements RequestHandler {
 
     // Job object
     MJob postedJob = jobs.get(0);
+    MLink fromLink = HandlerUtils.getLinkFromLinkId(postedJob.getFromLinkId());
+    MLink toLink = HandlerUtils.getLinkFromLinkId(postedJob.getToLinkId());
 
     // Authorization check
     if (create) {
-      AuthorizationEngine.createJob(ctx.getUserName(),
-          
HandlerUtils.getLinkNameFromIdentifier(String.valueOf(postedJob.getFromLinkId())),
-          
HandlerUtils.getLinkNameFromIdentifier(String.valueOf(postedJob.getToLinkId())));
+      AuthorizationEngine.createJob(ctx.getUserName(), fromLink.getName(), 
toLink.getName());
     } else {
-      AuthorizationEngine.updateJob(ctx.getUserName(),
-          
HandlerUtils.getLinkNameFromIdentifier(String.valueOf(postedJob.getFromLinkId())),
-          
HandlerUtils.getLinkNameFromIdentifier(String.valueOf(postedJob.getToLinkId())),
-          postedJob.getName());
+      AuthorizationEngine.updateJob(ctx.getUserName(), fromLink.getName(), 
toLink.getName(), postedJob.getName());
     }
 
     // Verify that user is not trying to spoof us

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/server/src/main/java/org/apache/sqoop/handler/LinkRequestHandler.java
----------------------------------------------------------------------
diff --git 
a/server/src/main/java/org/apache/sqoop/handler/LinkRequestHandler.java 
b/server/src/main/java/org/apache/sqoop/handler/LinkRequestHandler.java
index bd818b3..f72f0eb 100644
--- a/server/src/main/java/org/apache/sqoop/handler/LinkRequestHandler.java
+++ b/server/src/main/java/org/apache/sqoop/handler/LinkRequestHandler.java
@@ -90,15 +90,15 @@ public class LinkRequestHandler implements RequestHandler {
    */
   private JsonBean deleteLink(RequestContext ctx) {
     Repository repository = RepositoryManager.getInstance().getRepository();
-    String linkIdentifier = ctx.getLastURLElement();
-    // support linkName or linkId for the api
-    String linkName = HandlerUtils.getLinkNameFromIdentifier(linkIdentifier);
+    String linkName = ctx.getLastURLElement();
+    // make sure the link exists; otherwise, an exception will be thrown
+    MLink link = HandlerUtils.getLinkFromLinkName(linkName);
 
     // Authorization check
-    AuthorizationEngine.deleteLink(ctx.getUserName(), linkName);
+    AuthorizationEngine.deleteLink(ctx.getUserName(), link.getName());
 
     AuditLoggerManager.getInstance().logAuditEvent(ctx.getUserName(),
-        ctx.getRequest().getRemoteAddr(), "delete", "link", linkIdentifier);
+        ctx.getRequest().getRemoteAddr(), "delete", "link", link.getName());
 
     repository.deleteLink(linkName);
     MResource resource = new MResource(linkName, MResource.TYPE.LINK);
@@ -142,7 +142,7 @@ public class LinkRequestHandler implements RequestHandler {
     } else {
       AuthorizationEngine.updateLink(ctx.getUserName(),
           
HandlerUtils.getConnectorNameFromIdentifier(String.valueOf(postedLink.getConnectorId())),
-          
HandlerUtils.getLinkNameFromIdentifier(String.valueOf(postedLink.getPersistenceId())));
+          postedLink.getName());
     }
 
     MLinkConfig linkConfig = ConnectorManager.getInstance()
@@ -152,11 +152,9 @@ public class LinkRequestHandler implements RequestHandler {
     }
     // if update get the link id from the request URI
     if (!create) {
-      String linkIdentifier = ctx.getLastURLElement();
-      // support linkName or linkId for the api
-      String linkName = HandlerUtils.getLinkNameFromIdentifier(linkIdentifier);
+      String linkName = ctx.getLastURLElement();
+      MLink existingLink = repository.findLink(linkName);
       if (postedLink.getPersistenceId() == 
MPersistableEntity.PERSISTANCE_ID_DEFAULT) {
-        MLink existingLink = repository.findLink(linkName);
         postedLink.setPersistenceId(existingLink.getPersistenceId());
       }
     }
@@ -194,15 +192,15 @@ public class LinkRequestHandler implements RequestHandler 
{
   }
 
   private JsonBean getLinks(RequestContext ctx) {
-    String identifier = ctx.getLastURLElement();
+    String linkName = ctx.getLastURLElement();
     LinkBean linkBean;
     List<MLink> links;
     Locale locale = ctx.getAcceptLanguageHeader();
     Repository repository = RepositoryManager.getInstance().getRepository();
 
-    AuditLoggerManager.getInstance().logAuditEvent(ctx.getUserName(), 
ctx.getRequest().getRemoteAddr(), "get", "link", identifier);
+    AuditLoggerManager.getInstance().logAuditEvent(ctx.getUserName(), 
ctx.getRequest().getRemoteAddr(), "get", "link", linkName);
 
-    if(identifier.equals("all")) { // Return all links (by perhaps only for 
given connector)
+    if(linkName.equals("all")) { // Return all links (but perhaps only for 
given connector)
       String connectorName = ctx.getParameterValue(CONNECTOR_NAME_QUERY_PARAM);
 
       if(StringUtils.isEmpty(connectorName)) {
@@ -214,16 +212,16 @@ public class LinkRequestHandler implements RequestHandler 
{
         links = repository.findLinksForConnector(connectorName);
       }
     } else { // Return one specific link with name or id stored in identifier
-      String linkName = HandlerUtils.getLinkNameFromIdentifier(identifier);
+      MLink link = HandlerUtils.getLinkFromLinkName(linkName);
       links = new LinkedList<>();
-      links.add(repository.findLink(linkName));
+      links.add(link);
     }
 
     // Authorization check
     links = AuthorizationEngine.filterResource(ctx.getUserName(), 
MResource.TYPE.LINK, links);
 
     // Return bean entity (we have to separate what we're returning here)
-    if(identifier.equals("all")) {
+    if(linkName.equals("all")) {
       linkBean = createLinksBean(links, locale);
     } else {
       linkBean = createLinkBean(links, locale);
@@ -257,13 +255,13 @@ public class LinkRequestHandler implements RequestHandler 
{
   private JsonBean enableLink(RequestContext ctx, boolean enabled) {
     Repository repository = RepositoryManager.getInstance().getRepository();
     String[] elements = ctx.getUrlElements();
-    String linkIdentifier = elements[elements.length - 2];
-    String linkName = HandlerUtils.getLinkNameFromIdentifier(linkIdentifier);
+    String linkName = elements[elements.length - 2];
+    MLink link = HandlerUtils.getLinkFromLinkName(linkName);
 
     // Authorization check
-    AuthorizationEngine.enableDisableLink(ctx.getUserName(), linkName);
+    AuthorizationEngine.enableDisableLink(ctx.getUserName(), link.getName());
 
-    repository.enableLink(linkName, enabled);
+    repository.enableLink(link.getName(), enabled);
     return JsonBean.EMPTY_BEAN;
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/test/src/test/java/org/apache/sqoop/integration/connector/hdfs/AppendModeTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/connector/hdfs/AppendModeTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/connector/hdfs/AppendModeTest.java
index 5063a2b..8c65898 100644
--- 
a/test/src/test/java/org/apache/sqoop/integration/connector/hdfs/AppendModeTest.java
+++ 
b/test/src/test/java/org/apache/sqoop/integration/connector/hdfs/AppendModeTest.java
@@ -44,7 +44,7 @@ public class AppendModeTest extends ConnectorTestCase {
     saveLink(hdfsConnection);
 
     // Job creation
-    MJob job = getClient().createJob(rdbmsConnection.getPersistenceId(), 
hdfsConnection.getPersistenceId());
+    MJob job = getClient().createJob(rdbmsConnection.getName(), 
hdfsConnection.getName());
 
     // Set rdbms "FROM" config
     fillRdbmsFromConfig(job, "id");

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/test/src/test/java/org/apache/sqoop/integration/connector/hdfs/FromHDFSToHDFSTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/connector/hdfs/FromHDFSToHDFSTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/connector/hdfs/FromHDFSToHDFSTest.java
index 4b2fa06..c39c8d6 100644
--- 
a/test/src/test/java/org/apache/sqoop/integration/connector/hdfs/FromHDFSToHDFSTest.java
+++ 
b/test/src/test/java/org/apache/sqoop/integration/connector/hdfs/FromHDFSToHDFSTest.java
@@ -51,7 +51,7 @@ public class FromHDFSToHDFSTest extends ConnectorTestCase {
     fillHdfsLink(hdfsLinkTo);
     saveLink(hdfsLinkTo);
 
-    MJob job = getClient().createJob(hdfsLinkFrom.getPersistenceId(), 
hdfsLinkTo.getPersistenceId());
+    MJob job = getClient().createJob(hdfsLinkFrom.getName(), 
hdfsLinkTo.getName());
 
     fillHdfsFromConfig(job);
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/test/src/test/java/org/apache/sqoop/integration/connector/hdfs/HdfsIncrementalReadTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/connector/hdfs/HdfsIncrementalReadTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/connector/hdfs/HdfsIncrementalReadTest.java
index a32a563..e6f6e0d 100644
--- 
a/test/src/test/java/org/apache/sqoop/integration/connector/hdfs/HdfsIncrementalReadTest.java
+++ 
b/test/src/test/java/org/apache/sqoop/integration/connector/hdfs/HdfsIncrementalReadTest.java
@@ -56,7 +56,7 @@ public class HdfsIncrementalReadTest extends 
ConnectorTestCase {
     saveLink(hdfsLink);
 
     // Job creation
-    MJob job = getClient().createJob(hdfsLink.getPersistenceId(), 
rdbmsLink.getPersistenceId());
+    MJob job = getClient().createJob(hdfsLink.getName(), rdbmsLink.getName());
     fillHdfsFromConfig(job);
     
job.getFromJobConfig().getEnumInput("incremental.incrementalType").setValue(IncrementalType.NEW_FILES);
     fillRdbmsToConfig(job);

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/test/src/test/java/org/apache/sqoop/integration/connector/hdfs/OutputDirectoryTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/connector/hdfs/OutputDirectoryTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/connector/hdfs/OutputDirectoryTest.java
index d712e46..1790f96 100644
--- 
a/test/src/test/java/org/apache/sqoop/integration/connector/hdfs/OutputDirectoryTest.java
+++ 
b/test/src/test/java/org/apache/sqoop/integration/connector/hdfs/OutputDirectoryTest.java
@@ -54,7 +54,7 @@ public class OutputDirectoryTest extends ConnectorTestCase {
     saveLink(hdfsConnection);
 
     // Job creation
-    MJob job = getClient().createJob(rdbmsConnection.getPersistenceId(), 
hdfsConnection.getPersistenceId());
+    MJob job = getClient().createJob(rdbmsConnection.getName(), 
hdfsConnection.getName());
 
     // Set rdbms "FROM" config
     fillRdbmsFromConfig(job, "id");
@@ -90,7 +90,7 @@ public class OutputDirectoryTest extends ConnectorTestCase {
     saveLink(hdfsConnection);
 
     // Job creation
-    MJob job = getClient().createJob(rdbmsConnection.getPersistenceId(), 
hdfsConnection.getPersistenceId());
+    MJob job = getClient().createJob(rdbmsConnection.getName(), 
hdfsConnection.getName());
 
     // Set rdbms "FROM" config
     fillRdbmsFromConfig(job, "id");
@@ -125,7 +125,7 @@ public class OutputDirectoryTest extends ConnectorTestCase {
     saveLink(hdfsConnection);
 
     // Job creation
-    MJob job = getClient().createJob(rdbmsConnection.getPersistenceId(), 
hdfsConnection.getPersistenceId());
+    MJob job = getClient().createJob(rdbmsConnection.getName(), 
hdfsConnection.getName());
 
     // Set rdbms "FROM" config
     fillRdbmsFromConfig(job, "id");

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/test/src/test/java/org/apache/sqoop/integration/connector/hive/FromRDBMSToKiteHiveTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/connector/hive/FromRDBMSToKiteHiveTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/connector/hive/FromRDBMSToKiteHiveTest.java
index bb68339..0e46bf3 100644
--- 
a/test/src/test/java/org/apache/sqoop/integration/connector/hive/FromRDBMSToKiteHiveTest.java
+++ 
b/test/src/test/java/org/apache/sqoop/integration/connector/hive/FromRDBMSToKiteHiveTest.java
@@ -106,7 +106,7 @@ public class FromRDBMSToKiteHiveTest extends 
HiveConnectorTestCase implements IT
   @Test
   public void testCities() throws Exception {
     // Job creation
-    MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), 
kiteLink.getPersistenceId());
+    MJob job = getClient().createJob(rdbmsLink.getName(), kiteLink.getName());
 
     // Set rdbms "FROM" config
     fillRdbmsFromConfig(job, "id");

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/AllTypesTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/AllTypesTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/AllTypesTest.java
index 0c8f57a..5053b56 100644
--- 
a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/AllTypesTest.java
+++ 
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/AllTypesTest.java
@@ -94,7 +94,7 @@ public class AllTypesTest extends ConnectorTestCase 
implements ITest {
     saveLink(hdfsConnection);
 
     // Job creation
-    MJob job = getClient().createJob(rdbmsConnection.getPersistenceId(), 
hdfsConnection.getPersistenceId());
+    MJob job = getClient().createJob(rdbmsConnection.getName(), 
hdfsConnection.getName());
 
     // Fill rdbms "FROM" config
     fillRdbmsFromConfig(job, "id");
@@ -139,7 +139,7 @@ public class AllTypesTest extends ConnectorTestCase 
implements ITest {
     saveLink(hdfsLink);
 
     // Job creation
-    MJob job = getClient().createJob(hdfsLink.getPersistenceId(), 
rdbmsLink.getPersistenceId());
+    MJob job = getClient().createJob(hdfsLink.getName(), rdbmsLink.getName());
     fillHdfsFromConfig(job);
 
     // Set the rdbms "TO" config here

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromHDFSToRDBMSTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromHDFSToRDBMSTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromHDFSToRDBMSTest.java
index 225b98d..25cdb68 100644
--- 
a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromHDFSToRDBMSTest.java
+++ 
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromHDFSToRDBMSTest.java
@@ -64,7 +64,7 @@ public class FromHDFSToRDBMSTest extends ConnectorTestCase {
     saveLink(hdfsLink);
 
     // Job creation
-    MJob job = getClient().createJob(hdfsLink.getPersistenceId(), 
rdbmsLink.getPersistenceId());
+    MJob job = getClient().createJob(hdfsLink.getName(), rdbmsLink.getName());
 
     // set hdfs "FROM" config for the job, since the connector test case base 
class only has utilities for hdfs!
     fillHdfsFromConfig(job);

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromRDBMSToHDFSTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromRDBMSToHDFSTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromRDBMSToHDFSTest.java
index cc5124b..686572a 100644
--- 
a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromRDBMSToHDFSTest.java
+++ 
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromRDBMSToHDFSTest.java
@@ -47,7 +47,7 @@ public class FromRDBMSToHDFSTest extends ConnectorTestCase {
     saveLink(hdfsConnection);
 
     // Job creation
-    MJob job = getClient().createJob(rdbmsConnection.getPersistenceId(), 
hdfsConnection.getPersistenceId());
+    MJob job = getClient().createJob(rdbmsConnection.getName(), 
hdfsConnection.getName());
 
     // Set rdbms "FROM" config
     fillRdbmsFromConfig(job, "id");
@@ -89,7 +89,7 @@ public class FromRDBMSToHDFSTest extends ConnectorTestCase {
     saveLink(hdfsLink);
 
     // Job creation
-    MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), 
hdfsLink.getPersistenceId());
+    MJob job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());
 
     // Connector values
     fillRdbmsFromConfig(job, "id");
@@ -131,7 +131,7 @@ public class FromRDBMSToHDFSTest extends ConnectorTestCase {
     saveLink(hdfsLink);
 
     // Job creation
-    MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), 
hdfsLink.getPersistenceId());
+    MJob job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());
 
     // Connector values
     fillRdbmsFromConfig(job, "id");
@@ -173,7 +173,7 @@ public class FromRDBMSToHDFSTest extends ConnectorTestCase {
     saveLink(hdfsLink);
 
     // Job creation
-    MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), 
hdfsLink.getPersistenceId());
+    MJob job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());
 
     // Connector values
     MConfigList configs = job.getFromJobConfig();
@@ -213,7 +213,7 @@ public class FromRDBMSToHDFSTest extends ConnectorTestCase {
     saveLink(hdfsLink);
 
     // Job creation
-    MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), 
hdfsLink.getPersistenceId());
+    MJob job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());
 
     // Connector values
     String partitionColumn = 
provider.escapeTableName(getTableName().getTableName()) + "." + 
provider.escapeColumnName("id");
@@ -263,7 +263,7 @@ public class FromRDBMSToHDFSTest extends ConnectorTestCase {
     saveLink(hdfsConnection);
 
     // Job creation
-    MJob job = getClient().createJob(rdbmsConnection.getPersistenceId(), 
hdfsConnection.getPersistenceId());
+    MJob job = getClient().createJob(rdbmsConnection.getName(), 
hdfsConnection.getName());
 
     // Set rdbms "FROM" config
     fillRdbmsFromConfig(job, "id");

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/IncrementalReadTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/IncrementalReadTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/IncrementalReadTest.java
index d29e4cf..38ebb74 100644
--- 
a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/IncrementalReadTest.java
+++ 
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/IncrementalReadTest.java
@@ -90,7 +90,7 @@ public class IncrementalReadTest extends ConnectorTestCase 
implements ITest {
     saveLink(hdfsLink);
 
     // Job creation
-    MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), 
hdfsLink.getPersistenceId());
+    MJob job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());
 
     // Set the rdbms "FROM" config
     fillRdbmsFromConfig(job, "id");
@@ -142,7 +142,7 @@ public class IncrementalReadTest extends ConnectorTestCase 
implements ITest {
     saveLink(hdfsLink);
 
     // Job creation
-    MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), 
hdfsLink.getPersistenceId());
+    MJob job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());
 
     String query = "SELECT * FROM " + 
provider.escapeTableName(getTableName().getTableName()) + " WHERE 
${CONDITIONS}";
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/PartitionerTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/PartitionerTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/PartitionerTest.java
index a1e2a41..72728fe 100644
--- 
a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/PartitionerTest.java
+++ 
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/PartitionerTest.java
@@ -92,7 +92,7 @@ public class PartitionerTest extends ConnectorTestCase 
implements ITest {
     saveLink(hdfsLink);
 
     // Job creation
-    MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), 
hdfsLink.getPersistenceId());
+    MJob job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());
 
     // set the rdbms "FROM" config
     fillRdbmsFromConfig(job, partitionColumn);

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableStagedRDBMSTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableStagedRDBMSTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableStagedRDBMSTest.java
index 0008cac..68dc65e 100644
--- 
a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableStagedRDBMSTest.java
+++ 
b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableStagedRDBMSTest.java
@@ -56,8 +56,8 @@ public class TableStagedRDBMSTest extends ConnectorTestCase {
     saveLink(hdfsLink);
 
     // Job creation
-    MJob job = getClient().createJob(hdfsLink.getPersistenceId(),
-        rdbmsLink.getPersistenceId());
+    MJob job = getClient().createJob(hdfsLink.getName(),
+        rdbmsLink.getName());
 
     // fill HDFS "FROM" config
     fillHdfsFromConfig(job);

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/test/src/test/java/org/apache/sqoop/integration/connector/kafka/FromHDFSToKafkaTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/connector/kafka/FromHDFSToKafkaTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/connector/kafka/FromHDFSToKafkaTest.java
index 9ec4e8f..aa062fb 100644
--- 
a/test/src/test/java/org/apache/sqoop/integration/connector/kafka/FromHDFSToKafkaTest.java
+++ 
b/test/src/test/java/org/apache/sqoop/integration/connector/kafka/FromHDFSToKafkaTest.java
@@ -49,7 +49,7 @@ public class FromHDFSToKafkaTest extends 
KafkaConnectorTestCase {
     saveLink(hdfsLink);
 
     // Job creation
-    MJob job = getClient().createJob(hdfsLink.getPersistenceId(), 
kafkaLink.getPersistenceId());
+    MJob job = getClient().createJob(hdfsLink.getName(), kafkaLink.getName());
 
     // Job connector configs
     fillHdfsFromConfig(job);

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/test/src/test/java/org/apache/sqoop/integration/connector/kafka/FromRDBMSToKafkaTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/connector/kafka/FromRDBMSToKafkaTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/connector/kafka/FromRDBMSToKafkaTest.java
index dc1a80f..6e78a13 100644
--- 
a/test/src/test/java/org/apache/sqoop/integration/connector/kafka/FromRDBMSToKafkaTest.java
+++ 
b/test/src/test/java/org/apache/sqoop/integration/connector/kafka/FromRDBMSToKafkaTest.java
@@ -52,7 +52,7 @@ public class FromRDBMSToKafkaTest extends 
KafkaConnectorTestCase {
     saveLink(rdbmsLink);
 
     // Job creation
-    MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), 
kafkaLink.getPersistenceId());
+    MJob job = getClient().createJob(rdbmsLink.getName(), kafkaLink.getName());
 
     // set rdbms "FROM" job config
     fillRdbmsFromConfig(job, "id");

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/test/src/test/java/org/apache/sqoop/integration/connector/kite/FromRDBMSToKiteTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/connector/kite/FromRDBMSToKiteTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/connector/kite/FromRDBMSToKiteTest.java
index 4b49683..7b2aced 100644
--- 
a/test/src/test/java/org/apache/sqoop/integration/connector/kite/FromRDBMSToKiteTest.java
+++ 
b/test/src/test/java/org/apache/sqoop/integration/connector/kite/FromRDBMSToKiteTest.java
@@ -67,7 +67,7 @@ public class FromRDBMSToKiteTest extends ConnectorTestCase {
     saveLink(kiteLink);
 
     // Job creation
-    MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), 
kiteLink.getPersistenceId());
+    MJob job = getClient().createJob(rdbmsLink.getName(), kiteLink.getName());
 
     // Set rdbms "FROM" config
     fillRdbmsFromConfig(job, "id");

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/test/src/test/java/org/apache/sqoop/integration/connectorloading/ClasspathTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/connectorloading/ClasspathTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/connectorloading/ClasspathTest.java
index 9d8460d..4a2e7a4 100644
--- 
a/test/src/test/java/org/apache/sqoop/integration/connectorloading/ClasspathTest.java
+++ 
b/test/src/test/java/org/apache/sqoop/integration/connectorloading/ClasspathTest.java
@@ -175,7 +175,7 @@ public class ClasspathTest extends ConnectorTestCase {
     MLink testConnection = getClient().createLink("test-connector");
     saveLink(testConnection);
 
-    MJob job = getClient().createJob(rdbmsConnection.getPersistenceId(), testConnection.getPersistenceId());
+    MJob job = getClient().createJob(rdbmsConnection.getName(), testConnection.getName());
 
     fillRdbmsFromConfig(job, "id");
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/Derby1_99_3UpgradeTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/Derby1_99_3UpgradeTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/Derby1_99_3UpgradeTest.java
index ab2b950..8d02e24 100644
--- 
a/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/Derby1_99_3UpgradeTest.java
+++ 
b/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/Derby1_99_3UpgradeTest.java
@@ -69,8 +69,8 @@ public class Derby1_99_3UpgradeTest extends 
DerbyRepositoryUpgradeTest {
   }
 
   @Override
-  public Integer[] getDisabledLinkIds() {
-    return new Integer[] {4};
+  public String[] getDisabledLinkNames() {
+    return new String[] {linkIdToNameMap.get(4L)};
   }
 
   @Override
@@ -79,8 +79,9 @@ public class Derby1_99_3UpgradeTest extends 
DerbyRepositoryUpgradeTest {
   }
 
   @Override
-  public Integer[] getDeleteLinkIds() {
-    return new Integer[] {1, 2, 3, 4, 5};
+  public String[] getDeleteLinkNames() {
+    return new String[] {linkIdToNameMap.get(1L), linkIdToNameMap.get(2L),
+            linkIdToNameMap.get(3L), linkIdToNameMap.get(4L), linkIdToNameMap.get(5L)};
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/Derby1_99_4UpgradeTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/Derby1_99_4UpgradeTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/Derby1_99_4UpgradeTest.java
index c86888f..b88940a 100644
--- 
a/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/Derby1_99_4UpgradeTest.java
+++ 
b/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/Derby1_99_4UpgradeTest.java
@@ -71,8 +71,8 @@ public class Derby1_99_4UpgradeTest extends 
DerbyRepositoryUpgradeTest {
   }
 
   @Override
-  public Integer[] getDisabledLinkIds() {
-    return new Integer[] {4, 5};
+  public String[] getDisabledLinkNames() {
+    return new String[] {linkIdToNameMap.get(4L), linkIdToNameMap.get(5L)};
   }
 
   @Override
@@ -81,7 +81,8 @@ public class Derby1_99_4UpgradeTest extends 
DerbyRepositoryUpgradeTest {
   }
 
   @Override
-  public Integer[] getDeleteLinkIds() {
-    return new Integer[] {1, 2, 3, 4, 5, 6};
+  public String[] getDeleteLinkNames() {
+    return new String[] {linkIdToNameMap.get(1L), linkIdToNameMap.get(2L),
+            linkIdToNameMap.get(3L), linkIdToNameMap.get(4L), linkIdToNameMap.get(5L), linkIdToNameMap.get(6L)};
   }
 }

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/Derby1_99_5UpgradeTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/Derby1_99_5UpgradeTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/Derby1_99_5UpgradeTest.java
index 8e6435a..1f3563d 100644
--- 
a/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/Derby1_99_5UpgradeTest.java
+++ 
b/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/Derby1_99_5UpgradeTest.java
@@ -80,8 +80,8 @@ public class Derby1_99_5UpgradeTest extends 
DerbyRepositoryUpgradeTest {
   }
 
   @Override
-  public Integer[] getDisabledLinkIds() {
-    return new Integer[] {4, 5};
+  public String[] getDisabledLinkNames() {
+    return new String[] {linkIdToNameMap.get(4L), linkIdToNameMap.get(5L)};
   }
 
   @Override
@@ -90,8 +90,9 @@ public class Derby1_99_5UpgradeTest extends 
DerbyRepositoryUpgradeTest {
   }
 
   @Override
-  public Integer[] getDeleteLinkIds() {
-    return new Integer[] {1, 2, 3, 4, 5, 6};
+  public String[] getDeleteLinkNames() {
+    return new String[] {linkIdToNameMap.get(1L), linkIdToNameMap.get(2L),
+            linkIdToNameMap.get(3L), linkIdToNameMap.get(4L), linkIdToNameMap.get(5L), linkIdToNameMap.get(6L)};
   }
 
   @Test
@@ -101,7 +102,7 @@ public class Derby1_99_5UpgradeTest extends 
DerbyRepositoryUpgradeTest {
       assertNotNull(job.getName());
     }
 
-    MJob job = getClient().createJob(1, 1);
+    MJob job = getClient().createJob(linkIdToNameMap.get(1L), linkIdToNameMap.get(1L));
     assertNull(job.getName());
     assertEquals(getClient().saveJob(job), Status.ERROR);
   }

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/Derby1_99_6UpgradeTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/Derby1_99_6UpgradeTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/Derby1_99_6UpgradeTest.java
index 258b79a..a57a420 100644
--- 
a/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/Derby1_99_6UpgradeTest.java
+++ 
b/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/Derby1_99_6UpgradeTest.java
@@ -81,8 +81,8 @@ public class Derby1_99_6UpgradeTest extends 
DerbyRepositoryUpgradeTest {
   }
 
   @Override
-  public Integer[] getDisabledLinkIds() {
-    return new Integer[] {4, 5};
+  public String[] getDisabledLinkNames() {
+    return new String[] {linkIdToNameMap.get(4L), linkIdToNameMap.get(5L)};
   }
 
   @Override
@@ -91,8 +91,9 @@ public class Derby1_99_6UpgradeTest extends 
DerbyRepositoryUpgradeTest {
   }
 
   @Override
-  public Integer[] getDeleteLinkIds() {
-    return new Integer[] {1, 2, 3, 4, 5, 6};
+  public String[] getDeleteLinkNames() {
+    return new String[] {linkIdToNameMap.get(1L), linkIdToNameMap.get(2L),
+            linkIdToNameMap.get(3L), linkIdToNameMap.get(4L), linkIdToNameMap.get(5L), linkIdToNameMap.get(6L)};
   }
 
   @Test
@@ -102,7 +103,7 @@ public class Derby1_99_6UpgradeTest extends 
DerbyRepositoryUpgradeTest {
       assertNotNull(job.getName());
     }
 
-    MJob job = getClient().createJob(1, 1);
+    MJob job = getClient().createJob(linkIdToNameMap.get(1L), linkIdToNameMap.get(1L));
     assertNull(job.getName());
     assertEquals(getClient().saveJob(job), Status.ERROR);
   }

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/DerbyRepositoryUpgradeTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/DerbyRepositoryUpgradeTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/DerbyRepositoryUpgradeTest.java
index 6f19113..cbc243c 100644
--- 
a/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/DerbyRepositoryUpgradeTest.java
+++ 
b/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/DerbyRepositoryUpgradeTest.java
@@ -20,6 +20,7 @@ package org.apache.sqoop.integration.repository.derby.upgrade;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.sqoop.client.SqoopClient;
 import org.apache.sqoop.model.MJob;
+import org.apache.sqoop.model.MLink;
 import org.apache.sqoop.test.minicluster.JettySqoopMiniCluster;
 import org.apache.sqoop.test.testcases.JettyTestCase;
 import org.apache.sqoop.test.utils.CompressionUtils;
@@ -56,6 +57,7 @@ public abstract class DerbyRepositoryUpgradeTest extends 
JettyTestCase {
 
  private static final Logger LOG = Logger.getLogger(DerbyRepositoryUpgradeTest.class);
   protected Map<Long, String> jobIdToNameMap;
+  protected Map<Long, String> linkIdToNameMap;
 
   /**
   * Custom Sqoop mini cluster that points derby repository to real on-disk structures.
@@ -109,7 +111,7 @@ public abstract class DerbyRepositoryUpgradeTest extends 
JettyTestCase {
   /**
    * List of link ids that should be disabled
    */
-  public abstract Integer[] getDisabledLinkIds();
+  public abstract String[] getDisabledLinkNames();
 
   /**
    * List of job ids that should be disabled
@@ -119,7 +121,7 @@ public abstract class DerbyRepositoryUpgradeTest extends 
JettyTestCase {
   /**
    * List of link ids that we should delete using the id
    */
-  public abstract Integer[] getDeleteLinkIds();
+  public abstract String[] getDeleteLinkNames();
 
   public String getRepositoryPath() {
     return HdfsUtils.joinPathFragments(getTemporaryJettyPath(), "repo");
@@ -159,6 +161,11 @@ public abstract class DerbyRepositoryUpgradeTest extends 
JettyTestCase {
     for(MJob job : getClient().getJobs()) {
       jobIdToNameMap.put(job.getPersistenceId(), job.getName());
     }
+
+    linkIdToNameMap = new HashMap<Long, String>();
+    for(MLink link : getClient().getLinks()) {
+      linkIdToNameMap.put(link.getPersistenceId(), link.getName());
+    }
   }
 
   @AfterMethod
@@ -185,8 +192,8 @@ public abstract class DerbyRepositoryUpgradeTest extends 
JettyTestCase {
     }
 
     // Verify that disabled status is preserved
-    for(Integer id : getDisabledLinkIds()) {
-      assertFalse(getClient().getLink(id).getEnabled());
+    for(String linkName : getDisabledLinkNames()) {
+      assertFalse(getClient().getLink(linkName).getEnabled());
     }
     for(String name : getDisabledJobNames()) {
       assertFalse(getClient().getJob(name).getEnabled());
@@ -196,8 +203,8 @@ public abstract class DerbyRepositoryUpgradeTest extends 
JettyTestCase {
     for(String name : jobIdToNameMap.values()) {
       getClient().deleteJob(name);
     }
-    for(Integer id : getDeleteLinkIds()) {
-      getClient().deleteLink(id);
+    for(String linkName : getDeleteLinkNames()) {
+      getClient().deleteLink(linkName);
     }
 
     // We should end up with empty repository

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/test/src/test/java/org/apache/sqoop/integration/server/InformalObjectNameTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/server/InformalObjectNameTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/server/InformalObjectNameTest.java
index 811315a..16480c3 100644
--- 
a/test/src/test/java/org/apache/sqoop/integration/server/InformalObjectNameTest.java
+++ 
b/test/src/test/java/org/apache/sqoop/integration/server/InformalObjectNameTest.java
@@ -69,7 +69,7 @@ public class InformalObjectNameTest extends SqoopTestCase {
     saveLink(hdfsLink);
 
     // Job creation
-    MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), hdfsLink.getPersistenceId());
+    MJob job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());
 
     // rdms "FROM" config
     fillRdbmsFromConfig(job, "id");
@@ -81,7 +81,7 @@ public class InformalObjectNameTest extends SqoopTestCase {
     saveJob(job);
     assertEquals(job, getClient().getJob(JOB_NAME_CONTAINS_WHITESPACE));
 
-    job = getClient().createJob(rdbmsLink.getPersistenceId(), hdfsLink.getPersistenceId());
+    job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());
 
     // rdms "FROM" config
     fillRdbmsFromConfig(job, "id");

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/test/src/test/java/org/apache/sqoop/integration/server/InvalidRESTCallsTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/server/InvalidRESTCallsTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/server/InvalidRESTCallsTest.java
index 614895d..d083d37 100644
--- 
a/test/src/test/java/org/apache/sqoop/integration/server/InvalidRESTCallsTest.java
+++ 
b/test/src/test/java/org/apache/sqoop/integration/server/InvalidRESTCallsTest.java
@@ -216,8 +216,8 @@ public class InvalidRESTCallsTest extends SqoopTestCase {
       @Override
       void validate() throws Exception {
         assertResponseCode(500);
-        assertServerException("org.apache.sqoop.server.common.ServerError", 
"SERVER_0005");
-        assertContains("Invalid link: i-dont-exists");
+        assertServerException("org.apache.sqoop.server.common.ServerError", 
"SERVER_0006");
+        assertContains("Invalid link name: i-dont-exists");
       }}),
     new TestDescription("Get links for non existing connector", 
"v1/link/all?cname=i-dont-exists", "GET", null, new Validator() {
       @Override

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/test/src/test/java/org/apache/sqoop/integration/server/ShowJobInOrderTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/server/ShowJobInOrderTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/server/ShowJobInOrderTest.java
index 6c0622c..cbf1e90 100644
--- 
a/test/src/test/java/org/apache/sqoop/integration/server/ShowJobInOrderTest.java
+++ 
b/test/src/test/java/org/apache/sqoop/integration/server/ShowJobInOrderTest.java
@@ -55,7 +55,7 @@ public class ShowJobInOrderTest extends SqoopTestCase {
     saveLink(hdfsLink);
 
     // Job creation
-    MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), hdfsLink.getPersistenceId());
+    MJob job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());
 
     // rdms "FROM" config
     fillRdbmsFromConfig(job, "id");
@@ -66,7 +66,7 @@ public class ShowJobInOrderTest extends SqoopTestCase {
     saveJob(job);
 
     // Job creation
-    job = getClient().createJob(hdfsLink.getPersistenceId(), rdbmsLink.getPersistenceId());
+    job = getClient().createJob(hdfsLink.getName(), rdbmsLink.getName());
 
     // rdms "To" config
     fillRdbmsToConfig(job);
@@ -77,7 +77,7 @@ public class ShowJobInOrderTest extends SqoopTestCase {
     saveJob(job);
 
     // Job creation
-    job = getClient().createJob(rdbmsLink.getPersistenceId(), hdfsLink.getPersistenceId());
+    job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());
 
     // rdms "FROM" config
     fillRdbmsFromConfig(job, "id");
@@ -88,7 +88,7 @@ public class ShowJobInOrderTest extends SqoopTestCase {
     saveJob(job);
 
     // Job creation
-    job = getClient().createJob(hdfsLink.getPersistenceId(), rdbmsLink.getPersistenceId());
+    job = getClient().createJob(hdfsLink.getName(), rdbmsLink.getName());
 
 
     // hdfs "From" config

http://git-wip-us.apache.org/repos/asf/sqoop/blob/aaee89dc/test/src/test/java/org/apache/sqoop/integration/server/SubmissionWithDisabledModelObjectsTest.java
----------------------------------------------------------------------
diff --git 
a/test/src/test/java/org/apache/sqoop/integration/server/SubmissionWithDisabledModelObjectsTest.java
 
b/test/src/test/java/org/apache/sqoop/integration/server/SubmissionWithDisabledModelObjectsTest.java
index c4e448e..9e682bc 100644
--- 
a/test/src/test/java/org/apache/sqoop/integration/server/SubmissionWithDisabledModelObjectsTest.java
+++ 
b/test/src/test/java/org/apache/sqoop/integration/server/SubmissionWithDisabledModelObjectsTest.java
@@ -89,7 +89,7 @@ public class SubmissionWithDisabledModelObjectsTest extends 
SqoopTestCase {
     saveLink(hdfsLink);
 
     // Job creation
-    MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), hdfsLink.getPersistenceId());
+    MJob job = getClient().createJob(rdbmsLink.getName(), hdfsLink.getName());
     job.setName(jobName);
 
     // rdms "FROM" config
@@ -101,7 +101,7 @@ public class SubmissionWithDisabledModelObjectsTest extends 
SqoopTestCase {
     saveJob(job);
 
     // Disable model entities as per parameterized run
-    getClient().enableLink(rdbmsLink.getPersistenceId(), enabledLink);
+    getClient().enableLink(rdbmsLink.getName(), enabledLink);
     getClient().enableJob(jobName, enabledJob);
 
     // Try to execute the job and verify that the it was not executed

Reply via email to