YARN-1942. Deprecate toString/fromString methods from ConverterUtils and move them to record classes like ContainerId/ApplicationId, etc. (wangda)
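----------------------------------------------------------------------
For reference, a minimal sketch of the record-level API this patch moves callers onto, replacing the deprecated ConverterUtils helpers (toApplicationId, toApplicationAttemptId, toContainerId, toNodeId, toString, getYarnUrlFromPath/getPathFromYarnURL). The ID strings are borrowed from the TestMRAppMaster cases in this patch; the node address and HDFS path are made-up placeholders. Illustrative only, not part of the commit:

import java.net.URISyntaxException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.URL;

public class RecordIdConversionExample {
  public static void main(String[] args) throws URISyntaxException {
    // Parsing: the record classes now expose fromString() directly.
    ApplicationId appId =
        ApplicationId.fromString("application_1317529182569_0004");
    ApplicationAttemptId attemptId =
        ApplicationAttemptId.fromString("appattempt_1317529182569_0004_000001");
    ContainerId containerId =
        ContainerId.fromString("container_1317529182569_0004_000001_1");
    NodeId nodeId = NodeId.fromString("nm.example.com:45454"); // placeholder host:port

    // Serializing: plain toString() replaces ConverterUtils.toString(...).
    // Each record prints its canonical zero-padded form.
    System.out.println(appId);       // application_1317529182569_0004
    System.out.println(attemptId);
    System.out.println(containerId);
    System.out.println(nodeId);

    // Path <-> URL conversion moves from ConverterUtils.getYarnUrlFromPath /
    // getPathFromYarnURL to URL.fromPath() / URL#toPath().
    Path jarPath = new Path("hdfs://nn.example.com:8020/user/test/job.jar"); // placeholder path
    URL yarnUrl = URL.fromPath(jarPath);
    Path roundTripped = yarnUrl.toPath();
    System.out.println(roundTripped);
  }
}
----------------------------------------------------------------------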
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2749b194 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2749b194 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2749b194 Branch: refs/heads/HDFS-1312 Commit: 2749b194af42921a7f5489ae8e470ba40f0b8d11 Parents: a219225 Author: Wangda Tan <[email protected]> Authored: Tue Jun 14 15:06:38 2016 -0700 Committer: Anu Engineer <[email protected]> Committed: Sat Jun 18 00:05:02 2016 -0700 ---------------------------------------------------------------------- .../org/apache/hadoop/mapred/YarnChild.java | 10 +- .../hadoop/mapreduce/v2/app/MRAppMaster.java | 2 +- .../v2/app/job/impl/TaskAttemptImpl.java | 7 +- .../v2/app/webapp/dao/TaskAttemptInfo.java | 4 +- .../apache/hadoop/mapreduce/v2/app/MRApp.java | 2 +- .../mapreduce/v2/app/TestMRAppMaster.java | 66 +++--- .../app/commit/TestCommitterEventHandler.java | 12 +- .../mapreduce/v2/app/job/impl/TestJobImpl.java | 4 +- .../app/webapp/TestAMWebServicesAttempts.java | 2 +- .../mapred/LocalDistributedCacheManager.java | 2 +- .../apache/hadoop/mapreduce/v2/util/MRApps.java | 8 +- .../mapreduce/jobhistory/AMStartedEvent.java | 6 +- .../jobhistory/TaskAttemptStartedEvent.java | 4 +- .../v2/hs/webapp/TestHsWebServicesAttempts.java | 2 +- .../org/apache/hadoop/mapred/YARNRunner.java | 5 +- .../apache/hadoop/mapreduce/v2/TestMRJobs.java | 2 +- .../apache/hadoop/mapred/ShuffleHandler.java | 2 +- .../apache/hadoop/tools/HadoopArchiveLogs.java | 3 +- .../yarn/api/records/ApplicationAttemptId.java | 36 +++- .../hadoop/yarn/api/records/ApplicationId.java | 38 +++- .../hadoop/yarn/api/records/ContainerId.java | 6 +- .../apache/hadoop/yarn/api/records/NodeId.java | 23 ++- .../org/apache/hadoop/yarn/api/records/URL.java | 49 +++++ .../distributedshell/ApplicationMaster.java | 7 +- .../applications/distributedshell/Client.java | 3 +- .../DistributedShellTimelinePlugin.java | 4 +- .../distributedshell/TestDistributedShell.java | 4 +- .../hadoop/yarn/client/cli/ApplicationCLI.java | 27 ++- .../apache/hadoop/yarn/client/cli/LogsCLI.java | 6 +- .../apache/hadoop/yarn/client/cli/NodeCLI.java | 2 +- .../hadoop/yarn/client/cli/RMAdminCLI.java | 2 +- .../hadoop/yarn/client/cli/TestRMAdminCLI.java | 2 +- .../AggregatedLogDeletionService.java | 2 +- .../logaggregation/AggregatedLogFormat.java | 11 +- .../yarn/logaggregation/LogCLIHelpers.java | 2 +- .../apache/hadoop/yarn/util/ConverterUtils.java | 206 +++++++------------ .../org/apache/hadoop/yarn/util/FSDownload.java | 2 +- .../yarn/webapp/log/AggregatedLogsBlock.java | 4 +- .../hadoop/yarn/webapp/util/WebAppUtils.java | 2 +- .../hadoop/yarn/util/TestConverterUtils.java | 30 +-- .../apache/hadoop/yarn/util/TestFSDownload.java | 13 +- ...pplicationHistoryManagerOnTimelineStore.java | 25 +-- .../FileSystemApplicationHistoryStore.java | 8 +- .../hadoop/yarn/server/utils/BuilderUtils.java | 2 +- .../yarn/server/webapp/AppAttemptBlock.java | 2 +- .../yarn/server/webapp/ContainerBlock.java | 2 +- .../hadoop/yarn/server/webapp/WebServices.java | 6 +- .../nodemanager/DefaultContainerExecutor.java | 5 +- .../nodemanager/DockerContainerExecutor.java | 6 +- .../nodemanager/LinuxContainerExecutor.java | 2 +- .../container/ContainerImpl.java | 2 +- .../launcher/ContainerLaunch.java | 10 +- .../launcher/ContainerRelaunch.java | 2 +- .../launcher/RecoveredContainerLaunch.java | 6 +- .../localizer/ContainerLocalizer.java | 3 +- .../localizer/LocalResourceRequest.java | 4 +- 
.../localizer/ResourceLocalizationService.java | 28 +-- .../event/LocalizerResourceRequestEvent.java | 2 +- .../sharedcache/SharedCacheUploader.java | 2 +- .../logaggregation/AppLogAggregatorImpl.java | 2 +- .../recovery/NMLeveldbStateStoreService.java | 10 +- .../util/NodeManagerBuilderUtils.java | 2 +- .../nodemanager/util/ProcessIdFileReader.java | 3 +- .../nodemanager/webapp/ApplicationPage.java | 5 +- .../nodemanager/webapp/ContainerLogsPage.java | 2 +- .../nodemanager/webapp/ContainerLogsUtils.java | 6 +- .../nodemanager/webapp/ContainerPage.java | 2 +- .../nodemanager/webapp/NMWebServices.java | 4 +- .../server/nodemanager/webapp/dao/AppInfo.java | 4 +- .../nodemanager/TestNodeManagerReboot.java | 2 +- .../nodemanager/TestNodeManagerResync.java | 2 +- .../nodemanager/TestNodeManagerShutdown.java | 2 +- .../impl/pb/TestPBRecordImpl.java | 9 +- .../containermanager/TestContainerManager.java | 22 +- .../TestContainerManagerRecovery.java | 2 +- .../launcher/TestContainerLaunch.java | 8 +- .../localizer/TestContainerLocalizer.java | 2 +- .../localizer/TestLocalResource.java | 7 +- .../TestResourceLocalizationService.java | 21 +- .../TestAppLogAggregatorImpl.java | 14 +- .../TestLogAggregationService.java | 36 ++-- .../monitor/TestContainersMonitor.java | 2 +- .../TestNMLeveldbStateStoreService.java | 24 +-- .../nodemanager/webapp/TestNMWebServer.java | 2 +- .../webapp/TestNMWebServicesContainers.java | 7 +- .../server/resourcemanager/ResourceManager.java | 2 +- .../recovery/LeveldbRMStateStore.java | 5 +- .../recovery/ZKRMStateStore.java | 2 +- .../resource/DynamicResourceConfiguration.java | 2 +- .../rmcontainer/RMContainerImpl.java | 2 +- .../resourcemanager/webapp/RMAppsBlock.java | 4 +- .../resourcemanager/webapp/RMWebAppFilter.java | 2 +- .../resourcemanager/webapp/RMWebServices.java | 9 +- .../webapp/dao/AppAttemptInfo.java | 2 +- .../resourcemanager/webapp/dao/AppInfo.java | 3 +- .../resourcemanager/TestRMAdminService.java | 8 +- .../recovery/RMStateStoreTestBase.java | 14 +- .../recovery/TestFSRMStateStore.java | 4 +- .../recovery/TestZKRMStateStore.java | 11 +- .../TestRMWebServicesAppsModification.java | 7 +- ...ebServicesDelegationTokenAuthentication.java | 5 +- .../webapp/TestRMWebappAuthentication.java | 9 +- .../timeline/EntityGroupFSTimelineStore.java | 2 +- .../timeline/EntityGroupPlugInForTest.java | 7 +- .../TestEntityGroupFSTimelineStore.java | 4 +- 105 files changed, 561 insertions(+), 486 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java index ec7ade7..164f19d 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java @@ -58,6 +58,7 @@ import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler; import org.apache.hadoop.yarn.api.ApplicationConstants; import 
org.apache.hadoop.yarn.api.ApplicationConstants.Environment; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; +import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.util.ConverterUtils; /** @@ -290,11 +291,10 @@ class YarnChild { private static void configureTask(JobConf job, Task task, Credentials credentials, Token<JobTokenIdentifier> jt) throws IOException { job.setCredentials(credentials); - - ApplicationAttemptId appAttemptId = - ConverterUtils.toContainerId( - System.getenv(Environment.CONTAINER_ID.name())) - .getApplicationAttemptId(); + + ApplicationAttemptId appAttemptId = ContainerId.fromString( + System.getenv(Environment.CONTAINER_ID.name())) + .getApplicationAttemptId(); LOG.debug("APPLICATION_ATTEMPT_ID: " + appAttemptId); // Set it in conf, so as to be able to be used the the OutputCommitter. job.setInt(MRJobConfig.APPLICATION_ATTEMPT_ID, http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java index f8d54c5..c5070f3 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java @@ -1562,7 +1562,7 @@ public class MRAppMaster extends CompositeService { validateInputParam(appSubmitTimeStr, ApplicationConstants.APP_SUBMIT_TIME_ENV); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); ApplicationAttemptId applicationAttemptId = containerId.getApplicationAttemptId(); if (applicationAttemptId != null) { http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java index 6ee8e00..0da3afb 100755 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java @@ -713,8 +713,7 @@ public abstract class TaskAttemptImpl implements LocalResourceType type, LocalResourceVisibility visibility) throws IOException { FileStatus fstat = fc.getFileStatus(file); - URL resourceURL = ConverterUtils.getYarnUrlFromPath(fc.resolvePath(fstat - .getPath())); + URL resourceURL = URL.fromPath(fc.resolvePath(fstat.getPath())); long 
resourceSize = fstat.getLen(); long resourceModificationTime = fstat.getModificationTime(); @@ -1247,8 +1246,8 @@ public abstract class TaskAttemptImpl implements public TaskAttemptStateInternal recover(TaskAttemptInfo taInfo, OutputCommitter committer, boolean recoverOutput) { ContainerId containerId = taInfo.getContainerId(); - NodeId containerNodeId = ConverterUtils.toNodeId(taInfo.getHostname() + ":" - + taInfo.getPort()); + NodeId containerNodeId = NodeId.fromString( + taInfo.getHostname() + ":" + taInfo.getPort()); String nodeHttpAddress = StringInterner.weakIntern(taInfo.getHostname() + ":" + taInfo.getHttpPort()); // Resource/Priority/Tokens are only needed while launching the container on http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java index d8e89b1..892c626 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java @@ -69,8 +69,10 @@ public class TaskAttemptInfo { this.nodeHttpAddress = ta.getNodeHttpAddress(); this.startTime = report.getStartTime(); this.finishTime = report.getFinishTime(); - this.assignedContainerId = ConverterUtils.toString(report.getContainerId()); this.assignedContainer = report.getContainerId(); + if (assignedContainer != null) { + this.assignedContainerId = assignedContainer.toString(); + } this.progress = report.getProgress() * 100; this.status = report.getStateString(); this.state = report.getTaskAttemptState(); http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java index b43a7b4..6ba93e6 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java @@ -254,7 +254,7 @@ public class MRApp extends MRAppMaster { // the job can reaches the final state when MRAppMaster shuts down. 
this.successfullyUnregistered.set(unregistered); this.assignedQueue = assignedQueue; - this.resource = Resource.newInstance(1234, 2); + this.resource = Resource.newInstance(1234L, 2L); } @Override http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java index 5116491..203958d 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java @@ -114,7 +114,7 @@ public class TestMRAppMaster { localFS.delete(testDir, true); new File(testDir.toString()).mkdir(); } - + @Before public void prepare() throws IOException { File dir = new File(stagingDir); @@ -134,11 +134,11 @@ public class TestMRAppMaster { InterruptedException { String applicationAttemptIdStr = "appattempt_1317529182569_0004_000001"; String containerIdStr = "container_1317529182569_0004_000001_1"; - + String userName = "TestAppMasterUser"; - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); MRAppMasterTest appMaster = new MRAppMasterTest(applicationAttemptId, containerId, "host", -1, -1, System.currentTimeMillis()); @@ -161,15 +161,15 @@ public class TestMRAppMaster { conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); conf.setInt(org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter. FILEOUTPUTCOMMITTER_ALGORITHM_VERSION, 1); - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); JobId jobId = TypeConverter.toYarn( TypeConverter.fromYarn(applicationAttemptId.getApplicationId())); Path start = MRApps.getStartJobCommitFile(conf, userName, jobId); FileSystem fs = FileSystem.get(conf); //Create the file, but no end file so we should unregister with an error. 
fs.create(start).close(); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); MRAppMaster appMaster = new MRAppMasterTest(applicationAttemptId, containerId, "host", -1, -1, System.currentTimeMillis(), false, false); @@ -200,8 +200,8 @@ public class TestMRAppMaster { conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); conf.setInt(MRJobConfig.NUM_REDUCES, 0); conf.set(JHAdminConfig.MR_HS_JHIST_FORMAT, "json"); - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); JobId jobId = TypeConverter.toYarn( TypeConverter.fromYarn(applicationAttemptId.getApplicationId())); @@ -219,7 +219,7 @@ public class TestMRAppMaster { FileSystem fs = FileSystem.get(conf); JobSplitWriter.createSplitFiles(new Path(dir.getAbsolutePath()), conf, fs, new org.apache.hadoop.mapred.InputSplit[0]); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); MRAppMasterTestLaunchTime appMaster = new MRAppMasterTestLaunchTime(applicationAttemptId, containerId, "host", -1, -1, System.currentTimeMillis()); @@ -237,8 +237,8 @@ public class TestMRAppMaster { String userName = "TestAppMasterUser"; JobConf conf = new JobConf(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); JobId jobId = TypeConverter.toYarn( TypeConverter.fromYarn(applicationAttemptId.getApplicationId())); Path start = MRApps.getStartJobCommitFile(conf, userName, jobId); @@ -246,7 +246,7 @@ public class TestMRAppMaster { FileSystem fs = FileSystem.get(conf); fs.create(start).close(); fs.create(end).close(); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); MRAppMaster appMaster = new MRAppMasterTest(applicationAttemptId, containerId, "host", -1, -1, System.currentTimeMillis(), false, false); @@ -266,7 +266,7 @@ public class TestMRAppMaster { // verify the final status is SUCCEEDED verifyFailedStatus((MRAppMasterTest)appMaster, "SUCCEEDED"); } - + @Test public void testMRAppMasterFailLock() throws IOException, InterruptedException { @@ -275,8 +275,8 @@ public class TestMRAppMaster { String userName = "TestAppMasterUser"; JobConf conf = new JobConf(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); JobId jobId = TypeConverter.toYarn( TypeConverter.fromYarn(applicationAttemptId.getApplicationId())); Path start = MRApps.getStartJobCommitFile(conf, userName, jobId); @@ -284,7 +284,7 @@ public class TestMRAppMaster { FileSystem fs = FileSystem.get(conf); fs.create(start).close(); fs.create(end).close(); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); MRAppMaster appMaster = new MRAppMasterTest(applicationAttemptId, containerId, "host", -1, -1, System.currentTimeMillis(), false, false); @@ -304,7 +304,7 @@ 
public class TestMRAppMaster { // verify the final status is FAILED verifyFailedStatus((MRAppMasterTest)appMaster, "FAILED"); } - + @Test public void testMRAppMasterMissingStaging() throws IOException, InterruptedException { @@ -313,16 +313,16 @@ public class TestMRAppMaster { String userName = "TestAppMasterUser"; JobConf conf = new JobConf(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); //Delete the staging directory File dir = new File(stagingDir); if(dir.exists()) { FileUtils.deleteDirectory(dir); } - - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + + ContainerId containerId = ContainerId.fromString(containerIdStr); MRAppMaster appMaster = new MRAppMasterTest(applicationAttemptId, containerId, "host", -1, -1, System.currentTimeMillis(), false, false); @@ -353,9 +353,9 @@ public class TestMRAppMaster { String containerIdStr = "container_1317529182569_0004_000002_1"; String userName = "TestAppMasterUser"; - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); JobConf conf = new JobConf(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); @@ -427,7 +427,7 @@ public class TestMRAppMaster { new Token<AMRMTokenIdentifier>(identifier, password, AMRMTokenIdentifier.KIND_NAME, appTokenService); credentials.addToken(appTokenService, appToken); - + Text keyAlias = new Text("mySecretKeyAlias"); credentials.addSecretKey(keyAlias, "mySecretKey".getBytes()); Token<? 
extends TokenIdentifier> storedToken = @@ -488,7 +488,7 @@ public class TestMRAppMaster { Assert.assertEquals(storedToken, confCredentials.getToken(tokenAlias)); Assert.assertEquals("mySecretKey", new String(confCredentials.getSecretKey(keyAlias))); - + // Verify the AM's ugi - app token should be present Credentials ugiCredentials = appMaster.getUgi().getCredentials(); Assert.assertEquals(1, ugiCredentials.numberOfSecretKeys()); @@ -507,9 +507,9 @@ public class TestMRAppMaster { String applicationAttemptIdStr = "appattempt_1317529182569_0004_000002"; String containerIdStr = "container_1317529182569_0004_000002_1"; String userName = "TestAppMasterUser"; - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); JobConf conf = new JobConf(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); @@ -591,7 +591,7 @@ class MRAppMasterTest extends MRAppMaster { } this.conf = conf; } - + @Override protected ContainerAllocator createContainerAllocator( final ClientService clientService, final AppContext context) { @@ -628,7 +628,7 @@ class MRAppMasterTest extends MRAppMaster { public Credentials getCredentials() { return super.getCredentials(); } - + public UserGroupInformation getUgi() { return currentUser; } http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/commit/TestCommitterEventHandler.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/commit/TestCommitterEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/commit/TestCommitterEventHandler.java index a4853d5..b099bcc 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/commit/TestCommitterEventHandler.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/commit/TestCommitterEventHandler.java @@ -129,8 +129,8 @@ public class TestCommitterEventHandler { SystemClock clock = SystemClock.getInstance(); AppContext appContext = mock(AppContext.class); - ApplicationAttemptId attemptid = - ConverterUtils.toApplicationAttemptId("appattempt_1234567890000_0001_0"); + ApplicationAttemptId attemptid = ApplicationAttemptId.fromString( + "appattempt_1234567890000_0001_0"); when(appContext.getApplicationID()).thenReturn(attemptid.getApplicationId()); when(appContext.getApplicationAttemptId()).thenReturn(attemptid); when(appContext.getEventHandler()).thenReturn( @@ -240,8 +240,8 @@ public class TestCommitterEventHandler { YarnConfiguration conf = new YarnConfiguration(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); JobContext mockJobContext = mock(JobContext.class); - ApplicationAttemptId attemptid = - ConverterUtils.toApplicationAttemptId("appattempt_1234567890000_0001_0"); + ApplicationAttemptId attemptid = ApplicationAttemptId.fromString( + "appattempt_1234567890000_0001_0"); JobId jobId = 
TypeConverter.toYarn( TypeConverter.fromYarn(attemptid.getApplicationId())); @@ -288,8 +288,8 @@ public class TestCommitterEventHandler { YarnConfiguration conf = new YarnConfiguration(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); JobContext mockJobContext = mock(JobContext.class); - ApplicationAttemptId attemptid = - ConverterUtils.toApplicationAttemptId("appattempt_1234567890000_0001_0"); + ApplicationAttemptId attemptid = + ApplicationAttemptId.fromString("appattempt_1234567890000_0001_0"); JobId jobId = TypeConverter.toYarn( TypeConverter.fromYarn(attemptid.getApplicationId())); http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java index 36221e0..eaa5af7 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java @@ -942,8 +942,8 @@ public class TestJobImpl { callback.run(); } }; - ApplicationAttemptId id = - ConverterUtils.toApplicationAttemptId("appattempt_1234567890000_0001_0"); + ApplicationAttemptId id = ApplicationAttemptId.fromString( + "appattempt_1234567890000_0001_0"); when(appContext.getApplicationID()).thenReturn(id.getApplicationId()); when(appContext.getApplicationAttemptId()).thenReturn(id); CommitterEventHandler handler = http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java index dcd5d29..3c9127f 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java @@ -515,7 +515,7 @@ public class TestAMWebServicesAttempts extends JerseyTest { WebServicesTestUtils.checkStringMatch("diagnostics", expectDiag, diagnostics); WebServicesTestUtils.checkStringMatch("assignedContainerId", - ConverterUtils.toString(ta.getAssignedContainerID()), + ta.getAssignedContainerID().toString(), assignedContainerId); assertEquals("startTime wrong", ta.getLaunchTime(), startTime); 
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java index 3b87197..c58a774 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java @@ -157,7 +157,7 @@ class LocalDistributedCacheManager { } Path resourcePath; try { - resourcePath = ConverterUtils.getPathFromYarnURL(resource.getResource()); + resourcePath = resource.getResource().toPath(); } catch (URISyntaxException e) { throw new IOException(e); } http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java index c8d8a44..31e4c0f 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java @@ -68,6 +68,7 @@ import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.LocalResourceType; import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; +import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.util.Apps; @@ -569,14 +570,13 @@ public class MRApps extends Apps { } String linkName = name.toUri().getPath(); LocalResource orig = localResources.get(linkName); - if(orig != null && !orig.getResource().equals( - ConverterUtils.getYarnUrlFromURI(p.toUri()))) { + if(orig != null && !orig.getResource().equals(URL.fromURI(p.toUri()))) { throw new InvalidJobConfException( getResourceDescription(orig.getType()) + orig.getResource() + " conflicts with " + getResourceDescription(type) + u); } - localResources.put(linkName, LocalResource.newInstance(ConverterUtils - .getYarnUrlFromURI(p.toUri()), type, visibilities[i] + localResources.put(linkName, LocalResource + .newInstance(URL.fromURI(p.toUri()), type, visibilities[i] ? 
LocalResourceVisibility.PUBLIC : LocalResourceVisibility.PRIVATE, sizes[i], timestamps[i])); } http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/AMStartedEvent.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/AMStartedEvent.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/AMStartedEvent.java index ea2ca9e..266aa94 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/AMStartedEvent.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/AMStartedEvent.java @@ -107,8 +107,8 @@ public class AMStartedEvent implements HistoryEvent { * @return the ApplicationAttemptId */ public ApplicationAttemptId getAppAttemptId() { - return ConverterUtils.toApplicationAttemptId(datum.getApplicationAttemptId() - .toString()); + return ApplicationAttemptId.fromString( + datum.getApplicationAttemptId().toString()); } /** @@ -122,7 +122,7 @@ public class AMStartedEvent implements HistoryEvent { * @return the ContainerId for the MRAppMaster. */ public ContainerId getContainerId() { - return ConverterUtils.toContainerId(datum.getContainerId().toString()); + return ContainerId.fromString(datum.getContainerId().toString()); } /** http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java index c8c250a..3073d5b 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java @@ -75,7 +75,7 @@ public class TaskAttemptStartedEvent implements HistoryEvent { long startTime, String trackerName, int httpPort, int shufflePort, String locality, String avataar) { this(attemptId, taskType, startTime, trackerName, httpPort, shufflePort, - ConverterUtils.toContainerId("container_-1_-1_-1_-1"), locality, + ContainerId.fromString("container_-1_-1_-1_-1"), locality, avataar); } @@ -116,7 +116,7 @@ public class TaskAttemptStartedEvent implements HistoryEvent { } /** Get the ContainerId */ public ContainerId getContainerId() { - return ConverterUtils.toContainerId(datum.getContainerId().toString()); + return ContainerId.fromString(datum.getContainerId().toString()); } /** Get the locality */ public String getLocality() { 
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java index 60dc235..54c2792 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java @@ -533,7 +533,7 @@ public class TestHsWebServicesAttempts extends JerseyTest { WebServicesTestUtils.checkStringMatch("diagnostics", expectDiag, diagnostics); WebServicesTestUtils.checkStringMatch("assignedContainerId", - ConverterUtils.toString(ta.getAssignedContainerID()), + ta.getAssignedContainerID().toString(), assignedContainerId); assertEquals("startTime wrong", ta.getLaunchTime(), startTime); http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java index 1342282..b30641e 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java @@ -321,7 +321,7 @@ public class YARNRunner implements ClientProtocol { throws IOException { LocalResource rsrc = recordFactory.newRecordInstance(LocalResource.class); FileStatus rsrcStat = fs.getFileStatus(p); - rsrc.setResource(ConverterUtils.getYarnUrlFromPath(fs + rsrc.setResource(URL.fromPath(fs .getDefaultFileSystem().resolvePath(rsrcStat.getPath()))); rsrc.setSize(rsrcStat.getLen()); rsrc.setTimestamp(rsrcStat.getModificationTime()); @@ -355,8 +355,7 @@ public class YARNRunner implements ClientProtocol { Path jobConfPath = new Path(jobSubmitDir, MRJobConfig.JOB_CONF_FILE); - URL yarnUrlForJobSubmitDir = ConverterUtils - .getYarnUrlFromPath(defaultFileContext.getDefaultFileSystem() + URL yarnUrlForJobSubmitDir = URL.fromPath(defaultFileContext.getDefaultFileSystem() .resolvePath( defaultFileContext.makeQualified(new Path(jobSubmitDir)))); LOG.debug("Creating setup context, jobSubmitDir url is " http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java ---------------------------------------------------------------------- diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java index a6647f1..900bdeb 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java @@ -749,7 +749,7 @@ public class TestMRJobs { boolean foundAppMaster = job.isUber(); final Path containerPathComponent = slog.getPath().getParent(); if (!foundAppMaster) { - final ContainerId cid = ConverterUtils.toContainerId( + final ContainerId cid = ContainerId.fromString( containerPathComponent.getName()); foundAppMaster = ((cid.getContainerId() & ContainerId.CONTAINER_ID_BITMASK)== 1); http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java index 0d6e900..8cbae81 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java @@ -999,7 +999,7 @@ public class ShuffleHandler extends AuxiliaryService { final String baseStr = ContainerLocalizer.USERCACHE + "/" + user + "/" + ContainerLocalizer.APPCACHE + "/" - + ConverterUtils.toString(appID) + "/output" + "/"; + + appID.toString() + "/output" + "/"; return baseStr; } http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java index c502ffd..2e44070 100644 --- a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java +++ b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java @@ -39,6 +39,7 @@ import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; +import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.LogAggregationStatus; import org.apache.hadoop.yarn.applications.distributedshell.ApplicationMaster; @@ -302,7 +303,7 @@ public class HadoopArchiveLogs implements Tool { AppInfo app = it.next(); try { ApplicationReport report = client.getApplicationReport( - ConverterUtils.toApplicationId(app.getAppId())); + 
ApplicationId.fromString(app.getAppId())); LogAggregationStatus aggStatus = report.getLogAggregationStatus(); if (aggStatus.equals(LogAggregationStatus.RUNNING) || aggStatus.equals(LogAggregationStatus.RUNNING_WITH_FAILURE) || http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java index 0a83bc0..5f3a68e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java @@ -19,6 +19,8 @@ package org.apache.hadoop.yarn.api.records; import java.text.NumberFormat; +import java.util.Iterator; +import java.util.NoSuchElementException; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Public; @@ -26,6 +28,8 @@ import org.apache.hadoop.classification.InterfaceStability.Stable; import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.yarn.util.Records; +import com.google.common.base.Splitter; + /** * <p><code>ApplicationAttemptId</code> denotes the particular <em>attempt</em> * of an <code>ApplicationMaster</code> for a given {@link ApplicationId}.</p> @@ -38,10 +42,11 @@ import org.apache.hadoop.yarn.util.Records; @Stable public abstract class ApplicationAttemptId implements Comparable<ApplicationAttemptId> { + private static Splitter _spliter = Splitter.on('_').trimResults(); @Private @Unstable - public static final String appAttemptIdStrPrefix = "appattempt_"; + public static final String appAttemptIdStrPrefix = "appattempt"; @Public @Unstable @@ -131,6 +136,7 @@ public abstract class ApplicationAttemptId implements @Override public String toString() { StringBuilder sb = new StringBuilder(appAttemptIdStrPrefix); + sb.append("_"); sb.append(this.getApplicationId().getClusterTimestamp()).append("_"); sb.append(ApplicationId.appIdFormat.get().format( this.getApplicationId().getId())); @@ -139,4 +145,32 @@ public abstract class ApplicationAttemptId implements } protected abstract void build(); + + @Public + @Stable + public static ApplicationAttemptId fromString(String applicationAttemptIdStr) { + Iterator<String> it = _spliter.split(applicationAttemptIdStr).iterator(); + if (!it.next().equals(appAttemptIdStrPrefix)) { + throw new IllegalArgumentException("Invalid AppAttemptId prefix: " + + applicationAttemptIdStr); + } + try { + return toApplicationAttemptId(it); + } catch (NumberFormatException n) { + throw new IllegalArgumentException("Invalid AppAttemptId: " + + applicationAttemptIdStr, n); + } catch (NoSuchElementException e) { + throw new IllegalArgumentException("Invalid AppAttemptId: " + + applicationAttemptIdStr, e); + } + } + + private static ApplicationAttemptId toApplicationAttemptId( + Iterator<String> it) throws NumberFormatException { + ApplicationId appId = ApplicationId.newInstance(Long.parseLong(it.next()), + Integer.parseInt(it.next())); + ApplicationAttemptId appAttemptId = + ApplicationAttemptId.newInstance(appId, 
Integer.parseInt(it.next())); + return appAttemptId; + } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java index 90214cd..03a77ce 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java @@ -19,6 +19,8 @@ package org.apache.hadoop.yarn.api.records; import java.text.NumberFormat; +import java.util.Iterator; +import java.util.NoSuchElementException; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Public; @@ -26,6 +28,8 @@ import org.apache.hadoop.classification.InterfaceStability.Stable; import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.yarn.util.Records; +import com.google.common.base.Splitter; + /** * <p><code>ApplicationId</code> represents the <em>globally unique</em> * identifier for an application.</p> @@ -38,10 +42,11 @@ import org.apache.hadoop.yarn.util.Records; @Public @Stable public abstract class ApplicationId implements Comparable<ApplicationId> { + private static Splitter _spliter = Splitter.on('_').trimResults(); @Private @Unstable - public static final String appIdStrPrefix = "application_"; + public static final String appIdStrPrefix = "application"; @Public @Unstable @@ -105,8 +110,35 @@ public abstract class ApplicationId implements Comparable<ApplicationId> { @Override public String toString() { - return appIdStrPrefix + this.getClusterTimestamp() + "_" - + appIdFormat.get().format(getId()); + return appIdStrPrefix + "_" + this.getClusterTimestamp() + "_" + appIdFormat + .get().format(getId()); + } + + private static ApplicationId toApplicationId( + Iterator<String> it) throws NumberFormatException { + ApplicationId appId = ApplicationId.newInstance(Long.parseLong(it.next()), + Integer.parseInt(it.next())); + return appId; + } + + @Public + @Stable + public static ApplicationId fromString(String appIdStr) { + Iterator<String> it = _spliter.split((appIdStr)).iterator(); + if (!it.next().equals(appIdStrPrefix)) { + throw new IllegalArgumentException("Invalid ApplicationId prefix: " + + appIdStr + ". 
The valid ApplicationId should start with prefix " + + appIdStrPrefix); + } + try { + return toApplicationId(it); + } catch (NumberFormatException n) { + throw new IllegalArgumentException("Invalid ApplicationId: " + + appIdStr, n); + } catch (NoSuchElementException e) { + throw new IllegalArgumentException("Invalid ApplicationId: " + + appIdStr, e); + } } @Override http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java index f332651..feddeca 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java @@ -42,7 +42,7 @@ public abstract class ContainerId implements Comparable<ContainerId>{ private static final String CONTAINER_PREFIX = "container"; private static final String EPOCH_PREFIX = "e"; - @Private + @Public @Unstable public static ContainerId newContainerId(ApplicationAttemptId appAttemptId, long containerId) { @@ -97,7 +97,7 @@ public abstract class ContainerId implements Comparable<ContainerId>{ */ @Public @Deprecated - @Stable + @Unstable public abstract int getId(); /** @@ -205,7 +205,7 @@ public abstract class ContainerId implements Comparable<ContainerId>{ } @Public - @Unstable + @Stable public static ContainerId fromString(String containerIdStr) { Iterator<String> it = _SPLITTER.split(containerIdStr).iterator(); if (!it.next().equals(CONTAINER_PREFIX)) { http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java index c3f8595..a0b87a7 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java @@ -20,8 +20,8 @@ package org.apache.hadoop.yarn.api.records; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Public; -import org.apache.hadoop.classification.InterfaceStability.Stable; import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.classification.InterfaceStability.Stable; import org.apache.hadoop.yarn.util.Records; /** @@ -35,8 +35,8 @@ import org.apache.hadoop.yarn.util.Records; @Stable public abstract class NodeId implements Comparable<NodeId> { - @Private - @Unstable + @Public + @Stable public static NodeId newInstance(String host, int port) { NodeId nodeId = Records.newRecord(NodeId.class); nodeId.setHost(host); @@ -112,6 +112,23 @@ public abstract class NodeId implements Comparable<NodeId> { } return hostCompare; } + + @Public + @Stable + public static NodeId 
fromString(String nodeIdStr) { + String[] parts = nodeIdStr.split(":"); + if (parts.length != 2) { + throw new IllegalArgumentException("Invalid NodeId [" + nodeIdStr + + "]. Expected host:port"); + } + try { + NodeId nodeId = + NodeId.newInstance(parts[0].trim(), Integer.parseInt(parts[1])); + return nodeId; + } catch (NumberFormatException e) { + throw new IllegalArgumentException("Invalid port: " + parts[1], e); + } + } protected abstract void build(); } http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/URL.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/URL.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/URL.java index 4261117..aa28585 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/URL.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/URL.java @@ -18,8 +18,13 @@ package org.apache.hadoop.yarn.api.records; +import java.net.URI; +import java.net.URISyntaxException; + import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Stable; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.util.Records; /** @@ -119,4 +124,48 @@ public abstract class URL { @Public @Stable public abstract void setFile(String file); + + @Public + @Stable + public Path toPath() throws URISyntaxException { + String scheme = getScheme() == null ? 
"" : getScheme(); + + String authority = ""; + if (getHost() != null) { + authority = getHost(); + if (getUserInfo() != null) { + authority = getUserInfo() + "@" + authority; + } + if (getPort() > 0) { + authority += ":" + getPort(); + } + } + + return new Path( + (new URI(scheme, authority, getFile(), null, null)).normalize()); + } + + @Public + @Stable + public static URL fromURI(URI uri) { + URL url = + RecordFactoryProvider.getRecordFactory(null).newRecordInstance( + URL.class); + if (uri.getHost() != null) { + url.setHost(uri.getHost()); + } + if (uri.getUserInfo() != null) { + url.setUserInfo(uri.getUserInfo()); + } + url.setPort(uri.getPort()); + url.setScheme(uri.getScheme()); + url.setFile(uri.getPath()); + return url; + } + + @Public + @Stable + public static URL fromPath(Path path) { + return fromURI(path.toUri()); + } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java index 5e2c90b..703595c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java @@ -435,13 +435,13 @@ public class ApplicationMaster { if (!envs.containsKey(Environment.CONTAINER_ID.name())) { if (cliParser.hasOption("app_attempt_id")) { String appIdStr = cliParser.getOptionValue("app_attempt_id", ""); - appAttemptID = ConverterUtils.toApplicationAttemptId(appIdStr); + appAttemptID = ApplicationAttemptId.fromString(appIdStr); } else { throw new IllegalArgumentException( "Application Attempt Id not set in the environment"); } } else { - ContainerId containerId = ConverterUtils.toContainerId(envs + ContainerId containerId = ContainerId.fromString(envs .get(Environment.CONTAINER_ID.name())); appAttemptID = containerId.getApplicationAttemptId(); } @@ -1048,8 +1048,7 @@ public class ApplicationMaster { URL yarnUrl = null; try { - yarnUrl = ConverterUtils.getYarnUrlFromURI( - new URI(renamedScriptPath.toString())); + yarnUrl = URL.fromURI(new URI(renamedScriptPath.toString())); } catch (URISyntaxException e) { LOG.error("Error when trying to use shell script path specified" + " in env, path=" + renamedScriptPath, e); http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java index 5adc37d..9879b1e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java @@ -68,6 +68,7 @@ import org.apache.hadoop.yarn.api.records.QueueACL; import org.apache.hadoop.yarn.api.records.QueueInfo; import org.apache.hadoop.yarn.api.records.QueueUserACLInfo; import org.apache.hadoop.yarn.api.records.Resource; +import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.api.records.YarnClusterMetrics; import org.apache.hadoop.yarn.api.records.timeline.TimelineDomain; @@ -857,7 +858,7 @@ public class Client { FileStatus scFileStatus = fs.getFileStatus(dst); LocalResource scRsrc = LocalResource.newInstance( - ConverterUtils.getYarnUrlFromURI(dst.toUri()), + URL.fromURI(dst.toUri()), LocalResourceType.FILE, LocalResourceVisibility.APPLICATION, scFileStatus.getLen(), scFileStatus.getModificationTime()); localResources.put(fileDstPath, scRsrc); http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/DistributedShellTimelinePlugin.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/DistributedShellTimelinePlugin.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/DistributedShellTimelinePlugin.java index 55fbd60..119fa6f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/DistributedShellTimelinePlugin.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/DistributedShellTimelinePlugin.java @@ -53,7 +53,7 @@ public class DistributedShellTimelinePlugin extends TimelineEntityGroupPlugin { public Set<TimelineEntityGroupId> getTimelineEntityGroupId(String entityId, String entityType) { if (ApplicationMaster.DSEntity.DS_CONTAINER.toString().equals(entityId)) { - ContainerId containerId = ConverterUtils.toContainerId(entityId); + ContainerId containerId = ContainerId.fromString(entityId); ApplicationId appId = containerId.getApplicationAttemptId() .getApplicationId(); return toEntityGroupId(appId.toString()); @@ -69,7 +69,7 @@ public class DistributedShellTimelinePlugin extends TimelineEntityGroupPlugin { } private Set<TimelineEntityGroupId> toEntityGroupId(String strAppId) { - ApplicationId appId = ConverterUtils.toApplicationId(strAppId); + ApplicationId appId = ApplicationId.fromString(strAppId); TimelineEntityGroupId groupId = TimelineEntityGroupId.newInstance( appId, 
ApplicationMaster.CONTAINER_ENTITY_GROUP_ID); Set<TimelineEntityGroupId> result = new HashSet<>(); http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java index 2b46fca..9448cf1 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java @@ -371,8 +371,8 @@ public class TestDistributedShell { } String currAttemptEntityId = entitiesAttempts.getEntities().get(0).getEntityId(); - ApplicationAttemptId attemptId - = ConverterUtils.toApplicationAttemptId(currAttemptEntityId); + ApplicationAttemptId attemptId = ApplicationAttemptId.fromString( + currAttemptEntityId); NameValuePair primaryFilter = new NameValuePair( ApplicationMaster.APPID_TIMELINE_FILTER_NAME, attemptId.getApplicationId().toString()); http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java index d9e9fa6..865ce00 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java @@ -301,7 +301,7 @@ public class ApplicationCLI extends YarnCLI { */ private void signalToContainer(String containerIdStr, SignalContainerCommand command) throws YarnException, IOException { - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); sysout.println("Signalling container " + containerIdStr); client.signalToContainer(containerId, command); } @@ -327,8 +327,8 @@ public class ApplicationCLI extends YarnCLI { throws YarnException, IOException { ApplicationAttemptReport appAttemptReport = null; try { - appAttemptReport = client.getApplicationAttemptReport(ConverterUtils - .toApplicationAttemptId(applicationAttemptId)); + appAttemptReport = client.getApplicationAttemptReport( + ApplicationAttemptId.fromString(applicationAttemptId)); } catch (ApplicationNotFoundException e) { sysout.println("Application for AppAttempt with id '" + applicationAttemptId + "' doesn't exist in RM or Timeline Server."); @@ -384,8 +384,7 @@ public class ApplicationCLI extends 
YarnCLI { IOException { ContainerReport containerReport = null; try { - containerReport = client.getContainerReport((ConverterUtils - .toContainerId(containerId))); + containerReport = client.getContainerReport(ContainerId.fromString(containerId)); } catch (ApplicationNotFoundException e) { sysout.println("Application for Container with id '" + containerId + "' doesn't exist in RM or Timeline Server."); @@ -515,7 +514,7 @@ public class ApplicationCLI extends YarnCLI { */ private void killApplication(String applicationId) throws YarnException, IOException { - ApplicationId appId = ConverterUtils.toApplicationId(applicationId); + ApplicationId appId = ApplicationId.fromString(applicationId); ApplicationReport appReport = null; try { appReport = client.getApplicationReport(appId); @@ -540,7 +539,7 @@ public class ApplicationCLI extends YarnCLI { */ private void moveApplicationAcrossQueues(String applicationId, String queue) throws YarnException, IOException { - ApplicationId appId = ConverterUtils.toApplicationId(applicationId); + ApplicationId appId = ApplicationId.fromString(applicationId); ApplicationReport appReport = client.getApplicationReport(appId); if (appReport.getYarnApplicationState() == YarnApplicationState.FINISHED || appReport.getYarnApplicationState() == YarnApplicationState.KILLED @@ -565,7 +564,7 @@ public class ApplicationCLI extends YarnCLI { IOException { ApplicationId appId; ApplicationAttemptId attId; - attId = ConverterUtils.toApplicationAttemptId(attemptId); + attId = ApplicationAttemptId.fromString(attemptId); appId = attId.getApplicationId(); sysout.println("Failing attempt " + attId + " of application " + appId); @@ -583,8 +582,8 @@ public class ApplicationCLI extends YarnCLI { throws YarnException, IOException { ApplicationReport appReport = null; try { - appReport = client.getApplicationReport(ConverterUtils - .toApplicationId(applicationId)); + appReport = client.getApplicationReport( + ApplicationId.fromString(applicationId)); } catch (ApplicationNotFoundException e) { sysout.println("Application with id '" + applicationId + "' doesn't exist in RM or Timeline Server."); @@ -684,7 +683,7 @@ public class ApplicationCLI extends YarnCLI { new OutputStreamWriter(sysout, Charset.forName("UTF-8"))); List<ApplicationAttemptReport> appAttemptsReport = client - .getApplicationAttempts(ConverterUtils.toApplicationId(applicationId)); + .getApplicationAttempts(ApplicationId.fromString(applicationId)); writer.println("Total number of application attempts " + ":" + appAttemptsReport.size()); writer.printf(APPLICATION_ATTEMPTS_PATTERN, "ApplicationAttempt-Id", @@ -711,8 +710,8 @@ public class ApplicationCLI extends YarnCLI { PrintWriter writer = new PrintWriter( new OutputStreamWriter(sysout, Charset.forName("UTF-8"))); - List<ContainerReport> appsReport = client - .getContainers(ConverterUtils.toApplicationAttemptId(appAttemptId)); + List<ContainerReport> appsReport = client.getContainers( + ApplicationAttemptId.fromString(appAttemptId)); writer.println("Total number of containers " + ":" + appsReport.size()); writer.printf(CONTAINER_PATTERN, "Container-Id", "Start Time", "Finish Time", "State", "Host", "Node Http Address", "LOG-URL"); @@ -735,7 +734,7 @@ public class ApplicationCLI extends YarnCLI { */ private void updateApplicationPriority(String applicationId, String priority) throws YarnException, IOException { - ApplicationId appId = ConverterUtils.toApplicationId(applicationId); + ApplicationId appId = ApplicationId.fromString(applicationId); Priority newAppPriority = 
Priority.newInstance(Integer.parseInt(priority)); sysout.println("Updating priority of an application " + applicationId); Priority updateApplicationPriority = http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java index d62ee5e..4fdb57b 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java @@ -153,7 +153,7 @@ public class LogsCLI extends Configured implements Tool { ApplicationId appId = null; try { - appId = ConverterUtils.toApplicationId(appIdStr); + appId = ApplicationId.fromString(appIdStr); } catch (Exception e) { System.err.println("Invalid ApplicationId specified"); return -1; @@ -456,8 +456,8 @@ public class LogsCLI extends Configured implements Tool { throws YarnException, IOException { YarnClient yarnClient = createYarnClient(); try { - return yarnClient.getContainerReport(ConverterUtils - .toContainerId(containerIdStr)); + return yarnClient.getContainerReport( + ContainerId.fromString(containerIdStr)); } finally { yarnClient.close(); } http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java index a89551f..f51fee9 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java @@ -243,7 +243,7 @@ public class NodeCLI extends YarnCLI { */ private void printNodeStatus(String nodeIdStr) throws YarnException, IOException { - NodeId nodeId = ConverterUtils.toNodeId(nodeIdStr); + NodeId nodeId = NodeId.fromString(nodeIdStr); List<NodeReport> nodesReport = client.getNodeReports(); // Use PrintWriter.println, which uses correct platform line ending. 
ByteArrayOutputStream baos = new ByteArrayOutputStream(); http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java index d407c20..aa7fc30 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java @@ -427,7 +427,7 @@ public class RMAdminCLI extends HAAdmin { ResourceManagerAdministrationProtocol adminProtocol = createAdminProtocol(); UpdateNodeResourceRequest request = recordFactory.newRecordInstance(UpdateNodeResourceRequest.class); - NodeId nodeId = ConverterUtils.toNodeId(nodeIdStr); + NodeId nodeId = NodeId.fromString(nodeIdStr); Resource resource = Resources.createResource(memSize, cores); Map<NodeId, ResourceOption> resourceMap = http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java index 057594d..1551333 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java @@ -222,7 +222,7 @@ public class TestRMAdminCLI { verify(admin).updateNodeResource(argument.capture()); UpdateNodeResourceRequest request = argument.getValue(); Map<NodeId, ResourceOption> resourceMap = request.getNodeResourceMap(); - NodeId nodeId = ConverterUtils.toNodeId(nodeIdStr); + NodeId nodeId = NodeId.fromString(nodeIdStr); Resource expectedResource = Resources.createResource(memSize, cores); ResourceOption resource = resourceMap.get(nodeId); assertNotNull("resource for " + nodeIdStr + " shouldn't be null.", http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogDeletionService.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogDeletionService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogDeletionService.java index 4c1d152..a80f9d7 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogDeletionService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogDeletionService.java @@ -99,7 +99,7 @@ public class AggregatedLogDeletionService extends AbstractService { 
if(appDir.isDirectory() && appDir.getModificationTime() < cutoffMillis) { boolean appTerminated = - isApplicationTerminated(ConverterUtils.toApplicationId(appDir + isApplicationTerminated(ApplicationId.fromString(appDir .getPath().getName()), rmClient); if(appTerminated && shouldDeleteLogDir(appDir, cutoffMillis, fs)) { try { http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java index 98ffce1..8b213d5 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java @@ -209,14 +209,11 @@ public class AggregatedLogFormat { public Set<File> getPendingLogFilesToUploadForThisContainer() { Set<File> pendingUploadFiles = new HashSet<File>(); for (String rootLogDir : this.rootLogDirs) { - File appLogDir = - new File(rootLogDir, - ConverterUtils.toString( - this.containerId.getApplicationAttemptId(). - getApplicationId()) - ); + File appLogDir = new File(rootLogDir, + this.containerId.getApplicationAttemptId(). + getApplicationId().toString()); File containerLogDir = - new File(appLogDir, ConverterUtils.toString(this.containerId)); + new File(appLogDir, this.containerId.toString()); if (!containerLogDir.isDirectory()) { continue; // ContainerDir may have been deleted by the user. http://git-wip-us.apache.org/repos/asf/hadoop/blob/2749b194/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java index 3811054..26b2b01 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java @@ -59,7 +59,7 @@ public class LogCLIHelpers implements Configurable { public int dumpAContainersLogs(String appId, String containerId, String nodeId, String jobOwner) throws IOException { ContainerLogsRequest options = new ContainerLogsRequest(); - options.setAppId(ConverterUtils.toApplicationId(appId)); + options.setAppId(ApplicationId.fromString(appId)); options.setContainerId(containerId); options.setNodeId(nodeId); options.setAppOwner(jobOwner);
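----------------------------------------------------------------------

For anyone migrating their own code off the deprecated ConverterUtils helpers, a minimal usage sketch of the record-level fromString parsers applied throughout the hunks above (ApplicationId, ApplicationAttemptId, ContainerId, NodeId). The ID strings are illustrative values in the standard YARN formats, and the class name RecordParsingExample is invented for the example.

import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.NodeId;

public class RecordParsingExample {
  public static void main(String[] args) {
    // Previously: ConverterUtils.toApplicationId(appIdStr)
    ApplicationId appId =
        ApplicationId.fromString("application_1465949438622_0001");

    // Previously: ConverterUtils.toApplicationAttemptId(attemptIdStr)
    ApplicationAttemptId attemptId =
        ApplicationAttemptId.fromString("appattempt_1465949438622_0001_000001");

    // Previously: ConverterUtils.toContainerId(containerIdStr)
    ContainerId containerId =
        ContainerId.fromString("container_1465949438622_0001_01_000001");

    // Previously: ConverterUtils.toNodeId(nodeIdStr); per the NodeId change
    // above, anything other than host:port raises IllegalArgumentException.
    NodeId nodeId = NodeId.fromString("node-1.example.com:45454");

    // The records' own toString() replaces ConverterUtils.toString(...),
    // e.g. when building per-application and per-container log dir names.
    System.out.println(appId + " / " + attemptId + " / "
        + containerId + " / " + nodeId);
  }
}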

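A similar sketch for the URL record helpers added above (URL.fromURI, URL.fromPath, URL#toPath), which replace ConverterUtils.getYarnUrlFromURI when wiring up a LocalResource, as in the distributed-shell Client hunk. The file path, Configuration setup, and class name LocalResourceExample are assumptions made for the example.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.api.records.URL;

public class LocalResourceExample {
  // Builds a LocalResource for a file already uploaded to the FileSystem,
  // mirroring the Client.java hunk but via the new URL.fromPath shortcut.
  public static LocalResource toLocalResource(FileSystem fs, Path dst)
      throws java.io.IOException {
    FileStatus status = fs.getFileStatus(dst);
    // Previously: ConverterUtils.getYarnUrlFromURI(dst.toUri());
    // URL.fromPath(dst) is equivalent to URL.fromURI(dst.toUri()).
    URL yarnUrl = URL.fromPath(dst);
    return LocalResource.newInstance(yarnUrl,
        LocalResourceType.FILE, LocalResourceVisibility.APPLICATION,
        status.getLen(), status.getModificationTime());
  }

  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path dst = new Path("/tmp/example-resource.sh"); // hypothetical path
    LocalResource rsrc = toLocalResource(fs, dst);
    // URL#toPath (added in this change) converts the record back to a Path.
    Path roundTrip = rsrc.getResource().toPath();
    System.out.println(roundTrip);
  }
}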