This is an automated email from the ASF dual-hosted git repository.

bteke pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
     new e4ee3d560bd YARN-10345 HsWebServices containerlogs does not honor ACLs 
for completed jobs (#7013)
e4ee3d560bd is described below

commit e4ee3d560bddc27a495cc9a158278a9c18276dd0
Author: K0K0V0K <109747532+k0k0...@users.noreply.github.com>
AuthorDate: Tue Aug 27 17:55:07 2024 +0200

    YARN-10345 HsWebServices containerlogs does not honor ACLs for completed 
jobs (#7013)
    
    - following rest apis did not have access control
    - - /ws/v1/history/containerlogs/{containerid}/{filename}
    - - /ws/v1/history/containers/{containerid}/logs
    
    Change-Id: I434f6138966ab22583d356509e40b70d328d9e7c
---
 .../mapreduce/v2/app/webapp/AMWebServices.java     | 15 +++++--
 .../mapreduce/v2/hs/webapp/HsWebServices.java      | 11 +++++-
 .../v2/hs/webapp/TestHsWebServicesAcls.java        | 46 ++++++++++++++++++----
 3 files changed, 60 insertions(+), 12 deletions(-)

diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java
index 78174afb6f8..e95a5d7d33e 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java
@@ -40,6 +40,7 @@ import javax.ws.rs.core.Response.Status;
 
 import org.apache.hadoop.http.JettyUtils;
 import org.apache.hadoop.mapreduce.JobACL;
+import org.apache.hadoop.mapreduce.JobID;
 import 
org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskAttemptRequest;
 import 
org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskAttemptResponse;
 import 
org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.KillTaskAttemptRequestPBImpl;
@@ -113,9 +114,17 @@ public class AMWebServices {
     response.setContentType(null);
   }
 
-  /**
-   * convert a job id string to an actual job and handle all the error 
checking.
-   */
+  public static Job getJobFromContainerIdString(String cid, AppContext appCtx)
+      throws NotFoundException {
+    //example container_e06_1724414851587_0004_01_000001
+    String[] parts = cid.split("_");
+    return getJobFromJobIdString(JobID.JOB + "_" + parts[2] + "_" + parts[3], 
appCtx);
+  }
+
+
+    /**
+     * convert a job id string to an actual job and handle all the error 
checking.
+     */
  public static Job getJobFromJobIdString(String jid, AppContext appCtx) throws 
NotFoundException {
     JobId jobId;
     Job job;
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java
index a26724b1bb6..d16b70ac6f0 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.JettyUtils;
 import org.apache.hadoop.mapreduce.JobACL;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
 import org.apache.hadoop.mapreduce.v2.api.records.JobState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
@@ -87,6 +88,7 @@ public class HsWebServices extends WebServices {
   private final HistoryContext ctx;
   private WebApp webapp;
   private LogServlet logServlet;
+  private boolean mrAclsEnabled;
 
   private @Context HttpServletResponse response;
   @Context UriInfo uriInfo;
@@ -100,6 +102,7 @@ public class HsWebServices extends WebServices {
     this.ctx = ctx;
     this.webapp = webapp;
     this.logServlet = new LogServlet(conf, this);
+    this.mrAclsEnabled = conf.getBoolean(MRConfig.MR_ACLS_ENABLED, false);
   }
 
   private boolean hasAccess(Job job, HttpServletRequest request) {
@@ -116,6 +119,11 @@ public class HsWebServices extends WebServices {
       throw new WebApplicationException(Status.UNAUTHORIZED);
     }
   }
+  private void checkAccess(String containerIdStr, HttpServletRequest hsr) {
+    if (mrAclsEnabled) {
+      checkAccess(AMWebServices.getJobFromContainerIdString(containerIdStr, 
ctx), hsr);
+    }
+  }
 
   private void init() {
     //clear content type
@@ -500,7 +508,7 @@ public class HsWebServices extends WebServices {
       @QueryParam(YarnWebServiceParams.MANUAL_REDIRECTION)
       @DefaultValue("false") boolean manualRedirection) {
     init();
-
+    checkAccess(containerIdStr, hsr);
     WrappedLogMetaRequest.Builder logMetaRequestBuilder =
         LogServlet.createRequestFromContainerId(containerIdStr);
 
@@ -527,6 +535,7 @@ public class HsWebServices extends WebServices {
       @QueryParam(YarnWebServiceParams.MANUAL_REDIRECTION)
       @DefaultValue("false") boolean manualRedirection) {
     init();
+    checkAccess(containerIdStr, req);
     return logServlet.getLogFile(req, containerIdStr, filename, format, size,
         nmId, redirectedFromNode, null, manualRedirection);
   }
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAcls.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAcls.java
index 8d4f635e11d..bb25a97c6cb 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAcls.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAcls.java
@@ -18,23 +18,20 @@
 
 package org.apache.hadoop.mapreduce.v2.hs.webapp;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
 import java.io.IOException;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import javax.ws.rs.WebApplicationException;
 import javax.ws.rs.core.Response.Status;
 
+import org.junit.Before;
+import org.junit.Test;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.Path;
@@ -60,9 +57,19 @@ import org.apache.hadoop.security.Groups;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.yarn.api.records.Priority;
+import org.apache.hadoop.yarn.server.webapp.LogServlet;
 import org.apache.hadoop.yarn.webapp.WebApp;
-import org.junit.Before;
-import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyBoolean;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 
 public class TestHsWebServicesAcls {
   private static String FRIENDLY_USER = "friendly";
@@ -253,6 +260,29 @@ public class TestHsWebServicesAcls {
         this.taskIdStr, this.taskAttemptIdStr);
   }
 
+  @Test
+  public void testLogs() {
+    HttpServletRequest hsr = mock(HttpServletRequest.class);
+    when(hsr.getRemoteUser()).thenReturn(ENEMY_USER);
+    hsWebServices.setLogServlet(mock(LogServlet.class));
+    String cid = "container_e02_" + jobIdStr.substring(4) + "_01_000001";
+    try {
+      hsWebServices.getContainerLogFile(hsr, cid, "syslog",
+          null, null, null, false, false);
+      fail("enemy can access job");
+    } catch (WebApplicationException e) {
+      assertEquals(Status.UNAUTHORIZED,
+          Status.fromStatusCode(e.getResponse().getStatus()));
+    }
+
+    when(hsr.getRemoteUser()).thenReturn(FRIENDLY_USER);
+    hsWebServices.getContainerLogFile(hsr, cid, "syslog",
+        "format", "1024", "nmid", false, false);
+    verify(hsWebServices.getLogServlet(), times(1))
+        .getLogFile(any(), anyString(), anyString(),
+        anyString(), anyString(), anyString(), anyBoolean(), eq(null), 
anyBoolean());
+  }
+
   private static HistoryContext buildHistoryContext(final Configuration conf)
       throws IOException {
     HistoryContext ctx = new MockHistoryContext(1, 1, 1);


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org

Reply via email to