This is an automated email from the ASF dual-hosted git repository.
ayushsaxena pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new 85e2264e089 HIVE-26789: Add UserName in CallerContext for queries.
(#3813). (Ayush Saxena, reviewed by Denys Kuzmenko, Chris Nauroth)
85e2264e089 is described below
commit 85e2264e089108b9f3430a6ed2357c165a08118a
Author: Ayush Saxena <[email protected]>
AuthorDate: Sun Dec 4 11:24:22 2022 +0530
HIVE-26789: Add UserName in CallerContext for queries. (#3813). (Ayush
Saxena, reviewed by Denys Kuzmenko, Chris Nauroth)
---
cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java | 10 +++++++---
ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java | 7 +++++--
.../java/org/apache/hadoop/hive/ql/session/SessionState.java | 3 ++-
.../java/org/apache/hive/service/cli/operation/Operation.java | 8 ++++++--
.../org/apache/hive/service/cli/operation/SQLOperation.java | 5 ++++-
.../hive/service/cli/operation/hplsql/HplSqlOperation.java | 5 ++++-
.../main/java/org/apache/hadoop/hive/shims/HadoopShims.java | 2 ++
7 files changed, 30 insertions(+), 10 deletions(-)
diff --git a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
index 4d57b22d0af..d0d37b506ab 100644
--- a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
+++ b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.hive.cli;
+import static org.apache.hadoop.hive.shims.HadoopShims.USER_ID;
import static org.apache.hadoop.util.StringUtils.stringifyException;
import java.io.BufferedReader;
@@ -250,12 +251,14 @@ public class CliDriver {
}
// Set HDFS CallerContext to queryId and reset back to sessionId after the query is done
- ShimLoader.getHadoopShims().setHadoopQueryContext(qp.getQueryState().getQueryId());
+ ShimLoader.getHadoopShims()
+     .setHadoopQueryContext(String.format(USER_ID, qp.getQueryState().getQueryId(), ss.getUserName()));
try {
response = qp.run(cmd);
} catch (CommandProcessorException e) {
qp.close();
- ShimLoader.getHadoopShims().setHadoopSessionContext(ss.getSessionId());
+ ShimLoader.getHadoopShims()
+     .setHadoopSessionContext(String.format(USER_ID, ss.getSessionId(), ss.getUserName()));
throw e;
}
@@ -292,7 +295,8 @@ public class CliDriver {
throw new CommandProcessorException(1);
} finally {
qp.close();
- ShimLoader.getHadoopShims().setHadoopSessionContext(ss.getSessionId());
+ ShimLoader.getHadoopShims()
+     .setHadoopSessionContext(String.format(USER_ID, ss.getSessionId(), ss.getUserName()));
if (out instanceof FetchConverter) {
((FetchConverter) out).fetchFinished();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
index 0cb240670ee..c9562c24de2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
@@ -94,6 +94,8 @@ import org.slf4j.LoggerFactory;
import com.google.common.annotations.VisibleForTesting;
+import static org.apache.hadoop.hive.shims.HadoopShims.USER_ID;
+
/**
*
* TezTask handles the execution of TezWork. Currently it executes a graph of map and reduce work
@@ -197,8 +199,9 @@ public class TezTask extends Task<TezWork> {
// DAG scratch dir. We get a session from the pool so it may be different from Tez one.
// TODO: we could perhaps reuse the same directory for HiveResources?
Path scratchDir = utils.createTezDir(ctx.getMRScratchDir(), conf);
- CallerContext callerContext = CallerContext.create(
-     "HIVE", queryPlan.getQueryId(), "HIVE_QUERY_ID", queryPlan.getQueryStr());
+ CallerContext callerContext =
+     CallerContext.create("HIVE", String.format(USER_ID, queryPlan.getQueryId(), userName), "HIVE_QUERY_ID",
+         queryPlan.getQueryStr());
perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.TEZ_GET_SESSION);
session = sessionRef.value = WorkloadManagerFederation.getSession(
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index 03875b09afe..32c0891d3f9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.hive.ql.session;
import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_NAME;
+import static org.apache.hadoop.hive.shims.HadoopShims.USER_ID;
import java.io.Closeable;
import java.io.File;
@@ -470,7 +471,7 @@ public class SessionState implements ISessionAuthState{
killQuery = new NullKillQuery();
this.cleanupService = cleanupService;
- ShimLoader.getHadoopShims().setHadoopSessionContext(getSessionId());
+ ShimLoader.getHadoopShims().setHadoopSessionContext(String.format(USER_ID, getSessionId(), userName));
}
public Map<String, String> getHiveVariables() {
diff --git a/service/src/java/org/apache/hive/service/cli/operation/Operation.java b/service/src/java/org/apache/hive/service/cli/operation/Operation.java
index b48809e8f7b..55c8f87c0b7 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/Operation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/Operation.java
@@ -53,6 +53,8 @@ import org.slf4j.LoggerFactory;
import com.google.common.collect.Sets;
+import static org.apache.hadoop.hive.shims.HadoopShims.USER_ID;
+
public abstract class Operation {
protected final HiveSession parentSession;
protected boolean embedded;
@@ -237,7 +239,8 @@ public abstract class Operation {
* Set up some preconditions, or configurations.
*/
protected void beforeRun() {
- ShimLoader.getHadoopShims().setHadoopQueryContext(queryState.getQueryId());
+ ShimLoader.getHadoopShims()
+     .setHadoopQueryContext(String.format(USER_ID, queryState.getQueryId(), parentSession.getUserName()));
if (!embedded) {
createOperationLog();
LogUtils.registerLoggingContext(queryState.getConf());
@@ -263,7 +266,8 @@ public abstract class Operation {
LogUtils.unregisterLoggingContext();
}
// Reset back to session context after the query is done
- ShimLoader.getHadoopShims().setHadoopSessionContext(parentSession.getSessionState().getSessionId());
+ ShimLoader.getHadoopShims().setHadoopSessionContext(
+     String.format(USER_ID, parentSession.getSessionState().getSessionId(), parentSession.getUserName()));
}
/**
diff --git a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
index 04076b29a4a..b4550905fed 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
@@ -82,6 +82,8 @@ import org.apache.hive.service.cli.TableSchema;
import org.apache.hive.service.cli.session.HiveSession;
import org.apache.hive.service.server.ThreadWithGarbageCleanup;
+import static org.apache.hadoop.hive.shims.HadoopShims.USER_ID;
+
/**
* SQLOperation.
*/
@@ -328,7 +330,8 @@ public class SQLOperation extends ExecuteStatementOperation {
if (!embedded) {
LogUtils.registerLoggingContext(queryState.getConf());
}
- ShimLoader.getHadoopShims().setHadoopQueryContext(queryState.getQueryId());
+ ShimLoader.getHadoopShims()
+     .setHadoopQueryContext(String.format(USER_ID, queryState.getQueryId(), parentSessionState.getUserName()));
try {
if (asyncPrepare) {
diff --git a/service/src/java/org/apache/hive/service/cli/operation/hplsql/HplSqlOperation.java b/service/src/java/org/apache/hive/service/cli/operation/hplsql/HplSqlOperation.java
index e0c45b5868c..2bd829fa806 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/hplsql/HplSqlOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/hplsql/HplSqlOperation.java
@@ -49,6 +49,8 @@ import org.apache.hive.service.cli.operation.ExecuteStatementOperation;
import org.apache.hive.service.cli.session.HiveSession;
import org.apache.hive.service.server.ThreadWithGarbageCleanup;
+import static org.apache.hadoop.hive.shims.HadoopShims.USER_ID;
+
public class HplSqlOperation extends ExecuteStatementOperation implements
ResultListener {
private final Exec exec;
private final boolean runInBackground;
@@ -199,7 +201,8 @@ public class HplSqlOperation extends ExecuteStatementOperation implements Result
SessionState.setCurrentSessionState(parentSessionState);
PerfLogger.setPerfLogger(SessionState.getPerfLogger());
LogUtils.registerLoggingContext(queryState.getConf());
- ShimLoader.getHadoopShims().setHadoopQueryContext(queryState.getQueryId());
+ ShimLoader.getHadoopShims()
+     .setHadoopQueryContext(String.format(USER_ID, queryState.getQueryId(), parentSessionState.getUserName()));
try {
interpret();
} catch (HiveSQLException e) {
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
index 0f529c7d742..036153b4c99 100644
--- a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
+++ b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
@@ -72,6 +72,8 @@ import org.apache.hadoop.util.Progressable;
*/
public interface HadoopShims {
+ String USER_ID = "%s_User:%s";
+
/**
* Constructs and Returns TaskAttempt Logger Url
* or null if the TaskLogServlet is not available