This is an automated email from the ASF dual-hosted git repository.
kunalkapoor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git
The following commit(s) were added to refs/heads/master by this push:
new 59ad77a [CARBONDATA-4121] Prepriming is not working in Index Server
59ad77a is described below
commit 59ad77a96bcbada2c62bc31cbda2acf24bfa7498
Author: Karan980 <[email protected]>
AuthorDate: Fri Feb 5 16:56:23 2021 +0530
[CARBONDATA-4121] Prepriming is not working in Index Server
Why is this PR needed?
Prepriming is not working in the Index Server. Server.getRemoteUser
returns a null value in the async prepriming call, which results in an
NPE and crashes the Index Server application. The issue was introduced by PR #3952.
What changes were proposed in this PR?
Computed the Server.getRemoteUser value before making the async prepriming
call, and then used that precomputed value inside the async call. This
restores the code to its behavior before PR #3952.
This closes #4088
---
.../main/scala/org/apache/carbondata/indexserver/IndexServer.scala | 7 +++++--
1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/integration/spark/src/main/scala/org/apache/carbondata/indexserver/IndexServer.scala b/integration/spark/src/main/scala/org/apache/carbondata/indexserver/IndexServer.scala
index 207fb85..089b857 100644
--- a/integration/spark/src/main/scala/org/apache/carbondata/indexserver/IndexServer.scala
+++ b/integration/spark/src/main/scala/org/apache/carbondata/indexserver/IndexServer.scala
@@ -128,6 +128,10 @@ object IndexServer extends ServerInterface {
def getCount(request: IndexInputFormat): LongWritable = {
doAs {
val sparkSession = SparkSQLUtil.getSparkSession
+ var currentUser: String = null
+ if (!request.isFallbackJob) {
+ currentUser = Server.getRemoteUser.getShortUserName
+ }
lazy val getCountTask = {
if (!request.isFallbackJob) {
sparkSession.sparkContext.setLocalProperty("spark.jobGroup.id",
request.getTaskGroupId)
@@ -139,8 +143,7 @@ object IndexServer extends ServerInterface {
}
sparkSession.sparkContext.setLocalProperty("spark.job.description",
taskGroupDesc)
// Fire Generic Event like ACLCheck..etc
- val indexServerEvent = IndexServerEvent(sparkSession, request.getCarbonTable,
- Server.getRemoteUser.getShortUserName)
+ val indexServerEvent = IndexServerEvent(sparkSession, request.getCarbonTable, currentUser)
OperationListenerBus.getInstance().fireEvent(indexServerEvent,
operationContext)
}
val splits = new DistributedCountRDD(sparkSession, request).collect()