This is an automated email from the ASF dual-hosted git repository.
peacewong pushed a commit to branch dev-1.3.2
in repository https://gitbox.apache.org/repos/asf/linkis.git
The following commit(s) were added to refs/heads/dev-1.3.2 by this push:
new d0c3ef8f6 fix: HIVE job log contains 'java.lang.NullPointerException' error.(#4206) (#4207)
d0c3ef8f6 is described below
commit d0c3ef8f6a8fae744868e105cce9d3c555e24878
Author: CharlieYan <[email protected]>
AuthorDate: Mon Feb 13 19:24:28 2023 +0800
fix: HIVE job log contains 'java.lang.NullPointerException' error.(#4206) (#4207)
---
.../hive/errorcode/HiveErrorCodeSummary.java | 4 +++-
.../hive/executor/HiveEngineConnExecutor.scala | 22 ++++++++++++++++++++--
.../core/receivers/DsmReceiver.scala | 10 ++++++----
3 files changed, 29 insertions(+), 7 deletions(-)
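
Background on the fix: the NullPointerException reported in #4206 appears to surface when the executor fetches the query plan (driver.getPlan) for a statement that never compiled successfully, leaving no plan for Utilities.getMRTasks to walk. The hunks below therefore compile the statement first, fail fast with the new COMPILE_HIVE_QUERY_ERROR (26044) code when compilation returns a non-zero status, and only then fetch the plan and run with alreadyCompiled = true. A minimal sketch of that flow, assuming a Hive 2.x org.apache.hadoop.hive.ql.Driver that exposes compile(String), getPlan() and run(String, Boolean) directly (the executor itself goes through HiveDriverProxy and raises HiveQueryFailedException rather than the placeholder exception used here):

    import org.apache.hadoop.hive.ql.Driver
    import org.apache.hadoop.hive.ql.exec.Utilities
    import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse

    def compileThenRun(driver: Driver, sql: String): CommandProcessorResponse = {
      // Compile first so a failure is reported as a compile error instead of a
      // NullPointerException when the (missing) plan is inspected later.
      val compileRet = driver.compile(sql)
      if (compileRet != 0) {
        throw new IllegalStateException(s"failed to compile hive query, status: $compileRet")
      }
      // Safe now: the plan exists, so the MR task count can be reported.
      val numberOfMRJobs = Utilities.getMRTasks(driver.getPlan().getRootTasks).size
      println(s"there are $numberOfMRJobs MR jobs to do")
      // alreadyCompiled = true tells the driver to reuse the plan instead of recompiling.
      driver.run(sql, true)
    }
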
diff --git a/linkis-engineconn-plugins/hive/src/main/java/org/apache/linkis/engineplugin/hive/errorcode/HiveErrorCodeSummary.java b/linkis-engineconn-plugins/hive/src/main/java/org/apache/linkis/engineplugin/hive/errorcode/HiveErrorCodeSummary.java
index b40e2dfc9..da976b51a 100644
--- a/linkis-engineconn-plugins/hive/src/main/java/org/apache/linkis/engineplugin/hive/errorcode/HiveErrorCodeSummary.java
+++ b/linkis-engineconn-plugins/hive/src/main/java/org/apache/linkis/engineplugin/hive/errorcode/HiveErrorCodeSummary.java
@@ -25,7 +25,9 @@ public enum HiveErrorCodeSummary implements LinkisErrorCode {
HIVE_EXEC_JAR_ERROR(
26041, "cannot find hive-exec.jar, start session failed(找不到
hive-exec.jar,启动会话失败)"),
GET_FIELD_SCHEMAS_ERROR(26042, "cannot get the field schemas(无法获取字段schemas)"),
- INVALID_VALUE(26043, "invalid value(无效值)");
+ INVALID_VALUE(26043, "invalid value(无效值)"),
+ COMPILE_HIVE_QUERY_ERROR(26044, "failed to compile hive query(hive语句编译失败)"),
+ ;
private final int errorCode;
diff --git a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala
index 089eef23a..b50d3c68a 100644
--- a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala
+++ b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala
@@ -28,6 +28,7 @@ import org.apache.linkis.engineconn.core.EngineConnObject
import org.apache.linkis.engineconn.executor.entity.ResourceFetchExecutor
import org.apache.linkis.engineplugin.hive.conf.{Counters, HiveEngineConfiguration}
import org.apache.linkis.engineplugin.hive.cs.CSHiveHelper
+import org.apache.linkis.engineplugin.hive.errorcode.HiveErrorCodeSummary.COMPILE_HIVE_QUERY_ERROR
import org.apache.linkis.engineplugin.hive.errorcode.HiveErrorCodeSummary.GET_FIELD_SCHEMAS_ERROR
import org.apache.linkis.engineplugin.hive.exception.HiveQueryFailedException
import org.apache.linkis.engineplugin.hive.progress.HiveProgressHelper
@@ -204,8 +205,18 @@ class HiveEngineConnExecutor(
driver.setTryCount(tryCount + 1)
val startTime = System.currentTimeMillis()
try {
+ var compileRet = -1
Utils.tryCatch {
- val queryPlan = driver.getPlan
+ compileRet = driver.compile(realCode)
+ logger.info(s"driver compile realCode : ${realCode} finished, status
: ${compileRet}")
+ if (0 != compileRet) {
+ logger.warn(s"compile realCode : ${realCode} error status :
${compileRet}")
+ throw HiveQueryFailedException(
+ COMPILE_HIVE_QUERY_ERROR.getErrorCode,
+ COMPILE_HIVE_QUERY_ERROR.getErrorDesc
+ )
+ }
+ val queryPlan = driver.getPlan()
val numberOfJobs = Utilities.getMRTasks(queryPlan.getRootTasks).size
numberOfMRJobs = numberOfJobs
logger.info(s"there are ${numberOfMRJobs} jobs.")
@@ -216,7 +227,7 @@ class HiveEngineConnExecutor(
if (numberOfMRJobs > 0) {
engineExecutorContext.appendStdout(s"Your hive sql has $numberOfMRJobs MR jobs to do")
}
- val hiveResponse: CommandProcessorResponse = driver.run(realCode)
+ val hiveResponse: CommandProcessorResponse = driver.run(realCode, compileRet == 0)
if (hiveResponse.getResponseCode != 0) {
LOG.error("Hive query failed, response code is {}",
hiveResponse.getResponseCode)
// todo check uncleared context ?
@@ -605,6 +616,13 @@ class HiveDriverProxy(driver: Any) extends Logging {
.asInstanceOf[CommandProcessorResponse]
}
+ def run(command: String, alreadyCompiled: Boolean): CommandProcessorResponse = {
+ driver.getClass
+ .getMethod("run", classOf[String], classOf[Boolean])
+ .invoke(driver, command.asInstanceOf[AnyRef], alreadyCompiled.asInstanceOf[AnyRef])
+ .asInstanceOf[CommandProcessorResponse]
+ }
+
def setTryCount(retry: Int): Unit = {
if (HiveDriverProxy.HAS_COMMAND_NEED_RETRY_EXCEPTION) {
driver.getClass
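
HiveDriverProxy resolves these Driver methods reflectively because the concrete org.apache.hadoop.hive.ql.Driver API differs across Hive versions. A hypothetical, more defensive variant of the new run overload (not part of this commit) could fall back to the single-argument run(String) when run(String, Boolean) is absent from the Hive version on the classpath:

    import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse
    import scala.util.Try

    def runViaReflection(driver: AnyRef, command: String, alreadyCompiled: Boolean): CommandProcessorResponse = {
      // Prefer run(String, boolean) so a statement compiled up front is not recompiled.
      val twoArgRun = Try(driver.getClass.getMethod("run", classOf[String], classOf[Boolean])).toOption
      twoArgRun match {
        case Some(m) =>
          m.invoke(driver, command.asInstanceOf[AnyRef], Boolean.box(alreadyCompiled))
            .asInstanceOf[CommandProcessorResponse]
        case None =>
          // Drivers without the overload: let run(String) compile and execute in one step.
          driver.getClass
            .getMethod("run", classOf[String])
            .invoke(driver, command.asInstanceOf[AnyRef])
            .asInstanceOf[CommandProcessorResponse]
      }
    }
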
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/scala/org/apache/linkis/datasourcemanager/core/receivers/DsmReceiver.scala b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/scala/org/apache/linkis/datasourcemanager/core/receivers/DsmReceiver.scala
index 52eb8d938..c32bb1b52 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/scala/org/apache/linkis/datasourcemanager/core/receivers/DsmReceiver.scala
+++ b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/main/scala/org/apache/linkis/datasourcemanager/core/receivers/DsmReceiver.scala
@@ -55,10 +55,12 @@ class DsmReceiver {
if (dsInfoQueryRequest.isValid) {
Utils.tryCatch {
var dataSource: DataSource = null
- if (Option(dsInfoQueryRequest.name).isDefined && Option(dsInfoQueryRequest.envId).isDefined) {
- logger.info(
- "Try to get dataSource by dataSourceName:" +
dsInfoQueryRequest.name + ", envId:" + dsInfoQueryRequest.envId
- )
+ if (
+ Option(dsInfoQueryRequest.name).isDefined && Option(dsInfoQueryRequest.envId).isDefined
+ ) {
+ logger.info(
+ "Try to get dataSource by dataSourceName:" +
dsInfoQueryRequest.name + ", envId:" + dsInfoQueryRequest.envId
+ )
dataSource = dataSourceInfoService.getDataSourceInfoForConnect(
dsInfoQueryRequest.name,
dsInfoQueryRequest.envId
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]