This is an automated email from the ASF dual-hosted git repository.
mgergely pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new 7575732 HIVE-22222 Clean up the error handling in Driver - get rid of
global variables (Miklos Gergely reviewed by Jesus Camacho Rodriguez)
7575732 is described below
commit 757573272a097957c2b084119cb448c0304e5c2c
Author: miklosgergely <[email protected]>
AuthorDate: Fri Sep 20 18:08:47 2019 +0200
HIVE-22222 Clean up the error handling in Driver - get rid of global
variables (Miklos Gergely reviewed by Jesus Camacho Rodriguez)
---
ql/src/java/org/apache/hadoop/hive/ql/Driver.java | 270 ++++++++-------------
.../hadoop/hive/ql/parse/TestHiveDecimalParse.java | 92 ++++---
2 files changed, 170 insertions(+), 192 deletions(-)
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 64375c1..00b21d5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -37,7 +37,6 @@ import java.util.Queue;
import java.util.Set;
import java.util.stream.Collectors;
-import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.conf.Configurable;
@@ -121,6 +120,7 @@ import
org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
import org.apache.hadoop.hive.ql.wm.WmContext;
import org.apache.hadoop.hive.serde2.ByteStream;
import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.util.StringUtils;
import org.apache.hive.common.util.ShutdownHookManager;
import org.apache.hive.common.util.TxnIdUtils;
import org.apache.thrift.TException;
@@ -150,9 +150,6 @@ public class Driver implements IDriver {
private DriverContext driverCxt;
private QueryPlan plan;
private Schema schema;
- private String errorMessage;
- private String SQLState;
- private Throwable downstreamError;
private FetchTask fetchTask;
private List<HiveLock> hiveLocks = new ArrayList<HiveLock>();
@@ -264,8 +261,7 @@ public class Driver implements IDriver {
try {
lst = HiveMetaStoreUtils.getFieldsFromDeserializer(tableName, td.getDeserializer(conf));
} catch (Exception e) {
- LOG.warn("Error getting schema: "
- + org.apache.hadoop.util.StringUtils.stringifyException(e));
+ LOG.warn("Error getting schema: " + StringUtils.stringifyException(e));
}
if (lst != null) {
schema = new Schema(lst, null);
@@ -356,7 +352,7 @@ public class Driver implements IDriver {
// interrupted, it should be set to true if the compile is called within
another method like
// runInternal, which defers the close to the called in that method.
@VisibleForTesting
- void compile(String command, boolean resetTaskIds, boolean deferClose) throws CommandProcessorResponse {
+ public void compile(String command, boolean resetTaskIds, boolean deferClose) throws CommandProcessorResponse {
PerfLogger perfLogger = SessionState.getPerfLogger();
perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.COMPILE);
driverState.lock();
@@ -568,11 +564,8 @@ public class Driver implements IDriver {
CommandAuthorizer.doAuthorization(queryState.getHiveOperation(),
sem, command);
}
} catch (AuthorizationException authExp) {
- console.printError("Authorization failed:" + authExp.getMessage()
- + ". Use SHOW GRANT to get more details.");
- errorMessage = authExp.getMessage();
- SQLState = "42000";
- throw createProcessorResponse(403);
+ console.printError("Authorization failed:" + authExp.getMessage() + ". Use SHOW GRANT to get more details.");
+ throw createProcessorResponse(403, authExp.getMessage(), "42000", null);
} finally {
perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.DO_AUTHORIZATION);
}
@@ -598,7 +591,7 @@ public class Driver implements IDriver {
compileError = true;
ErrorMsg error = ErrorMsg.getErrorMsg(e.getMessage());
- errorMessage = "FAILED: " + e.getClass().getSimpleName();
+ String errorMessage = "FAILED: " + e.getClass().getSimpleName();
if (error != ErrorMsg.GENERIC_ERROR) {
errorMessage += " [Error " + error.getErrorCode() + "]:";
}
@@ -614,11 +607,8 @@ public class Driver implements IDriver {
errorMessage += ". Failed command: " + queryStr;
}
- SQLState = error.getSQLState();
- downstreamError = e;
- console.printError(errorMessage, "\n"
- + org.apache.hadoop.util.StringUtils.stringifyException(e));
- throw createProcessorResponse(error.getErrorCode());
+ console.printError(errorMessage, "\n" + StringUtils.stringifyException(e));
+ throw createProcessorResponse(error.getErrorCode(), errorMessage, error.getSQLState(), e);
} finally {
// Trigger post compilation hook. Note that if the compilation fails
here then
// before/after execution hook will never be executed.
@@ -784,14 +774,9 @@ public class Driver implements IDriver {
}
private void openTransaction() throws LockException,
CommandProcessorResponse {
- if (checkConcurrency() && startImplicitTxn(queryTxnMgr)) {
+ if (checkConcurrency() && startImplicitTxn(queryTxnMgr) && !queryTxnMgr.isTxnOpen()) {
String userFromUGI = getUserFromUGI();
- if (!queryTxnMgr.isTxnOpen()) {
- if (userFromUGI == null) {
- throw createProcessorResponse(10);
- }
- queryTxnMgr.openTxn(ctx, userFromUGI);
- }
+ queryTxnMgr.openTxn(ctx, userFromUGI);
}
}
@@ -847,24 +832,18 @@ public class Driver implements IDriver {
return shouldOpenImplicitTxn;
}
- private int handleInterruptionWithHook(String msg, HookContext hookContext,
- PerfLogger perfLogger) {
- SQLState = "HY008"; //SQLState for cancel operation
- errorMessage = "FAILED: command has been interrupted: " + msg;
- console.printError(errorMessage);
- if (hookContext != null) {
- try {
- invokeFailureHooks(perfLogger, hookContext, errorMessage, null);
- } catch (Exception e) {
- LOG.warn("Caught exception attempting to invoke Failure Hooks", e);
- }
- }
- return 1000;
- }
-
private void checkInterrupted(String msg, HookContext hookContext,
PerfLogger perfLogger) throws CommandProcessorResponse {
if (driverState.isAborted()) {
- throw createProcessorResponse(handleInterruptionWithHook(msg, hookContext, perfLogger));
+ String errorMessage = "FAILED: command has been interrupted: " + msg;
+ console.printError(errorMessage);
+ if (hookContext != null) {
+ try {
+ invokeFailureHooks(perfLogger, hookContext, errorMessage, null);
+ } catch (Exception e) {
+ LOG.warn("Caught exception attempting to invoke Failure Hooks", e);
+ }
+ }
+ throw createProcessorResponse(1000, errorMessage, "HY008", null);
}
}
@@ -1071,19 +1050,16 @@ public class Driver implements IDriver {
return result;
}
- private String getUserFromUGI() {
+ private String getUserFromUGI() throws CommandProcessorResponse {
// Don't use the userName member, as it may or may not have been set. Get
the value from
// conf, which calls into getUGI to figure out who the process is running
as.
try {
return conf.getUser();
} catch (IOException e) {
- errorMessage = "FAILED: Error in determining user while acquiring locks: " + e.getMessage();
- SQLState = ErrorMsg.findSQLState(e.getMessage());
- downstreamError = e;
- console.printError(errorMessage,
- "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+ String errorMessage = "FAILED: Error in determining user while acquiring locks: " + e.getMessage();
+ console.printError(errorMessage, "\n" + StringUtils.stringifyException(e));
+ throw createProcessorResponse(10, errorMessage, ErrorMsg.findSQLState(e.getMessage()), e);
}
- return null;
}
/**
@@ -1108,9 +1084,6 @@ public class Driver implements IDriver {
}
try {
String userFromUGI = getUserFromUGI();
- if(userFromUGI == null) {
- throw createProcessorResponse(10);
- }
// Set the table write id in all of the acid file sinks
if (!plan.getAcidSinks().isEmpty()) {
@@ -1176,12 +1149,9 @@ public class Driver implements IDriver {
}
} catch (Exception e) {
- errorMessage = "FAILED: Error in acquiring locks: " + e.getMessage();
- SQLState = ErrorMsg.findSQLState(e.getMessage());
- downstreamError = e;
- console.printError(errorMessage, "\n"
- + org.apache.hadoop.util.StringUtils.stringifyException(e));
- throw createProcessorResponse(10);
+ String errorMessage = "FAILED: Error in acquiring locks: " +
e.getMessage();
+ console.printError(errorMessage, "\n" +
StringUtils.stringifyException(e));
+ throw createProcessorResponse(10, errorMessage,
ErrorMsg.findSQLState(e.getMessage()), e);
} finally {
perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.ACQUIRE_READ_WRITE_LOCKS);
}
@@ -1266,59 +1236,50 @@ public class Driver implements IDriver {
try {
runInternal(command, alreadyCompiled);
- return createProcessorResponse(0);
+ return createProcessorResponse(0, null, null, null);
} catch (CommandProcessorResponse cpr) {
-
- SessionState ss = SessionState.get();
- if(ss == null) {
- return cpr;
- }
- MetaDataFormatter mdf = MetaDataFormatUtils.getFormatter(ss.getConf());
- if(!(mdf instanceof JsonMetaDataFormatter)) {
- return cpr;
- }
- /*Here we want to encode the error in machine readable way (e.g. JSON)
- * Ideally, errorCode would always be set to a canonical error defined in
ErrorMsg.
- * In practice that is rarely the case, so the messy logic below tries to
tease
- * out canonical error code if it can. Exclude stack trace from output
when
- * the error is a specific/expected one.
- * It's written to stdout for backward compatibility (WebHCat consumes
it).*/
- try {
- if(downstreamError == null) {
- mdf.error(ss.out, errorMessage, cpr.getResponseCode(), SQLState);
+ SessionState ss = SessionState.get();
+ if (ss == null) {
return cpr;
}
- ErrorMsg canonicalErr = ErrorMsg.getErrorMsg(cpr.getResponseCode());
- if(canonicalErr != null && canonicalErr != ErrorMsg.GENERIC_ERROR) {
- /*Some HiveExceptions (e.g. SemanticException) don't set
- canonical ErrorMsg explicitly, but there is logic
- (e.g. #compile()) to find an appropriate canonical error and
- return its code as error code. In this case we want to
- preserve it for downstream code to interpret*/
- mdf.error(ss.out, errorMessage, cpr.getResponseCode(), SQLState, null);
+ MetaDataFormatter mdf = MetaDataFormatUtils.getFormatter(ss.getConf());
+ if (!(mdf instanceof JsonMetaDataFormatter)) {
return cpr;
}
- if(downstreamError instanceof HiveException) {
- HiveException rc = (HiveException) downstreamError;
- mdf.error(ss.out, errorMessage,
- rc.getCanonicalErrorMsg().getErrorCode(), SQLState,
- rc.getCanonicalErrorMsg() == ErrorMsg.GENERIC_ERROR ?
-
org.apache.hadoop.util.StringUtils.stringifyException(rc)
- : null);
- }
- else {
- ErrorMsg canonicalMsg =
- ErrorMsg.getErrorMsg(downstreamError.getMessage());
- mdf.error(ss.out, errorMessage, canonicalMsg.getErrorCode(),
- SQLState, org.apache.hadoop.util.StringUtils.
- stringifyException(downstreamError));
+ /*Here we want to encode the error in machine readable way (e.g. JSON)
+ * Ideally, errorCode would always be set to a canonical error defined
in ErrorMsg.
+ * In practice that is rarely the case, so the messy logic below tries
to tease
+ * out canonical error code if it can. Exclude stack trace from output
when
+ * the error is a specific/expected one.
+ * It's written to stdout for backward compatibility (WebHCat consumes
it).*/
+ try {
+ if (cpr.getException() == null) {
+ mdf.error(ss.out, cpr.getErrorMessage(), cpr.getResponseCode(),
cpr.getSQLState());
+ return cpr;
+ }
+ ErrorMsg canonicalErr = ErrorMsg.getErrorMsg(cpr.getResponseCode());
+ if (canonicalErr != null && canonicalErr != ErrorMsg.GENERIC_ERROR) {
+ /*Some HiveExceptions (e.g. SemanticException) don't set
+ canonical ErrorMsg explicitly, but there is logic
+ (e.g. #compile()) to find an appropriate canonical error and
+ return its code as error code. In this case we want to
+ preserve it for downstream code to interpret*/
+ mdf.error(ss.out, cpr.getErrorMessage(), cpr.getResponseCode(),
cpr.getSQLState(), null);
+ return cpr;
+ }
+ if (cpr.getException() instanceof HiveException) {
+ HiveException rc = (HiveException)cpr.getException();
+ mdf.error(ss.out, cpr.getErrorMessage(),
rc.getCanonicalErrorMsg().getErrorCode(), cpr.getSQLState(),
+ rc.getCanonicalErrorMsg() == ErrorMsg.GENERIC_ERROR ?
StringUtils.stringifyException(rc) : null);
+ } else {
+ ErrorMsg canonicalMsg =
ErrorMsg.getErrorMsg(cpr.getException().getMessage());
+ mdf.error(ss.out, cpr.getErrorMessage(),
canonicalMsg.getErrorCode(), cpr.getSQLState(),
+ StringUtils.stringifyException(cpr.getException()));
+ }
+ } catch (HiveException ex) {
+ console.printError("Unable to JSON-encode the error",
StringUtils.stringifyException(ex));
}
- }
- catch(HiveException ex) {
- console.printError("Unable to JSON-encode the error",
- org.apache.hadoop.util.StringUtils.stringifyException(ex));
- }
- return cpr;
+ return cpr;
}
}
@@ -1330,7 +1291,7 @@ public class Driver implements IDriver {
public CommandProcessorResponse compileAndRespond(String command, boolean
cleanupTxnList) {
try {
compileInternal(command, false);
- return createProcessorResponse(0);
+ return createProcessorResponse(0, null, null, null);
} catch (CommandProcessorResponse e) {
return e;
} finally {
@@ -1377,8 +1338,8 @@ public class Driver implements IDriver {
metrics.decrementCounter(MetricsConstant.WAITING_COMPILE_OPS, 1);
}
if (!success) {
- errorMessage = ErrorMsg.COMPILE_LOCK_TIMED_OUT.getErrorCodedMsg();
- throw
createProcessorResponse(ErrorMsg.COMPILE_LOCK_TIMED_OUT.getErrorCode());
+ String errorMessage =
ErrorMsg.COMPILE_LOCK_TIMED_OUT.getErrorCodedMsg();
+ throw
createProcessorResponse(ErrorMsg.COMPILE_LOCK_TIMED_OUT.getErrorCode(),
errorMessage, null, null);
}
try {
@@ -1387,7 +1348,7 @@ public class Driver implements IDriver {
try {
releaseLocksAndCommitOrRollback(false);
} catch (LockException e) {
- LOG.warn("Exception in releasing locks. " +
org.apache.hadoop.util.StringUtils.stringifyException(e));
+ LOG.warn("Exception in releasing locks. " +
StringUtils.stringifyException(e));
}
throw cpr;
}
@@ -1400,9 +1361,6 @@ public class Driver implements IDriver {
}
private void runInternal(String command, boolean alreadyCompiled) throws
CommandProcessorResponse {
- errorMessage = null;
- SQLState = null;
- downstreamError = null;
DriverState.setDriverState(driverState);
driverState.lock();
@@ -1411,9 +1369,9 @@ public class Driver implements IDriver {
if (driverState.isCompiled()) {
driverState.executing();
} else {
- errorMessage = "FAILED: Precompiled query has been cancelled or
closed.";
+ String errorMessage = "FAILED: Precompiled query has been cancelled
or closed.";
console.printError(errorMessage);
- throw createProcessorResponse(12);
+ throw createProcessorResponse(12, errorMessage, null, null);
}
} else {
driverState.compiling();
@@ -1432,12 +1390,9 @@ public class Driver implements IDriver {
try {
hookRunner.runPreDriverHooks(hookContext);
} catch (Exception e) {
- errorMessage = "FAILED: Hive Internal Error: " +
Utilities.getNameMessage(e);
- SQLState = ErrorMsg.findSQLState(e.getMessage());
- downstreamError = e;
- console.printError(errorMessage + "\n"
- + org.apache.hadoop.util.StringUtils.stringifyException(e));
- throw createProcessorResponse(12);
+ String errorMessage = "FAILED: Hive Internal Error: " +
Utilities.getNameMessage(e);
+ console.printError(errorMessage + "\n" +
StringUtils.stringifyException(e));
+ throw createProcessorResponse(12, errorMessage,
ErrorMsg.findSQLState(e.getMessage()), e);
}
if (!alreadyCompiled) {
@@ -1537,12 +1492,9 @@ public class Driver implements IDriver {
try {
hookRunner.runPostDriverHooks(hookContext);
} catch (Exception e) {
- errorMessage = "FAILED: Hive Internal Error: " +
Utilities.getNameMessage(e);
- SQLState = ErrorMsg.findSQLState(e.getMessage());
- downstreamError = e;
- console.printError(errorMessage + "\n"
- + org.apache.hadoop.util.StringUtils.stringifyException(e));
- throw createProcessorResponse(12);
+ String errorMessage = "FAILED: Hive Internal Error: " +
Utilities.getNameMessage(e);
+ console.printError(errorMessage + "\n" +
StringUtils.stringifyException(e));
+ throw createProcessorResponse(12, errorMessage,
ErrorMsg.findSQLState(e.getMessage()), e);
}
isFinishedWithError = false;
} finally {
@@ -1580,16 +1532,14 @@ public class Driver implements IDriver {
}
private CommandProcessorResponse handleHiveException(HiveException e, int
ret, String rootMsg) throws CommandProcessorResponse {
- errorMessage = "FAILED: Hive Internal Error: " +
Utilities.getNameMessage(e);
+ String errorMessage = "FAILED: Hive Internal Error: " +
Utilities.getNameMessage(e);
if(rootMsg != null) {
errorMessage += "\n" + rootMsg;
}
- SQLState = e.getCanonicalErrorMsg() != null ?
- e.getCanonicalErrorMsg().getSQLState() :
ErrorMsg.findSQLState(e.getMessage());
- downstreamError = e;
- console.printError(errorMessage + "\n"
- + org.apache.hadoop.util.StringUtils.stringifyException(e));
- throw createProcessorResponse(ret);
+ String sqlState = e.getCanonicalErrorMsg() != null ?
+ e.getCanonicalErrorMsg().getSQLState() :
ErrorMsg.findSQLState(e.getMessage());
+ console.printError(errorMessage + "\n" +
StringUtils.stringifyException(e));
+ throw createProcessorResponse(ret, errorMessage, sqlState, e);
}
private boolean requiresLock() {
if (!checkConcurrency()) {
@@ -1637,17 +1587,18 @@ public class Driver implements IDriver {
return false;
}
- private CommandProcessorResponse createProcessorResponse(int ret) {
+ private CommandProcessorResponse createProcessorResponse(int ret, String
errorMessage, String sqlState,
+ Throwable downstreamError) {
SessionState.getPerfLogger().cleanupPerfLogMetrics();
queryDisplay.setErrorMessage(errorMessage);
if(downstreamError != null && downstreamError instanceof HiveException) {
ErrorMsg em = ((HiveException)downstreamError).getCanonicalErrorMsg();
if(em != null) {
- return new CommandProcessorResponse(ret, errorMessage, SQLState,
+ return new CommandProcessorResponse(ret, errorMessage, sqlState,
schema, downstreamError, em.getErrorCode(), null);
}
}
- return new CommandProcessorResponse(ret, errorMessage, SQLState,
downstreamError);
+ return new CommandProcessorResponse(ret, errorMessage, sqlState,
downstreamError);
}
private void useFetchFromCache(CacheEntry cacheEntry) {
@@ -1718,7 +1669,7 @@ public class Driver implements IDriver {
PerfLogger perfLogger = SessionState.getPerfLogger();
perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.DRIVER_EXECUTE);
- boolean noName = StringUtils.isEmpty(conf.get(MRJobConfig.JOB_NAME));
+ boolean noName = Strings.isNullOrEmpty(conf.get(MRJobConfig.JOB_NAME));
int maxlen;
if ("spark".equals(conf.getVar(ConfVars.HIVE_EXECUTION_ENGINE))) {
@@ -1737,11 +1688,10 @@ public class Driver implements IDriver {
try {
// if query is not in compiled state, or executing state which is
carried over from
// a combined compile/execute in runInternal, throws the error
- if (driverState.isCompiled() && driverState.isExecuting()) {
- SQLState = "HY008";
- errorMessage = "FAILED: unexpected driverstate: " + driverState + ",
for query " + queryStr;
+ if (!driverState.isCompiled() && !driverState.isExecuting()) {
+ String errorMessage = "FAILED: unexpected driverstate: " + driverState
+ ", for query " + queryStr;
console.printError(errorMessage);
- throw createProcessorResponse(1000);
+ throw createProcessorResponse(1000, errorMessage, "HY008", null);
} else {
driverState.executing();
}
@@ -1872,7 +1822,7 @@ public class Driver implements IDriver {
if (exitVal != 0) {
Task<? extends Serializable> backupTask = tsk.getAndInitBackupTask();
if (backupTask != null) {
- setErrorMsgAndDetail(exitVal, result.getTaskError(), tsk);
+ String errorMessage = getErrorMsgAndDetail(exitVal,
result.getTaskError(), tsk);
console.printError(errorMessage);
errorMessage = "ATTEMPT: Execute BackupTask: " +
backupTask.getClass().getName();
console.printError(errorMessage);
@@ -1884,13 +1834,13 @@ public class Driver implements IDriver {
continue;
} else {
- setErrorMsgAndDetail(exitVal, result.getTaskError(), tsk);
+ String errorMessage = getErrorMsgAndDetail(exitVal,
result.getTaskError(), tsk);
if (driverCxt.isShutdown()) {
errorMessage = "FAILED: Operation cancelled. " + errorMessage;
}
invokeFailureHooks(perfLogger, hookContext,
errorMessage + Strings.nullToEmpty(tsk.getDiagnosticsMessage()),
result.getTaskError());
- SQLState = "08S01";
+ String sqlState = "08S01";
// 08S01 (Communication error) is the default sql state. Override
the sqlstate
// based on the ErrorMsg set in HiveException.
@@ -1898,7 +1848,7 @@ public class Driver implements IDriver {
ErrorMsg errorMsg = ((HiveException) result.getTaskError()).
getCanonicalErrorMsg();
if (errorMsg != ErrorMsg.GENERIC_ERROR) {
- SQLState = errorMsg.getSQLState();
+ sqlState = errorMsg.getSQLState();
}
}
@@ -1907,7 +1857,7 @@ public class Driver implements IDriver {
// in case we decided to run everything in local mode, restore the
// the jobtracker setting to its initial value
ctx.restoreOriginalTracker();
- throw createProcessorResponse(exitVal);
+ throw createProcessorResponse(exitVal, errorMessage, sqlState,
result.getTaskError());
}
}
@@ -1936,11 +1886,10 @@ public class Driver implements IDriver {
ctx.restoreOriginalTracker();
if (driverCxt.isShutdown()) {
- SQLState = "HY008";
- errorMessage = "FAILED: Operation cancelled";
+ String errorMessage = "FAILED: Operation cancelled";
invokeFailureHooks(perfLogger, hookContext, errorMessage, null);
console.printError(errorMessage);
- throw createProcessorResponse(1000);
+ throw createProcessorResponse(1000, errorMessage, "HY008", null);
}
// remove incomplete outputs.
@@ -1982,7 +1931,7 @@ public class Driver implements IDriver {
String.valueOf(12));
}
// TODO: do better with handling types of Exception here
- errorMessage = "FAILED: Hive Internal Error: " +
Utilities.getNameMessage(e);
+ String errorMessage = "FAILED: Hive Internal Error: " +
Utilities.getNameMessage(e);
if (hookContext != null) {
try {
invokeFailureHooks(perfLogger, hookContext, errorMessage, e);
@@ -1990,11 +1939,8 @@ public class Driver implements IDriver {
LOG.warn("Failed to invoke failure hook", t);
}
}
- SQLState = "08S01";
- downstreamError = e;
- console.printError(errorMessage + "\n"
- + org.apache.hadoop.util.StringUtils.stringifyException(e));
- throw createProcessorResponse(12);
+ console.printError(errorMessage + "\n" +
StringUtils.stringifyException(e));
+ throw createProcessorResponse(12, errorMessage, "08S01", e);
} finally {
// Trigger query hooks after query completes its execution.
try {
@@ -2111,16 +2057,15 @@ public class Driver implements IDriver {
LOG.warn(warning);
}
- private void setErrorMsgAndDetail(int exitVal, Throwable downstreamError,
Task tsk) {
- this.downstreamError = downstreamError;
- errorMessage = "FAILED: Execution Error, return code " + exitVal + " from
" + tsk.getClass().getName();
- if(downstreamError != null) {
+ private String getErrorMsgAndDetail(int exitVal, Throwable downstreamError,
Task tsk) {
+ String errorMessage = "FAILED: Execution Error, return code " + exitVal +
" from " + tsk.getClass().getName();
+ if (downstreamError != null) {
//here we assume that upstream code may have parametrized the msg from
ErrorMsg
//so we want to keep it
if (downstreamError.getMessage() != null) {
errorMessage += ". " + downstreamError.getMessage();
} else {
- errorMessage += ". " +
org.apache.hadoop.util.StringUtils.stringifyException(downstreamError);
+ errorMessage += ". " + StringUtils.stringifyException(downstreamError);
}
}
else {
@@ -2129,6 +2074,8 @@ public class Driver implements IDriver {
errorMessage += ". " + em.getMsg();
}
}
+
+ return errorMessage;
}
private void invokeFailureHooks(PerfLogger perfLogger,
@@ -2455,11 +2402,6 @@ public class Driver implements IDriver {
ShutdownHookManager.removeShutdownHook(shutdownRunner);
}
- public String getErrorMsg() {
- return errorMessage;
- }
-
-
@Override
public QueryDisplay getQueryDisplay() {
return queryDisplay;
diff --git
a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java
b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java
index 5096763..e4535ca 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.ddl.DDLTask;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
import org.apache.hadoop.hive.ql.ddl.table.creation.CreateTableDesc;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.junit.Assert;
import org.junit.Test;
@@ -57,10 +58,15 @@ public class TestHiveDecimalParse {
String query = "create table `dec` (d decimal(66,7))";
Driver driver = createDriver();
- int rc = driver.compile(query, true);
- Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
- Assert.assertTrue(driver.getErrorMsg(),
- driver.getErrorMsg().contains("Decimal precision out of allowed range
[1,38]"));
+ try {
+ driver.compile(query, true, false);
+ } catch (CommandProcessorResponse cpe) {
+ Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not
zero", cpe.getResponseCode() != 0);
+ Assert.assertTrue(cpe.getErrorMessage(),
+ cpe.getErrorMessage().contains("Decimal precision out of allowed
range [1,38]"));
+ return;
+ }
+ Assert.assertTrue("Expected to receive an exception", false);
}
@Test
@@ -68,10 +74,15 @@ public class TestHiveDecimalParse {
String query = "create table `dec` (d decimal(0,7))";
Driver driver = createDriver();
- int rc = driver.compile(query, true);
- Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
- Assert.assertTrue(driver.getErrorMsg(),
- driver.getErrorMsg().contains("Decimal precision out of allowed range
[1,38]"));
+ try {
+ driver.compile(query, true, false);
+ } catch (CommandProcessorResponse cpe) {
+ Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not
zero", cpe.getResponseCode() != 0);
+ Assert.assertTrue(cpe.getErrorMessage(),
+ cpe.getErrorMessage().contains("Decimal precision out of allowed
range [1,38]"));
+ return;
+ }
+ Assert.assertTrue("Expected to receive an exception", false);
}
@Test
@@ -79,10 +90,15 @@ public class TestHiveDecimalParse {
String query = "create table `dec` (d decimal(7,33))";
Driver driver = createDriver();
- int rc = driver.compile(query, true);
- Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
- Assert.assertTrue(driver.getErrorMsg(),
- driver.getErrorMsg().contains("Decimal scale must be less than or
equal to precision"));
+ try {
+ driver.compile(query, true, false);
+ } catch (CommandProcessorResponse cpe) {
+ Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not
zero", cpe.getResponseCode() != 0);
+ Assert.assertTrue(cpe.getErrorMessage(),
+ cpe.getErrorMessage().contains("Decimal scale must be less than or
equal to precision"));
+ return;
+ }
+ Assert.assertTrue("Expected to receive an exception", false);
}
@Test
@@ -90,10 +106,15 @@ public class TestHiveDecimalParse {
String query = "create table `dec` (d decimal(7,-1))";
Driver driver = createDriver();
- int rc = driver.compile(query, true);
- Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
- Assert.assertTrue(driver.getErrorMsg(),
- driver.getErrorMsg().contains("extraneous input '-' expecting
Number"));
+ try {
+ driver.compile(query, true, false);
+ } catch (CommandProcessorResponse cpe) {
+ Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not
zero", cpe.getResponseCode() != 0);
+ Assert.assertTrue(cpe.getErrorMessage(),
+ cpe.getErrorMessage().contains("extraneous input '-' expecting
Number"));
+ return;
+ }
+ Assert.assertTrue("Expected to receive an exception", false);
}
@Test
@@ -101,10 +122,15 @@ public class TestHiveDecimalParse {
String query = "create table `dec` (d decimal(7,33,4))";
Driver driver = createDriver();
- int rc = driver.compile(query, true);
- Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
- Assert.assertTrue(driver.getErrorMsg(),
- driver.getErrorMsg().contains("missing ) at ',' near ',' in column name
or constraint"));
+ try {
+ driver.compile(query, true, false);
+ } catch (CommandProcessorResponse cpe) {
+ Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not
zero", cpe.getResponseCode() != 0);
+ Assert.assertTrue(cpe.getErrorMessage(),
+ cpe.getErrorMessage().contains("missing ) at ',' near ',' in column
name or constraint"));
+ return;
+ }
+ Assert.assertTrue("Expected to receive an exception", false);
}
@Test
@@ -112,10 +138,15 @@ public class TestHiveDecimalParse {
String query = "create table `dec` (d decimal(7a))";
Driver driver = createDriver();
- int rc = driver.compile(query, true);
- Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
- Assert.assertTrue(driver.getErrorMsg(),
- driver.getErrorMsg().contains("mismatched input '7a' expecting Number
near '('"));
+ try {
+ driver.compile(query, true, false);
+ } catch (CommandProcessorResponse cpe) {
+ Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not
zero", cpe.getResponseCode() != 0);
+ Assert.assertTrue(cpe.getErrorMessage(),
+ cpe.getErrorMessage().contains("mismatched input '7a' expecting
Number near '('"));
+ return;
+ }
+ Assert.assertTrue("Expected to receive an exception", false);
}
@Test
@@ -123,10 +154,15 @@ public class TestHiveDecimalParse {
String query = "create table `dec` (d decimal(20,23))";
Driver driver = createDriver();
- int rc = driver.compile(query, true);
- Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
- Assert.assertTrue(driver.getErrorMsg(),
- driver.getErrorMsg().contains("Decimal scale must be less than or
equal to precision"));
+ try {
+ driver.compile(query, true, false);
+ } catch (CommandProcessorResponse cpe) {
+ Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not
zero", cpe.getResponseCode() != 0);
+ Assert.assertTrue(cpe.getErrorMessage(),
+ cpe.getErrorMessage().contains("Decimal scale must be less than or
equal to precision"));
+ return;
+ }
+ Assert.assertTrue("Expected to receive an exception", false);
}
private Driver createDriver() {