This is an automated email from the ASF dual-hosted git repository.
sankarh pushed a commit to branch branch-3
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/branch-3 by this push:
new 3045ca6da96 HIVE-27721: Backport of HIVE-23396: Many fixes and
improvements to stabilize tests (Zoltan Haindrich reviewed by Miklos Gergely)
3045ca6da96 is described below
commit 3045ca6da96ead155aa64385b3a910e0d42fd440
Author: Aman Raj <[email protected]>
AuthorDate: Tue Sep 26 10:12:25 2023 +0530
HIVE-27721: Backport of HIVE-23396: Many fixes and improvements to
stabilize tests (Zoltan Haindrich reviewed by Miklos Gergely)
Signed-off-by: Sankar Hariappan <[email protected]>
Closes (#4736)
---
.../metrics/metrics2/TestCodahaleMetrics.java | 2 +-
.../org/apache/hadoop/hive/ql/metadata/Hive.java | 2 +-
.../hadoop/hive/metastore/txn/TestTxnHandler.java | 6 ++---
.../apache/hadoop/hive/ql/metadata/TestHive.java | 31 +++++++++++++---------
.../hadoop/hive/ql/metadata/TestHiveRemote.java | 25 ++++++++++++-----
.../cli/session/TestSessionManagerMetrics.java | 2 +-
.../hadoop/hive/metastore/HiveMetaStore.java | 6 +++--
.../hadoop/hive/metastore/MetaStoreTestUtils.java | 2 +-
.../hadoop/hive/metastore/TestMarkPartition.java | 2 +-
.../hive/metastore/client/MetaStoreClientTest.java | 8 +++---
10 files changed, 54 insertions(+), 32 deletions(-)
diff --git
a/common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleMetrics.java
b/common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleMetrics.java
index 9595e72f58c..abe0892af7d 100644
---
a/common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleMetrics.java
+++
b/common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleMetrics.java
@@ -55,7 +55,7 @@ public class TestCodahaleMetrics {
private static final Path tmpDir =
Paths.get(System.getProperty("java.io.tmpdir"));
private static File jsonReportFile;
private static MetricRegistry metricRegistry;
- private static final long REPORT_INTERVAL_MS = 100;
+ private static final long REPORT_INTERVAL_MS = 2000;
@BeforeClass
public static void setUp() throws Exception {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 35c72bbcf54..74dbdfb9a95 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -460,7 +460,7 @@ public class Hive {
/**
* closes the connection to metastore for the calling thread
*/
- private void close() {
+ public void close() {
LOG.debug("Closing current thread's connection to Hive Metastore.");
if (metaStoreClient != null) {
metaStoreClient.close();
diff --git
a/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java
b/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java
index be37b2a286d..71e7eacff87 100644
--- a/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java
+++ b/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java
@@ -1142,7 +1142,7 @@ public class TestTxnHandler {
LockRequest req = new LockRequest(components, "me", "localhost");
LockResponse res = txnHandler.lock(req);
assertTrue(res.getState() == LockState.ACQUIRED);
- Thread.sleep(10);
+ Thread.sleep(1000);
txnHandler.performTimeOuts();
txnHandler.checkLock(new CheckLockRequest(res.getLockid()));
fail("Told there was a lock, when it should have timed out.");
@@ -1157,7 +1157,7 @@ public class TestTxnHandler {
long timeout = txnHandler.setTimeout(1);
try {
txnHandler.openTxns(new OpenTxnRequest(503, "me", "localhost"));
- Thread.sleep(10);
+ Thread.sleep(1000);
txnHandler.performTimeOuts();
GetOpenTxnsInfoResponse rsp = txnHandler.getOpenTxnsInfo();
int numAborted = 0;
@@ -1180,7 +1180,7 @@ public class TestTxnHandler {
request.setReplPolicy("default.*");
request.setReplSrcTxnIds(response.getTxn_ids());
OpenTxnsResponse responseRepl = txnHandler.openTxns(request);
- Thread.sleep(10);
+ Thread.sleep(1000);
txnHandler.performTimeOuts();
GetOpenTxnsInfoResponse rsp = txnHandler.getOpenTxnsInfo();
int numAborted = 0;
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
index e455079d794..4fa2e990266 100755
--- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
@@ -59,23 +59,29 @@ import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.LoggerConfig;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.junit.Assert;
+import org.slf4j.LoggerFactory;
import com.google.common.collect.ImmutableMap;
-import junit.framework.TestCase;
-
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.fail;
+import org.junit.BeforeClass;
+import org.junit.AfterClass;
/**
* TestHive.
*
*/
-public class TestHive extends TestCase {
- protected Hive hm;
- protected HiveConf hiveConf;
-
- @Override
- protected void setUp() throws Exception {
- super.setUp();
- hiveConf = new HiveConf(this.getClass());
+public class TestHive {
+ protected static Hive hm;
+ protected static HiveConf hiveConf;
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ hiveConf = new HiveConf(TestHive.class);
hiveConf
.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
@@ -94,10 +100,9 @@ public class TestHive extends TestCase {
}
}
- @Override
- protected void tearDown() throws Exception {
+ @AfterClass
+ public static void tearDown() throws Exception {
try {
- super.tearDown();
// disable trash
hiveConf.setFloat("fs.trash.checkpoint.interval", 30); // FS_TRASH_CHECKPOINT_INTERVAL_KEY (hadoop-2)
hiveConf.setFloat("fs.trash.interval", 30); // FS_TRASH_INTERVAL_KEY (hadoop-2)
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveRemote.java
b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveRemote.java
index eccca481742..b367f08de1d 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveRemote.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveRemote.java
@@ -23,8 +23,13 @@ import java.net.ServerSocket;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.MetaStoreTestUtils;
+import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.util.StringUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+
/**
*
* TestHiveRemote.
@@ -36,15 +41,17 @@ public class TestHiveRemote extends TestHive {
/**
* Start a remote metastore and initialize a Hive object pointing at it.
*/
- @Override
- protected void setUp() throws Exception {
- super.setUp();
- hiveConf = new HiveConf(this.getClass());
- hiveConf
- .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
+ @BeforeClass
+ public static void setUp() throws Exception {
+ hiveConf = new HiveConf(TestHiveRemote.class);
+ hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
MetaStoreTestUtils.startMetaStoreWithRetry(hiveConf);
+ }
+ @Before
+ public void before() throws Exception {
+ SessionState.start(hiveConf);
try {
hm = Hive.get(hiveConf);
} catch (Exception e) {
@@ -56,6 +63,12 @@ public class TestHiveRemote extends TestHive {
}
}
+ @After
+ public void after() throws IOException {
+ SessionState.get().close();
+ hm.close();
+ }
+
/**
* Cannot control trash in remote metastore, so skip this test
*/
diff --git
a/service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java
b/service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java
index 565545824a7..0365a049645 100644
---
a/service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java
+++
b/service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java
@@ -376,7 +376,7 @@ public class TestSessionManagerMetrics {
// We're going to wait for the session to be abandoned.
String currentValue;
- int count = 5; // how many times we'll sleep before giving up
+ int count = 10; // how many times we'll sleep before giving up
String expectedValue = "1";
do {
// HIVE_SERVER2_SESSION_CHECK_INTERVAL is set to 3 seconds, so we have to wait for at least
diff --git
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 84697df3fcb..bf00455e146 100644
---
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -807,8 +807,10 @@ public class HiveMetaStore extends ThriftHiveMetastore {
} catch (JDOException e) {
LOG.warn("Retrying creating default database after error: " +
e.getMessage(), e);
try {
- createDefaultDB_core(getMS());
- } catch (InvalidObjectException e1) {
+ RawStore ms = getMS();
+ createDefaultCatalog(ms, wh);
+ createDefaultDB_core(ms);
+ } catch (InvalidObjectException | InvalidOperationException e1) {
throw new MetaException(e1.getMessage());
}
} catch (InvalidObjectException|InvalidOperationException e) {
diff --git
a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/MetaStoreTestUtils.java
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/MetaStoreTestUtils.java
index 3d36b60ec99..877ab5bfb07 100644
---
a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/MetaStoreTestUtils.java
+++
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/MetaStoreTestUtils.java
@@ -128,7 +128,7 @@ public class MetaStoreTestUtils {
String jdbcUrl = MetastoreConf.getVar(conf, ConfVars.CONNECT_URL_KEY);
if (!keepJdbcUri) {
// Setting metastore instance specific jdbc url postfixed with port
- jdbcUrl = "jdbc:derby:;databaseName=" + TMP_DIR + File.separator
+ jdbcUrl = "jdbc:derby:memory:" + TMP_DIR + File.separator
+ "junit_metastore_db_" + metaStorePort + ";create=true";
MetastoreConf.setVar(conf, ConfVars.CONNECT_URL_KEY, jdbcUrl);
}
diff --git
a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestMarkPartition.java
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestMarkPartition.java
index 00fae25be6d..811932f23ba 100644
---
a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestMarkPartition.java
+++
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestMarkPartition.java
@@ -83,7 +83,7 @@ public class TestMarkPartition {
kvs.put("b", "'2011'");
msc.markPartitionForEvent(dbName, tableName, kvs,
PartitionEventType.LOAD_DONE);
Assert.assertTrue(msc.isPartitionMarkedForEvent(dbName, tableName, kvs,
PartitionEventType.LOAD_DONE));
- Thread.sleep(3000);
+ Thread.sleep(10000);
Assert.assertFalse(msc.isPartitionMarkedForEvent(dbName, tableName, kvs,
PartitionEventType.LOAD_DONE));
kvs.put("b", "'2012'");
diff --git
a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/MetaStoreClientTest.java
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/MetaStoreClientTest.java
index dc48fa8308a..2bc53317565 100644
---
a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/MetaStoreClientTest.java
+++
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/MetaStoreClientTest.java
@@ -31,7 +31,7 @@ import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.Set;
+
import java.util.stream.Collectors;
/**
@@ -45,14 +45,14 @@ public abstract class MetaStoreClientTest {
// Needed until there is no junit release with @BeforeParam, @AfterParam
(junit 4.13)
//
https://github.com/junit-team/junit4/commit/1bf8438b65858565dbb64736bfe13aae9cfc1b5a
// Then we should remove our own copy
- private static Set<AbstractMetaStoreService> metaStoreServices = null;
+ private static List<AbstractMetaStoreService> metaStoreServices = null;
@Parameterized.Parameters(name = "{0}")
public static List<Object[]> getMetaStoreToTest() throws Exception {
List<Object[]> result = MetaStoreFactoryForTests.getMetaStores();
metaStoreServices = result.stream()
.map(test -> (AbstractMetaStoreService)test[1])
- .collect(Collectors.toSet());
+ .collect(Collectors.toList());
return result;
}
@@ -76,6 +76,7 @@ public abstract class MetaStoreClientTest {
// Catch the exceptions, so every other metastore could be stopped as well
// Log it, so at least there is a slight possibility we find out about this :)
LOG.error("Error starting MetaStoreService", e);
+ throw new RuntimeException("Error starting MetaStoreService", e);
}
}
}
@@ -89,6 +90,7 @@ public abstract class MetaStoreClientTest {
// Catch the exceptions, so every other metastore could be stopped as well
// Log it, so at least there is a slight possibility we find out about this :)
LOG.error("Error stopping MetaStoreService", e);
+ throw new RuntimeException("Error stopping MetaStoreService", e);
}
}
}