This is an automated email from the ASF dual-hosted git repository.
tkhurana pushed a commit to branch 5.1
in repository https://gitbox.apache.org/repos/asf/phoenix.git
The following commit(s) were added to refs/heads/5.1 by this push:
new caa436659b PHOENIX-6776 :- Abort scans of closed connections at
ScanningResultIterator (#1517) (#1525)
caa436659b is described below
commit caa436659b2bc8e520d6c9492bc768dcecc544ac
Author: Lokesh Khurana <[email protected]>
AuthorDate: Wed Nov 9 14:43:49 2022 -0800
PHOENIX-6776 :- Abort scans of closed connections at ScanningResultIterator
(#1517) (#1525)
Co-authored-by: Lokesh Khurana <[email protected]>
---
.../end2end/PreMatureTimelyAbortScanIt.java | 89 ++++++++++++++++++++++
.../coprocessor/BaseScannerRegionObserver.java | 2 +-
.../apache/phoenix/exception/SQLExceptionCode.java | 5 +-
.../phoenix/iterate/BaseResultIterators.java | 3 +
.../phoenix/iterate/ScanningResultIterator.java | 26 ++++++-
.../phoenix/iterate/TableResultIterator.java | 2 +-
.../iterate/TableSnapshotResultIterator.java | 7 +-
.../org/apache/phoenix/jdbc/PhoenixConnection.java | 23 +++++-
.../phoenix/mapreduce/PhoenixRecordReader.java | 2 +-
9 files changed, 148 insertions(+), 11 deletions(-)
diff --git
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/PreMatureTimelyAbortScanIt.java
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/PreMatureTimelyAbortScanIt.java
new file mode 100644
index 0000000000..c0729707be
--- /dev/null
+++
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/PreMatureTimelyAbortScanIt.java
@@ -0,0 +1,89 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.phoenix.end2end;

import org.apache.phoenix.coprocessor.BaseScannerRegionObserver;
import org.apache.phoenix.exception.SQLExceptionCode;
import org.apache.phoenix.iterate.ScanningResultIterator;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.query.QueryServices;
import org.apache.phoenix.thirdparty.com.google.common.collect.Maps;
import org.apache.phoenix.util.ReadOnlyProps;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Map;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

/**
 * Integration test for PHOENIX-6776: a scan must be aborted at
 * {@link ScanningResultIterator} when the owning connection is closed or in
 * the middle of closing, instead of continuing to consume server resources.
 */
@Category(ParallelStatsDisabledTest.class)
public class PreMatureTimelyAbortScanIt extends ParallelStatsDisabledIT {

    private static final Logger LOG =
            LoggerFactory.getLogger(PreMatureTimelyAbortScanIt.class);

    @BeforeClass
    public static synchronized void doSetup() throws Exception {
        Map<String, String> props = Maps.newHashMapWithExpectedSize(3);
        // An hour of max-lookback so rows survive for the duration of the test.
        props.put(BaseScannerRegionObserver.PHOENIX_MAX_LOOKBACK_AGE_CONF_KEY,
                Integer.toString(60 * 60));
        props.put(QueryServices.USE_STATS_FOR_PARALLELIZATION,
                Boolean.toString(false));
        // A 0 ms server page size forces frequent dummy results, giving the
        // client-side iterator the chance to notice the closing connection.
        props.put(QueryServices.PHOENIX_SERVER_PAGE_SIZE_MS,
                Integer.toString(0));
        setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator()));
    }

    /** Returns a connection URL that is unique per call, so each connection is distinct. */
    private String getUniqueUrl() {
        return url + generateUniqueName();
    }

    @Test
    public void testPreMatureScannerAbortForCount() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUniqueUrl())) {
            conn.createStatement().execute("CREATE TABLE LONG_BUG (ID INTEGER "
                    + "PRIMARY KEY, AMOUNT DECIMAL) SALT_BUCKETS = 16 ");
        }
        try (Connection conn = DriverManager.getConnection(getUniqueUrl())) {
            for (int i = 0; i < 100; i++) {
                int amount = -50000 + i;
                String s = "UPSERT INTO LONG_BUG (ID, AMOUNT) VALUES( " + i
                        + ", " + amount + ")";
                conn.createStatement().execute(s);
            }
            conn.commit();
        }

        // try-with-resources so the unwrapped connection cannot leak; the
        // original test never closed it.
        try (PhoenixConnection conn = DriverManager.getConnection(getUniqueUrl())
                .unwrap(PhoenixConnection.class)) {
            // Make ScanningResultIterator throw FAILED_KNOWINGLY_FOR_TEST on the
            // abort path instead of closing quietly, so we can assert it was taken.
            ScanningResultIterator.setIsScannerClosedForcefully(true);
            ResultSet resultSet = conn.createStatement().executeQuery(
                    "SELECT COUNT(*) FROM LONG_BUG WHERE ID % 2 = 0");
            // Imitate a connection that is in the middle of closing.
            conn.setIsClosing(true);
            resultSet.next();
            LOG.info("Count of modulus 2 for LONG_BUG :- {}", resultSet.getInt(1));
            fail("Scan should have been aborted because the connection is closing");
        } catch (SQLException sqe) {
            // The abort path must surface the dedicated test error code.
            assertEquals(SQLExceptionCode.FAILED_KNOWINGLY_FOR_TEST.getErrorCode(),
                    sqe.getErrorCode());
        } finally {
            // Reset the test hook so other tests in the JVM are unaffected.
            ScanningResultIterator.setIsScannerClosedForcefully(false);
        }
    }
}
diff --git
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseScannerRegionObserver.java
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseScannerRegionObserver.java
index 2e5fbc105a..6f8dfd49b4 100644
---
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseScannerRegionObserver.java
+++
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseScannerRegionObserver.java
@@ -244,7 +244,7 @@ abstract public class BaseScannerRegionObserver extends
CompatBaseScannerRegionO
// last possible moment. You need to swap the start/stop and make
the
// start exclusive and the stop inclusive.
ScanUtil.setupReverseScan(scan);
- if (scan.getFilter() != null && !(scan.getFilter() instanceof
PagedFilter)) {
+ if (!(scan.getFilter() instanceof PagedFilter)) {
byte[] pageSizeMsBytes =
scan.getAttribute(BaseScannerRegionObserver.SERVER_PAGE_SIZE_MS);
if (pageSizeMsBytes != null) {
scan.setFilter(new PagedFilter(scan.getFilter(),
getPageSizeMsForFilter(scan)));
diff --git
a/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
b/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
index 0a9c961efc..1902068d72 100644
---
a/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
+++
b/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
@@ -560,7 +560,10 @@ public enum SQLExceptionCode {
"CASCADE INDEX feature is not supported for local index"),
INVALID_REGION_SPLIT_POLICY(908, "43M19",
- "REGION SPLIT POLICY is incorrect.");
+ "REGION SPLIT POLICY is incorrect."),
+
+ //SQLCode for testing exceptions
+ FAILED_KNOWINGLY_FOR_TEST(7777, "TEST", "Exception was thrown to test
something");
private final int errorCode;
private final String sqlState;
diff --git
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
index ec6e37a69e..45ec1f09cd 100644
---
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
+++
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
@@ -51,6 +51,7 @@ import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.UUID;
+import java.util.concurrent.CancellationException;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
@@ -1414,6 +1415,8 @@ public abstract class BaseResultIterators extends
ExplainTable implements Result
}
}
+ } catch (CancellationException ce) {
+ LOGGER.warn("Iterator scheduled to be executed in
Future was being cancelled", ce);
}
}
addIterator(iterators, concatIterators);
diff --git
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/ScanningResultIterator.java
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/ScanningResultIterator.java
index 61924c0f99..0c15c67236 100644
---
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/ScanningResultIterator.java
+++
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/ScanningResultIterator.java
@@ -53,22 +53,33 @@ import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
import org.apache.phoenix.compile.ExplainPlanAttributes
.ExplainPlanAttributesBuilder;
+import org.apache.phoenix.compile.StatementContext;
+import org.apache.phoenix.exception.SQLExceptionCode;
+import org.apache.phoenix.exception.SQLExceptionInfo;
import org.apache.phoenix.monitoring.CombinableMetric;
import org.apache.phoenix.monitoring.GlobalClientMetrics;
import org.apache.phoenix.monitoring.ScanMetricsHolder;
import org.apache.phoenix.schema.tuple.ResultTuple;
import org.apache.phoenix.schema.tuple.Tuple;
+import
org.apache.phoenix.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.phoenix.util.ServerUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class ScanningResultIterator implements ResultIterator {
+
+ private static final Logger LOG =
LoggerFactory.getLogger(ScanningResultIterator.class);
private final ResultScanner scanner;
private final ScanMetricsHolder scanMetricsHolder;
boolean scanMetricsUpdated;
boolean scanMetricsEnabled;
+ private StatementContext context;
+ private static boolean throwExceptionIfScannerClosedForceFully = false;
- public ScanningResultIterator(ResultScanner scanner, Scan scan,
ScanMetricsHolder scanMetricsHolder) {
+ public ScanningResultIterator(ResultScanner scanner, Scan scan,
ScanMetricsHolder scanMetricsHolder, StatementContext context) {
this.scanner = scanner;
this.scanMetricsHolder = scanMetricsHolder;
+ this.context = context;
scanMetricsUpdated = false;
scanMetricsEnabled = scan.isScanMetricsEnabled();
}
@@ -157,6 +168,14 @@ public class ScanningResultIterator implements
ResultIterator {
try {
Result result = scanner.next();
while (result != null && (result.isEmpty() || isDummy(result))) {
+ if (context.getConnection().isClosing() ||
context.getConnection().isClosed()) {
+ LOG.warn("Closing ResultScanner as Connection is already
closed or in middle of closing");
+ if (throwExceptionIfScannerClosedForceFully) {
+ throw new
SQLExceptionInfo.Builder(SQLExceptionCode.FAILED_KNOWINGLY_FOR_TEST).build().buildException();
+ }
+ close();
+ return null;
+ }
result = scanner.next();
}
if (result == null) {
@@ -188,4 +207,9 @@ public class ScanningResultIterator implements
ResultIterator {
public ResultScanner getScanner() {
return scanner;
}
+
+ @VisibleForTesting
+ public static void setIsScannerClosedForcefully(boolean throwException) {
+ throwExceptionIfScannerClosedForceFully = throwException;
+ }
}
diff --git
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableResultIterator.java
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableResultIterator.java
index 5c8d6c77ce..ee0bd4b7ab 100644
---
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableResultIterator.java
+++
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableResultIterator.java
@@ -236,7 +236,7 @@ public class TableResultIterator implements ResultIterator {
if (delegate == UNINITIALIZED_SCANNER) {
try {
this.scanIterator =
- new
ScanningResultIterator(htable.getScanner(scan), scan, scanMetricsHolder);
+ new
ScanningResultIterator(htable.getScanner(scan), scan, scanMetricsHolder,
plan.getContext());
} catch (IOException e) {
Closeables.closeQuietly(htable);
throw ServerUtil.parseServerException(e);
diff --git
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableSnapshotResultIterator.java
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableSnapshotResultIterator.java
index 8e7430fb97..4d37b7f995 100644
---
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableSnapshotResultIterator.java
+++
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableSnapshotResultIterator.java
@@ -39,6 +39,7 @@ import
org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.SnapshotManifest;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import
org.apache.phoenix.compile.ExplainPlanAttributes.ExplainPlanAttributesBuilder;
+import org.apache.phoenix.compile.StatementContext;
import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
import org.apache.phoenix.monitoring.ScanMetricsHolder;
import org.apache.phoenix.schema.tuple.Tuple;
@@ -73,12 +74,14 @@ public class TableSnapshotResultIterator implements
ResultIterator {
private FileSystem fs;
private int currentRegion;
private boolean closed = false;
+ private StatementContext context;
- public TableSnapshotResultIterator(Configuration configuration, Scan scan,
ScanMetricsHolder scanMetricsHolder)
+ public TableSnapshotResultIterator(Configuration configuration, Scan scan,
ScanMetricsHolder scanMetricsHolder, StatementContext context)
throws IOException {
this.configuration = configuration;
this.currentRegion = -1;
this.scan = scan;
+ this.context = context;
this.scanMetricsHolder = scanMetricsHolder;
this.scanIterator = UNINITIALIZED_SCANNER;
if (PhoenixConfigurationUtil.isMRSnapshotManagedExternally(configuration))
{
@@ -152,7 +155,7 @@ public class TableSnapshotResultIterator implements
ResultIterator {
RegionInfo hri = regions.get(this.currentRegion);
this.scanIterator =
new ScanningResultIterator(new SnapshotScanner(configuration, fs,
restoreDir, htd, hri, scan),
- scan, scanMetricsHolder);
+ scan, scanMetricsHolder, context);
} catch (Throwable e) {
throw ServerUtil.parseServerException(e);
}
diff --git
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixConnection.java
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixConnection.java
index 9c927a5b2d..efd925a138 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixConnection.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixConnection.java
@@ -161,6 +161,7 @@ public class PhoenixConnection implements Connection,
MetaDataMutated, SQLClosea
private int statementExecutionCounter;
private TraceScope traceScope = null;
private volatile boolean isClosed = false;
+ private volatile boolean isClosing = false;
private Sampler<?> sampler;
private boolean readOnly = false;
private Consistency consistency = Consistency.STRONG;
@@ -688,7 +689,7 @@ public class PhoenixConnection implements Connection,
MetaDataMutated, SQLClosea
}
private void checkOpen() throws SQLException {
- if (isClosed) {
+ if (isClosed || isClosing) {
throw new SQLExceptionInfo.Builder(
SQLExceptionCode.CONNECTION_CLOSED).build()
.buildException();
@@ -697,10 +698,11 @@ public class PhoenixConnection implements Connection,
MetaDataMutated, SQLClosea
@Override
public void close() throws SQLException {
- if (isClosed) {
+ if (isClosed || isClosing) {
return;
}
try {
+ isClosing = true;
clearMetrics();
try {
if (traceScope != null) {
@@ -711,8 +713,9 @@ public class PhoenixConnection implements Connection,
MetaDataMutated, SQLClosea
} finally {
services.removeConnection(this);
}
-
+
} finally {
+ isClosing = false;
isClosed = true;
if(isInternalConnection()){
GLOBAL_OPEN_INTERNAL_PHOENIX_CONNECTIONS.decrement();
@@ -917,6 +920,10 @@ public class PhoenixConnection implements Connection,
MetaDataMutated, SQLClosea
return isClosed;
}
+ public boolean isClosing() throws SQLException {
+ return isClosing;
+ }
+
@Override
public boolean isReadOnly() throws SQLException {
return readOnly;
@@ -925,7 +932,7 @@ public class PhoenixConnection implements Connection,
MetaDataMutated, SQLClosea
@Override
public boolean isValid(int timeout) throws SQLException {
// TODO: run query here or ping
- return !isClosed;
+ return !isClosed && !isClosing;
}
@Override
@@ -1326,6 +1333,14 @@ public class PhoenixConnection implements Connection,
MetaDataMutated, SQLClosea
this.tableResultIteratorFactory = factory;
}
+ /**
+ * Added for testing purposes. Do not use this elsewhere.
+ */
+ @VisibleForTesting
+ public void setIsClosing(boolean imitateIsClosing) {
+ isClosing = imitateIsClosing;
+ }
+
@Override
public void removeSchema(PSchema schema, long schemaTimeStamp) {
metaData.removeSchema(schema, schemaTimeStamp);
diff --git
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordReader.java
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordReader.java
index 91d3da40b4..d18bcffac1 100644
---
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordReader.java
+++
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordReader.java
@@ -138,7 +138,7 @@ public class PhoenixRecordReader<T extends DBWritable>
extends RecordReader<Null
if (snapshotName != null) {
// result iterator to read snapshots
final TableSnapshotResultIterator
tableSnapshotResultIterator = new TableSnapshotResultIterator(configuration,
scan,
- scanMetricsHolder);
+ scanMetricsHolder, queryPlan.getContext());
peekingResultIterator =
LookAheadResultIterator.wrap(tableSnapshotResultIterator);
LOGGER.info("Adding TableSnapshotResultIterator for scan:
" + scan);
} else {