This is an automated email from the ASF dual-hosted git repository.
volodymyr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git
The following commit(s) were added to refs/heads/master by this push:
new dbff10aada DRILL-8279: Use thick Phoenix driver
dbff10aada is described below
commit dbff10aadaeda1cc4aea73ba2637d03d6751639a
Author: Volodymyr Vysotskyi <[email protected]>
AuthorDate: Sun Aug 14 12:18:14 2022 +0300
DRILL-8279: Use thick Phoenix driver
---
.github/workflows/ci.yml | 2 +-
contrib/storage-hive/core/pom.xml | 8 +
contrib/storage-phoenix/README.md | 51 ++---
contrib/storage-phoenix/pom.xml | 177 ++++++++++-------
.../exec/store/phoenix/PhoenixDataSource.java | 92 +++++----
.../drill/exec/store/phoenix/PhoenixReader.java | 6 +-
.../exec/store/phoenix/PhoenixStoragePlugin.java | 4 +-
.../store/phoenix/PhoenixStoragePluginConfig.java | 41 ++--
.../store/phoenix/rules/PhoenixImplementor.java | 20 +-
.../main/resources/bootstrap-storage-plugins.json | 4 +-
.../drill/exec/store/phoenix/PhoenixBaseTest.java | 14 +-
...eryServerBasicsIT.java => PhoenixBasicsIT.java} | 64 +++----
.../drill/exec/store/phoenix/PhoenixSQLTest.java | 2 +-
.../drill/exec/store/phoenix/PhoenixTestSuite.java | 10 +-
...ryServerIT.java => ImpersonationPhoenixIT.java} | 27 +--
...verEnvironment.java => PhoenixEnvironment.java} | 209 ++++++---------------
.../phoenix/secured/SecuredPhoenixBaseTest.java | 51 +++--
.../phoenix/secured/SecuredPhoenixCommandTest.java | 14 +-
.../secured/SecuredPhoenixDataTypeTest.java | 7 +-
.../phoenix/secured/SecuredPhoenixSQLTest.java | 17 +-
.../phoenix/secured/SecuredPhoenixTestSuite.java | 31 ++-
.../src/test/resources/hbase-site.xml | 4 +
22 files changed, 383 insertions(+), 472 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 0c7b24aeb5..185a113f85 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -29,7 +29,7 @@ jobs:
build:
name: Main Build
runs-on: ubuntu-latest
- timeout-minutes: 100
+ timeout-minutes: 120
strategy:
matrix:
# Java versions to run unit tests
diff --git a/contrib/storage-hive/core/pom.xml b/contrib/storage-hive/core/pom.xml
index 2bcaf13cea..795cb49486 100644
--- a/contrib/storage-hive/core/pom.xml
+++ b/contrib/storage-hive/core/pom.xml
@@ -76,6 +76,14 @@
<groupId>io.airlift</groupId>
<artifactId>aircompressor</artifactId>
</exclusion>
+ <exclusion>
+ <groupId>co.cask.tephra</groupId>
+ <artifactId>tephra-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>co.cask.tephra</groupId>
+ <artifactId>tephra-core</artifactId>
+ </exclusion>
</exclusions>
</dependency>
<dependency>
diff --git a/contrib/storage-phoenix/README.md b/contrib/storage-phoenix/README.md
index fa8467fe04..139a60c58b 100644
--- a/contrib/storage-phoenix/README.md
+++ b/contrib/storage-phoenix/README.md
@@ -22,7 +22,6 @@ Related Information :
2. PHOENIX-6582: Bump default HBase version to 2.3.7 and 2.4.8
3. PHOENIX-6605, PHOENIX-6606 and PHOENIX-6607.
4. DRILL-8060, DRILL-8061 and DRILL-8062.
- 5. [QueryServer 6.0.0-drill-r1](https://github.com/luocooong/phoenix-queryserver/releases/tag/6.0.0-drill-r1)
## Configuration
@@ -33,8 +32,9 @@ Option 1 (Use the host and port):
```json
{
"type": "phoenix",
- "host": "the.queryserver.hostname",
- "port": 8765,
+ "zkQuorum": "zk.quorum.hostnames",
+ "port": 2181,
+ "zkPath": "/hbase",
"enabled": true
}
```
@@ -44,7 +44,7 @@ Option 2 (Use the jdbcURL) :
```json
{
"type": "phoenix",
- "jdbcURL":
"jdbc:phoenix:thin:url=http://the.queryserver.hostname:8765;serialization=PROTOBUF",
+ "jdbcURL":
"jdbc:phoenix:zk.quorum.hostname1,zk.quorum.hostname2,zk.quorum.hostname3:2181:/hbase",
"enabled": true
}
```
@@ -54,10 +54,11 @@ Use the connection properties :
```json
{
"type": "phoenix",
- "host": "the.queryserver.hostname",
- "port": 8765,
+ "zkQuorum": "zk.quorum.hostnames",
+ "port": 2181,
"props": {
- "phoenix.query.timeoutMs": 60000
+ "hbase.client.retries.number": 10,
+ "hbase.client.pause": 10000
},
"enabled": true
}
@@ -65,13 +66,12 @@ Use the connection properties :
Tips :
 * More connection properties, see also [PQS Configuration](http://phoenix.apache.org/server.html).
- * If you provide the `jdbcURL`, the connection will ignore the value of `host` and `port`.
- * If you [extended the authentication of QueryServer](https://github.com/Boostport/avatica/issues/28), you can also pass the `userName` and `password`.
+ * If you provide the `jdbcURL`, the connection will ignore the value of `zkQuorum` and `port`.
```json
{
"type": "phoenix",
- "host": "the.queryserver.hostname",
+ "zkQuorum": "zk.quorum.hostnames",
"port": 8765,
"userName": "my_user",
"password": "my_pass",
@@ -83,11 +83,11 @@ Tips :
Configurations :
1. Enable [Drill User Impersonation](https://drill.apache.org/docs/configuring-user-impersonation/)
2. Enable [PQS Impersonation](https://phoenix.apache.org/server.html#Impersonation)
-3. PQS URL:
-  1. Provide `host` and `port` and Drill will generate the PQS URL with a doAs parameter of current session user
-  2. Provide the `jdbcURL` with a `doAs` url param and `$user` placeholder as a value, for instance:
-  `jdbc:phoenix:thin:url=http://localhost:8765?doAs=$user`. In case Drill Impersonation is enabled, but `doAs=$user`
-  is missing the User Exception is thrown.
+3. Phoenix URL:
+  1. Provide `zkQuorum` and `port` and Drill will create the connection to Phoenix as (doAs) the current session user
+  2. Provide the `jdbcURL` with a `principal`, for instance:
+  `jdbc:phoenix:<ZK-QUORUM>:<ZK-PORT>:<ZK-HBASE-NODE>:principal_name@REALM:/path/to/keytab`.
## Testing
@@ -99,27 +99,6 @@ Current HBase2 releases still compile against Hadoop2 by default, and using Hado
 requires a recompilation of HBase because of incompatible changes between Hadoop2 and Hadoop3. "
```
-### Recommended Practices
-
- 1. Download HBase 2.4.2 sources and rebuild with Hadoop 3.
-
- ```mvn clean install -DskipTests -Dhadoop.profile=3.0 -Dhadoop-three.version=3.2.3```
-
- 2. Remove the `Ignore` annotation in `PhoenixTestSuite.java`.
-
- ```
- @Ignore
- @Category({ SlowTest.class })
- public class PhoenixTestSuite extends ClusterTest {
- ```
-
- 3. Go to the phoenix root folder and run test.
-
- ```
- cd contrib/storage-phoenix/
- mvn test
- ```
-
### To Add Features
- Don't forget to add a test function to the test class.
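For context, a minimal JDBC sketch of the thick-driver URL format that the updated README describes (the quorum hostnames and the `/hbase` ZK path below are placeholders, not values from this commit):

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.Properties;

public class PhoenixThickDriverExample {
  public static void main(String[] args) throws Exception {
    // FAT_DRIVER_CLASS in PhoenixStoragePluginConfig; registers the thick driver.
    Class.forName("org.apache.phoenix.jdbc.PhoenixDriver");

    Properties props = new Properties();
    // Example HBase client setting, mirroring the README's "props" section.
    props.setProperty("hbase.client.retries.number", "10");

    // zkQuorum:port:zkPath format from the README (placeholder hosts).
    String url = "jdbc:phoenix:zk1.example.com,zk2.example.com:2181:/hbase";
    try (Connection conn = DriverManager.getConnection(url, props);
         Statement stmt = conn.createStatement();
         ResultSet rs = stmt.executeQuery("SELECT TABLE_NAME FROM SYSTEM.CATALOG LIMIT 5")) {
      while (rs.next()) {
        System.out.println(rs.getString(1)); // print a few known table names
      }
    }
  }
}
```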
diff --git a/contrib/storage-phoenix/pom.xml b/contrib/storage-phoenix/pom.xml
index d47b60aaae..4fe3829569 100644
--- a/contrib/storage-phoenix/pom.xml
+++ b/contrib/storage-phoenix/pom.xml
@@ -29,12 +29,12 @@
<name>Drill : Contrib : Storage : Phoenix</name>
<properties>
- <phoenix.queryserver.version>6.0.0</phoenix.queryserver.version>
<phoenix.version>5.1.2</phoenix.version>
<!-- Keep the 2.4.2 to reduce dependency conflict -->
<hbase.minicluster.version>2.4.2</hbase.minicluster.version>
+ <phoenix.skip.tests>false</phoenix.skip.tests>
</properties>
-
+
<dependencies>
<dependency>
<groupId>org.apache.drill.exec</groupId>
@@ -55,19 +55,12 @@
<version>${project.version}</version>
<scope>test</scope>
</dependency>
- <dependency>
- <groupId>org.apache.phoenix</groupId>
- <artifactId>phoenix-queryserver-client</artifactId>
- <version>${phoenix.queryserver.version}</version>
- </dependency>
-
- <!-- Test Dependency versions -->
<dependency>
- <!-- PHOENIX-6605 -->
- <groupId>com.github.luocooong.phoenix-queryserver</groupId>
- <artifactId>phoenix-queryserver</artifactId>
- <version>6.0.0-drill-r1</version>
+ <groupId>org.apache.phoenix</groupId>
+ <artifactId>phoenix-core</artifactId>
+ <version>${phoenix.version}</version>
<scope>test</scope>
+ <classifier>tests</classifier>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
@@ -78,8 +71,8 @@
<artifactId>log4j</artifactId>
</exclusion>
<exclusion>
- <groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
+ <groupId>commons-logging</groupId>
</exclusion>
<exclusion>
<groupId>javax.servlet</groupId>
@@ -89,32 +82,53 @@
<groupId>org.ow2.asm</groupId>
<artifactId>asm</artifactId>
</exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <!-- PHOENIX-6605 -->
- <groupId>com.github.luocooong.phoenix-queryserver</groupId>
- <artifactId>phoenix-queryserver-it</artifactId>
- <version>6.0.0-drill-r1</version>
- <scope>test</scope>
- <classifier>tests</classifier>
- <exclusions>
<exclusion>
- <groupId>org.codehaus.jackson</groupId>
- <artifactId>jackson-jaxrs</artifactId>
+ <groupId>org.ow2.asm</groupId>
+ <artifactId>asm-all</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>commons-configuration</groupId>
+ <artifactId>commons-configuration</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-csv</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase-endpoint</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.salesforce.i18n</groupId>
+ <artifactId>i18n-util</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>jline</groupId>
+ <artifactId>jline</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.codahale.metrics</groupId>
+ <artifactId>metrics-core</artifactId>
</exclusion>
<exclusion>
- <groupId>org.codehaus.jackson</groupId>
- <artifactId>jackson-xc</artifactId>
+ <groupId>com.codahale.metrics</groupId>
+ <artifactId>metrics-graphite</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.jboss.netty</groupId>
+ <artifactId>netty</artifactId>
</exclusion>
</exclusions>
</dependency>
+ <dependency>
+ <groupId>org.apache.phoenix</groupId>
+ <artifactId>phoenix-hbase-compat-2.4.1</artifactId>
+ <version>${phoenix.version}</version>
+ </dependency>
<dependency>
<groupId>org.apache.phoenix</groupId>
<artifactId>phoenix-core</artifactId>
<version>${phoenix.version}</version>
- <scope>test</scope>
- <classifier>tests</classifier>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
@@ -136,21 +150,49 @@
<groupId>org.ow2.asm</groupId>
<artifactId>asm</artifactId>
</exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>org.apache.phoenix</groupId>
- <artifactId>phoenix-core</artifactId>
- <version>${phoenix.version}</version>
- <scope>test</scope>
- <exclusions>
<exclusion>
- <groupId>org.slf4j</groupId>
- <artifactId>*</artifactId>
+ <groupId>org.ow2.asm</groupId>
+ <artifactId>asm-all</artifactId>
</exclusion>
<exclusion>
- <groupId>log4j</groupId>
- <artifactId>log4j</artifactId>
+ <groupId>commons-configuration</groupId>
+ <artifactId>commons-configuration</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase-testing-util</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.htrace</groupId>
+ <artifactId>htrace-core</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-csv</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase-endpoint</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>jline</groupId>
+ <artifactId>jline</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.salesforce.i18n</groupId>
+ <artifactId>i18n-util</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.codahale.metrics</groupId>
+ <artifactId>metrics-core</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.codahale.metrics</groupId>
+ <artifactId>metrics-graphite</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.jboss.netty</groupId>
+ <artifactId>netty</artifactId>
</exclusion>
</exclusions>
</dependency>
@@ -170,21 +212,20 @@
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-it</artifactId>
<version>${hbase.minicluster.version}</version>
- <type>test-jar</type>
+ <classifier>tests</classifier>
<scope>test</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase-endpoint</artifactId>
+ </exclusion>
+ </exclusions>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-common</artifactId>
<version>${hbase.minicluster.version}</version>
- <type>test-jar</type>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hbase</groupId>
- <artifactId>hbase-server</artifactId>
- <version>${hbase.minicluster.version}</version>
- <type>test-jar</type>
+ <classifier>tests</classifier>
<scope>test</scope>
</dependency>
<dependency>
@@ -214,7 +255,7 @@
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-asyncfs</artifactId>
- <type>test-jar</type>
+ <classifier>tests</classifier>
<version>${hbase.minicluster.version}</version>
<scope>test</scope>
<exclusions>
@@ -288,29 +329,12 @@
<groupId>com.zaxxer</groupId>
<artifactId>HikariCP-java7</artifactId>
</exclusion>
+ <exclusion>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-csv</artifactId>
+ </exclusion>
</exclusions>
</dependency>
- <dependency>
- <groupId>com.univocity</groupId>
- <artifactId>univocity-parsers</artifactId>
- <version>${univocity-parsers.version}</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.eclipse.jetty</groupId>
- <artifactId>jetty-server</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.eclipse.jetty</groupId>
- <artifactId>jetty-http</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.eclipse.jetty</groupId>
- <artifactId>jetty-servlet</artifactId>
- <scope>test</scope>
- </dependency>
</dependencies>
<build>
<plugins>
@@ -339,8 +363,12 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
+ <skipTests>${phoenix.skip.tests}</skipTests>
+ <forkCount combine.self="override">1</forkCount>
+ <reuseForks>false</reuseForks>
<includes>
<include>**/PhoenixTestSuite.class</include>
+ <include>**/SecuredPhoenixTestSuite.class</include>
</includes>
<excludes>
<exclude>**/*Test.java</exclude>
@@ -353,6 +381,9 @@
<profiles>
<profile>
<id>hadoop-2</id>
+ <properties>
+ <phoenix.skip.tests>true</phoenix.skip.tests>
+ </properties>
<build>
<plugins>
<plugin>
diff --git a/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/PhoenixDataSource.java b/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/PhoenixDataSource.java
index b89a53099c..806317fcce 100644
--- a/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/PhoenixDataSource.java
+++ b/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/PhoenixDataSource.java
@@ -18,40 +18,42 @@
package org.apache.drill.exec.store.phoenix;
import java.io.PrintWriter;
+import java.security.PrivilegedExceptionAction;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Map;
+import java.util.Optional;
import java.util.Properties;
+import java.util.function.Supplier;
import java.util.logging.Logger;
import javax.sql.DataSource;
-import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.common.exceptions.DrillRuntimeException;
+import org.apache.drill.common.util.function.CheckedSupplier;
+import org.apache.drill.exec.util.ImpersonationUtil;
import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
+import org.apache.hadoop.security.UserGroupInformation;
import org.slf4j.LoggerFactory;
/**
* Phoenix’s Connection objects are different from most other JDBC Connections
* due to the underlying HBase connection. The Phoenix Connection object
* is designed to be a thin object that is inexpensive to create.
- *
+ * <p>
 * If Phoenix Connections are reused, it is possible that the underlying HBase connection
 * is not always left in a healthy state by the previous user. It is better to
 * create new Phoenix Connections to ensure that you avoid any potential issues.
*/
public class PhoenixDataSource implements DataSource {
  private static final org.slf4j.Logger logger = LoggerFactory.getLogger(PhoenixDataSource.class);
-
-  private static final String DEFAULT_URL_HEADER = "jdbc:phoenix:thin:url=http://";
-  private static final String DEFAULT_SERIALIZATION = "serialization=PROTOBUF";
-  private static final String IMPERSONATED_USER_VARIABLE = "$user";
-  private static final String DEFAULT_QUERY_SERVER_REMOTEUSEREXTRACTOR_PARAM = "doAs";
+ private static final String DEFAULT_URL_HEADER = "jdbc:phoenix:";
private final String url;
private final String user;
- private Map<String, Object> connectionProperties;
- private boolean isFatClient;
+ private final Map<String, Object> connectionProperties;
+ private final boolean impersonationEnabled;
public PhoenixDataSource(String url,
String userName,
@@ -61,29 +63,30 @@ public class PhoenixDataSource implements DataSource {
Preconditions.checkNotNull(connectionProperties);
connectionProperties.forEach((k, v)
-> Preconditions.checkArgument(v != null, String.format("does not
accept null values : %s", k)));
- this.url = impersonationEnabled ? doAsUserUrl(url, userName) : url;
+ this.impersonationEnabled = impersonationEnabled;
+ this.url = url;
this.user = userName;
this.connectionProperties = connectionProperties;
}
- public PhoenixDataSource(String host,
+ public PhoenixDataSource(String zkQuorum,
int port,
+ String zkPath,
String userName,
Map<String, Object> connectionProperties,
boolean impersonationEnabled) {
- Preconditions.checkNotNull(host, userName);
- Preconditions.checkArgument(port > 0, "Please set the correct port.");
+ Preconditions.checkNotNull(zkQuorum, userName);
connectionProperties.forEach((k, v)
-> Preconditions.checkArgument(v != null, String.format("does not accept
null values : %s", k)));
- this.url = new StringBuilder()
+ StringBuilder stringBuilder = new StringBuilder()
.append(DEFAULT_URL_HEADER)
- .append(host)
+ .append(zkQuorum)
.append(":")
- .append(port)
- .append(impersonationEnabled ? "?doAs=" + userName : "")
- .append(";")
- .append(DEFAULT_SERIALIZATION)
- .toString();
+ .append(port);
+ Optional.ofNullable(zkPath)
+ .ifPresent(path -> stringBuilder.append(":").append(path));
+ this.url = stringBuilder.toString();
+ this.impersonationEnabled = impersonationEnabled;
this.user = userName;
this.connectionProperties = connectionProperties;
}
@@ -92,10 +95,6 @@ public class PhoenixDataSource implements DataSource {
return connectionProperties;
}
-  public void setConnectionProperties(Map<String, Object> connectionProperties) {
- this.connectionProperties = connectionProperties;
- }
-
@Override
public PrintWriter getLogWriter() {
throw new UnsupportedOperationException("getLogWriter");
@@ -138,30 +137,37 @@ public class PhoenixDataSource implements DataSource {
@Override
public Connection getConnection() throws SQLException {
- useDriverClass();
+ loadDriverClass();
return getConnection(this.user, null);
}
@Override
  public Connection getConnection(String userName, String password) throws SQLException {
- useDriverClass();
+ loadDriverClass();
logger.debug("Drill/Phoenix connection url: {}", url);
- return DriverManager.getConnection(url, useConfProperties());
+ CheckedSupplier<Connection, SQLException> action =
+ () -> DriverManager.getConnection(url, useConfProperties());
+ if (impersonationEnabled) {
+ return doAsRemoteUser(userName, action);
+ }
+ return action.getAndThrow();
+ }
+
+  private <T> T doAsRemoteUser(String remoteUserName, final Supplier<T> action) {
+ try {
+      UserGroupInformation proxyUser = ImpersonationUtil.createProxyUgi(remoteUserName);
+ return proxyUser.doAs((PrivilegedExceptionAction<T>) action::get);
+ } catch (Exception e) {
+ throw new DrillRuntimeException(e);
+ }
}
/**
- * The thin-client is lightweight and better compatibility.
- * Only thin-client is currently supported.
- *
- * @throws SQLException
+   * Only thick-client is currently supported due to a shaded Avatica conflict created by the thin client.
*/
- public Class<?> useDriverClass() throws SQLException {
+ private void loadDriverClass() throws SQLException {
try {
- if (isFatClient) {
- return Class.forName(PhoenixStoragePluginConfig.FAT_DRIVER_CLASS);
- } else {
- return Class.forName(PhoenixStoragePluginConfig.THIN_DRIVER_CLASS);
- }
+ Class.forName(PhoenixStoragePluginConfig.FAT_DRIVER_CLASS);
} catch (ClassNotFoundException e) {
throw new SQLException("Cause by : " + e.getMessage());
}
@@ -180,17 +186,7 @@ public class PhoenixDataSource implements DataSource {
props.putIfAbsent("phoenix.trace.frequency", "never");
props.putIfAbsent("phoenix.query.timeoutMs", 30000);
props.putIfAbsent("phoenix.query.keepAliveMs", 120000);
+ props.putIfAbsent("phoenix.schema.isNamespaceMappingEnabled", "true");
return props;
}
-
- private String doAsUserUrl(String url, String userName) {
- if (url.contains(DEFAULT_QUERY_SERVER_REMOTEUSEREXTRACTOR_PARAM)) {
- return url.replace(IMPERSONATED_USER_VARIABLE, userName);
- } else {
- throw UserException
- .connectionError()
- .message("Invalid PQS URL. Please add the value of the `doAs=$user`
parameter if Impersonation is enabled.")
- .build(logger);
- }
- }
}
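Conceptually, the new `getConnection`/`doAsRemoteUser` path reduces to the standard Hadoop proxy-user pattern: wrap the JDBC connect in a UGI `doAs` instead of rewriting the URL with `doAs=$user`. A minimal sketch of that pattern, assuming an already logged-in service user and using only Hadoop's `UserGroupInformation` API (class and method names here are illustrative, not Drill code):

```java
import java.security.PrivilegedExceptionAction;
import java.sql.Connection;
import java.sql.DriverManager;
import org.apache.hadoop.security.UserGroupInformation;

public final class ProxyConnectSketch {
  private ProxyConnectSketch() {}

  /**
   * Opens a Phoenix connection as {@code remoteUser}, proxied through the
   * logged-in service user (the same idea as doAsRemoteUser above).
   */
  public static Connection connectAs(String remoteUser, String url) throws Exception {
    UserGroupInformation proxyUgi =
        UserGroupInformation.createProxyUser(remoteUser, UserGroupInformation.getLoginUser());
    // Run the JDBC connect inside the proxy user's security context.
    return proxyUgi.doAs(
        (PrivilegedExceptionAction<Connection>) () -> DriverManager.getConnection(url));
  }
}
```

This only works if the service principal is authorized as a proxy user (the `hadoop.proxyuser.*` settings visible in the secured test environment further down).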
diff --git a/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/PhoenixReader.java b/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/PhoenixReader.java
index 0ecc4ae8a2..1d702761d7 100644
--- a/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/PhoenixReader.java
+++ b/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/PhoenixReader.java
@@ -236,7 +236,11 @@ public class PhoenixReader implements AutoCloseable {
public void load(ResultSet row) throws SQLException {
Array array = row.getArray(index);
if (array != null && array.getArray() != null) {
- Object[] values = (Object[]) array.getArray();
+ // Phoenix can create an array of primitives, so need to convert them
+      Object[] values = new Object[java.lang.reflect.Array.getLength(array.getArray())];
+ for (int i = 0; i < values.length; i++) {
+ values[i] = java.lang.reflect.Array.get(array.getArray(), i);
+ }
((ScalarArrayWriter) writer).setObjectArray(values);
}
}
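The reflective loop above is needed because `java.sql.Array#getArray` may return a primitive array such as `int[]`, which cannot be cast to `Object[]`. A self-contained illustration of the same boxing conversion (class name is illustrative):

```java
import java.lang.reflect.Array;

public class PrimitiveArrayBoxing {
  // Boxes any array (primitive or object) into an Object[].
  static Object[] toObjectArray(Object anyArray) {
    int length = Array.getLength(anyArray);
    Object[] boxed = new Object[length];
    for (int i = 0; i < length; i++) {
      boxed[i] = Array.get(anyArray, i); // auto-boxes primitive elements
    }
    return boxed;
  }

  public static void main(String[] args) {
    int[] primitives = {1, 2, 3};            // (Object[]) primitives would throw ClassCastException
    Object[] boxed = toObjectArray(primitives);
    System.out.println(boxed.length);        // 3
  }
}
```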
diff --git a/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/PhoenixStoragePlugin.java b/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/PhoenixStoragePlugin.java
index b46fef264d..542099565a 100644
--- a/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/PhoenixStoragePlugin.java
+++ b/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/PhoenixStoragePlugin.java
@@ -121,7 +121,7 @@ public class PhoenixStoragePlugin extends AbstractStoragePlugin {
try {
return CACHE.get(userName);
} catch (final ExecutionException e) {
- throw new SQLException("Failure setting up Phoenix DataSource (PQS
client)", e);
+ throw new SQLException("Failure setting up Phoenix DataSource (Phoenix
client)", e);
}
}
@@ -135,6 +135,6 @@ public class PhoenixStoragePlugin extends AbstractStoragePlugin {
     boolean impersonationEnabled = context.getConfig().getBoolean(ExecConstants.IMPERSONATION_ENABLED);
     return StringUtils.isNotBlank(config.getJdbcURL())
       ? new PhoenixDataSource(config.getJdbcURL(), userName, props, impersonationEnabled) // the props is initiated.
-      : new PhoenixDataSource(config.getHost(), config.getPort(), userName, props, impersonationEnabled);
+      : new PhoenixDataSource(config.getZkQuorum(), config.getPort(), config.getZkPath(), userName, props, impersonationEnabled);
}
}
diff --git a/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/PhoenixStoragePluginConfig.java b/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/PhoenixStoragePluginConfig.java
index 8a9e6a8950..2bac97b26e 100644
--- a/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/PhoenixStoragePluginConfig.java
+++ b/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/PhoenixStoragePluginConfig.java
@@ -38,26 +38,28 @@ import com.fasterxml.jackson.annotation.JsonTypeName;
public class PhoenixStoragePluginConfig extends StoragePluginConfig {
public static final String NAME = "phoenix";
-  public static final String THIN_DRIVER_CLASS = "org.apache.phoenix.queryserver.client.Driver";
   public static final String FAT_DRIVER_CLASS = "org.apache.phoenix.jdbc.PhoenixDriver";
- private final String host;
- private final int port;
- private final String jdbcURL; // (options) Equal to host + port
+ private final String zkQuorum;
+ private final String zkPath;
+ private final Integer port;
+ private final String jdbcURL; // (options) Equal to host + port + zkPath
   private final Map<String, Object> props; // (options) See also http://phoenix.apache.org/tuning.html
@JsonCreator
public PhoenixStoragePluginConfig(
- @JsonProperty("host") String host,
- @JsonProperty("port") int port,
+ @JsonProperty("zkQuorum") String zkQuorum,
+ @JsonProperty("port") Integer port,
+ @JsonProperty("zkPath") String zkPath,
@JsonProperty("userName") String userName,
@JsonProperty("password") String password,
@JsonProperty("jdbcURL") String jdbcURL,
@JsonProperty("credentialsProvider") CredentialsProvider
credentialsProvider,
@JsonProperty("props") Map<String, Object> props) {
super(CredentialProviderUtils.getCredentialsProvider(userName, password,
credentialsProvider), credentialsProvider == null);
- this.host = host;
- this.port = port == 0 ? 8765 : port;
+ this.zkQuorum = zkQuorum;
+ this.zkPath = zkPath;
+ this.port = port;
this.jdbcURL = jdbcURL;
this.props = props == null ? Collections.emptyMap() : props;
}
@@ -69,13 +71,18 @@ public class PhoenixStoragePluginConfig extends StoragePluginConfig {
.build();
}
- @JsonProperty("host")
- public String getHost() {
- return host;
+ @JsonProperty("zkQuorum")
+ public String getZkQuorum() {
+ return zkQuorum;
+ }
+
+ @JsonProperty("zkPath")
+ public String getZkPath() {
+ return zkPath;
}
@JsonProperty("port")
- public int getPort() {
+ public Integer getPort() {
return port;
}
@@ -124,7 +131,9 @@ public class PhoenixStoragePluginConfig extends StoragePluginConfig {
return Objects.equals(this.jdbcURL, config.getJdbcURL());
}
// Then the host and port
-    return Objects.equals(this.host, config.getHost()) && Objects.equals(this.port, config.getPort());
+ return Objects.equals(this.zkQuorum, config.getZkQuorum())
+ && Objects.equals(this.port, config.getPort())
+ && Objects.equals(this.zkPath, config.getZkPath());
}
@Override
@@ -132,15 +141,15 @@ public class PhoenixStoragePluginConfig extends StoragePluginConfig {
if (StringUtils.isNotBlank(jdbcURL)) {
return Objects.hash(jdbcURL);
}
- return Objects.hash(host, port);
+ return Objects.hash(zkQuorum, port, zkPath);
}
@Override
public String toString() {
return new PlanStringBuilder(PhoenixStoragePluginConfig.NAME)
- .field("driverName", THIN_DRIVER_CLASS)
- .field("host", host)
+ .field("zkQuorum", zkQuorum)
.field("port", port)
+ .field("zkPath", zkPath)
.field("userName", getUsername())
.maskedField("password", getPassword()) // will set to "*******"
.field("jdbcURL", jdbcURL)
diff --git a/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/rules/PhoenixImplementor.java b/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/rules/PhoenixImplementor.java
index 29a45d8380..096506ba09 100644
--- a/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/rules/PhoenixImplementor.java
+++ b/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/rules/PhoenixImplementor.java
@@ -47,14 +47,12 @@ public class PhoenixImplementor extends JdbcImplementor {
if (node instanceof SqlIdentifier) {
SqlIdentifier identifier = (SqlIdentifier) node;
String name = identifier.names.get(identifier.names.size() -1);
- if (!aliasSet.contains(name)) {
- /*
- * phoenix does not support the 'SELECT `table_name`.`field_name`',
- * need to force the alias name and start from `table_name0`,
- * the result is that 'SELECT `table_name0`.`field_name`'.
- */
- aliasSet.add(name);
- }
+ /*
+ * phoenix does not support the 'SELECT `table_name`.`field_name`',
+ * need to force the alias name and start from `table_name0`,
+ * the result is that 'SELECT `table_name0`.`field_name`'.
+ */
+ aliasSet.add(name);
}
return super.result(node, clauses, rel, aliases);
}
@@ -67,12 +65,12 @@ public class PhoenixImplementor extends JdbcImplementor {
@Override
public Result visit(Filter e) {
final RelNode input = e.getInput();
- Result x = visitRoot(input);
- parseCorrelTable(e, x);
if (input instanceof Aggregate) {
return super.visit(e);
} else {
- final Builder builder = x.builder(e, Clause.WHERE);
+ final Result x = visitInput(e, 0, Clause.WHERE);
+ parseCorrelTable(e, x);
+ final Builder builder = x.builder(e);
builder.setWhere(builder.context.toSql(null, e.getCondition()));
final List<SqlNode> selectList = new ArrayList<>();
e.getRowType().getFieldNames().forEach(fieldName -> {
diff --git a/contrib/storage-phoenix/src/main/resources/bootstrap-storage-plugins.json b/contrib/storage-phoenix/src/main/resources/bootstrap-storage-plugins.json
index a1890f0292..82254e52b9 100644
--- a/contrib/storage-phoenix/src/main/resources/bootstrap-storage-plugins.json
+++ b/contrib/storage-phoenix/src/main/resources/bootstrap-storage-plugins.json
@@ -2,10 +2,10 @@
"storage": {
"phoenix": {
"type": "phoenix",
- "host": "the.queryserver.hostname",
+ "zkQuorum": "zk.quorum.hostnames",
"port": 8765,
"props" : {
- "phoenix.query.timeoutMs": 60000
+ "hbase.client.retries.number": 10
},
"enabled": false
}
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixBaseTest.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixBaseTest.java
index 9aea2614c1..ecb08c7979 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixBaseTest.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixBaseTest.java
@@ -59,15 +59,15 @@ public class PhoenixBaseTest extends ClusterTest {
@BeforeClass
public static void setUpBeforeClass() throws Exception {
TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
- PhoenixTestSuite.initPhoenixQueryServer();
+ PhoenixTestSuite.initPhoenix();
if (PhoenixTestSuite.isRunningSuite()) {
- QueryServerBasicsIT.testCatalogs();
+ PhoenixBasicsIT.testCatalogs();
}
startDrillCluster();
if (initCount.incrementAndGet() == 1) {
- createSchema(QueryServerBasicsIT.CONN_STRING);
- createTables(QueryServerBasicsIT.CONN_STRING);
- createSampleData(QueryServerBasicsIT.CONN_STRING);
+ createSchema(PhoenixBasicsIT.CONN_STRING);
+ createTables(PhoenixBasicsIT.CONN_STRING);
+ createSampleData(PhoenixBasicsIT.CONN_STRING);
}
}
@@ -85,8 +85,8 @@ public class PhoenixBaseTest extends ClusterTest {
props.put("phoenix.query.timeoutMs", 90000);
props.put("phoenix.query.keepAliveMs", "30000");
    StoragePluginRegistry registry = cluster.drillbit().getContext().getStorage();
-    PhoenixStoragePluginConfig config = new PhoenixStoragePluginConfig(null, 0, null, null,
-      QueryServerBasicsIT.CONN_STRING, null, props);
+    PhoenixStoragePluginConfig config = new PhoenixStoragePluginConfig(null, 0, null, null, null,
+      PhoenixBasicsIT.CONN_STRING, null, props);
config.setEnabled(true);
registry.put(PhoenixStoragePluginConfig.NAME + "123", config);
dirTestWatcher.copyResourceToRoot(Paths.get(""));
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/QueryServerBasicsIT.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixBasicsIT.java
similarity index 54%
rename from contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/QueryServerBasicsIT.java
rename to contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixBasicsIT.java
index 4a48bfc61f..bb61e01632 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/QueryServerBasicsIT.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixBasicsIT.java
@@ -17,52 +17,54 @@
*/
package org.apache.drill.exec.store.phoenix;
+import static org.apache.hadoop.hbase.HConstants.HBASE_DIR;
import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_CAT;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.fail;
+import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
-import java.util.concurrent.TimeUnit;
+import java.util.Optional;
import org.apache.hadoop.conf.Configuration;
-import org.apache.phoenix.end2end.QueryServerThread;
-import org.apache.phoenix.query.BaseTest;
-import org.apache.phoenix.queryserver.QueryServerProperties;
-import org.apache.phoenix.util.ReadOnlyProps;
-import org.apache.phoenix.util.ThinClientUtil;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.LocalHBaseCluster;
+import org.apache.phoenix.util.PhoenixRuntime;
import org.slf4j.LoggerFactory;
/**
- * This is a copy of {@link org.apache.phoenix.end2end.QueryServerBasicsIT} until
+ * This is a copy of {@code org.apache.phoenix.end2end.QueryServerBasicsIT} until
  * <a href="https://issues.apache.org/jira/browse/PHOENIX-6613">PHOENIX-6613</a> is fixed
*/
-public class QueryServerBasicsIT extends BaseTest {
+public class PhoenixBasicsIT {
+ private static final HBaseTestingUtility util = new HBaseTestingUtility();
-  private static final org.slf4j.Logger logger = LoggerFactory.getLogger(QueryServerBasicsIT.class);
+  private static final org.slf4j.Logger logger = LoggerFactory.getLogger(PhoenixBasicsIT.class);
- private static QueryServerThread AVATICA_SERVER;
- private static Configuration CONF;
protected static String CONN_STRING;
+ static LocalHBaseCluster hbaseCluster;
public static synchronized void doSetup() throws Exception {
- setUpTestDriver(ReadOnlyProps.EMPTY_PROPS);
+ Configuration conf = util.getConfiguration();
+ // Start ZK by hand
+ util.startMiniZKCluster();
+    Path rootdir = util.getDataTestDirOnTestFS(PhoenixBasicsIT.class.getSimpleName());
+    // There is no setRootdir method that is available in all supported HBase versions.
+ conf.set(HBASE_DIR, rootdir.toString());
+ hbaseCluster = new LocalHBaseCluster(conf, 1);
+ hbaseCluster.startup();
-    CONF = config;
-    if(System.getProperty("do.not.randomize.pqs.port") == null) {
-      CONF.setInt(QueryServerProperties.QUERY_SERVER_HTTP_PORT_ATTRIB, 0);
-    }
-    String url = getUrl();
-    AVATICA_SERVER = new QueryServerThread(new String[] { url }, CONF, QueryServerBasicsIT.class.getName());
-    AVATICA_SERVER.start();
-    AVATICA_SERVER.getQueryServer().awaitRunning();
-    final int port = AVATICA_SERVER.getQueryServer().getPort();
-    logger.info("Avatica server started on port " + port);
-    CONN_STRING = ThinClientUtil.getConnectionUrl("localhost", port);
-    logger.info("JDBC connection string is " + CONN_STRING);
+    CONN_STRING = PhoenixRuntime.JDBC_PROTOCOL + ":localhost:" + getZookeeperPort();
+    logger.info("JDBC connection string is " + CONN_STRING);
+  }
+
+  public static int getZookeeperPort() {
+    return util.getConfiguration().getInt(HConstants.ZOOKEEPER_CLIENT_PORT, 2181);
   }
public static void testCatalogs() throws Exception {
@@ -77,14 +79,8 @@ public class QueryServerBasicsIT extends BaseTest {
}
}
-  public static synchronized void afterClass() throws Exception {
-    if (AVATICA_SERVER != null) {
-      AVATICA_SERVER.join(TimeUnit.SECONDS.toSeconds(3));
-      Throwable t = AVATICA_SERVER.getQueryServer().getThrowable();
-      if (t != null) {
-        fail("query server threw. " + t.getMessage());
-      }
-      assertEquals("query server didn't exit cleanly", 0, AVATICA_SERVER.getQueryServer().getRetCode());
-    }
+  public static synchronized void afterClass() throws IOException {
+    Optional.of(hbaseCluster).ifPresent(LocalHBaseCluster::shutdown);
+    util.shutdownMiniCluster();
}
}
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixSQLTest.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixSQLTest.java
index 8e091b9b87..39cddcc92f 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixSQLTest.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixSQLTest.java
@@ -220,7 +220,7 @@ public class PhoenixSQLTest extends PhoenixBaseTest {
sets.clear();
}
- @Ignore("use the remote query server directly without minicluster")
+ @Ignore("use the remote phoenix directly without minicluster")
@Test
public void testJoinWithFilterPushdown() throws Exception {
String sql = "select 10 as DRILL, a.n_name, b.r_name from
phoenix123.v1.nation a join phoenix123.v1.region b "
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixTestSuite.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixTestSuite.java
index a0f19ff07c..0e430904c2 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixTestSuite.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixTestSuite.java
@@ -24,7 +24,6 @@ import org.apache.drill.categories.SlowTest;
import org.apache.drill.test.BaseTest;
import org.junit.AfterClass;
import org.junit.BeforeClass;
-import org.junit.Ignore;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
@@ -38,22 +37,21 @@ import org.slf4j.LoggerFactory;
PhoenixSQLTest.class,
PhoenixCommandTest.class
})
-@Ignore
@Category({ SlowTest.class })
public class PhoenixTestSuite extends BaseTest {
  private static final org.slf4j.Logger logger = LoggerFactory.getLogger(PhoenixTestSuite.class);
private static volatile boolean runningSuite = false;
- private static AtomicInteger initCount = new AtomicInteger(0);
+ private static final AtomicInteger initCount = new AtomicInteger(0);
@BeforeClass
- public static void initPhoenixQueryServer() throws Exception {
+ public static void initPhoenix() throws Exception {
TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
synchronized (PhoenixTestSuite.class) {
if (initCount.get() == 0) {
logger.info("Boot the test cluster...");
- QueryServerBasicsIT.doSetup();
+ PhoenixBasicsIT.doSetup();
}
initCount.incrementAndGet();
runningSuite = true;
@@ -65,7 +63,7 @@ public class PhoenixTestSuite extends BaseTest {
synchronized (PhoenixTestSuite.class) {
if (initCount.decrementAndGet() == 0) {
logger.info("Shutdown all instances of test cluster.");
- QueryServerBasicsIT.afterClass();
+ PhoenixBasicsIT.afterClass();
}
}
}
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/HttpParamImpersonationQueryServerIT.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/ImpersonationPhoenixIT.java
similarity index 76%
rename from contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/HttpParamImpersonationQueryServerIT.java
rename to contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/ImpersonationPhoenixIT.java
index f0f0e1118a..9a4fb259ea 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/HttpParamImpersonationQueryServerIT.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/ImpersonationPhoenixIT.java
@@ -25,26 +25,23 @@ import org.apache.hadoop.hbase.security.access.AccessController;
import org.apache.hadoop.hbase.security.access.Permission.Action;
import org.apache.hadoop.hbase.security.token.TokenProvider;
import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
-import org.apache.phoenix.end2end.TlsUtil;
import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
-import org.apache.phoenix.queryserver.QueryServerOptions;
-import org.apache.phoenix.queryserver.QueryServerProperties;
-import org.apache.phoenix.queryserver.client.Driver;
+import org.apache.phoenix.util.PhoenixRuntime;
import org.junit.experimental.categories.Category;
import java.util.Arrays;
import java.util.List;
-import static org.apache.drill.exec.store.phoenix.secured.QueryServerEnvironment.LOGIN_USER;
+import static org.apache.drill.exec.store.phoenix.secured.PhoenixEnvironment.LOGIN_USER;
/**
- * This is a copy of {@link org.apache.phoenix.end2end.HttpParamImpersonationQueryServerIT},
+ * This is a copy of {@code org.apache.phoenix.end2end.HttpParamImpersonationQueryServerIT},
  * but customized with 3 users, see {@link SecuredPhoenixBaseTest#runForThreeClients} for details
*/
@Category(NeedsOwnMiniClusterTest.class)
-public class HttpParamImpersonationQueryServerIT {
+public class ImpersonationPhoenixIT {
- public static QueryServerEnvironment environment;
+ public static PhoenixEnvironment environment;
private static final List<TableName> SYSTEM_TABLE_NAMES = Arrays.asList(
PhoenixDatabaseMetaData.SYSTEM_CATALOG_HBASE_TABLE_NAME,
@@ -54,7 +51,7 @@ public class HttpParamImpersonationQueryServerIT {
PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_HBASE_TABLE_NAME,
PhoenixDatabaseMetaData.SYSTEM_STATS_HBASE_TABLE_NAME);
-  public static synchronized void startQueryServerEnvironment() throws Exception {
+  public static synchronized void startPhoenixEnvironment() throws Exception {
     // Clean up previous environment if any (Junit 4.13 @BeforeParam / @AfterParam would be an alternative)
if(environment != null) {
stopEnvironment();
@@ -69,8 +66,7 @@ public class HttpParamImpersonationQueryServerIT {
    // so that the user who is running the Drillbits/MiniDfs can impersonate user1 and user2 (not user3)
    conf.set(String.format("hadoop.proxyuser.%s.hosts", LOGIN_USER), "*");
    conf.set(String.format("hadoop.proxyuser.%s.users", LOGIN_USER), "user1,user2");
-    conf.setBoolean(QueryServerProperties.QUERY_SERVER_WITH_REMOTEUSEREXTRACTOR_ATTRIB, true);
- environment = new QueryServerEnvironment(conf, 3, false);
+ environment = new PhoenixEnvironment(conf, 3, false);
}
public static synchronized void stopEnvironment() throws Exception {
@@ -79,14 +75,7 @@ public class HttpParamImpersonationQueryServerIT {
}
static public String getUrlTemplate() {
-    String url = Driver.CONNECT_STRING_PREFIX + "url=%s://localhost:" + environment.getPqsPort() + "?"
-      + QueryServerOptions.DEFAULT_QUERY_SERVER_REMOTEUSEREXTRACTOR_PARAM + "=%s;authentication=SPNEGO;serialization=PROTOBUF%s";
-    if (environment.getTls()) {
-      return String.format(url, "https", "%s", ";truststore=" + TlsUtil.getTrustStoreFile().getAbsolutePath()
-        + ";truststore_password=" + TlsUtil.getTrustStorePassword());
-    } else {
-      return String.format(url, "http", "%s", "");
-    }
+    return PhoenixRuntime.JDBC_PROTOCOL + ":localhost:%s";
}
  static void grantUsersToPhoenixSystemTables(List<String> usersToGrant) throws Exception {
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/QueryServerEnvironment.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/PhoenixEnvironment.java
similarity index 57%
rename from contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/QueryServerEnvironment.java
rename to contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/PhoenixEnvironment.java
index 6cbe8df6f5..88b3d60785 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/QueryServerEnvironment.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/PhoenixEnvironment.java
@@ -25,41 +25,34 @@ import java.io.File;
import java.io.IOException;
import java.lang.reflect.Field;
import java.net.InetAddress;
-import java.security.PrivilegedAction;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.UUID;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.LocalHBaseCluster;
import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
+import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.util.KerberosName;
-import org.apache.phoenix.end2end.TlsUtil;
import org.apache.phoenix.query.ConfigurationFactory;
-import org.apache.phoenix.queryserver.QueryServerProperties;
-import org.apache.phoenix.queryserver.server.QueryServer;
import org.apache.phoenix.util.InstanceResolver;
-import org.apache.phoenix.util.ThinClientUtil;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.phoenix.util.PhoenixRuntime;
/**
 * This is a copy of class from `org.apache.phoenix:phoenix-queryserver-it`,
- * see original javadoc in {@link org.apache.phoenix.end2end.QueryServerEnvironment}.
- *
+ * see original javadoc in {@code org.apache.phoenix.end2end.QueryServerEnvironment}.
+ * <p>
 * It is possible to use original QueryServerEnvironment, but need to solve several issues:
 * <ul>
 * <li>TlsUtil.getClasspathDir(QueryServerEnvironment.class); in QueryServerEnvironment fails due to the path from jar.
@@ -72,12 +65,11 @@ import org.slf4j.LoggerFactory;
* your local machine login user</li>
* </ul>
*/
-public class QueryServerEnvironment {
-  private static final Logger LOG = LoggerFactory.getLogger(QueryServerEnvironment.class);
+public class PhoenixEnvironment {
- private final File TEMP_DIR = new File(getTempDir());
- private final File KEYTAB_DIR = new File(TEMP_DIR, "keytabs");
- private final List<File> USER_KEYTAB_FILES = new ArrayList<>();
+ private final File tempDir = new File(getTempDir());
+ private final File keytabDir = new File(tempDir, "keytabs");
+ private final List<File> userKeytabFiles = new ArrayList<>();
private static final String LOCAL_HOST_REVERSE_DNS_LOOKUP_NAME;
static final String LOGIN_USER;
@@ -97,51 +89,40 @@ public class QueryServerEnvironment {
private static final String SPNEGO_PRINCIPAL = "HTTP/" +
LOCAL_HOST_REVERSE_DNS_LOOKUP_NAME;
private static final String PQS_PRINCIPAL = "phoenixqs/" +
LOCAL_HOST_REVERSE_DNS_LOOKUP_NAME;
private static final String SERVICE_PRINCIPAL = LOGIN_USER + "/" +
LOCAL_HOST_REVERSE_DNS_LOOKUP_NAME;
- private File KEYTAB;
+ private final File keytab;
- private MiniKdc KDC;
- private HBaseTestingUtility UTIL = new HBaseTestingUtility();
- private LocalHBaseCluster HBASE_CLUSTER;
- private int NUM_CREATED_USERS;
+ private final MiniKdc kdc;
+ private final HBaseTestingUtility util = new HBaseTestingUtility(conf());
+ private final LocalHBaseCluster hbaseCluster;
+ private int numCreatedUsers;
- private ExecutorService PQS_EXECUTOR;
- private QueryServer PQS;
- private int PQS_PORT;
- private String PQS_URL;
+ private final String phoenixUrl;
- private boolean tls;
+ private static Configuration conf() {
+ Configuration configuration = HBaseConfiguration.create();
+ configuration.set(User.HBASE_SECURITY_CONF_KEY, "kerberos");
+ return configuration;
+ }
private static String getTempDir() {
StringBuilder sb = new StringBuilder(32);
sb.append(System.getProperty("user.dir")).append(File.separator);
sb.append("target").append(File.separator);
- sb.append(QueryServerEnvironment.class.getSimpleName());
+ sb.append(PhoenixEnvironment.class.getSimpleName());
sb.append("-").append(UUID.randomUUID());
return sb.toString();
}
- public int getPqsPort() {
- return PQS_PORT;
- }
-
- public String getPqsUrl() {
- return PQS_URL;
- }
-
- public boolean getTls() {
- return tls;
+ public String getPhoenixUrl() {
+ return phoenixUrl;
}
public HBaseTestingUtility getUtil() {
- return UTIL;
- }
-
- public String getServicePrincipal() {
- return SERVICE_PRINCIPAL;
+ return util;
}
public File getServiceKeytab() {
- return KEYTAB;
+ return keytab;
}
private static void updateDefaultRealm() throws Exception {
@@ -154,21 +135,21 @@ public class QueryServerEnvironment {
}
private void createUsers(int numUsers) throws Exception {
- assertNotNull("KDC is null, was setup method called?", KDC);
- NUM_CREATED_USERS = numUsers;
+ assertNotNull("KDC is null, was setup method called?", kdc);
+ numCreatedUsers = numUsers;
for (int i = 1; i <= numUsers; i++) {
String principal = "user" + i;
- File keytabFile = new File(KEYTAB_DIR, principal + ".keytab");
- KDC.createPrincipal(keytabFile, principal);
- USER_KEYTAB_FILES.add(keytabFile);
+ File keytabFile = new File(keytabDir, principal + ".keytab");
+ kdc.createPrincipal(keytabFile, principal);
+ userKeytabFiles.add(keytabFile);
}
}
public Map.Entry<String, File> getUser(int offset) {
- if (!(offset > 0 && offset <= NUM_CREATED_USERS)) {
+ if (!(offset > 0 && offset <= numCreatedUsers)) {
throw new IllegalArgumentException();
}
- return new AbstractMap.SimpleImmutableEntry<String, File>("user" + offset,
USER_KEYTAB_FILES.get(offset - 1));
+ return new AbstractMap.SimpleImmutableEntry<>("user" + offset,
userKeytabFiles.get(offset - 1));
}
/**
@@ -177,13 +158,13 @@ public class QueryServerEnvironment {
  private void setHdfsSecuredConfiguration(Configuration conf) throws Exception {
// Set principal+keytab configuration for HDFS
    conf.set(DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY,
-      SERVICE_PRINCIPAL + "@" + KDC.getRealm());
-    conf.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, KEYTAB.getAbsolutePath());
+      SERVICE_PRINCIPAL + "@" + kdc.getRealm());
+    conf.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, keytab.getAbsolutePath());
    conf.set(DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY,
-      SERVICE_PRINCIPAL + "@" + KDC.getRealm());
-    conf.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, KEYTAB.getAbsolutePath());
+      SERVICE_PRINCIPAL + "@" + kdc.getRealm());
+    conf.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, keytab.getAbsolutePath());
    conf.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
-      SPNEGO_PRINCIPAL + "@" + KDC.getRealm());
+      SPNEGO_PRINCIPAL + "@" + kdc.getRealm());
// Enable token access for HDFS blocks
conf.setBoolean(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
// Only use HTTPS (required because we aren't using "secure" ports)
@@ -193,10 +174,8 @@ public class QueryServerEnvironment {
conf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
// Generate SSL certs
-    File keystoresDir = new File(UTIL.getDataTestDir("keystore").toUri().getPath());
+    File keystoresDir = new File(util.getDataTestDir("keystore").toUri().getPath());
keystoresDir.mkdirs();
- String sslConfDir = TlsUtil.getClasspathDir(QueryServerEnvironment.class);
-    TlsUtil.setupSSLConfig(keystoresDir.getAbsolutePath(), sslConfDir, conf, false);
// Magic flag to tell hdfs to not fail on using ports above 1024
conf.setBoolean("ignore.secure.ports.for.testing", true);
@@ -215,34 +194,32 @@ public class QueryServerEnvironment {
/**
* Setup and start kerberosed, hbase
- * @throws Exception
*/
-  public QueryServerEnvironment(final Configuration confIn, int numberOfUsers, boolean tls)
+  public PhoenixEnvironment(final Configuration confIn, int numberOfUsers, boolean tls)
throws Exception {
- this.tls = tls;
- Configuration conf = UTIL.getConfiguration();
+ Configuration conf = util.getConfiguration();
conf.addResource(confIn);
// Ensure the dirs we need are created/empty
- ensureIsEmptyDirectory(TEMP_DIR);
- ensureIsEmptyDirectory(KEYTAB_DIR);
- KEYTAB = new File(KEYTAB_DIR, "test.keytab");
+ ensureIsEmptyDirectory(tempDir);
+ ensureIsEmptyDirectory(keytabDir);
+ keytab = new File(keytabDir, "test.keytab");
// Start a MiniKDC
- KDC = UTIL.setupMiniKdc(KEYTAB);
+ kdc = util.setupMiniKdc(keytab);
// Create a service principal and spnego principal in one keytab
    // NB. Due to some apparent limitations between HDFS and HBase in the same JVM, trying to
    // use separate identies for HBase and HDFS results in a GSS initiate error. The quick
    // solution is to just use a single "service" principal instead of "hbase" and "hdfs"
    // (or "dn" and "nn") per usual.
-    KDC.createPrincipal(KEYTAB, SPNEGO_PRINCIPAL, PQS_PRINCIPAL, SERVICE_PRINCIPAL);
+    kdc.createPrincipal(keytab, SPNEGO_PRINCIPAL, PQS_PRINCIPAL, SERVICE_PRINCIPAL);
// Start ZK by hand
- UTIL.startMiniZKCluster();
+ util.startMiniZKCluster();
// Create a number of unprivileged users
createUsers(numberOfUsers);
// Set configuration for HBase
- HBaseKerberosUtils.setPrincipalForTesting(SERVICE_PRINCIPAL + "@" +
KDC.getRealm());
+ HBaseKerberosUtils.setPrincipalForTesting(SERVICE_PRINCIPAL + "@" +
kdc.getRealm());
HBaseKerberosUtils.setSecuredConfiguration(conf);
setHdfsSecuredConfiguration(conf);
UserGroupInformation.setConfiguration(conf);
@@ -251,33 +228,6 @@ public class QueryServerEnvironment {
conf.setInt(HConstants.REGIONSERVER_PORT, 0);
conf.setInt(HConstants.REGIONSERVER_INFO_PORT, 0);
- if (tls) {
- conf.setBoolean(QueryServerProperties.QUERY_SERVER_TLS_ENABLED, true);
- conf.set(QueryServerProperties.QUERY_SERVER_TLS_KEYSTORE,
- TlsUtil.getKeyStoreFile().getAbsolutePath());
- conf.set(QueryServerProperties.QUERY_SERVER_TLS_KEYSTORE_PASSWORD,
- TlsUtil.getKeyStorePassword());
- conf.set(QueryServerProperties.QUERY_SERVER_TLS_TRUSTSTORE,
- TlsUtil.getTrustStoreFile().getAbsolutePath());
- conf.set(QueryServerProperties.QUERY_SERVER_TLS_TRUSTSTORE_PASSWORD,
- TlsUtil.getTrustStorePassword());
- }
-
- // Secure Phoenix setup
-    conf.set(QueryServerProperties.QUERY_SERVER_KERBEROS_HTTP_PRINCIPAL_ATTRIB_LEGACY,
- SPNEGO_PRINCIPAL + "@" + KDC.getRealm());
- conf.set(QueryServerProperties.QUERY_SERVER_HTTP_KEYTAB_FILENAME_ATTRIB,
- KEYTAB.getAbsolutePath());
- conf.set(QueryServerProperties.QUERY_SERVER_KERBEROS_PRINCIPAL_ATTRIB,
- PQS_PRINCIPAL + "@" + KDC.getRealm());
- conf.set(QueryServerProperties.QUERY_SERVER_KEYTAB_FILENAME_ATTRIB,
- KEYTAB.getAbsolutePath());
-    conf.setBoolean(QueryServerProperties.QUERY_SERVER_DISABLE_KERBEROS_LOGIN, true);
- conf.setInt(QueryServerProperties.QUERY_SERVER_HTTP_PORT_ATTRIB, 0);
- // Required so that PQS can impersonate the end-users to HBase
- conf.set("hadoop.proxyuser.phoenixqs.groups", "*");
- conf.set("hadoop.proxyuser.phoenixqs.hosts", "*");
-
// Clear the cached singletons so we can inject our own.
InstanceResolver.clearSingletons();
// Make sure the ConnectionInfo doesn't try to pull a default Configuration
@@ -296,72 +246,33 @@ public class QueryServerEnvironment {
});
updateDefaultRealm();
- // Start HDFS
- UTIL.startMiniDFSCluster(1);
    // Use LocalHBaseCluster to avoid HBaseTestingUtility from doing something wrong
    // NB. I'm not actually sure what HTU does incorrect, but this was pulled from some test
    // classes in HBase itself. I couldn't get HTU to work myself (2017/07/06)
-    Path rootdir = UTIL.getDataTestDirOnTestFS(QueryServerEnvironment.class.getSimpleName());
+    Path rootdir = util.getDataTestDirOnTestFS(PhoenixEnvironment.class.getSimpleName());
    // There is no setRootdir method that is available in all supported HBase versions.
    conf.set(HBASE_DIR, rootdir.toString());
-    HBASE_CLUSTER = new LocalHBaseCluster(conf, 1);
-    HBASE_CLUSTER.startup();
+    hbaseCluster = new LocalHBaseCluster(conf, 1);
+    hbaseCluster.startup();
-    // Then fork a thread with PQS in it.
-    configureAndStartQueryServer(tls);
+    phoenixUrl = PhoenixRuntime.JDBC_PROTOCOL + ":localhost:" + getZookeeperPort();
}
- private void configureAndStartQueryServer(boolean tls) throws Exception {
- PQS = new QueryServer(new String[0], UTIL.getConfiguration());
- // Get the PQS ident for PQS to use
- final UserGroupInformation ugi =
- UserGroupInformation.loginUserFromKeytabAndReturnUGI(PQS_PRINCIPAL,
- KEYTAB.getAbsolutePath());
- PQS_EXECUTOR = Executors.newSingleThreadExecutor();
-    // Launch PQS, doing in the Kerberos login instead of letting PQS do it itself (which would
- // break the HBase/HDFS logins also running in the same test case).
- PQS_EXECUTOR.submit(new Runnable() {
- @Override
- public void run() {
- ugi.doAs(new PrivilegedAction<Void>() {
- @Override
- public Void run() {
- PQS.run();
- return null;
- }
- });
- }
- });
- PQS.awaitRunning();
- PQS_PORT = PQS.getPort();
- PQS_URL =
- ThinClientUtil.getConnectionUrl(tls ? "https" : "http", "localhost",
PQS_PORT)
- + ";authentication=SPNEGO" + (tls
- ? ";truststore=" + TlsUtil.getTrustStoreFile().getAbsolutePath()
- + ";truststore_password=" + TlsUtil.getTrustStorePassword()
- : "");
- LOG.debug("Phoenix Query Server URL: {}", PQS_URL);
+ public int getZookeeperPort() {
+    return util.getConfiguration().getInt(HConstants.ZOOKEEPER_CLIENT_PORT, 2181);
}
public void stop() throws Exception {
// Remove our custom ConfigurationFactory for future tests
InstanceResolver.clearSingletons();
- if (PQS_EXECUTOR != null) {
- PQS.stop();
- PQS_EXECUTOR.shutdown();
- if (!PQS_EXECUTOR.awaitTermination(5, TimeUnit.SECONDS)) {
- LOG.info("PQS didn't exit in 5 seconds, proceeding anyways.");
- }
- }
- if (HBASE_CLUSTER != null) {
- HBASE_CLUSTER.shutdown();
- HBASE_CLUSTER.join();
- }
- if (UTIL != null) {
- UTIL.shutdownMiniZKCluster();
+ if (hbaseCluster != null) {
+ hbaseCluster.shutdown();
+ hbaseCluster.join();
}
- if (KDC != null) {
- KDC.stop();
+ util.shutdownMiniCluster();
+ if (kdc != null) {
+ kdc.stop();
}
+ util.closeConnection();
}
}
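Note: the environment above now hands tests a thick-driver JDBC URL built from PhoenixRuntime.JDBC_PROTOCOL and the mini-cluster's ZooKeeper port, instead of a Query Server HTTP URL. A minimal sketch of opening a connection against such an environment follows; the hard-coded port 2181 and the SYSTEM.CATALOG probe query are illustrative assumptions, not part of the patch.

  import java.sql.Connection;
  import java.sql.DriverManager;
  import java.sql.ResultSet;
  import java.sql.Statement;

  public class ThickDriverConnectSketch {
    public static void main(String[] args) throws Exception {
      // Same shape as the URL built above: "jdbc:phoenix:" + host + ":" + zkPort.
      // 2181 is a hypothetical ZooKeeper client port; tests read the real one
      // from the HBase configuration via getZookeeperPort().
      String phoenixUrl = "jdbc:phoenix:localhost:2181";
      try (Connection conn = DriverManager.getConnection(phoenixUrl);
           Statement stmt = conn.createStatement();
           ResultSet rs = stmt.executeQuery("SELECT TABLE_NAME FROM SYSTEM.CATALOG LIMIT 1")) {
        while (rs.next()) {
          System.out.println(rs.getString(1)); // prints a table name, if any exist
        }
      }
    }
  }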
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixBaseTest.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixBaseTest.java
index a490f443af..852a33650b 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixBaseTest.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixBaseTest.java
@@ -36,10 +36,8 @@ import org.apache.drill.test.ClusterTest;
import org.apache.drill.test.LogFixture;
import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.phoenix.queryserver.server.QueryServer;
-import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.BeforeAll;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -47,7 +45,7 @@ import java.io.File;
import java.nio.file.Paths;
import java.security.PrivilegedExceptionAction;
import java.util.Arrays;
-import java.util.HashMap;
+import java.util.Collections;
import java.util.Map;
import java.util.Properties;
import java.util.TimeZone;
@@ -56,11 +54,12 @@ import java.util.concurrent.atomic.AtomicInteger;
import static org.apache.drill.exec.store.phoenix.PhoenixBaseTest.createSampleData;
import static org.apache.drill.exec.store.phoenix.PhoenixBaseTest.createSchema;
import static org.apache.drill.exec.store.phoenix.PhoenixBaseTest.createTables;
-import static org.apache.drill.exec.store.phoenix.secured.HttpParamImpersonationQueryServerIT.environment;
-import static org.apache.drill.exec.store.phoenix.secured.HttpParamImpersonationQueryServerIT.getUrlTemplate;
-import static org.apache.drill.exec.store.phoenix.secured.HttpParamImpersonationQueryServerIT.grantUsersToGlobalPhoenixUserTables;
-import static org.apache.drill.exec.store.phoenix.secured.HttpParamImpersonationQueryServerIT.grantUsersToPhoenixSystemTables;
-import static org.apache.drill.exec.store.phoenix.secured.SecuredPhoenixTestSuite.initPhoenixQueryServer;
+import static org.apache.drill.exec.store.phoenix.secured.ImpersonationPhoenixIT.environment;
+import static org.apache.drill.exec.store.phoenix.secured.ImpersonationPhoenixIT.getUrlTemplate;
+import static org.apache.drill.exec.store.phoenix.secured.ImpersonationPhoenixIT.grantUsersToGlobalPhoenixUserTables;
+import static org.apache.drill.exec.store.phoenix.secured.ImpersonationPhoenixIT.grantUsersToPhoenixSystemTables;
+import static org.apache.drill.exec.store.phoenix.secured.SecuredPhoenixTestSuite.initPhoenix;
+import static org.junit.Assert.assertThrows;
public abstract class SecuredPhoenixBaseTest extends ClusterTest {
private static final Logger logger = LoggerFactory.getLogger(PhoenixDataSource.class);
@@ -70,10 +69,10 @@ public abstract class SecuredPhoenixBaseTest extends ClusterTest {
private final static AtomicInteger initCount = new AtomicInteger(0);
- @BeforeAll
+ @BeforeClass
public static void setUpBeforeClass() throws Exception {
TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
- initPhoenixQueryServer();
+ initPhoenix();
startSecuredDrillCluster();
initializeDatabase();
}
@@ -81,11 +80,10 @@ public abstract class SecuredPhoenixBaseTest extends ClusterTest {
private static void startSecuredDrillCluster() throws Exception {
logFixture = LogFixture.builder()
.toConsole()
- .logger(QueryServerEnvironment.class, CURRENT_LOG_LEVEL)
+ .logger(PhoenixEnvironment.class, CURRENT_LOG_LEVEL)
.logger(SecuredPhoenixBaseTest.class, CURRENT_LOG_LEVEL)
.logger(KerberosFactory.class, CURRENT_LOG_LEVEL)
.logger(Krb5LoginModule.class, CURRENT_LOG_LEVEL)
- .logger(QueryServer.class, CURRENT_LOG_LEVEL)
.logger(ServerAuthenticationHandler.class, CURRENT_LOG_LEVEL)
.build();
@@ -118,15 +116,10 @@ public abstract class SecuredPhoenixBaseTest extends ClusterTest {
user3ClientProperties.setProperty(DrillProperties.KEYTAB, user3.getValue().getAbsolutePath());
cluster.addClientFixture(user3ClientProperties);
- Map<String, Object> phoenixProps = new HashMap<>();
- phoenixProps.put("phoenix.query.timeoutMs", 90000);
- phoenixProps.put("phoenix.query.keepAliveMs", "30000");
- phoenixProps.put("phoenix.queryserver.withRemoteUserExtractor", true);
StoragePluginRegistry registry = cluster.drillbit().getContext().getStorage();
- final String doAsUrl = String.format(getUrlTemplate(), "$user");
- logger.debug("Phoenix Query Server URL: {}", environment.getPqsUrl());
- PhoenixStoragePluginConfig config = new PhoenixStoragePluginConfig(null, 0, null, null,
- doAsUrl, null, phoenixProps);
+ logger.debug("Phoenix URL: {}", environment.getPhoenixUrl());
+ PhoenixStoragePluginConfig config = new PhoenixStoragePluginConfig(null, 0, null, null, null,
+ getUrlTemplate(), null, Collections.emptyMap());
config.setEnabled(true);
registry.put(PhoenixStoragePluginConfig.NAME + "123", config);
}
@@ -143,9 +136,9 @@ public abstract class SecuredPhoenixBaseTest extends ClusterTest {
// Build the JDBC URL by hand with the doAs
final UserGroupInformation serviceUgi = ImpersonationUtil.getProcessUserUGI();
serviceUgi.doAs((PrivilegedExceptionAction<Void>) () -> {
- createSchema(environment.getPqsUrl());
- createTables(environment.getPqsUrl());
- createSampleData(environment.getPqsUrl());
+ createSchema(environment.getPhoenixUrl());
+ createTables(environment.getPhoenixUrl());
+ createSampleData(environment.getPhoenixUrl());
grantUsersToPhoenixSystemTables(Arrays.asList(user1.getKey(), user2.getKey()));
grantUsersToGlobalPhoenixUserTables(Arrays.asList(user1.getKey()));
return null;
@@ -173,19 +166,19 @@ public abstract class SecuredPhoenixBaseTest extends ClusterTest {
wrapper.apply();
client = cluster.client(1);
// original is AccessDeniedException: Insufficient permissions for user 'user2'
- Assertions.assertThrows(user2ExpectedException, wrapper::apply);
+ assertThrows(user2ExpectedException, wrapper::apply);
client = cluster.client(2);
// RuntimeException for user3, Failed to execute HTTP Request, got HTTP/401
- Assertions.assertThrows(user3ExpectedException, wrapper::apply);
+ assertThrows(user3ExpectedException, wrapper::apply);
} finally {
client = cluster.client(0);
}
}
- @AfterAll
+ @AfterClass
public static void tearDownCluster() throws Exception {
if (!SecuredPhoenixTestSuite.isRunningSuite() && environment != null) {
- HttpParamImpersonationQueryServerIT.stopEnvironment();
+ ImpersonationPhoenixIT.stopEnvironment();
}
}
}
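Note: the assertThrows calls above move from JUnit 5's Assertions to org.junit.Assert.assertThrows, added in JUnit 4.13, which takes the expected exception class and a ThrowingRunnable. A minimal, self-contained sketch of the JUnit 4 form; the test class and the triggered exception are illustrative, not from the patch.

  import static org.junit.Assert.assertThrows;

  import org.junit.Test;

  public class AssertThrowsSketch {
    @Test
    public void failingCallThrows() {
      // assertThrows returns the caught exception, so its message can be
      // inspected afterwards if needed.
      ArithmeticException e = assertThrows(ArithmeticException.class, () -> {
        int ignored = 1 / 0; // deliberately triggers the expected exception
      });
      System.out.println(e.getMessage()); // "/ by zero"
    }
  }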
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixCommandTest.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixCommandTest.java
index 1c54ff4482..46e1017080 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixCommandTest.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixCommandTest.java
@@ -26,12 +26,12 @@ import org.apache.drill.exec.record.metadata.SchemaBuilder;
import org.apache.drill.exec.record.metadata.TupleMetadata;
import org.apache.drill.test.QueryBuilder;
import org.apache.drill.test.rowSet.RowSetComparison;
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.Tag;
-import org.junit.jupiter.api.Test;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
-@Tag(SlowTest.TAG)
-@Tag(RowSetTest.TAG)
+import static org.junit.Assert.assertEquals;
+
+@Category({ SlowTest.class, RowSetTest.class })
public class SecuredPhoenixCommandTest extends SecuredPhoenixBaseTest {
@Test
@@ -42,7 +42,7 @@ public class SecuredPhoenixCommandTest extends SecuredPhoenixBaseTest {
private void doTestShowTablesLike() throws Exception {
runAndPrint("SHOW SCHEMAS");
run("USE phoenix123.V1");
- Assertions.assertEquals(1, queryBuilder().sql("SHOW TABLES LIKE '%REGION%'").run().recordCount());
+ assertEquals(1, queryBuilder().sql("SHOW TABLES LIKE '%REGION%'").run().recordCount());
}
@Test
@@ -77,6 +77,6 @@ public class SecuredPhoenixCommandTest extends SecuredPhoenixBaseTest {
private void doTestDescribe() throws Exception {
run("USE phoenix123.v1");
- Assertions.assertEquals(4, queryBuilder().sql("DESCRIBE NATION").run().recordCount());
+ assertEquals(4, queryBuilder().sql("DESCRIBE NATION").run().recordCount());
}
}
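Note: the @Tag annotations above become JUnit 4 @Category markers. Categories are plain Java types referenced from @Category, and surefire can include or exclude them via its groups/excludedGroups configuration. A minimal sketch with a hypothetical marker interface; the patch itself uses the existing Drill categories SlowTest.class and RowSetTest.class.

  import org.junit.Test;
  import org.junit.experimental.categories.Category;

  public class CategorySketchTest {
    // Hypothetical marker type standing in for Drill's SlowTest/RowSetTest.
    public interface SketchSlowTests {}

    @Category(SketchSlowTests.class)
    @Test
    public void categorizedCase() {
      // runs only when the SketchSlowTests category is not excluded
    }
  }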
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixDataTypeTest.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixDataTypeTest.java
index 2938f46102..67ffa5d647 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixDataTypeTest.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixDataTypeTest.java
@@ -26,8 +26,8 @@ import org.apache.drill.exec.record.metadata.SchemaBuilder;
import org.apache.drill.exec.record.metadata.TupleMetadata;
import org.apache.drill.test.QueryBuilder;
import org.apache.drill.test.rowSet.RowSetComparison;
-import org.junit.jupiter.api.Tag;
-import org.junit.jupiter.api.Test;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
import java.math.BigDecimal;
import java.time.Instant;
@@ -43,8 +43,7 @@ import static org.apache.drill.test.rowSet.RowSetUtilities.longArray;
import static org.apache.drill.test.rowSet.RowSetUtilities.shortArray;
import static org.apache.drill.test.rowSet.RowSetUtilities.strArray;
-@Tag(SlowTest.TAG)
-@Tag(RowSetTest.TAG)
+@Category({ SlowTest.class, RowSetTest.class })
public class SecuredPhoenixDataTypeTest extends SecuredPhoenixBaseTest {
@Test
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixSQLTest.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixSQLTest.java
index 86ecd3f8a2..55918b8121 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixSQLTest.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixSQLTest.java
@@ -28,14 +28,13 @@ import org.apache.drill.exec.record.metadata.TupleMetadata;
import org.apache.drill.exec.rpc.RpcException;
import org.apache.drill.test.QueryBuilder;
import org.apache.drill.test.rowSet.RowSetComparison;
-import org.junit.jupiter.api.Disabled;
-import org.junit.jupiter.api.Tag;
-import org.junit.jupiter.api.Test;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
-import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.Assert.assertEquals;
-@Tag(SlowTest.TAG)
-@Tag(RowSetTest.TAG)
+@Category({ SlowTest.class, RowSetTest.class })
public class SecuredPhoenixSQLTest extends SecuredPhoenixBaseTest {
@Test
@@ -175,7 +174,7 @@ public class SecuredPhoenixSQLTest extends SecuredPhoenixBaseTest {
String sql = "select count(*) as total from phoenix123.v1.nation";
String plan = queryBuilder().sql(sql).explainJson();
long cnt = queryBuilder().physical(plan).singletonLong();
- assertEquals(25, cnt, "Counts should match");
+ assertEquals("Counts should match", 25, cnt);
}
@Test
@@ -243,12 +242,12 @@ public class SecuredPhoenixSQLTest extends SecuredPhoenixBaseTest {
builder.planMatcher().exclude("Join").match();
- assertEquals(625, sets.rowCount(), "Counts should match");
+ assertEquals("Counts should match", 625, sets.rowCount());
sets.clear();
}
@Test
- @Disabled("use the remote query server directly without minicluster")
+ @Ignore("use the remote phoenix directly without minicluster")
public void testJoinWithFilterPushdown() throws Exception {
String sql = "select 10 as DRILL, a.n_name, b.r_name from
phoenix123.v1.nation a join phoenix123.v1.region b "
+ "on a.n_regionkey = b.r_regionkey where b.r_name = 'ASIA'";
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixTestSuite.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixTestSuite.java
index 2c4c6fadeb..89f1a99904 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixTestSuite.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixTestSuite.java
@@ -19,29 +19,26 @@ package org.apache.drill.exec.store.phoenix.secured;
import org.apache.drill.categories.RowSetTest;
import org.apache.drill.categories.SlowTest;
-import org.apache.drill.exec.store.phoenix.QueryServerBasicsIT;
+import org.apache.drill.exec.store.phoenix.PhoenixBasicsIT;
import org.apache.drill.test.BaseTest;
-import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.Disabled;
-import org.junit.jupiter.api.Tag;
-import org.junit.platform.suite.api.SelectClasses;
-import org.junit.platform.suite.api.Suite;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
import org.slf4j.LoggerFactory;
import java.util.TimeZone;
import java.util.concurrent.atomic.AtomicInteger;
-@Suite
-@SelectClasses({
+@RunWith(Suite.class)
+@Suite.SuiteClasses({
SecuredPhoenixDataTypeTest.class,
SecuredPhoenixSQLTest.class,
SecuredPhoenixCommandTest.class
})
-@Disabled
-@Tag(SlowTest.TAG)
-@Tag(RowSetTest.TAG)
+@Category({ SlowTest.class, RowSetTest.class })
public class SecuredPhoenixTestSuite extends BaseTest {
private static final org.slf4j.Logger logger = LoggerFactory.getLogger(SecuredPhoenixTestSuite.class);
@@ -49,25 +46,25 @@ public class SecuredPhoenixTestSuite extends BaseTest {
private static volatile boolean runningSuite = false;
private static final AtomicInteger initCount = new AtomicInteger(0);
- @BeforeAll
- public static void initPhoenixQueryServer() throws Exception {
+ @BeforeClass
+ public static void initPhoenix() throws Exception {
TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
synchronized (SecuredPhoenixTestSuite.class) {
if (initCount.get() == 0) {
logger.info("Boot the test cluster...");
- HttpParamImpersonationQueryServerIT.startQueryServerEnvironment();
+ ImpersonationPhoenixIT.startPhoenixEnvironment();
}
initCount.incrementAndGet();
runningSuite = true;
}
}
- @AfterAll
+ @AfterClass
public static void tearDownCluster() throws Exception {
synchronized (SecuredPhoenixTestSuite.class) {
if (initCount.decrementAndGet() == 0) {
logger.info("Shutdown all instances of test cluster.");
- QueryServerBasicsIT.afterClass();
+ PhoenixBasicsIT.afterClass();
}
}
}
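Note: the suite above drops the JUnit 5 platform annotations (@Suite/@SelectClasses) in favour of JUnit 4's Suite runner. A minimal sketch of that wiring, with hypothetical member classes standing in for the Secured* tests listed in the patch.

  import org.junit.Test;
  import org.junit.runner.RunWith;
  import org.junit.runners.Suite;

  @RunWith(Suite.class)
  @Suite.SuiteClasses({
    SketchSuite.FirstSketchTest.class,   // hypothetical members standing in
    SketchSuite.SecondSketchTest.class   // for the Secured* test classes
  })
  public class SketchSuite {
    public static class FirstSketchTest {
      @Test
      public void one() {}
    }
    public static class SecondSketchTest {
      @Test
      public void two() {}
    }
  }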
diff --git a/contrib/storage-phoenix/src/test/resources/hbase-site.xml b/contrib/storage-phoenix/src/test/resources/hbase-site.xml
index 159c7022e1..3ba8319f5f 100644
--- a/contrib/storage-phoenix/src/test/resources/hbase-site.xml
+++ b/contrib/storage-phoenix/src/test/resources/hbase-site.xml
@@ -28,4 +28,8 @@
<name>phoenix.schema.isNamespaceMappingEnabled</name>
<value>true</value>
</property>
+ <property>
+ <name>hbase.wal.provider</name>
+ <value>filesystem</value>
+ </property>
</configuration>
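Note: the hbase.wal.provider=filesystem property above pins the embedded test cluster to the FSHLog-based WAL. The rationale is inferred, not stated in the patch: the default async WAL implementation is sensitive to the Hadoop artifacts on the test classpath, which can break LocalHBaseCluster startup. A sketch of the programmatic equivalent, should a test build its own Configuration:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;

  public class WalProviderSketch {
    public static Configuration testConf() {
      Configuration conf = HBaseConfiguration.create();
      conf.set("hbase.wal.provider", "filesystem"); // FSHLog-based WAL, as in hbase-site.xml above
      return conf;
    }
  }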