[incubator-livy] branch master updated: [MINOR] Fix instructions on how to build Livy.
This is an automated email from the ASF dual-hosted git repository. mgaido pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/incubator-livy.git The following commit(s) were added to refs/heads/master by this push: new e5489d0 [MINOR] Fix instructions on how to build Livy. e5489d0 is described below commit e5489d0e15869c03065088464c2551cf45d320dd Author: meisam AuthorDate: Mon Jan 28 02:09:03 2019 -0800 [MINOR] Fix instructions on how to build Livy. ## What changes were proposed in this pull request? Update the README section with instructions on building Livy ## How was this patch tested? - Checked the rendered markdown. - Executed the instructions. ## What remains to be done? This instruction should be updated in the documentation each time Livy's repository name changes. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index a1f0854..d5219e5 100644 --- a/README.md +++ b/README.md @@ -67,7 +67,7 @@ Livy is built using [Apache Maven](http://maven.apache.org). To check out and bu ``` git clone https://github.com/apache/incubator-livy.git -cd livy +cd incubator-livy mvn package ```
[incubator-livy] branch master updated: [LIVY-574][TESTS][THRIFT] Add tests for metadata operations
This is an automated email from the ASF dual-hosted git repository. mgaido pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/incubator-livy.git The following commit(s) were added to refs/heads/master by this push: new 3626382 [LIVY-574][TESTS][THRIFT] Add tests for metadata operations 3626382 is described below commit 3626382f9e718b446cdb68341df0dc46036f1068 Author: yihengwang AuthorDate: Sun Aug 25 09:47:49 2019 +0200 [LIVY-574][TESTS][THRIFT] Add tests for metadata operations ## What changes were proposed in this pull request? Add unit test for existing meta operation: GetCatalogsOperation/GetTableTypesOperation/GetTypeInfoOperation. We also fix issues we met. ## How was this patch tested? Add new unit tests and existing test. We also use SquirrelSQL test relate operations. ![image](https://user-images.githubusercontent.com/1297418/62930007-26a93400-bdee-11e9-9364-259308724db6.png) ![image](https://user-images.githubusercontent.com/1297418/62930073-42143f00-bdee-11e9-9409-62d07ad0eabd.png) Author: yihengwang Closes #197 from yiheng/fix_574. 
--- .../livy/thriftserver/cli/ThriftCLIService.scala | 6 +- .../operation/GetCatalogsOperation.scala | 4 +- .../operation/GetTypeInfoOperation.scala | 11 ++- .../thriftserver/operation/MetadataOperation.scala | 2 +- .../livy/thriftserver/serde/ThriftResultSet.scala | 8 ++ .../livy/thriftserver/ThriftServerSuites.scala | 102 - .../livy/thriftserver/session/ColumnBuffer.java| 96 --- .../thriftserver/session/ColumnBufferTest.java | 96 +++ 8 files changed, 300 insertions(+), 25 deletions(-) diff --git a/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/cli/ThriftCLIService.scala b/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/cli/ThriftCLIService.scala index da108ab..9cced79 100644 --- a/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/cli/ThriftCLIService.scala +++ b/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/cli/ThriftCLIService.scala @@ -397,7 +397,7 @@ abstract class ThriftCLIService(val cliService: LivyCLIService, val serviceName: override def GetTypeInfo(req: TGetTypeInfoReq): TGetTypeInfoResp = { val resp = new TGetTypeInfoResp try { - val operationHandle = cliService.getTypeInfo(new SessionHandle(req.getSessionHandle)) + val operationHandle = cliService.getTypeInfo(createSessionHandle(req.getSessionHandle)) resp.setOperationHandle(operationHandle.toTOperationHandle) resp.setStatus(ThriftCLIService.OK_STATUS) } catch { @@ -412,7 +412,7 @@ abstract class ThriftCLIService(val cliService: LivyCLIService, val serviceName: override def GetCatalogs(req: TGetCatalogsReq): TGetCatalogsResp = { val resp = new TGetCatalogsResp try { - val opHandle = cliService.getCatalogs(new SessionHandle(req.getSessionHandle)) + val opHandle = cliService.getCatalogs(createSessionHandle(req.getSessionHandle)) resp.setOperationHandle(opHandle.toTOperationHandle) resp.setStatus(ThriftCLIService.OK_STATUS) } catch { @@ -463,7 +463,7 @@ abstract class ThriftCLIService(val cliService: LivyCLIService, val serviceName: override 
def GetTableTypes(req: TGetTableTypesReq): TGetTableTypesResp = { val resp = new TGetTableTypesResp try { - val opHandle = cliService.getTableTypes(new SessionHandle(req.getSessionHandle)) + val opHandle = cliService.getTableTypes(createSessionHandle(req.getSessionHandle)) resp.setOperationHandle(opHandle.toTOperationHandle) resp.setStatus(ThriftCLIService.OK_STATUS) } catch { diff --git a/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/operation/GetCatalogsOperation.scala b/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/operation/GetCatalogsOperation.scala index 57687b0..781c022 100644 --- a/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/operation/GetCatalogsOperation.scala +++ b/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/operation/GetCatalogsOperation.scala @@ -33,11 +33,11 @@ class GetCatalogsOperation(sessionHandle: SessionHandle) @throws(classOf[HiveSQLException]) override def runInternal(): Unit = { setState(OperationState.RUNNING) -info("Fetching table type metadata") +info("Fetching catalogs metadata") try { // catalogs are actually not supported in spark, so this is a no-op setState(OperationState.FINISHED) - info("Fetching table type metadata has been successfully finished") + info("Fetching catalogs has been successfully finished") } catch { case e: Throwable => setState(OperationState.ERROR) diff --git a/thriftserver/server/src/main/s
[incubator-livy] branch master updated: [LIVY-622][LIVY-623][LIVY-624][LIVY-625][THRIFT][FOLLOWUP] Use ResultSet in catalog operations
This is an automated email from the ASF dual-hosted git repository. mgaido pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/incubator-livy.git The following commit(s) were added to refs/heads/master by this push: new d0d8028 [LIVY-622][LIVY-623][LIVY-624][LIVY-625][THRIFT][FOLLOWUP] Use ResultSet in catalog operations d0d8028 is described below commit d0d8028657c7314ad031f51c0a564a8786213187 Author: Marco Gaido AuthorDate: Fri Aug 30 14:06:08 2019 +0200 [LIVY-622][LIVY-623][LIVY-624][LIVY-625][THRIFT][FOLLOWUP] Use ResultSet in catalog operations ## What changes were proposed in this pull request? This is a followup of #194 which addresses all the remaining concerns. The main changes are: - reverting the introduction of a state specific for catalog operations; - usage of `ResultSet` to send over the wire the data for catalog operations too. ## How was this patch tested? existing modified UTs Author: Marco Gaido Closes #217 from mgaido91/LIVY-622_followup. 
--- .../livy/thriftserver/LivyOperationManager.scala | 18 ++--- .../operation/GetColumnsOperation.scala| 20 +++--- .../operation/GetFunctionsOperation.scala | 35 ++--- .../operation/GetSchemasOperation.scala| 12 ++-- .../operation/GetTablesOperation.scala | 16 ++--- .../thriftserver/operation/MetadataOperation.scala | 3 - .../operation/SparkCatalogOperation.scala | 51 ++ .../session/CleanupCatalogResultJob.java | 37 -- .../session/FetchCatalogResultJob.java | 51 -- .../livy/thriftserver/session/GetColumnsJob.java | 31 .../livy/thriftserver/session/GetFunctionsJob.java | 66 ++--- .../livy/thriftserver/session/GetSchemasJob.java | 24 --- .../livy/thriftserver/session/GetTablesJob.java| 19 ++--- .../livy/thriftserver/session/SparkCatalogJob.java | 64 +++-- .../livy/thriftserver/session/SparkUtils.java | 46 .../thriftserver/session/ThriftSessionState.java | 32 - .../thriftserver/session/ThriftSessionTest.java| 82 +- 17 files changed, 290 insertions(+), 317 deletions(-) diff --git a/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/LivyOperationManager.scala b/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/LivyOperationManager.scala index 2454185..eb1dd21 100644 --- a/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/LivyOperationManager.scala +++ b/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/LivyOperationManager.scala @@ -196,12 +196,7 @@ class LivyOperationManager(val livyThriftSessionManager: LivyThriftSessionManage tableTypes: util.List[String]): OperationHandle = { executeOperation(sessionHandle, { val op = new GetTablesOperation( -sessionHandle, -catalogName, -schemaName, -tableName, -tableTypes, -livyThriftSessionManager) +sessionHandle, schemaName, tableName, tableTypes, livyThriftSessionManager) addOperation(op, sessionHandle) op }) @@ -214,8 +209,8 @@ class LivyOperationManager(val livyThriftSessionManager: LivyThriftSessionManage schemaName: String, functionName: String): OperationHandle = { 
executeOperation(sessionHandle, { - val op = new GetFunctionsOperation(sessionHandle, catalogName, schemaName, functionName, -livyThriftSessionManager) + val op = new GetFunctionsOperation( +sessionHandle, schemaName, functionName, livyThriftSessionManager) addOperation(op, sessionHandle) op }) @@ -227,8 +222,7 @@ class LivyOperationManager(val livyThriftSessionManager: LivyThriftSessionManage catalogName: String, schemaName: String): OperationHandle = { executeOperation(sessionHandle, { - val op = new GetSchemasOperation(sessionHandle, catalogName, schemaName, -livyThriftSessionManager) + val op = new GetSchemasOperation(sessionHandle, schemaName, livyThriftSessionManager) addOperation(op, sessionHandle) op }) @@ -242,8 +236,8 @@ class LivyOperationManager(val livyThriftSessionManager: LivyThriftSessionManage tableName: String, columnName: String): OperationHandle = { executeOperation(sessionHandle, { - val op = new GetColumnsOperation(sessionHandle, catalogName, schemaName, tableName, -columnName, livyThriftSessionManager) + val op = new GetColumnsOperation( +sessionHandle, schemaName, tableName, columnName, livyThriftSessionManager) addOperation(op, sessionHandle) op }) diff --git a/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/operation/GetColumnsOperation.scala b/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/operation
[incubator-livy] branch master updated: [LIVY-650][THRIFT] Remove schema from ResultSet
This is an automated email from the ASF dual-hosted git repository. mgaido pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/incubator-livy.git The following commit(s) were added to refs/heads/master by this push: new 9c34750 [LIVY-650][THRIFT] Remove schema from ResultSet 9c34750 is described below commit 9c34750230acb3c3f72b66c82bea86ca2e33fe57 Author: Marco Gaido AuthorDate: Thu Aug 29 14:44:41 2019 +0200 [LIVY-650][THRIFT] Remove schema from ResultSet ## What changes were proposed in this pull request? The class `ResultSet` is serialized and sent over the wire. Currently this class contains a JSON string representation of the spark schema, which is never used. Hence, the PR removes it in order to avoid serializing it uselessly. ## How was this patch tested? existing UTs Author: Marco Gaido Closes #213 from mgaido91/LIVY-650. --- .../org/apache/livy/thriftserver/session/FetchResultJob.java | 2 +- .../java/org/apache/livy/thriftserver/session/ResultSet.java | 9 + .../main/java/org/apache/livy/thriftserver/session/SqlJob.java | 3 --- .../org/apache/livy/thriftserver/session/StatementState.java | 1 - .../org/apache/livy/thriftserver/session/ThriftSessionState.java | 1 - .../org/apache/livy/thriftserver/session/ColumnBufferTest.java | 2 +- 6 files changed, 3 insertions(+), 15 deletions(-) diff --git a/thriftserver/session/src/main/java/org/apache/livy/thriftserver/session/FetchResultJob.java b/thriftserver/session/src/main/java/org/apache/livy/thriftserver/session/FetchResultJob.java index 09b69c9..450fd86 100644 --- a/thriftserver/session/src/main/java/org/apache/livy/thriftserver/session/FetchResultJob.java +++ b/thriftserver/session/src/main/java/org/apache/livy/thriftserver/session/FetchResultJob.java @@ -49,7 +49,7 @@ public class FetchResultJob implements Job { StatementState st = session.findStatement(statementId); Iterator iter = st.iter; -ResultSet rs = new ResultSet(st.types, st.schema); +ResultSet rs = new 
ResultSet(st.types); int count = 0; while (iter.hasNext() && count < maxRows) { Row row = iter.next(); diff --git a/thriftserver/session/src/main/java/org/apache/livy/thriftserver/session/ResultSet.java b/thriftserver/session/src/main/java/org/apache/livy/thriftserver/session/ResultSet.java index 317c4e6..cd0f71d 100644 --- a/thriftserver/session/src/main/java/org/apache/livy/thriftserver/session/ResultSet.java +++ b/thriftserver/session/src/main/java/org/apache/livy/thriftserver/session/ResultSet.java @@ -37,16 +37,13 @@ import org.apache.spark.sql.types.StructField; */ public class ResultSet { - private final String schema; private final ColumnBuffer[] columns; public ResultSet() { -this.schema = null; this.columns = null; } - public ResultSet(DataType[] types, String schema) { -this.schema = schema; + public ResultSet(DataType[] types) { this.columns = new ColumnBuffer[types.length]; for (int i = 0; i < columns.length; i++) { columns[i] = new ColumnBuffer(types[i]); @@ -69,10 +66,6 @@ public class ResultSet { } } - public String getSchema() { -return schema; - } - public ColumnBuffer[] getColumns() { return columns; } diff --git a/thriftserver/session/src/main/java/org/apache/livy/thriftserver/session/SqlJob.java b/thriftserver/session/src/main/java/org/apache/livy/thriftserver/session/SqlJob.java index a0b9c85..849c057 100644 --- a/thriftserver/session/src/main/java/org/apache/livy/thriftserver/session/SqlJob.java +++ b/thriftserver/session/src/main/java/org/apache/livy/thriftserver/session/SqlJob.java @@ -18,9 +18,7 @@ package org.apache.livy.thriftserver.session; import java.util.Iterator; -import java.util.List; -import org.apache.spark.rdd.RDD; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; import org.apache.spark.sql.SparkSession; @@ -88,5 +86,4 @@ public class SqlJob implements Job { // has been executed. 
session.registerStatement(statementId, schema, iter); } - } diff --git a/thriftserver/session/src/main/java/org/apache/livy/thriftserver/session/StatementState.java b/thriftserver/session/src/main/java/org/apache/livy/thriftserver/session/StatementState.java index 5238845..7a300e9 100644 --- a/thriftserver/session/src/main/java/org/apache/livy/thriftserver/session/StatementState.java +++ b/thriftserver/session/src/main/java/org/apache/livy/thriftserver/session/StatementState.java @@ -36,5 +36,4 @@ class StatementState { this.iter = iter; this.types = SparkUtils.translateSchema(schema); } - } diff --git a/thriftserver/session/src/main/java/org/apache/livy/thriftserver/session/ThriftSessionState.java b/thriftserver/session/src/main/java/org/apache/livy/thriftserver/session/ThriftSessionState.java index 5378270..9111d94 100644 --- a/thrift
[incubator-livy] branch master updated: [LIVY-640] Add tests for ThriftServer
This is an automated email from the ASF dual-hosted git repository. mgaido pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/incubator-livy.git The following commit(s) were added to refs/heads/master by this push: new 3cdb7b2 [LIVY-640] Add tests for ThriftServer 3cdb7b2 is described below commit 3cdb7b2584dc97bc17e01721b6be38c3b137a2e1 Author: micahzhao AuthorDate: Thu Sep 5 14:26:02 2019 +0200 [LIVY-640] Add tests for ThriftServer ## What changes were proposed in this pull request? 1、Added some tests in BinaryThriftServerSuite that ThriftServer is currently missing(The tests was Moved from spark ThriftServer). 2、Set ENABLE_HIVE_CONTEXT=true in BinaryThriftServerSuite to support the creation of Hive tables and the creation of udf in test. 3、Upgrade spark2.2.0 to 2.2.3. To resolve the issue that session cannot be created during Travis test when ENABLE_HIVE_CONTEXT is set to true 4、Travis ITs is divided into two parts to avoid test timeout problems. ## How was this patch tested? Test with Travis and see the results ![image](https://user-images.githubusercontent.com/13825159/63577482-10a32c80-c5c1-11e9-868a-0aab4b1af743.png) Author: micahzhao Closes #209 from captainzmc/add-thrift-test. 
--- pom.xml| 6 +- .../livy/thriftserver/ThriftServerSuites.scala | 209 ++--- 2 files changed, 143 insertions(+), 72 deletions(-) diff --git a/pom.xml b/pom.xml index f98071c..fa1600d 100644 --- a/pom.xml +++ b/pom.xml @@ -80,7 +80,7 @@ 2.7.3 compile -2.2.0 +2.2.3 ${spark.scala-2.11.version} 3.0.0 1.9 @@ -109,9 +109,9 @@ ${user.dir} ${execution.root}/dev/spark - https://d3kbcqa49mib13.cloudfront.net/spark-2.2.0-bin-hadoop2.7.tgz + https://archive.apache.org/dist/spark/spark-2.2.3/spark-2.2.3-bin-hadoop2.7.tgz -spark-2.2.0-bin-hadoop2.7 +spark-2.2.3-bin-hadoop2.7 ${basedir}/target diff --git a/thriftserver/server/src/test/scala/org/apache/livy/thriftserver/ThriftServerSuites.scala b/thriftserver/server/src/test/scala/org/apache/livy/thriftserver/ThriftServerSuites.scala index 48750da..1939a56 100644 --- a/thriftserver/server/src/test/scala/org/apache/livy/thriftserver/ThriftServerSuites.scala +++ b/thriftserver/server/src/test/scala/org/apache/livy/thriftserver/ThriftServerSuites.scala @@ -19,6 +19,9 @@ package org.apache.livy.thriftserver import java.sql.{Connection, Date, SQLException, Statement, Types} +import scala.collection.mutable.ArrayBuffer +import scala.io.Source + import org.apache.hive.jdbc.HiveStatement import org.apache.livy.LivyConf @@ -31,7 +34,8 @@ trait CommonThriftTests { def dataTypesTest(statement: Statement, mapSupported: Boolean): Unit = { val resultSet = statement.executeQuery( - "select 1, 'a', cast(null as int), 1.2345, CAST('2018-08-06' as date)") + "select 1, 'a', cast(null as int), 1.2345, CAST('2018-08-06' as date), " + +"CAST('123' as BINARY)") resultSet.next() assert(resultSet.getInt(1) == 1) assert(resultSet.getString(2) == "a") @@ -39,6 +43,9 @@ trait CommonThriftTests { assert(resultSet.wasNull()) assert(resultSet.getDouble(4) == 1.2345) assert(resultSet.getDate(5) == Date.valueOf("2018-08-06")) +val resultBytes = Source.fromInputStream(resultSet.getBinaryStream(6)) + .map(_.toByte).toArray 
+assert("123".getBytes.sameElements(resultBytes)) assert(!resultSet.next()) val resultSetWithNulls = statement.executeQuery("select cast(null as string), " + @@ -103,81 +110,90 @@ trait CommonThriftTests { def getTablesTest(connection: Connection): Unit = { val statement = connection.createStatement() -statement.execute("CREATE TABLE test_get_tables (id integer, desc string) USING json") -statement.close() - -val metadata = connection.getMetaData -val tablesResultSet = metadata.getTables("", "default", "*", Array("TABLE")) -assert(tablesResultSet.getMetaData.getColumnCount == 5) -assert(tablesResultSet.getMetaData.getColumnName(1) == "TABLE_CAT") -assert(tablesResultSet.getMetaData.getColumnName(2) == "TABLE_SCHEM") -assert(tablesResultSet.getMetaData.getColumnName(3) == "TABLE_NAME") -assert(tablesResultSet.getMetaData.getColumnName(4) == "TABLE_TYPE") -assert(tablesResultSet.getMetaData.getColumnName(5) == "REMARKS") +try { + statement.execute("CREATE TABLE test_get_tables (id integer, desc string) USING json") + val metadata = connection.getMetaData + val tablesResultSet = metadata.getTables("", "default", "*", Array("TABLE")) + a
[incubator-livy] branch master updated: [LIVY-571] cleanupStatement should not throw exception when statementId does not exist
This is an automated email from the ASF dual-hosted git repository. mgaido pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/incubator-livy.git The following commit(s) were added to refs/heads/master by this push: new 7dee3cc [LIVY-571] cleanupStatement should not throw exception when statementId not exist 7dee3cc is described below commit 7dee3cc142f79a131d54a37c7a56bb697c160cc1 Author: Jeffrey(Xilang) Yan <7855100+yan...@users.noreply.github.com> AuthorDate: Mon Jul 22 12:15:32 2019 +0200 [LIVY-571] cleanupStatement should not throw exception when statementId not exist ## What changes were proposed in this pull request? `ThriftSessionState` is used to store query result by statementId. When an exception is thrown during execute query, no query result is stored. But when a statement is closed from beeline, a request is invoked to remove cached query result in `ThriftSessionState`. Remove statement query result from `ThriftSessionState` currently throws an exception, hiding the original query failure exception and message. The PR makes it return cleanly, so the proper exception is reported to the end user. ## How was this patch tested? Added unit tests. Author: Jeffrey(Xilang) Yan <7855100+yan...@users.noreply.github.com> Author: yantzu <7855100+yan...@users.noreply.github.com> Author: 80254702 <80254...@adc.com> Closes #182 from yantzu/LIVY-571. 
--- .../LivyExecuteStatementOperation.scala| 6 - .../apache/livy/thriftserver/rpc/RpcClient.scala | 2 +- .../livy/thriftserver/ThriftServerSuites.scala | 31 +- .../thriftserver/session/CleanupStatementJob.java | 7 +++-- .../thriftserver/session/ThriftSessionState.java | 8 +++--- .../thriftserver/session/ThriftSessionTest.java| 12 - 6 files changed, 50 insertions(+), 16 deletions(-) diff --git a/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/LivyExecuteStatementOperation.scala b/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/LivyExecuteStatementOperation.scala index a067788..ebb8e1d 100644 --- a/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/LivyExecuteStatementOperation.scala +++ b/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/LivyExecuteStatementOperation.scala @@ -174,7 +174,11 @@ class LivyExecuteStatementOperation( private def cleanup(state: OperationState) { if (statementId != null && rpcClientValid) { - rpcClient.cleanupStatement(sessionHandle, statementId).get() + val cleaned = rpcClient.cleanupStatement(sessionHandle, statementId).get() + if (!cleaned) { +warn(s"Fail to cleanup query $statementId (session = ${sessionHandle.getSessionId}), " + + "this message can be ignored if the query failed.") + } } setState(state) } diff --git a/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/rpc/RpcClient.scala b/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/rpc/RpcClient.scala index beba6a9..b3b2ddf 100644 --- a/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/rpc/RpcClient.scala +++ b/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/rpc/RpcClient.scala @@ -76,7 +76,7 @@ class RpcClient(livySession: InteractiveSession) extends Logging { def cleanupStatement( sessionHandle: SessionHandle, statementId: String, - cancelJob: Boolean = false): JobHandle[_] = { + cancelJob: Boolean = false): JobHandle[java.lang.Boolean] = { info(s"Cleaning up 
remote session for statementId = $statementId") require(null != statementId, s"Invalid statementId specified. statementId = $statementId") livySession.recordActivity() diff --git a/thriftserver/server/src/test/scala/org/apache/livy/thriftserver/ThriftServerSuites.scala b/thriftserver/server/src/test/scala/org/apache/livy/thriftserver/ThriftServerSuites.scala index 3099436..a3d9e88 100644 --- a/thriftserver/server/src/test/scala/org/apache/livy/thriftserver/ThriftServerSuites.scala +++ b/thriftserver/server/src/test/scala/org/apache/livy/thriftserver/ThriftServerSuites.scala @@ -17,7 +17,7 @@ package org.apache.livy.thriftserver -import java.sql.{Date, Statement} +import java.sql.{Date, SQLException, Statement} import org.apache.livy.LivyConf @@ -134,6 +134,35 @@ class BinaryThriftServerSuite extends ThriftServerBaseTest with CommonThriftTest statement.close() } } + + test("LIVY-571: returns a meaningful exception when database doesn't exist") { +assume(hiveSupportEnabled(formattedSparkVersion._1, livyConf)) +withJdbcConnection(jdbcUri("default")) { c => + val caught = intercept[SQLException] { +val statement = c.createStatemen
[incubator-livy] branch master updated: [LIVY-705][THRIFT] Support getting keystore password from Hadoop credential provider
This is an automated email from the ASF dual-hosted git repository. mgaido pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/incubator-livy.git The following commit(s) were added to refs/heads/master by this push: new 9042ff5 [LIVY-705][THRIFT] Support getting keystore password from Hadoop credential provider 9042ff5 is described below commit 9042ff5b4a03cd302a884b5a74280d4b476792b7 Author: Wing Yew Poon AuthorDate: Wed Nov 6 10:54:42 2019 +0100 [LIVY-705][THRIFT] Support getting keystore password from Hadoop credential provider ## What changes were proposed in this pull request? https://issues.apache.org/jira/browse/LIVY-705 LIVY-475 added support for getting the keystore password and key password from a Hadoop credential provider file. The keystore password is also needed for SSL/TLS support in the Thrift server. In this change, we extend the support for getting the keystore password from the Hadoop credential provider to the Thrift server as well. ## How was this patch tested? Manually tested a Livy Thrift server that has livy.server.thrift.use.SSL=true, using both binary and http mode. Configured keystore password in a Hadoop credential provider file and provided the path to this file in livy.hadoop.security.credential.provider.path. Author: Wing Yew Poon Closes #253 from wypoon/wypoon_LIVY-705. 
--- .../livy/thriftserver/cli/ThriftBinaryCLIService.scala | 2 +- .../apache/livy/thriftserver/cli/ThriftCLIService.scala| 14 ++ .../livy/thriftserver/cli/ThriftHttpCLIService.scala | 2 +- 3 files changed, 16 insertions(+), 2 deletions(-) diff --git a/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/cli/ThriftBinaryCLIService.scala b/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/cli/ThriftBinaryCLIService.scala index e16313d..734768f 100644 --- a/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/cli/ThriftBinaryCLIService.scala +++ b/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/cli/ThriftBinaryCLIService.scala @@ -76,7 +76,7 @@ class ThriftBinaryCLIService(override val cliService: LivyCLIService, val oomHoo throw new IllegalArgumentException( s"${LivyConf.SSL_KEYSTORE.key} Not configured for SSL connection") } -val keyStorePassword = livyConf.get(LivyConf.SSL_KEYSTORE_PASSWORD) +val keyStorePassword = getKeyStorePassword() val params = new TSSLTransportFactory.TSSLTransportParameters params.setKeyStore(keyStorePath, keyStorePassword) serverSocket = diff --git a/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/cli/ThriftCLIService.scala b/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/cli/ThriftCLIService.scala index 9cced79..30e1f28 100644 --- a/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/cli/ThriftCLIService.scala +++ b/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/cli/ThriftCLIService.scala @@ -26,6 +26,7 @@ import javax.security.auth.login.LoginException import scala.collection.JavaConverters._ import com.google.common.base.Preconditions.checkArgument +import org.apache.hadoop.conf.Configuration import org.apache.hadoop.security.UserGroupInformation import org.apache.hadoop.security.authentication.util.KerberosName import org.apache.hadoop.security.authorize.ProxyUsers @@ -83,6 +84,19 @@ abstract class ThriftCLIService(val 
cliService: LivyCLIService, val serviceName: super.init(livyConf) } + protected def getKeyStorePassword(): String = +Option(livyConf.get(LivyConf.SSL_KEYSTORE_PASSWORD)).orElse { + val credentialProviderPath = livyConf.get(LivyConf.HADOOP_CREDENTIAL_PROVIDER_PATH) + val hadoopConf = new Configuration() + if (credentialProviderPath != null) { +hadoopConf.set("hadoop.security.credential.provider.path", credentialProviderPath) + } + Option(hadoopConf.getPassword(LivyConf.SSL_KEYSTORE_PASSWORD.key)).map(_.mkString) +}.getOrElse { + throw new IllegalArgumentException( +"Livy keystore password not configured for SSL connection") +} + protected def initServer(): Unit override def start(): Unit = { diff --git a/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/cli/ThriftHttpCLIService.scala b/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/cli/ThriftHttpCLIService.scala index 8a3d439..80122dc 100644 --- a/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/cli/ThriftHttpCLIService.scala +++ b/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/cli/ThriftHttpCLIService.scala @@ -83,11 +83,11 @@ class ThriftHttpCLIService( // Change connector if SSL is used val connector = if (useSsl) { val keyStorePath = livyConf.g
[incubator-livy] branch master updated: [LIVY-699][THRIFT] Fix resultSet.getBigDecimal throw java.sql.SQLException: Illegal conversion
This is an automated email from the ASF dual-hosted git repository. mgaido pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/incubator-livy.git The following commit(s) were added to refs/heads/master by this push: new 7847c3f [LIVY-699][THRIFT] Fix resultSet.getBigDecimal throw java.sql.SQLException: Illegal conversion 7847c3f is described below commit 7847c3fe8309dfd2d2089dac68044603745a3499 Author: runzhiwang AuthorDate: Tue Nov 12 11:26:48 2019 +0100 [LIVY-699][THRIFT] Fix resultSet.getBigDecimal throw java.sql.SQLException: Illegal conversion ## What changes were proposed in this pull request? [LIVY-699][THRIFT] Fix resultSet.getBigDecimal throw java.sql.SQLException: Illegal conversion. Follows are steps to reproduce the problem: 1. `create table test(id decimal)`. 2. Then `resultSet.getBigDecimal(1)` will throw:` java.sql.SQLException: Illegal conversion`. The reason is `getSchema().getColumnDescriptorAt(columnIndex - 1).getType();` at https://github.com/apache/hive/blob/master/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java#L415 return string, so cannot pass the check `val instanceof BigDecimal `at https://github.com/apache/hive/blob/master/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java#L133, so throw `java.sql.SQLExceptio [...] 3. So the root cause is the error return of `getType()`, which should return decimal other than string. 4. Regarding to date and timestamp, though the return type is string, hive-jdbc has done the transformation from string to date and timestamp in the following links. But I think it is necessary to return the right type. 
https://github.com/apache/hive/blob/master/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java#L255 https://github.com/apache/hive/blob/master/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java#L571 Additionally, SparkThrift return decimal type instead of string in the same case, if user use `getBigDecimal` and migrate from SparkThrift to Livy, it will throw exception. So it is necessary to return decimal instead of string in livy. ## How was this patch tested? Add `SchemaIT.scala` to test the return of column type. Author: runzhiwang Closes #247 from runzhiwang/support-type. --- .../apache/livy/thriftserver/types/Schema.scala| 6 + .../livy/thriftserver/ThriftServerSuites.scala | 134 ++--- .../apache/livy/thriftserver/session/DataType.java | 3 + 3 files changed, 126 insertions(+), 17 deletions(-) diff --git a/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/types/Schema.scala b/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/types/Schema.scala index 6e06474..0f4c642 100644 --- a/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/types/Schema.scala +++ b/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/types/Schema.scala @@ -41,6 +41,9 @@ case class BasicDataType(name: String) extends FieldType { case "float" => DataType.FLOAT case "double" => DataType.DOUBLE case "binary" => DataType.BINARY +case _ if name.contains("decimal") => DataType.DECIMAL +case "timestamp" => DataType.TIMESTAMP +case "date" => DataType.DATE case _ => DataType.STRING } } @@ -100,6 +103,9 @@ object Schema { case DataType.FLOAT => TTypeId.FLOAT_TYPE case DataType.DOUBLE => TTypeId.DOUBLE_TYPE case DataType.BINARY => TTypeId.BINARY_TYPE + case DataType.DECIMAL => TTypeId.DECIMAL_TYPE + case DataType.TIMESTAMP => TTypeId.TIMESTAMP_TYPE + case DataType.DATE => TTypeId.DATE_TYPE case _ => TTypeId.STRING_TYPE } val primitiveEntry = new TPrimitiveTypeEntry(typeId) diff --git 
a/thriftserver/server/src/test/scala/org/apache/livy/thriftserver/ThriftServerSuites.scala b/thriftserver/server/src/test/scala/org/apache/livy/thriftserver/ThriftServerSuites.scala index 1939a56..19abb0d 100644 --- a/thriftserver/server/src/test/scala/org/apache/livy/thriftserver/ThriftServerSuites.scala +++ b/thriftserver/server/src/test/scala/org/apache/livy/thriftserver/ThriftServerSuites.scala @@ -17,7 +17,7 @@ package org.apache.livy.thriftserver -import java.sql.{Connection, Date, SQLException, Statement, Types} +import java.sql.{Connection, Date, SQLException, Statement, Timestamp, Types} import scala.collection.mutable.ArrayBuffer import scala.io.Source @@ -34,33 +34,133 @@ trait CommonThriftTests { def dataTypesTest(statement: Statement, mapSupported: Boolean): Unit = { val resultSet = statement.executeQuery( - "select 1, 'a', cast(null as int), 1.2345, CAST('2018-08-06' as date), " + -"CAST('123' as BINARY)") + "select cast(1 as tinyint)," + +"
[incubator-livy] branch master updated: [LIVY-356][SERVER] Add LDAP authentication for livy-server.
This is an automated email from the ASF dual-hosted git repository. mgaido pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/incubator-livy.git The following commit(s) were added to refs/heads/master by this push: new 2e7d691 [LIVY-356][SERVER] Add LDAP authentication for livy-server. 2e7d691 is described below commit 2e7d691c03caa6eaa1d170b2ab03265a65ff12f2 Author: captainzmc AuthorDate: Sun Oct 13 09:42:23 2019 +0200 [LIVY-356][SERVER] Add LDAP authentication for livy-server. ## What changes were proposed in this pull request? Currently, livy-server doesn't support LDAP authentication from client to server (Livy). We need to add LDAP authentication, as that is the preferable method for security reasons. Here we reimplement LdapAuthenticationHandler, which is new in Hadoop 2.8+. ## How was this patch tested? UT tests for this part have been added, and the change can be verified by running them. Author: captainzmc Author: Janki Akhani Closes #231 from captainzmc/add-ldap. --- pom.xml| 4 + server/pom.xml | 43 .../src/main/scala/org/apache/livy/LivyConf.scala | 7 + .../scala/org/apache/livy/server/LivyServer.scala | 21 ++ .../auth/LdapAuthenticationHandlerImpl.scala | 219 + .../auth/TestLdapAuthenticationHandlerImpl.scala | 156 +++ 6 files changed, 450 insertions(+) diff --git a/pom.xml b/pom.xml index fa1600d..5bcf1a3 100644 --- a/pom.xml +++ b/pom.xml @@ -121,6 +121,10 @@ false + +2.0.0-M21 +1.0.0-M33 +
[incubator-livy] branch master updated: [LIVY-745] Ensure that a single RSCClientFactory gets loaded.
This is an automated email from the ASF dual-hosted git repository. mgaido pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/incubator-livy.git The following commit(s) were added to refs/heads/master by this push: new 3a26856 [LIVY-745] Ensure that a single RSCClientFactory gets loaded. 3a26856 is described below commit 3a2685643670c1cd1530feb7a54516b01784e781 Author: Wing Yew Poon AuthorDate: Sat Feb 15 22:42:47 2020 +0100 [LIVY-745] Ensure that a single RSCClientFactory gets loaded. ## What changes were proposed in this pull request? In LivyClientBuilder, a ServiceLoader is used to load configured LivyClientFactory providers (HttpClientFactory and RSCClientFactory). Correct behavior of RSCClientFactory implicitly depends on there being a single instance of it in the Livy server. Instead of instantiating a new ServiceLoader every time the build method is called on a LivyClientBuilder, keep a single ServiceLoader in a static field. ServiceLoader instances are not safe for use by multiple concurrent threads, so have the ServiceLoader load the LivyClientFactory implementations into a static List. Then LivyClientBuilder#build will use the single instance of either HttpClientFactory or RSCClientFactory in this List to create a LivyClient. ## How was this patch tested? Tested by having 20 clients connect to the Livy Thrift server simultaneously. Before the change, the behavior described in https://issues.apache.org/jira/browse/LIVY-745 was encountered. With the change, the problem is not encountered. Also added a unit test that fails before this change to LivyClientBuilder, and passes with the change. Author: Wing Yew Poon Closes #275 from wypoon/LIVY-745. 
--- .../java/org/apache/livy/LivyClientBuilder.java| 26 -- .../java/org/apache/livy/TestClientFactory.java| 12 +- .../org/apache/livy/TestLivyClientBuilder.java | 8 +++ 3 files changed, 38 insertions(+), 8 deletions(-) diff --git a/api/src/main/java/org/apache/livy/LivyClientBuilder.java b/api/src/main/java/org/apache/livy/LivyClientBuilder.java index 5bbd170..1d7ec01 100644 --- a/api/src/main/java/org/apache/livy/LivyClientBuilder.java +++ b/api/src/main/java/org/apache/livy/LivyClientBuilder.java @@ -23,6 +23,8 @@ import java.io.Reader; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; +import java.util.ArrayList; +import java.util.List; import java.util.Map; import java.util.Properties; import java.util.ServiceLoader; @@ -35,6 +37,19 @@ public final class LivyClientBuilder { public static final String LIVY_URI_KEY = "livy.uri"; + private static final ServiceLoader CLIENT_FACTORY_LOADER = +ServiceLoader.load(LivyClientFactory.class, classLoader()); + + private static List getLivyClientFactories() { +List factories = new ArrayList<>(); +for (LivyClientFactory f : CLIENT_FACTORY_LOADER) { + factories.add(f); +} +return factories; + } + + private static final List CLIENT_FACTORIES = getLivyClientFactories(); + private final Properties config; /** @@ -118,14 +133,11 @@ public final class LivyClientBuilder { } LivyClient client = null; -ServiceLoader loader = ServiceLoader.load(LivyClientFactory.class, - classLoader()); -if (!loader.iterator().hasNext()) { +if (CLIENT_FACTORIES.isEmpty()) { throw new IllegalStateException("No LivyClientFactory implementation was found."); } -Exception error = null; -for (LivyClientFactory factory : loader) { +for (LivyClientFactory factory : CLIENT_FACTORIES) { try { client = factory.createClient(uri, config); } catch (Exception e) { @@ -158,10 +170,10 @@ public final class LivyClientBuilder { return client; } - private ClassLoader classLoader() { + private static ClassLoader classLoader() { ClassLoader cl 
= Thread.currentThread().getContextClassLoader(); if (cl == null) { - cl = getClass().getClassLoader(); + cl = LivyClientBuilder.class.getClassLoader(); } return cl; } diff --git a/api/src/test/java/org/apache/livy/TestClientFactory.java b/api/src/test/java/org/apache/livy/TestClientFactory.java index 89edeec..622908c 100644 --- a/api/src/test/java/org/apache/livy/TestClientFactory.java +++ b/api/src/test/java/org/apache/livy/TestClientFactory.java @@ -21,9 +21,19 @@ import java.io.File; import java.net.URI; import java.util.Properties; import java.util.concurrent.Future; +import java.util.concurrent.atomic.AtomicLong; public class TestClientFactory implements LivyClientFactory { + private static AtomicLong instanceCount = new AtomicLong(); + public static long getInstanceCount() { +return instanceCount.get(); + } + + public TestClientFactory() { +instanceCount.incrementAndGet(); + } + @Override publ
[incubator-livy] branch master updated (06a8d4f -> 3f9a1a5)
This is an automated email from the ASF dual-hosted git repository. mgaido pushed a change to branch master in repository https://gitbox.apache.org/repos/asf/incubator-livy.git. from 06a8d4f [LIVY-748] Add support for running Livy Integration tests against secure external clusters add 3f9a1a5 [LIVY-752][THRIFT] Fix implementation of limits on connections. No new revisions were added by this update. Summary of changes: pom.xml| 1 + .../thriftserver/LivyThriftSessionManager.scala| 112 +--- .../TestLivyThriftSessionManager.scala | 145 + 3 files changed, 212 insertions(+), 46 deletions(-) create mode 100644 thriftserver/server/src/test/scala/org/apache/livy/thriftserver/TestLivyThriftSessionManager.scala
[incubator-livy] branch master updated: [LIVY-754][THRIFT] Encode precision and scale for decimal type.
This is an automated email from the ASF dual-hosted git repository. mgaido pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/incubator-livy.git The following commit(s) were added to refs/heads/master by this push: new 3b9bbef [LIVY-754][THRIFT] Encode precision and scale for decimal type. 3b9bbef is described below commit 3b9bbefbaf8ddb3116952caf8c15b97d676386a7 Author: Wing Yew Poon AuthorDate: Wed May 20 19:37:20 2020 +0200 [LIVY-754][THRIFT] Encode precision and scale for decimal type. ## What changes were proposed in this pull request? When a `org.apache.livy.thriftserver.session.DataType.DECIMAL` is converted to a `org.apache.hive.service.rpc.thrift.TTypeDesc` for sending a Thrift response to a client request for result set metadata, the `TTypeDesc` contains a `TPrimitiveTypeEntry(TTypeId.DECIMAL_TYPE)` without `TTypeQualifiers` (which are needed to capture the precision and scale). With this change, we include the qualifiers in the `TPrimitiveTypeEntry`. We use both the name and the `DataType` of a field type to construct the `TTypeDesc`. We are able to do this without changing the existing internal representation for data types because we can obtain the precision and scale from the name of the decimal type. ## How was this patch tested? Use beeline to connect to the Thrift server. Do a select from a table with a column of decimal type. Also extended an existing integration test. Author: Wing Yew Poon Closes #288 from wypoon/LIVY-754. 
--- .../apache/livy/thriftserver/types/Schema.scala| 37 +++--- .../livy/thriftserver/ThriftServerSuites.scala | 16 +- 2 files changed, 48 insertions(+), 5 deletions(-) diff --git a/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/types/Schema.scala b/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/types/Schema.scala index 0f4c642..5c02159 100644 --- a/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/types/Schema.scala +++ b/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/types/Schema.scala @@ -17,7 +17,7 @@ package org.apache.livy.thriftserver.types -import org.apache.hive.service.rpc.thrift.{TColumnDesc, TPrimitiveTypeEntry, TTableSchema, TTypeDesc, TTypeEntry, TTypeId} +import org.apache.hive.service.rpc.thrift._ import org.apache.livy.thriftserver.session.DataType @@ -41,7 +41,7 @@ case class BasicDataType(name: String) extends FieldType { case "float" => DataType.FLOAT case "double" => DataType.DOUBLE case "binary" => DataType.BINARY -case _ if name.contains("decimal") => DataType.DECIMAL +case _ if name.startsWith("decimal") => DataType.DECIMAL case "timestamp" => DataType.TIMESTAMP case "date" => DataType.DATE case _ => DataType.STRING @@ -88,12 +88,12 @@ object Schema { val tColumnDesc = new TColumnDesc tColumnDesc.setColumnName(field.name) tColumnDesc.setComment(field.comment) -tColumnDesc.setTypeDesc(toTTypeDesc(field.fieldType.dataType)) +tColumnDesc.setTypeDesc(toTTypeDesc(field.fieldType.name, field.fieldType.dataType)) tColumnDesc.setPosition(index) tColumnDesc } - private def toTTypeDesc(dt: DataType): TTypeDesc = { + private def toTTypeDesc(name: String, dt: DataType): TTypeDesc = { val typeId = dt match { case DataType.BOOLEAN => TTypeId.BOOLEAN_TYPE case DataType.BYTE => TTypeId.TINYINT_TYPE @@ -109,9 +109,38 @@ object Schema { case _ => TTypeId.STRING_TYPE } val primitiveEntry = new TPrimitiveTypeEntry(typeId) +if (dt == DataType.DECIMAL) { + val qualifiers = getDecimalQualifiers(name) 
+ primitiveEntry.setTypeQualifiers(qualifiers) +} val entry = TTypeEntry.primitiveEntry(primitiveEntry) val desc = new TTypeDesc desc.addToTypes(entry) desc } + + private def getDecimalQualifiers(name: String): TTypeQualifiers = { +// name can be one of +// 1. decimal +// 2. decimal(p) +// 3. decimal(p, s) +val (precision, scale) = + if (name == "decimal") { +(10, 0) + } else { +val suffix = name.substring("decimal".length) +require(suffix.startsWith("(") && suffix.endsWith(")"), + name + " is not of the form decimal(,)") +val parts = suffix.substring(1, suffix.length - 1).split(",").map(_.trim.toInt) +(parts(0), parts.lift(1).getOrElse(0)) + } +val qMap = new java.util.HashMap[String, TTypeQualifierValue] +val pVal = new TTypeQualifierValue +pVal.setI32Value(precision) +qMap.put(TCLIServiceConstants.PRECISION, pVal) +val sVal = new TTypeQualifierValue +sVal.setI32Value(scale) +qMap.put(TCLIServ
[incubator-livy] branch master updated: [LIVY-754][THRIFT] Encode precision and scale for decimal type.
This is an automated email from the ASF dual-hosted git repository. mgaido pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/incubator-livy.git The following commit(s) were added to refs/heads/master by this push: new 3b9bbef [LIVY-754][THRIFT] Encode precision and scale for decimal type. 3b9bbef is described below commit 3b9bbefbaf8ddb3116952caf8c15b97d676386a7 Author: Wing Yew Poon AuthorDate: Wed May 20 19:37:20 2020 +0200 [LIVY-754][THRIFT] Encode precision and scale for decimal type. ## What changes were proposed in this pull request? When a `org.apache.livy.thriftserver.session.DataType.DECIMAL` is converted to a `org.apache.hive.service.rpc.thrift.TTypeDesc` for sending a Thrift response to a client request for result set metadata, the `TTypeDesc` contains a `TPrimitiveTypeEntry(TTypeId.DECIMAL_TYPE)` without `TTypeQualifiers` (which are needed to capture the precision and scale). With this change, we include the qualifiers in the `TPrimitiveTypeEntry`. We use both the name and the `DataType` of a field type to construct the `TTypeDesc`. We are able to do this without changing the existing internal representation for data types because we can obtain the precision and scale from the name of the decimal type. ## How was this patch tested? Use beeline to connect to the Thrift server. Do a select from a table with a column of decimal type. Also extended an existing integration test. Author: Wing Yew Poon Closes #288 from wypoon/LIVY-754. 
--- .../apache/livy/thriftserver/types/Schema.scala| 37 +++--- .../livy/thriftserver/ThriftServerSuites.scala | 16 +- 2 files changed, 48 insertions(+), 5 deletions(-) diff --git a/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/types/Schema.scala b/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/types/Schema.scala index 0f4c642..5c02159 100644 --- a/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/types/Schema.scala +++ b/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/types/Schema.scala @@ -17,7 +17,7 @@ package org.apache.livy.thriftserver.types -import org.apache.hive.service.rpc.thrift.{TColumnDesc, TPrimitiveTypeEntry, TTableSchema, TTypeDesc, TTypeEntry, TTypeId} +import org.apache.hive.service.rpc.thrift._ import org.apache.livy.thriftserver.session.DataType @@ -41,7 +41,7 @@ case class BasicDataType(name: String) extends FieldType { case "float" => DataType.FLOAT case "double" => DataType.DOUBLE case "binary" => DataType.BINARY -case _ if name.contains("decimal") => DataType.DECIMAL +case _ if name.startsWith("decimal") => DataType.DECIMAL case "timestamp" => DataType.TIMESTAMP case "date" => DataType.DATE case _ => DataType.STRING @@ -88,12 +88,12 @@ object Schema { val tColumnDesc = new TColumnDesc tColumnDesc.setColumnName(field.name) tColumnDesc.setComment(field.comment) -tColumnDesc.setTypeDesc(toTTypeDesc(field.fieldType.dataType)) +tColumnDesc.setTypeDesc(toTTypeDesc(field.fieldType.name, field.fieldType.dataType)) tColumnDesc.setPosition(index) tColumnDesc } - private def toTTypeDesc(dt: DataType): TTypeDesc = { + private def toTTypeDesc(name: String, dt: DataType): TTypeDesc = { val typeId = dt match { case DataType.BOOLEAN => TTypeId.BOOLEAN_TYPE case DataType.BYTE => TTypeId.TINYINT_TYPE @@ -109,9 +109,38 @@ object Schema { case _ => TTypeId.STRING_TYPE } val primitiveEntry = new TPrimitiveTypeEntry(typeId) +if (dt == DataType.DECIMAL) { + val qualifiers = getDecimalQualifiers(name) 
+ primitiveEntry.setTypeQualifiers(qualifiers) +} val entry = TTypeEntry.primitiveEntry(primitiveEntry) val desc = new TTypeDesc desc.addToTypes(entry) desc } + + private def getDecimalQualifiers(name: String): TTypeQualifiers = { +// name can be one of +// 1. decimal +// 2. decimal(p) +// 3. decimal(p, s) +val (precision, scale) = + if (name == "decimal") { +(10, 0) + } else { +val suffix = name.substring("decimal".length) +require(suffix.startsWith("(") && suffix.endsWith(")"), + name + " is not of the form decimal(,)") +val parts = suffix.substring(1, suffix.length - 1).split(",").map(_.trim.toInt) +(parts(0), parts.lift(1).getOrElse(0)) + } +val qMap = new java.util.HashMap[String, TTypeQualifierValue] +val pVal = new TTypeQualifierValue +pVal.setI32Value(precision) +qMap.put(TCLIServiceConstants.PRECISION, pVal) +val sVal = new TTypeQualifierValue +sVal.setI32Value(scale) +qMap.put(TCLIServ
[incubator-livy] branch master updated: [LIVY-754][THRIFT] Encode precision and scale for decimal type.
This is an automated email from the ASF dual-hosted git repository. mgaido pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/incubator-livy.git The following commit(s) were added to refs/heads/master by this push: new 3b9bbef [LIVY-754][THRIFT] Encode precision and scale for decimal type. 3b9bbef is described below commit 3b9bbefbaf8ddb3116952caf8c15b97d676386a7 Author: Wing Yew Poon AuthorDate: Wed May 20 19:37:20 2020 +0200 [LIVY-754][THRIFT] Encode precision and scale for decimal type. ## What changes were proposed in this pull request? When a `org.apache.livy.thriftserver.session.DataType.DECIMAL` is converted to a `org.apache.hive.service.rpc.thrift.TTypeDesc` for sending a Thrift response to a client request for result set metadata, the `TTypeDesc` contains a `TPrimitiveTypeEntry(TTypeId.DECIMAL_TYPE)` without `TTypeQualifiers` (which are needed to capture the precision and scale). With this change, we include the qualifiers in the `TPrimitiveTypeEntry`. We use both the name and the `DataType` of a field type to construct the `TTypeDesc`. We are able to do this without changing the existing internal representation for data types because we can obtain the precision and scale from the name of the decimal type. ## How was this patch tested? Use beeline to connect to the Thrift server. Do a select from a table with a column of decimal type. Also extended an existing integration test. Author: Wing Yew Poon Closes #288 from wypoon/LIVY-754. 
--- .../apache/livy/thriftserver/types/Schema.scala| 37 +++--- .../livy/thriftserver/ThriftServerSuites.scala | 16 +- 2 files changed, 48 insertions(+), 5 deletions(-) diff --git a/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/types/Schema.scala b/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/types/Schema.scala index 0f4c642..5c02159 100644 --- a/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/types/Schema.scala +++ b/thriftserver/server/src/main/scala/org/apache/livy/thriftserver/types/Schema.scala @@ -17,7 +17,7 @@ package org.apache.livy.thriftserver.types -import org.apache.hive.service.rpc.thrift.{TColumnDesc, TPrimitiveTypeEntry, TTableSchema, TTypeDesc, TTypeEntry, TTypeId} +import org.apache.hive.service.rpc.thrift._ import org.apache.livy.thriftserver.session.DataType @@ -41,7 +41,7 @@ case class BasicDataType(name: String) extends FieldType { case "float" => DataType.FLOAT case "double" => DataType.DOUBLE case "binary" => DataType.BINARY -case _ if name.contains("decimal") => DataType.DECIMAL +case _ if name.startsWith("decimal") => DataType.DECIMAL case "timestamp" => DataType.TIMESTAMP case "date" => DataType.DATE case _ => DataType.STRING @@ -88,12 +88,12 @@ object Schema { val tColumnDesc = new TColumnDesc tColumnDesc.setColumnName(field.name) tColumnDesc.setComment(field.comment) -tColumnDesc.setTypeDesc(toTTypeDesc(field.fieldType.dataType)) +tColumnDesc.setTypeDesc(toTTypeDesc(field.fieldType.name, field.fieldType.dataType)) tColumnDesc.setPosition(index) tColumnDesc } - private def toTTypeDesc(dt: DataType): TTypeDesc = { + private def toTTypeDesc(name: String, dt: DataType): TTypeDesc = { val typeId = dt match { case DataType.BOOLEAN => TTypeId.BOOLEAN_TYPE case DataType.BYTE => TTypeId.TINYINT_TYPE @@ -109,9 +109,38 @@ object Schema { case _ => TTypeId.STRING_TYPE } val primitiveEntry = new TPrimitiveTypeEntry(typeId) +if (dt == DataType.DECIMAL) { + val qualifiers = getDecimalQualifiers(name) 
+ primitiveEntry.setTypeQualifiers(qualifiers) +} val entry = TTypeEntry.primitiveEntry(primitiveEntry) val desc = new TTypeDesc desc.addToTypes(entry) desc } + + private def getDecimalQualifiers(name: String): TTypeQualifiers = { +// name can be one of +// 1. decimal +// 2. decimal(p) +// 3. decimal(p, s) +val (precision, scale) = + if (name == "decimal") { +(10, 0) + } else { +val suffix = name.substring("decimal".length) +require(suffix.startsWith("(") && suffix.endsWith(")"), + name + " is not of the form decimal(,)") +val parts = suffix.substring(1, suffix.length - 1).split(",").map(_.trim.toInt) +(parts(0), parts.lift(1).getOrElse(0)) + } +val qMap = new java.util.HashMap[String, TTypeQualifierValue] +val pVal = new TTypeQualifierValue +pVal.setI32Value(precision) +qMap.put(TCLIServiceConstants.PRECISION, pVal) +val sVal = new TTypeQualifierValue +sVal.setI32Value(scale) +qMap.put(TCLIServ