captainzmc commented on a change in pull request #209: [LIVY-640] Add tests for ThriftServer URL: https://github.com/apache/incubator-livy/pull/209#discussion_r319729380
########## File path: thriftserver/server/src/test/scala/org/apache/livy/thriftserver/ThriftServerSuites.scala ########## @@ -259,6 +276,137 @@ trait CommonThriftTests { class BinaryThriftServerSuite extends ThriftServerBaseTest with CommonThriftTests { override def mode: ServerMode.Value = ServerMode.binary override def port: Int = 20000 + // In BinaryThriftServerSuite, we set ENABLE_HIVE_CONTEXT=true to support the creation + // of Hive tables. + livyConf.set(LivyConf.ENABLE_HIVE_CONTEXT, true) + + def getTestDataFilePath(): URL = { + Thread.currentThread().getContextClassLoader.getResource("data/files/small_kv.txt") + } + + test("test multiple session") { + var defaultV1: String = null + var defaultV2: String = null + var data: ArrayBuffer[Int] = null + try { + // create table + withJdbcStatement { statement => + val queries = Seq( + "CREATE TABLE test_map(key INT, value STRING)", + s"LOAD DATA LOCAL INPATH '${getTestDataFilePath}' OVERWRITE INTO TABLE test_map", + "CACHE TABLE test_table AS SELECT key FROM test_map ORDER BY key DESC", + "CREATE DATABASE db1") + + queries.foreach(statement.execute) + + val plan = statement.executeQuery("explain select * from test_table") + plan.next() + plan.next() + assert(plan.getString(1).contains("InMemoryTableScan")) + + val rs1 = statement.executeQuery("SELECT key FROM test_table ORDER BY KEY DESC") + val buf1 = new collection.mutable.ArrayBuffer[Int]() + while (rs1.next()) { + buf1 += rs1.getInt(1) + } + rs1.close() + + val rs2 = statement.executeQuery("SELECT key FROM test_map ORDER BY KEY DESC") + val buf2 = new collection.mutable.ArrayBuffer[Int]() + while (rs2.next()) { + buf2 += rs2.getInt(1) + } + rs2.close() + + assert(buf1 === buf2) + + data = buf1 + } + + // first session, we get the default value of the session status + withJdbcStatement { statement => + val rs1 = statement.executeQuery(s"SET spark.sql.shuffle.partitions") + rs1.next() + defaultV1 = rs1.getString(1) + assert(defaultV1 != "200") + rs1.close() + + 
val rs2 = statement.executeQuery("SET hive.cli.print.header") + rs2.next() + + defaultV2 = rs2.getString(1) + assert(defaultV2 != "true") + rs2.close() + } + + // second session, we update the session status + withJdbcStatement { statement => + val queries = Seq( + "SET spark.sql.shuffle.partitions=291", Review comment: I’ll fix this ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: us...@infra.apache.org With regards, Apache Git Services