HIVE-5867: JDBC driver and beeline should support executing an initial SQL script (Jianguo Tian, via Ferdinand Xu)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/737fd09a Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/737fd09a Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/737fd09a Branch: refs/heads/hive-14535 Commit: 737fd09a20dbb4b728307f84825f7ddc4294ae02 Parents: 19fd561 Author: Jianguo Tian <jianguo.t...@intel.com> Authored: Tue Sep 27 04:01:49 2016 +0800 Committer: Ferdinand Xu <cheng.a...@intel.com> Committed: Tue Sep 27 04:01:49 2016 +0800 ---------------------------------------------------------------------- .../org/apache/hive/jdbc/HiveConnection.java | 79 ++++++++++++++++ jdbc/src/java/org/apache/hive/jdbc/Utils.java | 1 + .../org/apache/hive/jdbc/TestJdbcDriver.java | 98 ++++++++++++++++++++ 3 files changed, 178 insertions(+) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/hive/blob/737fd09a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java ---------------------------------------------------------------------- diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java index ad96a64..ce85320 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java @@ -67,8 +67,11 @@ import javax.net.ssl.TrustManagerFactory; import javax.security.sasl.Sasl; import javax.security.sasl.SaslException; +import java.io.BufferedReader; +import java.io.File; import java.io.FileInputStream; import java.io.IOException; +import java.io.InputStreamReader; import java.lang.reflect.InvocationHandler; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; @@ -92,6 +95,7 @@ import java.sql.SQLXML; import java.sql.Savepoint; import java.sql.Statement; import java.sql.Struct; +import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedList; import java.util.List; @@ 
-125,6 +129,7 @@ public class HiveConnection implements java.sql.Connection { private int loginTimeout = 0; private TProtocolVersion protocol; private int fetchSize = HiveStatement.DEFAULT_FETCH_SIZE; + private String initFile = null; public HiveConnection(String uri, Properties info) throws SQLException { setupLoginTimeout(); @@ -147,6 +152,9 @@ public class HiveConnection implements java.sql.Connection { if (sessConfMap.containsKey(JdbcConnectionParams.FETCH_SIZE)) { fetchSize = Integer.parseInt(sessConfMap.get(JdbcConnectionParams.FETCH_SIZE)); } + if (sessConfMap.containsKey(JdbcConnectionParams.INIT_FILE)) { + initFile = sessConfMap.get(JdbcConnectionParams.INIT_FILE); + } // add supported protocols supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V1); @@ -166,6 +174,7 @@ public class HiveConnection implements java.sql.Connection { // open client session openSession(); + executeInitSql(); } else { int maxRetries = 1; try { @@ -184,6 +193,7 @@ public class HiveConnection implements java.sql.Connection { client = new TCLIService.Client(new TBinaryProtocol(transport)); // open client session openSession(); + executeInitSql(); break; } catch (Exception e) { @@ -218,6 +228,75 @@ public class HiveConnection implements java.sql.Connection { client = newSynchronizedClient(client); } + private void executeInitSql() throws SQLException { + if (initFile != null) { + try { + List<String> sqlList = parseInitFile(initFile); + Statement st = createStatement(); + for(String sql : sqlList) { + boolean hasResult = st.execute(sql); + if (hasResult) { + ResultSet rs = st.getResultSet(); + while (rs.next()) { + System.out.println(rs.getString(1)); + } + } + } + } catch(Exception e) { + LOG.error("Failed to execute initial SQL"); + throw new SQLException(e.getMessage()); + } + } + } + + public static List<String> parseInitFile(String initFile) throws IOException { + File file = new File(initFile); + BufferedReader br = null; + List<String> initSqlList = null; + try 
{ + FileInputStream input = new FileInputStream(file); + br = new BufferedReader(new InputStreamReader(input, "UTF-8")); + String line; + StringBuilder sb = new StringBuilder(""); + while ((line = br.readLine()) != null) { + line = line.trim(); + if (line.length() != 0) { + if (line.startsWith("#") || line.startsWith("--")) { + continue; + } else { + line = line.concat(" "); + sb.append(line); + } + } + } + initSqlList = getInitSql(sb.toString()); + } catch(IOException e) { + LOG.error("Failed to read initial SQL file", e); + throw new IOException(e); + } finally { + if (br != null) { + br.close(); + } + } + return initSqlList; + } + + private static List<String> getInitSql(String sbLine) { + char[] sqlArray = sbLine.toCharArray(); + List<String> initSqlList = new ArrayList(); + int index = 0; + int beginIndex = 0; + for (; index < sqlArray.length; index++) { + if (sqlArray[index] == ';') { + String sql = sbLine.substring(beginIndex, index).trim(); + initSqlList.add(sql); + beginIndex = index + 1; + } + } + return initSqlList; + } + + private void openTransport() throws Exception { assumeSubject = JdbcConnectionParams.AUTH_KERBEROS_AUTH_TYPE_FROM_SUBJECT.equals(sessConfMap http://git-wip-us.apache.org/repos/asf/hive/blob/737fd09a/jdbc/src/java/org/apache/hive/jdbc/Utils.java ---------------------------------------------------------------------- diff --git a/jdbc/src/java/org/apache/hive/jdbc/Utils.java b/jdbc/src/java/org/apache/hive/jdbc/Utils.java index 3161566..bfae8b9 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/Utils.java +++ b/jdbc/src/java/org/apache/hive/jdbc/Utils.java @@ -120,6 +120,7 @@ public class Utils { static final String HTTP_HEADER_PREFIX = "http.header."; // Set the fetchSize static final String FETCH_SIZE = "fetchSize"; + static final String INIT_FILE = "initFile"; // --------------- Begin 2 way ssl options ------------------------- // Use two way ssl. 
This param will take effect only when ssl=true http://git-wip-us.apache.org/repos/asf/hive/blob/737fd09a/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver.java ---------------------------------------------------------------------- diff --git a/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver.java b/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver.java new file mode 100644 index 0000000..162e42f --- /dev/null +++ b/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver.java @@ -0,0 +1,98 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hive.jdbc; + +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameters; + +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; + +import static org.junit.Assert.assertEquals; + +@RunWith(Parameterized.class) +public class TestJdbcDriver { + private static File file = null; + private String input; + private String expected; + + public TestJdbcDriver(String input, String expected) throws Exception { + this.input = input; + this.expected = expected; + } + + @Parameters + public static Collection<Object[]> data() { + return Arrays.asList(new Object[][]{ + // Here are some positive cases which can be executed as below : + {"show databases;show tables;", "show databases,show tables"}, + {" show\n\r tables;", "show tables"}, + {"show databases; show\ntables;", "show databases,show tables"}, + {"show tables;", "show tables"}, + {"show tables ;", "show tables"}, + // Here are some negative cases as below : + {"show tables", ","}, + {"show tables show tables;", "show tables show tables"}, + {"show tab les;", "show tab les"}, + {"#show tables; show\n tables;", "tables"}, + {"show tab les;show tables;", "show tab les,show tables"} + }); + } + + @BeforeClass + public static void setUpBeforeClass() throws Exception { + file = new File(System.getProperty("user.dir") + File.separator + "Init.sql"); + if (!file.exists()) { + file.createNewFile(); + } + } + + @AfterClass + public static void cleanUpAfterClass() throws Exception { + if (file != null) { + file.delete(); + } + } + + @Test + public void testParseInitFile() throws IOException { + BufferedWriter bw = null; + try { + bw = new BufferedWriter(new FileWriter(file)); + bw.write(input); + bw.flush(); + 
assertEquals(Arrays.asList(expected.split(",")), HiveConnection.parseInitFile(file.toString())); + } catch(Exception e) { + Assert.fail("Test was failed due to " + e); + } finally { + if (bw != null) { + bw.close(); + } + } + } +}