Github user evans-ye commented on a diff in the pull request:
https://github.com/apache/bigtop/pull/370#discussion_r200164580
--- Diff: bigtop-tests/smoke-tests/hive/HiveJdbcGeneralTest.java ---
@@ -0,0 +1,392 @@
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Properties;
+
+import org.apache.hive.service.cli.HiveSQLException;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ *
+ * THIS CLASS TESTS THE FOLLOWING ASPECTS OF HIVE:
+ *
+ * Connect to hive; Drop the tables if already there; Test Show Tables;
Create
+ * blank tables, both transactional and non, along with ORC format and
+ * partitions; Reprint the list of tables; Load File into HDFS; Load data
into
+ * tables; describe tables; Delete Uploaded File; Print table contents with
+ * various queries; Test Prepared and Callable Statements; Test Fetch
Sizes;
+ * Test ACID (UPDATE/INSERT/DELETE); View Partitions;
+ */
+public class HiveJdbcGeneralTest extends TestMethods {
+
+ static String hivePort = System.getenv("HIVE_PORT");
+ static String jdbcConnection = System.getenv("HIVE_JDBC_URL");
+ static Connection con;
+ String newTableName = "btest";
+
+ @BeforeClass
+ public static void onTimeSetup() throws Exception {
+ String username = System.getenv("HIVE_USER");
+ String password = System.getenv("HIVE_PASSWORD");
+ Properties connectionProps = new Properties();
+ connectionProps.put("user", username);
+ connectionProps.put("password", password);
+ Class.forName("org.apache.hive.jdbc.HiveDriver");
+ con = DriverManager.getConnection(
+ jdbcConnection + ":" + hivePort + "/default;", connectionProps);
+ }
+
+ @AfterClass
+ public static void oneTimeTearDown() throws Exception {
+ con.close();
+ }
+
+ @Test // (expected=java.sql.SQLDataException.class)
+ public void testHive() throws Exception {
+ final File f = new File(HiveJdbcGeneralTest.class.getProtectionDomain()
+ .getCodeSource().getLocation().getPath());
+ // String jdbcDriver = propertyValue("hive-site.xml",
+ // "javax.jdo.option.ConnectionDriverName");
+
+ // String qualifiedName = propertyValue("hdfs-site.xml",
+ // "dfs.internal.nameservices");
+ // String[] haNodes = propertyValue("hdfs-site.xml",
+ // "dfs.ha.namenodes."+qualifiedName).split(",");
+ // String primaryNode = haNodes[0];
+ String hdfsConnection =
+ propertyValue("hdfs-site.xml", "dfs.namenode.rpc-address");
+ try (Statement stmt = con.createStatement()) {
+ String columnNames =
+ "(Flight int, Dates varchar(255), Depart varchar(10), Orig
varchar(5), Dest varchar(5), Miles int, Boarded int, Capacity double)";
+ String partitionedColumns =
+ "(Flight int, Dates varchar(255), Depart varchar(10), Orig
varchar(5), Dest varchar(5), Miles int, Boarded int)";
+ String localFilepath = f + "/samdat1.csv";
+ String HdfsURI = "hdfs://" + hdfsConnection;
+ String filePath = "/tmp/htest/00000_";
+ String fileDestination = HdfsURI + filePath;
+ assertFalse(con.getMetaData().supportsRefCursors());
+ assertTrue(con.getMetaData().allTablesAreSelectable());
+ assertEquals("Apache Hive",
con.getMetaData().getDatabaseProductName());
+ System.out.println(
+ "Hive Version: " + con.getMetaData().getDatabaseMajorVersion() +
"."
+ + con.getMetaData().getDatabaseMinorVersion());
+ getTables(con, newTableName);
+ dropTable(stmt, newTableName);
+ dropTable(stmt, newTableName + "NT");
+ dropTable(stmt, newTableName + "T");
+ dropTable(stmt, newTableName + "P");
+ dropTable(stmt, newTableName + "V");
+ showTables(stmt, "show tables like 'b*'");
+ createTable(stmt, newTableName, columnNames, ",", "");
+ try {
+ createTable(stmt, newTableName + "NT", columnNames, ",",
+ "TBLPROPERTIES(\"transactional\"=\"true\")");
+ fail("shouldn't get here");
+ } catch (SQLException e) {
+
+ }
+ createTable(stmt, newTableName + "T", columnNames, ",",
+ "STORED AS ORC TBLPROPERTIES(\"transactional\"=\"true\")");
+ createPartitionedTable(stmt, newTableName + "P", partitionedColumns,
+ "(Capacity double)", ",", "STORED AS ORC");
+ createTable(stmt, newTableName + "V", "(Dest varchar(5))", ",",
+ "STORED AS ORC");
+ showTables(stmt, "show tables like 'b*'");
+ loadFile(localFilepath, HdfsURI, fileDestination + ".txt");
+ loadData(stmt, filePath + ".txt", newTableName);
+ describeTable(stmt, newTableName);
+ updateTable(stmt, "Insert into table btestt SELECT * from btest");
+ updateTable(stmt, "Insert into table btestv SELECT Dest from btest");
+ updateTable(stmt,
+ "Insert into table btestp PARTITION (Capacity) SELECT * from
btest");
+ deleteFile(stmt, filePath + ".txt", HdfsURI);
+ deleteFile(stmt, filePath + "0.orc", HdfsURI);
+ assertEquals("302", printResults(stmt, "Select * from btest"));
+ assertEquals("114", printResults(stmt,
+ "Select * from btest where Dest = 'LAX' order by boarded desc"));
+ assertEquals("622", printResults(stmt,
+ "Select * from btest where boarded between 160 and 180 order by
boarded asc"));
+ assertEquals("202", printResults(stmt,
+ "Select * from btest where Dest='LAX' or Dest='ORD'"));
+ assertEquals("114", printResults(stmt,
+ "Select * from btest where Dest= 'LAX' and boarded >= 180"));
+ assertEquals("114", printResults(stmt,
+ "Select * from btest where Dest ='LAX' and boarded = 197"));
+ assertEquals("219",
+ printResults(stmt, "Select * from btest LIMIT 10 --"));
+ assertEquals("114", preparedStatement(con,
+ "Select * from btest where Dest ='LAX' and boarded = 197"));
+ assertEquals(15, setFetchSizeStatement(stmt));
+ assertEquals(15, setFetchSizePreparedStatement(con));
+ // assertEquals(callableStatement(con, -20), -1);
+ assertEquals("95487", printResults(stmt, "Select SUM(Miles) from
btest"));
+ assertEquals("302", printResults(stmt, "Select * from btest"));
+ assertEquals(0, updateTable(stmt,
+ "Update btestt set Orig= 'test' where Dest= 'LAX'"));
+ try {
+ loadData(stmt, filePath + ".txt", newTableName + "T");
+ fail("shouldn't get here");
+ } catch (HiveSQLException e) {
+ System.out.println("File does not exist in specified location");
+ }
+ assertEquals("132",
+ printResults(stmt, "Select * from btestt order by Dest"));
+ assertEquals("capacity=250.0",
+ printResults(stmt, "show partitions btestp"));
+ assertEquals("LAX", printResults(stmt,
+ "select MIN(Dest), boarded from btest where Dest ='LAX' group by
Dest, boarded"));
+ assertEquals("19", printResults(stmt,
+ "select count(*) from btest where capacity = 250.0 group by
capacity"));
+ executeStatement(stmt, "Drop view testview");
+ executeStatement(stmt,
+ "Create view testview AS select SUM(btest.boarded) BOARDED from
btest, btestv where btest.Dest=btestv.Dest");
+ assertEquals("45072", printResults(stmt, "Select * from testview"));
+ executeStatement(stmt, "Drop view testview");
+ printResults(stmt, "Describe formatted btest");
+ dropTable(stmt, newTableName);
+ dropTable(stmt, newTableName + "NT");
+ dropTable(stmt, newTableName + "T");
+ dropTable(stmt, newTableName + "P");
+ dropTable(stmt, newTableName + "V");
+ setNegativeFetchSize(stmt);
+ }
+ }
--- End diff --
It seems that we're testing many cases here. Can we separate them into
individual tests? That way we can pinpoint exactly which part is failing. If
the whole series of operations is what you'd like to test, then we should
rename the test or add comments describing what we're going to test here.
---