Github user evans-ye commented on a diff in the pull request:

    https://github.com/apache/bigtop/pull/370#discussion_r200169170
  
    --- Diff: bigtop-tests/smoke-tests/hive/TestMethods.java ---
    @@ -0,0 +1,331 @@
    +
    +/**
    + * Licensed to the Apache Software Foundation (ASF) under one
    + * or more contributor license agreements.  See the NOTICE file
    + * distributed with this work for additional information
    + * regarding copyright ownership.  The ASF licenses this file
    + * to you under the Apache License, Version 2.0 (the
    + * "License"); you may not use this file except in compliance
    + * with the License.  You may obtain a copy of the License at
    + * <p/>
    + * http://www.apache.org/licenses/LICENSE-2.0
    + * <p/>
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +// A master class containing methods which aid in replicating access 
to Hadoop
    +import java.io.BufferedInputStream;
    +import java.io.File;
    +import java.io.FileInputStream;
    +import java.io.IOException;
    +import java.io.InputStream;
    +import java.io.OutputStream;
    +import java.net.URI;
    +import java.net.URISyntaxException;
    +import java.sql.CallableStatement;
    +import java.sql.Connection;
    +import java.sql.DatabaseMetaData;
    +import java.sql.PreparedStatement;
    +import java.sql.ResultSet;
    +import java.sql.ResultSetMetaData;
    +import java.sql.SQLException;
    +import java.sql.Statement;
    +import java.sql.Types;
    +import java.util.ArrayList;
    +import java.util.Arrays;
    +
    +import javax.xml.parsers.DocumentBuilder;
    +import javax.xml.parsers.DocumentBuilderFactory;
    +import javax.xml.parsers.ParserConfigurationException;
    +
    +import org.apache.hadoop.conf.Configuration;
    +import org.apache.hadoop.fs.FileSystem;
    +import org.apache.hadoop.fs.Path;
    +import org.apache.hadoop.io.IOUtils;
    +import org.w3c.dom.Document;
    +import org.w3c.dom.Element;
    +import org.w3c.dom.Node;
    +import org.w3c.dom.NodeList;
    +import org.xml.sax.SAXException;
    +
    +/**
    + * THE METHODS IN THIS CLASS ENABLE HIVE OPERATIONS: propertyValue Reads
    + * properties from hadoop configuration files; getTables Uses MetaData to 
print
    + * the tables in the database; dropTable Drops the table from the database;
    + * createTable Creates a new table with user defined properties
    + * createPartitionedTable Creates a new table with a partition on a 
specified
    + * column; showTables Lists tables in the database; loadFile Loads a local 
data
    + * file into HDFS; loadData Inserts data from a data file into a table;
    + * deleteFile Deletes a file from HDFS; updateTable Performs an ACID 
Transaction
    + * on a table; printResults Performs a regular query and parses the result 
set;
    + * resulSetVerification Returns the value in a column of the table in 
order to
    + * verify data; preparedStatement Performs a prepared query in hive;
    + * callableStatement Creates and executes a stored procedure; getObject 
Performs
    + * a test query; setFetchSizeStatement Tests setting the fetch size on a 
result
    + * set; setFetchSizePreparedStatement Tests setting the fetch size on a 
prepared
    + * statement's result set; setNegativeFetchSize Ensures that negative 
values
    + * cannot be passed to the result set; executeStatement Performs a regular 
query
    + *
    + */
    +
    +public class TestMethods {
    +
    +  static String propertyValue(String propertyFile, String propertyName)
    +      throws ParserConfigurationException, SAXException, IOException,
    +      URISyntaxException {
    +    String configLocation = System.getenv("HADOOP_CONF_DIR");
    +    File file = new File(configLocation + "/" + propertyFile);
    +    DocumentBuilderFactory documentBuilderFactory =
    +        DocumentBuilderFactory.newInstance();
    +    DocumentBuilder documentBuilder =
    +        documentBuilderFactory.newDocumentBuilder();
    +    Document document = documentBuilder.parse(file);
    +    document.getDocumentElement().normalize();
    +    Element docElement = document.getDocumentElement();
    +    NodeList nodeList = docElement.getElementsByTagName("property");
    +    ArrayList<String> names = new ArrayList<>();
    +    ArrayList<String> values = new ArrayList<>();
    +    if (nodeList != null) {
    +      int length = nodeList.getLength();
    +      for (int i = 0; i < length; i++) {
    +        if (nodeList.item(i).getNodeType() == Node.ELEMENT_NODE) {
    +          Element element = (Element) nodeList.item(i);
    +          if (element.getNodeName().contains("property")) {
    +            names.add(
    +                
element.getElementsByTagName("name").item(0).getTextContent());
    +            values.add(
    +                
element.getElementsByTagName("value").item(0).getTextContent());
    +
    --- End diff --
    
    Not sure why additional newline here


---

Reply via email to