HAWQ-969. fix bugs in last commit

Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/6617a836
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/6617a836
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/6617a836

Branch: refs/heads/master
Commit: 6617a8368e0123be16fd2bf44ff081b15da551b5
Parents: d1aafe6
Author: Chunling Wang <[email protected]>
Authored: Wed Aug 3 11:30:12 2016 +0800
Committer: ivan <[email protected]>
Committed: Wed Aug 24 09:50:40 2016 +0800

----------------------------------------------------------------------
 src/test/feature/lib/hdfs_config.cpp  |  51 +++++++-----
 src/test/feature/lib/hdfs_config.h    |  11 ++-
 src/test/feature/lib/xml_parser.cpp   |   2 +-
 src/test/feature/lib/xml_parser.h     |   2 +-
 src/test/feature/lib/yarn_config.cpp  |  63 ++++++++++-----
 src/test/feature/lib/yarn_config.h    |  13 ++-
 src/test/feature/testlib/test_lib.cpp | 125 ++++++++++++++++-------------
 7 files changed, 161 insertions(+), 106 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/6617a836/src/test/feature/lib/hdfs_config.cpp
----------------------------------------------------------------------
diff --git a/src/test/feature/lib/hdfs_config.cpp b/src/test/feature/lib/hdfs_config.cpp
index 90b1032..e59a02e 100644
--- a/src/test/feature/lib/hdfs_config.cpp
+++ b/src/test/feature/lib/hdfs_config.cpp
@@ -28,7 +28,7 @@ string HdfsConfig::getHdfsUser() {
 bool HdfsConfig::LoadFromHawqConfigFile() {
   const char *env = getenv("GPHOME");
   string confPath = env ? env : "";
-  if (!confPath.empty()) {
+  if (confPath != "") {
     confPath.append("/etc/hdfs-client.xml");
   } else {
     return false;
@@ -41,8 +41,9 @@ bool HdfsConfig::LoadFromHawqConfigFile() {
 
 bool HdfsConfig::LoadFromHdfsConfigFile() {
   string confPath=getHadoopHome();
-  if (confPath == "")
+  if (confPath == "") {
     return false;
+  }
   confPath.append("/etc/hadoop/hdfs-site.xml");
   hdfsxmlconf.reset(new XmlConfig(confPath));
   hdfsxmlconf->parse();
@@ -50,22 +51,26 @@ bool HdfsConfig::LoadFromHdfsConfigFile() {
 }
 
 bool HdfsConfig::isHA() {
-  bool ret = LoadFromHawqConfigFile();
-  if (!ret) {
-    return false;
-  }
-  string nameservice = hawqxmlconf->getString("dfs.nameservices");
-  if (nameservice.length() > 0) {
-    return true;
-  } else {
-    return false;
+  const hawq::test::PSQLQueryResult &result = psql.getQueryResult(
+       "SELECT substring(fselocation from length('hdfs:// ') for (position('/' in substring(fselocation from length('hdfs:// ')))-1)::int) "
+       "FROM pg_filespace pgfs, pg_filespace_entry pgfse "
+       "WHERE pgfs.fsname = 'dfs_system' AND pgfse.fsefsoid=pgfs.oid ;");
+  std::vector<std::vector<string>> table = result.getRows();
+  if (table.size() > 0) {
+    int find = table[0][0].find(":");
+    if (find < 0) {
+      return true;
+    } else {
+      return false;
+    }
   }
+  return false;
 }
 
-bool HdfsConfig::isKerbos() {
+bool HdfsConfig::isConfigKerberos() {
   bool ret = LoadFromHawqConfigFile();
   if (!ret) {
-    return false;
+    throw GetHawqHomeException();
   }
   string authentication = hawqxmlconf->getString("hadoop.security.authentication");
   if (authentication == "kerberos") {
@@ -83,8 +88,9 @@ bool HdfsConfig::isTruncate() {
   if (lines.size() >= 1) {
     string valueLine = lines[0];
     int find = valueLine.find("-truncate: Unknown command");
-    if (find < 0)
+    if (find < 0) {
       return true;
+    }
   }
   return false;
 }
@@ -122,8 +128,9 @@ bool HdfsConfig::getStandbyNamenode(string &standbynamenode,
 bool HdfsConfig::getHANamenode(const string &namenodetype,
                                string &namenode,
                                int &port) {
-  if (!isHA())
+  if (!isHA()) {
     return false;
+  }
   string namenodeService = "";
   string nameServiceValue = hawqxmlconf->getString("dfs.nameservices");
   string haNamenodesName = "dfs.ha.namenodes.";
@@ -227,8 +234,9 @@ bool HdfsConfig::isSafemode() {
   if (lines.size() >= 1) {
     string valueLine = lines[0];
     int find = valueLine.find("Name node is in safe mode.");
-    if (find >= 0)
+    if (find >= 0) {
       return true;
+    }
   }
   cmd = "hadoop fs -rm -r /tmp_hawq_test";
   Command c_teardown(cmd);
@@ -239,7 +247,7 @@ bool HdfsConfig::isSafemode() {
 string HdfsConfig::getParameterValue(const string &parameterName) {
   bool ret = LoadFromHdfsConfigFile();
   if (!ret) {
-    return NULL;
+    throw GetHadoopHomeException();
   }
 
   return hdfsxmlconf->getString(parameterName);
@@ -251,7 +259,7 @@ string HdfsConfig::getParameterValue(const string &parameterName,
     return getParameterValue(parameterName);
   bool ret = LoadFromHawqConfigFile();
   if (!ret) {
-    return NULL;
+    throw GetHawqHomeException();
   }
 
   return hawqxmlconf->getString(parameterName);
@@ -261,7 +269,7 @@ bool HdfsConfig::setParameterValue(const string &parameterName,
                                    const string &parameterValue) { 
   bool ret = LoadFromHdfsConfigFile();
   if (!ret) {
-    return false;
+    throw GetHadoopHomeException();
   }
 
   return hdfsxmlconf->setString(parameterName, parameterValue);
@@ -270,11 +278,12 @@ bool HdfsConfig::setParameterValue(const string &parameterName,
 bool HdfsConfig::setParameterValue(const string &parameterName,
                                    const string &parameterValue,
                                    const string &conftype) {
-  if (conftype == "hdfs" || conftype == "HDFS")
+  if (conftype == "hdfs" || conftype == "HDFS") {
     return setParameterValue(parameterName, parameterValue);
+  }
   bool ret = LoadFromHawqConfigFile();
   if (!ret) {
-    return false;
+    throw GetHawqHomeException();
   }
 
   return hawqxmlconf->setString(parameterName, parameterValue);

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/6617a836/src/test/feature/lib/hdfs_config.h
----------------------------------------------------------------------
diff --git a/src/test/feature/lib/hdfs_config.h b/src/test/feature/lib/hdfs_config.h
index 2ea89f5..34e6f56 100644
--- a/src/test/feature/lib/hdfs_config.h
+++ b/src/test/feature/lib/hdfs_config.h
@@ -24,7 +24,7 @@ class HdfsConfig {
     HdfsConfig(): psql(HAWQ_DB, HAWQ_HOST, HAWQ_PORT, HAWQ_USER, HAWQ_PASSWORD) {}
 
     /**
-     * HdfsConfig desstructor
+     * HdfsConfig destructor
      */
     ~HdfsConfig() {}
 
@@ -36,9 +36,9 @@ class HdfsConfig {
 
     /**
      * whether HDFS is kerbos
-     * @return true if HDFS is kerbos
+     * @return true if HDFS is kerberos
      */
-    bool isKerbos();
+    bool isConfigKerberos();
 
     /**
      * whether HDFS supports truncate operation
@@ -134,7 +134,7 @@ class HdfsConfig {
 
   private:
     /**
-     * @return yarn user
+     * @return hdfs user
      */
     std::string getHdfsUser();
     /**
@@ -166,6 +166,9 @@ class HdfsConfig {
     hawq::test::PSQL psql;
 };
 
+class GetHawqHomeException {};
+class GetHadoopHomeException {};
+
 } // namespace test
 } // namespace hawq
 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/6617a836/src/test/feature/lib/xml_parser.cpp
----------------------------------------------------------------------
diff --git a/src/test/feature/lib/xml_parser.cpp b/src/test/feature/lib/xml_parser.cpp
index 3cdc229..c731ca8 100644
--- a/src/test/feature/lib/xml_parser.cpp
+++ b/src/test/feature/lib/xml_parser.cpp
@@ -107,7 +107,7 @@ void XmlConfig::readConfigItem(xmlNodePtr root) {
 
 bool XmlConfig::setString(const string &key,
                           const string &value,
-                          const bool &save) {
+                          bool save) {
   bool result = false;
 
   if (save) {

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/6617a836/src/test/feature/lib/xml_parser.h
----------------------------------------------------------------------
diff --git a/src/test/feature/lib/xml_parser.h b/src/test/feature/lib/xml_parser.h
index dde3b34..e2a83a7 100644
--- a/src/test/feature/lib/xml_parser.h
+++ b/src/test/feature/lib/xml_parser.h
@@ -33,7 +33,7 @@ class XmlConfig {
   // @param value The updated value
   // @param save whether save the updated document to disk, if save is false, open() and closeAndSave() should be called additionally
   // @ return The value of configuration item
-  bool setString(const std::string &key, const std::string &value, const bool &save);
+  bool setString(const std::string &key, const std::string &value, bool save);
   
   bool setString(const std::string &, const std::string &);
 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/6617a836/src/test/feature/lib/yarn_config.cpp
----------------------------------------------------------------------
diff --git a/src/test/feature/lib/yarn_config.cpp b/src/test/feature/lib/yarn_config.cpp
index 79211be..5582c0a 100644
--- a/src/test/feature/lib/yarn_config.cpp
+++ b/src/test/feature/lib/yarn_config.cpp
@@ -28,7 +28,7 @@ string YarnConfig::getYarnUser() {
 bool YarnConfig::LoadFromHawqConfigFile() {
   const char *env = getenv("GPHOME");
   string confPath = env ? env : "";
-  if (!confPath.empty()) {
+  if (confPath != "") {
     confPath.append("/etc/yarn-client.xml");
   } else {
     return false;
@@ -41,31 +41,48 @@ bool YarnConfig::LoadFromHawqConfigFile() {
 
 bool YarnConfig::LoadFromYarnConfigFile() {
   string confPath=getHadoopHome();
-  if (confPath == "")
+  if (confPath == "") {
     return false;
+  }
   confPath.append("/etc/hadoop/yarn-site.xml");
   yarnxmlconf.reset(new XmlConfig(confPath));
   yarnxmlconf->parse();
   return true;
 }
 
-bool YarnConfig::isHA() {
-  bool ret = LoadFromHawqConfigFile();
+bool YarnConfig::isConfigYarn() {
+  bool ret = LoadFromYarnConfigFile();
   if (!ret) {
-    return false;
+    throw GetHadoopHomeException();
   }
-  string nameservice = hawqxmlconf->getString("yarn.resourcemanager.ha");
-  if (nameservice.length() > 0) {
-    return true;
-  } else {
+  string rm = yarnxmlconf->getString("yarn.resourcemanager.address.rm1");
+  if (rm == "") {
     return false;
   }
+  return true;
 }
 
-bool YarnConfig::isKerbos() {
+bool YarnConfig::isHA() {
+  const hawq::test::PSQLQueryResult &result = psql.getQueryResult(
+       "SELECT substring(fselocation from length('hdfs:// ') for (position('/' in substring(fselocation from length('hdfs:// ')))-1)::int) "
+       "FROM pg_filespace pgfs, pg_filespace_entry pgfse "
+       "WHERE pgfs.fsname = 'dfs_system' AND pgfse.fsefsoid=pgfs.oid ;");
+  std::vector<std::vector<string>> table = result.getRows();
+  if (table.size() > 0) {
+    int find = table[0][0].find(":");
+    if (find < 0) {
+      return true;
+    } else {
+      return false;
+    }
+  }
+  return false;
+}
+
+bool YarnConfig::isConfigKerberos() {
   bool ret = LoadFromHawqConfigFile();
   if (!ret) {
-    return false;
+    throw GetHawqHomeException();
   }
   string authentication = hawqxmlconf->getString("hadoop.security.authentication");
   if (authentication == "kerberos") {
@@ -108,8 +125,9 @@ bool YarnConfig::getStandbyRM(string &standbyRM,
 bool YarnConfig::getHARM(const string &RMtype,
                                string &RM,
                                int &port) {
-  if (!isHA())
+  if (!isHA()) {
     return false;
+  }
   string RMService = "";
   string haRMValue = "rm1,rm2";
   auto haRMs = hawq::test::split(haRMValue, ',');
@@ -132,7 +150,7 @@ bool YarnConfig::getHARM(const string &RMtype,
   }
   bool ret = LoadFromYarnConfigFile();
   if (!ret) {
-    return false;
+    throw GetHadoopHomeException();
   }
   string rpcAddressName = "yarn.resourcemanager.address.";
   rpcAddressName.append(RMService);
@@ -159,13 +177,14 @@ bool YarnConfig::getRMList(std::vector<string> &RMList,
 
   bool ret = LoadFromYarnConfigFile();
   if (!ret) {
-    return false;
+    throw GetHadoopHomeException();
   }
 
   string RMAddressName = "yarn.resourcemanager.address";
   string RMAddressValue = yarnxmlconf->getString(RMAddressName);
-  if (RMAddressValue == "")
+  if (RMAddressValue == "") {
     return false;
+  }
   auto RMInfo = hawq::test::split(RMAddressValue, ':');
   RM = hawq::test::trim(RMInfo[0]);
   port = std::stoi(hawq::test::trim(RMInfo[1]));
@@ -222,7 +241,7 @@ void YarnConfig::getActiveNodeManagers(std::vector<string> &nodemanagers,
 string YarnConfig::getParameterValue(const string &parameterName) {
   bool ret = LoadFromYarnConfigFile();
   if (!ret) {
-    return NULL;
+    throw GetHadoopHomeException();
   }
 
   return yarnxmlconf->getString(parameterName);
@@ -230,11 +249,12 @@ string YarnConfig::getParameterValue(const string &parameterName) {
 
 string YarnConfig::getParameterValue(const string &parameterName,
                                      const string &conftype) {
-  if (conftype == "yarn" || conftype == "YARN")
+  if (conftype == "yarn" || conftype == "YARN") {
     return getParameterValue(parameterName);
+  }
   bool ret = LoadFromHawqConfigFile();
   if (!ret) {
-    return NULL;
+    throw GetHadoopHomeException();
   }
 
   return hawqxmlconf->getString(parameterName);
@@ -244,7 +264,7 @@ bool YarnConfig::setParameterValue(const string &parameterName,
                                    const string &parameterValue) {
   bool ret = LoadFromYarnConfigFile();
   if (!ret) {
-    return false;
+    throw GetHadoopHomeException();
   }
 
   return yarnxmlconf->setString(parameterName, parameterValue);
@@ -253,11 +273,12 @@ bool YarnConfig::setParameterValue(const string &parameterName,
 bool YarnConfig::setParameterValue(const string &parameterName,
                                    const string &parameterValue,
                                    const string &conftype) {
-  if (conftype == "yarn" || conftype == "YARN")
+  if (conftype == "yarn" || conftype == "YARN") {
     return setParameterValue(parameterName, parameterValue);
+  }
   bool ret = LoadFromHawqConfigFile();
   if (!ret) {
-    return false;
+    throw GetHawqHomeException();
   }
 
   return hawqxmlconf->setString(parameterName, parameterValue);

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/6617a836/src/test/feature/lib/yarn_config.h
----------------------------------------------------------------------
diff --git a/src/test/feature/lib/yarn_config.h b/src/test/feature/lib/yarn_config.h
index 559daca..cccf5e9 100644
--- a/src/test/feature/lib/yarn_config.h
+++ b/src/test/feature/lib/yarn_config.h
@@ -7,6 +7,7 @@
 #include "psql.h"
 #include "sql_util.h"
 #include "xml_parser.h"
+#include "hdfs_config.h"
 
 namespace hawq {
 namespace test {
@@ -24,21 +25,27 @@ class YarnConfig {
     YarnConfig(): psql(HAWQ_DB, HAWQ_HOST, HAWQ_PORT, HAWQ_USER, HAWQ_PASSWORD) {}
 
     /**
-      * YarnConfig desstructor
+      * YarnConfig destructor
       */
     ~YarnConfig()  {}
 
     /**
+     * whether YARN is configured
+     * @return true if YARN is configured; if return false, following functions should not be called
+     */
+    bool isConfigYarn();
+
+    /**
      * whether YARN is in HA mode
      * @return true if YARN is HA
      */
     bool isHA();
 
     /**
-     * whether YARN is kerbos
+     * whether YARN is kerberos
      * @return true if YARN is kerbos
      */
-    bool isKerbos();
+    bool isConfigKerberos();
 
     /**
      * get HADOOP working directory

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/6617a836/src/test/feature/testlib/test_lib.cpp
----------------------------------------------------------------------
diff --git a/src/test/feature/testlib/test_lib.cpp b/src/test/feature/testlib/test_lib.cpp
index a577c93..a58a0d9 100644
--- a/src/test/feature/testlib/test_lib.cpp
+++ b/src/test/feature/testlib/test_lib.cpp
@@ -60,65 +60,80 @@ TEST_F(TestCommonLib, TestHawqConfig) {
 
 TEST_F(TestCommonLib, TestHdfsConfig) {
   hawq::test::HdfsConfig hc;
-  hc.isHA();
-  hc.isKerbos();
-  hc.isTruncate();
-  std::string hadoopHome = hc.getHadoopHome();
-
-  std::string hostname = "";
-  int port = 0;
-  hc.getActiveNamenode(hostname, port);
-
-  hostname = "";
-  port = 0;
-  hc.getStandbyNamenode(hostname, port);
-
-  std::vector<std::string> hostList;
-  std::vector<int> portList;
-  hc.getNamenodes(hostList, portList);
-
-  hostList.clear();
-  portList.clear();
-  hc.getDatanodelist(hostList, portList);
-
-  hostList.clear();
-  portList.clear();
-  hc.getActiveDatanodes(hostList, portList);
-
-  hc.isSafemode();
-
-  hc.getParameterValue("dfs.replication");
-  hc.setParameterValue("dfs.replication", "1");
+  try {
+    hc.isHA();
+    hc.isConfigKerberos();
+    hc.isTruncate();
+    std::string hadoopHome = hc.getHadoopHome();
+
+    std::string hostname = "";
+    int port = 0;
+    hc.getActiveNamenode(hostname, port);
+
+    hostname = "";
+    port = 0;
+    hc.getStandbyNamenode(hostname, port);
+
+    std::vector<std::string> hostList;
+    std::vector<int> portList;
+    hc.getNamenodes(hostList, portList);
+
+    hostList.clear();
+    portList.clear();
+    hc.getDatanodelist(hostList, portList);
+
+    hostList.clear();
+    portList.clear();
+    hc.getActiveDatanodes(hostList, portList);
+
+    hc.isSafemode();
+
+    hc.getParameterValue("dfs.replication");
+    hc.setParameterValue("dfs.replication", "1");
+  } catch (hawq::test::GetHawqHomeException &e) {
+    printf("Failed to get HAWQ home!");
+  } catch (hawq::test::GetHadoopHomeException &e) {
+    printf("Failed to get HADOOP home!");
+  }
 }
 
 TEST_F(TestCommonLib, TestYarnConfig) {
   hawq::test::YarnConfig hc;
-  hc.isHA();
-  hc.isKerbos();
-  std::string hadoopHome = hc.getHadoopHome();
-
-  std::string hostname = "";
-  int port = 0;
-  hc.getActiveRM(hostname, port);
-
-  hostname = "";
-  port = 0;
-  hc.getStandbyRM(hostname, port);
-
-  std::vector<std::string> hostList;
-  std::vector<int> portList;
-  hc.getRMList(hostList, portList);
-
-  hostList.clear();
-  portList.clear();
-  hc.getNodeManagers(hostList, portList);
-
-  hostList.clear();
-  portList.clear();
-  hc.getActiveNodeManagers(hostList, portList);
-
-  hc.getParameterValue("yarn.scheduler.minimum-allocation-mb");
-  hc.setParameterValue("yarn.scheduler.minimum-allocation-mb", "1024");
+  if (!hc.isConfigYarn()) {
+    return;
+  }
+  try {
+    hc.isHA();
+    hc.isConfigKerberos();
+    std::string hadoopHome = hc.getHadoopHome();
+
+    std::string hostname = "";
+    int port = 0;
+    hc.getActiveRM(hostname, port);
+
+    hostname = "";
+    port = 0;
+    hc.getStandbyRM(hostname, port);
+
+    std::vector<std::string> hostList;
+    std::vector<int> portList;
+    hc.getRMList(hostList, portList);
+
+    hostList.clear();
+    portList.clear();
+    hc.getNodeManagers(hostList, portList);
+
+    hostList.clear();
+    portList.clear();
+    hc.getActiveNodeManagers(hostList, portList);
+
+    hc.getParameterValue("yarn.scheduler.minimum-allocation-mb");
+    hc.setParameterValue("yarn.scheduler.minimum-allocation-mb", "1024");
+  } catch (hawq::test::GetHawqHomeException &e) {
+    printf("Failed to get HAWQ home!");
+  } catch (hawq::test::GetHadoopHomeException &e) {
+    printf("Failed to get HADOOP home!");
+  }
 }
 
 TEST_F(TestCommonLib, TestCommand) {

Reply via email to