AMBARI-4906 - Make use of new stack metric definitions in SQLProviderModule


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/872d3c76
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/872d3c76
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/872d3c76

Branch: refs/heads/trunk
Commit: 872d3c76b575eccb99b5e7af9ff3a7f2d328eabe
Parents: ac56341
Author: Artem Baranchuk <[email protected]>
Authored: Fri Feb 28 01:10:34 2014 +0200
Committer: Artem Baranchuk <[email protected]>
Committed: Mon Mar 3 17:42:26 2014 +0200

----------------------------------------------------------------------
 .../ambari-scom-server/conf/ambari.properties   |     2 +-
 .../apache/ambari/msi/ClusterDefinition.java    |     8 +-
 .../org/apache/ambari/scom/AmbariServer.java    |     4 +-
 .../apache/ambari/scom/SQLProviderModule.java   |    99 +-
 .../ambari/scom/utilities/SCOMMetricHelper.java |   119 +
 .../src/main/resources/jmx_properties.json      |  9312 +++++++
 .../main/resources/sqlserver_properties.json    | 23142 +++++++++++++++++
 7 files changed, 32631 insertions(+), 55 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/872d3c76/contrib/ambari-scom/ambari-scom-server/conf/ambari.properties
----------------------------------------------------------------------
diff --git a/contrib/ambari-scom/ambari-scom-server/conf/ambari.properties b/contrib/ambari-scom/ambari-scom-server/conf/ambari.properties
index 06cfa3b..eab7310 100644
--- a/contrib/ambari-scom/ambari-scom-server/conf/ambari.properties
+++ b/contrib/ambari-scom/ambari-scom-server/conf/ambari.properties
@@ -16,7 +16,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 #
 
 server.connection.max.idle.millis=900000
@@ -24,5 +23,6 @@ api.authenticate=false
 server.os_type=windows2012
 server.persistence.type=in-memory
 security.passwords.encryption.enabled=false
+scom.version.id=HDP-2.0.6
 scom.sink.db.driver=com.microsoft.sqlserver.jdbc.SQLServerDriver
 
 scom.sink.db.url=jdbc:sqlserver://[server]:[port];databaseName=[databaseName];user=[user];password=[password]

http://git-wip-us.apache.org/repos/asf/ambari/blob/872d3c76/contrib/ambari-scom/ambari-scom-server/src/main/java/org/apache/ambari/msi/ClusterDefinition.java
----------------------------------------------------------------------
diff --git a/contrib/ambari-scom/ambari-scom-server/src/main/java/org/apache/ambari/msi/ClusterDefinition.java b/contrib/ambari-scom/ambari-scom-server/src/main/java/org/apache/ambari/msi/ClusterDefinition.java
index a2da723..e21afa3 100644
--- a/contrib/ambari-scom/ambari-scom-server/src/main/java/org/apache/ambari/msi/ClusterDefinition.java
+++ b/contrib/ambari-scom/ambari-scom-server/src/main/java/org/apache/ambari/msi/ClusterDefinition.java
@@ -93,7 +93,7 @@ public class ClusterDefinition {
       if(majorStackVersion == 1) {
         Set<String> mapReduceComponents = new HashSet<String>();
         mapReduceComponents.add("JOBTRACKER");
-        mapReduceComponents.add("HISTORY_SERVER");
+        mapReduceComponents.add("HISTORYSERVER");
         componentNameMap.put("JOBTRACKER_HOST", mapReduceComponents);
 
         slaveComponents.add("TASKTRACKER");
@@ -102,7 +102,7 @@ public class ClusterDefinition {
         componentNameMap.put("JOURNALNODE_HOST", Collections.singleton("JOURNALNODE"));
 
         Set<String> mapReduce2Components = new HashSet<String>();
-        mapReduce2Components.add("HISTORY_SERVER");
+        mapReduce2Components.add("HISTORYSERVER");
         mapReduce2Components.add("RESOURCEMANAGER");
         componentNameMap.put("RESOURCEMANAGER_HOST", mapReduce2Components);
 
@@ -139,11 +139,11 @@ public class ClusterDefinition {
     if(majorStackVersion != null) {
       if(majorStackVersion == 1) {
         componentServiceMap.put("JOBTRACKER",         "MAPREDUCE");
-        componentServiceMap.put("HISTORY_SERVER",     "MAPREDUCE");
+        componentServiceMap.put("HISTORYSERVER",     "MAPREDUCE");
         componentServiceMap.put("TASKTRACKER",        "MAPREDUCE");
       }
       if(majorStackVersion == 2) {
-        componentServiceMap.put("HISTORY_SERVER",     "MAPREDUCE2");
+        componentServiceMap.put("HISTORYSERVER",      "MAPREDUCE2");
         componentServiceMap.put("JOURNALNODE",        "HDFS");
         componentServiceMap.put("NODEMANAGER",        "YARN");
         componentServiceMap.put("RESOURCEMANAGER",    "YARN");

http://git-wip-us.apache.org/repos/asf/ambari/blob/872d3c76/contrib/ambari-scom/ambari-scom-server/src/main/java/org/apache/ambari/scom/AmbariServer.java
----------------------------------------------------------------------
diff --git a/contrib/ambari-scom/ambari-scom-server/src/main/java/org/apache/ambari/scom/AmbariServer.java b/contrib/ambari-scom/ambari-scom-server/src/main/java/org/apache/ambari/scom/AmbariServer.java
index c43d826..a5071cb 100644
--- a/contrib/ambari-scom/ambari-scom-server/src/main/java/org/apache/ambari/scom/AmbariServer.java
+++ b/contrib/ambari-scom/ambari-scom-server/src/main/java/org/apache/ambari/scom/AmbariServer.java
@@ -121,7 +121,9 @@ public class AmbariServer {
       SinkConnectionFactory.instance().init(ambariServer.configuration);
       ClusterDefinitionProvider.instance().init(ambariServer.configuration);
 
-      ambariServer.run();
+      if (ambariServer != null) {
+        ambariServer.run();
+      }
     } catch (Throwable t) {
       LOG.error("Failed to run the Ambari Server", t);
       if (ambariServer != null) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/872d3c76/contrib/ambari-scom/ambari-scom-server/src/main/java/org/apache/ambari/scom/SQLProviderModule.java
----------------------------------------------------------------------
diff --git a/contrib/ambari-scom/ambari-scom-server/src/main/java/org/apache/ambari/scom/SQLProviderModule.java b/contrib/ambari-scom/ambari-scom-server/src/main/java/org/apache/ambari/scom/SQLProviderModule.java
index cbf2bfe..a4503b6 100644
--- a/contrib/ambari-scom/ambari-scom-server/src/main/java/org/apache/ambari/scom/SQLProviderModule.java
+++ b/contrib/ambari-scom/ambari-scom-server/src/main/java/org/apache/ambari/scom/SQLProviderModule.java
@@ -21,6 +21,7 @@ package org.apache.ambari.scom;
 import org.apache.ambari.msi.AbstractResourceProvider;
 import org.apache.ambari.msi.ClusterDefinition;
 import org.apache.ambari.msi.StateProvider;
+import org.apache.ambari.scom.utilities.SCOMMetricHelper;
 import org.apache.ambari.server.configuration.ComponentSSLConfiguration;
 import org.apache.ambari.server.controller.internal.DefaultProviderModule;
 import org.apache.ambari.server.controller.internal.URLStreamProvider;
@@ -52,7 +53,7 @@ public class SQLProviderModule extends DefaultProviderModule implements HostInfo
   private final ConnectionFactory connectionFactory = 
SinkConnectionFactory.instance();
   private final ComponentSSLConfiguration sslConfiguration = 
ComponentSSLConfiguration.instance();
   private final URLStreamProvider urlStreamProvider = new 
URLStreamProvider(5000, 10000,
-      sslConfiguration.getTruststorePath(), 
sslConfiguration.getTruststorePassword(), sslConfiguration.getTruststoreType());
+          sslConfiguration.getTruststorePath(), 
sslConfiguration.getTruststorePassword(), sslConfiguration.getTruststoreType());
 
 
   // ----- Constants ---------------------------------------------------------
@@ -60,25 +61,25 @@ public class SQLProviderModule extends DefaultProviderModule implements HostInfo
   private static Map<String, String> serviceNames = new HashMap<String, 
String>();
 
   static {
-    serviceNames.put("NAMENODE",           "namenode");
+    serviceNames.put("NAMENODE", "namenode");
     serviceNames.put("SECONDARY_NAMENODE", "secondarynamenode");
-    serviceNames.put("JOBTRACKER",         "jobtracker");
-    serviceNames.put("HISTORY_SERVER",     "historyserver");
-    serviceNames.put("HIVE_SERVER",        "hiveserver");
-    serviceNames.put("HIVE_SERVER2",       "hiveserver2");
-    serviceNames.put("HIVE_METASTORE",     "metastore");
-    serviceNames.put("HIVE_CLIENT",        "hwi");
-    serviceNames.put("OOZIE_SERVER",       "oozieservice");
-    serviceNames.put("FLUME_SERVER",       "flumagent");
-    serviceNames.put("HBASE_MASTER",       "master");
+    serviceNames.put("JOBTRACKER", "jobtracker");
+    serviceNames.put("HISTORYSERVER", "historyserver");
+    serviceNames.put("HIVE_SERVER", "hiveserver");
+    serviceNames.put("HIVE_SERVER2", "hiveserver2");
+    serviceNames.put("HIVE_METASTORE", "metastore");
+    serviceNames.put("HIVE_CLIENT", "hwi");
+    serviceNames.put("OOZIE_SERVER", "oozieservice");
+    serviceNames.put("FLUME_SERVER", "flumagent");
+    serviceNames.put("HBASE_MASTER", "master");
     serviceNames.put("HBASE_REGIONSERVER", "regionserver");
-    serviceNames.put("ZOOKEEPER_SERVER",   "zkServer");
-    serviceNames.put("DATANODE",           "datanode");
-    serviceNames.put("TASKTRACKER",        "tasktracker");
-    serviceNames.put("WEBHCAT_SERVER",     "templeton");
-    serviceNames.put("NODEMANAGER",        "nodemanager");
-    serviceNames.put("RESOURCEMANAGER",    "resourcemanager");
-    serviceNames.put("JOURNALNODE",        "journalnode");
+    serviceNames.put("ZOOKEEPER_SERVER", "zkServer");
+    serviceNames.put("DATANODE", "datanode");
+    serviceNames.put("TASKTRACKER", "tasktracker");
+    serviceNames.put("WEBHCAT_SERVER", "templeton");
+    serviceNames.put("NODEMANAGER", "nodemanager");
+    serviceNames.put("RESOURCEMANAGER", "resourcemanager");
+    serviceNames.put("JOURNALNODE", "journalnode");
   }
 
   private static final String STATE_PREFIX = "STATE              : ";
@@ -105,40 +106,40 @@ public class SQLProviderModule extends DefaultProviderModule implements HostInfo
 
     if (type.equals(Resource.Type.Component)) {
       providers.add(new JMXPropertyProvider(
-          PropertyHelper.getJMXPropertyIds(type),
-          urlStreamProvider,
-          this,
-          PropertyHelper.getPropertyId("ServiceComponentInfo", "cluster_name"),
-          null,
-          PropertyHelper.getPropertyId("ServiceComponentInfo", 
"component_name"),
-          PropertyHelper.getPropertyId("ServiceComponentInfo", "state"),
-          Collections.singleton("STARTED")));
+              SCOMMetricHelper.getJMXPropertyIds(type),
+              urlStreamProvider,
+              this,
+              PropertyHelper.getPropertyId("ServiceComponentInfo", 
"cluster_name"),
+              null,
+              PropertyHelper.getPropertyId("ServiceComponentInfo", 
"component_name"),
+              PropertyHelper.getPropertyId("ServiceComponentInfo", "state"),
+              Collections.singleton("STARTED")));
 
       providers.add(new SQLPropertyProvider(
-          PropertyHelper.getGangliaPropertyIds(type),
-          this,
-          PropertyHelper.getPropertyId("ServiceComponentInfo", "cluster_name"),
-          null,
-          PropertyHelper.getPropertyId("ServiceComponentInfo", 
"component_name"),
-          connectionFactory));
+              SCOMMetricHelper.getSqlServerPropertyIds(type),
+              this,
+              PropertyHelper.getPropertyId("ServiceComponentInfo", 
"cluster_name"),
+              null,
+              PropertyHelper.getPropertyId("ServiceComponentInfo", 
"component_name"),
+              connectionFactory));
     } else if (type.equals(Resource.Type.HostComponent)) {
       providers.add(new JMXPropertyProvider(
-          PropertyHelper.getJMXPropertyIds(type),
-          urlStreamProvider,
-          this,
-          PropertyHelper.getPropertyId("HostRoles", "cluster_name"),
-          PropertyHelper.getPropertyId("HostRoles", "host_name"),
-          PropertyHelper.getPropertyId("HostRoles", "component_name"),
-          PropertyHelper.getPropertyId("HostRoles", "state"),
-          Collections.singleton("STARTED")));
+              SCOMMetricHelper.getJMXPropertyIds(type),
+              urlStreamProvider,
+              this,
+              PropertyHelper.getPropertyId("HostRoles", "cluster_name"),
+              PropertyHelper.getPropertyId("HostRoles", "host_name"),
+              PropertyHelper.getPropertyId("HostRoles", "component_name"),
+              PropertyHelper.getPropertyId("HostRoles", "state"),
+              Collections.singleton("STARTED")));
 
       providers.add(new SQLPropertyProvider(
-          PropertyHelper.getGangliaPropertyIds(type),
-          this,
-          PropertyHelper.getPropertyId("HostRoles", "cluster_name"),
-          PropertyHelper.getPropertyId("HostRoles", "host_name"),
-          PropertyHelper.getPropertyId("HostRoles", "component_name"),
-          connectionFactory));
+              SCOMMetricHelper.getSqlServerPropertyIds(type),
+              this,
+              PropertyHelper.getPropertyId("HostRoles", "cluster_name"),
+              PropertyHelper.getPropertyId("HostRoles", "host_name"),
+              PropertyHelper.getPropertyId("HostRoles", "component_name"),
+              connectionFactory));
     }
     putPropertyProviders(type, providers);
   }
@@ -251,7 +252,7 @@ public class SQLProviderModule extends DefaultProviderModule implements HostInfo
   // get the response text from a completed process stream
   private static String getProcessResponse(InputStream stream) {
 
-    StringBuilder  sb       = new StringBuilder();
+    StringBuilder sb = new StringBuilder();
     BufferedReader stdInput = new BufferedReader(new 
InputStreamReader(stream));
 
     try {
@@ -303,7 +304,7 @@ public class SQLProviderModule extends DefaultProviderModule implements HostInfo
 
       String processResponse = getProcessResponse(process.getInputStream());
 
-      if (!isRunning()){
+      if (!isRunning()) {
         output = processResponse;
       }
 
@@ -318,7 +319,7 @@ public class SQLProviderModule extends DefaultProviderModule implements HostInfo
 
       String processResponse = getProcessResponse(process.getErrorStream());
 
-      if (!isRunning()){
+      if (!isRunning()) {
         error = processResponse;
       }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/872d3c76/contrib/ambari-scom/ambari-scom-server/src/main/java/org/apache/ambari/scom/utilities/SCOMMetricHelper.java
----------------------------------------------------------------------
diff --git a/contrib/ambari-scom/ambari-scom-server/src/main/java/org/apache/ambari/scom/utilities/SCOMMetricHelper.java b/contrib/ambari-scom/ambari-scom-server/src/main/java/org/apache/ambari/scom/utilities/SCOMMetricHelper.java
new file mode 100644
index 0000000..ee1210f
--- /dev/null
+++ b/contrib/ambari-scom/ambari-scom-server/src/main/java/org/apache/ambari/scom/utilities/SCOMMetricHelper.java
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.scom.utilities;
+
+import org.apache.ambari.server.controller.internal.PropertyInfo;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.type.TypeReference;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Utility class that provides Property helper methods.
+ */
+public class SCOMMetricHelper {
+  private static final String SQLSERVER_PROPERTIES_FILE = 
"sqlserver_properties.json";
+  private static final String JMX_PROPERTIES_FILE = "jmx_properties.json";
+
+  private static final Map<Resource.InternalType, Map<String, Map<String, 
PropertyInfo>>> JMX_PROPERTY_IDS = readPropertyProviderIds(JMX_PROPERTIES_FILE);
+  private static final Map<Resource.InternalType, Map<String, Map<String, 
PropertyInfo>>> SQLSERVER_PROPERTY_IDS = 
readPropertyProviderIds(SQLSERVER_PROPERTIES_FILE);
+
+  public static Map<String, Map<String, PropertyInfo>> 
getSqlServerPropertyIds(Resource.Type resourceType) {
+    return SQLSERVER_PROPERTY_IDS.get(resourceType.getInternalType());
+  }
+
+  public static Map<String, Map<String, PropertyInfo>> 
getJMXPropertyIds(Resource.Type resourceType) {
+    return JMX_PROPERTY_IDS.get(resourceType.getInternalType());
+  }
+
+  protected static class Metric {
+    private String metric;
+    private boolean pointInTime;
+    private boolean temporal;
+
+    private Metric() {
+    }
+
+    protected Metric(String metric, boolean pointInTime, boolean temporal) {
+      this.metric = metric;
+      this.pointInTime = pointInTime;
+      this.temporal = temporal;
+    }
+
+    public String getMetric() {
+      return metric;
+    }
+
+    public void setMetric(String metric) {
+      this.metric = metric;
+    }
+
+    public boolean isPointInTime() {
+      return pointInTime;
+    }
+
+    public void setPointInTime(boolean pointInTime) {
+      this.pointInTime = pointInTime;
+    }
+
+    public boolean isTemporal() {
+      return temporal;
+    }
+
+    public void setTemporal(boolean temporal) {
+      this.temporal = temporal;
+    }
+  }
+
+  private static Map<Resource.InternalType, Map<String, Map<String, 
PropertyInfo>>> readPropertyProviderIds(String filename) {
+    ObjectMapper mapper = new ObjectMapper();
+
+    try {
+      Map<Resource.InternalType, Map<String, Map<String, Metric>>> 
resourceMetricMap =
+              mapper.readValue(ClassLoader.getSystemResourceAsStream(filename),
+                      new TypeReference<Map<Resource.InternalType, Map<String, 
Map<String, Metric>>>>() {});
+
+      Map<Resource.InternalType, Map<String, Map<String, PropertyInfo>>> 
resourceMetrics =
+              new HashMap<Resource.InternalType, Map<String, Map<String, 
PropertyInfo>>>();
+
+      for (Map.Entry<Resource.InternalType, Map<String, Map<String, Metric>>> 
resourceEntry : resourceMetricMap.entrySet()) {
+        Map<String, Map<String, PropertyInfo>> componentMetrics = new 
HashMap<String, Map<String, PropertyInfo>>();
+
+        for (Map.Entry<String, Map<String, Metric>> componentEntry : 
resourceEntry.getValue().entrySet()) {
+          Map<String, PropertyInfo> metrics = new HashMap<String, 
PropertyInfo>();
+
+          for (Map.Entry<String, Metric> metricEntry : 
componentEntry.getValue().entrySet()) {
+            String property = metricEntry.getKey();
+            Metric metric = metricEntry.getValue();
+
+            metrics.put(property, new PropertyInfo(metric.getMetric(), 
metric.isTemporal(), metric.isPointInTime()));
+          }
+          componentMetrics.put(componentEntry.getKey(), metrics);
+        }
+        resourceMetrics.put(resourceEntry.getKey(), componentMetrics);
+      }
+      return resourceMetrics;
+    }
+    catch (IOException e) {
+      throw new IllegalStateException("Can't read properties file " + 
filename, e);
+    }
+  }
+}

Reply via email to