AMBARI-18125 : Allow for certain metrics to skip aggregation determined by 
client. (avijayan)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f5ad1de8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f5ad1de8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f5ad1de8

Branch: refs/heads/branch-dev-patch-upgrade
Commit: f5ad1de8689fc45980e5439d873a9331ea6f4723
Parents: 0e34358
Author: Aravindan Vijayan <avija...@hortonworks.com>
Authored: Wed Sep 14 15:03:23 2016 -0700
Committer: Aravindan Vijayan <avija...@hortonworks.com>
Committed: Wed Sep 14 15:03:34 2016 -0700

----------------------------------------------------------------------
 .../metrics2/sink/timeline/TimelineMetric.java  |  11 ++
 .../timeline/HadoopTimelineMetricsSink.java     |  35 ++++--
 .../timeline/HadoopTimelineMetricsSinkTest.java |   4 +-
 .../src/main/python/core/host_info.py           |   4 +-
 .../src/test/python/core/TestHostInfo.py        |  43 +++----
 .../timeline/TimelineMetricConfiguration.java   |   3 +
 .../TimelineMetricClusterAggregatorSecond.java  |  30 ++++-
 .../TimelineMetricMetadataManager.java          |   9 +-
 .../metrics/timeline/query/Condition.java       |   1 +
 .../timeline/query/DefaultCondition.java        |  84 +++++++++++---
 .../metrics/timeline/query/EmptyCondition.java  |   6 +
 .../query/SplitByMetricNamesCondition.java      |   6 +
 .../timeline/TestPhoenixTransactSQL.java        |  50 ++++----
 ...melineMetricClusterAggregatorSecondTest.java |   5 +
 .../timeline/query/DefaultConditionTest.java    | 116 +++++++++++++++++++
 .../0.1.0/configuration/ams-site.xml            |  12 ++
 16 files changed, 340 insertions(+), 79 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f5ad1de8/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetric.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetric.java
 
b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetric.java
index 98f4978..44c9d4a 100644
--- 
a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetric.java
+++ 
b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetric.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.metrics2.sink.timeline;
 
+import java.util.HashMap;
 import java.util.Map;
 import java.util.TreeMap;
 
@@ -44,6 +45,7 @@ public class TimelineMetric implements 
Comparable<TimelineMetric> {
   private String type;
   private String units;
   private TreeMap<Long, Double> metricValues = new TreeMap<Long, Double>();
+  private Map<String, String> metadata = new HashMap<>();
 
   // default
   public TimelineMetric() {
@@ -148,6 +150,15 @@ public class TimelineMetric implements 
Comparable<TimelineMetric> {
     this.metricValues.putAll(metricValues);
   }
 
+  @XmlElement(name = "metadata")
+  public Map<String,String> getMetadata () {
+    return metadata;
+  }
+
+  public void setMetadata (Map<String,String> metadata) {
+    this.metadata = metadata;
+  }
+
   @Override
   public boolean equals(Object o) {
     if (this == o) return true;

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5ad1de8/ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java
 
b/ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java
index c534121..b720ba9 100644
--- 
a/ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java
+++ 
b/ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java
@@ -34,6 +34,7 @@ import java.io.IOException;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -228,6 +229,7 @@ public class HadoopTimelineMetricsSink extends 
AbstractTimelineMetricsSink imple
       String contextName = record.context();
 
       StringBuilder sb = new StringBuilder();
+      boolean skipAggregation = false;
 
       // Transform ipc.8020 -> ipc.client,  ipc.8040 -> ipc.datanode, etc.
       if (contextName.startsWith("ipc.")) {
@@ -239,17 +241,19 @@ public class HadoopTimelineMetricsSink extends 
AbstractTimelineMetricsSink imple
 
       sb.append(contextName);
       sb.append('.');
-      // Similar to GangliaContext adding processName to distinguish jvm
-      // metrics for co-hosted daemons. We only do this for HBase since the
-      // appId is shared for Master and RS.
-      if (contextName.equals("jvm")) {
-        if (record.tags() != null) {
-          for (MetricsTag tag : record.tags()) {
-            if (tag.info().name().equalsIgnoreCase("processName") &&
-               (tag.value().equals("RegionServer") || 
tag.value().equals("Master"))) {
-              sb.append(tag.value());
-              sb.append('.');
-            }
+
+      if (record.tags() != null) {
+        for (MetricsTag tag : record.tags()) {
+          if (StringUtils.isNotEmpty(tag.name()) && 
tag.name().equals("skipAggregation")) {
+            skipAggregation = String.valueOf(true).equals(tag.value());
+          }
+                // Similar to GangliaContext adding processName to distinguish 
jvm
+                // metrics for co-hosted daemons. We only do this for HBase 
since the
+                // appId is shared for Master and RS.
+          if (contextName.equals("jvm") && 
tag.info().name().equalsIgnoreCase("processName") &&
+            (tag.value().equals("RegionServer") || 
tag.value().equals("Master"))) {
+            sb.append(tag.value());
+            sb.append('.');
           }
         }
       }
@@ -281,6 +285,12 @@ public class HadoopTimelineMetricsSink extends 
AbstractTimelineMetricsSink imple
       Collection<AbstractMetric> metrics = (Collection<AbstractMetric>) 
record.metrics();
 
       List<TimelineMetric> metricList = new ArrayList<TimelineMetric>();
+      Map<String, String> metadata = null;
+      if (skipAggregation) {
+        metadata = Collections.singletonMap("skipAggregation", "true");
+      }
+
+
       long startTime = record.timestamp();
 
       for (AbstractMetric metric : metrics) {
@@ -294,6 +304,9 @@ public class HadoopTimelineMetricsSink extends 
AbstractTimelineMetricsSink imple
         timelineMetric.setStartTime(startTime);
         timelineMetric.setType(metric.type() != null ? metric.type().name() : 
null);
         timelineMetric.getMetricValues().put(startTime, value.doubleValue());
+        if (metadata != null) {
+          timelineMetric.setMetadata(metadata);
+        }
         // Put intermediate values into the cache until it is time to send
         boolean isCounter = MetricType.COUNTER == metric.type();
         metricsCache.putTimelineMetric(timelineMetric, isCounter);

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5ad1de8/ambari-metrics/ambari-metrics-hadoop-sink/src/test/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSinkTest.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-hadoop-sink/src/test/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSinkTest.java
 
b/ambari-metrics/ambari-metrics-hadoop-sink/src/test/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSinkTest.java
index ea7f72d..f44ca35 100644
--- 
a/ambari-metrics/ambari-metrics-hadoop-sink/src/test/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSinkTest.java
+++ 
b/ambari-metrics/ambari-metrics-hadoop-sink/src/test/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSinkTest.java
@@ -362,8 +362,8 @@ public class HadoopTimelineMetricsSinkTest {
         }
       }, "8040")
     );
-    expect(record.tags()).andReturn(tags1).times(6);
-    expect(record.tags()).andReturn(tags2).times(6);
+    expect(record.tags()).andReturn(tags1).times(12);
+    expect(record.tags()).andReturn(tags2).times(12);
 
     sink.appendPrefix(eq(record), (StringBuilder) anyObject());
     expectLastCall().anyTimes().andStubAnswer(new IAnswer<Object>() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5ad1de8/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/host_info.py
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/host_info.py
 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/host_info.py
index f79cacd..632c86b 100644
--- 
a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/host_info.py
+++ 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/host_info.py
@@ -248,8 +248,10 @@ class HostInfo():
 
     skip_disk_patterns = self.__config.get_disk_metrics_skip_pattern()
     logger.debug('skip_disk_patterns: %s' % skip_disk_patterns)
+    print skip_disk_patterns
     if not skip_disk_patterns or skip_disk_patterns == 'None':
       io_counters = psutil.disk_io_counters()
+      print io_counters
     else:
       sdiskio = namedtuple('sdiskio', ['read_count', 'write_count',
                                        'read_bytes', 'write_bytes',
@@ -303,7 +305,7 @@ class HostInfo():
         disk = item[0]
         logger.debug('Adding disk counters for %s' % str(disk))
         sdiskio = item[1]
-        prefix = 'disk_{0}_'.format(disk_counter)
+        prefix = 'sdisk_{0}_'.format(disk)
         counter_dict = {
           prefix + 'read_count' : sdiskio.read_count if hasattr(sdiskio, 
'read_count') else 0,
           prefix + 'write_count' : sdiskio.write_count if hasattr(sdiskio, 
'write_count') else 0,

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5ad1de8/ambari-metrics/ambari-metrics-host-monitoring/src/test/python/core/TestHostInfo.py
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-host-monitoring/src/test/python/core/TestHostInfo.py
 
b/ambari-metrics/ambari-metrics-host-monitoring/src/test/python/core/TestHostInfo.py
index d3d3f05..63a1ae1 100644
--- 
a/ambari-metrics/ambari-metrics-host-monitoring/src/test/python/core/TestHostInfo.py
+++ 
b/ambari-metrics/ambari-metrics-host-monitoring/src/test/python/core/TestHostInfo.py
@@ -23,6 +23,7 @@ from host_info import HostInfo
 import platform
 from unittest import TestCase
 from mock.mock import patch, MagicMock
+from core.config_reader import Configuration
 import collections
 
 logger = logging.getLogger()
@@ -138,7 +139,9 @@ class TestHostInfo(TestCase):
                                                   'read_time', 'write_time'])
     io_mock.return_value = Counters(0, 1, 2, 3, 4, 5)
 
-    hostinfo = HostInfo(MagicMock())
+    c = MagicMock()
+    c.get_disk_metrics_skip_pattern.return_value = None
+    hostinfo = HostInfo(c)
 
     disk_counters = hostinfo.get_combined_disk_io_counters()
 
@@ -179,28 +182,28 @@ class TestHostInfo(TestCase):
 
     disk_counter_per_disk = hostinfo.get_disk_io_counters_per_disk()
 
-    # Assert for sda1
-    self.assertEqual(disk_counter_per_disk['disk_1_read_count'], 0)
-    self.assertEqual(disk_counter_per_disk['disk_1_write_count'], 1)
-    self.assertEqual(disk_counter_per_disk['disk_1_read_bytes'], 2)
-    self.assertEqual(disk_counter_per_disk['disk_1_write_bytes'], 3)
-    self.assertEqual(disk_counter_per_disk['disk_1_read_time'], 4)
-    self.assertEqual(disk_counter_per_disk['disk_1_write_time'], 5)
-    self.assertEqual(disk_counter_per_disk['disk_1_busy_time'], 6)
-    self.assertEqual(disk_counter_per_disk['disk_1_read_merged_count'], 7)
-    self.assertEqual(disk_counter_per_disk['disk_1_write_merged_count'], 8)
+    # Assert for sdisk_sda1
+    self.assertEqual(disk_counter_per_disk['sdisk_sda1_read_count'], 0)
+    self.assertEqual(disk_counter_per_disk['sdisk_sda1_write_count'], 1)
+    self.assertEqual(disk_counter_per_disk['sdisk_sda1_read_bytes'], 2)
+    self.assertEqual(disk_counter_per_disk['sdisk_sda1_write_bytes'], 3)
+    self.assertEqual(disk_counter_per_disk['sdisk_sda1_read_time'], 4)
+    self.assertEqual(disk_counter_per_disk['sdisk_sda1_write_time'], 5)
+    self.assertEqual(disk_counter_per_disk['sdisk_sda1_busy_time'], 6)
+    self.assertEqual(disk_counter_per_disk['sdisk_sda1_read_merged_count'], 7)
+    self.assertEqual(disk_counter_per_disk['sdisk_sda1_write_merged_count'], 8)
 
     # Assert for sdb1
 
-    self.assertEqual(disk_counter_per_disk['disk_2_read_count'], 9)
-    self.assertEqual(disk_counter_per_disk['disk_2_write_count'], 10)
-    self.assertEqual(disk_counter_per_disk['disk_2_read_bytes'], 11)
-    self.assertEqual(disk_counter_per_disk['disk_2_write_bytes'], 12)
-    self.assertEqual(disk_counter_per_disk['disk_2_read_time'], 13)
-    self.assertEqual(disk_counter_per_disk['disk_2_write_time'], 14)
-    self.assertEqual(disk_counter_per_disk['disk_2_busy_time'], 15)
-    self.assertEqual(disk_counter_per_disk['disk_2_read_merged_count'], 16)
-    self.assertEqual(disk_counter_per_disk['disk_2_write_merged_count'], 17)
+    self.assertEqual(disk_counter_per_disk['sdisk_sdb1_read_count'], 9)
+    self.assertEqual(disk_counter_per_disk['sdisk_sdb1_write_count'], 10)
+    self.assertEqual(disk_counter_per_disk['sdisk_sdb1_read_bytes'], 11)
+    self.assertEqual(disk_counter_per_disk['sdisk_sdb1_write_bytes'], 12)
+    self.assertEqual(disk_counter_per_disk['sdisk_sdb1_read_time'], 13)
+    self.assertEqual(disk_counter_per_disk['sdisk_sdb1_write_time'], 14)
+    self.assertEqual(disk_counter_per_disk['sdisk_sdb1_busy_time'], 15)
+    self.assertEqual(disk_counter_per_disk['sdisk_sdb1_read_merged_count'], 16)
+    self.assertEqual(disk_counter_per_disk['sdisk_sdb1_write_merged_count'], 
17)
 
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5ad1de8/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
index 1da68ba..22710b0 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
@@ -255,6 +255,9 @@ public class TimelineMetricConfiguration {
   public static final String DEFAULT_TOPN_HOSTS_LIMIT =
     "timeline.metrics.default.topn.hosts.limit";
 
+  public static final String TIMELINE_METRIC_AGGREGATION_SQL_FILTERS =
+    "timeline.metrics.cluster.aggregation.sql.filters";
+
   public static final String HOST_APP_ID = "HOST";
 
   public static final String DEFAULT_INSTANCE_PORT = "12001";

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5ad1de8/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricClusterAggregatorSecond.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricClusterAggregatorSecond.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricClusterAggregatorSecond.java
index 5f40d21..6bc0f18 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricClusterAggregatorSecond.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricClusterAggregatorSecond.java
@@ -23,9 +23,11 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.metrics2.sink.timeline.PostProcessingUtil;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
 import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixHBaseAccessor;
 import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.availability.AggregationTaskRunner.AGGREGATOR_NAME;
 import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.availability.MetricCollectorHAController;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataKey;
 import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataManager;
 import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.Condition;
 import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.DefaultCondition;
@@ -34,6 +36,7 @@ import java.io.IOException;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.Date;
 import java.util.HashMap;
@@ -47,6 +50,7 @@ import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.ti
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.TIMELINE_METRICS_CLUSTER_AGGREGATOR_INTERPOLATION_ENABLED;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.GET_METRIC_SQL;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.METRICS_RECORD_TABLE_NAME;
+import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.TIMELINE_METRIC_AGGREGATION_SQL_FILTERS;
 
 /**
  * Aggregates a metric across all hosts in the cluster. Reads metrics from
@@ -60,6 +64,8 @@ public class TimelineMetricClusterAggregatorSecond extends 
AbstractTimelineAggre
   // 1 minute client side buffering adjustment
   private final Long serverTimeShiftAdjustment;
   private final boolean interpolationEnabled;
+  private TimelineMetricMetadataManager metadataManagerInstance;
+  private String skipAggrPatternStrings;
 
   public TimelineMetricClusterAggregatorSecond(AGGREGATOR_NAME aggregatorName,
                                                TimelineMetricMetadataManager 
metadataManager,
@@ -78,10 +84,12 @@ public class TimelineMetricClusterAggregatorSecond extends 
AbstractTimelineAggre
       sleepIntervalMillis, checkpointCutOffMultiplier, aggregatorDisabledParam,
       tableName, outputTableName, nativeTimeRangeDelay, haController);
 
+    this.metadataManagerInstance = metadataManager;
     appAggregator = new TimelineMetricAppAggregator(metadataManager, 
metricsConf);
     this.timeSliceIntervalMillis = timeSliceInterval;
     this.serverTimeShiftAdjustment = 
Long.parseLong(metricsConf.get(SERVER_SIDE_TIMESIFT_ADJUSTMENT, "90000"));
     this.interpolationEnabled = 
Boolean.parseBoolean(metricsConf.get(TIMELINE_METRICS_CLUSTER_AGGREGATOR_INTERPOLATION_ENABLED,
 "true"));
+    this.skipAggrPatternStrings = 
metricsConf.get(TIMELINE_METRIC_AGGREGATION_SQL_FILTERS);
   }
 
   @Override
@@ -103,8 +111,19 @@ public class TimelineMetricClusterAggregatorSecond extends 
AbstractTimelineAggre
 
   @Override
   protected Condition prepareMetricQueryCondition(long startTime, long 
endTime) {
-    Condition condition = new DefaultCondition(null, null, null, null, 
startTime - serverTimeShiftAdjustment,
+
+    List<String> metricNames = new ArrayList<>();
+    boolean metricNamesNotCondition = false;
+
+    if (!StringUtils.isEmpty(skipAggrPatternStrings)) {
+      LOG.info("Skipping aggregation for metric patterns : " + 
skipAggrPatternStrings);
+      metricNames.addAll(Arrays.asList(skipAggrPatternStrings.split(",")));
+      metricNamesNotCondition = true;
+    }
+
+    Condition condition = new DefaultCondition(metricNames, null, null, null, 
startTime - serverTimeShiftAdjustment,
       endTime, null, null, true);
+    condition.setMetricNamesNotCondition(metricNamesNotCondition);
     condition.setNoLimit();
     condition.setFetchSize(resultsetFetchSize);
     condition.setStatement(String.format(GET_METRIC_SQL,
@@ -180,6 +199,15 @@ public class TimelineMetricClusterAggregatorSecond extends 
AbstractTimelineAggre
   protected int processAggregateClusterMetrics(Map<TimelineClusterMetric, 
MetricClusterAggregate> aggregateClusterMetrics,
                                               TimelineMetric metric, 
List<Long[]> timeSlices) {
     // Create time slices
+
+    TimelineMetricMetadataKey appKey =  new 
TimelineMetricMetadataKey(metric.getMetricName(), metric.getAppId());
+    TimelineMetricMetadata metricMetadata = 
metadataManagerInstance.getMetadataCacheValue(appKey);
+
+    if (metricMetadata != null && !metricMetadata.isSupportsAggregates()) {
+      LOG.debug("Skipping cluster aggregation for " + metric.getMetricName());
+      return 0;
+    }
+
     Map<TimelineClusterMetric, Double> clusterMetrics = 
sliceFromTimelineMetric(metric, timeSlices);
     int numHosts = 0;
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5ad1de8/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java
index fd471fb..d0d1dbf 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java
@@ -17,6 +17,8 @@
  */
 package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery;
 
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.collections.MapUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -202,7 +204,7 @@ public class TimelineMetricMetadataManager {
       timelineMetric.getUnits(),
       timelineMetric.getType(),
       timelineMetric.getStartTime(),
-      true
+      supportAggregates(timelineMetric)
     );
   }
 
@@ -229,4 +231,9 @@ public class TimelineMetricMetadataManager {
   Map<String, Set<String>> getHostedAppsFromStore() throws SQLException {
     return hBaseAccessor.getHostedAppsMetadata();
   }
+
+  private boolean supportAggregates(TimelineMetric metric) {
+    return MapUtils.isEmpty(metric.getMetadata()) ||
+      
!(String.valueOf(true).equals(metric.getMetadata().get("skipAggregation")));
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5ad1de8/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/Condition.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/Condition.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/Condition.java
index 4873c24..9aa64bd 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/Condition.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/Condition.java
@@ -44,4 +44,5 @@ public interface Condition {
   void addOrderByColumn(String column);
   void setNoLimit();
   boolean doUpdate();
+  void setMetricNamesNotCondition(boolean metricNamesNotCondition);
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5ad1de8/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/DefaultCondition.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/DefaultCondition.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/DefaultCondition.java
index 0851e8f..a4f7014 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/DefaultCondition.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/DefaultCondition.java
@@ -16,12 +16,14 @@
  * limitations under the License.
  */
 package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query;
+import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.metrics2.sink.timeline.TopNConfig;
 import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixHBaseAccessor;
 import org.apache.hadoop.metrics2.sink.timeline.Precision;
 
+import java.util.ArrayList;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Set;
@@ -40,6 +42,7 @@ public class DefaultCondition implements Condition {
   Integer fetchSize;
   String statement;
   Set<String> orderByColumns = new LinkedHashSet<String>();
+  boolean metricNamesNotCondition = false;
 
   private static final Log LOG = LogFactory.getLog(DefaultCondition.class);
 
@@ -215,44 +218,85 @@ public class DefaultCondition implements Condition {
 
   protected boolean appendMetricNameClause(StringBuilder sb) {
     boolean appendConjunction = false;
-    StringBuilder metricsLike = new StringBuilder();
-    StringBuilder metricsIn = new StringBuilder();
+    List<String> metricsLike = new ArrayList<>();
+    List<String> metricsIn = new ArrayList<>();
 
     if (getMetricNames() != null) {
       for (String name : getMetricNames()) {
         if (name.contains("%")) {
-          if (metricsLike.length() > 1) {
-            metricsLike.append(" OR ");
-          }
-          metricsLike.append("METRIC_NAME LIKE ?");
+          metricsLike.add(name);
         } else {
-          if (metricsIn.length() > 0) {
-            metricsIn.append(", ");
-          }
-          metricsIn.append("?");
+          metricsIn.add(name);
         }
       }
 
-      if (metricsIn.length() > 0) {
-        sb.append("(METRIC_NAME IN (");
-        sb.append(metricsIn);
+      // Put a '(' first
+      sb.append("(");
+
+      //IN clause
+      // METRIC_NAME (NOT) IN (?,?,?,?)
+      if (CollectionUtils.isNotEmpty(metricsIn)) {
+        sb.append("METRIC_NAME");
+        if (metricNamesNotCondition) {
+          sb.append(" NOT");
+        }
+        sb.append(" IN (");
+        //Append ?,?,?,?
+        for (int i = 0; i < metricsIn.size(); i++) {
+          sb.append("?");
+          if (i < metricsIn.size() - 1) {
+            sb.append(", ");
+          }
+        }
         sb.append(")");
         appendConjunction = true;
       }
 
-      if (metricsLike.length() > 0) {
-        if (appendConjunction) {
-          sb.append(" OR ");
+      //Put an OR/AND if both types are present
+      if (CollectionUtils.isNotEmpty(metricsIn) &&
+        CollectionUtils.isNotEmpty(metricsLike)) {
+        if (metricNamesNotCondition) {
+          sb.append(" AND ");
         } else {
-          sb.append("(");
+          sb.append(" OR ");
+        }
+      }
+
+      //LIKE clause
+      // METRIC_NAME (NOT) LIKE ? OR(AND) METRIC_NAME LIKE ?
+      if (CollectionUtils.isNotEmpty(metricsLike)) {
+
+        for (int i = 0; i < metricsLike.size(); i++) {
+          sb.append("METRIC_NAME");
+          if (metricNamesNotCondition) {
+            sb.append(" NOT");
+          }
+          sb.append(" LIKE ");
+          sb.append("?");
+
+          if (i < metricsLike.size() - 1) {
+            if (metricNamesNotCondition) {
+              sb.append(" AND ");
+            } else {
+              sb.append(" OR ");
+            }
+          }
         }
-        sb.append(metricsLike);
         appendConjunction = true;
       }
 
+      // Finish with a ')'
       if (appendConjunction) {
         sb.append(")");
       }
+
+      metricNames.clear();
+      if (CollectionUtils.isNotEmpty(metricsIn)) {
+        metricNames.addAll(metricsIn);
+      }
+      if (CollectionUtils.isNotEmpty(metricsLike)) {
+        metricNames.addAll(metricsLike);
+      }
     }
     return appendConjunction;
   }
@@ -333,4 +377,8 @@ public class DefaultCondition implements Condition {
     }
     return false;
   }
+
+  public void setMetricNamesNotCondition(boolean metricNamesNotCondition) {
+    this.metricNamesNotCondition = metricNamesNotCondition;
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5ad1de8/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/EmptyCondition.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/EmptyCondition.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/EmptyCondition.java
index 34174e2..43ab88c 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/EmptyCondition.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/EmptyCondition.java
@@ -27,6 +27,7 @@ import java.util.List;
 public class EmptyCondition implements Condition {
   String statement;
   boolean doUpdate = false;
+  boolean metricNamesNotCondition = false;
 
   @Override
   public boolean isEmpty() {
@@ -144,4 +145,9 @@ public class EmptyCondition implements Condition {
       " doUpdate = " + this.doUpdate() +
       " }";
   }
+
+  @Override
+  public void setMetricNamesNotCondition(boolean metricNamesNotCondition) {
+    this.metricNamesNotCondition = metricNamesNotCondition;
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5ad1de8/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/SplitByMetricNamesCondition.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/SplitByMetricNamesCondition.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/SplitByMetricNamesCondition.java
index b8ca599..bb4dced 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/SplitByMetricNamesCondition.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/SplitByMetricNamesCondition.java
@@ -25,6 +25,7 @@ import java.util.List;
 public class SplitByMetricNamesCondition implements Condition {
   private final Condition adaptee;
   private String currentMetric;
+  private boolean metricNamesNotCondition = false;
 
   public SplitByMetricNamesCondition(Condition condition){
     this.adaptee = condition;
@@ -180,4 +181,9 @@ public class SplitByMetricNamesCondition implements 
Condition {
   public void setCurrentMetric(String currentMetric) {
     this.currentMetric = currentMetric;
   }
+
+  @Override
+  public void setMetricNamesNotCondition(boolean metricNamesNotCondition) {
+    this.metricNamesNotCondition = metricNamesNotCondition;
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5ad1de8/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestPhoenixTransactSQL.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestPhoenixTransactSQL.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestPhoenixTransactSQL.java
index a95655d..e988a61 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestPhoenixTransactSQL.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestPhoenixTransactSQL.java
@@ -47,7 +47,7 @@ public class TestPhoenixTransactSQL {
   @Test
   public void testConditionClause() throws Exception {
     Condition condition = new DefaultCondition(
-      Arrays.asList("cpu_user", "mem_free"), Collections.singletonList("h1"),
+      new ArrayList<>(Arrays.asList("cpu_user", "mem_free")), 
Collections.singletonList("h1"),
       "a1", "i1", 1407959718L, 1407959918L, null, null, false);
 
     String preparedClause = condition.getConditionClause().toString();
@@ -78,7 +78,7 @@ public class TestPhoenixTransactSQL {
   @Test
   public void testLikeConditionClause() throws Exception {
     Condition condition = new DefaultCondition(
-      Arrays.asList("cpu_user", "some=%.metric"),
+      new ArrayList<>(Arrays.asList("cpu_user", "some=%.metric")),
       Collections.singletonList("h1"), "a1", "i1", 1407959718L, 1407959918L,
       null, null, false);
 
@@ -115,7 +115,7 @@ public class TestPhoenixTransactSQL {
 
 
     condition = new DefaultCondition(
-        Arrays.asList("some=%.metric"), Collections.singletonList("h1"), "a1", 
"i1",
+      new ArrayList<>(Arrays.asList("some=%.metric")), 
Collections.singletonList("h1"), "a1", "i1",
         1407959718L, 1407959918L, null, null, false);
 
     preparedClause = condition.getConditionClause().toString();
@@ -127,7 +127,7 @@ public class TestPhoenixTransactSQL {
 
 
     condition = new DefaultCondition(
-      Arrays.asList("some=%.metric1", "some=%.metric2", "some=%.metric3"),
+      new ArrayList<>(Arrays.asList("some=%.metric1", "some=%.metric2", 
"some=%.metric3")),
       Collections.singletonList("h1"), "a1", "i1",
       1407959718L, 1407959918L, null, null, false);
 
@@ -142,7 +142,7 @@ public class TestPhoenixTransactSQL {
   @Test
   public void testPrepareGetAggregatePrecisionMINUTES() throws SQLException {
     Condition condition = new DefaultCondition(
-      Arrays.asList("cpu_user", "mem_free"), Collections.singletonList("h1"),
+      new ArrayList<>(Arrays.asList("cpu_user", "mem_free")), 
Collections.singletonList("h1"),
       "a1", "i1", 1407959718L, 1407959918L, Precision.MINUTES, null, false);
     Connection connection = createNiceMock(Connection.class);
     PreparedStatement preparedStatement = 
createNiceMock(PreparedStatement.class);
@@ -163,7 +163,7 @@ public class TestPhoenixTransactSQL {
     Long startTime = 1407959718L;
     //SECONDS precision
     Condition condition = new DefaultCondition(
-      Arrays.asList("cpu_user", "mem_free"), Collections.singletonList("h1"),
+      new ArrayList<>(Arrays.asList("cpu_user", "mem_free")), 
Collections.singletonList("h1"),
       "a1", "i1", startTime, endTime, null, null, false);
     Connection connection = createNiceMock(Connection.class);
     PreparedStatement preparedStatement = 
createNiceMock(PreparedStatement.class);
@@ -181,7 +181,7 @@ public class TestPhoenixTransactSQL {
     // MINUTES precision
     startTime = endTime-PhoenixTransactSQL.DAY/1000;
     condition = new DefaultCondition(
-      Arrays.asList("cpu_user", "mem_free"), Collections.singletonList("h1"),
+      new ArrayList<>(Arrays.asList("cpu_user", "mem_free")), 
Collections.singletonList("h1"),
       "a1", "i1", startTime, endTime, null, null, false);
     connection = createNiceMock(Connection.class);
     preparedStatement = createNiceMock(PreparedStatement.class);
@@ -199,7 +199,7 @@ public class TestPhoenixTransactSQL {
     // HOURS precision
     startTime = endTime-PhoenixTransactSQL.DAY*30/1000;
     condition = new DefaultCondition(
-      Arrays.asList("cpu_user", "mem_free"), Collections.singletonList("h1"),
+      new ArrayList<>(Arrays.asList("cpu_user", "mem_free")), 
Collections.singletonList("h1"),
       "a1", "i1", startTime, endTime, null, null, false);
     connection = createNiceMock(Connection.class);
     preparedStatement = createNiceMock(PreparedStatement.class);
@@ -217,7 +217,7 @@ public class TestPhoenixTransactSQL {
     // DAYS precision
     startTime = endTime-PhoenixTransactSQL.DAY*30*2/1000;
     condition = new DefaultCondition(
-      Arrays.asList("cpu_user", "mem_free"), Collections.singletonList("h1"),
+      new ArrayList<>(Arrays.asList("cpu_user", "mem_free")), 
Collections.singletonList("h1"),
       "a1", "i1", startTime, endTime, null, null, false);
     connection = createNiceMock(Connection.class);
     preparedStatement = createNiceMock(PreparedStatement.class);
@@ -236,7 +236,7 @@ public class TestPhoenixTransactSQL {
   @Test
   public void testPrepareGetAggregatePrecisionHours() throws SQLException {
     Condition condition = new DefaultCondition(
-      Arrays.asList("cpu_user", "mem_free"), Collections.singletonList("h1"),
+      new ArrayList<>(Arrays.asList("cpu_user", "mem_free")), 
Collections.singletonList("h1"),
       "a1", "i1", 1407959718L, 1407959918L, Precision.HOURS, null, false);
     Connection connection = createNiceMock(Connection.class);
     PreparedStatement preparedStatement = 
createNiceMock(PreparedStatement.class);
@@ -254,7 +254,7 @@ public class TestPhoenixTransactSQL {
   @Test
   public void testPrepareGetMetricsPrecisionMinutes() throws SQLException {
     Condition condition = new DefaultCondition(
-      Arrays.asList("cpu_user", "mem_free"), Collections.singletonList("h1"),
+      new ArrayList<>(Arrays.asList("cpu_user", "mem_free")), 
Collections.singletonList("h1"),
       "a1", "i1", 1407959718L, 1407959918L, Precision.MINUTES, null, false);
     Connection connection = createNiceMock(Connection.class);
     PreparedStatement preparedStatement = 
createNiceMock(PreparedStatement.class);
@@ -275,7 +275,7 @@ public class TestPhoenixTransactSQL {
     Long startTime = endTime - 200;
     // SECONDS precision
     Condition condition = new DefaultCondition(
-      Arrays.asList("cpu_user", "mem_free"), Collections.singletonList("h1"),
+      new ArrayList<>(Arrays.asList("cpu_user", "mem_free")), 
Collections.singletonList("h1"),
       "a1", "i1", startTime, endTime, null, null, false);
     Connection connection = createNiceMock(Connection.class);
     PreparedStatement preparedStatement = 
createNiceMock(PreparedStatement.class);
@@ -294,7 +294,7 @@ public class TestPhoenixTransactSQL {
     // SECONDS precision
     startTime = endTime-PhoenixTransactSQL.HOUR*2/1000;
     condition = new DefaultCondition(
-      Arrays.asList("cpu_user", "mem_free"), Collections.singletonList("h1"),
+      new ArrayList<>(Arrays.asList("cpu_user", "mem_free")), 
Collections.singletonList("h1"),
       "a1", "i1", startTime, endTime, null, null, false);
     connection = createNiceMock(Connection.class);
     preparedStatement = createNiceMock(PreparedStatement.class);
@@ -311,7 +311,7 @@ public class TestPhoenixTransactSQL {
     // MINUTES precision
     startTime = endTime-PhoenixTransactSQL.DAY/1000;
     condition = new DefaultCondition(
-      Arrays.asList("cpu_user", "mem_free"), Collections.singletonList("h1"),
+      new ArrayList<>(Arrays.asList("cpu_user", "mem_free")), 
Collections.singletonList("h1"),
       "a1", "i1", startTime, endTime, null, null, false);
     connection = createNiceMock(Connection.class);
     preparedStatement = createNiceMock(PreparedStatement.class);
@@ -328,7 +328,7 @@ public class TestPhoenixTransactSQL {
     // HOURS precision
     startTime = endTime-PhoenixTransactSQL.DAY*30/1000;
     condition = new DefaultCondition(
-      Arrays.asList("cpu_user", "mem_free"), Collections.singletonList("h1"),
+      new ArrayList<>(Arrays.asList("cpu_user", "mem_free")), 
Collections.singletonList("h1"),
       "a1", "i1", startTime, endTime, null, null, false);
     connection = createNiceMock(Connection.class);
     preparedStatement = createNiceMock(PreparedStatement.class);
@@ -345,7 +345,7 @@ public class TestPhoenixTransactSQL {
     // DAYS precision
     startTime = endTime-PhoenixTransactSQL.DAY*30*2/1000;
     condition = new DefaultCondition(
-      Arrays.asList("cpu_user", "mem_free"), Collections.singletonList("h1"),
+      new ArrayList<>(Arrays.asList("cpu_user", "mem_free")), 
Collections.singletonList("h1"),
       "a1", "i1", startTime, endTime, null, null, false);
     connection = createNiceMock(Connection.class);
     preparedStatement = createNiceMock(PreparedStatement.class);
@@ -364,7 +364,7 @@ public class TestPhoenixTransactSQL {
   @Test
   public void testPrepareGetLatestMetricSqlStmtMultipleHostNames() throws 
SQLException {
     Condition condition = new DefaultCondition(
-      Arrays.asList("cpu_user", "mem_free"), Arrays.asList("h1", "h2"),
+      new ArrayList<>(Arrays.asList("cpu_user", "mem_free")), 
Arrays.asList("h1", "h2"),
       "a1", "i1", null, null, null, null, false);
     Connection connection = createNiceMock(Connection.class);
     PreparedStatement preparedStatement = 
createNiceMock(PreparedStatement.class);
@@ -390,7 +390,7 @@ public class TestPhoenixTransactSQL {
   public void testPrepareGetLatestMetricSqlStmtSortMergeJoinAlgorithm()
     throws SQLException {
     Condition condition = new DefaultCondition(
-      Arrays.asList("cpu_user", "mem_free"), Arrays.asList("h1"),
+      new ArrayList<>(Arrays.asList("cpu_user", "mem_free")), 
Arrays.asList("h1"),
       "a1", "i1", null, null, null, null, false);
     Connection connection = createNiceMock(Connection.class);
     PreparedStatement preparedStatement = 
createNiceMock(PreparedStatement.class);
@@ -413,7 +413,7 @@ public class TestPhoenixTransactSQL {
   @Test
   public void testPrepareGetMetricsPrecisionHours() throws SQLException {
     Condition condition = new DefaultCondition(
-      Arrays.asList("cpu_user", "mem_free"), Collections.singletonList("h1"),
+      new ArrayList<>(Arrays.asList("cpu_user", "mem_free")), 
Collections.singletonList("h1"),
       "a1", "i1", 1407959718L, 1407959918L, Precision.HOURS, null, false);
     Connection connection = createNiceMock(Connection.class);
     PreparedStatement preparedStatement = 
createNiceMock(PreparedStatement.class);
@@ -561,7 +561,7 @@ public class TestPhoenixTransactSQL {
     List<String> hosts = Arrays.asList("h1", "h2", "h3", "h4");
 
     Condition condition = new TopNCondition(
-      Arrays.asList("cpu_user"), hosts,
+      new ArrayList<>(Collections.singletonList("cpu_user")), hosts,
       "a1", "i1", 1407959718L, 1407959918L, null, null, false, 2, null, false);
 
     String conditionClause = condition.getConditionClause().toString();
@@ -580,7 +580,7 @@ public class TestPhoenixTransactSQL {
 
   @Test
   public void testTopNMetricsConditionClause() throws Exception {
-    List<String> metricNames = Arrays.asList("m1", "m2", "m3");
+    List<String> metricNames = new ArrayList<>(Arrays.asList("m1", "m2", 
"m3"));
 
     Condition condition = new TopNCondition(
       metricNames, Collections.singletonList("h1"),
@@ -602,7 +602,7 @@ public class TestPhoenixTransactSQL {
 
   @Test
   public void testTopNMetricsIllegalConditionClause() throws Exception {
-    List<String> metricNames = Arrays.asList("m1", "m2");
+    List<String> metricNames = new ArrayList<>(Arrays.asList("m1", "m2"));
 
     List<String> hosts = Arrays.asList("h1", "h2");
 
@@ -616,7 +616,7 @@ public class TestPhoenixTransactSQL {
   @Test
   public void testHostsRegexpConditionClause() {
     Condition condition = new TopNCondition(
-            Arrays.asList("m1"), Arrays.asList("%.ambari", "host1.apache"),
+      new ArrayList<>(Arrays.asList("m1")), Arrays.asList("%.ambari", 
"host1.apache"),
             "a1", "i1", 1407959718L, 1407959918L, null, null, false, 2, null, 
false);
 
     String conditionClause = condition.getConditionClause().toString();
@@ -629,7 +629,7 @@ public class TestPhoenixTransactSQL {
     Assert.assertEquals(expectedClause, conditionClause);
 
     condition = new TopNCondition(
-            Arrays.asList("m1"), Arrays.asList("%.ambari"),
+      new ArrayList<>(Arrays.asList("m1")), Arrays.asList("%.ambari"),
             "a1", "i1", 1407959718L, 1407959918L, null, null, false, 2, null, 
false);
 
     conditionClause = condition.getConditionClause().toString();
@@ -643,7 +643,7 @@ public class TestPhoenixTransactSQL {
     Assert.assertEquals(expectedClause, conditionClause);
 
     condition = new TopNCondition(
-            Arrays.asList("m1", "m2", "m3"), Arrays.asList("h1.ambari"),
+      new ArrayList<>(Arrays.asList("m1", "m2", "m3")), 
Arrays.asList("h1.ambari"),
             "a1", "i1", 1407959718L, 1407959918L, null, null, false, 2, null, 
false);
 
     conditionClause = condition.getConditionClause().toString();

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5ad1de8/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricClusterAggregatorSecondTest.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricClusterAggregatorSecondTest.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricClusterAggregatorSecondTest.java
index 014772f..97b6258 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricClusterAggregatorSecondTest.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricClusterAggregatorSecondTest.java
@@ -20,6 +20,7 @@ package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline
 import junit.framework.Assert;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataKey;
 import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataManager;
 import org.easymock.EasyMock;
 import org.junit.Test;
@@ -115,6 +116,10 @@ public class TimelineMetricClusterAggregatorSecondTest {
     Configuration configuration = new Configuration();
     TimelineMetricMetadataManager metricMetadataManagerMock = 
EasyMock.createNiceMock(TimelineMetricMetadataManager.class);
 
+    
EasyMock.expect(metricMetadataManagerMock.getMetadataCacheValue((TimelineMetricMetadataKey)EasyMock.anyObject()))
+      .andReturn(null).anyTimes();
+    EasyMock.replay(metricMetadataManagerMock);
+
     TimelineMetricClusterAggregatorSecond secondAggregator = new 
TimelineMetricClusterAggregatorSecond(
       METRIC_AGGREGATE_SECOND, metricMetadataManagerMock, null, configuration, 
null,
       aggregatorInterval, 2, "false", "", "", aggregatorInterval, 
sliceInterval, null

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5ad1de8/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/DefaultConditionTest.java
----------------------------------------------------------------------
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/DefaultConditionTest.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/DefaultConditionTest.java
new file mode 100644
index 0000000..e4e9225
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/DefaultConditionTest.java
@@ -0,0 +1,116 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query;
+
+import junit.framework.Assert;
+import org.apache.commons.collections.CollectionUtils;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class DefaultConditionTest {
+
+  @Test
+  public void testMetricNameWhereCondition() {
+    List<String> metricNames = new ArrayList<>();
+
+    //Only IN clause.
+
+    metricNames.add("M1");
+    DefaultCondition condition = new 
DefaultCondition(metricNames,null,null,null,null,null,null,null,true);
+    StringBuilder sb = new StringBuilder();
+    condition.appendMetricNameClause(sb);
+    Assert.assertEquals(sb.toString(), "(METRIC_NAME IN (?))");
+    Assert.assertTrue(CollectionUtils.isEqualCollection(metricNames, 
condition.getMetricNames()));
+
+    metricNames.add("m2");
+    condition = new 
DefaultCondition(metricNames,null,null,null,null,null,null,null,true);
+    sb = new StringBuilder();
+    condition.appendMetricNameClause(sb);
+    Assert.assertEquals(sb.toString(), "(METRIC_NAME IN (?, ?))");
+    Assert.assertTrue(CollectionUtils.isEqualCollection(metricNames, 
condition.getMetricNames()));
+
+    // Only NOT IN clause
+    condition = new 
DefaultCondition(metricNames,null,null,null,null,null,null,null,true);
+    condition.setMetricNamesNotCondition(true);
+    sb = new StringBuilder();
+    condition.appendMetricNameClause(sb);
+    Assert.assertEquals(sb.toString(), "(METRIC_NAME NOT IN (?, ?))");
+    Assert.assertTrue(CollectionUtils.isEqualCollection(metricNames, 
condition.getMetricNames()));
+
+    metricNames.clear();
+
+    //Only LIKE clause
+    metricNames.add("disk%");
+    condition = new 
DefaultCondition(metricNames,null,null,null,null,null,null,null,true);
+    sb = new StringBuilder();
+    condition.appendMetricNameClause(sb);
+    Assert.assertEquals(sb.toString(), "(METRIC_NAME LIKE ?)");
+    Assert.assertTrue(CollectionUtils.isEqualCollection(metricNames, 
condition.getMetricNames()));
+
+    metricNames.add("cpu%");
+    condition = new 
DefaultCondition(metricNames,null,null,null,null,null,null,null,true);
+    sb = new StringBuilder();
+    condition.appendMetricNameClause(sb);
+    Assert.assertEquals(sb.toString(), "(METRIC_NAME LIKE ? OR METRIC_NAME 
LIKE ?)");
+    Assert.assertTrue(CollectionUtils.isEqualCollection(metricNames, 
condition.getMetricNames()));
+
+    //Only NOT LIKE clause
+    condition = new 
DefaultCondition(metricNames,null,null,null,null,null,null,null,true);
+    condition.setMetricNamesNotCondition(true);
+    sb = new StringBuilder();
+    condition.appendMetricNameClause(sb);
+    Assert.assertEquals(sb.toString(), "(METRIC_NAME NOT LIKE ? AND 
METRIC_NAME NOT LIKE ?)");
+    Assert.assertTrue(CollectionUtils.isEqualCollection(metricNames, 
condition.getMetricNames()));
+
+    metricNames.clear();
+
+    // IN followed by LIKE clause
+    metricNames.add("M1");
+    metricNames.add("disk%");
+    metricNames.add("M2");
+    condition = new 
DefaultCondition(metricNames,null,null,null,null,null,null,null,true);
+    sb = new StringBuilder();
+    condition.appendMetricNameClause(sb);
+    Assert.assertEquals(sb.toString(), "(METRIC_NAME IN (?, ?) OR METRIC_NAME 
LIKE ?)");
+    Assert.assertEquals(metricNames.get(2), "disk%");
+
+    metricNames.clear();
+    //NOT IN followed by NOT LIKE clause
+    metricNames.add("disk%");
+    metricNames.add("metric1");
+    metricNames.add("cpu%");
+    condition = new 
DefaultCondition(metricNames,null,null,null,null,null,null,null,true);
+    sb = new StringBuilder();
+    condition.setMetricNamesNotCondition(true);
+    condition.appendMetricNameClause(sb);
+    Assert.assertEquals(sb.toString(), "(METRIC_NAME NOT IN (?) AND 
METRIC_NAME NOT LIKE ? AND METRIC_NAME NOT LIKE ?)");
+    Assert.assertEquals(metricNames.get(0), "metric1");
+
+    //Empty
+    metricNames.clear();
+    condition = new 
DefaultCondition(metricNames,null,null,null,null,null,null,null,true);
+    sb = new StringBuilder();
+    condition.appendMetricNameClause(sb);
+    Assert.assertEquals(sb.toString(), "");
+
+  }
+}
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5ad1de8/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml
 
b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml
index 68b53f2..2f2e114 100644
--- 
a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml
+++ 
b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml
@@ -744,4 +744,16 @@
     </description>
     <on-ambari-upgrade add="true"/>
   </property>
+  <property>
+    <name>timeline.metrics.cluster.aggregation.sql.filters</name>
+    <value>sdisk\_%,boottime</value>
+    <description>
+      Comma separated list of metric names or Phoenix 'LIKE' clause 
expressions that match metric names
+      which prevents certain metrics from being aggregated across hosts.
+    </description>
+    <on-ambari-upgrade add="true"/>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+  </property>
 </configuration>

Reply via email to