This is an automated email from the ASF dual-hosted git repository.
jialiang pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/ambari.git
The following commit(s) were added to refs/heads/trunk by this push:
new b6a31856aa [AMBARI-26076] Upgrade net.sf.ehcache to 3.10.0 (#3825)
b6a31856aa is described below
commit b6a31856aae2c352faef6c41d781e4cef9dafe47
Author: Sandeep Kumar <[email protected]>
AuthorDate: Wed Sep 25 13:28:30 2024 +0530
[AMBARI-26076] Upgrade net.sf.ehcache to 3.10.0 (#3825)
---
ambari-server/pom.xml | 29 +++-
.../ambari/server/configuration/Configuration.java | 16 ++
.../cache/TimelineAppMetricCacheKeySerializer.java | 78 +++++++++
.../timeline/cache/TimelineMetricCache.java | 103 ++++-------
.../cache/TimelineMetricCacheCustomExpiry.java | 55 ++++++
.../cache/TimelineMetricCacheEntryFactory.java | 47 ++++--
.../cache/TimelineMetricCacheProvider.java | 95 +++++------
.../cache/TimelineMetricsCacheSizeOfEngine.java | 152 -----------------
.../cache/TimelineMetricsCacheValueSerializer.java | 79 +++++++++
.../utilities/ServiceCalculatedStateFactory.java | 2 +-
.../internal/StackDefinedPropertyProviderTest.java | 2 +-
.../metrics/timeline/AMSPropertyProviderTest.java | 2 +-
.../cache/TimelineMetricCacheSizingTest.java | 115 -------------
.../timeline/cache/TimelineMetricCacheTest.java | 188 +++++++++++----------
14 files changed, 466 insertions(+), 497 deletions(-)
diff --git a/ambari-server/pom.xml b/ambari-server/pom.xml
index 5e5595e8db..47f657ac11 100644
--- a/ambari-server/pom.xml
+++ b/ambari-server/pom.xml
@@ -1367,6 +1367,10 @@
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</exclusion>
+ <exclusion>
+ <groupId>javax.xml.bind</groupId>
+ <artifactId>jaxb-api</artifactId>
+ </exclusion>
</exclusions>
</dependency>
<dependency>
@@ -1661,9 +1665,30 @@
<artifactId>jackson-databind</artifactId>
</dependency>
<dependency>
- <groupId>net.sf.ehcache</groupId>
+ <groupId>javax.xml.bind</groupId>
+ <artifactId>jaxb-api</artifactId>
+ <version>2.3.1</version>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.istack</groupId>
+ <artifactId>istack-commons-runtime</artifactId>
+ <version>4.0.0</version>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.xml.fastinfoset</groupId>
+ <artifactId>FastInfoset</artifactId>
+ <version>1.2.16</version>
+ </dependency>
+ <dependency>
+ <groupId>org.ehcache</groupId>
<artifactId>ehcache</artifactId>
- <version>2.10.0</version>
+ <version>3.10.0</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.glassfish.jaxb</groupId>
+ <artifactId>jaxb-runtime</artifactId>
+ </exclusion>
+ </exclusions>
</dependency>
<dependency>
<groupId>com.nimbusds</groupId>
diff --git
a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
index 83a4655cc7..0477219bc3 100644
---
a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
+++
b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
@@ -2061,6 +2061,15 @@ public class Configuration {
public static final ConfigurationProperty<Integer>
TIMELINE_METRICS_CACHE_IDLE_TIME = new ConfigurationProperty<>(
"server.timeline.metrics.cache.entry.idle.seconds", 1800);
+  /**
+   * Cache size, in number of entries, that the Ambari Metrics cache will hold.
+   */
+ @Markdown(
+ relatedTo = "server.timeline.metrics.cache.disabled",
+ description = "cache size, in entries, that ambari metrics cache
will hold.")
+ public static final ConfigurationProperty<Integer>
TIMELINE_METRICS_CACHE_ENTRY_UNIT_SIZE = new ConfigurationProperty<>(
+ "server.timeline.metrics.cache.entry.entry.unit.size", 100);
+
/**
* The time, in {@link TimeUnit#MILLISECONDS}, that initial requests made to
* Ambari Metrics will wait while reading from the socket before timing out.
@@ -5269,6 +5278,13 @@ public class Configuration {
return Integer.parseInt(getProperty(TIMELINE_METRICS_CACHE_IDLE_TIME));
}
+  /**
+   * Ambari Metrics cache size, in number of entries.
+   */
+ public int getMetricCacheEntryUnitSize() {
+ return
Integer.parseInt(getProperty(TIMELINE_METRICS_CACHE_ENTRY_UNIT_SIZE));
+ }
+
/**
* Separate timeout settings for metrics cache.
* @return milliseconds
diff --git
a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineAppMetricCacheKeySerializer.java
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineAppMetricCacheKeySerializer.java
new file mode 100644
index 0000000000..91f8f27146
--- /dev/null
+++
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineAppMetricCacheKeySerializer.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.controller.metrics.timeline.cache;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+import java.nio.ByteBuffer;
+
+import org.ehcache.spi.serialization.Serializer;
+import org.ehcache.spi.serialization.SerializerException;
+
+public class TimelineAppMetricCacheKeySerializer implements
Serializer<TimelineAppMetricCacheKey> {
+ private final ClassLoader classLoader;
+ public TimelineAppMetricCacheKeySerializer(ClassLoader classLoader) {
+ this.classLoader = classLoader;
+ }
+ @Override
+ public ByteBuffer serialize(TimelineAppMetricCacheKey key) throws
SerializerException {
+ try {
+ ByteArrayOutputStream byteArrayOutputStream = new
ByteArrayOutputStream();
+ ObjectOutputStream objectOutputStream = new
ObjectOutputStream(byteArrayOutputStream);
+ objectOutputStream.writeObject(key);
+ objectOutputStream.close();
+ return ByteBuffer.wrap(byteArrayOutputStream.toByteArray());
+ } catch (Exception e) {
+ throw new SerializerException(e);
+ }
+ }
+
+ @Override
+ public TimelineAppMetricCacheKey read(ByteBuffer binary) throws
ClassNotFoundException, SerializerException {
+ try {
+ ByteArrayInputStream byteArrayInputStream = new
ByteArrayInputStream(binary.array());
+ ObjectInputStream objectInputStream = new
ObjectInputStream(byteArrayInputStream);
+ return (TimelineAppMetricCacheKey) objectInputStream.readObject();
+ } catch (IOException | ClassNotFoundException e) {
+ throw new SerializerException("Error during deserialization", e);
+ }
+ }
+
+ @Override
+ public boolean equals(TimelineAppMetricCacheKey key, ByteBuffer binary)
throws ClassNotFoundException, SerializerException {
+ try {
+ ByteArrayInputStream byteArrayInputStream = new
ByteArrayInputStream(binary.array());
+ ObjectInputStream objectInputStream = new
ObjectInputStream(byteArrayInputStream);
+ TimelineAppMetricCacheKey deserializedKey = (TimelineAppMetricCacheKey)
objectInputStream.readObject();
+
+ // Now compare key and deserializedKey
+ if (key == deserializedKey) return true;
+ if (deserializedKey == null || (key.getClass() !=
deserializedKey.getClass())) return false;
+
+ if (!key.getMetricNames().equals(deserializedKey.getMetricNames()))
return false;
+ if (!key.getAppId().equals(deserializedKey.getAppId())) return false;
+ return !(key.getHostNames() != null ?
!key.getHostNames().equals(deserializedKey.getHostNames()) :
deserializedKey.getHostNames() != null);
+
+ } catch (IOException e) {
+ throw new SerializerException("Error during deserialization", e);
+ }
+ }
+}
diff --git
a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCache.java
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCache.java
index bfe8456174..6287fe4034 100644
---
a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCache.java
+++
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCache.java
@@ -18,35 +18,36 @@
package org.apache.ambari.server.controller.metrics.timeline.cache;
import java.io.IOException;
-import java.net.ConnectException;
import java.net.SocketTimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
+import org.ehcache.Cache;
+import org.ehcache.core.internal.statistics.DefaultStatisticsService;
+import org.ehcache.core.statistics.CacheStatistics;
+import org.ehcache.spi.loaderwriter.CacheLoadingException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import net.sf.ehcache.CacheException;
-import net.sf.ehcache.Ehcache;
-import net.sf.ehcache.Element;
-import net.sf.ehcache.constructs.blocking.LockTimeoutException;
-import net.sf.ehcache.constructs.blocking.UpdatingCacheEntryFactory;
-import net.sf.ehcache.constructs.blocking.UpdatingSelfPopulatingCache;
-import net.sf.ehcache.statistics.StatisticsGateway;
-
-public class TimelineMetricCache extends UpdatingSelfPopulatingCache {
-
+public class TimelineMetricCache {
+ private final Cache<TimelineAppMetricCacheKey, TimelineMetricsCacheValue>
cache;
+ private final DefaultStatisticsService statisticsService;
+ private final TimelineMetricCacheEntryFactory cacheEntryFactory;
+ public static final String TIMELINE_METRIC_CACHE_INSTANCE_NAME =
"timelineMetricCache";
private final static Logger LOG =
LoggerFactory.getLogger(TimelineMetricCache.class);
private static AtomicInteger printCacheStatsCounter = new AtomicInteger(0);
/**
- * Creates a SelfPopulatingCache.
+ * Creates a TimelineMetricCache.
*
* @param cache @Cache
- * @param factory @CacheEntryFactory
+ * @param cacheEntryFactory @CacheEntryFactory
+ * @param statisticsService @DefaultStatisticsService
*/
- public TimelineMetricCache(Ehcache cache, UpdatingCacheEntryFactory factory)
throws CacheException {
- super(cache, factory);
+ public TimelineMetricCache(Cache<TimelineAppMetricCacheKey,
TimelineMetricsCacheValue> cache, TimelineMetricCacheEntryFactory
cacheEntryFactory, DefaultStatisticsService statisticsService) {
+ this.cache = cache;
+ this.cacheEntryFactory = cacheEntryFactory;
+ this.statisticsService = statisticsService;
}
/**
@@ -63,26 +64,22 @@ public class TimelineMetricCache extends
UpdatingSelfPopulatingCache {
// Make sure key is valid
validateKey(key);
- Element element = null;
+ TimelineMetricsCacheValue value = null;
try {
- element = get(key);
- } catch (LockTimeoutException le) {
- // Ehcache masks the Socket Timeout to look as a LockTimeout
- Throwable t = le.getCause();
- if (t instanceof CacheException) {
- t = t.getCause();
- if (t instanceof SocketTimeoutException) {
- throw new SocketTimeoutException(t.getMessage());
- }
- if (t instanceof ConnectException) {
- throw new ConnectException(t.getMessage());
- }
+ value = cache.get(key);
+ } catch (CacheLoadingException cle) {
+ Throwable t = cle.getCause();
+ if(t instanceof SocketTimeoutException) {
+ throw new SocketTimeoutException(t.getMessage());
+ }
+ if(t instanceof IOException) {
+ throw new IOException(t.getMessage());
}
+ throw cle;
}
TimelineMetrics timelineMetrics = new TimelineMetrics();
- if (element != null && element.getObjectValue() != null) {
- TimelineMetricsCacheValue value = (TimelineMetricsCacheValue)
element.getObjectValue();
+ if (value != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("Returning value from cache: {}", value);
}
@@ -92,51 +89,21 @@ public class TimelineMetricCache extends
UpdatingSelfPopulatingCache {
if (LOG.isDebugEnabled()) {
// Print stats every 100 calls - Note: Supported in debug mode only
if (printCacheStatsCounter.getAndIncrement() == 0) {
- StatisticsGateway statistics = this.getStatistics();
- LOG.debug("Metrics cache stats => \n, Evictions = {}, Expired = {},
Hits = {}, Misses = {}, Hit ratio = {}, Puts = {}, Size in MB = {}",
- statistics.cacheEvictedCount(), statistics.cacheExpiredCount(),
statistics.cacheHitCount(), statistics.cacheMissCount(),
statistics.cacheHitRatio(),
- statistics.cachePutCount(), statistics.getLocalHeapSizeInBytes() /
1048576);
+ CacheStatistics cacheStatistics =
statisticsService.getCacheStatistics(TIMELINE_METRIC_CACHE_INSTANCE_NAME);
+ if(cacheStatistics == null) {
+ LOG.warn("Cache statistics not available.");
+ return timelineMetrics;
+ }
+ LOG.debug("Metrics cache stats => \n, Evictions = {}, Expired = {},
Hits = {}, Misses = {}, Hit ratio = {}, Puts = {}",
+ cacheStatistics.getCacheEvictions(),
cacheStatistics.getCacheExpirations(), cacheStatistics.getCacheHits(),
cacheStatistics.getCacheMisses(), cacheStatistics.getCacheHitPercentage(),
cacheStatistics.getCachePuts()
+ );
} else {
printCacheStatsCounter.compareAndSet(100, 0);
}
}
-
return timelineMetrics;
}
- /**
- * Set new time bounds on the cache key so that update can use the new
- * query window. We do this quietly which means regular get/update logic is
- * not invoked.
- */
- @Override
- public Element get(Object key) throws LockTimeoutException {
- Element element = this.getQuiet(key);
- if (element != null) {
- if (LOG.isTraceEnabled()) {
- LOG.trace("key : {}", element.getObjectKey());
- LOG.trace("value : {}", element.getObjectValue());
- }
-
- // Set new time boundaries on the key
- TimelineAppMetricCacheKey existingKey = (TimelineAppMetricCacheKey)
element.getObjectKey();
-
- LOG.debug("Existing temporal info: {} for : {}",
existingKey.getTemporalInfo(), existingKey.getMetricNames());
-
- TimelineAppMetricCacheKey newKey = (TimelineAppMetricCacheKey) key;
- existingKey.setTemporalInfo(newKey.getTemporalInfo());
-
- LOG.debug("New temporal info: {} for : {}", newKey.getTemporalInfo(),
existingKey.getMetricNames());
-
- if (existingKey.getSpec() == null ||
!existingKey.getSpec().equals(newKey.getSpec())) {
- existingKey.setSpec(newKey.getSpec());
- LOG.debug("New spec: {} for : {}", newKey.getSpec(),
existingKey.getMetricNames());
- }
- }
-
- return super.get(key);
- }
-
private void validateKey(TimelineAppMetricCacheKey key) throws
IllegalArgumentException {
StringBuilder msg = new StringBuilder("Invalid metric key requested.");
boolean throwException = false;
diff --git
a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCacheCustomExpiry.java
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCacheCustomExpiry.java
new file mode 100644
index 0000000000..11134d8423
--- /dev/null
+++
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCacheCustomExpiry.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.controller.metrics.timeline.cache;
+
+import java.util.concurrent.TimeUnit;
+
+import org.ehcache.ValueSupplier;
+import org.ehcache.expiry.Duration;
+import org.ehcache.expiry.Expiry;
+
+
+public class TimelineMetricCacheCustomExpiry implements
Expiry<TimelineAppMetricCacheKey, TimelineMetricsCacheValue> {
+
+ private final Duration timeToLive;
+ private final Duration timeToIdle;
+
+ public TimelineMetricCacheCustomExpiry(java.time.Duration timeToLive,
java.time.Duration timeToIdle) {
+ this.timeToLive = convertJavaDurationToEhcacheDuration(timeToLive);
+ this.timeToIdle = convertJavaDurationToEhcacheDuration(timeToIdle);
+ }
+
+ @Override
+ public Duration getExpiryForCreation(TimelineAppMetricCacheKey key,
TimelineMetricsCacheValue value) {
+ return timeToLive;
+ }
+
+ @Override
+ public Duration getExpiryForAccess(TimelineAppMetricCacheKey key,
ValueSupplier<? extends TimelineMetricsCacheValue> value) {
+ return timeToIdle;
+ }
+
+ @Override
+ public Duration getExpiryForUpdate(TimelineAppMetricCacheKey key,
ValueSupplier<? extends TimelineMetricsCacheValue> oldValue,
TimelineMetricsCacheValue newValue) {
+ return timeToLive;
+ }
+
+ private Duration convertJavaDurationToEhcacheDuration(java.time.Duration
javaDuration) {
+ return Duration.of(javaDuration.toNanos(), TimeUnit.NANOSECONDS);
+ }
+}
\ No newline at end of file
diff --git
a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCacheEntryFactory.java
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCacheEntryFactory.java
index 2b11f61484..bab48b063b 100644
---
a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCacheEntryFactory.java
+++
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCacheEntryFactory.java
@@ -18,7 +18,9 @@
package org.apache.ambari.server.controller.metrics.timeline.cache;
import java.io.IOException;
+import java.net.URISyntaxException;
import java.util.Date;
+import java.util.Map;
import java.util.TreeMap;
import org.apache.ambari.server.configuration.ComponentSSLConfiguration;
@@ -30,16 +32,19 @@ import org.apache.hadoop.metrics2.sink.timeline.Precision;
import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
import org.apache.http.client.utils.URIBuilder;
+import org.ehcache.spi.loaderwriter.BulkCacheLoadingException;
+import org.ehcache.spi.loaderwriter.BulkCacheWritingException;
+import org.ehcache.spi.loaderwriter.CacheLoaderWriter;
+import org.ehcache.spi.loaderwriter.CacheWritingException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.inject.Inject;
import com.google.inject.Singleton;
-import net.sf.ehcache.constructs.blocking.UpdatingCacheEntryFactory;
@Singleton
-public class TimelineMetricCacheEntryFactory implements
UpdatingCacheEntryFactory {
+public class TimelineMetricCacheEntryFactory implements
CacheLoaderWriter<TimelineAppMetricCacheKey, TimelineMetricsCacheValue> {
private final static Logger LOG =
LoggerFactory.getLogger(TimelineMetricCacheEntryFactory.class);
// Not declared final to ease unit test code and allow streamProvider
// injection
@@ -74,9 +79,9 @@ public class TimelineMetricCacheEntryFactory implements
UpdatingCacheEntryFactor
* @throws Exception
*/
@Override
- public Object createEntry(Object key) throws Exception {
+ public TimelineMetricsCacheValue load(TimelineAppMetricCacheKey key) throws
Exception {
LOG.debug("Creating cache entry since none exists, key = {}", key);
- TimelineAppMetricCacheKey metricCacheKey = (TimelineAppMetricCacheKey) key;
+ TimelineAppMetricCacheKey metricCacheKey = key;
TimelineMetrics timelineMetrics = null;
try {
@@ -84,13 +89,15 @@ public class TimelineMetricCacheEntryFactory implements
UpdatingCacheEntryFactor
timelineMetrics = requestHelperForGets.fetchTimelineMetrics(uriBuilder,
metricCacheKey.getTemporalInfo().getStartTimeMillis(),
metricCacheKey.getTemporalInfo().getEndTimeMillis());
+ } catch (URISyntaxException e) {
+ LOG.debug("Caught URISyntaxException on fetching metrics. {}",
e.getMessage());
+ throw new RuntimeException(e);
} catch (IOException io) {
LOG.debug("Caught IOException on fetching metrics. {}", io.getMessage());
throw io;
}
TimelineMetricsCacheValue value = null;
-
if (timelineMetrics != null && !timelineMetrics.getMetrics().isEmpty()) {
value = new TimelineMetricsCacheValue(
metricCacheKey.getTemporalInfo().getStartTime(),
@@ -99,10 +106,8 @@ public class TimelineMetricCacheEntryFactory implements
UpdatingCacheEntryFactor
Precision.getPrecision(metricCacheKey.getTemporalInfo().getStartTimeMillis(),
metricCacheKey.getTemporalInfo().getEndTimeMillis()) //Initial
Precision
);
-
LOG.debug("Created cache entry: {}", value);
}
-
return value;
}
@@ -116,9 +121,9 @@ public class TimelineMetricCacheEntryFactory implements
UpdatingCacheEntryFactor
* @throws Exception
*/
@Override
- public void updateEntryValue(Object key, Object value) throws Exception {
- TimelineAppMetricCacheKey metricCacheKey = (TimelineAppMetricCacheKey) key;
- TimelineMetricsCacheValue existingMetrics = (TimelineMetricsCacheValue)
value;
+ public void write(TimelineAppMetricCacheKey key, TimelineMetricsCacheValue
value) throws Exception {
+ TimelineAppMetricCacheKey metricCacheKey = key;
+ TimelineMetricsCacheValue existingMetrics = value;
LOG.debug("Updating cache entry, key: {}, with value = {}", key, value);
@@ -199,7 +204,6 @@ public class TimelineMetricCacheEntryFactory implements
UpdatingCacheEntryFactor
protected void updateTimelineMetricsInCache(TimelineMetrics newMetrics,
TimelineMetricsCacheValue timelineMetricsCacheValue,
Long requestedStartTime, Long requestedEndTime, boolean removeAll) {
-
TimelineMetrics existingTimelineMetrics =
timelineMetricsCacheValue.getTimelineMetrics();
// Remove values that do not fit before adding new data
@@ -242,7 +246,6 @@ public class TimelineMetricCacheEntryFactory implements
UpdatingCacheEntryFactor
// Remove out of band data from the cache
private void updateExistingMetricValues(TimelineMetrics existingMetrics,
Long requestedStartTime, Long requestedEndTime, boolean removeAll) {
-
for (TimelineMetric existingMetric : existingMetrics.getMetrics()) {
if (removeAll) {
existingMetric.setMetricValues(new TreeMap<>());
@@ -319,4 +322,24 @@ public class TimelineMetricCacheEntryFactory implements
UpdatingCacheEntryFactor
return time;
}
}
+ @Override
+ public void delete(TimelineAppMetricCacheKey key) throws
CacheWritingException {
+ // no need to implement.
+ }
+
+ @Override
+ public Map<TimelineAppMetricCacheKey, TimelineMetricsCacheValue>
loadAll(Iterable<? extends TimelineAppMetricCacheKey> keys) throws
BulkCacheLoadingException, Exception {
+    // No need to implement; bulk loading is not used by this cache.
+ return null;
+ }
+
+ @Override
+ public void writeAll(Iterable<? extends Map.Entry<? extends
TimelineAppMetricCacheKey, ? extends TimelineMetricsCacheValue>> entries)
throws BulkCacheWritingException, Exception {
+    // No need to implement; bulk writing is not used by this cache.
+ }
+
+ @Override
+ public void deleteAll(Iterable<? extends TimelineAppMetricCacheKey> keys)
throws BulkCacheWritingException, Exception {
+    // No need to implement; bulk deletion is not used by this cache.
+ }
}
diff --git
a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCacheProvider.java
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCacheProvider.java
index f5cf852b48..55442c97ec 100644
---
a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCacheProvider.java
+++
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCacheProvider.java
@@ -17,22 +17,22 @@
*/
package org.apache.ambari.server.controller.metrics.timeline.cache;
+import java.time.Duration;
+
import org.apache.ambari.server.configuration.Configuration;
+import org.ehcache.Cache;
+import org.ehcache.CacheManager;
+import org.ehcache.config.builders.CacheConfigurationBuilder;
+import org.ehcache.config.builders.CacheManagerBuilder;
+import org.ehcache.config.builders.ResourcePoolsBuilder;
+import org.ehcache.config.units.EntryUnit;
+import org.ehcache.core.internal.statistics.DefaultStatisticsService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.inject.Inject;
import com.google.inject.Singleton;
-import net.sf.ehcache.Cache;
-import net.sf.ehcache.CacheManager;
-import net.sf.ehcache.config.CacheConfiguration;
-import net.sf.ehcache.config.PersistenceConfiguration;
-import net.sf.ehcache.config.PersistenceConfiguration.Strategy;
-import net.sf.ehcache.config.SizeOfPolicyConfiguration;
-import
net.sf.ehcache.config.SizeOfPolicyConfiguration.MaxDepthExceededBehavior;
-import net.sf.ehcache.store.MemoryStoreEvictionPolicy;
-
/**
* Cache implementation that provides ability to perform incremental reads
* from Metrics backend and reduce the amount of calls between Ambari and the
@@ -42,7 +42,6 @@ import net.sf.ehcache.store.MemoryStoreEvictionPolicy;
public class TimelineMetricCacheProvider {
private TimelineMetricCache timelineMetricsCache;
private volatile boolean isCacheInitialized = false;
- public static final String TIMELINE_METRIC_CACHE_MANAGER_NAME =
"timelineMetricCacheManager";
public static final String TIMELINE_METRIC_CACHE_INSTANCE_NAME =
"timelineMetricCache";
Configuration configuration;
@@ -58,63 +57,52 @@ public class TimelineMetricCacheProvider {
}
private synchronized void initializeCache() {
- // Check in case of contention to avoid ObjectExistsException
if (isCacheInitialized) {
return;
}
+ DefaultStatisticsService statisticsService = new
DefaultStatisticsService();
- System.setProperty("net.sf.ehcache.skipUpdateCheck", "true");
- if (configuration.useMetricsCacheCustomSizingEngine()) {
- // Use custom sizing engine to speed cache sizing calculations
- System.setProperty("net.sf.ehcache.sizeofengine." +
TIMELINE_METRIC_CACHE_MANAGER_NAME,
-
"org.apache.ambari.server.controller.metrics.timeline.cache.TimelineMetricsCacheSizeOfEngine");
- }
-
- net.sf.ehcache.config.Configuration managerConfig =
- new net.sf.ehcache.config.Configuration();
- managerConfig.setName(TIMELINE_METRIC_CACHE_MANAGER_NAME);
-
- // Set max heap available to the cache manager
-
managerConfig.setMaxBytesLocalHeap(configuration.getMetricsCacheManagerHeapPercent());
-
- //Create a singleton CacheManager using defaults
- CacheManager manager = CacheManager.create(managerConfig);
-
- LOG.info("Creating Metrics Cache with timeouts => ttl = " +
- configuration.getMetricCacheTTLSeconds() + ", idle = " +
- configuration.getMetricCacheIdleSeconds());
+ CacheManager manager = CacheManagerBuilder.newCacheManagerBuilder()
+ .using(statisticsService)
+ .build(true);
// Create a Cache specifying its configuration.
- CacheConfiguration cacheConfiguration = createCacheConfiguration();
- Cache cache = new Cache(cacheConfiguration);
+ CacheConfigurationBuilder<TimelineAppMetricCacheKey,
TimelineMetricsCacheValue> cacheConfigurationBuilder =
createCacheConfiguration();
+ Cache<TimelineAppMetricCacheKey, TimelineMetricsCacheValue> cache =
manager.createCache(TIMELINE_METRIC_CACHE_INSTANCE_NAME,
cacheConfigurationBuilder);
- // Decorate with UpdatingSelfPopulatingCache
- timelineMetricsCache = new TimelineMetricCache(cache, cacheEntryFactory);
+    // Wrap the Ehcache instance in TimelineMetricCache.
+ timelineMetricsCache = new TimelineMetricCache(cache, cacheEntryFactory,
statisticsService);
LOG.info("Registering metrics cache with provider: name = " +
- cache.getName() + ", guid: " + cache.getGuid());
-
- manager.addCache(timelineMetricsCache);
+ TIMELINE_METRIC_CACHE_INSTANCE_NAME + ", manager = " + manager);
isCacheInitialized = true;
}
// Having this as a separate public method for testing/mocking purposes
- public CacheConfiguration createCacheConfiguration() {
-
- CacheConfiguration cacheConfiguration = new CacheConfiguration()
- .name(TIMELINE_METRIC_CACHE_INSTANCE_NAME)
- .timeToLiveSeconds(configuration.getMetricCacheTTLSeconds()) // 1 hour
- .timeToIdleSeconds(configuration.getMetricCacheIdleSeconds()) // 5
minutes
- .memoryStoreEvictionPolicy(MemoryStoreEvictionPolicy.LRU)
- .sizeOfPolicy(new SizeOfPolicyConfiguration() // Set sizeOf policy to
continue on max depth reached - avoid OOM
- .maxDepth(10000)
- .maxDepthExceededBehavior(MaxDepthExceededBehavior.CONTINUE))
- .eternal(false)
- .persistence(new PersistenceConfiguration()
- .strategy(Strategy.NONE.name()));
-
- return cacheConfiguration;
+ public CacheConfigurationBuilder createCacheConfiguration() {
+ LOG.info("Creating Metrics Cache with timeouts => ttl = " +
+ configuration.getMetricCacheTTLSeconds() + ", idle = " +
+ configuration.getMetricCacheIdleSeconds() + ", cache size = " +
configuration.getMetricCacheEntryUnitSize());
+
+ TimelineMetricCacheCustomExpiry timelineMetricCacheCustomExpiry = new
TimelineMetricCacheCustomExpiry(
+ Duration.ofSeconds(configuration.getMetricCacheTTLSeconds()),
+ Duration.ofSeconds(configuration.getMetricCacheIdleSeconds())
+ );
+
+ CacheConfigurationBuilder<TimelineAppMetricCacheKey,
TimelineMetricsCacheValue> cacheConfigurationBuilder = CacheConfigurationBuilder
+ .newCacheConfigurationBuilder(
+ TimelineAppMetricCacheKey.class,
+ TimelineMetricsCacheValue.class,
+ ResourcePoolsBuilder.newResourcePoolsBuilder()
+ .heap(configuration.getMetricCacheEntryUnitSize(),
EntryUnit.ENTRIES)
+ )
+ .withKeySerializer(TimelineAppMetricCacheKeySerializer.class)
+ .withValueSerializer(TimelineMetricsCacheValueSerializer.class)
+ .withLoaderWriter(cacheEntryFactory)
+ .withExpiry(timelineMetricCacheCustomExpiry);
+
+ return cacheConfigurationBuilder;
}
/**
@@ -131,5 +119,4 @@ public class TimelineMetricCacheProvider {
}
return timelineMetricsCache;
}
-
}
diff --git
a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricsCacheSizeOfEngine.java
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricsCacheSizeOfEngine.java
deleted file mode 100644
index f3cc7038ba..0000000000
---
a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricsCacheSizeOfEngine.java
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.server.controller.metrics.timeline.cache;
-
-import java.util.Map;
-import java.util.TreeMap;
-
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import net.sf.ehcache.pool.Size;
-import net.sf.ehcache.pool.SizeOfEngine;
-import net.sf.ehcache.pool.impl.DefaultSizeOfEngine;
-import net.sf.ehcache.pool.sizeof.ReflectionSizeOf;
-import net.sf.ehcache.pool.sizeof.SizeOf;
-
-/**
- * Cache sizing engine that reduces reflective calls over the Object graph to
- * find total Heap usage.
- */
-public class TimelineMetricsCacheSizeOfEngine implements SizeOfEngine {
-
- private final static Logger LOG =
LoggerFactory.getLogger(TimelineMetricsCacheSizeOfEngine.class);
- public static final int DEFAULT_MAX_DEPTH = 1000;
- public static final boolean DEFAULT_ABORT_WHEN_MAX_DEPTH_EXCEEDED = false;
-
- private SizeOfEngine underlying = null;
- SizeOf reflectionSizeOf = new ReflectionSizeOf();
-
- // Optimizations
- private volatile long timelineMetricPrimitivesApproximation = 0;
-
- private long sizeOfMapEntry;
- private long sizeOfMapEntryOverhead;
-
- private TimelineMetricsCacheSizeOfEngine(SizeOfEngine underlying) {
- this.underlying = underlying;
- }
-
- public TimelineMetricsCacheSizeOfEngine() {
- this(new DefaultSizeOfEngine(DEFAULT_MAX_DEPTH,
DEFAULT_ABORT_WHEN_MAX_DEPTH_EXCEEDED));
-
- this.sizeOfMapEntry = reflectionSizeOf.sizeOf(new Long(1)) +
- reflectionSizeOf.sizeOf(new Double(2.0));
-
- //SizeOfMapEntryOverhead = SizeOfMapWithOneEntry - (SizeOfEmptyMap +
SizeOfOneEntry)
- TreeMap<Long, Double> map = new TreeMap<>();
- long emptyMapSize = reflectionSizeOf.sizeOf(map);
- map.put(new Long(1), new Double(2.0));
- long sizeOfMapOneEntry = reflectionSizeOf.deepSizeOf(DEFAULT_MAX_DEPTH,
DEFAULT_ABORT_WHEN_MAX_DEPTH_EXCEEDED, map).getCalculated();
- this.sizeOfMapEntryOverhead = sizeOfMapOneEntry - (emptyMapSize +
this.sizeOfMapEntry);
-
- LOG.info("Creating custom sizeof engine for TimelineMetrics.");
- }
-
- @Override
- public Size sizeOf(Object key, Object value, Object container) {
- try {
- LOG.debug("BEGIN - Sizeof, key: {}, value: {}", key, value);
-
- long size = 0;
-
- if (key instanceof TimelineAppMetricCacheKey) {
- size += getTimelineMetricCacheKeySize((TimelineAppMetricCacheKey) key);
- }
-
- if (value instanceof TimelineMetricsCacheValue) {
- size += getTimelineMetricCacheValueSize((TimelineMetricsCacheValue)
value);
- }
- // Mark size as not being exact
- return new Size(size, false);
- } finally {
- LOG.debug("END - Sizeof, key: {}", key);
- }
- }
-
- private long getTimelineMetricCacheKeySize(TimelineAppMetricCacheKey key) {
- long size = reflectionSizeOf.sizeOf(key.getAppId());
- size += key.getMetricNames() != null && !key.getMetricNames().isEmpty() ?
- reflectionSizeOf.deepSizeOf(1000, false,
key.getMetricNames()).getCalculated() : 0;
- size += key.getSpec() != null ?
- reflectionSizeOf.deepSizeOf(1000, false, key.getSpec()).getCalculated()
: 0;
- size += key.getHostNames() != null ?
- reflectionSizeOf.deepSizeOf(1000, false,
key.getHostNames()).getCalculated() : 0;
- // 4 fixed longs of @TemporalInfo + reference
- size += 40;
- size += 8; // Object overhead
-
- return size;
- }
-
- private long getTimelineMetricCacheValueSize(TimelineMetricsCacheValue
value) {
- long size = 16; // startTime + endTime
- TimelineMetrics metrics = value.getTimelineMetrics();
- size += 8; // Object reference
-
- if (metrics != null) {
- for (TimelineMetric metric : metrics.getMetrics()) {
-
- if (timelineMetricPrimitivesApproximation == 0) {
- timelineMetricPrimitivesApproximation +=
reflectionSizeOf.sizeOf(metric.getMetricName());
- timelineMetricPrimitivesApproximation +=
reflectionSizeOf.sizeOf(metric.getAppId());
- timelineMetricPrimitivesApproximation +=
reflectionSizeOf.sizeOf(metric.getHostName());
- timelineMetricPrimitivesApproximation +=
reflectionSizeOf.sizeOf(metric.getInstanceId());
- timelineMetricPrimitivesApproximation +=
reflectionSizeOf.sizeOf(metric.getStartTime());
- timelineMetricPrimitivesApproximation +=
reflectionSizeOf.sizeOf(metric.getType());
- timelineMetricPrimitivesApproximation += 8; // Object overhead
-
- LOG.debug("timelineMetricPrimitivesApproximation bytes = {}",
timelineMetricPrimitivesApproximation);
- }
- size += timelineMetricPrimitivesApproximation;
-
- Map<Long, Double> metricValues = metric.getMetricValues();
- if (metricValues != null && !metricValues.isEmpty()) {
- // Numeric wrapper: 12 bytes + 8 bytes Data type + 4 bytes alignment
= 48 (Long, Double)
- // Tree Map: 12 bytes for header + 20 bytes for 5 object fields :
pointers + 1 byte for flag = 40
- LOG.debug("Size of metric value: {}", (sizeOfMapEntry +
sizeOfMapEntryOverhead) * metricValues.size());
- size += (sizeOfMapEntry + sizeOfMapEntryOverhead) *
metricValues.size(); // Treemap size is O(1)
- }
- }
- LOG.debug("Total Size of metric values in cache: {}", size);
- }
-
- return size;
- }
-
- @Override
- public SizeOfEngine copyWith(int maxDepth, boolean
abortWhenMaxDepthExceeded) {
- LOG.debug("Copying tracing sizeof engine, maxdepth: {}, abort: {}",
- maxDepth, abortWhenMaxDepthExceeded);
-
- return new TimelineMetricsCacheSizeOfEngine(
- underlying.copyWith(maxDepth, abortWhenMaxDepthExceeded));
- }
-}
\ No newline at end of file
diff --git
a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricsCacheValueSerializer.java
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricsCacheValueSerializer.java
new file mode 100644
index 0000000000..6dd1a16176
--- /dev/null
+++
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricsCacheValueSerializer.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.controller.metrics.timeline.cache;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+import java.nio.ByteBuffer;
+
+import org.ehcache.spi.serialization.Serializer;
+import org.ehcache.spi.serialization.SerializerException;
+
+public class TimelineMetricsCacheValueSerializer implements
Serializer<TimelineMetricsCacheValue> {
+ private final ClassLoader classLoader;
+ public TimelineMetricsCacheValueSerializer(ClassLoader classLoader) {
+ this.classLoader = classLoader;
+ }
+ @Override
+ public ByteBuffer serialize(TimelineMetricsCacheValue value) throws
SerializerException {
+ try {
+ ByteArrayOutputStream byteArrayOutputStream = new
ByteArrayOutputStream();
+ ObjectOutputStream objectOutputStream = new
ObjectOutputStream(byteArrayOutputStream);
+ objectOutputStream.writeObject(value);
+ objectOutputStream.close();
+ return ByteBuffer.wrap(byteArrayOutputStream.toByteArray());
+ } catch (Exception e) {
+ throw new SerializerException(e);
+ }
+ }
+
+ @Override
+ public TimelineMetricsCacheValue read(ByteBuffer binary) throws
ClassNotFoundException, SerializerException {
+ try {
+ ByteArrayInputStream byteArrayInputStream = new
ByteArrayInputStream(binary.array());
+ ObjectInputStream objectInputStream = new
ObjectInputStream(byteArrayInputStream);
+ return (TimelineMetricsCacheValue) objectInputStream.readObject();
+ } catch (IOException | ClassNotFoundException e) {
+ throw new SerializerException("Error during deserialization", e);
+ }
+ }
+
+ @Override
+ public boolean equals(TimelineMetricsCacheValue value, ByteBuffer binary)
throws ClassNotFoundException, SerializerException {
+ try {
+ ByteArrayInputStream byteArrayInputStream = new
ByteArrayInputStream(binary.array());
+ ObjectInputStream objectInputStream = new
ObjectInputStream(byteArrayInputStream);
+ TimelineMetricsCacheValue deserializedValue =
(TimelineMetricsCacheValue) objectInputStream.readObject();
+
+ // Now compare value and deserializedValue
+ if (value == deserializedValue) return true;
+ if (deserializedValue == null || (value.getClass() !=
deserializedValue.getClass())) return false;
+
+ if (!value.getStartTime().equals(deserializedValue.getStartTime()))
return false;
+ if (!value.getEndTime().equals(deserializedValue.getEndTime()))
return false;
+ if
(!value.getTimelineMetrics().equals(deserializedValue.getTimelineMetrics()))
return false;
+ return value.getPrecision() == deserializedValue.getPrecision();
+
+ } catch (IOException e) {
+ throw new SerializerException("Error during deserialization", e);
+ }
+ }
+}
diff --git
a/ambari-server/src/main/java/org/apache/ambari/server/controller/utilities/ServiceCalculatedStateFactory.java
b/ambari-server/src/main/java/org/apache/ambari/server/controller/utilities/ServiceCalculatedStateFactory.java
index 935361cb7e..7330c53ad5 100644
---
a/ambari-server/src/main/java/org/apache/ambari/server/controller/utilities/ServiceCalculatedStateFactory.java
+++
b/ambari-server/src/main/java/org/apache/ambari/server/controller/utilities/ServiceCalculatedStateFactory.java
@@ -19,6 +19,7 @@
package org.apache.ambari.server.controller.utilities;
import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
import
org.apache.ambari.server.controller.utilities.state.DefaultServiceCalculatedState;
import
org.apache.ambari.server.controller.utilities.state.FlumeServiceCalculatedState;
@@ -32,7 +33,6 @@ import org.apache.ambari.server.state.Service;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import net.sf.ehcache.util.concurrent.ConcurrentHashMap;
public class ServiceCalculatedStateFactory {
diff --git
a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackDefinedPropertyProviderTest.java
b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackDefinedPropertyProviderTest.java
index 2d4e25d91e..78b4f1feae 100644
---
a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackDefinedPropertyProviderTest.java
+++
b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackDefinedPropertyProviderTest.java
@@ -119,7 +119,7 @@ public class StackDefinedPropertyProviderTest {
public void setup() throws Exception {
InMemoryDefaultTestModule module = new InMemoryDefaultTestModule();
// Use the same cache provider to ensure there is only one instance of
- // Cache available. The @net.sf.ehcache.CacheManager is a singleton and
+ // Cache available. The @org.ehcache.CacheManager is a singleton and
// does not allow multiple instances with same cache name to be registered.
injector = Guice.createInjector(Modules.override(module).with(new
TestModuleWithCacheProvider()));
injector.getInstance(GuiceJpaInitializer.class);
diff --git
a/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProviderTest.java
b/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProviderTest.java
index a1ff9132b6..f3253b0c6b 100644
---
a/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProviderTest.java
+++
b/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProviderTest.java
@@ -82,7 +82,7 @@ import
org.springframework.security.core.context.SecurityContextHolder;
@RunWith(PowerMockRunner.class)
@PrepareForTest({AMSPropertyProvider.class, AmbariServer.class})
-@PowerMockIgnore({"javax.xml.parsers.*", "org.xml.sax.*", "net.sf.ehcache.*",
"org.apache.log4j.*"})
+@PowerMockIgnore({"javax.xml.parsers.*", "org.xml.sax.*", "org.ehcache.*",
"org.apache.log4j.*"})
public class AMSPropertyProviderTest {
private static final String PROPERTY_ID1 =
PropertyHelper.getPropertyId("metrics/cpu", "cpu_user");
private static final String PROPERTY_ID2 =
PropertyHelper.getPropertyId("metrics/memory", "mem_free");
diff --git
a/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCacheSizingTest.java
b/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCacheSizingTest.java
deleted file mode 100644
index cd76e2cb13..0000000000
---
a/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCacheSizingTest.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.server.controller.metrics.timeline.cache;
-
-import java.util.HashSet;
-import java.util.Set;
-import java.util.TreeMap;
-
-import org.apache.ambari.server.controller.internal.TemporalInfoImpl;
-import org.apache.ambari.server.controller.spi.TemporalInfo;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-import org.junit.Assert;
-import org.junit.Test;
-
-import net.sf.ehcache.pool.sizeof.ReflectionSizeOf;
-import net.sf.ehcache.pool.sizeof.SizeOf;
-
-public class TimelineMetricCacheSizingTest {
-
- SizeOf reflectionSizeOf = new ReflectionSizeOf();
-
- private TimelineMetric getSampleTimelineMetric(String metricName) {
- TimelineMetric metric = new TimelineMetric();
- metric.setMetricName(metricName);
- metric.setAppId("KAFKA_BROKER");
- metric.setInstanceId("NULL");
- metric.setHostName("my.privatehostname.of.average.length");
- metric.setStartTime(System.currentTimeMillis());
- metric.setType("LONG");
-
- // JSON deserialization gives a LinkedHashMap
- TreeMap<Long, Double> valueMap = new TreeMap<>();
- long now = System.currentTimeMillis();
- for (int i = 0; i < 50000; i++) {
- valueMap.put(new Long(now + i), new Double(1.0 + i));
- }
-
- metric.setMetricValues(valueMap);
-
- return metric;
- }
-
- @Test
- public void testTimelineMetricCacheSizing() throws Exception {
- Set<String> metricNames = new HashSet<>();
- String metric1 =
"prefix1.suffix1.suffix2.actualNamePrefix.longMetricName1";
- String metric2 =
"prefix1.suffix1.suffix2.actualNamePrefix.longMetricName2";
- String metric3 =
"prefix1.suffix1.suffix2.actualNamePrefix.longMetricName3";
- String metric4 =
"prefix1.suffix1.suffix2.actualNamePrefix.longMetricName4";
- String metric5 =
"prefix1.suffix1.suffix2.actualNamePrefix.longMetricName5";
- String metric6 =
"prefix1.suffix1.suffix2.actualNamePrefix.longMetricName6";
-
- metricNames.add(metric1);
- metricNames.add(metric2);
- metricNames.add(metric3);
- metricNames.add(metric4);
- metricNames.add(metric5);
- metricNames.add(metric6);
-
- long now = System.currentTimeMillis();
- TemporalInfo temporalInfo = new TemporalInfoImpl(now - 1000, now, 15);
-
- TimelineAppMetricCacheKey key = new TimelineAppMetricCacheKey(
- metricNames, "KAFKA_BROKER", temporalInfo);
- // Some random spec
-
key.setSpec("http://104.196.94.129:6188/ws/v1/timeline/metrics?metricNames=" +
- "jvm.JvmMetrics.MemHeapCommittedM&appId=RESOURCEMANAGER&" +
- "startTime=1439522640000&endTime=1440127440000&precision=hours");
-
- TimelineMetrics metrics = new TimelineMetrics();
- metrics.getMetrics().add(getSampleTimelineMetric(metric1));
- metrics.getMetrics().add(getSampleTimelineMetric(metric2));
- metrics.getMetrics().add(getSampleTimelineMetric(metric3));
- metrics.getMetrics().add(getSampleTimelineMetric(metric4));
- metrics.getMetrics().add(getSampleTimelineMetric(metric5));
- metrics.getMetrics().add(getSampleTimelineMetric(metric6));
-
- TimelineMetricsCacheValue value = new TimelineMetricsCacheValue(now -
- 1000, now, metrics, null);
-
- TimelineMetricsCacheSizeOfEngine customSizeOfEngine = new
TimelineMetricsCacheSizeOfEngine();
-
- long bytesFromReflectionEngine =
- reflectionSizeOf.deepSizeOf(50000, false, key).getCalculated() +
- reflectionSizeOf.deepSizeOf(50000, false, value).getCalculated();
-
- long bytesFromCustomSizeOfEngine = customSizeOfEngine.sizeOf(key, value,
null).getCalculated();
-
- long sampleSizeInMB = bytesFromReflectionEngine / (1024 * 1024);
- long discrepancyInKB = Math.abs(bytesFromCustomSizeOfEngine -
bytesFromReflectionEngine) / 1024;
-
- Assert.assertTrue("Sample size is " + sampleSizeInMB + ", expected to be" +
- "greater that 10 MB", sampleSizeInMB > 10);
- Assert.assertTrue("Discrepancy in values is " + discrepancyInKB + ", " +
- "expected to be less than 10K. " + "Bytes from reflection = " +
- bytesFromReflectionEngine + ", bytes from custom sizing engine = " +
- bytesFromCustomSizeOfEngine, discrepancyInKB < 10);
- }
-}
diff --git
a/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCacheTest.java
b/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCacheTest.java
index 9b3395635c..e0484f61ce 100644
---
a/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCacheTest.java
+++
b/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCacheTest.java
@@ -17,6 +17,7 @@
*/
package org.apache.ambari.server.controller.metrics.timeline.cache;
+import static junit.framework.Assert.assertNotNull;
import static
org.apache.ambari.server.controller.metrics.timeline.cache.TimelineMetricCacheProvider.TIMELINE_METRIC_CACHE_INSTANCE_NAME;
import static org.easymock.EasyMock.anyLong;
import static org.easymock.EasyMock.anyObject;
@@ -24,19 +25,20 @@ import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.createMockBuilder;
import static org.easymock.EasyMock.createNiceMock;
import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.easymock.EasyMock.getCurrentArguments;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import java.lang.reflect.Field;
+import java.time.Duration;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import java.util.TreeMap;
import org.apache.ambari.server.configuration.Configuration;
@@ -47,98 +49,101 @@ import
org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
import org.apache.http.client.utils.URIBuilder;
import org.easymock.EasyMock;
-import org.easymock.IAnswer;
-import org.junit.After;
+import org.ehcache.Cache;
+import org.ehcache.CacheManager;
+import org.ehcache.config.builders.CacheConfigurationBuilder;
+import org.ehcache.config.builders.CacheManagerBuilder;
+import org.ehcache.config.builders.ResourcePoolsBuilder;
+import org.ehcache.config.units.EntryUnit;
+import org.ehcache.core.internal.statistics.DefaultStatisticsService;
import org.junit.Test;
import junit.framework.Assert;
-import net.sf.ehcache.Cache;
-import net.sf.ehcache.CacheManager;
-import net.sf.ehcache.config.CacheConfiguration;
-import net.sf.ehcache.config.PersistenceConfiguration;
-import net.sf.ehcache.config.SizeOfPolicyConfiguration;
-import net.sf.ehcache.constructs.blocking.UpdatingCacheEntryFactory;
-import net.sf.ehcache.constructs.blocking.UpdatingSelfPopulatingCache;
-import net.sf.ehcache.store.MemoryStoreEvictionPolicy;
public class TimelineMetricCacheTest {
- @After
- public void removeCacheInstance() {
- // Avoids Object Exists Exception on unit tests by adding a new cache for
- // every provider.
- CacheManager manager = CacheManager.getInstance();
- manager.removeAllCaches();
- }
-
// General cache behavior demonstration
@Test
- public void testSelfPopulatingCacheUpdates() throws Exception {
- UpdatingCacheEntryFactory cacheEntryFactory =
createMock(UpdatingCacheEntryFactory.class);
-
- StringBuilder value = new StringBuilder("b");
-
- expect(cacheEntryFactory.createEntry("a")).andReturn(value);
- cacheEntryFactory.updateEntryValue("a", value);
- expectLastCall().andAnswer(new IAnswer<Object>() {
- @Override
- public Object answer() throws Throwable {
- String key = (String) getCurrentArguments()[0];
- StringBuilder value = (StringBuilder) getCurrentArguments()[1];
- System.out.println("key = " + key + ", value = " + value);
- value.append("c");
- return null;
- }
- });
+ public void testTimelineMetricCache() throws Exception {
+ TimelineMetricCacheEntryFactory cacheEntryFactory =
createMock(TimelineMetricCacheEntryFactory.class);
+
+ final long now = System.currentTimeMillis();
+ TimelineMetrics metrics = new TimelineMetrics();
+ TimelineMetric timelineMetric = new TimelineMetric();
+ timelineMetric.setMetricName("cpu_user");
+ timelineMetric.setAppId("app1");
+ TreeMap<Long, Double> metricValues = new TreeMap<>();
+ metricValues.put(now + 100, 1.0);
+ metricValues.put(now + 200, 2.0);
+ metricValues.put(now + 300, 3.0);
+ timelineMetric.setMetricValues(metricValues);
+ metrics.getMetrics().add(timelineMetric);
+ TimelineMetricsCacheValue testValue = new TimelineMetricsCacheValue(now,
now + 1000, metrics, null);
+
+ Set<String> metricNames = new HashSet<>(Arrays.asList("metric1",
"metric2"));
+ String appId = "appId1";
+ String instanceId = "instanceId1";
+ TemporalInfo temporalInfo = new TemporalInfoImpl(100L, 200L, 1);
+ TimelineAppMetricCacheKey testKey = new
TimelineAppMetricCacheKey(metricNames, appId, instanceId, temporalInfo);
+
+ expect(cacheEntryFactory.load(testKey)).andReturn(testValue).anyTimes();
replay(cacheEntryFactory);
- // Need to set this due to what seems like a bug in Ehcache 2.10.0, setting
- // it on the second cache instance results in a assertion error.
- // Since this is not out production use case, setting it here as well.
- net.sf.ehcache.config.Configuration managerConfig = new
net.sf.ehcache.config.Configuration();
- managerConfig.setMaxBytesLocalHeap("10%");
- CacheManager manager = CacheManager.create(managerConfig);
- Cache cache = new Cache("test", 0, false, false, 10000, 10000);
- UpdatingSelfPopulatingCache testCache = new
UpdatingSelfPopulatingCache(cache, cacheEntryFactory);
- manager.addCache(testCache);
+ Configuration configuration = createNiceMock(Configuration.class);
+ expect(configuration.getMetricCacheTTLSeconds()).andReturn(3600);
+ expect(configuration.getMetricCacheIdleSeconds()).andReturn(1800);
+
expect(configuration.getMetricCacheEntryUnitSize()).andReturn(100).anyTimes();
+ replay(configuration);
+
+ DefaultStatisticsService statisticsService = new
DefaultStatisticsService();
+ CacheManager manager = CacheManagerBuilder.newCacheManagerBuilder()
+ .using(statisticsService)
+ .build(true);
- Assert.assertEquals("b", testCache.get("a").getObjectValue().toString());
- Assert.assertEquals("bc", testCache.get("a").getObjectValue().toString());
+ CacheConfigurationBuilder<TimelineAppMetricCacheKey,
TimelineMetricsCacheValue> cacheConfigurationBuilder =
createTestCacheConfiguration(configuration, cacheEntryFactory);
+ Cache<TimelineAppMetricCacheKey, TimelineMetricsCacheValue> cache =
manager.createCache(TIMELINE_METRIC_CACHE_INSTANCE_NAME,
cacheConfigurationBuilder);
+ TimelineMetricCache testCache = new TimelineMetricCache(cache,
cacheEntryFactory, statisticsService);
+
+ TimelineMetrics testTimelineMetrics =
testCache.getAppTimelineMetricsFromCache(testKey);
+ Assert.assertEquals(metrics, testTimelineMetrics);
verify(cacheEntryFactory);
}
- private CacheConfiguration createTestCacheConfiguration(Configuration
configuration) {
-
- CacheConfiguration cacheConfiguration = new CacheConfiguration()
- .name(TIMELINE_METRIC_CACHE_INSTANCE_NAME)
- .timeToLiveSeconds(configuration.getMetricCacheTTLSeconds()) // 1 hour
- .timeToIdleSeconds(configuration.getMetricCacheIdleSeconds()) // 5
minutes
- .memoryStoreEvictionPolicy(MemoryStoreEvictionPolicy.LRU)
- .sizeOfPolicy(new SizeOfPolicyConfiguration() // Set sizeOf policy to
continue on max depth reached - avoid OOM
- .maxDepth(10000)
-
.maxDepthExceededBehavior(SizeOfPolicyConfiguration.MaxDepthExceededBehavior.CONTINUE))
- .eternal(false)
- .persistence(new PersistenceConfiguration()
- .strategy(PersistenceConfiguration.Strategy.NONE.name()));
-
- cacheConfiguration.setMaxBytesLocalHeap(20*1024*1024l);
- return cacheConfiguration;
+ private CacheConfigurationBuilder createTestCacheConfiguration(Configuration
configuration, TimelineMetricCacheEntryFactory cacheEntryFactory){
+
+
+ TimelineMetricCacheCustomExpiry timelineMetricCacheCustomExpiry = new
TimelineMetricCacheCustomExpiry(
+ Duration.ofSeconds(configuration.getMetricCacheTTLSeconds()), //
TTL
+ Duration.ofSeconds(configuration.getMetricCacheIdleSeconds()) //
TTI
+ );
+
+ CacheConfigurationBuilder<TimelineAppMetricCacheKey,
TimelineMetricsCacheValue> cacheConfigurationBuilder = CacheConfigurationBuilder
+ .newCacheConfigurationBuilder(
+ TimelineAppMetricCacheKey.class,
+ TimelineMetricsCacheValue.class,
+ ResourcePoolsBuilder.newResourcePoolsBuilder()
+ .heap(configuration.getMetricCacheEntryUnitSize(),
EntryUnit.ENTRIES)
+ )
+ .withKeySerializer(TimelineAppMetricCacheKeySerializer.class)
+ .withValueSerializer(TimelineMetricsCacheValueSerializer.class)
+ .withLoaderWriter(cacheEntryFactory)
+ .withExpiry(timelineMetricCacheCustomExpiry);
+
+ return cacheConfigurationBuilder;
}
@Test
public void testTimelineMetricCacheProviderGets() throws Exception {
Configuration configuration = createNiceMock(Configuration.class);
expect(configuration.getMetricCacheTTLSeconds()).andReturn(3600);
- expect(configuration.getMetricCacheIdleSeconds()).andReturn(100);
-
expect(configuration.getMetricsCacheManagerHeapPercent()).andReturn("10%").anyTimes();
-
+ expect(configuration.getMetricCacheIdleSeconds()).andReturn(1800);
+
expect(configuration.getMetricCacheEntryUnitSize()).andReturn(100).anyTimes();
replay(configuration);
final long now = System.currentTimeMillis();
TimelineMetrics metrics = new TimelineMetrics();
-
TimelineMetric timelineMetric = new TimelineMetric();
timelineMetric.setMetricName("cpu_user");
timelineMetric.setAppId("app1");
@@ -149,7 +154,6 @@ public class TimelineMetricCacheTest {
timelineMetric.setMetricValues(metricValues);
metrics.getMetrics().add(timelineMetric);
-
TimelineMetricCacheEntryFactory cacheEntryFactory =
createMock(TimelineMetricCacheEntryFactory.class);
TimelineAppMetricCacheKey queryKey = new TimelineAppMetricCacheKey(
@@ -164,10 +168,7 @@ public class TimelineMetricCacheTest {
new TemporalInfoImpl(now, now + 2000, 1)
);
- expect(cacheEntryFactory.createEntry(anyObject())).andReturn(value);
- cacheEntryFactory.updateEntryValue(testKey, value);
- expectLastCall().once();
-
+ expect(cacheEntryFactory.load(anyObject())).andReturn(value);
replay(cacheEntryFactory);
TimelineMetricCacheProvider cacheProvider =
createMockBuilder(TimelineMetricCacheProvider.class)
@@ -175,7 +176,7 @@ public class TimelineMetricCacheTest {
.withConstructor(configuration, cacheEntryFactory)
.createNiceMock();
-
expect(cacheProvider.createCacheConfiguration()).andReturn(createTestCacheConfiguration(configuration)).anyTimes();
+
expect(cacheProvider.createCacheConfiguration()).andReturn(createTestCacheConfiguration(configuration,
cacheEntryFactory)).anyTimes();
replay(cacheProvider);
TimelineMetricCache cache = cacheProvider.getTimelineMetricsCache();
@@ -365,8 +366,8 @@ public class TimelineMetricCacheTest {
}
}
- Assert.assertNotNull(newMetric1);
- Assert.assertNotNull(newMetric2);
+ assertNotNull(newMetric1);
+ assertNotNull(newMetric2);
Assert.assertEquals(3, newMetric1.getMetricValues().size());
Assert.assertEquals(3, newMetric2.getMetricValues().size());
Map<Long, Double> newMetricsMap = newMetric1.getMetricValues();
@@ -407,8 +408,8 @@ public class TimelineMetricCacheTest {
public void testTimelineMetricCachePrecisionUpdates() throws Exception {
Configuration configuration = createNiceMock(Configuration.class);
expect(configuration.getMetricCacheTTLSeconds()).andReturn(3600);
- expect(configuration.getMetricCacheIdleSeconds()).andReturn(100);
-
expect(configuration.getMetricsCacheManagerHeapPercent()).andReturn("10%").anyTimes();
+ expect(configuration.getMetricCacheIdleSeconds()).andReturn(1800);
+
expect(configuration.getMetricCacheEntryUnitSize()).andReturn(100).anyTimes();
expect(configuration.getMetricRequestBufferTimeCatchupInterval()).andReturn(1000l).anyTimes();
replay(configuration);
@@ -422,16 +423,16 @@ public class TimelineMetricCacheTest {
//Original Values
Map<String, TimelineMetric> valueMap = new HashMap<>();
TimelineMetric timelineMetric = new TimelineMetric();
- timelineMetric.setMetricName("cpu_user");
+ timelineMetric.setMetricName("cpu_user1");
timelineMetric.setAppId("app1");
TreeMap<Long, Double> metricValues = new TreeMap<>();
for (long i = 1 * year - 1 * day; i >= 0; i -= 1 * day) {
- metricValues.put(now-i, 1.0);
+ metricValues.put(now - i, 1.0);
}
timelineMetric.setMetricValues(metricValues);
- valueMap.put("cpu_user", timelineMetric);
+ valueMap.put("cpu_user1", timelineMetric);
List<TimelineMetric> timelineMetricList = new ArrayList<>();
timelineMetricList.add(timelineMetric);
@@ -439,7 +440,7 @@ public class TimelineMetricCacheTest {
metrics.setMetrics(timelineMetricList);
TimelineAppMetricCacheKey key = new TimelineAppMetricCacheKey(
- Collections.singleton("cpu_user"),
+ Collections.singleton("cpu_user1"),
"app1",
new TemporalInfoImpl(now-1*year, now, 1)
);
@@ -448,8 +449,8 @@ public class TimelineMetricCacheTest {
//Updated values
Map<String, TimelineMetric> newValueMap = new HashMap<>();
TimelineMetric newTimelineMetric = new TimelineMetric();
- newTimelineMetric.setMetricName("cpu_user");
- newTimelineMetric.setAppId("app1");
+ newTimelineMetric.setMetricName("cpu_user2");
+ newTimelineMetric.setAppId("app2");
TreeMap<Long, Double> newMetricValues = new TreeMap<>();
for(long i=1*hour;i<=2*day;i+=hour) {
@@ -457,7 +458,7 @@ public class TimelineMetricCacheTest {
}
newTimelineMetric.setMetricValues(newMetricValues);
- newValueMap.put("cpu_user", newTimelineMetric);
+ newValueMap.put("cpu_user2", newTimelineMetric);
List<TimelineMetric> newTimelineMetricList = new ArrayList<>();
newTimelineMetricList.add(newTimelineMetric);
@@ -465,8 +466,8 @@ public class TimelineMetricCacheTest {
newMetrics.setMetrics(newTimelineMetricList);
TimelineAppMetricCacheKey newKey = new TimelineAppMetricCacheKey(
- Collections.singleton("cpu_user"),
- "app1",
+ Collections.singleton("cpu_user2"),
+ "app2",
new TemporalInfoImpl(now - 1 * day, now + 2 * day, 1)
);
newKey.setSpec("");
@@ -495,7 +496,7 @@ public class TimelineMetricCacheTest {
.withConstructor(configuration, cacheEntryFactory)
.createNiceMock();
-
expect(cacheProvider.createCacheConfiguration()).andReturn(createTestCacheConfiguration(configuration)).anyTimes();
+
expect(cacheProvider.createCacheConfiguration()).andReturn(createTestCacheConfiguration(configuration,
cacheEntryFactory)).anyTimes();
replay(cacheProvider);
TimelineMetricCache cache = cacheProvider.getTimelineMetricsCache();
@@ -505,16 +506,21 @@ public class TimelineMetricCacheTest {
List<TimelineMetric> metricsList = metrics.getMetrics();
Assert.assertEquals(1, metricsList.size());
TimelineMetric metric = metricsList.iterator().next();
- Assert.assertEquals("cpu_user", metric.getMetricName());
+ Assert.assertEquals("cpu_user1", metric.getMetricName());
Assert.assertEquals("app1", metric.getAppId());
Assert.assertEquals(metricValues, metric.getMetricValues());
+ System.out.println("first call values: " + metric.getMetricValues());
+ System.out.println();
+
// call to update with new key
metrics = cache.getAppTimelineMetricsFromCache(newKey);
metricsList = metrics.getMetrics();
Assert.assertEquals(1, metricsList.size());
- Assert.assertEquals("cpu_user", metric.getMetricName());
- Assert.assertEquals("app1", metric.getAppId());
+ metric = metricsList.iterator().next();
+ Assert.assertEquals("cpu_user2", metric.getMetricName());
+ Assert.assertEquals("app2", metric.getAppId());
+ System.out.println("Second call values: " + metric.getMetricValues());
Assert.assertEquals(newMetricValues, metric.getMetricValues());
verify(configuration, metricsRequestHelperForGets, cacheEntryFactory);
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]