This is an automated email from the ASF dual-hosted git repository.
wusheng pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/skywalking.git
The following commit(s) were added to refs/heads/master by this push:
new 1473c04 Introduce label system to histogram (#5425)
1473c04 is described below
commit 1473c04d3e0973d08e4e478dbbccaa3873377e7c
Author: Gao Hongtao <[email protected]>
AuthorDate: Fri Sep 4 14:49:47 2020 +0800
Introduce label system to histogram (#5425)
---
docs/en/setup/backend/backend-fetcher.md | 12 +-
.../provider/meter/process/MeterBuilder.java | 2 +-
.../meter/function/AvgHistogramFunction.java | 37 +++---
.../function/AvgHistogramPercentileFunction.java | 112 ++++++++++++-------
.../analysis/meter/function/BucketedValues.java | 16 ++-
.../analysis/meter/function/HistogramFunction.java | 2 +-
.../meter/function/PercentileFunction.java | 2 +-
.../server/core/analysis/metrics/DataTable.java | 3 +-
.../promethues/PrometheusMetricConverter.java | 93 ++++++++++------
.../metric/promethues/operation/Operation.java | 3 +
.../core/metric/promethues/rule/MetricsRule.java | 2 +
.../oap/server/core/query/type/HeatMap.java | 23 +++-
...tionTest.java => AvgHistogramFunctionTest.java} | 124 +++++++++++++--------
...ava => AvgHistogramPercentileFunctionTest.java} | 82 ++++----------
.../meter/function/HistogramFunctionTest.java | 8 +-
.../meter/function/PercentileFunctionTest.java | 4 +-
.../library/util/prometheus/parser/Context.java | 41 ++++---
.../util/prometheus/parser/TextParserTest.java | 14 +++
.../src/test/resources/testdata/prometheus.txt | 24 ++--
19 files changed, 363 insertions(+), 241 deletions(-)
diff --git a/docs/en/setup/backend/backend-fetcher.md
b/docs/en/setup/backend/backend-fetcher.md
index cff6e27..01ebf4a 100644
--- a/docs/en/setup/backend/backend-fetcher.md
+++ b/docs/en/setup/backend/backend-fetcher.md
@@ -60,6 +60,10 @@ name: <string>
scope: <string>
# The transformation operation from prometheus metrics to skywalking ones.
operation: <operation>
+# The percentile rank of percentile operation
+[percentiles: [<rank>,...]]
+# bucketUnit indicates the unit of the histogram bucket; it should be one of
MILLISECONDS, SECONDS, MINUTES, HOURS, DAYS
+[bucketUnit: <string>]
# The prometheus sources of the transformation operation.
sources:
# The prometheus metric family name
@@ -68,8 +72,12 @@ sources:
[counterFunction: <string> ]
# The range of a counterFunction.
[range: <duration>]
- # The percentile rank of percentile operation
- [percentiles: [<rank>,...]]
+ # Aggregate metrics grouped by dedicated labels
+ [groupBy: [<labelname>, ...]]
+ # Set up the scale of the analysis result
+ [scale: <integer>]
+ # Filter target metrics by dedicated labels
+ [labelFilter: [<filterRule>, ...]]
# Relabel prometheus labels to skywalking dimensions.
relabel:
service: [<labelname>, ...]
diff --git
a/oap-server/analyzer/agent-analyzer/src/main/java/org/apache/skywalking/oap/server/analyzer/provider/meter/process/MeterBuilder.java
b/oap-server/analyzer/agent-analyzer/src/main/java/org/apache/skywalking/oap/server/analyzer/provider/meter/process/MeterBuilder.java
index b8c61fc..aa0f421 100644
---
a/oap-server/analyzer/agent-analyzer/src/main/java/org/apache/skywalking/oap/server/analyzer/provider/meter/process/MeterBuilder.java
+++
b/oap-server/analyzer/agent-analyzer/src/main/java/org/apache/skywalking/oap/server/analyzer/provider/meter/process/MeterBuilder.java
@@ -101,7 +101,7 @@ public class MeterBuilder {
final EvalData combinedHistogramData =
values.combineAsSingleData();
if (combinedHistogramData instanceof EvalHistogramData) {
final EvalHistogramData histogram = (EvalHistogramData)
combinedHistogramData;
- int[] buckets = new int[histogram.getBuckets().size()];
+ long[] buckets = new long[histogram.getBuckets().size()];
long[] bucketValues = new
long[histogram.getBuckets().size()];
int i = 0;
for (Map.Entry<Double, Long> entry :
histogram.getBuckets().entrySet()) {
diff --git
a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/AvgHistogramFunction.java
b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/AvgHistogramFunction.java
index 22985cf..4522496 100644
---
a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/AvgHistogramFunction.java
+++
b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/AvgHistogramFunction.java
@@ -18,9 +18,8 @@
package org.apache.skywalking.oap.server.core.analysis.meter.function;
-import java.util.Comparator;
+import com.google.common.base.Strings;
import java.util.HashMap;
-import java.util.List;
import java.util.Map;
import java.util.Objects;
import lombok.Getter;
@@ -32,6 +31,7 @@ import
org.apache.skywalking.oap.server.core.UnexpectedException;
import org.apache.skywalking.oap.server.core.analysis.meter.MeterEntity;
import org.apache.skywalking.oap.server.core.analysis.metrics.DataTable;
import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
+import org.apache.skywalking.oap.server.core.query.type.Bucket;
import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
@@ -83,33 +83,38 @@ public abstract class AvgHistogramFunction extends Metrics
implements Acceptable
this.entityId = entity.id();
+ String template = "%s";
+ if (!Strings.isNullOrEmpty(value.getGroup())) {
+ template = value.getGroup() + ":%s";
+ }
final long[] values = value.getValues();
for (int i = 0; i < values.length; i++) {
- String bucketName = String.valueOf(value.getBuckets()[i]);
- summation.valueAccumulation(bucketName, values[i]);
- count.valueAccumulation(bucketName, 1L);
+ long bucket = value.getBuckets()[i];
+ String bucketName = bucket == Long.MIN_VALUE ?
Bucket.INFINITE_NEGATIVE : String.valueOf(bucket);
+ String key = String.format(template, bucketName);
+ summation.valueAccumulation(key, values[i]);
+ count.valueAccumulation(key, 1L);
}
}
@Override
public void combine(final Metrics metrics) {
AvgHistogramFunction histogram = (AvgHistogramFunction) metrics;
-
- if (!summation.keysEqual(histogram.getSummation())) {
- log.warn("Incompatible input [{}}] for current
HistogramFunction[{}], entity {}",
- histogram, this, entityId
- );
- return;
- }
this.summation.append(histogram.summation);
this.count.append(histogram.count);
}
@Override
public void calculate() {
- final List<String> sortedKeys =
summation.sortedKeys(Comparator.comparingInt(Integer::parseInt));
- for (String key : sortedKeys) {
- dataset.put(key, summation.get(key) / count.get(key));
+ for (String key : summation.keys()) {
+ long value = 0;
+ if (count.get(key) != 0) {
+ value = summation.get(key) / count.get(key);
+ if (value == 0L && summation.get(key) > 0L) {
+ value = 1;
+ }
+ }
+ dataset.put(key, value);
}
}
@@ -146,6 +151,7 @@ public abstract class AvgHistogramFunction extends Metrics
implements Acceptable
this.setCount(new DataTable(remoteData.getDataObjectStrings(0)));
this.setSummation(new DataTable(remoteData.getDataObjectStrings(1)));
+ this.setDataset(new DataTable(remoteData.getDataObjectStrings(2)));
}
@Override
@@ -157,6 +163,7 @@ public abstract class AvgHistogramFunction extends Metrics
implements Acceptable
remoteBuilder.addDataObjectStrings(count.toStorageData());
remoteBuilder.addDataObjectStrings(summation.toStorageData());
+ remoteBuilder.addDataObjectStrings(dataset.toStorageData());
return remoteBuilder;
}
diff --git
a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/AvgHistogramPercentileFunction.java
b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/AvgHistogramPercentileFunction.java
index 19de02c..8c891ff 100644
---
a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/AvgHistogramPercentileFunction.java
+++
b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/AvgHistogramPercentileFunction.java
@@ -18,11 +18,16 @@
package org.apache.skywalking.oap.server.core.analysis.meter.function;
+import com.google.common.base.Strings;
+import io.vavr.Tuple;
+import io.vavr.Tuple2;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
+import java.util.Set;
+import java.util.stream.Collector;
import java.util.stream.IntStream;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
@@ -35,10 +40,14 @@ import
org.apache.skywalking.oap.server.core.analysis.metrics.DataTable;
import org.apache.skywalking.oap.server.core.analysis.metrics.IntList;
import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import
org.apache.skywalking.oap.server.core.analysis.metrics.MultiIntValuesHolder;
+import org.apache.skywalking.oap.server.core.query.type.Bucket;
import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
+import static java.util.stream.Collectors.groupingBy;
+import static java.util.stream.Collectors.mapping;
+
/**
* AvgPercentile intends to calculate percentile based on the average of raw
values over the interval(minute, hour or day).
*
@@ -53,6 +62,7 @@ import
org.apache.skywalking.oap.server.core.storage.annotation.Column;
@MeterFunction(functionName = "avgHistogramPercentile")
@Slf4j
public abstract class AvgHistogramPercentileFunction extends Metrics
implements
AcceptableValue<AvgHistogramPercentileFunction.AvgPercentileArgument>,
MultiIntValuesHolder {
+ private static final String DEFAULT_GROUP = "pD";
public static final String DATASET = "dataset";
public static final String RANKS = "ranks";
public static final String VALUE = "value";
@@ -125,11 +135,17 @@ public abstract class AvgHistogramPercentileFunction
extends Metrics implements
this.entityId = entity.id();
+ String template = "%s";
+ if (!Strings.isNullOrEmpty(value.getBucketedValues().getGroup())) {
+ template = value.getBucketedValues().getGroup() + ":%s";
+ }
final long[] values = value.getBucketedValues().getValues();
for (int i = 0; i < values.length; i++) {
- String bucketName =
String.valueOf(value.getBucketedValues().getBuckets()[i]);
- summation.valueAccumulation(bucketName, values[i]);
- count.valueAccumulation(bucketName, 1L);
+ long bucket = value.getBucketedValues().getBuckets()[i];
+ String bucketName = bucket == Long.MIN_VALUE ?
Bucket.INFINITE_NEGATIVE : String.valueOf(bucket);
+ String key = String.format(template, bucketName);
+ summation.valueAccumulation(key, values[i]);
+ count.valueAccumulation(key, 1L);
}
this.isCalculated = false;
@@ -139,12 +155,6 @@ public abstract class AvgHistogramPercentileFunction
extends Metrics implements
public void combine(final Metrics metrics) {
AvgHistogramPercentileFunction percentile =
(AvgHistogramPercentileFunction) metrics;
- if (!summation.keysEqual(percentile.getSummation())) {
- log.warn("Incompatible input [{}}] for current
PercentileFunction[{}], entity {}",
- percentile, this, entityId
- );
- return;
- }
if (ranks.size() > 0) {
if (this.ranks.size() != ranks.size()) {
log.warn("Incompatible ranks size = [{}}] for current
PercentileFunction[{}]",
@@ -168,38 +178,64 @@ public abstract class AvgHistogramPercentileFunction
extends Metrics implements
@Override
public void calculate() {
if (!isCalculated) {
- final List<String> sortedKeys =
summation.sortedKeys(Comparator.comparingInt(Integer::parseInt));
- for (String key : sortedKeys) {
- dataset.put(key, summation.get(key) / count.get(key));
- }
-
- long total = dataset.sumOfValues();
-
- int[] roofs = new int[ranks.size()];
- for (int i = 0; i < ranks.size(); i++) {
- roofs[i] = Math.round(total * ranks.get(i) * 1.0f / 100);
- }
-
- int count = 0;
- int loopIndex = 0;
-
- for (int i = 0; i < sortedKeys.size(); i++) {
- String key = sortedKeys.get(i);
- final Long value = dataset.get(key);
-
- count += value;
- for (int rankIdx = loopIndex; rankIdx < roofs.length;
rankIdx++) {
- int roof = roofs[rankIdx];
-
- if (count >= roof) {
- long latency = (i + 1 == sortedKeys.size()) ?
Long.MAX_VALUE : Long.parseLong(sortedKeys.get(i + 1));
-
percentileValues.put(String.valueOf(ranks.get(rankIdx)), latency);
- loopIndex++;
- } else {
- break;
+ final Set<String> keys = summation.keys();
+ for (String key : keys) {
+ long value = 0;
+ if (count.get(key) != 0) {
+ value = summation.get(key) / count.get(key);
+ if (value == 0L && summation.get(key) > 0L) {
+ value = 1;
}
}
+ dataset.put(key, value);
}
+ dataset.keys().stream()
+ .map(key -> {
+ if (key.contains(":")) {
+ String[] kk = key.split(":");
+ return Tuple.of(kk[0], key);
+ } else {
+ return Tuple.of(DEFAULT_GROUP, key);
+ }
+ })
+ .collect(groupingBy(Tuple2::_1, mapping(Tuple2::_2,
Collector.of(
+ DataTable::new,
+ (dt, key) -> dt.put(key.contains(":") ? key.split(":")[1]
: key, dataset.get(key)),
+ DataTable::append))))
+ .forEach((group, subDataset) -> {
+ long total;
+ total = subDataset.sumOfValues();
+
+ int[] roofs = new int[ranks.size()];
+ for (int i = 0; i < ranks.size(); i++) {
+ roofs[i] = Math.round(total * ranks.get(i) * 1.0f /
100);
+ }
+
+ int count = 0;
+ final List<String> sortedKeys =
subDataset.sortedKeys(Comparator.comparingLong(Long::parseLong));
+
+ int loopIndex = 0;
+
+ for (String key : sortedKeys) {
+ final Long value = subDataset.get(key);
+
+ count += value;
+ for (int rankIdx = loopIndex; rankIdx < roofs.length;
rankIdx++) {
+ int roof = roofs[rankIdx];
+
+ if (count >= roof) {
+ if (group.equals(DEFAULT_GROUP)) {
+
percentileValues.put(String.valueOf(ranks.get(rankIdx)), Long.parseLong(key));
+ } else {
+
percentileValues.put(String.format("%s:%s", group, ranks.get(rankIdx)),
Long.parseLong(key));
+ }
+ loopIndex++;
+ } else {
+ break;
+ }
+ }
+ }
+ });
}
}
diff --git
a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/BucketedValues.java
b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/BucketedValues.java
index de5e1b9..ab221f2 100644
---
a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/BucketedValues.java
+++
b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/BucketedValues.java
@@ -21,6 +21,7 @@ package
org.apache.skywalking.oap.server.core.analysis.meter.function;
import java.util.Arrays;
import java.util.List;
import lombok.Getter;
+import lombok.Setter;
import lombok.ToString;
import org.apache.skywalking.oap.server.core.analysis.metrics.DataTable;
import org.apache.skywalking.oap.server.core.query.type.Bucket;
@@ -32,13 +33,16 @@ import
org.apache.skywalking.oap.server.core.query.type.HeatMap;
@ToString
@Getter
public class BucketedValues {
+
+ @Setter
+ private String group;
/**
* The element in the buckets represent the minimal value of this bucket,
the max is defined by the next element.
* Such as 0, 10, 50, 100 means buckets are [0, 10), [10, 50), [50, 100),
[100, infinite+).
*
- * The {@link Integer#MIN_VALUE} could be the first bucket element to
indicate there is no minimal value.
+ * The {@link Long#MIN_VALUE} could be the first bucket element to
indicate there is no minimal value.
*/
- private int[] buckets;
+ private long[] buckets;
/**
* {@link #buckets} and {@link #values} arrays should have the same
length. The element in the values, represents
* the amount in the same index bucket.
@@ -49,7 +53,7 @@ public class BucketedValues {
* @param buckets Read {@link #buckets}
* @param values Read {@link #values}
*/
- public BucketedValues(final int[] buckets, final long[] values) {
+ public BucketedValues(final long[] buckets, final long[] values) {
if (buckets == null || values == null || buckets.length == 0 ||
values.length == 0) {
throw new IllegalArgumentException("buckets and values can't be
null.");
}
@@ -65,13 +69,13 @@ public class BucketedValues {
*/
public boolean isCompatible(DataTable dataset) {
final List<String> sortedKeys = dataset.sortedKeys(new
HeatMap.KeyComparator(true));
- int[] existedBuckets = new int[sortedKeys.size()];
+ long[] existedBuckets = new long[sortedKeys.size()];
for (int i = 0; i < sortedKeys.size(); i++) {
final String key = sortedKeys.get(i);
if (key.equals(Bucket.INFINITE_NEGATIVE)) {
- existedBuckets[i] = Integer.MIN_VALUE;
+ existedBuckets[i] = Long.MIN_VALUE;
} else {
- existedBuckets[i] = Integer.parseInt(key);
+ existedBuckets[i] = Long.parseLong(key);
}
}
diff --git
a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/HistogramFunction.java
b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/HistogramFunction.java
index c6edef5..ce23f27 100644
---
a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/HistogramFunction.java
+++
b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/HistogramFunction.java
@@ -68,7 +68,7 @@ public abstract class HistogramFunction extends Metrics
implements AcceptableVal
final long[] values = value.getValues();
for (int i = 0; i < values.length; i++) {
final long bucket = value.getBuckets()[i];
- String bucketName = bucket == Integer.MIN_VALUE ?
Bucket.INFINITE_NEGATIVE : String.valueOf(bucket);
+ String bucketName = bucket == Long.MIN_VALUE ?
Bucket.INFINITE_NEGATIVE : String.valueOf(bucket);
final long bucketValue = values[i];
dataset.valueAccumulation(bucketName, bucketValue);
}
diff --git
a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/PercentileFunction.java
b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/PercentileFunction.java
index 8bfc782..b7c81fd 100644
---
a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/PercentileFunction.java
+++
b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/PercentileFunction.java
@@ -113,7 +113,7 @@ public abstract class PercentileFunction extends Metrics
implements AcceptableVa
final long[] values = value.getBucketedValues().getValues();
for (int i = 0; i < values.length; i++) {
final long bucket = value.getBucketedValues().getBuckets()[i];
- String bucketName = bucket == Integer.MIN_VALUE ?
Bucket.INFINITE_NEGATIVE : String.valueOf(bucket);
+ String bucketName = bucket == Long.MIN_VALUE ?
Bucket.INFINITE_NEGATIVE : String.valueOf(bucket);
final long bucketValue = values[i];
dataset.valueAccumulation(bucketName, bucketValue);
}
diff --git
a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/metrics/DataTable.java
b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/metrics/DataTable.java
index 2fca2da..0640d68 100644
---
a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/metrics/DataTable.java
+++
b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/metrics/DataTable.java
@@ -141,7 +141,7 @@ public class DataTable implements
StorageDataComplexObject<DataTable> {
this.append(source);
}
- public void append(DataTable dataTable) {
+ public DataTable append(DataTable dataTable) {
dataTable.data.forEach((key, value) -> {
Long current = this.data.get(key);
if (current == null) {
@@ -151,5 +151,6 @@ public class DataTable implements
StorageDataComplexObject<DataTable> {
}
this.data.put(key, current);
});
+ return this;
}
}
diff --git
a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/metric/promethues/PrometheusMetricConverter.java
b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/metric/promethues/PrometheusMetricConverter.java
index 449d525..0bdb58d 100644
---
a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/metric/promethues/PrometheusMetricConverter.java
+++
b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/metric/promethues/PrometheusMetricConverter.java
@@ -31,6 +31,7 @@ import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
+import java.util.Optional;
import java.util.StringJoiner;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
@@ -78,6 +79,10 @@ public class PrometheusMetricConverter {
private final static String LATEST = "latest";
+ private final static String DEFAULT_GROUP = "default";
+
+ private final static List<String> DEFAULT_GROUP_LIST =
Collections.singletonList(DEFAULT_GROUP);
+
private final Window window = new Window();
private final List<MetricsRule> rules;
@@ -93,6 +98,9 @@ public class PrometheusMetricConverter {
lastRuleName.set(rule.getName());
return;
}
+ if (rule.getBucketUnit().toMillis(1L) < 1L) {
+ throw new IllegalArgumentException("Bucket unit should be
equals or more than MILLISECOND");
+ }
service.create(formatMetricName(rule.getName()),
rule.getOperation(), rule.getScope());
lastRuleName.set(rule.getName());
});
@@ -133,7 +141,7 @@ public class PrometheusMetricConverter {
.peek(tuple -> log.debug("Mapped rules to metrics: {}", tuple))
.map(Function1.liftTry(tuple -> {
String serviceName =
composeEntity(tuple._3.getRelabel().getService().stream(),
tuple._4.getLabels());
- Operation o = new Operation(tuple._1.getOperation(),
tuple._1.getName(), tuple._1.getScope(), tuple._1.getPercentiles());
+ Operation o = new Operation(tuple._1.getOperation(),
tuple._1.getName(), tuple._1.getScope(), tuple._1.getPercentiles(),
tuple._1.getBucketUnit());
MetricSource.MetricSourceBuilder sb = MetricSource.builder();
sb.promMetricName(tuple._2)
.timestamp(tuple._4.getTimestamp())
@@ -192,41 +200,54 @@ public class PrometheusMetricConverter {
case AVG_PERCENTILE:
Validate.isTrue(sources.size() == 1, "Can't get
source for histogram");
Map.Entry<MetricSource, List<Metric>> smm =
sources.entrySet().iterator().next();
- Histogram h = (Histogram) sum(smm.getValue());
-
- long[] vv = new long[h.getBuckets().size()];
- int[] bb = new int[h.getBuckets().size()];
- long v = 0L;
- int i = 0;
- for (Map.Entry<Double, Long> entry :
h.getBuckets().entrySet()) {
- long increase = entry.getValue() - v;
- vv[i] = window.get(operation.getMetricName(),
ImmutableMap.of("le", entry.getKey().toString()))
- .apply(smm.getKey(), (double)
increase).longValue();
- v = entry.getValue();
-
- if (i + 1 < h.getBuckets().size()) {
- bb[i + 1] =
BigDecimal.valueOf(entry.getKey()).multiply(SECOND_TO_MILLISECOND).intValue();
- }
-
- i++;
- }
-
- if (operation.getName().equals(AVG_HISTOGRAM)) {
- AcceptableValue<BucketedValues> heatmapMetrics
= service.buildMetrics(
-
formatMetricName(operation.getMetricName()), BucketedValues.class);
-
heatmapMetrics.setTimeBucket(TimeBucket.getMinuteTimeBucket(smm.getKey().getTimestamp()));
-
heatmapMetrics.accept(smm.getKey().getEntity(), new BucketedValues(bb, vv));
- service.doStreamingCalculation(heatmapMetrics);
- } else {
-
AcceptableValue<AvgHistogramPercentileFunction.AvgPercentileArgument>
percentileMetrics =
-
service.buildMetrics(formatMetricName(operation.getMetricName()),
AvgHistogramPercentileFunction.AvgPercentileArgument.class);
-
percentileMetrics.setTimeBucket(TimeBucket.getMinuteTimeBucket(smm.getKey().getTimestamp()));
-
percentileMetrics.accept(smm.getKey().getEntity(),
- new
AvgHistogramPercentileFunction.AvgPercentileArgument(new BucketedValues(bb,
vv),
operation.getPercentiles().stream().mapToInt(Integer::intValue).toArray()));
-
service.doStreamingCalculation(percentileMetrics);
- }
-
- generateTraffic(smm.getKey().getEntity());
+
+ smm.getValue().stream()
+ .collect(groupingBy(m ->
Optional.ofNullable(smm.getKey().getGroupBy()).orElse(DEFAULT_GROUP_LIST).stream()
+ .map(m.getLabels()::get)
+ .map(group ->
Optional.ofNullable(group).orElse(DEFAULT_GROUP))
+ .collect(Collectors.joining("-"))))
+ .forEach((group, mm) -> {
+ Histogram h = (Histogram) sum(mm);
+
+ long[] vv = new
long[h.getBuckets().size()];
+ long[] bb = new
long[h.getBuckets().size()];
+ long v = 0L;
+ int i = 0;
+ for (Map.Entry<Double, Long> entry :
h.getBuckets().entrySet()) {
+ long increase = entry.getValue() - v;
+ vv[i] =
window.get(operation.getMetricName(), ImmutableMap.of("group", group, "le",
entry.getKey().toString()))
+ .apply(smm.getKey(), (double)
increase).longValue();
+ v = entry.getValue();
+
+ if (i + 1 < h.getBuckets().size()) {
+ bb[i + 1] =
BigDecimal.valueOf(entry.getKey())
+
.multiply(BigDecimal.valueOf(operation.getBucketUnit().toMillis(1L)))
+ .longValue();
+ }
+ i++;
+ }
+ BucketedValues bv = new BucketedValues(bb,
vv);
+ if (!group.equals(DEFAULT_GROUP)) {
+ bv.setGroup(group);
+ }
+ if
(operation.getName().equals(AVG_HISTOGRAM)) {
+ AcceptableValue<BucketedValues>
heatmapMetrics = service.buildMetrics(
+
formatMetricName(operation.getMetricName()), BucketedValues.class);
+
heatmapMetrics.setTimeBucket(TimeBucket.getMinuteTimeBucket(smm.getKey().getTimestamp()));
+
heatmapMetrics.accept(smm.getKey().getEntity(), bv);
+
service.doStreamingCalculation(heatmapMetrics);
+ } else {
+
AcceptableValue<AvgHistogramPercentileFunction.AvgPercentileArgument>
percentileMetrics =
+
service.buildMetrics(formatMetricName(operation.getMetricName()),
AvgHistogramPercentileFunction.AvgPercentileArgument.class);
+
percentileMetrics.setTimeBucket(TimeBucket.getMinuteTimeBucket(smm.getKey().getTimestamp()));
+
percentileMetrics.accept(smm.getKey().getEntity(),
+ new
AvgHistogramPercentileFunction.AvgPercentileArgument(bv,
operation.getPercentiles().stream().mapToInt(Integer::intValue).toArray()));
+
service.doStreamingCalculation(percentileMetrics);
+ }
+
+ generateTraffic(smm.getKey().getEntity());
+ });
+
break;
default:
throw new
IllegalArgumentException(String.format("Unsupported downSampling %s",
operation.getName()));
diff --git
a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/metric/promethues/operation/Operation.java
b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/metric/promethues/operation/Operation.java
index 42334e3..4d7cc64 100644
---
a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/metric/promethues/operation/Operation.java
+++
b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/metric/promethues/operation/Operation.java
@@ -19,6 +19,7 @@
package org.apache.skywalking.oap.server.core.metric.promethues.operation;
import java.util.List;
+import java.util.concurrent.TimeUnit;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
@@ -39,4 +40,6 @@ public class Operation {
private final List<Integer> percentiles;
+ private final TimeUnit bucketUnit;
+
}
diff --git
a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/metric/promethues/rule/MetricsRule.java
b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/metric/promethues/rule/MetricsRule.java
index 6b9b713..143a804 100644
---
a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/metric/promethues/rule/MetricsRule.java
+++
b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/metric/promethues/rule/MetricsRule.java
@@ -20,6 +20,7 @@ package
org.apache.skywalking.oap.server.core.metric.promethues.rule;
import java.util.List;
import java.util.Map;
+import java.util.concurrent.TimeUnit;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.apache.skywalking.oap.server.core.analysis.meter.ScopeType;
@@ -31,5 +32,6 @@ public class MetricsRule {
private ScopeType scope;
private String operation;
private List<Integer> percentiles;
+ private TimeUnit bucketUnit = TimeUnit.SECONDS;
private Map<String, PrometheusMetric> sources;
}
diff --git
a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/HeatMap.java
b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/HeatMap.java
index d8b11c1..24dd802 100644
---
a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/HeatMap.java
+++
b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/HeatMap.java
@@ -18,6 +18,7 @@
package org.apache.skywalking.oap.server.core.query.type;
+import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
@@ -132,9 +133,16 @@ public class HeatMap {
private final boolean asc;
@Override
- public int compare(final String key1, final String key2) {
+ public int compare(final String k1, final String k2) {
int result;
-
+ String[] kk1 = parseKey(k1);
+ String[] kk2 = parseKey(k2);
+ result = kk1[0].compareTo(kk2[0]);
+ if (result != 0) {
+ return result;
+ }
+ final String key1 = kk1[1];
+ final String key2 = kk2[1];
if (key1.equals(key2)) {
result = 0;
} else if (Bucket.INFINITE_NEGATIVE.equals(key1) ||
Bucket.INFINITE_POSITIVE.equals(key2)) {
@@ -142,10 +150,17 @@ public class HeatMap {
} else if (Bucket.INFINITE_NEGATIVE.equals(key2) ||
Bucket.INFINITE_POSITIVE.equals(key1)) {
result = 1;
} else {
- result = Integer.parseInt(key1) - Integer.parseInt(key2);
+ result = new BigInteger(key1).subtract(new
BigInteger(key2)).signum();
}
- return asc ? result : 0 - result;
+ return asc ? result : -result;
+ }
+
+ private String[] parseKey(String key) {
+ if (key.contains(":")) {
+ return key.split(":");
+ }
+ return new String[] {"default", key};
}
}
}
diff --git
a/oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/analysis/meter/function/HistogramFunctionTest.java
b/oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/analysis/meter/function/AvgHistogramFunctionTest.java
similarity index 72%
copy from
oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/analysis/meter/function/HistogramFunctionTest.java
copy to
oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/analysis/meter/function/AvgHistogramFunctionTest.java
index 2bceedb..6238218 100644
---
a/oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/analysis/meter/function/HistogramFunctionTest.java
+++
b/oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/analysis/meter/function/AvgHistogramFunctionTest.java
@@ -28,25 +28,20 @@ import
org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.junit.Assert;
import org.junit.Test;
-import static
org.apache.skywalking.oap.server.core.analysis.meter.function.HistogramFunction.DATASET;
+import static
org.apache.skywalking.oap.server.core.analysis.meter.function.AvgHistogramFunction.DATASET;
+import static
org.apache.skywalking.oap.server.core.analysis.meter.function.AvgLabeledFunction.COUNT;
+import static
org.apache.skywalking.oap.server.core.analysis.meter.function.AvgLabeledFunction.SUMMATION;
-public class HistogramFunctionTest {
- private static final int[] BUCKETS = new int[] {
+public class AvgHistogramFunctionTest {
+ private static final long[] BUCKETS = new long[] {
0,
50,
100,
250
};
- private static final int[] BUCKETS_2ND = new int[] {
- 0,
- 51,
- 100,
- 250
- };
-
- private static final int[] INFINITE_BUCKETS = new int[] {
- Integer.MIN_VALUE,
+ private static final long[] INFINITE_BUCKETS = new long[] {
+ Long.MIN_VALUE,
-5,
0,
10
@@ -76,15 +71,16 @@ public class HistogramFunctionTest {
4
})
);
+ inst.calculate();
final int[] results = inst.getDataset().sortedValues(new
HeatMap.KeyComparator(true)).stream()
- .flatMapToInt(l ->
IntStream.of(l.intValue()))
- .toArray();
+ .flatMapToInt(l -> IntStream.of(l.intValue()))
+ .toArray();
Assert.assertArrayEquals(new int[] {
1,
+ 3,
6,
- 13,
- 14
+ 7
}, results);
}
@@ -113,33 +109,9 @@ public class HistogramFunctionTest {
})
);
- Assert.assertEquals(1L,
inst.getDataset().get(Bucket.INFINITE_NEGATIVE).longValue());
- }
-
- @Test(expected = IllegalArgumentException.class)
- public void testIncompatible() {
- HistogramFunctionInst inst = new HistogramFunctionInst();
- inst.accept(
- MeterEntity.newService("service-test"),
- new BucketedValues(
- BUCKETS, new long[] {
- 0,
- 4,
- 10,
- 10
- })
- );
+ inst.calculate();
- inst.accept(
- MeterEntity.newService("service-test"),
- new BucketedValues(
- BUCKETS_2ND, new long[] {
- 1,
- 2,
- 3,
- 4
- })
- );
+ Assert.assertEquals(1L,
inst.getDataset().get(Bucket.INFINITE_NEGATIVE).longValue());
}
@Test
@@ -155,6 +127,7 @@ public class HistogramFunctionTest {
10
})
);
+ inst.calculate();
final HistogramFunctionInst inst2 = new HistogramFunctionInst();
inst2.deserialize(inst.serialize().build());
@@ -199,20 +172,81 @@ public class HistogramFunctionTest {
10
})
);
+ inst.calculate();
final StorageBuilder storageBuilder = inst.builder().newInstance();
// Simulate the storage layer do, convert the datatable to string.
- final Map map = storageBuilder.data2Map(inst);
+ Map<String, Object> map = storageBuilder.data2Map(inst);
+ map.put(SUMMATION, ((DataTable) map.get(SUMMATION)).toStorageData());
+ map.put(COUNT, ((DataTable) map.get(COUNT)).toStorageData());
map.put(DATASET, ((DataTable) map.get(DATASET)).toStorageData());
- final HistogramFunction inst2 = (HistogramFunction)
storageBuilder.map2Data(map);
+ final AvgHistogramFunction inst2 = (AvgHistogramFunction)
storageBuilder.map2Data(map);
Assert.assertEquals(inst, inst2);
// HistogramFunction equal doesn't include dataset.
Assert.assertEquals(inst.getDataset(), inst2.getDataset());
}
- private static class HistogramFunctionInst extends HistogramFunction {
+ @Test
+ public void testGroup() {
+
+ HistogramFunctionInst inst = new HistogramFunctionInst();
+ BucketedValues bv1 = new BucketedValues(
+ BUCKETS, new long[] {
+ 0,
+ 4,
+ 10,
+ 10
+ });
+ bv1.setGroup("g1");
+ inst.accept(
+ MeterEntity.newService("service-test"),
+ bv1
+ );
+
+ BucketedValues bv2 = new BucketedValues(
+ BUCKETS, new long[] {
+ 1,
+ 2,
+ 3,
+ 4
+ });
+ bv2.setGroup("g1");
+ inst.accept(
+ MeterEntity.newService("service-test"),
+ bv2
+ );
+ BucketedValues bv3 = new BucketedValues(
+ BUCKETS, new long[] {
+ 2,
+ 4,
+ 6,
+ 8
+ });
+ bv3.setGroup("g2");
+ inst.accept(
+ MeterEntity.newService("service-test"),
+ bv3
+ );
+ inst.calculate();
+
+ int[] results = inst.getDataset().sortedValues(new
HeatMap.KeyComparator(true)).stream()
+ .flatMapToInt(l -> IntStream.of(l.intValue()))
+ .toArray();
+ Assert.assertArrayEquals(new int[] {
+ 1,
+ 3,
+ 6,
+ 7,
+ 2,
+ 4,
+ 6,
+ 8
+ }, results);
+ }
+
+ private static class HistogramFunctionInst extends AvgHistogramFunction {
@Override
public AcceptableValue<BucketedValues> createNew() {
diff --git
a/oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/analysis/meter/function/PercentileFunctionTest.java
b/oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/analysis/meter/function/AvgHistogramPercentileFunctionTest.java
similarity index 67%
copy from
oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/analysis/meter/function/PercentileFunctionTest.java
copy to
oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/analysis/meter/function/AvgHistogramPercentileFunctionTest.java
index 6409d7b..ca9c4dd 100644
---
a/oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/analysis/meter/function/PercentileFunctionTest.java
+++
b/oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/analysis/meter/function/AvgHistogramPercentileFunctionTest.java
@@ -26,17 +26,11 @@ import
org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.junit.Assert;
import org.junit.Test;
-public class PercentileFunctionTest {
- private static final int[] BUCKETS = new int[] {
- 0,
- 50,
- 100,
- 250
- };
+public class AvgHistogramPercentileFunctionTest {
- private static final int[] BUCKETS_2ND = new int[] {
+ private static final long[] BUCKETS = new long[] {
0,
- 51,
+ 50,
100,
250
};
@@ -51,7 +45,7 @@ public class PercentileFunctionTest {
PercentileFunctionInst inst = new PercentileFunctionInst();
inst.accept(
MeterEntity.newService("service-test"),
- new PercentileFunction.PercentileArgument(
+ new AvgHistogramPercentileFunction.AvgPercentileArgument(
new BucketedValues(
BUCKETS,
new long[] {
@@ -67,7 +61,7 @@ public class PercentileFunctionTest {
inst.accept(
MeterEntity.newService("service-test"),
- new PercentileFunction.PercentileArgument(
+ new AvgHistogramPercentileFunction.AvgPercentileArgument(
new BucketedValues(
BUCKETS,
new long[] {
@@ -86,10 +80,10 @@ public class PercentileFunctionTest {
/**
* Expected percentile dataset
* <pre>
- * 0 , 20
- * 50 , 40
- * 100, 60 <- P50
- * 250, 80 <- P90
+ * 0 , 10
+ * 50 , 20
+ * 100, 30 <- P50
+ * 250, 40 <- P90
* </pre>
*/
Assert.assertArrayEquals(new int[] {
@@ -98,48 +92,12 @@ public class PercentileFunctionTest {
}, values);
}
- @Test(expected = IllegalArgumentException.class)
- public void testIncompatible() {
- PercentileFunctionInst inst = new PercentileFunctionInst();
- inst.accept(
- MeterEntity.newService("service-test"),
- new PercentileFunction.PercentileArgument(
- new BucketedValues(
- BUCKETS,
- new long[] {
- 10,
- 20,
- 30,
- 40
- }
- ),
- RANKS
- )
- );
-
- inst.accept(
- MeterEntity.newService("service-test"),
- new PercentileFunction.PercentileArgument(
- new BucketedValues(
- BUCKETS_2ND,
- new long[] {
- 10,
- 20,
- 30,
- 40
- }
- ),
- RANKS
- )
- );
- }
-
@Test
public void testSerialization() {
PercentileFunctionInst inst = new PercentileFunctionInst();
inst.accept(
MeterEntity.newService("service-test"),
- new PercentileFunction.PercentileArgument(
+ new AvgHistogramPercentileFunction.AvgPercentileArgument(
new BucketedValues(
BUCKETS,
new long[] {
@@ -168,7 +126,7 @@ public class PercentileFunctionTest {
PercentileFunctionInst inst = new PercentileFunctionInst();
inst.accept(
MeterEntity.newService("service-test"),
- new PercentileFunction.PercentileArgument(
+ new AvgHistogramPercentileFunction.AvgPercentileArgument(
new BucketedValues(
BUCKETS,
new long[] {
@@ -187,11 +145,13 @@ public class PercentileFunctionTest {
// Simulate the storage layer do, convert the datatable to string.
final Map map = storageBuilder.data2Map(inst);
- map.put(PercentileFunction.DATASET, ((DataTable)
map.get(PercentileFunction.DATASET)).toStorageData());
- map.put(PercentileFunction.VALUE, ((DataTable)
map.get(PercentileFunction.VALUE)).toStorageData());
- map.put(PercentileFunction.RANKS, ((IntList)
map.get(PercentileFunction.RANKS)).toStorageData());
+ map.put(AvgHistogramPercentileFunction.COUNT, ((DataTable)
map.get(AvgHistogramPercentileFunction.COUNT)).toStorageData());
+ map.put(AvgHistogramPercentileFunction.SUMMATION, ((DataTable)
map.get(AvgHistogramPercentileFunction.SUMMATION)).toStorageData());
+ map.put(AvgHistogramPercentileFunction.DATASET, ((DataTable)
map.get(AvgHistogramPercentileFunction.DATASET)).toStorageData());
+ map.put(AvgHistogramPercentileFunction.VALUE, ((DataTable)
map.get(AvgHistogramPercentileFunction.VALUE)).toStorageData());
+ map.put(AvgHistogramPercentileFunction.RANKS, ((IntList)
map.get(AvgHistogramPercentileFunction.RANKS)).toStorageData());
- final PercentileFunction inst2 = (PercentileFunction)
storageBuilder.map2Data(map);
+ final AvgHistogramPercentileFunction inst2 =
(AvgHistogramPercentileFunction) storageBuilder.map2Data(map);
Assert.assertEquals(inst, inst2);
// HistogramFunction equal doesn't include dataset.
Assert.assertEquals(inst.getDataset(), inst2.getDataset());
@@ -199,10 +159,10 @@ public class PercentileFunctionTest {
Assert.assertEquals(inst.getRanks(), inst2.getRanks());
}
- private static class PercentileFunctionInst extends PercentileFunction {
+ private static class PercentileFunctionInst extends
AvgHistogramPercentileFunction {
@Override
- public AcceptableValue<PercentileArgument> createNew() {
- return new PercentileFunctionInst();
+ public
AcceptableValue<AvgHistogramPercentileFunction.AvgPercentileArgument>
createNew() {
+ return new
AvgHistogramPercentileFunctionTest.PercentileFunctionInst();
}
}
-}
+}
\ No newline at end of file
diff --git
a/oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/analysis/meter/function/HistogramFunctionTest.java
b/oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/analysis/meter/function/HistogramFunctionTest.java
index 2bceedb..e9f61c8 100644
---
a/oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/analysis/meter/function/HistogramFunctionTest.java
+++
b/oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/analysis/meter/function/HistogramFunctionTest.java
@@ -31,22 +31,22 @@ import org.junit.Test;
import static
org.apache.skywalking.oap.server.core.analysis.meter.function.HistogramFunction.DATASET;
public class HistogramFunctionTest {
- private static final int[] BUCKETS = new int[] {
+ private static final long[] BUCKETS = new long[] {
0,
50,
100,
250
};
- private static final int[] BUCKETS_2ND = new int[] {
+ private static final long[] BUCKETS_2ND = new long[] {
0,
51,
100,
250
};
- private static final int[] INFINITE_BUCKETS = new int[] {
- Integer.MIN_VALUE,
+ private static final long[] INFINITE_BUCKETS = new long[] {
+ Long.MIN_VALUE,
-5,
0,
10
diff --git
a/oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/analysis/meter/function/PercentileFunctionTest.java
b/oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/analysis/meter/function/PercentileFunctionTest.java
index 6409d7b..3c7870a 100644
---
a/oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/analysis/meter/function/PercentileFunctionTest.java
+++
b/oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/analysis/meter/function/PercentileFunctionTest.java
@@ -27,14 +27,14 @@ import org.junit.Assert;
import org.junit.Test;
public class PercentileFunctionTest {
- private static final int[] BUCKETS = new int[] {
+ private static final long[] BUCKETS = new long[] {
0,
50,
100,
250
};
- private static final int[] BUCKETS_2ND = new int[] {
+ private static final long[] BUCKETS_2ND = new long[] {
0,
51,
100,
diff --git
a/oap-server/server-library/library-util/src/main/java/org/apache/skywalking/oap/server/library/util/prometheus/parser/Context.java
b/oap-server/server-library/library-util/src/main/java/org/apache/skywalking/oap/server/library/util/prometheus/parser/Context.java
index d0482fe..e583809 100644
---
a/oap-server/server-library/library-util/src/main/java/org/apache/skywalking/oap/server/library/util/prometheus/parser/Context.java
+++
b/oap-server/server-library/library-util/src/main/java/org/apache/skywalking/oap/server/library/util/prometheus/parser/Context.java
@@ -113,22 +113,31 @@ public class Context {
.build()));
break;
case HISTOGRAM:
- Histogram.HistogramBuilder hBuilder = Histogram.builder();
- hBuilder.name(name).timestamp(now);
- samples.forEach(textSample -> {
- hBuilder.labels(textSample.getLabels());
- if (textSample.getName().endsWith("_count")) {
- hBuilder.sampleCount((long)
convertStringToDouble(textSample.getValue()));
- } else if (textSample.getName().endsWith("_sum")) {
-
hBuilder.sampleSum(convertStringToDouble(textSample.getValue()));
- } else if (textSample.getLabels().containsKey("le")) {
- hBuilder.bucket(
-
convertStringToDouble(textSample.getLabels().remove("le")),
- (long) convertStringToDouble(textSample.getValue())
- );
- }
- });
- metricFamilyBuilder.addMetric(hBuilder.build());
+ samples.stream()
+ .map(sample -> {
+ Map<String, String> labels =
Maps.newHashMap(sample.getLabels());
+ labels.remove("le");
+ return Pair.of(labels, sample);
+ })
+ .collect(groupingBy(Pair::getLeft, mapping(Pair::getRight,
toList())))
+ .forEach((labels, samples) -> {
+ Histogram.HistogramBuilder hBuilder =
Histogram.builder();
+ hBuilder.name(name).timestamp(now);
+ hBuilder.labels(labels);
+ samples.forEach(textSample -> {
+ if (textSample.getName().endsWith("_count")) {
+ hBuilder.sampleCount((long)
convertStringToDouble(textSample.getValue()));
+ } else if (textSample.getName().endsWith("_sum")) {
+
hBuilder.sampleSum(convertStringToDouble(textSample.getValue()));
+ } else if
(textSample.getLabels().containsKey("le")) {
+ hBuilder.bucket(
+
convertStringToDouble(textSample.getLabels().remove("le")),
+ (long)
convertStringToDouble(textSample.getValue())
+ );
+ }
+ });
+ metricFamilyBuilder.addMetric(hBuilder.build());
+ });
break;
case SUMMARY:
samples.stream()
diff --git
a/oap-server/server-library/library-util/src/test/java/org/apache/skywalking/oap/server/library/util/prometheus/parser/TextParserTest.java
b/oap-server/server-library/library-util/src/test/java/org/apache/skywalking/oap/server/library/util/prometheus/parser/TextParserTest.java
index 13c156f..c34e915 100644
---
a/oap-server/server-library/library-util/src/test/java/org/apache/skywalking/oap/server/library/util/prometheus/parser/TextParserTest.java
+++
b/oap-server/server-library/library-util/src/test/java/org/apache/skywalking/oap/server/library/util/prometheus/parser/TextParserTest.java
@@ -68,7 +68,21 @@ public class TextParserTest {
.setType(MetricType.HISTOGRAM)
.setHelp("A histogram of the request duration.")
.addMetric(Histogram.builder()
+
.name("http_request_duration_seconds")
+ .label("status", "400")
+ .sampleCount(55)
+ .sampleSum(12D)
+ .bucket(0.05D, 20L)
+ .bucket(0.1D, 20L)
+ .bucket(0.2D, 20L)
+ .bucket(0.5D, 25L)
+ .bucket(1.0D, 30L)
+
.bucket(Double.POSITIVE_INFINITY, 30L)
+ .timestamp(now)
+ .build())
+ .addMetric(Histogram.builder()
.name("http_request_duration_seconds")
+ .label("status", "200")
.sampleCount(144320L)
.sampleSum(53423.0D)
.bucket(0.05D, 24054L)
diff --git
a/oap-server/server-library/library-util/src/test/resources/testdata/prometheus.txt
b/oap-server/server-library/library-util/src/test/resources/testdata/prometheus.txt
index b6eec13..7b5c4ac 100644
---
a/oap-server/server-library/library-util/src/test/resources/testdata/prometheus.txt
+++
b/oap-server/server-library/library-util/src/test/resources/testdata/prometheus.txt
@@ -15,14 +15,22 @@ something_weird{problem="division by zero"} +Inf -3982045
# A histogram, which has a pretty complex representation in the text format:
# HELP http_request_duration_seconds A histogram of the request duration.
# TYPE http_request_duration_seconds histogram
-http_request_duration_seconds_bucket{le="0.05"} 24054
-http_request_duration_seconds_bucket{le="0.1"} 33444
-http_request_duration_seconds_bucket{le="0.2"} 100392
-http_request_duration_seconds_bucket{le="0.5"} 129389
-http_request_duration_seconds_bucket{le="1"} 133988
-http_request_duration_seconds_bucket{le="+Inf"} 144320
-http_request_duration_seconds_sum 53423
-http_request_duration_seconds_count 144320
+http_request_duration_seconds_bucket{le="0.05",status="200"} 24054
+http_request_duration_seconds_bucket{le="0.1",status="200"} 33444
+http_request_duration_seconds_bucket{le="0.2",status="200"} 100392
+http_request_duration_seconds_bucket{le="0.5",status="200"} 129389
+http_request_duration_seconds_bucket{le="1",status="200"} 133988
+http_request_duration_seconds_bucket{le="+Inf",status="200"} 144320
+http_request_duration_seconds_sum{status="200"} 53423
+http_request_duration_seconds_count{status="200"} 144320
+http_request_duration_seconds_bucket{le="0.05",status="400"} 20
+http_request_duration_seconds_bucket{le="0.1",status="400"} 20
+http_request_duration_seconds_bucket{le="0.2",status="400"} 20
+http_request_duration_seconds_bucket{le="0.5",status="400"} 25
+http_request_duration_seconds_bucket{le="1",status="400"} 30
+http_request_duration_seconds_bucket{le="+Inf",status="400"} 30
+http_request_duration_seconds_sum{status="400"} 12
+http_request_duration_seconds_count{status="400"} 55
# Finally a summary, which has a complex representation, too:
# HELP rpc_duration_seconds A summary of the RPC duration in seconds.