This is an automated email from the ASF dual-hosted git repository.
elek pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hadoop-ozone.git
The following commit(s) were added to refs/heads/master by this push:
new 2acd2b1 HDDS-3399. Update JaegerTracing
2acd2b1 is described below
commit 2acd2b1d998b7d98fb9ae5ab6945c766d8d4402e
Author: Elek Márton <[email protected]>
AuthorDate: Tue Apr 21 14:07:17 2020 +0200
HDDS-3399. Update JaegerTracing
Closes #835
---
.../apache/hadoop/hdds/scm/XceiverClientGrpc.java | 33 +++++++----
.../apache/hadoop/hdds/scm/XceiverClientRatis.java | 38 +++++++------
hadoop-hdds/common/pom.xml | 2 +-
.../hdds/function/SupplierWithIOException.java | 35 ++++++++++++
.../hadoop/hdds/tracing/GrpcServerInterceptor.java | 11 +++-
.../apache/hadoop/hdds/tracing/TraceAllMethod.java | 8 ++-
.../apache/hadoop/hdds/tracing/TracingUtil.java | 66 +++++++++++++++++++---
.../container/common/impl/HddsDispatcher.java | 8 ++-
.../common/transport/server/XceiverServerGrpc.java | 11 +++-
.../transport/server/ratis/XceiverServerRatis.java | 12 +++-
.../server/OzoneProtocolMessageDispatcher.java | 7 +--
.../org/apache/hadoop/ozone/s3/TracingFilter.java | 58 ++++++++++---------
.../hadoop/ozone/freon/BaseFreonGenerator.java | 15 +++--
.../hadoop/ozone/freon/RandomKeyGenerator.java | 27 +++++----
.../org/apache/hadoop/ozone/shell/OzoneShell.java | 12 ++--
.../org/apache/hadoop/ozone/shell/s3/S3Shell.java | 13 +++--
pom.xml | 2 +-
17 files changed, 244 insertions(+), 114 deletions(-)
diff --git
a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java
b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java
index 0771f4c..ad92621 100644
---
a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java
+++
b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java
@@ -35,6 +35,7 @@ import java.util.concurrent.TimeoutException;
import org.apache.hadoop.hdds.HddsUtils;
import org.apache.hadoop.hdds.conf.ConfigurationSource;
+import org.apache.hadoop.hdds.function.SupplierWithIOException;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import
org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandRequestProto;
@@ -57,6 +58,7 @@ import org.apache.hadoop.util.Time;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import io.opentracing.Scope;
+import io.opentracing.Span;
import io.opentracing.util.GlobalTracer;
import org.apache.ratis.thirdparty.io.grpc.ManagedChannel;
import org.apache.ratis.thirdparty.io.grpc.Status;
@@ -265,14 +267,18 @@ public class XceiverClientGrpc extends XceiverClientSpi {
private XceiverClientReply sendCommandWithTraceIDAndRetry(
ContainerCommandRequestProto request, List<CheckedBiFunction> validators)
throws IOException {
- try (Scope scope = GlobalTracer.get()
- .buildSpan("XceiverClientGrpc." + request.getCmdType().name())
- .startActive(true)) {
- ContainerCommandRequestProto finalPayload =
- ContainerCommandRequestProto.newBuilder(request)
- .setTraceID(TracingUtil.exportCurrentSpan()).build();
- return sendCommandWithRetry(finalPayload, validators);
- }
+
+ String spanName = "XceiverClientGrpc." + request.getCmdType().name();
+
+ return TracingUtil.executeInNewSpan(spanName,
+ (SupplierWithIOException<XceiverClientReply>) () -> {
+
+ ContainerCommandRequestProto finalPayload =
+ ContainerCommandRequestProto.newBuilder(request)
+ .setTraceID(TracingUtil.exportCurrentSpan()).build();
+ return sendCommandWithRetry(finalPayload, validators);
+
+ });
}
private XceiverClientReply sendCommandWithRetry(
@@ -387,9 +393,11 @@ public class XceiverClientGrpc extends XceiverClientSpi {
public XceiverClientReply sendCommandAsync(
ContainerCommandRequestProto request)
throws IOException, ExecutionException, InterruptedException {
- try (Scope scope = GlobalTracer.get()
- .buildSpan("XceiverClientGrpc." + request.getCmdType().name())
- .startActive(true)) {
+
+ Span span = GlobalTracer.get()
+ .buildSpan("XceiverClientGrpc." + request.getCmdType().name()).start();
+
+ try (Scope scope = GlobalTracer.get().activateSpan(span)) {
ContainerCommandRequestProto finalPayload =
ContainerCommandRequestProto.newBuilder(request)
@@ -405,6 +413,9 @@ public class XceiverClientGrpc extends XceiverClientSpi {
asyncReply.getResponse().get();
}
return asyncReply;
+
+ } finally {
+ span.finish();
}
}
diff --git
a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientRatis.java
b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientRatis.java
index ae5fe67..7ff5ab1 100644
---
a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientRatis.java
+++
b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientRatis.java
@@ -51,8 +51,7 @@ import org.apache.hadoop.util.Time;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
-import io.opentracing.Scope;
-import io.opentracing.util.GlobalTracer;
+import com.google.common.base.Supplier;
import org.apache.ratis.client.RaftClient;
import org.apache.ratis.grpc.GrpcTlsConfig;
import org.apache.ratis.proto.RaftProtos;
@@ -208,24 +207,27 @@ public final class XceiverClientRatis extends
XceiverClientSpi {
private CompletableFuture<RaftClientReply> sendRequestAsync(
ContainerCommandRequestProto request) {
- try (Scope scope = GlobalTracer.get()
- .buildSpan("XceiverClientRatis." + request.getCmdType().name())
- .startActive(true)) {
- final ContainerCommandRequestMessage message
- = ContainerCommandRequestMessage.toMessage(
+ return TracingUtil.executeInNewSpan(
+ "XceiverClientRatis." + request.getCmdType().name(),
+ (Supplier<CompletableFuture<RaftClientReply>>) () -> {
+ final ContainerCommandRequestMessage message
+ = ContainerCommandRequestMessage.toMessage(
request, TracingUtil.exportCurrentSpan());
- if (HddsUtils.isReadOnly(request)) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("sendCommandAsync ReadOnly {}", message);
- }
- return getClient().sendReadOnlyAsync(message);
- } else {
- if (LOG.isDebugEnabled()) {
- LOG.debug("sendCommandAsync {}", message);
+ if (HddsUtils.isReadOnly(request)) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("sendCommandAsync ReadOnly {}", message);
+ }
+ return getClient().sendReadOnlyAsync(message);
+ } else {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("sendCommandAsync {}", message);
+ }
+ return getClient().sendAsync(message);
+ }
+
}
- return getClient().sendAsync(message);
- }
- }
+
+ );
}
// gets the minimum log index replicated to all servers
diff --git a/hadoop-hdds/common/pom.xml b/hadoop-hdds/common/pom.xml
index 360c9aa..47eaf9f 100644
--- a/hadoop-hdds/common/pom.xml
+++ b/hadoop-hdds/common/pom.xml
@@ -177,7 +177,7 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
<dependency>
<groupId>io.opentracing</groupId>
<artifactId>opentracing-util</artifactId>
- <version>0.31.0</version>
+ <version>0.33.0</version>
</dependency>
<dependency>
<groupId>org.yaml</groupId>
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/function/SupplierWithIOException.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/function/SupplierWithIOException.java
new file mode 100644
index 0000000..a30e69a
--- /dev/null
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/function/SupplierWithIOException.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.function;
+
+import java.io.IOException;
+
+/**
+ * Functional interface like java.util.function.Supplier but with
+ * checked exception.
+ */
+@FunctionalInterface
+public interface SupplierWithIOException<T> {
+
+ /**
+ * Return the given output.
+ *
+ * @return the function result
+ */
+ T get() throws IOException;
+}
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/GrpcServerInterceptor.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/GrpcServerInterceptor.java
index b63af12..bd35d56 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/GrpcServerInterceptor.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/GrpcServerInterceptor.java
@@ -18,6 +18,8 @@
package org.apache.hadoop.hdds.tracing;
import io.opentracing.Scope;
+import io.opentracing.Span;
+import io.opentracing.util.GlobalTracer;
import
org.apache.ratis.thirdparty.io.grpc.ForwardingServerCallListener.SimpleForwardingServerCallListener;
import org.apache.ratis.thirdparty.io.grpc.Metadata;
import org.apache.ratis.thirdparty.io.grpc.ServerCall;
@@ -39,11 +41,14 @@ public class GrpcServerInterceptor implements
ServerInterceptor {
next.startCall(call, headers)) {
@Override
public void onMessage(ReqT message) {
- try (Scope scope = TracingUtil
- .importAndCreateScope(
+ Span span = TracingUtil
+ .importAndCreateSpan(
call.getMethodDescriptor().getFullMethodName(),
- headers.get(GrpcClientInterceptor.TRACING_HEADER))) {
+ headers.get(GrpcClientInterceptor.TRACING_HEADER));
+ try (Scope scope = GlobalTracer.get().activateSpan(span)) {
super.onMessage(message);
+ } finally {
+ span.finish();
}
}
};
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TraceAllMethod.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TraceAllMethod.java
index 6dc9f96..2328094 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TraceAllMethod.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TraceAllMethod.java
@@ -25,6 +25,7 @@ import java.util.Map;
import java.util.Map.Entry;
import io.opentracing.Scope;
+import io.opentracing.Span;
import io.opentracing.util.GlobalTracer;
/**
@@ -64,9 +65,10 @@ public class TraceAllMethod<T> implements InvocationHandler {
method.getName());
}
- try (Scope scope = GlobalTracer.get().buildSpan(
+ Span span = GlobalTracer.get().buildSpan(
name + "." + method.getName())
- .startActive(true)) {
+ .start();
+ try (Scope scope = GlobalTracer.get().activateSpan(span)) {
try {
return delegateMethod.invoke(delegate, args);
} catch (Exception ex) {
@@ -75,6 +77,8 @@ public class TraceAllMethod<T> implements InvocationHandler {
} else {
throw ex;
}
+ } finally {
+ span.finish();
}
}
}
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java
index a9a2ab8..fe87bce 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java
@@ -17,7 +17,13 @@
*/
package org.apache.hadoop.hdds.tracing;
+import java.io.IOException;
import java.lang.reflect.Proxy;
+import java.util.function.Supplier;
+
+import org.apache.hadoop.hdds.conf.ConfigurationSource;
+import org.apache.hadoop.hdds.function.SupplierWithIOException;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
import io.jaegertracing.Configuration;
import io.jaegertracing.internal.JaegerTracer;
@@ -27,9 +33,6 @@ import io.opentracing.SpanContext;
import io.opentracing.Tracer;
import io.opentracing.util.GlobalTracer;
-import org.apache.hadoop.hdds.conf.ConfigurationSource;
-import org.apache.hadoop.hdds.scm.ScmConfigKeys;
-
/**
* Utility class to collect all the tracing helper methods.
*/
@@ -86,11 +89,11 @@ public final class TracingUtil {
*
* @return OpenTracing scope.
*/
- public static Scope importAndCreateScope(String name, String encodedParent) {
+ public static Span importAndCreateSpan(String name, String encodedParent) {
Tracer tracer = GlobalTracer.get();
return tracer.buildSpan(name)
.asChildOf(extractParent(encodedParent, tracer))
- .startActive(true);
+ .start();
}
private static SpanContext extractParent(String parent, Tracer tracer) {
@@ -130,8 +133,57 @@ public final class TracingUtil {
private static boolean isTracingEnabled(
ConfigurationSource conf) {
return conf.getBoolean(
- ScmConfigKeys.HDDS_TRACING_ENABLED,
- ScmConfigKeys.HDDS_TRACING_ENABLED_DEFAULT);
+ ScmConfigKeys.HDDS_TRACING_ENABLED,
+ ScmConfigKeys.HDDS_TRACING_ENABLED_DEFAULT);
}
+ /**
+ * Execute a new function inside an activated span.
+ */
+ public static <R> R executeInNewSpan(String spanName,
+ SupplierWithIOException<R> supplier)
+ throws IOException {
+ Span span = GlobalTracer.get()
+ .buildSpan(spanName).start();
+ try (Scope scope = GlobalTracer.get().activateSpan(span)) {
+ return supplier.get();
+ } catch (Exception ex) {
+ span.setTag("failed", true);
+ throw ex;
+ } finally {
+ span.finish();
+ }
+ }
+
+ /**
+ * Execute a new function inside an activated span.
+ */
+ public static <R> R executeInNewSpan(String spanName,
+ Supplier<R> supplier) {
+ Span span = GlobalTracer.get()
+ .buildSpan(spanName).start();
+ try (Scope scope = GlobalTracer.get().activateSpan(span)) {
+ return supplier.get();
+ } catch (Exception ex) {
+ span.setTag("failed", true);
+ throw ex;
+ } finally {
+ span.finish();
+ }
+ }
+
+ /**
+ * Create an active span with auto-close at finish.
+ * <p>
+ * This is a simplified way to use span as there is no way to add any tag
+ * in case of Exceptions.
+ */
+ public static AutoCloseable createActivatedSpan(String spanName) {
+ Span span = GlobalTracer.get().buildSpan(spanName).start();
+ Scope scope = GlobalTracer.get().activateSpan(span);
+ return () -> {
+ scope.close();
+ span.finish();
+ };
+ }
}
diff --git
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/HddsDispatcher.java
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/HddsDispatcher.java
index 3f07c95..c998f89 100644
---
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/HddsDispatcher.java
+++
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/HddsDispatcher.java
@@ -61,6 +61,8 @@ import org.apache.hadoop.security.UserGroupInformation;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import io.opentracing.Scope;
+import io.opentracing.Span;
+import io.opentracing.util.GlobalTracer;
import static
org.apache.hadoop.hdds.scm.protocolPB.ContainerCommandResponseBuilders.malformedRequest;
import static
org.apache.hadoop.hdds.scm.protocolPB.ContainerCommandResponseBuilders.unsupportedRequest;
import org.apache.ratis.thirdparty.com.google.protobuf.ProtocolMessageEnum;
@@ -157,10 +159,12 @@ public class HddsDispatcher implements
ContainerDispatcher, Auditor {
ContainerCommandRequestProto msg, DispatcherContext dispatcherContext) {
String spanName = "HddsDispatcher." + msg.getCmdType().name();
long startTime = System.nanoTime();
- try (Scope scope = TracingUtil
- .importAndCreateScope(spanName, msg.getTraceID())) {
+ Span span = TracingUtil
+ .importAndCreateSpan(spanName, msg.getTraceID());
+ try (Scope scope = GlobalTracer.get().activateSpan(span)) {
return dispatchRequest(msg, dispatcherContext);
} finally {
+ span.finish();
protocolMetrics
.increment(msg.getCmdType(), System.nanoTime() - startTime);
}
diff --git
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerGrpc.java
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerGrpc.java
index 9adabf4..3647af1 100644
---
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerGrpc.java
+++
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerGrpc.java
@@ -43,6 +43,8 @@ import
org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher;
import com.google.common.base.Preconditions;
import io.opentracing.Scope;
+import io.opentracing.Span;
+import io.opentracing.util.GlobalTracer;
import org.apache.ratis.thirdparty.io.grpc.BindableService;
import org.apache.ratis.thirdparty.io.grpc.Server;
import org.apache.ratis.thirdparty.io.grpc.ServerBuilder;
@@ -170,16 +172,19 @@ public final class XceiverServerGrpc implements
XceiverServerSpi {
@Override
public void submitRequest(ContainerCommandRequestProto request,
HddsProtos.PipelineID pipelineID) throws IOException {
- try (Scope scope = TracingUtil
- .importAndCreateScope(
+ Span span = TracingUtil
+ .importAndCreateSpan(
"XceiverServerGrpc." + request.getCmdType().name(),
- request.getTraceID())) {
+ request.getTraceID());
+ try (Scope scope = GlobalTracer.get().activateSpan(span)) {
ContainerProtos.ContainerCommandResponseProto response =
storageContainer.dispatch(request, null);
if (response.getResult() != ContainerProtos.Result.SUCCESS) {
throw new StorageContainerException(response.getMessage(),
response.getResult());
}
+ } finally {
+ span.finish();
}
}
diff --git
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java
index d3bdb3f..47dc412 100644
---
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java
+++
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java
@@ -67,6 +67,8 @@ import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import io.opentracing.Scope;
+import io.opentracing.Span;
+import io.opentracing.util.GlobalTracer;
import org.apache.ratis.RaftConfigKeys;
import org.apache.ratis.conf.RaftProperties;
import org.apache.ratis.grpc.GrpcConfigKeys;
@@ -98,6 +100,7 @@ import org.apache.ratis.util.TimeDuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+
/**
* Creates a ratis server endpoint that acts as the communication layer for
* Ozone containers.
@@ -494,10 +497,11 @@ public final class XceiverServerRatis implements
XceiverServerSpi {
public void submitRequest(ContainerCommandRequestProto request,
HddsProtos.PipelineID pipelineID) throws IOException {
RaftClientReply reply;
- try (Scope scope = TracingUtil
- .importAndCreateScope(
+ Span span = TracingUtil
+ .importAndCreateSpan(
"XceiverServerRatis." + request.getCmdType().name(),
- request.getTraceID())) {
+ request.getTraceID());
+ try (Scope scope = GlobalTracer.get().activateSpan(span)) {
RaftClientRequest raftClientRequest =
createRaftClientRequest(request, pipelineID,
@@ -508,6 +512,8 @@ public final class XceiverServerRatis implements
XceiverServerSpi {
throw new IOException(e.getMessage(), e);
}
processReply(reply);
+ } finally {
+ span.finish();
}
}
diff --git
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/OzoneProtocolMessageDispatcher.java
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/OzoneProtocolMessageDispatcher.java
index d52fbda..c239c0d 100644
---
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/OzoneProtocolMessageDispatcher.java
+++
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/OzoneProtocolMessageDispatcher.java
@@ -23,7 +23,7 @@ import org.apache.hadoop.hdds.utils.ProtocolMessageMetrics;
import com.google.protobuf.ProtocolMessageEnum;
import com.google.protobuf.ServiceException;
-import io.opentracing.Scope;
+import io.opentracing.Span;
import org.slf4j.Logger;
/**
@@ -56,8 +56,7 @@ public class OzoneProtocolMessageDispatcher<REQUEST,
RESPONSE> {
FunctionWithServiceException<REQUEST, RESPONSE> methodCall,
ProtocolMessageEnum type,
String traceId) throws ServiceException {
- Scope scope = TracingUtil
- .importAndCreateScope(type.toString(), traceId);
+ Span span = TracingUtil.importAndCreateSpan(type.toString(), traceId);
try {
if (logger.isTraceEnabled()) {
logger.trace(
@@ -87,7 +86,7 @@ public class OzoneProtocolMessageDispatcher<REQUEST,
RESPONSE> {
return response;
} finally {
- scope.close();
+ span.finish();
}
}
}
diff --git
a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/TracingFilter.java
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/TracingFilter.java
index 80b28b5..28e5665 100644
---
a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/TracingFilter.java
+++
b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/TracingFilter.java
@@ -17,15 +17,19 @@
*/
package org.apache.hadoop.ozone.s3;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.ContainerResponseContext;
+import javax.ws.rs.container.ContainerResponseFilter;
+import javax.ws.rs.container.ResourceInfo;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.ext.Provider;
+
import io.opentracing.Scope;
import io.opentracing.ScopeManager;
import io.opentracing.Span;
import io.opentracing.util.GlobalTracer;
-import javax.ws.rs.container.*;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.ext.Provider;
-
/**
* Filter used to add jaeger tracing span.
*/
@@ -35,44 +39,44 @@ public class TracingFilter implements
ContainerRequestFilter,
ContainerResponseFilter {
public static final String TRACING_SCOPE = "TRACING_SCOPE";
+ public static final String TRACING_SPAN = "TRACING_SPAN";
@Context
private ResourceInfo resourceInfo;
- private void closeScope(Scope scope) {
- if (scope != null) {
- Span span = scope.span();
- if (span != null) {
- span.finish();
- }
-
- scope.close();
- }
- }
-
- private void closeActiveScope() {
- ScopeManager manager = GlobalTracer.get().scopeManager();
-
- if (manager != null) {
- Scope scope = manager.active();
- closeScope(scope);
- }
- }
@Override
public void filter(ContainerRequestContext requestContext) {
- closeActiveScope();
+ finishAndCloseActiveSpan();
- Scope scope = GlobalTracer.get().buildSpan(
+ Span span = GlobalTracer.get().buildSpan(
resourceInfo.getResourceClass().getSimpleName() + "." +
- resourceInfo.getResourceMethod().getName()).startActive(true);
+ resourceInfo.getResourceMethod().getName()).start();
+ Scope scope = GlobalTracer.get().activateSpan(span);
requestContext.setProperty(TRACING_SCOPE, scope);
+ requestContext.setProperty(TRACING_SPAN, span);
}
@Override
public void filter(ContainerRequestContext requestContext,
ContainerResponseContext responseContext) {
Scope scope = (Scope)requestContext.getProperty(TRACING_SCOPE);
- closeScope(scope);
+ if (scope != null) {
+ scope.close();
+ }
+ Span span = (Span) requestContext.getProperty(TRACING_SPAN);
+ if (span != null) {
+ span.finish();
+ }
+
+ finishAndCloseActiveSpan();
+ }
+
+ private void finishAndCloseActiveSpan() {
+ ScopeManager scopeManager = GlobalTracer.get().scopeManager();
+ if (scopeManager != null && scopeManager.activeSpan() != null) {
+ scopeManager.activeSpan().finish();
+ scopeManager.activate(null);
+ }
}
}
diff --git
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/BaseFreonGenerator.java
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/BaseFreonGenerator.java
index 6ffbc0d..1d57136 100644
---
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/BaseFreonGenerator.java
+++
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/BaseFreonGenerator.java
@@ -29,8 +29,6 @@ import java.util.function.Consumer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import com.codahale.metrics.ScheduledReporter;
-import com.codahale.metrics.Slf4jReporter;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
@@ -53,7 +51,10 @@ import org.apache.hadoop.security.UserGroupInformation;
import com.codahale.metrics.ConsoleReporter;
import com.codahale.metrics.MetricRegistry;
+import com.codahale.metrics.ScheduledReporter;
+import com.codahale.metrics.Slf4jReporter;
import io.opentracing.Scope;
+import io.opentracing.Span;
import io.opentracing.util.GlobalTracer;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang3.RandomStringUtils;
@@ -170,18 +171,16 @@ public class BaseFreonGenerator {
* @param taskId unique ID of the task
*/
private void tryNextTask(TaskProvider provider, long taskId) {
- Scope scope =
- GlobalTracer.get().buildSpan(spanName)
- .startActive(true);
- try {
+ Span span = GlobalTracer.get().buildSpan(spanName).start();
+ try (Scope scope = GlobalTracer.get().activateSpan(span)) {
provider.executeNextTask(taskId);
successCounter.incrementAndGet();
} catch (Exception e) {
- scope.span().setTag("failure", true);
+ span.setTag("failure", true);
failureCounter.incrementAndGet();
LOG.error("Error on executing task {}", taskId, e);
} finally {
- scope.close();
+ span.finish();
}
}
diff --git
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/RandomKeyGenerator.java
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/RandomKeyGenerator.java
index 9e1a45c..4751672 100644
---
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/RandomKeyGenerator.java
+++
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/RandomKeyGenerator.java
@@ -22,6 +22,7 @@ import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
+import java.security.MessageDigest;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
@@ -31,21 +32,19 @@ import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
+import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.LongSupplier;
-import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
-import io.opentracing.Scope;
-import io.opentracing.util.GlobalTracer;
-import org.apache.commons.codec.digest.DigestUtils;
import org.apache.hadoop.hdds.HddsConfigKeys;
import org.apache.hadoop.hdds.cli.HddsVersionProvider;
import org.apache.hadoop.hdds.client.OzoneQuota;
import org.apache.hadoop.hdds.client.ReplicationFactor;
import org.apache.hadoop.hdds.client.ReplicationType;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.tracing.TracingUtil;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.ozone.OzoneConsts;
import org.apache.hadoop.ozone.client.ObjectStore;
@@ -66,6 +65,8 @@ import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.google.common.annotations.VisibleForTesting;
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.time.DurationFormatUtils;
import org.slf4j.Logger;
@@ -73,8 +74,6 @@ import org.slf4j.LoggerFactory;
import picocli.CommandLine.Command;
import picocli.CommandLine.Option;
import picocli.CommandLine.ParentCommand;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.security.MessageDigest;
/**
* Data generator tool to generate as much keys as possible.
@@ -623,8 +622,8 @@ public final class RandomKeyGenerator implements
Callable<Void> {
String volumeName = "vol-" + volumeNumber + "-"
+ RandomStringUtils.randomNumeric(5);
LOG.trace("Creating volume: {}", volumeName);
- try (Scope ignored = GlobalTracer.get().buildSpan("createVolume")
- .startActive(true)) {
+ try (AutoCloseable scope = TracingUtil
+ .createActivatedSpan("createVolume")) {
long start = System.nanoTime();
objectStore.createVolume(volumeName);
long volumeCreationDuration = System.nanoTime() - start;
@@ -655,8 +654,9 @@ public final class RandomKeyGenerator implements
Callable<Void> {
RandomStringUtils.randomNumeric(5);
LOG.trace("Creating bucket: {} in volume: {}",
bucketName, volume.getName());
- try (Scope ignored = GlobalTracer.get().buildSpan("createBucket")
- .startActive(true)) {
+ try (AutoCloseable scope = TracingUtil
+ .createActivatedSpan("createBucket")) {
+
long start = System.nanoTime();
volume.createBucket(bucketName);
long bucketCreationDuration = System.nanoTime() - start;
@@ -691,8 +691,7 @@ public final class RandomKeyGenerator implements
Callable<Void> {
LOG.trace("Adding key: {} in bucket: {} of volume: {}",
keyName, bucketName, volumeName);
try {
- try (Scope scope = GlobalTracer.get().buildSpan("createKey")
- .startActive(true)) {
+ try (AutoCloseable scope = TracingUtil.createActivatedSpan("createKey"))
{
long keyCreateStart = System.nanoTime();
try (OzoneOutputStream os = bucket.createKey(keyName, keySize, type,
factor, new HashMap<>())) {
@@ -701,8 +700,8 @@ public final class RandomKeyGenerator implements
Callable<Void> {
.update(keyCreationDuration);
keyCreationTime.getAndAdd(keyCreationDuration);
- try (Scope writeScope = GlobalTracer.get().buildSpan("writeKeyData")
- .startActive(true)) {
+ try (AutoCloseable writeScope = TracingUtil
+ .createActivatedSpan("writeKeyData")) {
long keyWriteStart = System.nanoTime();
for (long nrRemaining = keySize;
nrRemaining > 0; nrRemaining -= bufferSize) {
diff --git
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/OzoneShell.java
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/OzoneShell.java
index 4cedd4a..7dad764 100644
---
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/OzoneShell.java
+++
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/OzoneShell.java
@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.ozone.shell;
+import java.util.function.Supplier;
+
import org.apache.hadoop.hdds.cli.HddsVersionProvider;
import org.apache.hadoop.hdds.tracing.TracingUtil;
import org.apache.hadoop.ozone.shell.bucket.BucketCommands;
@@ -24,8 +26,6 @@ import org.apache.hadoop.ozone.shell.keys.KeyCommands;
import org.apache.hadoop.ozone.shell.token.TokenCommands;
import org.apache.hadoop.ozone.shell.volume.VolumeCommands;
-import io.opentracing.Scope;
-import io.opentracing.util.GlobalTracer;
import picocli.CommandLine.Command;
/**
@@ -56,9 +56,11 @@ public class OzoneShell extends Shell {
@Override
public void execute(String[] argv) {
TracingUtil.initTracing("shell", createOzoneConfiguration());
- try (Scope scope = GlobalTracer.get().buildSpan("main").startActive(true))
{
- super.execute(argv);
- }
+ TracingUtil.executeInNewSpan("main",
+ (Supplier<Void>) () -> {
+ super.execute(argv);
+ return null;
+ });
}
}
diff --git
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/s3/S3Shell.java
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/s3/S3Shell.java
index b8f3e6f..1eac7f4 100644
---
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/s3/S3Shell.java
+++
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/s3/S3Shell.java
@@ -17,10 +17,11 @@
*/
package org.apache.hadoop.ozone.shell.s3;
-import io.opentracing.Scope;
-import io.opentracing.util.GlobalTracer;
+import java.util.function.Supplier;
+
import org.apache.hadoop.hdds.tracing.TracingUtil;
import org.apache.hadoop.ozone.shell.Shell;
+
import picocli.CommandLine.Command;
/**
@@ -38,9 +39,11 @@ public class S3Shell extends Shell {
@Override
public void execute(String[] argv) {
TracingUtil.initTracing("s3shell", createOzoneConfiguration());
- try (Scope scope = GlobalTracer.get().buildSpan("main").startActive(true))
{
- super.execute(argv);
- }
+ TracingUtil.executeInNewSpan("s3shell",
+ (Supplier<Void>) () -> {
+ super.execute(argv);
+ return null;
+ });
}
/**
diff --git a/pom.xml b/pom.xml
index 09b8975..af80b11 100644
--- a/pom.xml
+++ b/pom.xml
@@ -131,7 +131,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
http://maven.apache.org/xs
<jackson2.version>2.10.3</jackson2.version>
<!-- jaegertracing version -->
- <jaeger.version>0.34.0</jaeger.version>
+ <jaeger.version>1.2.0</jaeger.version>
<!-- httpcomponents versions -->
<httpclient.version>4.5.2</httpclient.version>
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]