bharathv commented on a change in pull request #1583:
URL: https://github.com/apache/hbase/pull/1583#discussion_r414894794



##########
File path: hbase-it/src/test/java/org/apache/hadoop/hbase/CoprocClusterManager.java
##########
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import java.io.IOException;
+import java.util.Objects;
+import org.apache.commons.lang3.ArrayUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.hbase.client.AsyncAdmin;
+import org.apache.hadoop.hbase.client.AsyncConnection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecRequest;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecResponse;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecService;
+import org.apache.hadoop.hbase.util.Pair;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Overrides commands to make use of Coproc where possible.
+ */
+@InterfaceAudience.Private
+public class CoprocClusterManager extends HBaseClusterManager {
+  private static final Logger LOG = LoggerFactory.getLogger(CoprocClusterManager.class);
+
+  @Override
+  protected Pair<Integer, String> exec(String hostname, ServiceType service, String... cmd)
+    throws IOException {
+    // we don't have ssh to log in as the service user, so wrap the command in sudo.
+    // assumes user running the hbase process has sudo privileges to target service.
+    final String serviceUser = getServiceUser(service);
+    final String[] commandPrefix =
+      !Objects.equals(serviceUser, getServiceUser(ServiceType.HBASE_MASTER))
+        ? new String[] { "sudo", "-u", serviceUser }
+        : new String[0];
+    final String command = StringUtils.join(ArrayUtils.addAll(commandPrefix, cmd), " ");
+    LOG.info("Executing remote command: {}, hostname:{}", command, hostname);
+
+    try (final AsyncConnection conn = ConnectionFactory.createAsyncConnection(getConf()).join()) {
+      final AsyncAdmin admin = conn.getAdmin();
+      final ShellExecRequest req = ShellExecRequest.newBuilder()
+        .setCommand(command)
+        .build();
+
+      final ShellExecResponse resp;
+      switch(service) {
+        case HBASE_MASTER:
+          // TODO: what happens if the intended action was killing a backup master?
+          resp = masterExec(admin, req);
+          break;
+        case HBASE_REGIONSERVER:
+          final ServerName targetHost = resolveRegionServerName(admin, hostname);
+          resp = regionServerExec(admin, req, targetHost);
+          break;
+        default:
+          throw unsupportedServiceType(service);

Review comment:
       nit: Move this to the top, make it a preconditions check?
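       A rough sketch of what I mean (assuming the shaded Guava Preconditions from hbase-thirdparty is usable here):
       ```java
       // Hypothetical rearrangement: reject unsupported service types up front,
       // before building the sudo-wrapped command.
       import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;

       @Override
       protected Pair<Integer, String> exec(String hostname, ServiceType service, String... cmd)
         throws IOException {
         Preconditions.checkArgument(
           service == ServiceType.HBASE_MASTER || service == ServiceType.HBASE_REGIONSERVER,
           "Unsupported service type %s", service);
         // ... build the command and dispatch to masterExec / regionServerExec as before ...
       }
       ```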

##########
File path: hbase-it/src/test/java/org/apache/hadoop/hbase/CoprocClusterManager.java
##########
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import java.io.IOException;
+import java.util.Objects;
+import org.apache.commons.lang3.ArrayUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.hbase.client.AsyncAdmin;
+import org.apache.hadoop.hbase.client.AsyncConnection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecRequest;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecResponse;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecService;
+import org.apache.hadoop.hbase.util.Pair;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Overrides commands to make use of Coproc where possible.
+ */
+@InterfaceAudience.Private
+public class CoprocClusterManager extends HBaseClusterManager {
+  private static final Logger LOG = LoggerFactory.getLogger(CoprocClusterManager.class);
+
+  @Override
+  protected Pair<Integer, String> exec(String hostname, ServiceType service, String... cmd)
+    throws IOException {
+    // we don't have ssh to log in as the service user, so wrap the command in sudo.
+    // assumes user running the hbase process has sudo privileges to target service.
+    final String serviceUser = getServiceUser(service);
+    final String[] commandPrefix =
+      !Objects.equals(serviceUser, getServiceUser(ServiceType.HBASE_MASTER))

Review comment:
       Is this possible given the check in L72?

##########
File path: hbase-endpoint/src/main/protobuf/ShellExecEndpoint.proto
##########
@@ -0,0 +1,42 @@
+/**

Review comment:
       port to master as a follow up?

##########
File path: hbase-it/src/test/java/org/apache/hadoop/hbase/ShellExecCoproc.java
##########
@@ -0,0 +1,149 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import com.google.protobuf.RpcCallback;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.Service;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecRequest;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecResponse;
+import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
+import org.apache.hadoop.util.Shell;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
+
+/**
+ * Receives shell commands from the client and executes them blindly.
+ */
+@InterfaceAudience.Private
+public class ShellExecCoproc

Review comment:
       nit: odd indentation

##########
File path: hbase-it/src/test/java/org/apache/hadoop/hbase/ShellExecCoproc.java
##########
@@ -0,0 +1,149 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import com.google.protobuf.RpcCallback;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.Service;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecRequest;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecResponse;
+import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
+import org.apache.hadoop.util.Shell;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
+
+/**
+ * Receives shell commands from the client and executes them blindly.
+ */
+@InterfaceAudience.Private
+public class ShellExecCoproc
+  extends ShellExecEndpoint.ShellExecService
+  implements MasterCoprocessor, RegionServerCoprocessor {
+  private static final Logger LOG = LoggerFactory.getLogger(ShellExecCoproc.class);
+
+  public static final String ASYNC_DELAY_KEY = "hbase.it.shellexeccoproc.async.delay";
+  public static final long DEFAULT_ASYNC_DELAY = 1_000;
+
+  private final ExecutorService workerPool;
+  private Configuration conf;
+
+  public ShellExecCoproc() {
+    workerPool = Executors.newSingleThreadExecutor(
+      new ThreadFactoryBuilder()
+        .setNameFormat(ShellExecCoproc.class.getSimpleName() + "-{}")
+        .setDaemon(true)
+        .setUncaughtExceptionHandler((t, e) -> LOG.warn("Thread {} threw", t, e))
+        .build());
+  }
+
+  @Override
+  public Iterable<Service> getServices() {
+    return Collections.singletonList(this);
+  }
+
+  @Override
+  public void start(CoprocessorEnvironment env) {
+    conf = env.getConfiguration();
+  }
+
+  @Override
+  public void shellExec(
+    final RpcController controller,
+    final ShellExecRequest request,
+    final RpcCallback<ShellExecResponse> done
+  ) {
+    final Shell.ShellCommandExecutor shell = prepareShell(request, false);
+
+    ShellExecResponse.Builder builder = ShellExecResponse.newBuilder();
+    try {
+      builder = doExec(shell, builder);
+    } catch (IOException e) {
+      LOG.info("Failure launching process", e);
+      CoprocessorRpcUtils.setControllerException(controller, e);
+    }
+
+    done.run(builder.build());
+  }
+
+  @Override
+  public void shellExecAsync(
+    final RpcController controller,
+    final ShellExecRequest request,
+    final RpcCallback<ShellExecResponse> done
+  ) {
+    final Shell.ShellCommandExecutor shell = prepareShell(request, true);
+    final long sleepDuration = conf.getLong(ASYNC_DELAY_KEY, DEFAULT_ASYNC_DELAY);
+    workerPool.submit(() -> {
+      try {
+        // sleep first so that the RPC can ACK.

Review comment:
       Don't fully understand this, is there a race?

##########
File path: hbase-it/src/test/java/org/apache/hadoop/hbase/ShellExecCoproc.java
##########
@@ -0,0 +1,149 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import com.google.protobuf.RpcCallback;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.Service;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecRequest;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecResponse;
+import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
+import org.apache.hadoop.util.Shell;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
+
+/**
+ * Receives shell commands from the client and executes them blindly.
+ */
+@InterfaceAudience.Private
+public class ShellExecCoproc
+  extends ShellExecEndpoint.ShellExecService
+  implements MasterCoprocessor, RegionServerCoprocessor {
+  private static final Logger LOG = LoggerFactory.getLogger(ShellExecCoproc.class);
+
+  public static final String ASYNC_DELAY_KEY = "hbase.it.shellexeccoproc.async.delay";
+  public static final long DEFAULT_ASYNC_DELAY = 1_000;
+
+  private final ExecutorService workerPool;
+  private Configuration conf;
+
+  public ShellExecCoproc() {
+    workerPool = Executors.newSingleThreadExecutor(
+      new ThreadFactoryBuilder()
+        .setNameFormat(ShellExecCoproc.class.getSimpleName() + "-{}")
+        .setDaemon(true)
+        .setUncaughtExceptionHandler((t, e) -> LOG.warn("Thread {} threw", t, e))
+        .build());
+  }
+
+  @Override
+  public Iterable<Service> getServices() {
+    return Collections.singletonList(this);
+  }
+
+  @Override
+  public void start(CoprocessorEnvironment env) {
+    conf = env.getConfiguration();
+  }
+
+  @Override
+  public void shellExec(
+    final RpcController controller,
+    final ShellExecRequest request,
+    final RpcCallback<ShellExecResponse> done
+  ) {
+    final Shell.ShellCommandExecutor shell = prepareShell(request, false);
+
+    ShellExecResponse.Builder builder = ShellExecResponse.newBuilder();
+    try {
+      builder = doExec(shell, builder);
+    } catch (IOException e) {
+      LOG.info("Failure launching process", e);
+      CoprocessorRpcUtils.setControllerException(controller, e);
+    }
+
+    done.run(builder.build());
+  }
+
+  @Override
+  public void shellExecAsync(
+    final RpcController controller,
+    final ShellExecRequest request,
+    final RpcCallback<ShellExecResponse> done
+  ) {
+    final Shell.ShellCommandExecutor shell = prepareShell(request, true);
+    final long sleepDuration = conf.getLong(ASYNC_DELAY_KEY, DEFAULT_ASYNC_DELAY);
+    workerPool.submit(() -> {
+      try {
+        // sleep first so that the RPC can ACK.
+        Thread.sleep(sleepDuration);
+        doExec(shell, ShellExecResponse.newBuilder());
+      } catch (InterruptedException e) {
+        LOG.info("Interrupted before launching process.", e);

Review comment:
       LOG.error?

##########
File path: hbase-it/src/test/java/org/apache/hadoop/hbase/ShellExecCoproc.java
##########
@@ -0,0 +1,149 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import com.google.protobuf.RpcCallback;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.Service;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecRequest;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecResponse;
+import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
+import org.apache.hadoop.util.Shell;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
+
+/**
+ * Receives shell commands from the client and executes them blindly.
+ */
+@InterfaceAudience.Private
+public class ShellExecCoproc
+  extends ShellExecEndpoint.ShellExecService
+  implements MasterCoprocessor, RegionServerCoprocessor {
+  private static final Logger LOG = LoggerFactory.getLogger(ShellExecCoproc.class);
+
+  public static final String ASYNC_DELAY_KEY = "hbase.it.shellexeccoproc.async.delay";
+  public static final long DEFAULT_ASYNC_DELAY = 1_000;
+
+  private final ExecutorService workerPool;
+  private Configuration conf;
+
+  public ShellExecCoproc() {
+    workerPool = Executors.newSingleThreadExecutor(
+      new ThreadFactoryBuilder()
+        .setNameFormat(ShellExecCoproc.class.getSimpleName() + "-{}")
+        .setDaemon(true)
+        .setUncaughtExceptionHandler((t, e) -> LOG.warn("Thread {} threw", t, e))
+        .build());
+  }
+
+  @Override
+  public Iterable<Service> getServices() {
+    return Collections.singletonList(this);
+  }
+
+  @Override
+  public void start(CoprocessorEnvironment env) {
+    conf = env.getConfiguration();
+  }
+
+  @Override
+  public void shellExec(
+    final RpcController controller,
+    final ShellExecRequest request,
+    final RpcCallback<ShellExecResponse> done
+  ) {
+    final Shell.ShellCommandExecutor shell = prepareShell(request, false);
+
+    ShellExecResponse.Builder builder = ShellExecResponse.newBuilder();
+    try {
+      builder = doExec(shell, builder);
+    } catch (IOException e) {
+      LOG.info("Failure launching process", e);
+      CoprocessorRpcUtils.setControllerException(controller, e);
+    }
+
+    done.run(builder.build());
+  }
+
+  @Override
+  public void shellExecAsync(
+    final RpcController controller,
+    final ShellExecRequest request,
+    final RpcCallback<ShellExecResponse> done
+  ) {
+    final Shell.ShellCommandExecutor shell = prepareShell(request, true);
+    final long sleepDuration = conf.getLong(ASYNC_DELAY_KEY, DEFAULT_ASYNC_DELAY);
+    workerPool.submit(() -> {
+      try {
+        // sleep first so that the RPC can ACK.
+        Thread.sleep(sleepDuration);
+        doExec(shell, ShellExecResponse.newBuilder());
+      } catch (InterruptedException e) {
+        LOG.info("Interrupted before launching process.", e);
+      } catch (IOException e) {
+        LOG.info("Failure launching process", e);
+      }
+    });
+    done.run(ShellExecResponse.newBuilder().build());

Review comment:
       This doesn't look right to me; shouldn't this be predicated on whether the task submitted above is successful? (You probably need to chain them, e.g. thenApply()...)
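       Roughly something like this (just a sketch of the chaining idea; assumes java.util.concurrent.CompletableFuture/CompletionException and reuses the existing workerPool, doExec, controller and done):
       ```java
       // Hypothetical chaining: only run the callback once the background exec has finished,
       // and surface a launch failure through the controller instead of swallowing it.
       CompletableFuture
         .runAsync(() -> {
           try {
             doExec(shell, ShellExecResponse.newBuilder());
           } catch (IOException e) {
             throw new CompletionException(e);
           }
         }, workerPool)
         .whenComplete((ignored, err) -> {
           if (err != null) {
             LOG.error("Failure launching process", err);
             CoprocessorRpcUtils.setControllerException(controller, new IOException(err));
           }
           done.run(ShellExecResponse.newBuilder().build());
         });
       ```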

##########
File path: hbase-it/src/test/java/org/apache/hadoop/hbase/ShellExecCoproc.java
##########
@@ -0,0 +1,149 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import com.google.protobuf.RpcCallback;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.Service;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecRequest;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecResponse;
+import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
+import org.apache.hadoop.util.Shell;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
+
+/**
+ * Receives shell commands from the client and executes them blindly.
+ */
+@InterfaceAudience.Private
+public class ShellExecCoproc
+  extends ShellExecEndpoint.ShellExecService
+  implements MasterCoprocessor, RegionServerCoprocessor {
+  private static final Logger LOG = LoggerFactory.getLogger(ShellExecCoproc.class);
+
+  public static final String ASYNC_DELAY_KEY = "hbase.it.shellexeccoproc.async.delay";
+  public static final long DEFAULT_ASYNC_DELAY = 1_000;
+
+  private final ExecutorService workerPool;
+  private Configuration conf;
+
+  public ShellExecCoproc() {
+    workerPool = Executors.newSingleThreadExecutor(
+      new ThreadFactoryBuilder()
+        .setNameFormat(ShellExecCoproc.class.getSimpleName() + "-{}")
+        .setDaemon(true)
+        .setUncaughtExceptionHandler((t, e) -> LOG.warn("Thread {} threw", t, e))
+        .build());
+  }
+
+  @Override
+  public Iterable<Service> getServices() {
+    return Collections.singletonList(this);
+  }
+
+  @Override
+  public void start(CoprocessorEnvironment env) {
+    conf = env.getConfiguration();
+  }
+
+  @Override
+  public void shellExec(
+    final RpcController controller,
+    final ShellExecRequest request,
+    final RpcCallback<ShellExecResponse> done
+  ) {
+    final Shell.ShellCommandExecutor shell = prepareShell(request, false);
+
+    ShellExecResponse.Builder builder = ShellExecResponse.newBuilder();
+    try {
+      builder = doExec(shell, builder);
+    } catch (IOException e) {
+      LOG.info("Failure launching process", e);
+      CoprocessorRpcUtils.setControllerException(controller, e);
+    }
+
+    done.run(builder.build());
+  }
+
+  @Override
+  public void shellExecAsync(
+    final RpcController controller,
+    final ShellExecRequest request,
+    final RpcCallback<ShellExecResponse> done
+  ) {
+    final Shell.ShellCommandExecutor shell = prepareShell(request, true);

Review comment:
       nit: I think in a true async mode, this should be async too :-)
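       i.e. something along these lines (sketch only, reusing the existing helpers):
       ```java
       // Hypothetical: build the shell command inside the worker task as well,
       // so the RPC handler does nothing beyond scheduling the work.
       workerPool.submit(() -> {
         try {
           final Shell.ShellCommandExecutor shell = prepareShell(request, true);
           Thread.sleep(conf.getLong(ASYNC_DELAY_KEY, DEFAULT_ASYNC_DELAY));
           doExec(shell, ShellExecResponse.newBuilder());
         } catch (InterruptedException | IOException e) {
           LOG.error("Failure launching process", e);
         }
       });
       done.run(ShellExecResponse.newBuilder().build());
       ```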

##########
File path: hbase-it/src/test/java/org/apache/hadoop/hbase/TestShellExecCoproc.java
##########
@@ -0,0 +1,130 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Optional;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.client.AsyncAdmin;
+import org.apache.hadoop.hbase.client.AsyncConnection;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecRequest;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecResponse;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecService;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+/**
+ * Test for the {@link ShellExecCoproc}.
+ */
+@Category(MediumTests.class)
+public class TestShellExecCoproc {
+
+  @ClassRule
+  public static final HBaseClassTestRule testRule =
+    HBaseClassTestRule.forClass(TestShellExecCoproc.class);
+
+  @ClassRule
+  public static final MiniClusterRule miniClusterRule = MiniClusterRule.newBuilder()
+    .setConfiguration(createConfiguration())
+    .build();
+
+  @Rule
+  public final ConnectionRule connectionRule =
+    new ConnectionRule(miniClusterRule::createConnection);
+
+  @Test
+  public void testShellExec() {
+    final AsyncConnection conn = connectionRule.getConnection();
+    final AsyncAdmin admin = conn.getAdmin();
+
+    final String command = "echo -n \"hello world\"";
+    final ShellExecRequest req = ShellExecRequest.newBuilder()
+      .setCommand(command)
+      .build();
+    final ShellExecResponse resp = admin
+      .<ShellExecService.Stub, ShellExecResponse>coprocessorService(
+        ShellExecService::newStub,
+        (stub, controller, callback) -> stub.shellExec(controller, req, callback))
+      .join();
+    assertEquals(0, resp.getExitCode());
+    assertEquals("hello world", resp.getStdout());
+  }
+
+  @Test
+  public void testShellExecAsync() throws IOException {
+    final AsyncConnection conn = connectionRule.getConnection();
+    final AsyncAdmin admin = conn.getAdmin();
+
+    final File testDataDir = ensureTestDataDirExists(miniClusterRule.getTestingUtility());
+    final File testFile = new File(testDataDir, "shell_exec_async.txt");
+    assertTrue(testFile.createNewFile());
+    assertEquals(0, testFile.length());
+
+    final String command = "echo \"hello world\" >> " + testFile.getAbsolutePath();
+    final ShellExecRequest req = ShellExecRequest.newBuilder()
+      .setCommand(command)
+      .build();
+    admin.<ShellExecService.Stub, ShellExecResponse>coprocessorService(
+      ShellExecService::newStub,
+      (stub, controller, callback) -> stub.shellExecAsync(controller, req, callback))
+      .join();
+
+    Waiter.waitFor(conn.getConfiguration(), 5_000, () -> testFile.length() > 0);

Review comment:
       How about reading the file back using a coprocessorService call too? That checks the stdout codepath (cat foo).

##########
File path: hbase-it/src/test/java/org/apache/hadoop/hbase/ShellExecCoproc.java
##########
@@ -0,0 +1,149 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import com.google.protobuf.RpcCallback;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.Service;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecRequest;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecResponse;
+import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
+import org.apache.hadoop.util.Shell;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
+
+/**
+ * Receives shell commands from the client and executes them blindly.
+ */
+@InterfaceAudience.Private
+public class ShellExecCoproc
+  extends ShellExecEndpoint.ShellExecService
+  implements MasterCoprocessor, RegionServerCoprocessor {
+  private static final Logger LOG = LoggerFactory.getLogger(ShellExecCoproc.class);
+
+  public static final String ASYNC_DELAY_KEY = "hbase.it.shellexeccoproc.async.delay";
+  public static final long DEFAULT_ASYNC_DELAY = 1_000;
+
+  private final ExecutorService workerPool;
+  private Configuration conf;
+
+  public ShellExecCoproc() {
+    workerPool = Executors.newSingleThreadExecutor(
+      new ThreadFactoryBuilder()
+        .setNameFormat(ShellExecCoproc.class.getSimpleName() + "-{}")
+        .setDaemon(true)
+        .setUncaughtExceptionHandler((t, e) -> LOG.warn("Thread {} threw", t, e))
+        .build());
+  }
+
+  @Override
+  public Iterable<Service> getServices() {
+    return Collections.singletonList(this);
+  }
+
+  @Override
+  public void start(CoprocessorEnvironment env) {
+    conf = env.getConfiguration();
+  }
+
+  @Override
+  public void shellExec(
+    final RpcController controller,
+    final ShellExecRequest request,
+    final RpcCallback<ShellExecResponse> done
+  ) {
+    final Shell.ShellCommandExecutor shell = prepareShell(request, false);
+
+    ShellExecResponse.Builder builder = ShellExecResponse.newBuilder();
+    try {
+      builder = doExec(shell, builder);
+    } catch (IOException e) {
+      LOG.info("Failure launching process", e);
+      CoprocessorRpcUtils.setControllerException(controller, e);
+    }
+
+    done.run(builder.build());
+  }
+
+  @Override
+  public void shellExecAsync(
+    final RpcController controller,
+    final ShellExecRequest request,
+    final RpcCallback<ShellExecResponse> done
+  ) {
+    final Shell.ShellCommandExecutor shell = prepareShell(request, true);
+    final long sleepDuration = conf.getLong(ASYNC_DELAY_KEY, DEFAULT_ASYNC_DELAY);
+    workerPool.submit(() -> {
+      try {
+        // sleep first so that the RPC can ACK.
+        Thread.sleep(sleepDuration);
+        doExec(shell, ShellExecResponse.newBuilder());
+      } catch (InterruptedException e) {
+        LOG.info("Interrupted before launching process.", e);
+      } catch (IOException e) {
+        LOG.info("Failure launching process", e);

Review comment:
       LOG.error?

##########
File path: hbase-it/src/test/java/org/apache/hadoop/hbase/ShellExecCoproc.java
##########
@@ -0,0 +1,149 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import com.google.protobuf.RpcCallback;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.Service;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecRequest;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecResponse;
+import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
+import org.apache.hadoop.util.Shell;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
+
+/**
+ * Receives shell commands from the client and executes them blindly.
+ */
+@InterfaceAudience.Private
+public class ShellExecCoproc
+  extends ShellExecEndpoint.ShellExecService
+  implements MasterCoprocessor, RegionServerCoprocessor {
+  private static final Logger LOG = LoggerFactory.getLogger(ShellExecCoproc.class);
+
+  public static final String ASYNC_DELAY_KEY = "hbase.it.shellexeccoproc.async.delay";
+  public static final long DEFAULT_ASYNC_DELAY = 1_000;
+
+  private final ExecutorService workerPool;
+  private Configuration conf;
+
+  public ShellExecCoproc() {
+    workerPool = Executors.newSingleThreadExecutor(
+      new ThreadFactoryBuilder()
+        .setNameFormat(ShellExecCoproc.class.getSimpleName() + "-{}")
+        .setDaemon(true)
+        .setUncaughtExceptionHandler((t, e) -> LOG.warn("Thread {} threw", t, e))
+        .build());
+  }
+
+  @Override
+  public Iterable<Service> getServices() {
+    return Collections.singletonList(this);
+  }
+
+  @Override
+  public void start(CoprocessorEnvironment env) {
+    conf = env.getConfiguration();
+  }
+
+  @Override
+  public void shellExec(
+    final RpcController controller,
+    final ShellExecRequest request,
+    final RpcCallback<ShellExecResponse> done
+  ) {
+    final Shell.ShellCommandExecutor shell = prepareShell(request, false);
+
+    ShellExecResponse.Builder builder = ShellExecResponse.newBuilder();
+    try {
+      builder = doExec(shell, builder);
+    } catch (IOException e) {
+      LOG.info("Failure launching process", e);
+      CoprocessorRpcUtils.setControllerException(controller, e);
+    }
+
+    done.run(builder.build());
+  }
+
+  @Override
+  public void shellExecAsync(
+    final RpcController controller,
+    final ShellExecRequest request,
+    final RpcCallback<ShellExecResponse> done
+  ) {
+    final Shell.ShellCommandExecutor shell = prepareShell(request, true);
+    final long sleepDuration = conf.getLong(ASYNC_DELAY_KEY, DEFAULT_ASYNC_DELAY);
+    workerPool.submit(() -> {
+      try {
+        // sleep first so that the RPC can ACK.
+        Thread.sleep(sleepDuration);
+        doExec(shell, ShellExecResponse.newBuilder());
+      } catch (InterruptedException e) {
+        LOG.info("Interrupted before launching process.", e);
+      } catch (IOException e) {
+        LOG.info("Failure launching process", e);
+      }
+    });
+    done.run(ShellExecResponse.newBuilder().build());
+  }
+
+  private Shell.ShellCommandExecutor prepareShell(
+    final ShellExecRequest request,
+    final boolean async
+  ) {
+    final String command = request.getCommand();
+    if (StringUtils.isBlank(command)) {
+      throw new RuntimeException("Request contained an empty command.");
+    }
+    final String msgFmt = "Executing command"
+      + (async ? " on a background thread" : "") + ": {}";
+    LOG.info(msgFmt, command);
+    final String[] subShellCmd = new String[] { "/usr/bin/env", "bash", "-c", command };
+    return new Shell.ShellCommandExecutor(subShellCmd);
+  }
+
+  private ShellExecResponse.Builder doExec(
+    final Shell.ShellCommandExecutor shell,
+    final ShellExecResponse.Builder builder
+  ) throws IOException {
+    try {
+      shell.execute();
+      builder
+        .setExitCode(shell.getExitCode())
+        .setStdout(shell.getOutput());
+    } catch (Shell.ExitCodeException e) {
+      LOG.info("Launched process failed", e);

Review comment:
       LOG.error()?




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]

