This is an automated email from the ASF dual-hosted git repository.
adoroszlai pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git
The following commit(s) were added to refs/heads/master by this push:
new 6487de7def HDDS-10742. Add option to close all pipelines (#6577)
6487de7def is described below
commit 6487de7defc87f6456e986100de07620f88b1015
Author: Andrei Mikhalev <[email protected]>
AuthorDate: Fri May 10 11:31:26 2024 +0300
HDDS-10742. Add option to close all pipelines (#6577)
---
.../scm/cli/pipeline/ClosePipelineSubcommand.java | 50 ++++++++++++-
.../scm/cli/pipeline/FilterPipelineOptions.java | 85 ++++++++++++++++++++++
.../scm/cli/pipeline/ListPipelinesSubcommand.java | 77 ++------------------
3 files changed, 139 insertions(+), 73 deletions(-)
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/ClosePipelineSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/ClosePipelineSubcommand.java
index 78b83e56db..7c70456995 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/ClosePipelineSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/ClosePipelineSubcommand.java
@@ -18,13 +18,19 @@
package org.apache.hadoop.hdds.scm.cli.pipeline;
+import com.google.common.base.Strings;
import org.apache.hadoop.hdds.cli.HddsVersionProvider;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.scm.cli.ScmSubcommand;
import org.apache.hadoop.hdds.scm.client.ScmClient;
+import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
import picocli.CommandLine;
import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+import java.util.function.Predicate;
/**
* Handler of close pipeline command.
@@ -35,13 +41,49 @@ import java.io.IOException;
mixinStandardHelpOptions = true,
versionProvider = HddsVersionProvider.class)
public class ClosePipelineSubcommand extends ScmSubcommand {
+ @CommandLine.ArgGroup(multiplicity = "1")
+ private CloseOptionGroup closeOption;
- @CommandLine.Parameters(description = "ID of the pipeline to close")
- private String pipelineId;
+ @CommandLine.Mixin
+  private final FilterPipelineOptions filterOptions = new FilterPipelineOptions();
@Override
public void execute(ScmClient scmClient) throws IOException {
- scmClient.closePipeline(
- HddsProtos.PipelineID.newBuilder().setId(pipelineId).build());
+ if (!Strings.isNullOrEmpty(closeOption.pipelineId)) {
+ if (filterOptions.getReplicationFilter().isPresent()) {
+        throw new IllegalArgumentException("Replication filters can only be used with --all");
+ }
+      scmClient.closePipeline(HddsProtos.PipelineID.newBuilder().setId(closeOption.pipelineId).build());
+ } else if (closeOption.closeAll) {
+      Optional<Predicate<? super Pipeline>> replicationFilter = filterOptions.getReplicationFilter();
+
+ List<Pipeline> pipelineList = new ArrayList<>();
+ Predicate<? super Pipeline> predicate = replicationFilter.orElse(null);
+ for (Pipeline pipeline : scmClient.listPipelines()) {
+ boolean filterPassed = (predicate != null) && predicate.test(pipeline);
+        if (pipeline.getPipelineState() != Pipeline.PipelineState.CLOSED && filterPassed) {
+ pipelineList.add(pipeline);
+ }
+ }
+      System.out.println("Sending close command for " + pipelineList.size() + " pipelines...");
+ pipelineList.forEach(pipeline -> {
+ try {
+ scmClient.closePipeline(
+              HddsProtos.PipelineID.newBuilder().setId(pipeline.getId().getId().toString()).build());
+ } catch (IOException e) {
+          System.err.println("Error closing pipeline: " + pipeline.getId() + ", cause: " + e.getMessage());
+ }
+ });
+ }
+ }
+
+ private static class CloseOptionGroup {
+ @CommandLine.Parameters(description = "ID of the pipeline to close")
+ private String pipelineId;
+
+ @CommandLine.Option(
+ names = {"--all"},
+ description = "Close all pipelines")
+ private boolean closeAll;
}
}
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/FilterPipelineOptions.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/FilterPipelineOptions.java
new file mode 100644
index 0000000000..afb61c1dd6
--- /dev/null
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/FilterPipelineOptions.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.cli.pipeline;
+
+import com.google.common.base.Strings;
+import org.apache.hadoop.hdds.client.RatisReplicationConfig;
+import org.apache.hadoop.hdds.client.ReplicationConfig;
+import org.apache.hadoop.hdds.client.ReplicationFactor;
+import org.apache.hadoop.hdds.client.ReplicationType;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
+import picocli.CommandLine;
+
+import java.util.Optional;
+import java.util.function.Predicate;
+
+/**
+ * Defines command-line option for filtering pipelines.
+ */
+public class FilterPipelineOptions {
+ @CommandLine.Option(
+ names = {"-t", "--type"},
+ description = "Filter pipelines by replication type, RATIS or EC",
+ defaultValue = "")
+ private String replicationType;
+
+ @CommandLine.Option(
+ names = {"-r", "--replication"},
+      description = "Filter pipelines by replication, eg ONE, THREE or for EC rs-3-2-1024k",
+ defaultValue = "")
+ private String replication;
+
+ @CommandLine.Option(
+ names = {"-ffc", "--filterByFactor", "--filter-by-factor"},
+      description = "[deprecated] Filter pipelines by factor (e.g. ONE, THREE) (implies RATIS replication type)")
+ private ReplicationFactor factor;
+
+ Optional<Predicate<? super Pipeline>> getReplicationFilter() {
+ boolean hasReplication = !Strings.isNullOrEmpty(replication);
+ boolean hasFactor = factor != null;
+ boolean hasReplicationType = !Strings.isNullOrEmpty(replicationType);
+
+ if (hasFactor) {
+ if (hasReplication) {
+        throw new IllegalArgumentException("Factor and replication are mutually exclusive");
+ }
+      ReplicationConfig replicationConfig = RatisReplicationConfig.getInstance(factor.toProto());
+      return Optional.of(p -> replicationConfig.equals(p.getReplicationConfig()));
+ }
+
+ if (hasReplication) {
+ if (!hasReplicationType) {
+        throw new IllegalArgumentException("Replication type is required if replication is set");
+ }
+
+ ReplicationConfig replicationConfig =
+          ReplicationConfig.parse(ReplicationType.valueOf(replicationType), replication, new OzoneConfiguration());
+      return Optional.of(p -> replicationConfig.equals(p.getReplicationConfig()));
+ }
+
+ if (hasReplicationType) {
+ return Optional.of(p -> p.getReplicationConfig()
+ .getReplicationType()
+ .toString()
+ .compareToIgnoreCase(replicationType) == 0);
+ }
+
+ return Optional.empty();
+ }
+}
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/ListPipelinesSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/ListPipelinesSubcommand.java
index f08d316500..9f88b73756 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/ListPipelinesSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/ListPipelinesSubcommand.java
@@ -20,11 +20,6 @@ package org.apache.hadoop.hdds.scm.cli.pipeline;
import com.google.common.base.Strings;
import org.apache.hadoop.hdds.cli.HddsVersionProvider;
-import org.apache.hadoop.hdds.client.RatisReplicationConfig;
-import org.apache.hadoop.hdds.client.ReplicationConfig;
-import org.apache.hadoop.hdds.client.ReplicationFactor;
-import org.apache.hadoop.hdds.client.ReplicationType;
-import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.scm.cli.ScmSubcommand;
import org.apache.hadoop.hdds.scm.client.ScmClient;
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
@@ -47,24 +42,8 @@ import java.util.stream.Stream;
mixinStandardHelpOptions = true,
versionProvider = HddsVersionProvider.class)
public class ListPipelinesSubcommand extends ScmSubcommand {
-
- @CommandLine.Option(names = {"-t", "--type"},
- description = "Filter listed pipelines by replication type, RATIS or EC",
- defaultValue = "")
- private String replicationType;
-
- @CommandLine.Option(
- names = {"-r", "--replication"},
- description = "Filter listed pipelines by replication, eg ONE, THREE or "
- + "for EC rs-3-2-1024k",
- defaultValue = "")
- private String replication;
-
- @CommandLine.Option(
- names = {"-ffc", "--filterByFactor", "--filter-by-factor"},
-      description = "[deprecated] Filter pipelines by factor (e.g. ONE, THREE) "
- + " (implies RATIS replication type)")
- private ReplicationFactor factor;
+ @CommandLine.Mixin
+  private final FilterPipelineOptions filterOptions = new FilterPipelineOptions();
@CommandLine.Option(
      names = {"-s", "--state", "-fst", "--filterByState", "--filter-by-state"},
@@ -72,15 +51,15 @@ public class ListPipelinesSubcommand extends ScmSubcommand {
defaultValue = "")
private String state;
- @CommandLine.Option(names = { "--json" },
- defaultValue = "false",
- description = "Format output as JSON")
- private boolean json;
+ @CommandLine.Option(
+ names = {"--json"},
+ defaultValue = "false",
+ description = "Format output as JSON")
+ private boolean json;
@Override
public void execute(ScmClient scmClient) throws IOException {
- Optional<Predicate<? super Pipeline>> replicationFilter =
- getReplicationFilter();
+    Optional<Predicate<? super Pipeline>> replicationFilter = filterOptions.getReplicationFilter();
Stream<Pipeline> stream = scmClient.listPipelines().stream();
if (replicationFilter.isPresent()) {
@@ -99,44 +78,4 @@ public class ListPipelinesSubcommand extends ScmSubcommand {
stream.forEach(System.out::println);
}
}
-
- private Optional<Predicate<? super Pipeline>> getReplicationFilter() {
- boolean hasReplication = !Strings.isNullOrEmpty(replication);
- boolean hasFactor = factor != null;
- boolean hasReplicationType = !Strings.isNullOrEmpty(replicationType);
-
- if (hasFactor) {
- if (hasReplication) {
- throw new IllegalArgumentException(
- "Factor and replication are mutually exclusive");
- }
-
- ReplicationConfig replicationConfig =
- RatisReplicationConfig.getInstance(factor.toProto());
- return Optional.of(
- p -> replicationConfig.equals(p.getReplicationConfig()));
- }
-
- if (hasReplication) {
- if (!hasReplicationType) {
- throw new IllegalArgumentException(
- "Replication type is required if replication is set");
- }
-
- ReplicationConfig replicationConfig =
- ReplicationConfig.parse(ReplicationType.valueOf(replicationType),
- replication, new OzoneConfiguration());
- return Optional.of(
- p -> replicationConfig.equals(p.getReplicationConfig()));
- }
-
- if (hasReplicationType) {
- return Optional.of(p -> p.getReplicationConfig()
- .getReplicationType()
- .toString()
- .compareToIgnoreCase(replicationType) == 0);
- }
-
- return Optional.empty();
- }
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]