timmylicheng commented on a change in pull request #980:
URL: https://github.com/apache/hadoop-ozone/pull/980#discussion_r432252645
##########
File path: hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/PipelineManagerV2Impl.java
##########
@@ -0,0 +1,637 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.pipeline;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hdds.HddsConfigKeys;
+import org.apache.hadoop.hdds.conf.ConfigurationSource;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.scm.container.ContainerID;
+import org.apache.hadoop.hdds.scm.ha.SCMHAManager;
+import org.apache.hadoop.hdds.scm.node.NodeManager;
+import org.apache.hadoop.hdds.scm.safemode.SCMSafeModeManager;
+import org.apache.hadoop.hdds.server.events.EventPublisher;
+import org.apache.hadoop.hdds.utils.Scheduler;
+import org.apache.hadoop.hdds.utils.db.Table;
+import org.apache.hadoop.hdds.utils.db.TableIterator;
+import org.apache.hadoop.metrics2.util.MBeans;
+import org.apache.hadoop.util.Time;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.management.ObjectName;
+import java.io.IOException;
+import java.time.Duration;
+import java.time.Instant;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.NavigableSet;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.stream.Collectors;
+
+/**
+ * SCM Pipeline Manager implementation.
+ * All the write operations for pipelines must come via PipelineManager.
+ * It synchronises all write and read operations via a ReadWriteLock.
+ */
+public class PipelineManagerV2Impl implements PipelineManager {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(SCMPipelineManager.class);
+
+  private final ReadWriteLock lock;
+  private PipelineFactory pipelineFactory;
+  private PipelineStateManagerV2 stateManager;
+  private Scheduler scheduler;
+  private BackgroundPipelineCreator backgroundPipelineCreator;
+  private final NodeManager nodeManager;
+  private final ConfigurationSource conf;
+  // Pipeline Manager MXBean
+  private ObjectName pmInfoBean;
+  private final SCMPipelineMetrics metrics;
+  private long pipelineWaitDefaultTimeout;
+  private final AtomicBoolean isInSafeMode;
+  // Used to track if the safemode pre-checks have completed. This is designed
+  // to prevent pipelines being created until sufficient nodes have registered.
+  private final AtomicBoolean pipelineCreationAllowed;
+
+  public PipelineManagerV2Impl(ConfigurationSource conf,
+                               NodeManager nodeManager,
+                               PipelineStateManagerV2 pipelineStateManager,
+                               PipelineFactory pipelineFactory)
+      throws IOException {
+    this.lock = new ReentrantReadWriteLock();
+    this.pipelineFactory = pipelineFactory;
+    this.stateManager = pipelineStateManager;
+    this.nodeManager = nodeManager;
+    this.conf = conf;
+    this.pmInfoBean = MBeans.register("SCMPipelineManager",
+        "SCMPipelineManagerInfo", this);
+    this.metrics = SCMPipelineMetrics.create();
+    this.pipelineWaitDefaultTimeout = conf.getTimeDuration(
+        HddsConfigKeys.HDDS_PIPELINE_REPORT_INTERVAL,
+        HddsConfigKeys.HDDS_PIPELINE_REPORT_INTERVAL_DEFAULT,
+        TimeUnit.MILLISECONDS);
+    this.isInSafeMode = new AtomicBoolean(conf.getBoolean(
+        HddsConfigKeys.HDDS_SCM_SAFEMODE_ENABLED,
+        HddsConfigKeys.HDDS_SCM_SAFEMODE_ENABLED_DEFAULT));
+    // Pipeline creation is only allowed after the safemode prechecks have
+    // passed, eg sufficient nodes have registered.
+    this.pipelineCreationAllowed = new AtomicBoolean(!this.isInSafeMode.get());
+    initializePipelineState();
+  }
+
+  public static PipelineManager newPipelineManager(
+      ConfigurationSource conf, SCMHAManager scmhaManager,

Review comment:
   We may have more stuff in SCMHAManager than just SCMRatisServer for PipelineManager use cases. The idea here is to have a manager interface for SCM HA, so that we won't have to worry about passing more things (configs and so on) into PipelineManager. @xiaoyuyao
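   To make the idea concrete, here is a rough sketch of what such a facade could look like. It is illustrative only: the accessor name and the stubbed `SCMRatisServer` type are assumptions for this example, not the actual `org.apache.hadoop.hdds.scm.ha` API.

```java
// Illustrative sketch: method names and the stubbed SCMRatisServer are
// assumptions for this example, not the real SCM HA interfaces.

/** Stub so the sketch stands alone; the real Ratis server type lives in SCM HA. */
interface SCMRatisServer {
}

/**
 * Facade over SCM HA internals. PipelineManagerV2Impl would depend only on
 * this interface, so new HA components can be added here later without
 * changing the PipelineManager factory or constructor signatures.
 */
public interface SCMHAManager {

  /** Ratis server used to replicate SCM metadata updates. */
  SCMRatisServer getRatisServer();

  // Future HA handles (for example a transaction buffer or leader-readiness
  // checks) would become additional accessors here rather than extra
  // newPipelineManager(...) parameters.
}
```

   With this shape, the `newPipelineManager(ConfigurationSource, SCMHAManager, ...)` factory signature shown in the diff stays stable as SCM HA grows; new pieces become accessors on the facade instead of new parameters.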
