http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c392da8/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java
deleted file mode 100644
index 184c547..0000000
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java
+++ /dev/null
@@ -1,174 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-package org.apache.hadoop.hdds.scm.client;
-
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline;
-import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
-import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
-import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
-    .ContainerData;
-import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
-
-import java.io.Closeable;
-import java.io.IOException;
-import java.util.List;
-
-/**
- * The interface to call into underlying container layer.
- *
- * Written as interface to allow easy testing: implement a mock container layer
- * for standalone testing of CBlock API without actually calling into remote
- * containers. Actual container layer can simply re-implement this.
- *
- * NOTE: this class is only needed temporarily. When SCM containers are
- * full-fledged, this interface will likely be removed.
- */
[email protected]
-public interface ScmClient extends Closeable {
-  /**
-   * Creates a Container on SCM and returns the pipeline.
-   * @return ContainerWithPipeline
-   * @throws IOException
-   */
-  ContainerWithPipeline createContainer(String owner) throws IOException;
-
-  /**
-   * Gets a container by ID -- throws if the container does not exist.
-   * @param containerId - Container ID
-   * @return ContainerInfo
-   * @throws IOException
-   */
-  ContainerInfo getContainer(long containerId) throws IOException;
-
-  /**
-   * Gets a container by ID -- throws if the container does not exist.
-   * @param containerId - Container ID
-   * @return ContainerWithPipeline
-   * @throws IOException
-   */
-  ContainerWithPipeline getContainerWithPipeline(long containerId)
-      throws IOException;
-
-  /**
-   * Close a container.
-   *
-   * @param containerId - ID of the container.
-   * @param pipeline - Pipeline where the container is located.
-   * @throws IOException
-   */
-  void closeContainer(long containerId, Pipeline pipeline) throws IOException;
-
-  /**
-   * Close a container.
-   *
-   * @param containerId - ID of the container.
-   * @throws IOException
-   */
-  void closeContainer(long containerId) throws IOException;
-
-  /**
-   * Deletes an existing container.
-   * @param containerId - ID of the container.
-   * @param pipeline - Pipeline that represents the container.
-   * @param force - true to forcibly delete the container.
-   * @throws IOException
-   */
-  void deleteContainer(long containerId, Pipeline pipeline, boolean force)
-      throws IOException;
-
-  /**
-   * Deletes an existing container.
-   * @param containerId - ID of the container.
-   * @param force - true to forcibly delete the container.
-   * @throws IOException
-   */
-  void deleteContainer(long containerId, boolean force) throws IOException;
-
-  /**
-   * Lists a range of containers and get their info.
-   *
-   * @param startContainerID start containerID.
-   * @param count count must be > 0.
-   *
-   * @return a list of container info.
-   * @throws IOException
-   */
-  List<ContainerInfo> listContainer(long startContainerID,
-      int count) throws IOException;
-
-  /**
-   * Read meta data from an existing container.
-   * @param containerID - ID of the container.
-   * @param pipeline - Pipeline where the container is located.
-   * @return ContainerData
-   * @throws IOException
-   */
-  ContainerData readContainer(long containerID, Pipeline pipeline)
-      throws IOException;
-
-  /**
-   * Read meta data from an existing container.
-   * @param containerID - ID of the container.
-   * @return ContainerData
-   * @throws IOException
-   */
-  ContainerData readContainer(long containerID)
-      throws IOException;
-
-  /**
-   * Gets the container size -- Computed by SCM from Container Reports.
-   * @param containerID - ID of the container.
-   * @return number of bytes used by this container.
-   * @throws IOException
-   */
-  long getContainerSize(long containerID) throws IOException;
-
-  /**
-   * Creates a Container on SCM and returns the pipeline.
-   * @param type - Replication Type.
-   * @param replicationFactor - Replication Factor
-   * @return ContainerWithPipeline
-   * @throws IOException - in case of error.
-   */
-  ContainerWithPipeline createContainer(HddsProtos.ReplicationType type,
-      HddsProtos.ReplicationFactor replicationFactor,
-      String owner) throws IOException;
-
-  /**
-   * Returns a set of nodes that meet the query criteria.
-   * @param nodeStatuses - Criteria that we want the node to have.
-   * @param queryScope - Query scope - Cluster or pool.
-   * @param poolName - if it is pool, a pool name is required.
-   * @return A set of nodes that meet the requested criteria.
-   * @throws IOException
-   */
-  List<HddsProtos.Node> queryNode(HddsProtos.NodeState nodeStatuses,
-      HddsProtos.QueryScope queryScope, String poolName) throws IOException;
-
-  /**
-   * Creates a specified replication pipeline.
-   * @param type - Type
-   * @param factor - Replication factor
-   * @param nodePool - Set of machines.
-   * @throws IOException
-   */
-  Pipeline createReplicationPipeline(HddsProtos.ReplicationType type,
-      HddsProtos.ReplicationFactor factor, HddsProtos.NodePool nodePool)
-      throws IOException;
-}
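
For context, a minimal sketch of how a caller could drive the removed interface, assuming any concrete ScmClient implementation is supplied (the wrapper class and method here are illustrative, not part of the patch):

    import java.io.IOException;
    import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
    import org.apache.hadoop.hdds.scm.client.ScmClient;
    import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline;

    public final class ScmClientSketch {
      // 'client' is any concrete ScmClient implementation (assumption).
      static long createAndClose(ScmClient client) throws IOException {
        // Create a three-way replicated Ratis container owned by "OZONE".
        ContainerWithPipeline created = client.createContainer(
            HddsProtos.ReplicationType.RATIS,
            HddsProtos.ReplicationFactor.THREE,
            "OZONE");
        long id = created.getContainerInfo().getContainerID();
        // ... write through created.getPipeline(), then close the container.
        client.closeContainer(id);
        return id;
      }
    }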

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c392da8/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/package-info.java
deleted file mode 100644
index e2f7033..0000000
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/package-info.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hdds.scm.client;
-
-/**
- * This package contains classes for the client of the storage container
- * protocol.
- */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c392da8/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerID.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerID.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerID.java
deleted file mode 100644
index 49af297..0000000
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerID.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership.  The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- *
- */
-
-package org.apache.hadoop.hdds.scm.container;
-
-import com.google.common.base.Preconditions;
-import org.apache.commons.lang3.builder.CompareToBuilder;
-import org.apache.commons.lang3.builder.EqualsBuilder;
-import org.apache.commons.lang3.builder.HashCodeBuilder;
-
-/**
- * Container ID is a long value between 1 and MAX_CONTAINER_ID.
- * <p>
- * We are creating a specific type for this to avoid mixing this with
- * normal integers in code.
- */
-public class ContainerID implements Comparable {
-
-  private final long id;
-
-  /**
-   * Constructs ContainerID.
-   *
-   * @param id int
-   */
-  public ContainerID(long id) {
-    Preconditions.checkState(id > 0,
-        "Container ID should be a positive long. "+ id);
-    this.id = id;
-  }
-
-  /**
-   * Factory method for creation of ContainerID.
-   * @param containerID  long
-   * @return ContainerID.
-   */
-  public static ContainerID valueof(long containerID) {
-    return new ContainerID(containerID);
-  }
-
-  /**
-   * Returns the long representation of the ID.
-   *
-   * @return long
-   */
-  public long getId() {
-    return id;
-  }
-
-  @Override
-  public boolean equals(Object o) {
-    if (this == o) {
-      return true;
-    }
-
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    ContainerID that = (ContainerID) o;
-
-    return new EqualsBuilder()
-        .append(getId(), that.getId())
-        .isEquals();
-  }
-
-  @Override
-  public int hashCode() {
-    return new HashCodeBuilder(61, 71)
-        .append(getId())
-        .toHashCode();
-  }
-
-  @Override
-  public int compareTo(Object o) {
-    Preconditions.checkNotNull(o);
-    if(getClass() != o.getClass()) {
-      throw new ClassCastException("ContainerID class expected. found:" +
-          o.getClass().toString());
-    }
-
-    ContainerID that = (ContainerID) o;
-    return new CompareToBuilder()
-        .append(this.getId(), that.getId())
-        .build();
-  }
-
-  @Override
-  public String toString() {
-    return "id=" + id;
-  }
-}
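
A short usage sketch of the removed type (values illustrative): valueof and the public constructor yield equal instances for the same long, and the raw Comparable rejects other types at runtime:

    import org.apache.hadoop.hdds.scm.container.ContainerID;

    public final class ContainerIDSketch {
      public static void main(String[] args) {
        ContainerID a = ContainerID.valueof(42L);
        ContainerID b = new ContainerID(42L);
        System.out.println(a.equals(b));     // true: equality is on the long id
        System.out.println(a.compareTo(b));  // 0; a non-ContainerID argument
                                             // throws ClassCastException
        System.out.println(a);               // "id=42"
      }
    }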

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c392da8/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/AllocatedBlock.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/AllocatedBlock.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/AllocatedBlock.java
deleted file mode 100644
index 63781a8..0000000
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/AllocatedBlock.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-package org.apache.hadoop.hdds.scm.container.common.helpers;
-
-import org.apache.hadoop.hdds.client.BlockID;
-
-/**
- * Allocated block wraps the result returned from SCM#allocateBlock which
- * contains a Pipeline and the key.
- */
-public final class AllocatedBlock {
-  private Pipeline pipeline;
-  private BlockID blockID;
-  // Indicates whether the client should create container before writing block.
-  private boolean shouldCreateContainer;
-
-  /**
-   * Builder for AllocatedBlock.
-   */
-  public static class Builder {
-    private Pipeline pipeline;
-    private BlockID blockID;
-    private boolean shouldCreateContainer;
-
-    public Builder setPipeline(Pipeline p) {
-      this.pipeline = p;
-      return this;
-    }
-
-    public Builder setBlockID(BlockID blockId) {
-      this.blockID = blockId;
-      return this;
-    }
-
-    public Builder setShouldCreateContainer(boolean shouldCreate) {
-      this.shouldCreateContainer = shouldCreate;
-      return this;
-    }
-
-    public AllocatedBlock build() {
-      return new AllocatedBlock(pipeline, blockID, shouldCreateContainer);
-    }
-  }
-
-  private AllocatedBlock(Pipeline pipeline, BlockID blockID,
-      boolean shouldCreateContainer) {
-    this.pipeline = pipeline;
-    this.blockID = blockID;
-    this.shouldCreateContainer = shouldCreateContainer;
-  }
-
-  public Pipeline getPipeline() {
-    return pipeline;
-  }
-
-  public BlockID getBlockID() {
-    return blockID;
-  }
-
-  public boolean getCreateContainer() {
-    return shouldCreateContainer;
-  }
-}
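
A hedged sketch of how the removed builder is used, assuming BlockID's (containerID, localID) constructor and a previously obtained Pipeline (the wrapper class is illustrative):

    import org.apache.hadoop.hdds.client.BlockID;
    import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock;
    import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;

    public final class AllocatedBlockSketch {
      static AllocatedBlock wrap(Pipeline pipeline, long containerId,
          long localId) {
        return new AllocatedBlock.Builder()
            .setPipeline(pipeline)
            .setBlockID(new BlockID(containerId, localId))  // ctor assumed
            .setShouldCreateContainer(true)  // client creates container first
            .build();
      }
    }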

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c392da8/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/BlockNotCommittedException.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/BlockNotCommittedException.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/BlockNotCommittedException.java
deleted file mode 100644
index 86f5a66..0000000
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/BlockNotCommittedException.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-package org.apache.hadoop.hdds.scm.container.common.helpers;
-
-import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
-
-/**
- * Exception thrown when a block is yet to be committed on the datanode.
- */
-public class BlockNotCommittedException extends StorageContainerException {
-
-  /**
-   * Constructs an {@code IOException} with the specified detail message.
-   *
-   * @param message The detail message (which is saved for later retrieval by
-   * the {@link #getMessage()} method)
-   */
-  public BlockNotCommittedException(String message) {
-    super(message, ContainerProtos.Result.BLOCK_NOT_COMMITTED);
-  }
-}
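
A sketch of the intended use on the caller side (the wrapper class and method are illustrative): the subtype lets callers single out the not-yet-committed case from other storage-container failures:

    import org.apache.hadoop.hdds.scm.container.common.helpers.BlockNotCommittedException;
    import org.apache.hadoop.hdds.scm.container.common.helpers.StorageContainerException;

    public final class RetrySketch {
      // Retry later if the datanode has not committed the block yet;
      // other storage-container failures carry a Result code instead.
      static boolean shouldRetry(StorageContainerException e) {
        return e instanceof BlockNotCommittedException;
      }
    }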

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c392da8/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerInfo.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerInfo.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerInfo.java
deleted file mode 100644
index 5abcd14..0000000
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerInfo.java
+++ /dev/null
@@ -1,482 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-package org.apache.hadoop.hdds.scm.container.common.helpers;
-
-import static java.lang.Math.max;
-
-import com.fasterxml.jackson.annotation.JsonAutoDetect;
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.fasterxml.jackson.annotation.PropertyAccessor;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.ObjectWriter;
-import com.google.common.base.Preconditions;
-import java.io.Externalizable;
-import java.io.IOException;
-import java.io.ObjectInput;
-import java.io.ObjectOutput;
-import java.util.Arrays;
-import java.util.Comparator;
-import org.apache.commons.lang3.builder.EqualsBuilder;
-import org.apache.commons.lang3.builder.HashCodeBuilder;
-import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
-import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
-import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
-import org.apache.hadoop.hdds.scm.container.ContainerID;
-import org.apache.hadoop.util.Time;
-
-/**
- * Class wraps ozone container info.
- */
-public class ContainerInfo implements Comparator<ContainerInfo>,
-    Comparable<ContainerInfo>, Externalizable {
-
-  private static final ObjectWriter WRITER;
-  private static final String SERIALIZATION_ERROR_MSG = "Java serialization not"
-      + " supported. Use protobuf instead.";
-
-  static {
-    ObjectMapper mapper = new ObjectMapper();
-    mapper.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY);
-    mapper
-        .setVisibility(PropertyAccessor.GETTER, JsonAutoDetect.Visibility.NONE);
-    WRITER = mapper.writer();
-  }
-
-  private HddsProtos.LifeCycleState state;
-  @JsonIgnore
-  private PipelineID pipelineID;
-  private ReplicationFactor replicationFactor;
-  private ReplicationType replicationType;
-  // Bytes allocated by SCM for clients.
-  private long allocatedBytes;
-  // Actual container usage, updated through heartbeat.
-  private long usedBytes;
-  private long numberOfKeys;
-  private long lastUsed;
-  // The wall-clock time in ms since the epoch at which the current state
-  // was entered.
-  private long stateEnterTime;
-  private String owner;
-  private long containerID;
-  private long deleteTransactionId;
-  /**
-   * Allows you to maintain private data on ContainerInfo. This is not
-   * serialized via protobuf, just allows us to maintain some private data.
-   */
-  @JsonIgnore
-  private byte[] data;
-
-  ContainerInfo(
-      long containerID,
-      HddsProtos.LifeCycleState state,
-      PipelineID pipelineID,
-      long allocatedBytes,
-      long usedBytes,
-      long numberOfKeys,
-      long stateEnterTime,
-      String owner,
-      long deleteTransactionId,
-      ReplicationFactor replicationFactor,
-      ReplicationType repType) {
-    this.containerID = containerID;
-    this.pipelineID = pipelineID;
-    this.allocatedBytes = allocatedBytes;
-    this.usedBytes = usedBytes;
-    this.numberOfKeys = numberOfKeys;
-    this.lastUsed = Time.monotonicNow();
-    this.state = state;
-    this.stateEnterTime = stateEnterTime;
-    this.owner = owner;
-    this.deleteTransactionId = deleteTransactionId;
-    this.replicationFactor = replicationFactor;
-    this.replicationType = repType;
-  }
-
-  public ContainerInfo(ContainerInfo info) {
-    this(info.getContainerID(), info.getState(), info.getPipelineID(),
-        info.getAllocatedBytes(), info.getUsedBytes(), info.getNumberOfKeys(),
-        info.getStateEnterTime(), info.getOwner(),
-        info.getDeleteTransactionId(), info.getReplicationFactor(),
-        info.getReplicationType());
-  }
-  /**
-   * Needed for serialization and findbugs.
-   */
-  public ContainerInfo() {
-  }
-
-  public static ContainerInfo fromProtobuf(HddsProtos.SCMContainerInfo info) {
-    ContainerInfo.Builder builder = new ContainerInfo.Builder();
-    return builder.setPipelineID(
-        PipelineID.getFromProtobuf(info.getPipelineID()))
-        .setAllocatedBytes(info.getAllocatedBytes())
-        .setUsedBytes(info.getUsedBytes())
-        .setNumberOfKeys(info.getNumberOfKeys())
-        .setState(info.getState())
-        .setStateEnterTime(info.getStateEnterTime())
-        .setOwner(info.getOwner())
-        .setContainerID(info.getContainerID())
-        .setDeleteTransactionId(info.getDeleteTransactionId())
-        .setReplicationFactor(info.getReplicationFactor())
-        .setReplicationType(info.getReplicationType())
-        .build();
-  }
-
-  public long getContainerID() {
-    return containerID;
-  }
-
-  public HddsProtos.LifeCycleState getState() {
-    return state;
-  }
-
-  public void setState(HddsProtos.LifeCycleState state) {
-    this.state = state;
-  }
-
-  public long getStateEnterTime() {
-    return stateEnterTime;
-  }
-
-  public ReplicationFactor getReplicationFactor() {
-    return replicationFactor;
-  }
-
-  public PipelineID getPipelineID() {
-    return pipelineID;
-  }
-
-  public long getAllocatedBytes() {
-    return allocatedBytes;
-  }
-
-  /**
-   * Updates the allocated byte count.
-   *
-   * @param size - newly allocated bytes -- a negative size can be used
-   * in the case of deletes.
-   */
-  public void updateAllocatedBytes(long size) {
-    this.allocatedBytes += size;
-  }
-
-  public long getUsedBytes() {
-    return usedBytes;
-  }
-
-  public long getNumberOfKeys() {
-    return numberOfKeys;
-  }
-
-  public long getDeleteTransactionId() {
-    return deleteTransactionId;
-  }
-
-  public void updateDeleteTransactionId(long transactionId) {
-    deleteTransactionId = max(transactionId, deleteTransactionId);
-  }
-
-  public ContainerID containerID() {
-    return new ContainerID(getContainerID());
-  }
-
-  /**
-   * Gets the last used time from SCM's perspective.
-   *
-   * @return time in milliseconds.
-   */
-  public long getLastUsed() {
-    return lastUsed;
-  }
-
-  public ReplicationType getReplicationType() {
-    return replicationType;
-  }
-
-  public void updateLastUsedTime() {
-    lastUsed = Time.monotonicNow();
-  }
-
-  public void allocate(long size) {
-    // should we also have total container size in ContainerInfo
-    // and check before allocating?
-    allocatedBytes += size;
-  }
-
-  public HddsProtos.SCMContainerInfo getProtobuf() {
-    HddsProtos.SCMContainerInfo.Builder builder =
-        HddsProtos.SCMContainerInfo.newBuilder();
-    Preconditions.checkState(containerID > 0);
-    return builder.setAllocatedBytes(getAllocatedBytes())
-        .setContainerID(getContainerID())
-        .setUsedBytes(getUsedBytes())
-        .setNumberOfKeys(getNumberOfKeys()).setState(getState())
-        .setStateEnterTime(getStateEnterTime()).setContainerID(getContainerID())
-        .setDeleteTransactionId(getDeleteTransactionId())
-        .setPipelineID(getPipelineID().getProtobuf())
-        .setReplicationFactor(getReplicationFactor())
-        .setReplicationType(getReplicationType())
-        .setOwner(getOwner())
-        .build();
-  }
-
-  public String getOwner() {
-    return owner;
-  }
-
-  public void setOwner(String owner) {
-    this.owner = owner;
-  }
-
-  @Override
-  public String toString() {
-    return "ContainerInfo{"
-        + "id=" + containerID
-        + ", state=" + state
-        + ", pipelineID=" + pipelineID
-        + ", stateEnterTime=" + stateEnterTime
-        + ", owner=" + owner
-        + '}';
-  }
-
-  @Override
-  public boolean equals(Object o) {
-    if (this == o) {
-      return true;
-    }
-
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    ContainerInfo that = (ContainerInfo) o;
-
-    return new EqualsBuilder()
-        .append(getContainerID(), that.getContainerID())
-
-        // TODO : Fix this later. If we add these factors some tests fail.
-        // So Commenting this to continue and will enforce this with
-        // Changes in pipeline where we remove Container Name to
-        // SCMContainerinfo from Pipeline.
-        // .append(pipeline.getFactor(), that.pipeline.getFactor())
-        // .append(pipeline.getType(), that.pipeline.getType())
-        .append(owner, that.owner)
-        .isEquals();
-  }
-
-  @Override
-  public int hashCode() {
-    return new HashCodeBuilder(11, 811)
-        .append(getContainerID())
-        .append(getOwner())
-        .toHashCode();
-  }
-
-  /**
-   * Compares its two arguments for order.  Returns a negative integer, zero, or
-   * a positive integer as the first argument is less than, equal to, or greater
-   * than the second.<p>
-   *
-   * @param o1 the first object to be compared.
-   * @param o2 the second object to be compared.
-   * @return a negative integer, zero, or a positive integer as the first
-   * argument is less than, equal to, or greater than the second.
-   * @throws NullPointerException if an argument is null and this comparator
-   *                              does not permit null arguments
-   * @throws ClassCastException   if the arguments' types prevent them from
-   *                              being compared by this comparator.
-   */
-  @Override
-  public int compare(ContainerInfo o1, ContainerInfo o2) {
-    return Long.compare(o1.getLastUsed(), o2.getLastUsed());
-  }
-
-  /**
-   * Compares this object with the specified object for order.  Returns a
-   * negative integer, zero, or a positive integer as this object is less than,
-   * equal to, or greater than the specified object.
-   *
-   * @param o the object to be compared.
-   * @return a negative integer, zero, or a positive integer as this object is
-   * less than, equal to, or greater than the specified object.
-   * @throws NullPointerException if the specified object is null
-   * @throws ClassCastException   if the specified object's type prevents it
-   *                              from being compared to this object.
-   */
-  @Override
-  public int compareTo(ContainerInfo o) {
-    return this.compare(this, o);
-  }
-
-  /**
-   * Returns a JSON string of this object.
-   *
-   * @return String - json string
-   * @throws IOException
-   */
-  public String toJsonString() throws IOException {
-    return WRITER.writeValueAsString(this);
-  }
-
-  /**
-   * Returns private data that is set on this containerInfo.
-   *
-   * @return blob, the user can interpret it any way they like.
-   */
-  public byte[] getData() {
-    if (this.data != null) {
-      return Arrays.copyOf(this.data, this.data.length);
-    } else {
-      return null;
-    }
-  }
-
-  /**
-   * Set private data on ContainerInfo object.
-   *
-   * @param data -- private data.
-   */
-  public void setData(byte[] data) {
-    if (data != null) {
-      this.data = Arrays.copyOf(data, data.length);
-    }
-  }
-
-  /**
-   * Throws IOException as default java serialization is not supported. Use
-   * serialization via protobuf instead.
-   *
-   * @param out the stream to write the object to
-   * @throws IOException Includes any I/O exceptions that may occur
-   * @serialData Overriding methods should use this tag to describe
-   * the data layout of this Externalizable object.
-   * List the sequence of element types and, if possible,
-   * relate the element to a public/protected field and/or
-   * method of this Externalizable class.
-   */
-  @Override
-  public void writeExternal(ObjectOutput out) throws IOException {
-    throw new IOException(SERIALIZATION_ERROR_MSG);
-  }
-
-  /**
-   * Throws IOException as default java serialization is not supported. Use
-   * serialization via protobuf instead.
-   *
-   * @param in the stream to read data from in order to restore the object
-   * @throws IOException            if I/O errors occur
-   * @throws ClassNotFoundException If the class for an object being
-   *                                restored cannot be found.
-   */
-  @Override
-  public void readExternal(ObjectInput in)
-      throws IOException, ClassNotFoundException {
-    throw new IOException(SERIALIZATION_ERROR_MSG);
-  }
-
-  /**
-   * Builder class for ContainerInfo.
-   */
-  public static class Builder {
-    private HddsProtos.LifeCycleState state;
-    private long allocated;
-    private long used;
-    private long keys;
-    private long stateEnterTime;
-    private String owner;
-    private long containerID;
-    private long deleteTransactionId;
-    private PipelineID pipelineID;
-    private ReplicationFactor replicationFactor;
-    private ReplicationType replicationType;
-
-    public Builder setReplicationType(
-        ReplicationType repType) {
-      this.replicationType = repType;
-      return this;
-    }
-
-    public Builder setPipelineID(PipelineID pipelineId) {
-      this.pipelineID = pipelineId;
-      return this;
-    }
-
-    public Builder setReplicationFactor(ReplicationFactor repFactor) {
-      this.replicationFactor = repFactor;
-      return this;
-    }
-
-    public Builder setContainerID(long id) {
-      Preconditions.checkState(id >= 0);
-      this.containerID = id;
-      return this;
-    }
-
-    public Builder setState(HddsProtos.LifeCycleState lifeCycleState) {
-      this.state = lifeCycleState;
-      return this;
-    }
-
-    public Builder setAllocatedBytes(long bytesAllocated) {
-      this.allocated = bytesAllocated;
-      return this;
-    }
-
-    public Builder setUsedBytes(long bytesUsed) {
-      this.used = bytesUsed;
-      return this;
-    }
-
-    public Builder setNumberOfKeys(long keyCount) {
-      this.keys = keyCount;
-      return this;
-    }
-
-    public Builder setStateEnterTime(long time) {
-      this.stateEnterTime = time;
-      return this;
-    }
-
-    public Builder setOwner(String containerOwner) {
-      this.owner = containerOwner;
-      return this;
-    }
-
-    public Builder setDeleteTransactionId(long deleteTransactionID) {
-      this.deleteTransactionId = deleteTransactionID;
-      return this;
-    }
-
-    public ContainerInfo build() {
-      return new ContainerInfo(containerID, state, pipelineID, allocated,
-              used, keys, stateEnterTime, owner, deleteTransactionId,
-          replicationFactor, replicationType);
-    }
-  }
-
-  /**
-   * Check if a container is in the open state; this returns true if the
-   * container is allocated, creating, open or closing.
-   * Any container in these states is managed as an open container by SCM.
-   */
-  public boolean isContainerOpen() {
-    return state == HddsProtos.LifeCycleState.ALLOCATED ||
-        state == HddsProtos.LifeCycleState.CREATING ||
-        state == HddsProtos.LifeCycleState.OPEN ||
-        state == HddsProtos.LifeCycleState.CLOSING;
-  }
-}
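
A sketch of constructing an open container record with the removed builder (the owner string and wrapper class are illustrative):

    import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
    import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
    import org.apache.hadoop.hdds.scm.container.common.helpers.PipelineID;
    import org.apache.hadoop.util.Time;

    public final class ContainerInfoSketch {
      // The resulting instance reports isContainerOpen() == true until the
      // lifecycle moves past CLOSING.
      static ContainerInfo newOpenContainer(long id, PipelineID pipelineID) {
        return new ContainerInfo.Builder()
            .setContainerID(id)
            .setPipelineID(pipelineID)
            .setState(HddsProtos.LifeCycleState.OPEN)
            .setStateEnterTime(Time.monotonicNow())
            .setReplicationType(HddsProtos.ReplicationType.RATIS)
            .setReplicationFactor(HddsProtos.ReplicationFactor.THREE)
            .setOwner("OZONE")
            .build();
      }
    }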

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c392da8/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerWithPipeline.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerWithPipeline.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerWithPipeline.java
deleted file mode 100644
index 64f42b3..0000000
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerWithPipeline.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-package org.apache.hadoop.hdds.scm.container.common.helpers;
-
-import java.util.Comparator;
-import org.apache.commons.lang3.builder.EqualsBuilder;
-import org.apache.commons.lang3.builder.HashCodeBuilder;
-import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
-
-/**
- * Class wraps ozone container info.
- */
-public class ContainerWithPipeline implements Comparator<ContainerWithPipeline>,
-    Comparable<ContainerWithPipeline> {
-
-  private final ContainerInfo containerInfo;
-  private final Pipeline pipeline;
-
-  public ContainerWithPipeline(ContainerInfo containerInfo, Pipeline pipeline) {
-    this.containerInfo = containerInfo;
-    this.pipeline = pipeline;
-  }
-
-  public ContainerInfo getContainerInfo() {
-    return containerInfo;
-  }
-
-  public Pipeline getPipeline() {
-    return pipeline;
-  }
-
-  public static ContainerWithPipeline fromProtobuf(
-      HddsProtos.ContainerWithPipeline allocatedContainer) {
-    return new ContainerWithPipeline(
-        ContainerInfo.fromProtobuf(allocatedContainer.getContainerInfo()),
-        Pipeline.getFromProtoBuf(allocatedContainer.getPipeline()));
-  }
-
-  public HddsProtos.ContainerWithPipeline getProtobuf() {
-    HddsProtos.ContainerWithPipeline.Builder builder =
-        HddsProtos.ContainerWithPipeline.newBuilder();
-    builder.setContainerInfo(getContainerInfo().getProtobuf())
-        .setPipeline(getPipeline().getProtobufMessage());
-
-    return builder.build();
-  }
-
-
-  @Override
-  public String toString() {
-    return containerInfo.toString() + " | " + pipeline.toString();
-  }
-
-  @Override
-  public boolean equals(Object o) {
-    if (this == o) {
-      return true;
-    }
-
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    ContainerWithPipeline that = (ContainerWithPipeline) o;
-
-    return new EqualsBuilder()
-        .append(getContainerInfo(), that.getContainerInfo())
-        .append(getPipeline(), that.getPipeline())
-        .isEquals();
-  }
-
-  @Override
-  public int hashCode() {
-    return new HashCodeBuilder(11, 811)
-        .append(getContainerInfo())
-        .append(getPipeline())
-        .toHashCode();
-  }
-
-  /**
-   * Compares its two arguments for order.  Returns a negative integer, zero, or
-   * a positive integer as the first argument is less than, equal to, or greater
-   * than the second.<p>
-   *
-   * @param o1 the first object to be compared.
-   * @param o2 the second object to be compared.
-   * @return a negative integer, zero, or a positive integer as the first
-   * argument is less than, equal to, or greater than the second.
-   * @throws NullPointerException if an argument is null and this comparator
-   *                              does not permit null arguments
-   * @throws ClassCastException   if the arguments' types prevent them from
-   *                              being compared by this comparator.
-   */
-  @Override
-  public int compare(ContainerWithPipeline o1, ContainerWithPipeline o2) {
-    return o1.getContainerInfo().compareTo(o2.getContainerInfo());
-  }
-
-  /**
-   * Compares this object with the specified object for order.  Returns a
-   * negative integer, zero, or a positive integer as this object is less than,
-   * equal to, or greater than the specified object.
-   *
-   * @param o the object to be compared.
-   * @return a negative integer, zero, or a positive integer as this object is
-   * less than, equal to, or greater than the specified object.
-   * @throws NullPointerException if the specified object is null
-   * @throws ClassCastException   if the specified object's type prevents it
-   *                              from being compared to this object.
-   */
-  @Override
-  public int compareTo(ContainerWithPipeline o) {
-    return this.compare(this, o);
-  }
-
-}
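
A sketch of the protobuf round trip the removed class supports (wrapper class illustrative); equals() compares both the wrapped ContainerInfo and the Pipeline:

    import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
    import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline;

    public final class RoundTripSketch {
      // Serialize to the wire format and back; the result should equal
      // the input under the class's EqualsBuilder-based equals().
      static ContainerWithPipeline roundTrip(ContainerWithPipeline cwp) {
        HddsProtos.ContainerWithPipeline proto = cwp.getProtobuf();
        return ContainerWithPipeline.fromProtobuf(proto);
      }
    }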

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c392da8/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/DeleteBlockResult.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/DeleteBlockResult.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/DeleteBlockResult.java
deleted file mode 100644
index 5f5aace..0000000
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/DeleteBlockResult.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership.  The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-
-package org.apache.hadoop.hdds.scm.container.common.helpers;
-
-import org.apache.hadoop.hdds.client.BlockID;
-
-import static org.apache.hadoop.hdds.protocol.proto
-    .ScmBlockLocationProtocolProtos.DeleteScmBlockResult;
-
-/**
- * Class wraps storage container manager block deletion results.
- */
-public class DeleteBlockResult {
-  private BlockID blockID;
-  private DeleteScmBlockResult.Result result;
-
-  public DeleteBlockResult(final BlockID blockID,
-      final DeleteScmBlockResult.Result result) {
-    this.blockID = blockID;
-    this.result = result;
-  }
-
-  /**
-   * Get block id deleted.
-   * @return block id.
-   */
-  public BlockID getBlockID() {
-    return blockID;
-  }
-
-  /**
-   * Get key deletion result.
-   * @return key deletion result.
-   */
-  public DeleteScmBlockResult.Result getResult() {
-    return result;
-  }
-}
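
A sketch of filtering deletion results (wrapper class illustrative; the lowercase Result.success value name is assumed from the proto definition):

    import java.util.List;
    import java.util.stream.Collectors;
    import org.apache.hadoop.hdds.client.BlockID;
    import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos.DeleteScmBlockResult;
    import org.apache.hadoop.hdds.scm.container.common.helpers.DeleteBlockResult;

    public final class DeletionSketch {
      // Collect the IDs of blocks whose deletion did not succeed.
      static List<BlockID> failedBlocks(List<DeleteBlockResult> results) {
        return results.stream()
            .filter(r -> r.getResult() != DeleteScmBlockResult.Result.success)
            .map(DeleteBlockResult::getBlockID)
            .collect(Collectors.toList());
      }
    }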

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c392da8/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/Pipeline.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/Pipeline.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/Pipeline.java
deleted file mode 100644
index c36ca1f..0000000
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/Pipeline.java
+++ /dev/null
@@ -1,315 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-package org.apache.hadoop.hdds.scm.container.common.helpers;
-
-import com.fasterxml.jackson.annotation.JsonAutoDetect;
-import com.fasterxml.jackson.annotation.JsonFilter;
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.fasterxml.jackson.annotation.PropertyAccessor;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.ObjectWriter;
-import com.fasterxml.jackson.databind.ser.FilterProvider;
-import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
-import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
-import com.google.common.base.Preconditions;
-import org.apache.hadoop.hdds.protocol.DatanodeDetails;
-import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.List;
-
-/**
- * A pipeline represents the group of machines over which a container lives.
- */
-public class Pipeline {
-  static final String PIPELINE_INFO = "PIPELINE_INFO_FILTER";
-  private static final ObjectWriter WRITER;
-
-  static {
-    ObjectMapper mapper = new ObjectMapper();
-    String[] ignorableFieldNames = {"leaderID", "datanodes"};
-    FilterProvider filters = new SimpleFilterProvider()
-        .addFilter(PIPELINE_INFO, SimpleBeanPropertyFilter
-            .serializeAllExcept(ignorableFieldNames));
-    mapper.setVisibility(PropertyAccessor.FIELD,
-        JsonAutoDetect.Visibility.ANY);
-    mapper.addMixIn(Object.class, MixIn.class);
-
-    WRITER = mapper.writer(filters);
-  }
-
-  @JsonIgnore
-  private String leaderID;
-  @JsonIgnore
-  private Map<String, DatanodeDetails> datanodes;
-  private HddsProtos.LifeCycleState lifeCycleState;
-  private HddsProtos.ReplicationType type;
-  private HddsProtos.ReplicationFactor factor;
-  private PipelineID id;
-
-  /**
-   * Constructs a new pipeline data structure.
-   *
-   * @param leaderID       -  Leader datanode id
-   * @param lifeCycleState  - Pipeline State
-   * @param replicationType - Replication protocol
-   * @param replicationFactor - replication count on datanodes
-   * @param id  - pipeline ID
-   */
-  public Pipeline(String leaderID, HddsProtos.LifeCycleState lifeCycleState,
-      HddsProtos.ReplicationType replicationType,
-      HddsProtos.ReplicationFactor replicationFactor, PipelineID id) {
-    this.leaderID = leaderID;
-    this.lifeCycleState = lifeCycleState;
-    this.type = replicationType;
-    this.factor = replicationFactor;
-    this.id = id;
-    datanodes = new ConcurrentHashMap<>();
-  }
-
-  @Override
-  public int hashCode() {
-    return id.hashCode();
-  }
-
-  @Override
-  public boolean equals(Object o) {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    Pipeline that = (Pipeline) o;
-
-    return id.equals(that.id)
-            && factor.equals(that.factor)
-            && type.equals(that.type)
-            && lifeCycleState.equals(that.lifeCycleState)
-            && leaderID.equals(that.leaderID);
-
-  }
-
-  /**
-   * Gets pipeline object from protobuf.
-   *
-   * @param pipelineProto - ProtoBuf definition for the pipeline.
-   * @return Pipeline Object
-   */
-  public static Pipeline getFromProtoBuf(
-      HddsProtos.Pipeline pipelineProto) {
-    Preconditions.checkNotNull(pipelineProto);
-    Pipeline pipeline =
-        new Pipeline(pipelineProto.getLeaderID(),
-            pipelineProto.getState(),
-            pipelineProto.getType(),
-            pipelineProto.getFactor(),
-            PipelineID.getFromProtobuf(pipelineProto.getId()));
-
-    for (HddsProtos.DatanodeDetailsProto dataID :
-        pipelineProto.getMembersList()) {
-      pipeline.addMember(DatanodeDetails.getFromProtoBuf(dataID));
-    }
-    return pipeline;
-  }
-
-  /**
-   * returns the replication count.
-   * @return Replication Factor
-   */
-  public HddsProtos.ReplicationFactor getFactor() {
-    return factor;
-  }
-
-  /**
-   * Returns the leader datanode of the pipeline.
-   *
-   * @return Leader datanode.
-   */
-  @JsonIgnore
-  public DatanodeDetails getLeader() {
-    return getDatanodes().get(leaderID);
-  }
-
-  /**
-   * Adds a datanode to the pipeline.
-   * @param datanodeDetails datanode to be added.
-   * @return true if the datanode was not already present, false otherwise
-   */
-  public boolean addMember(DatanodeDetails datanodeDetails) {
-    return datanodes.put(datanodeDetails.getUuid().toString(),
-        datanodeDetails) == null;
-
-  }
-
-  public void resetPipeline() {
-    // reset datanodes in pipeline and learn about them through
-    // pipeline reports on SCM restart
-    datanodes.clear();
-  }
-
-  public Map<String, DatanodeDetails> getDatanodes() {
-    return datanodes;
-  }
-
-  public boolean isEmpty() {
-    return datanodes.isEmpty();
-  }
-  /**
-   * Returns the leader host.
-   *
-   * @return Hostname of the leader datanode.
-   */
-  public String getLeaderHost() {
-    return getDatanodes()
-        .get(leaderID).getHostName();
-  }
-
-  /**
-   * Returns the leader datanode ID.
-   *
-   * @return leader ID
-   */
-  public String getLeaderID() {
-    return leaderID;
-  }
-  /**
-   * Returns all machines that make up this pipeline.
-   *
-   * @return List of Machines.
-   */
-  @JsonIgnore
-  public List<DatanodeDetails> getMachines() {
-    return new ArrayList<>(getDatanodes().values());
-  }
-
-  /**
-   * Returns the hostnames of all machines that make up this pipeline.
-   *
-   * @return List of hostnames.
-   */
-  public List<String> getDatanodeHosts() {
-    List<String> dataHosts = new ArrayList<>();
-    for (DatanodeDetails datanode : getDatanodes().values()) {
-      dataHosts.add(datanode.getHostName());
-    }
-    return dataHosts;
-  }
-
-  /**
-   * Return a Protobuf Pipeline message from pipeline.
-   *
-   * @return Protobuf message
-   */
-  @JsonIgnore
-  public HddsProtos.Pipeline getProtobufMessage() {
-    HddsProtos.Pipeline.Builder builder =
-        HddsProtos.Pipeline.newBuilder();
-    for (DatanodeDetails datanode : datanodes.values()) {
-      builder.addMembers(datanode.getProtoBufMessage());
-    }
-    builder.setLeaderID(leaderID);
-
-    if (lifeCycleState != null) {
-      builder.setState(lifeCycleState);
-    }
-    if (type != null) {
-      builder.setType(type);
-    }
-
-    if (factor != null) {
-      builder.setFactor(factor);
-    }
-
-    if (id != null) {
-      builder.setId(id.getProtobuf());
-    }
-    return builder.build();
-  }
-
-  /**
-   * Gets the State of the pipeline.
-   *
-   * @return - LifeCycleStates.
-   */
-  public HddsProtos.LifeCycleState getLifeCycleState() {
-    return lifeCycleState;
-  }
-
-  /**
-   * Update the State of the pipeline.
-   */
-  public void setLifeCycleState(HddsProtos.LifeCycleState nextState) {
-    lifeCycleState = nextState;
-  }
-
-  /**
-   * Gets the pipeline id.
-   *
-   * @return - Id of the pipeline
-   */
-  public PipelineID getId() {
-    return id;
-  }
-
-  /**
-   * Returns the type.
-   *
-   * @return type - Standalone, Ratis, Chained.
-   */
-  public HddsProtos.ReplicationType getType() {
-    return type;
-  }
-
-  @Override
-  public String toString() {
-    final StringBuilder b = new StringBuilder(getClass().getSimpleName())
-        .append("[");
-    getDatanodes().keySet().forEach(
-        node -> b.append(node.endsWith(getLeaderID()) ? "*" + id : id));
-    b.append(" id:").append(id);
-    if (getType() != null) {
-      b.append(" type:").append(getType().toString());
-    }
-    if (getFactor() != null) {
-      b.append(" factor:").append(getFactor().toString());
-    }
-    if (getLifeCycleState() != null) {
-      b.append(" State:").append(getLifeCycleState().toString());
-    }
-    return b.toString();
-  }
-
-  /**
-   * Returns a JSON string of this object.
-   *
-   * @return String - json string
-   * @throws IOException
-   */
-  public String toJsonString() throws IOException {
-    return WRITER.writeValueAsString(this);
-  }
-
-  @JsonFilter(PIPELINE_INFO)
-  class MixIn {
-  }
-}
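
A sketch that walks the removed class's accessors to describe a pipeline, marking the leader the same way toString() does (wrapper class illustrative):

    import org.apache.hadoop.hdds.protocol.DatanodeDetails;
    import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;

    public final class PipelineSketch {
      // Print the machines backing a pipeline, marking the leader with '*'.
      static void describe(Pipeline pipeline) {
        for (DatanodeDetails dn : pipeline.getMachines()) {
          boolean leader =
              dn.getUuid().toString().equals(pipeline.getLeaderID());
          System.out.println((leader ? "*" : " ") + dn.getHostName());
        }
      }
    }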

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c392da8/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/PipelineID.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/PipelineID.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/PipelineID.java
deleted file mode 100644
index 6e27a71..0000000
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/PipelineID.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-package org.apache.hadoop.hdds.scm.container.common.helpers;
-
-import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
-import org.apache.ratis.protocol.RaftGroupId;
-
-import java.util.UUID;
-
-/**
- * ID for the pipeline. The ID is based on a UUID so that it can be used
- * in Ratis as a RaftGroupId; the group ID is used by the datanodes to
- * initialize the Ratis group they are part of.
- */
-public final class PipelineID implements Comparable<PipelineID> {
-
-  private UUID id;
-  private RaftGroupId groupId;
-
-  private PipelineID(UUID id) {
-    this.id = id;
-    this.groupId = RaftGroupId.valueOf(id);
-  }
-
-  public static PipelineID randomId() {
-    return new PipelineID(UUID.randomUUID());
-  }
-
-  public static PipelineID valueOf(UUID id) {
-    return new PipelineID(id);
-  }
-
-  public static PipelineID valueOf(RaftGroupId groupId) {
-    return valueOf(groupId.getUuid());
-  }
-
-  public RaftGroupId getRaftGroupID() {
-    return groupId;
-  }
-
-  public UUID getId() {
-    return id;
-  }
-
-  public HddsProtos.PipelineID getProtobuf() {
-    return HddsProtos.PipelineID.newBuilder().setId(id.toString()).build();
-  }
-
-  public static PipelineID getFromProtobuf(HddsProtos.PipelineID protos) {
-    return new PipelineID(UUID.fromString(protos.getId()));
-  }
-
-  @Override
-  public String toString() {
-    return "pipelineId=" + id;
-  }
-
-  @Override
-  public int compareTo(PipelineID o) {
-    return this.id.compareTo(o.id);
-  }
-
-  @Override
-  public boolean equals(Object o) {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    PipelineID that = (PipelineID) o;
-
-    return id.equals(that.id);
-  }
-
-  @Override
-  public int hashCode() {
-    return id.hashCode();
-  }
-}
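
A sketch of the UUID round trip the removed class is built around (wrapper class illustrative): the same UUID backs both the pipeline ID and the Ratis group ID, so a datanode can recover one from the other:

    import org.apache.hadoop.hdds.scm.container.common.helpers.PipelineID;
    import org.apache.ratis.protocol.RaftGroupId;

    public final class PipelineIDSketch {
      public static void main(String[] args) {
        PipelineID pid = PipelineID.randomId();
        // Convert to the Ratis group ID and back again.
        RaftGroupId group = pid.getRaftGroupID();
        PipelineID recovered = PipelineID.valueOf(group);
        System.out.println(pid.equals(recovered));  // true
      }
    }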

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c392da8/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/StorageContainerException.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/StorageContainerException.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/StorageContainerException.java
deleted file mode 100644
index f1405ff..0000000
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/StorageContainerException.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-package org.apache.hadoop.hdds.scm.container.common.helpers;
-
-import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
-
-import java.io.IOException;
-
-/**
- * Exceptions thrown from the Storage Container.
- */
-public class StorageContainerException extends IOException {
-  private ContainerProtos.Result result;
-
-  /**
-   * Constructs an {@code IOException} with {@code null}
-   * as its error detail message.
-   */
-  public StorageContainerException(ContainerProtos.Result result) {
-    this.result = result;
-  }
-
-  /**
-   * Constructs an {@code IOException} with the specified detail message.
-   *
-   * @param message The detail message (which is saved for later retrieval by
-   * the {@link #getMessage()} method)
-   * @param result - The result code
-   */
-  public StorageContainerException(String message,
-      ContainerProtos.Result result) {
-    super(message);
-    this.result = result;
-  }
-
-  /**
-   * Constructs an {@code IOException} with the specified detail message
-   * and cause.
-   * <p>
-   * <p> Note that the detail message associated with {@code cause} is
-   * <i>not</i> automatically incorporated into this exception's detail
-   * message.
-   *
-   * @param message The detail message (which is saved for later retrieval by
-   * the {@link #getMessage()} method)
-   *
-   * @param cause The cause (which is saved for later retrieval by the {@link
-   * #getCause()} method).  (A null value is permitted, and indicates that the
-   * cause is nonexistent or unknown.)
-   *
-   * @param result - The result code
-   * @since 1.6
-   */
-  public StorageContainerException(String message, Throwable cause,
-      ContainerProtos.Result result) {
-    super(message, cause);
-    this.result = result;
-  }
-
-  /**
-   * Constructs an {@code IOException} with the specified cause and a
-   * detail message of {@code (cause==null ? null : cause.toString())}
-   * (which typically contains the class and detail message of {@code cause}).
-   * This constructor is useful for IO exceptions that are little more
-   * than wrappers for other throwables.
-   *
-   * @param cause The cause (which is saved for later retrieval by the {@link
-   * #getCause()} method).  (A null value is permitted, and indicates that the
-   * cause is nonexistent or unknown.)
-   * @param result - The result code
-   * @since 1.6
-   */
-  public StorageContainerException(Throwable cause, ContainerProtos.Result
-      result) {
-    super(cause);
-    this.result = result;
-  }
-
-  /**
-   * Returns Result.
-   *
-   * @return Result.
-   */
-  public ContainerProtos.Result getResult() {
-    return result;
-  }
-
-
-}
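
For context, a minimal usage sketch (not part of this commit; the class name is made up, and CONTAINER_NOT_FOUND is used illustratively as one of the ContainerProtos.Result values) showing how the carried result code lets callers branch on the failure type:

    import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
    import org.apache.hadoop.hdds.scm.container.common.helpers.StorageContainerException;

    public final class ResultCodeSketch {
      // Throws with an explicit result code so callers can branch on it.
      static void checkContainer(boolean exists)
          throws StorageContainerException {
        if (!exists) {
          throw new StorageContainerException("container does not exist",
              ContainerProtos.Result.CONTAINER_NOT_FOUND);
        }
      }

      public static void main(String[] args) {
        try {
          checkContainer(false);
        } catch (StorageContainerException e) {
          // getResult() exposes the protobuf result code behind the failure.
          System.out.println("failed with result: " + e.getResult());
        }
      }
    }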

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c392da8/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/package-info.java
deleted file mode 100644
index ffe0d3d..0000000
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/package-info.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hdds.scm.container.common.helpers;
-/**
- Contains protocol buffer helper classes and utilities used in the
- implementation.
- **/
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c392da8/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/package-info.java
deleted file mode 100644
index d13dcb1..0000000
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/package-info.java
+++ /dev/null
@@ -1,18 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hdds.scm.container;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c392da8/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/package-info.java
deleted file mode 100644
index 3c544db..0000000
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/package-info.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hdds.scm;
-
-/**
- * This package contains classes for the client of the storage container
- * protocol.
- */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c392da8/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/LocatedContainer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/LocatedContainer.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/LocatedContainer.java
deleted file mode 100644
index 14ee3d2..0000000
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/LocatedContainer.java
+++ /dev/null
@@ -1,127 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hdds.scm.protocol;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-
-import java.util.Set;
-
-/**
- * Holds the nodes that currently host the container for an object key hash.
- */
[email protected]
-public final class LocatedContainer {
-  private final String key;
-  private final String matchedKeyPrefix;
-  private final String containerName;
-  private final Set<DatanodeInfo> locations;
-  private final DatanodeInfo leader;
-
-  /**
-   * Creates a LocatedContainer.
-   *
-   * @param key object key
-   * @param matchedKeyPrefix prefix of key that was used to find the location
-   * @param containerName container name
-   * @param locations nodes that currently host the container
-   * @param leader node that currently acts as pipeline leader
-   */
-  public LocatedContainer(String key, String matchedKeyPrefix,
-      String containerName, Set<DatanodeInfo> locations, DatanodeInfo leader) {
-    this.key = key;
-    this.matchedKeyPrefix = matchedKeyPrefix;
-    this.containerName = containerName;
-    this.locations = locations;
-    this.leader = leader;
-  }
-
-  /**
-   * Returns the container name.
-   *
-   * @return container name
-   */
-  public String getContainerName() {
-    return this.containerName;
-  }
-
-  /**
-   * Returns the object key.
-   *
-   * @return object key
-   */
-  public String getKey() {
-    return this.key;
-  }
-
-  /**
-   * Returns the node that currently acts as pipeline leader.
-   *
-   * @return node that currently acts as pipeline leader
-   */
-  public DatanodeInfo getLeader() {
-    return this.leader;
-  }
-
-  /**
-   * Returns the nodes that currently host the container.
-   *
-   * @return Set<DatanodeInfo> nodes that currently host the container
-   */
-  public Set<DatanodeInfo> getLocations() {
-    return this.locations;
-  }
-
-  /**
-   * Returns the prefix of the key that was used to find the location.
-   *
-   * @return prefix of the key that was used to find the location
-   */
-  public String getMatchedKeyPrefix() {
-    return this.matchedKeyPrefix;
-  }
-
-  @Override
-  public boolean equals(Object otherObj) {
-    if (otherObj == null) {
-      return false;
-    }
-    if (!(otherObj instanceof LocatedContainer)) {
-      return false;
-    }
-    LocatedContainer other = (LocatedContainer)otherObj;
-    return this.key == null ? other.key == null : this.key.equals(other.key);
-  }
-
-  @Override
-  public int hashCode() {
-    return key.hashCode();
-  }
-
-  @Override
-  public String toString() {
-    return getClass().getSimpleName()
-        + "{key=" + key
-        + "; matchedKeyPrefix=" + matchedKeyPrefix
-        + "; containerName=" + containerName
-        + "; locations=" + locations
-        + "; leader=" + leader
-        + "}";
-  }
-}
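
A small illustration (hypothetical, not from this commit) of the key-based equality contract above: two instances that differ in every field except the object key still compare equal, since equals() and hashCode() consult only the key:

    import java.util.Collections;
    import org.apache.hadoop.hdds.scm.protocol.LocatedContainer;

    public final class KeyEqualitySketch {
      public static void main(String[] args) {
        LocatedContainer a = new LocatedContainer(
            "key1", "key", "container-a", Collections.emptySet(), null);
        LocatedContainer b = new LocatedContainer(
            "key1", "key", "container-b", Collections.emptySet(), null);
        // Prints true: only the object key participates in equality, so
        // instances describing different containers can still compare equal.
        System.out.println(a.equals(b) && a.hashCode() == b.hashCode());
      }
    }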

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c392da8/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocol.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocol.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocol.java
deleted file mode 100644
index c8d4a80..0000000
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocol.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hdds.scm.protocol;
-
-import org.apache.hadoop.hdds.scm.ScmInfo;
-import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock;
-import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
-import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
-import org.apache.hadoop.ozone.common.BlockGroup;
-import org.apache.hadoop.ozone.common.DeleteBlockGroupResult;
-
-import java.io.IOException;
-import java.util.List;
-
-/**
- * ScmBlockLocationProtocol is used by an HDFS node to find the set of nodes
- * to read/write a block.
- */
-public interface ScmBlockLocationProtocol {
-
-  /**
-   * Asks SCM where a block should be allocated. SCM responds with the
-   * set of datanodes that should be used to create this block.
-   * @param size - size of the block.
-   * @return allocated block accessing info (key, pipeline).
-   * @throws IOException
-   */
-  AllocatedBlock allocateBlock(long size, ReplicationType type,
-      ReplicationFactor factor, String owner) throws IOException;
-
-  /**
-   * Delete blocks for a set of object keys.
-   *
-   * @param keyBlocksInfoList list of BlockGroups, each an object key with its blocks.
-   * @return list of block deletion results.
-   * @throws IOException if there is any failure.
-   */
-  List<DeleteBlockGroupResult>
-      deleteKeyBlocks(List<BlockGroup> keyBlocksInfoList) throws IOException;
-
-  /**
-   * Gets the cluster ID and SCM ID from SCM.
-   */
-  ScmInfo getScmInfo() throws IOException;
-}
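
A hypothetical caller sketch (not part of this commit; the owner string and block size are made up) showing how an implementation of this interface would be asked to place a block:

    import java.io.IOException;
    import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
    import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
    import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock;
    import org.apache.hadoop.hdds.scm.protocol.ScmBlockLocationProtocol;

    public final class AllocateBlockSketch {
      // scmClient would be an RPC-backed implementation in practice.
      static AllocatedBlock allocate(ScmBlockLocationProtocol scmClient)
          throws IOException {
        // Ask SCM for a 256 MB block, replicated three ways via Ratis; the
        // returned AllocatedBlock carries the key and pipeline to write to.
        return scmClient.allocateBlock(256L * 1024 * 1024,
            ReplicationType.RATIS, ReplicationFactor.THREE, "sketch-owner");
      }
    }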

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c392da8/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmLocatedBlock.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmLocatedBlock.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmLocatedBlock.java
deleted file mode 100644
index 6cbdee4..0000000
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmLocatedBlock.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hdds.scm.protocol;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-
-import java.util.List;
-import java.util.stream.Collectors;
-
-/**
- * Holds the nodes that currently host the block for a block key.
- */
[email protected]
-public final class ScmLocatedBlock {
-  private final String key;
-  private final List<DatanodeInfo> locations;
-  private final DatanodeInfo leader;
-
-  /**
-   * Creates a ScmLocatedBlock.
-   *
-   * @param key object key
-   * @param locations nodes that currently host the block
-   * @param leader node that currently acts as pipeline leader
-   */
-  public ScmLocatedBlock(final String key, final List<DatanodeInfo> locations,
-      final DatanodeInfo leader) {
-    this.key = key;
-    this.locations = locations;
-    this.leader = leader;
-  }
-
-  /**
-   * Returns the object key.
-   *
-   * @return object key
-   */
-  public String getKey() {
-    return this.key;
-  }
-
-  /**
-   * Returns the node that currently acts as pipeline leader.
-   *
-   * @return node that currently acts as pipeline leader
-   */
-  public DatanodeInfo getLeader() {
-    return this.leader;
-  }
-
-  /**
-   * Returns the nodes that currently host the block.
-   *
-   * @return List<DatanodeInfo> nodes that currently host the block
-   */
-  public List<DatanodeInfo> getLocations() {
-    return this.locations;
-  }
-
-  @Override
-  public boolean equals(Object otherObj) {
-    if (otherObj == null) {
-      return false;
-    }
-    if (!(otherObj instanceof ScmLocatedBlock)) {
-      return false;
-    }
-    ScmLocatedBlock other = (ScmLocatedBlock)otherObj;
-    return this.key == null ? other.key == null : this.key.equals(other.key);
-  }
-
-  @Override
-  public int hashCode() {
-    return key.hashCode();
-  }
-
-  @Override
-  public String toString() {
-    return getClass().getSimpleName() + "{key=" + key + "; locations="
-        + locations.stream().map(loc -> loc.toString()).collect(Collectors
-            .joining(",")) + "; leader=" + leader + "}";
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c392da8/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java
deleted file mode 100644
index e38077f..0000000
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java
+++ /dev/null
@@ -1,152 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership.  The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.hadoop.hdds.scm.protocol;
-
-import org.apache.hadoop.hdds.scm.ScmInfo;
-import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline;
-import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
-import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
-import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
-import org.apache.hadoop.hdds.protocol.proto
-    .StorageContainerLocationProtocolProtos.ObjectStageChangeRequestProto;
-
-import java.io.IOException;
-import java.util.List;
-
-/**
- * ContainerLocationProtocol is used by an HDFS node to find the set of nodes
- * that currently host a container.
- */
-public interface StorageContainerLocationProtocol {
-  /**
-   * Asks SCM where a container should be allocated. SCM responds with the
-   * set of datanodes that should be used to create this container.
-   *
-   */
-  ContainerWithPipeline allocateContainer(
-      HddsProtos.ReplicationType replicationType,
-      HddsProtos.ReplicationFactor factor, String owner)
-      throws IOException;
-
-  /**
-   * Ask SCM the location of the container. SCM responds with a group of
-   * nodes where this container and its replicas are located.
-   *
-   * @param containerID - ID of the container.
-   * @return ContainerInfo - the container info such as where the pipeline
-   *                         is located.
-   * @throws IOException
-   */
-  ContainerInfo getContainer(long containerID) throws IOException;
-
-  /**
-   * Ask SCM the location of the container. SCM responds with a group of
-   * nodes where this container and its replicas are located.
-   *
-   * @param containerID - ID of the container.
-   * @return ContainerWithPipeline - the container info with the pipeline.
-   * @throws IOException
-   */
-  ContainerWithPipeline getContainerWithPipeline(long containerID)
-      throws IOException;
-
-  /**
-   * Asks SCM for a list of containers, starting from a given container ID
-   * and limited to at most count entries.
-   * The search begins after the start container ID (exclusive), and the
-   * size of the result cannot exceed the value of count.
-   *
-   * @param startContainerID start container ID (exclusive).
-   * @param count maximum result size; if count < 0, the max size is
-   *              unlimited. (Usually the count will be replaced with a very
-   *              big value instead of being unlimited in case the db is
-   *              very big.)
-   *
-   * @return a list of containers.
-   * @throws IOException
-   */
-  List<ContainerInfo> listContainer(long startContainerID, int count)
-      throws IOException;
-
-  /**
-   * Deletes a container in SCM.
-   *
-   * @param containerID
-   * @throws IOException
-   *   if failed to delete the container mapping from db store
-   *   or container doesn't exist.
-   */
-  void deleteContainer(long containerID) throws IOException;
-
-  /**
-   * Queries a list of node statuses.
-   * @param state node state to query for
-   * @return List of Datanodes.
-   */
-  List<HddsProtos.Node> queryNode(HddsProtos.NodeState state,
-      HddsProtos.QueryScope queryScope, String poolName) throws IOException;
-
-  /**
-   * Notification from the client when it begins or finishes creating
-   * objects such as pipelines or containers on datanodes.
-   * The container will be in the Operational state after that.
-   * @param type object type
-   * @param id object id
-   * @param op operation type (e.g., create, close, delete)
-   * @param stage creation stage
-   */
-  void notifyObjectStageChange(
-      ObjectStageChangeRequestProto.Type type, long id,
-      ObjectStageChangeRequestProto.Op op,
-      ObjectStageChangeRequestProto.Stage stage) throws IOException;
-
-  /**
-   * Creates a replication pipeline of a specified type.
-   * @param type - replication type
-   * @param factor - factor 1 or 3
-   * @param nodePool - optional machine list to build a pipeline.
-   * @throws IOException
-   */
-  Pipeline createReplicationPipeline(HddsProtos.ReplicationType type,
-      HddsProtos.ReplicationFactor factor, HddsProtos.NodePool nodePool)
-      throws IOException;
-
-  /**
-   * Returns information about SCM.
-   *
-   * @return {@link ScmInfo}
-   * @throws IOException
-   */
-  ScmInfo getScmInfo() throws IOException;
-
-  /**
-   * Check if SCM is in chill mode.
-   *
-   * @return Returns true if SCM is in chill mode else returns false.
-   * @throws IOException
-   */
-  boolean inChillMode() throws IOException;
-
-  /**
-   * Force SCM out of Chill mode.
-   *
-   * @return returns true if operation is successful.
-   * @throws IOException
-   */
-  boolean forceExitChillMode() throws IOException;
-}
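
A hypothetical pagination sketch (not from this commit) built on the exclusive start-ID semantics of listContainer documented above; it assumes ContainerInfo exposes getContainerID():

    import java.io.IOException;
    import java.util.List;
    import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
    import org.apache.hadoop.hdds.scm.protocol.StorageContainerLocationProtocol;

    public final class ListContainersSketch {
      // Pages through all containers, batchSize at a time; each round
      // restarts just after the last ID seen, since the start is exclusive.
      static void printAll(StorageContainerLocationProtocol scm, int batchSize)
          throws IOException {
        long startId = 0;
        List<ContainerInfo> batch;
        while (!(batch = scm.listContainer(startId, batchSize)).isEmpty()) {
          for (ContainerInfo info : batch) {
            System.out.println(info.getContainerID());
          }
          startId = batch.get(batch.size() - 1).getContainerID();
        }
      }
    }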

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c392da8/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/package-info.java
deleted file mode 100644
index b56a749..0000000
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/package-info.java
+++ /dev/null
@@ -1,19 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hdds.scm.protocol;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c392da8/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolClientSideTranslatorPB.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolClientSideTranslatorPB.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolClientSideTranslatorPB.java
deleted file mode 100644
index aed0fb7..0000000
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolClientSideTranslatorPB.java
+++ /dev/null
@@ -1,173 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership.  The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hadoop.hdds.scm.protocolPB;
-
-import com.google.common.base.Preconditions;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.hdds.client.BlockID;
-import org.apache.hadoop.hdds.scm.ScmInfo;
-import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock;
-import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
-import org.apache.hadoop.hdds.scm.protocol.ScmBlockLocationProtocol;
-import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
-import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
-    .AllocateScmBlockRequestProto;
-import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
-    .AllocateScmBlockResponseProto;
-import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
-    .DeleteScmKeyBlocksRequestProto;
-import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
-    .DeleteScmKeyBlocksResponseProto;
-import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
-    .KeyBlocks;
-import org.apache.hadoop.ipc.ProtobufHelper;
-import org.apache.hadoop.ipc.ProtocolTranslator;
-import org.apache.hadoop.ipc.RPC;
-import org.apache.hadoop.ozone.common.BlockGroup;
-import org.apache.hadoop.ozone.common.DeleteBlockGroupResult;
-
-import java.io.Closeable;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.stream.Collectors;
-
-/**
- * This class is the client-side translator to translate the requests made on
- * the {@link ScmBlockLocationProtocol} interface to the RPC server
- * implementing {@link ScmBlockLocationProtocolPB}.
- */
[email protected]
-public final class ScmBlockLocationProtocolClientSideTranslatorPB
-    implements ScmBlockLocationProtocol, ProtocolTranslator, Closeable {
-
-  /**
-   * RpcController is not used and hence is set to null.
-   */
-  private static final RpcController NULL_RPC_CONTROLLER = null;
-
-  private final ScmBlockLocationProtocolPB rpcProxy;
-
-  /**
-   * Creates a new ScmBlockLocationProtocolClientSideTranslatorPB.
-   *
-   * @param rpcProxy {@link ScmBlockLocationProtocolPB} RPC proxy
-   */
-  public ScmBlockLocationProtocolClientSideTranslatorPB(
-      ScmBlockLocationProtocolPB rpcProxy) {
-    this.rpcProxy = rpcProxy;
-  }
-
-  /**
-   * Asks SCM where a block should be allocated. SCM responds with the
-   * set of datanodes that should be used to create this block.
-   * @param size - size of the block.
-   * @return allocated block accessing info (key, pipeline).
-   * @throws IOException
-   */
-  @Override
-  public AllocatedBlock allocateBlock(long size,
-      HddsProtos.ReplicationType type, HddsProtos.ReplicationFactor factor,
-      String owner) throws IOException {
-    Preconditions.checkArgument(size > 0, "block size must be greater than 0");
-
-    AllocateScmBlockRequestProto request =
-        AllocateScmBlockRequestProto.newBuilder().setSize(size).setType(type)
-            .setFactor(factor).setOwner(owner).build();
-    final AllocateScmBlockResponseProto response;
-    try {
-      response = rpcProxy.allocateScmBlock(NULL_RPC_CONTROLLER, request);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
-    if (response.getErrorCode() !=
-        AllocateScmBlockResponseProto.Error.success) {
-      throw new IOException(response.hasErrorMessage() ?
-          response.getErrorMessage() : "Allocate block failed.");
-    }
-    AllocatedBlock.Builder builder = new AllocatedBlock.Builder()
-        .setBlockID(BlockID.getFromProtobuf(response.getBlockID()))
-        .setPipeline(Pipeline.getFromProtoBuf(response.getPipeline()))
-        .setShouldCreateContainer(response.getCreateContainer());
-    return builder.build();
-  }
-
-  /**
-   * Delete the set of keys specified.
-   *
-   * @param keyBlocksInfoList batch of block keys to delete.
-   * @return list of block deletion results.
-   * @throws IOException if there is any failure.
-   *
-   */
-  @Override
-  public List<DeleteBlockGroupResult> deleteKeyBlocks(
-      List<BlockGroup> keyBlocksInfoList) throws IOException {
-    List<KeyBlocks> keyBlocksProto = keyBlocksInfoList.stream()
-        .map(BlockGroup::getProto).collect(Collectors.toList());
-    DeleteScmKeyBlocksRequestProto request = DeleteScmKeyBlocksRequestProto
-        .newBuilder().addAllKeyBlocks(keyBlocksProto).build();
-
-    final DeleteScmKeyBlocksResponseProto resp;
-    try {
-      resp = rpcProxy.deleteScmKeyBlocks(NULL_RPC_CONTROLLER, request);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
-    List<DeleteBlockGroupResult> results =
-        new ArrayList<>(resp.getResultsCount());
-    results.addAll(resp.getResultsList().stream().map(
-        result -> new DeleteBlockGroupResult(result.getObjectKey(),
-            DeleteBlockGroupResult
-                .convertBlockResultProto(result.getBlockResultsList())))
-        .collect(Collectors.toList()));
-    return results;
-  }
-
-  /**
-   * Gets the cluster ID and SCM ID from SCM.
-   * @return ScmInfo
-   * @throws IOException
-   */
-  @Override
-  public ScmInfo getScmInfo() throws IOException {
-    HddsProtos.GetScmInfoRequestProto request =
-        HddsProtos.GetScmInfoRequestProto.getDefaultInstance();
-    HddsProtos.GetScmInfoRespsonseProto resp;
-    try {
-      resp = rpcProxy.getScmInfo(NULL_RPC_CONTROLLER, request);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
-    ScmInfo.Builder builder = new ScmInfo.Builder()
-        .setClusterId(resp.getClusterId())
-        .setScmId(resp.getScmId());
-    return builder.build();
-  }
-
-  @Override
-  public Object getUnderlyingProxyObject() {
-    return rpcProxy;
-  }
-
-  @Override
-  public void close() {
-    RPC.stopProxy(rpcProxy);
-  }
-}
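
The try/catch shape repeated in each method above is the usual client-side translator pattern; a stripped-down, self-contained sketch (hypothetical names, not from this commit) of what the ProtobufHelper.getRemoteException calls accomplish:

    import java.io.IOException;
    import com.google.protobuf.ServiceException;

    public final class TranslatePatternSketch {
      interface PbCall<T> {
        T call() throws ServiceException;
      }

      // Unwrap the protobuf ServiceException into the IOException that the
      // protocol interface declares, preserving the remote cause if present.
      static <T> T translate(PbCall<T> call) throws IOException {
        try {
          return call.call();
        } catch (ServiceException e) {
          Throwable cause = e.getCause();
          throw cause instanceof IOException
              ? (IOException) cause : new IOException(e);
        }
      }
    }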

