This is an automated email from the ASF dual-hosted git repository.
dineshc pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hadoop-ozone.git
The following commit(s) were added to refs/heads/master by this push:
new 91a6477 HDDS-2687. Fix sonar issues in package org.apache.hadoop.ozone.recon.api. (#325)
91a6477 is described below
commit 91a647777d21ca1e1b6abfed5f5445ecb9fda127
Author: avijayanhwx <[email protected]>
AuthorDate: Sun Dec 8 20:41:42 2019 -0800
HDDS-2687. Fix sonar issues in package org.apache.hadoop.ozone.recon.api. (#325)
---
.../ozone/recon/api/ContainerKeyService.java | 112 ++++++++++++---------
.../hadoop/ozone/recon/api/TaskStatusService.java | 10 +-
.../hadoop/ozone/recon/api/UtilizationService.java | 5 -
3 files changed, 66 insertions(+), 61 deletions(-)
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/ContainerKeyService.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/ContainerKeyService.java
index cb4ff72..ee12350 100644
--- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/ContainerKeyService.java
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/ContainerKeyService.java
@@ -127,61 +127,51 @@ public class ContainerKeyService {
// Directly calling get() on the Key table instead of iterating since
// only full keys are supported now. When we change to using a prefix
- // of the key, this needs to change to prefix seek (TODO).
+ // of the key, this needs to change to prefix seek.
OmKeyInfo omKeyInfo = omMetadataManager.getKeyTable().get(
containerKeyPrefix.getKeyPrefix());
- if (null == omKeyInfo) {
- continue;
- }
-
- // Filter keys by version.
- List<OmKeyLocationInfoGroup> matchedKeys = omKeyInfo
- .getKeyLocationVersions()
- .stream()
- .filter(k -> (k.getVersion() ==
- containerKeyPrefix.getKeyVersion()))
- .collect(Collectors.toList());
-
- List<ContainerBlockMetadata> blockIds = new ArrayList<>();
- for (OmKeyLocationInfoGroup omKeyLocationInfoGroup : matchedKeys) {
- List<OmKeyLocationInfo> omKeyLocationInfos = omKeyLocationInfoGroup
- .getLocationList()
+ if (null != omKeyInfo) {
+ // Filter keys by version.
+ List<OmKeyLocationInfoGroup> matchedKeys = omKeyInfo
+ .getKeyLocationVersions()
.stream()
- .filter(c -> c.getContainerID() == containerID)
+ .filter(k -> (k.getVersion() ==
+ containerKeyPrefix.getKeyVersion()))
.collect(Collectors.toList());
- for (OmKeyLocationInfo omKeyLocationInfo : omKeyLocationInfos) {
- blockIds.add(new ContainerBlockMetadata(omKeyLocationInfo
- .getContainerID(), omKeyLocationInfo.getLocalID()));
- }
- }
- String ozoneKey = omMetadataManager.getOzoneKey(
- omKeyInfo.getVolumeName(),
- omKeyInfo.getBucketName(),
- omKeyInfo.getKeyName());
- if (keyMetadataMap.containsKey(ozoneKey)) {
- keyMetadataMap.get(ozoneKey).getVersions()
- .add(containerKeyPrefix.getKeyVersion());
-
- keyMetadataMap.get(ozoneKey).getBlockIds()
- .put(containerKeyPrefix.getKeyVersion(), blockIds);
- } else {
- // break the for loop if limit has been reached
- if (keyMetadataMap.size() == limit) {
- break;
+ List<ContainerBlockMetadata> blockIds =
+ getBlocks(matchedKeys, containerID);
+
+ String ozoneKey = omMetadataManager.getOzoneKey(
+ omKeyInfo.getVolumeName(),
+ omKeyInfo.getBucketName(),
+ omKeyInfo.getKeyName());
+ if (keyMetadataMap.containsKey(ozoneKey)) {
+ keyMetadataMap.get(ozoneKey).getVersions()
+ .add(containerKeyPrefix.getKeyVersion());
+
+ keyMetadataMap.get(ozoneKey).getBlockIds()
+ .put(containerKeyPrefix.getKeyVersion(), blockIds);
+ } else {
+ // break the for loop if limit has been reached
+ if (keyMetadataMap.size() == limit) {
+ break;
+ }
+ KeyMetadata keyMetadata = new KeyMetadata();
+ keyMetadata.setBucket(omKeyInfo.getBucketName());
+ keyMetadata.setVolume(omKeyInfo.getVolumeName());
+ keyMetadata.setKey(omKeyInfo.getKeyName());
+ keyMetadata.setCreationTime(
+ Instant.ofEpochMilli(omKeyInfo.getCreationTime()));
+ keyMetadata.setModificationTime(
+ Instant.ofEpochMilli(omKeyInfo.getModificationTime()));
+ keyMetadata.setDataSize(omKeyInfo.getDataSize());
+ keyMetadata.getVersions().add(containerKeyPrefix.getKeyVersion());
+ keyMetadataMap.put(ozoneKey, keyMetadata);
+ keyMetadata.getBlockIds().put(containerKeyPrefix.getKeyVersion(),
+ blockIds);
}
- KeyMetadata keyMetadata = new KeyMetadata();
- keyMetadata.setBucket(omKeyInfo.getBucketName());
- keyMetadata.setVolume(omKeyInfo.getVolumeName());
- keyMetadata.setKey(omKeyInfo.getKeyName());
- keyMetadata.setCreationTime(
- Instant.ofEpochMilli(omKeyInfo.getCreationTime()));
- keyMetadata.setModificationTime(
- Instant.ofEpochMilli(omKeyInfo.getModificationTime()));
- keyMetadata.setDataSize(omKeyInfo.getDataSize());
- keyMetadata.getVersions().add(containerKeyPrefix.getKeyVersion());
- keyMetadataMap.put(ozoneKey, keyMetadata);
- keyMetadata.getBlockIds().put(containerKeyPrefix.getKeyVersion(),
- blockIds);
+
}
}
@@ -195,4 +185,28 @@ public class ContainerKeyService {
new KeysResponse(totalCount, keyMetadataMap.values());
return Response.ok(keysResponse).build();
}
+
+ /**
+ * Helper function to extract the blocks for a given container from a given
+ * OM Key.
+ * @param matchedKeys List of OM Key Info locations
+ * @param containerID containerId.
+ * @return List of blocks.
+ */
+ private List<ContainerBlockMetadata> getBlocks(
+ List<OmKeyLocationInfoGroup> matchedKeys, long containerID) {
+ List<ContainerBlockMetadata> blockIds = new ArrayList<>();
+ for (OmKeyLocationInfoGroup omKeyLocationInfoGroup : matchedKeys) {
+ List<OmKeyLocationInfo> omKeyLocationInfos = omKeyLocationInfoGroup
+ .getLocationList()
+ .stream()
+ .filter(c -> c.getContainerID() == containerID)
+ .collect(Collectors.toList());
+ for (OmKeyLocationInfo omKeyLocationInfo : omKeyLocationInfos) {
+ blockIds.add(new ContainerBlockMetadata(omKeyLocationInfo
+ .getContainerID(), omKeyLocationInfo.getLocalID()));
+ }
+ }
+ return blockIds;
+ }
}
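Assembling the hunks above, the per-key loop body in ContainerKeyService now reads roughly as follows. This is a condensed sketch, not a standalone compilation unit: the enclosing iteration that supplies containerKeyPrefix, containerID, limit and keyMetadataMap is unchanged from the original class.

    OmKeyInfo omKeyInfo = omMetadataManager.getKeyTable().get(
        containerKeyPrefix.getKeyPrefix());
    if (null != omKeyInfo) {
      // Keep only the key location versions that match the requested
      // key version from the container prefix.
      List<OmKeyLocationInfoGroup> matchedKeys = omKeyInfo
          .getKeyLocationVersions()
          .stream()
          .filter(k -> k.getVersion() == containerKeyPrefix.getKeyVersion())
          .collect(Collectors.toList());

      // Block extraction is delegated to the new private getBlocks() helper
      // added at the end of the class.
      List<ContainerBlockMetadata> blockIds =
          getBlocks(matchedKeys, containerID);

      String ozoneKey = omMetadataManager.getOzoneKey(
          omKeyInfo.getVolumeName(),
          omKeyInfo.getBucketName(),
          omKeyInfo.getKeyName());
      // The remainder of the branch (merging versions and blocks into
      // keyMetadataMap and honoring the limit) is unchanged apart from the
      // extra nesting level.
    }

The net effect is that the early continue (a likely sonar target) is replaced with a positive null check, and the nested block-collection loops move into getBlocks(), at the cost of one additional indentation level inside the loop.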
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/TaskStatusService.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/TaskStatusService.java
index 5b8ec1b..3898d70 100644
--- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/TaskStatusService.java
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/TaskStatusService.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.ozone.recon.api;
import org.hadoop.ozone.recon.schema.tables.daos.ReconTaskStatusDao;
import org.hadoop.ozone.recon.schema.tables.pojos.ReconTaskStatus;
import org.jooq.Configuration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import javax.ws.rs.GET;
@@ -38,13 +36,10 @@ import java.util.List;
@Path("/task")
@Produces(MediaType.APPLICATION_JSON)
public class TaskStatusService {
- private static final Logger LOG =
- LoggerFactory.getLogger(TaskStatusService.class);
-
- private ReconTaskStatusDao reconTaskStatusDao;
@Inject
private Configuration sqlConfiguration;
+
/**
* Return the list of Recon Tasks and the last successful timestamp and
* sequence number.
@@ -53,7 +48,8 @@ public class TaskStatusService {
@GET
@Path("status")
public Response getTaskTimes() {
- reconTaskStatusDao = new ReconTaskStatusDao(sqlConfiguration);
+ ReconTaskStatusDao reconTaskStatusDao =
+ new ReconTaskStatusDao(sqlConfiguration);
List<ReconTaskStatus> resultSet = reconTaskStatusDao.findAll();
return Response.ok(resultSet).build();
}
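For TaskStatusService, the change removes the unused SLF4J logger and turns the DAO field into a request-scoped local. Pieced together from the hunks above, the class now reads roughly as follows (a sketch; the license header is omitted and import order or javadoc wording may differ slightly from the actual file):

    package org.apache.hadoop.ozone.recon.api;

    import java.util.List;

    import javax.inject.Inject;
    import javax.ws.rs.GET;
    import javax.ws.rs.Path;
    import javax.ws.rs.Produces;
    import javax.ws.rs.core.MediaType;
    import javax.ws.rs.core.Response;

    import org.hadoop.ozone.recon.schema.tables.daos.ReconTaskStatusDao;
    import org.hadoop.ozone.recon.schema.tables.pojos.ReconTaskStatus;
    import org.jooq.Configuration;

    @Path("/task")
    @Produces(MediaType.APPLICATION_JSON)
    public class TaskStatusService {

      @Inject
      private Configuration sqlConfiguration;

      /**
       * Return the list of Recon Tasks and the last successful timestamp and
       * sequence number.
       */
      @GET
      @Path("status")
      public Response getTaskTimes() {
        // The DAO is constructed per request from the injected jOOQ
        // configuration instead of being held in a mutable instance field.
        ReconTaskStatusDao reconTaskStatusDao =
            new ReconTaskStatusDao(sqlConfiguration);
        List<ReconTaskStatus> resultSet = reconTaskStatusDao.findAll();
        return Response.ok(resultSet).build();
      }
    }

Constructing the DAO inside the method removes the non-final instance field, which is presumably the sonar finding addressed here, and keeps the endpoint stateless.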
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/UtilizationService.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/UtilizationService.java
index 0bc33f3..58fa07b 100644
--- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/UtilizationService.java
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/UtilizationService.java
@@ -22,8 +22,6 @@ import javax.inject.Inject;
import org.hadoop.ozone.recon.schema.tables.daos.FileCountBySizeDao;
import org.hadoop.ozone.recon.schema.tables.pojos.FileCountBySize;
import org.jooq.Configuration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
@@ -38,9 +36,6 @@ import java.util.List;
@Path("/utilization")
@Produces(MediaType.APPLICATION_JSON)
public class UtilizationService {
- private static final Logger LOG =
- LoggerFactory.getLogger(UtilizationService.class);
-
private FileCountBySizeDao fileCountBySizeDao;
@Inject
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]