[ 
https://issues.apache.org/jira/browse/SDAP-18?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16388939#comment-16388939
 ] 

ASF GitHub Bot commented on SDAP-18:
------------------------------------

lewismc closed pull request #5: SDAP-18 Review code contribution from Sigee
URL: https://github.com/apache/incubator-sdap-mudrod/pull/5
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

As this is a foreign pull request (from a fork), the diff is supplied
below (as it won't show otherwise due to GitHub magic):

diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/RecommendEngine.java
 
b/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/RecommendEngine.java
index 2c829ff..d54a556 100644
--- 
a/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/RecommendEngine.java
+++ 
b/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/RecommendEngine.java
@@ -8,7 +8,7 @@
 import org.apache.sdap.mudrod.recommendation.pre.SessionCooccurence;
 import org.apache.sdap.mudrod.recommendation.process.AbstractBasedSimilarity;
 import org.apache.sdap.mudrod.recommendation.process.VariableBasedSimilarity;
-import org.apache.sdap.mudrod.recommendation.process.sessionBasedCF;
+import org.apache.sdap.mudrod.recommendation.process.SessionBasedCF;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -60,7 +60,7 @@ public void process() {
     DiscoveryStepAbstract cbCF = new VariableBasedSimilarity(this.props, 
this.es, this.spark);
     cbCF.execute();
 
-    DiscoveryStepAbstract sbCF = new sessionBasedCF(this.props, this.es, 
this.spark);
+    DiscoveryStepAbstract sbCF = new SessionBasedCF(this.props, this.es, 
this.spark);
     sbCF.execute();
 
     endTime = System.currentTimeMillis();
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/WeblogDiscoveryEngine.java
 
b/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/WeblogDiscoveryEngine.java
index b672a54..b2cdb9f 100644
--- 
a/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/WeblogDiscoveryEngine.java
+++ 
b/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/WeblogDiscoveryEngine.java
@@ -99,11 +99,11 @@ public void preprocess() {
 
     ArrayList<String> inputList = (ArrayList<String>) 
getFileList(props.getProperty(MudrodConstants.DATA_DIR));
 
-    for (int i = 0; i < inputList.size(); i++) {
-      timeSuffix = inputList.get(i);
+    for (String anInputList : inputList) {
+      timeSuffix = anInputList;
       props.put(MudrodConstants.TIME_SUFFIX, timeSuffix);
       startTime = System.currentTimeMillis();
-      LOG.info("Processing logs dated {}", inputList.get(i));
+      LOG.info("Processing logs dated {}", anInputList);
 
       DiscoveryStepAbstract im = new ImportLogFile(this.props, this.es, 
this.spark);
       im.execute();
@@ -122,7 +122,7 @@ public void preprocess() {
 
       endTime = System.currentTimeMillis();
 
-      LOG.info("Web log preprocessing for logs dated {} complete. Time elapsed 
{} seconds.", inputList.get(i), (endTime - startTime) / 1000);
+      LOG.info("Web log preprocessing for logs dated {} complete. Time elapsed 
{} seconds.", anInputList, (endTime - startTime) / 1000);
     }
 
     DiscoveryStepAbstract hg = new HistoryGenerator(this.props, this.es, 
this.spark);
@@ -140,8 +140,8 @@ public void preprocess() {
   public void logIngest() {
     LOG.info("Starting Web log ingest.");
     ArrayList<String> inputList = (ArrayList<String>) 
getFileList(props.getProperty(MudrodConstants.DATA_DIR));
-    for (int i = 0; i < inputList.size(); i++) {
-      timeSuffix = inputList.get(i);
+    for (String anInputList : inputList) {
+      timeSuffix = anInputList;
       props.put("TimeSuffix", timeSuffix);
       DiscoveryStepAbstract im = new ImportLogFile(this.props, this.es, 
this.spark);
       im.execute();
@@ -157,8 +157,8 @@ public void logIngest() {
   public void sessionRestruct() {
     LOG.info("Starting Session reconstruction.");
     ArrayList<String> inputList = (ArrayList<String>) 
getFileList(props.getProperty(MudrodConstants.DATA_DIR));
-    for (int i = 0; i < inputList.size(); i++) {
-      timeSuffix = inputList.get(i); // change timeSuffix dynamically
+    for (String anInputList : inputList) {
+      timeSuffix = anInputList; // change timeSuffix dynamically
       props.put(MudrodConstants.TIME_SUFFIX, timeSuffix);
       DiscoveryStepAbstract cd = new CrawlerDetection(this.props, this.es, 
this.spark);
       cd.execute();
diff --git a/core/src/main/java/org/apache/sdap/mudrod/driver/ESDriver.java 
b/core/src/main/java/org/apache/sdap/mudrod/driver/ESDriver.java
index 7fe0cb4..54b9128 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/driver/ESDriver.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/driver/ESDriver.java
@@ -168,10 +168,9 @@ public String customAnalyzing(String indexName, String 
analyzer, String str) thr
     if (list == null) {
       return list;
     }
-    int size = list.size();
     List<String> customlist = new ArrayList<>();
-    for (int i = 0; i < size; i++) {
-      customlist.add(this.customAnalyzing(indexName, list.get(i)));
+    for (String aList : list) {
+      customlist.add(this.customAnalyzing(indexName, aList));
     }
 
     return customlist;
@@ -224,9 +223,7 @@ public void deleteType(String index, String type) {
     String[] indices = client.admin().indices().getIndex(new 
GetIndexRequest()).actionGet().getIndices();
 
     ArrayList<String> indexList = new ArrayList<>();
-    int length = indices.length;
-    for (int i = 0; i < length; i++) {
-      String indexName = indices[i];
+    for (String indexName : indices) {
       if (indexName.startsWith(object.toString())) {
         indexList.add(indexName);
       }
diff --git a/core/src/main/java/org/apache/sdap/mudrod/driver/SparkDriver.java 
b/core/src/main/java/org/apache/sdap/mudrod/driver/SparkDriver.java
index c6b0aef..14e7f1a 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/driver/SparkDriver.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/driver/SparkDriver.java
@@ -19,43 +19,11 @@
 import org.apache.spark.serializer.KryoSerializer;
 import org.apache.spark.sql.SQLContext;
 
-import java.io.File;
 import java.io.Serializable;
-import java.net.URISyntaxException;
 import java.util.Properties;
-//import org.apache.spark.sql.SparkSession;
 
 public class SparkDriver implements Serializable {
 
-  //TODO the commented out code below is the API uprgade
-  //for Spark 2.0.0. It requires a large upgrade and simplification
-  //across the mudrod codebase so should be done in an individual ticket.
-  //  /**
-  //   *
-  //   */
-  //  private static final long serialVersionUID = 1L;
-  //  private SparkSession builder;
-  //
-  //  public SparkDriver() {
-  //    builder = SparkSession.builder()
-  //        .master("local[2]")
-  //        .config("spark.hadoop.validateOutputSpecs", "false")
-  //        .config("spark.files.overwrite", "true")
-  //        .getOrCreate();
-  //  }
-  //
-  //  public SparkSession getBuilder() {
-  //    return builder;
-  //  }
-  //
-  //  public void setBuilder(SparkSession builder) {
-  //    this.builder = builder;
-  //  }
-  //
-  //  public void close() {
-  //    builder.stop();
-  //  }
-
   /**
    *
    */
diff --git a/core/src/main/java/org/apache/sdap/mudrod/main/MudrodEngine.java 
b/core/src/main/java/org/apache/sdap/mudrod/main/MudrodEngine.java
index 341d5fc..359ae71 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/main/MudrodEngine.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/main/MudrodEngine.java
@@ -167,8 +167,7 @@ public Properties loadConfig() {
       Element rootNode = document.getRootElement();
       List<Element> paraList = rootNode.getChildren("para");
 
-      for (int i = 0; i < paraList.size(); i++) {
-        Element paraNode = paraList.get(i);
+      for (Element paraNode : paraList) {
         String attributeName = paraNode.getAttributeValue("name");
         if (MudrodConstants.SVM_SGD_MODEL.equals(attributeName)) {
           props.put(attributeName, 
decompressSVMWithSGDModel(paraNode.getTextTrim()));
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/metadata/pre/ApiHarvester.java 
b/core/src/main/java/org/apache/sdap/mudrod/metadata/pre/ApiHarvester.java
index 7b8b5c1..32b4609 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/metadata/pre/ApiHarvester.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/metadata/pre/ApiHarvester.java
@@ -148,7 +148,7 @@ private void harvestMetadatafromWeb() {
         int docId = startIndex + i;
         File itemfile = new 
File(props.getProperty(MudrodConstants.RAW_METADATA_PATH) + "/" + docId + 
".json");
 
-        try (FileWriter fw = new FileWriter(itemfile.getAbsoluteFile()); 
BufferedWriter bw = new BufferedWriter(fw);) {
+        try (FileWriter fw = new FileWriter(itemfile.getAbsoluteFile()); 
BufferedWriter bw = new BufferedWriter(fw)) {
           itemfile.createNewFile();
           bw.write(item.toString());
         } catch (IOException e) {
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/metadata/structure/PODAACMetadata.java
 
b/core/src/main/java/org/apache/sdap/mudrod/metadata/structure/PODAACMetadata.java
index de3edf7..4c3070b 100644
--- 
a/core/src/main/java/org/apache/sdap/mudrod/metadata/structure/PODAACMetadata.java
+++ 
b/core/src/main/java/org/apache/sdap/mudrod/metadata/structure/PODAACMetadata.java
@@ -319,15 +319,15 @@ private void splitString(String oristr, List<String> 
list) {
 
     String strs[] = oristr.trim().split(",");
     if (strs != null) {
-      for (int i = 0; i < strs.length; i++) {
-        String str = strs[i].trim();
+      for (String str1 : strs) {
+        String str = str1.trim();
         if (str.startsWith(",") || str.startsWith("\"")) {
           str = str.substring(1);
         }
         if (str.endsWith(",") || str.endsWith("\"")) {
           str = str.substring(0, str.length() - 1);
         }
-        if (str == "") {
+        if ("".equals(str)) {
           continue;
         }
         list.add(str);
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/ontology/pre/AggregateTriples.java 
b/core/src/main/java/org/apache/sdap/mudrod/ontology/pre/AggregateTriples.java
index e94d678..e988d15 100644
--- 
a/core/src/main/java/org/apache/sdap/mudrod/ontology/pre/AggregateTriples.java
+++ 
b/core/src/main/java/org/apache/sdap/mudrod/ontology/pre/AggregateTriples.java
@@ -167,8 +167,8 @@ public Element findChild(String str, Element ele) {
   public void getAllClass() throws IOException {
     List<?> classElements = rootNode.getChildren("Class", 
Namespace.getNamespace("owl", owl_namespace));
 
-    for (int i = 0; i < classElements.size(); i++) {
-      Element classElement = (Element) classElements.get(i);
+    for (Object classElement1 : classElements) {
+      Element classElement = (Element) classElement1;
       String className = classElement.getAttributeValue("about", 
Namespace.getNamespace("rdf", rdf_namespace));
 
       if (className == null) {
@@ -176,8 +176,8 @@ public void getAllClass() throws IOException {
       }
 
       List<?> subclassElements = classElement.getChildren("subClassOf", 
Namespace.getNamespace("rdfs", rdfs_namespace));
-      for (int j = 0; j < subclassElements.size(); j++) {
-        Element subclassElement = (Element) subclassElements.get(j);
+      for (Object subclassElement1 : subclassElements) {
+        Element subclassElement = (Element) subclassElement1;
         String subclassName = subclassElement.getAttributeValue("resource", 
Namespace.getNamespace("rdf", rdf_namespace));
         if (subclassName == null) {
           Element allValuesFromEle = findChild("allValuesFrom", 
subclassElement);
@@ -192,8 +192,8 @@ public void getAllClass() throws IOException {
       }
 
       List equalClassElements = classElement.getChildren("equivalentClass", 
Namespace.getNamespace("owl", owl_namespace));
-      for (int k = 0; k < equalClassElements.size(); k++) {
-        Element equalClassElement = (Element) equalClassElements.get(k);
+      for (Object equalClassElement1 : equalClassElements) {
+        Element equalClassElement = (Element) equalClassElement1;
         String equalClassElementName = 
equalClassElement.getAttributeValue("resource", Namespace.getNamespace("rdf", 
rdf_namespace));
 
         if (equalClassElementName != null) {
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/ontology/process/LocalOntology.java 
b/core/src/main/java/org/apache/sdap/mudrod/ontology/process/LocalOntology.java
index 0380c07..f106594 100644
--- 
a/core/src/main/java/org/apache/sdap/mudrod/ontology/process/LocalOntology.java
+++ 
b/core/src/main/java/org/apache/sdap/mudrod/ontology/process/LocalOntology.java
@@ -116,12 +116,10 @@ public void load() {
    */
   @Override
   public void load(String[] urls) {
-    for (int i = 0; i < urls.length; i++) {
-      String url = urls[i].trim();
-      if (!"".equals(url))
-        if (LOG.isInfoEnabled()) {
-          LOG.info("Reading and processing {}", url);
-        }
+    for (String url1 : urls) {
+      String url = url1.trim();
+      if (!"".equals(url) && LOG.isInfoEnabled())
+        LOG.info("Reading and processing {}", url);
       load(ontologyModel, url);
     }
     parser.parse(ontology, ontologyModel);
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/ontology/process/OwlParser.java 
b/core/src/main/java/org/apache/sdap/mudrod/ontology/process/OwlParser.java
index 2332c81..3f12cd6 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ontology/process/OwlParser.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ontology/process/OwlParser.java
@@ -24,7 +24,6 @@
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
-import java.util.Set;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/NormalizeVariables.java
 
b/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/NormalizeVariables.java
index fa734c9..28ffd5d 100644
--- 
a/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/NormalizeVariables.java
+++ 
b/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/NormalizeVariables.java
@@ -98,7 +98,7 @@ private Double getVersionNum(String version) {
     if (version == null) {
       return 0.0;
     }
-    Double versionNum = 0.0;
+    Double versionNum;
     Pattern p = Pattern.compile(".*[a-zA-Z].*");
     if ("Operational/Near-Real-Time".equals(version)) {
       versionNum = 2.0;
@@ -197,7 +197,7 @@ public Double getProLevelNum(String pro) {
     if (pro == null) {
       return 1.0;
     }
-    Double proNum = 0.0;
+    Double proNum;
     Pattern p = Pattern.compile(".*[a-zA-Z].*");
     if (pro.matches("[0-9]{1}[a-zA-Z]{1}")) {
       proNum = Double.parseDouble(pro.substring(0, 1));
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/SessionCooccurence.java
 
b/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/SessionCooccurence.java
index 63cb33e..e2b1f38 100644
--- 
a/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/SessionCooccurence.java
+++ 
b/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/SessionCooccurence.java
@@ -109,9 +109,7 @@ public Object execute(Object o) {
       public Tuple2<String, List<String>> call(Tuple2<String, List<String>> 
arg0) throws Exception {
         List<String> oriDatasets = arg0._2;
         List<String> newDatasets = new ArrayList<>();
-        int size = oriDatasets.size();
-        for (int i = 0; i < size; i++) {
-          String name = oriDatasets.get(i);
+        for (String name : oriDatasets) {
           if (nameMap.containsKey(name)) {
             newDatasets.add(nameMap.get(name));
           }
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/sessionBasedCF.java
 
b/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/SessionBasedCF.java
similarity index 91%
rename from 
core/src/main/java/org/apache/sdap/mudrod/recommendation/process/sessionBasedCF.java
rename to 
core/src/main/java/org/apache/sdap/mudrod/recommendation/process/SessionBasedCF.java
index d953d3d..5ea461b 100644
--- 
a/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/sessionBasedCF.java
+++ 
b/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/SessionBasedCF.java
@@ -29,12 +29,12 @@
 /**
  * ClassName: Recommend metedata based on session level co-occurrence
  */
-public class sessionBasedCF extends DiscoveryStepAbstract {
+public class SessionBasedCF extends DiscoveryStepAbstract {
 
-  private static final Logger LOG = 
LoggerFactory.getLogger(sessionBasedCF.class);
+  private static final Logger LOG = 
LoggerFactory.getLogger(SessionBasedCF.class);
 
   /**
-   * Creates a new instance of sessionBasedCF.
+   * Creates a new instance of SessionBasedCF.
    *
    * @param props
    *          the Mudrod configuration
@@ -43,7 +43,7 @@
    * @param spark
    *          the spark drive
    */
-  public sessionBasedCF(Properties props, ESDriver es, SparkDriver spark) {
+  public SessionBasedCF(Properties props, ESDriver es, SparkDriver spark) {
     super(props, es, spark);
   }
 
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/VariableBasedSimilarity.java
 
b/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/VariableBasedSimilarity.java
index c741ea1..9bf0884 100644
--- 
a/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/VariableBasedSimilarity.java
+++ 
b/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/VariableBasedSimilarity.java
@@ -15,8 +15,11 @@
 
 import java.io.IOException;
 import java.io.Serializable;
-import java.text.DecimalFormat;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
 
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 
@@ -29,13 +32,10 @@
 
   private static final Logger LOG = 
LoggerFactory.getLogger(VariableBasedSimilarity.class);
 
-  private DecimalFormat df = new DecimalFormat("#.000");
   // a map from variable to its type
   public Map<String, Integer> variableTypes;
   public Map<String, Integer> variableWeights;
 
-  private static final Integer VAR_SPATIAL = 1;
-  private static final Integer VAR_TEMPORAL = 2;
   private static final Integer VAR_CATEGORICAL = 3;
   private static final Integer VAR_ORDINAL = 4;
 
@@ -155,14 +155,10 @@ public void VariableBasedSimilarity(ESDriver es) {
       }
     }
 
-    int size = metadatas.size();
-    for (int i = 0; i < size; i++) {
-      Map<String, Object> metadataA = metadatas.get(i);
+    for (Map<String, Object> metadataA : metadatas) {
       String shortNameA = (String) metadataA.get("Dataset-ShortName");
 
-      for (int j = 0; j < size; j++) {
-        metadataA = metadatas.get(i);
-        Map<String, Object> metadataB = metadatas.get(j);
+      for (Map<String, Object> metadataB : metadatas) {
         String shortNameB = (String) metadataB.get("Dataset-ShortName");
 
         try {
@@ -230,10 +226,10 @@ public void spatialSimilarity(Map<String, Object> 
metadataA, Map<String, Object>
 
   public void temporalSimilarity(Map<String, Object> metadataA, Map<String, 
Object> metadataB, XContentBuilder contentBuilder) throws IOException {
 
-    double similarity = 0.0;
+    double similarity;
     double startTimeA = Double.parseDouble((String) 
metadataA.get("Dataset-DatasetCoverage-StartTimeLong"));
     String endTimeAStr = (String) 
metadataA.get("Dataset-DatasetCoverage-StopTimeLong");
-    double endTimeA = 0.0;
+    double endTimeA;
     if ("".equals(endTimeAStr)) {
       endTimeA = System.currentTimeMillis();
     } else {
@@ -243,7 +239,7 @@ public void temporalSimilarity(Map<String, Object> 
metadataA, Map<String, Object
 
     double startTimeB = Double.parseDouble((String) 
metadataB.get("Dataset-DatasetCoverage-StartTimeLong"));
     String endTimeBStr = (String) 
metadataB.get("Dataset-DatasetCoverage-StopTimeLong");
-    double endTimeB = 0.0;
+    double endTimeB;
     if ("".equals(endTimeBStr)) {
       endTimeB = System.currentTimeMillis();
     } else {
@@ -251,7 +247,7 @@ public void temporalSimilarity(Map<String, Object> 
metadataA, Map<String, Object
     }
     double timespanB = endTimeB - startTimeB;
 
-    double intersect = 0.0;
+    double intersect;
     if (startTimeB >= endTimeA || endTimeB <= startTimeA) {
       intersect = 0.0;
     } else if (startTimeB >= startTimeA && endTimeB <= endTimeA) {
@@ -283,7 +279,6 @@ public void categoricalVariablesSimilarity(Map<String, 
Object> metadataA, Map<St
         if (aList != null && bList != null) {
 
           int lengthA = aList.size();
-          int lengthB = bList.size();
           List<String> newAList = new ArrayList<>(aList);
           List<String> newBList = new ArrayList<>(bList);
           newAList.retainAll(newBList);
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/HybridRecommendation.java
 
b/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/HybridRecommendation.java
index 2e314de..f38f8ed 100644
--- 
a/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/HybridRecommendation.java
+++ 
b/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/HybridRecommendation.java
@@ -183,9 +183,8 @@ protected JsonElement mapToJson(Map<String, Double> 
wordweights, int num) {
     Map<String, Double> sortedMap = new HashMap<>();
     try {
       List<LinkedTerm> links = getRelatedDataFromES(type, input, num);
-      int size = links.size();
-      for (int i = 0; i < size; i++) {
-        termsMap.put(links.get(i).term, links.get(i).weight);
+      for (LinkedTerm link : links) {
+        termsMap.put(link.term, link.weight);
       }
 
       sortedMap = sortMapByValue(termsMap); // terms_map will be empty
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/MetadataOpt.java
 
b/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/MetadataOpt.java
index a7cc5e3..cda8d6f 100644
--- 
a/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/MetadataOpt.java
+++ 
b/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/MetadataOpt.java
@@ -112,9 +112,7 @@ public MetadataOpt(Properties props) {
         String shortName = (String) result.get("Dataset-ShortName");
 
         String filedStr = "";
-        int size = variables.size();
-        for (int i = 0; i < size; i++) {
-          String filed = variables.get(i);
+        for (String filed : variables) {
           Object filedValue = result.get(filed);
 
           if (filedValue != null) {
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/RecomData.java
 
b/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/RecomData.java
index bea0b40..7bdbd0d 100644
--- 
a/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/RecomData.java
+++ 
b/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/RecomData.java
@@ -121,9 +121,8 @@ protected JsonElement mapToJson(Map<String, Double> 
wordweights, int num) {
     Map<String, Double> sortedMap = new HashMap<>();
     try {
       List<LinkedTerm> links = getRelatedDataFromES(type, input, num);
-      int size = links.size();
-      for (int i = 0; i < size; i++) {
-        termsMap.put(links.get(i).term, links.get(i).weight);
+      for (LinkedTerm link : links) {
+        termsMap.put(link.term, link.weight);
       }
 
       sortedMap = sortMapByValue(termsMap); // terms_map will be empty
@@ -136,7 +135,7 @@ protected JsonElement mapToJson(Map<String, Double> 
wordweights, int num) {
 
   public List<LinkedTerm> getRelatedDataFromES(String type, String input, int 
num) {
     SearchRequestBuilder builder = 
es.getClient().prepareSearch(props.getProperty(INDEX_NAME)).setTypes(type).setQuery(QueryBuilders.termQuery("concept_A",
 input)).addSort(WEIGHT, SortOrder.DESC)
-        .setSize(num);
+            .setSize(num);
 
     SearchResponse usrhis = builder.execute().actionGet();
 
diff --git a/core/src/main/java/org/apache/sdap/mudrod/ssearch/Searcher.java 
b/core/src/main/java/org/apache/sdap/mudrod/ssearch/Searcher.java
index 0438267..8791bf4 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ssearch/Searcher.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ssearch/Searcher.java
@@ -27,7 +27,6 @@
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.search.SearchHit;
-import org.elasticsearch.search.sort.SortBuilder;
 import org.elasticsearch.search.sort.SortOrder;
 
 import java.io.Serializable;
@@ -187,7 +186,7 @@ public Double exists(ArrayList<String> strList, String 
query) {
       }
 
       ArrayList<String> longdate = (ArrayList<String>) 
result.get("DatasetCitation-ReleaseDateLong");
-      Date date = new Date(Long.valueOf(longdate.get(0)).longValue());
+      Date date = new Date(Long.valueOf(longdate.get(0)));
       SimpleDateFormat df2 = new SimpleDateFormat("MM/dd/yyyy");
       String dateText = df2.format(date);
 
@@ -261,19 +260,19 @@ public String ssearch(String index, String type, String 
query, String queryOpera
     Gson gson = new Gson();
     List<JsonObject> fileList = new ArrayList<>();
 
-    for (int i = 0; i < li.size(); i++) {
+    for (SResult aLi : li) {
       JsonObject file = new JsonObject();
-      file.addProperty("Short Name", (String) SResult.get(li.get(i), 
"shortName"));
-      file.addProperty("Long Name", (String) SResult.get(li.get(i), 
"longName"));
-      file.addProperty("Topic", (String) SResult.get(li.get(i), "topic"));
-      file.addProperty("Description", (String) SResult.get(li.get(i), 
"description"));
-      file.addProperty("Release Date", (String) SResult.get(li.get(i), 
"relase_date"));
+      file.addProperty("Short Name", (String) SResult.get(aLi, "shortName"));
+      file.addProperty("Long Name", (String) SResult.get(aLi, "longName"));
+      file.addProperty("Topic", (String) SResult.get(aLi, "topic"));
+      file.addProperty("Description", (String) SResult.get(aLi, 
"description"));
+      file.addProperty("Release Date", (String) SResult.get(aLi, 
"relase_date"));
       fileList.add(file);
 
-      file.addProperty("Start/End Date", (String) SResult.get(li.get(i), 
"startDate") + " - " + (String) SResult.get(li.get(i), "endDate"));
-      file.addProperty("Processing Level", (String) SResult.get(li.get(i), 
"processingLevel"));
+      file.addProperty("Start/End Date", (String) SResult.get(aLi, 
"startDate") + " - " + (String) SResult.get(aLi, "endDate"));
+      file.addProperty("Processing Level", (String) SResult.get(aLi, 
"processingLevel"));
 
-      file.addProperty("Sensor", (String) SResult.get(li.get(i), "sensors"));
+      file.addProperty("Sensor", (String) SResult.get(aLi, "sensors"));
     }
     JsonElement fileListElement = gson.toJsonTree(fileList);
 
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/DataGenerator.java 
b/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/DataGenerator.java
index ba3c88e..4e43ec8 100644
--- 
a/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/DataGenerator.java
+++ 
b/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/DataGenerator.java
@@ -20,6 +20,7 @@
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 /**
  * SVMData is a program designed to create appropriate input data for the 
RankSVM
@@ -33,10 +34,10 @@
   private static boolean isMultFiles;
 
   private static String[] myHeader;
-  private static List<List<String>> myMasterList = new 
ArrayList<List<String>>();
+  private static List<List<String>> myMasterList = new ArrayList<>();
 
   // HashMap used for comparing evaluation classes
-  public static final HashMap<String, Integer> map1 = new HashMap<String, 
Integer>();
+  public static final Map<String, Integer> map1 = new HashMap<>();
 
   static {
     map1.put("Excellent", 7);
@@ -82,7 +83,7 @@ public static void parseFile() {
     try {
       String sourceDir = mySourceDir;
 
-      if (isMultFiles == true) // Case where multiple files have to be 
processed
+      if (isMultFiles) // Case where multiple files have to be processed
       {
         // Iterate over files in directory 
         File directory = new File(sourceDir);
@@ -135,7 +136,7 @@ public static void parseFile() {
    * @param arr the parsed contents of the original CSV file
    */
   public static void calculateVec(String[][] arr) {
-    List<List<String>> listofLists = new ArrayList<List<String>>(); // Holds 
calculations 
+    List<List<String>> listofLists = new ArrayList<>(); // Holds calculations 
 
     int rowStart = 1;
     for (int row = rowStart; row < arr.length; row++) // Start at row 1 
because row 0 is heading lol
@@ -144,7 +145,6 @@ public static void calculateVec(String[][] arr) {
         List<String> colList = new ArrayList<String>(); // create vector to 
store all values inside of a column, which is stored inside 2D vector
         for (int col = 0; col < arr[0].length - 1; col++) // Columns go until 
the next to last column
         {
-          //System.out.println(col + " " + arr[row][col]);
           // Extract double value from each cell
           double x1 = Double.parseDouble(arr[row][col]);
           double x2 = Double.parseDouble(arr[row + i][col]);
@@ -209,8 +209,8 @@ public static int compareEvaluation(String eval1, String 
eval2) {
    */
   public static List<List<String>> equalizeList(List<List<String>> rawList) {
     // Create two sets - one containing row index for +1 and the other for -1
-    List<Integer> pos1List = new ArrayList<Integer>();
-    List<Integer> neg1List = new ArrayList<Integer>();
+    List<Integer> pos1List = new ArrayList<>();
+    List<Integer> neg1List = new ArrayList<>();
 
     for (int i = 0; i < rawList.size(); i++) // Iterate through all rows to 
get indexes
     {
@@ -276,9 +276,7 @@ public static int compareEvaluation(String eval1, String 
eval2) {
   public static void storeHead(String[][] arr) {
     myHeader = new String[arr[0].length]; // Reside private variable
 
-    for (int col = 0; col < arr[0].length; col++) {
-      myHeader[col] = arr[0][col];
-    }
+    System.arraycopy(arr[0], 0, myHeader, 0, arr[0].length);
   }
 
   /**
@@ -296,10 +294,9 @@ public static void writeCSVfile(List<List<String>> list) {
       if (!alreadyExists) {
         csvOutput.writeNext(myHeader); // Write the text headers first before 
data
 
-        for (int i = 0; i < list.size(); i++) // Iterate through all rows in 
2D array
-        {
-          String[] temp = new String[list.get(i).size()]; // Convert row array 
list in 2D array to regular string array
-          temp = list.get(i).toArray(temp);
+        for (List<String> aList : list) { // Iterate through all rows in 2D 
array
+          String[] temp = new String[aList.size()]; // Convert row array list 
in 2D array to regular string array
+          temp = aList.toArray(temp);
           csvOutput.writeNext(temp); // Write this array to the file
         }
       }
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/Evaluator.java 
b/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/Evaluator.java
index ad7f159..0efb82f 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/Evaluator.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/Evaluator.java
@@ -28,12 +28,12 @@
    *
    * @param list a list of integer with each integer element indicating
    *             the performance at its position
-   * @param K    the number of elements needed to be included in the 
calculation
+   * @param k    the number of elements needed to be included in the 
calculation
    * @return NDCG score
    */
-  public double getNDCG(int[] list, int K) {
-    double dcg = this.getDCG(list, K);
-    double idcg = this.getIDCG(list, K);
+  public double getNDCG(int[] list, int k) {
+    double dcg = this.getDCG(list, k);
+    double idcg = this.getIDCG(list, k);
     double ndcg = 0.0;
     if (idcg > 0.0) {
       ndcg = dcg / idcg;
@@ -46,22 +46,21 @@ public double getNDCG(int[] list, int K) {
    *
    * @param list a list of integer with each integer element indicating
    *             the performance at its position
-   * @param K    the number of elements needed to be included in the 
calculation
+   * @param k    the number of elements needed to be included in the 
calculation
    * @return precision at K
    */
-  public double getPrecision(int[] list, int K) {
+  public double getPrecision(int[] list, int k) {
     int size = list.length;
-    if (size == 0 || K == 0) {
+    if (size == 0 || k == 0) {
       return 0;
     }
 
-    if (K > size) {
-      K = size;
+    if (k > size) {
+      k = size;
     }
 
-    int rel_doc_num = this.getRelevantDocNum(list, K);
-    double precision = (double) rel_doc_num / (double) K;
-    return precision;
+    int relDocNum = this.getRelevantDocNum(list, k);
+    return (double) relDocNum / (double) k;
   }
 
   /**
@@ -69,26 +68,26 @@ public double getPrecision(int[] list, int K) {
    *
    * @param list a list of integer with each integer element indicating
    *             the performance at its position
-   * @param K    the number of elements needed to be included in the 
calculation
+   * @param k    the number of elements needed to be included in the 
calculation
    * @return the number of relevant element
    */
-  private int getRelevantDocNum(int[] list, int K) {
+  private int getRelevantDocNum(int[] list, int k) {
     int size = list.length;
-    if (size == 0 || K == 0) {
+    if (size == 0 || k == 0) {
       return 0;
     }
 
-    if (K > size) {
-      K = size;
+    if (k > size) {
+      k = size;
     }
 
-    int rel_num = 0;
-    for (int i = 0; i < K; i++) {
+    int relNum = 0;
+    for (int i = 0; i < k; i++) {
       if (list[i] > 3) { // 3 refers to "OK"
-        rel_num++;
+        relNum++;
       }
     }
-    return rel_num;
+    return relNum;
   }
 
   /**
@@ -96,25 +95,25 @@ private int getRelevantDocNum(int[] list, int K) {
    *
    * @param list a list of integer with each integer element indicating
    *             the performance at its position
-   * @param K    the number of elements needed to be included in the 
calculation
+   * @param k    the number of elements needed to be included in the 
calculation
    * @return DCG score
    */
-  private double getDCG(int[] list, int K) {
+  private double getDCG(int[] list, int k) {
     int size = list.length;
-    if (size == 0 || K == 0) {
+    if (size == 0 || k == 0) {
       return 0.0;
     }
 
-    if (K > size) {
-      K = size;
+    if (k > size) {
+      k = size;
     }
 
     double dcg = list[0];
-    for (int i = 1; i < K; i++) {
+    for (int i = 1; i < k; i++) {
       int rel = list[i];
       int pos = i + 1;
-      double rel_log = Math.log(pos) / Math.log(2);
-      dcg += rel / rel_log;
+      double relLog = Math.log(pos) / Math.log(2);
+      dcg += rel / relLog;
     }
     return dcg;
   }
@@ -124,10 +123,10 @@ private double getDCG(int[] list, int K) {
    *
    * @param list a list of integer with each integer element indicating
    *             the performance at its position
-   * @param K    the number of elements needed to be included in the 
calculation
+   * @param k    the number of elements needed to be included in the 
calculation
    * @return IDCG score
    */
-  private double getIDCG(int[] list, int K) {
+  private double getIDCG(int[] list, int k) {
     Comparator<Integer> comparator = new Comparator<Integer>() {
       @Override
       public int compare(Integer o1, Integer o2) {
@@ -135,11 +134,9 @@ public int compare(Integer o1, Integer o2) {
       }
     };
     List<Integer> sortlist = 
IntStream.of(list).boxed().collect(Collectors.toList());
-    ;
     Collections.sort(sortlist, comparator);
     int[] sortedArr = sortlist.stream().mapToInt(i -> i).toArray();
-    double idcg = this.getDCG(sortedArr, K);
-    return idcg;
+    return this.getDCG(sortedArr, k);
   }
 
 }
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/ssearch/structure/SResult.java 
b/core/src/main/java/org/apache/sdap/mudrod/ssearch/structure/SResult.java
index cf94ddb..fce4e34 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ssearch/structure/SResult.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ssearch/structure/SResult.java
@@ -94,8 +94,8 @@ public SResult(String shortName, String longName, String 
topic, String descripti
   }
 
   public SResult(SResult sr) {
-    for (int i = 0; i < rlist.length; i++) {
-      set(this, rlist[i], get(sr, rlist[i]));
+    for (String aRlist : rlist) {
+      set(this, aRlist, get(sr, aRlist));
     }
   }
 
@@ -107,8 +107,8 @@ public SResult(SResult sr) {
    */
   public static String getHeader(String delimiter) {
     String str = "";
-    for (int i = 0; i < rlist.length; i++) {
-      str += rlist[i] + delimiter;
+    for (String aRlist : rlist) {
+      str += aRlist + delimiter;
     }
     str = str + "label" + "\n";
     return "ShortName" + delimiter + "below" + delimiter + str;
@@ -122,8 +122,8 @@ public static String getHeader(String delimiter) {
    */
   public String toString(String delimiter) {
     String str = "";
-    for (int i = 0; i < rlist.length; i++) {
-      double score = get(this, rlist[i]);
+    for (String aRlist : rlist) {
+      double score = get(this, aRlist);
       str += score + delimiter;
     }
     str = str + label + "\n";
diff --git a/core/src/main/java/org/apache/sdap/mudrod/utils/LinkageTriple.java 
b/core/src/main/java/org/apache/sdap/mudrod/utils/LinkageTriple.java
index 3245da6..a574041 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/utils/LinkageTriple.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/utils/LinkageTriple.java
@@ -85,20 +85,19 @@ public static void insertTriples(ESDriver es, 
List<LinkageTriple> triples, Strin
     }
 
     es.createBulkProcessor();
-    int size = triples.size();
-    for (int i = 0; i < size; i++) {
+    for (LinkageTriple triple : triples) {
 
       XContentBuilder jsonBuilder = jsonBuilder().startObject();
       if (bTriple) {
 
-        jsonBuilder.field("concept_A", triples.get(i).keyA);
-        jsonBuilder.field("concept_B", triples.get(i).keyB);
+        jsonBuilder.field("concept_A", triple.keyA);
+        jsonBuilder.field("concept_B", triple.keyB);
 
       } else {
-        jsonBuilder.field("keywords", triples.get(i).keyA + "," + 
triples.get(i).keyB);
+        jsonBuilder.field("keywords", triple.keyA + "," + triple.keyB);
       }
 
-      jsonBuilder.field("weight", 
Double.parseDouble(df.format(triples.get(i).weight)));
+      jsonBuilder.field("weight", 
Double.parseDouble(df.format(triple.weight)));
       jsonBuilder.endObject();
 
       IndexRequest ir = new IndexRequest(index, type).source(jsonBuilder);
@@ -106,10 +105,10 @@ public static void insertTriples(ESDriver es, 
List<LinkageTriple> triples, Strin
 
       if (bTriple && bSymmetry) {
         XContentBuilder symmetryJsonBuilder = jsonBuilder().startObject();
-        symmetryJsonBuilder.field("concept_A", triples.get(i).keyB);
-        symmetryJsonBuilder.field("concept_B", triples.get(i).keyA);
+        symmetryJsonBuilder.field("concept_A", triple.keyB);
+        symmetryJsonBuilder.field("concept_B", triple.keyA);
 
-        symmetryJsonBuilder.field("weight", 
Double.parseDouble(df.format(triples.get(i).weight)));
+        symmetryJsonBuilder.field("weight", 
Double.parseDouble(df.format(triple.weight)));
 
         symmetryJsonBuilder.endObject();
 
@@ -121,14 +120,14 @@ public static void insertTriples(ESDriver es, 
List<LinkageTriple> triples, Strin
   }
 
   public static void addMapping(ESDriver es, String index, String type) {
-    XContentBuilder Mapping;
+    XContentBuilder mapping;
     try {
-      Mapping = 
jsonBuilder().startObject().startObject(type).startObject("properties").startObject("concept_A").field("type",
 "string").field("index", "not_analyzed").endObject()
+      mapping = 
jsonBuilder().startObject().startObject(type).startObject("properties").startObject("concept_A").field("type",
 "string").field("index", "not_analyzed").endObject()
           .startObject("concept_B").field("type", "string").field("index", 
"not_analyzed").endObject()
 
           .endObject().endObject().endObject();
 
-      
es.getClient().admin().indices().preparePutMapping(index).setType(type).setSource(Mapping).execute().actionGet();
+      
es.getClient().admin().indices().preparePutMapping(index).setType(type).setSource(mapping).execute().actionGet();
     } catch (IOException e) {
       e.printStackTrace();
     }
diff --git a/core/src/main/java/org/apache/sdap/mudrod/utils/MatrixUtil.java 
b/core/src/main/java/org/apache/sdap/mudrod/utils/MatrixUtil.java
index 8259ce7..7eef272 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/utils/MatrixUtil.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/utils/MatrixUtil.java
@@ -129,9 +129,8 @@ public static LabeledRowMatrix 
createWordDocMatrix(JavaPairRDD<String, List<Stri
       public Iterator<Tuple2<Tuple2<String, Long>, Double>> 
call(Tuple2<List<String>, Long> docwords) throws Exception {
         List<Tuple2<Tuple2<String, Long>, Double>> pairs = new ArrayList<>();
         List<String> words = docwords._1;
-        int n = words.size();
-        for (int i = 0; i < n; i++) {
-          Tuple2<String, Long> worddoc = new Tuple2<>(words.get(i), 
docwords._2);
+        for (String word : words) {
+          Tuple2<String, Long> worddoc = new Tuple2<>(word, docwords._2);
           pairs.add(new Tuple2<Tuple2<String, Long>, Double>(worddoc, 1.0));
         }
         return pairs.iterator();
@@ -234,9 +233,8 @@ public static LabeledRowMatrix 
createDocWordMatrix(JavaPairRDD<String, List<Stri
       public Iterator<Tuple2<Tuple2<String, String>, Double>> 
call(Tuple2<String, List<String>> docwords) throws Exception {
         List<Tuple2<Tuple2<String, String>, Double>> pairs = new ArrayList<>();
         List<String> words = docwords._2;
-        int n = words.size();
-        for (int i = 0; i < n; i++) {
-          Tuple2<String, String> worddoc = new Tuple2<>(docwords._1, 
words.get(i));
+        for (String word : words) {
+          Tuple2<String, String> worddoc = new Tuple2<>(docwords._1, word);
           pairs.add(new Tuple2<Tuple2<String, String>, Double>(worddoc, 1.0));
         }
         return pairs.iterator();
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/weblog/partition/KGreedyPartitionSolver.java
 
b/core/src/main/java/org/apache/sdap/mudrod/weblog/partition/KGreedyPartitionSolver.java
index 8f4e263..1e25cec 100644
--- 
a/core/src/main/java/org/apache/sdap/mudrod/weblog/partition/KGreedyPartitionSolver.java
+++ 
b/core/src/main/java/org/apache/sdap/mudrod/weblog/partition/KGreedyPartitionSolver.java
@@ -20,22 +20,22 @@ public KGreedyPartitionSolver(boolean bsorted) {
     List<String> months = null;
 
     if (!this.bsorted) {
-      LinkedHashMap sortedMap = this.sortMapByValue(labelNums);
-      lista = new ArrayList(sortedMap.values());
-      months = new ArrayList(sortedMap.keySet());
+      LinkedHashMap<String, Double> sortedMap = (LinkedHashMap<String, 
Double>) this.sortMapByValue(labelNums);
+      lista = new ArrayList<>(sortedMap.values());
+      months = new ArrayList<>(sortedMap.keySet());
     } else {
-      lista = new ArrayList(labelNums.values());
-      months = new ArrayList(labelNums.keySet());
+      lista = new ArrayList<>(labelNums.values());
+      months = new ArrayList<>(labelNums.keySet());
     }
 
     List<List<Double>> parts = new ArrayList<>();
     List<List<String>> splitMonths = new ArrayList<>();
 
     for (int i = 0; i < k; i++) {
-      List<Double> part = new ArrayList();
+      List<Double> part = new ArrayList<>();
       parts.add(part);
 
-      List<String> monthList = new ArrayList();
+      List<String> monthList = new ArrayList<>();
       splitMonths.add(monthList);
     }
 
@@ -47,10 +47,10 @@ public KGreedyPartitionSolver(boolean bsorted) {
       for (int i = 0; i < parts.size(); i++) {
         List<Double> part = parts.get(i);
         if (minimalSum == -1) {
-          minimalSum = Suma(part);
+          minimalSum = suma(part);
           position = i;
-        } else if (Suma(part) < minimalSum) {
-          minimalSum = Suma(part);
+        } else if (suma(part) < minimalSum) {
+          minimalSum = suma(part);
           position = i;
         }
       }
@@ -65,38 +65,29 @@ public KGreedyPartitionSolver(boolean bsorted) {
       j++;
     }
 
-    /*  for(int i=0; i<splitMonths.size(); i++){
-        System.out.println("group:" + i);
-        printStrList(splitMonths.get(i));
-      }
-      
-      for(int i=0; i<parts.size(); i++){
-        print(parts.get(i));
-      }*/
-
-    Map<String, Integer> LabelGroups = new HashMap<String, Integer>();
+    Map<String, Integer> labelGroups = new HashMap<>();
     for (int i = 0; i < splitMonths.size(); i++) {
       List<String> list = splitMonths.get(i);
-      for (int m = 0; m < list.size(); m++) {
-        LabelGroups.put(list.get(m), i);
+      for (String aList : list) {
+        labelGroups.put(aList, i);
       }
     }
 
-    return LabelGroups;
+    return labelGroups;
   }
 
-  public LinkedHashMap<String, Double> sortMapByValue(Map passedMap) {
-    List mapKeys = new ArrayList(passedMap.keySet());
-    List mapValues = new ArrayList(passedMap.values());
+  public Map<String, Double> sortMapByValue(Map<String, Double> passedMap) {
+    List<String> mapKeys = new ArrayList<>(passedMap.keySet());
+    List<Double> mapValues = new ArrayList<>(passedMap.values());
     Collections.sort(mapValues, Collections.reverseOrder());
     Collections.sort(mapKeys, Collections.reverseOrder());
 
-    LinkedHashMap sortedMap = new LinkedHashMap();
+    LinkedHashMap<String, Double> sortedMap = new LinkedHashMap<>();
 
-    Iterator valueIt = mapValues.iterator();
+    Iterator<Double> valueIt = mapValues.iterator();
     while (valueIt.hasNext()) {
       Object val = valueIt.next();
-      Iterator keyIt = mapKeys.iterator();
+      Iterator<String> keyIt = mapKeys.iterator();
 
       while (keyIt.hasNext()) {
         Object key = keyIt.next();
@@ -116,27 +107,12 @@ public KGreedyPartitionSolver(boolean bsorted) {
     return sortedMap;
   }
 
-  private Double Suma(List<Double> part) {
+  private Double suma(List<Double> part) {
     Double ret = 0.0;
-    for (int i = 0; i < part.size(); i++) {
-      ret += part.get(i);
+    for (Double aPart : part) {
+      ret += aPart;
     }
     return ret;
   }
 
-  private void print(List<Double> list) {
-    /*for (int i = 0; i < list.size(); i++) {
-        System.out.print(list.get(i)+",");
-    }*/
-    System.out.print("sum is:" + Suma(list));
-    System.out.println();
-  }
-
-  private void printStrList(List<String> list) {
-    for (int i = 0; i < list.size(); i++) {
-      System.out.print(list.get(i) + ",");
-    }
-    System.out.println();
-  }
-
 }
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/HistoryGenerator.java 
b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/HistoryGenerator.java
index f92d79c..cd41fbe 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/HistoryGenerator.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/HistoryGenerator.java
@@ -113,9 +113,9 @@ public void generateBinaryMatrix() {
 
             ipMap.put(IP.getKey().toString(), 1);
           }
-          for (int i = 0; i < ipList.size(); i++) {
-            if (ipMap.containsKey(ipList.get(i))) {
-              bw.write(ipMap.get(ipList.get(i)) + ",");
+          for (String anIpList : ipList) {
+            if (ipMap.containsKey(anIpList)) {
+              bw.write(ipMap.get(anIpList) + ",");
             } else {
               bw.write("0,");
             }
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/ImportLogFile.java 
b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/ImportLogFile.java
index ca47f01..933b061 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/ImportLogFile.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/ImportLogFile.java
@@ -307,8 +307,8 @@ public void parseSingleLineHTTP(String log, String index, 
String type) {
     if (!crawlerDe.checkKnownCrawler(agent)) {
       boolean tag = false;
       String[] mimeTypes = { ".js", ".css", ".jpg", ".png", ".ico", 
"image_captcha", "autocomplete", ".gif", "/alldata/", "/api/", "get / 
http/1.1", ".jpeg", "/ws/" };
-      for (int i = 0; i < mimeTypes.length; i++) {
-        if (request.contains(mimeTypes[i])) {
+      for (String mimeType : mimeTypes) {
+        if (request.contains(mimeType)) {
           tag = true;
           break;
         }
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/LogAbstract.java 
b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/LogAbstract.java
index 23ddbee..3fcc67f 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/LogAbstract.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/LogAbstract.java
@@ -146,16 +146,12 @@ public Terms getUserTerms(String... type) {
     Map<String, Long> userList = new HashMap<>();
     for (Terms.Bucket user : users.getBuckets()) {
       String ip = (String) user.getKey();
-
       System.out.println(ip);
-
       Histogram agg = user.getAggregations().get("by_day");
       List<? extends Histogram.Bucket> dateList = agg.getBuckets();
-      int size = dateList.size();
-      for (int i = 0; i < size; i++) {
-        Long count = dateList.get(i).getDocCount();
-        String date = dateList.get(i).getKey().toString();
-
+      for (Histogram.Bucket aDateList : dateList) {
+        Long count = aDateList.getDocCount();
+        String date = aDateList.getKey().toString();
         System.out.println(date);
         System.out.println(count);
       }
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/SessionGenerator.java 
b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/SessionGenerator.java
index b1153bf..d884bf9 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/SessionGenerator.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/SessionGenerator.java
@@ -435,7 +435,6 @@ public void combineShortSessions(ESDriver es, String user, 
int timeThres) throws
             }
           }
         }
-        ;
       }
       lastoldID = s.getID();
       lastnewID = s.getNewID();
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/SessionStatistic.java 
b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/SessionStatistic.java
index 6f5c5f7..f084a90 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/SessionStatistic.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/SessionStatistic.java
@@ -82,25 +82,25 @@ public Object execute() {
 
   public void processSession() throws InterruptedException, IOException, 
ExecutionException {
     String processingType = props.getProperty(MudrodConstants.PROCESS_TYPE);
-    if (processingType.equals("sequential")) {
+    if ("sequential".equals(processingType)) {
       processSessionInSequential();
-    } else if (processingType.equals("parallel")) {
+    } else if ("parallel".equals(processingType)) {
       processSessionInParallel();
     }
   }
 
   public void processSessionInSequential() throws IOException, 
InterruptedException, ExecutionException {
     es.createBulkProcessor();
-    Terms Sessions = this.getSessionTerms();
-    int session_count = 0;
-    for (Terms.Bucket entry : Sessions.getBuckets()) {
-      if (entry.getDocCount() >= 3 && !entry.getKey().equals("invalid")) {
+    Terms sessions = this.getSessionTerms();
+    int sessionCount = 0;
+    for (Terms.Bucket entry : sessions.getBuckets()) {
+      if (entry.getDocCount() >= 3 && !"invalid".equals(entry.getKey())) {
         String sessionid = entry.getKey().toString();
         int sessionNum = processSession(es, sessionid);
-        session_count += sessionNum;
+        sessionCount += sessionNum;
       }
     }
-    LOG.info("Final Session count: {}", Integer.toString(session_count));
+    LOG.info("Final Session count: {}", Integer.toString(sessionCount));
     es.destroyBulkProcessor();
   }
 
@@ -138,7 +138,7 @@ public void processSessionInParallel() throws 
InterruptedException, IOException
       public Iterator<Integer> call(Iterator<String> arg0) throws Exception {
         ESDriver tmpES = new ESDriver(props);
         tmpES.createBulkProcessor();
-        List<Integer> sessionNums = new ArrayList<Integer>();
+        List<Integer> sessionNums = new ArrayList<>();
         sessionNums.add(0);
         while (arg0.hasNext()) {
           String s = arg0.next();
@@ -170,17 +170,17 @@ public int processSession(ESDriver es, String sessionId) 
throws IOException, Int
     DateTime start = null;
     DateTime end = null;
     int duration = 0;
-    float request_rate = 0;
+    float requestRate = 0;
 
-    int session_count = 0;
+    int sessionCount = 0;
     Pattern pattern = Pattern.compile("get (.*?) http/*");
 
     StatsAggregationBuilder statsAgg = 
AggregationBuilders.stats("Stats").field("Time");
 
-    BoolQueryBuilder filter_search = new BoolQueryBuilder();
-    filter_search.must(QueryBuilders.termQuery("SessionID", sessionId));
+    BoolQueryBuilder filterSearch = new BoolQueryBuilder();
+    filterSearch.must(QueryBuilders.termQuery("SessionID", sessionId));
 
-    SearchResponse sr = 
es.getClient().prepareSearch(logIndex).setTypes(inputType).setQuery(filter_search).addAggregation(statsAgg).execute().actionGet();
+    SearchResponse sr = 
es.getClient().prepareSearch(logIndex).setTypes(inputType).setQuery(filterSearch).addAggregation(statsAgg).execute().actionGet();
 
     Stats agg = sr.getAggregations().get("Stats");
     min = agg.getMinAsString();
@@ -190,18 +190,24 @@ public int processSession(ESDriver es, String sessionId) 
throws IOException, Int
 
     duration = Seconds.secondsBetween(start, end).getSeconds();
 
-    int searchDataListRequest_count = 0;
-    int searchDataRequest_count = 0;
-    int searchDataListRequest_byKeywords_count = 0;
-    int ftpRequest_count = 0;
-    int keywords_num = 0;
+    int searchDataListRequestCount = 0;
+    int searchDataRequestCount = 0;
+    int searchDataListRequestByKeywordsCount = 0;
+    int ftpRequestCount = 0;
+    int keywordsNum = 0;
 
-    String IP = null;
+    String iP = null;
     String keywords = "";
     String views = "";
     String downloads = "";
 
-    SearchResponse scrollResp = 
es.getClient().prepareSearch(logIndex).setTypes(inputType).setScroll(new 
TimeValue(60000)).setQuery(filter_search).setSize(100).execute().actionGet();
+    SearchResponse scrollResp = es.getClient()
+            .prepareSearch(logIndex)
+            .setTypes(inputType)
+            .setScroll(new TimeValue(60000))
+            .setQuery(filterSearch)
+            .setSize(100)
+            .execute().actionGet();
 
     while (true) {
       for (SearchHit hit : scrollResp.getHits().getHits()) {
@@ -209,7 +215,7 @@ public int processSession(ESDriver es, String sessionId) 
throws IOException, Int
 
         String request = (String) result.get("Request");
         String logType = (String) result.get("LogType");
-        IP = (String) result.get("IP");
+        iP = (String) result.get("IP");
         Matcher matcher = pattern.matcher(request.trim().toLowerCase());
         while (matcher.find()) {
           request = matcher.group(1);
@@ -218,21 +224,21 @@ public int processSession(ESDriver es, String sessionId) 
throws IOException, Int
         String datasetlist = "/datasetlist?";
         String dataset = "/dataset/";
         if (request.contains(datasetlist)) {
-          searchDataListRequest_count++;
+          searchDataListRequestCount++;
 
           RequestUrl requestURL = new RequestUrl();
           String infoStr = requestURL.getSearchInfo(request) + ",";
           String info = es.customAnalyzing(props.getProperty("indexName"), 
infoStr);
 
-          if (!info.equals(",")) {
-            if (keywords.equals("")) {
+          if (!",".equals(info)) {
+            if ("".equals(keywords)) {
               keywords = keywords + info;
             } else {
               String[] items = info.split(",");
               String[] keywordList = keywords.split(",");
-              for (int m = 0; m < items.length; m++) {
-                if (!Arrays.asList(keywordList).contains(items[m])) {
-                  keywords = keywords + items[m] + ",";
+              for (String item : items) {
+                if (!Arrays.asList(keywordList).contains(item)) {
+                  keywords = keywords + item + ",";
                 }
               }
             }
@@ -240,7 +246,7 @@ public int processSession(ESDriver es, String sessionId) 
throws IOException, Int
 
         }
         if (request.startsWith(dataset)) {
-          searchDataRequest_count++;
+          searchDataRequestCount++;
           if (findDataset(request) != null) {
             String view = findDataset(request);
 
@@ -256,19 +262,20 @@ public int processSession(ESDriver es, String sessionId) 
throws IOException, Int
           }
         }
         if ("ftp".equals(logType)) {
-          ftpRequest_count++;
+          ftpRequestCount++;
           String download = "";
           String requestLowercase = request.toLowerCase();
-          if (requestLowercase.endsWith(".jpg") == false && 
requestLowercase.endsWith(".pdf") == false && requestLowercase.endsWith(".txt") 
== false && requestLowercase.endsWith(".gif") == false) {
+          if (!requestLowercase.endsWith(".jpg") && 
+                  !requestLowercase.endsWith(".pdf") && 
+                  !requestLowercase.endsWith(".txt") && 
+                  !requestLowercase.endsWith(".gif")) {
             download = request;
           }
 
           if ("".equals(downloads)) {
             downloads = download;
           } else {
-            if (downloads.contains(download)) {
-
-            } else {
+            if (!downloads.contains(download)) {
               downloads = downloads + "," + download;
             }
           }
@@ -283,25 +290,43 @@ public int processSession(ESDriver es, String sessionId) 
throws IOException, Int
       }
     }
 
-    if (!keywords.equals("")) {
-      keywords_num = keywords.split(",").length;
+    if (!"".equals(keywords)) {
+      keywordsNum = keywords.split(",").length;
     }
 
-    if (searchDataListRequest_count != 0 && searchDataListRequest_count <= 
Integer.parseInt(props.getProperty("searchf")) && searchDataRequest_count != 0 
&& searchDataRequest_count <= Integer
-        .parseInt(props.getProperty("viewf")) && ftpRequest_count <= 
Integer.parseInt(props.getProperty("downloadf"))) {
+    if (searchDataListRequestCount != 0
+            && searchDataListRequestCount <= 
Integer.parseInt(props.getProperty("searchf"))
+            && searchDataRequestCount != 0
+            && searchDataRequestCount <= 
Integer.parseInt(props.getProperty("viewf"))
+            && ftpRequestCount <= 
Integer.parseInt(props.getProperty("downloadf"))) {
       String sessionURL = props.getProperty("SessionPort") + 
props.getProperty("SessionUrl") + "?sessionid=" + sessionId + "&sessionType=" + 
outputType + "&requestType=" + inputType;
-      session_count = 1;
+      sessionCount = 1;
 
       IndexRequest ir = new IndexRequest(logIndex, outputType).source(
-          jsonBuilder().startObject().field("SessionID", 
sessionId).field("SessionURL", sessionURL).field("Duration", 
duration).field("Number of Keywords", keywords_num).field("Time", min)
-              .field("End_time", max).field("searchDataListRequest_count", 
searchDataListRequest_count).field("searchDataListRequest_byKeywords_count", 
searchDataListRequest_byKeywords_count)
-              .field("searchDataRequest_count", 
searchDataRequest_count).field("keywords", es.customAnalyzing(logIndex, 
keywords)).field("views", views).field("downloads", downloads)
-              .field("request_rate", request_rate).field("Comments", 
"").field("Validation", 0).field("Produceby", 0).field("Correlation", 
0).field("IP", IP).endObject());
+              jsonBuilder().startObject()
+              .field("SessionID", sessionId)
+              .field("SessionURL", sessionURL)
+              .field("Duration", duration)
+              .field("Number of Keywords", keywordsNum)
+              .field("Time", min)
+              .field("End_time", max)
+              .field("searchDataListRequest_count", searchDataListRequestCount)
+              .field("searchDataListRequest_byKeywords_count", 
searchDataListRequestByKeywordsCount)
+              .field("searchDataRequest_count", searchDataRequestCount)
+              .field("keywords", es.customAnalyzing(logIndex, keywords))
+              .field("views", views)
+              .field("downloads", downloads)
+              .field("request_rate", requestRate)
+              .field("Comments", "")
+              .field("Validation", 0)
+              .field("Produceby", 0)
+              .field("Correlation", 0)
+              .field("IP", iP).endObject());
 
       es.getBulkProcessor().add(ir);
     }
 
-    return session_count;
+    return sessionCount;
   }
 
   @Override
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/ApacheAccessLog.java
 
b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/ApacheAccessLog.java
index 0127e2d..985b2d7 100644
--- 
a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/ApacheAccessLog.java
+++ 
b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/ApacheAccessLog.java
@@ -31,10 +31,19 @@
  */
 public class ApacheAccessLog extends WebLog implements Serializable {
 
-  // double Bytes;
-  String Response;
-  String Referer;
-  String Browser;
+
+  /**
+   * 
+   */
+  private static final long serialVersionUID = 1L;
+
+  public ApacheAccessLog() {
+    //default constructor
+  }
+
+  String response;
+  String referer;
+  String browser;
 
   @Override
   public double getBytes() {
@@ -42,31 +51,28 @@ public double getBytes() {
   }
 
   public String getBrowser() {
-    return this.Browser;
+    return this.browser;
   }
 
   public String getResponse() {
-    return this.Response;
+    return this.response;
   }
 
   public String getReferer() {
-    return this.Referer;
+    return this.referer;
   }
 
-  public ApacheAccessLog() {
-
-  }
 
   public static String parseFromLogLine(String log) throws IOException, 
ParseException {
 
     String logEntryPattern = "^([\\d.]+) (\\S+) (\\S+) 
\\[([\\w:/]+\\s[+\\-]\\d{4})\\] \"(.+?)\" (\\d{3}) (\\d+|-) \"((?:[^\"]|\")+)\" 
\"([^\"]+)\"";
-    final int NUM_FIELDS = 9;
+    final int numFields = 9;
     Pattern p = Pattern.compile(logEntryPattern);
     Matcher matcher;
 
     String lineJson = "{}";
     matcher = p.matcher(log);
-    if (!matcher.matches() || NUM_FIELDS != matcher.groupCount()) {
+    if (!matcher.matches() || numFields != matcher.groupCount()) {
       return lineJson;
     }
 
@@ -77,7 +83,7 @@ public static String parseFromLogLine(String log) throws 
IOException, ParseExcep
 
     String bytes = matcher.group(7);
 
-    if (bytes.equals("-")) {
+    if ("-".equals(bytes)) {
       bytes = "0";
     }
 
@@ -88,36 +94,29 @@ public static String parseFromLogLine(String log) throws 
IOException, ParseExcep
       return lineJson;
     } else {
 
-      boolean tag = false;
       String[] mimeTypes = { ".js", ".css", ".jpg", ".png", ".ico", 
"image_captcha", "autocomplete", ".gif", "/alldata/", "/api/", "get / 
http/1.1", ".jpeg", "/ws/" };
-      for (int i = 0; i < mimeTypes.length; i++) {
-        if (request.contains(mimeTypes[i])) {
-          tag = true;
+      for (String mimeType : mimeTypes) {
+        if (request.contains(mimeType)) {
           return lineJson;
         }
       }
 
-      if (tag == false) {
-        ApacheAccessLog accesslog = new ApacheAccessLog();
-        accesslog.LogType = "PO.DAAC";
-        accesslog.IP = matcher.group(1);
-        accesslog.Request = matcher.group(5);
-        accesslog.Response = matcher.group(6);
-        accesslog.Bytes = Double.parseDouble(bytes);
-        accesslog.Referer = matcher.group(8);
-        accesslog.Browser = matcher.group(9);
-        SimpleDateFormat df = new 
SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.sss'Z'");
-        accesslog.Time = df.format(date);
-
-        Gson gson = new Gson();
-        lineJson = gson.toJson(accesslog);
-
-        return lineJson;
-      }
-    }
+      ApacheAccessLog accesslog = new ApacheAccessLog();
+      accesslog.LogType = "PO.DAAC";
+      accesslog.IP = matcher.group(1);
+      accesslog.Request = matcher.group(5);
+      accesslog.response = matcher.group(6);
+      accesslog.Bytes = Double.parseDouble(bytes);
+      accesslog.referer = matcher.group(8);
+      accesslog.browser = matcher.group(9);
+      SimpleDateFormat df = new 
SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.sss'Z'");
+      accesslog.Time = df.format(date);
 
-    lineJson = "{}";
-    return lineJson;
+      Gson gson = new Gson();
+      lineJson = gson.toJson(accesslog);
+
+      return lineJson;
+    }
   }
 
   public static boolean checknull(WebLog s) {
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/Session.java 
b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/Session.java
index 15c3d81..f11efc6 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/Session.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/Session.java
@@ -278,7 +278,7 @@ private JsonElement getRequests(String cleanuptype, String 
sessionID) throws Uns
 
     List<RankingTrainData> trainData = new ArrayList<>();
     try {
-      trainData = tree.getRankingTrainData(indexName, sessionID);
+      trainData = tree.getRankingTrainData(indexName);
     } catch (UnsupportedEncodingException e) {
       LOG.error("Error whilst retreiving ranking training data: {}", e);
     }
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionExtractor.java
 
b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionExtractor.java
index ad97bef..6adaf97 100644
--- 
a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionExtractor.java
+++ 
b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionExtractor.java
@@ -95,15 +95,13 @@ public SessionExtractor() {
     List<String> logIndexList = 
es.getIndexListWithPrefix(props.getProperty(MudrodConstants.LOG_INDEX));
 
     List<ClickStream> result = new ArrayList<>();
-    for (int n = 0; n < logIndexList.size(); n++) {
-      String logIndex = logIndexList.get(n);
+    for (String logIndex : logIndexList) {
       List<String> sessionIdList;
       try {
         sessionIdList = this.getSessions(props, es, logIndex);
         Session session = new Session(props, es);
-        int sessionNum = sessionIdList.size();
-        for (int i = 0; i < sessionNum; i++) {
-          String[] sArr = sessionIdList.get(i).split(",");
+        for (String aSessionIdList : sessionIdList) {
+          String[] sArr = aSessionIdList.split(",");
           List<ClickStream> datas = session.getClickStreamList(sArr[1], 
sArr[2], sArr[0]);
           result.addAll(datas);
         }
@@ -122,15 +120,15 @@ public SessionExtractor() {
     LOG.info("Retrieved {}", logIndexList.toString());
 
     List<String> sessionIdList = new ArrayList<>();
-    for (int n = 0; n < logIndexList.size(); n++) {
-      String logIndex = logIndexList.get(n);
+    for (String logIndex : logIndexList) {
       List<String> tmpsessionList = this.getSessions(props, es, logIndex);
       sessionIdList.addAll(tmpsessionList);
     }
 
     JavaRDD<String> sessionRDD = spark.sc.parallelize(sessionIdList, 16);
 
-    JavaRDD<ClickStream> clickStreamRDD = sessionRDD.mapPartitions(new 
FlatMapFunction<Iterator<String>, ClickStream>() {
+    JavaRDD<ClickStream> clickStreamRDD = sessionRDD.mapPartitions(
+            new FlatMapFunction<Iterator<String>, ClickStream>() {
       /**
        *
        */
@@ -382,8 +380,7 @@ public Boolean call(Tuple2<String, Double> arg0) throws 
Exception {
 
     List<String> result = new ArrayList<>();
     List<String> logIndexList = 
es.getIndexListWithPrefix(props.getProperty(MudrodConstants.LOG_INDEX));
-    for (int n = 0; n < logIndexList.size(); n++) {
-      String logIndex = logIndexList.get(n);
+    for (String logIndex : logIndexList) {
       SearchResponse scrollResp = 
es.getClient().prepareSearch(logIndex).setTypes(props.getProperty(MudrodConstants.SESSION_STATS_PREFIX)).setScroll(new
 TimeValue(60000)).setQuery(QueryBuilders.matchAllQuery())
               .setSize(100).execute().actionGet();
       while (true) {
@@ -417,11 +414,9 @@ public Boolean call(Tuple2<String, Double> arg0) throws 
Exception {
 
         String items = splits[1];
         String[] itemArr = items.split(",");
-        int size = itemArr.length;
-        for (int i = 0; i < size; i++) {
-          String item = itemArr[i];
+        for (String item : itemArr) {
           if (!itemList.contains(item))
-            itemList.add(itemArr[i]);
+            itemList.add(item);
         }
 
         return new Tuple2<>(sessionId, itemList);
@@ -463,15 +458,13 @@ public Boolean call(Tuple2<String, Double> arg0) throws 
Exception {
     LOG.info(logIndexList.toString());
 
     List<RankingTrainData> result = new ArrayList<>();
-    for (int n = 0; n < logIndexList.size(); n++) {
-      String logIndex = logIndexList.get(n);
+    for (String logIndex : logIndexList) {
       List<String> sessionIdList;
       try {
         sessionIdList = this.getSessions(props, es, logIndex);
         Session session = new Session(props, es);
-        int sessionNum = sessionIdList.size();
-        for (int i = 0; i < sessionNum; i++) {
-          String[] sArr = sessionIdList.get(i).split(",");
+        for (String aSessionIdList : sessionIdList) {
+          String[] sArr = aSessionIdList.split(",");
           List<RankingTrainData> datas = session.getRankingTrainData(sArr[1], 
sArr[2], sArr[0]);
           result.addAll(datas);
         }
@@ -490,15 +483,15 @@ public Boolean call(Tuple2<String, Double> arg0) throws 
Exception {
     LOG.info(logIndexList.toString());
 
     List<String> sessionIdList = new ArrayList<>();
-    for (int n = 0; n < logIndexList.size(); n++) {
-      String logIndex = logIndexList.get(n);
+    for (String logIndex : logIndexList) {
       List<String> tmpsessionList = this.getSessions(props, es, logIndex);
       sessionIdList.addAll(tmpsessionList);
     }
 
     JavaRDD<String> sessionRDD = spark.sc.parallelize(sessionIdList, 16);
 
-    JavaRDD<RankingTrainData> clickStreamRDD = sessionRDD.mapPartitions(new 
FlatMapFunction<Iterator<String>, RankingTrainData>() {
+    JavaRDD<RankingTrainData> clickStreamRDD = sessionRDD.mapPartitions(
+            new FlatMapFunction<Iterator<String>, RankingTrainData>() {
       /**
        *
        */
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionTree.java 
b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionTree.java
index db0d372..ac547dc 100644
--- 
a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionTree.java
+++ 
b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionTree.java
@@ -23,7 +23,11 @@
 import org.slf4j.LoggerFactory;
 
 import java.io.UnsupportedEncodingException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
 import java.util.concurrent.ExecutionException;
 
 /**
@@ -93,14 +97,16 @@ public SessionTree(Properties props, ESDriver es, String 
sessionID, String clean
    */
   public SessionNode insert(SessionNode node) {
     // begin with datasetlist
-    if (node.getKey().equals("datasetlist")) {
+    if ("datasetlist".equals(node.getKey())) {
       this.binsert = true;
     }
     if (!this.binsert) {
       return null;
     }
     // remove unrelated node
-    if (!node.getKey().equals("datasetlist") && 
!node.getKey().equals("dataset") && !node.getKey().equals("ftp")) {
+    if (!"datasetlist".equals(node.getKey()) &&
+            !"dataset".equals(node.getKey()) &&
+            !"ftp".equals(node.getKey())) {
       return null;
     }
     // remove dumplicated click
@@ -186,9 +192,7 @@ public JsonObject treeToJson(SessionNode node) {
 
     List<ClickStream> clickthroughs = new ArrayList<>();
     List<SessionNode> viewnodes = this.getViewNodes(this.root);
-    for (int i = 0; i < viewnodes.size(); i++) {
-
-      SessionNode viewnode = viewnodes.get(i);
+    for (SessionNode viewnode : viewnodes) {
       SessionNode parent = viewnode.getParent();
       List<SessionNode> children = viewnode.getChildren();
 
@@ -207,8 +211,7 @@ public JsonObject treeToJson(SessionNode node) {
 
       String dataset = viewnode.getDatasetId();
       boolean download = false;
-      for (int j = 0; j < children.size(); j++) {
-        SessionNode child = children.get(j);
+      for (SessionNode child : children) {
         if ("ftp".equals(child.getKey())) {
           download = true;
           break;
@@ -218,8 +221,8 @@ public JsonObject treeToJson(SessionNode node) {
       if (viewquery != null && !"".equals(viewquery)) {
         String[] queries = viewquery.trim().split(",");
         if (queries.length > 0) {
-          for (int k = 0; k < queries.length; k++) {
-            ClickStream data = new ClickStream(queries[k], dataset, download);
+          for (String query : queries) {
+            ClickStream data = new ClickStream(query, dataset, download);
             data.setSessionId(this.sessionID);
             data.setType(this.cleanupType);
             clickthroughs.add(data);
@@ -326,8 +329,8 @@ private SessionNode iterChild(SessionNode start, String 
refer) {
    * @return
    */
   private boolean check(List<SessionNode> children, String str) {
-    for (int i = 0; i < children.size(); i++) {
-      if (children.get(i).key.equals(str)) {
+    for (SessionNode aChildren : children) {
+      if (aChildren.key.equals(str)) {
         return true;
       }
     }
@@ -342,8 +345,8 @@ private boolean check(List<SessionNode> children, String 
str) {
    * @return
    */
   private boolean insertHelperChildren(SessionNode entry, List<SessionNode> 
children) {
-    for (int i = 0; i < children.size(); i++) {
-      boolean result = insertHelper(entry, children.get(i));
+    for (SessionNode aChildren : children) {
+      boolean result = insertHelper(entry, aChildren);
       if (result) {
         return result;
       }
@@ -447,30 +450,26 @@ private boolean insertHelper(SessionNode entry, 
SessionNode node) {
    * Obtain the ranking training data.
    *
    * @param indexName   the index from whcih to obtain the data
-   * @param sessionID   a valid session identifier
    * @return {@link ClickStream}
    * @throws UnsupportedEncodingException if there is an error whilst
    *                                      processing the ranking training data.
    */
-  public List<RankingTrainData> getRankingTrainData(String indexName, String 
sessionID) throws UnsupportedEncodingException {
+  public List<RankingTrainData> getRankingTrainData(String indexName) throws 
UnsupportedEncodingException {
 
     List<RankingTrainData> trainDatas = new ArrayList<>();
 
     List<SessionNode> queryNodes = this.getQueryNodes(this.root);
-    for (int i = 0; i < queryNodes.size(); i++) {
-      SessionNode querynode = queryNodes.get(i);
+    for (SessionNode querynode : queryNodes) {
       List<SessionNode> children = querynode.getChildren();
 
       LinkedHashMap<String, Boolean> datasetOpt = new LinkedHashMap<>();
       int ndownload = 0;
-      for (int j = 0; j < children.size(); j++) {
-        SessionNode node = children.get(j);
+      for (SessionNode node : children) {
         if ("dataset".equals(node.getKey())) {
           Boolean bDownload = false;
           List<SessionNode> nodeChildren = node.getChildren();
-          int childSize = nodeChildren.size();
-          for (int k = 0; k < childSize; k++) {
-            if ("ftp".equals(nodeChildren.get(k).getKey())) {
+          for (SessionNode aNodeChildren : nodeChildren) {
+            if ("ftp".equals(aNodeChildren.getKey())) {
               bDownload = true;
               ndownload += 1;
               break;
@@ -502,9 +501,8 @@ private boolean insertHelper(SessionNode entry, SessionNode 
node) {
               if (!bDownloadB) {
 
                 String[] queries = query.split(",");
-                for (int l = 0; l < queries.length; l++) {
-                  RankingTrainData trainData = new 
RankingTrainData(queries[l], datasetA, datasetB);
-
+                for (String query1 : queries) {
+                  RankingTrainData trainData = new RankingTrainData(query1, 
datasetA, datasetB);
                   trainData.setSessionId(this.sessionID);
                   trainData.setIndex(indexName);
                   trainData.setFilter(filter);
diff --git 
a/service/src/main/java/gov/nasa/jpl/mudrod/services/recommendation/HybridRecomDatasetsResource.java
 
b/service/src/main/java/gov/nasa/jpl/mudrod/services/recommendation/HybridRecomDatasetsResource.java
index efdf0a9..935c7ac 100644
--- 
a/service/src/main/java/gov/nasa/jpl/mudrod/services/recommendation/HybridRecomDatasetsResource.java
+++ 
b/service/src/main/java/gov/nasa/jpl/mudrod/services/recommendation/HybridRecomDatasetsResource.java
@@ -16,7 +16,11 @@
 import com.google.gson.JsonObject;
 
 import javax.servlet.ServletContext;
-import javax.ws.rs.*;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
diff --git 
a/service/src/main/java/gov/nasa/jpl/mudrod/services/recommendation/RecomDatasetsResource.java
 
b/service/src/main/java/gov/nasa/jpl/mudrod/services/recommendation/RecomDatasetsResource.java
index d1628c5..072ba5e 100644
--- 
a/service/src/main/java/gov/nasa/jpl/mudrod/services/recommendation/RecomDatasetsResource.java
+++ 
b/service/src/main/java/gov/nasa/jpl/mudrod/services/recommendation/RecomDatasetsResource.java
@@ -16,7 +16,12 @@
 import com.google.gson.JsonObject;
 
 import javax.servlet.ServletContext;
-import javax.ws.rs.*;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.GET;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
@@ -24,8 +29,6 @@
 import org.apache.sdap.mudrod.main.MudrodEngine;
 import org.apache.sdap.mudrod.recommendation.structure.RecomData;
 
-;
-
 /**
  * A Dataset recommendation resource.
  */
diff --git a/web/META-INF/resources/css/index.css 
b/web/META-INF/resources/css/index.css
index 8512cc0..928839e 100644
--- a/web/META-INF/resources/css/index.css
+++ b/web/META-INF/resources/css/index.css
@@ -56,22 +56,14 @@ a:link, a:visited {
     width: 100%;
     padding-top: 1em;
     background-size: cover;
-    background-repeat: no-repeat;
-    background-position: 0 0;
-    background-image: url('../images/OceanWave.jpg');
-    background-color: transparent;
-    background-attachment: fixed;
+    background: transparent url('../images/OceanWave.jpg') no-repeat fixed 0 0;
 }
 
 .view-area-scroll {
     width: 100%;
     height: 800px;
     background-size: cover;
-    background-repeat: no-repeat;
-    background-position: 0 0;
-    background-image: url('../images/OceanWave.jpg');
-    background-color: transparent;
-    background-attachment: scroll;
+    background: transparent url('../images/OceanWave.jpg') no-repeat scroll 0 
0;
 }
 
 .landing-dialog {


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


> Review code contribution from Sigee 
> ------------------------------------
>
>                 Key: SDAP-18
>                 URL: https://issues.apache.org/jira/browse/SDAP-18
>             Project: Apache Science Data Analytics Platform
>          Issue Type: New Feature
>            Reporter: Lewis John McGibbney
>            Priority: Major
>
> Until right now I did not see the contribution at 
> [https://github.com/aist-oceanworks/mudrod/pull/214]
> We should review this and merge into master mudrod branch



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)

Reply via email to