AMBARI-18227. Add unit tests for Log Search components and refactor them as 
needed - Vol 1. (Miklos Gergely via oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ffcf5328
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ffcf5328
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ffcf5328

Branch: refs/heads/branch-dev-logsearch
Commit: ffcf5328eb177f76dfe3b78d6d236453b6651eec
Parents: a565ead
Author: Miklos Gergely <[email protected]>
Authored: Mon Aug 22 18:32:39 2016 +0200
Committer: oleewere <[email protected]>
Committed: Mon Aug 22 18:32:39 2016 +0200

----------------------------------------------------------------------
 .../ambari-logsearch-portal/pom.xml             |  16 +
 .../org/apache/ambari/logsearch/LogSearch.java  |   9 +-
 .../logsearch/common/LogSearchConstants.java    |  11 -
 .../logsearch/common/LogSearchContext.java      |  62 +++
 .../logsearch/common/LogsearchContextUtil.java  |  60 ---
 .../logsearch/common/ManageStartEndTime.java    |  60 +--
 .../ambari/logsearch/common/MessageEnums.java   |  43 +-
 .../ambari/logsearch/common/RequestContext.java | 143 ------
 .../ambari/logsearch/common/SearchCriteria.java |  76 +--
 .../logsearch/common/UserSessionInfo.java       |  46 --
 .../ambari/logsearch/dao/AuditSolrDao.java      |  53 +--
 .../logsearch/dao/ServiceLogsSolrDao.java       |  31 +-
 .../ambari/logsearch/dao/SolrDaoBase.java       | 462 +++++++------------
 .../ambari/logsearch/dao/UserConfigSolrDao.java |  80 ++--
 .../apache/ambari/logsearch/dao/UserDao.java    | 137 ++----
 .../logsearch/dao/UserInfoAttributes.java       |  28 --
 .../logsearch/graph/GraphDataGenerator.java     | 167 +++----
 .../logsearch/graph/GraphDataGeneratorBase.java | 203 ++------
 .../ambari/logsearch/manager/AuditMgr.java      | 323 ++++---------
 .../ambari/logsearch/manager/LogFileMgr.java    |  71 +--
 .../ambari/logsearch/manager/LogsMgr.java       | 148 +++---
 .../ambari/logsearch/manager/MgrBase.java       |  95 ++--
 .../ambari/logsearch/manager/PublicMgr.java     |   2 +-
 .../ambari/logsearch/manager/SessionMgr.java    |  38 +-
 .../ambari/logsearch/manager/UserConfigMgr.java | 139 ++----
 .../ambari/logsearch/query/QueryGeneration.java | 274 ++++-------
 .../logsearch/query/QueryGenerationBase.java    | 292 +++---------
 .../ambari/logsearch/rest/ServiceLogsREST.java  |  10 +-
 .../context/LogsearchContextHolder.java         |  41 --
 .../context/LogsearchSecurityContext.java       |  55 ---
 .../apache/ambari/logsearch/util/BizUtil.java   |  94 ++--
 .../ambari/logsearch/util/CommonUtil.java       |  49 +-
 .../ambari/logsearch/util/ConfigUtil.java       |  40 +-
 .../apache/ambari/logsearch/util/DateUtil.java  | 141 +-----
 .../apache/ambari/logsearch/util/FileUtil.java  |  56 +--
 .../apache/ambari/logsearch/util/JSONUtil.java  | 136 +-----
 .../util/LogsearchPropertiesConfiguration.java  |  89 ----
 .../ambari/logsearch/util/PropertiesUtil.java   |  16 +-
 .../apache/ambari/logsearch/util/QueryBase.java |  16 +-
 .../ambari/logsearch/util/RESTErrorUtil.java    |   2 +-
 .../apache/ambari/logsearch/util/SolrUtil.java  |  75 +--
 .../ambari/logsearch/util/StringUtil.java       |  33 --
 ...LogsearchSecurityContextFormationFilter.java |  28 +-
 ...rchExternalServerAuthenticationProvider.java |   9 +-
 .../LogsearchFileAuthenticationProvider.java    |  16 +-
 .../LogsearchSimpleAuthenticationProvider.java  |   8 +-
 .../common/LogSearchContextUtilTest.java        |  51 ++
 .../common/ManageStartEndTimeTest.java          |  35 ++
 .../ambari/logsearch/dao/AuditSolrDaoTest.java  |  68 +++
 .../logsearch/dao/ServiceLogsSolrDaoTest.java   |  66 +++
 .../ambari/logsearch/dao/SolrDaoBaseTest.java   | 286 ++++++++++++
 .../logsearch/dao/UserConfigSolrDaoTest.java    | 129 ++++++
 .../ambari/logsearch/dao/UserDaoTest.java       |  58 +++
 .../src/test/resources/HadoopServiceConfig.json |  17 +
 .../src/test/resources/applicationContext.xml   |  53 +++
 .../applicationContext_testManagers.xml         |  53 +++
 .../src/test/resources/logsearch.properties     |  32 ++
 .../src/test/resources/user_pass.json           |   8 +
 58 files changed, 1967 insertions(+), 2872 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ffcf5328/ambari-logsearch/ambari-logsearch-portal/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/pom.xml 
b/ambari-logsearch/ambari-logsearch-portal/pom.xml
index a886b37..8d345d9 100755
--- a/ambari-logsearch/ambari-logsearch-portal/pom.xml
+++ b/ambari-logsearch/ambari-logsearch-portal/pom.xml
@@ -466,6 +466,17 @@
     </profile>
   </profiles>
   <dependencies>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.easymock</groupId>
+      <artifactId>easymock</artifactId>
+      <version>3.4</version>
+      <scope>test</scope>
+    </dependency>
     <!-- Spring dependencies -->
     <dependency>
       <groupId>org.springframework</groupId>
@@ -477,6 +488,11 @@
       <artifactId>spring-context</artifactId>
       <version>${spring.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.springframework</groupId>
+      <artifactId>spring-test</artifactId>
+      <version>${spring.version}</version>
+    </dependency>
     <!-- Spring Security -->
     <dependency>
       <groupId>org.springframework.security</groupId>

http://git-wip-us.apache.org/repos/asf/ambari/blob/ffcf5328/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
----------------------------------------------------------------------
diff --git 
a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
 
b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
index d1fbcde..fcebcea 100644
--- 
a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
+++ 
b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
@@ -24,7 +24,6 @@ import java.net.ServerSocket;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
-import java.util.Timer;
 
 import org.apache.ambari.logsearch.common.ManageStartEndTime;
 import org.apache.ambari.logsearch.solr.metrics.SolrMetricsLoader;
@@ -46,7 +45,6 @@ import org.eclipse.jetty.util.resource.Resource;
 import org.eclipse.jetty.util.ssl.SslContextFactory;
 import org.eclipse.jetty.webapp.WebAppContext;
 
-
 public class LogSearch {
   private static final Logger logger = Logger.getLogger(LogSearch.class);
 
@@ -71,8 +69,7 @@ public class LogSearch {
  
   public static void main(String[] argv) {
     LogSearch logSearch = new LogSearch();
-    Timer timer = new Timer();
-    timer.schedule(new ManageStartEndTime(), 0, 40000);
+    ManageStartEndTime.manage();
     try {
       logSearch.run(argv);
     } catch (Throwable e) {
@@ -149,7 +146,7 @@ public class LogSearch {
   }
 
   private WebAppContext createBaseWebappContext() throws MalformedURLException 
{
-    URI webResourceBase = 
findWebResourceBase(LogSearch.class.getClassLoader());
+    URI webResourceBase = findWebResourceBase();
     WebAppContext context = new WebAppContext();
     context.setBaseResource(Resource.newResource(webResourceBase));
     context.setContextPath(ROOT_CONTEXT);
@@ -168,7 +165,7 @@ public class LogSearch {
     return context;
   }
 
-  private URI findWebResourceBase(ClassLoader classLoader) {
+  private URI findWebResourceBase() {
     URL fileCompleteUrl = Thread.currentThread().getContextClassLoader()
         .getResource(WEB_RESOURCE_FOLDER);
     if (fileCompleteUrl != null) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/ffcf5328/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
----------------------------------------------------------------------
diff --git 
a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
 
b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
index 5dcdac1..142b29b 100644
--- 
a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
+++ 
b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
@@ -33,7 +33,6 @@ public class LogSearchConstants {
 
   // Application Constants
   public static final String HOST = "H";
-  public static final String SERVICE = "S";
   public static final String COMPONENT = "C";
   public static final String SCROLL_TYPE_AFTER = "after";
   public static final String SCROLL_TYPE_BEFORE = "before";
@@ -44,8 +43,6 @@ public class LogSearchConstants {
   public static final String VALUES = "jsons";
   public static final String FILTER_NAME = "filtername";
   public static final String ROW_TYPE = "rowtype";
-  public static final String USER_CONFIG_DASHBOARD = "dashboard";
-  public static final String USER_CONFIG_HISTORY = "history";
   public static final String COMPOSITE_KEY = "composite_filtername-username";
   public static final String SHARE_NAME_LIST = "share_username_list";
 
@@ -73,17 +70,11 @@ public class LogSearchConstants {
   public static final String MINUS_OPERATOR = "-";
   public static final String NO_OPERATOR = "";
 
-
   //operation
   public static final String EXCLUDE_QUERY = "excludeQuery";
   public static final String INCLUDE_QUERY = "includeQuery";
   public static final String COLUMN_QUERY = "columnQuery";
 
-  //URL PARAMS
-  public static final String GLOBAL_START_TIME = "globalStartTime";
-  public static final String GLOBAL_END_TIME = "globalEndTime";
-
-
   // Seprator's
   public static final String I_E_SEPRATOR = "\\|i\\:\\:e\\|";
 
@@ -128,7 +119,5 @@ public class LogSearchConstants {
   public static final String FACET_GROUP = "group";
   public static final String FACET_GROUP_MAIN = "group.main";
   public static final String FACET_GROUP_FIELD = "group.field"; 
-  public static final String FACET_LIMIT = "facet.limit";
   
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ffcf5328/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchContext.java
----------------------------------------------------------------------
diff --git 
a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchContext.java
 
b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchContext.java
new file mode 100644
index 0000000..b4b52b3
--- /dev/null
+++ 
b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchContext.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.common;
+
+import java.io.Serializable;
+
+import org.apache.ambari.logsearch.web.model.User;
+
+public class LogSearchContext implements Serializable {
+  
+  private static final long serialVersionUID = 1L;
+  
+  private User user;
+
+  public User getUser() {
+    return user;
+  }
+
+  public void setUser(User user) {
+    this.user = user;
+  }
+
+  
//------------------------------------------------------------------------------------------------------
+  
+  private static final ThreadLocal<LogSearchContext> contextThreadLocal = new 
ThreadLocal<LogSearchContext>();
+
+  public static LogSearchContext getContext() {
+    return contextThreadLocal.get();
+  }
+
+  public static void setContext(LogSearchContext context) {
+    contextThreadLocal.set(context);
+  }
+
+  public static void resetContext() {
+    contextThreadLocal.remove();
+  }
+
+  public static String getCurrentUsername() {
+    LogSearchContext context = LogSearchContext.getContext();
+    if (context != null && context.getUser() != null) {
+        return context.getUser().getUsername();
+    }
+    return null;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/ffcf5328/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogsearchContextUtil.java
----------------------------------------------------------------------
diff --git 
a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogsearchContextUtil.java
 
b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogsearchContextUtil.java
deleted file mode 100644
index 617f2fd..0000000
--- 
a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogsearchContextUtil.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.common;
-
-import org.apache.ambari.logsearch.security.context.LogsearchContextHolder;
-import org.apache.ambari.logsearch.security.context.LogsearchSecurityContext;
-
-public class LogsearchContextUtil {
-
-  /**
-   * Singleton class
-   */
-  private LogsearchContextUtil() {
-  }
-
-  public static String getCurrentUsername() {
-    LogsearchSecurityContext context = 
LogsearchContextHolder.getSecurityContext();
-    if (context != null) {
-      UserSessionInfo userSession = context.getUserSession();
-      if (userSession != null) {
-        return userSession.getUsername();
-      }
-    }
-    return null;
-  }
-
-  public static UserSessionInfo getCurrentUserSession() {
-    UserSessionInfo userSession = null;
-    LogsearchSecurityContext context = 
LogsearchContextHolder.getSecurityContext();
-    if (context != null) {
-      userSession = context.getUserSession();
-    }
-    return userSession;
-  }
-
-  public static RequestContext getCurrentRequestContext() {
-    LogsearchSecurityContext context = 
LogsearchContextHolder.getSecurityContext();
-    if (context != null) {
-      return context.getRequestContext();
-    }
-    return null;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/ffcf5328/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ManageStartEndTime.java
----------------------------------------------------------------------
diff --git 
a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ManageStartEndTime.java
 
b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ManageStartEndTime.java
index 94b7159..b6aa2d0 100644
--- 
a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ManageStartEndTime.java
+++ 
b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ManageStartEndTime.java
@@ -20,54 +20,36 @@
 package org.apache.ambari.logsearch.common;
 
 import java.util.Date;
-import java.util.GregorianCalendar;
+import java.util.Timer;
 import java.util.TimerTask;
 
-import org.apache.log4j.Logger;
+import org.apache.commons.lang.time.DateUtils;
 
 public class ManageStartEndTime extends TimerTask {
-  static Logger logger = Logger.getLogger(ManageStartEndTime.class);
-
-  public static Date startDate = new Date();
-
-  public static Date endDate = new Date();
-
-  public ManageStartEndTime() {
-    intailizeStartEndTime();
+  private static final int UPDATE_TIME_IN_SECONDS = 40;
+
+  private static Date startDate;
+  private static Date endDate;
+  
+  public static void manage() {
+    Timer timer = new Timer();
+    timer.schedule(new ManageStartEndTime(), 0, UPDATE_TIME_IN_SECONDS * 1000);
+  }
+  
+  private ManageStartEndTime() {
+    endDate = new Date();
+    startDate = DateUtils.addHours(endDate, -1);
   }
 
   @Override
-  public void run() {
-    if (startDate == null){
-      intailizeStartEndTime();
-    }else{
-      adjustStartEndTime();
+  public synchronized void run() {
+    synchronized (ManageStartEndTime.class) {
+      startDate = DateUtils.addSeconds(startDate, UPDATE_TIME_IN_SECONDS);
+      endDate = DateUtils.addHours(startDate, 1);
     }
   }
 
-  private void adjustStartEndTime() {
-    startDate = addSecondsToDate(startDate, 40);
-    endDate = addHoursToDate(startDate, 1);
-  }
-
-  private Date addSecondsToDate(Date date, int i) {
-    GregorianCalendar greorianCalendar = new GregorianCalendar();
-    greorianCalendar.setTime(date);
-    greorianCalendar.add(GregorianCalendar.SECOND, i);
-    return greorianCalendar.getTime();
+  public static synchronized Date[] getStartEndTime() {
+    return new Date[] {startDate, endDate};
   }
-
-  private Date addHoursToDate(Date date, int i) {
-    GregorianCalendar greorianCalendar = new GregorianCalendar();
-    greorianCalendar.setTime(date);
-    greorianCalendar.add(GregorianCalendar.HOUR_OF_DAY, i);
-    return greorianCalendar.getTime();
-  }
-
-  private void intailizeStartEndTime() {
-
-    endDate = new Date();
-    startDate = addHoursToDate(endDate, -1);
-  }
-
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/ffcf5328/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/MessageEnums.java
----------------------------------------------------------------------
diff --git 
a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/MessageEnums.java
 
b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/MessageEnums.java
index 59e698f..786cf99 100644
--- 
a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/MessageEnums.java
+++ 
b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/MessageEnums.java
@@ -23,33 +23,30 @@ import org.apache.ambari.logsearch.view.VMessage;
 public enum MessageEnums {
 
   // Common Errors
-  DATA_NOT_FOUND("logsearch.error.data_not_found", "Data not found"), 
OPER_NOT_ALLOWED_FOR_STATE(
-    "logsearch.error.oper_not_allowed_for_state",
-    "Operation not allowed in current state"), OPER_NOT_ALLOWED_FOR_ENTITY(
-    "logsearch.error.oper_not_allowed_for_state",
-    "Operation not allowed for entity"), OPER_NO_PERMISSION(
-    "logsearch.error.oper_no_permission",
-    "User doesn't have permission to perform this operation"), 
DATA_NOT_UPDATABLE(
-    "logsearch.error.data_not_updatable", "Data not updatable"), 
ERROR_CREATING_OBJECT(
-    "logsearch.error.create_object", "Error creating object"), 
ERROR_DUPLICATE_OBJECT(
-    "logsearch.error.duplicate_object", "Error creating duplicate object"), 
ERROR_SYSTEM(
-    "logsearch.error.system", "System Error. Please try later."),
-    SOLR_ERROR("logsearch.solr.error","Something went wrong, For more details 
check the logs or configuration."),
+  DATA_NOT_FOUND("logsearch.error.data_not_found", "Data not found"),
+  OPER_NOT_ALLOWED_FOR_STATE("logsearch.error.oper_not_allowed_for_state", 
"Operation not allowed in current state"),
+  OPER_NOT_ALLOWED_FOR_ENTITY("logsearch.error.oper_not_allowed_for_state", 
"Operation not allowed for entity"),
+  OPER_NO_PERMISSION("logsearch.error.oper_no_permission", "User doesn't have 
permission to perform this operation"),
+  DATA_NOT_UPDATABLE("logsearch.error.data_not_updatable", "Data not 
updatable"),
+  ERROR_CREATING_OBJECT("logsearch.error.create_object", "Error creating 
object"),
+  ERROR_DUPLICATE_OBJECT("logsearch.error.duplicate_object", "Error creating 
duplicate object"),
+  ERROR_SYSTEM("logsearch.error.system", "System Error. Please try later."),
+  SOLR_ERROR("logsearch.solr.error","Something went wrong, For more details 
check the logs or configuration."),
 
   // Common Validations
-  INVALID_PASSWORD("logsearch.validation.invalid_password", "Invalid 
password"), INVALID_INPUT_DATA(
-    "logsearch.validation.invalid_input_data", "Invalid input data"), 
NO_INPUT_DATA(
-    "logsearch.validation.no_input_data", "Input data is not provided"), 
INPUT_DATA_OUT_OF_BOUND(
-    "logsearch.validation.data_out_of_bound", "Input data if out of bound"), 
NO_NAME(
-    "logsearch.validation.no_name", "Name is not provided"), 
NO_OR_INVALID_COUNTRY_ID(
-    "logsearch.validation.no_country_id", "Valid Country Id was not 
provided"), NO_OR_INVALID_CITY_ID(
-    "logsearch.validation.no_city_id", "Valid City Id was not provided"), 
NO_OR_INVALID_STATE_ID(
-    "logsearch.validation.no_state_id", "Valid State Id was not provided");
+  INVALID_PASSWORD("logsearch.validation.invalid_password", "Invalid 
password"),
+  INVALID_INPUT_DATA("logsearch.validation.invalid_input_data", "Invalid input 
data"),
+  NO_INPUT_DATA("logsearch.validation.no_input_data", "Input data is not 
provided"),
+  INPUT_DATA_OUT_OF_BOUND("logsearch.validation.data_out_of_bound", "Input 
data if out of bound"),
+  NO_NAME("logsearch.validation.no_name", "Name is not provided"),
+  NO_OR_INVALID_COUNTRY_ID("logsearch.validation.no_country_id", "Valid 
Country Id was not provided"),
+  NO_OR_INVALID_CITY_ID("logsearch.validation.no_city_id", "Valid City Id was 
not provided"),
+  NO_OR_INVALID_STATE_ID("logsearch.validation.no_state_id", "Valid State Id 
was not provided");
 
-  String rbKey;
-  String messageDesc;
+  private String rbKey;
+  private String messageDesc;
 
-  MessageEnums(String rbKey, String messageDesc) {
+  private MessageEnums(String rbKey, String messageDesc) {
     this.rbKey = rbKey;
     this.messageDesc = messageDesc;
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/ffcf5328/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/RequestContext.java
----------------------------------------------------------------------
diff --git 
a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/RequestContext.java
 
b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/RequestContext.java
deleted file mode 100644
index 83ed3bd..0000000
--- 
a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/RequestContext.java
+++ /dev/null
@@ -1,143 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.common;
-
-import java.io.Serializable;
-
-public class RequestContext implements Serializable {
-  private static final long serialVersionUID = -7083383106845193385L;
-  String ipAddress = null;
-  String msaCookie = null;
-  String userAgent = null;
-  String requestURL = null;
-  String serverRequestId = null;
-  String clientTimeOffset;
-  boolean isSync = true;
-  long startTime = System.currentTimeMillis();
-
-  /**
-   * @return the ipAddress
-   */
-  public String getIpAddress() {
-    return ipAddress;
-  }
-
-  /**
-   * @param ipAddress the ipAddress to set
-   */
-  public void setIpAddress(String ipAddress) {
-    this.ipAddress = ipAddress;
-  }
-
-  /**
-   * @return the msaCookie
-   */
-  public String getMsaCookie() {
-    return msaCookie;
-  }
-
-  /**
-   * @param msaCookie the msaCookie to set
-   */
-  public void setMsaCookie(String msaCookie) {
-    this.msaCookie = msaCookie;
-  }
-
-  /**
-   * @return the userAgent
-   */
-  public String getUserAgent() {
-    return userAgent;
-  }
-
-  /**
-   * @param userAgent the userAgent to set
-   */
-  public void setUserAgent(String userAgent) {
-    this.userAgent = userAgent;
-  }
-
-  /**
-   * @return the serverRequestId
-   */
-  public String getServerRequestId() {
-    return serverRequestId;
-  }
-
-  /**
-   * @param serverRequestId the serverRequestId to set
-   */
-  public void setServerRequestId(String serverRequestId) {
-    this.serverRequestId = serverRequestId;
-  }
-
-  /**
-   * @return the isSync
-   */
-  public boolean isSync() {
-    return isSync;
-  }
-
-  /**
-   * @param isSync the isSync to set
-   */
-  public void setSync(boolean isSync) {
-    this.isSync = isSync;
-  }
-
-  /**
-   * @return the requestURL
-   */
-  public String getRequestURL() {
-    return requestURL;
-  }
-
-  /**
-   * @param requestURL the requestURL to set
-   */
-  public void setRequestURL(String requestURL) {
-    this.requestURL = requestURL;
-  }
-
-  /**
-   * @return the startTime
-   */
-  public long getStartTime() {
-    return startTime;
-  }
-
-  /**
-   * @param startTime the startTime to set
-   */
-  public void setStartTime(long startTime) {
-    this.startTime = startTime;
-  }
-
-  /*
-   * (non-Javadoc)
-   * 
-   * @see java.lang.Object#toString()
-   */
-  @Override
-  public String toString() {
-    return "RequestContext [ipAddress=" + ipAddress + ", msaCookie=" + 
msaCookie + ", userAgent=" + userAgent
-      + ", requestURL=" + requestURL + ",serverRequestId=" + serverRequestId + 
", isSync=" + isSync
-      + ", startTime=" + startTime + "]";
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/ffcf5328/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/SearchCriteria.java
----------------------------------------------------------------------
diff --git 
a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/SearchCriteria.java
 
b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/SearchCriteria.java
index d0facbc..2cf2139 100644
--- 
a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/SearchCriteria.java
+++ 
b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/SearchCriteria.java
@@ -49,44 +49,35 @@ public class SearchCriteria {
 
   public SearchCriteria(HttpServletRequest request) {
     try {
-      if (request.getParameter("startIndex") != null
-        && (!request.getParameter("startIndex").isEmpty())) {
-        this.startIndex = new Integer(
-          request.getParameter("startIndex"));
+      if (request.getParameter("startIndex") != null && 
(!request.getParameter("startIndex").isEmpty())) {
+        this.startIndex = new Integer(request.getParameter("startIndex"));
       }
-      if (request.getParameter("page") != null
-        && (!request.getParameter("page").isEmpty())) {
+      if (request.getParameter("page") != null && 
(!request.getParameter("page").isEmpty())) {
         this.page = new Integer(request.getParameter("page"));
       }
-      if (request.getParameter("pageSize") != null
-        && (!request.getParameter("pageSize").isEmpty())) {
+      if (request.getParameter("pageSize") != null && 
(!request.getParameter("pageSize").isEmpty())) {
         this.maxRows = new Integer(request.getParameter("pageSize"));
       } else {
-        this.maxRows = PropertiesUtil.getIntProperty("db.maxResults",
-          50);
+        this.maxRows = PropertiesUtil.getIntProperty("db.maxResults", 50);
       }
     } catch (NumberFormatException e) {
       // do nothing
     }
 
     // Sort fields
-    if (request.getParameter("sortBy") != null
-      && (!request.getParameter("sortBy").isEmpty())) {
+    if (request.getParameter("sortBy") != null && 
(!request.getParameter("sortBy").isEmpty())) {
       this.sortBy = "" + request.getParameter("sortBy");
     }
-    if (request.getParameter("sortType") != null
-      && (!request.getParameter("sortType").isEmpty())) {
+    if (request.getParameter("sortType") != null && 
(!request.getParameter("sortType").isEmpty())) {
       this.sortType = "" + request.getParameter("sortType");
     }
 
     // url params
-    if (request.getParameter("start_time") != null
-      && (!request.getParameter("start_time").isEmpty())) {
+    if (request.getParameter("start_time") != null && 
(!request.getParameter("start_time").isEmpty())) {
       this.globalStartTime = "" + request.getParameter("start_time");
       this.urlParamMap.put("globalStartTime", 
request.getParameter("start_time"));
     }
-    if (request.getParameter("end_time") != null
-      && (!request.getParameter("end_time").isEmpty())) {
+    if (request.getParameter("end_time") != null && 
(!request.getParameter("end_time").isEmpty())) {
       this.globalEndTime = "" + request.getParameter("end_time");
       this.urlParamMap.put("globalEndTime", request.getParameter("end_time"));
     }
@@ -164,31 +155,24 @@ public class SearchCriteria {
   public void addRequiredServiceLogsParams(HttpServletRequest request) {
     this.addParam("advanceSearch", 
StringEscapeUtils.unescapeXml(request.getParameter("advanceSearch")));
     this.addParam("q", request.getParameter("q"));
-    this.addParam("treeParams", StringEscapeUtils
-      .unescapeHtml(request.getParameter("treeParams")));
+    this.addParam("treeParams", 
StringEscapeUtils.unescapeHtml(request.getParameter("treeParams")));
     this.addParam("level", request.getParameter("level"));
     this.addParam("gMustNot", request.getParameter("gMustNot"));
     this.addParam("from", request.getParameter("from"));
     this.addParam("to", request.getParameter("to"));
     this.addParam("selectComp", request.getParameter("mustBe"));
     this.addParam("unselectComp", request.getParameter("mustNot"));
-    this.addParam("iMessage", StringEscapeUtils.unescapeXml(request
-      .getParameter("iMessage")));
-    this.addParam("gEMessage", StringEscapeUtils
-      .unescapeXml(request.getParameter("gEMessage")));
-    this
-      .addParam("eMessage", StringEscapeUtils.unescapeXml(request
-        .getParameter("eMessage")));
+    this.addParam("iMessage", 
StringEscapeUtils.unescapeXml(request.getParameter("iMessage")));
+    this.addParam("gEMessage", 
StringEscapeUtils.unescapeXml(request.getParameter("gEMessage")));
+    this.addParam("eMessage", 
StringEscapeUtils.unescapeXml(request.getParameter("eMessage")));
     this.addParam(LogSearchConstants.BUNDLE_ID, 
request.getParameter(LogSearchConstants.BUNDLE_ID));
     this.addParam("host_name", request.getParameter("host_name"));
     this.addParam("component_name", request.getParameter("component_name"));
     this.addParam("file_name", request.getParameter("file_name"));
     this.addParam("startDate", request.getParameter("start_time"));
     this.addParam("endDate", request.getParameter("end_time"));
-    this.addParam("excludeQuery", StringEscapeUtils.unescapeXml(
-      request.getParameter("excludeQuery")));
-    this.addParam("includeQuery", StringEscapeUtils.unescapeXml(
-      request.getParameter("includeQuery")));
+    this.addParam("excludeQuery", 
StringEscapeUtils.unescapeXml(request.getParameter("excludeQuery")));
+    this.addParam("includeQuery", 
StringEscapeUtils.unescapeXml(request.getParameter("includeQuery")));
   }
 
   /**
@@ -196,20 +180,14 @@ public class SearchCriteria {
    */
   public void addRequiredAuditLogsParams(HttpServletRequest request) {
     this.addParam("q", request.getParameter("q"));
-    this.addParam("columnQuery", StringEscapeUtils
-      .unescapeXml(request.getParameter("columnQuery")));
-    this.addParam("iMessage", StringEscapeUtils.unescapeXml(request
-      .getParameter("iMessage")));
-    this.addParam("gEMessage", StringEscapeUtils
-      .unescapeXml(request.getParameter("gEMessage")));
-    this.addParam("eMessage", StringEscapeUtils.unescapeXml(request
-      .getParameter("eMessage")));
+    this.addParam("columnQuery", 
StringEscapeUtils.unescapeXml(request.getParameter("columnQuery")));
+    this.addParam("iMessage", 
StringEscapeUtils.unescapeXml(request.getParameter("iMessage")));
+    this.addParam("gEMessage", 
StringEscapeUtils.unescapeXml(request.getParameter("gEMessage")));
+    this.addParam("eMessage", 
StringEscapeUtils.unescapeXml(request.getParameter("eMessage")));
     this.addParam("includeString", request.getParameter("mustBe"));
     this.addParam("unselectComp", request.getParameter("mustNot"));
-    this.addParam("excludeQuery", StringEscapeUtils.unescapeXml(
-      request.getParameter("excludeQuery")));
-    this.addParam("includeQuery", StringEscapeUtils.unescapeXml(
-      request.getParameter("includeQuery")));
+    this.addParam("excludeQuery", 
StringEscapeUtils.unescapeXml(request.getParameter("excludeQuery")));
+    this.addParam("includeQuery", 
StringEscapeUtils.unescapeXml(request.getParameter("includeQuery")));
     this.addParam("startTime", request.getParameter("from"));
     this.addParam("endTime", request.getParameter("to"));
   }
@@ -220,9 +198,9 @@ public class SearchCriteria {
    */
   public void addParam(String name, Object value) {
     String solrValue = PropertiesUtil.getProperty(name);
-    if (solrValue == null || solrValue.isEmpty()){
+    if (solrValue == null || solrValue.isEmpty()) {
       paramList.put(name, value);
-    }else {
+    } else {
       try {
         String propertyFieldMappings[] = solrValue.split(",");
         HashMap<String, String> propertyFieldValue = new HashMap<String, 
String>();
@@ -253,14 +231,6 @@ public class SearchCriteria {
   }
 
   /**
-   * @param string
-   * @param caId
-   */
-  public Object removeParam(String name) {
-    return paramList.remove(name);
-  }
-
-  /**
    * @return the nullParamList
    */
   public Set<String> getNullParamList() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/ffcf5328/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/UserSessionInfo.java
----------------------------------------------------------------------
diff --git 
a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/UserSessionInfo.java
 
b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/UserSessionInfo.java
deleted file mode 100644
index 78fea31..0000000
--- 
a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/UserSessionInfo.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.common;
-
-import java.io.Serializable;
-
-import org.apache.ambari.logsearch.web.model.User;
-
-public class UserSessionInfo implements Serializable {
-
-  private static final long serialVersionUID = 1L;
-
-  private User user;
-
-  public User getUser() {
-    return user;
-  }
-
-  public void setUser(User user) {
-    this.user = user;
-  }
-
-  public String getUsername() {
-    if (user != null) {
-      return user.getUsername();
-    }
-    return null;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/ffcf5328/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
----------------------------------------------------------------------
diff --git 
a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
 
b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
index 5a56ad7..a6f77e9 100644
--- 
a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
+++ 
b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
@@ -19,13 +19,14 @@
 
 package org.apache.ambari.logsearch.dao;
 
-import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 
 import javax.annotation.PostConstruct;
 
-import org.apache.ambari.logsearch.manager.MgrBase.LOG_TYPE;
+import org.apache.ambari.logsearch.manager.MgrBase.LogType;
 import org.apache.ambari.logsearch.util.PropertiesUtil;
+import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 import org.springframework.stereotype.Component;
 
@@ -35,50 +36,36 @@ public class AuditSolrDao extends SolrDaoBase {
   static private Logger logger = Logger.getLogger(AuditSolrDao.class);
   
   public AuditSolrDao() {
-    super(LOG_TYPE.AUDIT);
+    super(LogType.AUDIT);
   }
 
   @PostConstruct
   public void postConstructor() {
     String solrUrl = 
PropertiesUtil.getProperty("logsearch.solr.audit.logs.url");
     String zkConnectString = 
PropertiesUtil.getProperty("logsearch.solr.audit.logs.zk_connect_string");
-    String collection = PropertiesUtil.getProperty(
-      "logsearch.solr.collection.audit.logs", "audit_logs");
-    String aliasNameIn = PropertiesUtil.getProperty(
-        "logsearch.solr.audit.logs.alias.name", "audit_logs_alias");
-    String rangerAuditCollection = PropertiesUtil.getProperty(
-        "logsearch.ranger.audit.logs.collection.name");
-    String splitInterval = PropertiesUtil.getProperty(
-      "logsearch.audit.logs.split.interval.mins", "none");
-    String configName = PropertiesUtil.getProperty(
-      "logsearch.solr.audit.logs.config.name", "audit_logs");
-    int numberOfShards = PropertiesUtil.getIntProperty(
-      "logsearch.collection.audit.logs.numshards", 1);
-    int replicationFactor = PropertiesUtil.getIntProperty(
-      "logsearch.collection.audit.logs.replication.factor", 1);
+    String collection = 
PropertiesUtil.getProperty("logsearch.solr.collection.audit.logs", 
"audit_logs");
+    String aliasNameIn = 
PropertiesUtil.getProperty("logsearch.solr.audit.logs.alias.name", 
"audit_logs_alias");
+    String rangerAuditCollection = 
PropertiesUtil.getProperty("logsearch.ranger.audit.logs.collection.name");
+    String splitInterval = 
PropertiesUtil.getProperty("logsearch.audit.logs.split.interval.mins", "none");
+    String configName = 
PropertiesUtil.getProperty("logsearch.solr.audit.logs.config.name", 
"audit_logs");
+    int numberOfShards = 
PropertiesUtil.getIntProperty("logsearch.collection.audit.logs.numshards", 1);
+    int replicationFactor = 
PropertiesUtil.getIntProperty("logsearch.collection.audit.logs.replication.factor",
 1);
 
     try {
       connectToSolr(solrUrl, zkConnectString, collection);
-      boolean createAlias = false;
-      if (aliasNameIn != null && rangerAuditCollection != null
-          && rangerAuditCollection.trim().length() > 0) {
-        createAlias = true;
-      }
+      
+      boolean createAlias = (aliasNameIn != null && 
!StringUtils.isBlank(rangerAuditCollection));
       boolean needToPopulateSchemaField = !createAlias;
-      setupCollections(splitInterval, configName, numberOfShards,
-          replicationFactor, needToPopulateSchemaField);
-      if(createAlias) {
-        Collection<String> collectionsIn = new ArrayList<String>();
-        collectionsIn.add(collection);
-        collectionsIn.add(rangerAuditCollection.trim());
+      
+      setupCollections(splitInterval, configName, numberOfShards, 
replicationFactor, needToPopulateSchemaField);
+      
+      if (createAlias) {
+        Collection<String> collectionsIn = Arrays.asList(collection, 
rangerAuditCollection.trim());
         setupAlias(aliasNameIn, collectionsIn);
       }
     } catch (Exception e) {
-      logger.error(
-        "Error while connecting to Solr for audit logs : solrUrl="
-          + solrUrl + ", zkConnectString=" + zkConnectString
-          + ", collection=" + collection, e);
+      logger.error("Error while connecting to Solr for audit logs : solrUrl=" 
+ solrUrl + ", zkConnectString=" +
+          zkConnectString + ", collection=" + collection, e);
     }
   }
-
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/ffcf5328/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java
----------------------------------------------------------------------
diff --git 
a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java
 
b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java
index 5aba6d1..af6d62d 100644
--- 
a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java
+++ 
b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java
@@ -21,7 +21,7 @@ package org.apache.ambari.logsearch.dao;
 
 import javax.annotation.PostConstruct;
 
-import org.apache.ambari.logsearch.manager.MgrBase.LOG_TYPE;
+import org.apache.ambari.logsearch.manager.MgrBase.LogType;
 import org.apache.ambari.logsearch.util.PropertiesUtil;
 import org.apache.log4j.Logger;
 import org.springframework.stereotype.Component;
@@ -29,10 +29,10 @@ import org.springframework.stereotype.Component;
 @Component
 public class ServiceLogsSolrDao extends SolrDaoBase {
 
-  static private Logger logger = Logger.getLogger(ServiceLogsSolrDao.class);
+  private static final Logger logger = 
Logger.getLogger(ServiceLogsSolrDao.class);
   
   public ServiceLogsSolrDao() {
-    super(LOG_TYPE.SERVICE);
+    super(LogType.SERVICE);
   }
 
   @PostConstruct
@@ -40,27 +40,18 @@ public class ServiceLogsSolrDao extends SolrDaoBase {
     logger.info("postConstructor() called.");
     String solrUrl = PropertiesUtil.getProperty("logsearch.solr.url");
     String zkConnectString = 
PropertiesUtil.getProperty("logsearch.solr.zk_connect_string");
-    String collection = 
PropertiesUtil.getProperty("logsearch.solr.collection.service.logs",
-      "hadoop_logs");
-    String splitInterval = PropertiesUtil.getProperty(
-      "logsearch.service.logs.split.interval.mins", "none");
-    String configName = PropertiesUtil.getProperty(
-      "logsearch.solr.service.logs.config.name", "hadoop_logs");
-    int numberOfShards = PropertiesUtil.getIntProperty(
-      "logsearch.collection.service.logs.numshards", 1);
-    int replicationFactor = PropertiesUtil.getIntProperty(
-      "logsearch.collection.service.logs.replication.factor", 1);
+    String collection = 
PropertiesUtil.getProperty("logsearch.solr.collection.service.logs", 
"hadoop_logs");
+    String splitInterval = 
PropertiesUtil.getProperty("logsearch.service.logs.split.interval.mins", 
"none");
+    String configName = 
PropertiesUtil.getProperty("logsearch.solr.service.logs.config.name", 
"hadoop_logs");
+    int numberOfShards = 
PropertiesUtil.getIntProperty("logsearch.collection.service.logs.numshards", 1);
+    int replicationFactor = 
PropertiesUtil.getIntProperty("logsearch.collection.service.logs.replication.factor",
 1);
 
     try {
       connectToSolr(solrUrl, zkConnectString, collection);
-      setupCollections(splitInterval, configName, numberOfShards,
-        replicationFactor,true);
+      setupCollections(splitInterval, configName, numberOfShards, 
replicationFactor, true);
     } catch (Exception e) {
-      logger.error(
-        "error while connecting to Solr for service logs : solrUrl="
-          + solrUrl + ", zkConnectString=" + zkConnectString
-          + ", collection=" + collection, e);
+      logger.error("error while connecting to Solr for service logs : 
solrUrl=" + solrUrl + ", zkConnectString=" +
+          zkConnectString + ", collection=" + collection, e);
     }
   }
-
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/ffcf5328/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
----------------------------------------------------------------------
diff --git 
a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
 
b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
index cda5e26..2129f9e 100644
--- 
a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
+++ 
b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
@@ -24,16 +24,16 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.Iterator;
 import java.util.List;
-import org.apache.ambari.logsearch.common.LogsearchContextUtil;
+
+import org.apache.ambari.logsearch.common.LogSearchContext;
 import org.apache.ambari.logsearch.common.MessageEnums;
-import org.apache.ambari.logsearch.manager.MgrBase.LOG_TYPE;
+import org.apache.ambari.logsearch.manager.MgrBase.LogType;
 import org.apache.ambari.logsearch.util.ConfigUtil;
 import org.apache.ambari.logsearch.util.JSONUtil;
 import org.apache.ambari.logsearch.util.PropertiesUtil;
 import org.apache.ambari.logsearch.util.RESTErrorUtil;
-import org.apache.ambari.logsearch.util.StringUtil;
+import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrQuery;
@@ -58,104 +58,87 @@ import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.util.NamedList;
 import org.springframework.beans.factory.annotation.Autowired;
 
+import com.google.common.annotations.VisibleForTesting;
+
 public abstract class SolrDaoBase {
-  static private Logger logger = Logger.getLogger(SolrDaoBase.class);
+  private static final Logger logger = Logger.getLogger(SolrDaoBase.class);
+  private static final Logger logPerformance = 
Logger.getLogger("org.apache.ambari.logsearch.performance");
   
   public HashMap<String, String> schemaFieldsNameMap = new HashMap<String, 
String>();
   public HashMap<String, String> schemaFieldTypeMap = new HashMap<String, 
String>();
-  
-  private static Logger logPerformance = Logger
-    .getLogger("org.apache.ambari.logsearch.performance");
 
   private static final String ROUTER_FIELD = "_router_field_";
- 
-  protected LOG_TYPE logType;
+  
+  private static final int SETUP_RETRY_SECOND = 30;
+  private static final int SETUP_UPDATE_SECOND = 10*60; //10 min
+  private static final int ALIAS_SETUP_RETRY_SECOND = 30*60;
 
-  @Autowired
-  StringUtil stringUtil;
+  private LogType logType;
 
   @Autowired
-  JSONUtil jsonUtil;
-  
+  protected JSONUtil jsonUtil;
   @Autowired
-  RESTErrorUtil restErrorUtil;
-
-  String collectionName = null;
+  protected RESTErrorUtil restErrorUtil;
+
+  @VisibleForTesting
+  protected String collectionName = null;
+  @VisibleForTesting
+  protected SolrClient solrClient = null;
+  @VisibleForTesting
+  protected CloudSolrClient solrClouldClient = null;
+  @VisibleForTesting
+  protected boolean isZkConnectString = false;
   
-  String aliasName = null;
-  Collection<String> aliasCollectionList = new ArrayList<String>();
-
-  private SolrClient solrClient = null;
-  CloudSolrClient solrClouldClient = null;
-
-  boolean isSolrCloud = true;
-  String solrDetail = "";
-
-  boolean isSolrInitialized = false;
-
-  private boolean setup_status = false;
+  private String solrDetail = "";
 
   private boolean populateFieldsThreadActive = false;
-
-  int SETUP_RETRY_SECOND = 30;
-  int SETUP_UPDATE_SECOND = 10*60; //10 min
-  int ALIAS_SETUP_RETRY_SECOND = 30*60; //30 minutes
   
-  private boolean isZkConnectString=false;//by default its false
-  
-  //set logtype
-  public SolrDaoBase(LOG_TYPE logType) {
+  protected SolrDaoBase(LogType logType) {
     this.logType = logType;
   }
 
-  public SolrClient connectToSolr(String url, String zkConnectString,
-                                  String collection) throws Exception {
+  protected SolrClient connectToSolr(String url, String zkConnectString, 
String collection) throws Exception {
     this.collectionName = collection;
-    solrDetail = "zkConnectString=" + zkConnectString + ", collection=" + 
collection
-      + ", url=" + url;
+    solrDetail = "zkConnectString=" + zkConnectString + ", collection=" + 
collection + ", url=" + url;
 
     logger.info("connectToSolr() " + solrDetail);
-    if (stringUtil.isEmpty(collection)) {
-      throw new Exception("For solr, collection name is mandatory. "
-        + solrDetail);
+    if (StringUtils.isBlank(collection)) {
+      throw new Exception("For solr, collection name is mandatory. " + 
solrDetail);
     }
+    
     setupSecurity();
-    if (!stringUtil.isEmpty(zkConnectString)) {
-      isZkConnectString=true;
+    
+    if (solrClient != null) {
+      return solrClient;
+    }
+      
+    if (!StringUtils.isBlank(zkConnectString)) {
+      isZkConnectString = true;
       solrDetail = "zkConnectString=" + zkConnectString + ", collection=" + 
collection;
-      logger.info("Using zookeepr. " + solrDetail);
+      logger.info("Using zookeeper. " + solrDetail);
       solrClouldClient = new CloudSolrClient(zkConnectString);
       solrClouldClient.setDefaultCollection(collection);
       solrClient = solrClouldClient;
       int waitDurationMS = 3 * 60 * 1000;
       checkSolrStatus(waitDurationMS);
     } else {
-      if (stringUtil.isEmpty(url)) {
-        throw new Exception("Both zkConnectString and URL are empty. 
zkConnectString="
-          + zkConnectString + ", collection=" + collection + ", url="
-          + url);
+      if (StringUtils.isBlank(url)) {
+        throw new Exception("Both zkConnectString and URL are empty. 
zkConnectString=" + zkConnectString + ", " +
+            "collection=" + collection + ", url=" + url);
       }
       solrDetail = "collection=" + collection + ", url=" + url;
       String collectionURL = url + "/" + collection;
       logger.info("Connecting to  solr : " + collectionURL);
       solrClient = new HttpSolrClient(collectionURL);
-
     }
-    // populateSchemaFields(collection);
-    return solrClient;
-  }
-
-  public SolrClient getSolrClient() {
     return solrClient;
   }
-
+  
   /**
    * This will try to get the collections from the Solr. Ping doesn't work if
    * collection is not given
-   *
-   * @param waitDurationMS
    */
-  public boolean checkSolrStatus(int waitDurationMS) {
+  protected boolean checkSolrStatus(int waitDurationMS) {
     boolean status = false;
     try {
       long beginTimeMS = System.currentTimeMillis();
@@ -166,10 +149,7 @@ public abstract class SolrDaoBase {
         try {
           List<String> collectionList = getCollections();
           if (collectionList != null) {
-            logger.info("checkSolrStatus(): Solr getCollections() is success. 
solr="
-              + solrDetail
-              + ", collectionList="
-              + collectionList);
+            logger.info("checkSolrStatus(): Solr getCollections() is success. 
solr=" + solrDetail + ", collectionList=" + collectionList);
             status = true;
             break;
           }
@@ -177,17 +157,12 @@ public abstract class SolrDaoBase {
           logger.error("Error while doing Solr check", ex);
         }
         if (System.currentTimeMillis() - beginTimeMS > waitDurationMS) {
-          logger.error("Solr is not reachable even after "
-            + (System.currentTimeMillis() - beginTimeMS)
-            + " ms. If you are using alias, then you might have to restart 
LogSearch after Solr is up and running. solr="
-            + solrDetail);
+          logger.error("Solr is not reachable even after " + 
(System.currentTimeMillis() - beginTimeMS) + " ms. " +
+              "If you are using alias, then you might have to restart 
LogSearch after Solr is up and running. solr=" + solrDetail);
           break;
         } else {
-          logger.warn("Solr is not not reachable yet. getCollections() attempt 
count="
-            + pingCount
-            + ". Will sleep for "
-            + waitIntervalMS
-            + " ms and try again." + " solr=" + solrDetail);
+          logger.warn("Solr is not not reachable yet. getCollections() attempt 
count=" + pingCount + ". " +
+              "Will sleep for " + waitIntervalMS + " ms and try again." + " 
solr=" + solrDetail);
         }
         Thread.sleep(waitIntervalMS);
 
@@ -198,37 +173,34 @@ public abstract class SolrDaoBase {
     return status;
   }
 
-  public void setupAlias(final String aliasNameIn, final Collection<String> 
collectionListIn ) throws Exception {
-    if( aliasNameIn == null || collectionListIn== null || 
collectionListIn.size() == 0 || solrClouldClient == null) {
-      logger.info("Will not create alias " + aliasNameIn + " for "
-        + (collectionListIn==null?null: collectionListIn.toString()) + ", 
solrCloudClient=" + solrClouldClient);
+  protected void setupAlias(final String aliasNameIn, final Collection<String> 
collectionListIn ) throws Exception {
+    if (aliasNameIn == null || collectionListIn == null || 
collectionListIn.size() == 0 || solrClouldClient == null) {
+      logger.info("Will not create alias " + aliasNameIn + " for " +
+          (collectionListIn == null ? null: collectionListIn.toString()) + ", 
solrCloudClient=" + solrClouldClient);
       return;
     }
-    logger.info("setupAlias " + aliasNameIn + " for " + 
(collectionListIn==null?null: collectionListIn.toString()));
-    aliasName = aliasNameIn;
-    aliasCollectionList = collectionListIn;
-
+    
+    logger.info("setupAlias " + aliasNameIn + " for " + (collectionListIn == 
null ? null: collectionListIn.toString()));
     // Start a background thread to do setup
     Thread setupThread = new Thread("setup_alias_" + aliasNameIn) {
       @Override
       public void run() {
-        logger.info("Started monitoring thread to check availability of Solr 
server. alias="
-            + aliasNameIn + ", collections=" + collectionListIn.toString());
+        logger.info("Started monitoring thread to check availability of Solr 
server. alias=" + aliasNameIn +
+            ", collections=" + collectionListIn.toString());
         int retryCount = 0;
         while (true) {
           try {
-            int count = createAlias(aliasNameIn,collectionListIn);
+            int count = createAlias(aliasNameIn, collectionListIn);
             if (count > 0) {
               solrClouldClient.setDefaultCollection(aliasNameIn);
-              if( count == collectionListIn.size()) {
-                logger.info("Setup for alias " + aliasNameIn
-                    + " is successful. Exiting setup retry thread. 
Collections=" + collectionListIn);
+              if (count == collectionListIn.size()) {
+                logger.info("Setup for alias " + aliasNameIn + " is 
successful. Exiting setup retry thread. " +
+                    "Collections=" + collectionListIn);
                 populateSchemaFields();
                 break;
               }
             } else {
-              logger.warn("Not able to create alias="
-                  + aliasNameIn + ", retryCount=" + retryCount);
+              logger.warn("Not able to create alias=" + aliasNameIn + ", 
retryCount=" + retryCount);
             }
           } catch (Exception e) {
             logger.error("Error setting up alias=" + aliasNameIn, e);
@@ -236,8 +208,7 @@ public abstract class SolrDaoBase {
           try {
             Thread.sleep(ALIAS_SETUP_RETRY_SECOND * 1000);
           } catch (InterruptedException sleepInterrupted) {
-            logger.info("Sleep interrupted while setting up alias "
-                + aliasNameIn);
+            logger.info("Sleep interrupted while setting up alias " + 
aliasNameIn);
             break;
           }
           retryCount++;
@@ -245,93 +216,64 @@ public abstract class SolrDaoBase {
       }
     };
     setupThread.setDaemon(true);
-    setupThread.start();     
+    setupThread.start();
   }
   
-  /**
-   * @param aliasNameIn
-   * @param collectionListIn
-   * @return
-   * @throws IOException 
-   * @throws SolrServerException 
-   */
-  protected int createAlias(String aliasNameIn,
-      Collection<String> collectionListIn) throws SolrServerException, 
IOException {
-    List<String> collections = getCollections();
-    List<String> collectionToAdd = new ArrayList<String>();
-    for (String col : collections) {
-      if( collectionListIn.contains(col)) {
-        collectionToAdd.add(col);
-      }
-    }
+  private int createAlias(String aliasNameIn, Collection<String> 
collectionListIn) throws SolrServerException, IOException {
+    List<String> collectionToAdd = getCollections();
+    collectionToAdd.retainAll(collectionListIn);
+    
     String collectionsCSV = null;
-    if( collectionToAdd.size() > 0 ) {
-      for (String col : collectionToAdd) {
-        if(collectionsCSV == null) {
-          collectionsCSV = col;
-        } else {
-          collectionsCSV = collectionsCSV + ","  + col;
-        }
-      }
+    if (!collectionToAdd.isEmpty()) {
+      collectionsCSV = StringUtils.join(collectionToAdd, ',');
       CollectionAdminRequest.CreateAlias aliasCreateRequest = new 
CollectionAdminRequest.CreateAlias(); 
       aliasCreateRequest.setAliasName(aliasNameIn);
       aliasCreateRequest.setAliasedCollections(collectionsCSV);
       CollectionAdminResponse createResponse = 
aliasCreateRequest.process(solrClouldClient);
       if (createResponse.getStatus() != 0) {
-        logger.error("Error creating alias. alias="
-        + aliasNameIn + ", collectionList=" + collectionsCSV
-        + ", solrDetail=" + solrDetail + ", response="
-        + createResponse);
+        logger.error("Error creating alias. alias=" + aliasNameIn + ", 
collectionList=" + collectionsCSV +
+            ", solrDetail=" + solrDetail + ", response=" + createResponse);
         return 0;
       }
     } 
-    if( collectionToAdd.size() == collectionListIn.size()) {
-      logger.info("Created alias for all collections. alias=" + aliasNameIn + 
", collectionsCSV="
-          + collectionsCSV + ", solrDetail=" + solrDetail);        
+    if (collectionToAdd.size() == collectionListIn.size()) {
+      logger.info("Created alias for all collections. alias=" + aliasNameIn + 
", collectionsCSV=" + collectionsCSV +
+          ", solrDetail=" + solrDetail);
     } else {
-      logger.info("Created alias for " + collectionToAdd.size() + " out of " + 
-          + collectionListIn.size() + " collections. alias=" + aliasNameIn 
-          + ", collectionsCSV=" + collectionsCSV + ", solrDetail=" + 
solrDetail);
+      logger.info("Created alias for " + collectionToAdd.size() + " out of " + 
collectionListIn.size() + " collections. " +
+          "alias=" + aliasNameIn + ", collectionsCSV=" + collectionsCSV + ", 
solrDetail=" + solrDetail);
     }
     return collectionToAdd.size();
   }
 
-  public void setupCollections(final String splitMode, final String configName,
-      final int numberOfShards, final int replicationFactor,boolean 
needToPopulateSchemaField) throws Exception {
+  protected void setupCollections(final String splitInterval, final String 
configName, final int numberOfShards,
+      final int replicationFactor, boolean needToPopulateSchemaField) throws 
Exception {
     if (isZkConnectString) {
-      setup_status = createCollectionsIfNeeded(splitMode, configName,
-          numberOfShards, replicationFactor);
-      logger.info("Setup status for " + collectionName + " is " + 
setup_status);
-      if (!setup_status) {
+      boolean setupStatus = createCollectionsIfNeeded(splitInterval, 
configName, numberOfShards, replicationFactor);
+      logger.info("Setup status for " + collectionName + " is " + setupStatus);
+      if (!setupStatus) {
         // Start a background thread to do setup
         Thread setupThread = new Thread("setup_collection_" + collectionName) {
           @Override
           public void run() {
-            logger
-                .info("Started monitoring thread to check availability of Solr 
server. collection="
-                    + collectionName);
+            logger.info("Started monitoring thread to check availability of 
Solr server. collection=" + collectionName);
             int retryCount = 0;
             while (true) {
               try {
                 Thread.sleep(SETUP_RETRY_SECOND * 1000);
                 retryCount++;
-                setup_status = createCollectionsIfNeeded(splitMode, configName,
-                    numberOfShards, replicationFactor);
-                if (setup_status) {
-                  logger.info("Setup for collection " + collectionName
-                      + " is successful. Exiting setup retry thread");
+                boolean setupStatus = createCollectionsIfNeeded(splitInterval, 
configName, numberOfShards, replicationFactor);
+                if (setupStatus) {
+                  logger.info("Setup for collection " + collectionName + " is 
successful. Exiting setup retry thread");
                   break;
                 }
               } catch (InterruptedException sleepInterrupted) {
-                logger.info("Sleep interrupted while setting up collection "
-                    + collectionName);
+                logger.info("Sleep interrupted while setting up collection " + 
collectionName);
                 break;
               } catch (Exception e) {
-                logger
-                    .error("Error setting up collection=" + collectionName, e);
+                logger.error("Error setting up collection=" + collectionName, 
e);
               }
-              logger.error("Error setting collection. collection="
-                  + collectionName + ", retryCount=" + retryCount);
+              logger.error("Error setting collection. collection=" + 
collectionName + ", retryCount=" + retryCount);
             }
           }
         };
@@ -339,46 +281,39 @@ public abstract class SolrDaoBase {
         setupThread.start();
       }
     }
-    if(needToPopulateSchemaField){
+    
+    if (needToPopulateSchemaField){
       populateSchemaFields();
     }
   }
 
-  public boolean createCollectionsIfNeeded(final String splitMode,
-                                           final String configName, final int 
numberOfShards,
-                                           final int replicationFactor) {
+  private boolean createCollectionsIfNeeded(String splitInterval, String 
configName, int numberOfShards, int replicationFactor) {
     boolean result = false;
     try {
       List<String> allCollectionList = getCollections();
-      if (splitMode.equalsIgnoreCase("none")) {
-        // Just create regular collection
-        result = createCollection(collectionName, configName,
-          numberOfShards, replicationFactor, allCollectionList);
+      if (splitInterval.equalsIgnoreCase("none")) {
+        result = createCollection(configName, numberOfShards, 
replicationFactor, allCollectionList);
       } else {
-        result = setupCollectionsWithImplicitRouting(splitMode,
-          configName, numberOfShards, replicationFactor, allCollectionList);
+        result = setupCollectionsWithImplicitRouting(configName, 
numberOfShards, replicationFactor, allCollectionList);
       }
     } catch (Exception ex) {
-      logger.error("Error creating collection. collectionName="
-        + collectionName, ex);
+      logger.error("Error creating collection. collectionName=" + 
collectionName, ex);
     }
     return result;
   }
 
-  public List<String> getCollections() throws SolrServerException,
+  private List<String> getCollections() throws SolrServerException,
     IOException {
     try {
       CollectionAdminRequest.List colListReq = new 
CollectionAdminRequest.List();
       CollectionAdminResponse response = colListReq.process(solrClient);
       if (response.getStatus() != 0) {
-        logger.error("Error getting collection list from solr.  response="
-          + response);
+        logger.error("Error getting collection list from solr.  response=" + 
response);
         return null;
       }
 
       @SuppressWarnings("unchecked")
-      List<String> allCollectionList = (List<String>) response
-        .getResponse().get("collections");
+      List<String> allCollectionList = (List<String>) 
response.getResponse().get("collections");
       return allCollectionList;
     } catch (SolrException e) {
       logger.error(e);
@@ -386,91 +321,61 @@ public abstract class SolrDaoBase {
     }
   }
 
-  public boolean setupCollectionsWithImplicitRouting(String splitMode,
-                                                     String configName, int 
numberOfShards, int replicationFactor,
+  private boolean setupCollectionsWithImplicitRouting(String configName, int 
numberOfShards, int replicationFactor,
                                                      List<String> 
allCollectionList) throws Exception {
-    logger.info("setupCollectionsWithImplicitRouting(). collectionName="
-      + collectionName + ", numberOfShards=" + numberOfShards);
-    return createCollectionWithImplicitRoute(collectionName, configName,
-      numberOfShards, replicationFactor, allCollectionList);
-  }
-
-  public boolean createCollectionWithImplicitRoute(String colName,
-                                                   String configName, int 
numberOfShards, int replicationFactor,
-                                                   List<String> 
allCollectionList) throws SolrServerException,
-    IOException {
+    logger.info("setupCollectionsWithImplicitRouting(). collectionName=" + 
collectionName + ", numberOfShards=" + numberOfShards);
 
-    // Default is true, because if the collection and shard is already
-    // there, then it will return true
+    // Default is true, because if the collection and shard is already there, 
then it will return true
     boolean returnValue = true;
-    String shardsListStr = "";
+    
     List<String> shardsList = new ArrayList<String>();
     for (int i = 0; i < numberOfShards; i++) {
-      if (i != 0) {
-        shardsListStr += ",";
-      }
-      String shard = "shard" + i;
-      shardsListStr += shard;
-      shardsList.add(shard);
+      shardsList.add("shard" + i);
     }
+    String shardsListStr = StringUtils.join(shardsList, ',');
 
     // Check if collection is already in zookeeper
-    if (!allCollectionList.contains(colName)) {
-      logger.info("Creating collection " + colName + ", shardsList="
-        + shardsList + ", solrDetail=" + solrDetail);
+    if (!allCollectionList.contains(collectionName)) {
+      logger.info("Creating collection " + collectionName + ", shardsList=" + 
shardsList + ", solrDetail=" + solrDetail);
       CollectionAdminRequest.Create collectionCreateRequest = new 
CollectionAdminRequest.Create();
-      collectionCreateRequest.setCollectionName(colName);
+      collectionCreateRequest.setCollectionName(collectionName);
       collectionCreateRequest.setRouterName("implicit");
       collectionCreateRequest.setShards(shardsListStr);
-      collectionCreateRequest.setMaxShardsPerNode(numberOfShards);
+      collectionCreateRequest.setNumShards(numberOfShards);
       collectionCreateRequest.setReplicationFactor(replicationFactor);
       collectionCreateRequest.setConfigName(configName);
       collectionCreateRequest.setRouterField(ROUTER_FIELD);
-      collectionCreateRequest.setMaxShardsPerNode(replicationFactor
-        * numberOfShards);
+      collectionCreateRequest.setMaxShardsPerNode(replicationFactor * 
numberOfShards);
 
-      CollectionAdminResponse createResponse = collectionCreateRequest
-        .process(solrClient);
+      CollectionAdminResponse createResponse = 
collectionCreateRequest.process(solrClient);
       if (createResponse.getStatus() != 0) {
         returnValue = false;
-        logger.error("Error creating collection. collectionName="
-          + colName + ", shardsList=" + shardsList
-          + ", solrDetail=" + solrDetail + ", response="
-          + createResponse);
+        logger.error("Error creating collection. collectionName=" + 
collectionName + ", shardsList=" + shardsList +
+            ", solrDetail=" + solrDetail + ", response=" + createResponse);
       } else {
-        logger.info("Created collection " + colName + ", shardsList="
-          + shardsList + ", solrDetail=" + solrDetail);
+        logger.info("Created collection " + collectionName + ", shardsList=" + 
shardsList + ", solrDetail=" + solrDetail);
       }
     } else {
-      logger.info("Collection "
-        + colName
-        + " is already there. Will check whether it has the required shards");
+      logger.info("Collection " + collectionName + " is already there. Will 
check whether it has the required shards");
       Collection<String> existingShards = getShards();
       for (String shard : shardsList) {
         if (!existingShards.contains(shard)) {
           try {
-            logger.info("Going to add Shard " + shard
-              + " to collection " + collectionName);
+            logger.info("Going to add Shard " + shard + " to collection " + 
collectionName);
             CollectionAdminRequest.CreateShard createShardRequest = new 
CollectionAdminRequest.CreateShard();
             createShardRequest.setCollectionName(collectionName);
             createShardRequest.setShardName(shard);
-            CollectionAdminResponse response = createShardRequest
-              .process(solrClient);
+            CollectionAdminResponse response = 
createShardRequest.process(solrClient);
             if (response.getStatus() != 0) {
-              logger.error("Error creating shard " + shard
-                + " in collection " + collectionName
-                + ", response=" + response
-                + ", solrDetail=" + solrDetail);
+              logger.error("Error creating shard " + shard + " in collection " 
+ collectionName + ", response=" + response +
+                  ", solrDetail=" + solrDetail);
               returnValue = false;
               break;
             } else {
-              logger.info("Successfully created shard " + shard
-                + " in collection " + collectionName);
+              logger.info("Successfully created shard " + shard + " in 
collection " + collectionName);
             }
           } catch (Throwable t) {
-            logger.error("Error creating shard " + shard
-              + " in collection " + collectionName
-              + ", solrDetail=" + solrDetail, t);
+            logger.error("Error creating shard " + shard + " in collection " + 
collectionName + ", solrDetail=" + solrDetail, t);
             returnValue = false;
             break;
           }
@@ -480,7 +385,7 @@ public abstract class SolrDaoBase {
     return returnValue;
   }
 
-  public Collection<String> getShards() {
+  private Collection<String> getShards() {
     Collection<String> list = new HashSet<String>();
 
     if (solrClouldClient == null) {
@@ -489,106 +394,76 @@ public abstract class SolrDaoBase {
     }
 
     ZkStateReader reader = solrClouldClient.getZkStateReader();
-    Collection<Slice> slices = reader.getClusterState().getSlices(
-      collectionName);
-    Iterator<Slice> iter = slices.iterator();
-
-    while (iter.hasNext()) {
-      Slice slice = iter.next();
+    Collection<Slice> slices = 
reader.getClusterState().getSlices(collectionName);
+    for (Slice slice : slices) {
       for (Replica replica : slice.getReplicas()) {
-        logger.info("colName=" + collectionName + ", slice.name="
-          + slice.getName() + ", slice.state=" + slice.getState()
-          + ", replica.core=" + replica.getStr("core")
-          + ", replica.state=" + replica.getStr("state"));
+        logger.info("colName=" + collectionName + ", slice.name=" + 
slice.getName() + ", slice.state=" + slice.getState() +
+            ", replica.core=" + replica.getStr("core") + ", replica.state=" + 
replica.getStr("state"));
         list.add(slice.getName());
       }
     }
     return list;
   }
 
-  public boolean createCollection(String colName, String configName,
-                                  int numberOfShards, int replicationFactor,
-                                  List<String> allCollectionList) throws 
SolrServerException,
-    IOException {
-    // Check if collection is already in zookeeper
-    if (allCollectionList.contains(colName)) {
-      logger.info("Collection " + colName
-        + " is already there. Won't create it");
+  private boolean createCollection(String configName, int numberOfShards, int 
replicationFactor,
+                                  List<String> allCollectionList) throws 
SolrServerException, IOException {
+    if (allCollectionList.contains(collectionName)) {
+      logger.info("Collection " + collectionName + " is already there. Won't 
create it");
       return true;
     }
 
-    logger.info("Creating collection " + colName + ", numberOfShards="
-      + numberOfShards + ", replicationFactor=" + replicationFactor
-      + ", solrDetail=" + solrDetail);
+    logger.info("Creating collection " + collectionName + ", numberOfShards=" 
+ numberOfShards +
+        ", replicationFactor=" + replicationFactor + ", solrDetail=" + 
solrDetail);
 
     CollectionAdminRequest.Create collectionCreateRequest = new 
CollectionAdminRequest.Create();
-    collectionCreateRequest.setCollectionName(colName);
+    collectionCreateRequest.setCollectionName(collectionName);
     collectionCreateRequest.setNumShards(numberOfShards);
     collectionCreateRequest.setReplicationFactor(replicationFactor);
     collectionCreateRequest.setConfigName(configName);
-    collectionCreateRequest.setMaxShardsPerNode(replicationFactor
-      * numberOfShards);
-    CollectionAdminResponse createResponse = collectionCreateRequest
-      .process(solrClient);
+    collectionCreateRequest.setMaxShardsPerNode(replicationFactor * 
numberOfShards);
+    CollectionAdminResponse createResponse = 
collectionCreateRequest.process(solrClient);
     if (createResponse.getStatus() != 0) {
-      logger.error("Error creating collection. collectionName=" + colName
-        + ", solrDetail=" + solrDetail + ", response="
-        + createResponse);
+      logger.error("Error creating collection. collectionName=" + 
collectionName + ", solrDetail=" + solrDetail + ", response=" +
+    createResponse);
       return false;
     } else {
-      logger.info("Created collection " + colName + ", numberOfShards="
-        + numberOfShards + ", replicationFactor="
-        + replicationFactor + ", solrDetail=" + solrDetail);
+      logger.info("Created collection " + collectionName + ", numberOfShards=" 
+ numberOfShards +
+          ", replicationFactor=" + replicationFactor + ", solrDetail=" + 
solrDetail);
       return true;
     }
   }
 
-  public QueryResponse process(SolrQuery solrQuery)
-    throws SolrServerException, IOException {
+  public QueryResponse process(SolrQuery solrQuery) throws 
SolrServerException, IOException {
     if (solrClient != null) {
       String event = solrQuery.get("event");
       solrQuery.remove("event");
-      QueryResponse queryResponse = solrClient.query(solrQuery,
-        METHOD.POST);
+      QueryResponse queryResponse = solrClient.query(solrQuery, METHOD.POST);
 
       if (event != null && !"/audit/logs/live/count".equalsIgnoreCase(event)) {
-        logPerformance.info("\n Username :- "
-          + LogsearchContextUtil.getCurrentUsername()
-          + " Event :- " + event + " SolrQuery :- " + solrQuery
-          + "\nQuery Time Execution :- "
-          + queryResponse.getQTime()
-          + " Total Time Elapsed is :- "
-          + queryResponse.getElapsedTime());
+        logPerformance.info("\n Username :- " + 
LogSearchContext.getCurrentUsername() + " Event :- " + event + " SolrQuery :- " 
+
+            solrQuery + "\nQuery Time Execution :- " + 
queryResponse.getQTime() + " Total Time Elapsed is :- " +
+            queryResponse.getElapsedTime());
       }
       return queryResponse;
     } else {
-      throw restErrorUtil.createRESTException(
-          "Solr configuration improper for " + logType.getLabel() +" logs",
+      throw restErrorUtil.createRESTException("Solr configuration improper for 
" + logType.getLabel() +" logs",
           MessageEnums.ERROR_SYSTEM);
     }
   }
 
-  public UpdateResponse addDocs(SolrInputDocument doc)
-    throws SolrServerException, IOException, SolrException {
+  public UpdateResponse addDocs(SolrInputDocument doc) throws 
SolrServerException, IOException, SolrException {
     UpdateResponse updateResoponse = solrClient.add(doc);
-    logPerformance.info("\n Username :- "
-      + LogsearchContextUtil.getCurrentUsername()
-      + " Update Time Execution :- " + updateResoponse.getQTime()
-      + " Total Time Elapsed is :- "
-      + updateResoponse.getElapsedTime());
+    logPerformance.info("\n Username :- " + 
LogSearchContext.getCurrentUsername() +
+        " Update Time Execution :- " + updateResoponse.getQTime() + " Total 
Time Elapsed is :- " + updateResoponse.getElapsedTime());
     solrClient.commit();
     return updateResoponse;
   }
 
-  public UpdateResponse removeDoc(String query) throws SolrServerException,
-    IOException, SolrException {
+  public UpdateResponse removeDoc(String query) throws SolrServerException, 
IOException, SolrException {
     UpdateResponse updateResoponse = solrClient.deleteByQuery(query);
     solrClient.commit();
-    logPerformance.info("\n Username :- "
-      + LogsearchContextUtil.getCurrentUsername()
-      + " Remove Time Execution :- " + updateResoponse.getQTime()
-      + " Total Time Elapsed is :- "
-      + updateResoponse.getElapsedTime());
+    logPerformance.info("\n Username :- " + 
LogSearchContext.getCurrentUsername() +
+        " Remove Time Execution :- " + updateResoponse.getQTime() + " Total 
Time Elapsed is :- " + updateResoponse.getElapsedTime());
     return updateResoponse;
   }
 
@@ -605,14 +480,11 @@ public abstract class SolrDaoBase {
   private void populateSchemaFields() {
     if (!populateFieldsThreadActive) {
       populateFieldsThreadActive = true;
-      logger.info("Creating thread to populated fields for collection="
-          + collectionName);
-      Thread fieldPopulationThread = new Thread("populated_fields_"
-          + collectionName) {
+      logger.info("Creating thread to populated fields for collection=" + 
collectionName);
+      Thread fieldPopulationThread = new Thread("populated_fields_" + 
collectionName) {
         @Override
         public void run() {
-          logger.info("Started thread to get fields for collection="
-              + collectionName);
+          logger.info("Started thread to get fields for collection=" + 
collectionName);
           int retryCount = 0;
           while (true) {
             try {
@@ -620,24 +492,19 @@ public abstract class SolrDaoBase {
               retryCount++;
               boolean _result = _populateSchemaFields();
               if (_result) {
-                logger.info("Populate fields for collection " + collectionName
-                    + " is success, Update it after " + SETUP_UPDATE_SECOND
-                    + " sec");
+                logger.info("Populate fields for collection " + collectionName 
+ " is success, Update it after " +
+                    SETUP_UPDATE_SECOND + " sec");
                 Thread.sleep(SETUP_UPDATE_SECOND * 1000);
               }
             } catch (InterruptedException sleepInterrupted) {
-              logger
-                  .info("Sleep interrupted while populating fields for 
collection "
-                      + collectionName);
+              logger.info("Sleep interrupted while populating fields for 
collection " + collectionName);
               break;
             } catch (Exception ex) {
-              logger.error("Error while populating fields for collection "
-                  + collectionName + ", retryCount=" + retryCount);
+              logger.error("Error while populating fields for collection " + 
collectionName + ", retryCount=" + retryCount);
             }
           }
           populateFieldsThreadActive = false;
-          logger.info("Exiting thread for populating fields. collection="
-              + collectionName);
+          logger.info("Exiting thread for populating fields. collection=" + 
collectionName);
         }
       };
       fieldPopulationThread.setDaemon(true);
@@ -657,16 +524,13 @@ public abstract class SolrDaoBase {
       NamedList<Object> namedList = null;
       try {
         namedList = solrClient.request(request);
-        logger.info("populateSchemaFields() collection="
-          + collectionName + ", fields=" + namedList);
+        logger.info("populateSchemaFields() collection=" + collectionName + ", 
fields=" + namedList);
       } catch (SolrException | SolrServerException | IOException e) {
-        logger.error(
-          "Error occured while popuplating field. collection="
-            + collectionName, e);
+        logger.error("Error occured while popuplating field. collection=" + 
collectionName, e);
       }
+      
       if (namedList != null) {
-        ConfigUtil.extractSchemaFieldsName(namedList.toString(),
-          schemaFieldsNameMap,schemaFieldTypeMap);
+        ConfigUtil.extractSchemaFieldsName(namedList.toString(), 
schemaFieldsNameMap,schemaFieldTypeMap);
         return true;
       }
     }

Reply via email to