oleewere closed pull request #40: AMBARI-24949. Log Search: add Solr node discovery + small cleanups
URL: https://github.com/apache/ambari-logsearch/pull/40
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for the sake of provenance:
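
For context, the settings this patch introduces on the Log Feeder side could be wired together roughly as follows in logfeeder.properties (the property names are taken from the diff below; the host, chroot and interval values are only illustrative):

    # connect to Solr via ZooKeeper and discover the live Solr nodes on startup,
    # then talk to them through a load-balanced HTTP client
    logfeeder.solr.zk_connect_string=localhost:2181/solr
    logfeeder.solr.cloud.client.discover=true
    # poll the Solr-stored filter definitions every 60 seconds instead of the default 30
    logfeeder.configs.filter.solr.monitor.interval=60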


diff --git a/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogFeederConstants.java b/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogFeederConstants.java
index 2a698a6d3b..62c05333a4 100644
--- a/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogFeederConstants.java
+++ b/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogFeederConstants.java
@@ -102,8 +102,11 @@
   public static final String MONITOR_SOLR_FILTER_STORAGE_PROPERTY = "logfeeder.configs.filter.solr.monitor.enabled";
   public static final boolean MONITOR_SOLR_FILTER_STORAGE_DEFAULT = true;
 
+  public static final String MONITOR_SOLR_FILTER_INTERVAL_PROPERTY = "logfeeder.configs.filter.solr.monitor.interval";
+
   public static final String SOLR_ZK_CONNECTION_STRING = "logfeeder.solr.zk_connect_string";
   public static final String SOLR_URLS = "logfeeder.solr.urls";
+  public static final String SOLR_CLOUD_DISCOVER = "logfeeder.solr.cloud.client.discover";
   public static final String SOLR_METADATA_COLLECTION = "logfeeder.solr.metadata.collection";
 
   public static final String CLOUD_STORAGE_MODE = "logfeeder.cloud.storage.mode";
diff --git a/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogFeederSolrClientFactory.java b/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogFeederSolrClientFactory.java
index dec3007374..30481aa0fd 100644
--- a/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogFeederSolrClientFactory.java
+++ b/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogFeederSolrClientFactory.java
@@ -18,12 +18,23 @@
  */
 package org.apache.ambari.logfeeder.common;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.impl.LBHttpSolrClient;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.DocCollection;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.ZkStateReader;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Optional;
 
 /**
  * Factory for creating specific Solr clients based on provided configurations (simple / LB or cloud Solr client)
@@ -38,32 +49,132 @@
    * @param zkConnectionString zookeeper connection string, e.g.: localhost1:2181,localhost2:2181/solr
    * @param solrUrls list of solr urls
    * @param collection name of the Solr collection
+   * @param discover use a cloud Solr client to discover Solr nodes, then build an LB client from them
    * @return created client
    */
-  public SolrClient createSolrClient(String zkConnectionString, String[] solrUrls, String collection) {
+  public SolrClient createSolrClient(String zkConnectionString, String[] solrUrls, String collection, boolean discover) {
     logger.info("Creating solr client ...");
     logger.info("Using collection=" + collection);
-    if (solrUrls != null && solrUrls.length > 0) {
+    if (discover && zkConnectionString.length() > 0) {
+      final CloudSolrClient discoverNodesClient = createSolrCloudClient(zkConnectionString, collection);
+      return createLBClientsWithDiscoverNodes(discoverNodesClient, collection);
+    }
+    else if (solrUrls != null && solrUrls.length > 0) {
       logger.info("Using lbHttpSolrClient with urls: {}",
         StringUtils.join(appendTo("/" + collection, solrUrls), ","));
       LBHttpSolrClient.Builder builder = new LBHttpSolrClient.Builder();
       builder.withBaseSolrUrls(solrUrls);
       return builder.build();
     } else {
-      logger.info("Using zookeepr. zkConnectString=" + zkConnectionString);
-      CloudSolrClient.Builder builder = new CloudSolrClient.Builder();
-      builder.withZkHost(zkConnectionString);
-      CloudSolrClient solrClient = builder.build();
-      solrClient.setDefaultCollection(collection);
-      return solrClient;
+      return createSolrCloudClient(zkConnectionString, collection);
+    }
+  }
+
+  @VisibleForTesting
+  ZkConnection createZKConnection(String zkConnectionString) {
+    String[] split = zkConnectionString.split("/", 2);
+    String zkChroot = null;
+    final List<String> zkHosts;
+    if (split.length == 1) {
+      zkHosts = Arrays.asList(split[0].split(","));
+    } else {
+      zkHosts = Arrays.asList(split[0].split(","));
+      zkChroot = ("/" + split[1]).replaceAll("/+", "/");
+      if (zkChroot.endsWith("/")) {
+        zkChroot = zkChroot.substring(0, zkChroot.lastIndexOf("/"));
+      }
     }
+    return new ZkConnection(zkHosts, zkChroot);
   }
 
-  private String[] appendTo(String toAppend, String... appendees) {
+  @VisibleForTesting
+  String[] appendTo(String toAppend, String... appendees) {
     for (int i = 0; i < appendees.length; i++) {
       appendees[i] = appendees[i] + toAppend;
     }
     return appendees;
   }
 
+  private CloudSolrClient createSolrCloudClient(String zkConnectionString, String collection) {
+    logger.info("Using zookeeper. zkConnectString=" + zkConnectionString);
+    final ZkConnection zkConnection = createZKConnection(zkConnectionString);
+    final CloudSolrClient.Builder builder =
+      new CloudSolrClient.Builder(zkConnection.getZkHosts(), Optional.ofNullable(zkConnection.getZkChroot()));
+    CloudSolrClient solrClient = builder.build();
+    solrClient.setDefaultCollection(collection);
+    return solrClient;
+  }
+
+  private LBHttpSolrClient createLBClientsWithDiscoverNodes(CloudSolrClient discoverClient, String collection) {
+    final List<String> baseUrls = waitUntilAvailableBaseUrls(discoverClient, collection);
+    final String[] finalBaseUrls = appendTo("/" + collection, baseUrls.toArray(new String[0]));
+    logger.info("Following URLs will be used for LB Solr client (collection: '{}'): {}", collection, StringUtils.join(finalBaseUrls));
+    return new LBHttpSolrClient.Builder()
+      .withBaseSolrUrls(finalBaseUrls)
+      .build();
+  }
+
+  private List<String> waitUntilAvailableBaseUrls(CloudSolrClient discoverClient, String collection) {
+    final List<String> baseUrls = new ArrayList<>();
+    while (true) {
+      try {
+        ZkStateReader zkStateReader = discoverClient.getZkStateReader();
+        ClusterState clusterState = zkStateReader.getClusterState();
+        if (clusterState != null) {
+          DocCollection docCollection = clusterState.getCollection(collection);
+          if (docCollection != null) {
+            List<Replica> replicas = docCollection.getReplicas();
+            if (replicas != null && !replicas.isEmpty()) {
+              for (Replica replica : replicas) {
+                String baseUrl = replica.getBaseUrl();
+                if (!baseUrls.contains(baseUrl)) {
+                  baseUrls.add(baseUrl);
+                }
+              }
+            }
+          }
+        }
+      } catch (Exception e) {
+        logger.error("Error while getting Solr node data with the discovery Solr cloud client", e);
+      }
+      if (baseUrls.isEmpty()) {
+        logger.info("No base URLs found yet for collection '{}'. Retrying ...", collection);
+        try {
+          Thread.sleep(5000);
+        } catch (InterruptedException e) {
+          logger.info("Discovery solr cloud client was interrupted", e);
+          Thread.currentThread().interrupt();
+          break;
+        }
+      } else {
+        try {
+          logger.info("Closing discovery solr client for '{}' collection", 
collection);
+          discoverClient.close();
+        } catch (IOException e) {
+          logger.error("Error during closing solr cloud client for discovering 
hosts", e);
+        }
+        break;
+      }
+    }
+    return baseUrls;
+  }
+
+  final class ZkConnection {
+    private final List<String> zkHosts;
+    private final String zkChroot;
+
+    ZkConnection(List<String> zkHosts, String zkChroot) {
+      this.zkHosts = zkHosts;
+      this.zkChroot = zkChroot;
+    }
+
+    List<String> getZkHosts() {
+      return zkHosts;
+    }
+
+    String getZkChroot() {
+      return zkChroot;
+    }
+  }
+
 }
diff --git a/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/ApplicationConfig.java b/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/ApplicationConfig.java
index 275ce16f22..97c3f87d0d 100644
--- a/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/ApplicationConfig.java
+++ b/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/ApplicationConfig.java
@@ -136,7 +136,8 @@ public LogSearchConfigLogFeeder logSearchConfigLogFeeder() throws Exception {
   public LogLevelFilterManager logLevelFilterManager() throws Exception {
     if (logFeederProps.isSolrFilterStorage()) {
       SolrClient solrClient = new LogFeederSolrClientFactory().createSolrClient(
-        logFeederProps.getSolrZkConnectString(), logFeederProps.getSolrUrls(), logFeederProps.getSolrMetadataCollection());
+        logFeederProps.getSolrZkConnectString(), logFeederProps.getSolrUrls(), logFeederProps.getSolrMetadataCollection(),
+        logFeederProps.isSolrCloudDiscover());
       return new LogLevelFilterManagerSolr(solrClient);
     } else if (logFeederProps.isUseLocalConfigs() && logFeederProps.isZkFilterStorage()) {
       final HashMap<String, String> map = new HashMap<>();
@@ -156,7 +157,7 @@ public LogLevelFilterUpdater logLevelFilterUpdater() throws Exception {
     if (logFeederProps.isSolrFilterStorage() && logFeederProps.isSolrFilterMonitor()) {
       LogLevelFilterUpdater logLevelFilterUpdater = new LogLevelFilterUpdaterSolr(
         "filter-updater-solr", logLevelFilterHandler(),
-        30, (LogLevelFilterManagerSolr) logLevelFilterManager(), logFeederProps.getClusterName());
+        logFeederProps.getSolrFilterMonitorInterval(), (LogLevelFilterManagerSolr) logLevelFilterManager(), logFeederProps.getClusterName());
       logLevelFilterUpdater.start();
       return logLevelFilterUpdater;
     }
diff --git a/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/LogFeederProps.java b/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/LogFeederProps.java
index 1e6be21711..3be62aee2b 100644
--- a/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/LogFeederProps.java
+++ b/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/conf/LogFeederProps.java
@@ -193,6 +193,16 @@
   @Value("${" + LogFeederConstants.MONITOR_SOLR_FILTER_STORAGE_PROPERTY + ":" 
+ LogFeederConstants.MONITOR_SOLR_FILTER_STORAGE_DEFAULT +"}")
   public boolean solrFilterMonitor;
 
+  @LogSearchPropertyDescription(
+    name = LogFeederConstants.MONITOR_SOLR_FILTER_INTERVAL_PROPERTY,
+    description = "Time interval (in seconds) between monitoring input config 
filter definitions from Solr.",
+    examples = {"60"},
+    defaultValue = "30",
+    sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE}
+  )
+  @Value("${" + LogFeederConstants.MONITOR_SOLR_FILTER_INTERVAL_PROPERTY + ":30}")
+  public Integer solrFilterMonitorInterval;
+
   @LogSearchPropertyDescription(
     name = LogFeederConstants.SOLR_ZK_CONNECTION_STRING,
     description = "Zookeeper connection string for Solr.",
@@ -211,6 +221,16 @@
   @Value("${" + LogFeederConstants.SOLR_URLS + ":}")
   private String solrUrlsStr;
 
+  @LogSearchPropertyDescription(
+    name = LogFeederConstants.SOLR_CLOUD_DISCOVER,
+    description = "On startup, with a Solr Cloud client, the Solr nodes will 
be discovered, then LBHttpClient will be built from that.",
+    examples = {"true"},
+    sources = {LogFeederConstants.LOGFEEDER_PROPERTIES_FILE},
+    defaultValue = "false"
+  )
+  @Value("${" + LogFeederConstants.SOLR_CLOUD_DISCOVER + ":false}")
+  private boolean solrCloudDiscover;
+
   @LogSearchPropertyDescription(
     name = LogFeederConstants.SOLR_METADATA_COLLECTION,
     description = "Metadata collection name that could contain log level 
filters or input configurations.",
@@ -446,6 +466,14 @@ public void setSolrFilterMonitor(boolean solrFilterMonitor) {
     this.solrFilterMonitor = solrFilterMonitor;
   }
 
+  public Integer getSolrFilterMonitorInterval() {
+    return solrFilterMonitorInterval;
+  }
+
+  public void setSolrFilterMonitorInterval(Integer solrFilterMonitorInterval) {
+    this.solrFilterMonitorInterval = solrFilterMonitorInterval;
+  }
+
   public String getSolrUrlsStr() {
     return this.solrUrlsStr;
   }
@@ -566,6 +594,14 @@ public void setSolrMetadataCollection(String solrMetadataCollection) {
     this.solrMetadataCollection = solrMetadataCollection;
   }
 
+  public boolean isSolrCloudDiscover() {
+    return solrCloudDiscover;
+  }
+
+  public void setSolrCloudDiscover(boolean solrCloudDiscover) {
+    this.solrCloudDiscover = solrCloudDiscover;
+  }
+
   public String[] getSolrUrls() {
     if (StringUtils.isNotBlank(this.solrUrlsStr)) {
       return this.solrUrlsStr.split(",");
diff --git a/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java b/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java
index 90d13ef038..8c76e548fb 100644
--- a/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java
+++ b/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java
@@ -111,6 +111,7 @@
   private boolean implicitRouting = false;
   private int lastSlotByMin = -1;
   private boolean skipLogtime = false;
+  private boolean discoverSolrNodes = false;
   private List<String> idFields = new ArrayList<>();
 
   private BlockingQueue<OutputData> outgoingBuffer = null;
@@ -178,6 +179,8 @@ private void initParams(LogFeederProps logFeederProps) throws Exception {
       throw new IllegalStateException("Collection property is mandatory");
     }
 
+    discoverSolrNodes = logFeederProps.isSolrCloudDiscover();
+
     maxBufferSize = getIntValue("flush_size", DEFAULT_MAX_BUFFER_SIZE);
     if (maxBufferSize < 1) {
       logger.warn("maxBufferSize is less than 1. Making it 1");
@@ -219,7 +222,7 @@ private void createSolrWorkers() throws Exception, MalformedURLException {
   }
 
   private SolrClient getSolrClient(int count) throws Exception, MalformedURLException {
-    SolrClient solrClient = new LogFeederSolrClientFactory().createSolrClient(zkConnectString, solrUrls, collection);
+    SolrClient solrClient = new LogFeederSolrClientFactory().createSolrClient(zkConnectString, solrUrls, collection, discoverSolrNodes);
     pingSolr(count, solrClient);
     return solrClient;
   }
diff --git a/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/common/LogFeederSolrClientFactoryTest.java b/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/common/LogFeederSolrClientFactoryTest.java
new file mode 100644
index 0000000000..21de3802f9
--- /dev/null
+++ b/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/common/LogFeederSolrClientFactoryTest.java
@@ -0,0 +1,120 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logfeeder.common;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.Optional;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+public class LogFeederSolrClientFactoryTest {
+
+  private LogFeederSolrClientFactory underTest;
+
+  @Before
+  public void setUp() {
+    underTest = new LogFeederSolrClientFactory();
+  }
+
+  @Test
+  public void testCreateZKConnection() {
+    // GIVEN
+    String input = "localhost:2181/solr";
+    // WHEN
+    LogFeederSolrClientFactory.ZkConnection result = underTest.createZKConnection(input);
+    // THEN
+    assertEquals("/solr", Optional.ofNullable(result.getZkChroot()).get());
+    assertEquals("localhost:2181", result.getZkHosts().get(0));
+  }
+
+  @Test
+  public void testCreateZKConnectionWithoutChroot() {
+    // GIVEN
+    String input = "localhost:2181";
+    // WHEN
+    LogFeederSolrClientFactory.ZkConnection result = underTest.createZKConnection(input);
+    // THEN
+    assertFalse(Optional.ofNullable(result.getZkChroot()).isPresent());
+    assertEquals(input, result.getZkHosts().get(0));
+  }
+
+  @Test
+  public void testCreateZKConnectionWithMultipleHosts() {
+    // GIVEN
+    String input = "localhost1:2181,localhost2:2181";
+    // WHEN
+    LogFeederSolrClientFactory.ZkConnection result = underTest.createZKConnection(input);
+    // THEN
+    assertFalse(Optional.ofNullable(result.getZkChroot()).isPresent());
+    assertEquals(2, result.getZkHosts().size());
+    assertTrue(result.getZkHosts().contains("localhost1:2181"));
+    assertTrue(result.getZkHosts().contains("localhost2:2181"));
+  }
+
+  @Test
+  public void testCreateZKConnectionWithMultipleHostsAndChroot() {
+    // GIVEN
+    String input = "localhost1:2181,localhost2:2181/solr";
+    // WHEN
+    LogFeederSolrClientFactory.ZkConnection result = underTest.createZKConnection(input);
+    // THEN
+    assertEquals("/solr", result.getZkChroot());
+    assertEquals(2, result.getZkHosts().size());
+    assertTrue(result.getZkHosts().contains("localhost1:2181"));
+    assertTrue(result.getZkHosts().contains("localhost2:2181"));
+  }
+
+  @Test
+  public void testCreateZKConnectionInvalidSlashes() {
+    // GIVEN
+    String input = "localhost:2181//solr/";
+    // WHEN
+    LogFeederSolrClientFactory.ZkConnection result = underTest.createZKConnection(input);
+    // THEN
+    assertEquals("/solr", Optional.ofNullable(result.getZkChroot()).get());
+    assertEquals("localhost:2181", result.getZkHosts().get(0));
+  }
+
+  @Test
+  public void testCreateZKConnectionInvalidSlashesMultipleTimes() {
+    // GIVEN
+    String input = "localhost:2181//solr/my//root";
+    // WHEN
+    LogFeederSolrClientFactory.ZkConnection result = underTest.createZKConnection(input);
+    // THEN
+    assertEquals("/solr/my/root", Optional.ofNullable(result.getZkChroot()).get());
+    assertEquals("localhost:2181", result.getZkHosts().get(0));
+  }
+
+  @Test
+  public void testAppendTo() {
+    // GIVEN
+    String toAppend = "/mycollection";
+    String[] appendees = new String[]{"http://solr1:8886", "http://solr2:8886"};
+    // WHEN
+    String[] result = underTest.appendTo(toAppend, appendees);
+    // THEN
+    assertEquals("http://solr1:8886/mycollection";, result[0]);
+    assertEquals("http://solr2:8886/mycollection";, result[1]);
+  }
+}
diff --git a/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchConfigApiConfig.java b/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchConfigApiConfig.java
index 2765ebde9a..3eb9291907 100644
--- a/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchConfigApiConfig.java
+++ b/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/LogSearchConfigApiConfig.java
@@ -50,7 +50,7 @@
   public boolean solrFilterStorage;
 
   @LogSearchPropertyDescription(
-    name = "logsearch.config.api.filter.zk-only.enabled",
+    name = "logsearch.config.api.filter.zk.enabled",
     description = "Use zookeeper as a log level filter storage",
     examples = {"true"},
     defaultValue = "false",
diff --git a/ambari-logsearch-server/src/main/resources/logsearch.properties b/ambari-logsearch-server/src/main/resources/logsearch.properties
index bea61729bb..6dc3c3f10e 100755
--- a/ambari-logsearch-server/src/main/resources/logsearch.properties
+++ b/ambari-logsearch-server/src/main/resources/logsearch.properties
@@ -14,19 +14,17 @@
 # limitations under the License.
 logsearch.solr.zk_connect_string=localhost:2181
 logsearch.solr.collection.service.logs=hadoop_logs
-logsearch.service.logs.split.interval.mins=15
 logsearch.collection.service.logs.numshards=3
 logsearch.collection.service.logs.replication.factor=2
 logsearch.solr.audit.logs.zk_connect_string=localhost:2181
 logsearch.solr.collection.audit.logs=audit_logs
-logsearch.audit.logs.split.interval.mins=15
 logsearch.collection.audit.logs.numshards=2
 logsearch.collection.audit.logs.replication.factor=2
 logsearch.solr.config_set.folder=${LOGSEARCH_SERVER_RELATIVE_LOCATION:}src/main/configsets
 logsearch.solr.audit.logs.config_set.folder=${LOGSEARCH_SERVER_RELATIVE_LOCATION:}src/main/configsets
-logsearch.solr.collection.history=history
-logsearch.solr.history.config.name=history
-logsearch.collection.history.replication.factor=1
+logsearch.solr.collection.metadata=logsearch_metadata
+logsearch.solr.collection.config.name=logsearch_metadata
+logsearch.collection.metadata.replication.factor=1
 logsearch.auth.file.enabled=true
 logsearch.login.credentials.file=user_pass.json
 
diff --git a/docker/test-config/logsearch/logsearch.properties b/docker/test-config/logsearch/logsearch.properties
index aa9d4c48fe..bc161f8e4a 100644
--- a/docker/test-config/logsearch/logsearch.properties
+++ b/docker/test-config/logsearch/logsearch.properties
@@ -22,7 +22,6 @@ logsearch.solr.collection.service.logs=hadoop_logs
 #logsearch.config.api.enabled=false
 #logsearch.config.api.filter.zk.enabled=true
 
-logsearch.service.logs.split.interval.mins=15
 logsearch.collection.service.logs.numshards=3
 logsearch.collection.service.logs.replication.factor=2
 
@@ -31,7 +30,6 @@ logsearch.solr.audit.logs.zk_connect_string=localhost:9983
 logsearch.solr.collection.audit.logs=audit_logs
 logsearch.solr.audit.logs.url=
 
-logsearch.audit.logs.split.interval.mins=15
 logsearch.collection.audit.logs.numshards=3
 logsearch.collection.audit.logs.replication.factor=2
 
@@ -39,9 +37,9 @@ logsearch.solr.config_set.folder=/root/ambari/ambari-logsearch/ambari-logsearch-
 logsearch.solr.audit.logs.config_set.folder=/root/ambari/ambari-logsearch/ambari-logsearch-server/target/package/conf/solr_configsets
 
 # History logs
-logsearch.solr.collection.history=history
-logsearch.solr.history.config.name=history
-logsearch.collection.history.replication.factor=1
+logsearch.solr.collection.metadata=logsearch_metadata
+logsearch.solr.metadata.config.name=logsearch_metadata
+logsearch.collection.metadata.replication.factor=1
 
 # Metrics
 logsearch.solr.metrics.collector.hosts=
diff --git a/jenkins/containers/docker-logsearch-portal/conf/logsearch.properties b/jenkins/containers/docker-logsearch-portal/conf/logsearch.properties
index 59d207754b..78b546942d 100644
--- a/jenkins/containers/docker-logsearch-portal/conf/logsearch.properties
+++ b/jenkins/containers/docker-logsearch-portal/conf/logsearch.properties
@@ -38,10 +38,10 @@ logsearch.collection.audit.logs.replication.factor=2
 logsearch.solr.config_set.folder=/usr/lib/ambari-logsearch-portal/conf/solr_configsets
 logsearch.solr.audit.logs.config_set.folder=/usr/lib/ambari-logsearch-portal/conf/solr_configsets
 
-# History logs
-logsearch.solr.collection.history=history
-logsearch.solr.history.config.name=history
-logsearch.collection.history.replication.factor=1
+# Log search metadata
+logsearch.solr.collection.metadata=logsearch_metadata
+logsearch.solr.metadata.config.name=logsearch_metadata
+logsearch.collection.metadata.replication.factor=1
 
 # Metrics
 logsearch.solr.metrics.collector.hosts=
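
For readers skimming the diff, a minimal sketch of how the extended factory signature is intended to be called; the literal argument values here are illustrative, only the createSolrClient signature and the property names come from the patch:

    // Discovery enabled: the factory first builds a CloudSolrClient against ZooKeeper,
    // waits until the collection's replicas report their base URLs, then returns an
    // LBHttpSolrClient pointing at those URLs and closes the discovery client.
    SolrClient client = new LogFeederSolrClientFactory().createSolrClient(
        "localhost:2181/solr",   // logfeeder.solr.zk_connect_string
        null,                    // logfeeder.solr.urls (ignored when discovery is on)
        "logsearch_metadata",    // target collection
        true);                   // logfeeder.solr.cloud.client.discover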


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services
