Repository: ambari
Updated Branches:
  refs/heads/branch-2.5 ec8809cea -> 777d97afb


AMBARI-19397 Infra Solr Upgrade support 2.4 -> 2.5 (mgergely)

Change-Id: I4c753856516fc416433f677664df2515c0d9f493


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/777d97af
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/777d97af
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/777d97af

Branch: refs/heads/branch-2.5
Commit: 777d97afb6c8f0846c0ea8386db5a6f6aa1c5f86
Parents: ec8809c
Author: Miklos Gergely <[email protected]>
Authored: Tue Jan 10 00:08:28 2017 +0100
Committer: Miklos Gergely <[email protected]>
Committed: Tue Jan 10 00:08:28 2017 +0100

----------------------------------------------------------------------
 .../server/upgrade/SchemaUpgradeUtil.java       |  75 ++++++++++++
 .../server/upgrade/UpgradeCatalog250.java       |  70 +++++++++++
 .../configuration/infra-solr-client-log4j.xml   |   4 +-
 .../0.1.0/configuration/infra-solr-log4j.xml    |  18 +--
 .../0.1.0/properties/solr-log4j.properties.j2   |   4 +-
 .../server/upgrade/UpgradeCatalog250Test.java   | 115 ++++++++++++++++---
 6 files changed, 260 insertions(+), 26 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/777d97af/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeUtil.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeUtil.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeUtil.java
new file mode 100644
index 0000000..999004c
--- /dev/null
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeUtil.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.upgrade;
+
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.springframework.util.StringUtils;
+
/**
 * Utility methods for schema/config upgrades.
 *
 * <p>Non-instantiable holder of static helpers.
 */
public class SchemaUpgradeUtil {

  private SchemaUpgradeUtil() {
    throw new UnsupportedOperationException();
  }

  /**
   * Extracts a new property that was introduced to make a content type property more configurable
   * by making a hard coded value changeable. The function takes a pattern to identify the previous
   * value of the property, replaces the actual value with a reference to the new property, and also
   * adds the new property to a map.
   *
   * Example:
   * content: ... some.variable=HARDCODED_VALUE ...
   * propertyName: some_variable
   * variableName: some_variable_processed
   * propertyPattern: some.variable=(\w+)
   *
   * the function returns: ... some.variable={{some_variable_processed}} ...
   * newProperties will contain: some_variable -> HARDCODED_VALUE
   *
   * @param content the content type variable's content before upgrade
   * @param propertyName the name of the new property
   * @param variableName the name of the variable that will be used in the content
   * @param propertyPattern the regexp pattern to identify the property, must contain exactly one
   *                        "(\w+)" for the actual value of the new property
   * @param defaultValue the default value for the new property if the actual value can not be
   *                     found in the content
   * @param newProperties map of new properties where the extracted property will be put
   * @return the updated content containing a reference to the new property
   * @throws IllegalArgumentException if propertyPattern does not contain exactly one "(\w+)"
   */
  public static String extractProperty(String content, String propertyName, String variableName,
      String propertyPattern, String defaultValue, Map<String, String> newProperties) {
    if (countOccurrences(propertyPattern, "(\\w+)") != 1) {
      throw new IllegalArgumentException("propertyPattern must contain exactly one '(\\w+)': " + propertyPattern);
    }

    Matcher m = Pattern.compile(propertyPattern).matcher(content);

    String propertyValue = defaultValue;
    if (m.find()) {
      propertyValue = m.group(1);

      // Splice the "{{variable}}" reference over the exact span matched by the capture group.
      // The previous implementation rebuilt the matched text by literally replacing "(\w+)" in
      // the raw pattern string and then doing String.replace on the content, which silently did
      // nothing whenever the pattern contained regex metacharacters that were not literally
      // present in the content, and could also touch unrelated occurrences elsewhere.
      content = content.substring(0, m.start(1)) + "{{" + variableName + "}}" + content.substring(m.end(1));
    }
    newProperties.put(propertyName, propertyValue);
    return content;
  }

  /**
   * Counts non-overlapping occurrences of {@code needle} in {@code haystack}
   * (replacement for Spring's StringUtils.countOccurrencesOf, avoiding the dependency).
   */
  private static int countOccurrences(String haystack, String needle) {
    int count = 0;
    for (int idx = haystack.indexOf(needle); idx != -1; idx = haystack.indexOf(needle, idx + needle.length())) {
      count++;
    }
    return count;
  }
}

http://git-wip-us.apache.org/repos/asf/ambari/blob/777d97af/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
index 3701e2f..172cbd4 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
@@ -160,6 +160,7 @@ public class UpgradeCatalog250 extends 
AbstractUpgradeCatalog {
     updateTablesForZeppelinViewRemoval();
     updateAtlasConfigs();
     updateLogSearchConfigs();
+    updateAmbariInfraConfigs();
     addManageServiceAutoStartPermissions();
   }
 
@@ -674,6 +675,75 @@ public class UpgradeCatalog250 extends 
AbstractUpgradeCatalog {
   }
   
   /**
+   * Updates Log Search configs.
+   *
+   * @throws AmbariException
+   */
+  protected void updateAmbariInfraConfigs() throws AmbariException {
+    AmbariManagementController ambariManagementController = 
injector.getInstance(AmbariManagementController.class);
+    Clusters clusters = ambariManagementController.getClusters();
+    if (clusters != null) {
+      Map<String, Cluster> clusterMap = clusters.getClusters();
+
+      if (clusterMap != null && !clusterMap.isEmpty()) {
+        for (final Cluster cluster : clusterMap.values()) {
+          Config infraSolrEnvProperties = 
cluster.getDesiredConfigByType("infra-solr-env");
+          if (infraSolrEnvProperties != null) {
+            String content = 
infraSolrEnvProperties.getProperties().get("content");
+            if 
(content.contains("SOLR_SSL_TRUST_STORE={{infra_solr_keystore_location}}")) {
+              content = 
content.replace("SOLR_SSL_TRUST_STORE={{infra_solr_keystore_location}}", 
"SOLR_SSL_TRUST_STORE={{infra_solr_truststore_location}}");
+            }
+            if 
(content.contains("SOLR_SSL_TRUST_STORE_PASSWORD={{infra_solr_keystore_password}}"))
 {
+              content = 
content.replace("SOLR_SSL_TRUST_STORE_PASSWORD={{infra_solr_keystore_password}}",
 "SOLR_SSL_TRUST_STORE_PASSWORD={{infra_solr_truststore_password}}");
+            }
+            if 
(content.contains("SOLR_KERB_NAME_RULES={{infra_solr_kerberos_name_rules}}")) {
+              content = 
content.replace("SOLR_KERB_NAME_RULES={{infra_solr_kerberos_name_rules}}", 
"SOLR_KERB_NAME_RULES=\"{{infra_solr_kerberos_name_rules}}\"");
+            }
+            if (content.contains(" 
-Dsolr.kerberos.name.rules=${SOLR_KERB_NAME_RULES}")) {
+              content = content.replace(" 
-Dsolr.kerberos.name.rules=${SOLR_KERB_NAME_RULES}", "");
+            }
+            if 
(!content.equals(infraSolrEnvProperties.getProperties().get("content"))) {
+              updateConfigurationPropertiesForCluster(cluster, 
"infra-solr-env", Collections.singletonMap("content", content), true, true);
+            }
+          }
+          
+          Config infraSolrLog4jProperties = 
cluster.getDesiredConfigByType("infra-solr-log4j");
+          if (infraSolrLog4jProperties != null) {
+            Map<String, String> newProperties = new HashMap<>();
+            
+            String content = 
infraSolrLog4jProperties.getProperties().get("content");
+            content = SchemaUpgradeUtil.extractProperty(content, 
"infra_log_maxfilesize", "infra_log_maxfilesize",
+                "log4j.appender.file.MaxFileSize=(\\w+)MB", "10", 
newProperties);
+            content = SchemaUpgradeUtil.extractProperty(content, 
"infra_log_maxbackupindex", "infra_log_maxbackupindex",
+                "log4j.appender.file.MaxBackupIndex=(\\w+)\n", "9", 
newProperties);
+            
+            newProperties.put("content", content);
+            updateConfigurationPropertiesForCluster(cluster, 
"infra-solr-log4j", newProperties, true, true);
+          }
+          
+          Config infraSolrClientLog4jProperties = 
cluster.getDesiredConfigByType("infra-solr-client-log4j");
+          if (infraSolrClientLog4jProperties != null) {
+            Map<String, String> newProperties = new HashMap<>();
+            
+            String content = 
infraSolrClientLog4jProperties.getProperties().get("content");
+            if (content.contains("infra_client_log")) {
+              content = content.replace("infra_client_log", "solr_client_log");
+            }
+            
+            content = SchemaUpgradeUtil.extractProperty(content, 
"infra_client_log_maxfilesize", "solr_client_log_maxfilesize",
+                "log4j.appender.file.MaxFileSize=(\\w+)MB", "80", 
newProperties);
+            content = SchemaUpgradeUtil.extractProperty(content, 
"infra_client_log_maxbackupindex", "solr_client_log_maxbackupindex",
+                "log4j.appender.file.MaxBackupIndex=(\\w+)\n", "60", 
newProperties);
+            
+            newProperties.put("content", content);
+            updateConfigurationPropertiesForCluster(cluster, 
"infra-solr-client-log4j", newProperties, true, true);
+          }
+        }
+      }
+    }
+  }
+  
+  /**
    * Add permissions for managing service auto-start.
    * <p>
    * <ul>

http://git-wip-us.apache.org/repos/asf/ambari/blob/777d97af/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-client-log4j.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-client-log4j.xml
 
b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-client-log4j.xml
index 87468a2..024c950 100644
--- 
a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-client-log4j.xml
+++ 
b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-client-log4j.xml
@@ -30,7 +30,7 @@
     <value-attributes>
       <unit>MB</unit>
     </value-attributes>
-    <on-ambari-upgrade add="false"/>
+    <on-ambari-upgrade add="true"/>
    </property>
    <property>
     <name>infra_client_log_maxbackupindex</name>
@@ -41,7 +41,7 @@
       <type>int</type>
       <minimum>0</minimum>
     </value-attributes>
-    <on-ambari-upgrade add="false"/>
+    <on-ambari-upgrade add="true"/>
   </property>
   <property>
     <name>infra_solr_client_log_dir</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/777d97af/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-log4j.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-log4j.xml
 
b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-log4j.xml
index b22dbf2..e797b37 100644
--- 
a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-log4j.xml
+++ 
b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-log4j.xml
@@ -21,16 +21,16 @@
 -->
 <configuration supports_adding_forbidden="true">
   <property>
-   <name>infra_log_maxfilesize</name>
-   <value>10</value>
-   <description>The maximum size of backup file before the log is 
rotated</description>
-   <display-name>Ambari Infra Log: backup file size</display-name>
-   <value-attributes>
+    <name>infra_log_maxfilesize</name>
+    <value>10</value>
+    <description>The maximum size of backup file before the log is 
rotated</description>
+    <display-name>Ambari Infra Log: backup file size</display-name>
+    <value-attributes>
       <unit>MB</unit>
     </value-attributes>
-    <on-ambari-upgrade add="false"/>
-   </property>
-   <property>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
     <name>infra_log_maxbackupindex</name>
     <value>9</value>
     <description>The number of backup files</description>
@@ -39,7 +39,7 @@
       <type>int</type>
       <minimum>0</minimum>
     </value-attributes>
-    <on-ambari-upgrade add="false"/>
+    <on-ambari-upgrade add="true"/>
   </property>
   <property>
     <name>content</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/777d97af/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/properties/solr-log4j.properties.j2
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/properties/solr-log4j.properties.j2
 
b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/properties/solr-log4j.properties.j2
index 275203a..d81aa17 100644
--- 
a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/properties/solr-log4j.properties.j2
+++ 
b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/properties/solr-log4j.properties.j2
@@ -25,8 +25,8 @@ log4j.appender.CONSOLE.layout.ConversionPattern=%-4r [%t] 
%-5p %c %x [%X{collect
 
 #- size rotation with log cleanup.
 log4j.appender.file=org.apache.log4j.RollingFileAppender
-log4j.appender.file.MaxFileSize=10MB
-log4j.appender.file.MaxBackupIndex=9
+log4j.appender.file.MaxFileSize={{infra_log_maxfilesize}}MB
+log4j.appender.file.MaxBackupIndex={{infra_log_maxbackupindex}}
 
 #- File to log to and log format
 log4j.appender.file.File=${infra.solr.log.dir}/solr.log

http://git-wip-us.apache.org/repos/asf/ambari/blob/777d97af/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
 
b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
index c89522f..98aaa0f 100644
--- 
a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
+++ 
b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
@@ -276,6 +276,7 @@ public class UpgradeCatalog250Test {
     Method updateHIVEInteractiveConfigs = 
UpgradeCatalog250.class.getDeclaredMethod("updateHIVEInteractiveConfigs");
     Method updateTEZInteractiveConfigs = 
UpgradeCatalog250.class.getDeclaredMethod("updateTEZInteractiveConfigs");
     Method updateLogSearchConfigs = 
UpgradeCatalog250.class.getDeclaredMethod("updateLogSearchConfigs");
+    Method updateAmbariInfraConfigs = 
UpgradeCatalog250.class.getDeclaredMethod("updateAmbariInfraConfigs");
     Method addManageServiceAutoStartPermissions = 
UpgradeCatalog250.class.getDeclaredMethod("addManageServiceAutoStartPermissions");
 
     UpgradeCatalog250 upgradeCatalog250 = 
createMockBuilder(UpgradeCatalog250.class)
@@ -288,6 +289,7 @@ public class UpgradeCatalog250Test {
       .addMockedMethod(updateTablesForZeppelinViewRemoval)
       .addMockedMethod(updateAtlasConfigs)
       .addMockedMethod(updateLogSearchConfigs)
+      .addMockedMethod(updateAmbariInfraConfigs)
       .addMockedMethod(addNewConfigurationsFromXml)
       .addMockedMethod(addManageServiceAutoStartPermissions)
       .createMock();
@@ -323,6 +325,9 @@ public class UpgradeCatalog250Test {
     upgradeCatalog250.updateLogSearchConfigs();
     expectLastCall().once();
 
+    upgradeCatalog250.updateAmbariInfraConfigs();
+    expectLastCall().once();
+
     upgradeCatalog250.addManageServiceAutoStartPermissions();
     expectLastCall().once();
 
@@ -334,7 +339,6 @@ public class UpgradeCatalog250Test {
   }
 
   @Test
-  @SuppressWarnings("unchecked")
   public void testAmsEnvUpdateConfigs() throws Exception {
 
     Map<String, String> oldPropertiesAmsEnv = new HashMap<String, String>() {
@@ -389,7 +393,7 @@ public class UpgradeCatalog250Test {
     
expect(injector2.getInstance(AmbariManagementController.class)).andReturn(controller).anyTimes();
     expect(controller.getClusters()).andReturn(clusters).anyTimes();
     expect(controller.createConfig(anyObject(Cluster.class), anyString(), 
capture(propertiesCapture), anyString(),
-      anyObject(Map.class))).andReturn(config).once();
+        EasyMock.<Map<String, Map<String, 
String>>>anyObject())).andReturn(config).once();
 
     replay(controller, injector2);
     new UpgradeCatalog250(injector2).updateAMSConfigs();
@@ -426,7 +430,6 @@ public class UpgradeCatalog250Test {
 
   }
 
-  @SuppressWarnings("unchecked")
   private void testAmsHbaseRootDir(Map<String, String> oldProperties, 
Map<String, String> newProperties) throws AmbariException {
     Map<String, String> amsSite = new HashMap<String, String>() {
       {
@@ -463,7 +466,7 @@ public class UpgradeCatalog250Test {
     
expect(injector2.getInstance(AmbariManagementController.class)).andReturn(controller).anyTimes();
     expect(controller.getClusters()).andReturn(clusters).anyTimes();
     expect(controller.createConfig(anyObject(Cluster.class), anyString(), 
capture(propertiesCapture), anyString(),
-      anyObject(Map.class))).andReturn(config).once();
+        EasyMock.<Map<String, Map<String, 
String>>>anyObject())).andReturn(config).once();
 
     replay(controller, injector2);
     new UpgradeCatalog250(injector2).updateAMSConfigs();
@@ -474,7 +477,6 @@ public class UpgradeCatalog250Test {
   }
 
   @Test
-  @SuppressWarnings("unchecked")
   public void testKafkaUpdateConfigs() throws Exception{
 
     Map<String, String> oldProperties = new HashMap<String, String>() {
@@ -513,7 +515,7 @@ public class UpgradeCatalog250Test {
     
expect(injector2.getInstance(AmbariManagementController.class)).andReturn(controller).anyTimes();
     expect(controller.getClusters()).andReturn(clusters).anyTimes();
     expect(controller.createConfig(anyObject(Cluster.class), anyString(), 
capture(propertiesCapture), anyString(),
-      anyObject(Map.class))).andReturn(config).once();
+        EasyMock.<Map<String, Map<String, 
String>>>anyObject())).andReturn(config).once();
 
     replay(controller, injector2);
     new UpgradeCatalog250(injector2).updateKafkaConfigs();
@@ -524,7 +526,6 @@ public class UpgradeCatalog250Test {
   }
 
   @Test
-  @SuppressWarnings("unchecked")
   public void testLogSearchUpdateConfigs() throws Exception {
     reset(clusters, cluster);
     expect(clusters.getClusters()).andReturn(ImmutableMap.of("normal", 
cluster)).once();
@@ -557,7 +558,7 @@ public class UpgradeCatalog250Test {
     
expect(mockLogSearchProperties.getProperties()).andReturn(oldLogSearchProperties).anyTimes();
     Capture<Map<String, String>> logSearchPropertiesCapture = 
EasyMock.newCapture();
     expect(controller.createConfig(anyObject(Cluster.class), anyString(), 
capture(logSearchPropertiesCapture), anyString(),
-      anyObject(Map.class))).andReturn(config).once();
+      EasyMock.<Map<String, Map<String, 
String>>>anyObject())).andReturn(config).once();
 
     Map<String, String> oldLogFeederEnv = ImmutableMap.of(
         "content", "infra_solr_ssl_enabled");
@@ -570,7 +571,7 @@ public class UpgradeCatalog250Test {
     
expect(mockLogFeederEnv.getProperties()).andReturn(oldLogFeederEnv).anyTimes();
     Capture<Map<String, String>> logFeederEnvCapture = EasyMock.newCapture();
     expect(controller.createConfig(anyObject(Cluster.class), anyString(), 
capture(logFeederEnvCapture), anyString(),
-      anyObject(Map.class))).andReturn(config).once();
+        EasyMock.<Map<String, Map<String, 
String>>>anyObject())).andReturn(config).once();
 
     Map<String, String> oldLogSearchEnv = ImmutableMap.of(
         "logsearch_solr_audit_logs_use_ranger", "false",
@@ -586,7 +587,7 @@ public class UpgradeCatalog250Test {
     
expect(mockLogSearchEnv.getProperties()).andReturn(oldLogSearchEnv).anyTimes();
     Capture<Map<String, String>> logSearchEnvCapture = EasyMock.newCapture();
     expect(controller.createConfig(anyObject(Cluster.class), anyString(), 
capture(logSearchEnvCapture), anyString(),
-      anyObject(Map.class))).andReturn(config).once();
+        EasyMock.<Map<String, Map<String, 
String>>>anyObject())).andReturn(config).once();
 
     Map<String, String> oldLogSearchLog4j = ImmutableMap.of(
         "content", "{{logsearch_log_dir}}/logsearch.err\n" +
@@ -601,7 +602,7 @@ public class UpgradeCatalog250Test {
     
expect(mockLogSearchLog4j.getProperties()).andReturn(oldLogSearchLog4j).anyTimes();
     Capture<Map<String, String>> logSearchLog4jCapture = EasyMock.newCapture();
     expect(controller.createConfig(anyObject(Cluster.class), anyString(), 
capture(logSearchLog4jCapture), anyString(),
-      anyObject(Map.class))).andReturn(config).once();
+        EasyMock.<Map<String, Map<String, 
String>>>anyObject())).andReturn(config).once();
 
     replay(clusters, cluster);
     replay(controller, injector2);
@@ -623,6 +624,95 @@ public class UpgradeCatalog250Test {
   }
   
   @Test
  public void testAmbariInfraUpdateConfigs() throws Exception {
    // Single cluster named "normal"; updateAmbariInfraConfigs iterates over it once.
    reset(clusters, cluster);
    expect(clusters.getClusters()).andReturn(ImmutableMap.of("normal", cluster)).once();
    
    EasyMockSupport easyMockSupport = new EasyMockSupport();
    
    // Controller mock only intercepts config creation/lookup; everything else is nice-mocked.
    Injector injector2 = easyMockSupport.createNiceMock(Injector.class);
    AmbariManagementControllerImpl controller = createMockBuilder(AmbariManagementControllerImpl.class)
        .addMockedMethod("createConfiguration")
        .addMockedMethod("getClusters", new Class[] {})
        .addMockedMethod("createConfig")
        .withConstructor(actionManager, clusters, injector)
        .createNiceMock();

    expect(injector2.getInstance(AmbariManagementController.class)).andReturn(controller).anyTimes();
    expect(controller.getClusters()).andReturn(clusters).anyTimes();

    // --- infra-solr-env: pre-upgrade content wrongly references the keystore for the trust
    // store and passes the kerberos name rules unquoted plus as a separate JVM option.
    Map<String, String> oldInfraSolrEnv = ImmutableMap.of(
        "content", "SOLR_SSL_TRUST_STORE={{infra_solr_keystore_location}}\n" +
                   "SOLR_SSL_TRUST_STORE_PASSWORD={{infra_solr_keystore_password}}\n" +
                   "SOLR_KERB_NAME_RULES={{infra_solr_kerberos_name_rules}}\n" +
                   "SOLR_AUTHENTICATION_OPTS=\" -DauthenticationPlugin=org.apache.solr.security.KerberosPlugin -Djava.security.auth.login.config=$SOLR_JAAS_FILE -Dsolr.kerberos.principal=${SOLR_KERB_PRINCIPAL} -Dsolr.kerberos.keytab=${SOLR_KERB_KEYTAB} -Dsolr.kerberos.cookie.domain=${SOLR_HOST} -Dsolr.kerberos.name.rules=${SOLR_KERB_NAME_RULES}\"");
    
    // Expected: truststore variables referenced, name rules quoted, JVM option removed.
    Map<String, String> expectedInfraSolrEnv = ImmutableMap.of(
        "content", "SOLR_SSL_TRUST_STORE={{infra_solr_truststore_location}}\n" +
                   "SOLR_SSL_TRUST_STORE_PASSWORD={{infra_solr_truststore_password}}\n" +
                   "SOLR_KERB_NAME_RULES=\"{{infra_solr_kerberos_name_rules}}\"\n" +
                   "SOLR_AUTHENTICATION_OPTS=\" -DauthenticationPlugin=org.apache.solr.security.KerberosPlugin -Djava.security.auth.login.config=$SOLR_JAAS_FILE -Dsolr.kerberos.principal=${SOLR_KERB_PRINCIPAL} -Dsolr.kerberos.keytab=${SOLR_KERB_KEYTAB} -Dsolr.kerberos.cookie.domain=${SOLR_HOST}\"");
    
    Config mockInfraSolrEnv = easyMockSupport.createNiceMock(Config.class);
    expect(cluster.getDesiredConfigByType("infra-solr-env")).andReturn(mockInfraSolrEnv).atLeastOnce();
    expect(mockInfraSolrEnv.getProperties()).andReturn(oldInfraSolrEnv).anyTimes();
    Capture<Map<String, String>> infraSolrEnvCapture = EasyMock.newCapture();
    expect(controller.createConfig(anyObject(Cluster.class), anyString(), capture(infraSolrEnvCapture), anyString(),
        EasyMock.<Map<String, Map<String, String>>>anyObject())).andReturn(config).once();

    // --- infra-solr-log4j: hard coded rolling limits (15 MB / 5 backups) should be
    // extracted into the new infra_log_* properties and templated in the content.
    Map<String, String> oldInfraSolrLog4j = ImmutableMap.of(
        "content", "log4j.appender.file.MaxFileSize=15MB\n" +
                   "log4j.appender.file.MaxBackupIndex=5\n");
    
    Map<String, String> expectedInfraSolrLog4j = ImmutableMap.of(
        "content", "log4j.appender.file.MaxFileSize={{infra_log_maxfilesize}}MB\n" +
                   "log4j.appender.file.MaxBackupIndex={{infra_log_maxbackupindex}}\n",
        "infra_log_maxfilesize", "15",
        "infra_log_maxbackupindex", "5");
    
    Config mockInfraSolrLog4j = easyMockSupport.createNiceMock(Config.class);
    expect(cluster.getDesiredConfigByType("infra-solr-log4j")).andReturn(mockInfraSolrLog4j).atLeastOnce();
    expect(mockInfraSolrLog4j.getProperties()).andReturn(oldInfraSolrLog4j).anyTimes();
    Capture<Map<String, String>> infraSolrLog4jCapture = EasyMock.newCapture();
    expect(controller.createConfig(anyObject(Cluster.class), anyString(), capture(infraSolrLog4jCapture), anyString(),
        EasyMock.<Map<String, Map<String, String>>>anyObject())).andReturn(config).once();

    // --- infra-solr-client-log4j: the infra_client_log variable is renamed to
    // solr_client_log and the rolling limits (55 MB / 10 backups) are extracted.
    Map<String, String> oldInfraSolrClientLog4j = ImmutableMap.of(
        "content", "log4j.appender.file.File\u003d{{infra_client_log|default(\u0027/var/log/ambari-infra-solr-client/solr-client.log\u0027)}}\n" +
                   "log4j.appender.file.MaxFileSize=55MB\n" +
                   "log4j.appender.file.MaxBackupIndex=10\n");
    
    Map<String, String> expectedInfraSolrClientLog4j = ImmutableMap.of(
        "content", "log4j.appender.file.File\u003d{{solr_client_log|default(\u0027/var/log/ambari-infra-solr-client/solr-client.log\u0027)}}\n" +
                   "log4j.appender.file.MaxFileSize={{solr_client_log_maxfilesize}}MB\n" +
                   "log4j.appender.file.MaxBackupIndex={{solr_client_log_maxbackupindex}}\n",
        "infra_client_log_maxfilesize", "55",
        "infra_client_log_maxbackupindex", "10");
    
    Config mockInfraSolrClientLog4j = easyMockSupport.createNiceMock(Config.class);
    expect(cluster.getDesiredConfigByType("infra-solr-client-log4j")).andReturn(mockInfraSolrClientLog4j).atLeastOnce();
    expect(mockInfraSolrClientLog4j.getProperties()).andReturn(oldInfraSolrClientLog4j).anyTimes();
    Capture<Map<String, String>> infraSolrClientLog4jCapture = EasyMock.newCapture();
    expect(controller.createConfig(anyObject(Cluster.class), anyString(), capture(infraSolrClientLog4jCapture), anyString(),
        EasyMock.<Map<String, Map<String, String>>>anyObject())).andReturn(config).once();

    // Exercise the upgrade step with all mocks armed.
    replay(clusters, cluster);
    replay(controller, injector2);
    replay(mockInfraSolrEnv, mockInfraSolrLog4j, mockInfraSolrClientLog4j);
    new UpgradeCatalog250(injector2).updateAmbariInfraConfigs();
    easyMockSupport.verifyAll();

    // Verify each config type was rewritten exactly as expected (order-insensitive map compare).
    Map<String, String> updatedInfraSolrEnv = infraSolrEnvCapture.getValue();
    assertTrue(Maps.difference(expectedInfraSolrEnv, updatedInfraSolrEnv).areEqual());

    Map<String, String> updatedInfraSolrLog4j = infraSolrLog4jCapture.getValue();
    assertTrue(Maps.difference(expectedInfraSolrLog4j, updatedInfraSolrLog4j).areEqual());

    Map<String, String> updatedInfraSolrClientLog4j = infraSolrClientLog4jCapture.getValue();
    assertTrue(Maps.difference(expectedInfraSolrClientLog4j, updatedInfraSolrClientLog4j).areEqual());
  }
+  
+  @Test
   public void testUpdateAtlasConfigs() throws Exception {
 
     Map<String, String> oldHiveProperties = new HashMap<String, String>();
@@ -651,7 +741,6 @@ public class UpgradeCatalog250Test {
     testUpdateAtlasHookConfig(oldSqoopProperties, newSqoopProperties, 
"sqoop-env");
   }
 
-  @SuppressWarnings("unchecked")
   public void testUpdateAtlasHookConfig(Map<String, String> oldProperties, 
Map<String, String> newProperties, String configType) throws Exception {
 
     Map<String, Service> installedServices = new HashMap<String, Service>() {
@@ -692,7 +781,7 @@ public class UpgradeCatalog250Test {
     
expect(injector2.getInstance(AmbariManagementController.class)).andReturn(controller).anyTimes();
     expect(controller.getClusters()).andReturn(clusters).anyTimes();
     expect(controller.createConfig(anyObject(Cluster.class), anyString(), 
capture(propertiesCapture), anyString(),
-      anyObject(Map.class))).andReturn(config).once();
+        EasyMock.<Map<String, Map<String, 
String>>>anyObject())).andReturn(config).once();
 
     replay(controller, injector2);
     new UpgradeCatalog250(injector2).updateAtlasConfigs();

Reply via email to