AMBARI-14740. Add params to alert definitions during ambari upgrade. (vbrodetskyi)
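In short: the new UpgradeCatalog240 walks each cluster's alert definitions and appends default parameter entries (connection timeouts, NameNode checkpoint thresholds, default smoke-user credentials, and the Flume run directory) to their JSON source, skipping any parameter a definition already declares. For example, flume_agent_status gains a run.directory parameter that defaults to /var/run/flume.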
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a8887eb1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a8887eb1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a8887eb1

Branch: refs/heads/branch-dev-patch-upgrade
Commit: a8887eb175afdc0e9340647a83e0f02cddbfdb8b
Parents: 5380fef
Author: Vitaly Brodetskyi <vbrodets...@hortonworks.com>
Authored: Thu Jan 21 09:32:58 2016 +0200
Committer: Vitaly Brodetskyi <vbrodets...@hortonworks.com>
Committed: Thu Jan 21 09:32:58 2016 +0200

----------------------------------------------------------------------
 .../server/upgrade/SchemaUpgradeHelper.java    |   1 +
 .../server/upgrade/UpgradeCatalog240.java      | 298 +++++++++++++++++++
 .../server/upgrade/UpgradeCatalog240Test.java  | 130 ++++++++
 3 files changed, 429 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a8887eb1/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
index 82aa6ca..a07d42d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
@@ -183,6 +183,7 @@ public class SchemaUpgradeHelper {
     catalogBinder.addBinding().to(UpgradeCatalog220.class);
     catalogBinder.addBinding().to(UpgradeCatalog221.class);
     catalogBinder.addBinding().to(UpgradeCatalog230.class);
+    catalogBinder.addBinding().to(UpgradeCatalog240.class);
     catalogBinder.addBinding().to(FinalUpgradeCatalog.class);
 
     EventBusSynchronizer.synchronizeAmbariEventPublisher(binder());
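Side note: the one-line registration above is what makes the new catalog run at all. Below is a minimal, self-contained sketch of the idea (illustrative only, not the actual SchemaUpgradeHelper API; the Catalog interface and upgrade method here are hypothetical stand-ins): each registered catalog declares the version it upgrades to, and the helper applies, in registration order, every catalog newer than the current schema version.

// Illustrative sketch only; not the real SchemaUpgradeHelper API.
import java.util.Arrays;
import java.util.List;

class CatalogOrderingSketch {

  // hypothetical stand-in for AbstractUpgradeCatalog's version methods
  interface Catalog {
    String getTargetVersion();
  }

  // apply, in registration order, every catalog targeting a newer version;
  // lexicographic comparison is a simplification that works for these versions
  static String upgrade(String currentVersion, List<Catalog> catalogs) {
    for (Catalog catalog : catalogs) {
      if (currentVersion.compareTo(catalog.getTargetVersion()) < 0) {
        System.out.println("applying catalog for " + catalog.getTargetVersion());
        currentVersion = catalog.getTargetVersion();
      }
    }
    return currentVersion;
  }

  public static void main(String[] args) {
    // e.g. a 2.3.0 schema picks up only the 2.4.0 catalog
    List<Catalog> catalogs = Arrays.asList(() -> "2.3.0", () -> "2.4.0");
    System.out.println(upgrade("2.3.0", catalogs)); // prints 2.4.0
  }
}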
http://git-wip-us.apache.org/repos/asf/ambari/blob/a8887eb1/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
new file mode 100644
index 0000000..1ccea5c
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
@@ -0,0 +1,298 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.upgrade;
+
+import com.google.gson.JsonArray;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParser;
+import com.google.gson.JsonPrimitive;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
+import org.apache.ambari.server.orm.dao.DaoUtils;
+import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+
+/**
+ * Upgrade catalog for version 2.4.0.
+ */
+public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
+
+  @Inject
+  DaoUtils daoUtils;
+
+  /**
+   * Logger.
+   */
+  private static final Logger LOG = LoggerFactory.getLogger(UpgradeCatalog240.class);
+
+  // ----- Constructors ------------------------------------------------------
+
+  /**
+   * Don't forget to register new UpgradeCatalogs in {@link org.apache.ambari.server.upgrade.SchemaUpgradeHelper.UpgradeHelperModule#configure()}
+   *
+   * @param injector Guice injector to track dependencies and uses bindings to inject them.
+   */
+  @Inject
+  public UpgradeCatalog240(Injector injector) {
+    super(injector);
+    this.injector = injector;
+  }
+
+  // ----- UpgradeCatalog ----------------------------------------------------
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public String getTargetVersion() {
+    return "2.4.0";
+  }
+
+  // ----- AbstractUpgradeCatalog --------------------------------------------
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public String getSourceVersion() {
+    return "2.3.0";
+  }
+
+  @Override
+  protected void executeDDLUpdates() throws AmbariException, SQLException {
+    // no schema (DDL) changes are required for this upgrade
+  }
+
+  @Override
+  protected void executePreDMLUpdates() throws AmbariException, SQLException {
+    // no pre-DML work is required for this upgrade
+  }
+
+  @Override
+  protected void executeDMLUpdates() throws AmbariException, SQLException {
+    addNewConfigurationsFromXml();
+    updateAlerts();
+  }
+
+  protected void updateAlerts() {
+    LOG.info("Updating alert definitions.");
+    AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
+    AlertDefinitionDAO alertDefinitionDAO = injector.getInstance(AlertDefinitionDAO.class);
+    Clusters clusters = ambariManagementController.getClusters();
+
+    Map<String, Cluster> clusterMap = getCheckedClusterMap(clusters);
+    for (final Cluster cluster : clusterMap.values()) {
+      long clusterID = cluster.getClusterId();
+
+      final AlertDefinitionEntity namenodeLastCheckpointAlertDefinitionEntity = alertDefinitionDAO.findByName(
+          clusterID, "namenode_last_checkpoint");
+      final AlertDefinitionEntity namenodeHAHealthAlertDefinitionEntity = alertDefinitionDAO.findByName(
+          clusterID, "namenode_ha_health");
+      final AlertDefinitionEntity nodemanagerHealthAlertDefinitionEntity = alertDefinitionDAO.findByName(
+          clusterID, "yarn_nodemanager_health");
+      final AlertDefinitionEntity nodemanagerHealthSummaryAlertDefinitionEntity = alertDefinitionDAO.findByName(
+          clusterID, "nodemanager_health_summary");
+      final AlertDefinitionEntity hiveMetastoreProcessAlertDefinitionEntity = alertDefinitionDAO.findByName(
+          clusterID, "hive_metastore_process");
+      final AlertDefinitionEntity hiveServerProcessAlertDefinitionEntity = alertDefinitionDAO.findByName(
+          clusterID, "hive_server_process");
+      final AlertDefinitionEntity hiveWebhcatServerStatusAlertDefinitionEntity = alertDefinitionDAO.findByName(
+          clusterID, "hive_webhcat_server_status");
+      final AlertDefinitionEntity flumeAgentStatusAlertDefinitionEntity = alertDefinitionDAO.findByName(
+          clusterID, "flume_agent_status");
+
+      Map<AlertDefinitionEntity, List<String>> alertDefinitionParams = new HashMap<>();
+      alertDefinitionParams.put(namenodeLastCheckpointAlertDefinitionEntity,
+          new ArrayList<String>(Arrays.asList("connection.timeout", "checkpoint.time.warning.threshold", "checkpoint.time.critical.threshold")));
+      alertDefinitionParams.put(namenodeHAHealthAlertDefinitionEntity,
+          new ArrayList<String>(Arrays.asList("connection.timeout")));
+      alertDefinitionParams.put(nodemanagerHealthAlertDefinitionEntity,
+          new ArrayList<String>(Arrays.asList("connection.timeout")));
+      alertDefinitionParams.put(nodemanagerHealthSummaryAlertDefinitionEntity,
+          new ArrayList<String>(Arrays.asList("connection.timeout")));
+      alertDefinitionParams.put(hiveMetastoreProcessAlertDefinitionEntity,
+          new ArrayList<String>(Arrays.asList("default.smoke.user", "default.smoke.principal", "default.smoke.keytab")));
+      alertDefinitionParams.put(hiveServerProcessAlertDefinitionEntity,
+          new ArrayList<String>(Arrays.asList("default.smoke.user", "default.smoke.principal", "default.smoke.keytab")));
+      alertDefinitionParams.put(hiveWebhcatServerStatusAlertDefinitionEntity,
+          new ArrayList<String>(Arrays.asList("default.smoke.user", "connection.timeout")));
+      alertDefinitionParams.put(flumeAgentStatusAlertDefinitionEntity,
+          new ArrayList<String>(Arrays.asList("run.directory")));
+
+      for (Map.Entry<AlertDefinitionEntity, List<String>> entry : alertDefinitionParams.entrySet()) {
+        AlertDefinitionEntity alertDefinition = entry.getKey();
+        if (alertDefinition == null) {
+          // the alert definition is not installed on this cluster; skip it
+          continue;
+        }
+        String source = alertDefinition.getSource();
+
+        alertDefinition.setSource(addParam(source, entry.getValue()));
+        alertDefinition.setHash(UUID.randomUUID().toString());
+
+        alertDefinitionDAO.merge(alertDefinition);
+      }
+    }
+  }
+
+  protected String addParam(String source, List<String> params) {
+    JsonObject sourceJson = new JsonParser().parse(source).getAsJsonObject();
+    JsonArray parametersJson = sourceJson.getAsJsonArray("parameters");
+
+    boolean parameterExists = parametersJson != null && !parametersJson.isJsonNull();
+
+    if (parameterExists) {
+      // drop any requested param that the definition already declares
+      Iterator<JsonElement> jsonElementIterator = parametersJson.iterator();
+      while (jsonElementIterator.hasNext()) {
+        JsonElement element = jsonElementIterator.next();
+        JsonElement name = element.getAsJsonObject().get("name");
+        if (name != null && !name.isJsonNull() && params.contains(name.getAsString())) {
+          params.remove(name.getAsString());
+        }
+      }
+      if (params.isEmpty()) {
+        return sourceJson.toString();
+      }
+    }
+
+    List<JsonObject> paramsToAdd = new ArrayList<>();
+
+    if (params.contains("connection.timeout")) {
+      JsonObject param = new JsonObject();
+      param.add("name", new JsonPrimitive("connection.timeout"));
+      param.add("display_name", new JsonPrimitive("Connection Timeout"));
+      param.add("value", new JsonPrimitive(5.0));
+      param.add("type", new JsonPrimitive("NUMERIC"));
+      param.add("description", new JsonPrimitive("The maximum time before this alert is considered to be CRITICAL"));
+      param.add("units", new JsonPrimitive("seconds"));
+      param.add("threshold", new JsonPrimitive("CRITICAL"));
+
+      paramsToAdd.add(param);
+    }
+    if (params.contains("checkpoint.time.warning.threshold")) {
+      JsonObject param = new JsonObject();
+      param.add("name", new JsonPrimitive("checkpoint.time.warning.threshold"));
+      param.add("display_name", new JsonPrimitive("Checkpoint Warning"));
+      param.add("value", new JsonPrimitive(2.0));
+      param.add("type", new JsonPrimitive("PERCENT"));
+      param.add("description", new JsonPrimitive("The percentage of the last checkpoint time greater than the interval in order to trigger a warning alert."));
+      param.add("units", new JsonPrimitive("%"));
+      param.add("threshold", new JsonPrimitive("WARNING"));
+
+      paramsToAdd.add(param);
+    }
+    if (params.contains("checkpoint.time.critical.threshold")) {
+      JsonObject param = new JsonObject();
+      param.add("name", new JsonPrimitive("checkpoint.time.critical.threshold"));
+      param.add("display_name", new JsonPrimitive("Checkpoint Critical"));
+      param.add("value", new JsonPrimitive(2.0));
+      param.add("type", new JsonPrimitive("PERCENT"));
+      param.add("description", new JsonPrimitive("The percentage of the last checkpoint time greater than the interval in order to trigger a critical alert."));
+      param.add("units", new JsonPrimitive("%"));
+      param.add("threshold", new JsonPrimitive("CRITICAL"));
+
+      paramsToAdd.add(param);
+    }
+    if (params.contains("default.smoke.user")) {
+      JsonObject param = new JsonObject();
+      param.add("name", new JsonPrimitive("default.smoke.user"));
+      param.add("display_name", new JsonPrimitive("Default Smoke User"));
+      param.add("value", new JsonPrimitive("ambari-qa"));
+      param.add("type", new JsonPrimitive("STRING"));
+      param.add("description", new JsonPrimitive("The user that will run the Hive commands if not specified in cluster-env/smokeuser"));
+
+      paramsToAdd.add(param);
+    }
+    if (params.contains("default.smoke.principal")) {
+      JsonObject param = new JsonObject();
+      param.add("name", new JsonPrimitive("default.smoke.principal"));
+      param.add("display_name", new JsonPrimitive("Default Smoke Principal"));
+      param.add("value", new JsonPrimitive("ambari...@example.com"));
+      param.add("type", new JsonPrimitive("STRING"));
+      param.add("description", new JsonPrimitive("The principal to use when retrieving the kerberos ticket if not specified in cluster-env/smokeuser_principal_name"));
+
+      paramsToAdd.add(param);
+    }
+    if (params.contains("default.smoke.keytab")) {
+      JsonObject param = new JsonObject();
+      param.add("name", new JsonPrimitive("default.smoke.keytab"));
+      param.add("display_name", new JsonPrimitive("Default Smoke Keytab"));
+      param.add("value", new JsonPrimitive("/etc/security/keytabs/smokeuser.headless.keytab"));
+      param.add("type", new JsonPrimitive("STRING"));
+      param.add("description", new JsonPrimitive("The keytab to use when retrieving the kerberos ticket if not specified in cluster-env/smokeuser_keytab"));
+
+      paramsToAdd.add(param);
+    }
+    if (params.contains("run.directory")) {
+      JsonObject param = new JsonObject();
+      param.add("name", new JsonPrimitive("run.directory"));
+      param.add("display_name", new JsonPrimitive("Run Directory"));
+      param.add("value", new JsonPrimitive("/var/run/flume"));
+      param.add("type", new JsonPrimitive("STRING"));
+      param.add("description", new JsonPrimitive("The directory where flume agent processes will place their PID files."));
+
+      paramsToAdd.add(param);
+    }
+
+    // create the parameters array if absent; Gson keeps a live reference,
+    // so appending to it mutates sourceJson directly
+    if (!parameterExists) {
+      parametersJson = new JsonArray();
+      sourceJson.add("parameters", parametersJson);
+    }
+    for (JsonObject param : paramsToAdd) {
+      parametersJson.add(param);
+    }
+
+    return sourceJson.toString();
+  }
+}
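For readers skimming the diff, here is a standalone sketch of the JSON merge that addParam performs, written against Gson directly (the "test_path" source mirrors the fixtures in the tests below; the AddParamSketch class and its main method are illustrative, not part of the commit):

// Standalone sketch of addParam's merge; not part of the commit.
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.google.gson.JsonPrimitive;

public class AddParamSketch {
  public static void main(String[] args) {
    // an alert source with no "parameters" array yet
    JsonObject source = new JsonParser()
        .parse("{\"path\":\"test_path\",\"type\":\"SCRIPT\"}")
        .getAsJsonObject();

    JsonArray parameters = source.getAsJsonArray("parameters");
    if (parameters == null) {
      // Gson keeps a live reference, so adds below land in the source object
      parameters = new JsonArray();
      source.add("parameters", parameters);
    }

    JsonObject param = new JsonObject();
    param.add("name", new JsonPrimitive("connection.timeout"));
    param.add("value", new JsonPrimitive(5.0));
    parameters.add(param);

    // {"path":"test_path","type":"SCRIPT","parameters":[{"name":"connection.timeout","value":5.0}]}
    System.out.println(source);
  }
}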
http://git-wip-us.apache.org/repos/asf/ambari/blob/a8887eb1/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
new file mode 100644
index 0000000..d1d68f2
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
@@ -0,0 +1,130 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.upgrade;
+
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.Provider;
+import com.google.inject.persist.PersistService;
+import junit.framework.Assert;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.orm.GuiceJpaInitializer;
+import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.dao.StackDAO;
+import org.apache.ambari.server.orm.entities.StackEntity;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import javax.persistence.EntityManager;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import static org.easymock.EasyMock.createMockBuilder;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.createStrictMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.reset;
+import static org.easymock.EasyMock.verify;
+
+public class UpgradeCatalog240Test {
+  private Injector injector;
+  private Provider<EntityManager> entityManagerProvider = createStrictMock(Provider.class);
+  private EntityManager entityManager = createNiceMock(EntityManager.class);
+  private UpgradeCatalogHelper upgradeCatalogHelper;
+  private StackEntity desiredStackEntity;
+
+  @Before
+  public void init() {
+    reset(entityManagerProvider);
+    expect(entityManagerProvider.get()).andReturn(entityManager).anyTimes();
+    replay(entityManagerProvider);
+    injector = Guice.createInjector(new InMemoryDefaultTestModule());
+    injector.getInstance(GuiceJpaInitializer.class);
+
+    upgradeCatalogHelper = injector.getInstance(UpgradeCatalogHelper.class);
+    // inject AmbariMetaInfo to ensure that stacks get populated in the DB
+    injector.getInstance(AmbariMetaInfo.class);
+    // load the stack entity
+    StackDAO stackDAO = injector.getInstance(StackDAO.class);
+    desiredStackEntity = stackDAO.find("HDP", "2.2.0");
+  }
+
+  @After
+  public void tearDown() {
+    injector.getInstance(PersistService.class).stop();
+  }
+
+  @Test
+  public void testExecuteDMLUpdates() throws Exception {
+    Method addNewConfigurationsFromXml = AbstractUpgradeCatalog.class.getDeclaredMethod("addNewConfigurationsFromXml");
+    Method updateAlerts = UpgradeCatalog240.class.getDeclaredMethod("updateAlerts");
+
+    UpgradeCatalog240 upgradeCatalog240 = createMockBuilder(UpgradeCatalog240.class)
+        .addMockedMethod(addNewConfigurationsFromXml)
+        .addMockedMethod(updateAlerts)
+        .createMock();
+
+    upgradeCatalog240.addNewConfigurationsFromXml();
+    expectLastCall().once();
+    upgradeCatalog240.updateAlerts();
+    expectLastCall().once();
+
+    replay(upgradeCatalog240);
+
+    upgradeCatalog240.executeDMLUpdates();
+
+    verify(upgradeCatalog240);
+  }
+
+  @Test
+  public void test_addParam_ParamsNotAvailable() {
+    UpgradeCatalog240 upgradeCatalog240 = new UpgradeCatalog240(injector);
+    String inputSource = "{ \"path\" : \"test_path\", \"type\" : \"SCRIPT\"}";
+    // use a mutable list, since addParam removes entries that already exist
+    List<String> params = new ArrayList<String>(Arrays.asList("connection.timeout", "checkpoint.time.warning.threshold", "checkpoint.time.critical.threshold"));
+    String expectedSource = "{\"path\":\"test_path\",\"type\":\"SCRIPT\",\"parameters\":[{\"name\":\"connection.timeout\",\"display_name\":\"Connection Timeout\",\"value\":5.0,\"type\":\"NUMERIC\",\"description\":\"The maximum time before this alert is considered to be CRITICAL\",\"units\":\"seconds\",\"threshold\":\"CRITICAL\"},{\"name\":\"checkpoint.time.warning.threshold\",\"display_name\":\"Checkpoint Warning\",\"value\":2.0,\"type\":\"PERCENT\",\"description\":\"The percentage of the last checkpoint time greater than the interval in order to trigger a warning alert.\",\"units\":\"%\",\"threshold\":\"WARNING\"},{\"name\":\"checkpoint.time.critical.threshold\",\"display_name\":\"Checkpoint Critical\",\"value\":2.0,\"type\":\"PERCENT\",\"description\":\"The percentage of the last checkpoint time greater than the interval in order to trigger a critical alert.\",\"units\":\"%\",\"threshold\":\"CRITICAL\"}]}";
+
+    String result = upgradeCatalog240.addParam(inputSource, params);
+    Assert.assertEquals(expectedSource, result);
+  }
+
+  @Test
+  public void test_addParam_ParamsAvailableWithOneOfNeededItems() {
+    UpgradeCatalog240 upgradeCatalog240 = new UpgradeCatalog240(injector);
+    String inputSource = "{\"path\":\"test_path\",\"type\":\"SCRIPT\",\"parameters\":[{\"name\":\"connection.timeout\",\"display_name\":\"Connection Timeout\",\"value\":5.0,\"type\":\"NUMERIC\",\"description\":\"The maximum time before this alert is considered to be CRITICAL\",\"units\":\"seconds\",\"threshold\":\"CRITICAL\"}]}";
+    List<String> params = new ArrayList<String>(Arrays.asList("connection.timeout", "checkpoint.time.warning.threshold", "checkpoint.time.critical.threshold"));
+    String expectedSource = "{\"path\":\"test_path\",\"type\":\"SCRIPT\",\"parameters\":[{\"name\":\"connection.timeout\",\"display_name\":\"Connection Timeout\",\"value\":5.0,\"type\":\"NUMERIC\",\"description\":\"The maximum time before this alert is considered to be CRITICAL\",\"units\":\"seconds\",\"threshold\":\"CRITICAL\"},{\"name\":\"checkpoint.time.warning.threshold\",\"display_name\":\"Checkpoint Warning\",\"value\":2.0,\"type\":\"PERCENT\",\"description\":\"The percentage of the last checkpoint time greater than the interval in order to trigger a warning alert.\",\"units\":\"%\",\"threshold\":\"WARNING\"},{\"name\":\"checkpoint.time.critical.threshold\",\"display_name\":\"Checkpoint Critical\",\"value\":2.0,\"type\":\"PERCENT\",\"description\":\"The percentage of the last checkpoint time greater than the interval in order to trigger a critical alert.\",\"units\":\"%\",\"threshold\":\"CRITICAL\"}]}";
+
+    String result = upgradeCatalog240.addParam(inputSource, params);
+    Assert.assertEquals(expectedSource, result);
+  }
+}
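A closing note on idempotence: addParam first removes, from the requested list, every parameter name already present in the source, which is what the second test exercises. A small standalone sketch of that dedup step (the DedupSketch class and existingParamNames helper are illustrative, not the committed code):

// Sketch of addParam's dedup step: collect the names already in "parameters"
// and strip them from the list of params to add.
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class DedupSketch {
  static Set<String> existingParamNames(JsonObject source) {
    Set<String> names = new HashSet<>();
    JsonArray parameters = source.getAsJsonArray("parameters");
    if (parameters != null) {
      for (JsonElement element : parameters) {
        JsonElement name = element.getAsJsonObject().get("name");
        if (name != null && !name.isJsonNull()) {
          names.add(name.getAsString());
        }
      }
    }
    return names;
  }

  public static void main(String[] args) {
    JsonObject source = new JsonParser()
        .parse("{\"parameters\":[{\"name\":\"connection.timeout\"}]}")
        .getAsJsonObject();
    List<String> params = new ArrayList<>(
        Arrays.asList("connection.timeout", "run.directory"));
    params.removeAll(existingParamNames(source));
    System.out.println(params); // [run.directory] -- only the missing one remains
  }
}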