[1/2] ambari git commit: AMBARI-17462. Install Log search client package with Ranger Service (Part-2) (Mugdha Varadkar via gautam)
Repository: ambari Updated Branches: refs/heads/branch-2.4 7ad4e4c07 -> 71593b1d8 refs/heads/trunk 9e43840ae -> ef29683ce AMBARI-17462. Install Log search client package with Ranger Service (Part-2) (Mugdha Varadkar via gautam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/71593b1d Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/71593b1d Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/71593b1d Branch: refs/heads/branch-2.4 Commit: 71593b1d8f207b929510794fd77ab24dedaf2a35 Parents: 7ad4e4c Author: Gautam Borad Authored: Thu Jun 30 08:09:44 2016 +0530 Committer: Gautam Borad Committed: Thu Jun 30 10:52:45 2016 +0530 -- .../RANGER/0.4.0/package/scripts/params.py | 12 +- .../0.4.0/package/scripts/ranger_admin.py | 5 + .../0.4.0/package/scripts/setup_ranger_xml.py | 4 - .../stacks/2.5/RANGER/test_ranger_admin.py | 158 ++- 4 files changed, 93 insertions(+), 86 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/71593b1d/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py -- diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py index e708843..ba8d3b3 100644 --- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py +++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py @@ -269,15 +269,17 @@ logsearch_solr_hosts = default("/clusterHostInfo/logsearch_solr_hosts", []) has_logsearch = len(logsearch_solr_hosts) > 0 is_solrCloud_enabled = default('/configurations/ranger-env/is_solrCloud_enabled', False) solr_znode = '/ranger_audits' -if is_solrCloud_enabled: - solr_znode = config['configurations']['ranger-admin-site']['ranger.audit.solr.zookeepers'] +if stack_supports_logsearch_client and is_solrCloud_enabled: + solr_znode = 
default('/configurations/ranger-admin-site/ranger.audit.solr.zookeepers', 'NONE') if solr_znode != '' and solr_znode.upper() != 'NONE': -solr_znode = solr_znode.split('/')[1] -solr_znode = format('/{solr_znode}') +solr_znode = solr_znode.split('/') +if len(solr_znode) > 1 and len(solr_znode) == 2: + solr_znode = solr_znode[1] + solr_znode = format('/{solr_znode}') if has_logsearch: solr_znode = config['configurations']['logsearch-solr-env']['logsearch_solr_znode'] solr_user = default('/configurations/logsearch-solr-env/logsearch_solr_user', unix_user) -custom_log4j = True if has_logsearch else False +custom_log4j = has_logsearch # get comma separated list of zookeeper hosts zookeeper_port = default('/configurations/zoo.cfg/clientPort', None) http://git-wip-us.apache.org/repos/asf/ambari/blob/71593b1d/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py -- diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py index 6367281..c6e5b70 100644 --- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py +++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py @@ -30,6 +30,7 @@ from resource_management.core.logger import Logger from resource_management.core import shell from ranger_service import ranger_service from setup_ranger_xml import setup_ranger_audit_solr +from resource_management.libraries.functions import solr_cloud_util import upgrade import os, errno @@ -82,6 +83,10 @@ class RangerAdmin(Script): env.set_params(params) self.configure(env, upgrade_type=upgrade_type) +if params.stack_supports_logsearch_client and params.is_solrCloud_enabled: + solr_cloud_util.setup_solr_client(params.config, user = params.solr_user, custom_log4j = params.custom_log4j) + setup_ranger_audit_solr() + 
ranger_service('ranger_admin') http://git-wip-us.apache.org/repos/asf/ambari/blob/71593b1d/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py -- diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py index 23bb764..e35ea5f 100644 ---
[2/2] ambari git commit: AMBARI-17462. Install Log search client package with Ranger Service (Part-2) (Mugdha Varadkar via gautam)
AMBARI-17462. Install Log search client package with Ranger Service (Part-2) (Mugdha Varadkar via gautam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ef29683c Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ef29683c Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ef29683c Branch: refs/heads/trunk Commit: ef29683ce2cd49c0fc06baf4cba83ee39baf7997 Parents: 9e43840 Author: Gautam Borad Authored: Thu Jun 30 08:09:44 2016 +0530 Committer: Gautam Borad Committed: Thu Jun 30 10:52:56 2016 +0530 -- .../RANGER/0.4.0/package/scripts/params.py | 12 +- .../0.4.0/package/scripts/ranger_admin.py | 5 + .../0.4.0/package/scripts/setup_ranger_xml.py | 4 - .../stacks/2.5/RANGER/test_ranger_admin.py | 158 ++- 4 files changed, 93 insertions(+), 86 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/ef29683c/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py -- diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py index e708843..ba8d3b3 100644 --- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py +++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py @@ -269,15 +269,17 @@ logsearch_solr_hosts = default("/clusterHostInfo/logsearch_solr_hosts", []) has_logsearch = len(logsearch_solr_hosts) > 0 is_solrCloud_enabled = default('/configurations/ranger-env/is_solrCloud_enabled', False) solr_znode = '/ranger_audits' -if is_solrCloud_enabled: - solr_znode = config['configurations']['ranger-admin-site']['ranger.audit.solr.zookeepers'] +if stack_supports_logsearch_client and is_solrCloud_enabled: + solr_znode = default('/configurations/ranger-admin-site/ranger.audit.solr.zookeepers', 'NONE') if solr_znode != '' and solr_znode.upper() != 
'NONE': -solr_znode = solr_znode.split('/')[1] -solr_znode = format('/{solr_znode}') +solr_znode = solr_znode.split('/') +if len(solr_znode) > 1 and len(solr_znode) == 2: + solr_znode = solr_znode[1] + solr_znode = format('/{solr_znode}') if has_logsearch: solr_znode = config['configurations']['logsearch-solr-env']['logsearch_solr_znode'] solr_user = default('/configurations/logsearch-solr-env/logsearch_solr_user', unix_user) -custom_log4j = True if has_logsearch else False +custom_log4j = has_logsearch # get comma separated list of zookeeper hosts zookeeper_port = default('/configurations/zoo.cfg/clientPort', None) http://git-wip-us.apache.org/repos/asf/ambari/blob/ef29683c/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py -- diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py index 6367281..c6e5b70 100644 --- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py +++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py @@ -30,6 +30,7 @@ from resource_management.core.logger import Logger from resource_management.core import shell from ranger_service import ranger_service from setup_ranger_xml import setup_ranger_audit_solr +from resource_management.libraries.functions import solr_cloud_util import upgrade import os, errno @@ -82,6 +83,10 @@ class RangerAdmin(Script): env.set_params(params) self.configure(env, upgrade_type=upgrade_type) +if params.stack_supports_logsearch_client and params.is_solrCloud_enabled: + solr_cloud_util.setup_solr_client(params.config, user = params.solr_user, custom_log4j = params.custom_log4j) + setup_ranger_audit_solr() + ranger_service('ranger_admin') 
http://git-wip-us.apache.org/repos/asf/ambari/blob/ef29683c/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py -- diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py index 23bb764..e35ea5f 100644 --- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py +++
ambari git commit: AMBARI-17455: Auto restart flag should be set to 1 for Metrics Collector after upgrade to 2.4.0
Repository: ambari Updated Branches: refs/heads/trunk 4db43122f -> 9e43840ae AMBARI-17455: Auto restart flag should be set to 1 for Metrics Collector after upgrade to 2.4.0 Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9e43840a Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9e43840a Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9e43840a Branch: refs/heads/trunk Commit: 9e43840aeff2d0f13e3f61bb8dcb44046dd8877d Parents: 4db4312 Author: Nahappan Somasundaram Authored: Tue Jun 28 13:06:22 2016 -0700 Committer: Nahappan Somasundaram Committed: Wed Jun 29 21:27:10 2016 -0700 -- .../server/configuration/Configuration.java | 71 +++ .../server/upgrade/UpgradeCatalog240.java | 122 +++ .../server/upgrade/UpgradeCatalog240Test.java | 3 + 3 files changed, 196 insertions(+) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/9e43840a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java index 153289e..399f26c 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java @@ -419,6 +419,17 @@ public class Configuration { public static final String KERBEROS_CHECK_JAAS_CONFIGURATION_DEFAULT = "false"; /** + * Recovery related configuration + */ + public static final String RECOVERY_TYPE_KEY = "recovery.type"; + public static final String RECOVERY_LIFETIME_MAX_COUNT_KEY = "recovery.lifetime_max_count"; + public static final String RECOVERY_MAX_COUNT_KEY = "recovery.max_count"; + public static final String RECOVERY_WINDOW_IN_MIN_KEY = "recovery.window_in_minutes"; + public static final String RECOVERY_RETRY_GAP_KEY = "recovery.retry_interval"; + 
public static final String RECOVERY_DISABLED_COMPONENTS_KEY = "recovery.disabled_components"; + public static final String RECOVERY_ENABLED_COMPONENTS_KEY = "recovery.enabled_components"; + + /** * Allow proxy calls to these hosts and ports only */ public static final String PROXY_ALLOWED_HOST_PORTS = "proxy.allowed.hostports"; @@ -2635,6 +2646,66 @@ public class Configuration { } /** + * Get the node recovery type DEFAULT|AUTO_START|FULL + * @return + */ + public String getNodeRecoveryType() { +return properties.getProperty(RECOVERY_TYPE_KEY); + } + + /** + * Get configured max count of recovery attempt allowed per host component in a window + * This is reset when agent is restarted. + * @return + */ + public String getNodeRecoveryMaxCount() { +return properties.getProperty(RECOVERY_MAX_COUNT_KEY); + } + + /** + * Get configured max lifetime count of recovery attempt allowed per host component. + * This is reset when agent is restarted. + * @return + */ + public String getNodeRecoveryLifetimeMaxCount() { +return properties.getProperty(RECOVERY_LIFETIME_MAX_COUNT_KEY); + } + + /** + * Get configured window size in minutes + * @return + */ + public String getNodeRecoveryWindowInMin() { +return properties.getProperty(RECOVERY_WINDOW_IN_MIN_KEY); + } + + /** + * Get the components for which recovery is disabled + * @return + */ + public String getRecoveryDisabledComponents() { +return properties.getProperty(RECOVERY_DISABLED_COMPONENTS_KEY); + } + + /** + * Get the components for which recovery is enabled + * @return + */ + public String getRecoveryEnabledComponents() { +return properties.getProperty(RECOVERY_ENABLED_COMPONENTS_KEY); + } + + /** + * Get the configured retry gap between tries per host component + * @return + */ + public String getNodeRecoveryRetryGap() { +return properties.getProperty(RECOVERY_RETRY_GAP_KEY); + } + + /** + + /** * Gets the default KDC port to use when no port is specified in KDC hostname * * @return the default KDC port to use. 
http://git-wip-us.apache.org/repos/asf/ambari/blob/9e43840a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java index 18dd877..7ef12a7 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
ambari git commit: AMBARI-17443. Allow commands to specify if they should be auto-retried upon failure (smohanty)
Repository: ambari Updated Branches: refs/heads/branch-2.4 3fdf16ed6 -> 7ad4e4c07 AMBARI-17443. Allow commands to specify if they should be auto-retried upon failure (smohanty) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7ad4e4c0 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7ad4e4c0 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7ad4e4c0 Branch: refs/heads/branch-2.4 Commit: 7ad4e4c07f6edc7162597d1377375ddaad28d8d4 Parents: 3fdf16e Author: Sumit Mohanty Authored: Wed Jun 29 17:10:06 2016 -0700 Committer: Sumit Mohanty Committed: Wed Jun 29 17:10:55 2016 -0700 -- .../AmbariCustomCommandExecutionHelper.java | 15 .../ambari/server/state/ConfigHelper.java | 3 +- .../AmbariManagementControllerTest.java | 38 3 files changed, 55 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/7ad4e4c0/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java index 9526077..8bb6225 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java @@ -72,6 +72,7 @@ import org.apache.ambari.server.state.stack.OsFamily; import org.apache.ambari.server.state.svccomphost.ServiceComponentHostOpInProgressEvent; import org.apache.ambari.server.utils.StageUtils; import org.apache.commons.lang.StringUtils; +import org.apache.commons.lang.math.NumberUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -990,6 +991,20 @@ public class AmbariCustomCommandExecutionHelper { 
actionExecutionContext.getParameters().put(KeyNames.REFRESH_ADITIONAL_COMPONENT_TAGS, requestParams.get(KeyNames.REFRESH_ADITIONAL_COMPONENT_TAGS)); } +// If command should be retried upon failure then add the option and also the default duration for retry +if (requestParams.containsKey(KeyNames.COMMAND_RETRY_ENABLED)) { + extraParams.put(KeyNames.COMMAND_RETRY_ENABLED, requestParams.get(KeyNames.COMMAND_RETRY_ENABLED)); + String commandRetryDuration = ConfigHelper.COMMAND_RETRY_MAX_TIME_IN_SEC_DEFAULT; + if (requestParams.containsKey(KeyNames.MAX_DURATION_OF_RETRIES)) { +String commandRetryDurationStr = requestParams.get(KeyNames.MAX_DURATION_OF_RETRIES); +Integer commandRetryDurationInt = NumberUtils.toInt(commandRetryDurationStr, 0); +if (commandRetryDurationInt > 0) { + commandRetryDuration = Integer.toString(commandRetryDurationInt); +} + } + extraParams.put(KeyNames.MAX_DURATION_OF_RETRIES, commandRetryDuration); +} + if(requestParams.containsKey(KeyNames.REFRESH_CONFIG_TAGS_BEFORE_EXECUTION)){ actionExecutionContext.getParameters().put(KeyNames.REFRESH_CONFIG_TAGS_BEFORE_EXECUTION, requestParams.get(KeyNames.REFRESH_CONFIG_TAGS_BEFORE_EXECUTION)); } http://git-wip-us.apache.org/repos/asf/ambari/blob/7ad4e4c0/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java index ecb4a75..46a3f3e 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java @@ -101,6 +101,7 @@ public class ConfigHelper { public static final String CLUSTER_ENV_RETRY_ENABLED = "command_retry_enabled"; public static final String CLUSTER_ENV_RETRY_COMMANDS = "commands_to_retry"; public static final String CLUSTER_ENV_RETRY_MAX_TIME_IN_SEC = "command_retry_max_time_in_sec"; + public static 
final String COMMAND_RETRY_MAX_TIME_IN_SEC_DEFAULT = "600"; public static final String CLUSTER_ENV_STACK_FEATURES_PROPERTY = "stack_features"; public static final String CLUSTER_ENV_STACK_TOOLS_PROPERTY = "stack_tools"; @@ -565,7 +566,7 @@ public class ConfigHelper { */ public Set findConfigTypesByPropertyName(StackId stackId, String propertyName, String clusterName) throws AmbariException { StackInfo stack =
ambari git commit: AMBARI-17443. Allow commands to specify if they should be auto-retried upon failure (smohanty)
Repository: ambari Updated Branches: refs/heads/trunk 850a29593 -> 4db43122f AMBARI-17443. Allow commands to specify if they should be auto-retried upon failure (smohanty) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4db43122 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4db43122 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4db43122 Branch: refs/heads/trunk Commit: 4db43122f20851bb59893b3575c3a590e6da8cc5 Parents: 850a295 Author: Sumit Mohanty Authored: Wed Jun 29 17:10:06 2016 -0700 Committer: Sumit Mohanty Committed: Wed Jun 29 17:10:06 2016 -0700 -- .../AmbariCustomCommandExecutionHelper.java | 15 .../ambari/server/state/ConfigHelper.java | 3 +- .../AmbariManagementControllerTest.java | 38 3 files changed, 55 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/4db43122/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java index 9526077..8bb6225 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java @@ -72,6 +72,7 @@ import org.apache.ambari.server.state.stack.OsFamily; import org.apache.ambari.server.state.svccomphost.ServiceComponentHostOpInProgressEvent; import org.apache.ambari.server.utils.StageUtils; import org.apache.commons.lang.StringUtils; +import org.apache.commons.lang.math.NumberUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -990,6 +991,20 @@ public class AmbariCustomCommandExecutionHelper { 
actionExecutionContext.getParameters().put(KeyNames.REFRESH_ADITIONAL_COMPONENT_TAGS, requestParams.get(KeyNames.REFRESH_ADITIONAL_COMPONENT_TAGS)); } +// If command should be retried upon failure then add the option and also the default duration for retry +if (requestParams.containsKey(KeyNames.COMMAND_RETRY_ENABLED)) { + extraParams.put(KeyNames.COMMAND_RETRY_ENABLED, requestParams.get(KeyNames.COMMAND_RETRY_ENABLED)); + String commandRetryDuration = ConfigHelper.COMMAND_RETRY_MAX_TIME_IN_SEC_DEFAULT; + if (requestParams.containsKey(KeyNames.MAX_DURATION_OF_RETRIES)) { +String commandRetryDurationStr = requestParams.get(KeyNames.MAX_DURATION_OF_RETRIES); +Integer commandRetryDurationInt = NumberUtils.toInt(commandRetryDurationStr, 0); +if (commandRetryDurationInt > 0) { + commandRetryDuration = Integer.toString(commandRetryDurationInt); +} + } + extraParams.put(KeyNames.MAX_DURATION_OF_RETRIES, commandRetryDuration); +} + if(requestParams.containsKey(KeyNames.REFRESH_CONFIG_TAGS_BEFORE_EXECUTION)){ actionExecutionContext.getParameters().put(KeyNames.REFRESH_CONFIG_TAGS_BEFORE_EXECUTION, requestParams.get(KeyNames.REFRESH_CONFIG_TAGS_BEFORE_EXECUTION)); } http://git-wip-us.apache.org/repos/asf/ambari/blob/4db43122/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java index ecb4a75..46a3f3e 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java @@ -101,6 +101,7 @@ public class ConfigHelper { public static final String CLUSTER_ENV_RETRY_ENABLED = "command_retry_enabled"; public static final String CLUSTER_ENV_RETRY_COMMANDS = "commands_to_retry"; public static final String CLUSTER_ENV_RETRY_MAX_TIME_IN_SEC = "command_retry_max_time_in_sec"; + public static 
final String COMMAND_RETRY_MAX_TIME_IN_SEC_DEFAULT = "600"; public static final String CLUSTER_ENV_STACK_FEATURES_PROPERTY = "stack_features"; public static final String CLUSTER_ENV_STACK_TOOLS_PROPERTY = "stack_tools"; @@ -565,7 +566,7 @@ public class ConfigHelper { */ public Set findConfigTypesByPropertyName(StackId stackId, String propertyName, String clusterName) throws AmbariException { StackInfo stack =
[2/3] ambari git commit: AMBARI-17488. [Grafana] Add Dashboards for HDF Stack. (Prajwal Rao via yusaku)
http://git-wip-us.apache.org/repos/asf/ambari/blob/850a2959/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-kafka-hosts.json -- diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-kafka-hosts.json b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-kafka-hosts.json new file mode 100644 index 000..dc38595 --- /dev/null +++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-kafka-hosts.json @@ -0,0 +1,1991 @@ +{ + "id": null, + "title": "Kafka - Hosts", + "originalTitle": "Kafka - Hosts", + "tags": [ +"kafka" + ], + "style": "dark", + "timezone": "browser", + "editable": true, + "hideControls": false, + "sharedCrosshair": false, + "rows": [ +{ + "collapse": false, + "editable": true, + "height": "25px", + "panels": [ +{ + "content": "Metrics for the Kafka cluster on a per broker level. Click on each row title to expand on demand to look at various metrics. \nThis dashboard is managed by Ambari. You may lose any changes made to this dashboard. 
If you want to customize, make your own copy.", + "editable": true, + "error": false, + "id": 18, + "isNew": true, + "links": [], + "mode": "html", + "span": 12, + "style": {}, + "title": "", + "type": "text" +} + ], + "title": "New row" +}, +{ + "collapse": true, + "editable": true, + "height": "250px", + "panels": [ +{ + "aliasColors": {}, + "bars": false, + "datasource": null, + "editable": true, + "error": false, + "fill": 1, + "grid": { +"leftLogBase": 1, +"leftMax": null, +"leftMin": null, +"rightLogBase": 1, +"rightMax": null, +"rightMin": null, +"threshold1": null, +"threshold1Color": "rgba(216, 200, 27, 0.27)", +"threshold2": null, +"threshold2Color": "rgba(234, 112, 112, 0.22)" + }, + "id": 5, + "isNew": true, + "legend": { +"avg": false, +"current": false, +"max": false, +"min": false, +"show": false, +"total": false, +"values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "span": 4, + "stack": false, + "steppedLine": false, + "targets": [ +{ + "aggregator": "avg", + "alias": "Bytes In", + "app": "kafka_broker", + "downsampleAggregator": "avg", + "errors": {}, + "metric": "kafka.server.BrokerTopicMetrics.BytesInPerSec.count", + "precision": "default", + "refId": "A", + "templatedHost": "", + "transform": "none", + "transformData": "none" +}, +{ + "aggregator": "avg", + "alias": "Bytes Out", + "app": "kafka_broker", + "downsampleAggregator": "avg", + "errors": {}, + "metric": "kafka.server.BrokerTopicMetrics.BytesOutPerSec.count", + "precision": "default", + "refId": "B", + "templatedHost": "", + "transform": "none", + "transformData": "none" +} + ], + "timeFrom": null, + "timeShift": null, + "title": "Bytes In & Out /s", + "tooltip": { +"shared": false, +"value_type": "cumulative" + }, + "type": "graph", + "x-axis": true, + "y-axis": true, + "y_formats": [ +"bytes", +"short" + ] +}, +{ + 
"aliasColors": {}, + "bars": false, + "datasource": null, + "editable": true, + "error": false, + "fill": 1, + "grid": { +"leftLogBase": 1, +"leftMax": null, +"leftMin": null, +"rightLogBase": 1, +"rightMax": null, +"rightMin": null, +"threshold1": null, +"threshold1Color": "rgba(216, 200, 27, 0.27)", +"threshold2": null, +"threshold2Color": "rgba(234, 112, 112, 0.22)" + }, +
[3/3] ambari git commit: AMBARI-17488. [Grafana] Add Dashboards for HDF Stack. (Prajwal Rao via yusaku)
AMBARI-17488. [Grafana] Add Dashboards for HDF Stack. (Prajwal Rao via yusaku) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/850a2959 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/850a2959 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/850a2959 Branch: refs/heads/trunk Commit: 850a29593c6328d20d93fb143eb11ca207c05342 Parents: 2e8bfaa Author: Yusaku Sako Authored: Wed Jun 29 15:49:56 2016 -0700 Committer: Yusaku Sako Committed: Wed Jun 29 15:50:39 2016 -0700 -- .../HDF/grafana-kafka-home.json | 1089 ++ .../HDF/grafana-kafka-hosts.json| 1991 ++ .../HDF/grafana-kafka-topics.json | 521 + .../HDF/grafana-nifi-home.json | 940 + 4 files changed, 4541 insertions(+) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/850a2959/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-kafka-home.json -- diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-kafka-home.json b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-kafka-home.json new file mode 100644 index 000..b754231 --- /dev/null +++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-kafka-home.json @@ -0,0 +1,1089 @@ +{ + "id": null, + "title": "Kafka - Home", + "originalTitle": "Kafka - Home", + "tags": [ +"kafka" + ], + "style": "dark", + "timezone": "browser", + "editable": true, + "hideControls": false, + "sharedCrosshair": false, + "rows": [ +{ + "collapse": false, + "editable": true, + "height": "25px", + "panels": [ +{ + "content": "Metrics to see the overall status for the Kafka cluster. Click on each row title to expand on demand to look at various metrics. \nThis dashboard is managed by Ambari. You may lose any changes made to this dashboard. 
If you want to customize, make your own copy.", + "editable": true, + "error": false, + "id": 15, + "isNew": true, + "links": [], + "mode": "html", + "span": 12, + "style": {}, + "title": "", + "type": "text" +} + ], + "title": "New row" +}, +{ + "collapse": false, + "editable": true, + "height": "250px", + "panels": [ +{ + "aliasColors": {}, + "bars": false, + "datasource": null, + "editable": true, + "error": false, + "fill": 1, + "grid": { +"leftLogBase": 1, +"leftMax": null, +"leftMin": null, +"rightLogBase": 1, +"rightMax": null, +"rightMin": null, +"threshold1": null, +"threshold1Color": "rgba(216, 200, 27, 0.27)", +"threshold2": null, +"threshold2Color": "rgba(234, 112, 112, 0.22)" + }, + "id": 5, + "isNew": true, + "legend": { +"avg": false, +"current": false, +"max": false, +"min": false, +"show": true, +"total": false, +"values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "span": 6, + "stack": false, + "steppedLine": false, + "targets": [ +{ + "aggregator": "avg", + "alias": "Bytes In", + "app": "kafka_broker", + "downsampleAggregator": "avg", + "errors": {}, + "metric": "kafka.server.BrokerTopicMetrics.BytesInPerSec.count", + "precision": "default", + "refId": "A", + "transform": "none", + "transformData": "none" +}, +{ + "aggregator": "avg", + "alias": "Bytes Out", + "app": "kafka_broker", + "downsampleAggregator": "avg", + "errors": {}, + "metric": "kafka.server.BrokerTopicMetrics.BytesOutPerSec.count", + "precision": "default", + "refId": "B", + "transform": "none", + "transformData": "none" +
[1/3] ambari git commit: AMBARI-17488. [Grafana] Add Dashboards for HDF Stack. (Prajwal Rao via yusaku)
Repository: ambari Updated Branches: refs/heads/trunk 2e8bfaa97 -> 850a29593 http://git-wip-us.apache.org/repos/asf/ambari/blob/850a2959/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-nifi-home.json -- diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-nifi-home.json b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-nifi-home.json new file mode 100644 index 000..68e1393 --- /dev/null +++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-nifi-home.json @@ -0,0 +1,940 @@ +{ + "id": null, + "title": "NiFi - Home", + "originalTitle": "NiFi - Home", + "tags": [ +"nifi" + ], + "style": "dark", + "timezone": "browser", + "editable": true, + "hideControls": false, + "sharedCrosshair": false, + "rows": [ +{ + "collapse": false, + "editable": true, + "height": "25px", + "panels": [ +{ + "content": "NiFi Metrics for the cluster. Click on each row title to expand on demand to look at various metrics. \nThis dashboard is managed by Ambari. You may lose any changes made to this dashboard. 
If you want to customize, make your own copy.", + "editable": true, + "error": false, + "id": 13, + "isNew": true, + "links": [], + "mode": "html", + "span": 12, + "style": {}, + "title": "", + "type": "text" +} + ], + "title": "New row" +}, +{ + "collapse": true, + "editable": true, + "height": "200px", + "panels": [ +{ + "aliasColors": {}, + "bars": false, + "datasource": null, + "editable": true, + "error": false, + "fill": 1, + "grid": { +"leftLogBase": 1, +"leftMax": null, +"leftMin": null, +"rightLogBase": 1, +"rightMax": null, +"rightMin": null, +"threshold1": null, +"threshold1Color": "rgba(216, 200, 27, 0.27)", +"threshold2": null, +"threshold2Color": "rgba(234, 112, 112, 0.22)" + }, + "id": 10, + "isNew": true, + "legend": { +"avg": false, +"current": false, +"max": false, +"min": false, +"show": true, +"total": false, +"values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "span": 4, + "stack": false, + "steppedLine": false, + "targets": [ +{ + "aggregator": "avg", + "app": "nifi", + "downsampleAggregator": "avg", + "errors": {}, + "metric": "jvm.heap_usage", + "precision": "default", + "refId": "A", + "transform": "none" +} + ], + "timeFrom": null, + "timeShift": null, + "title": "JVM Heap Usage", + "tooltip": { +"shared": true, +"value_type": "cumulative" + }, + "type": "graph", + "x-axis": true, + "y-axis": true, + "y_formats": [ +"percentunit", +"short" + ] +}, +{ + "aliasColors": {}, + "bars": false, + "datasource": null, + "editable": true, + "error": false, + "fill": 1, + "grid": { +"leftLogBase": 1, +"leftMax": null, +"leftMin": null, +"rightLogBase": 1, +"rightMax": null, +"rightMin": null, +"threshold1": null, +"threshold1Color": "rgba(216, 200, 27, 0.27)", +"threshold2": null, +"threshold2Color": "rgba(234, 112, 112, 0.22)" + }, + "id": 11, + "isNew": true, + "legend": { +"avg": false, 
+"current": false, +"max": false, +"min": false, +"show": true, +"total": false, +"values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "span": 4, +
[1/3] ambari git commit: AMBARI-17488. [Grafana] Add Dashboards for HDF Stack. (Prajwal Rao via yusaku)
Repository: ambari Updated Branches: refs/heads/branch-2.4 0b8e52274 -> 3fdf16ed6 http://git-wip-us.apache.org/repos/asf/ambari/blob/3fdf16ed/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-nifi-home.json -- diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-nifi-home.json b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-nifi-home.json new file mode 100644 index 000..68e1393 --- /dev/null +++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-nifi-home.json @@ -0,0 +1,940 @@ +{ + "id": null, + "title": "NiFi - Home", + "originalTitle": "NiFi - Home", + "tags": [ +"nifi" + ], + "style": "dark", + "timezone": "browser", + "editable": true, + "hideControls": false, + "sharedCrosshair": false, + "rows": [ +{ + "collapse": false, + "editable": true, + "height": "25px", + "panels": [ +{ + "content": "NiFi Metrics for the cluster. Click on each row title to expand on demand to look at various metrics. \nThis dashboard is managed by Ambari. You may lose any changes made to this dashboard. 
If you want to customize, make your own copy.", + "editable": true, + "error": false, + "id": 13, + "isNew": true, + "links": [], + "mode": "html", + "span": 12, + "style": {}, + "title": "", + "type": "text" +} + ], + "title": "New row" +}, +{ + "collapse": true, + "editable": true, + "height": "200px", + "panels": [ +{ + "aliasColors": {}, + "bars": false, + "datasource": null, + "editable": true, + "error": false, + "fill": 1, + "grid": { +"leftLogBase": 1, +"leftMax": null, +"leftMin": null, +"rightLogBase": 1, +"rightMax": null, +"rightMin": null, +"threshold1": null, +"threshold1Color": "rgba(216, 200, 27, 0.27)", +"threshold2": null, +"threshold2Color": "rgba(234, 112, 112, 0.22)" + }, + "id": 10, + "isNew": true, + "legend": { +"avg": false, +"current": false, +"max": false, +"min": false, +"show": true, +"total": false, +"values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "span": 4, + "stack": false, + "steppedLine": false, + "targets": [ +{ + "aggregator": "avg", + "app": "nifi", + "downsampleAggregator": "avg", + "errors": {}, + "metric": "jvm.heap_usage", + "precision": "default", + "refId": "A", + "transform": "none" +} + ], + "timeFrom": null, + "timeShift": null, + "title": "JVM Heap Usage", + "tooltip": { +"shared": true, +"value_type": "cumulative" + }, + "type": "graph", + "x-axis": true, + "y-axis": true, + "y_formats": [ +"percentunit", +"short" + ] +}, +{ + "aliasColors": {}, + "bars": false, + "datasource": null, + "editable": true, + "error": false, + "fill": 1, + "grid": { +"leftLogBase": 1, +"leftMax": null, +"leftMin": null, +"rightLogBase": 1, +"rightMax": null, +"rightMin": null, +"threshold1": null, +"threshold1Color": "rgba(216, 200, 27, 0.27)", +"threshold2": null, +"threshold2Color": "rgba(234, 112, 112, 0.22)" + }, + "id": 11, + "isNew": true, + "legend": { +"avg": false, 
+"current": false, +"max": false, +"min": false, +"show": true, +"total": false, +"values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "span": 4, +
[2/3] ambari git commit: AMBARI-17488. [Grafana] Add Dashboards for HDF Stack. (Prajwal Rao via yusaku)
http://git-wip-us.apache.org/repos/asf/ambari/blob/3fdf16ed/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-kafka-hosts.json -- diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-kafka-hosts.json b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-kafka-hosts.json new file mode 100644 index 000..dc38595 --- /dev/null +++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-kafka-hosts.json @@ -0,0 +1,1991 @@ +{ + "id": null, + "title": "Kafka - Hosts", + "originalTitle": "Kafka - Hosts", + "tags": [ +"kafka" + ], + "style": "dark", + "timezone": "browser", + "editable": true, + "hideControls": false, + "sharedCrosshair": false, + "rows": [ +{ + "collapse": false, + "editable": true, + "height": "25px", + "panels": [ +{ + "content": "Metrics for the Kafka cluster on a per broker level. Click on each row title to expand on demand to look at various metrics. \nThis dashboard is managed by Ambari. You may lose any changes made to this dashboard. 
If you want to customize, make your own copy.", + "editable": true, + "error": false, + "id": 18, + "isNew": true, + "links": [], + "mode": "html", + "span": 12, + "style": {}, + "title": "", + "type": "text" +} + ], + "title": "New row" +}, +{ + "collapse": true, + "editable": true, + "height": "250px", + "panels": [ +{ + "aliasColors": {}, + "bars": false, + "datasource": null, + "editable": true, + "error": false, + "fill": 1, + "grid": { +"leftLogBase": 1, +"leftMax": null, +"leftMin": null, +"rightLogBase": 1, +"rightMax": null, +"rightMin": null, +"threshold1": null, +"threshold1Color": "rgba(216, 200, 27, 0.27)", +"threshold2": null, +"threshold2Color": "rgba(234, 112, 112, 0.22)" + }, + "id": 5, + "isNew": true, + "legend": { +"avg": false, +"current": false, +"max": false, +"min": false, +"show": false, +"total": false, +"values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "span": 4, + "stack": false, + "steppedLine": false, + "targets": [ +{ + "aggregator": "avg", + "alias": "Bytes In", + "app": "kafka_broker", + "downsampleAggregator": "avg", + "errors": {}, + "metric": "kafka.server.BrokerTopicMetrics.BytesInPerSec.count", + "precision": "default", + "refId": "A", + "templatedHost": "", + "transform": "none", + "transformData": "none" +}, +{ + "aggregator": "avg", + "alias": "Bytes Out", + "app": "kafka_broker", + "downsampleAggregator": "avg", + "errors": {}, + "metric": "kafka.server.BrokerTopicMetrics.BytesOutPerSec.count", + "precision": "default", + "refId": "B", + "templatedHost": "", + "transform": "none", + "transformData": "none" +} + ], + "timeFrom": null, + "timeShift": null, + "title": "Bytes In & Out /s", + "tooltip": { +"shared": false, +"value_type": "cumulative" + }, + "type": "graph", + "x-axis": true, + "y-axis": true, + "y_formats": [ +"bytes", +"short" + ] +}, +{ + 
"aliasColors": {}, + "bars": false, + "datasource": null, + "editable": true, + "error": false, + "fill": 1, + "grid": { +"leftLogBase": 1, +"leftMax": null, +"leftMin": null, +"rightLogBase": 1, +"rightMax": null, +"rightMin": null, +"threshold1": null, +"threshold1Color": "rgba(216, 200, 27, 0.27)", +"threshold2": null, +"threshold2Color": "rgba(234, 112, 112, 0.22)" + }, +
[3/3] ambari git commit: AMBARI-17488. [Grafana] Add Dashboards for HDF Stack. (Prajwal Rao via yusaku)
AMBARI-17488. [Grafana] Add Dashboards for HDF Stack. (Prajwal Rao via yusaku) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3fdf16ed Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3fdf16ed Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3fdf16ed Branch: refs/heads/branch-2.4 Commit: 3fdf16ed60a994cef15bfb0cb21c017a5a8286d4 Parents: 0b8e522 Author: Yusaku SakoAuthored: Wed Jun 29 15:49:56 2016 -0700 Committer: Yusaku Sako Committed: Wed Jun 29 15:49:56 2016 -0700 -- .../HDF/grafana-kafka-home.json | 1089 ++ .../HDF/grafana-kafka-hosts.json| 1991 ++ .../HDF/grafana-kafka-topics.json | 521 + .../HDF/grafana-nifi-home.json | 940 + 4 files changed, 4541 insertions(+) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/3fdf16ed/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-kafka-home.json -- diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-kafka-home.json b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-kafka-home.json new file mode 100644 index 000..b754231 --- /dev/null +++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-kafka-home.json @@ -0,0 +1,1089 @@ +{ + "id": null, + "title": "Kafka - Home", + "originalTitle": "Kafka - Home", + "tags": [ +"kafka" + ], + "style": "dark", + "timezone": "browser", + "editable": true, + "hideControls": false, + "sharedCrosshair": false, + "rows": [ +{ + "collapse": false, + "editable": true, + "height": "25px", + "panels": [ +{ + "content": "Metrics to see the overall status for the Kafka cluster. Click on each row title to expand on demand to look at various metrics. \nThis dashboard is managed by Ambari. You may lose any changes made to this dashboard. 
If you want to customize, make your own copy.", + "editable": true, + "error": false, + "id": 15, + "isNew": true, + "links": [], + "mode": "html", + "span": 12, + "style": {}, + "title": "", + "type": "text" +} + ], + "title": "New row" +}, +{ + "collapse": false, + "editable": true, + "height": "250px", + "panels": [ +{ + "aliasColors": {}, + "bars": false, + "datasource": null, + "editable": true, + "error": false, + "fill": 1, + "grid": { +"leftLogBase": 1, +"leftMax": null, +"leftMin": null, +"rightLogBase": 1, +"rightMax": null, +"rightMin": null, +"threshold1": null, +"threshold1Color": "rgba(216, 200, 27, 0.27)", +"threshold2": null, +"threshold2Color": "rgba(234, 112, 112, 0.22)" + }, + "id": 5, + "isNew": true, + "legend": { +"avg": false, +"current": false, +"max": false, +"min": false, +"show": true, +"total": false, +"values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "span": 6, + "stack": false, + "steppedLine": false, + "targets": [ +{ + "aggregator": "avg", + "alias": "Bytes In", + "app": "kafka_broker", + "downsampleAggregator": "avg", + "errors": {}, + "metric": "kafka.server.BrokerTopicMetrics.BytesInPerSec.count", + "precision": "default", + "refId": "A", + "transform": "none", + "transformData": "none" +}, +{ + "aggregator": "avg", + "alias": "Bytes Out", + "app": "kafka_broker", + "downsampleAggregator": "avg", + "errors": {}, + "metric": "kafka.server.BrokerTopicMetrics.BytesOutPerSec.count", + "precision": "default", + "refId": "B", + "transform": "none", + "transformData": "none" +
ambari git commit: AMBARI-17221. Smart configs for Atlas to setup authentication as either LDAP or AD (alejandro)
Repository: ambari Updated Branches: refs/heads/branch-2.4 8964b5a9b -> 0b8e52274 AMBARI-17221. Smart configs for Atlas to setup authentication as either LDAP or AD (alejandro) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0b8e5227 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0b8e5227 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0b8e5227 Branch: refs/heads/branch-2.4 Commit: 0b8e522745a9b308d894724a3065623635c77cbe Parents: 8964b5a Author: Alejandro FernandezAuthored: Wed Jun 29 15:04:33 2016 -0700 Committer: Alejandro Fernandez Committed: Wed Jun 29 15:04:33 2016 -0700 -- .../configuration/application-properties.xml| 159 - .../ATLAS/0.1.0.2.3/metainfo.xml| 7 + .../ATLAS/0.1.0.2.3/themes/theme.json | 619 +++ .../0.5.0/configuration/ranger-admin-site.xml | 4 +- .../configuration/application-properties.xml| 20 +- .../stacks/HDP/2.5/services/stack_advisor.py| 39 ++ 6 files changed, 838 insertions(+), 10 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/0b8e5227/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/configuration/application-properties.xml -- diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/configuration/application-properties.xml b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/configuration/application-properties.xml index 25df1c5..1437251 100644 --- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/configuration/application-properties.xml +++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/configuration/application-properties.xml @@ -230,4 +230,161 @@ - + + + +atlas.authentication.method.ldap.type + +The LDAP type (ldap or ad). + + false + value-list + + + ldap + LDAP + + + ad + AD + + + 1 + + + + + + +atlas.authentication.method.ldap.url + +The LDAP URL. + + + +atlas.authentication.method.ldap.userDNpattern +uid= +User DN Pattern. 
This pattern is used to create a distinguished name (DN) for a user during login + + + +atlas.authentication.method.ldap.groupSearchBase + + + + + +atlas.authentication.method.ldap.groupSearchFilter + + + + + +atlas.authentication.method.ldap.groupRoleAttribute +cn + + + + +atlas.authentication.method.ldap.base.dn + +The Distinguished Name (DN) of the starting point for directory server searches. + + + +atlas.authentication.method.ldap.bind.dn + +Full distinguished name (DN), including common name (CN), of an LDAP user account that has privileges to search. + + + +atlas.authentication.method.ldap.bind.password +admin +PASSWORD +Password for the account that can search + + password + false + + + + +atlas.authentication.method.ldap.referral +ignore +Set to follow if multiple LDAP servers are configured to return continuation references for results. Set to ignore (default) if no referrals should be followed. Possible values are follow|throw|ignore + + + +atlas.authentication.method.ldap.user.searchfilter + + + + + +atlas.authentication.method.ldap.default.role +ROLE_USER + + + + + + + +atlas.authentication.method.ldap.ad.domain +Domain Name (Only for AD) + +AD domain, only used if Authentication method is AD + + + +atlas.authentication.method.ldap.ad.url + +AD URL, only used if Authentication method is AD + + + +atlas.authentication.method.ldap.ad.base.dn + +The Distinguished Name (DN) of the starting point for directory server searches. + + + +atlas.authentication.method.ldap.ad.bind.dn + +Full distinguished name (DN), including common name (CN), of an LDAP user account that has privileges to search. + + + +atlas.authentication.method.ldap.ad.bind.password +admin +PASSWORD +Password for the account that can search + + password + false + + + + +atlas.authentication.method.ldap.ad.referral +ignore +Set to follow if multiple AD servers are configured to return continuation references for results. Set to ignore (default) if no referrals should be followed. 
Possible values are follow|throw|ignore + + + +atlas.authentication.method.ldap.ad.user.searchfilter +
ambari git commit: AMBARI-17221. Smart configs for Atlas to setup authentication as either LDAP or AD (alejandro)
Repository: ambari Updated Branches: refs/heads/trunk 278e442ad -> 2e8bfaa97 AMBARI-17221. Smart configs for Atlas to setup authentication as either LDAP or AD (alejandro) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2e8bfaa9 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2e8bfaa9 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2e8bfaa9 Branch: refs/heads/trunk Commit: 2e8bfaa97a9d69cc55ebb10373f1efa941a7adac Parents: 278e442 Author: Alejandro FernandezAuthored: Wed Jun 29 14:56:26 2016 -0700 Committer: Alejandro Fernandez Committed: Wed Jun 29 14:56:26 2016 -0700 -- .../configuration/application-properties.xml| 159 - .../ATLAS/0.1.0.2.3/metainfo.xml| 7 + .../ATLAS/0.1.0.2.3/themes/theme.json | 619 +++ .../0.5.0/configuration/ranger-admin-site.xml | 4 +- .../configuration/application-properties.xml| 20 +- .../stacks/HDP/2.5/services/stack_advisor.py| 39 ++ 6 files changed, 838 insertions(+), 10 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/2e8bfaa9/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/configuration/application-properties.xml -- diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/configuration/application-properties.xml b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/configuration/application-properties.xml index 25df1c5..1437251 100644 --- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/configuration/application-properties.xml +++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/configuration/application-properties.xml @@ -230,4 +230,161 @@ - + + + +atlas.authentication.method.ldap.type + +The LDAP type (ldap or ad). + + false + value-list + + + ldap + LDAP + + + ad + AD + + + 1 + + + + + + +atlas.authentication.method.ldap.url + +The LDAP URL. + + + +atlas.authentication.method.ldap.userDNpattern +uid= +User DN Pattern. 
This pattern is used to create a distinguished name (DN) for a user during login + + + +atlas.authentication.method.ldap.groupSearchBase + + + + + +atlas.authentication.method.ldap.groupSearchFilter + + + + + +atlas.authentication.method.ldap.groupRoleAttribute +cn + + + + +atlas.authentication.method.ldap.base.dn + +The Distinguished Name (DN) of the starting point for directory server searches. + + + +atlas.authentication.method.ldap.bind.dn + +Full distinguished name (DN), including common name (CN), of an LDAP user account that has privileges to search. + + + +atlas.authentication.method.ldap.bind.password +admin +PASSWORD +Password for the account that can search + + password + false + + + + +atlas.authentication.method.ldap.referral +ignore +Set to follow if multiple LDAP servers are configured to return continuation references for results. Set to ignore (default) if no referrals should be followed. Possible values are follow|throw|ignore + + + +atlas.authentication.method.ldap.user.searchfilter + + + + + +atlas.authentication.method.ldap.default.role +ROLE_USER + + + + + + + +atlas.authentication.method.ldap.ad.domain +Domain Name (Only for AD) + +AD domain, only used if Authentication method is AD + + + +atlas.authentication.method.ldap.ad.url + +AD URL, only used if Authentication method is AD + + + +atlas.authentication.method.ldap.ad.base.dn + +The Distinguished Name (DN) of the starting point for directory server searches. + + + +atlas.authentication.method.ldap.ad.bind.dn + +Full distinguished name (DN), including common name (CN), of an LDAP user account that has privileges to search. + + + +atlas.authentication.method.ldap.ad.bind.password +admin +PASSWORD +Password for the account that can search + + password + false + + + + +atlas.authentication.method.ldap.ad.referral +ignore +Set to follow if multiple AD servers are configured to return continuation references for results. Set to ignore (default) if no referrals should be followed. 
Possible values are follow|throw|ignore + + + +atlas.authentication.method.ldap.ad.user.searchfilter +
ambari git commit: AMBARI-17436. Falcon fixes: Fix permissions for extensions folder and rename the extensions folder (Venkat Ranganathan via alejandro) [Forced Update!]
Repository: ambari Updated Branches: refs/heads/trunk 834b658b8 -> 278e442ad (forced update) AMBARI-17436. Falcon fixes: Fix permissiosn for extensions folder and rename the extensions folder (Venkat Ranganathan via alejandro) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/278e442a Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/278e442a Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/278e442a Branch: refs/heads/trunk Commit: 278e442adf07bed539c842945607cae0968f1afe Parents: 1f73cbe Author: Alejandro FernandezAuthored: Wed Jun 29 14:21:34 2016 -0700 Committer: Alejandro Fernandez Committed: Wed Jun 29 14:26:21 2016 -0700 -- .../FALCON/0.5.0.2.1/package/scripts/falcon.py | 17 +++ .../FALCON/configuration/oozie-site.xml | 47 2 files changed, 56 insertions(+), 8 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/278e442a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py -- diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py index 2fd1add..c2f1f53 100644 --- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py +++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py @@ -144,13 +144,6 @@ def falcon(type, action = None, upgrade_type=None): source = params.local_data_mirroring_dir) if params.supports_falcon_extensions: -# In HDP 2.5, data-mirroring directory is still needed by Falcon for the data store, but don't copy any content. -# Instead, copy the extensions folder to HDFS. 
-params.HdfsResource(params.dfs_data_mirroring_dir, -type = "directory", -action = "create_on_execute", -owner = params.falcon_user, -mode = 0777) params.HdfsResource(params.falcon_extensions_dest_dir, type = "directory", @@ -159,8 +152,16 @@ def falcon(type, action = None, upgrade_type=None): group = params.proxyuser_group, recursive_chown = True, recursive_chmod = True, -mode = 0770, +mode = 0755, source = params.falcon_extensions_source_dir) +# Create the extensons HiveDR store +params.HdfsResource(os.path.join(params.falcon_extensions_dest_dir, "mirroring"), +type = "directory", +action = "create_on_execute", +owner = params.falcon_user, +group = params.proxyuser_group, +mode = 0770) + # At least one HDFS Dir should be created, so execute the change now. params.HdfsResource(None, action = "execute") http://git-wip-us.apache.org/repos/asf/ambari/blob/278e442a/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/configuration/oozie-site.xml -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/configuration/oozie-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/configuration/oozie-site.xml new file mode 100644 index 000..191e667 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/configuration/oozie-site.xml @@ -0,0 +1,47 @@ + + + + + +oozie.service.ELService.ext.functions.workflow + + now=org.apache.oozie.extensions.OozieELExtensions#ph1_now_echo, + today=org.apache.oozie.extensions.OozieELExtensions#ph1_today_echo, + yesterday=org.apache.oozie.extensions.OozieELExtensions#ph1_yesterday_echo, + currentMonth=org.apache.oozie.extensions.OozieELExtensions#ph1_currentMonth_echo, + lastMonth=org.apache.oozie.extensions.OozieELExtensions#ph1_lastMonth_echo, + currentYear=org.apache.oozie.extensions.OozieELExtensions#ph1_currentYear_echo, + lastYear=org.apache.oozie.extensions.OozieELExtensions#ph1_lastYear_echo, + 
formatTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_formatTime_echo, + latest=org.apache.oozie.coord.CoordELFunctions#ph2_coord_latest_echo, + future=org.apache.oozie.coord.CoordELFunctions#ph2_coord_future_echo + + + EL functions declarations, separated by commas,
ambari git commit: AMBARI-17436. Additional Ambari change for Falcon (Venkat Ranganathan via alejandro)
Repository: ambari Updated Branches: refs/heads/trunk 1f73cbe50 -> 834b658b8 AMBARI-17436. Additional Ambari change for Falcon (Venkat Ranganathan via alejandro) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/834b658b Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/834b658b Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/834b658b Branch: refs/heads/trunk Commit: 834b658b886d528421adca8c456f136a46f88505 Parents: 1f73cbe Author: Alejandro FernandezAuthored: Wed Jun 29 14:21:34 2016 -0700 Committer: Alejandro Fernandez Committed: Wed Jun 29 14:21:55 2016 -0700 -- .../FALCON/0.5.0.2.1/package/scripts/falcon.py | 17 +++ .../FALCON/configuration/oozie-site.xml | 47 2 files changed, 56 insertions(+), 8 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/834b658b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py -- diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py index 2fd1add..c2f1f53 100644 --- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py +++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py @@ -144,13 +144,6 @@ def falcon(type, action = None, upgrade_type=None): source = params.local_data_mirroring_dir) if params.supports_falcon_extensions: -# In HDP 2.5, data-mirroring directory is still needed by Falcon for the data store, but don't copy any content. -# Instead, copy the extensions folder to HDFS. 
-params.HdfsResource(params.dfs_data_mirroring_dir, -type = "directory", -action = "create_on_execute", -owner = params.falcon_user, -mode = 0777) params.HdfsResource(params.falcon_extensions_dest_dir, type = "directory", @@ -159,8 +152,16 @@ def falcon(type, action = None, upgrade_type=None): group = params.proxyuser_group, recursive_chown = True, recursive_chmod = True, -mode = 0770, +mode = 0755, source = params.falcon_extensions_source_dir) +# Create the extensons HiveDR store +params.HdfsResource(os.path.join(params.falcon_extensions_dest_dir, "mirroring"), +type = "directory", +action = "create_on_execute", +owner = params.falcon_user, +group = params.proxyuser_group, +mode = 0770) + # At least one HDFS Dir should be created, so execute the change now. params.HdfsResource(None, action = "execute") http://git-wip-us.apache.org/repos/asf/ambari/blob/834b658b/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/configuration/oozie-site.xml -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/configuration/oozie-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/configuration/oozie-site.xml new file mode 100644 index 000..191e667 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/configuration/oozie-site.xml @@ -0,0 +1,47 @@ + + + + + +oozie.service.ELService.ext.functions.workflow + + now=org.apache.oozie.extensions.OozieELExtensions#ph1_now_echo, + today=org.apache.oozie.extensions.OozieELExtensions#ph1_today_echo, + yesterday=org.apache.oozie.extensions.OozieELExtensions#ph1_yesterday_echo, + currentMonth=org.apache.oozie.extensions.OozieELExtensions#ph1_currentMonth_echo, + lastMonth=org.apache.oozie.extensions.OozieELExtensions#ph1_lastMonth_echo, + currentYear=org.apache.oozie.extensions.OozieELExtensions#ph1_currentYear_echo, + lastYear=org.apache.oozie.extensions.OozieELExtensions#ph1_lastYear_echo, + 
formatTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_formatTime_echo, + latest=org.apache.oozie.coord.CoordELFunctions#ph2_coord_latest_echo, + future=org.apache.oozie.coord.CoordELFunctions#ph2_coord_future_echo + + + EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. + This property is a
ambari git commit: AMBARI-17436. Falcon fixes: Fix permissions for extensions folder and rename the extensions folder (Venkat Ranganathan via alejandro)
Repository: ambari Updated Branches: refs/heads/branch-2.4 9e98a01dc -> 8964b5a9b AMBARI-17436. Falcon fixes: Fix permissiosn for extensions folder and rename the extensions folder (Venkat Ranganathan via alejandro) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8964b5a9 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8964b5a9 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8964b5a9 Branch: refs/heads/branch-2.4 Commit: 8964b5a9b215408c40a4c8ea3073aa6de64540e7 Parents: 9e98a01 Author: Alejandro FernandezAuthored: Wed Jun 29 14:24:53 2016 -0700 Committer: Alejandro Fernandez Committed: Wed Jun 29 14:24:53 2016 -0700 -- .../FALCON/0.5.0.2.1/package/scripts/falcon.py | 17 +++ .../FALCON/configuration/oozie-site.xml | 47 2 files changed, 56 insertions(+), 8 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/8964b5a9/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py -- diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py index 2fd1add..c2f1f53 100644 --- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py +++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py @@ -144,13 +144,6 @@ def falcon(type, action = None, upgrade_type=None): source = params.local_data_mirroring_dir) if params.supports_falcon_extensions: -# In HDP 2.5, data-mirroring directory is still needed by Falcon for the data store, but don't copy any content. -# Instead, copy the extensions folder to HDFS. 
-params.HdfsResource(params.dfs_data_mirroring_dir, -type = "directory", -action = "create_on_execute", -owner = params.falcon_user, -mode = 0777) params.HdfsResource(params.falcon_extensions_dest_dir, type = "directory", @@ -159,8 +152,16 @@ def falcon(type, action = None, upgrade_type=None): group = params.proxyuser_group, recursive_chown = True, recursive_chmod = True, -mode = 0770, +mode = 0755, source = params.falcon_extensions_source_dir) +# Create the extensons HiveDR store +params.HdfsResource(os.path.join(params.falcon_extensions_dest_dir, "mirroring"), +type = "directory", +action = "create_on_execute", +owner = params.falcon_user, +group = params.proxyuser_group, +mode = 0770) + # At least one HDFS Dir should be created, so execute the change now. params.HdfsResource(None, action = "execute") http://git-wip-us.apache.org/repos/asf/ambari/blob/8964b5a9/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/configuration/oozie-site.xml -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/configuration/oozie-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/configuration/oozie-site.xml new file mode 100644 index 000..191e667 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/configuration/oozie-site.xml @@ -0,0 +1,47 @@ + + + + + +oozie.service.ELService.ext.functions.workflow + + now=org.apache.oozie.extensions.OozieELExtensions#ph1_now_echo, + today=org.apache.oozie.extensions.OozieELExtensions#ph1_today_echo, + yesterday=org.apache.oozie.extensions.OozieELExtensions#ph1_yesterday_echo, + currentMonth=org.apache.oozie.extensions.OozieELExtensions#ph1_currentMonth_echo, + lastMonth=org.apache.oozie.extensions.OozieELExtensions#ph1_lastMonth_echo, + currentYear=org.apache.oozie.extensions.OozieELExtensions#ph1_currentYear_echo, + lastYear=org.apache.oozie.extensions.OozieELExtensions#ph1_lastYear_echo, + 
formatTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_formatTime_echo, + latest=org.apache.oozie.coord.CoordELFunctions#ph2_coord_latest_echo, + future=org.apache.oozie.coord.CoordELFunctions#ph2_coord_future_echo + + + EL functions declarations, separated by commas,
ambari git commit: AMBARI-17485. RU: 'Write Client configs' - step failed (ncole)
Repository: ambari Updated Branches: refs/heads/trunk e71b06383 -> 1f73cbe50 AMBARI-17485. RU: 'Write Client configs' - step failed (ncole) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1f73cbe5 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1f73cbe5 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1f73cbe5 Branch: refs/heads/trunk Commit: 1f73cbe50c88195e1cadce2250ef02e22653f3ec Parents: e71b063 Author: Nate ColeAuthored: Wed Jun 29 15:15:32 2016 -0400 Committer: Nate Cole Committed: Wed Jun 29 15:15:47 2016 -0400 -- .../libraries/script/script.py| 2 +- .../stacks/2.0.6/YARN/test_mapreduce2_client.py | 18 ++ .../test/python/stacks/2.1/TEZ/test_tez_client.py | 18 ++ .../python/stacks/2.2/SPARK/test_spark_client.py | 18 ++ 4 files changed, 55 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/1f73cbe5/ambari-common/src/main/python/resource_management/libraries/script/script.py -- diff --git a/ambari-common/src/main/python/resource_management/libraries/script/script.py b/ambari-common/src/main/python/resource_management/libraries/script/script.py index 77228a9..5deeb52 100644 --- a/ambari-common/src/main/python/resource_management/libraries/script/script.py +++ b/ambari-common/src/main/python/resource_management/libraries/script/script.py @@ -167,7 +167,7 @@ class Script(object): required_attributes = ["stack_name", "stack_root", "version"] for attribute in required_attributes: - if not has_attr(params, attribute): + if not hasattr(params, attribute): raise Fail("Failed in function 'stack_upgrade_save_new_config' because params was missing variable %s." 
% attribute) Logger.info("stack_upgrade_save_new_config(): Checking if can write new client configs to new config version folder.") http://git-wip-us.apache.org/repos/asf/ambari/blob/1f73cbe5/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py -- diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py index 0083d75..edd9615 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py +++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py @@ -419,3 +419,21 @@ class TestMapReduce2Client(RMFTestCase): self.assertEquals( ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'), mocks_dict['call'].call_args_list[0][0][0]) + + def test_stack_upgrade_save_new_config(self): +config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/client-upgrade.json" +with open(config_file, "r") as f: + json_content = json.load(f) +version = '2.3.0.0-1234' +json_content['commandParams']['version'] = version + +mocks_dict = {} +self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/mapreduce2_client.py", + classname = "MapReduce2Client", + command = "stack_upgrade_save_new_config", + config_dict = json_content, + stack_version = self.STACK_VERSION, + target = RMFTestCase.TARGET_COMMON_SERVICES, + call_mocks = [(0, None, ''), (0, None)], + mocks_dict = mocks_dict) +# for now, it's enough to know the method didn't fail http://git-wip-us.apache.org/repos/asf/ambari/blob/1f73cbe5/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py -- diff --git a/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py b/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py index 73615a4..e63090a 100644 --- a/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py +++ 
b/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py @@ -126,3 +126,21 @@ class TestTezClient(RMFTestCase): self.assertEquals( ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'), mocks_dict['call'].call_args_list[1][0][0]) + + def test_stack_upgrade_save_new_config(self): +config_file = self.get_src_folder()+"/test/python/stacks/2.1/configs/client-upgrade.json" +with open(config_file, "r") as f: + json_content = json.load(f) +
ambari git commit: AMBARI-17485. RU: 'Write Client configs' - step failed (ncole)
Repository: ambari Updated Branches: refs/heads/branch-2.4 1b645c048 -> 9e98a01dc AMBARI-17485. RU: 'Write Client configs' - step failed (ncole) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9e98a01d Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9e98a01d Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9e98a01d Branch: refs/heads/branch-2.4 Commit: 9e98a01dcf673ec42cf634c122413289eee69bcb Parents: 1b645c0 Author: Nate ColeAuthored: Wed Jun 29 13:14:47 2016 -0400 Committer: Nate Cole Committed: Wed Jun 29 15:14:52 2016 -0400 -- .../libraries/script/script.py| 4 ++-- .../SPARK/1.2.1/package/scripts/spark_client.py | 14 -- .../TEZ/0.4.0.2.1/package/scripts/tez.py | 7 ++- .../package/scripts/mapreduce2_client.py | 13 - .../stacks/2.0.6/YARN/test_mapreduce2_client.py | 18 ++ .../test/python/stacks/2.1/TEZ/test_tez_client.py | 18 ++ .../python/stacks/2.2/SPARK/test_spark_client.py | 18 ++ 7 files changed, 78 insertions(+), 14 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/9e98a01d/ambari-common/src/main/python/resource_management/libraries/script/script.py -- diff --git a/ambari-common/src/main/python/resource_management/libraries/script/script.py b/ambari-common/src/main/python/resource_management/libraries/script/script.py index 77228a9..534daa6 100644 --- a/ambari-common/src/main/python/resource_management/libraries/script/script.py +++ b/ambari-common/src/main/python/resource_management/libraries/script/script.py @@ -167,7 +167,7 @@ class Script(object): required_attributes = ["stack_name", "stack_root", "version"] for attribute in required_attributes: - if not has_attr(params, attribute): + if not hasattr(params, attribute): raise Fail("Failed in function 'stack_upgrade_save_new_config' because params was missing variable %s." 
% attribute) Logger.info("stack_upgrade_save_new_config(): Checking if can write new client configs to new config version folder.") @@ -834,4 +834,4 @@ class Script(object): def __init__(self): if Script.instance is not None: - raise Fail("An instantiation already exists! Use, get_instance() method.") \ No newline at end of file + raise Fail("An instantiation already exists! Use, get_instance() method.") http://git-wip-us.apache.org/repos/asf/ambari/blob/9e98a01d/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_client.py -- diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_client.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_client.py index bade06e..31bf4c6 100644 --- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_client.py +++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_client.py @@ -17,16 +17,18 @@ See the License for the specific language governing permissions and limitations under the License. 
""" - +# Python imports +import os import sys -from resource_management import * -from resource_management.libraries.functions import conf_select -from resource_management.libraries.functions import stack_select + +# Local imports +from resource_management.libraries.script.script import Script +from resource_management.libraries.functions import conf_select, stack_select from resource_management.libraries.functions.stack_features import check_stack_feature -from resource_management.libraries.functions import StackFeature +from resource_management.libraries.functions.constants import StackFeature from resource_management.core.exceptions import ClientComponentHasNoStatus +from ambari_commons.constants import UPGRADE_TYPE_ROLLING from resource_management.core.logger import Logger -from resource_management.core import shell from setup_spark import setup_spark http://git-wip-us.apache.org/repos/asf/ambari/blob/9e98a01d/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez.py -- diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez.py index 01940d7..dfa6501 100644 --- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez.py +++
ambari git commit: AMBARI-17320. AmbariServer looks for ldap_url, container_dn in blueprint even for MIT security type (rlevas)
Repository: ambari Updated Branches: refs/heads/branch-2.4 3097e0525 -> 1b645c048 AMBARI-17320. AmbariServer looks for ldap_url, container_dn in blueprint even for MIT security type (rlevas) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1b645c04 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1b645c04 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1b645c04 Branch: refs/heads/branch-2.4 Commit: 1b645c048694329916bcc65bb76eb50c59c7dfa7 Parents: 3097e05 Author: Robert LevasAuthored: Wed Jun 29 14:48:51 2016 -0400 Committer: Robert Levas Committed: Wed Jun 29 14:48:51 2016 -0400 -- .../KERBEROS/1.10.3-10/configuration/kerberos-env.xml | 10 +- 1 file changed, 5 insertions(+), 5 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/1b645c04/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml -- diff --git a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml index 98d2ada..29c46e9 100644 --- a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml +++ b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml @@ -77,7 +77,7 @@ - + ldap_url LDAP url @@ -91,7 +91,7 @@ - + container_dn Container DN @@ -104,7 +104,7 @@ - + encryption_types Encryption Types @@ -131,7 +131,7 @@ - + kdc_hosts A comma-delimited list of IP addresses or FQDNs declaring the KDC hosts. @@ -145,7 +145,7 @@ - + admin_server_host Kadmin host
ambari git commit: AMBARI-17320. AmbariServer looks for ldap_url, container_dn in blueprint even for MIT security type (rlevas)
Repository: ambari Updated Branches: refs/heads/trunk b39ba60a5 -> e71b06383 AMBARI-17320. AmbariServer looks for ldap_url, container_dn in blueprint even for MIT security type (rlevas) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e71b0638 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e71b0638 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e71b0638 Branch: refs/heads/trunk Commit: e71b06383773993352b1222f2ec61af4e2dec987 Parents: b39ba60 Author: Robert LevasAuthored: Wed Jun 29 14:47:03 2016 -0400 Committer: Robert Levas Committed: Wed Jun 29 14:47:09 2016 -0400 -- .../KERBEROS/1.10.3-10/configuration/kerberos-env.xml | 10 +- 1 file changed, 5 insertions(+), 5 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/e71b0638/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml -- diff --git a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml index 98d2ada..29c46e9 100644 --- a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml +++ b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml @@ -77,7 +77,7 @@ - + ldap_url LDAP url @@ -91,7 +91,7 @@ - + container_dn Container DN @@ -104,7 +104,7 @@ - + encryption_types Encryption Types @@ -131,7 +131,7 @@ - + kdc_hosts A comma-delimited list of IP addresses or FQDNs declaring the KDC hosts. @@ -145,7 +145,7 @@ - + admin_server_host Kadmin host
ambari git commit: AMBARI-17419. Disabling the auto-start for ambari-server and ambari-agent doesn't work on systemd. (stoader)
Repository: ambari Updated Branches: refs/heads/branch-2.4 50c297ba5 -> 3097e0525 AMBARI-17419. Disabling the auto-start for ambari-server and ambari-agent doesn't work on systemd. (stoader) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3097e052 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3097e052 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3097e052 Branch: refs/heads/branch-2.4 Commit: 3097e052548289ceb50e53120a4b47c50b7a20a4 Parents: 50c297b Author: Toader, SebastianAuthored: Wed Jun 29 11:37:00 2016 -0700 Committer: Toader, Sebastian Committed: Wed Jun 29 11:38:10 2016 -0700 -- ambari-agent/etc/init.d/ambari-agent | 8 ambari-server/sbin/ambari-server | 8 2 files changed, 16 insertions(+) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/3097e052/ambari-agent/etc/init.d/ambari-agent -- diff --git a/ambari-agent/etc/init.d/ambari-agent b/ambari-agent/etc/init.d/ambari-agent index b2c3109..2d597a9 100644 --- a/ambari-agent/etc/init.d/ambari-agent +++ b/ambari-agent/etc/init.d/ambari-agent @@ -19,6 +19,14 @@ # See the License for the specific language governing permissions and # limitations under the License. +### BEGIN INIT INFO +# Provides: ambari-agent +# Required-Start: +# Required-Stop: +# Default-Start: 2 3 4 5 +# Default-Stop: 0 6 +### END INIT INFO + run_as_user=`cat /etc/ambari-agent/conf/ambari-agent.ini | grep run_as_user | tr -d ' ' | grep -v '^;\|^#' | awk -F '=' '{ print $2}'` if [ "$EUID" != `id -u $run_as_user` ] ; then http://git-wip-us.apache.org/repos/asf/ambari/blob/3097e052/ambari-server/sbin/ambari-server -- diff --git a/ambari-server/sbin/ambari-server b/ambari-server/sbin/ambari-server index 29af308..81cf53b 100755 --- a/ambari-server/sbin/ambari-server +++ b/ambari-server/sbin/ambari-server @@ -19,6 +19,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+### BEGIN INIT INFO +# Provides: ambari-server +# Required-Start: +# Required-Stop: +# Default-Start: 2 3 4 5 +# Default-Stop: 0 6 +### END INIT INFO + # /etc/init.d/ambari-server VERSION="${ambariFullVersion}"
ambari git commit: AMBARI-17419. Disabling the auto-start for ambari-server and ambari-agent doesn't work on systemd. (stoader)
Repository: ambari Updated Branches: refs/heads/trunk 15d7e2035 -> b39ba60a5 AMBARI-17419. Disabling the auto-start for ambari-server and ambari-agent doesn't work on systemd. (stoader) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b39ba60a Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b39ba60a Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b39ba60a Branch: refs/heads/trunk Commit: b39ba60a5e4fd0e772e2015553ba993d1f21d180 Parents: 15d7e20 Author: Toader, SebastianAuthored: Wed Jun 29 11:37:00 2016 -0700 Committer: Toader, Sebastian Committed: Wed Jun 29 11:37:00 2016 -0700 -- ambari-agent/etc/init.d/ambari-agent | 8 ambari-server/sbin/ambari-server | 8 2 files changed, 16 insertions(+) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/b39ba60a/ambari-agent/etc/init.d/ambari-agent -- diff --git a/ambari-agent/etc/init.d/ambari-agent b/ambari-agent/etc/init.d/ambari-agent index b2c3109..2d597a9 100644 --- a/ambari-agent/etc/init.d/ambari-agent +++ b/ambari-agent/etc/init.d/ambari-agent @@ -19,6 +19,14 @@ # See the License for the specific language governing permissions and # limitations under the License. +### BEGIN INIT INFO +# Provides: ambari-agent +# Required-Start: +# Required-Stop: +# Default-Start: 2 3 4 5 +# Default-Stop: 0 6 +### END INIT INFO + run_as_user=`cat /etc/ambari-agent/conf/ambari-agent.ini | grep run_as_user | tr -d ' ' | grep -v '^;\|^#' | awk -F '=' '{ print $2}'` if [ "$EUID" != `id -u $run_as_user` ] ; then http://git-wip-us.apache.org/repos/asf/ambari/blob/b39ba60a/ambari-server/sbin/ambari-server -- diff --git a/ambari-server/sbin/ambari-server b/ambari-server/sbin/ambari-server index 29af308..81cf53b 100755 --- a/ambari-server/sbin/ambari-server +++ b/ambari-server/sbin/ambari-server @@ -19,6 +19,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+### BEGIN INIT INFO +# Provides: ambari-server +# Required-Start: +# Required-Stop: +# Default-Start: 2 3 4 5 +# Default-Stop: 0 6 +### END INIT INFO + # /etc/init.d/ambari-server VERSION="${ambariFullVersion}"
ambari git commit: AMBARI-17293 Ambari does not refresh yarn queues when HiveServerInteractive component is restarted. Second patch. (akovalenko)
Repository: ambari Updated Branches: refs/heads/branch-2.4 661c26239 -> 50c297ba5 AMBARI-17293 Ambari does not refresh yarn queues when HiveServerIntearctive component is restarted. Second patch. (akovalenko) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/50c297ba Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/50c297ba Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/50c297ba Branch: refs/heads/branch-2.4 Commit: 50c297ba52e9279107d1922fb760a7efc8d217e8 Parents: 661c262 Author: Aleksandr KovalenkoAuthored: Wed Jun 29 18:11:32 2016 +0300 Committer: Aleksandr Kovalenko Committed: Wed Jun 29 20:59:04 2016 +0300 -- ambari-web/app/controllers/main/host/details.js | 38 ++-- .../test/controllers/main/host/details_test.js | 6 ++-- 2 files changed, 22 insertions(+), 22 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/50c297ba/ambari-web/app/controllers/main/host/details.js -- diff --git a/ambari-web/app/controllers/main/host/details.js b/ambari-web/app/controllers/main/host/details.js index 517adb2..74255b9 100644 --- a/ambari-web/app/controllers/main/host/details.js +++ b/ambari-web/app/controllers/main/host/details.js @@ -526,7 +526,7 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow }); } else if (this.get('refreshYARNQueueComponents').contains(component.get('componentName'))) { return App.showConfirmationPopup(function () { -self.restartComponentAndRefreshYARNQueue(component); +self.refreshYARNQueueAndRestartComponent(component); }); } else { return App.showConfirmationPopup(function () { @@ -535,7 +535,7 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow } }, - restartComponentAndRefreshYARNQueue: function (component) { + refreshYARNQueueAndRestartComponent: function (component) { var componentToRestartHost = App.HostComponent.find().findProperty('componentName', 
component.get('componentName')).get('hostName'); var resourceManagerHost = App.HostComponent.find().findProperty('componentName', 'RESOURCEMANAGER').get('hostName'); var batches = [ @@ -545,19 +545,14 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow "uri": App.apiPrefix + "/clusters/" + App.get('clusterName') + "/requests", "RequestBodyInfo": { "RequestInfo": { -"context": "Restart " + component.get('displayName'), -"command": "RESTART", -"operation_level": { - "level": "HOST_COMPONENT", - "cluster_name": App.get('clusterName'), - "service_name": component.get('service.serviceName'), - "hostcomponent_name": component.get('componentName') -} +"context": "Refresh YARN Capacity Scheduler", +"command": "REFRESHQUEUES", +"parameters/forceRefreshConfigTags": "capacity-scheduler" }, "Requests/resource_filters": [{ -"service_name": component.get('service.serviceName'), -"component_name": component.get('componentName'), -"hosts": componentToRestartHost +"service_name": "YARN", +"component_name": "RESOURCEMANAGER", +"hosts": resourceManagerHost }] } }, @@ -567,14 +562,19 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow "uri": App.apiPrefix + "/clusters/" + App.get('clusterName') + "/requests", "RequestBodyInfo": { "RequestInfo": { -"context": "Refresh YARN Capacity Scheduler", -"command": "REFRESHQUEUES", -"parameters/forceRefreshConfigTags": "capacity-scheduler" +"context": "Restart " + component.get('displayName'), +"command": "RESTART", +"operation_level": { + "level": "HOST_COMPONENT", + "cluster_name": App.get('clusterName'), + "service_name": component.get('service.serviceName'), + "hostcomponent_name": component.get('componentName') +} }, "Requests/resource_filters": [{ -"service_name": "YARN", -"component_name": "RESOURCEMANAGER", -"hosts": resourceManagerHost +"service_name": component.get('service.serviceName'), +"component_name": component.get('componentName'), +"hosts": componentToRestartHost 
}] } }
ambari git commit: AMBARI-17293 Ambari does not refresh yarn queues when HiveServerInteractive component is restarted. Second patch. (akovalenko)
Repository: ambari Updated Branches: refs/heads/trunk 917988f76 -> 15d7e2035 AMBARI-17293 Ambari does not refresh yarn queues when HiveServerIntearctive component is restarted. Second patch. (akovalenko) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/15d7e203 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/15d7e203 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/15d7e203 Branch: refs/heads/trunk Commit: 15d7e20355f1f8428d1a776feef597fe0d1df694 Parents: 917988f Author: Aleksandr KovalenkoAuthored: Wed Jun 29 18:11:32 2016 +0300 Committer: Aleksandr Kovalenko Committed: Wed Jun 29 20:58:21 2016 +0300 -- ambari-web/app/controllers/main/host/details.js | 38 ++-- .../test/controllers/main/host/details_test.js | 6 ++-- 2 files changed, 22 insertions(+), 22 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/15d7e203/ambari-web/app/controllers/main/host/details.js -- diff --git a/ambari-web/app/controllers/main/host/details.js b/ambari-web/app/controllers/main/host/details.js index 517adb2..74255b9 100644 --- a/ambari-web/app/controllers/main/host/details.js +++ b/ambari-web/app/controllers/main/host/details.js @@ -526,7 +526,7 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow }); } else if (this.get('refreshYARNQueueComponents').contains(component.get('componentName'))) { return App.showConfirmationPopup(function () { -self.restartComponentAndRefreshYARNQueue(component); +self.refreshYARNQueueAndRestartComponent(component); }); } else { return App.showConfirmationPopup(function () { @@ -535,7 +535,7 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow } }, - restartComponentAndRefreshYARNQueue: function (component) { + refreshYARNQueueAndRestartComponent: function (component) { var componentToRestartHost = App.HostComponent.find().findProperty('componentName', 
component.get('componentName')).get('hostName'); var resourceManagerHost = App.HostComponent.find().findProperty('componentName', 'RESOURCEMANAGER').get('hostName'); var batches = [ @@ -545,19 +545,14 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow "uri": App.apiPrefix + "/clusters/" + App.get('clusterName') + "/requests", "RequestBodyInfo": { "RequestInfo": { -"context": "Restart " + component.get('displayName'), -"command": "RESTART", -"operation_level": { - "level": "HOST_COMPONENT", - "cluster_name": App.get('clusterName'), - "service_name": component.get('service.serviceName'), - "hostcomponent_name": component.get('componentName') -} +"context": "Refresh YARN Capacity Scheduler", +"command": "REFRESHQUEUES", +"parameters/forceRefreshConfigTags": "capacity-scheduler" }, "Requests/resource_filters": [{ -"service_name": component.get('service.serviceName'), -"component_name": component.get('componentName'), -"hosts": componentToRestartHost +"service_name": "YARN", +"component_name": "RESOURCEMANAGER", +"hosts": resourceManagerHost }] } }, @@ -567,14 +562,19 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow "uri": App.apiPrefix + "/clusters/" + App.get('clusterName') + "/requests", "RequestBodyInfo": { "RequestInfo": { -"context": "Refresh YARN Capacity Scheduler", -"command": "REFRESHQUEUES", -"parameters/forceRefreshConfigTags": "capacity-scheduler" +"context": "Restart " + component.get('displayName'), +"command": "RESTART", +"operation_level": { + "level": "HOST_COMPONENT", + "cluster_name": App.get('clusterName'), + "service_name": component.get('service.serviceName'), + "hostcomponent_name": component.get('componentName') +} }, "Requests/resource_filters": [{ -"service_name": "YARN", -"component_name": "RESOURCEMANAGER", -"hosts": resourceManagerHost +"service_name": component.get('service.serviceName'), +"component_name": component.get('componentName'), +"hosts": componentToRestartHost 
}] } }
[1/4] ambari git commit: AMBARI-17481. Incorrect error message on login page. (mpapirkovskyy)
Repository: ambari Updated Branches: refs/heads/branch-2.4 f8bb9f850 -> 661c26239 refs/heads/trunk 7a43ef483 -> 917988f76 AMBARI-17481. Incorrect error message on login page. (mpapirkovskyy) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/917988f7 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/917988f7 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/917988f7 Branch: refs/heads/trunk Commit: 917988f7668405299b0d59fe017c6b115716df8a Parents: 21a5448 Author: Myroslav PapirkovskyiAuthored: Wed Jun 29 18:09:53 2016 +0300 Committer: Myroslav Papirkovskyi Committed: Wed Jun 29 19:21:57 2016 +0300 -- .../AmbariLdapAuthenticationProvider.java | 7 ++-- .../AmbariLdapAuthoritiesPopulator.java | 3 +- .../authorization/AmbariLocalUserProvider.java | 16 +++- ...lidUsernamePasswordCombinationException.java | 34 + .../server/security/authorization/Users.java| 6 +-- .../AmbariInternalAuthenticationProvider.java | 5 +-- .../ambari/server/state/ConfigHelper.java | 8 +++- ...ariAuthorizationProviderDisableUserTest.java | 13 +++ ...uthenticationProviderForDNWithSpaceTest.java | 4 +- .../AmbariLdapAuthenticationProviderTest.java | 18 +++-- .../AmbariLocalUserProviderTest.java| 8 ++-- .../svccomphost/ServiceComponentHostTest.java | 40 12 files changed, 108 insertions(+), 54 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/917988f7/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProvider.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProvider.java index 8527271..6905757 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProvider.java +++ 
b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProvider.java @@ -30,7 +30,6 @@ import org.slf4j.LoggerFactory; import org.springframework.dao.IncorrectResultSizeDataAccessException; import org.springframework.ldap.core.support.LdapContextSource; import org.springframework.security.authentication.AuthenticationProvider; -import org.springframework.security.authentication.DisabledException; import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; import org.springframework.security.core.Authentication; import org.springframework.security.core.AuthenticationException; @@ -86,7 +85,7 @@ public class AmbariLdapAuthenticationProvider implements AuthenticationProvider "connecting to LDAP server) are invalid.", e); } } -throw e; +throw new InvalidUsernamePasswordCombinationException(e); } catch (IncorrectResultSizeDataAccessException multipleUsersFound) { String message = configuration.isLdapAlternateUserSearchEnabled() ? String.format("Login Failed: Please append your domain to your username and try again. 
Example: %s@domain", username) : @@ -198,13 +197,13 @@ public class AmbariLdapAuthenticationProvider implements AuthenticationProvider // lookup is case insensitive, so no need for string comparison if (userEntity == null) { LOG.info("user not found "); - throw new UsernameNotFoundException("Username " + userName + " not found"); + throw new InvalidUsernamePasswordCombinationException(); } if (!userEntity.getActive()) { LOG.debug("User account is disabled"); - throw new DisabledException("Username " + userName + " is disabled"); + throw new InvalidUsernamePasswordCombinationException(); } return userEntity.getUserId(); http://git-wip-us.apache.org/repos/asf/ambari/blob/917988f7/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthoritiesPopulator.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthoritiesPopulator.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthoritiesPopulator.java index 7df8dc3..b3be046 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthoritiesPopulator.java +++
[3/4] ambari git commit: AMBARI-17374. Ambari reports "IN PROGRESS" status for a finished install task. (mpapirkovskyy)
AMBARI-17374. Ambari reports "IN PROGRESS" status for a finished install task. (mpapirkovskyy) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/be17fbec Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/be17fbec Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/be17fbec Branch: refs/heads/branch-2.4 Commit: be17fbec298e33ec608b63bb4984b3ab3bbeee68 Parents: f8bb9f8 Author: Myroslav PapirkovskyiAuthored: Wed Jun 22 19:13:02 2016 +0300 Committer: Myroslav Papirkovskyi Committed: Wed Jun 29 19:22:24 2016 +0300 -- .../src/main/python/ambari_agent/ActionQueue.py | 9 +++- .../ambari_agent/CustomServiceOrchestrator.py | 5 +- .../TestCustomServiceOrchestrator.py| 2 +- .../server/actionmanager/ActionManager.java | 7 +++ .../server/actionmanager/ActionScheduler.java | 13 + .../server/actionmanager/TestActionManager.java | 56 .../actionmanager/TestActionScheduler.java | 20 --- .../server/agent/HeartbeatProcessorTest.java| 4 +- 8 files changed, 104 insertions(+), 12 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/be17fbec/ambari-agent/src/main/python/ambari_agent/ActionQueue.py -- diff --git a/ambari-agent/src/main/python/ambari_agent/ActionQueue.py b/ambari-agent/src/main/python/ambari_agent/ActionQueue.py index f217a54..60c72af 100644 --- a/ambari-agent/src/main/python/ambari_agent/ActionQueue.py +++ b/ambari-agent/src/main/python/ambari_agent/ActionQueue.py @@ -26,6 +26,7 @@ import pprint import os import ambari_simplejson as json import time +import signal from AgentException import AgentException from LiveStatus import LiveStatus @@ -141,7 +142,7 @@ class ActionQueue(threading.Thread): logger.info("Canceling " + queued_command['commandType'] + \ " for service " + queued_command['serviceName'] + \ " and role " + queued_command['role'] + \ - " with taskId " + queued_command['taskId']) + " with taskId " + str(queued_command['taskId'])) # Kill if in progress 
self.customServiceOrchestrator.cancel_command(task_id, reason) @@ -313,7 +314,11 @@ class ActionQueue(threading.Thread): if commandresult['exitcode'] == 0: status = self.COMPLETED_STATUS else: - status = self.FAILED_STATUS + if (commandresult['exitcode'] == -signal.SIGTERM) or (commandresult['exitcode'] == -signal.SIGKILL): +logger.info('Command {cid} was canceled!'.format(cid=taskId)) +return + else: +status = self.FAILED_STATUS if status != self.COMPLETED_STATUS and retryAble and retryDuration > 0: delay = self.get_retry_delay(delay) http://git-wip-us.apache.org/repos/asf/ambari/blob/be17fbec/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py -- diff --git a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py index fc1b72a..57416a4 100644 --- a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py +++ b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py @@ -244,7 +244,10 @@ class CustomServiceOrchestrator(): logger.debug('Pop with taskId %s' % task_id) pid = self.commands_in_progress.pop(task_id) if not isinstance(pid, int): - return '\nCommand aborted. ' + pid + if pid: +return '\nCommand aborted. 
' + pid + else: +return '' return None def requestComponentStatus(self, command): http://git-wip-us.apache.org/repos/asf/ambari/blob/be17fbec/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py -- diff --git a/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py b/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py index 0ff0ba5..c9724b7 100644 --- a/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py +++ b/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py @@ -440,7 +440,7 @@ class TestCustomServiceOrchestrator(TestCase): time.sleep(.1) -orchestrator.cancel_command(19,'') +orchestrator.cancel_command(19,'reason') self.assertTrue(kill_process_with_children_mock.called) kill_process_with_children_mock.assert_called_with(33)
[2/4] ambari git commit: AMBARI-17374. Ambari reports "IN PROGRESS" status for a finished install task. (mpapirkovskyy)
AMBARI-17374. Ambari reports "IN PROGRESS" status for a finished install task. (mpapirkovskyy) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/21a54489 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/21a54489 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/21a54489 Branch: refs/heads/trunk Commit: 21a544891e7388c807ad8b88eb80afada9e31b19 Parents: 7a43ef4 Author: Myroslav PapirkovskyiAuthored: Wed Jun 22 19:13:02 2016 +0300 Committer: Myroslav Papirkovskyi Committed: Wed Jun 29 19:21:57 2016 +0300 -- .../src/main/python/ambari_agent/ActionQueue.py | 9 +++- .../ambari_agent/CustomServiceOrchestrator.py | 5 +- .../TestCustomServiceOrchestrator.py| 2 +- .../server/actionmanager/ActionManager.java | 7 +++ .../server/actionmanager/ActionScheduler.java | 13 + .../server/actionmanager/TestActionManager.java | 56 .../actionmanager/TestActionScheduler.java | 20 --- .../server/agent/HeartbeatProcessorTest.java| 4 +- 8 files changed, 104 insertions(+), 12 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/21a54489/ambari-agent/src/main/python/ambari_agent/ActionQueue.py -- diff --git a/ambari-agent/src/main/python/ambari_agent/ActionQueue.py b/ambari-agent/src/main/python/ambari_agent/ActionQueue.py index f217a54..60c72af 100644 --- a/ambari-agent/src/main/python/ambari_agent/ActionQueue.py +++ b/ambari-agent/src/main/python/ambari_agent/ActionQueue.py @@ -26,6 +26,7 @@ import pprint import os import ambari_simplejson as json import time +import signal from AgentException import AgentException from LiveStatus import LiveStatus @@ -141,7 +142,7 @@ class ActionQueue(threading.Thread): logger.info("Canceling " + queued_command['commandType'] + \ " for service " + queued_command['serviceName'] + \ " and role " + queued_command['role'] + \ - " with taskId " + queued_command['taskId']) + " with taskId " + str(queued_command['taskId'])) # Kill if in progress 
self.customServiceOrchestrator.cancel_command(task_id, reason) @@ -313,7 +314,11 @@ class ActionQueue(threading.Thread): if commandresult['exitcode'] == 0: status = self.COMPLETED_STATUS else: - status = self.FAILED_STATUS + if (commandresult['exitcode'] == -signal.SIGTERM) or (commandresult['exitcode'] == -signal.SIGKILL): +logger.info('Command {cid} was canceled!'.format(cid=taskId)) +return + else: +status = self.FAILED_STATUS if status != self.COMPLETED_STATUS and retryAble and retryDuration > 0: delay = self.get_retry_delay(delay) http://git-wip-us.apache.org/repos/asf/ambari/blob/21a54489/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py -- diff --git a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py index fc1b72a..57416a4 100644 --- a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py +++ b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py @@ -244,7 +244,10 @@ class CustomServiceOrchestrator(): logger.debug('Pop with taskId %s' % task_id) pid = self.commands_in_progress.pop(task_id) if not isinstance(pid, int): - return '\nCommand aborted. ' + pid + if pid: +return '\nCommand aborted. 
' + pid + else: +return '' return None def requestComponentStatus(self, command): http://git-wip-us.apache.org/repos/asf/ambari/blob/21a54489/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py -- diff --git a/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py b/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py index 0ff0ba5..c9724b7 100644 --- a/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py +++ b/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py @@ -440,7 +440,7 @@ class TestCustomServiceOrchestrator(TestCase): time.sleep(.1) -orchestrator.cancel_command(19,'') +orchestrator.cancel_command(19,'reason') self.assertTrue(kill_process_with_children_mock.called) kill_process_with_children_mock.assert_called_with(33)
[4/4] ambari git commit: AMBARI-17481. Incorrect error message on login page. (mpapirkovskyy)
AMBARI-17481. Incorrect error message on login page. (mpapirkovskyy) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/661c2623 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/661c2623 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/661c2623 Branch: refs/heads/branch-2.4 Commit: 661c262394e0c7e2d1aac0518dfef4ca62b28094 Parents: be17fbe Author: Myroslav PapirkovskyiAuthored: Wed Jun 29 18:09:53 2016 +0300 Committer: Myroslav Papirkovskyi Committed: Wed Jun 29 19:22:24 2016 +0300 -- .../AmbariLdapAuthenticationProvider.java | 7 ++-- .../AmbariLdapAuthoritiesPopulator.java | 3 +- .../authorization/AmbariLocalUserProvider.java | 16 +++- ...lidUsernamePasswordCombinationException.java | 34 + .../server/security/authorization/Users.java| 6 +-- .../AmbariInternalAuthenticationProvider.java | 5 +-- .../ambari/server/state/ConfigHelper.java | 8 +++- ...ariAuthorizationProviderDisableUserTest.java | 13 +++ ...uthenticationProviderForDNWithSpaceTest.java | 4 +- .../AmbariLdapAuthenticationProviderTest.java | 18 +++-- .../AmbariLocalUserProviderTest.java| 8 ++-- .../svccomphost/ServiceComponentHostTest.java | 40 12 files changed, 108 insertions(+), 54 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/661c2623/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProvider.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProvider.java index 0bf7ec2..6be7e10 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProvider.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProvider.java @@ -30,7 +30,6 @@ import 
org.slf4j.LoggerFactory; import org.springframework.dao.IncorrectResultSizeDataAccessException; import org.springframework.ldap.core.support.LdapContextSource; import org.springframework.security.authentication.AuthenticationProvider; -import org.springframework.security.authentication.DisabledException; import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; import org.springframework.security.core.Authentication; import org.springframework.security.core.AuthenticationException; @@ -86,7 +85,7 @@ public class AmbariLdapAuthenticationProvider implements AuthenticationProvider "connecting to LDAP server) are invalid.", e); } } -throw e; +throw new InvalidUsernamePasswordCombinationException(e); } catch (IncorrectResultSizeDataAccessException multipleUsersFound) { String message = configuration.isLdapAlternateUserSearchEnabled() ? String.format("Login Failed: Please append your domain to your username and try again. Example: %s@domain", username) : @@ -197,13 +196,13 @@ public class AmbariLdapAuthenticationProvider implements AuthenticationProvider if (userEntity == null || !StringUtils.equals(userEntity.getUserName(), userName)) { LOG.info("user not found "); - throw new UsernameNotFoundException("Username " + userName + " not found"); + throw new InvalidUsernamePasswordCombinationException(); } if (!userEntity.getActive()) { LOG.debug("User account is disabled"); - throw new DisabledException("Username " + userName + " is disabled"); + throw new InvalidUsernamePasswordCombinationException(); } return userEntity.getUserId(); http://git-wip-us.apache.org/repos/asf/ambari/blob/661c2623/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthoritiesPopulator.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthoritiesPopulator.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthoritiesPopulator.java 
index 7df8dc3..b3be046 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthoritiesPopulator.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthoritiesPopulator.java @@ -32,7 +32,6 @@ import org.apache.ambari.server.orm.entities.UserEntity; import org.slf4j.Logger; import
[2/2] ambari git commit: AMBARI-17475. Ambari tries to find the old jdbc etc. despite a reset and re-setup with another DB (aonishuk)
AMBARI-17475. Ambari try to find old jdbc etc despite reset and resetuping with another DB (aonishuk) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f8bb9f85 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f8bb9f85 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f8bb9f85 Branch: refs/heads/branch-2.4 Commit: f8bb9f850548570e1b4b2c0b6416ff84f2c05ab1 Parents: 2edf6f6 Author: Andrew OnishukAuthored: Wed Jun 29 19:08:39 2016 +0300 Committer: Andrew Onishuk Committed: Wed Jun 29 19:08:39 2016 +0300 -- .../python/ambari_server/dbConfiguration_linux.py | 18 +- 1 file changed, 9 insertions(+), 9 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/f8bb9f85/ambari-server/src/main/python/ambari_server/dbConfiguration_linux.py -- diff --git a/ambari-server/src/main/python/ambari_server/dbConfiguration_linux.py b/ambari-server/src/main/python/ambari_server/dbConfiguration_linux.py index 95c82c1..d09bd77 100644 --- a/ambari-server/src/main/python/ambari_server/dbConfiguration_linux.py +++ b/ambari-server/src/main/python/ambari_server/dbConfiguration_linux.py @@ -551,15 +551,15 @@ class PGConfig(LinuxDBMSConfig): # Store local database connection properties def _store_local_properties(self, properties, options): -properties.removeOldProp(JDBC_DATABASE_PROPERTY) -properties.removeOldProp(JDBC_DATABASE_NAME_PROPERTY) -properties.removeOldProp(JDBC_POSTGRES_SCHEMA_PROPERTY) -properties.removeOldProp(JDBC_HOSTNAME_PROPERTY) -properties.removeOldProp(JDBC_RCA_DRIVER_PROPERTY) -properties.removeOldProp(JDBC_RCA_URL_PROPERTY) -properties.removeOldProp(JDBC_PORT_PROPERTY) -properties.removeOldProp(JDBC_DRIVER_PROPERTY) -properties.removeOldProp(JDBC_URL_PROPERTY) +properties.removeProp(JDBC_DATABASE_PROPERTY) +properties.removeProp(JDBC_DATABASE_NAME_PROPERTY) +properties.removeProp(JDBC_POSTGRES_SCHEMA_PROPERTY) +properties.removeProp(JDBC_HOSTNAME_PROPERTY) 
+properties.removeProp(JDBC_RCA_DRIVER_PROPERTY) +properties.removeProp(JDBC_RCA_URL_PROPERTY) +properties.removeProp(JDBC_PORT_PROPERTY) +properties.removeProp(JDBC_DRIVER_PROPERTY) +properties.removeProp(JDBC_URL_PROPERTY) # Store the properties properties.process_pair(PERSISTENCE_TYPE_PROPERTY, self.persistence_type)
[1/2] ambari git commit: AMBARI-17475. Ambari tries to find the old jdbc etc. despite a reset and re-setup with another DB (aonishuk)
Repository: ambari Updated Branches: refs/heads/branch-2.4 2edf6f695 -> f8bb9f850 refs/heads/trunk f9c89885e -> 7a43ef483 AMBARI-17475. Ambari try to find old jdbc etc despite reset and resetuping with another DB (aonishuk) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7a43ef48 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7a43ef48 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7a43ef48 Branch: refs/heads/trunk Commit: 7a43ef483f33bcf03debe52ca1b24076630c771d Parents: f9c8988 Author: Andrew OnishukAuthored: Wed Jun 29 19:08:36 2016 +0300 Committer: Andrew Onishuk Committed: Wed Jun 29 19:08:36 2016 +0300 -- .../python/ambari_server/dbConfiguration_linux.py | 18 +- 1 file changed, 9 insertions(+), 9 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/7a43ef48/ambari-server/src/main/python/ambari_server/dbConfiguration_linux.py -- diff --git a/ambari-server/src/main/python/ambari_server/dbConfiguration_linux.py b/ambari-server/src/main/python/ambari_server/dbConfiguration_linux.py index 95c82c1..d09bd77 100644 --- a/ambari-server/src/main/python/ambari_server/dbConfiguration_linux.py +++ b/ambari-server/src/main/python/ambari_server/dbConfiguration_linux.py @@ -551,15 +551,15 @@ class PGConfig(LinuxDBMSConfig): # Store local database connection properties def _store_local_properties(self, properties, options): -properties.removeOldProp(JDBC_DATABASE_PROPERTY) -properties.removeOldProp(JDBC_DATABASE_NAME_PROPERTY) -properties.removeOldProp(JDBC_POSTGRES_SCHEMA_PROPERTY) -properties.removeOldProp(JDBC_HOSTNAME_PROPERTY) -properties.removeOldProp(JDBC_RCA_DRIVER_PROPERTY) -properties.removeOldProp(JDBC_RCA_URL_PROPERTY) -properties.removeOldProp(JDBC_PORT_PROPERTY) -properties.removeOldProp(JDBC_DRIVER_PROPERTY) -properties.removeOldProp(JDBC_URL_PROPERTY) +properties.removeProp(JDBC_DATABASE_PROPERTY) +properties.removeProp(JDBC_DATABASE_NAME_PROPERTY) 
+properties.removeProp(JDBC_POSTGRES_SCHEMA_PROPERTY) +properties.removeProp(JDBC_HOSTNAME_PROPERTY) +properties.removeProp(JDBC_RCA_DRIVER_PROPERTY) +properties.removeProp(JDBC_RCA_URL_PROPERTY) +properties.removeProp(JDBC_PORT_PROPERTY) +properties.removeProp(JDBC_DRIVER_PROPERTY) +properties.removeProp(JDBC_URL_PROPERTY) # Store the properties properties.process_pair(PERSISTENCE_TYPE_PROPERTY, self.persistence_type)
ambari git commit: AMBARI-17406 Update Zeppelin service definition (Renjith Kamath via pallavkul)
Repository: ambari Updated Branches: refs/heads/branch-2.4 f350cae82 -> 2edf6f695 AMBARI-17406 Update Zeppelin service definition (Renjith Kamath via pallavkul) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2edf6f69 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2edf6f69 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2edf6f69 Branch: refs/heads/branch-2.4 Commit: 2edf6f695f212722874a7de8666d5b7b8d9b4661 Parents: f350cae Author: Pallav KulshreshthaAuthored: Wed Jun 29 20:37:37 2016 +0530 Committer: Pallav Kulshreshtha Committed: Wed Jun 29 20:39:02 2016 +0530 -- .../0.6.0.2.5/configuration/zeppelin-env.xml| 16 ++-- .../0.6.0.2.5/package/scripts/master.py | 82 +--- .../0.6.0.2.5/package/scripts/params.py | 10 ++- 3 files changed, 75 insertions(+), 33 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/2edf6f69/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-env.xml -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-env.xml b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-env.xml index 844ae98..ddca230 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-env.xml +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-env.xml @@ -168,10 +168,10 @@ export SPARK_YARN_USER_ENV="PYTHONPATH=${PYTHONPATH}" #ldapRealm.userDnTemplate = uid={0},cn=users,cn=accounts,dc=hortonworks,dc=com #ldapRealm.contextFactory.url = ldap://ldaphost:389 #ldapRealm.contextFactory.authenticationMechanism = SIMPLE -sessionManager = org.apache.shiro.web.session.mgt.DefaultWebSessionManager -securityManager.sessionManager = $sessionManager +#sessionManager = org.apache.shiro.web.session.mgt.DefaultWebSessionManager +#securityManager.sessionManager = 
$sessionManager # 86,400,000 milliseconds = 24 hour -securityManager.sessionManager.globalSessionTimeout = 8640 +#securityManager.sessionManager.globalSessionTimeout = 8640 shiro.loginUrl = /api/login [urls] @@ -224,7 +224,10 @@ log4j.appender.dailyfile.layout.ConversionPattern=%5p [%d] ({%t} %F[%M]:%L) - %m zeppelin.server.kerberos.principal -none + + + true + Kerberos principal name for the Zeppelin. @@ -232,7 +235,10 @@ log4j.appender.dailyfile.layout.ConversionPattern=%5p [%d] ({%t} %F[%M]:%L) - %m zeppelin.server.kerberos.keytab -none + + + true + Location of the kerberos keytab file for the Zeppelin. http://git-wip-us.apache.org/repos/asf/ambari/blob/2edf6f69/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py index f2899d0..fd6cbb6 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py @@ -161,8 +161,11 @@ class Master(Script): not os.path.exists(params.conf_dir + "/interpreter.json"): Execute(params.zeppelin_dir + '/bin/zeppelin-daemon.sh start >> ' + params.zeppelin_log_file, user=params.zeppelin_user) -time.sleep(20) -self.update_zeppelin_interpreter() + time.sleep(20) + self.update_zeppelin_interpreter() + +if params.security_enabled: + self.update_kerberos_properties() Execute(params.zeppelin_dir + '/bin/zeppelin-daemon.sh restart >> ' + params.zeppelin_log_file, user=params.zeppelin_user) @@ -183,7 +186,7 @@ class Master(Script): pid_file = '' check_process_status(pid_file) - def update_zeppelin_interpreter(self): + def get_interpreter_settings(self): import params import json @@ -191,39 +194,66 @@ class Master(Script): interpreter_config_file = 
open(interpreter_config, "r") config_data = json.load(interpreter_config_file) interpreter_config_file.close() +return config_data + + def set_interpreter_settings(self, config_data): +import params +import json + +interpreter_config = params.conf_dir + "/interpreter.json" +interpreter_config_file = open(interpreter_config, "w+") +
ambari git commit: AMBARI-17406 Update Zeppelin service definition (Renjith Kamath via pallavkul)
Repository: ambari Updated Branches: refs/heads/trunk e63317d8a -> f9c89885e AMBARI-17406 Update Zeppelin service definition (Renjith Kamath via pallavkul) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f9c89885 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f9c89885 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f9c89885 Branch: refs/heads/trunk Commit: f9c89885eae11163bcf471593bd35698f70eaeb0 Parents: e63317d Author: Pallav KulshreshthaAuthored: Wed Jun 29 20:37:37 2016 +0530 Committer: Pallav Kulshreshtha Committed: Wed Jun 29 20:37:37 2016 +0530 -- .../0.6.0.2.5/configuration/zeppelin-env.xml| 16 ++-- .../0.6.0.2.5/package/scripts/master.py | 82 +--- .../0.6.0.2.5/package/scripts/params.py | 10 ++- 3 files changed, 75 insertions(+), 33 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/f9c89885/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-env.xml -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-env.xml b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-env.xml index 844ae98..ddca230 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-env.xml +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-env.xml @@ -168,10 +168,10 @@ export SPARK_YARN_USER_ENV="PYTHONPATH=${PYTHONPATH}" #ldapRealm.userDnTemplate = uid={0},cn=users,cn=accounts,dc=hortonworks,dc=com #ldapRealm.contextFactory.url = ldap://ldaphost:389 #ldapRealm.contextFactory.authenticationMechanism = SIMPLE -sessionManager = org.apache.shiro.web.session.mgt.DefaultWebSessionManager -securityManager.sessionManager = $sessionManager +#sessionManager = org.apache.shiro.web.session.mgt.DefaultWebSessionManager +#securityManager.sessionManager = $sessionManager # 
86,400,000 milliseconds = 24 hour -securityManager.sessionManager.globalSessionTimeout = 8640 +#securityManager.sessionManager.globalSessionTimeout = 8640 shiro.loginUrl = /api/login [urls] @@ -224,7 +224,10 @@ log4j.appender.dailyfile.layout.ConversionPattern=%5p [%d] ({%t} %F[%M]:%L) - %m zeppelin.server.kerberos.principal -none + + + true + Kerberos principal name for the Zeppelin. @@ -232,7 +235,10 @@ log4j.appender.dailyfile.layout.ConversionPattern=%5p [%d] ({%t} %F[%M]:%L) - %m zeppelin.server.kerberos.keytab -none + + + true + Location of the kerberos keytab file for the Zeppelin. http://git-wip-us.apache.org/repos/asf/ambari/blob/f9c89885/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py index f2899d0..fd6cbb6 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py @@ -161,8 +161,11 @@ class Master(Script): not os.path.exists(params.conf_dir + "/interpreter.json"): Execute(params.zeppelin_dir + '/bin/zeppelin-daemon.sh start >> ' + params.zeppelin_log_file, user=params.zeppelin_user) -time.sleep(20) -self.update_zeppelin_interpreter() + time.sleep(20) + self.update_zeppelin_interpreter() + +if params.security_enabled: + self.update_kerberos_properties() Execute(params.zeppelin_dir + '/bin/zeppelin-daemon.sh restart >> ' + params.zeppelin_log_file, user=params.zeppelin_user) @@ -183,7 +186,7 @@ class Master(Script): pid_file = '' check_process_status(pid_file) - def update_zeppelin_interpreter(self): + def get_interpreter_settings(self): import params import json @@ -191,39 +194,66 @@ class Master(Script): interpreter_config_file = open(interpreter_config, "r") 
config_data = json.load(interpreter_config_file) interpreter_config_file.close() +return config_data + + def set_interpreter_settings(self, config_data): +import params +import json + +interpreter_config = params.conf_dir + "/interpreter.json" +interpreter_config_file = open(interpreter_config, "w+") +interpreter_config_file.write(json.dumps(config_data,
[1/6] ambari git commit: AMBARI-17333. ranger kms repo creation is failing after ranger kms is installed (Mugdha Varadkar via gautam)
Repository: ambari Updated Branches: refs/heads/branch-2.4 283256c83 -> f350cae82 refs/heads/trunk c40ec5820 -> 2360560fb http://git-wip-us.apache.org/repos/asf/ambari/blob/f350cae8/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json -- diff --git a/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json b/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json new file mode 100644 index 000..7a68a07 --- /dev/null +++ b/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json @@ -0,0 +1,873 @@ +{ +"localComponents": [ +"SECONDARY_NAMENODE", +"HDFS_CLIENT", +"DATANODE", +"NAMENODE", +"RANGER_ADMIN", +"RANGER_TAGSYNC", +"RANGER_USERSYNC", +"ZOOKEEPER_SERVER", +"ZOOKEEPER_CLIENT", +"KERBEROS_CLIENT", +"RANGER_KMS_SERVER" +], +"configuration_attributes": { +"ranger-kms-site": {}, +"ranger-hdfs-audit": {}, +"ssl-client": {}, +"ranger-admin-site": {}, +"kms-log4j": {}, +"ranger-hdfs-policymgr-ssl": {}, +"tagsync-application-properties": {}, +"ranger-env": {}, +"ranger-ugsync-site": {}, +"ranger-hdfs-plugin-properties": {}, +"ranger-kms-security": {}, +"kerberos-env": {}, +"kms-properties": {}, +"admin-properties": {}, +"ranger-kms-policymgr-ssl": {}, +"hdfs-site": { +"final": { +"dfs.datanode.data.dir": "true", +"dfs.namenode.http-address": "true", +"dfs.datanode.failed.volumes.tolerated": "true", +"dfs.support.append": "true", +"dfs.namenode.name.dir": "true", +"dfs.webhdfs.enabled": "true" +} +}, +"ranger-tagsync-site": {}, +"tagsync-log4j": {}, +"ranger-kms-audit": {}, +"hadoop-policy": {}, +"hdfs-log4j": {}, +"usersync-log4j": {}, +"krb5-conf": {}, +"kms-site": {}, +"core-site": { +"final": { +"fs.defaultFS": "true" +} +}, +"hadoop-env": {}, +"zookeeper-log4j": {}, +"ssl-server": {}, +"ranger-site": {}, +"zookeeper-env": {}, +"admin-log4j": {}, +"zoo.cfg": {}, +"ranger-hdfs-security": {}, +"usersync-properties": {}, +"kms-env": {}, +"dbks-site": {}, +"cluster-env": {} +}, +"public_hostname": 
"c6401.ambari.apache.org", +"commandId": "43-0", +"hostname": "c6401.ambari.apache.org", +"kerberosCommandParams": [], +"serviceName": "RANGER_KMS", +"role": "RANGER_KMS_SERVER", +"forceRefreshConfigTagsBeforeExecution": [], +"requestId": 43, +"agentConfigParams": { +"agent": { +"parallel_execution": 0 +} +}, +"clusterName": "c1", +"commandType": "EXECUTION_COMMAND", +"taskId": 200, +"roleParams": {}, +"configurationTags": { +"ranger-kms-site": { +"tag": "version1467026737262" +}, +"ranger-hdfs-audit": { +"tag": "version1466705299922" +}, +"ssl-client": { +"tag": "version1" +}, +"ranger-admin-site": { +"tag": "version1467016680635" +}, +"kms-log4j": { +"tag": "version1467026737262" +}, +"ranger-hdfs-policymgr-ssl": { +"tag": "version1466705299922" +}, +"tagsync-application-properties": { +"tag": "version1467016680511" +}, +"ranger-env": { +"tag": "version1466705299949" +}, +"ranger-ugsync-site": { +"tag": "version1467016680537" +}, +"ranger-hdfs-plugin-properties": { +"tag": "version1466705299922" +}, +"ranger-kms-security": { +"tag": "version1467026737262" +}, +"kerberos-env": { +"tag": "version1467016537243" +}, +"admin-log4j": { +"tag": "version1466705299949" +}, +"admin-properties": { +"tag": "version1466705299949" +}, +"ranger-kms-policymgr-ssl": { +"tag": "version1467026737262" +}, +"hdfs-site": { +"tag": "version1467016680401" +}, +"ranger-tagsync-site": { +"tag": "version1467016680586" +}, +"zoo.cfg": { +"tag": "version1" +}, +"ranger-kms-audit": { +"tag": "version1467026737262" +}, +"hadoop-policy": { +"tag": "version1" +}, +"hdfs-log4j": { +"tag": "version1"
[5/6] ambari git commit: AMBARI-17333. ranger kms repo creation is failing after ranger kms is installed (Mugdha Varadkar via gautam)
http://git-wip-us.apache.org/repos/asf/ambari/blob/2360560f/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-default.json -- diff --git a/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-default.json b/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-default.json new file mode 100644 index 000..d1365ac --- /dev/null +++ b/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-default.json @@ -0,0 +1,803 @@ +{ +"localComponents": [ +"SECONDARY_NAMENODE", +"HDFS_CLIENT", +"DATANODE", +"NAMENODE", +"ZOOKEEPER_SERVER", +"ZOOKEEPER_CLIENT", +"RANGER_USERSYNC", +"RANGER_ADMIN", +"RANGER_TAGSYNC", +"RANGER_KMS_SERVER" +], +"configuration_attributes": { +"ranger-hdfs-audit": {}, +"ssl-client": {}, +"ranger-admin-site": {}, +"ranger-hdfs-policymgr-ssl": {}, +"tagsync-application-properties": {}, +"ranger-env": {}, +"usersync-log4j": {}, +"admin-properties": {}, +"ranger-ugsync-site": {}, +"hdfs-site": { +"final": { +"dfs.datanode.data.dir": "true", +"dfs.namenode.http-address": "true", +"dfs.datanode.failed.volumes.tolerated": "true", +"dfs.support.append": "true", +"dfs.namenode.name.dir": "true", +"dfs.webhdfs.enabled": "true" +} +}, +"ranger-tagsync-site": {}, +"zoo.cfg": {}, +"hadoop-policy": {}, +"hdfs-log4j": {}, +"ranger-hdfs-plugin-properties": {}, +"core-site": { +"final": { +"fs.defaultFS": "true" +} +}, +"hadoop-env": {}, +"zookeeper-log4j": {}, +"ssl-server": {}, +"ranger-site": {}, +"admin-log4j": {}, +"tagsync-log4j": {}, +"ranger-hdfs-security": {}, +"usersync-properties": {}, +"zookeeper-env": {}, +"cluster-env": {}, +"dbks-site": {}, +"kms-env": {}, +"kms-log4j": {}, +"kms-properties": {}, +"kms-site": {}, +"ranger-kms-security": {}, +"ranger-kms-site": {}, +"ranger-kms-policymgr-ssl": {}, +"ranger-kms-audit": {} +}, +"public_hostname": "c6401.ambari.apache.org", +"commandId": "9-1", +"hostname": "c6401.ambari.apache.org", +"kerberosCommandParams": [], +"serviceName": "RANGER_KMS", +"role": "RANGER_KMS_SERVER", 
+"forceRefreshConfigTagsBeforeExecution": [], +"requestId": 9, +"agentConfigParams": { +"agent": { +"parallel_execution": 0 +} +}, +"clusterName": "c1", +"commandType": "EXECUTION_COMMAND", +"taskId": 64, +"roleParams": {}, +"configurationTags": { +"ranger-hdfs-audit": { +"tag": "version1466427664617" +}, +"ssl-client": { +"tag": "version1" +}, +"ranger-admin-site": { +"tag": "version1466427664621" +}, +"ranger-hdfs-policymgr-ssl": { +"tag": "version1466427664617" +}, +"tagsync-application-properties": { +"tag": "version1466427664621" +}, +"ranger-env": { +"tag": "version1466427664621" +}, +"usersync-log4j": { +"tag": "version1466427664621" +}, +"admin-properties": { +"tag": "version1466427664621" +}, +"ranger-ugsync-site": { +"tag": "version1466427664621" +}, +"hdfs-site": { +"tag": "version1" +}, +"ranger-tagsync-site": { +"tag": "version1466427664621" +}, +"zoo.cfg": { +"tag": "version1" +}, +"hadoop-policy": { +"tag": "version1" +}, +"hdfs-log4j": { +"tag": "version1" +}, +"ranger-hdfs-plugin-properties": { +"tag": "version1466427664617" +}, +"core-site": { +"tag": "version1" +}, +"hadoop-env": { +"tag": "version1" +}, +"zookeeper-log4j": { +"tag": "version1" +}, +"ssl-server": { +"tag": "version1" +}, +"ranger-site": { +"tag": "version1466427664621" +}, +"admin-log4j": { +"tag": "version1466427664621" +}, +"tagsync-log4j": { +"tag": "version1466427664621" +}, +"ranger-hdfs-security": { +"tag": "version1466427664617" +}, +"usersync-properties": { +"tag": "version1466427664621" +}, +"zookeeper-env": { +
ambari git commit: AMBARI-17476. ServiceAccounts page not available in ambari (alexantonenko)
Repository: ambari Updated Branches: refs/heads/trunk 2360560fb -> e63317d8a AMBARI-17476. ServiceAccounts page nota available in ambari (alexantonenko) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e63317d8 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e63317d8 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e63317d8 Branch: refs/heads/trunk Commit: e63317d8acc010b47d36fb1c5caeb30f3782f993 Parents: 2360560 Author: Alex AntonenkoAuthored: Wed Jun 29 15:49:55 2016 +0300 Committer: Alex Antonenko Committed: Wed Jun 29 17:39:47 2016 +0300 -- .../main/resources/ui/admin-web/app/scripts/services/Cluster.js | 2 +- ambari-web/app/routes/main.js| 4 ++-- ambari-web/app/templates/application.hbs | 4 ++-- ambari-web/app/views/main/admin.js | 2 +- ambari-web/app/views/main/menu.js| 4 ++-- ambari-web/test/app_test.js | 2 +- 6 files changed, 9 insertions(+), 9 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/e63317d8/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Cluster.js -- diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Cluster.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Cluster.js index 3160cd0..36baeb5 100644 --- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Cluster.js +++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Cluster.js @@ -76,7 +76,7 @@ angular.module('ambariAdminConsole') "AMBARI.MANAGE_USERS", "AMBARI.MANAGE_VIEWS", "AMBARI.RENAME_CLUSTER", - "AMBARI.SET_SERVICE_USERS_GROUPS" + "SERVICE.SET_SERVICE_USERS_GROUPS" ], orderedLevels: ['SERVICE', 'HOST', 'CLUSTER', 'AMBARI'], http://git-wip-us.apache.org/repos/asf/ambari/blob/e63317d8/ambari-web/app/routes/main.js -- diff --git a/ambari-web/app/routes/main.js b/ambari-web/app/routes/main.js index 4545f54..ae16e27 100644 --- a/ambari-web/app/routes/main.js +++ 
b/ambari-web/app/routes/main.js @@ -348,7 +348,7 @@ module.exports = Em.Route.extend(App.RouterRedirections, { admin: Em.Route.extend({ route: '/admin', enter: function (router, transition) { - if (router.get('loggedIn') && !App.isAuthorized('CLUSTER.TOGGLE_KERBEROS, AMBARI.SET_SERVICE_USERS_GROUPS, CLUSTER.UPGRADE_DOWNGRADE_STACK, CLUSTER.VIEW_STACK_DETAILS') + if (router.get('loggedIn') && !App.isAuthorized('CLUSTER.TOGGLE_KERBEROS, SERVICE.SET_SERVICE_USERS_GROUPS, CLUSTER.UPGRADE_DOWNGRADE_STACK, CLUSTER.VIEW_STACK_DETAILS') && !(App.get('upgradeInProgress') || App.get('upgradeHolding'))) { Em.run.next(function () { router.transitionTo('main.dashboard.index'); @@ -527,7 +527,7 @@ module.exports = Em.Route.extend(App.RouterRedirections, { adminServiceAccounts: Em.Route.extend({ route: '/serviceAccounts', enter: function (router, transition) { -if (router.get('loggedIn') && !App.isAuthorized('AMBARI.SET_SERVICE_USERS_GROUPS')) { +if (router.get('loggedIn') && !App.isAuthorized('SERVICE.SET_SERVICE_USERS_GROUPS')) { router.transitionTo('main.dashboard.index'); } }, http://git-wip-us.apache.org/repos/asf/ambari/blob/e63317d8/ambari-web/app/templates/application.hbs -- diff --git a/ambari-web/app/templates/application.hbs b/ambari-web/app/templates/application.hbs index ce78c89..47be986 100644 --- a/ambari-web/app/templates/application.hbs +++ b/ambari-web/app/templates/application.hbs @@ -65,7 +65,7 @@ {{t app.aboutAmbari}} {{#if App.router.clusterInstallCompleted}} {{#if isClusterDataLoaded}} -{{#isAuthorized "AMBARI.ADD_DELETE_CLUSTERS, AMBARI.ASSIGN_ROLES, AMBARI.EDIT_STACK_REPOS, AMBARI.MANAGE_GROUPS, AMBARI.MANAGE_STACK_VERSIONS, AMBARI.MANAGE_USERS, AMBARI.MANAGE_VIEWS, AMBARI.RENAME_CLUSTER, AMBARI.SET_SERVICE_USERS_GROUPS"}} +{{#isAuthorized "AMBARI.ADD_DELETE_CLUSTERS, AMBARI.ASSIGN_ROLES, AMBARI.EDIT_STACK_REPOS, AMBARI.MANAGE_GROUPS, AMBARI.MANAGE_STACK_VERSIONS, AMBARI.MANAGE_USERS, AMBARI.MANAGE_VIEWS, AMBARI.RENAME_CLUSTER, 
SERVICE.SET_SERVICE_USERS_GROUPS"}} {{t app.manageAmbari}} @@ -73,7 +73,7 @@ {{/if}} {{else}} {{#if
[6/6] ambari git commit: AMBARI-17333. ranger kms repo creation is failing after ranger kms is installed(Mugdha Varadkar via gautam)
AMBARI-17333. ranger kms repo creation is failing after ranger kms is installed(Mugdha Varadkar via gautam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2360560f Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2360560f Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2360560f Branch: refs/heads/trunk Commit: 2360560fb39d36888db6372dab678b1d501ebd18 Parents: c40ec58 Author: Gautam BoradAuthored: Tue Jun 28 11:32:20 2016 +0530 Committer: Gautam Borad Committed: Wed Jun 29 20:09:11 2016 +0530 -- .../libraries/functions/ranger_functions_v2.py | 8 +- .../common-services/RANGER/0.6.0/kerberos.json | 2 +- .../RANGER_KMS/0.5.0.2.3/package/scripts/kms.py | 8 +- .../0.5.0.2.3/package/scripts/params.py | 35 +- .../stacks/2.5/RANGER_KMS/test_kms_server.py| 712 +++ .../stacks/2.5/configs/ranger-kms-default.json | 803 + .../stacks/2.5/configs/ranger-kms-secured.json | 873 +++ 7 files changed, 2419 insertions(+), 22 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/2360560f/ambari-common/src/main/python/resource_management/libraries/functions/ranger_functions_v2.py -- diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/ranger_functions_v2.py b/ambari-common/src/main/python/resource_management/libraries/functions/ranger_functions_v2.py index 05beadb..5c3a3bb 100644 --- a/ambari-common/src/main/python/resource_management/libraries/functions/ranger_functions_v2.py +++ b/ambari-common/src/main/python/resource_management/libraries/functions/ranger_functions_v2.py @@ -331,7 +331,7 @@ class RangeradminV2: @safe_retry(times=5, sleep_time=8, backoff_factor=1.5, err_class=Fail, return_on_fail=None) - def get_repository_by_name_curl(self, component_user,component_user_keytab,component_user_principal,name, component, status): + def get_repository_by_name_curl(self, component_user, component_user_keytab, component_user_principal, name, component, status, 
is_keyadmin = False): """ :param component_user: service user for which call is to be made :param component_user_keytab: keytab of service user @@ -344,6 +344,8 @@ class RangeradminV2: """ try: search_repo_url = self.url_repos_pub + "?serviceName=" + name + "&serviceType=" + component + "&status=" + status + if is_keyadmin: +search_repo_url = '{0}&suser=keyadmin'.format(search_repo_url) response,error_message,time_in_millis = self.call_curl_request(component_user,component_user_keytab,component_user_principal,search_repo_url,False,request_method='GET') response_stripped = response[1:len(response) - 1] if response_stripped and len(response_stripped) > 0: @@ -360,7 +362,7 @@ class RangeradminV2: @safe_retry(times=5, sleep_time=8, backoff_factor=1.5, err_class=Fail, return_on_fail=None) - def create_repository_curl(self,component_user,component_user_keytab,component_user_principal,name, data,policy_user): + def create_repository_curl(self, component_user, component_user_keytab, component_user_principal, name, data, policy_user, is_keyadmin = False): """ :param component_user: service user for which call is to be made :param component_user_keytab: keytab of service user @@ -371,6 +373,8 @@ class RangeradminV2: """ try: search_repo_url = self.url_repos_pub + if is_keyadmin: +search_repo_url = '{0}?suser=keyadmin'.format(search_repo_url) header = 'Content-Type: application/json' method = 'POST' http://git-wip-us.apache.org/repos/asf/ambari/blob/2360560f/ambari-server/src/main/resources/common-services/RANGER/0.6.0/kerberos.json -- diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/kerberos.json b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/kerberos.json index c633230..91a0032 100644 --- a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/kerberos.json +++ b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/kerberos.json @@ -119,7 +119,7 @@ "atlas.jaas.KafkaClient.option.keyTab": "{{tagsync_keytab_path}}", 
"atlas.jaas.KafkaClient.option.principal": "{{tagsync_jaas_principal}}", "atlas.kafka.sasl.kerberos.service.name": "kafka", -"atlas.kafka.security.protocol": "SASL_PLAINTEXT" +"atlas.kafka.security.protocol": "PLAINTEXTSASL" } } ]
[3/6] ambari git commit: AMBARI-17333. ranger kms repo creation is failing after ranger kms is installed(Mugdha Varadkar via gautam)
AMBARI-17333. ranger kms repo creation is failing after ranger kms is installed(Mugdha Varadkar via gautam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f350cae8 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f350cae8 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f350cae8 Branch: refs/heads/branch-2.4 Commit: f350cae8249195cf256cdd7d988a007c2aa917bc Parents: 283256c Author: Gautam BoradAuthored: Tue Jun 28 11:32:20 2016 +0530 Committer: Gautam Borad Committed: Wed Jun 29 20:08:58 2016 +0530 -- .../libraries/functions/ranger_functions_v2.py | 8 +- .../common-services/RANGER/0.6.0/kerberos.json | 2 +- .../RANGER_KMS/0.5.0.2.3/package/scripts/kms.py | 8 +- .../0.5.0.2.3/package/scripts/params.py | 35 +- .../stacks/2.5/RANGER_KMS/test_kms_server.py| 712 +++ .../stacks/2.5/configs/ranger-kms-default.json | 803 + .../stacks/2.5/configs/ranger-kms-secured.json | 873 +++ 7 files changed, 2419 insertions(+), 22 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/f350cae8/ambari-common/src/main/python/resource_management/libraries/functions/ranger_functions_v2.py -- diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/ranger_functions_v2.py b/ambari-common/src/main/python/resource_management/libraries/functions/ranger_functions_v2.py index 05beadb..5c3a3bb 100644 --- a/ambari-common/src/main/python/resource_management/libraries/functions/ranger_functions_v2.py +++ b/ambari-common/src/main/python/resource_management/libraries/functions/ranger_functions_v2.py @@ -331,7 +331,7 @@ class RangeradminV2: @safe_retry(times=5, sleep_time=8, backoff_factor=1.5, err_class=Fail, return_on_fail=None) - def get_repository_by_name_curl(self, component_user,component_user_keytab,component_user_principal,name, component, status): + def get_repository_by_name_curl(self, component_user, component_user_keytab, component_user_principal, name, component, 
status, is_keyadmin = False): """ :param component_user: service user for which call is to be made :param component_user_keytab: keytab of service user @@ -344,6 +344,8 @@ class RangeradminV2: """ try: search_repo_url = self.url_repos_pub + "?serviceName=" + name + "&serviceType=" + component + "&status=" + status + if is_keyadmin: +search_repo_url = '{0}&suser=keyadmin'.format(search_repo_url) response,error_message,time_in_millis = self.call_curl_request(component_user,component_user_keytab,component_user_principal,search_repo_url,False,request_method='GET') response_stripped = response[1:len(response) - 1] if response_stripped and len(response_stripped) > 0: @@ -360,7 +362,7 @@ class RangeradminV2: @safe_retry(times=5, sleep_time=8, backoff_factor=1.5, err_class=Fail, return_on_fail=None) - def create_repository_curl(self,component_user,component_user_keytab,component_user_principal,name, data,policy_user): + def create_repository_curl(self, component_user, component_user_keytab, component_user_principal, name, data, policy_user, is_keyadmin = False): """ :param component_user: service user for which call is to be made :param component_user_keytab: keytab of service user @@ -371,6 +373,8 @@ class RangeradminV2: """ try: search_repo_url = self.url_repos_pub + if is_keyadmin: +search_repo_url = '{0}?suser=keyadmin'.format(search_repo_url) header = 'Content-Type: application/json' method = 'POST' http://git-wip-us.apache.org/repos/asf/ambari/blob/f350cae8/ambari-server/src/main/resources/common-services/RANGER/0.6.0/kerberos.json -- diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/kerberos.json b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/kerberos.json index c633230..91a0032 100644 --- a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/kerberos.json +++ b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/kerberos.json @@ -119,7 +119,7 @@ "atlas.jaas.KafkaClient.option.keyTab": "{{tagsync_keytab_path}}", 
"atlas.jaas.KafkaClient.option.principal": "{{tagsync_jaas_principal}}", "atlas.kafka.sasl.kerberos.service.name": "kafka", -"atlas.kafka.security.protocol": "SASL_PLAINTEXT" +"atlas.kafka.security.protocol": "PLAINTEXTSASL" } } ]
[2/6] ambari git commit: AMBARI-17333. ranger kms repo creation is failing after ranger kms is installed(Mugdha Varadkar via gautam)
http://git-wip-us.apache.org/repos/asf/ambari/blob/f350cae8/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-default.json -- diff --git a/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-default.json b/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-default.json new file mode 100644 index 000..d1365ac --- /dev/null +++ b/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-default.json @@ -0,0 +1,803 @@ +{ +"localComponents": [ +"SECONDARY_NAMENODE", +"HDFS_CLIENT", +"DATANODE", +"NAMENODE", +"ZOOKEEPER_SERVER", +"ZOOKEEPER_CLIENT", +"RANGER_USERSYNC", +"RANGER_ADMIN", +"RANGER_TAGSYNC", +"RANGER_KMS_SERVER" +], +"configuration_attributes": { +"ranger-hdfs-audit": {}, +"ssl-client": {}, +"ranger-admin-site": {}, +"ranger-hdfs-policymgr-ssl": {}, +"tagsync-application-properties": {}, +"ranger-env": {}, +"usersync-log4j": {}, +"admin-properties": {}, +"ranger-ugsync-site": {}, +"hdfs-site": { +"final": { +"dfs.datanode.data.dir": "true", +"dfs.namenode.http-address": "true", +"dfs.datanode.failed.volumes.tolerated": "true", +"dfs.support.append": "true", +"dfs.namenode.name.dir": "true", +"dfs.webhdfs.enabled": "true" +} +}, +"ranger-tagsync-site": {}, +"zoo.cfg": {}, +"hadoop-policy": {}, +"hdfs-log4j": {}, +"ranger-hdfs-plugin-properties": {}, +"core-site": { +"final": { +"fs.defaultFS": "true" +} +}, +"hadoop-env": {}, +"zookeeper-log4j": {}, +"ssl-server": {}, +"ranger-site": {}, +"admin-log4j": {}, +"tagsync-log4j": {}, +"ranger-hdfs-security": {}, +"usersync-properties": {}, +"zookeeper-env": {}, +"cluster-env": {}, +"dbks-site": {}, +"kms-env": {}, +"kms-log4j": {}, +"kms-properties": {}, +"kms-site": {}, +"ranger-kms-security": {}, +"ranger-kms-site": {}, +"ranger-kms-policymgr-ssl": {}, +"ranger-kms-audit": {} +}, +"public_hostname": "c6401.ambari.apache.org", +"commandId": "9-1", +"hostname": "c6401.ambari.apache.org", +"kerberosCommandParams": [], +"serviceName": "RANGER_KMS", +"role": "RANGER_KMS_SERVER", 
+"forceRefreshConfigTagsBeforeExecution": [], +"requestId": 9, +"agentConfigParams": { +"agent": { +"parallel_execution": 0 +} +}, +"clusterName": "c1", +"commandType": "EXECUTION_COMMAND", +"taskId": 64, +"roleParams": {}, +"configurationTags": { +"ranger-hdfs-audit": { +"tag": "version1466427664617" +}, +"ssl-client": { +"tag": "version1" +}, +"ranger-admin-site": { +"tag": "version1466427664621" +}, +"ranger-hdfs-policymgr-ssl": { +"tag": "version1466427664617" +}, +"tagsync-application-properties": { +"tag": "version1466427664621" +}, +"ranger-env": { +"tag": "version1466427664621" +}, +"usersync-log4j": { +"tag": "version1466427664621" +}, +"admin-properties": { +"tag": "version1466427664621" +}, +"ranger-ugsync-site": { +"tag": "version1466427664621" +}, +"hdfs-site": { +"tag": "version1" +}, +"ranger-tagsync-site": { +"tag": "version1466427664621" +}, +"zoo.cfg": { +"tag": "version1" +}, +"hadoop-policy": { +"tag": "version1" +}, +"hdfs-log4j": { +"tag": "version1" +}, +"ranger-hdfs-plugin-properties": { +"tag": "version1466427664617" +}, +"core-site": { +"tag": "version1" +}, +"hadoop-env": { +"tag": "version1" +}, +"zookeeper-log4j": { +"tag": "version1" +}, +"ssl-server": { +"tag": "version1" +}, +"ranger-site": { +"tag": "version1466427664621" +}, +"admin-log4j": { +"tag": "version1466427664621" +}, +"tagsync-log4j": { +"tag": "version1466427664621" +}, +"ranger-hdfs-security": { +"tag": "version1466427664617" +}, +"usersync-properties": { +"tag": "version1466427664621" +}, +"zookeeper-env": { +
[4/6] ambari git commit: AMBARI-17333. ranger kms repo creation is failing after ranger kms is installed(Mugdha Varadkar via gautam)
http://git-wip-us.apache.org/repos/asf/ambari/blob/2360560f/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json -- diff --git a/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json b/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json new file mode 100644 index 000..7a68a07 --- /dev/null +++ b/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json @@ -0,0 +1,873 @@ +{ +"localComponents": [ +"SECONDARY_NAMENODE", +"HDFS_CLIENT", +"DATANODE", +"NAMENODE", +"RANGER_ADMIN", +"RANGER_TAGSYNC", +"RANGER_USERSYNC", +"ZOOKEEPER_SERVER", +"ZOOKEEPER_CLIENT", +"KERBEROS_CLIENT", +"RANGER_KMS_SERVER" +], +"configuration_attributes": { +"ranger-kms-site": {}, +"ranger-hdfs-audit": {}, +"ssl-client": {}, +"ranger-admin-site": {}, +"kms-log4j": {}, +"ranger-hdfs-policymgr-ssl": {}, +"tagsync-application-properties": {}, +"ranger-env": {}, +"ranger-ugsync-site": {}, +"ranger-hdfs-plugin-properties": {}, +"ranger-kms-security": {}, +"kerberos-env": {}, +"kms-properties": {}, +"admin-properties": {}, +"ranger-kms-policymgr-ssl": {}, +"hdfs-site": { +"final": { +"dfs.datanode.data.dir": "true", +"dfs.namenode.http-address": "true", +"dfs.datanode.failed.volumes.tolerated": "true", +"dfs.support.append": "true", +"dfs.namenode.name.dir": "true", +"dfs.webhdfs.enabled": "true" +} +}, +"ranger-tagsync-site": {}, +"tagsync-log4j": {}, +"ranger-kms-audit": {}, +"hadoop-policy": {}, +"hdfs-log4j": {}, +"usersync-log4j": {}, +"krb5-conf": {}, +"kms-site": {}, +"core-site": { +"final": { +"fs.defaultFS": "true" +} +}, +"hadoop-env": {}, +"zookeeper-log4j": {}, +"ssl-server": {}, +"ranger-site": {}, +"zookeeper-env": {}, +"admin-log4j": {}, +"zoo.cfg": {}, +"ranger-hdfs-security": {}, +"usersync-properties": {}, +"kms-env": {}, +"dbks-site": {}, +"cluster-env": {} +}, +"public_hostname": "c6401.ambari.apache.org", +"commandId": "43-0", +"hostname": "c6401.ambari.apache.org", +"kerberosCommandParams": [], 
+"serviceName": "RANGER_KMS", +"role": "RANGER_KMS_SERVER", +"forceRefreshConfigTagsBeforeExecution": [], +"requestId": 43, +"agentConfigParams": { +"agent": { +"parallel_execution": 0 +} +}, +"clusterName": "c1", +"commandType": "EXECUTION_COMMAND", +"taskId": 200, +"roleParams": {}, +"configurationTags": { +"ranger-kms-site": { +"tag": "version1467026737262" +}, +"ranger-hdfs-audit": { +"tag": "version1466705299922" +}, +"ssl-client": { +"tag": "version1" +}, +"ranger-admin-site": { +"tag": "version1467016680635" +}, +"kms-log4j": { +"tag": "version1467026737262" +}, +"ranger-hdfs-policymgr-ssl": { +"tag": "version1466705299922" +}, +"tagsync-application-properties": { +"tag": "version1467016680511" +}, +"ranger-env": { +"tag": "version1466705299949" +}, +"ranger-ugsync-site": { +"tag": "version1467016680537" +}, +"ranger-hdfs-plugin-properties": { +"tag": "version1466705299922" +}, +"ranger-kms-security": { +"tag": "version1467026737262" +}, +"kerberos-env": { +"tag": "version1467016537243" +}, +"admin-log4j": { +"tag": "version1466705299949" +}, +"admin-properties": { +"tag": "version1466705299949" +}, +"ranger-kms-policymgr-ssl": { +"tag": "version1467026737262" +}, +"hdfs-site": { +"tag": "version1467016680401" +}, +"ranger-tagsync-site": { +"tag": "version1467016680586" +}, +"zoo.cfg": { +"tag": "version1" +}, +"ranger-kms-audit": { +"tag": "version1467026737262" +}, +"hadoop-policy": { +"tag": "version1" +}, +"hdfs-log4j": { +"tag": "version1" +}, +"usersync-log4j": { +"tag": "version1466705299949" +}, +"krb5-conf": { +
[06/10] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"
http://git-wip-us.apache.org/repos/asf/ambari/blob/c40ec582/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java deleted file mode 100644 index 9f9e053..000 --- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java +++ /dev/null @@ -1,281 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - - -package org.apache.ambari.view.huetoambarimigration.migration.hive.savedquery; - -import org.apache.ambari.view.ViewContext; -import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase; -import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase; -import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.HiveModel; -import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationModel; -import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound; -import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.MysqlQuerySetAmbariDB; -import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.OracleQuerySetAmbariDB; -import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.PostgressQuerySetAmbariDB; -import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.QuerySetAmbariDB; -import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset.*; -import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager; -import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager; -import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse; -import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation; -import org.apache.log4j.Logger; - -import java.beans.PropertyVetoException; -import java.io.IOException; -import java.net.URISyntaxException; -import java.sql.Connection; -import java.sql.SQLException; -import java.text.ParseException; -import java.util.ArrayList; - -public class HiveSavedQueryMigrationUtility { - - - - protected MigrationResourceManager resourceManager = null; - - public synchronized 
PersonalCRUDResourceManager getResourceManager(ViewContext view) { -if (resourceManager == null) { - resourceManager = new MigrationResourceManager(view); -} -return resourceManager; - } - - public MigrationModel hiveSavedQueryMigration(String username, String instance, String startDate, String endDate, ViewContext view, MigrationResponse migrationresult, String jobid) throws IOException, ItemNotFound { - -long startTime = System.currentTimeMillis(); - -final Logger logger = Logger.getLogger(HiveSavedQueryMigrationUtility.class); - -Connection connectionAmbaridb = null; -Connection connectionHuedb = null; - -int i = 0; - -logger.info("-"); -logger.info("hive saved query Migration started"); -logger.info("-"); -logger.info("start date: " + startDate); -logger.info("enddate date: " + endDate); -logger.info("instance is: " + instance); -logger.info("hue username is : " + username); - -HiveSavedQueryMigrationImplementation hivesavedqueryimpl = new HiveSavedQueryMigrationImplementation();/* creating Implementation object */ - -QuerySet huedatabase=null; - -if(view.getProperties().get("huedrivername").contains("mysql")) -{ - huedatabase=new
[07/10] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"
http://git-wip-us.apache.org/repos/asf/ambari/blob/c40ec582/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java deleted file mode 100644 index bcbe4de..000 --- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java +++ /dev/null @@ -1,106 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.ambari.view.huetoambarimigration.migration.configuration; - -import org.apache.ambari.view.ViewContext; -import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase; -import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.InstanceModel; - -import java.beans.PropertyVetoException; -import java.io.IOException; -import java.sql.*; -import java.util.ArrayList; -import java.util.List; - -import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.instancedetail.*; - - -public class HiveInstanceDetailsUtility { - - public List getInstancedetails(ViewContext view) throws PropertyVetoException, SQLException, IOException { - -List instancelist = new ArrayList<>(); -Connection conn = null; -conn = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection(); -conn.setAutoCommit(false); -PreparedStatement prSt; - -QuerySetAmbariDB ambaridatabase = null; - -if (view.getProperties().get("ambaridrivername").contains("mysql")) { - ambaridatabase = new MysqlQuerySetAmbariDB(); -} else if (view.getProperties().get("ambaridrivername").contains("postgresql")) { - ambaridatabase = new PostgressQuerySetAmbariDB(); -} else if (view.getProperties().get("ambaridrivername").contains("oracle")) { - ambaridatabase = new OracleQuerySetAmbariDB(); -} - -ResultSet rs1 = null; -prSt = ambaridatabase.getHiveInstanceDeatil(conn); -rs1 = prSt.executeQuery(); -int i = 0; - -while (rs1.next()) { - InstanceModel I = new InstanceModel(); - I.setInstanceName(rs1.getString(1)); - I.setId(i); - instancelist.add(I); - i++; -} -return instancelist; - - } - - public List getAllInstancedetails(ViewContext view) throws PropertyVetoException, SQLException, IOException { - -List instancelist = new ArrayList<>(); -Connection conn = null; 
-Statement stmt = null; -conn = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection(); -conn.setAutoCommit(false); -PreparedStatement prSt; - -QuerySetAmbariDB ambaridatabase = null; - -if (view.getProperties().get("ambaridrivername").contains("mysql")) { - ambaridatabase = new MysqlQuerySetAmbariDB(); -} else if (view.getProperties().get("ambaridrivername").contains("postgresql")) { - ambaridatabase = new PostgressQuerySetAmbariDB(); -} else if (view.getProperties().get("ambaridrivername").contains("oracle")) { - ambaridatabase = new OracleQuerySetAmbariDB(); -} - -ResultSet rs1 = null; -int i = 0; -prSt = ambaridatabase.getAllInstanceDeatil(conn); -rs1 = prSt.executeQuery(); - -while (rs1.next()) { - InstanceModel I = new InstanceModel(); - I.setInstanceName(rs1.getString(1)); - I.setId(i); - instancelist.add(I); - i++; -} -rs1.close(); -return instancelist; - - } - - -}
[02/10] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"
http://git-wip-us.apache.org/repos/asf/ambari/blob/c40ec582/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.jshintrc -- diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.jshintrc b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.jshintrc deleted file mode 100644 index 08096ef..000 --- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.jshintrc +++ /dev/null @@ -1,32 +0,0 @@ -{ - "predef": [ -"document", -"window", -"-Promise" - ], - "browser": true, - "boss": true, - "curly": true, - "debug": false, - "devel": true, - "eqeqeq": true, - "evil": true, - "forin": false, - "immed": false, - "laxbreak": false, - "newcap": true, - "noarg": true, - "noempty": false, - "nonew": false, - "nomen": false, - "onevar": false, - "plusplus": false, - "regexp": false, - "undef": true, - "sub": true, - "strict": false, - "white": false, - "eqnull": true, - "esnext": true, - "unused": true -} http://git-wip-us.apache.org/repos/asf/ambari/blob/c40ec582/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.travis.yml -- diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.travis.yml b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.travis.yml deleted file mode 100644 index 64533be..000 --- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.travis.yml +++ /dev/null @@ -1,22 +0,0 @@ -language: node_js -node_js: - - "4" - -sudo: false - -cache: - directories: -- node_modules - -before_install: - - npm config set spin false - - npm install -g bower - - npm install phantomjs-prebuilt - -install: - - npm install - - bower install - -script: - - npm test http://git-wip-us.apache.org/repos/asf/ambari/blob/c40ec582/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.watchmanconfig -- diff --git 
a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.watchmanconfig b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.watchmanconfig deleted file mode 100644 index e7834e3..000 --- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.watchmanconfig +++ /dev/null @@ -1,3 +0,0 @@ -{ - "ignore_dirs": ["tmp", "dist"] -} http://git-wip-us.apache.org/repos/asf/ambari/blob/c40ec582/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/README.md -- diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/README.md b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/README.md deleted file mode 100644 index 1d1a14f..000 --- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/README.md +++ /dev/null @@ -1,67 +0,0 @@ - -# Hueambarimigration-view - -This README outlines the details of collaborating on this Ember application. -A short introduction of this app could easily go here. - -## Prerequisites - -You will need the following things properly installed on your computer. - -* [Git](http://git-scm.com/) -* [Node.js](http://nodejs.org/) (with NPM) -* [Bower](http://bower.io/) -* [Ember CLI](http://ember-cli.com/) -* [PhantomJS](http://phantomjs.org/) - -## Installation - -* `git clone ` this repository -* change into the new directory -* `npm install` -* `bower install` - -## Running / Development - -* `ember server` -* Visit your app at [http://localhost:4200](http://localhost:4200). - -### Code Generators - -Make use of the many generators for code, try `ember help generate` for more details - -### Running Tests - -* `ember test` -* `ember test --server` - -### Building - -* `ember build` (development) -* `ember build --environment production` (production) - -### Deploying - -Specify what it takes to deploy your app. 
- -## Further Reading / Useful Links - -* [ember.js](http://emberjs.com/) -* [ember-cli](http://ember-cli.com/) -* Development Browser Extensions - * [ember inspector for chrome](https://chrome.google.com/webstore/detail/ember-inspector/bmdblncegkenkacieihfhpjfppoconhi) - * [ember inspector for firefox](https://addons.mozilla.org/en-US/firefox/addon/ember-inspector/) - http://git-wip-us.apache.org/repos/asf/ambari/blob/c40ec582/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/adapters/application.js -- diff --git
[09/10] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"
http://git-wip-us.apache.org/repos/asf/ambari/blob/c40ec582/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java deleted file mode 100644 index f3349c6..000 --- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java +++ /dev/null @@ -1,44 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset; - -/** - * - * Overriding methods for Oracle specific queries - */ - -public class OracleQuerySetAmbariDB extends QuerySetAmbariDB { - - @Override - protected String getSqlMaxDSidFromTableId(int id) { -return "select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id + ""; - } - @Override - protected String getTableIdSqlFromInstanceName() { -return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name=?"; - } - @Override - protected String getSqlInsertHiveHistory(int id) { -return "INSERT INTO ds_jobimpl_" + id + " values (?,'','','','','default',?,0,'','',?,'admin',?,'','job','','','Unknown',?,'','Worksheet')"; - } - @Override - protected String getRevSql(int id,String maxcount){ -return "delete from ds_jobimpl_" + id + " where ds_id='" + maxcount + "'"; - } - -} http://git-wip-us.apache.org/repos/asf/ambari/blob/c40ec582/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/PostgressQuerySetAmbariDB.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/PostgressQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/PostgressQuerySetAmbariDB.java deleted file mode 100644 index 5f4356b..000 --- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/PostgressQuerySetAmbariDB.java +++ /dev/null @@ -1,22 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset; - - -public class PostgressQuerySetAmbariDB extends QuerySetAmbariDB { -} http://git-wip-us.apache.org/repos/asf/ambari/blob/c40ec582/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/QuerySetAmbariDB.java
[04/10] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"
http://git-wip-us.apache.org/repos/asf/ambari/blob/c40ec582/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/InstanceModel.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/InstanceModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/InstanceModel.java deleted file mode 100644 index d7f2868..000 --- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/InstanceModel.java +++ /dev/null @@ -1,41 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.ambari.view.huetoambarimigration.resources.scripts.models; - - -public class InstanceModel { - - String instanceName; - int id; - - public String getInstanceName() { - return instanceName; - } - - public void setInstanceName(String instanceName) { - this.instanceName = instanceName; - } - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } -} http://git-wip-us.apache.org/repos/asf/ambari/blob/c40ec582/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/JobReturnIdModel.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/JobReturnIdModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/JobReturnIdModel.java deleted file mode 100644 index 1a247bb..000 --- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/JobReturnIdModel.java +++ /dev/null @@ -1,43 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.ambari.view.huetoambarimigration.resources.scripts.models; - -/** - * Created by temp on 5/19/16. - */ -public class JobReturnIdModel { - - int id; - String idforJob; - - public String getIdforJob() { -return idforJob; - } - - public void setIdforJob(String idforJob) { -this.idforJob = idforJob; - } - - public int getId() { -return id; - } - - public void setId(int id) { -this.id = id; - } -} http://git-wip-us.apache.org/repos/asf/ambari/blob/c40ec582/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java deleted file mode 100644 index f765e15..000 --- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java +++ /dev/null @@ -1,130 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more
[03/10] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"
http://git-wip-us.apache.org/repos/asf/ambari/blob/c40ec582/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java new file mode 100644 index 000..614c171 --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java @@ -0,0 +1,563 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ambari.view.huetoambarimigration.service.pig; + +import java.nio.charset.Charset; +import java.security.PrivilegedExceptionAction; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.io.BufferedInputStream; +import java.io.BufferedReader; +import java.io.BufferedWriter; +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.util.ArrayList; +import java.util.Calendar; +import java.util.Date; +import java.util.GregorianCalendar; +import java.util.Scanner; +import java.io.*; +import java.net.URISyntaxException; +import java.net.URL; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FSDataInputStream; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.log4j.Logger; +import org.jdom.Attribute; +import org.jdom.Document; +import org.jdom.Element; +import org.jdom.JDOMException; +import org.jdom.input.SAXBuilder; +import org.jdom.output.Format; +import org.jdom.output.XMLOutputter; +import org.json.JSONArray; +import org.json.JSONObject; + +import org.apache.ambari.view.huetoambarimigration.model.*; +import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader; + +public class PigJobImpl { + + static final Logger logger = Logger.getLogger(PigJobImpl.class); + + private static String readAll(Reader rd) throws IOException { +StringBuilder sb = new StringBuilder(); +int cp; +while ((cp = rd.read()) != -1) { + sb.append((char) cp); +} +return sb.toString(); + } + + public void 
wrtitetoalternatesqlfile(String dirname, String content, String instance, int i) throws IOException { +Date dNow = new Date(); +SimpleDateFormat ft = new SimpleDateFormat("-MM-dd hh:mm:ss"); +String currentDate = ft.format(dNow); +XMLOutputter xmlOutput = new XMLOutputter(); +xmlOutput.setFormat(Format.getPrettyFormat()); +File xmlfile = new File(ConfFileReader.getHomeDir() + "RevertChange.xml"); +if (xmlfile.exists()) { + String iteration = Integer.toString(i + 1); + SAXBuilder builder = new SAXBuilder(); + Document doc; + try { +doc = (Document) builder.build(xmlfile); +Element rootNode = doc.getRootElement(); +Element record = new Element("RevertRecord"); +record.setAttribute(new Attribute("id", iteration)); +record.addContent(new Element("datetime").setText(currentDate.toString())); +record.addContent(new Element("dirname").setText(dirname)); +record.addContent(new Element("instance").setText(instance)); +record.addContent(new Element("query").setText(content)); +rootNode.addContent(record); +xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml")); + } catch (JDOMException e) { + +logger.error("Jdom Exception: " , e); + } + + +} else { + // create + try { +String iteration = Integer.toString(i + 1); +Element revertrecord = new
[10/10] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"
Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)" This reverts commit 6f4a9c288ad52f913ae7d2a9eef7fb7bc1bfa568. Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c40ec582 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c40ec582 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c40ec582 Branch: refs/heads/trunk Commit: c40ec58203cd01d823734343c209322a6040cb60 Parents: 5352dac Author: Alex AntonenkoAuthored: Wed Jun 29 17:37:44 2016 +0300 Committer: Alex Antonenko Committed: Wed Jun 29 17:37:44 2016 +0300 -- contrib/views/hueambarimigration/pom.xml| 144 ++-- .../configurationcheck/ConfigurationCheck.java | 182 + .../configurationcheck/ProgressBarStatus.java | 54 ++ .../controller/hive/HiveHistoryMigration.java | 222 ++ .../hive/HiveSavedQueryMigration.java | 231 ++ .../controller/pig/PigJobMigration.java | 201 + .../controller/pig/PigScriptMigration.java | 208 + .../controller/revertchange/RevertChange.java | 217 ++ .../datasource/DataSourceAmbariDatabase.java| 2 + .../datasource/DataSourceHueDatabase.java | 2 + .../historyqueryset/MysqlQuerySetAmbariDB.java | 46 -- .../historyqueryset/OracleQuerySetAmbariDB.java | 44 -- .../PostgressQuerySetAmbariDB.java | 22 - .../hive/historyqueryset/QuerySetAmbariDB.java | 79 -- .../instancedetail/MysqlQuerySetAmbariDB.java | 23 - .../instancedetail/OracleQuerySetAmbariDB.java | 31 - .../PostgressQuerySetAmbariDB.java | 22 - .../hive/instancedetail/QuerySetAmbariDB.java | 48 -- .../savedqueryset/MysqlQuerySetAmbariDB.java| 65 -- .../savedqueryset/OracleQuerySetAmbariDB.java | 58 -- .../PostgressQuerySetAmbariDB.java | 22 - .../hive/savedqueryset/QuerySetAmbariDB.java| 131 .../instancedetail/MysqlQuerySetAmbariDB.java | 23 - .../instancedetail/OracleQuerySetAmbariDB.java | 30 - .../PostgressQuerySetAmbariDB.java | 22 - .../pig/instancedetail/QuerySetAmbariDB.java| 39 - 
.../pig/jobqueryset/MysqlQuerySetAmbariDB.java | 43 - .../pig/jobqueryset/OracleQuerySetAmbariDB.java | 41 - .../jobqueryset/PostgressQuerySetAmbariDB.java | 22 - .../pig/jobqueryset/QuerySetAmbariDB.java | 80 -- .../MysqlQuerySetAmbariDB.java | 43 - .../OracleQuerySetAmbariDB.java | 41 - .../PostgressQuerySetAmbariDB.java | 22 - .../savedscriptqueryset/QuerySetAmbariDB.java | 70 -- .../hive/historyqueryset/MysqlQuerySet.java | 23 - .../hive/historyqueryset/OracleQuerySet.java| 61 -- .../hive/historyqueryset/PostgressQuerySet.java | 22 - .../hive/historyqueryset/QuerySet.java | 130 .../hive/historyqueryset/SqliteQuerySet.java| 22 - .../hive/savedqueryset/MysqlQuerySet.java | 23 - .../hive/savedqueryset/OracleQuerySet.java | 65 -- .../hive/savedqueryset/PostgressQuerySet.java | 22 - .../hive/savedqueryset/QuerySet.java| 134 .../hive/savedqueryset/SqliteQuerySet.java | 22 - .../pig/jobqueryset/MysqlQuerySet.java | 22 - .../pig/jobqueryset/OracleQuerySet.java | 65 -- .../pig/jobqueryset/PostgressQuerySet.java | 22 - .../huequeryset/pig/jobqueryset/QuerySet.java | 132 .../pig/jobqueryset/SqliteQuerySet.java | 22 - .../pig/savedscriptqueryset/MysqlQuerySet.java | 22 - .../pig/savedscriptqueryset/OracleQuerySet.java | 60 -- .../savedscriptqueryset/PostgressQuerySet.java | 67 -- .../pig/savedscriptqueryset/QuerySet.java | 135 .../pig/savedscriptqueryset/SqliteQuerySet.java | 24 - .../huequeryset/userdetails/MysqlQuerySet.java | 24 - .../huequeryset/userdetails/OracleQuerySet.java | 28 - .../userdetails/PostgressQuerySet.java | 22 - .../huequeryset/userdetails/QuerySet.java | 42 - .../huequeryset/userdetails/SqliteQuerySet.java | 22 - .../migration/CreateJobId.java | 85 -- .../migration/CreateJobIdRevertChange.java | 84 -- .../migration/InitiateJobMigration.java | 102 --- .../InitiateJobMigrationforRevertchange.java| 85 -- .../configuration/AllInstanceDetailsAmbari.java | 59 -- .../configuration/AmbariDatabaseCheck.java | 62 -- .../configuration/AmbariWebHdfsCheck.java | 
60 -- .../configuration/CheckProgresStatus.java | 70 -- .../ConfigurationCheckImplementation.java | 134 .../HiveInstanceDetailsAmbari.java | 53 -- .../HiveInstanceDetailsUtility.java | 106 --- .../configuration/HueDatabaseCheck.java | 60 -- .../configuration/HueHttpUrlCheck.java | 60 -- .../configuration/HueWebHdfsCheck.java
[05/10] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"
http://git-wip-us.apache.org/repos/asf/ambari/blob/c40ec582/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeUtility.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeUtility.java deleted file mode 100644 index 7e1bbf4..000 --- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeUtility.java +++ /dev/null @@ -1,225 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.ambari.view.huetoambarimigration.migration.revertchange; - -import java.beans.PropertyVetoException; -import java.io.BufferedReader; -import java.io.File; -import java.io.IOException; -import java.net.URISyntaxException; -import java.security.PrivilegedExceptionAction; -import java.sql.Connection; -import java.sql.SQLException; -import java.sql.Statement; -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.List; - -import org.apache.ambari.view.ViewContext; -import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationModel; -import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound; -import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager; -import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager; -import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse; -import org.apache.log4j.Logger; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.JDOMException; -import org.jdom.input.SAXBuilder; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.security.UserGroupInformation; - -import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase; -import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation; - - -public class RevertChangeUtility { - - - - protected MigrationResourceManager resourceManager = null; - - public synchronized PersonalCRUDResourceManager getResourceManager(ViewContext view) { -if (resourceManager == null) { - resourceManager = new MigrationResourceManager(view); -} -return resourceManager; - } - - public boolean stringtoDatecompare(String datefromservlet, - String datefromfile) throws ParseException { - -SimpleDateFormat formatter 
= new SimpleDateFormat("-MM-dd"); -Date date1 = formatter.parse(datefromservlet); -Date date2 = formatter.parse(datefromfile); -if (date1.compareTo(date2) < 0) { - return true; -} else { - return false; -} - - } - - public void removedir(final String dir, final String namenodeuri) -throws IOException, URISyntaxException { - -try { - UserGroupInformation ugi = UserGroupInformation -.createRemoteUser("hdfs"); - - ugi.doAs(new PrivilegedExceptionAction() { - -public Void run() throws Exception { - - Configuration conf = new Configuration(); - conf.set("fs.hdfs.impl", -org.apache.hadoop.hdfs.DistributedFileSystem.class - .getName()); - conf.set("fs.file.impl", -org.apache.hadoop.fs.LocalFileSystem.class - .getName()); - conf.set("fs.defaultFS", namenodeuri); - conf.set("hadoop.job.ugi", "hdfs"); - - FileSystem fs = FileSystem.get(conf); - Path src = new Path(dir); - fs.delete(src, true); - return null; -} - }); -} catch (Exception e) { - e.printStackTrace(); -} - } - - public MigrationModel revertChangeUtility(String instance, String revertDate,String jobid,ViewContext
[01/10] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"
Repository: ambari Updated Branches: refs/heads/trunk 5352dac06 -> c40ec5820 http://git-wip-us.apache.org/repos/asf/ambari/blob/c40ec582/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page.hbs -- diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page.hbs b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page.hbs deleted file mode 100644 index 60fccf3..000 --- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page.hbs +++ /dev/null @@ -1,42 +0,0 @@ -{{! - Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
-}} - - - - Hive - - {{#link-to 'homePage.hive-history'}}HiveHistory Query{{/link-to}} - {{#link-to 'homePage.hive-saved-query'}}HiveSaved Query{{/link-to}} - - - Pig - - {{#link-to 'homePage.pig-script'}}PigSaved script{{/link-to}} - {{#link-to 'homePage.pig-job'}}PigJob{{/link-to}} - - - {{#link-to 'homePage.revert-change'}}RevertChange{{/link-to}} - - - - - - {{outlet}} - - - http://git-wip-us.apache.org/repos/asf/ambari/blob/c40ec582/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-history.hbs -- diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-history.hbs b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-history.hbs deleted file mode 100644 index b43e49f..000 --- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-history.hbs +++ /dev/null @@ -1,125 +0,0 @@ -{{! -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at -http://www.apache.org/licenses/LICENSE-2.0 -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-}} - - -History Query Migration - - - - - -User Name - * - - - -{{ember-selectize content=model.usersdetail optionValuePath="content.username" optionLabelPath="content.username" value=usernamehue placeholder="Select an userName" }} - - - - - -Instance Name - * - - - -{{ember-selectize content=model.hiveinstancedetail optionValuePath="content.instanceName" optionLabelPath="content.instanceName" value=instancename placeholder="Select an Instance name" }} - - - - - -Start Date - - -{{date-picker size="35" date=startdate valueFormat='-MM-DD' name="startdate" id="startdate" value=startdate}} - - - - - -End Date - - -{{date-picker size="35" date=enddate valueFormat='-MM-DD' name="enddate" id="enddate" value=enddate}} - - - - - -Submit - - -{{#if jobstatus}} - -Job has been Submitted. - - -{{/if}} - - - - - -{{#if jobstatus}} - - - - - -{{/if}} - - - - -{{#if completionStatus}} - Migration Report - - - - -
[06/11] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"
http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java deleted file mode 100644 index 9f9e053..000 --- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java +++ /dev/null @@ -1,281 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - - -package org.apache.ambari.view.huetoambarimigration.migration.hive.savedquery; - -import org.apache.ambari.view.ViewContext; -import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase; -import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase; -import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.HiveModel; -import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationModel; -import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound; -import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.MysqlQuerySetAmbariDB; -import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.OracleQuerySetAmbariDB; -import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.PostgressQuerySetAmbariDB; -import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.QuerySetAmbariDB; -import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset.*; -import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager; -import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager; -import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse; -import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation; -import org.apache.log4j.Logger; - -import java.beans.PropertyVetoException; -import java.io.IOException; -import java.net.URISyntaxException; -import java.sql.Connection; -import java.sql.SQLException; -import java.text.ParseException; -import java.util.ArrayList; - -public class HiveSavedQueryMigrationUtility { - - - - protected MigrationResourceManager resourceManager = null; - - public synchronized 
PersonalCRUDResourceManager getResourceManager(ViewContext view) { -if (resourceManager == null) { - resourceManager = new MigrationResourceManager(view); -} -return resourceManager; - } - - public MigrationModel hiveSavedQueryMigration(String username, String instance, String startDate, String endDate, ViewContext view, MigrationResponse migrationresult, String jobid) throws IOException, ItemNotFound { - -long startTime = System.currentTimeMillis(); - -final Logger logger = Logger.getLogger(HiveSavedQueryMigrationUtility.class); - -Connection connectionAmbaridb = null; -Connection connectionHuedb = null; - -int i = 0; - -logger.info("-"); -logger.info("hive saved query Migration started"); -logger.info("-"); -logger.info("start date: " + startDate); -logger.info("enddate date: " + endDate); -logger.info("instance is: " + instance); -logger.info("hue username is : " + username); - -HiveSavedQueryMigrationImplementation hivesavedqueryimpl = new HiveSavedQueryMigrationImplementation();/* creating Implementation object */ - -QuerySet huedatabase=null; - -if(view.getProperties().get("huedrivername").contains("mysql")) -{ - huedatabase=new
[09/11] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"
http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java deleted file mode 100644 index f3349c6..000 --- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java +++ /dev/null @@ -1,44 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset; - -/** - * - * Overriding methods for Oracle specific queries - */ - -public class OracleQuerySetAmbariDB extends QuerySetAmbariDB { - - @Override - protected String getSqlMaxDSidFromTableId(int id) { -return "select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id + ""; - } - @Override - protected String getTableIdSqlFromInstanceName() { -return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name=?"; - } - @Override - protected String getSqlInsertHiveHistory(int id) { -return "INSERT INTO ds_jobimpl_" + id + " values (?,'','','','','default',?,0,'','',?,'admin',?,'','job','','','Unknown',?,'','Worksheet')"; - } - @Override - protected String getRevSql(int id,String maxcount){ -return "delete from ds_jobimpl_" + id + " where ds_id='" + maxcount + "'"; - } - -} http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/PostgressQuerySetAmbariDB.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/PostgressQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/PostgressQuerySetAmbariDB.java deleted file mode 100644 index 5f4356b..000 --- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/PostgressQuerySetAmbariDB.java +++ /dev/null @@ -1,22 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset; - - -public class PostgressQuerySetAmbariDB extends QuerySetAmbariDB { -} http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/QuerySetAmbariDB.java
[03/11] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"
http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java new file mode 100644 index 000..614c171 --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java @@ -0,0 +1,563 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ambari.view.huetoambarimigration.service.pig; + +import java.nio.charset.Charset; +import java.security.PrivilegedExceptionAction; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.io.BufferedInputStream; +import java.io.BufferedReader; +import java.io.BufferedWriter; +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.util.ArrayList; +import java.util.Calendar; +import java.util.Date; +import java.util.GregorianCalendar; +import java.util.Scanner; +import java.io.*; +import java.net.URISyntaxException; +import java.net.URL; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FSDataInputStream; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.log4j.Logger; +import org.jdom.Attribute; +import org.jdom.Document; +import org.jdom.Element; +import org.jdom.JDOMException; +import org.jdom.input.SAXBuilder; +import org.jdom.output.Format; +import org.jdom.output.XMLOutputter; +import org.json.JSONArray; +import org.json.JSONObject; + +import org.apache.ambari.view.huetoambarimigration.model.*; +import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader; + +public class PigJobImpl { + + static final Logger logger = Logger.getLogger(PigJobImpl.class); + + private static String readAll(Reader rd) throws IOException { +StringBuilder sb = new StringBuilder(); +int cp; +while ((cp = rd.read()) != -1) { + sb.append((char) cp); +} +return sb.toString(); + } + + public void 
wrtitetoalternatesqlfile(String dirname, String content, String instance, int i) throws IOException { +Date dNow = new Date(); +SimpleDateFormat ft = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss"); +String currentDate = ft.format(dNow); +XMLOutputter xmlOutput = new XMLOutputter(); +xmlOutput.setFormat(Format.getPrettyFormat()); +File xmlfile = new File(ConfFileReader.getHomeDir() + "RevertChange.xml"); +if (xmlfile.exists()) { + String iteration = Integer.toString(i + 1); + SAXBuilder builder = new SAXBuilder(); + Document doc; + try { +doc = (Document) builder.build(xmlfile); +Element rootNode = doc.getRootElement(); +Element record = new Element("RevertRecord"); +record.setAttribute(new Attribute("id", iteration)); +record.addContent(new Element("datetime").setText(currentDate.toString())); +record.addContent(new Element("dirname").setText(dirname)); +record.addContent(new Element("instance").setText(instance)); +record.addContent(new Element("query").setText(content)); +rootNode.addContent(record); +xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml")); + } catch (JDOMException e) { + +logger.error("Jdom Exception: " , e); + } + + +} else { + // create + try { +String iteration = Integer.toString(i + 1); +Element revertrecord = new
[11/11] ambari git commit: AMBARI-17476. ServiceAccounts page not available in ambari (alexantonenko)
AMBARI-17476. ServiceAccounts page nota available in ambari (alexantonenko) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6b6ce800 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6b6ce800 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6b6ce800 Branch: refs/heads/branch-2.4 Commit: 6b6ce800e48a6ac0ae8c10edc36d6076e64497c9 Parents: f403a36 Author: Alex AntonenkoAuthored: Wed Jun 29 15:49:55 2016 +0300 Committer: Alex Antonenko Committed: Wed Jun 29 17:36:51 2016 +0300 -- .../main/resources/ui/admin-web/app/scripts/services/Cluster.js | 2 +- ambari-web/app/routes/main.js| 4 ++-- ambari-web/app/templates/application.hbs | 4 ++-- ambari-web/app/views/main/admin.js | 2 +- ambari-web/app/views/main/menu.js| 4 ++-- ambari-web/test/app_test.js | 2 +- 6 files changed, 9 insertions(+), 9 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/6b6ce800/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Cluster.js -- diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Cluster.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Cluster.js index 3160cd0..36baeb5 100644 --- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Cluster.js +++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Cluster.js @@ -76,7 +76,7 @@ angular.module('ambariAdminConsole') "AMBARI.MANAGE_USERS", "AMBARI.MANAGE_VIEWS", "AMBARI.RENAME_CLUSTER", - "AMBARI.SET_SERVICE_USERS_GROUPS" + "SERVICE.SET_SERVICE_USERS_GROUPS" ], orderedLevels: ['SERVICE', 'HOST', 'CLUSTER', 'AMBARI'], http://git-wip-us.apache.org/repos/asf/ambari/blob/6b6ce800/ambari-web/app/routes/main.js -- diff --git a/ambari-web/app/routes/main.js b/ambari-web/app/routes/main.js index 4545f54..ae16e27 100644 --- a/ambari-web/app/routes/main.js +++ b/ambari-web/app/routes/main.js @@ -348,7 +348,7 @@ module.exports = 
Em.Route.extend(App.RouterRedirections, { admin: Em.Route.extend({ route: '/admin', enter: function (router, transition) { - if (router.get('loggedIn') && !App.isAuthorized('CLUSTER.TOGGLE_KERBEROS, AMBARI.SET_SERVICE_USERS_GROUPS, CLUSTER.UPGRADE_DOWNGRADE_STACK, CLUSTER.VIEW_STACK_DETAILS') + if (router.get('loggedIn') && !App.isAuthorized('CLUSTER.TOGGLE_KERBEROS, SERVICE.SET_SERVICE_USERS_GROUPS, CLUSTER.UPGRADE_DOWNGRADE_STACK, CLUSTER.VIEW_STACK_DETAILS') && !(App.get('upgradeInProgress') || App.get('upgradeHolding'))) { Em.run.next(function () { router.transitionTo('main.dashboard.index'); @@ -527,7 +527,7 @@ module.exports = Em.Route.extend(App.RouterRedirections, { adminServiceAccounts: Em.Route.extend({ route: '/serviceAccounts', enter: function (router, transition) { -if (router.get('loggedIn') && !App.isAuthorized('AMBARI.SET_SERVICE_USERS_GROUPS')) { +if (router.get('loggedIn') && !App.isAuthorized('SERVICE.SET_SERVICE_USERS_GROUPS')) { router.transitionTo('main.dashboard.index'); } }, http://git-wip-us.apache.org/repos/asf/ambari/blob/6b6ce800/ambari-web/app/templates/application.hbs -- diff --git a/ambari-web/app/templates/application.hbs b/ambari-web/app/templates/application.hbs index ce78c89..47be986 100644 --- a/ambari-web/app/templates/application.hbs +++ b/ambari-web/app/templates/application.hbs @@ -65,7 +65,7 @@ {{t app.aboutAmbari}} {{#if App.router.clusterInstallCompleted}} {{#if isClusterDataLoaded}} -{{#isAuthorized "AMBARI.ADD_DELETE_CLUSTERS, AMBARI.ASSIGN_ROLES, AMBARI.EDIT_STACK_REPOS, AMBARI.MANAGE_GROUPS, AMBARI.MANAGE_STACK_VERSIONS, AMBARI.MANAGE_USERS, AMBARI.MANAGE_VIEWS, AMBARI.RENAME_CLUSTER, AMBARI.SET_SERVICE_USERS_GROUPS"}} +{{#isAuthorized "AMBARI.ADD_DELETE_CLUSTERS, AMBARI.ASSIGN_ROLES, AMBARI.EDIT_STACK_REPOS, AMBARI.MANAGE_GROUPS, AMBARI.MANAGE_STACK_VERSIONS, AMBARI.MANAGE_USERS, AMBARI.MANAGE_VIEWS, AMBARI.RENAME_CLUSTER, SERVICE.SET_SERVICE_USERS_GROUPS"}} {{t app.manageAmbari}} @@ -73,7 +73,7 @@ {{/if}} {{else}} 
{{#if App.isPermissionDataLoaded}} -{{#isAuthorized "AMBARI.ADD_DELETE_CLUSTERS,
[08/11] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"
http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/QuerySet.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/QuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/QuerySet.java deleted file mode 100644 index c81d51a..000 --- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/QuerySet.java +++ /dev/null @@ -1,134 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset; - -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.SQLException; - - -public abstract class QuerySet { - - - public PreparedStatement getUseridfromUserName(Connection connection, String username) throws SQLException { -PreparedStatement prSt = connection.prepareStatement(fetchuserIdfromUsernameSql()); -prSt.setString(1, username); -return prSt; - } - - public PreparedStatement getQueriesNoStartDateNoEndDate(Connection connection, int id) throws SQLException { -PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateNoEnddateSql()); -prSt.setInt(1, id); -return prSt; - } - - public PreparedStatement getQueriesNoStartDateYesEndDate(Connection connection, int id, String enddate) throws SQLException { -PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateYesEnddateSql()); -prSt.setInt(1, id); -prSt.setString(2, enddate); -return prSt; - } - - public PreparedStatement getQueriesYesStartDateNoEndDate(Connection connection, int id, String startdate) throws SQLException { -PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateNoEnddateSql()); -prSt.setInt(1, id); -prSt.setString(2, startdate); -return prSt; - } - - public PreparedStatement getQueriesYesStartDateYesEndDate(Connection connection, int id, String startdate, String endate) throws SQLException { -PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateYesEnddateSql()); -prSt.setInt(1, id); -prSt.setString(2, startdate); -prSt.setString(3, endate); -return prSt; - } - - /** - * for all user - */ - public PreparedStatement getQueriesNoStartDateNoEndDateAllUser(Connection connection) throws SQLException { -PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateNoEnddateYesallUserSql()); -return prSt; - } - - public PreparedStatement 
getQueriesNoStartDateYesEndDateAllUser(Connection connection, String enddate) throws SQLException { -PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateYesEnddateYesallUserSql()); -prSt.setString(1, enddate); -return prSt; - } - - public PreparedStatement getQueriesYesStartDateNoEndDateAllUser(Connection connection, String startdate) throws SQLException { -PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateNoEnddateYesallUserSql()); -prSt.setString(1, startdate); -return prSt; - } - - public PreparedStatement getQueriesYesStartDateYesEndDateAllUser(Connection connection, String startdate, String endate) throws SQLException { -PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateYesEnddateYesallUserSql()); -prSt.setString(1, startdate); -prSt.setString(2, endate); -return prSt; - } - - - protected String fetchuserIdfromUsernameSql() { -return "select id from auth_user where username=?;"; - - } - - protected String fetchHueQueriesNoStartdateNoEnddateSql() { -return "select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id =?;"; -
[02/11] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"
http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.jshintrc -- diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.jshintrc b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.jshintrc deleted file mode 100644 index 08096ef..000 --- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.jshintrc +++ /dev/null @@ -1,32 +0,0 @@ -{ - "predef": [ -"document", -"window", -"-Promise" - ], - "browser": true, - "boss": true, - "curly": true, - "debug": false, - "devel": true, - "eqeqeq": true, - "evil": true, - "forin": false, - "immed": false, - "laxbreak": false, - "newcap": true, - "noarg": true, - "noempty": false, - "nonew": false, - "nomen": false, - "onevar": false, - "plusplus": false, - "regexp": false, - "undef": true, - "sub": true, - "strict": false, - "white": false, - "eqnull": true, - "esnext": true, - "unused": true -} http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.travis.yml -- diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.travis.yml b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.travis.yml deleted file mode 100644 index 64533be..000 --- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.travis.yml +++ /dev/null @@ -1,22 +0,0 @@ -language: node_js -node_js: - - "4" - -sudo: false - -cache: - directories: -- node_modules - -before_install: - - npm config set spin false - - npm install -g bower - - npm install phantomjs-prebuilt - -install: - - npm install - - bower install - -script: - - npm test http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.watchmanconfig -- diff --git 
a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.watchmanconfig b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.watchmanconfig deleted file mode 100644 index e7834e3..000 --- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.watchmanconfig +++ /dev/null @@ -1,3 +0,0 @@ -{ - "ignore_dirs": ["tmp", "dist"] -} http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/README.md -- diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/README.md b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/README.md deleted file mode 100644 index 1d1a14f..000 --- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/README.md +++ /dev/null @@ -1,67 +0,0 @@ - -# Hueambarimigration-view - -This README outlines the details of collaborating on this Ember application. -A short introduction of this app could easily go here. - -## Prerequisites - -You will need the following things properly installed on your computer. - -* [Git](http://git-scm.com/) -* [Node.js](http://nodejs.org/) (with NPM) -* [Bower](http://bower.io/) -* [Ember CLI](http://ember-cli.com/) -* [PhantomJS](http://phantomjs.org/) - -## Installation - -* `git clone ` this repository -* change into the new directory -* `npm install` -* `bower install` - -## Running / Development - -* `ember server` -* Visit your app at [http://localhost:4200](http://localhost:4200). - -### Code Generators - -Make use of the many generators for code, try `ember help generate` for more details - -### Running Tests - -* `ember test` -* `ember test --server` - -### Building - -* `ember build` (development) -* `ember build --environment production` (production) - -### Deploying - -Specify what it takes to deploy your app. 
- -## Further Reading / Useful Links - -* [ember.js](http://emberjs.com/) -* [ember-cli](http://ember-cli.com/) -* Development Browser Extensions - * [ember inspector for chrome](https://chrome.google.com/webstore/detail/ember-inspector/bmdblncegkenkacieihfhpjfppoconhi) - * [ember inspector for firefox](https://addons.mozilla.org/en-US/firefox/addon/ember-inspector/) - http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/adapters/application.js -- diff --git
[05/11] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"
http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeUtility.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeUtility.java deleted file mode 100644 index 7e1bbf4..000 --- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeUtility.java +++ /dev/null @@ -1,225 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.ambari.view.huetoambarimigration.migration.revertchange; - -import java.beans.PropertyVetoException; -import java.io.BufferedReader; -import java.io.File; -import java.io.IOException; -import java.net.URISyntaxException; -import java.security.PrivilegedExceptionAction; -import java.sql.Connection; -import java.sql.SQLException; -import java.sql.Statement; -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.List; - -import org.apache.ambari.view.ViewContext; -import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationModel; -import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound; -import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager; -import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager; -import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse; -import org.apache.log4j.Logger; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.JDOMException; -import org.jdom.input.SAXBuilder; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.security.UserGroupInformation; - -import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase; -import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation; - - -public class RevertChangeUtility { - - - - protected MigrationResourceManager resourceManager = null; - - public synchronized PersonalCRUDResourceManager getResourceManager(ViewContext view) { -if (resourceManager == null) { - resourceManager = new MigrationResourceManager(view); -} -return resourceManager; - } - - public boolean stringtoDatecompare(String datefromservlet, - String datefromfile) throws ParseException { - -SimpleDateFormat formatter 
= new SimpleDateFormat("yyyy-MM-dd"); -Date date1 = formatter.parse(datefromservlet); -Date date2 = formatter.parse(datefromfile); -if (date1.compareTo(date2) < 0) { - return true; -} else { - return false; -} - - } - - public void removedir(final String dir, final String namenodeuri) -throws IOException, URISyntaxException { - -try { - UserGroupInformation ugi = UserGroupInformation -.createRemoteUser("hdfs"); - - ugi.doAs(new PrivilegedExceptionAction() { - -public Void run() throws Exception { - - Configuration conf = new Configuration(); - conf.set("fs.hdfs.impl", -org.apache.hadoop.hdfs.DistributedFileSystem.class - .getName()); - conf.set("fs.file.impl", -org.apache.hadoop.fs.LocalFileSystem.class - .getName()); - conf.set("fs.defaultFS", namenodeuri); - conf.set("hadoop.job.ugi", "hdfs"); - - FileSystem fs = FileSystem.get(conf); - Path src = new Path(dir); - fs.delete(src, true); - return null; -} - }); -} catch (Exception e) { - e.printStackTrace(); -} - } - - public MigrationModel revertChangeUtility(String instance, String revertDate,String jobid,ViewContext
[10/11] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"
Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)" This reverts commit 424afb471ac76da5ca4d4cafb93b103b543b910e. Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/283256c8 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/283256c8 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/283256c8 Branch: refs/heads/branch-2.4 Commit: 283256c83436bfccde5f8c74706e2698265a139d Parents: 6b6ce80 Author: Alex AntonenkoAuthored: Wed Jun 29 17:36:27 2016 +0300 Committer: Alex Antonenko Committed: Wed Jun 29 17:36:51 2016 +0300 -- contrib/views/hueambarimigration/pom.xml| 142 ++-- .../configurationcheck/ConfigurationCheck.java | 182 + .../configurationcheck/ProgressBarStatus.java | 54 ++ .../controller/hive/HiveHistoryMigration.java | 222 ++ .../hive/HiveSavedQueryMigration.java | 231 ++ .../controller/pig/PigJobMigration.java | 201 + .../controller/pig/PigScriptMigration.java | 208 + .../controller/revertchange/RevertChange.java | 217 ++ .../datasource/DataSourceAmbariDatabase.java| 2 + .../datasource/DataSourceHueDatabase.java | 2 + .../historyqueryset/MysqlQuerySetAmbariDB.java | 46 -- .../historyqueryset/OracleQuerySetAmbariDB.java | 44 -- .../PostgressQuerySetAmbariDB.java | 22 - .../hive/historyqueryset/QuerySetAmbariDB.java | 79 -- .../instancedetail/MysqlQuerySetAmbariDB.java | 23 - .../instancedetail/OracleQuerySetAmbariDB.java | 31 - .../PostgressQuerySetAmbariDB.java | 22 - .../hive/instancedetail/QuerySetAmbariDB.java | 48 -- .../savedqueryset/MysqlQuerySetAmbariDB.java| 65 -- .../savedqueryset/OracleQuerySetAmbariDB.java | 58 -- .../PostgressQuerySetAmbariDB.java | 22 - .../hive/savedqueryset/QuerySetAmbariDB.java| 131 .../instancedetail/MysqlQuerySetAmbariDB.java | 23 - .../instancedetail/OracleQuerySetAmbariDB.java | 30 - .../PostgressQuerySetAmbariDB.java | 22 - .../pig/instancedetail/QuerySetAmbariDB.java| 39 - 
.../pig/jobqueryset/MysqlQuerySetAmbariDB.java | 43 - .../pig/jobqueryset/OracleQuerySetAmbariDB.java | 41 - .../jobqueryset/PostgressQuerySetAmbariDB.java | 22 - .../pig/jobqueryset/QuerySetAmbariDB.java | 80 -- .../MysqlQuerySetAmbariDB.java | 43 - .../OracleQuerySetAmbariDB.java | 41 - .../PostgressQuerySetAmbariDB.java | 22 - .../savedscriptqueryset/QuerySetAmbariDB.java | 70 -- .../hive/historyqueryset/MysqlQuerySet.java | 23 - .../hive/historyqueryset/OracleQuerySet.java| 61 -- .../hive/historyqueryset/PostgressQuerySet.java | 22 - .../hive/historyqueryset/QuerySet.java | 130 .../hive/historyqueryset/SqliteQuerySet.java| 22 - .../hive/savedqueryset/MysqlQuerySet.java | 23 - .../hive/savedqueryset/OracleQuerySet.java | 65 -- .../hive/savedqueryset/PostgressQuerySet.java | 22 - .../hive/savedqueryset/QuerySet.java| 134 .../hive/savedqueryset/SqliteQuerySet.java | 22 - .../pig/jobqueryset/MysqlQuerySet.java | 22 - .../pig/jobqueryset/OracleQuerySet.java | 65 -- .../pig/jobqueryset/PostgressQuerySet.java | 22 - .../huequeryset/pig/jobqueryset/QuerySet.java | 132 .../pig/jobqueryset/SqliteQuerySet.java | 22 - .../pig/savedscriptqueryset/MysqlQuerySet.java | 22 - .../pig/savedscriptqueryset/OracleQuerySet.java | 60 -- .../savedscriptqueryset/PostgressQuerySet.java | 67 -- .../pig/savedscriptqueryset/QuerySet.java | 135 .../pig/savedscriptqueryset/SqliteQuerySet.java | 24 - .../huequeryset/userdetails/MysqlQuerySet.java | 24 - .../huequeryset/userdetails/OracleQuerySet.java | 28 - .../userdetails/PostgressQuerySet.java | 22 - .../huequeryset/userdetails/QuerySet.java | 42 - .../huequeryset/userdetails/SqliteQuerySet.java | 22 - .../migration/CreateJobId.java | 85 -- .../migration/CreateJobIdRevertChange.java | 84 -- .../migration/InitiateJobMigration.java | 102 --- .../InitiateJobMigrationforRevertchange.java| 85 -- .../configuration/AllInstanceDetailsAmbari.java | 59 -- .../configuration/AmbariDatabaseCheck.java | 62 -- .../configuration/AmbariWebHdfsCheck.java | 
60 -- .../configuration/CheckProgresStatus.java | 70 -- .../ConfigurationCheckImplementation.java | 134 .../HiveInstanceDetailsAmbari.java | 53 -- .../HiveInstanceDetailsUtility.java | 106 --- .../configuration/HueDatabaseCheck.java | 60 -- .../configuration/HueHttpUrlCheck.java | 60 --
[01/11] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"
Repository: ambari Updated Branches: refs/heads/branch-2.4 f403a36ff -> 283256c83 http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page.hbs -- diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page.hbs b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page.hbs deleted file mode 100644 index 60fccf3..000 --- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page.hbs +++ /dev/null @@ -1,42 +0,0 @@ -{{! - Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
-}} - - - - Hive - - {{#link-to 'homePage.hive-history'}}HiveHistory Query{{/link-to}} - {{#link-to 'homePage.hive-saved-query'}}HiveSaved Query{{/link-to}} - - - Pig - - {{#link-to 'homePage.pig-script'}}PigSaved script{{/link-to}} - {{#link-to 'homePage.pig-job'}}PigJob{{/link-to}} - - - {{#link-to 'homePage.revert-change'}}RevertChange{{/link-to}} - - - - - - {{outlet}} - - - http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-history.hbs -- diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-history.hbs b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-history.hbs deleted file mode 100644 index b43e49f..000 --- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-history.hbs +++ /dev/null @@ -1,125 +0,0 @@ -{{! -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at -http://www.apache.org/licenses/LICENSE-2.0 -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-}} - - -History Query Migration - - - - - -User Name - * - - - -{{ember-selectize content=model.usersdetail optionValuePath="content.username" optionLabelPath="content.username" value=usernamehue placeholder="Select an userName" }} - - - - - -Instance Name - * - - - -{{ember-selectize content=model.hiveinstancedetail optionValuePath="content.instanceName" optionLabelPath="content.instanceName" value=instancename placeholder="Select an Instance name" }} - - - - - -Start Date - - -{{date-picker size="35" date=startdate valueFormat='-MM-DD' name="startdate" id="startdate" value=startdate}} - - - - - -End Date - - -{{date-picker size="35" date=enddate valueFormat='-MM-DD' name="enddate" id="enddate" value=enddate}} - - - - - -Submit - - -{{#if jobstatus}} - -Job has been Submitted. - - -{{/if}} - - - - - -{{#if jobstatus}} - - - - - -{{/if}} - - - - -{{#if completionStatus}} - Migration Report - - - - -
[07/11] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"
http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java deleted file mode 100644 index bcbe4de..000 --- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java +++ /dev/null @@ -1,106 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.ambari.view.huetoambarimigration.migration.configuration; - -import org.apache.ambari.view.ViewContext; -import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase; -import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.InstanceModel; - -import java.beans.PropertyVetoException; -import java.io.IOException; -import java.sql.*; -import java.util.ArrayList; -import java.util.List; - -import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.instancedetail.*; - - -public class HiveInstanceDetailsUtility { - - public List getInstancedetails(ViewContext view) throws PropertyVetoException, SQLException, IOException { - -List instancelist = new ArrayList<>(); -Connection conn = null; -conn = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection(); -conn.setAutoCommit(false); -PreparedStatement prSt; - -QuerySetAmbariDB ambaridatabase = null; - -if (view.getProperties().get("ambaridrivername").contains("mysql")) { - ambaridatabase = new MysqlQuerySetAmbariDB(); -} else if (view.getProperties().get("ambaridrivername").contains("postgresql")) { - ambaridatabase = new PostgressQuerySetAmbariDB(); -} else if (view.getProperties().get("ambaridrivername").contains("oracle")) { - ambaridatabase = new OracleQuerySetAmbariDB(); -} - -ResultSet rs1 = null; -prSt = ambaridatabase.getHiveInstanceDeatil(conn); -rs1 = prSt.executeQuery(); -int i = 0; - -while (rs1.next()) { - InstanceModel I = new InstanceModel(); - I.setInstanceName(rs1.getString(1)); - I.setId(i); - instancelist.add(I); - i++; -} -return instancelist; - - } - - public List getAllInstancedetails(ViewContext view) throws PropertyVetoException, SQLException, IOException { - -List instancelist = new ArrayList<>(); -Connection conn = null; 
-Statement stmt = null; -conn = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection(); -conn.setAutoCommit(false); -PreparedStatement prSt; - -QuerySetAmbariDB ambaridatabase = null; - -if (view.getProperties().get("ambaridrivername").contains("mysql")) { - ambaridatabase = new MysqlQuerySetAmbariDB(); -} else if (view.getProperties().get("ambaridrivername").contains("postgresql")) { - ambaridatabase = new PostgressQuerySetAmbariDB(); -} else if (view.getProperties().get("ambaridrivername").contains("oracle")) { - ambaridatabase = new OracleQuerySetAmbariDB(); -} - -ResultSet rs1 = null; -int i = 0; -prSt = ambaridatabase.getAllInstanceDeatil(conn); -rs1 = prSt.executeQuery(); - -while (rs1.next()) { - InstanceModel I = new InstanceModel(); - I.setInstanceName(rs1.getString(1)); - I.setId(i); - instancelist.add(I); - i++; -} -rs1.close(); -return instancelist; - - } - - -}
[04/11] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"
http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/InstanceModel.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/InstanceModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/InstanceModel.java deleted file mode 100644 index d7f2868..000 --- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/InstanceModel.java +++ /dev/null @@ -1,41 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.ambari.view.huetoambarimigration.resources.scripts.models; - - -public class InstanceModel { - - String instanceName; - int id; - - public String getInstanceName() { - return instanceName; - } - - public void setInstanceName(String instanceName) { - this.instanceName = instanceName; - } - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } -} http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/JobReturnIdModel.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/JobReturnIdModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/JobReturnIdModel.java deleted file mode 100644 index 1a247bb..000 --- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/JobReturnIdModel.java +++ /dev/null @@ -1,43 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.ambari.view.huetoambarimigration.resources.scripts.models; - -/** - * Created by temp on 5/19/16. - */ -public class JobReturnIdModel { - - int id; - String idforJob; - - public String getIdforJob() { -return idforJob; - } - - public void setIdforJob(String idforJob) { -this.idforJob = idforJob; - } - - public int getId() { -return id; - } - - public void setId(int id) { -this.id = id; - } -} http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java deleted file mode 100644 index f765e15..000 --- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java +++ /dev/null @@ -1,130 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more
ambari git commit: AMBARI-17454. Add manual task for regenerating keytabs for Ranger upgrade (ncole)
Repository: ambari Updated Branches: refs/heads/branch-2.4 74a262c48 -> f403a36ff AMBARI-17454. Add manual task for regenerating keytabs for Ranger upgrade (ncole) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f403a36f Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f403a36f Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f403a36f Branch: refs/heads/branch-2.4 Commit: f403a36ff55a8909e2df6705affa897d718bfae7 Parents: 74a262c Author: Nate ColeAuthored: Wed Jun 29 09:51:04 2016 -0400 Committer: Nate Cole Committed: Wed Jun 29 09:51:04 2016 -0400 -- .../internal/UpgradeResourceProvider.java | 41 ++-- .../server/serveraction/kerberos/KDCType.java | 2 - .../upgrades/KerberosKeytabsAction.java | 112 ++ .../server/state/stack/upgrade/ManualTask.java | 6 - .../stack/upgrade/ServerSideActionTask.java | 7 + .../HDP/2.2/upgrades/nonrolling-upgrade-2.2.xml | 6 - .../HDP/2.2/upgrades/nonrolling-upgrade-2.3.xml | 6 - .../HDP/2.2/upgrades/nonrolling-upgrade-2.4.xml | 6 - .../HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml | 6 - .../HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml | 6 - .../HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml | 6 - .../stacks/HDP/2.3/upgrades/upgrade-2.5.xml | 7 + .../HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml | 6 - .../HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml | 6 - .../stacks/HDP/2.4/upgrades/upgrade-2.5.xml | 7 + .../HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml | 6 - .../upgrades/KerberosKeytabsActionTest.java | 204 +++ 17 files changed, 363 insertions(+), 77 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/f403a36f/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java index 
6aab72b..2e976ba 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java @@ -919,6 +919,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider continue; } UpgradeItemEntity itemEntity = new UpgradeItemEntity(); + itemEntity.setText(wrapper.getText()); itemEntity.setTasks(wrapper.getTasksJson()); itemEntity.setHosts(wrapper.getHostsJson()); @@ -1539,28 +1540,38 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider String itemDetail = entity.getText(); String stageText = StringUtils.abbreviate(entity.getText(), 255); + switch (task.getType()) { + case SERVER_ACTION: case MANUAL: { -ManualTask mt = (ManualTask) task; -JsonArray messageArray = new JsonArray(); -for(String message: mt.messages){ - JsonObject messageObj = new JsonObject(); - messageObj.addProperty("message", message); - messageArray.add(messageObj); -} -itemDetail = messageArray.toString(); -if (null != mt.summary) { - stageText = mt.summary; +ServerSideActionTask serverTask = (ServerSideActionTask) task; + +if (null != serverTask.summary) { + stageText = serverTask.summary; } -entity.setText(itemDetail); +if (task.getType() == Task.Type.MANUAL) { + ManualTask mt = (ManualTask) task; -if (null != mt.structuredOut) { - commandParams.put(COMMAND_PARAM_STRUCT_OUT, mt.structuredOut); + if (StringUtils.isNotBlank(mt.structuredOut)) { +commandParams.put(COMMAND_PARAM_STRUCT_OUT, mt.structuredOut); + } } -//To be used later on by the Stage... 
-itemDetail = StringUtils.join(mt.messages, " "); +if (!serverTask.messages.isEmpty()) { + JsonArray messageArray = new JsonArray(); + for (String message : serverTask.messages) { +JsonObject messageObj = new JsonObject(); +messageObj.addProperty("message", message); +messageArray.add(messageObj); + } + itemDetail = messageArray.toString(); + + entity.setText(itemDetail); + + //To be used later on by the Stage... + itemDetail = StringUtils.join(serverTask.messages, " "); +} break; } case CONFIGURE: {
ambari git commit: AMBARI-17454. Add manual task for regenerating keytabs for Ranger upgrade (ncole)
Repository: ambari Updated Branches: refs/heads/trunk 530128d80 -> 5352dac06 AMBARI-17454. Add manual task for regenerating keytabs for Ranger upgrade (ncole) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5352dac0 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5352dac0 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5352dac0 Branch: refs/heads/trunk Commit: 5352dac06d2936421c557993cad3460e2f2521fd Parents: 530128d Author: Nate ColeAuthored: Mon Jun 27 17:20:37 2016 -0400 Committer: Nate Cole Committed: Wed Jun 29 09:25:17 2016 -0400 -- .../internal/UpgradeResourceProvider.java | 41 ++-- .../server/serveraction/kerberos/KDCType.java | 2 - .../upgrades/KerberosKeytabsAction.java | 112 ++ .../server/state/stack/upgrade/ManualTask.java | 6 - .../stack/upgrade/ServerSideActionTask.java | 7 + .../HDP/2.2/upgrades/nonrolling-upgrade-2.2.xml | 6 - .../HDP/2.2/upgrades/nonrolling-upgrade-2.3.xml | 6 - .../HDP/2.2/upgrades/nonrolling-upgrade-2.4.xml | 6 - .../HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml | 6 - .../HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml | 6 - .../HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml | 6 - .../stacks/HDP/2.3/upgrades/upgrade-2.5.xml | 7 + .../HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml | 6 - .../HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml | 6 - .../stacks/HDP/2.4/upgrades/upgrade-2.5.xml | 7 + .../HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml | 6 - .../upgrades/KerberosKeytabsActionTest.java | 204 +++ 17 files changed, 363 insertions(+), 77 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/5352dac0/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java index 6aab72b..2e976ba 
100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java @@ -919,6 +919,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider continue; } UpgradeItemEntity itemEntity = new UpgradeItemEntity(); + itemEntity.setText(wrapper.getText()); itemEntity.setTasks(wrapper.getTasksJson()); itemEntity.setHosts(wrapper.getHostsJson()); @@ -1539,28 +1540,38 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider String itemDetail = entity.getText(); String stageText = StringUtils.abbreviate(entity.getText(), 255); + switch (task.getType()) { + case SERVER_ACTION: case MANUAL: { -ManualTask mt = (ManualTask) task; -JsonArray messageArray = new JsonArray(); -for(String message: mt.messages){ - JsonObject messageObj = new JsonObject(); - messageObj.addProperty("message", message); - messageArray.add(messageObj); -} -itemDetail = messageArray.toString(); -if (null != mt.summary) { - stageText = mt.summary; +ServerSideActionTask serverTask = (ServerSideActionTask) task; + +if (null != serverTask.summary) { + stageText = serverTask.summary; } -entity.setText(itemDetail); +if (task.getType() == Task.Type.MANUAL) { + ManualTask mt = (ManualTask) task; -if (null != mt.structuredOut) { - commandParams.put(COMMAND_PARAM_STRUCT_OUT, mt.structuredOut); + if (StringUtils.isNotBlank(mt.structuredOut)) { +commandParams.put(COMMAND_PARAM_STRUCT_OUT, mt.structuredOut); + } } -//To be used later on by the Stage... 
-itemDetail = StringUtils.join(mt.messages, " "); +if (!serverTask.messages.isEmpty()) { + JsonArray messageArray = new JsonArray(); + for (String message : serverTask.messages) { +JsonObject messageObj = new JsonObject(); +messageObj.addProperty("message", message); +messageArray.add(messageObj); + } + itemDetail = messageArray.toString(); + + entity.setText(itemDetail); + + //To be used later on by the Stage... + itemDetail = StringUtils.join(serverTask.messages, " "); +} break; } case CONFIGURE: {
ambari git commit: AMBARI-17313. Handle desired state for services/components on restart (Laszlo Puskas via magyari_sandor)
Repository: ambari Updated Branches: refs/heads/branch-2.4 32326775f -> 74a262c48 AMBARI-17313. Handle desired state for services/components on restart (Laszlo Puskas via magyari_sandor) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/74a262c4 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/74a262c4 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/74a262c4 Branch: refs/heads/branch-2.4 Commit: 74a262c48da8a5faf6996b250ec0c5bf660e5ab8 Parents: 3232677 Author: Laszlo PuskasAuthored: Wed Jun 29 15:04:38 2016 +0200 Committer: Sandor Magyari Committed: Wed Jun 29 15:17:05 2016 +0200 -- .../AmbariCustomCommandExecutionHelper.java | 72 +++--- .../AmbariCustomCommandExecutionHelperTest.java | 221 +-- .../AmbariManagementControllerTest.java | 39 ++-- 3 files changed, 168 insertions(+), 164 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/74a262c4/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java index b60592d..9526077 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java @@ -18,33 +18,6 @@ package org.apache.ambari.server.controller; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AGENT_STACK_RETRY_COUNT; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AGENT_STACK_RETRY_ON_UNAVAILABILITY; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.CLIENTS_TO_UPDATE_CONFIGS; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_TIMEOUT; 
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMPONENT_CATEGORY; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.CUSTOM_COMMAND; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_DRIVER_FILENAME; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_NAME; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.GROUP_LIST; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOOKS_FOLDER; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOST_SYS_PREPPED; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JAVA_HOME; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JAVA_VERSION; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JCE_NAME; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JDK_LOCATION; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JDK_NAME; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.MYSQL_JDBC_URL; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.NOT_MANAGED_HDFS_PATH_LIST; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.ORACLE_JDBC_URL; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.REPO_INFO; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT_TYPE; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_PACKAGE_FOLDER; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_NAME; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_VERSION; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.USER_LIST; - import java.text.MessageFormat; import java.util.ArrayList; import java.util.Collections; 
@@ -109,6 +82,33 @@ import com.google.gson.JsonObject; import com.google.inject.Inject; import com.google.inject.Singleton; +import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AGENT_STACK_RETRY_COUNT; +import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AGENT_STACK_RETRY_ON_UNAVAILABILITY; +import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.CLIENTS_TO_UPDATE_CONFIGS; +import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_TIMEOUT; +import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMPONENT_CATEGORY; +import static
ambari git commit: AMBARI-17313. Handle desired state for services/components on restart (magyari_sandor)
Repository: ambari Updated Branches: refs/heads/trunk 7474ec4e9 -> 530128d80 AMBARI-17313. Handle desired state for services/components on restart (magyari_sandor) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/530128d8 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/530128d8 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/530128d8 Branch: refs/heads/trunk Commit: 530128d8058ebe1384426a6ebceef6076d47fafb Parents: 7474ec4 Author: Laszlo PuskasAuthored: Wed Jun 29 15:04:38 2016 +0200 Committer: Sandor Magyari Committed: Wed Jun 29 15:04:38 2016 +0200 -- .../AmbariCustomCommandExecutionHelper.java | 72 +++--- .../AmbariCustomCommandExecutionHelperTest.java | 221 +-- .../AmbariManagementControllerTest.java | 39 ++-- 3 files changed, 168 insertions(+), 164 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/530128d8/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java index b60592d..9526077 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java @@ -18,33 +18,6 @@ package org.apache.ambari.server.controller; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AGENT_STACK_RETRY_COUNT; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AGENT_STACK_RETRY_ON_UNAVAILABILITY; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.CLIENTS_TO_UPDATE_CONFIGS; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_TIMEOUT; -import static 
org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMPONENT_CATEGORY; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.CUSTOM_COMMAND; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_DRIVER_FILENAME; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_NAME; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.GROUP_LIST; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOOKS_FOLDER; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOST_SYS_PREPPED; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JAVA_HOME; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JAVA_VERSION; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JCE_NAME; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JDK_LOCATION; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JDK_NAME; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.MYSQL_JDBC_URL; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.NOT_MANAGED_HDFS_PATH_LIST; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.ORACLE_JDBC_URL; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.REPO_INFO; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT_TYPE; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_PACKAGE_FOLDER; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_NAME; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_VERSION; -import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.USER_LIST; - import java.text.MessageFormat; import java.util.ArrayList; import java.util.Collections; @@ -109,6 
+82,33 @@ import com.google.gson.JsonObject; import com.google.inject.Inject; import com.google.inject.Singleton; +import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AGENT_STACK_RETRY_COUNT; +import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AGENT_STACK_RETRY_ON_UNAVAILABILITY; +import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.CLIENTS_TO_UPDATE_CONFIGS; +import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_TIMEOUT; +import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMPONENT_CATEGORY; +import static
[1/2] ambari git commit: AMBARI-17343. Blueprint attribute provision_action=INSTALL_ONLY loses its value after server restart (magyari_sandor)
Repository: ambari Updated Branches: refs/heads/branch-2.4 424afb471 -> 32326775f AMBARI-17343. Blueprint attribute provision_action=INSTALL_ONLY loses its value after server restart (magyari_sandor) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c5919ddc Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c5919ddc Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c5919ddc Branch: refs/heads/branch-2.4 Commit: c5919ddc963cd2fa3945617f6c8a7382acdfaf58 Parents: 424afb4 Author: Sandor MagyariAuthored: Wed Jun 29 14:02:40 2016 +0200 Committer: Sandor Magyari Committed: Wed Jun 29 14:09:51 2016 +0200 -- .../controller/internal/BaseClusterRequest.java | 18 ++ .../internal/ProvisionClusterRequest.java | 17 + .../orm/entities/TopologyRequestEntity.java | 16 .../ambari/server/topology/PersistedState.java| 4 +++- .../server/topology/PersistedStateImpl.java | 13 +++-- .../ambari/server/upgrade/UpgradeCatalog240.java | 9 + .../main/resources/Ambari-DDL-Derby-CREATE.sql| 1 + .../main/resources/Ambari-DDL-MySQL-CREATE.sql| 1 + .../main/resources/Ambari-DDL-Oracle-CREATE.sql | 1 + .../main/resources/Ambari-DDL-Postgres-CREATE.sql | 1 + .../Ambari-DDL-Postgres-EMBEDDED-CREATE.sql | 1 + .../resources/Ambari-DDL-SQLAnywhere-CREATE.sql | 1 + .../resources/Ambari-DDL-SQLServer-CREATE.sql | 1 + .../internal/ClusterResourceProviderTest.java | 2 +- .../ambari/server/state/cluster/ClustersTest.java | 5 +++-- .../server/upgrade/UpgradeCatalog240Test.java | 13 +++-- 16 files changed, 80 insertions(+), 24 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/c5919ddc/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseClusterRequest.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseClusterRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseClusterRequest.java index a67317a..54389da 
100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseClusterRequest.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseClusterRequest.java @@ -42,10 +42,17 @@ import java.util.Set; */ public abstract class BaseClusterRequest implements TopologyRequest { /** + * Support for controlling whether Install and Start tasks are created on + * blueprint deploy by default. + */ + public static final String PROVISION_ACTION_PROPERTY = "provision_action"; + /** * host group info map */ protected final Map hostGroupInfoMap = new HashMap (); + protected ProvisionAction provisionAction; + /** * cluster id */ @@ -185,4 +192,15 @@ public abstract class BaseClusterRequest implements TopologyRequest { } return hostResourceProvider; } + + /** + * Get requested @ProvisionClusterRequest.ProvisionAction + */ + public ProvisionAction getProvisionAction() { +return provisionAction; + } + + public void setProvisionAction(ProvisionAction provisionAction) { +this.provisionAction = provisionAction; + } } http://git-wip-us.apache.org/repos/asf/ambari/blob/c5919ddc/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ProvisionClusterRequest.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ProvisionClusterRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ProvisionClusterRequest.java index 3feac55..a35da86 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ProvisionClusterRequest.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ProvisionClusterRequest.java @@ -101,12 +101,6 @@ public class ProvisionClusterRequest extends BaseClusterRequest { public static final String CONFIG_RECOMMENDATION_STRATEGY = "config_recommendation_strategy"; /** - * Support for controlling whether Install and Start tasks are created on - * blueprint deploy by default. 
- */ - public static final String PROVISION_ACTION_PROPERTY = "provision_action"; - - /** * The repo version to use */ public static final String REPO_VERSION_PROPERTY = "repository_version"; @@ -134,8 +128,6 @@ public class ProvisionClusterRequest extends BaseClusterRequest { */ private final ConfigRecommendationStrategy
[2/2] ambari git commit: AMBARI-17403. Fix hostname in stack advisor for proxy user properties (magyari_sandor)
AMBARI-17403. Fix hostname in stack advisor for proxy user properties (magyari_sandor) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/32326775 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/32326775 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/32326775 Branch: refs/heads/branch-2.4 Commit: 32326775fadacc7993f4ecf0eb9ce6e453bb6bb6 Parents: c5919dd Author: Sandor MagyariAuthored: Wed Jun 29 14:04:02 2016 +0200 Committer: Sandor Magyari Committed: Wed Jun 29 14:10:01 2016 +0200 -- .../resources/stacks/HDP/2.0.6/services/stack_advisor.py | 8 .../test/python/stacks/2.0.6/common/test_stack_advisor.py| 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/32326775/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py index 373403c..9a310b5 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py +++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py @@ -195,7 +195,7 @@ class HDP206StackAdvisor(DefaultStackAdvisor): if oozieServerrHosts is not None: oozieServerHostsNameList = [] for oozieServerHost in oozieServerrHosts: - oozieServerHostsNameList.append(oozieServerHost["Hosts"]["public_host_name"]) + oozieServerHostsNameList.append(oozieServerHost["Hosts"]["host_name"]) oozieServerHostsNames = ",".join(oozieServerHostsNameList) if not oozie_user in users and oozie_user is not None: users[oozie_user] = {"propertyHosts" : oozieServerHostsNames,"propertyGroups" : "*", "config" : "oozie-env", "propertyName" : "oozie_user"} @@ -213,7 +213,7 @@ class HDP206StackAdvisor(DefaultStackAdvisor): if hiveServerHosts is not None: hiveServerHostsNameList = [] for hiveServerHost in 
hiveServerHosts: - hiveServerHostsNameList.append(hiveServerHost["Hosts"]["public_host_name"]) + hiveServerHostsNameList.append(hiveServerHost["Hosts"]["host_name"]) hiveServerHostsNames = ",".join(hiveServerHostsNameList) if not hive_user in users and hive_user is not None: users[hive_user] = {"propertyHosts" : hiveServerHostsNames,"propertyGroups" : "*", "config" : "hive-env", "propertyName" : "hive_user"} @@ -221,7 +221,7 @@ class HDP206StackAdvisor(DefaultStackAdvisor): if webHcatServerHosts is not None: webHcatServerHostsNameList = [] for webHcatServerHost in webHcatServerHosts: - webHcatServerHostsNameList.append(webHcatServerHost["Hosts"]["public_host_name"]) + webHcatServerHostsNameList.append(webHcatServerHost["Hosts"]["host_name"]) webHcatServerHostsNames = ",".join(webHcatServerHostsNameList) if not webhcat_user in users and webhcat_user is not None: users[webhcat_user] = {"propertyHosts" : webHcatServerHostsNames,"propertyGroups" : "*", "config" : "hive-env", "propertyName" : "webhcat_user"} @@ -235,7 +235,7 @@ class HDP206StackAdvisor(DefaultStackAdvisor): if len(rmHosts) > 1: rmHostsNameList = [] for rmHost in rmHosts: -rmHostsNameList.append(rmHost["Hosts"]["public_host_name"]) +rmHostsNameList.append(rmHost["Hosts"]["host_name"]) rmHostsNames = ",".join(rmHostsNameList) if not yarn_user in users and yarn_user is not None: users[yarn_user] = {"propertyHosts" : rmHostsNames, "config" : "yarn-env", "propertyName" : "yarn_user"} http://git-wip-us.apache.org/repos/asf/ambari/blob/32326775/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py -- diff --git a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py index 6a8f3e2..7a092fc 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py +++ b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py @@ -979,7 +979,7 @@ class 
TestHDP206StackAdvisor(TestCase): "os_arch": "x86_64", "os_type": "centos6", "ph_cpu_count": 1, -"public_host_name": "c6401.ambari.apache.org", +"public_host_name": "public.c6401.ambari.apache.org", "rack_info": "/default-rack",
[2/2] ambari git commit: AMBARI-17403. Fix hostname in stack advisor for proxy user properties (magyari_sandor)
AMBARI-17403. Fix hostname in stack advisor for proxy user properties (magyari_sandor) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7474ec4e Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7474ec4e Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7474ec4e Branch: refs/heads/trunk Commit: 7474ec4e9c9bbefeeb09f701380cd7093eff28c6 Parents: addc5d0 Author: Sandor MagyariAuthored: Wed Jun 29 14:04:02 2016 +0200 Committer: Sandor Magyari Committed: Wed Jun 29 14:04:02 2016 +0200 -- .../resources/stacks/HDP/2.0.6/services/stack_advisor.py | 8 .../test/python/stacks/2.0.6/common/test_stack_advisor.py| 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/7474ec4e/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py index 373403c..9a310b5 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py +++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py @@ -195,7 +195,7 @@ class HDP206StackAdvisor(DefaultStackAdvisor): if oozieServerrHosts is not None: oozieServerHostsNameList = [] for oozieServerHost in oozieServerrHosts: - oozieServerHostsNameList.append(oozieServerHost["Hosts"]["public_host_name"]) + oozieServerHostsNameList.append(oozieServerHost["Hosts"]["host_name"]) oozieServerHostsNames = ",".join(oozieServerHostsNameList) if not oozie_user in users and oozie_user is not None: users[oozie_user] = {"propertyHosts" : oozieServerHostsNames,"propertyGroups" : "*", "config" : "oozie-env", "propertyName" : "oozie_user"} @@ -213,7 +213,7 @@ class HDP206StackAdvisor(DefaultStackAdvisor): if hiveServerHosts is not None: hiveServerHostsNameList = [] for hiveServerHost in hiveServerHosts: 
- hiveServerHostsNameList.append(hiveServerHost["Hosts"]["public_host_name"]) + hiveServerHostsNameList.append(hiveServerHost["Hosts"]["host_name"]) hiveServerHostsNames = ",".join(hiveServerHostsNameList) if not hive_user in users and hive_user is not None: users[hive_user] = {"propertyHosts" : hiveServerHostsNames,"propertyGroups" : "*", "config" : "hive-env", "propertyName" : "hive_user"} @@ -221,7 +221,7 @@ class HDP206StackAdvisor(DefaultStackAdvisor): if webHcatServerHosts is not None: webHcatServerHostsNameList = [] for webHcatServerHost in webHcatServerHosts: - webHcatServerHostsNameList.append(webHcatServerHost["Hosts"]["public_host_name"]) + webHcatServerHostsNameList.append(webHcatServerHost["Hosts"]["host_name"]) webHcatServerHostsNames = ",".join(webHcatServerHostsNameList) if not webhcat_user in users and webhcat_user is not None: users[webhcat_user] = {"propertyHosts" : webHcatServerHostsNames,"propertyGroups" : "*", "config" : "hive-env", "propertyName" : "webhcat_user"} @@ -235,7 +235,7 @@ class HDP206StackAdvisor(DefaultStackAdvisor): if len(rmHosts) > 1: rmHostsNameList = [] for rmHost in rmHosts: -rmHostsNameList.append(rmHost["Hosts"]["public_host_name"]) +rmHostsNameList.append(rmHost["Hosts"]["host_name"]) rmHostsNames = ",".join(rmHostsNameList) if not yarn_user in users and yarn_user is not None: users[yarn_user] = {"propertyHosts" : rmHostsNames, "config" : "yarn-env", "propertyName" : "yarn_user"} http://git-wip-us.apache.org/repos/asf/ambari/blob/7474ec4e/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py -- diff --git a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py index 6a8f3e2..7a092fc 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py +++ b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py @@ -979,7 +979,7 @@ class TestHDP206StackAdvisor(TestCase): "os_arch": 
"x86_64", "os_type": "centos6", "ph_cpu_count": 1, -"public_host_name": "c6401.ambari.apache.org", +"public_host_name": "public.c6401.ambari.apache.org", "rack_info": "/default-rack",
[1/2] ambari git commit: AMBARI-17343. Blueprint attribute provision_action=INSTALL_ONLY loses its value after server restart (magyari_sandor)
Repository: ambari Updated Branches: refs/heads/trunk 6f4a9c288 -> 7474ec4e9 AMBARI-17343. Blueprint attribute provision_action=INSTALL_ONLY loses its value after server restart (magyari_sandor) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/addc5d04 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/addc5d04 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/addc5d04 Branch: refs/heads/trunk Commit: addc5d04626fdaf976df2a0427cb9cdb3c923c22 Parents: 6f4a9c2 Author: Sandor MagyariAuthored: Wed Jun 29 14:02:40 2016 +0200 Committer: Sandor Magyari Committed: Wed Jun 29 14:02:40 2016 +0200 -- .../controller/internal/BaseClusterRequest.java | 18 ++ .../internal/ProvisionClusterRequest.java | 17 + .../orm/entities/TopologyRequestEntity.java | 16 .../ambari/server/topology/PersistedState.java| 4 +++- .../server/topology/PersistedStateImpl.java | 13 +++-- .../ambari/server/upgrade/UpgradeCatalog240.java | 9 + .../main/resources/Ambari-DDL-Derby-CREATE.sql| 1 + .../main/resources/Ambari-DDL-MySQL-CREATE.sql| 1 + .../main/resources/Ambari-DDL-Oracle-CREATE.sql | 1 + .../main/resources/Ambari-DDL-Postgres-CREATE.sql | 1 + .../Ambari-DDL-Postgres-EMBEDDED-CREATE.sql | 1 + .../resources/Ambari-DDL-SQLAnywhere-CREATE.sql | 1 + .../resources/Ambari-DDL-SQLServer-CREATE.sql | 1 + .../internal/ClusterResourceProviderTest.java | 2 +- .../ambari/server/state/cluster/ClustersTest.java | 5 +++-- .../server/upgrade/UpgradeCatalog240Test.java | 13 +++-- 16 files changed, 80 insertions(+), 24 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/addc5d04/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseClusterRequest.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseClusterRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseClusterRequest.java index a67317a..54389da 100644 --- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseClusterRequest.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseClusterRequest.java @@ -42,10 +42,17 @@ import java.util.Set; */ public abstract class BaseClusterRequest implements TopologyRequest { /** + * Support for controlling whether Install and Start tasks are created on + * blueprint deploy by default. + */ + public static final String PROVISION_ACTION_PROPERTY = "provision_action"; + /** * host group info map */ protected final Map hostGroupInfoMap = new HashMap (); + protected ProvisionAction provisionAction; + /** * cluster id */ @@ -185,4 +192,15 @@ public abstract class BaseClusterRequest implements TopologyRequest { } return hostResourceProvider; } + + /** + * Get requested @ProvisionClusterRequest.ProvisionAction + */ + public ProvisionAction getProvisionAction() { +return provisionAction; + } + + public void setProvisionAction(ProvisionAction provisionAction) { +this.provisionAction = provisionAction; + } } http://git-wip-us.apache.org/repos/asf/ambari/blob/addc5d04/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ProvisionClusterRequest.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ProvisionClusterRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ProvisionClusterRequest.java index 3feac55..a35da86 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ProvisionClusterRequest.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ProvisionClusterRequest.java @@ -101,12 +101,6 @@ public class ProvisionClusterRequest extends BaseClusterRequest { public static final String CONFIG_RECOMMENDATION_STRATEGY = "config_recommendation_strategy"; /** - * Support for controlling whether Install and Start tasks are created on - * blueprint deploy by default. 
- */ - public static final String PROVISION_ACTION_PROPERTY = "provision_action"; - - /** * The repo version to use */ public static final String REPO_VERSION_PROPERTY = "repository_version"; @@ -134,8 +128,6 @@ public class ProvisionClusterRequest extends BaseClusterRequest { */ private final ConfigRecommendationStrategy configRecommendationStrategy; -
[04/10] ambari git commit: AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)
http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/InstanceModel.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/InstanceModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/InstanceModel.java new file mode 100644 index 000..d7f2868 --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/InstanceModel.java @@ -0,0 +1,41 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.ambari.view.huetoambarimigration.resources.scripts.models; + + +public class InstanceModel { + + String instanceName; + int id; + + public String getInstanceName() { + return instanceName; + } + + public void setInstanceName(String instanceName) { + this.instanceName = instanceName; + } + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } +} http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/JobReturnIdModel.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/JobReturnIdModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/JobReturnIdModel.java new file mode 100644 index 000..1a247bb --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/JobReturnIdModel.java @@ -0,0 +1,43 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.ambari.view.huetoambarimigration.resources.scripts.models; + +/** + * Created by temp on 5/19/16. + */ +public class JobReturnIdModel { + + int id; + String idforJob; + + public String getIdforJob() { +return idforJob; + } + + public void setIdforJob(String idforJob) { +this.idforJob = idforJob; + } + + public int getId() { +return id; + } + + public void setId(int id) { +this.id = id; + } +} http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java new file mode 100644 index 000..f765e15 --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java @@ -0,0 +1,130 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license
[06/10] ambari git commit: AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)
http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java new file mode 100644 index 000..9f9e053 --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java @@ -0,0 +1,281 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + +package org.apache.ambari.view.huetoambarimigration.migration.hive.savedquery; + +import org.apache.ambari.view.ViewContext; +import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase; +import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase; +import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.HiveModel; +import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationModel; +import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound; +import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.MysqlQuerySetAmbariDB; +import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.OracleQuerySetAmbariDB; +import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.PostgressQuerySetAmbariDB; +import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.QuerySetAmbariDB; +import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset.*; +import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager; +import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager; +import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse; +import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation; +import org.apache.log4j.Logger; + +import java.beans.PropertyVetoException; +import java.io.IOException; +import java.net.URISyntaxException; +import java.sql.Connection; +import java.sql.SQLException; +import java.text.ParseException; +import java.util.ArrayList; + +public class HiveSavedQueryMigrationUtility { + + + + protected MigrationResourceManager resourceManager = null; + + public synchronized 
PersonalCRUDResourceManager getResourceManager(ViewContext view) { +if (resourceManager == null) { + resourceManager = new MigrationResourceManager(view); +} +return resourceManager; + } + + public MigrationModel hiveSavedQueryMigration(String username, String instance, String startDate, String endDate, ViewContext view, MigrationResponse migrationresult, String jobid) throws IOException, ItemNotFound { + +long startTime = System.currentTimeMillis(); + +final Logger logger = Logger.getLogger(HiveSavedQueryMigrationUtility.class); + +Connection connectionAmbaridb = null; +Connection connectionHuedb = null; + +int i = 0; + +logger.info("-"); +logger.info("hive saved query Migration started"); +logger.info("-"); +logger.info("start date: " + startDate); +logger.info("enddate date: " + endDate); +logger.info("instance is: " + instance); +logger.info("hue username is : " + username); + +HiveSavedQueryMigrationImplementation hivesavedqueryimpl = new HiveSavedQueryMigrationImplementation();/* creating Implementation object */ + +QuerySet huedatabase=null; + +if(view.getProperties().get("huedrivername").contains("mysql")) +{ + huedatabase=new
[01/10] ambari git commit: AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)
Repository: ambari Updated Branches: refs/heads/branch-2.4 fd9574fc7 -> 424afb471 http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page.hbs -- diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page.hbs b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page.hbs new file mode 100644 index 000..60fccf3 --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page.hbs @@ -0,0 +1,42 @@ +{{! + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+}} + + + + Hive + + {{#link-to 'homePage.hive-history'}}HiveHistory Query{{/link-to}} + {{#link-to 'homePage.hive-saved-query'}}HiveSaved Query{{/link-to}} + + + Pig + + {{#link-to 'homePage.pig-script'}}PigSaved script{{/link-to}} + {{#link-to 'homePage.pig-job'}}PigJob{{/link-to}} + + + {{#link-to 'homePage.revert-change'}}RevertChange{{/link-to}} + + + + + + {{outlet}} + + + http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-history.hbs -- diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-history.hbs b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-history.hbs new file mode 100644 index 000..b43e49f --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-history.hbs @@ -0,0 +1,125 @@ +{{! +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at +http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+}} + + +History Query Migration + + + + + +User Name + * + + + +{{ember-selectize content=model.usersdetail optionValuePath="content.username" optionLabelPath="content.username" value=usernamehue placeholder="Select an userName" }} + + + + + +Instance Name + * + + + +{{ember-selectize content=model.hiveinstancedetail optionValuePath="content.instanceName" optionLabelPath="content.instanceName" value=instancename placeholder="Select an Instance name" }} + + + + + +Start Date + + +{{date-picker size="35" date=startdate valueFormat='YYYY-MM-DD' name="startdate" id="startdate" value=startdate}} + + + + + +End Date + + +{{date-picker size="35" date=enddate valueFormat='YYYY-MM-DD' name="enddate" id="enddate" value=enddate}} + + + + + +Submit + + +{{#if jobstatus}} + +Job has been Submitted. + + +{{/if}} + + + + + +{{#if jobstatus}} + + + + + +{{/if}} + + + + +{{#if completionStatus}} + Migration Report + + + + +
[02/10] ambari git commit: AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)
http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.jshintrc -- diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.jshintrc b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.jshintrc new file mode 100644 index 000..08096ef --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.jshintrc @@ -0,0 +1,32 @@ +{ + "predef": [ +"document", +"window", +"-Promise" + ], + "browser": true, + "boss": true, + "curly": true, + "debug": false, + "devel": true, + "eqeqeq": true, + "evil": true, + "forin": false, + "immed": false, + "laxbreak": false, + "newcap": true, + "noarg": true, + "noempty": false, + "nonew": false, + "nomen": false, + "onevar": false, + "plusplus": false, + "regexp": false, + "undef": true, + "sub": true, + "strict": false, + "white": false, + "eqnull": true, + "esnext": true, + "unused": true +} http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.travis.yml -- diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.travis.yml b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.travis.yml new file mode 100644 index 000..64533be --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.travis.yml @@ -0,0 +1,22 @@ +--- +language: node_js +node_js: + - "4" + +sudo: false + +cache: + directories: +- node_modules + +before_install: + - npm config set spin false + - npm install -g bower + - npm install phantomjs-prebuilt + +install: + - npm install + - bower install + +script: + - npm test http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.watchmanconfig -- diff --git 
a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.watchmanconfig b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.watchmanconfig new file mode 100644 index 000..e7834e3 --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.watchmanconfig @@ -0,0 +1,3 @@ +{ + "ignore_dirs": ["tmp", "dist"] +} http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/README.md -- diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/README.md b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/README.md new file mode 100644 index 000..1d1a14f --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/README.md @@ -0,0 +1,67 @@ + +# Hueambarimigration-view + +This README outlines the details of collaborating on this Ember application. +A short introduction of this app could easily go here. + +## Prerequisites + +You will need the following things properly installed on your computer. + +* [Git](http://git-scm.com/) +* [Node.js](http://nodejs.org/) (with NPM) +* [Bower](http://bower.io/) +* [Ember CLI](http://ember-cli.com/) +* [PhantomJS](http://phantomjs.org/) + +## Installation + +* `git clone ` this repository +* change into the new directory +* `npm install` +* `bower install` + +## Running / Development + +* `ember server` +* Visit your app at [http://localhost:4200](http://localhost:4200). + +### Code Generators + +Make use of the many generators for code, try `ember help generate` for more details + +### Running Tests + +* `ember test` +* `ember test --server` + +### Building + +* `ember build` (development) +* `ember build --environment production` (production) + +### Deploying + +Specify what it takes to deploy your app. 
+ +## Further Reading / Useful Links + +* [ember.js](http://emberjs.com/) +* [ember-cli](http://ember-cli.com/) +* Development Browser Extensions + * [ember inspector for chrome](https://chrome.google.com/webstore/detail/ember-inspector/bmdblncegkenkacieihfhpjfppoconhi) + * [ember inspector for firefox](https://addons.mozilla.org/en-US/firefox/addon/ember-inspector/) + http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/adapters/application.js -- diff --git
[05/10] ambari git commit: AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)
http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeUtility.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeUtility.java new file mode 100644 index 000..7e1bbf4 --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeUtility.java @@ -0,0 +1,225 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ambari.view.huetoambarimigration.migration.revertchange; + +import java.beans.PropertyVetoException; +import java.io.BufferedReader; +import java.io.File; +import java.io.IOException; +import java.net.URISyntaxException; +import java.security.PrivilegedExceptionAction; +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.Statement; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; + +import org.apache.ambari.view.ViewContext; +import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationModel; +import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound; +import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager; +import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager; +import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse; +import org.apache.log4j.Logger; +import org.jdom.Document; +import org.jdom.Element; +import org.jdom.JDOMException; +import org.jdom.input.SAXBuilder; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.security.UserGroupInformation; + +import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase; +import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation; + + +public class RevertChangeUtility { + + + + protected MigrationResourceManager resourceManager = null; + + public synchronized PersonalCRUDResourceManager getResourceManager(ViewContext view) { +if (resourceManager == null) { + resourceManager = new MigrationResourceManager(view); +} +return resourceManager; + } + + public boolean stringtoDatecompare(String datefromservlet, + String datefromfile) throws ParseException { + +SimpleDateFormat formatter 
= new SimpleDateFormat("yyyy-MM-dd"); +Date date1 = formatter.parse(datefromservlet); +Date date2 = formatter.parse(datefromfile); +if (date1.compareTo(date2) < 0) { + return true; +} else { + return false; +} + + } + + public void removedir(final String dir, final String namenodeuri) +throws IOException, URISyntaxException { + +try { + UserGroupInformation ugi = UserGroupInformation +.createRemoteUser("hdfs"); + + ugi.doAs(new PrivilegedExceptionAction() { + +public Void run() throws Exception { + + Configuration conf = new Configuration(); + conf.set("fs.hdfs.impl", +org.apache.hadoop.hdfs.DistributedFileSystem.class + .getName()); + conf.set("fs.file.impl", +org.apache.hadoop.fs.LocalFileSystem.class + .getName()); + conf.set("fs.defaultFS", namenodeuri); + conf.set("hadoop.job.ugi", "hdfs"); + + FileSystem fs = FileSystem.get(conf); + Path src = new Path(dir); + fs.delete(src, true); + return null; +} + }); +} catch (Exception e) { + e.printStackTrace(); +} + } + + public MigrationModel revertChangeUtility(String instance, String revertDate,String jobid,ViewContext
[07/10] ambari git commit: AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)
http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java new file mode 100644 index 000..bcbe4de --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java @@ -0,0 +1,106 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.ambari.view.huetoambarimigration.migration.configuration; + +import org.apache.ambari.view.ViewContext; +import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase; +import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.InstanceModel; + +import java.beans.PropertyVetoException; +import java.io.IOException; +import java.sql.*; +import java.util.ArrayList; +import java.util.List; + +import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.instancedetail.*; + + +public class HiveInstanceDetailsUtility { + + public List getInstancedetails(ViewContext view) throws PropertyVetoException, SQLException, IOException { + +List instancelist = new ArrayList<>(); +Connection conn = null; +conn = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection(); +conn.setAutoCommit(false); +PreparedStatement prSt; + +QuerySetAmbariDB ambaridatabase = null; + +if (view.getProperties().get("ambaridrivername").contains("mysql")) { + ambaridatabase = new MysqlQuerySetAmbariDB(); +} else if (view.getProperties().get("ambaridrivername").contains("postgresql")) { + ambaridatabase = new PostgressQuerySetAmbariDB(); +} else if (view.getProperties().get("ambaridrivername").contains("oracle")) { + ambaridatabase = new OracleQuerySetAmbariDB(); +} + +ResultSet rs1 = null; +prSt = ambaridatabase.getHiveInstanceDeatil(conn); +rs1 = prSt.executeQuery(); +int i = 0; + +while (rs1.next()) { + InstanceModel I = new InstanceModel(); + I.setInstanceName(rs1.getString(1)); + I.setId(i); + instancelist.add(I); + i++; +} +return instancelist; + + } + + public List getAllInstancedetails(ViewContext view) throws PropertyVetoException, SQLException, IOException { + +List instancelist = new ArrayList<>(); +Connection conn = null; 
+Statement stmt = null; +conn = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection(); +conn.setAutoCommit(false); +PreparedStatement prSt; + +QuerySetAmbariDB ambaridatabase = null; + +if (view.getProperties().get("ambaridrivername").contains("mysql")) { + ambaridatabase = new MysqlQuerySetAmbariDB(); +} else if (view.getProperties().get("ambaridrivername").contains("postgresql")) { + ambaridatabase = new PostgressQuerySetAmbariDB(); +} else if (view.getProperties().get("ambaridrivername").contains("oracle")) { + ambaridatabase = new OracleQuerySetAmbariDB(); +} + +ResultSet rs1 = null; +int i = 0; +prSt = ambaridatabase.getAllInstanceDeatil(conn); +rs1 = prSt.executeQuery(); + +while (rs1.next()) { + InstanceModel I = new InstanceModel(); + I.setInstanceName(rs1.getString(1)); + I.setId(i); + instancelist.add(I); + i++; +} +rs1.close(); +return instancelist; + + } + + +}
[08/10] ambari git commit: AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)
http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/QuerySet.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/QuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/QuerySet.java new file mode 100644 index 000..c81d51a --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/QuerySet.java @@ -0,0 +1,134 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset; + +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.SQLException; + + +public abstract class QuerySet { + + + public PreparedStatement getUseridfromUserName(Connection connection, String username) throws SQLException { +PreparedStatement prSt = connection.prepareStatement(fetchuserIdfromUsernameSql()); +prSt.setString(1, username); +return prSt; + } + + public PreparedStatement getQueriesNoStartDateNoEndDate(Connection connection, int id) throws SQLException { +PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateNoEnddateSql()); +prSt.setInt(1, id); +return prSt; + } + + public PreparedStatement getQueriesNoStartDateYesEndDate(Connection connection, int id, String enddate) throws SQLException { +PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateYesEnddateSql()); +prSt.setInt(1, id); +prSt.setString(2, enddate); +return prSt; + } + + public PreparedStatement getQueriesYesStartDateNoEndDate(Connection connection, int id, String startdate) throws SQLException { +PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateNoEnddateSql()); +prSt.setInt(1, id); +prSt.setString(2, startdate); +return prSt; + } + + public PreparedStatement getQueriesYesStartDateYesEndDate(Connection connection, int id, String startdate, String endate) throws SQLException { +PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateYesEnddateSql()); +prSt.setInt(1, id); +prSt.setString(2, startdate); +prSt.setString(3, endate); +return prSt; + } + + /** + * for all user + */ + public PreparedStatement getQueriesNoStartDateNoEndDateAllUser(Connection connection) throws SQLException { +PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateNoEnddateYesallUserSql()); +return prSt; + } + + public PreparedStatement 
getQueriesNoStartDateYesEndDateAllUser(Connection connection, String enddate) throws SQLException { +PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateYesEnddateYesallUserSql()); +prSt.setString(1, enddate); +return prSt; + } + + public PreparedStatement getQueriesYesStartDateNoEndDateAllUser(Connection connection, String startdate) throws SQLException { +PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateNoEnddateYesallUserSql()); +prSt.setString(1, startdate); +return prSt; + } + + public PreparedStatement getQueriesYesStartDateYesEndDateAllUser(Connection connection, String startdate, String endate) throws SQLException { +PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateYesEnddateYesallUserSql()); +prSt.setString(1, startdate); +prSt.setString(2, endate); +return prSt; + } + + + protected String fetchuserIdfromUsernameSql() { +return "select id from auth_user where username=?;"; + + } + + protected String fetchHueQueriesNoStartdateNoEnddateSql() { +return "select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id =?;"; + } +
[10/10] ambari git commit: AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)
AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/424afb47 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/424afb47 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/424afb47 Branch: refs/heads/branch-2.4 Commit: 424afb471ac76da5ca4d4cafb93b103b543b910e Parents: fd9574f Author: Dipayan BhowmickAuthored: Wed Jun 29 17:15:04 2016 +0530 Committer: Dipayan Bhowmick Committed: Wed Jun 29 17:15:56 2016 +0530 -- contrib/views/hueambarimigration/pom.xml| 142 ++-- .../configurationcheck/ConfigurationCheck.java | 182 - .../configurationcheck/ProgressBarStatus.java | 54 -- .../controller/hive/HiveHistoryMigration.java | 222 -- .../hive/HiveSavedQueryMigration.java | 231 -- .../controller/pig/PigJobMigration.java | 201 - .../controller/pig/PigScriptMigration.java | 208 - .../controller/revertchange/RevertChange.java | 217 -- .../datasource/DataSourceAmbariDatabase.java| 2 - .../datasource/DataSourceHueDatabase.java | 2 - .../historyqueryset/MysqlQuerySetAmbariDB.java | 46 ++ .../historyqueryset/OracleQuerySetAmbariDB.java | 44 ++ .../PostgressQuerySetAmbariDB.java | 22 + .../hive/historyqueryset/QuerySetAmbariDB.java | 79 ++ .../instancedetail/MysqlQuerySetAmbariDB.java | 23 + .../instancedetail/OracleQuerySetAmbariDB.java | 31 + .../PostgressQuerySetAmbariDB.java | 22 + .../hive/instancedetail/QuerySetAmbariDB.java | 48 ++ .../savedqueryset/MysqlQuerySetAmbariDB.java| 65 ++ .../savedqueryset/OracleQuerySetAmbariDB.java | 58 ++ .../PostgressQuerySetAmbariDB.java | 22 + .../hive/savedqueryset/QuerySetAmbariDB.java| 131 .../instancedetail/MysqlQuerySetAmbariDB.java | 23 + .../instancedetail/OracleQuerySetAmbariDB.java | 30 + .../PostgressQuerySetAmbariDB.java | 22 + .../pig/instancedetail/QuerySetAmbariDB.java| 39 + .../pig/jobqueryset/MysqlQuerySetAmbariDB.java | 43 + 
.../pig/jobqueryset/OracleQuerySetAmbariDB.java | 41 + .../jobqueryset/PostgressQuerySetAmbariDB.java | 22 + .../pig/jobqueryset/QuerySetAmbariDB.java | 80 ++ .../MysqlQuerySetAmbariDB.java | 43 + .../OracleQuerySetAmbariDB.java | 41 + .../PostgressQuerySetAmbariDB.java | 22 + .../savedscriptqueryset/QuerySetAmbariDB.java | 70 ++ .../hive/historyqueryset/MysqlQuerySet.java | 23 + .../hive/historyqueryset/OracleQuerySet.java| 61 ++ .../hive/historyqueryset/PostgressQuerySet.java | 22 + .../hive/historyqueryset/QuerySet.java | 130 .../hive/historyqueryset/SqliteQuerySet.java| 22 + .../hive/savedqueryset/MysqlQuerySet.java | 23 + .../hive/savedqueryset/OracleQuerySet.java | 65 ++ .../hive/savedqueryset/PostgressQuerySet.java | 22 + .../hive/savedqueryset/QuerySet.java| 134 .../hive/savedqueryset/SqliteQuerySet.java | 22 + .../pig/jobqueryset/MysqlQuerySet.java | 22 + .../pig/jobqueryset/OracleQuerySet.java | 65 ++ .../pig/jobqueryset/PostgressQuerySet.java | 22 + .../huequeryset/pig/jobqueryset/QuerySet.java | 132 .../pig/jobqueryset/SqliteQuerySet.java | 22 + .../pig/savedscriptqueryset/MysqlQuerySet.java | 22 + .../pig/savedscriptqueryset/OracleQuerySet.java | 60 ++ .../savedscriptqueryset/PostgressQuerySet.java | 67 ++ .../pig/savedscriptqueryset/QuerySet.java | 135 .../pig/savedscriptqueryset/SqliteQuerySet.java | 24 + .../huequeryset/userdetails/MysqlQuerySet.java | 24 + .../huequeryset/userdetails/OracleQuerySet.java | 28 + .../userdetails/PostgressQuerySet.java | 22 + .../huequeryset/userdetails/QuerySet.java | 42 + .../huequeryset/userdetails/SqliteQuerySet.java | 22 + .../migration/CreateJobId.java | 85 ++ .../migration/CreateJobIdRevertChange.java | 84 ++ .../migration/InitiateJobMigration.java | 102 +++ .../InitiateJobMigrationforRevertchange.java| 85 ++ .../configuration/AllInstanceDetailsAmbari.java | 59 ++ .../configuration/AmbariDatabaseCheck.java | 62 ++ .../configuration/AmbariWebHdfsCheck.java | 60 ++ .../configuration/CheckProgresStatus.java | 70 ++ 
.../ConfigurationCheckImplementation.java | 134 .../HiveInstanceDetailsAmbari.java | 53 ++ .../HiveInstanceDetailsUtility.java | 106 +++ .../configuration/HueDatabaseCheck.java | 60 ++ .../configuration/HueHttpUrlCheck.java | 60 ++ .../configuration/HueWebHdfsCheck.java | 60 ++
[09/10] ambari git commit: AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)
http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java new file mode 100644 index 000..f3349c6 --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java @@ -0,0 +1,44 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset; + +/** + * + * Overriding methods for Oracle specific queries + */ + +public class OracleQuerySetAmbariDB extends QuerySetAmbariDB { + + @Override + protected String getSqlMaxDSidFromTableId(int id) { +return "select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id + ""; + } + @Override + protected String getTableIdSqlFromInstanceName() { +return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name=?"; + } + @Override + protected String getSqlInsertHiveHistory(int id) { +return "INSERT INTO ds_jobimpl_" + id + " values (?,'','','','','default',?,0,'','',?,'admin',?,'','job','','','Unknown',?,'','Worksheet')"; + } + @Override + protected String getRevSql(int id,String maxcount){ +return "delete from ds_jobimpl_" + id + " where ds_id='" + maxcount + "'"; + } + +} http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/PostgressQuerySetAmbariDB.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/PostgressQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/PostgressQuerySetAmbariDB.java new file mode 100644 index 000..5f4356b --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/PostgressQuerySetAmbariDB.java @@ -0,0 +1,22 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset; + + +public class PostgressQuerySetAmbariDB extends QuerySetAmbariDB { +} http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/QuerySetAmbariDB.java
[03/10] ambari git commit: AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)
http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java deleted file mode 100644 index 614c171..000 --- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java +++ /dev/null @@ -1,563 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.ambari.view.huetoambarimigration.service.pig; - -import java.nio.charset.Charset; -import java.security.PrivilegedExceptionAction; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.io.BufferedInputStream; -import java.io.BufferedReader; -import java.io.BufferedWriter; -import java.io.ByteArrayInputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileWriter; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.util.ArrayList; -import java.util.Calendar; -import java.util.Date; -import java.util.GregorianCalendar; -import java.util.Scanner; -import java.io.*; -import java.net.URISyntaxException; -import java.net.URL; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FSDataInputStream; -import org.apache.hadoop.fs.FSDataOutputStream; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.log4j.Logger; -import org.jdom.Attribute; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.JDOMException; -import org.jdom.input.SAXBuilder; -import org.jdom.output.Format; -import org.jdom.output.XMLOutputter; -import org.json.JSONArray; -import org.json.JSONObject; - -import org.apache.ambari.view.huetoambarimigration.model.*; -import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader; - -public class PigJobImpl { - - static final Logger logger = Logger.getLogger(PigJobImpl.class); - - private static String readAll(Reader rd) throws IOException { -StringBuilder sb = new StringBuilder(); -int cp; -while ((cp = rd.read()) != -1) { - sb.append((char) cp); -} -return sb.toString(); - } - - public void 
wrtitetoalternatesqlfile(String dirname, String content, String instance, int i) throws IOException { -Date dNow = new Date(); -SimpleDateFormat ft = new SimpleDateFormat("-MM-dd hh:mm:ss"); -String currentDate = ft.format(dNow); -XMLOutputter xmlOutput = new XMLOutputter(); -xmlOutput.setFormat(Format.getPrettyFormat()); -File xmlfile = new File(ConfFileReader.getHomeDir() + "RevertChange.xml"); -if (xmlfile.exists()) { - String iteration = Integer.toString(i + 1); - SAXBuilder builder = new SAXBuilder(); - Document doc; - try { -doc = (Document) builder.build(xmlfile); -Element rootNode = doc.getRootElement(); -Element record = new Element("RevertRecord"); -record.setAttribute(new Attribute("id", iteration)); -record.addContent(new Element("datetime").setText(currentDate.toString())); -record.addContent(new Element("dirname").setText(dirname)); -record.addContent(new Element("instance").setText(instance)); -record.addContent(new Element("query").setText(content)); -rootNode.addContent(record); -xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml")); - } catch (JDOMException e) { - -logger.error("Jdom Exception: " , e); - } - - -} else { - // create - try { -String iteration = Integer.toString(i + 1); -Element revertrecord =
ambari git commit: AMBARI-17466. Cleanup HDP 2.5 metainfo.xml for services (ncole)
Repository: ambari Updated Branches: refs/heads/branch-2.4 64613ae9b -> fd9574fc7 AMBARI-17466. Cleanup HDP 2.5 metainfo.xml for services (ncole) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fd9574fc Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fd9574fc Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fd9574fc Branch: refs/heads/branch-2.4 Commit: fd9574fc759cc8d192349c66ae26038b5fd65480 Parents: 64613ae Author: Nate ColeAuthored: Wed Jun 29 07:37:20 2016 -0400 Committer: Nate Cole Committed: Wed Jun 29 07:37:20 2016 -0400 -- .../src/main/resources/stacks/HDP/2.5/services/ATLAS/metainfo.xml | 2 +- .../src/main/resources/stacks/HDP/2.5/services/FALCON/metainfo.xml | 2 +- .../src/main/resources/stacks/HDP/2.5/services/KAFKA/metainfo.xml | 2 +- .../src/main/resources/stacks/HDP/2.5/services/KNOX/metainfo.xml | 2 +- .../src/main/resources/stacks/HDP/2.5/services/PIG/metainfo.xml| 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/fd9574fc/ambari-server/src/main/resources/stacks/HDP/2.5/services/ATLAS/metainfo.xml -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/ATLAS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/ATLAS/metainfo.xml index 449ea9e..88c1915 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/ATLAS/metainfo.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/ATLAS/metainfo.xml @@ -20,7 +20,7 @@ ATLAS - 0.5.0.2.5 + 0.7.0.2.5 ATLAS_SERVER http://git-wip-us.apache.org/repos/asf/ambari/blob/fd9574fc/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/metainfo.xml -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/metainfo.xml index 93f4847..6586937 100644 --- 
a/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/metainfo.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/metainfo.xml @@ -20,7 +20,7 @@ FALCON - 0.6.1.2.5 + 0.10.0.2.5 application-properties http://git-wip-us.apache.org/repos/asf/ambari/blob/fd9574fc/ambari-server/src/main/resources/stacks/HDP/2.5/services/KAFKA/metainfo.xml -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/KAFKA/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/KAFKA/metainfo.xml index e722029..12f6c45 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/KAFKA/metainfo.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/KAFKA/metainfo.xml @@ -20,7 +20,7 @@ KAFKA - 0.9.0.2.5 + 0.10.0.2.5 http://git-wip-us.apache.org/repos/asf/ambari/blob/fd9574fc/ambari-server/src/main/resources/stacks/HDP/2.5/services/KNOX/metainfo.xml -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/KNOX/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/KNOX/metainfo.xml index f94e1d6..8f303a5 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/KNOX/metainfo.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/KNOX/metainfo.xml @@ -20,7 +20,7 @@ KNOX - 0.6.0.2.5 + 0.9.0.2.5 http://git-wip-us.apache.org/repos/asf/ambari/blob/fd9574fc/ambari-server/src/main/resources/stacks/HDP/2.5/services/PIG/metainfo.xml -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/PIG/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/PIG/metainfo.xml index 9d056bb..0dbf74c 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/PIG/metainfo.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/PIG/metainfo.xml @@ -20,7 +20,7 @@ PIG - 0.15.0.2.5 + 0.16.0.2.5
[10/10] ambari git commit: AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)
AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6f4a9c28 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6f4a9c28 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6f4a9c28 Branch: refs/heads/trunk Commit: 6f4a9c288ad52f913ae7d2a9eef7fb7bc1bfa568 Parents: 259279c Author: Dipayan BhowmickAuthored: Wed Jun 29 17:04:46 2016 +0530 Committer: Dipayan Bhowmick Committed: Wed Jun 29 17:05:08 2016 +0530 -- contrib/views/hueambarimigration/pom.xml| 144 ++-- .../configurationcheck/ConfigurationCheck.java | 182 - .../configurationcheck/ProgressBarStatus.java | 54 -- .../controller/hive/HiveHistoryMigration.java | 222 -- .../hive/HiveSavedQueryMigration.java | 231 -- .../controller/pig/PigJobMigration.java | 201 - .../controller/pig/PigScriptMigration.java | 208 - .../controller/revertchange/RevertChange.java | 217 -- .../datasource/DataSourceAmbariDatabase.java| 2 - .../datasource/DataSourceHueDatabase.java | 2 - .../historyqueryset/MysqlQuerySetAmbariDB.java | 46 ++ .../historyqueryset/OracleQuerySetAmbariDB.java | 44 ++ .../PostgressQuerySetAmbariDB.java | 22 + .../hive/historyqueryset/QuerySetAmbariDB.java | 79 ++ .../instancedetail/MysqlQuerySetAmbariDB.java | 23 + .../instancedetail/OracleQuerySetAmbariDB.java | 31 + .../PostgressQuerySetAmbariDB.java | 22 + .../hive/instancedetail/QuerySetAmbariDB.java | 48 ++ .../savedqueryset/MysqlQuerySetAmbariDB.java| 65 ++ .../savedqueryset/OracleQuerySetAmbariDB.java | 58 ++ .../PostgressQuerySetAmbariDB.java | 22 + .../hive/savedqueryset/QuerySetAmbariDB.java| 131 .../instancedetail/MysqlQuerySetAmbariDB.java | 23 + .../instancedetail/OracleQuerySetAmbariDB.java | 30 + .../PostgressQuerySetAmbariDB.java | 22 + .../pig/instancedetail/QuerySetAmbariDB.java| 39 + .../pig/jobqueryset/MysqlQuerySetAmbariDB.java | 43 + 
.../pig/jobqueryset/OracleQuerySetAmbariDB.java | 41 + .../jobqueryset/PostgressQuerySetAmbariDB.java | 22 + .../pig/jobqueryset/QuerySetAmbariDB.java | 80 ++ .../MysqlQuerySetAmbariDB.java | 43 + .../OracleQuerySetAmbariDB.java | 41 + .../PostgressQuerySetAmbariDB.java | 22 + .../savedscriptqueryset/QuerySetAmbariDB.java | 70 ++ .../hive/historyqueryset/MysqlQuerySet.java | 23 + .../hive/historyqueryset/OracleQuerySet.java| 61 ++ .../hive/historyqueryset/PostgressQuerySet.java | 22 + .../hive/historyqueryset/QuerySet.java | 130 .../hive/historyqueryset/SqliteQuerySet.java| 22 + .../hive/savedqueryset/MysqlQuerySet.java | 23 + .../hive/savedqueryset/OracleQuerySet.java | 65 ++ .../hive/savedqueryset/PostgressQuerySet.java | 22 + .../hive/savedqueryset/QuerySet.java| 134 .../hive/savedqueryset/SqliteQuerySet.java | 22 + .../pig/jobqueryset/MysqlQuerySet.java | 22 + .../pig/jobqueryset/OracleQuerySet.java | 65 ++ .../pig/jobqueryset/PostgressQuerySet.java | 22 + .../huequeryset/pig/jobqueryset/QuerySet.java | 132 .../pig/jobqueryset/SqliteQuerySet.java | 22 + .../pig/savedscriptqueryset/MysqlQuerySet.java | 22 + .../pig/savedscriptqueryset/OracleQuerySet.java | 60 ++ .../savedscriptqueryset/PostgressQuerySet.java | 67 ++ .../pig/savedscriptqueryset/QuerySet.java | 135 .../pig/savedscriptqueryset/SqliteQuerySet.java | 24 + .../huequeryset/userdetails/MysqlQuerySet.java | 24 + .../huequeryset/userdetails/OracleQuerySet.java | 28 + .../userdetails/PostgressQuerySet.java | 22 + .../huequeryset/userdetails/QuerySet.java | 42 + .../huequeryset/userdetails/SqliteQuerySet.java | 22 + .../migration/CreateJobId.java | 85 ++ .../migration/CreateJobIdRevertChange.java | 84 ++ .../migration/InitiateJobMigration.java | 102 +++ .../InitiateJobMigrationforRevertchange.java| 85 ++ .../configuration/AllInstanceDetailsAmbari.java | 59 ++ .../configuration/AmbariDatabaseCheck.java | 62 ++ .../configuration/AmbariWebHdfsCheck.java | 60 ++ .../configuration/CheckProgresStatus.java | 70 ++ 
.../ConfigurationCheckImplementation.java | 134 .../HiveInstanceDetailsAmbari.java | 53 ++ .../HiveInstanceDetailsUtility.java | 106 +++ .../configuration/HueDatabaseCheck.java | 60 ++ .../configuration/HueHttpUrlCheck.java | 60 ++ .../configuration/HueWebHdfsCheck.java | 60 ++
[09/10] ambari git commit: AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)
http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java new file mode 100644 index 000..f3349c6 --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java @@ -0,0 +1,44 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset; + +/** + * + * Overriding methods for Oracle specific queries + */ + +public class OracleQuerySetAmbariDB extends QuerySetAmbariDB { + + @Override + protected String getSqlMaxDSidFromTableId(int id) { +return "select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id + ""; + } + @Override + protected String getTableIdSqlFromInstanceName() { +return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name=?"; + } + @Override + protected String getSqlInsertHiveHistory(int id) { +return "INSERT INTO ds_jobimpl_" + id + " values (?,'','','','','default',?,0,'','',?,'admin',?,'','job','','','Unknown',?,'','Worksheet')"; + } + @Override + protected String getRevSql(int id,String maxcount){ +return "delete from ds_jobimpl_" + id + " where ds_id='" + maxcount + "'"; + } + +} http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/PostgressQuerySetAmbariDB.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/PostgressQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/PostgressQuerySetAmbariDB.java new file mode 100644 index 000..5f4356b --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/PostgressQuerySetAmbariDB.java @@ -0,0 +1,22 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset; + + +public class PostgressQuerySetAmbariDB extends QuerySetAmbariDB { +} http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/QuerySetAmbariDB.java
[07/10] ambari git commit: AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)
http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java new file mode 100644 index 000..bcbe4de --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java @@ -0,0 +1,106 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.ambari.view.huetoambarimigration.migration.configuration; + +import org.apache.ambari.view.ViewContext; +import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase; +import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.InstanceModel; + +import java.beans.PropertyVetoException; +import java.io.IOException; +import java.sql.*; +import java.util.ArrayList; +import java.util.List; + +import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.instancedetail.*; + + +public class HiveInstanceDetailsUtility { + + public List getInstancedetails(ViewContext view) throws PropertyVetoException, SQLException, IOException { + +List instancelist = new ArrayList<>(); +Connection conn = null; +conn = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection(); +conn.setAutoCommit(false); +PreparedStatement prSt; + +QuerySetAmbariDB ambaridatabase = null; + +if (view.getProperties().get("ambaridrivername").contains("mysql")) { + ambaridatabase = new MysqlQuerySetAmbariDB(); +} else if (view.getProperties().get("ambaridrivername").contains("postgresql")) { + ambaridatabase = new PostgressQuerySetAmbariDB(); +} else if (view.getProperties().get("ambaridrivername").contains("oracle")) { + ambaridatabase = new OracleQuerySetAmbariDB(); +} + +ResultSet rs1 = null; +prSt = ambaridatabase.getHiveInstanceDeatil(conn); +rs1 = prSt.executeQuery(); +int i = 0; + +while (rs1.next()) { + InstanceModel I = new InstanceModel(); + I.setInstanceName(rs1.getString(1)); + I.setId(i); + instancelist.add(I); + i++; +} +return instancelist; + + } + + public List getAllInstancedetails(ViewContext view) throws PropertyVetoException, SQLException, IOException { + +List instancelist = new ArrayList<>(); +Connection conn = null; 
+Statement stmt = null; +conn = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection(); +conn.setAutoCommit(false); +PreparedStatement prSt; + +QuerySetAmbariDB ambaridatabase = null; + +if (view.getProperties().get("ambaridrivername").contains("mysql")) { + ambaridatabase = new MysqlQuerySetAmbariDB(); +} else if (view.getProperties().get("ambaridrivername").contains("postgresql")) { + ambaridatabase = new PostgressQuerySetAmbariDB(); +} else if (view.getProperties().get("ambaridrivername").contains("oracle")) { + ambaridatabase = new OracleQuerySetAmbariDB(); +} + +ResultSet rs1 = null; +int i = 0; +prSt = ambaridatabase.getAllInstanceDeatil(conn); +rs1 = prSt.executeQuery(); + +while (rs1.next()) { + InstanceModel I = new InstanceModel(); + I.setInstanceName(rs1.getString(1)); + I.setId(i); + instancelist.add(I); + i++; +} +rs1.close(); +return instancelist; + + } + + +}
[08/10] ambari git commit: AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)
http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/QuerySet.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/QuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/QuerySet.java new file mode 100644 index 000..c81d51a --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/QuerySet.java @@ -0,0 +1,134 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset; + +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.SQLException; + + +public abstract class QuerySet { + + + public PreparedStatement getUseridfromUserName(Connection connection, String username) throws SQLException { +PreparedStatement prSt = connection.prepareStatement(fetchuserIdfromUsernameSql()); +prSt.setString(1, username); +return prSt; + } + + public PreparedStatement getQueriesNoStartDateNoEndDate(Connection connection, int id) throws SQLException { +PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateNoEnddateSql()); +prSt.setInt(1, id); +return prSt; + } + + public PreparedStatement getQueriesNoStartDateYesEndDate(Connection connection, int id, String enddate) throws SQLException { +PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateYesEnddateSql()); +prSt.setInt(1, id); +prSt.setString(2, enddate); +return prSt; + } + + public PreparedStatement getQueriesYesStartDateNoEndDate(Connection connection, int id, String startdate) throws SQLException { +PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateNoEnddateSql()); +prSt.setInt(1, id); +prSt.setString(2, startdate); +return prSt; + } + + public PreparedStatement getQueriesYesStartDateYesEndDate(Connection connection, int id, String startdate, String endate) throws SQLException { +PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateYesEnddateSql()); +prSt.setInt(1, id); +prSt.setString(2, startdate); +prSt.setString(3, endate); +return prSt; + } + + /** + * for all user + */ + public PreparedStatement getQueriesNoStartDateNoEndDateAllUser(Connection connection) throws SQLException { +PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateNoEnddateYesallUserSql()); +return prSt; + } + + public PreparedStatement 
getQueriesNoStartDateYesEndDateAllUser(Connection connection, String enddate) throws SQLException { +PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateYesEnddateYesallUserSql()); +prSt.setString(1, enddate); +return prSt; + } + + public PreparedStatement getQueriesYesStartDateNoEndDateAllUser(Connection connection, String startdate) throws SQLException { +PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateNoEnddateYesallUserSql()); +prSt.setString(1, startdate); +return prSt; + } + + public PreparedStatement getQueriesYesStartDateYesEndDateAllUser(Connection connection, String startdate, String endate) throws SQLException { +PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateYesEnddateYesallUserSql()); +prSt.setString(1, startdate); +prSt.setString(2, endate); +return prSt; + } + + + protected String fetchuserIdfromUsernameSql() { +return "select id from auth_user where username=?;"; + + } + + protected String fetchHueQueriesNoStartdateNoEnddateSql() { +return "select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id =?;"; + } +
[06/10] ambari git commit: AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)
http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java new file mode 100644 index 000..9f9e053 --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java @@ -0,0 +1,281 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + +package org.apache.ambari.view.huetoambarimigration.migration.hive.savedquery; + +import org.apache.ambari.view.ViewContext; +import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase; +import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase; +import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.HiveModel; +import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationModel; +import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound; +import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.MysqlQuerySetAmbariDB; +import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.OracleQuerySetAmbariDB; +import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.PostgressQuerySetAmbariDB; +import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.QuerySetAmbariDB; +import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset.*; +import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager; +import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager; +import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse; +import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation; +import org.apache.log4j.Logger; + +import java.beans.PropertyVetoException; +import java.io.IOException; +import java.net.URISyntaxException; +import java.sql.Connection; +import java.sql.SQLException; +import java.text.ParseException; +import java.util.ArrayList; + +public class HiveSavedQueryMigrationUtility { + + + + protected MigrationResourceManager resourceManager = null; + + public synchronized 
PersonalCRUDResourceManager getResourceManager(ViewContext view) { +if (resourceManager == null) { + resourceManager = new MigrationResourceManager(view); +} +return resourceManager; + } + + public MigrationModel hiveSavedQueryMigration(String username, String instance, String startDate, String endDate, ViewContext view, MigrationResponse migrationresult, String jobid) throws IOException, ItemNotFound { + +long startTime = System.currentTimeMillis(); + +final Logger logger = Logger.getLogger(HiveSavedQueryMigrationUtility.class); + +Connection connectionAmbaridb = null; +Connection connectionHuedb = null; + +int i = 0; + +logger.info("-"); +logger.info("hive saved query Migration started"); +logger.info("-"); +logger.info("start date: " + startDate); +logger.info("enddate date: " + endDate); +logger.info("instance is: " + instance); +logger.info("hue username is : " + username); + +HiveSavedQueryMigrationImplementation hivesavedqueryimpl = new HiveSavedQueryMigrationImplementation();/* creating Implementation object */ + +QuerySet huedatabase=null; + +if(view.getProperties().get("huedrivername").contains("mysql")) +{ + huedatabase=new
[02/10] ambari git commit: AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)
http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.jshintrc -- diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.jshintrc b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.jshintrc new file mode 100644 index 000..08096ef --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.jshintrc @@ -0,0 +1,32 @@ +{ + "predef": [ +"document", +"window", +"-Promise" + ], + "browser": true, + "boss": true, + "curly": true, + "debug": false, + "devel": true, + "eqeqeq": true, + "evil": true, + "forin": false, + "immed": false, + "laxbreak": false, + "newcap": true, + "noarg": true, + "noempty": false, + "nonew": false, + "nomen": false, + "onevar": false, + "plusplus": false, + "regexp": false, + "undef": true, + "sub": true, + "strict": false, + "white": false, + "eqnull": true, + "esnext": true, + "unused": true +} http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.travis.yml -- diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.travis.yml b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.travis.yml new file mode 100644 index 000..64533be --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.travis.yml @@ -0,0 +1,22 @@ +--- +language: node_js +node_js: + - "4" + +sudo: false + +cache: + directories: +- node_modules + +before_install: + - npm config set spin false + - npm install -g bower + - npm install phantomjs-prebuilt + +install: + - npm install + - bower install + +script: + - npm test http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.watchmanconfig -- diff --git 
a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.watchmanconfig b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.watchmanconfig new file mode 100644 index 000..e7834e3 --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.watchmanconfig @@ -0,0 +1,3 @@ +{ + "ignore_dirs": ["tmp", "dist"] +} http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/README.md -- diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/README.md b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/README.md new file mode 100644 index 000..1d1a14f --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/README.md @@ -0,0 +1,67 @@ + +# Hueambarimigration-view + +This README outlines the details of collaborating on this Ember application. +A short introduction of this app could easily go here. + +## Prerequisites + +You will need the following things properly installed on your computer. + +* [Git](http://git-scm.com/) +* [Node.js](http://nodejs.org/) (with NPM) +* [Bower](http://bower.io/) +* [Ember CLI](http://ember-cli.com/) +* [PhantomJS](http://phantomjs.org/) + +## Installation + +* `git clone ` this repository +* change into the new directory +* `npm install` +* `bower install` + +## Running / Development + +* `ember server` +* Visit your app at [http://localhost:4200](http://localhost:4200). + +### Code Generators + +Make use of the many generators for code, try `ember help generate` for more details + +### Running Tests + +* `ember test` +* `ember test --server` + +### Building + +* `ember build` (development) +* `ember build --environment production` (production) + +### Deploying + +Specify what it takes to deploy your app. 
+ +## Further Reading / Useful Links + +* [ember.js](http://emberjs.com/) +* [ember-cli](http://ember-cli.com/) +* Development Browser Extensions + * [ember inspector for chrome](https://chrome.google.com/webstore/detail/ember-inspector/bmdblncegkenkacieihfhpjfppoconhi) + * [ember inspector for firefox](https://addons.mozilla.org/en-US/firefox/addon/ember-inspector/) + http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/adapters/application.js -- diff --git
[03/10] ambari git commit: AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)
http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java deleted file mode 100644 index 614c171..000 --- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java +++ /dev/null @@ -1,563 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.ambari.view.huetoambarimigration.service.pig; - -import java.nio.charset.Charset; -import java.security.PrivilegedExceptionAction; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.io.BufferedInputStream; -import java.io.BufferedReader; -import java.io.BufferedWriter; -import java.io.ByteArrayInputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileWriter; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.util.ArrayList; -import java.util.Calendar; -import java.util.Date; -import java.util.GregorianCalendar; -import java.util.Scanner; -import java.io.*; -import java.net.URISyntaxException; -import java.net.URL; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FSDataInputStream; -import org.apache.hadoop.fs.FSDataOutputStream; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.log4j.Logger; -import org.jdom.Attribute; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.JDOMException; -import org.jdom.input.SAXBuilder; -import org.jdom.output.Format; -import org.jdom.output.XMLOutputter; -import org.json.JSONArray; -import org.json.JSONObject; - -import org.apache.ambari.view.huetoambarimigration.model.*; -import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader; - -public class PigJobImpl { - - static final Logger logger = Logger.getLogger(PigJobImpl.class); - - private static String readAll(Reader rd) throws IOException { -StringBuilder sb = new StringBuilder(); -int cp; -while ((cp = rd.read()) != -1) { - sb.append((char) cp); -} -return sb.toString(); - } - - public void 
wrtitetoalternatesqlfile(String dirname, String content, String instance, int i) throws IOException { -Date dNow = new Date(); -SimpleDateFormat ft = new SimpleDateFormat("-MM-dd hh:mm:ss"); -String currentDate = ft.format(dNow); -XMLOutputter xmlOutput = new XMLOutputter(); -xmlOutput.setFormat(Format.getPrettyFormat()); -File xmlfile = new File(ConfFileReader.getHomeDir() + "RevertChange.xml"); -if (xmlfile.exists()) { - String iteration = Integer.toString(i + 1); - SAXBuilder builder = new SAXBuilder(); - Document doc; - try { -doc = (Document) builder.build(xmlfile); -Element rootNode = doc.getRootElement(); -Element record = new Element("RevertRecord"); -record.setAttribute(new Attribute("id", iteration)); -record.addContent(new Element("datetime").setText(currentDate.toString())); -record.addContent(new Element("dirname").setText(dirname)); -record.addContent(new Element("instance").setText(instance)); -record.addContent(new Element("query").setText(content)); -rootNode.addContent(record); -xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml")); - } catch (JDOMException e) { - -logger.error("Jdom Exception: " , e); - } - - -} else { - // create - try { -String iteration = Integer.toString(i + 1); -Element revertrecord =
[04/10] ambari git commit: AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)
http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/InstanceModel.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/InstanceModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/InstanceModel.java new file mode 100644 index 000..d7f2868 --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/InstanceModel.java @@ -0,0 +1,41 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.ambari.view.huetoambarimigration.resources.scripts.models; + + +public class InstanceModel { + + String instanceName; + int id; + + public String getInstanceName() { + return instanceName; + } + + public void setInstanceName(String instanceName) { + this.instanceName = instanceName; + } + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } +} http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/JobReturnIdModel.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/JobReturnIdModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/JobReturnIdModel.java new file mode 100644 index 000..1a247bb --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/JobReturnIdModel.java @@ -0,0 +1,43 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.ambari.view.huetoambarimigration.resources.scripts.models; + +/** + * Created by temp on 5/19/16. + */ +public class JobReturnIdModel { + + int id; + String idforJob; + + public String getIdforJob() { +return idforJob; + } + + public void setIdforJob(String idforJob) { +this.idforJob = idforJob; + } + + public int getId() { +return id; + } + + public void setId(int id) { +this.id = id; + } +} http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java new file mode 100644 index 000..f765e15 --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java @@ -0,0 +1,130 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license
[01/10] ambari git commit: AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)
Repository: ambari Updated Branches: refs/heads/trunk 259279cd6 -> 6f4a9c288 http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page.hbs -- diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page.hbs b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page.hbs new file mode 100644 index 000..60fccf3 --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page.hbs @@ -0,0 +1,42 @@ +{{! + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+}} + + + + Hive + + {{#link-to 'homePage.hive-history'}}HiveHistory Query{{/link-to}} + {{#link-to 'homePage.hive-saved-query'}}HiveSaved Query{{/link-to}} + + + Pig + + {{#link-to 'homePage.pig-script'}}PigSaved script{{/link-to}} + {{#link-to 'homePage.pig-job'}}PigJob{{/link-to}} + + + {{#link-to 'homePage.revert-change'}}RevertChange{{/link-to}} + + + + + + {{outlet}} + + + http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-history.hbs -- diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-history.hbs b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-history.hbs new file mode 100644 index 000..b43e49f --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-history.hbs @@ -0,0 +1,125 @@ +{{! +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at +http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+}} + + +History Query Migration + + + + + +User Name + * + + + +{{ember-selectize content=model.usersdetail optionValuePath="content.username" optionLabelPath="content.username" value=usernamehue placeholder="Select an userName" }} + + + + + +Instance Name + * + + + +{{ember-selectize content=model.hiveinstancedetail optionValuePath="content.instanceName" optionLabelPath="content.instanceName" value=instancename placeholder="Select an Instance name" }} + + + + + +Start Date + + +{{date-picker size="35" date=startdate valueFormat='-MM-DD' name="startdate" id="startdate" value=startdate}} + + + + + +End Date + + +{{date-picker size="35" date=enddate valueFormat='-MM-DD' name="enddate" id="enddate" value=enddate}} + + + + + +Submit + + +{{#if jobstatus}} + +Job has been Submitted. + + +{{/if}} + + + + + +{{#if jobstatus}} + + + + + +{{/if}} + + + + +{{#if completionStatus}} + Migration Report + + + + +
[05/10] ambari git commit: AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)
http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeUtility.java -- diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeUtility.java new file mode 100644 index 000..7e1bbf4 --- /dev/null +++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeUtility.java @@ -0,0 +1,225 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ambari.view.huetoambarimigration.migration.revertchange; + +import java.beans.PropertyVetoException; +import java.io.BufferedReader; +import java.io.File; +import java.io.IOException; +import java.net.URISyntaxException; +import java.security.PrivilegedExceptionAction; +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.Statement; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; + +import org.apache.ambari.view.ViewContext; +import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationModel; +import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound; +import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager; +import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager; +import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse; +import org.apache.log4j.Logger; +import org.jdom.Document; +import org.jdom.Element; +import org.jdom.JDOMException; +import org.jdom.input.SAXBuilder; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.security.UserGroupInformation; + +import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase; +import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation; + + +public class RevertChangeUtility { + + + + protected MigrationResourceManager resourceManager = null; + + public synchronized PersonalCRUDResourceManager getResourceManager(ViewContext view) { +if (resourceManager == null) { + resourceManager = new MigrationResourceManager(view); +} +return resourceManager; + } + + public boolean stringtoDatecompare(String datefromservlet, + String datefromfile) throws ParseException { + +SimpleDateFormat formatter 
= new SimpleDateFormat("-MM-dd"); +Date date1 = formatter.parse(datefromservlet); +Date date2 = formatter.parse(datefromfile); +if (date1.compareTo(date2) < 0) { + return true; +} else { + return false; +} + + } + + public void removedir(final String dir, final String namenodeuri) +throws IOException, URISyntaxException { + +try { + UserGroupInformation ugi = UserGroupInformation +.createRemoteUser("hdfs"); + + ugi.doAs(new PrivilegedExceptionAction() { + +public Void run() throws Exception { + + Configuration conf = new Configuration(); + conf.set("fs.hdfs.impl", +org.apache.hadoop.hdfs.DistributedFileSystem.class + .getName()); + conf.set("fs.file.impl", +org.apache.hadoop.fs.LocalFileSystem.class + .getName()); + conf.set("fs.defaultFS", namenodeuri); + conf.set("hadoop.job.ugi", "hdfs"); + + FileSystem fs = FileSystem.get(conf); + Path src = new Path(dir); + fs.delete(src, true); + return null; +} + }); +} catch (Exception e) { + e.printStackTrace(); +} + } + + public MigrationModel revertChangeUtility(String instance, String revertDate,String jobid,ViewContext
ambari git commit: AMBARI-17170. Logfeeder should read every service/ambari logs (oleewere)
Repository: ambari Updated Branches: refs/heads/branch-2.4 6097a0e13 -> 64613ae9b AMBARI-17170. Logfeeder should read every service/ambari logs (oleewere) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/64613ae9 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/64613ae9 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/64613ae9 Branch: refs/heads/branch-2.4 Commit: 64613ae9b7bb76e5d248a0d353f5fcde0941f301 Parents: 6097a0e Author: oleewereAuthored: Tue Jun 28 20:04:11 2016 +0200 Committer: oleewere Committed: Wed Jun 29 12:18:28 2016 +0200 -- .../0.5.0/configuration/logfeeder-env.xml | 12 - .../LOGSEARCH/0.5.0/kerberos.json | 2 +- .../0.5.0/package/scripts/logfeeder.py | 15 ++ .../LOGSEARCH/0.5.0/package/scripts/params.py | 1 - .../0.5.0/package/scripts/setup_logfeeder.py| 25 ++ .../stacks/2.4/LOGSEARCH/test_logfeeder.py | 49 .../test/python/stacks/2.4/configs/default.json | 2 - ambari-web/app/data/HDP2/site_properties.js | 31 + 8 files changed, 31 insertions(+), 106 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/64613ae9/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-env.xml -- diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-env.xml b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-env.xml index 9a6f32c..ee885e3 100644 --- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-env.xml +++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-env.xml @@ -42,18 +42,6 @@ -logfeeder_user -logfeeder -USER -logfeeder user -Logsfeeder User - - user - false - - - - logfeeder_max_mem 512m Max Memory for Logfeeder http://git-wip-us.apache.org/repos/asf/ambari/blob/64613ae9/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/kerberos.json -- diff --git 
a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/kerberos.json b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/kerberos.json index 7920f4d..ab4793b 100644 --- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/kerberos.json +++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/kerberos.json @@ -80,7 +80,7 @@ "keytab": { "file": "${keytab_dir}/logfeeder.service.keytab", "owner": { - "name": "${logfeeder-env/logfeeder_user}", + "name": "root", "access": "r" }, "group": { http://git-wip-us.apache.org/repos/asf/ambari/blob/64613ae9/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logfeeder.py -- diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logfeeder.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logfeeder.py index e92db10..9567db2 100644 --- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logfeeder.py +++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logfeeder.py @@ -16,14 +16,13 @@ See the License for the specific language governing permissions and limitations under the License. 
""" -import os -import grp + +import getpass from resource_management.core.resources.system import Execute, File from resource_management.libraries.functions.format import format from resource_management.libraries.functions.check_process_status import check_process_status from resource_management.libraries.script.script import Script from setup_logfeeder import setup_logfeeder -from resource_management.core.resources.accounts import User from logsearch_common import kill_process @@ -37,9 +36,6 @@ class LogFeeder(Script): def configure(self, env, upgrade_type=None): import params env.set_params(params) -User(params.logfeeder_user, - groups=[params.user_group, grp.getgrgid(os.getegid()).gr_name], - fetch_nonlocal_groups = params.fetch_nonlocal_groups) setup_logfeeder() @@ -47,16 +43,15 @@ class LogFeeder(Script): import params env.set_params(params) self.configure(env) -Execute(format("{logfeeder_dir}/run.sh"), -
ambari git commit: AMBARI-17170. Logfeeder should read every service/ambari logs (oleewere)
Repository: ambari Updated Branches: refs/heads/trunk 124c93960 -> 259279cd6 AMBARI-17170. Logfeeder should read every service/ambari logs (oleewere) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/259279cd Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/259279cd Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/259279cd Branch: refs/heads/trunk Commit: 259279cd67ba2626d68d3565982729769b9f5ea9 Parents: 124c939 Author: oleewereAuthored: Tue Jun 28 20:04:11 2016 +0200 Committer: oleewere Committed: Wed Jun 29 12:04:57 2016 +0200 -- .../0.5.0/configuration/logfeeder-env.xml | 12 - .../LOGSEARCH/0.5.0/kerberos.json | 2 +- .../0.5.0/package/scripts/logfeeder.py | 15 ++ .../LOGSEARCH/0.5.0/package/scripts/params.py | 1 - .../0.5.0/package/scripts/setup_logfeeder.py| 25 ++ .../stacks/2.4/LOGSEARCH/test_logfeeder.py | 49 .../test/python/stacks/2.4/configs/default.json | 2 - ambari-web/app/data/HDP2/site_properties.js | 31 + 8 files changed, 31 insertions(+), 106 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/259279cd/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-env.xml -- diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-env.xml b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-env.xml index 9a6f32c..ee885e3 100644 --- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-env.xml +++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-env.xml @@ -42,18 +42,6 @@ -logfeeder_user -logfeeder -USER -logfeeder user -Logsfeeder User - - user - false - - - - logfeeder_max_mem 512m Max Memory for Logfeeder http://git-wip-us.apache.org/repos/asf/ambari/blob/259279cd/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/kerberos.json -- diff --git 
a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/kerberos.json b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/kerberos.json index 7920f4d..ab4793b 100644 --- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/kerberos.json +++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/kerberos.json @@ -80,7 +80,7 @@ "keytab": { "file": "${keytab_dir}/logfeeder.service.keytab", "owner": { - "name": "${logfeeder-env/logfeeder_user}", + "name": "root", "access": "r" }, "group": { http://git-wip-us.apache.org/repos/asf/ambari/blob/259279cd/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logfeeder.py -- diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logfeeder.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logfeeder.py index e92db10..9567db2 100644 --- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logfeeder.py +++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logfeeder.py @@ -16,14 +16,13 @@ See the License for the specific language governing permissions and limitations under the License. 
""" -import os -import grp + +import getpass from resource_management.core.resources.system import Execute, File from resource_management.libraries.functions.format import format from resource_management.libraries.functions.check_process_status import check_process_status from resource_management.libraries.script.script import Script from setup_logfeeder import setup_logfeeder -from resource_management.core.resources.accounts import User from logsearch_common import kill_process @@ -37,9 +36,6 @@ class LogFeeder(Script): def configure(self, env, upgrade_type=None): import params env.set_params(params) -User(params.logfeeder_user, - groups=[params.user_group, grp.getgrgid(os.getegid()).gr_name], - fetch_nonlocal_groups = params.fetch_nonlocal_groups) setup_logfeeder() @@ -47,16 +43,15 @@ class LogFeeder(Script): import params env.set_params(params) self.configure(env) -Execute(format("{logfeeder_dir}/run.sh"), -
ambari git commit: AMBARI-17461. Assign Hive Server Interactive config 'hive.llap.daemon.yarn.container.mb' value of 'yarn.scheduler.minimum-allocation-mb' only if call is made via Blueprint.
Repository: ambari Updated Branches: refs/heads/branch-2.4 b3cbdb3a1 -> 6097a0e13 AMBARI-17461. Assign Hive Server Interactive config 'hive.llap.daemon.yarn.container.mb' value of 'yarn.scheduler.minimum-allocation-mb' only if call is made via Blueprint. Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6097a0e1 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6097a0e1 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6097a0e1 Branch: refs/heads/branch-2.4 Commit: 6097a0e13514c9d5309d947a596d9aac02447195 Parents: b3cbdb3 Author: Swapan ShridharAuthored: Wed Jun 29 01:27:19 2016 -0700 Committer: Swapan Shridhar Committed: Wed Jun 29 01:27:19 2016 -0700 -- .../HIVE/0.12.0.2.0/package/alerts/alert_llap_app_status.py | 2 +- .../main/resources/stacks/HDP/2.5/services/stack_advisor.py | 9 + 2 files changed, 6 insertions(+), 5 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/6097a0e1/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_llap_app_status.py -- diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_llap_app_status.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_llap_app_status.py index c3aab6f..6232dff 100644 --- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_llap_app_status.py +++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_llap_app_status.py @@ -160,7 +160,7 @@ def execute(configurations={}, parameters={}, host_name=None): if STACK_ROOT in configurations: llap_status_cmd = configurations[STACK_ROOT] + format("/current/hive-server2-hive2/bin/hive --service llapstatus --name {llap_app_name} -findAppTimeout {LLAP_APP_STATUS_CMD_TIMEOUT}") else: - llap_status_cmd = STACK_ROOT_DEFAULT + format("/current/hive-server2-hive2/bin/hive --service llapstatus --name 
{llap_app_name}") + llap_status_cmd = STACK_ROOT_DEFAULT + format("/current/hive-server2-hive2/bin/hive --service llapstatus --name {llap_app_name} -findAppTimeout {LLAP_APP_STATUS_CMD_TIMEOUT}") code, output, error = shell.checked_call(llap_status_cmd, user=hive_user, stderr=subprocess.PIPE, timeout=check_command_timeout, http://git-wip-us.apache.org/repos/asf/ambari/blob/6097a0e1/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py index af2d2c1..3dcb426 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py +++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py @@ -454,10 +454,11 @@ class HDP25StackAdvisor(HDP24StackAdvisor): # Update 'hive.llap.daemon.queue.name' prop combo entries and llap capacity slider visibility. self.setLlapDaemonQueuePropAttributesAndCapSliderVisibility(services, configurations) -read_llap_daemon_yarn_cont_mb = long(self.get_yarn_min_container_size(services, configurations)) -putHiveInteractiveSiteProperty('hive.llap.daemon.yarn.container.mb', read_llap_daemon_yarn_cont_mb) -# initial memory setting to make sure hive.llap.daemon.yarn.container.mb >= yarn.scheduler.minimum-allocation-mb -Logger.debug("Adjusted 'hive.llap.daemon.yarn.container.mb' to yarn min container size as initial size " +if not services["changed-configurations"]: + read_llap_daemon_yarn_cont_mb = long(self.get_yarn_min_container_size(services, configurations)) + putHiveInteractiveSiteProperty('hive.llap.daemon.yarn.container.mb', read_llap_daemon_yarn_cont_mb) + # initial memory setting to make sure hive.llap.daemon.yarn.container.mb >= yarn.scheduler.minimum-allocation-mb + Logger.info("Adjusted 'hive.llap.daemon.yarn.container.mb' to yarn min container size as initial size " "(" + str(self.get_yarn_min_container_size(services, 
configurations)) + " MB).") try:
ambari git commit: AMBARI-17461. Assign Hive Server Interactive config 'hive.llap.daemon.yarn.container.mb' value of 'yarn.scheduler.minimum-allocation-mb' only if call is made via Blueprint.
Repository: ambari Updated Branches: refs/heads/trunk bf5ca2f5e -> 124c93960 AMBARI-17461. Assign Hive Server Interactive config 'hive.llap.daemon.yarn.container.mb' value of 'yarn.scheduler.minimum-allocation-mb' only if call is made via Blueprint. Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/124c9396 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/124c9396 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/124c9396 Branch: refs/heads/trunk Commit: 124c9396078ebb52d5ab5a0196e021059f0ae849 Parents: bf5ca2f Author: Swapan ShridharAuthored: Tue Jun 28 00:26:33 2016 -0700 Committer: Swapan Shridhar Committed: Wed Jun 29 01:23:57 2016 -0700 -- .../HIVE/0.12.0.2.0/package/alerts/alert_llap_app_status.py | 2 +- .../main/resources/stacks/HDP/2.5/services/stack_advisor.py | 9 + 2 files changed, 6 insertions(+), 5 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/124c9396/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_llap_app_status.py -- diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_llap_app_status.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_llap_app_status.py index c3aab6f..6232dff 100644 --- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_llap_app_status.py +++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_llap_app_status.py @@ -160,7 +160,7 @@ def execute(configurations={}, parameters={}, host_name=None): if STACK_ROOT in configurations: llap_status_cmd = configurations[STACK_ROOT] + format("/current/hive-server2-hive2/bin/hive --service llapstatus --name {llap_app_name} -findAppTimeout {LLAP_APP_STATUS_CMD_TIMEOUT}") else: - llap_status_cmd = STACK_ROOT_DEFAULT + format("/current/hive-server2-hive2/bin/hive --service llapstatus --name 
{llap_app_name}") + llap_status_cmd = STACK_ROOT_DEFAULT + format("/current/hive-server2-hive2/bin/hive --service llapstatus --name {llap_app_name} -findAppTimeout {LLAP_APP_STATUS_CMD_TIMEOUT}") code, output, error = shell.checked_call(llap_status_cmd, user=hive_user, stderr=subprocess.PIPE, timeout=check_command_timeout, http://git-wip-us.apache.org/repos/asf/ambari/blob/124c9396/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py index af2d2c1..3dcb426 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py +++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py @@ -454,10 +454,11 @@ class HDP25StackAdvisor(HDP24StackAdvisor): # Update 'hive.llap.daemon.queue.name' prop combo entries and llap capacity slider visibility. self.setLlapDaemonQueuePropAttributesAndCapSliderVisibility(services, configurations) -read_llap_daemon_yarn_cont_mb = long(self.get_yarn_min_container_size(services, configurations)) -putHiveInteractiveSiteProperty('hive.llap.daemon.yarn.container.mb', read_llap_daemon_yarn_cont_mb) -# initial memory setting to make sure hive.llap.daemon.yarn.container.mb >= yarn.scheduler.minimum-allocation-mb -Logger.debug("Adjusted 'hive.llap.daemon.yarn.container.mb' to yarn min container size as initial size " +if not services["changed-configurations"]: + read_llap_daemon_yarn_cont_mb = long(self.get_yarn_min_container_size(services, configurations)) + putHiveInteractiveSiteProperty('hive.llap.daemon.yarn.container.mb', read_llap_daemon_yarn_cont_mb) + # initial memory setting to make sure hive.llap.daemon.yarn.container.mb >= yarn.scheduler.minimum-allocation-mb + Logger.info("Adjusted 'hive.llap.daemon.yarn.container.mb' to yarn min container size as initial size " "(" + str(self.get_yarn_min_container_size(services, 
configurations)) + " MB).") try: