[ambari] branch branch-2.7 updated: AMBARI-25447. Multiple filter conditions not working in Ambari workflow manager view (sree) (#3160)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch branch-2.7 in repository https://gitbox.apache.org/repos/asf/ambari.git The following commit(s) were added to refs/heads/branch-2.7 by this push: new 8440289 AMBARI-25447. Multiple filter conditions not working in Ambari workflow manager view (sree) (#3160) 8440289 is described below commit 8440289188d25faa6fc1e4bdc7eb1e6e19ca53c9 Author: Sreenath Somarajapuram AuthorDate: Fri Dec 13 13:21:01 2019 +0530 AMBARI-25447. Multiple filter conditions not working in Ambari workflow manager view (sree) (#3160) --- .../src/main/java/org/apache/oozie/ambari/view/Utils.java | 7 +++ 1 file changed, 7 insertions(+) diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/Utils.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/Utils.java index 1ff68c9..c0459c6 100644 --- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/Utils.java +++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/Utils.java @@ -29,6 +29,8 @@ import java.util.List; import java.util.Map; import java.util.Set; import java.util.Map.Entry; +import java.net.URLEncoder; +import java.io.UnsupportedEncodingException; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.HttpHeaders; @@ -148,6 +150,11 @@ public class Utils { } boolean firstVal = true; for (String val : entry.getValue()) { + try { + val = URLEncoder.encode(val, "UTF-8"); + } catch(UnsupportedEncodingException e) { + LOGGER.error(e.getMessage(), e); + } urlBuilder.append(firstVal ? "" : "&").append(entry.getKey()) .append("=").append(val); firstVal = false;
[ambari] branch branch-2.7 updated: [AMBARI-25426] Error while Validating Coordinator xml in Workflow Manager View (asnaik) (#3153)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch branch-2.7 in repository https://gitbox.apache.org/repos/asf/ambari.git The following commit(s) were added to refs/heads/branch-2.7 by this push: new cb4a5b3 [AMBARI-25426] Error while Validating Coordinator xml in Workflow Manager View (asnaik) (#3153) cb4a5b3 is described below commit cb4a5b3eae14d35692e5136a3af642b57e5a658d Author: Asnaik HWX AuthorDate: Thu Dec 12 12:30:14 2019 +0530 [AMBARI-25426] Error while Validating Coordinator xml in Workflow Manager View (asnaik) (#3153) --- .../wfmanager/src/main/resources/ui/app/components/coord-config.js| 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/coord-config.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/coord-config.js index 1aeca5b..df3e834 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/components/coord-config.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/coord-config.js @@ -505,7 +505,9 @@ export default Ember.Component.extend(Validations, Ember.Evented, { deferred.promise.then(function(data){ var x2js = new X2JS(); var workflowJson = x2js.xml_str2json(data); -this.set('workflowName', workflowJson["workflow-app"]._name); +if(workflowJson["workflow-app"] && workflowJson["workflow-app"]._name){ + this.set('workflowName', workflowJson["workflow-app"]._name); +} var workflowProps = this.get('propertyExtractor').getDynamicProperties(data); var dynamicProperties = this.get('coordinatorConfigs.props'); workflowProps.forEach((prop)=>{
[ambari] branch branch-2.7 updated: AMBARI-25424 Failed to edit workflow from Ambari workflow Manager while accessing Ambari UI over Knox (#3152)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch branch-2.7 in repository https://gitbox.apache.org/repos/asf/ambari.git The following commit(s) were added to refs/heads/branch-2.7 by this push: new 93769b6 AMBARI-25424 Failed to edit workflow from Ambari workflow Manager while accessing Ambari UI over Knox (#3152) 93769b6 is described below commit 93769b68e00f45b1512ccdddb031637a5fbccd3a Author: Venkata Sairam Lanka AuthorDate: Mon Dec 9 11:59:44 2019 +0530 AMBARI-25424 Failed to edit workflow from Ambari workflow Manager while accessing Ambari UI over Knox (#3152) --- .../src/main/resources/ui/app/components/flow-designer.js | 13 + 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js index 2c77b5b..c5d2a45 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js @@ -350,7 +350,10 @@ export default Ember.Component.extend(FindNodeMixin, Validations, { var workflowXmlDefered=this.getWorkflowFromHdfs(filePath); workflowXmlDefered.promise.then(function(response){ if(response.type === 'xml'){ -this.importWorkflowFromString(response.data); +var x2js = new X2JS(); +let resData = x2js.json2xml_str(x2js.xml2json(response.data)); + +this.importWorkflowFromString(resData); }else { this.importWorkflowFromJSON(response.data); } @@ -423,10 +426,10 @@ export default Ember.Component.extend(FindNodeMixin, Validations, { Ember.$.ajax({ url: url, method: 'GET', - dataType: "text", beforeSend: function (xhr) { xhr.setRequestHeader("X-XSRF-HEADER", Math.round(Math.random()*10)); xhr.setRequestHeader("X-Requested-By", "Ambari"); +xhr.setRequestHeader("accept", "text/xml"); } }).done(function(data, status, xhr){ var type = 
xhr.getResponseHeader("response-type") === "xml" ? 'xml' : 'json'; @@ -604,9 +607,11 @@ export default Ember.Component.extend(FindNodeMixin, Validations, { getWorkflowAsJsonJsoGImpl(){ try{ var json=JSOG.stringify(this.get("workflow")), self = this; -var actionVersions = JSOG.stringify(CommonUtils.toArray(this.get("workflow").schemaVersions.actionVersions)); +var actionVersions = this.get("workflow").schemaVersions ? JSOG.stringify(CommonUtils.toArray(this.get("workflow").schemaVersions.actionVersions)) : []; var workflow = JSOG.parse(json); -workflow.schemaVersions.actionVersions = actionVersions +if(workflow.schemaVersions) { + workflow.schemaVersions.actionVersions = actionVersions +} return JSOG.stringify(workflow); }catch(err){ console.error(err);
[ambari] 01/01: AMBARI-25424 Failed to edit workflow from Ambari workflow Manager while accessing Ambari UI over Knox
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch AMBARI-25424-branch-2.7 in repository https://gitbox.apache.org/repos/asf/ambari.git commit e7ee83767daa1a8145b8d375ad2fe1a7e4d1a871 Author: vsairam AuthorDate: Fri Dec 6 19:14:11 2019 +0530 AMBARI-25424 Failed to edit workflow from Ambari workflow Manager while accessing Ambari UI over Knox --- .../src/main/resources/ui/app/components/flow-designer.js | 13 + 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js index 2c77b5b..c5d2a45 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js @@ -350,7 +350,10 @@ export default Ember.Component.extend(FindNodeMixin, Validations, { var workflowXmlDefered=this.getWorkflowFromHdfs(filePath); workflowXmlDefered.promise.then(function(response){ if(response.type === 'xml'){ -this.importWorkflowFromString(response.data); +var x2js = new X2JS(); +let resData = x2js.json2xml_str(x2js.xml2json(response.data)); + +this.importWorkflowFromString(resData); }else { this.importWorkflowFromJSON(response.data); } @@ -423,10 +426,10 @@ export default Ember.Component.extend(FindNodeMixin, Validations, { Ember.$.ajax({ url: url, method: 'GET', - dataType: "text", beforeSend: function (xhr) { xhr.setRequestHeader("X-XSRF-HEADER", Math.round(Math.random()*10)); xhr.setRequestHeader("X-Requested-By", "Ambari"); +xhr.setRequestHeader("accept", "text/xml"); } }).done(function(data, status, xhr){ var type = xhr.getResponseHeader("response-type") === "xml" ? 
'xml' : 'json'; @@ -604,9 +607,11 @@ export default Ember.Component.extend(FindNodeMixin, Validations, { getWorkflowAsJsonJsoGImpl(){ try{ var json=JSOG.stringify(this.get("workflow")), self = this; -var actionVersions = JSOG.stringify(CommonUtils.toArray(this.get("workflow").schemaVersions.actionVersions)); +var actionVersions = this.get("workflow").schemaVersions ? JSOG.stringify(CommonUtils.toArray(this.get("workflow").schemaVersions.actionVersions)) : []; var workflow = JSOG.parse(json); -workflow.schemaVersions.actionVersions = actionVersions +if(workflow.schemaVersions) { + workflow.schemaVersions.actionVersions = actionVersions +} return JSOG.stringify(workflow); }catch(err){ console.error(err);
[ambari] branch AMBARI-25424-branch-2.7 created (now 4dfecf1)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch AMBARI-25424-branch-2.7 in repository https://gitbox.apache.org/repos/asf/ambari.git. at 4dfecf1 AMBARI-25424 Failed to edit workflow from Ambari workflow Manager while accessing Ambari UI over Knox This branch includes the following new commits: new 4dfecf1 AMBARI-25424 Failed to edit workflow from Ambari workflow Manager while accessing Ambari UI over Knox The 1 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference.
[ambari] 01/01: AMBARI-25424 Failed to edit workflow from Ambari workflow Manager while accessing Ambari UI over Knox
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch AMBARI-25424-branch-2.7 in repository https://gitbox.apache.org/repos/asf/ambari.git commit 4dfecf12158e4f7927bc6d7cea04115baf03c752 Author: vsairam AuthorDate: Fri Dec 6 18:50:52 2019 +0530 AMBARI-25424 Failed to edit workflow from Ambari workflow Manager while accessing Ambari UI over Knox --- .../src/main/resources/ui/app/components/flow-designer.js | 13 + 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js index 2c77b5b..c5d2a45 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js @@ -350,7 +350,10 @@ export default Ember.Component.extend(FindNodeMixin, Validations, { var workflowXmlDefered=this.getWorkflowFromHdfs(filePath); workflowXmlDefered.promise.then(function(response){ if(response.type === 'xml'){ -this.importWorkflowFromString(response.data); +var x2js = new X2JS(); +let resData = x2js.json2xml_str(x2js.xml2json(response.data)); + +this.importWorkflowFromString(resData); }else { this.importWorkflowFromJSON(response.data); } @@ -423,10 +426,10 @@ export default Ember.Component.extend(FindNodeMixin, Validations, { Ember.$.ajax({ url: url, method: 'GET', - dataType: "text", beforeSend: function (xhr) { xhr.setRequestHeader("X-XSRF-HEADER", Math.round(Math.random()*10)); xhr.setRequestHeader("X-Requested-By", "Ambari"); +xhr.setRequestHeader("accept", "text/xml"); } }).done(function(data, status, xhr){ var type = xhr.getResponseHeader("response-type") === "xml" ? 
'xml' : 'json'; @@ -604,9 +607,11 @@ export default Ember.Component.extend(FindNodeMixin, Validations, { getWorkflowAsJsonJsoGImpl(){ try{ var json=JSOG.stringify(this.get("workflow")), self = this; -var actionVersions = JSOG.stringify(CommonUtils.toArray(this.get("workflow").schemaVersions.actionVersions)); +var actionVersions = this.get("workflow").schemaVersions ? JSOG.stringify(CommonUtils.toArray(this.get("workflow").schemaVersions.actionVersions)) : []; var workflow = JSOG.parse(json); -workflow.schemaVersions.actionVersions = actionVersions +if(workflow.schemaVersions) { + workflow.schemaVersions.actionVersions = actionVersions +} return JSOG.stringify(workflow); }catch(err){ console.error(err);
[ambari] branch AMBARI-25190-trunk deleted (was 3da0487)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch AMBARI-25190-trunk in repository https://gitbox.apache.org/repos/asf/ambari.git. was 3da0487 Global Configurations defined in workflow XML are being lost during Import workflow (or) during Reset workflow The revisions that were on this branch are still contained in other references; therefore, this change does not discard any commits from the repository.
[ambari] branch AMBARI-25190-branch-2.6 deleted (was f3c4e56)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch AMBARI-25190-branch-2.6 in repository https://gitbox.apache.org/repos/asf/ambari.git. was f3c4e56 Global Configurations defined in workflow XML are being lost during Import workflow (or) during Reset workflow The revisions that were on this branch are still contained in other references; therefore, this change does not discard any commits from the repository.
[ambari] branch AMBARI-25190-branch-2.7 deleted (was 483e7e1)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch AMBARI-25190-branch-2.7 in repository https://gitbox.apache.org/repos/asf/ambari.git. was 483e7e1 Global Configurations defined in workflow XML are being lost during Import workflow (or) during Reset workflow The revisions that were on this branch are still contained in other references; therefore, this change does not discard any commits from the repository.
[ambari] branch branch-2.7 updated: Global Configurations defined in workflow XML are being lost during Import workflow (or) during Reset workflow (#2857)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch branch-2.7 in repository https://gitbox.apache.org/repos/asf/ambari.git The following commit(s) were added to refs/heads/branch-2.7 by this push: new 98fc890 Global Configurations defined in workflow XML are being lost during Import workflow (or) during Reset workflow (#2857) 98fc890 is described below commit 98fc8906434a7db995e578a6bb75cb5c0f701f74 Author: Venkata Sairam Lanka AuthorDate: Wed Mar 13 10:50:53 2019 +0530 Global Configurations defined in workflow XML are being lost during Import workflow (or) during Reset workflow (#2857) --- .../src/main/resources/ui/app/domain/workflow-importer.js| 9 + 1 file changed, 9 insertions(+) diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js index 2afc304..dfc6875 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js @@ -63,6 +63,15 @@ var WorkflowImporter= Ember.Object.extend({ var nodeMap=this.setupNodeMap(workflowAppJson,workflow,Ember.$(xmlDoc)); this.setupTransitions(workflowAppJson,nodeMap); workflow.set("startNode",nodeMap.get("start").node); +let globalProperties = workflowJson["workflow-app"].global.configuration.property; +if(workflowJson["workflow-app"].global) { + if(Ember.isArray(globalProperties)) { +workflow.set("globalSetting", workflowJson["workflow-app"].global); + } else { +workflow.set("globalSetting", {configuration : { property:[globalProperties] }} ); + } +} + this.populateKillNodes(workflow,nodeMap); return {workflow: workflow, errors: errors}; },
[ambari] branch branch-2.6 updated: Global Configurations defined in workflow XML are being lost during Import workflow (or) during Reset workflow (#2858)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch branch-2.6 in repository https://gitbox.apache.org/repos/asf/ambari.git The following commit(s) were added to refs/heads/branch-2.6 by this push: new 6c79b47 Global Configurations defined in workflow XML are being lost during Import workflow (or) during Reset workflow (#2858) 6c79b47 is described below commit 6c79b47e053140a34b49241a629602a1423e0ebe Author: Venkata Sairam Lanka AuthorDate: Wed Mar 13 10:50:59 2019 +0530 Global Configurations defined in workflow XML are being lost during Import workflow (or) during Reset workflow (#2858) --- .../src/main/resources/ui/app/domain/workflow-importer.js| 9 + 1 file changed, 9 insertions(+) diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js index 2afc304..dfc6875 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js @@ -63,6 +63,15 @@ var WorkflowImporter= Ember.Object.extend({ var nodeMap=this.setupNodeMap(workflowAppJson,workflow,Ember.$(xmlDoc)); this.setupTransitions(workflowAppJson,nodeMap); workflow.set("startNode",nodeMap.get("start").node); +let globalProperties = workflowJson["workflow-app"].global.configuration.property; +if(workflowJson["workflow-app"].global) { + if(Ember.isArray(globalProperties)) { +workflow.set("globalSetting", workflowJson["workflow-app"].global); + } else { +workflow.set("globalSetting", {configuration : { property:[globalProperties] }} ); + } +} + this.populateKillNodes(workflow,nodeMap); return {workflow: workflow, errors: errors}; },
[ambari] branch trunk updated: "Global Configurations" defined in workflow XML are being lost during Import workflow (or) during "Reset workflow" (#2856)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/ambari.git The following commit(s) were added to refs/heads/trunk by this push: new e366b43 "Global Configurations" defined in workflow XML are being lost during Import workflow (or) during "Reset workflow" (#2856) e366b43 is described below commit e366b43f499812df6005bb51f295bfd6a5f642fa Author: Venkata Sairam Lanka AuthorDate: Wed Mar 13 10:50:44 2019 +0530 "Global Configurations" defined in workflow XML are being lost during Import workflow (or) during "Reset workflow" (#2856) * wfm spark name node appending * Global Configurations defined in workflow XML are being lost during Import workflow (or) during Reset workflow --- .../main/resources/ui/app/components/spark-action.js | 18 ++ .../main/resources/ui/app/domain/workflow-importer.js | 9 + .../src/main/resources/ui/app/styles/app.less | 3 +++ .../ui/app/templates/components/spark-action.hbs | 7 +++ 4 files changed, 37 insertions(+) diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/spark-action.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/spark-action.js index 1a778c4..513b77c 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/components/spark-action.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/spark-action.js @@ -54,6 +54,7 @@ export default Ember.Component.extend(Validations,{ this.set('isJar', this.get('actionModel.jar') && this.get('actionModel.jar').endsWith('.jar')); this.sendAction('register','sparkAction', this); }.on('init'), + appendNameNode: false, initialize : function(){ this.on('fileSelected',function(fileName){ this.set(this.get('filePathModel'), fileName); @@ -105,6 +106,20 @@ export default Ember.Component.extend(Validations,{ this.$('#collapseOne').collapse('show'); } }.on('didUpdate'), + validateJarPathAndAppend() { + let nameNode = 
this.get('actionModel.nameNode'), jar = this.get('actionModel.jar'); + if(!jar) { +return; + } + this.toggleProperty('appendNameNode'); + if(!jar.startsWith('${nameNode}') && this.get('appendNameNode')) { +this.set('actionModel.jar', `${nameNode}${jar}`); + } else if(jar.startsWith('${nameNode}') && this.get('appendNameNode')) { +this.set('actionModel.jar', `${jar}`); + } else { +this.set('actionModel.jar', jar.replace('${nameNode}', '')); + } + }, actions : { openFileBrowser(model, context){ if(undefined === context){ @@ -116,6 +131,9 @@ export default Ember.Component.extend(Validations,{ register (name, context){ this.sendAction('register',name , context); }, +appendNamenode() { + this.validateJarPathAndAppend(); +}, onMasterChange (elt){ var value = this.$(elt).val(); if(value !== 'other'){ diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js index 2afc304..dfc6875 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js @@ -63,6 +63,15 @@ var WorkflowImporter= Ember.Object.extend({ var nodeMap=this.setupNodeMap(workflowAppJson,workflow,Ember.$(xmlDoc)); this.setupTransitions(workflowAppJson,nodeMap); workflow.set("startNode",nodeMap.get("start").node); +let globalProperties = workflowJson["workflow-app"].global.configuration.property; +if(workflowJson["workflow-app"].global) { + if(Ember.isArray(globalProperties)) { +workflow.set("globalSetting", workflowJson["workflow-app"].global); + } else { +workflow.set("globalSetting", {configuration : { property:[globalProperties] }} ); + } +} + this.populateKillNodes(workflow,nodeMap); return {workflow: workflow, errors: errors}; }, diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less b/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less index 
9a35aca..95480d9 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less +++ b/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less @@ -1824,4 +1824,7 @@ input:invalid { .note-info { position: relative; top: 10px; +} +.spark-namenode { + top: 15px; } \ No newline at end of file diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/spark-action.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/spark-action.hbs index c8f14
[ambari] 01/01: Global Configurations defined in workflow XML are being lost during Import workflow (or) during Reset workflow
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch AMBARI-25190-branch-2.7 in repository https://gitbox.apache.org/repos/asf/ambari.git commit 483e7e11402d126078733487b6de3659b12bef4d Author: Venkata Sairam AuthorDate: Tue Mar 12 15:35:12 2019 +0530 Global Configurations defined in workflow XML are being lost during Import workflow (or) during Reset workflow --- .../src/main/resources/ui/app/domain/workflow-importer.js| 9 + 1 file changed, 9 insertions(+) diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js index 2afc304..dfc6875 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js @@ -63,6 +63,15 @@ var WorkflowImporter= Ember.Object.extend({ var nodeMap=this.setupNodeMap(workflowAppJson,workflow,Ember.$(xmlDoc)); this.setupTransitions(workflowAppJson,nodeMap); workflow.set("startNode",nodeMap.get("start").node); +let globalProperties = workflowJson["workflow-app"].global.configuration.property; +if(workflowJson["workflow-app"].global) { + if(Ember.isArray(globalProperties)) { +workflow.set("globalSetting", workflowJson["workflow-app"].global); + } else { +workflow.set("globalSetting", {configuration : { property:[globalProperties] }} ); + } +} + this.populateKillNodes(workflow,nodeMap); return {workflow: workflow, errors: errors}; },
[ambari] branch AMBARI-25190-branch-2.6 created (now f3c4e56)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch AMBARI-25190-branch-2.6 in repository https://gitbox.apache.org/repos/asf/ambari.git. at f3c4e56 Global Configurations defined in workflow XML are being lost during Import workflow (or) during Reset workflow This branch includes the following new commits: new f3c4e56 Global Configurations defined in workflow XML are being lost during Import workflow (or) during Reset workflow The 1 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference.
[ambari] branch AMBARI-25190-branch-2.7 created (now 483e7e1)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch AMBARI-25190-branch-2.7 in repository https://gitbox.apache.org/repos/asf/ambari.git. at 483e7e1 Global Configurations defined in workflow XML are being lost during Import workflow (or) during Reset workflow This branch includes the following new commits: new 483e7e1 Global Configurations defined in workflow XML are being lost during Import workflow (or) during Reset workflow The 1 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference.
[ambari] 01/01: Global Configurations defined in workflow XML are being lost during Import workflow (or) during Reset workflow
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch AMBARI-25190-branch-2.6 in repository https://gitbox.apache.org/repos/asf/ambari.git commit f3c4e5687a5cf7ae6529a376d22a9ea9e53ef411 Author: Venkata Sairam AuthorDate: Tue Mar 12 15:35:12 2019 +0530 Global Configurations defined in workflow XML are being lost during Import workflow (or) during Reset workflow --- .../src/main/resources/ui/app/domain/workflow-importer.js| 9 + 1 file changed, 9 insertions(+) diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js index 2afc304..dfc6875 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js @@ -63,6 +63,15 @@ var WorkflowImporter= Ember.Object.extend({ var nodeMap=this.setupNodeMap(workflowAppJson,workflow,Ember.$(xmlDoc)); this.setupTransitions(workflowAppJson,nodeMap); workflow.set("startNode",nodeMap.get("start").node); +let globalProperties = workflowJson["workflow-app"].global.configuration.property; +if(workflowJson["workflow-app"].global) { + if(Ember.isArray(globalProperties)) { +workflow.set("globalSetting", workflowJson["workflow-app"].global); + } else { +workflow.set("globalSetting", {configuration : { property:[globalProperties] }} ); + } +} + this.populateKillNodes(workflow,nodeMap); return {workflow: workflow, errors: errors}; },
[ambari] 02/02: Global Configurations defined in workflow XML are being lost during Import workflow (or) during Reset workflow
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch AMBARI-25190-trunk in repository https://gitbox.apache.org/repos/asf/ambari.git commit 3da04876b6be41e9bd3b783a3579e35311a548c5 Author: Venkata Sairam AuthorDate: Tue Mar 12 15:35:12 2019 +0530 Global Configurations defined in workflow XML are being lost during Import workflow (or) during Reset workflow --- .../src/main/resources/ui/app/domain/workflow-importer.js| 9 + 1 file changed, 9 insertions(+) diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js index 2afc304..dfc6875 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js @@ -63,6 +63,15 @@ var WorkflowImporter= Ember.Object.extend({ var nodeMap=this.setupNodeMap(workflowAppJson,workflow,Ember.$(xmlDoc)); this.setupTransitions(workflowAppJson,nodeMap); workflow.set("startNode",nodeMap.get("start").node); +let globalProperties = workflowJson["workflow-app"].global.configuration.property; +if(workflowJson["workflow-app"].global) { + if(Ember.isArray(globalProperties)) { +workflow.set("globalSetting", workflowJson["workflow-app"].global); + } else { +workflow.set("globalSetting", {configuration : { property:[globalProperties] }} ); + } +} + this.populateKillNodes(workflow,nodeMap); return {workflow: workflow, errors: errors}; },
[ambari] branch AMBARI-25190-trunk created (now 3da0487)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch AMBARI-25190-trunk in repository https://gitbox.apache.org/repos/asf/ambari.git. at 3da0487 Global Configurations defined in workflow XML are being lost during Import workflow (or) during Reset workflow This branch includes the following new commits: new 4a2ea5b wfm spark name node appending new 3da0487 Global Configurations defined in workflow XML are being lost during Import workflow (or) during Reset workflow The 2 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference.
[ambari] 01/02: wfm spark name node appending
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch AMBARI-25190-trunk in repository https://gitbox.apache.org/repos/asf/ambari.git commit 4a2ea5b4dfac8009ea4f0bcbddb84abbf961f0a5 Author: Venkata Sairam AuthorDate: Thu Aug 16 17:34:13 2018 +0530 wfm spark name node appending --- .../main/resources/ui/app/components/spark-action.js | 18 ++ .../src/main/resources/ui/app/styles/app.less | 3 +++ .../ui/app/templates/components/spark-action.hbs | 7 +++ 3 files changed, 28 insertions(+) diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/spark-action.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/spark-action.js index 1a778c4..513b77c 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/components/spark-action.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/spark-action.js @@ -54,6 +54,7 @@ export default Ember.Component.extend(Validations,{ this.set('isJar', this.get('actionModel.jar') && this.get('actionModel.jar').endsWith('.jar')); this.sendAction('register','sparkAction', this); }.on('init'), + appendNameNode: false, initialize : function(){ this.on('fileSelected',function(fileName){ this.set(this.get('filePathModel'), fileName); @@ -105,6 +106,20 @@ export default Ember.Component.extend(Validations,{ this.$('#collapseOne').collapse('show'); } }.on('didUpdate'), + validateJarPathAndAppend() { + let nameNode = this.get('actionModel.nameNode'), jar = this.get('actionModel.jar'); + if(!jar) { +return; + } + this.toggleProperty('appendNameNode'); + if(!jar.startsWith('${nameNode}') && this.get('appendNameNode')) { +this.set('actionModel.jar', `${nameNode}${jar}`); + } else if(jar.startsWith('${nameNode}') && this.get('appendNameNode')) { +this.set('actionModel.jar', `${jar}`); + } else { +this.set('actionModel.jar', jar.replace('${nameNode}', '')); + } + }, actions : { openFileBrowser(model, context){ if(undefined === context){ @@ -116,6 +131,9 @@ 
export default Ember.Component.extend(Validations,{ register (name, context){ this.sendAction('register',name , context); }, +appendNamenode() { + this.validateJarPathAndAppend(); +}, onMasterChange (elt){ var value = this.$(elt).val(); if(value !== 'other'){ diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less b/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less index 9a35aca..95480d9 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less +++ b/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less @@ -1824,4 +1824,7 @@ input:invalid { .note-info { position: relative; top: 10px; +} +.spark-namenode { + top: 15px; } \ No newline at end of file diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/spark-action.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/spark-action.hbs index c8f14d3..bebe3d3 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/spark-action.hbs +++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/spark-action.hbs @@ -49,6 +49,13 @@ {{/if}} + Append master* + + + + + + Runs On* {{#each mastersList as |master|}}
[ambari] branch branch-2.6 updated: AMBARI-24152 Ambari Workflow Manager (wfmanager) sends plaintext content over API. JSON is expected. (#1659) (#1715) (#2781)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch branch-2.6 in repository https://gitbox.apache.org/repos/asf/ambari.git The following commit(s) were added to refs/heads/branch-2.6 by this push: new 4afe5b5 AMBARI-24152 Ambari Workflow Manager (wfmanager) sends plaintext content over API. JSON is expected. (#1659) (#1715) (#2781) 4afe5b5 is described below commit 4afe5b518998ac54e6209027f89b508c3b18d9a5 Author: Venkata Sairam Lanka AuthorDate: Mon Jan 28 11:28:04 2019 +0530 AMBARI-24152 Ambari Workflow Manager (wfmanager) sends plaintext content over API. JSON is expected. (#1659) (#1715) (#2781) --- .../org/apache/oozie/ambari/view/OozieProxyImpersonator.java | 12 +++- .../src/main/resources/ui/app/services/user-info.js | 5 +++-- contrib/views/wfmanager/src/main/resources/ui/package.json | 2 +- contrib/views/wfmanager/src/main/resources/ui/yarn.lock | 8 4 files changed, 19 insertions(+), 8 deletions(-) diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java index 8d2b5a5..15a1042 100644 --- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java +++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java @@ -58,6 +58,11 @@ import org.slf4j.LoggerFactory; import com.google.inject.Singleton; +import org.json.simple.JSONObject; + + + + /** * This is a class used to bridge the communication between the and the Oozie * API executing inside ambari. 
@@ -154,7 +159,12 @@ public class OozieProxyImpersonator { @GET @Path("/getCurrentUserName") public Response getCurrentUserName() { -return Response.ok(viewContext.getUsername()).build(); + +JSONObject obj = new JSONObject(); + +obj.put("username", viewContext.getUsername()); + +return Response.ok(obj).build(); } @GET diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/services/user-info.js b/contrib/views/wfmanager/src/main/resources/ui/app/services/user-info.js index 701f953..3c1c5c5 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/services/user-info.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/services/user-info.js @@ -37,8 +37,9 @@ export default Ember.Service.extend({ xhr.setRequestHeader("X-Requested-By", "Ambari"); } }).done(function(data){ -self.set("userName", data); -deferred.resolve(data); +let uname = JSON.parse(data).username; +self.set("userName", JSON.parse(data).username); +deferred.resolve(uname); }).fail(function(data){ self.set("userName", ""); deferred.reject(data); diff --git a/contrib/views/wfmanager/src/main/resources/ui/package.json b/contrib/views/wfmanager/src/main/resources/ui/package.json index 25ed6c1..8405047 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/package.json +++ b/contrib/views/wfmanager/src/main/resources/ui/package.json @@ -19,7 +19,7 @@ "author": "", "license": "MIT", "devDependencies": { -"bower": "^1.7.7", +"bower": "1.8.4", "broccoli-asset-rev": "^2.2.0", "ember-ajax": "0.7.1", "ember-cli": "2.3.0", diff --git a/contrib/views/wfmanager/src/main/resources/ui/yarn.lock b/contrib/views/wfmanager/src/main/resources/ui/yarn.lock index f3602c9..e10f44b 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/yarn.lock +++ b/contrib/views/wfmanager/src/main/resources/ui/yarn.lock @@ -588,7 +588,7 @@ babel-plugin-transform-es2015-block-scoped-functions@^6.22.0: dependencies: babel-runtime "^6.22.0" -babel-plugin-transform-es2015-block-scoping@^6.23.0: 
+babel-plugin-transform-es2015-block-scoping@^6.23.0, babel-plugin-transform-es2015-block-scoping@^6.24.1: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-block-scoping/-/babel-plugin-transform-es2015-block-scoping-6.24.1.tgz#76c295dc3a4741b1665adfd3167215dcff32a576; dependencies: @@ -989,9 +989,9 @@ bower-endpoint-parser@0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/bower-endpoint-parser/-/bower-endpoint-parser-0.2.2.tgz#00b565adbfab6f2d35addde977e97962acbcb3f6; -bower@^1.3.12, bower@^1.7.7: - version "1.8.0" - resolved "https://registry.yarnpkg.com/bower/-/bower-1.8.0.tgz#55dbebef0ad9155382d9e9d3e497c1372345b44a; +bower@1.8.4, bower@^1.3.12: + version "1.8.4" + resolved "https://registry.yarnpkg.com/bower/-/bower-1.8.4.tgz#e7876a076deb8137f7d06525dc5e8c66db82f28a; brace-expansion@^1.0.0: version "1.1.7"
[ambari] branch trunk updated: AMBARI-24819 do not allow creation of new hive actions in WFM (#2506)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/ambari.git The following commit(s) were added to refs/heads/trunk by this push: new 29450c4 AMBARI-24819 do not allow creation of new hive actions in WFM (#2506) 29450c4 is described below commit 29450c4ebb99cd6fd9b7fc90c2bc15a1daa2bac8 Author: Venkata Sairam Lanka AuthorDate: Wed Oct 24 15:36:28 2018 +0530 AMBARI-24819 do not allow creation of new hive actions in WFM (#2506) --- .../src/main/resources/ui/app/templates/components/workflow-actions.hbs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-actions.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-actions.hbs index 8907fdd..bf192b2 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-actions.hbs +++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-actions.hbs @@ -39,7 +39,7 @@ Import Asset from Shared File System -Hive +{{!-- Hive --}} Hive2 Sqoop Pig
[ambari] branch branch-2.7 updated: AMBARI-24819 do not allow creation of new hive actions in WFM (#2507)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch branch-2.7 in repository https://gitbox.apache.org/repos/asf/ambari.git The following commit(s) were added to refs/heads/branch-2.7 by this push: new 6434601 AMBARI-24819 do not allow creation of new hive actions in WFM (#2507) 6434601 is described below commit 64346015f36ea0f466cd9bae0c5f9c875b3292ad Author: Venkata Sairam Lanka AuthorDate: Wed Oct 24 15:36:19 2018 +0530 AMBARI-24819 do not allow creation of new hive actions in WFM (#2507) --- .../src/main/resources/ui/app/templates/components/workflow-actions.hbs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-actions.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-actions.hbs index 8907fdd..bf192b2 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-actions.hbs +++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-actions.hbs @@ -39,7 +39,7 @@ Import Asset from Shared File System -Hive +{{!-- Hive --}} Hive2 Sqoop Pig
[ambari] branch branch-2.7 updated: AMBARI-24387 - Support YARN Application timeout feature in Ambari Capacity Scheduler View (#1959)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch branch-2.7 in repository https://gitbox.apache.org/repos/asf/ambari.git The following commit(s) were added to refs/heads/branch-2.7 by this push: new 7bed0e8 AMBARI-24387 - Support YARN Application timeout feature in Ambari Capaacity Scheduler View (#1959) 7bed0e8 is described below commit 7bed0e8b4903d0d72cda6c065a6267810471a4bb Author: Akhil S Naik AuthorDate: Wed Aug 8 14:05:28 2018 +0530 AMBARI-24387 - Support YARN Application timeout feature in Ambari Capaacity Scheduler View (#1959) * AMBARI-24387 - Support YARN Application timeout feature in Ambari Capacity Scheduler View * AMBARI-24387 - Support YARN Application timeout feature in Ambari Capaacity Scheduler View (asnaik) --- .../src/main/resources/ui/app/models/queue.js | 14 - .../src/main/resources/ui/app/serializers.js | 6 +- .../src/main/resources/ui/app/templates/queue.hbs | 68 ++ 3 files changed, 86 insertions(+), 2 deletions(-) diff --git a/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js b/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js index 37d9716..ddfe0db 100644 --- a/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js +++ b/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js @@ -258,6 +258,8 @@ App.Queue = DS.Model.extend({ priority: DS.attr('number', {defaultValue: 0}), maximum_allocation_mb:DS.attr('number'), maximum_allocation_vcores:DS.attr('number'), + maximum_application_lifetime:DS.attr('number'), + default_application_lifetime:DS.attr('number'), disable_preemption: DS.attr('string', {defaultValue: ''}), isPreemptionInherited: DS.attr('boolean', {defaultValue: true}), @@ -340,5 +342,15 @@ App.Queue = DS.Model.extend({ isLeafQ: function() { return this.get('queues') === null; - }.property('queues') + }.property('queues'), + + /** + * To reset the maximum_application_lifetime and 
default_application_lifetime if current Q is no longer Leaf Queue + */ + watchChangeLeafQueue: function () { +if (this.get('isLeafQ') == false) { + this.set('maximum_application_lifetime', null); + this.set('default_application_lifetime', null); +} + }.observes('isLeafQ') }); diff --git a/contrib/views/capacity-scheduler/src/main/resources/ui/app/serializers.js b/contrib/views/capacity-scheduler/src/main/resources/ui/app/serializers.js index edcf5d1..91ca01f 100644 --- a/contrib/views/capacity-scheduler/src/main/resources/ui/app/serializers.js +++ b/contrib/views/capacity-scheduler/src/main/resources/ui/app/serializers.js @@ -109,7 +109,9 @@ App.SerializerMixin = Em.Mixin.create({ disable_preemption:props[base_path + '.disable_preemption'] || '', isPreemptionInherited: (props[base_path + '.disable_preemption'] !== undefined)?false:true, maximum_allocation_mb: props[base_path + '.maximum-allocation-mb'] || null, - maximum_allocation_vcores: props[base_path + '.maximum-allocation-vcores'] || null + maximum_allocation_vcores: props[base_path + '.maximum-allocation-vcores'] || null, + maximum_application_lifetime: props[base_path + '.maximum-application-lifetime'] || null, + default_application_lifetime: props[base_path + '.default-application-lifetime'] || null }; //Converting capacity and max-capacity into two decimal point float numbers @@ -233,6 +235,8 @@ App.QueueSerializer = DS.RESTSerializer.extend(App.SerializerMixin,{ json[this.PREFIX + "." + record.get('path') + ".ordering-policy"] = record.get('ordering_policy')||null; json[this.PREFIX + "." + record.get('path') + ".maximum-allocation-mb"] = record.get('maximum_allocation_mb') || null; json[this.PREFIX + "." + record.get('path') + ".maximum-allocation-vcores"] = record.get('maximum_allocation_vcores') || null; +json[this.PREFIX + "." + record.get('path') + ".maximum-application-lifetime"] = record.get('maximum_application_lifetime') || null; +json[this.PREFIX + "." 
+ record.get('path') + ".default-application-lifetime"] = record.get('default_application_lifetime') || null; if (record.get('ordering_policy') == 'fair') { json[this.PREFIX + "." + record.get('path') + ".ordering-policy.fair.enable-size-based-weight"] = record.get('enable_size_based_weight'); diff --git a/contrib/views/capacity-scheduler/src/main/resources/ui/app/templates/queue.hbs b/contrib/views/capacity-scheduler/src/main/resources/ui/app/templates/queue.hbs index 802b09c..2d2ce06 100644 --- a/contrib/views/capacity-scheduler/src/main/resources/ui/app/tem
[ambari] branch trunk updated: AMBARI-24387 - Support YARN Application timeout feature in Ambari Capacity Scheduler View (#1924)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/ambari.git The following commit(s) were added to refs/heads/trunk by this push: new 4912556 AMBARI-24387 - Support YARN Application timeout feature in Ambari Capaacity Scheduler View (#1924) 4912556 is described below commit 49125564ffb60e1568a8eee19e07a431036f2a7d Author: Akhil S Naik AuthorDate: Wed Aug 8 14:05:17 2018 +0530 AMBARI-24387 - Support YARN Application timeout feature in Ambari Capaacity Scheduler View (#1924) * AMBARI-24387 - Support YARN Application timeout feature in Ambari Capacity Scheduler View * AMBARI-24387 - Support YARN Application timeout feature in Ambari Capacity Scheduler View change MAximum Lifetime to MAximum Application lifetime (asnaik) * Revert git ignore -- AMBARI-24387 - Support YARN Application timeout feature in Ambari Capacity Scheduler View change MAximum Lifetime to MAximum Application lifetime (asnaik) --- .../src/main/resources/ui/app/models/queue.js | 14 - .../src/main/resources/ui/app/serializers.js | 6 +- .../src/main/resources/ui/app/templates/queue.hbs | 68 ++ 3 files changed, 86 insertions(+), 2 deletions(-) diff --git a/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js b/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js index 37d9716..ddfe0db 100644 --- a/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js +++ b/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js @@ -258,6 +258,8 @@ App.Queue = DS.Model.extend({ priority: DS.attr('number', {defaultValue: 0}), maximum_allocation_mb:DS.attr('number'), maximum_allocation_vcores:DS.attr('number'), + maximum_application_lifetime:DS.attr('number'), + default_application_lifetime:DS.attr('number'), disable_preemption: DS.attr('string', {defaultValue: ''}), isPreemptionInherited: DS.attr('boolean', {defaultValue: true}), @@ 
-340,5 +342,15 @@ App.Queue = DS.Model.extend({ isLeafQ: function() { return this.get('queues') === null; - }.property('queues') + }.property('queues'), + + /** + * To reset the maximum_application_lifetime and default_application_lifetime if current Q is no longer Leaf Queue + */ + watchChangeLeafQueue: function () { +if (this.get('isLeafQ') == false) { + this.set('maximum_application_lifetime', null); + this.set('default_application_lifetime', null); +} + }.observes('isLeafQ') }); diff --git a/contrib/views/capacity-scheduler/src/main/resources/ui/app/serializers.js b/contrib/views/capacity-scheduler/src/main/resources/ui/app/serializers.js index edcf5d1..91ca01f 100644 --- a/contrib/views/capacity-scheduler/src/main/resources/ui/app/serializers.js +++ b/contrib/views/capacity-scheduler/src/main/resources/ui/app/serializers.js @@ -109,7 +109,9 @@ App.SerializerMixin = Em.Mixin.create({ disable_preemption:props[base_path + '.disable_preemption'] || '', isPreemptionInherited: (props[base_path + '.disable_preemption'] !== undefined)?false:true, maximum_allocation_mb: props[base_path + '.maximum-allocation-mb'] || null, - maximum_allocation_vcores: props[base_path + '.maximum-allocation-vcores'] || null + maximum_allocation_vcores: props[base_path + '.maximum-allocation-vcores'] || null, + maximum_application_lifetime: props[base_path + '.maximum-application-lifetime'] || null, + default_application_lifetime: props[base_path + '.default-application-lifetime'] || null }; //Converting capacity and max-capacity into two decimal point float numbers @@ -233,6 +235,8 @@ App.QueueSerializer = DS.RESTSerializer.extend(App.SerializerMixin,{ json[this.PREFIX + "." + record.get('path') + ".ordering-policy"] = record.get('ordering_policy')||null; json[this.PREFIX + "." + record.get('path') + ".maximum-allocation-mb"] = record.get('maximum_allocation_mb') || null; json[this.PREFIX + "." 
+ record.get('path') + ".maximum-allocation-vcores"] = record.get('maximum_allocation_vcores') || null; +json[this.PREFIX + "." + record.get('path') + ".maximum-application-lifetime"] = record.get('maximum_application_lifetime') || null; +json[this.PREFIX + "." + record.get('path') + ".default-application-lifetime"] = record.get('default_application_lifetime') || null; if (record.get('ordering_policy') == 'fair') { json[this.PREFIX + "." + record.get('path') + ".ordering-policy.fair.enable-size-based-weight"] = record.get('enable_size_based_weight'); diff --git a/contrib/views/capacity-scheduler/src/main/resources/ui/app/templ
[ambari] branch branch-2.7 updated: AMBARI-24299 [Yarn Queue Manager] Yarn Queue Manager View is resetting the value of some properties that are not defined in the UI (asnaik) (#1774) (#1934)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch branch-2.7 in repository https://gitbox.apache.org/repos/asf/ambari.git The following commit(s) were added to refs/heads/branch-2.7 by this push: new 0af78ff AMBARI-24299 [Yarn Queue Manager] Yarn Queue manager View is resetting value some properties that are not defined UI (asnaik) (#1774) (#1934) 0af78ff is described below commit 0af78ffee08e824822e7c46453bc7675358c5565 Author: Akhil S Naik AuthorDate: Fri Aug 3 16:03:12 2018 +0530 AMBARI-24299 [Yarn Queue Manager] Yarn Queue manager View is resetting value some properties that are not defined UI (asnaik) (#1774) (#1934) --- .../src/main/resources/ui/app/models/queue.js | 2 + .../src/main/resources/ui/app/serializers.js | 6 ++- .../src/main/resources/ui/app/templates/queue.hbs | 58 ++ 3 files changed, 65 insertions(+), 1 deletion(-) diff --git a/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js b/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js index e574159..37d9716 100644 --- a/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js +++ b/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js @@ -256,6 +256,8 @@ App.Queue = DS.Model.extend({ maximum_applications: DS.attr('number', { defaultValue: null }), maximum_am_resource_percent: DS.attr('number', { defaultValue: null }), priority: DS.attr('number', {defaultValue: 0}), + maximum_allocation_mb:DS.attr('number'), + maximum_allocation_vcores:DS.attr('number'), disable_preemption: DS.attr('string', {defaultValue: ''}), isPreemptionInherited: DS.attr('boolean', {defaultValue: true}), diff --git a/contrib/views/capacity-scheduler/src/main/resources/ui/app/serializers.js b/contrib/views/capacity-scheduler/src/main/resources/ui/app/serializers.js index 43d087d..edcf5d1 100644 --- a/contrib/views/capacity-scheduler/src/main/resources/ui/app/serializers.js +++ 
b/contrib/views/capacity-scheduler/src/main/resources/ui/app/serializers.js @@ -107,7 +107,9 @@ App.SerializerMixin = Em.Mixin.create({ priority: (props[base_path + ".priority"])? +props[base_path + ".priority"] : 0, labelsEnabled: props.hasOwnProperty(labelsPath), disable_preemption:props[base_path + '.disable_preemption'] || '', - isPreemptionInherited: (props[base_path + '.disable_preemption'] !== undefined)?false:true + isPreemptionInherited: (props[base_path + '.disable_preemption'] !== undefined)?false:true, + maximum_allocation_mb: props[base_path + '.maximum-allocation-mb'] || null, + maximum_allocation_vcores: props[base_path + '.maximum-allocation-vcores'] || null }; //Converting capacity and max-capacity into two decimal point float numbers @@ -229,6 +231,8 @@ App.QueueSerializer = DS.RESTSerializer.extend(App.SerializerMixin,{ json[this.PREFIX + "." + record.get('path') + ".queues"] = record.get('queues')||null; json[this.PREFIX + "." + record.get('path') + ".default-node-label-expression"] = record.get('default_node_label_expression')||null; json[this.PREFIX + "." + record.get('path') + ".ordering-policy"] = record.get('ordering_policy')||null; +json[this.PREFIX + "." + record.get('path') + ".maximum-allocation-mb"] = record.get('maximum_allocation_mb') || null; +json[this.PREFIX + "." + record.get('path') + ".maximum-allocation-vcores"] = record.get('maximum_allocation_vcores') || null; if (record.get('ordering_policy') == 'fair') { json[this.PREFIX + "." 
+ record.get('path') + ".ordering-policy.fair.enable-size-based-weight"] = record.get('enable_size_based_weight'); diff --git a/contrib/views/capacity-scheduler/src/main/resources/ui/app/templates/queue.hbs b/contrib/views/capacity-scheduler/src/main/resources/ui/app/templates/queue.hbs index dcfb84f..802b09c 100644 --- a/contrib/views/capacity-scheduler/src/main/resources/ui/app/templates/queue.hbs +++ b/contrib/views/capacity-scheduler/src/main/resources/ui/app/templates/queue.hbs @@ -456,6 +456,64 @@ {{/if}} {{/if}} + + {{tooltip-label + class="col-xs-6 control-label" + label='Maximum Allocation Vcores' + message='The per queue maximum limit of virtual cores to allocate to each container request at the Resource Manager.' + }} + {{#if isOperator}} + + {{int-input value=content.maximu
[ambari] branch trunk updated: AMBARI-24299 [Yarn Queue Manager] Yarn Queue Manager View is resetting the value of some properties that are not defined in the UI (asnaik) (#1774)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/ambari.git The following commit(s) were added to refs/heads/trunk by this push: new 5859561 AMBARI-24299 [Yarn Queue Manager] Yarn Queue manager View is resetting value some properties that are not defined UI (asnaik) (#1774) 5859561 is described below commit 58595617f6e4042107e95d81221df14fc8fe9a49 Author: Akhil S Naik AuthorDate: Mon Jul 30 20:20:11 2018 +0530 AMBARI-24299 [Yarn Queue Manager] Yarn Queue manager View is resetting value some properties that are not defined UI (asnaik) (#1774) --- .../src/main/resources/ui/app/models/queue.js | 2 + .../src/main/resources/ui/app/serializers.js | 6 ++- .../src/main/resources/ui/app/templates/queue.hbs | 58 ++ 3 files changed, 65 insertions(+), 1 deletion(-) diff --git a/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js b/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js index e574159..37d9716 100644 --- a/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js +++ b/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js @@ -256,6 +256,8 @@ App.Queue = DS.Model.extend({ maximum_applications: DS.attr('number', { defaultValue: null }), maximum_am_resource_percent: DS.attr('number', { defaultValue: null }), priority: DS.attr('number', {defaultValue: 0}), + maximum_allocation_mb:DS.attr('number'), + maximum_allocation_vcores:DS.attr('number'), disable_preemption: DS.attr('string', {defaultValue: ''}), isPreemptionInherited: DS.attr('boolean', {defaultValue: true}), diff --git a/contrib/views/capacity-scheduler/src/main/resources/ui/app/serializers.js b/contrib/views/capacity-scheduler/src/main/resources/ui/app/serializers.js index 43d087d..edcf5d1 100644 --- a/contrib/views/capacity-scheduler/src/main/resources/ui/app/serializers.js +++ 
b/contrib/views/capacity-scheduler/src/main/resources/ui/app/serializers.js @@ -107,7 +107,9 @@ App.SerializerMixin = Em.Mixin.create({ priority: (props[base_path + ".priority"])? +props[base_path + ".priority"] : 0, labelsEnabled: props.hasOwnProperty(labelsPath), disable_preemption:props[base_path + '.disable_preemption'] || '', - isPreemptionInherited: (props[base_path + '.disable_preemption'] !== undefined)?false:true + isPreemptionInherited: (props[base_path + '.disable_preemption'] !== undefined)?false:true, + maximum_allocation_mb: props[base_path + '.maximum-allocation-mb'] || null, + maximum_allocation_vcores: props[base_path + '.maximum-allocation-vcores'] || null }; //Converting capacity and max-capacity into two decimal point float numbers @@ -229,6 +231,8 @@ App.QueueSerializer = DS.RESTSerializer.extend(App.SerializerMixin,{ json[this.PREFIX + "." + record.get('path') + ".queues"] = record.get('queues')||null; json[this.PREFIX + "." + record.get('path') + ".default-node-label-expression"] = record.get('default_node_label_expression')||null; json[this.PREFIX + "." + record.get('path') + ".ordering-policy"] = record.get('ordering_policy')||null; +json[this.PREFIX + "." + record.get('path') + ".maximum-allocation-mb"] = record.get('maximum_allocation_mb') || null; +json[this.PREFIX + "." + record.get('path') + ".maximum-allocation-vcores"] = record.get('maximum_allocation_vcores') || null; if (record.get('ordering_policy') == 'fair') { json[this.PREFIX + "." 
+ record.get('path') + ".ordering-policy.fair.enable-size-based-weight"] = record.get('enable_size_based_weight'); diff --git a/contrib/views/capacity-scheduler/src/main/resources/ui/app/templates/queue.hbs b/contrib/views/capacity-scheduler/src/main/resources/ui/app/templates/queue.hbs index dcfb84f..802b09c 100644 --- a/contrib/views/capacity-scheduler/src/main/resources/ui/app/templates/queue.hbs +++ b/contrib/views/capacity-scheduler/src/main/resources/ui/app/templates/queue.hbs @@ -456,6 +456,64 @@ {{/if}} {{/if}} + + {{tooltip-label + class="col-xs-6 control-label" + label='Maximum Allocation Vcores' + message='The per queue maximum limit of virtual cores to allocate to each container request at the Resource Manager.' + }} + {{#if isOperator}} + + {{int-input value=content.maximu
[ambari] branch AMBARI-24152-branch-2.7 deleted (was 69aaaeb)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch AMBARI-24152-branch-2.7 in repository https://gitbox.apache.org/repos/asf/ambari.git. was 69aaaeb AMBARI-24152 Ambari Workflow Manager (wfmanager) sends plaintext content over API. JSON is expected. (#1659) The revisions that were on this branch are still contained in other references; therefore, this change does not discard any commits from the repository.
[ambari] branch branch-2.7 updated: AMBARI-24152 Ambari Workflow Manager (wfmanager) sends plaintext content over API. JSON is expected. (#1659) (#1715)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch branch-2.7 in repository https://gitbox.apache.org/repos/asf/ambari.git The following commit(s) were added to refs/heads/branch-2.7 by this push: new 9797350 AMBARI-24152 Ambari Workflow Manager (wfmanager) sends plaintext content over API. JSON is expected. (#1659) (#1715) 9797350 is described below commit 9797350332cc7181b266294dd8ddb09d9333b9d0 Author: Venkata Sairam Lanka AuthorDate: Mon Jul 9 13:12:15 2018 +0530 AMBARI-24152 Ambari Workflow Manager (wfmanager) sends plaintext content over API. JSON is expected. (#1659) (#1715) --- .../org/apache/oozie/ambari/view/OozieProxyImpersonator.java | 12 +++- .../src/main/resources/ui/app/services/user-info.js | 5 +++-- contrib/views/wfmanager/src/main/resources/ui/package.json | 2 +- contrib/views/wfmanager/src/main/resources/ui/yarn.lock | 8 4 files changed, 19 insertions(+), 8 deletions(-) diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java index 8d2b5a5..15a1042 100644 --- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java +++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java @@ -58,6 +58,11 @@ import org.slf4j.LoggerFactory; import com.google.inject.Singleton; +import org.json.simple.JSONObject; + + + + /** * This is a class used to bridge the communication between the and the Oozie * API executing inside ambari. 
@@ -154,7 +159,12 @@ public class OozieProxyImpersonator { @GET @Path("/getCurrentUserName") public Response getCurrentUserName() { -return Response.ok(viewContext.getUsername()).build(); + +JSONObject obj = new JSONObject(); + +obj.put("username", viewContext.getUsername()); + +return Response.ok(obj).build(); } @GET diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/services/user-info.js b/contrib/views/wfmanager/src/main/resources/ui/app/services/user-info.js index 701f953..3c1c5c5 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/services/user-info.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/services/user-info.js @@ -37,8 +37,9 @@ export default Ember.Service.extend({ xhr.setRequestHeader("X-Requested-By", "Ambari"); } }).done(function(data){ -self.set("userName", data); -deferred.resolve(data); +let uname = JSON.parse(data).username; +self.set("userName", JSON.parse(data).username); +deferred.resolve(uname); }).fail(function(data){ self.set("userName", ""); deferred.reject(data); diff --git a/contrib/views/wfmanager/src/main/resources/ui/package.json b/contrib/views/wfmanager/src/main/resources/ui/package.json index 69f43c8..18b4ae0 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/package.json +++ b/contrib/views/wfmanager/src/main/resources/ui/package.json @@ -19,7 +19,7 @@ "author": "", "license": "MIT", "devDependencies": { -"bower": "^1.7.7", +"bower": "1.8.4", "broccoli-asset-rev": "^2.2.0", "babel-plugin-transform-es2015-block-scoping": "^6.24.1", "ember-ajax": "0.7.1", diff --git a/contrib/views/wfmanager/src/main/resources/ui/yarn.lock b/contrib/views/wfmanager/src/main/resources/ui/yarn.lock index f3602c9..e10f44b 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/yarn.lock +++ b/contrib/views/wfmanager/src/main/resources/ui/yarn.lock @@ -588,7 +588,7 @@ babel-plugin-transform-es2015-block-scoped-functions@^6.22.0: dependencies: babel-runtime "^6.22.0" 
-babel-plugin-transform-es2015-block-scoping@^6.23.0: +babel-plugin-transform-es2015-block-scoping@^6.23.0, babel-plugin-transform-es2015-block-scoping@^6.24.1: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-block-scoping/-/babel-plugin-transform-es2015-block-scoping-6.24.1.tgz#76c295dc3a4741b1665adfd3167215dcff32a576; dependencies: @@ -989,9 +989,9 @@ bower-endpoint-parser@0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/bower-endpoint-parser/-/bower-endpoint-parser-0.2.2.tgz#00b565adbfab6f2d35addde977e97962acbcb3f6; -bower@^1.3.12, bower@^1.7.7: - version "1.8.0" - resolved "https://registry.yarnpkg.com/bower/-/bower-1.8.0.tgz#55dbebef0ad9155382d9e9d3e497c1372345b44a; +bower@1.8.4, bower@^1.3.12: + version "1.8.4" + resolved "https://registry.yarnpkg.com/bower/-/bower-1.8.4.tgz#e7876a076deb8137f7d06525dc5e8c66db82f28a; brace-expansion@^1.0.0: version "1.1.7"
[ambari] branch AMBARI-24266-branch-2.7 created (now a229094)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch AMBARI-24266-branch-2.7 in repository https://gitbox.apache.org/repos/asf/ambari.git. at a229094 AMBARI-24266 Error in Validating And Submitting Workflow With Node as Hive This branch includes the following new commits: new a229094 AMBARI-24266 Error in Validating And Submitting Workflow With Node as Hive The 1 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference.
[ambari] 01/01: AMBARI-24266 Error in Validating And Submitting Workflow With Node as Hive
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch AMBARI-24266-branch-2.7 in repository https://gitbox.apache.org/repos/asf/ambari.git commit a229094ad0af03a6232f994ff601573e9749e855 Author: Venkata Sairam AuthorDate: Mon Jul 9 12:33:39 2018 +0530 AMBARI-24266 Error in Validating And Submitting Workflow With Node as Hive --- .../main/resources/ui/app/components/flow-designer.js | 18 +- .../src/main/resources/ui/app/components/job-config.js | 3 ++- .../resources/ui/app/components/workflow-actions.js| 1 + .../resources/ui/app/domain/cytoscape-flow-renderer.js | 11 +-- .../ui/app/templates/components/flow-designer.hbs | 6 ++ .../ui/app/templates/components/workflow-actions.hbs | 4 +++- .../src/main/resources/ui/app/utils/constants.js | 1 + .../views/wfmanager/src/main/resources/ui/bower.json | 2 +- 8 files changed, 40 insertions(+), 6 deletions(-) diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js index 2c77b5b..730b8e9 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js @@ -365,7 +365,7 @@ export default Ember.Component.extend(FindNodeMixin, Validations, { }, importWorkflowFromString(data){ this.showSparkMasterFieldError(data); - +this.hiveActionStatus(data); var wfObject=this.get("workflowImporter").importWorkflow(data); this.set("errors", wfObject.errors); if (wfObject.workflow === null) { @@ -388,6 +388,7 @@ export default Ember.Component.extend(FindNodeMixin, Validations, { }, importWorkflowFromJSON(data){ this.showSparkMasterFieldError(data); +this.hiveActionStatus(data); var workflowImporter=WorkflowJsonImporter.create({}); var workflow=workflowImporter.importWorkflow(data); this.resetDesigner(); @@ -417,6 +418,21 @@ export default Ember.Component.extend(FindNodeMixin, 
Validations, { } } }, + hiveActionStatus(data) { +if(Constants.enableHiveAction) { + return; +} +let x2js = new X2JS(); +let actionSettingsObj = x2js.xml_str2json(data); +let hiveActionList, hiveActionArray = []; +if(actionSettingsObj["workflow-app"] && actionSettingsObj["workflow-app"].action) { + hiveActionList = actionSettingsObj["workflow-app"].action; + hiveActionArray = this.migrateActionObjectToCollection(hiveActionList); + if(hiveActionArray.findBy('hive') && this.migrateActionObjectToCollection(hiveActionArray.findBy('hive'))) { +this.set('isHiveActionDisabled', true); + } +} + }, getWorkflowFromHdfs(filePath){ var url = Ember.ENV.API_URL + "/readWorkflow?workflowPath="+filePath+'=WORKFLOW'; var deferred = Ember.RSVP.defer(); diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js index 326cf38..d1c3400 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js @@ -140,7 +140,8 @@ export default Ember.Component.extend(Validations, { } } } else { -val = self.get("workflowManagerConfigs").getWfmConfigs()[propName]; +let tmp = self.get("workflowManagerConfigs").getWfmConfigs(); +val = tmp ? 
tmp[propName] : ""; } var prop= Ember.Object.create({ name: propName, diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-actions.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-actions.js index 2f8cdaa..dfb1b16 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-actions.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-actions.js @@ -24,6 +24,7 @@ export default Ember.Component.extend({ }), initialize : function(){ this.set('customActionEnabled', Constants.customActionEnabled); +this.set('enableHiveAction', Constants.enableHiveAction); }.on('init'), actions : { addAction : function(type){ diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js index bee901e..8163844 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js @@ -17,6 +17,8 @@ import Ember from 'ember'; import CytoscapeStyles from '../domain/cytoscape-style'; +im
[ambari] 01/01: AMBARI-24152 Ambari Workflow Manager (wfmanager) sends plaintext content over API. JSON is expected. (#1659)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch AMBARI-24152-branch-2.7 in repository https://gitbox.apache.org/repos/asf/ambari.git commit 69aaaeb916327bb6dfb5f38cc9cbf99e31124a57 Author: Venkata Sairam Lanka AuthorDate: Mon Jul 9 12:21:17 2018 +0530 AMBARI-24152 Ambari Workflow Manager (wfmanager) sends plaintext content over API. JSON is expected. (#1659) --- .../org/apache/oozie/ambari/view/OozieProxyImpersonator.java | 12 +++- .../src/main/resources/ui/app/services/user-info.js | 5 +++-- contrib/views/wfmanager/src/main/resources/ui/package.json | 2 +- contrib/views/wfmanager/src/main/resources/ui/yarn.lock | 8 4 files changed, 19 insertions(+), 8 deletions(-) diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java index 8d2b5a5..15a1042 100644 --- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java +++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java @@ -58,6 +58,11 @@ import org.slf4j.LoggerFactory; import com.google.inject.Singleton; +import org.json.simple.JSONObject; + + + + /** * This is a class used to bridge the communication between the and the Oozie * API executing inside ambari. 
@@ -154,7 +159,12 @@ public class OozieProxyImpersonator { @GET @Path("/getCurrentUserName") public Response getCurrentUserName() { -return Response.ok(viewContext.getUsername()).build(); + +JSONObject obj = new JSONObject(); + +obj.put("username", viewContext.getUsername()); + +return Response.ok(obj).build(); } @GET diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/services/user-info.js b/contrib/views/wfmanager/src/main/resources/ui/app/services/user-info.js index 701f953..3c1c5c5 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/services/user-info.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/services/user-info.js @@ -37,8 +37,9 @@ export default Ember.Service.extend({ xhr.setRequestHeader("X-Requested-By", "Ambari"); } }).done(function(data){ -self.set("userName", data); -deferred.resolve(data); +let uname = JSON.parse(data).username; +self.set("userName", JSON.parse(data).username); +deferred.resolve(uname); }).fail(function(data){ self.set("userName", ""); deferred.reject(data); diff --git a/contrib/views/wfmanager/src/main/resources/ui/package.json b/contrib/views/wfmanager/src/main/resources/ui/package.json index 69f43c8..18b4ae0 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/package.json +++ b/contrib/views/wfmanager/src/main/resources/ui/package.json @@ -19,7 +19,7 @@ "author": "", "license": "MIT", "devDependencies": { -"bower": "^1.7.7", +"bower": "1.8.4", "broccoli-asset-rev": "^2.2.0", "babel-plugin-transform-es2015-block-scoping": "^6.24.1", "ember-ajax": "0.7.1", diff --git a/contrib/views/wfmanager/src/main/resources/ui/yarn.lock b/contrib/views/wfmanager/src/main/resources/ui/yarn.lock index f3602c9..e10f44b 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/yarn.lock +++ b/contrib/views/wfmanager/src/main/resources/ui/yarn.lock @@ -588,7 +588,7 @@ babel-plugin-transform-es2015-block-scoped-functions@^6.22.0: dependencies: babel-runtime "^6.22.0" 
-babel-plugin-transform-es2015-block-scoping@^6.23.0: +babel-plugin-transform-es2015-block-scoping@^6.23.0, babel-plugin-transform-es2015-block-scoping@^6.24.1: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-block-scoping/-/babel-plugin-transform-es2015-block-scoping-6.24.1.tgz#76c295dc3a4741b1665adfd3167215dcff32a576; dependencies: @@ -989,9 +989,9 @@ bower-endpoint-parser@0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/bower-endpoint-parser/-/bower-endpoint-parser-0.2.2.tgz#00b565adbfab6f2d35addde977e97962acbcb3f6; -bower@^1.3.12, bower@^1.7.7: - version "1.8.0" - resolved "https://registry.yarnpkg.com/bower/-/bower-1.8.0.tgz#55dbebef0ad9155382d9e9d3e497c1372345b44a; +bower@1.8.4, bower@^1.3.12: + version "1.8.4" + resolved "https://registry.yarnpkg.com/bower/-/bower-1.8.4.tgz#e7876a076deb8137f7d06525dc5e8c66db82f28a; brace-expansion@^1.0.0: version "1.1.7"
[ambari] branch AMBARI-24152-branch-2.7 created (now 69aaaeb)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch AMBARI-24152-branch-2.7 in repository https://gitbox.apache.org/repos/asf/ambari.git. at 69aaaeb AMBARI-24152 Ambari Workflow Manager (wfmanager) sends plaintext content over API. JSON is expected. (#1659) This branch includes the following new commits: new 69aaaeb AMBARI-24152 Ambari Workflow Manager (wfmanager) sends plaintext content over API. JSON is expected. (#1659) The 1 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference.
[ambari] branch AMBARI-24266-trunk created (now 0904bbf)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch AMBARI-24266-trunk in repository https://gitbox.apache.org/repos/asf/ambari.git. at 0904bbf AMBARI-24266 Error in Validating And Submitting Workflow With Node as Hive This branch includes the following new commits: new 0904bbf AMBARI-24266 Error in Validating And Submitting Workflow With Node as Hive The 1 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference.
[ambari] 01/01: AMBARI-24266 Error in Validating And Submitting Workflow With Node as Hive
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch AMBARI-24266-trunk in repository https://gitbox.apache.org/repos/asf/ambari.git commit 0904bbf21f52ba6080d439ccf824901bc984dbde Author: Venkata Sairam AuthorDate: Mon Jul 9 12:33:39 2018 +0530 AMBARI-24266 Error in Validating And Submitting Workflow With Node as Hive --- .../main/resources/ui/app/components/flow-designer.js | 18 +- .../src/main/resources/ui/app/components/job-config.js | 3 ++- .../resources/ui/app/components/workflow-actions.js| 1 + .../resources/ui/app/domain/cytoscape-flow-renderer.js | 11 +-- .../ui/app/templates/components/flow-designer.hbs | 6 ++ .../ui/app/templates/components/workflow-actions.hbs | 4 +++- .../src/main/resources/ui/app/utils/constants.js | 1 + .../views/wfmanager/src/main/resources/ui/bower.json | 2 +- 8 files changed, 40 insertions(+), 6 deletions(-) diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js index 2c77b5b..730b8e9 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js @@ -365,7 +365,7 @@ export default Ember.Component.extend(FindNodeMixin, Validations, { }, importWorkflowFromString(data){ this.showSparkMasterFieldError(data); - +this.hiveActionStatus(data); var wfObject=this.get("workflowImporter").importWorkflow(data); this.set("errors", wfObject.errors); if (wfObject.workflow === null) { @@ -388,6 +388,7 @@ export default Ember.Component.extend(FindNodeMixin, Validations, { }, importWorkflowFromJSON(data){ this.showSparkMasterFieldError(data); +this.hiveActionStatus(data); var workflowImporter=WorkflowJsonImporter.create({}); var workflow=workflowImporter.importWorkflow(data); this.resetDesigner(); @@ -417,6 +418,21 @@ export default Ember.Component.extend(FindNodeMixin, 
Validations, { } } }, + hiveActionStatus(data) { +if(Constants.enableHiveAction) { + return; +} +let x2js = new X2JS(); +let actionSettingsObj = x2js.xml_str2json(data); +let hiveActionList, hiveActionArray = []; +if(actionSettingsObj["workflow-app"] && actionSettingsObj["workflow-app"].action) { + hiveActionList = actionSettingsObj["workflow-app"].action; + hiveActionArray = this.migrateActionObjectToCollection(hiveActionList); + if(hiveActionArray.findBy('hive') && this.migrateActionObjectToCollection(hiveActionArray.findBy('hive'))) { +this.set('isHiveActionDisabled', true); + } +} + }, getWorkflowFromHdfs(filePath){ var url = Ember.ENV.API_URL + "/readWorkflow?workflowPath="+filePath+'=WORKFLOW'; var deferred = Ember.RSVP.defer(); diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js index 326cf38..d1c3400 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js @@ -140,7 +140,8 @@ export default Ember.Component.extend(Validations, { } } } else { -val = self.get("workflowManagerConfigs").getWfmConfigs()[propName]; +let tmp = self.get("workflowManagerConfigs").getWfmConfigs(); +val = tmp ? 
tmp[propName] : ""; } var prop= Ember.Object.create({ name: propName, diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-actions.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-actions.js index 2f8cdaa..dfb1b16 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-actions.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-actions.js @@ -24,6 +24,7 @@ export default Ember.Component.extend({ }), initialize : function(){ this.set('customActionEnabled', Constants.customActionEnabled); +this.set('enableHiveAction', Constants.enableHiveAction); }.on('init'), actions : { addAction : function(type){ diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js index bee901e..8163844 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js @@ -17,6 +17,8 @@ import Ember from 'ember'; import CytoscapeStyles from '../domain/cytoscape-style'; +im
[ambari] branch AMBARI-24152-trunk deleted (was c5a1fb2)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch AMBARI-24152-trunk in repository https://gitbox.apache.org/repos/asf/ambari.git. was c5a1fb2 AMBARI-24152 Ambari Workflow Manager (wfmanager) sends plaintext content over API. JSON is expected. The revisions that were on this branch are still contained in other references; therefore, this change does not discard any commits from the repository.
[ambari] branch trunk updated: AMBARI-24152 Ambari Workflow Manager (wfmanager) sends plaintext content over API. JSON is expected. (#1659)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/ambari.git The following commit(s) were added to refs/heads/trunk by this push: new 119f8bb AMBARI-24152 Ambari Workflow Manager (wfmanager) sends plaintext content over API. JSON is expected. (#1659) 119f8bb is described below commit 119f8bbeb01c6f80c356769650d7b1a93be5f52e Author: Venkata Sairam Lanka AuthorDate: Mon Jul 9 12:21:17 2018 +0530 AMBARI-24152 Ambari Workflow Manager (wfmanager) sends plaintext content over API. JSON is expected. (#1659) --- .../org/apache/oozie/ambari/view/OozieProxyImpersonator.java | 12 +++- .../src/main/resources/ui/app/services/user-info.js | 5 +++-- contrib/views/wfmanager/src/main/resources/ui/package.json | 2 +- contrib/views/wfmanager/src/main/resources/ui/yarn.lock | 8 4 files changed, 19 insertions(+), 8 deletions(-) diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java index 8d2b5a5..15a1042 100644 --- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java +++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java @@ -58,6 +58,11 @@ import org.slf4j.LoggerFactory; import com.google.inject.Singleton; +import org.json.simple.JSONObject; + + + + /** * This is a class used to bridge the communication between the and the Oozie * API executing inside ambari. 
@@ -154,7 +159,12 @@ public class OozieProxyImpersonator { @GET @Path("/getCurrentUserName") public Response getCurrentUserName() { -return Response.ok(viewContext.getUsername()).build(); + +JSONObject obj = new JSONObject(); + +obj.put("username", viewContext.getUsername()); + +return Response.ok(obj).build(); } @GET diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/services/user-info.js b/contrib/views/wfmanager/src/main/resources/ui/app/services/user-info.js index 701f953..3c1c5c5 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/services/user-info.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/services/user-info.js @@ -37,8 +37,9 @@ export default Ember.Service.extend({ xhr.setRequestHeader("X-Requested-By", "Ambari"); } }).done(function(data){ -self.set("userName", data); -deferred.resolve(data); +let uname = JSON.parse(data).username; +self.set("userName", JSON.parse(data).username); +deferred.resolve(uname); }).fail(function(data){ self.set("userName", ""); deferred.reject(data); diff --git a/contrib/views/wfmanager/src/main/resources/ui/package.json b/contrib/views/wfmanager/src/main/resources/ui/package.json index 69f43c8..18b4ae0 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/package.json +++ b/contrib/views/wfmanager/src/main/resources/ui/package.json @@ -19,7 +19,7 @@ "author": "", "license": "MIT", "devDependencies": { -"bower": "^1.7.7", +"bower": "1.8.4", "broccoli-asset-rev": "^2.2.0", "babel-plugin-transform-es2015-block-scoping": "^6.24.1", "ember-ajax": "0.7.1", diff --git a/contrib/views/wfmanager/src/main/resources/ui/yarn.lock b/contrib/views/wfmanager/src/main/resources/ui/yarn.lock index f3602c9..e10f44b 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/yarn.lock +++ b/contrib/views/wfmanager/src/main/resources/ui/yarn.lock @@ -588,7 +588,7 @@ babel-plugin-transform-es2015-block-scoped-functions@^6.22.0: dependencies: babel-runtime "^6.22.0" 
-babel-plugin-transform-es2015-block-scoping@^6.23.0: +babel-plugin-transform-es2015-block-scoping@^6.23.0, babel-plugin-transform-es2015-block-scoping@^6.24.1: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-block-scoping/-/babel-plugin-transform-es2015-block-scoping-6.24.1.tgz#76c295dc3a4741b1665adfd3167215dcff32a576; dependencies: @@ -989,9 +989,9 @@ bower-endpoint-parser@0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/bower-endpoint-parser/-/bower-endpoint-parser-0.2.2.tgz#00b565adbfab6f2d35addde977e97962acbcb3f6; -bower@^1.3.12, bower@^1.7.7: - version "1.8.0" - resolved "https://registry.yarnpkg.com/bower/-/bower-1.8.0.tgz#55dbebef0ad9155382d9e9d3e497c1372345b44a; +bower@1.8.4, bower@^1.3.12: + version "1.8.4" + resolved "https://registry.yarnpkg.com/bower/-/bower-1.8.4.tgz#e7876a076deb8137f7d06525dc5e8c66db82f28a; brace-expansion@^1.0.0: version "1.1.7"
[ambari] branch branch-2.6 updated (79c56db -> 2699217)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch branch-2.6 in repository https://gitbox.apache.org/repos/asf/ambari.git. from 79c56db AMBARI-23457 : Hive-server-interactive service needs to be started after Ranger-Admin. (#906) add 300dbe6 AMBARI-23002 Cancel Button in Upload table doesn't work in Hive views 2.0 new 2699217 Merge pull request #422 from Akhilsnaik/AMBARI-23002-branch-2.6 The 1 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference. Summary of changes: .../views/hive20/src/main/resources/ui/app/components/upload-table.js | 3 +++ 1 file changed, 3 insertions(+) -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] 01/01: Merge pull request #422 from Akhilsnaik/AMBARI-23002-branch-2.6
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch branch-2.6 in repository https://gitbox.apache.org/repos/asf/ambari.git commit 26992174c3d5f2396261be396a56ca16e0aa999f Merge: 79c56db 300dbe6 Author: Venkata Sairam Lanka <venkatasairam.la...@gmail.com> AuthorDate: Wed Apr 11 12:25:25 2018 +0530 Merge pull request #422 from Akhilsnaik/AMBARI-23002-branch-2.6 AMBARI-23002 Cancel Button in Upload table doesn't work in Hive views 2.0 .../views/hive20/src/main/resources/ui/app/components/upload-table.js | 3 +++ 1 file changed, 3 insertions(+) -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] branch branch-2.5 updated (f37a37b -> e4a2518)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch branch-2.5 in repository https://gitbox.apache.org/repos/asf/ambari.git. from f37a37b AMBARI-22999 : Ambari Hive View 2.0 'Upload Table' does not support UTF8 files with BOM (nitirajrathore) (#510) (#527) add 5cc69d4 AMBARI-22868.Add ability to configure done-flag element of dataset in Workflow Manager View(Venkata Sairam) new e4a2518 Merge pull request #669 from apache/AMBARI-22868-branch-2.5 The 1 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference. Summary of changes: .../resources/ui/app/components/dataset-config.js| 8 .../domain/coordinator/coordinator-xml-generator.js | 2 +- .../domain/coordinator/coordinator-xml-importer.js | 5 - .../ui/app/templates/components/dataset-config.hbs | 20 4 files changed, 33 insertions(+), 2 deletions(-) -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] 01/01: Merge pull request #669 from apache/AMBARI-22868-branch-2.5
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch branch-2.5 in repository https://gitbox.apache.org/repos/asf/ambari.git commit e4a2518101677116762c8bed9e7bf9ab3cdf251e Merge: f37a37b 5cc69d4 Author: Venkata Sairam Lanka <venkatasairam.la...@gmail.com> AuthorDate: Mon Mar 19 12:24:05 2018 +0530 Merge pull request #669 from apache/AMBARI-22868-branch-2.5 AMBARI-22868.Add ability to configure done-flag element of dataset in… .../resources/ui/app/components/dataset-config.js| 8 .../domain/coordinator/coordinator-xml-generator.js | 2 +- .../domain/coordinator/coordinator-xml-importer.js | 5 - .../ui/app/templates/components/dataset-config.hbs | 20 4 files changed, 33 insertions(+), 2 deletions(-) -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] branch branch-2.6 updated (50c9a8e -> 622bb9e)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch branch-2.6 in repository https://gitbox.apache.org/repos/asf/ambari.git. from 50c9a8e [AMBARI-23245] Invalid value for zeppelin.config.fs.dir property (#672) add 4aa09e7 AMBARI-22868.Add ability to configure done-flag element of dataset in Workflow Manager View(Venkata Sairam) new 622bb9e Merge pull request #670 from apache/AMBARI-22868-branch-2.6 The 1 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference. Summary of changes: .../resources/ui/app/components/dataset-config.js| 8 .../domain/coordinator/coordinator-xml-generator.js | 2 +- .../domain/coordinator/coordinator-xml-importer.js | 5 - .../ui/app/templates/components/dataset-config.hbs | 20 4 files changed, 33 insertions(+), 2 deletions(-) -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] 01/01: Merge pull request #671 from apache/AMBARI-22868-trunk
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/ambari.git commit 6ff45aff59a1977afa330ca0f43339c2a517c403 Merge: 6f223af 0f4a990 Author: Venkata Sairam Lanka <venkatasairam.la...@gmail.com> AuthorDate: Mon Mar 19 12:23:36 2018 +0530 Merge pull request #671 from apache/AMBARI-22868-trunk AMBARI-22868.Add ability to configure done-flag element of dataset in… .../resources/ui/app/components/dataset-config.js| 8 .../domain/coordinator/coordinator-xml-generator.js | 2 +- .../domain/coordinator/coordinator-xml-importer.js | 5 - .../ui/app/templates/components/dataset-config.hbs | 20 4 files changed, 33 insertions(+), 2 deletions(-) -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] 01/01: Merge pull request #670 from apache/AMBARI-22868-branch-2.6
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch branch-2.6 in repository https://gitbox.apache.org/repos/asf/ambari.git commit 622bb9eca3b9dbf91efb6e7d77c8b9bad04e7237 Merge: 50c9a8e 4aa09e7 Author: Venkata Sairam Lanka <venkatasairam.la...@gmail.com> AuthorDate: Mon Mar 19 12:23:52 2018 +0530 Merge pull request #670 from apache/AMBARI-22868-branch-2.6 AMBARI-22868.Add ability to configure done-flag element of dataset in… .../resources/ui/app/components/dataset-config.js| 8 .../domain/coordinator/coordinator-xml-generator.js | 2 +- .../domain/coordinator/coordinator-xml-importer.js | 5 - .../ui/app/templates/components/dataset-config.hbs | 20 4 files changed, 33 insertions(+), 2 deletions(-) -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] branch trunk updated (6f223af -> 6ff45af)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/ambari.git. from 6f223af [AMBARI-23232] Set full name (cn) when creating user accounts in FreeIPA server add 0f4a990 AMBARI-22868.Add ability to configure done-flag element of dataset in Workflow Manager View(Venkata Sairam) new 6ff45af Merge pull request #671 from apache/AMBARI-22868-trunk The 1 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference. Summary of changes: .../resources/ui/app/components/dataset-config.js| 8 .../domain/coordinator/coordinator-xml-generator.js | 2 +- .../domain/coordinator/coordinator-xml-importer.js | 5 - .../ui/app/templates/components/dataset-config.hbs | 20 4 files changed, 33 insertions(+), 2 deletions(-) -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] branch AMBARI-22868-trunk created (now 0f4a990)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch AMBARI-22868-trunk in repository https://gitbox.apache.org/repos/asf/ambari.git. at 0f4a990 AMBARI-22868.Add ability to configure done-flag element of dataset in Workflow Manager View(Venkata Sairam) This branch includes the following new commits: new 0f4a990 AMBARI-22868.Add ability to configure done-flag element of dataset in Workflow Manager View(Venkata Sairam) The 1 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference. -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] 01/01: AMBARI-22868.Add ability to configure done-flag element of dataset in Workflow Manager View(Venkata Sairam)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch AMBARI-22868-trunk in repository https://gitbox.apache.org/repos/asf/ambari.git commit 0f4a99022d12eee85db064f7f902c315b06abbeb Author: Venkata Sairam <venkatasairam.la...@gmail.com> AuthorDate: Thu Mar 15 20:24:04 2018 +0530 AMBARI-22868.Add ability to configure done-flag element of dataset in Workflow Manager View(Venkata Sairam) --- .../resources/ui/app/components/dataset-config.js| 8 .../domain/coordinator/coordinator-xml-generator.js | 2 +- .../domain/coordinator/coordinator-xml-importer.js | 5 - .../ui/app/templates/components/dataset-config.hbs | 20 4 files changed, 33 insertions(+), 2 deletions(-) diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/dataset-config.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/dataset-config.js index d5253ab..0b0726c 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/components/dataset-config.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/dataset-config.js @@ -62,6 +62,10 @@ export default Ember.Component.extend(Validations, { this.get('timeUnitOptions').pushObject({value:'cron',displayName:'Cron'}); this.set('childComponents', new Map()); this.set('timezoneList', Ember.copy(Constants.timezoneList)); +if(!this.get('dataset.doneFlagType')) { + this.set('dataset.doneFlagType', 'default'); +} + }.on('init'), validateChildComponents(){ var isChildComponentsValid = true; @@ -98,6 +102,10 @@ export default Ember.Component.extend(Validations, { }, cancelDatasetOperation(){ this.sendAction('cancel'); +}, +clearDoneFlag(type){ + this.set('dataset.doneFlag', ''); + this.set('dataset.doneFlagType', type); } } }); diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/coordinator/coordinator-xml-generator.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/coordinator/coordinator-xml-generator.js index e31e8fc..2c4ea97 100644 --- 
a/contrib/views/wfmanager/src/main/resources/ui/app/domain/coordinator/coordinator-xml-generator.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/coordinator/coordinator-xml-generator.js @@ -71,7 +71,7 @@ var CoordinatorGenerator= Ember.Object.extend({ dataSetJson._frequency = dataset.frequency.value; } dataSetJson["uri-template"]=dataset.uriTemplate; -if (dataset.doneFlag){ +if (dataset.doneFlagType === 'custom'){ dataSetJson["done-flag"]=dataset.doneFlag; } datasets.push(dataSetJson); diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/coordinator/coordinator-xml-importer.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/coordinator/coordinator-xml-importer.js index b89ad05..e0c1828 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/coordinator/coordinator-xml-importer.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/coordinator/coordinator-xml-importer.js @@ -147,8 +147,11 @@ var CoordinatorXmlImporter= Ember.Object.extend({ dataSetJson.frequency.value = frequency; } dataSetJson["uriTemplate"] = dataset['uri-template']; -if (dataset['done-flag']){ +if (dataset.hasOwnProperty('done-flag')){ dataSetJson.doneFlag = dataset['done-flag']; + dataSetJson.doneFlagType = "custom"; +} else { + dataSetJson.doneFlagType = "default"; } return dataSetJson; }, diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/dataset-config.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/dataset-config.hbs index e07a58c..17575d0 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/dataset-config.hbs +++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/dataset-config.hbs @@ -58,6 +58,26 @@ + Done Flag +{{#if required}} + * +{{/if}} + + + + + + Default + + + Custom + + + {{input type="text" class="form-control" name="done-flag" value=dataset.doneFlag placeholder='File Name' disabled=(eq 
dataset.doneFlagType 'default')}} + + + + Cancel {{#if createMode}} -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] branch AMBARI-22868-branch-2.6 created (now 4aa09e7)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch AMBARI-22868-branch-2.6 in repository https://gitbox.apache.org/repos/asf/ambari.git. at 4aa09e7 AMBARI-22868.Add ability to configure done-flag element of dataset in Workflow Manager View(Venkata Sairam) This branch includes the following new commits: new 4aa09e7 AMBARI-22868.Add ability to configure done-flag element of dataset in Workflow Manager View(Venkata Sairam) The 1 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference. -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] 01/01: AMBARI-22868.Add ability to configure done-flag element of dataset in Workflow Manager View(Venkata Sairam)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch AMBARI-22868-branch-2.6 in repository https://gitbox.apache.org/repos/asf/ambari.git commit 4aa09e758b47346deb6cdfce7192107a8b9374c6 Author: Venkata Sairam <venkatasairam.la...@gmail.com> AuthorDate: Thu Mar 15 20:14:52 2018 +0530 AMBARI-22868.Add ability to configure done-flag element of dataset in Workflow Manager View(Venkata Sairam) --- .../resources/ui/app/components/dataset-config.js| 8 .../domain/coordinator/coordinator-xml-generator.js | 2 +- .../domain/coordinator/coordinator-xml-importer.js | 5 - .../ui/app/templates/components/dataset-config.hbs | 20 4 files changed, 33 insertions(+), 2 deletions(-) diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/dataset-config.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/dataset-config.js index d5253ab..0b0726c 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/components/dataset-config.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/dataset-config.js @@ -62,6 +62,10 @@ export default Ember.Component.extend(Validations, { this.get('timeUnitOptions').pushObject({value:'cron',displayName:'Cron'}); this.set('childComponents', new Map()); this.set('timezoneList', Ember.copy(Constants.timezoneList)); +if(!this.get('dataset.doneFlagType')) { + this.set('dataset.doneFlagType', 'default'); +} + }.on('init'), validateChildComponents(){ var isChildComponentsValid = true; @@ -98,6 +102,10 @@ export default Ember.Component.extend(Validations, { }, cancelDatasetOperation(){ this.sendAction('cancel'); +}, +clearDoneFlag(type){ + this.set('dataset.doneFlag', ''); + this.set('dataset.doneFlagType', type); } } }); diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/coordinator/coordinator-xml-generator.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/coordinator/coordinator-xml-generator.js index e31e8fc..2c4ea97 100644 --- 
a/contrib/views/wfmanager/src/main/resources/ui/app/domain/coordinator/coordinator-xml-generator.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/coordinator/coordinator-xml-generator.js @@ -71,7 +71,7 @@ var CoordinatorGenerator= Ember.Object.extend({ dataSetJson._frequency = dataset.frequency.value; } dataSetJson["uri-template"]=dataset.uriTemplate; -if (dataset.doneFlag){ +if (dataset.doneFlagType === 'custom'){ dataSetJson["done-flag"]=dataset.doneFlag; } datasets.push(dataSetJson); diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/coordinator/coordinator-xml-importer.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/coordinator/coordinator-xml-importer.js index b89ad05..e0c1828 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/coordinator/coordinator-xml-importer.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/coordinator/coordinator-xml-importer.js @@ -147,8 +147,11 @@ var CoordinatorXmlImporter= Ember.Object.extend({ dataSetJson.frequency.value = frequency; } dataSetJson["uriTemplate"] = dataset['uri-template']; -if (dataset['done-flag']){ +if (dataset.hasOwnProperty('done-flag')){ dataSetJson.doneFlag = dataset['done-flag']; + dataSetJson.doneFlagType = "custom"; +} else { + dataSetJson.doneFlagType = "default"; } return dataSetJson; }, diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/dataset-config.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/dataset-config.hbs index e07a58c..17575d0 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/dataset-config.hbs +++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/dataset-config.hbs @@ -58,6 +58,26 @@ + Done Flag +{{#if required}} + * +{{/if}} + + + + + + Default + + + Custom + + + {{input type="text" class="form-control" name="done-flag" value=dataset.doneFlag placeholder='File Name' disabled=(eq 
dataset.doneFlagType 'default')}} + + + + Cancel {{#if createMode}} -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] 01/01: AMBARI-22868.Add ability to configure done-flag element of dataset in Workflow Manager View(Venkata Sairam)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch AMBARI-22868-branch-2.5 in repository https://gitbox.apache.org/repos/asf/ambari.git commit 5cc69d4b38934dbb5d0405e9658555d7bcfa4975 Author: Venkata Sairam <venkatasairam.la...@gmail.com> AuthorDate: Thu Mar 15 20:03:01 2018 +0530 AMBARI-22868.Add ability to configure done-flag element of dataset in Workflow Manager View(Venkata Sairam) --- .../resources/ui/app/components/dataset-config.js| 8 .../domain/coordinator/coordinator-xml-generator.js | 2 +- .../domain/coordinator/coordinator-xml-importer.js | 5 - .../ui/app/templates/components/dataset-config.hbs | 20 4 files changed, 33 insertions(+), 2 deletions(-) diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/dataset-config.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/dataset-config.js index d5253ab..0b0726c 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/components/dataset-config.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/dataset-config.js @@ -62,6 +62,10 @@ export default Ember.Component.extend(Validations, { this.get('timeUnitOptions').pushObject({value:'cron',displayName:'Cron'}); this.set('childComponents', new Map()); this.set('timezoneList', Ember.copy(Constants.timezoneList)); +if(!this.get('dataset.doneFlagType')) { + this.set('dataset.doneFlagType', 'default'); +} + }.on('init'), validateChildComponents(){ var isChildComponentsValid = true; @@ -98,6 +102,10 @@ export default Ember.Component.extend(Validations, { }, cancelDatasetOperation(){ this.sendAction('cancel'); +}, +clearDoneFlag(type){ + this.set('dataset.doneFlag', ''); + this.set('dataset.doneFlagType', type); } } }); diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/coordinator/coordinator-xml-generator.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/coordinator/coordinator-xml-generator.js index e31e8fc..2c4ea97 100644 --- 
a/contrib/views/wfmanager/src/main/resources/ui/app/domain/coordinator/coordinator-xml-generator.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/coordinator/coordinator-xml-generator.js @@ -71,7 +71,7 @@ var CoordinatorGenerator= Ember.Object.extend({ dataSetJson._frequency = dataset.frequency.value; } dataSetJson["uri-template"]=dataset.uriTemplate; -if (dataset.doneFlag){ +if (dataset.doneFlagType === 'custom'){ dataSetJson["done-flag"]=dataset.doneFlag; } datasets.push(dataSetJson); diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/coordinator/coordinator-xml-importer.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/coordinator/coordinator-xml-importer.js index b89ad05..e0c1828 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/coordinator/coordinator-xml-importer.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/coordinator/coordinator-xml-importer.js @@ -147,8 +147,11 @@ var CoordinatorXmlImporter= Ember.Object.extend({ dataSetJson.frequency.value = frequency; } dataSetJson["uriTemplate"] = dataset['uri-template']; -if (dataset['done-flag']){ +if (dataset.hasOwnProperty('done-flag')){ dataSetJson.doneFlag = dataset['done-flag']; + dataSetJson.doneFlagType = "custom"; +} else { + dataSetJson.doneFlagType = "default"; } return dataSetJson; }, diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/dataset-config.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/dataset-config.hbs index e07a58c..17575d0 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/dataset-config.hbs +++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/dataset-config.hbs @@ -58,6 +58,26 @@ + Done Flag +{{#if required}} + * +{{/if}} + + + + + + Default + + + Custom + + + {{input type="text" class="form-control" name="done-flag" value=dataset.doneFlag placeholder='File Name' disabled=(eq 
dataset.doneFlagType 'default')}} + + + + Cancel {{#if createMode}} -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] branch AMBARI-22868-branch-2.5 created (now 5cc69d4)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch AMBARI-22868-branch-2.5 in repository https://gitbox.apache.org/repos/asf/ambari.git. at 5cc69d4 AMBARI-22868.Add ability to configure done-flag element of dataset in Workflow Manager View(Venkata Sairam) This branch includes the following new commits: new 5cc69d4 AMBARI-22868.Add ability to configure done-flag element of dataset in Workflow Manager View(Venkata Sairam) The 1 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference. -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] 01/01: Merge pull request #593 from Akhilsnaik/ambari-22897-branch_2.6
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch branch-2.6 in repository https://gitbox.apache.org/repos/asf/ambari.git commit de95d6e7662725f35dcd07e7be7a7b00476515a2 Merge: fe85450 1b0b7ae Author: Venkata Sairam Lanka <venkatasairam.la...@gmail.com> AuthorDate: Tue Mar 13 15:23:04 2018 +0530 Merge pull request #593 from Akhilsnaik/ambari-22897-branch_2.6 AMBARI-22897 - Tez view button doesnt works in ambari views 1.5 .../settings.js => components/query-settings.js} | 26 +++-- .../ui/hive-web/app/controllers/application.js | 9 +++- .../resources/ui/hive-web/app/controllers/index.js | 1 - .../ui/hive-web/app/controllers/open-queries.js| 12 + .../ui/hive-web/app/controllers/query-tabs.js | 63 +- .../src/main/resources/ui/hive-web/app/router.js | 5 +- .../ui/hive-web/app/routes/application.js | 22 ++-- .../application.js => routes/messages.js} | 8 +-- .../application.js => routes/tez-ui.js}| 8 +-- .../application.js => routes/visual-explain.js}| 8 +-- .../application.js => routes/visualization-ui.js} | 8 +-- .../resources/ui/hive-web/app/services/settings.js | 6 +++ .../main/resources/ui/hive-web/app/styles/app.scss | 2 +- .../ui/hive-web/app/styles/query-tabs.scss | 8 ++- .../ui/hive-web/app/templates/application.hbs | 23 +++- .../query-settings.hbs}| 16 +++--- .../resources/ui/hive-web/app/templates/index.hbs | 21 ++-- .../ui/hive-web/app/templates/open-queries.hbs | 10 ++-- .../query-settings-test.js}| 0 .../unit/{controllers => routes}/messages-test.js | 0 .../unit/routes/tez-ui-test.js}| 33 ++-- .../unit/{views => routes}/visual-explain-test.js | 2 +- 22 files changed, 163 insertions(+), 128 deletions(-) -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] branch branch-2.6 updated (fe85450 -> de95d6e)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch branch-2.6 in repository https://gitbox.apache.org/repos/asf/ambari.git. from fe85450 AMBARI-23216. Fix that Log Search for Ambari 2.6.2 should not require java8. (#637) add 1b0b7ae AMBARI-22897 - Tez view button doesnt works in ambari views 1.5(asnaik) new de95d6e Merge pull request #593 from Akhilsnaik/ambari-22897-branch_2.6 The 1 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference. Summary of changes: .../settings.js => components/query-settings.js} | 26 +++-- .../ui/hive-web/app/controllers/application.js | 9 +++- .../resources/ui/hive-web/app/controllers/index.js | 1 - .../ui/hive-web/app/controllers/open-queries.js| 12 + .../ui/hive-web/app/controllers/query-tabs.js | 63 +- .../src/main/resources/ui/hive-web/app/router.js | 5 +- .../ui/hive-web/app/routes/application.js | 22 ++-- .../app/routes/{loading.js => messages.js} | 0 .../hive-web/app/routes/{loading.js => tez-ui.js} | 0 .../app/routes/{loading.js => visual-explain.js} | 0 .../app/routes/{loading.js => visualization-ui.js} | 0 .../resources/ui/hive-web/app/services/settings.js | 6 +++ .../main/resources/ui/hive-web/app/styles/app.scss | 2 +- .../ui/hive-web/app/styles/query-tabs.scss | 8 ++- .../ui/hive-web/app/templates/application.hbs | 23 +++- .../query-settings.hbs}| 16 +++--- .../resources/ui/hive-web/app/templates/index.hbs | 21 ++-- .../ui/hive-web/app/templates/open-queries.hbs | 10 ++-- .../query-settings-test.js}| 0 .../unit/{controllers => routes}/messages-test.js | 0 .../{integration => unit/routes}/tez-ui-test.js| 0 .../unit/{views => routes}/visual-explain-test.js | 2 +- 22 files changed, 127 insertions(+), 99 deletions(-) rename contrib/views/hive-next/src/main/resources/ui/hive-web/app/{controllers/settings.js => 
components/query-settings.js} (74%) copy contrib/views/hive-next/src/main/resources/ui/hive-web/app/routes/{loading.js => messages.js} (100%) copy contrib/views/hive-next/src/main/resources/ui/hive-web/app/routes/{loading.js => tez-ui.js} (100%) copy contrib/views/hive-next/src/main/resources/ui/hive-web/app/routes/{loading.js => visual-explain.js} (100%) copy contrib/views/hive-next/src/main/resources/ui/hive-web/app/routes/{loading.js => visualization-ui.js} (100%) rename contrib/views/hive-next/src/main/resources/ui/hive-web/app/templates/{settings.hbs => components/query-settings.hbs} (87%) rename contrib/views/hive-next/src/main/resources/ui/hive-web/tests/unit/{controllers/settings-test.js => components/query-settings-test.js} (100%) rename contrib/views/hive-next/src/main/resources/ui/hive-web/tests/unit/{controllers => routes}/messages-test.js (100%) copy contrib/views/hive-next/src/main/resources/ui/hive-web/tests/{integration => unit/routes}/tez-ui-test.js (100%) rename contrib/views/hive-next/src/main/resources/ui/hive-web/tests/unit/{views => routes}/visual-explain-test.js (99%) -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] 01/01: Merge pull request #408 from Akhilsnaik/AMBARI-22897
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/ambari.git commit 973d875a1c1489218d2b4901411c37db792d3f10 Merge: e5e10cd a425ed4 Author: Venkata Sairam Lanka <venkatasairam.la...@gmail.com> AuthorDate: Thu Mar 8 16:13:05 2018 +0530 Merge pull request #408 from Akhilsnaik/AMBARI-22897 Ambari 22897 Tez view button doesnt works in ambari hiveviews 1.5(asnaik) .../settings.js => components/query-settings.js} | 26 +++-- .../ui/hive-web/app/controllers/application.js | 9 +++- .../resources/ui/hive-web/app/controllers/index.js | 1 - .../ui/hive-web/app/controllers/open-queries.js| 12 + .../ui/hive-web/app/controllers/query-tabs.js | 63 +- .../src/main/resources/ui/hive-web/app/router.js | 5 +- .../ui/hive-web/app/routes/application.js | 22 ++-- .../application.js => routes/messages.js} | 8 +-- .../application.js => routes/tez-ui.js}| 8 +-- .../application.js => routes/visual-explain.js}| 8 +-- .../application.js => routes/visualization-ui.js} | 8 +-- .../resources/ui/hive-web/app/services/settings.js | 6 +++ .../main/resources/ui/hive-web/app/styles/app.scss | 2 +- .../ui/hive-web/app/styles/query-tabs.scss | 8 ++- .../ui/hive-web/app/templates/application.hbs | 23 +++- .../query-settings.hbs}| 16 +++--- .../resources/ui/hive-web/app/templates/index.hbs | 21 ++-- .../ui/hive-web/app/templates/open-queries.hbs | 10 ++-- .../query-settings-test.js}| 0 .../unit/{controllers => routes}/messages-test.js | 0 .../unit/routes/tez-ui-test.js}| 33 ++-- .../unit/{views => routes}/visual-explain-test.js | 2 +- 22 files changed, 163 insertions(+), 128 deletions(-) -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] branch trunk updated (e5e10cd -> 973d875)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/ambari.git. from e5e10cd AMBARI-23172. Build fails for Debian (jdeb plugin not found). (swagle) (#583) add 34cb92e AMBARI-22897 - Tez view button doesnt works in ambari views 1.5 add 0bbb154 AMBARI-22897 - Tez view button doesnt works in ambari views 1.5(asnaik) add 874abb6 AMBARI-22897 - Tez view button doesnt works in ambari views 1. add f230fa5 Ambari 22897 Tez view button doesnt works in ambari hiveviews 1.5 (asnaik) add a425ed4 AMBARI-22897 - Tez view button doesnt works in ambari views 1.5,Fix the Unused code, layout settings tab correctly(asnaik) new 973d875 Merge pull request #408 from Akhilsnaik/AMBARI-22897 The 1 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference. 
Summary of changes: .../settings.js => components/query-settings.js} | 26 +++-- .../ui/hive-web/app/controllers/application.js | 9 +++- .../resources/ui/hive-web/app/controllers/index.js | 1 - .../ui/hive-web/app/controllers/open-queries.js| 12 + .../ui/hive-web/app/controllers/query-tabs.js | 63 +- .../src/main/resources/ui/hive-web/app/router.js | 5 +- .../ui/hive-web/app/routes/application.js | 22 ++-- .../app/routes/{loading.js => messages.js} | 0 .../hive-web/app/routes/{loading.js => tez-ui.js} | 0 .../app/routes/{loading.js => visual-explain.js} | 0 .../app/routes/{loading.js => visualization-ui.js} | 0 .../resources/ui/hive-web/app/services/settings.js | 6 +++ .../main/resources/ui/hive-web/app/styles/app.scss | 2 +- .../ui/hive-web/app/styles/query-tabs.scss | 8 ++- .../ui/hive-web/app/templates/application.hbs | 23 +++- .../query-settings.hbs}| 16 +++--- .../resources/ui/hive-web/app/templates/index.hbs | 21 ++-- .../ui/hive-web/app/templates/open-queries.hbs | 10 ++-- .../query-settings-test.js}| 0 .../unit/{controllers => routes}/messages-test.js | 0 .../{integration => unit/routes}/tez-ui-test.js| 0 .../unit/{views => routes}/visual-explain-test.js | 2 +- 22 files changed, 127 insertions(+), 99 deletions(-) rename contrib/views/hive-next/src/main/resources/ui/hive-web/app/{controllers/settings.js => components/query-settings.js} (74%) copy contrib/views/hive-next/src/main/resources/ui/hive-web/app/routes/{loading.js => messages.js} (100%) copy contrib/views/hive-next/src/main/resources/ui/hive-web/app/routes/{loading.js => tez-ui.js} (100%) copy contrib/views/hive-next/src/main/resources/ui/hive-web/app/routes/{loading.js => visual-explain.js} (100%) copy contrib/views/hive-next/src/main/resources/ui/hive-web/app/routes/{loading.js => visualization-ui.js} (100%) rename contrib/views/hive-next/src/main/resources/ui/hive-web/app/templates/{settings.hbs => components/query-settings.hbs} (87%) rename 
contrib/views/hive-next/src/main/resources/ui/hive-web/tests/unit/{controllers/settings-test.js => components/query-settings-test.js} (100%) rename contrib/views/hive-next/src/main/resources/ui/hive-web/tests/unit/{controllers => routes}/messages-test.js (100%) copy contrib/views/hive-next/src/main/resources/ui/hive-web/tests/{integration => unit/routes}/tez-ui-test.js (100%) rename contrib/views/hive-next/src/main/resources/ui/hive-web/tests/unit/{views => routes}/visual-explain-test.js (99%) -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] 01/01: Merge pull request #418 from Akhilsnaik/AMBARI-23002-branch-2.5
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch branch-2.5 in repository https://gitbox.apache.org/repos/asf/ambari.git commit e2eba88b89fc0d0d288b8587c613e29f941d1a0f Merge: 0ad2785 19733e9 Author: Venkata Sairam Lanka <venkatasairam.la...@gmail.com> AuthorDate: Wed Feb 28 12:50:08 2018 +0530 Merge pull request #418 from Akhilsnaik/AMBARI-23002-branch-2.5 AMBARI-23002 Cancel Button in Upload table doesnt works in Hive views 2.0 .../views/hive20/src/main/resources/ui/app/components/upload-table.js | 3 +++ 1 file changed, 3 insertions(+) -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] 01/01: Merge pull request #374 from Akhilsnaik/AMBARI-23002
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/ambari.git commit ef894f36b9b28e58547d1bf3417b645f455bf40d Merge: 69c9fc1 91aa6dc Author: Venkata Sairam Lanka <venkatasairam.la...@gmail.com> AuthorDate: Wed Feb 28 12:50:15 2018 +0530 Merge pull request #374 from Akhilsnaik/AMBARI-23002 AMBARI-23002 Cancel Button in Upload table doesnt works in Hive views 2.0 .../views/hive20/src/main/resources/ui/app/components/upload-table.js | 3 +++ 1 file changed, 3 insertions(+) -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] branch trunk updated (69c9fc1 -> ef894f3)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/ambari.git. from 69c9fc1 AMBARI-23094. Ambari does not manage repositories (aonishuk) add 91aa6dc AMBARI-23002 Cancel Button in Upload table doesnt works in Hive views 2.0 new ef894f3 Merge pull request #374 from Akhilsnaik/AMBARI-23002 The 1 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference. Summary of changes: .../views/hive20/src/main/resources/ui/app/components/upload-table.js | 3 +++ 1 file changed, 3 insertions(+) -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] branch branch-2.5 updated (0ad2785 -> e2eba88)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch branch-2.5 in repository https://gitbox.apache.org/repos/asf/ambari.git. from 0ad2785 Merge pull request #426 from Akhilsnaik/AMBARI-22759-branch-2.5 add 19733e9 AMBARI-23002 Cancel Button in Upload table doesnt works in Hive views 2.0 new e2eba88 Merge pull request #418 from Akhilsnaik/AMBARI-23002-branch-2.5 The 1 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference. Summary of changes: .../views/hive20/src/main/resources/ui/app/components/upload-table.js | 3 +++ 1 file changed, 3 insertions(+) -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] 01/01: Merge pull request #426 from Akhilsnaik/AMBARI-22759-branch-2.5
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch branch-2.5 in repository https://gitbox.apache.org/repos/asf/ambari.git commit 0ad27858a83a9aed49be0917ace8490516022d71 Merge: 70fc45c 8204a15 Author: Venkata Sairam Lanka <venkatasairam.la...@gmail.com> AuthorDate: Wed Feb 28 12:49:47 2018 +0530 Merge pull request #426 from Akhilsnaik/AMBARI-22759-branch-2.5 AMBARI-22759 [Hive Views 2.0] Deleting a Saved query is Buggy .../hive20/src/main/resources/ui/app/routes/savedqueries.js| 10 -- 1 file changed, 4 insertions(+), 6 deletions(-) -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] branch branch-2.5 updated (70fc45c -> 0ad2785)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch branch-2.5 in repository https://gitbox.apache.org/repos/asf/ambari.git. from 70fc45c AMBARI-22927.Hive View 1.5 Upload Table - not able to change datatype from the dropdown for a wide file(Venkata Sairam) add 8204a15 AMBARI-22759 [Hive Views 2.0] Deleting a Saved query is Buggy new 0ad2785 Merge pull request #426 from Akhilsnaik/AMBARI-22759-branch-2.5 The 1 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference. Summary of changes: .../hive20/src/main/resources/ui/app/routes/savedqueries.js| 10 -- 1 file changed, 4 insertions(+), 6 deletions(-) -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] branch branch-2.6 updated (dc5cc66 -> a3a01b2)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch branch-2.6 in repository https://gitbox.apache.org/repos/asf/ambari.git. from dc5cc66 [AMBARI-23091] Zeppelin Notebook SSL credentials in Ambari UI are in plain text rather than being hidden (#481) add d9b324e AMBARI-22759 [Hive Views 2.0] Deleting a Saved query is Buggy new a3a01b2 Merge pull request #425 from Akhilsnaik/AMBARI-22759-branch-2.6 The 1 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference. Summary of changes: .../hive20/src/main/resources/ui/app/routes/savedqueries.js| 10 -- 1 file changed, 4 insertions(+), 6 deletions(-) -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] 01/01: Merge pull request #425 from Akhilsnaik/AMBARI-22759-branch-2.6
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch branch-2.6 in repository https://gitbox.apache.org/repos/asf/ambari.git commit a3a01b26339f39fd37e3d1db76fd8dfa07270c88 Merge: dc5cc66 d9b324e Author: Venkata Sairam Lanka <venkatasairam.la...@gmail.com> AuthorDate: Wed Feb 28 12:49:56 2018 +0530 Merge pull request #425 from Akhilsnaik/AMBARI-22759-branch-2.6 AMBARI-22759 [Hive Views 2.0] Deleting a Saved query is Buggy .../hive20/src/main/resources/ui/app/routes/savedqueries.js| 10 -- 1 file changed, 4 insertions(+), 6 deletions(-) -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] branch trunk updated (e40e7e5 -> 02105de)
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a change to branch trunk in repository https://gitbox.apache.org/repos/asf/ambari.git. from e40e7e5 [AMBARI-22882] Long cannot be cast to String error when changing a user's password add 858fbd2 AMBARI-22759 [Hive Views 2.0] Deleting a Saved query is Buggy when Multiple Queries exist in same Name new 02105de Merge pull request #175 from Akhilsnaik/AMBARI-22759_hive20 The 1 revisions listed above as "new" are entirely new to this repository and will be described in separate emails. The revisions listed as "add" were already present in the repository and have only been added to this reference. Summary of changes: .../hive20/src/main/resources/ui/app/routes/savedqueries.js| 10 -- 1 file changed, 4 insertions(+), 6 deletions(-) -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
[ambari] 01/01: Merge pull request #175 from Akhilsnaik/AMBARI-22759_hive20
This is an automated email from the ASF dual-hosted git repository. vsairam pushed a commit to branch trunk in repository https://gitbox.apache.org/repos/asf/ambari.git commit 02105de7cf16e7fe8f7e4f71b99f6606824799fa Merge: e40e7e5 858fbd2 Author: Venkata Sairam Lanka <venkatasairam.la...@gmail.com> AuthorDate: Wed Jan 31 19:14:37 2018 +0530 Merge pull request #175 from Akhilsnaik/AMBARI-22759_hive20 [AMBARI-22759] [Hive Views 2.0] Deleting a Saved query is Buggy when Mu… .../hive20/src/main/resources/ui/app/routes/savedqueries.js| 10 -- 1 file changed, 4 insertions(+), 6 deletions(-) -- To stop receiving notification emails like this one, please contact vsai...@apache.org.
ambari git commit: AMBARI-22506.Incorrect pie chart distribution(Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/branch-2.6 eef99836e -> 29ffc7ef9 AMBARI-22506.Incorrect pie chart distribution(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/29ffc7ef Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/29ffc7ef Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/29ffc7ef Branch: refs/heads/branch-2.6 Commit: 29ffc7ef99711b49bade5eba141d8018cf197b48 Parents: eef9983 Author: Venkata SairamAuthored: Thu Dec 28 12:07:19 2017 +0530 Committer: Venkata Sairam Committed: Thu Dec 28 12:08:20 2017 +0530 -- .../ZEPPELIN/0.7.0/package/scripts/master.py | 11 --- 1 file changed, 8 insertions(+), 3 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/29ffc7ef/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py index ee264a4..0b6ced9 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py @@ -533,9 +533,14 @@ class Master(Script): if params.zookeeper_znode_parent \ and params.hbase_zookeeper_quorum: interpreter['properties']['phoenix.driver'] = 'org.apache.phoenix.jdbc.PhoenixDriver' -interpreter['properties']['phoenix.hbase.client.retries.number'] = '1' -interpreter['properties']['phoenix.user'] = 'phoenixuser' -interpreter['properties']['phoenix.password'] = '' +if 'phoenix.hbase.client.retries.number' not in interpreter['properties']: + interpreter['properties']['phoenix.hbase.client.retries.number'] = '1' +if 'phoenix.phoenix.query.numberFormat' not in interpreter['properties']: + interpreter['properties']['phoenix.phoenix.query.numberFormat'] = '#.#' +if 
'phoenix.user' not in interpreter['properties']: + interpreter['properties']['phoenix.user'] = 'phoenixuser' +if 'phoenix.password' not in interpreter['properties']: + interpreter['properties']['phoenix.password'] = '' interpreter['properties']['phoenix.url'] = "jdbc:phoenix:" + \ params.hbase_zookeeper_quorum + ':' + \ params.zookeeper_znode_parent
ambari git commit: AMBARI-22506.Incorrect pie chart distribution(Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/trunk 02887284a -> c1b8cda96 AMBARI-22506.Incorrect pie chart distribution(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c1b8cda9 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c1b8cda9 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c1b8cda9 Branch: refs/heads/trunk Commit: c1b8cda9608180cd00384a8453e3e5f78a865cb2 Parents: 0288728 Author: Venkata SairamAuthored: Thu Dec 28 12:07:19 2017 +0530 Committer: Venkata Sairam Committed: Thu Dec 28 12:07:19 2017 +0530 -- .../ZEPPELIN/0.7.0/package/scripts/master.py | 11 --- 1 file changed, 8 insertions(+), 3 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/c1b8cda9/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py index df892f8..efa3ffe 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py @@ -539,9 +539,14 @@ class Master(Script): if params.zookeeper_znode_parent \ and params.hbase_zookeeper_quorum: interpreter['properties']['phoenix.driver'] = 'org.apache.phoenix.jdbc.PhoenixDriver' -interpreter['properties']['phoenix.hbase.client.retries.number'] = '1' -interpreter['properties']['phoenix.user'] = 'phoenixuser' -interpreter['properties']['phoenix.password'] = '' +if 'phoenix.hbase.client.retries.number' not in interpreter['properties']: + interpreter['properties']['phoenix.hbase.client.retries.number'] = '1' +if 'phoenix.phoenix.query.numberFormat' not in interpreter['properties']: + interpreter['properties']['phoenix.phoenix.query.numberFormat'] = '#.#' +if 'phoenix.user' 
not in interpreter['properties']: + interpreter['properties']['phoenix.user'] = 'phoenixuser' +if 'phoenix.password' not in interpreter['properties']: + interpreter['properties']['phoenix.password'] = '' interpreter['properties']['phoenix.url'] = "jdbc:phoenix:" + \ params.hbase_zookeeper_quorum + ':' + \ params.zookeeper_znode_parent
ambari git commit: AMBARI-22626.Zeppelin Interpreter settings are getting updated after zeppelin restart(Prabhjyot Singh via Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/branch-2.6 0f4ab0a66 -> eef99836e AMBARI-22626.Zeppelin Interpreter settings are getting updated after zeppelin restart(Prabhjyot Singh via Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/eef99836 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/eef99836 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/eef99836 Branch: refs/heads/branch-2.6 Commit: eef99836ea8f118b7735b9941890386f72d43018 Parents: 0f4ab0a Author: Venkata SairamAuthored: Thu Dec 28 12:05:18 2017 +0530 Committer: Venkata Sairam Committed: Thu Dec 28 12:05:18 2017 +0530 -- .../scripts/interpreter_json_template.py| 152 +++ .../package/scripts/livy2_config_template.py| 112 -- .../ZEPPELIN/0.7.0/package/scripts/master.py| 22 --- .../package/scripts/spark2_config_template.py | 84 -- 4 files changed, 152 insertions(+), 218 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/eef99836/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/interpreter_json_template.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/interpreter_json_template.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/interpreter_json_template.py index 713db23..b373e22 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/interpreter_json_template.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/interpreter_json_template.py @@ -128,6 +128,68 @@ template = ''' "isUserImpersonate": false } }, +"2C4U48MY3_spark2": { + "id": "2C4U48MY3_spark2", + "name": "spark2", + "group": "spark", + "properties": { +"spark.executor.memory": "", +"args": "", +"zeppelin.spark.printREPLOutput": "true", +"spark.cores.max": "", +"zeppelin.dep.additionalRemoteRepository": 
"spark-packages,http://dl.bintray.com/spark-packages/maven,false;;, +"zeppelin.spark.importImplicit": "true", +"zeppelin.spark.sql.stacktrace": "false", +"zeppelin.spark.concurrentSQL": "false", +"zeppelin.spark.useHiveContext": "true", +"zeppelin.pyspark.python": "python", +"zeppelin.dep.localrepo": "local-repo", +"zeppelin.R.knitr": "true", +"zeppelin.spark.maxResult": "1000", +"master": "local[*]", +"spark.app.name": "Zeppelin", +"zeppelin.R.image.width": "100%", +"zeppelin.R.render.options": "out.format \u003d \u0027html\u0027, comment \u003d NA, echo \u003d FALSE, results \u003d \u0027asis\u0027, message \u003d F, warning \u003d F", +"zeppelin.R.cmd": "R" + }, + "status": "READY", + "interpreterGroup": [ +{ + "name": "spark", + "class": "org.apache.zeppelin.spark.SparkInterpreter", + "defaultInterpreter": true +}, +{ + "name": "sql", + "class": "org.apache.zeppelin.spark.SparkSqlInterpreter", + "defaultInterpreter": false +}, +{ + "name": "dep", + "class": "org.apache.zeppelin.spark.DepInterpreter", + "defaultInterpreter": false +}, +{ + "name": "pyspark", + "class": "org.apache.zeppelin.spark.PySparkInterpreter", + "defaultInterpreter": false +}, +{ + "name": "r", + "class": "org.apache.zeppelin.spark.SparkRInterpreter", + "defaultInterpreter": false +} + ], + "dependencies": [], + "option": { +"remote": true, +"port": -1, +"perNoteSession": false, +"perNoteProcess": false, +"isExistingProcess": false, +"setPermission": false + } +}, "2CK8A9MEG": { "id": "2CK8A9MEG", "name": "jdbc", @@ -259,6 +321,96 @@ template = ''' "isUserImpersonate": false } }, +"2C8A4SZ9T_livy2": { + "id": "2C8A4SZ9T_livy2", + "status": "READY", + "group": "livy", + "name": "livy2", + "properties": { +"zeppelin.livy.keytab": "", +"zeppelin.livy.spark.sql.maxResult": "1000", +"livy.spark.executor.instances": "", +"livy.spark.executor.memory": "", +"livy.spark.dynamicAllocation.enabled": "", +"livy.spark.dynamicAllocation.cachedExecutorIdleTimeout": "", 
+"livy.spark.dynamicAllocation.initialExecutors": "", +"zeppelin.livy.session.create_timeout": "120", +
ambari git commit: AMBARI-22591.MD interpreter fails with NPE (Zeppelin)(Prabhjyot Singh via Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/branch-2.6 8e6469230 -> 8de808b78 AMBARI-22591.MD interpreter fails with NPE (Zeppelin)(Prabhjyot Singh via Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8de808b7 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8de808b7 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8de808b7 Branch: refs/heads/branch-2.6 Commit: 8de808b78debcaec63a658435607c1d1f7b8d10c Parents: 8e64692 Author: Venkata SairamAuthored: Wed Dec 6 15:15:47 2017 +0530 Committer: Venkata Sairam Committed: Wed Dec 6 15:15:47 2017 +0530 -- .../ZEPPELIN/0.7.0/package/scripts/interpreter_json_template.py| 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/8de808b7/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/interpreter_json_template.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/interpreter_json_template.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/interpreter_json_template.py index d5a70a7..713db23 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/interpreter_json_template.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/interpreter_json_template.py @@ -264,7 +264,7 @@ template = ''' "name": "md", "group": "md", "properties": { -"markdown.parser.type": "pegdown" +"markdown.parser.type": "markdown4j" }, "status": "READY", "interpreterGroup": [
ambari git commit: AMBARI-22591.MD interpreter fails with NPE (Zeppelin)(Prabhjyot Singh via Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/trunk 0fe2f8fa6 -> 97ceed034 AMBARI-22591.MD interpreter fails with NPE (Zeppelin)(Prabhjyot Singh via Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/97ceed03 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/97ceed03 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/97ceed03 Branch: refs/heads/trunk Commit: 97ceed034a8d23f5caf6276fa48c02332d58c234 Parents: 0fe2f8f Author: Venkata SairamAuthored: Wed Dec 6 14:56:14 2017 +0530 Committer: Venkata Sairam Committed: Wed Dec 6 14:56:14 2017 +0530 -- .../0.7.0/package/scripts/interpreter_json_template.py | 2 +- .../python/stacks/2.6/ZEPPELIN/interpreter_json_generated.py | 8 2 files changed, 5 insertions(+), 5 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/97ceed03/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/interpreter_json_template.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/interpreter_json_template.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/interpreter_json_template.py index d5a70a7..713db23 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/interpreter_json_template.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/interpreter_json_template.py @@ -264,7 +264,7 @@ template = ''' "name": "md", "group": "md", "properties": { -"markdown.parser.type": "pegdown" +"markdown.parser.type": "markdown4j" }, "status": "READY", "interpreterGroup": [ http://git-wip-us.apache.org/repos/asf/ambari/blob/97ceed03/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/interpreter_json_generated.py -- diff --git a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/interpreter_json_generated.py 
b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/interpreter_json_generated.py index 1d2cf86..4b4dc1f 100644 --- a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/interpreter_json_generated.py +++ b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/interpreter_json_generated.py @@ -18,10 +18,10 @@ limitations under the License. """ -template = '\n{\n "interpreterSettings": {\n"2CKEKWY8Z": {\n "id": "2CKEKWY8Z",\n "name": "angular",\n "group": "angular",\n "properties": {},\n "status": "READY",\n "interpreterGroup": [\n {\n "name": "angular",\n "class": "org.apache.zeppelin.angular.AngularInterpreter",\n "defaultInterpreter": false,\n "editor": {\n "editOnDblClick": true\n }\n}\n ],\n "dependencies": [],\n "option": {\n"remote": true,\n"port": -1,\n "perNote": "shared",\n"perUser": "shared",\n "isExistingProcess": false,\n"setPermission": false,\n"users": [],\n"isUserImpersonate": false\n }\n},\n"2CKX8WPU1": {\n "id": "2CKX8WPU1",\n "name": "spark",\n "group": "spark",\n "properties": {\n"spark.executor.memory": "512m",\n"args": "",\n"zeppelin.spark.printREPLOutput": "true",\n "spark.cores.max": "",\n "zeppelin.dep.additionalRemoteRepository": "spark-packages,http://dl.bintray.com/spark-packages/maven,false;",\n "zeppelin.spark.sql.stacktrace": "false",\n "zeppelin.spark.importImplicit": "true",\n "zeppelin.spark.concurrentSQL": "false",\n "zeppelin.spark.useHiveContext": "true",\n"zeppelin.pyspark.python": "python",\n"zeppelin.dep.localrepo": "local-repo",\n "zeppelin.R.knitr": "true",\n"zeppelin.spark.maxResult": "1000",\n "master": "yarn-client",\n"spark.app.name": "Zeppelin",\n "zeppelin.R.image.width": "100%",\n"zeppelin.R.render.options": "out.format \\u003d \\u0027html\\u0027, comment \\u003d NA, echo \\u003d FALSE, results \\u003d \\u0027asis\\u0027, message \\u003d F, warning \\u003d F",\n "zeppelin.R.cmd": "R"\n },\n "status": "READY",\n "interpreterGroup": [\n{\n "name": "spark",\n "class": "org .apache.zeppelin.spark.SparkInterpreter",\n 
"defaultInterpreter": true,\n "editor": {\n"language": "scala"\n }\n },\n{\n "name": "sql",\n "class":
ambari git commit: AMBARI-21569.Users randomly getting "HDFS020 Could not write file" exceptions while running query from Hive View(Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/branch-2.6 099e0185d -> c57e243d2 AMBARI-21569.Users randomly getting "HDFS020 Could not write file" exceptions while running query from Hive View(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c57e243d Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c57e243d Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c57e243d Branch: refs/heads/branch-2.6 Commit: c57e243d2d0c2f480b56693a39e97bb81e258da6 Parents: 099e018 Author: Venkata SairamAuthored: Thu Nov 23 15:52:55 2017 +0530 Committer: Venkata Sairam Committed: Thu Nov 23 15:52:55 2017 +0530 -- .../org/apache/ambari/view/utils/hdfs/HdfsApi.java | 16 +++- .../apache/ambari/view/utils/hdfs/HdfsUtil.java| 17 - 2 files changed, 27 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/c57e243d/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java -- diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java index 3db2081..812cd54 100644 --- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java +++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java @@ -485,7 +485,20 @@ public class HdfsApi { * @throws IOException * @throws InterruptedException */ - public T execute(PrivilegedExceptionAction action) + public T execute(PrivilegedExceptionAction action) throws IOException, InterruptedException { +return this.execute(action, false); + } + + + /** + * Executes action on HDFS using doAs + * @param action strategy object + * @param result type + * @return result of operation + * @throws IOException + * @throws InterruptedException + */ + public T execute(PrivilegedExceptionAction action, boolean alwaysRetry) throws IOException, 
InterruptedException { T result = null; @@ -508,6 +521,7 @@ public class HdfsApi { } LOG.info("HDFS threw 'IOException: Cannot obtain block length' exception. " + "Retrying... Try #" + (tryNumber + 1)); +LOG.error("Retrying: " + ex.getMessage(),ex); Thread.sleep(1000); //retry after 1 second } } while (!succeeded); http://git-wip-us.apache.org/repos/asf/ambari/blob/c57e243d/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java -- diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java index 0670f1a..810129b 100644 --- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java +++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java @@ -27,6 +27,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; +import java.security.PrivilegedExceptionAction; import java.util.Map; public class HdfsUtil { @@ -38,13 +39,19 @@ public class HdfsUtil { * @param filePath path to file * @param content new content of file */ - public static void putStringToFile(HdfsApi hdfs, String filePath, String content) throws HdfsApiException { -FSDataOutputStream stream; + public static void putStringToFile(final HdfsApi hdfs,final String filePath, final String content) throws HdfsApiException { + try { synchronized (hdfs) { -stream = hdfs.create(filePath, true); -stream.write(content.getBytes()); -stream.close(); +hdfs.execute(new PrivilegedExceptionAction() { + @Override + public Void run() throws Exception { +final FSDataOutputStream stream = hdfs.create(filePath, true); +stream.write(content.getBytes()); +stream.close(); +return null; + } +}, true); } } catch (IOException e) { throw new HdfsApiException("HDFS020 Could not write file " + filePath, e);
ambari git commit: AMBARI-21569.Users randomly getting "HDFS020 Could not write file" exceptions while running query from Hive View(Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/branch-2.5 603c3fd74 -> adc1fed15 AMBARI-21569.Users randomly getting "HDFS020 Could not write file" exceptions while running query from Hive View(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/adc1fed1 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/adc1fed1 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/adc1fed1 Branch: refs/heads/branch-2.5 Commit: adc1fed15f57b427ee6fbaeb4b747004492f49e4 Parents: 603c3fd Author: Venkata SairamAuthored: Thu Nov 23 15:50:22 2017 +0530 Committer: Venkata Sairam Committed: Thu Nov 23 15:50:22 2017 +0530 -- .../apache/ambari/view/utils/hdfs/HdfsApi.java| 18 -- .../apache/ambari/view/utils/hdfs/HdfsUtil.java | 17 - 2 files changed, 28 insertions(+), 7 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/adc1fed1/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java -- diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java index 90fa483..5bce7ba 100644 --- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java +++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java @@ -386,7 +386,20 @@ public class HdfsApi { * @throws IOException * @throws InterruptedException */ - public T execute(PrivilegedExceptionAction action) + public T execute(PrivilegedExceptionAction action) throws IOException, InterruptedException { +return this.execute(action, false); + } + + + /** + * Executes action on HDFS using doAs + * @param action strategy object + * @param result type + * @return result of operation + * @throws IOException + * @throws InterruptedException + */ + public T execute(PrivilegedExceptionAction action, boolean alwaysRetry) throws IOException, InterruptedException { T 
result = null; @@ -401,7 +414,7 @@ public class HdfsApi { result = ugi.doAs(action); succeeded = true; } catch (IOException ex) { -if (!ex.getMessage().contains("Cannot obtain block length for")) { +if (!alwaysRetry && !ex.getMessage().contains("Cannot obtain block length for")) { throw ex; } if (tryNumber >= 3) { @@ -409,6 +422,7 @@ public class HdfsApi { } LOG.info("HDFS threw 'IOException: Cannot obtain block length' exception. " + "Retrying... Try #" + (tryNumber + 1)); +LOG.error("Retrying: " + ex.getMessage(),ex); Thread.sleep(1000); //retry after 1 second } } while (!succeeded); http://git-wip-us.apache.org/repos/asf/ambari/blob/adc1fed1/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java -- diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java index 0670f1a..810129b 100644 --- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java +++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java @@ -27,6 +27,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; +import java.security.PrivilegedExceptionAction; import java.util.Map; public class HdfsUtil { @@ -38,13 +39,19 @@ public class HdfsUtil { * @param filePath path to file * @param content new content of file */ - public static void putStringToFile(HdfsApi hdfs, String filePath, String content) throws HdfsApiException { -FSDataOutputStream stream; + public static void putStringToFile(final HdfsApi hdfs,final String filePath, final String content) throws HdfsApiException { + try { synchronized (hdfs) { -stream = hdfs.create(filePath, true); -stream.write(content.getBytes()); -stream.close(); +hdfs.execute(new PrivilegedExceptionAction() { + @Override + public Void run() throws Exception { +final FSDataOutputStream stream = hdfs.create(filePath, true); 
+stream.write(content.getBytes()); +stream.close(); +return null; + } +}, true); } } catch (IOException e) { throw new HdfsApiException("HDFS020 Could not write file " + filePath, e);
ambari git commit: AMBARI-21569.Users randomly getting "HDFS020 Could not write file" exceptions while running query from Hive View(Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/trunk 513602198 -> 8e36662ae AMBARI-21569.Users randomly getting "HDFS020 Could not write file" exceptions while running query from Hive View(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8e36662a Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8e36662a Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8e36662a Branch: refs/heads/trunk Commit: 8e36662ae1dffe7cb637b3d1edb38278f0111012 Parents: 5136021 Author: Venkata SairamAuthored: Thu Nov 23 15:48:52 2017 +0530 Committer: Venkata Sairam Committed: Thu Nov 23 15:48:52 2017 +0530 -- .../org/apache/ambari/view/utils/hdfs/HdfsApi.java | 16 +++- .../apache/ambari/view/utils/hdfs/HdfsUtil.java| 17 - 2 files changed, 27 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/8e36662a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java -- diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java index 3db2081..812cd54 100644 --- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java +++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java @@ -485,7 +485,20 @@ public class HdfsApi { * @throws IOException * @throws InterruptedException */ - public T execute(PrivilegedExceptionAction action) + public T execute(PrivilegedExceptionAction action) throws IOException, InterruptedException { +return this.execute(action, false); + } + + + /** + * Executes action on HDFS using doAs + * @param action strategy object + * @param result type + * @return result of operation + * @throws IOException + * @throws InterruptedException + */ + public T execute(PrivilegedExceptionAction action, boolean alwaysRetry) throws IOException, InterruptedException { T 
result = null; @@ -508,6 +521,7 @@ public class HdfsApi { } LOG.info("HDFS threw 'IOException: Cannot obtain block length' exception. " + "Retrying... Try #" + (tryNumber + 1)); +LOG.error("Retrying: " + ex.getMessage(),ex); Thread.sleep(1000); //retry after 1 second } } while (!succeeded); http://git-wip-us.apache.org/repos/asf/ambari/blob/8e36662a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java -- diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java index 0670f1a..810129b 100644 --- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java +++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java @@ -27,6 +27,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; +import java.security.PrivilegedExceptionAction; import java.util.Map; public class HdfsUtil { @@ -38,13 +39,19 @@ public class HdfsUtil { * @param filePath path to file * @param content new content of file */ - public static void putStringToFile(HdfsApi hdfs, String filePath, String content) throws HdfsApiException { -FSDataOutputStream stream; + public static void putStringToFile(final HdfsApi hdfs,final String filePath, final String content) throws HdfsApiException { + try { synchronized (hdfs) { -stream = hdfs.create(filePath, true); -stream.write(content.getBytes()); -stream.close(); +hdfs.execute(new PrivilegedExceptionAction() { + @Override + public Void run() throws Exception { +final FSDataOutputStream stream = hdfs.create(filePath, true); +stream.write(content.getBytes()); +stream.close(); +return null; + } +}, true); } } catch (IOException e) { throw new HdfsApiException("HDFS020 Could not write file " + filePath, e);
ambari git commit: AMBARI-22502.Workflow Manager View - FS node will overwrite internal commands and replace them with blank "move" commands when reopening the node(Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/branch-2.6 677e27e65 -> 099e0185d AMBARI-22502.Workflow Manager View - FS node will overwrite internal commands and replace them with blank "move" commands when reopening the node(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/099e0185 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/099e0185 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/099e0185 Branch: refs/heads/branch-2.6 Commit: 099e0185dc7d9b8d14f267dce0c113f819275ded Parents: 677e27e Author: Venkata SairamAuthored: Thu Nov 23 15:36:57 2017 +0530 Committer: Venkata Sairam Committed: Thu Nov 23 15:40:57 2017 +0530 -- .../src/main/resources/ui/app/domain/workflow-importer.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/099e0185/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js -- diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js index 84a789d..2afc304 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js @@ -112,7 +112,8 @@ var WorkflowImporter= Ember.Object.extend({ if (nodeHandler){ if (Ember.isArray(workflowAppJson[key])){ workflowAppJson[key].forEach(function(jsonObj){ -var node=nodeHandler.handleImportNode(key,jsonObj,workflow,xmlDoc); +var actionDom = xmlDoc.find("action[name='" + jsonObj._name + "']"); +var node = nodeHandler.handleImportNode(key,jsonObj,workflow,actionDom); nodeMap.set(jsonObj._name,{json:jsonObj,node:node}); }); }else{
ambari git commit: AMBARI-22502.Workflow Manager View - FS node will overwrite internal commands and replace them with blank "move" commands when reopening the node(Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/branch-2.5 1d19544b9 -> 603c3fd74 AMBARI-22502.Workflow Manager View - FS node will overwrite internal commands and replace them with blank "move" commands when reopening the node(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/603c3fd7 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/603c3fd7 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/603c3fd7 Branch: refs/heads/branch-2.5 Commit: 603c3fd741d115b4009eac5fcf2faee3283f831c Parents: 1d19544 Author: Venkata SairamAuthored: Thu Nov 23 15:36:57 2017 +0530 Committer: Venkata Sairam Committed: Thu Nov 23 15:38:11 2017 +0530 -- .../src/main/resources/ui/app/domain/workflow-importer.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/603c3fd7/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js -- diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js index 84a789d..2afc304 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js @@ -112,7 +112,8 @@ var WorkflowImporter= Ember.Object.extend({ if (nodeHandler){ if (Ember.isArray(workflowAppJson[key])){ workflowAppJson[key].forEach(function(jsonObj){ -var node=nodeHandler.handleImportNode(key,jsonObj,workflow,xmlDoc); +var actionDom = xmlDoc.find("action[name='" + jsonObj._name + "']"); +var node = nodeHandler.handleImportNode(key,jsonObj,workflow,actionDom); nodeMap.set(jsonObj._name,{json:jsonObj,node:node}); }); }else{
ambari git commit: AMBARI-22502.Workflow Manager View - FS node will overwrite internal commands and replace them with blank "move" commands when reopening the node(Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/trunk 20dee7f81 -> 513602198 AMBARI-22502.Workflow Manager View - FS node will overwrite internal commands and replace them with blank "move" commands when reopening the node(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/51360219 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/51360219 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/51360219 Branch: refs/heads/trunk Commit: 51360219866386d8103326def2a1d412348414ed Parents: 20dee7f Author: Venkata SairamAuthored: Thu Nov 23 15:36:57 2017 +0530 Committer: Venkata Sairam Committed: Thu Nov 23 15:36:57 2017 +0530 -- .../src/main/resources/ui/app/domain/workflow-importer.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/51360219/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js -- diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js index 84a789d..2afc304 100644 --- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js +++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow-importer.js @@ -112,7 +112,8 @@ var WorkflowImporter= Ember.Object.extend({ if (nodeHandler){ if (Ember.isArray(workflowAppJson[key])){ workflowAppJson[key].forEach(function(jsonObj){ -var node=nodeHandler.handleImportNode(key,jsonObj,workflow,xmlDoc); +var actionDom = xmlDoc.find("action[name='" + jsonObj._name + "']"); +var node = nodeHandler.handleImportNode(key,jsonObj,workflow,actionDom); nodeMap.set(jsonObj._name,{json:jsonObj,node:node}); }); }else{
ambari git commit: AMBARI-22373.Disable auto config of interpreter.json at the time of installation(Prabhjyot Singh via Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/branch-2.6 f4f9d6862 -> 3417682d9 AMBARI-22373.Disable auto config of interpreter.json at the time of installation(Prabhjyot Singh via Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3417682d Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3417682d Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3417682d Branch: refs/heads/branch-2.6 Commit: 3417682d95b14d312e35a1fd381b2a5eb4ff02ce Parents: f4f9d68 Author: Venkata SairamAuthored: Thu Nov 9 11:50:06 2017 +0530 Committer: Venkata Sairam Committed: Thu Nov 9 11:55:28 2017 +0530 -- .../ZEPPELIN/0.7.0/package/scripts/master.py | 11 ++- 1 file changed, 10 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/3417682d/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py index 6ccdfba..5efc277 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py @@ -257,7 +257,6 @@ class Master(Script): if not glob.glob(params.conf_dir + "/interpreter.json") and \ not os.path.exists(params.conf_dir + "/interpreter.json"): self.create_interpreter_json() - self.update_zeppelin_interpreter() if params.zeppelin_interpreter_config_upgrade == True: self.reset_interpreter_settings() @@ -599,6 +598,16 @@ class Master(Script): group=params.zeppelin_group, mode=0664) +if params.conf_stored_in_hdfs: + params.HdfsResource(self.get_zeppelin_conf_FS(params), + type="file", + action="create_on_execute", + source=format("{params.conf_dir}/interpreter.json"), + owner=params.zeppelin_user, + 
recursive_chown=True, + recursive_chmod=True, + replace_existing_files=True) + def get_zeppelin_spark_dependencies(self): import params return glob.glob(params.zeppelin_dir + '/interpreter/spark/dep/zeppelin-spark-dependencies*.jar')
ambari git commit: AMBARI-22373.Disable auto config of interpreter.json at the time of installation(Prabhjyot Singh via Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/trunk 7074e6e8a -> 5f714cee3 AMBARI-22373.Disable auto config of interpreter.json at the time of installation(Prabhjyot Singh via Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5f714cee Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5f714cee Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5f714cee Branch: refs/heads/trunk Commit: 5f714cee399a1671f73abd8d405049a45d4c743c Parents: 7074e6e Author: Venkata SairamAuthored: Thu Nov 9 11:50:06 2017 +0530 Committer: Venkata Sairam Committed: Thu Nov 9 11:50:06 2017 +0530 -- .../ZEPPELIN/0.7.0/package/scripts/master.py | 11 ++- 1 file changed, 10 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/5f714cee/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py index 6ccdfba..5efc277 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py @@ -257,7 +257,6 @@ class Master(Script): if not glob.glob(params.conf_dir + "/interpreter.json") and \ not os.path.exists(params.conf_dir + "/interpreter.json"): self.create_interpreter_json() - self.update_zeppelin_interpreter() if params.zeppelin_interpreter_config_upgrade == True: self.reset_interpreter_settings() @@ -599,6 +598,16 @@ class Master(Script): group=params.zeppelin_group, mode=0664) +if params.conf_stored_in_hdfs: + params.HdfsResource(self.get_zeppelin_conf_FS(params), + type="file", + action="create_on_execute", + source=format("{params.conf_dir}/interpreter.json"), + owner=params.zeppelin_user, + recursive_chown=True, 
+ recursive_chmod=True, + replace_existing_files=True) + def get_zeppelin_spark_dependencies(self): import params return glob.glob(params.zeppelin_dir + '/interpreter/spark/dep/zeppelin-spark-dependencies*.jar')
ambari git commit: AMBARI-22233.Zeppelin service check failed during EU from 2.5 to 2.6 as ZeppelinServer can not be instantiated(Prabhjyot Singh Via Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/trunk 2a8ac0ded -> 0317cf716 AMBARI-22233.Zeppelin service check failed during EU from 2.5 to 2.6 as ZeppelinServer can not be instantiated(Prabhjyot Singh Via Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0317cf71 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0317cf71 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0317cf71 Branch: refs/heads/trunk Commit: 0317cf7163165f4a6f90b2337bf97382679dfee4 Parents: 2a8ac0d Author: Venkata SairamAuthored: Mon Oct 16 18:20:06 2017 +0530 Committer: Venkata Sairam Committed: Mon Oct 16 18:20:06 2017 +0530 -- .../ZEPPELIN/0.7.0/package/scripts/master.py| 40 +--- .../stacks/2.6/ZEPPELIN/test_zeppelin_070.py| 3 ++ 2 files changed, 37 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/0317cf71/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py index 9d179b8..6ccdfba 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py @@ -192,7 +192,7 @@ class Master(Script): notebook_directory = "/user/" + format("{zeppelin_user}") + "/" + \ params.config['configurations']['zeppelin-config']['zeppelin.notebook.dir'] -if not self.is_path_exists_in_HDFS(notebook_directory, params.zeppelin_user): +if not self.is_directory_exists_in_HDFS(notebook_directory, params.zeppelin_user): # hdfs dfs -mkdir {notebook_directory} params.HdfsResource(format("{notebook_directory}"), type="directory", @@ -243,7 +243,7 @@ class Master(Script): self.create_zeppelin_dir(params) if 
params.conf_stored_in_hdfs: - if not self.is_path_exists_in_HDFS(self.get_zeppelin_conf_FS_directory(params), params.zeppelin_user): + if not self.is_directory_exists_in_HDFS(self.get_zeppelin_conf_FS_directory(params), params.zeppelin_user): # hdfs dfs -mkdir {zeppelin's conf directory} params.HdfsResource(self.get_zeppelin_conf_FS_directory(params), type="directory", @@ -314,15 +314,17 @@ class Master(Script): def get_zeppelin_conf_FS(self, params): return self.get_zeppelin_conf_FS_directory(params) + "/interpreter.json" - def is_path_exists_in_HDFS(self, path, as_user): + def is_directory_exists_in_HDFS(self, path, as_user): kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None)) kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};") -path_exists = shell.call(format("{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -test -e {path};echo $?"), + +#-d: if the path is a directory, return 0. +path_exists = shell.call(format("{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -test -d {path};echo $?"), user=as_user)[1] # if there is no kerberos setup then the string will contain "-bash: kinit: command not found" if "\n" in path_exists: - path_exists = path_exists.split("\n")[1] + path_exists = path_exists.split("\n").pop() # '1' means it does not exists if path_exists == '0': @@ -330,6 +332,31 @@ class Master(Script): else: return False + def is_file_exists_in_HDFS(self, path, as_user): +kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None)) +kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};") + +#-f: if the path is a file, return 0. 
+path_exists = shell.call(format("{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -test -f {path};echo $?"), + user=as_user)[1] + +# if there is no kerberos setup then the string will contain "-bash: kinit: command not found" +if "\n" in path_exists: + path_exists = path_exists.split("\n").pop() + +# '1' means it does not exists +if path_exists == '0': + #-z: if the file is zero length, return 0. + path_exists = shell.call(format("{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -test -z {path};echo $?"), + user=as_user)[1]
ambari git commit: AMBARI-22233.Zeppelin service check failed during EU from 2.5 to 2.6 as ZeppelinServer can not be instantiated(Prabhjyot Singh Via Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/branch-2.6 15d156b0d -> e99bfd023 AMBARI-22233.Zeppelin service check failed during EU from 2.5 to 2.6 as ZeppelinServer can not be instantiated(Prabhjyot Singh Via Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e99bfd02 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e99bfd02 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e99bfd02 Branch: refs/heads/branch-2.6 Commit: e99bfd023621e352d6bb2328d14a61d4883c926d Parents: 15d156b Author: Venkata SairamAuthored: Mon Oct 16 18:18:45 2017 +0530 Committer: Venkata Sairam Committed: Mon Oct 16 18:18:45 2017 +0530 -- .../ZEPPELIN/0.7.0/package/scripts/master.py| 40 +--- 1 file changed, 34 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/e99bfd02/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py index 9d179b8..6ccdfba 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py @@ -192,7 +192,7 @@ class Master(Script): notebook_directory = "/user/" + format("{zeppelin_user}") + "/" + \ params.config['configurations']['zeppelin-config']['zeppelin.notebook.dir'] -if not self.is_path_exists_in_HDFS(notebook_directory, params.zeppelin_user): +if not self.is_directory_exists_in_HDFS(notebook_directory, params.zeppelin_user): # hdfs dfs -mkdir {notebook_directory} params.HdfsResource(format("{notebook_directory}"), type="directory", @@ -243,7 +243,7 @@ class Master(Script): self.create_zeppelin_dir(params) if params.conf_stored_in_hdfs: - if not 
self.is_path_exists_in_HDFS(self.get_zeppelin_conf_FS_directory(params), params.zeppelin_user): + if not self.is_directory_exists_in_HDFS(self.get_zeppelin_conf_FS_directory(params), params.zeppelin_user): # hdfs dfs -mkdir {zeppelin's conf directory} params.HdfsResource(self.get_zeppelin_conf_FS_directory(params), type="directory", @@ -314,15 +314,17 @@ class Master(Script): def get_zeppelin_conf_FS(self, params): return self.get_zeppelin_conf_FS_directory(params) + "/interpreter.json" - def is_path_exists_in_HDFS(self, path, as_user): + def is_directory_exists_in_HDFS(self, path, as_user): kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None)) kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};") -path_exists = shell.call(format("{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -test -e {path};echo $?"), + +#-d: if the path is a directory, return 0. +path_exists = shell.call(format("{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -test -d {path};echo $?"), user=as_user)[1] # if there is no kerberos setup then the string will contain "-bash: kinit: command not found" if "\n" in path_exists: - path_exists = path_exists.split("\n")[1] + path_exists = path_exists.split("\n").pop() # '1' means it does not exists if path_exists == '0': @@ -330,6 +332,31 @@ class Master(Script): else: return False + def is_file_exists_in_HDFS(self, path, as_user): +kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None)) +kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};") + +#-f: if the path is a file, return 0. 
+path_exists = shell.call(format("{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -test -f {path};echo $?"), + user=as_user)[1] + +# if there is no kerberos setup then the string will contain "-bash: kinit: command not found" +if "\n" in path_exists: + path_exists = path_exists.split("\n").pop() + +# '1' means it does not exists +if path_exists == '0': + #-z: if the file is zero length, return 0. + path_exists = shell.call(format("{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -test -z {path};echo $?"), + user=as_user)[1] + + if "\n" in path_exists: +
ambari git commit: AMBARI-22223.jdbc(spark[2]) URLs not configured correctly for secure clusters(Prabhjyot Singh via Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/branch-2.6 2d1100ded -> 15d156b0d AMBARI-22223.jdbc(spark[2]) URLs not configured correctly for secure clusters(Prabhjyot Singh via Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/15d156b0 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/15d156b0 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/15d156b0 Branch: refs/heads/branch-2.6 Commit: 15d156b0d340562fe41ceb2ecec91be5c5ce485d Parents: 2d1100d Author: Venkata Sairam Authored: Mon Oct 16 18:15:58 2017 +0530 Committer: Venkata Sairam Committed: Mon Oct 16 18:16:41 2017 +0530 -- .../ZEPPELIN/0.7.0/package/scripts/master.py | 12 .../ZEPPELIN/0.7.0/package/scripts/params.py | 15 +-- 2 files changed, 17 insertions(+), 10 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/15d156b0/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py index 6a84d79..9d179b8 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py @@ -492,8 +492,10 @@ class Master(Script): interpreter['properties']['spark.proxy.user.property'] = 'hive.server2.proxy.user' interpreter['properties']['spark.url'] = 'jdbc:hive2://' + \ params.spark_thrift_server_hosts + ':' + params.spark_hive_thrift_port + '/' - if params.spark_hive_principal: -interpreter['properties']['spark.url'] += ';principal=' + params.spark_hive_principal + if params.hive_principal: +interpreter['properties']['spark.url'] += ';principal=' + params.hive_principal + if params.hive_transport_mode: +interpreter['properties']['spark.url'] += ';transportMode='
+ params.hive_transport_mode if 'spark.splitQueries' not in interpreter['properties']: interpreter['properties']['spark.splitQueries'] = "true" @@ -504,8 +506,10 @@ class Master(Script): interpreter['properties']['spark2.proxy.user.property'] = 'hive.server2.proxy.user' interpreter['properties']['spark2.url'] = 'jdbc:hive2://' + \ params.spark2_thrift_server_hosts + ':' + params.spark2_hive_thrift_port + '/' - if params.spark_hive_principal: -interpreter['properties']['spark2.url'] += ';principal=' + params.spark2_hive_principal + if params.hive_principal: +interpreter['properties']['spark2.url'] += ';principal=' + params.hive_principal + if params.hive_transport_mode: +interpreter['properties']['spark2.url'] += ';transportMode=' + params.hive_transport_mode if 'spark2.splitQueries' not in interpreter['properties']: interpreter['properties']['spark2.splitQueries'] = "true" http://git-wip-us.apache.org/repos/asf/ambari/blob/15d156b0/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py index ade418e..c2a3317 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py @@ -162,13 +162,19 @@ if 'hive_server_interactive_hosts' in master_configs and len(master_configs['hiv spark_thrift_server_hosts = None spark_hive_thrift_port = None spark_hive_principal = None +hive_principal = None +hive_transport_mode = None + +if 'hive-site' in config['configurations']: + if 'hive.server2.authentication.kerberos.principal' in config['configurations']['hive-site']: +hive_principal = config['configurations']['hive-site']['hive.server2.authentication.kerberos.principal'] + if 'hive.server2.transport.mode' in 
config['configurations']['hive-site']: +hive_transport_mode = config['configurations']['hive-site']['hive.server2.transport.mode'] + if 'spark_thriftserver_hosts' in master_configs and len(master_configs['spark_thriftserver_hosts']) != 0: spark_thrift_server_hosts = str(master_configs['spark_thriftserver_hosts'][0])
ambari git commit: AMBARI-22223.jdbc(spark[2]) URLs not configured correctly for secure clusters(Prabhjyot Singh via Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/trunk d4458daa9 -> 2a8ac0ded AMBARI-22223.jdbc(spark[2]) URLs not configured correctly for secure clusters(Prabhjyot Singh via Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2a8ac0de Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2a8ac0de Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2a8ac0de Branch: refs/heads/trunk Commit: 2a8ac0ded9a9916dd71b84ac21a2a6165db60284 Parents: d4458da Author: Venkata Sairam Authored: Mon Oct 16 18:15:58 2017 +0530 Committer: Venkata Sairam Committed: Mon Oct 16 18:15:58 2017 +0530 -- .../ZEPPELIN/0.7.0/package/scripts/master.py | 12 .../ZEPPELIN/0.7.0/package/scripts/params.py | 15 +-- 2 files changed, 17 insertions(+), 10 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/2a8ac0de/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py index 6a84d79..9d179b8 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py @@ -492,8 +492,10 @@ class Master(Script): interpreter['properties']['spark.proxy.user.property'] = 'hive.server2.proxy.user' interpreter['properties']['spark.url'] = 'jdbc:hive2://' + \ params.spark_thrift_server_hosts + ':' + params.spark_hive_thrift_port + '/' - if params.spark_hive_principal: -interpreter['properties']['spark.url'] += ';principal=' + params.spark_hive_principal + if params.hive_principal: +interpreter['properties']['spark.url'] += ';principal=' + params.hive_principal + if params.hive_transport_mode: +interpreter['properties']['spark.url'] += ';transportMode=' + 
params.hive_transport_mode if 'spark.splitQueries' not in interpreter['properties']: interpreter['properties']['spark.splitQueries'] = "true" @@ -504,8 +506,10 @@ class Master(Script): interpreter['properties']['spark2.proxy.user.property'] = 'hive.server2.proxy.user' interpreter['properties']['spark2.url'] = 'jdbc:hive2://' + \ params.spark2_thrift_server_hosts + ':' + params.spark2_hive_thrift_port + '/' - if params.spark_hive_principal: -interpreter['properties']['spark2.url'] += ';principal=' + params.spark2_hive_principal + if params.hive_principal: +interpreter['properties']['spark2.url'] += ';principal=' + params.hive_principal + if params.hive_transport_mode: +interpreter['properties']['spark2.url'] += ';transportMode=' + params.hive_transport_mode if 'spark2.splitQueries' not in interpreter['properties']: interpreter['properties']['spark2.splitQueries'] = "true" http://git-wip-us.apache.org/repos/asf/ambari/blob/2a8ac0de/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py index e69037c..2290a7f 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py @@ -162,13 +162,19 @@ if 'hive_server_interactive_hosts' in master_configs and len(master_configs['hiv spark_thrift_server_hosts = None spark_hive_thrift_port = None spark_hive_principal = None +hive_principal = None +hive_transport_mode = None + +if 'hive-site' in config['configurations']: + if 'hive.server2.authentication.kerberos.principal' in config['configurations']['hive-site']: +hive_principal = config['configurations']['hive-site']['hive.server2.authentication.kerberos.principal'] + if 'hive.server2.transport.mode' in 
config['configurations']['hive-site']: +hive_transport_mode = config['configurations']['hive-site']['hive.server2.transport.mode'] + if 'spark_thriftserver_hosts' in master_configs and len(master_configs['spark_thriftserver_hosts']) != 0: spark_thrift_server_hosts = str(master_configs['spark_thriftserver_hosts'][0]) if
ambari git commit: AMBARI-22229.Handle upload of interpreter.json to remote storage in Ambari(Prabhjyot Singh via Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/branch-2.6 db67f0385 -> faef3a22e AMBARI-22229.Handle upload of interpreter.json to remote storage in Ambari(Prabhjyot Singh via Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/faef3a22 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/faef3a22 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/faef3a22 Branch: refs/heads/branch-2.6 Commit: faef3a22ed2801f4e931074045b37f691bd9977f Parents: db67f03 Author: Venkata Sairam Authored: Fri Oct 13 15:23:33 2017 +0530 Committer: Venkata Sairam Committed: Fri Oct 13 15:23:33 2017 +0530 -- .../common-services/ZEPPELIN/0.7.0/package/scripts/master.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/faef3a22/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py index a8b1b32..6a84d79 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py @@ -305,7 +305,8 @@ class Master(Script): def get_zeppelin_conf_FS_directory(self, params): hdfs_interpreter_config = params.config['configurations']['zeppelin-config']['zeppelin.config.fs.dir'] -if not hdfs_interpreter_config.startswith("/"): +# if it doesn't start from "/" or doesn't contains "://" as in hdfs://, file://, etc then make it a absolute path +if not (hdfs_interpreter_config.startswith("/") or '://' in hdfs_interpreter_config): hdfs_interpreter_config = "/user/" + format("{zeppelin_user}") + "/" + hdfs_interpreter_config return hdfs_interpreter_config
ambari git commit: AMBARI-22229.Handle upload of interpreter.json to remote storage in Ambari(Prabhjyot Singh via Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/trunk 0f76c7f9d -> a8ba5e61c AMBARI-22229.Handle upload of interpreter.json to remote storage in Ambari(Prabhjyot Singh via Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a8ba5e61 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a8ba5e61 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a8ba5e61 Branch: refs/heads/trunk Commit: a8ba5e61c50ac92b787d3b2de4cec8d29da92d74 Parents: 0f76c7f Author: Venkata Sairam Authored: Fri Oct 13 15:22:33 2017 +0530 Committer: Venkata Sairam Committed: Fri Oct 13 15:22:33 2017 +0530 -- .../common-services/ZEPPELIN/0.7.0/package/scripts/master.py | 3 ++- .../src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py | 8 2 files changed, 6 insertions(+), 5 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/a8ba5e61/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py index a8b1b32..6a84d79 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py @@ -305,7 +305,8 @@ class Master(Script): def get_zeppelin_conf_FS_directory(self, params): hdfs_interpreter_config = params.config['configurations']['zeppelin-config']['zeppelin.config.fs.dir'] -if not hdfs_interpreter_config.startswith("/"): +# if it doesn't start from "/" or doesn't contains "://" as in hdfs://, file://, etc then make it a absolute path +if not (hdfs_interpreter_config.startswith("/") or '://' in hdfs_interpreter_config): hdfs_interpreter_config = "/user/" + format("{zeppelin_user}") + "/" + hdfs_interpreter_config return hdfs_interpreter_config 
http://git-wip-us.apache.org/repos/asf/ambari/blob/a8ba5e61/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py -- diff --git a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py index e5d0240..400350c 100644 --- a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py +++ b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py @@ -342,7 +342,7 @@ class TestZeppelin070(RMFTestCase): ) self.assertResourceCalled('HdfsResource', - '/user/zeppelin/hdfs:///user/zeppelin/conf', + 'hdfs:///user/zeppelin/conf', security_enabled=False, hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin', keytab=UnknownConfigurationMock(), @@ -368,7 +368,7 @@ class TestZeppelin070(RMFTestCase): self.assertResourceCalled('HdfsResource', - '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json', + 'hdfs:///user/zeppelin/conf/interpreter.json', security_enabled=False, hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin', keytab=UnknownConfigurationMock(), @@ -395,7 +395,7 @@ class TestZeppelin070(RMFTestCase): ) self.assertResourceCalled('HdfsResource', - '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json', + 'hdfs:///user/zeppelin/conf/interpreter.json', security_enabled=False, hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin', keytab=UnknownConfigurationMock(), @@ -421,7 +421,7 @@ class TestZeppelin070(RMFTestCase): group='zeppelin', ) -self.assertResourceCalled('HdfsResource', '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json', +self.assertResourceCalled('HdfsResource', 'hdfs:///user/zeppelin/conf/interpreter.json', security_enabled = False, hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin', keytab = UnknownConfigurationMock(),
ambari git commit: AMBARI-22214.Livy protocol to be set to https in Zeppelin's interpreter setting(Prabhjyot Singh via Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/branch-2.6 f571f9b9a -> 4e69090c4 AMBARI-22214.Livy protocol to be set to https in Zeppelin's interpreter setting(Prabhjyot Singh via Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4e69090c Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4e69090c Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4e69090c Branch: refs/heads/branch-2.6 Commit: 4e69090c435b2b434cd70d965c88d0e312e47ebb Parents: f571f9b Author: Venkata SairamAuthored: Thu Oct 12 17:19:54 2017 +0530 Committer: Venkata Sairam Committed: Thu Oct 12 17:19:54 2017 +0530 -- .../ZEPPELIN/0.7.0/package/scripts/master.py| 69 +--- .../ZEPPELIN/0.7.0/package/scripts/params.py| 5 ++ 2 files changed, 36 insertions(+), 38 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/4e69090c/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py index de023c1..a8b1b32 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py @@ -242,6 +242,17 @@ class Master(Script): if zeppelin_spark_dependencies and os.path.exists(zeppelin_spark_dependencies[0]): self.create_zeppelin_dir(params) +if params.conf_stored_in_hdfs: + if not self.is_path_exists_in_HDFS(self.get_zeppelin_conf_FS_directory(params), params.zeppelin_user): +# hdfs dfs -mkdir {zeppelin's conf directory} +params.HdfsResource(self.get_zeppelin_conf_FS_directory(params), +type="directory", +action="create_on_execute", +owner=params.zeppelin_user, +recursive_chown=True, +recursive_chmod=True +) + # if first_setup: if not 
glob.glob(params.conf_dir + "/interpreter.json") and \ not os.path.exists(params.conf_dir + "/interpreter.json"): @@ -323,21 +334,16 @@ class Master(Script): import json interpreter_config = os.path.join(params.conf_dir, "interpreter.json") -if 'zeppelin.notebook.storage' in params.config['configurations']['zeppelin-config'] \ - and params.config['configurations']['zeppelin-config']['zeppelin.notebook.storage'] == 'org.apache.zeppelin.notebook.repo.FileSystemNotebookRepo': - - if 'zeppelin.config.fs.dir' in params.config['configurations']['zeppelin-config']: -zeppelin_conf_fs = self.get_zeppelin_conf_FS(params) - -if self.is_path_exists_in_HDFS(zeppelin_conf_fs, params.zeppelin_user): - # copy from hdfs to /etc/zeppelin/conf/interpreter.json - kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths',None)) - kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};") - shell.call(format("rm {interpreter_config};" -"{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -get {zeppelin_conf_fs} {interpreter_config}"), - user=params.zeppelin_user) -else: - Logger.info(format("{zeppelin_conf_fs} does not exist. 
Skipping upload of DFS.")) +if params.conf_stored_in_hdfs: + zeppelin_conf_fs = self.get_zeppelin_conf_FS(params) + + if self.is_path_exists_in_HDFS(zeppelin_conf_fs, params.zeppelin_user): +# copy from hdfs to /etc/zeppelin/conf/interpreter.json +kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths',None)) +kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};") +shell.call(format("rm {interpreter_config};" + "{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -get {zeppelin_conf_fs} {interpreter_config}"), + user=params.zeppelin_user) config_content = sudo.read_file(interpreter_config) config_data = json.loads(config_content) @@ -353,28 +359,15 @@ class Master(Script): owner=params.zeppelin_user, content=json.dumps(config_data, indent=2)) -if 'zeppelin.notebook.storage' in params.config['configurations']['zeppelin-config'] \ - and
ambari git commit: AMBARI-22212. Alter logic for storing interpreter.json (Prabhjyot Singh via Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/trunk 3c3b1b810 -> be605cbfd AMBARI-22212.Alter logic for storing interpreter.json(Prabhjyot Singh via Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/be605cbf Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/be605cbf Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/be605cbf Branch: refs/heads/trunk Commit: be605cbfdc977ff927df89fc772b8fbb1f589fce Parents: 3c3b1b8 Author: Venkata SairamAuthored: Thu Oct 12 17:17:07 2017 +0530 Committer: Venkata Sairam Committed: Thu Oct 12 17:17:07 2017 +0530 -- .../ZEPPELIN/0.7.0/package/scripts/master.py| 69 +--- .../ZEPPELIN/0.7.0/package/scripts/params.py| 5 ++ .../stacks/2.6/ZEPPELIN/test_zeppelin_070.py| 52 +++ 3 files changed, 44 insertions(+), 82 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/be605cbf/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py index eaa2cb9..a8b1b32 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py @@ -242,6 +242,17 @@ class Master(Script): if zeppelin_spark_dependencies and os.path.exists(zeppelin_spark_dependencies[0]): self.create_zeppelin_dir(params) +if params.conf_stored_in_hdfs: + if not self.is_path_exists_in_HDFS(self.get_zeppelin_conf_FS_directory(params), params.zeppelin_user): +# hdfs dfs -mkdir {zeppelin's conf directory} +params.HdfsResource(self.get_zeppelin_conf_FS_directory(params), +type="directory", +action="create_on_execute", +owner=params.zeppelin_user, +recursive_chown=True, +recursive_chmod=True +) + # if first_setup: if not 
glob.glob(params.conf_dir + "/interpreter.json") and \ not os.path.exists(params.conf_dir + "/interpreter.json"): @@ -323,21 +334,16 @@ class Master(Script): import json interpreter_config = os.path.join(params.conf_dir, "interpreter.json") -if 'zeppelin.notebook.storage' in params.config['configurations']['zeppelin-config'] \ - and params.config['configurations']['zeppelin-config']['zeppelin.notebook.storage'] == 'org.apache.zeppelin.notebook.repo.FileSystemNotebookRepo': - - if 'zeppelin.config.fs.dir' in params.config['configurations']['zeppelin-config']: -zeppelin_conf_fs = self.get_zeppelin_conf_FS(params) - -if self.is_path_exists_in_HDFS(zeppelin_conf_fs, params.zeppelin_user): - # copy from hdfs to /etc/zeppelin/conf/interpreter.json - kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths',None)) - kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};") - shell.call(format("rm {interpreter_config};" -"{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -get {zeppelin_conf_fs} {interpreter_config}"), - user=params.zeppelin_user) -else: - Logger.info(format("{zeppelin_conf_fs} does not exist. 
Skipping upload of DFS.")) +if params.conf_stored_in_hdfs: + zeppelin_conf_fs = self.get_zeppelin_conf_FS(params) + + if self.is_path_exists_in_HDFS(zeppelin_conf_fs, params.zeppelin_user): +# copy from hdfs to /etc/zeppelin/conf/interpreter.json +kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths',None)) +kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};") +shell.call(format("rm {interpreter_config};" + "{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -get {zeppelin_conf_fs} {interpreter_config}"), + user=params.zeppelin_user) config_content = sudo.read_file(interpreter_config) config_data = json.loads(config_content) @@ -353,28 +359,15 @@ class Master(Script): owner=params.zeppelin_user, content=json.dumps(config_data, indent=2)) -if 'zeppelin.notebook.storage' in params.config['configurations']['zeppelin-config'] \ - and
ambari git commit: AMBARI-22214. Livy protocol to be set to https in Zeppelin's interpreter setting (Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/branch-2.6 4fbe3c763 -> f571f9b9a AMBARI-22214.Livy protocol to be set to https in Zeppelin's interpreter setting(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f571f9b9 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f571f9b9 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f571f9b9 Branch: refs/heads/branch-2.6 Commit: f571f9b9a087719ffb106cb5ec0c21d4b0a5528a Parents: 4fbe3c7 Author: Venkata SairamAuthored: Thu Oct 12 17:05:12 2017 +0530 Committer: Venkata Sairam Committed: Thu Oct 12 17:07:51 2017 +0530 -- .../common-services/ZEPPELIN/0.7.0/package/scripts/master.py | 6 -- .../common-services/ZEPPELIN/0.7.0/package/scripts/params.py | 6 ++ 2 files changed, 10 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/f571f9b9/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py index 10722e0..de023c1 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py @@ -530,14 +530,16 @@ class Master(Script): elif interpreter['group'] == 'livy' and interpreter['name'] == 'livy': if params.livy_livyserver_host: - interpreter['properties']['zeppelin.livy.url'] = "http://; + params.livy_livyserver_host + \ + interpreter['properties']['zeppelin.livy.url'] = params.livy_livyserver_protocol + \ + "://" + params.livy_livyserver_host + \ ":" + params.livy_livyserver_port else: del interpreter_settings[setting_key] elif interpreter['group'] == 'livy' and interpreter['name'] == 'livy2': if params.livy2_livyserver_host: - 
interpreter['properties']['zeppelin.livy.url'] = "http://; + params.livy2_livyserver_host + \ + interpreter['properties']['zeppelin.livy.url'] = params.livy2_livyserver_protocol + \ + "://" + params.livy2_livyserver_host + \ ":" + params.livy2_livyserver_port else: del interpreter_settings[setting_key] http://git-wip-us.apache.org/repos/asf/ambari/blob/f571f9b9/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py index e07da18..99e184b 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py @@ -219,17 +219,23 @@ livy2_hosts = default("/clusterHostInfo/livy2_server_hosts", []) livy_livyserver_host = None livy_livyserver_port = None +livy_livyserver_protocol = 'http' livy2_livyserver_host = None livy2_livyserver_port = None +livy2_livyserver_protocol = 'http' if stack_version_formatted and check_stack_feature(StackFeature.SPARK_LIVY, stack_version_formatted) and \ len(livy_hosts) > 0: livy_livyserver_host = str(livy_hosts[0]) livy_livyserver_port = config['configurations']['livy-conf']['livy.server.port'] + if 'livy.keystore' in config['configurations']['livy-conf']: +livy_livyserver_protocol = 'https' if stack_version_formatted and check_stack_feature(StackFeature.SPARK_LIVY2, stack_version_formatted) and \ len(livy2_hosts) > 0: livy2_livyserver_host = str(livy2_hosts[0]) livy2_livyserver_port = config['configurations']['livy2-conf']['livy.server.port'] + if 'livy.keystore' in config['configurations']['livy2-conf']: +livy2_livyserver_protocol = 'https' hdfs_user = config['configurations']['hadoop-env']['hdfs_user'] security_enabled = config['configurations']['cluster-env']['security_enabled']
ambari git commit: AMBARI-22214. Livy protocol to be set to https in Zeppelin's interpreter setting (Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/trunk b86f53fbe -> 3c3b1b810 AMBARI-22214.Livy protocol to be set to https in Zeppelin's interpreter setting(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3c3b1b81 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3c3b1b81 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3c3b1b81 Branch: refs/heads/trunk Commit: 3c3b1b810ee05490fe2fe370de6132ba31bf8b72 Parents: b86f53f Author: Venkata SairamAuthored: Thu Oct 12 17:05:12 2017 +0530 Committer: Venkata Sairam Committed: Thu Oct 12 17:05:12 2017 +0530 -- .../common-services/ZEPPELIN/0.7.0/package/scripts/master.py | 6 -- .../common-services/ZEPPELIN/0.7.0/package/scripts/params.py | 6 ++ 2 files changed, 10 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/3c3b1b81/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py index 09944bd..eaa2cb9 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py @@ -530,14 +530,16 @@ class Master(Script): elif interpreter['group'] == 'livy' and interpreter['name'] == 'livy': if params.livy_livyserver_host: - interpreter['properties']['zeppelin.livy.url'] = "http://; + params.livy_livyserver_host + \ + interpreter['properties']['zeppelin.livy.url'] = params.livy_livyserver_protocol + \ + "://" + params.livy_livyserver_host + \ ":" + params.livy_livyserver_port else: del interpreter_settings[setting_key] elif interpreter['group'] == 'livy' and interpreter['name'] == 'livy2': if params.livy2_livyserver_host: - 
interpreter['properties']['zeppelin.livy.url'] = "http://; + params.livy2_livyserver_host + \ + interpreter['properties']['zeppelin.livy.url'] = params.livy2_livyserver_protocol + \ + "://" + params.livy2_livyserver_host + \ ":" + params.livy2_livyserver_port else: del interpreter_settings[setting_key] http://git-wip-us.apache.org/repos/asf/ambari/blob/3c3b1b81/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py index 3242f26..b7d3df0 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py @@ -219,17 +219,23 @@ livy2_hosts = default("/clusterHostInfo/livy2_server_hosts", []) livy_livyserver_host = None livy_livyserver_port = None +livy_livyserver_protocol = 'http' livy2_livyserver_host = None livy2_livyserver_port = None +livy2_livyserver_protocol = 'http' if stack_version_formatted and check_stack_feature(StackFeature.SPARK_LIVY, stack_version_formatted) and \ len(livy_hosts) > 0: livy_livyserver_host = str(livy_hosts[0]) livy_livyserver_port = config['configurations']['livy-conf']['livy.server.port'] + if 'livy.keystore' in config['configurations']['livy-conf']: +livy_livyserver_protocol = 'https' if stack_version_formatted and check_stack_feature(StackFeature.SPARK_LIVY2, stack_version_formatted) and \ len(livy2_hosts) > 0: livy2_livyserver_host = str(livy2_hosts[0]) livy2_livyserver_port = config['configurations']['livy2-conf']['livy.server.port'] + if 'livy.keystore' in config['configurations']['livy2-conf']: +livy2_livyserver_protocol = 'https' hdfs_user = config['configurations']['hadoop-env']['hdfs_user'] security_enabled = config['configurations']['cluster-env']['security_enabled']
ambari git commit: AMBARI-22199. Zeppelin start fails due to permission denied error during kinit (Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/branch-2.6 37463f51e -> d55c81d92 AMBARI-22199.Zeppelin start fails due to permission denied error during kinit(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d55c81d9 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d55c81d9 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d55c81d9 Branch: refs/heads/branch-2.6 Commit: d55c81d920d248d1b54ad12cd4be77c79335cf7f Parents: 37463f5 Author: Venkata SairamAuthored: Wed Oct 11 14:48:41 2017 +0530 Committer: Venkata Sairam Committed: Wed Oct 11 14:48:41 2017 +0530 -- .../ZEPPELIN/0.7.0/package/scripts/master.py| 48 1 file changed, 29 insertions(+), 19 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/d55c81d9/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py index d615d06..10722e0 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py @@ -192,8 +192,7 @@ class Master(Script): notebook_directory = "/user/" + format("{zeppelin_user}") + "/" + \ params.config['configurations']['zeppelin-config']['zeppelin.notebook.dir'] - -if self.is_path_exists_in_HDFS(notebook_directory, params.zeppelin_user): +if not self.is_path_exists_in_HDFS(notebook_directory, params.zeppelin_user): # hdfs dfs -mkdir {notebook_directory} params.HdfsResource(format("{notebook_directory}"), type="directory", @@ -231,14 +230,14 @@ class Master(Script): Execute(("chown", "-R", format("{zeppelin_user}") + ":" + format("{zeppelin_group}"), os.path.join(params.zeppelin_dir, "notebook")), sudo=True) +if 
params.security_enabled: + zeppelin_kinit_cmd = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal}; ") + Execute(zeppelin_kinit_cmd, user=params.zeppelin_user) + if 'zeppelin.notebook.storage' in params.config['configurations']['zeppelin-config'] \ and params.config['configurations']['zeppelin-config']['zeppelin.notebook.storage'] == 'org.apache.zeppelin.notebook.repo.FileSystemNotebookRepo': self.check_and_copy_notebook_in_hdfs(params) -if params.security_enabled: -zeppelin_kinit_cmd = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal}; ") -Execute(zeppelin_kinit_cmd, user=params.zeppelin_user) - zeppelin_spark_dependencies = self.get_zeppelin_spark_dependencies() if zeppelin_spark_dependencies and os.path.exists(zeppelin_spark_dependencies[0]): self.create_zeppelin_dir(params) @@ -292,14 +291,17 @@ class Master(Script): if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, format_stack_version(params.version)): stack_select.select_packages(params.version) - def getZeppelinConfFS(self, params): -hdfs_interpreter_config = params.config['configurations']['zeppelin-config']['zeppelin.config.fs.dir'] + "/interpreter.json" + def get_zeppelin_conf_FS_directory(self, params): +hdfs_interpreter_config = params.config['configurations']['zeppelin-config']['zeppelin.config.fs.dir'] if not hdfs_interpreter_config.startswith("/"): hdfs_interpreter_config = "/user/" + format("{zeppelin_user}") + "/" + hdfs_interpreter_config return hdfs_interpreter_config + def get_zeppelin_conf_FS(self, params): +return self.get_zeppelin_conf_FS_directory(params) + "/interpreter.json" + def is_path_exists_in_HDFS(self, path, as_user): kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None)) kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};") @@ -325,17 +327,15 @@ class Master(Script): and 
params.config['configurations']['zeppelin-config']['zeppelin.notebook.storage'] == 'org.apache.zeppelin.notebook.repo.FileSystemNotebookRepo': if 'zeppelin.config.fs.dir' in params.config['configurations']['zeppelin-config']: -zeppelin_conf_fs = self.getZeppelinConfFS(params) +zeppelin_conf_fs = self.get_zeppelin_conf_FS(params) if self.is_path_exists_in_HDFS(zeppelin_conf_fs, params.zeppelin_user):
ambari git commit: AMBARI-22199. Zeppelin start fails due to permission denied error during kinit (Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/trunk 7e885a390 -> 33d67f3ea AMBARI-22199.Zeppelin start fails due to permission denied error during kinit(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/33d67f3e Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/33d67f3e Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/33d67f3e Branch: refs/heads/trunk Commit: 33d67f3ead24fcda4fcdb7d5920ccc76d1743657 Parents: 7e885a3 Author: Venkata SairamAuthored: Wed Oct 11 14:46:39 2017 +0530 Committer: Venkata Sairam Committed: Wed Oct 11 14:46:39 2017 +0530 -- .../ZEPPELIN/0.7.0/package/scripts/master.py| 48 --- .../stacks/2.6/ZEPPELIN/test_zeppelin_070.py| 140 --- 2 files changed, 147 insertions(+), 41 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/33d67f3e/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py index d615d06..09944bd 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py @@ -192,8 +192,7 @@ class Master(Script): notebook_directory = "/user/" + format("{zeppelin_user}") + "/" + \ params.config['configurations']['zeppelin-config']['zeppelin.notebook.dir'] - -if self.is_path_exists_in_HDFS(notebook_directory, params.zeppelin_user): +if not self.is_path_exists_in_HDFS(notebook_directory, params.zeppelin_user): # hdfs dfs -mkdir {notebook_directory} params.HdfsResource(format("{notebook_directory}"), type="directory", @@ -231,14 +230,14 @@ class Master(Script): Execute(("chown", "-R", format("{zeppelin_user}") + ":" + format("{zeppelin_group}"), 
os.path.join(params.zeppelin_dir, "notebook")), sudo=True) +if params.security_enabled: + zeppelin_kinit_cmd = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal}; ") + Execute(zeppelin_kinit_cmd, user=params.zeppelin_user) + if 'zeppelin.notebook.storage' in params.config['configurations']['zeppelin-config'] \ and params.config['configurations']['zeppelin-config']['zeppelin.notebook.storage'] == 'org.apache.zeppelin.notebook.repo.FileSystemNotebookRepo': self.check_and_copy_notebook_in_hdfs(params) -if params.security_enabled: -zeppelin_kinit_cmd = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal}; ") -Execute(zeppelin_kinit_cmd, user=params.zeppelin_user) - zeppelin_spark_dependencies = self.get_zeppelin_spark_dependencies() if zeppelin_spark_dependencies and os.path.exists(zeppelin_spark_dependencies[0]): self.create_zeppelin_dir(params) @@ -292,14 +291,17 @@ class Master(Script): if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, format_stack_version(params.version)): stack_select.select_packages(params.version) - def getZeppelinConfFS(self, params): -hdfs_interpreter_config = params.config['configurations']['zeppelin-config']['zeppelin.config.fs.dir'] + "/interpreter.json" + def get_zeppelin_conf_FS_directory(self, params): +hdfs_interpreter_config = params.config['configurations']['zeppelin-config']['zeppelin.config.fs.dir'] if not hdfs_interpreter_config.startswith("/"): hdfs_interpreter_config = "/user/" + format("{zeppelin_user}") + "/" + hdfs_interpreter_config return hdfs_interpreter_config + def get_zeppelin_conf_FS(self, params): +return self.get_zeppelin_conf_FS_directory(params) + "/interpreter.json" + def is_path_exists_in_HDFS(self, path, as_user): kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None)) kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} 
{zeppelin_kerberos_principal};") @@ -325,17 +327,15 @@ class Master(Script): and params.config['configurations']['zeppelin-config']['zeppelin.notebook.storage'] == 'org.apache.zeppelin.notebook.repo.FileSystemNotebookRepo': if 'zeppelin.config.fs.dir' in params.config['configurations']['zeppelin-config']: -zeppelin_conf_fs = self.getZeppelinConfFS(params) +zeppelin_conf_fs = self.get_zeppelin_conf_FS(params) if
ambari git commit: AMBARI-22153. On Zeppelin restart sometimes interpreter settings get reset (Prabhjyot Singh via Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/branch-2.6 eaddb0f15 -> 2eb3d78d9 AMBARI-22153.On Zeppelin restart sometimes interpreter settings get reset(Prabhjyot Singh via Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2eb3d78d Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2eb3d78d Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2eb3d78d Branch: refs/heads/branch-2.6 Commit: 2eb3d78d99b27099553f5e36676fc080e1bf Parents: eaddb0f Author: Venkata SairamAuthored: Mon Oct 9 12:07:11 2017 +0530 Committer: Venkata Sairam Committed: Mon Oct 9 12:07:11 2017 +0530 -- .../ZEPPELIN/0.7.0/package/scripts/master.py| 33 +--- 1 file changed, 21 insertions(+), 12 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/2eb3d78d/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py index a450fb6..d615d06 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py @@ -192,18 +192,8 @@ class Master(Script): notebook_directory = "/user/" + format("{zeppelin_user}") + "/" + \ params.config['configurations']['zeppelin-config']['zeppelin.notebook.dir'] -kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None)) -kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};") - -notebook_directory_exists = shell.call(format("{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -test -e {notebook_directory};echo $?"), - user=params.zeppelin_user)[1] - -#if there is no kerberos setup then the string will contain 
"-bash: kinit: command not found" -if "\n" in notebook_directory_exists: - notebook_directory_exists = notebook_directory_exists.split("\n")[1] -# '1' means it does not exists -if notebook_directory_exists == '1': +if self.is_path_exists_in_HDFS(notebook_directory, params.zeppelin_user): # hdfs dfs -mkdir {notebook_directory} params.HdfsResource(format("{notebook_directory}"), type="directory", @@ -310,6 +300,22 @@ class Master(Script): return hdfs_interpreter_config + def is_path_exists_in_HDFS(self, path, as_user): +kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None)) +kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};") +path_exists = shell.call(format("{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -test -e {path};echo $?"), + user=as_user)[1] + +# if there is no kerberos setup then the string will contain "-bash: kinit: command not found" +if "\n" in path_exists: + path_exists = path_exists.split("\n")[1] + +# '1' means it does not exists +if path_exists == '0': + return True +else: + return False + def get_interpreter_settings(self): import params import json @@ -320,12 +326,14 @@ class Master(Script): if 'zeppelin.config.fs.dir' in params.config['configurations']['zeppelin-config']: zeppelin_conf_fs = self.getZeppelinConfFS(params) -if os.path.exists(zeppelin_conf_fs): + +if self.is_path_exists_in_HDFS(zeppelin_conf_fs, params.zeppelin_user): # copy from hdfs to /etc/zeppelin/conf/interpreter.json params.HdfsResource(interpreter_config, type="file", action="download_on_execute", source=zeppelin_conf_fs, + user=params.zeppelin_user, group=params.zeppelin_group, owner=params.zeppelin_user) else: @@ -353,6 +361,7 @@ class Master(Script): type="file", action="create_on_execute", source=interpreter_config, +user=params.zeppelin_user, group=params.zeppelin_group, owner=params.zeppelin_user, replace_existing_files=True)
ambari git commit: AMBARI-22153. On Zeppelin restart sometimes interpreter settings get reset (Prabhjyot Singh via Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/trunk e61556cc2 -> 84e616da7 AMBARI-22153.On Zeppelin restart sometimes interpreter settings get reset(Prabhjyot Singh via Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/84e616da Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/84e616da Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/84e616da Branch: refs/heads/trunk Commit: 84e616da753224c43d62ddaeb8f1ef935c62d876 Parents: e61556c Author: Venkata SairamAuthored: Mon Oct 9 12:04:32 2017 +0530 Committer: Venkata Sairam Committed: Mon Oct 9 12:04:32 2017 +0530 -- .../ZEPPELIN/0.7.0/package/scripts/master.py| 33 +++--- .../stacks/2.6/ZEPPELIN/test_zeppelin_070.py| 101 +-- 2 files changed, 45 insertions(+), 89 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/84e616da/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py -- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py index a450fb6..d615d06 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py @@ -192,18 +192,8 @@ class Master(Script): notebook_directory = "/user/" + format("{zeppelin_user}") + "/" + \ params.config['configurations']['zeppelin-config']['zeppelin.notebook.dir'] -kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None)) -kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};") - -notebook_directory_exists = shell.call(format("{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -test -e {notebook_directory};echo $?"), - user=params.zeppelin_user)[1] - -#if there 
is no kerberos setup then the string will contain "-bash: kinit: command not found" -if "\n" in notebook_directory_exists: - notebook_directory_exists = notebook_directory_exists.split("\n")[1] -# '1' means it does not exists -if notebook_directory_exists == '1': +if self.is_path_exists_in_HDFS(notebook_directory, params.zeppelin_user): # hdfs dfs -mkdir {notebook_directory} params.HdfsResource(format("{notebook_directory}"), type="directory", @@ -310,6 +300,22 @@ class Master(Script): return hdfs_interpreter_config + def is_path_exists_in_HDFS(self, path, as_user): +kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None)) +kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};") +path_exists = shell.call(format("{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -test -e {path};echo $?"), + user=as_user)[1] + +# if there is no kerberos setup then the string will contain "-bash: kinit: command not found" +if "\n" in path_exists: + path_exists = path_exists.split("\n")[1] + +# '1' means it does not exists +if path_exists == '0': + return True +else: + return False + def get_interpreter_settings(self): import params import json @@ -320,12 +326,14 @@ class Master(Script): if 'zeppelin.config.fs.dir' in params.config['configurations']['zeppelin-config']: zeppelin_conf_fs = self.getZeppelinConfFS(params) -if os.path.exists(zeppelin_conf_fs): + +if self.is_path_exists_in_HDFS(zeppelin_conf_fs, params.zeppelin_user): # copy from hdfs to /etc/zeppelin/conf/interpreter.json params.HdfsResource(interpreter_config, type="file", action="download_on_execute", source=zeppelin_conf_fs, + user=params.zeppelin_user, group=params.zeppelin_group, owner=params.zeppelin_user) else: @@ -353,6 +361,7 @@ class Master(Script): type="file", action="create_on_execute", source=interpreter_config, +user=params.zeppelin_user, group=params.zeppelin_group, owner=params.zeppelin_user,
ambari git commit: AMBARI-22096. Entries related to hive query are found in RM UI after killing or stopping the execution of the query. (Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/branch-2.5 5188612e3 -> 363018c02 AMBARI-22096.Entries related to hive query are found in RM UI after killing or stopping the execution of the query.(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/363018c0 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/363018c0 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/363018c0 Branch: refs/heads/branch-2.5 Commit: 363018c025a1fa1c0d126e6630234433b7357825 Parents: 5188612 Author: Venkata SairamAuthored: Sat Sep 30 18:07:30 2017 +0530 Committer: Venkata Sairam Committed: Sat Sep 30 18:08:05 2017 +0530 -- .../resources/ui/app/routes/queries/query.js| 13 ++-- .../src/main/resources/ui/app/services/jobs.js | 31 ++-- 2 files changed, 38 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/363018c0/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js -- diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js index c9af257..9e8b6db 100644 --- a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js +++ b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js @@ -405,6 +405,7 @@ export default Ember.Route.extend(UILoggerMixin, { self.get('controller.model').set('currentJobData', data); self.get('controller.model').set('queryFile', data.job.queryFile); self.get('controller.model').set('logFile', data.job.logFile); +self.get('controller').set('currentJobId', data.job.id); self.get('controller.model').set('currentJobId', data.job.id); ctrlrModel.set('isJobCreated',true); ctrlr.set('isJobCreated',true); @@ -442,9 +443,15 @@ export default Ember.Route.extend(UILoggerMixin, { }, stopQuery(){ - let jobId = this.get('controller.model').get('currentJobId'); - this.get('jobs').stopJob(jobId) -.then( data => 
this.get('controller').set('isJobCancelled', true)); + Ember.run.later(() => { +let jobId = this.get('controller').get('currentJobId'), self = this, ctrlr = self.get('controller'), ctrlrModel = self.get('controller.model'); +this.get('jobs').stopJob(jobId) + .then( data => { + this.get('controller').set('isJobCancelled', true); + }).catch(function (response) { + self.get('controller').set('isJobCancelled', true); + }); + }, 1000); }, showVisualExplain(payloadTitle){ http://git-wip-us.apache.org/repos/asf/ambari/blob/363018c0/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js -- diff --git a/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js b/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js index 36abf49..dd9db00 100644 --- a/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js +++ b/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js @@ -20,6 +20,7 @@ import Ember from 'ember'; export default Ember.Service.extend({ store: Ember.inject.service(), + isCurrentQueryCancelled: false, getQuery(jobId) { let job = this.get('store').peekRecord('job', jobId); if (job) { @@ -31,6 +32,11 @@ export default Ember.Service.extend({ return new Ember.RSVP.Promise((resolve, reject) => { Ember.run.later(() => { +if(this.get('isCurrentQueryCancelled')) { + this.resetCurrentQueryStatus(); + reject('error'); + return; +} this.get('store').findRecord('job', jobId, {reload: true}) .then((job) => { let status = job.get('status').toLowerCase(); @@ -64,10 +70,29 @@ export default Ember.Service.extend({ }, stopJob : function(jobId) { -return this.get('store').findRecord('job', jobId) - .then(job => job.destroyRecord()); +this.setCurrentQueryAsCancelled(); +return new Ember.RSVP.Promise((resolve, reject) => { + let job = this.get('store').peekRecord('job', jobId); + if(job) { + job.destroyRecord(); + } + else { +this.get('store').findRecord('job', jobId, { reload: true }) + .then(job => { + job.deleteRecord(); + return 
resolve(""); + }).catch(function (response) { + return resolve(""); + }); + } +}); + }, + setCurrentQueryAsCancelled() { +this.set('isCurrentQueryCancelled', true); + }, + resetCurrentQueryStatus() { +
ambari git commit: AMBARI-22096.Entries related to hive query are found in RM UI after killing or stopping the execution of the query.(Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/branch-2.6 700228e0a -> cdbe4ba2c AMBARI-22096.Entries related to hive query are found in RM UI after killing or stopping the execution of the query.(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/cdbe4ba2 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/cdbe4ba2 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/cdbe4ba2 Branch: refs/heads/branch-2.6 Commit: cdbe4ba2c663af8a12853537ae0776f19def9c41 Parents: 700228e Author: Venkata SairamAuthored: Sat Sep 30 18:07:30 2017 +0530 Committer: Venkata Sairam Committed: Sat Sep 30 18:08:42 2017 +0530 -- .../resources/ui/app/routes/queries/query.js| 13 ++-- .../src/main/resources/ui/app/services/jobs.js | 31 ++-- 2 files changed, 38 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/cdbe4ba2/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js -- diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js index c9af257..9e8b6db 100644 --- a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js +++ b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js @@ -405,6 +405,7 @@ export default Ember.Route.extend(UILoggerMixin, { self.get('controller.model').set('currentJobData', data); self.get('controller.model').set('queryFile', data.job.queryFile); self.get('controller.model').set('logFile', data.job.logFile); +self.get('controller').set('currentJobId', data.job.id); self.get('controller.model').set('currentJobId', data.job.id); ctrlrModel.set('isJobCreated',true); ctrlr.set('isJobCreated',true); @@ -442,9 +443,15 @@ export default Ember.Route.extend(UILoggerMixin, { }, stopQuery(){ - let jobId = this.get('controller.model').get('currentJobId'); - this.get('jobs').stopJob(jobId) -.then( data => 
this.get('controller').set('isJobCancelled', true)); + Ember.run.later(() => { +let jobId = this.get('controller').get('currentJobId'), self = this, ctrlr = self.get('controller'), ctrlrModel = self.get('controller.model'); +this.get('jobs').stopJob(jobId) + .then( data => { + this.get('controller').set('isJobCancelled', true); + }).catch(function (response) { + self.get('controller').set('isJobCancelled', true); + }); + }, 1000); }, showVisualExplain(payloadTitle){ http://git-wip-us.apache.org/repos/asf/ambari/blob/cdbe4ba2/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js -- diff --git a/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js b/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js index 36abf49..dd9db00 100644 --- a/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js +++ b/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js @@ -20,6 +20,7 @@ import Ember from 'ember'; export default Ember.Service.extend({ store: Ember.inject.service(), + isCurrentQueryCancelled: false, getQuery(jobId) { let job = this.get('store').peekRecord('job', jobId); if (job) { @@ -31,6 +32,11 @@ export default Ember.Service.extend({ return new Ember.RSVP.Promise((resolve, reject) => { Ember.run.later(() => { +if(this.get('isCurrentQueryCancelled')) { + this.resetCurrentQueryStatus(); + reject('error'); + return; +} this.get('store').findRecord('job', jobId, {reload: true}) .then((job) => { let status = job.get('status').toLowerCase(); @@ -64,10 +70,29 @@ export default Ember.Service.extend({ }, stopJob : function(jobId) { -return this.get('store').findRecord('job', jobId) - .then(job => job.destroyRecord()); +this.setCurrentQueryAsCancelled(); +return new Ember.RSVP.Promise((resolve, reject) => { + let job = this.get('store').peekRecord('job', jobId); + if(job) { + job.destroyRecord(); + } + else { +this.get('store').findRecord('job', jobId, { reload: true }) + .then(job => { + job.deleteRecord(); + return 
resolve(""); + }).catch(function (response) { + return resolve(""); + }); + } +}); + }, + setCurrentQueryAsCancelled() { +this.set('isCurrentQueryCancelled', true); + }, + resetCurrentQueryStatus() { +
ambari git commit: AMBARI-22096.Entries related to hive query are found in RM UI after killing or stopping the execution of the query.(Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/trunk 7e6910f74 -> a66e2deba AMBARI-22096.Entries related to hive query are found in RM UI after killing or stopping the execution of the query.(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a66e2deb Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a66e2deb Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a66e2deb Branch: refs/heads/trunk Commit: a66e2deba88aeaa33af868551b31128bcc2b4ce8 Parents: 7e6910f7 Author: Venkata SairamAuthored: Sat Sep 30 18:07:30 2017 +0530 Committer: Venkata Sairam Committed: Sat Sep 30 18:07:30 2017 +0530 -- .../resources/ui/app/routes/queries/query.js| 13 ++-- .../src/main/resources/ui/app/services/jobs.js | 31 ++-- 2 files changed, 38 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/a66e2deb/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js -- diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js index 01e1497..3e5adc1 100644 --- a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js +++ b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js @@ -405,6 +405,7 @@ export default Ember.Route.extend(UILoggerMixin, { self.get('controller.model').set('currentJobData', data); self.get('controller.model').set('queryFile', data.job.queryFile); self.get('controller.model').set('logFile', data.job.logFile); +self.get('controller').set('currentJobId', data.job.id); self.get('controller.model').set('currentJobId', data.job.id); ctrlrModel.set('isJobCreated',true); ctrlr.set('isJobCreated',true); @@ -442,9 +443,15 @@ export default Ember.Route.extend(UILoggerMixin, { }, stopQuery(){ - let jobId = this.get('controller.model').get('currentJobId'); - this.get('jobs').stopJob(jobId) -.then( data => 
this.get('controller').set('isJobCancelled', true)); + Ember.run.later(() => { +let jobId = this.get('controller').get('currentJobId'), self = this, ctrlr = self.get('controller'), ctrlrModel = self.get('controller.model'); +this.get('jobs').stopJob(jobId) + .then( data => { + this.get('controller').set('isJobCancelled', true); + }).catch(function (response) { + self.get('controller').set('isJobCancelled', true); + }); + }, 1000); }, showVisualExplain(payloadTitle){ http://git-wip-us.apache.org/repos/asf/ambari/blob/a66e2deb/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js -- diff --git a/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js b/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js index 36abf49..dd9db00 100644 --- a/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js +++ b/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js @@ -20,6 +20,7 @@ import Ember from 'ember'; export default Ember.Service.extend({ store: Ember.inject.service(), + isCurrentQueryCancelled: false, getQuery(jobId) { let job = this.get('store').peekRecord('job', jobId); if (job) { @@ -31,6 +32,11 @@ export default Ember.Service.extend({ return new Ember.RSVP.Promise((resolve, reject) => { Ember.run.later(() => { +if(this.get('isCurrentQueryCancelled')) { + this.resetCurrentQueryStatus(); + reject('error'); + return; +} this.get('store').findRecord('job', jobId, {reload: true}) .then((job) => { let status = job.get('status').toLowerCase(); @@ -64,10 +70,29 @@ export default Ember.Service.extend({ }, stopJob : function(jobId) { -return this.get('store').findRecord('job', jobId) - .then(job => job.destroyRecord()); +this.setCurrentQueryAsCancelled(); +return new Ember.RSVP.Promise((resolve, reject) => { + let job = this.get('store').peekRecord('job', jobId); + if(job) { + job.destroyRecord(); + } + else { +this.get('store').findRecord('job', jobId, { reload: true }) + .then(job => { + job.deleteRecord(); + return 
resolve(""); + }).catch(function (response) { + return resolve(""); + }); + } +}); + }, + setCurrentQueryAsCancelled() { +this.set('isCurrentQueryCancelled', true); + }, + resetCurrentQueryStatus() { +
ambari git commit: AMBARI-22103.HiveView 1.5 is not showing the Visualization page when accessed over Knox(Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/branch-2.6 9e34f4000 -> 700228e0a AMBARI-22103.HiveView 1.5 is not showing the Visualization page when accessed over Knox(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/700228e0 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/700228e0 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/700228e0 Branch: refs/heads/branch-2.6 Commit: 700228e0a29876348423e91937d81d807f3d04ef Parents: 9e34f40 Author: Venkata SairamAuthored: Sat Sep 30 17:30:20 2017 +0530 Committer: Venkata Sairam Committed: Sat Sep 30 17:32:50 2017 +0530 -- .../ui/hive-web/app/adapters/application.js | 18 ++ .../hive-web/app/controllers/visualization-ui.js | 10 ++ .../resources/ui/hive-web/app/utils/constants.js | 1 + 3 files changed, 25 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/700228e0/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/application.js -- diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/application.js b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/application.js index 2c68b89..cd93407 100644 --- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/application.js +++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/application.js @@ -50,5 +50,23 @@ export default DS.RESTAdapter.extend({ var prefix = constants.adapter.apiPrefix + version + constants.adapter.instancePrefix + instanceName; var url = this._super.apply(this, arguments); return prefix + url; + }, + + buildAssetURL: function () { +var version = constants.adapter.version, +instanceName = constants.adapter.instance; + +var params = window.location.pathname.split('/').filter(function (param) { + return !!param; +}); + +if (params[params.length - 3] === 'HIVE') { + version = params[params.length - 2]; + instanceName = 
params[params.length - 1]; +} +var assetPrefix = constants.adapter.apiPrefix.replace("/" + "api" + "/" + "v1", "").replace("versions/",""); + +var url = assetPrefix + version + "/" + instanceName; +return url; } }); http://git-wip-us.apache.org/repos/asf/ambari/blob/700228e0/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/visualization-ui.js -- diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/visualization-ui.js b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/visualization-ui.js index c908afd..3f5b878 100644 --- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/visualization-ui.js +++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/visualization-ui.js @@ -32,8 +32,8 @@ export default Ember.Controller.extend({ polestarUrl: '', voyagerUrl: '', - polestarPath: 'polestar/#/', - voyagerPath: 'voyager/#/', + polestarPath: 'polestar/index.html#/', + voyagerPath: 'voyager/index.html#/', showDataExplorer: true, showAdvVisulization: false, @@ -79,8 +79,10 @@ export default Ember.Controller.extend({ } this.set("error", null); var id = model.get('id'); - this.set("polestarUrl", this.get('polestarPath') + "?url=" + url); - this.set("voyagerUrl", this.get('voyagerPath') + "?url=" + url); + var pstarUrl = this.container.lookup('adapter:application').buildAssetURL("") + "/" + this.get('polestarPath') + "?url=" + url; + this.set("polestarUrl", pstarUrl); + var vUrl = this.container.lookup('adapter:application').buildAssetURL("") + "/" + this.get('voyagerPath') + "?url=" + url; + this.set("voyagerUrl", vUrl); Ember.run.scheduleOnce('afterRender', this, function(){ self.alterIframe(); }); http://git-wip-us.apache.org/repos/asf/ambari/blob/700228e0/contrib/views/hive-next/src/main/resources/ui/hive-web/app/utils/constants.js -- diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/utils/constants.js 
b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/utils/constants.js index 348454b..fafffc5 100644 --- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/utils/constants.js +++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/utils/constants.js @@ -200,6 +200,7 @@ export default Ember.Object.create({ version: '2.0.0',
ambari git commit: AMBARI-22103.HiveView 1.5 is not showing the Visualization page when accessed over Knox(Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/branch-2.5 6bb6dd9b0 -> 5188612e3 AMBARI-22103.HiveView 1.5 is not showing the Visualization page when accessed over Knox(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5188612e Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5188612e Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5188612e Branch: refs/heads/branch-2.5 Commit: 5188612e3a698716bedc369327b0cee0851690fa Parents: 6bb6dd9 Author: Venkata SairamAuthored: Sat Sep 30 17:30:20 2017 +0530 Committer: Venkata Sairam Committed: Sat Sep 30 17:31:43 2017 +0530 -- .../ui/hive-web/app/adapters/application.js | 18 ++ .../hive-web/app/controllers/visualization-ui.js | 10 ++ .../resources/ui/hive-web/app/utils/constants.js | 1 + 3 files changed, 25 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/5188612e/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/application.js -- diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/application.js b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/application.js index 2c68b89..cd93407 100644 --- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/application.js +++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/application.js @@ -50,5 +50,23 @@ export default DS.RESTAdapter.extend({ var prefix = constants.adapter.apiPrefix + version + constants.adapter.instancePrefix + instanceName; var url = this._super.apply(this, arguments); return prefix + url; + }, + + buildAssetURL: function () { +var version = constants.adapter.version, +instanceName = constants.adapter.instance; + +var params = window.location.pathname.split('/').filter(function (param) { + return !!param; +}); + +if (params[params.length - 3] === 'HIVE') { + version = params[params.length - 2]; + instanceName = 
params[params.length - 1]; +} +var assetPrefix = constants.adapter.apiPrefix.replace("/" + "api" + "/" + "v1", "").replace("versions/",""); + +var url = assetPrefix + version + "/" + instanceName; +return url; } }); http://git-wip-us.apache.org/repos/asf/ambari/blob/5188612e/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/visualization-ui.js -- diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/visualization-ui.js b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/visualization-ui.js index c908afd..3f5b878 100644 --- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/visualization-ui.js +++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/visualization-ui.js @@ -32,8 +32,8 @@ export default Ember.Controller.extend({ polestarUrl: '', voyagerUrl: '', - polestarPath: 'polestar/#/', - voyagerPath: 'voyager/#/', + polestarPath: 'polestar/index.html#/', + voyagerPath: 'voyager/index.html#/', showDataExplorer: true, showAdvVisulization: false, @@ -79,8 +79,10 @@ export default Ember.Controller.extend({ } this.set("error", null); var id = model.get('id'); - this.set("polestarUrl", this.get('polestarPath') + "?url=" + url); - this.set("voyagerUrl", this.get('voyagerPath') + "?url=" + url); + var pstarUrl = this.container.lookup('adapter:application').buildAssetURL("") + "/" + this.get('polestarPath') + "?url=" + url; + this.set("polestarUrl", pstarUrl); + var vUrl = this.container.lookup('adapter:application').buildAssetURL("") + "/" + this.get('voyagerPath') + "?url=" + url; + this.set("voyagerUrl", vUrl); Ember.run.scheduleOnce('afterRender', this, function(){ self.alterIframe(); }); http://git-wip-us.apache.org/repos/asf/ambari/blob/5188612e/contrib/views/hive-next/src/main/resources/ui/hive-web/app/utils/constants.js -- diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/utils/constants.js 
b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/utils/constants.js index 348454b..fafffc5 100644 --- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/utils/constants.js +++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/utils/constants.js @@ -200,6 +200,7 @@ export default Ember.Object.create({ version: '2.0.0',
ambari git commit: AMBARI-22103.HiveView 1.5 is not showing the Visualization page when accessed over Knox(Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/trunk d4378aacb -> 7e6910f74 AMBARI-22103.HiveView 1.5 is not showing the Visualization page when accessed over Knox(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7e6910f7 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7e6910f7 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7e6910f7 Branch: refs/heads/trunk Commit: 7e6910f74b58795fc10de908eaf1103541342ac6 Parents: d4378aa Author: Venkata SairamAuthored: Sat Sep 30 17:30:20 2017 +0530 Committer: Venkata Sairam Committed: Sat Sep 30 17:30:20 2017 +0530 -- .../ui/hive-web/app/adapters/application.js | 18 ++ .../hive-web/app/controllers/visualization-ui.js | 10 ++ .../resources/ui/hive-web/app/utils/constants.js | 1 + 3 files changed, 25 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/7e6910f7/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/application.js -- diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/application.js b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/application.js index 2c68b89..cd93407 100644 --- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/application.js +++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/application.js @@ -50,5 +50,23 @@ export default DS.RESTAdapter.extend({ var prefix = constants.adapter.apiPrefix + version + constants.adapter.instancePrefix + instanceName; var url = this._super.apply(this, arguments); return prefix + url; + }, + + buildAssetURL: function () { +var version = constants.adapter.version, +instanceName = constants.adapter.instance; + +var params = window.location.pathname.split('/').filter(function (param) { + return !!param; +}); + +if (params[params.length - 3] === 'HIVE') { + version = params[params.length - 2]; + instanceName = params[params.length - 
1]; +} +var assetPrefix = constants.adapter.apiPrefix.replace("/" + "api" + "/" + "v1", "").replace("versions/",""); + +var url = assetPrefix + version + "/" + instanceName; +return url; } }); http://git-wip-us.apache.org/repos/asf/ambari/blob/7e6910f7/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/visualization-ui.js -- diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/visualization-ui.js b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/visualization-ui.js index c908afd..3f5b878 100644 --- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/visualization-ui.js +++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/visualization-ui.js @@ -32,8 +32,8 @@ export default Ember.Controller.extend({ polestarUrl: '', voyagerUrl: '', - polestarPath: 'polestar/#/', - voyagerPath: 'voyager/#/', + polestarPath: 'polestar/index.html#/', + voyagerPath: 'voyager/index.html#/', showDataExplorer: true, showAdvVisulization: false, @@ -79,8 +79,10 @@ export default Ember.Controller.extend({ } this.set("error", null); var id = model.get('id'); - this.set("polestarUrl", this.get('polestarPath') + "?url=" + url); - this.set("voyagerUrl", this.get('voyagerPath') + "?url=" + url); + var pstarUrl = this.container.lookup('adapter:application').buildAssetURL("") + "/" + this.get('polestarPath') + "?url=" + url; + this.set("polestarUrl", pstarUrl); + var vUrl = this.container.lookup('adapter:application').buildAssetURL("") + "/" + this.get('voyagerPath') + "?url=" + url; + this.set("voyagerUrl", vUrl); Ember.run.scheduleOnce('afterRender', this, function(){ self.alterIframe(); }); http://git-wip-us.apache.org/repos/asf/ambari/blob/7e6910f7/contrib/views/hive-next/src/main/resources/ui/hive-web/app/utils/constants.js -- diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/utils/constants.js 
b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/utils/constants.js index 348454b..fafffc5 100644 --- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/utils/constants.js +++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/utils/constants.js @@ -200,6 +200,7 @@ export default Ember.Object.create({ version: '2.0.0',
ambari git commit: AMBARI-22091.Hive view2.0 Jobs worksheets doesn't have hyperlink whereas view1.5 History worksheets had hyperlinks pointing to fetched results(Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/branch-2.5 4fc5312c8 -> 6bb6dd9b0 AMBARI-22091.Hive view2.0 Jobs worksheets doesn't have hyperlink whereas view1.5 History worksheets had hyperlinks pointing to fetched results(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6bb6dd9b Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6bb6dd9b Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6bb6dd9b Branch: refs/heads/branch-2.5 Commit: 6bb6dd9b0f99f43d3cd937df7dc50e498b56f3fb Parents: 4fc5312 Author: Venkata SairamAuthored: Sat Sep 30 09:05:33 2017 +0530 Committer: Venkata Sairam Committed: Sat Sep 30 09:08:46 2017 +0530 -- .../resources/ui/app/components/job-item.js | 49 +- .../resources/ui/app/components/jobs-browser.js | 3 + .../src/main/resources/ui/app/routes/jobs.js| 11 ++ .../main/resources/ui/app/routes/queries/new.js | 2 + .../resources/ui/app/routes/queries/query.js| 164 --- .../src/main/resources/ui/app/styles/app.scss | 8 + .../ui/app/templates/components/job-item.hbs| 2 +- .../app/templates/components/jobs-browser.hbs | 2 +- .../main/resources/ui/app/templates/jobs.hbs| 1 + 9 files changed, 177 insertions(+), 65 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/6bb6dd9b/contrib/views/hive20/src/main/resources/ui/app/components/job-item.js -- diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/job-item.js b/contrib/views/hive20/src/main/resources/ui/app/components/job-item.js index 42a3411..0545624 100644 --- a/contrib/views/hive20/src/main/resources/ui/app/components/job-item.js +++ b/contrib/views/hive20/src/main/resources/ui/app/components/job-item.js @@ -17,12 +17,17 @@ */ import Ember from 'ember'; +import UILoggerMixin from '../mixins/ui-logger'; -export default Ember.Component.extend({ +export default Ember.Component.extend(UILoggerMixin, { jobs: Ember.inject.service(), tagName: '', expanded: false, 
expandedValue: null, + store: Ember.inject.service(), + savedQueries: Ember.inject.service(), + + actions: { toggleExpandJob(jobId) { if(this.get('expanded')) { @@ -39,6 +44,48 @@ export default Ember.Component.extend({ }); } +}, +openAsWorksheet(savedQuery){ + + let hasWorksheetModel = this.get('model'), self = this; + let worksheetId; + + if (Ember.isEmpty(hasWorksheetModel)){ +worksheetId = 1; + }else { + +let isWorksheetExist = (this.get('model').filterBy('title', savedQuery.title).get('length') > 0); +if(isWorksheetExist) { + this.sendAction('openWorksheet', savedQuery, true); + return; +} + +let worksheets = this.get('model'); +worksheets.forEach((worksheet) => { + worksheet.set('selected', false); + }); +worksheetId = `worksheet${worksheets.get('length') + 1}`; + } + var isTabExisting = this.get("store").peekRecord('worksheet', savedQuery.id); + if(isTabExisting) { +self.sendAction('openWorksheet', savedQuery, true); +return; + } + this.get("savedQueries").fetchSavedQuery(savedQuery.get('queryFile')).then(function(response) { +let localWs = { + id: savedQuery.get('id'), + title: savedQuery.get('title'), + queryFile: savedQuery.get('queryFile'), + query: response.file.fileContent, + selectedDb : savedQuery.get('dataBase'), + owner: savedQuery.get('owner'), + selected: true +}; +self.sendAction('openWorksheet', localWs); + }, (error) => { +self.get('logger').danger('Failed to load the query', self.extractError(error)); +}); + } } }); http://git-wip-us.apache.org/repos/asf/ambari/blob/6bb6dd9b/contrib/views/hive20/src/main/resources/ui/app/components/jobs-browser.js -- diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/jobs-browser.js b/contrib/views/hive20/src/main/resources/ui/app/components/jobs-browser.js index 7e24843..ae081de 100644 --- a/contrib/views/hive20/src/main/resources/ui/app/components/jobs-browser.js +++ b/contrib/views/hive20/src/main/resources/ui/app/components/jobs-browser.js @@ -93,6 +93,9 @@ export default 
Ember.Component.extend({ clearTitleFilter() { this.set('titleFilter'); +}, +openWorksheet(worksheet, isExisitingWorksheet){ + this.sendAction("openWorksheet", worksheet, isExisitingWorksheet); } } });
ambari git commit: AMBARI-22091.Hive view2.0 Jobs worksheets doesn't have hyperlink whereas view1.5 History worksheets had hyperlinks pointing to fetched results(Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/branch-2.6 27efa3f1a -> 9e34f4000 AMBARI-22091.Hive view2.0 Jobs worksheets doesn't have hyperlink whereas view1.5 History worksheets had hyperlinks pointing to fetched results(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9e34f400 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9e34f400 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9e34f400 Branch: refs/heads/branch-2.6 Commit: 9e34f40009cd60fd8ca3ffdcc992bda4352170ae Parents: 27efa3f Author: Venkata SairamAuthored: Sat Sep 30 09:05:33 2017 +0530 Committer: Venkata Sairam Committed: Sat Sep 30 09:06:57 2017 +0530 -- .../resources/ui/app/components/job-item.js | 49 +- .../resources/ui/app/components/jobs-browser.js | 3 + .../src/main/resources/ui/app/routes/jobs.js| 11 ++ .../main/resources/ui/app/routes/queries/new.js | 2 + .../resources/ui/app/routes/queries/query.js| 164 --- .../src/main/resources/ui/app/styles/app.scss | 8 + .../ui/app/templates/components/job-item.hbs| 2 +- .../app/templates/components/jobs-browser.hbs | 2 +- .../main/resources/ui/app/templates/jobs.hbs| 1 + 9 files changed, 177 insertions(+), 65 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/9e34f400/contrib/views/hive20/src/main/resources/ui/app/components/job-item.js -- diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/job-item.js b/contrib/views/hive20/src/main/resources/ui/app/components/job-item.js index 42a3411..0545624 100644 --- a/contrib/views/hive20/src/main/resources/ui/app/components/job-item.js +++ b/contrib/views/hive20/src/main/resources/ui/app/components/job-item.js @@ -17,12 +17,17 @@ */ import Ember from 'ember'; +import UILoggerMixin from '../mixins/ui-logger'; -export default Ember.Component.extend({ +export default Ember.Component.extend(UILoggerMixin, { jobs: Ember.inject.service(), tagName: '', expanded: false, 
expandedValue: null, + store: Ember.inject.service(), + savedQueries: Ember.inject.service(), + + actions: { toggleExpandJob(jobId) { if(this.get('expanded')) { @@ -39,6 +44,48 @@ export default Ember.Component.extend({ }); } +}, +openAsWorksheet(savedQuery){ + + let hasWorksheetModel = this.get('model'), self = this; + let worksheetId; + + if (Ember.isEmpty(hasWorksheetModel)){ +worksheetId = 1; + }else { + +let isWorksheetExist = (this.get('model').filterBy('title', savedQuery.title).get('length') > 0); +if(isWorksheetExist) { + this.sendAction('openWorksheet', savedQuery, true); + return; +} + +let worksheets = this.get('model'); +worksheets.forEach((worksheet) => { + worksheet.set('selected', false); + }); +worksheetId = `worksheet${worksheets.get('length') + 1}`; + } + var isTabExisting = this.get("store").peekRecord('worksheet', savedQuery.id); + if(isTabExisting) { +self.sendAction('openWorksheet', savedQuery, true); +return; + } + this.get("savedQueries").fetchSavedQuery(savedQuery.get('queryFile')).then(function(response) { +let localWs = { + id: savedQuery.get('id'), + title: savedQuery.get('title'), + queryFile: savedQuery.get('queryFile'), + query: response.file.fileContent, + selectedDb : savedQuery.get('dataBase'), + owner: savedQuery.get('owner'), + selected: true +}; +self.sendAction('openWorksheet', localWs); + }, (error) => { +self.get('logger').danger('Failed to load the query', self.extractError(error)); +}); + } } }); http://git-wip-us.apache.org/repos/asf/ambari/blob/9e34f400/contrib/views/hive20/src/main/resources/ui/app/components/jobs-browser.js -- diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/jobs-browser.js b/contrib/views/hive20/src/main/resources/ui/app/components/jobs-browser.js index 7e24843..ae081de 100644 --- a/contrib/views/hive20/src/main/resources/ui/app/components/jobs-browser.js +++ b/contrib/views/hive20/src/main/resources/ui/app/components/jobs-browser.js @@ -93,6 +93,9 @@ export default 
Ember.Component.extend({ clearTitleFilter() { this.set('titleFilter'); +}, +openWorksheet(worksheet, isExisitingWorksheet){ + this.sendAction("openWorksheet", worksheet, isExisitingWorksheet); } } });
ambari git commit: AMBARI-22091. Hive view 2.0 Jobs worksheets don't have hyperlinks, whereas view 1.5 History worksheets had hyperlinks pointing to fetched results (Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/trunk b84cbbe7e -> d4378aacb AMBARI-22091.Hive view2.0 Jobs worksheets doesn't have hyperlink whereas view1.5 History worksheets had hyperlinks pointing to fetched results(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d4378aac Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d4378aac Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d4378aac Branch: refs/heads/trunk Commit: d4378aacbd5bd95c0367f5b8a6865ee3bbe6de04 Parents: b84cbbe Author: Venkata SairamAuthored: Sat Sep 30 09:05:33 2017 +0530 Committer: Venkata Sairam Committed: Sat Sep 30 09:05:59 2017 +0530 -- .../resources/ui/app/components/job-item.js | 49 +- .../resources/ui/app/components/jobs-browser.js | 3 + .../src/main/resources/ui/app/routes/jobs.js| 11 ++ .../main/resources/ui/app/routes/queries/new.js | 2 + .../resources/ui/app/routes/queries/query.js| 164 --- .../src/main/resources/ui/app/styles/app.scss | 8 + .../ui/app/templates/components/job-item.hbs| 2 +- .../app/templates/components/jobs-browser.hbs | 2 +- .../main/resources/ui/app/templates/jobs.hbs| 1 + 9 files changed, 177 insertions(+), 65 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/d4378aac/contrib/views/hive20/src/main/resources/ui/app/components/job-item.js -- diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/job-item.js b/contrib/views/hive20/src/main/resources/ui/app/components/job-item.js index 42a3411..0545624 100644 --- a/contrib/views/hive20/src/main/resources/ui/app/components/job-item.js +++ b/contrib/views/hive20/src/main/resources/ui/app/components/job-item.js @@ -17,12 +17,17 @@ */ import Ember from 'ember'; +import UILoggerMixin from '../mixins/ui-logger'; -export default Ember.Component.extend({ +export default Ember.Component.extend(UILoggerMixin, { jobs: Ember.inject.service(), tagName: '', expanded: false, expandedValue: null, + 
store: Ember.inject.service(), + savedQueries: Ember.inject.service(), + + actions: { toggleExpandJob(jobId) { if(this.get('expanded')) { @@ -39,6 +44,48 @@ export default Ember.Component.extend({ }); } +}, +openAsWorksheet(savedQuery){ + + let hasWorksheetModel = this.get('model'), self = this; + let worksheetId; + + if (Ember.isEmpty(hasWorksheetModel)){ +worksheetId = 1; + }else { + +let isWorksheetExist = (this.get('model').filterBy('title', savedQuery.title).get('length') > 0); +if(isWorksheetExist) { + this.sendAction('openWorksheet', savedQuery, true); + return; +} + +let worksheets = this.get('model'); +worksheets.forEach((worksheet) => { + worksheet.set('selected', false); + }); +worksheetId = `worksheet${worksheets.get('length') + 1}`; + } + var isTabExisting = this.get("store").peekRecord('worksheet', savedQuery.id); + if(isTabExisting) { +self.sendAction('openWorksheet', savedQuery, true); +return; + } + this.get("savedQueries").fetchSavedQuery(savedQuery.get('queryFile')).then(function(response) { +let localWs = { + id: savedQuery.get('id'), + title: savedQuery.get('title'), + queryFile: savedQuery.get('queryFile'), + query: response.file.fileContent, + selectedDb : savedQuery.get('dataBase'), + owner: savedQuery.get('owner'), + selected: true +}; +self.sendAction('openWorksheet', localWs); + }, (error) => { +self.get('logger').danger('Failed to load the query', self.extractError(error)); +}); + } } }); http://git-wip-us.apache.org/repos/asf/ambari/blob/d4378aac/contrib/views/hive20/src/main/resources/ui/app/components/jobs-browser.js -- diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/jobs-browser.js b/contrib/views/hive20/src/main/resources/ui/app/components/jobs-browser.js index 7e24843..ae081de 100644 --- a/contrib/views/hive20/src/main/resources/ui/app/components/jobs-browser.js +++ b/contrib/views/hive20/src/main/resources/ui/app/components/jobs-browser.js @@ -93,6 +93,9 @@ export default Ember.Component.extend({ 
clearTitleFilter() { this.set('titleFilter'); +}, +openWorksheet(worksheet, isExisitingWorksheet){ + this.sendAction("openWorksheet", worksheet, isExisitingWorksheet); } } });