AMBARI-18616. Fix Log Search User Config bug (Miklos Gergely via oleewere)

Change-Id: I008c431c07ae61a2121b0addf6747b7d9b53c618
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/cf619372
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/cf619372
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/cf619372

Branch: refs/heads/trunk
Commit: cf619372dc321026705b56a5be39325367042c83
Parents: 98da69f
Author: Miklos Gergely <mgerg...@hortonworks.com>
Authored: Wed Oct 19 17:30:44 2016 +0200
Committer: oleewere <oleew...@gmail.com>
Committed: Sat Nov 5 16:08:56 2016 +0100

----------------------------------------------------------------------
 .../common/HadoopServiceConfigHelper.java       |  80 ++++++++++++++
 .../ambari/logsearch/dao/UserConfigSolrDao.java | 107 +++++----------
 .../ambari/logsearch/manager/ManagerBase.java   |  35 ------
 .../logsearch/manager/ServiceLogsManager.java   |  13 +++
 .../model/common/LogFeederDataMap.java          |   8 +-
 .../views/filter/CreateLogfeederFilterView.js   |  81 ++++++------
 6 files changed, 175 insertions(+), 149 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/cf619372/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/HadoopServiceConfigHelper.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/HadoopServiceConfigHelper.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/HadoopServiceConfigHelper.java
new file mode 100644
index 0000000..0e2087f
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/HadoopServiceConfigHelper.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.common;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.Set;
+import java.util.TreeSet;
+
+import org.apache.ambari.logsearch.util.JSONUtil;
+import org.apache.commons.io.FileUtils;
+import org.apache.log4j.Logger;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+
+import com.google.gson.JsonParseException;
+
+public class HadoopServiceConfigHelper {
+  private static final Logger LOG = Logger.getLogger(HadoopServiceConfigHelper.class);
+
+  public static String getHadoopServiceConfigJSON() {
+    String fileContent = null;
+
+    try {
+      ClassLoader classLoader = HadoopServiceConfigHelper.class.getClassLoader();
+      File file = new File(classLoader.getResource("HadoopServiceConfig.json").getFile());
+      fileContent = FileUtils.readFileToString(file);
+    } catch (IOException e) {
+      LOG.error("Unable to read HadoopServiceConfig.json", e);
+    }
+
+    return JSONUtil.isJSONValid(fileContent) ? fileContent : null;
+  }
+
+  @SuppressWarnings("unchecked")
+  public static Set<String> getAllLogIds() {
+    Set<String> logIds = new TreeSet<>();
+
+    String key = null;
+    JSONArray componentArray = null;
+    try {
+      String hadoopServiceConfigJSON = getHadoopServiceConfigJSON();
+      JSONObject hadoopServiceJsonObject = new JSONObject(hadoopServiceConfigJSON).getJSONObject("service");
+      Iterator<String> hadoopServiceKeys = hadoopServiceJsonObject.keys();
+      while (hadoopServiceKeys.hasNext()) {
+        key = hadoopServiceKeys.next();
+        componentArray = hadoopServiceJsonObject.getJSONObject(key).getJSONArray("components");
+        for (int i = 0; i < componentArray.length(); i++) {
+          JSONObject componentJsonObject = (JSONObject) componentArray.get(i);
+          String logId = componentJsonObject.getString("name");
+          logIds.add(logId);
+        }
+      }
+    } catch (JsonParseException | JSONException je) {
+      LOG.error("Error parsing JSON. key=" + key + ", componentArray=" + componentArray, je);
+      return null;
+    }

+    return logIds;
+  }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/cf619372/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
index 58337f7..418a405 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
@@ -19,22 +19,22 @@
 package org.apache.ambari.logsearch.dao;
 
-import java.io.File;
 import java.io.IOException;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Iterator;
 import java.util.List;
-import java.util.Scanner;
+import java.util.Set;
+import java.util.TreeMap;
+
 import javax.annotation.PostConstruct;
 import javax.inject.Inject;
 import javax.inject.Named;
 
+import org.apache.ambari.logsearch.common.HadoopServiceConfigHelper;
 import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.common.LogSearchContext;
 import org.apache.ambari.logsearch.common.LogType;
 import org.apache.ambari.logsearch.conf.SolrUserPropsConfig;
 import org.apache.ambari.logsearch.model.common.LogFeederDataMap;
+import org.apache.ambari.logsearch.model.common.LogfeederFilterData;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.response.QueryResponse;
@@ -43,15 +43,11 @@ import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrInputDocument;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
 
-import com.google.gson.JsonParseException;
 import org.apache.ambari.logsearch.util.JSONUtil;
+import org.apache.commons.collections.CollectionUtils;
 import org.apache.log4j.Logger;
 import org.springframework.data.solr.core.SolrTemplate;
-import org.springframework.util.CollectionUtils;
 
 import static org.apache.ambari.logsearch.solr.SolrConstants.UserConfigConstants.ID;
 import static org.apache.ambari.logsearch.solr.SolrConstants.UserConfigConstants.USER_NAME;
@@ -147,61 +143,43 @@ public class UserConfigSolrDao extends SolrDaoBase {
     return updateResoponse;
   }
 
-  @SuppressWarnings("unchecked")
   public LogFeederDataMap getUserFilter() throws SolrServerException, IOException {
-
     SolrQuery solrQuery = new SolrQuery();
     solrQuery.setQuery("*:*");
-    String fq = ROW_TYPE + ":" + LogSearchConstants.LOGFEEDER_FILTER_NAME;
-    solrQuery.setFilterQueries(fq);
+    solrQuery.setFilterQueries(ROW_TYPE + ":" + LogSearchConstants.LOGFEEDER_FILTER_NAME);
 
     QueryResponse response = process(solrQuery);
     SolrDocumentList documentList = response.getResults();
     LogFeederDataMap logfeederDataMap = null;
-    if (!CollectionUtils.isEmpty(documentList)) {
+    if (CollectionUtils.isNotEmpty(documentList)) {
       SolrDocument configDoc = documentList.get(0);
-      String configJson = JSONUtil.objToJson(configDoc);
-      HashMap<String, Object> configMap = JSONUtil.jsonToMapObject(configJson);
-      String json = (String) configMap.get(VALUES);
+      String json = (String) configDoc.get(VALUES);
       logfeederDataMap = (LogFeederDataMap) JSONUtil.jsonToObj(json, LogFeederDataMap.class);
       logfeederDataMap.setId("" + configDoc.get(ID));
     } else {
+      logfeederDataMap = initUserFilter();
+    }
+
+    return logfeederDataMap;
+  }
+
+  private LogFeederDataMap initUserFilter() throws SolrServerException, IOException {
+    LogFeederDataMap logfeederDataMap = new LogFeederDataMap();
+
+    Set<String> logIds = HadoopServiceConfigHelper.getAllLogIds();
+    if (logIds != null) {
+      logfeederDataMap.setFilter(new TreeMap<String, LogfeederFilterData>());
+      logfeederDataMap.setId(Long.toString(System.currentTimeMillis()));
       List<String> logfeederDefaultLevels = solrUserConfig.getLogLevels();
-      JSONArray levelJsonArray = new JSONArray(logfeederDefaultLevels);
-
-      String hadoopServiceString = getHadoopServiceConfigJSON();
-      String key = null;
-      JSONArray componentArray = null;
-      try {
-        JSONObject componentList = new JSONObject();
-        JSONObject jsonValue = new JSONObject();
-
-        JSONObject hadoopServiceJsonObject = new JSONObject(hadoopServiceString).getJSONObject("service");
-        Iterator<String> hadoopSerivceKeys = hadoopServiceJsonObject.keys();
-        while (hadoopSerivceKeys.hasNext()) {
-          key = hadoopSerivceKeys.next();
-          componentArray = hadoopServiceJsonObject.getJSONObject(key).getJSONArray("components");
-          for (int i = 0; i < componentArray.length(); i++) {
-            JSONObject compJsonObject = (JSONObject) componentArray.get(i);
-            String componentName = compJsonObject.getString("name");
-            JSONObject innerContent = new JSONObject();
-            innerContent.put("label", componentName);
-            innerContent.put("hosts", new JSONArray());
-            innerContent.put("defaultLevels", levelJsonArray);
-            componentList.put(componentName, innerContent);
-          }
-        }
-        jsonValue.put("filter", componentList);
-        logfeederDataMap = (LogFeederDataMap) JSONUtil.jsonToObj(jsonValue.toString(), LogFeederDataMap.class);
-        logfeederDataMap.setId(""+new Date().getTime());
-        saveUserFilter(logfeederDataMap);
-
-      } catch (JsonParseException | JSONException je) {
-        LOG.error("Error parsing JSON. key=" + key + ", componentArray=" + componentArray, je);
-        logfeederDataMap = new LogFeederDataMap();
+
+      for (String logId : logIds) {
+        LogfeederFilterData logfeederFilterData = new LogfeederFilterData();
+        logfeederFilterData.setLabel(logId);
+        logfeederFilterData.setDefaultLevels(logfeederDefaultLevels);
+        logfeederDataMap.getFilter().put(logId, logfeederFilterData);
       }
+
+      saveUserFilter(logfeederDataMap);
     }
+
     return logfeederDataMap;
   }
 
@@ -209,31 +187,4 @@ public class UserConfigSolrDao extends SolrDaoBase {
   public SolrSchemaFieldDao getSolrSchemaFieldDao() {
     return solrSchemaFieldDao;
   }
-
-  private String getHadoopServiceConfigJSON() {
-    StringBuilder result = new StringBuilder("");
-
-    // Get file from resources folder
-    ClassLoader classLoader = getClass().getClassLoader();
-    File file = new File(classLoader.getResource("HadoopServiceConfig.json").getFile());
-
-    try (Scanner scanner = new Scanner(file)) {
-
-      while (scanner.hasNextLine()) {
-        String line = scanner.nextLine();
-        result.append(line).append("\n");
-      }
-
-      scanner.close();
-
-    } catch (IOException e) {
-      LOG.error("Unable to read HadoopServiceConfig.json", e);
-    }
-
-    String hadoopServiceConfig = result.toString();
-    if (JSONUtil.isJSONValid(hadoopServiceConfig)) {
-      return hadoopServiceConfig;
-    }
-    return null;
-  }
 }
http://git-wip-us.apache.org/repos/asf/ambari/blob/cf619372/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java
index 6c280ac..89873f3 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java
@@ -19,19 +19,13 @@
 package org.apache.ambari.logsearch.manager;
 
-import java.io.File;
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-import java.util.Scanner;
 
-import org.apache.ambari.logsearch.common.MessageEnums;
 import org.apache.ambari.logsearch.model.response.LogData;
 import org.apache.ambari.logsearch.model.response.LogSearchResponse;
 import org.apache.ambari.logsearch.dao.SolrDaoBase;
-import org.apache.ambari.logsearch.util.JSONUtil;
-import org.apache.ambari.logsearch.util.RESTErrorUtil;
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.log4j.Logger;
 import org.apache.solr.client.solrj.SolrQuery;
@@ -47,35 +41,6 @@ public abstract class ManagerBase<LOG_DATA_TYPE extends LogData, SEARCH_RESPONSE
   public ManagerBase() {
     super();
   }
-
-  public String getHadoopServiceConfigJSON() {
-    StringBuilder result = new StringBuilder("");
-
-    // Get file from resources folder
-    ClassLoader classLoader = getClass().getClassLoader();
-    File file = new File(classLoader.getResource("HadoopServiceConfig.json").getFile());
-
-    try (Scanner scanner = new Scanner(file)) {
-
-      while (scanner.hasNextLine()) {
-        String line = scanner.nextLine();
-        result.append(line).append("\n");
-      }
-
-      scanner.close();
-
-    } catch (IOException e) {
-      logger.error("Unable to read HadoopServiceConfig.json", e);
-      throw RESTErrorUtil.createRESTException(e.getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-
-    String hadoopServiceConfig = result.toString();
-    if (JSONUtil.isJSONValid(hadoopServiceConfig)) {
-      return hadoopServiceConfig;
-    }
-    throw RESTErrorUtil.createRESTException("Improper JSON", MessageEnums.ERROR_SYSTEM);
-
-  }
 
   protected SEARCH_RESPONSE getLastPage(SolrDaoBase solrDoaBase, SimpleQuery lastPageQuery, String event) {
     int maxRows = lastPageQuery.getRows();


http://git-wip-us.apache.org/repos/asf/ambari/blob/cf619372/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java
index 74c549a..44d0c00 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java
@@ -28,6 +28,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
+import java.util.Scanner;
 
 import javax.inject.Inject;
 import javax.inject.Named;
@@ -38,6 +39,8 @@ import com.google.common.collect.Lists;
 import freemarker.template.Configuration;
 import freemarker.template.Template;
 import freemarker.template.TemplateException;
+
+import org.apache.ambari.logsearch.common.HadoopServiceConfigHelper;
 import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.common.MessageEnums;
 import org.apache.ambari.logsearch.dao.ServiceLogsSolrDao;
@@ -68,6 +71,7 @@ import org.apache.ambari.logsearch.solr.model.SolrComponentTypeLogData;
 import org.apache.ambari.logsearch.solr.model.SolrHostLogData;
 import org.apache.ambari.logsearch.solr.model.SolrServiceLogData;
 import org.apache.ambari.logsearch.util.DownloadUtil;
+import org.apache.ambari.logsearch.util.JSONUtil;
 import org.apache.ambari.logsearch.util.DateUtil;
 import org.apache.ambari.logsearch.util.RESTErrorUtil;
 import org.apache.ambari.logsearch.util.SolrUtil;
@@ -567,4 +571,13 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
     }
     throw new UnsupportedOperationException();
   }
+
+
+  public String getHadoopServiceConfigJSON() {
+    String hadoopServiceConfigJSON = HadoopServiceConfigHelper.getHadoopServiceConfigJSON();
+    if (hadoopServiceConfigJSON == null) {
+      throw RESTErrorUtil.createRESTException("Could not load HadoopServiceConfig.json", MessageEnums.ERROR_SYSTEM);
+    }
+    return hadoopServiceConfigJSON;
+  }
 }
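With the file handling centralized in HadoopServiceConfigHelper, the manager
keeps only the REST-facing policy: a null from the helper (unreadable file or
invalid JSON) becomes a single error response, instead of the two separate
exceptions the removed ManagerBase method could throw. A hypothetical JAX-RS
caller, for illustration only (the resource class below is invented, not part
of this commit):

    import javax.inject.Inject;
    import javax.ws.rs.GET;
    import javax.ws.rs.Path;
    import javax.ws.rs.Produces;
    import javax.ws.rs.core.MediaType;

    import org.apache.ambari.logsearch.manager.ServiceLogsManager;

    @Path("/serviceconfig")
    public class HadoopServiceConfigResource {
      @Inject
      private ServiceLogsManager serviceLogsManager;

      // The manager either returns valid JSON or throws the exception built by
      // RESTErrorUtil.createRESTException, so no null check is needed here.
      @GET
      @Produces(MediaType.APPLICATION_JSON)
      public String getServiceConfig() {
        return serviceLogsManager.getHadoopServiceConfigJSON();
      }
    }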
http://git-wip-us.apache.org/repos/asf/ambari/blob/cf619372/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/common/LogFeederDataMap.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/common/LogFeederDataMap.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/common/LogFeederDataMap.java
index b09610c..cc7d53d 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/common/LogFeederDataMap.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/common/LogFeederDataMap.java
@@ -21,7 +21,7 @@ package org.apache.ambari.logsearch.model.common;
 import io.swagger.annotations.ApiModel;
 import io.swagger.annotations.ApiModelProperty;
 
-import java.util.HashMap;
+import java.util.TreeMap;
 
 @ApiModel
 public class LogFeederDataMap {
@@ -30,13 +30,13 @@ public class LogFeederDataMap {
   private String id;
 
   @ApiModelProperty
-  private HashMap<String, LogfeederFilterData> filter;
+  private TreeMap<String, LogfeederFilterData> filter;
 
-  public HashMap<String, LogfeederFilterData> getFilter() {
+  public TreeMap<String, LogfeederFilterData> getFilter() {
     return filter;
   }
 
-  public void setFilter(HashMap<String, LogfeederFilterData> filter) {
+  public void setFilter(TreeMap<String, LogfeederFilterData> filter) {
     this.filter = filter;
   }
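Switching the filter map from HashMap to TreeMap means the structure serialized
to Solr and to the UI now lists components in sorted key order rather than hash
order, which is presumably what gives the reworked view below a stable,
alphabetical row order. A minimal illustration of the difference (the ids are
invented):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.TreeMap;

    public class FilterOrderDemo {
      public static void main(String[] args) {
        Map<String, String> hash = new HashMap<>();
        Map<String, String> tree = new TreeMap<>();
        for (String id : new String[] {"zookeeper", "ambari_agent", "hdfs_namenode"}) {
          hash.put(id, "defaults");
          tree.put(id, "defaults");
        }
        System.out.println(hash.keySet()); // iteration order unspecified
        System.out.println(tree.keySet()); // [ambari_agent, hdfs_namenode, zookeeper]
      }
    }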
that.$("[data-host='"+comp+"']").select2('val') : []; - obj.filter[comp] = { - label : comp, - hosts: host, - defaultLevels : that.getDefaultValues(comp), - overrideLevels : that.getOverideValues(comp), - expiryTime : (date && date.startDate) ? date.startDate.toJSON() : "" - }; - }); - return (obj); + var obj = {filter: {}},that = this; + var components = this.model.get("filter"); + _.each(components,function(value,key){ + var date = that.$("[data-date='"+key+"']").data("daterangepicker"); + var host = (that.$("[data-host='"+key+"']").length) ? that.$("[data-host='"+key+"']").select2('val') : []; + obj.filter[key] = { + label : key, + hosts: host, + defaultLevels : that.getDefaultValues(key), + overrideLevels : that.getOverideValues(key), + expiryTime : (date && date.startDate) ? date.startDate.toJSON() : "" + }; + }); + return (obj); }, getOverideValues : function(ofComponent){ - var $els = this.$("tr.overrideRow."+ofComponent).find("input:checked"),values=[]; - for(var i=0; i<$els.length; i++){ - values.push($($els[i]).data("id")); - } - return values; + var $els = this.$("tr.overrideRow."+ofComponent).find("input:checked"),values=[]; + for(var i=0; i<$els.length; i++){ + values.push($($els[i]).data("id")); + } + return values; }, getDefaultValues : function(ofComponent){ - var $els = this.$("tr[data-component='"+ofComponent+"']").find("input:checked"),values=[]; - for(var i=0; i<$els.length; i++){ - if($($els[i]).data("id")) - values.push($($els[i]).data("id")); - } - return values; + var $els = this.$("tr[data-component='"+ofComponent+"']").find("input:checked"),values=[]; + for(var i=0; i<$els.length; i++){ + if($($els[i]).data("id")) + values.push($($els[i]).data("id")); + } + return values; } });