http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/hicc/ViewsTag.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/hicc/ViewsTag.java b/src/main/java/org/apache/hadoop/chukwa/hicc/ViewsTag.java
index 8171d64..280ee8c 100644
--- a/src/main/java/org/apache/hadoop/chukwa/hicc/ViewsTag.java
+++ b/src/main/java/org/apache/hadoop/chukwa/hicc/ViewsTag.java
@@ -36,13 +36,13 @@ public class ViewsTag extends SimpleTagSupport {
       for (Iterator perm = permission; perm.hasNext();) {
         String who = perm.next().toString();
         authUsers[j] = who;
-        // getJspContext().setAttribute( "permission."+who+".read",
-        // views.getReadPermission(i,who) );
-        // getJspContext().setAttribute( "permission."+who+".write",
-        // views.getWritePermission(i,who) );
+        getJspContext().setAttribute( "permission."+who+".read",
+            views.getReadPermission(i,who) );
+        getJspContext().setAttribute( "permission."+who+".write",
+            views.getWritePermission(i,who) );
         j = j + 1;
       }
-      // getJspContext().setAttribute( "permission", authUsers );
+      getJspContext().setAttribute( "permission", authUsers );
       getJspContext().setAttribute("owner", views.getOwner(i));
       getJspContext().setAttribute("description", views.getDescription(i));
       getJspBody().invoke(null);
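
The hunk above re-enables per-user permission attributes that had been commented out. A minimal sketch of the underlying SimpleTagSupport pattern, with illustrative names rather than Chukwa's (values published through the JSP context become visible to EL expressions in the tag body):

    import java.io.IOException;
    import javax.servlet.jsp.JspException;
    import javax.servlet.jsp.tagext.SimpleTagSupport;

    public class PermissionTag extends SimpleTagSupport {
      @Override
      public void doTag() throws JspException, IOException {
        // Visible as ${permission} inside the tag body once it is invoked.
        getJspContext().setAttribute("permission", new String[] { "admin" });
        getJspBody().invoke(null); // null: body output goes to the page writer
      }
    }
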
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/hicc/Workspace.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/hicc/Workspace.java b/src/main/java/org/apache/hadoop/chukwa/hicc/Workspace.java
index b6789b4..4d7ae98 100644
--- a/src/main/java/org/apache/hadoop/chukwa/hicc/Workspace.java
+++ b/src/main/java/org/apache/hadoop/chukwa/hicc/Workspace.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.chukwa.hicc;
 
 import java.io.*;
+import java.nio.charset.Charset;
 import java.util.*;
 
 import javax.servlet.*;
@@ -30,18 +31,16 @@ import java.sql.*;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.chukwa.util.XssFilter;
-
 import org.json.simple.JSONArray;
 import org.json.simple.JSONObject;
 import org.json.simple.JSONValue;
-
 import org.apache.hadoop.chukwa.util.ExceptionUtil;
 
 public class Workspace extends HttpServlet {
   public static final long serialVersionUID = 101L;
   private static final Log log = LogFactory.getLog(Workspace.class);
   private String path = System.getenv("CHUKWA_DATA_DIR");
-  transient private JSONObject hash = new JSONObject();
+  private JSONObject hash = new JSONObject();
   transient private XssFilter xf;
 
   @Override
@@ -89,7 +88,7 @@ public class Workspace extends HttpServlet {
     try {
       // use buffering, reading one line at a time
      // FileReader always assumes default encoding is OK!
-      BufferedReader input = new BufferedReader(new FileReader(aFile));
+      BufferedReader input = new BufferedReader(new InputStreamReader(new FileInputStream(aFile.getAbsolutePath()), Charset.forName("UTF-8")));
       try {
         String line = null; // not declared within while loop
         /*
@@ -113,8 +112,7 @@ public class Workspace extends HttpServlet {
 
   public void setContents(String fName, String buffer) {
     try {
-      FileWriter fstream = new FileWriter(fName);
-      BufferedWriter out = new BufferedWriter(fstream);
+      BufferedWriter out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fName), Charset.forName("UTF-8")));
       out.write(buffer);
       out.close();
     } catch (Exception e) {
@@ -240,20 +238,22 @@ public class Workspace extends HttpServlet {
         return name.endsWith(".view");
       }
     });
-    JSONObject[] cacheGroup = new JSONObject[filesWanted.length];
-    for (int i = 0; i < filesWanted.length; i++) {
-      String buffer = getContents(filesWanted[i]);
-      try {
-        JSONObject jt = (JSONObject) JSONValue.parse(buffer);
-        String fn = filesWanted[i].getName();
-        jt.put("key", fn.substring(0, (fn.length() - 5)));
-        cacheGroup[i] = jt;
-      } catch (Exception e) {
-        log.debug(ExceptionUtil.getStackTrace(e));
+    if(filesWanted!=null) {
+      JSONObject[] cacheGroup = new JSONObject[filesWanted.length];
+      for (int i = 0; i < filesWanted.length; i++) {
+        String buffer = getContents(filesWanted[i]);
+        try {
+          JSONObject jt = (JSONObject) JSONValue.parse(buffer);
+          String fn = filesWanted[i].getName();
+          jt.put("key", fn.substring(0, (fn.length() - 5)));
+          cacheGroup[i] = jt;
+        } catch (Exception e) {
+          log.debug(ExceptionUtil.getStackTrace(e));
+        }
       }
+      String viewList = convertObjectsToViewList(cacheGroup);
+      setContents(source + "/workspace_view_list.cache", viewList);
     }
-    String viewList = convertObjectsToViewList(cacheGroup);
-    setContents(source + "/workspace_view_list.cache", viewList);
   }
 }
@@ -294,18 +294,20 @@ public class Workspace extends HttpServlet {
         return name.endsWith(".descriptor");
       }
     });
-    JSONObject[] cacheGroup = new JSONObject[filesWanted.length];
-    for (int i = 0; i < filesWanted.length; i++) {
-      String buffer = getContents(filesWanted[i]);
-      try {
-        JSONObject jt = (JSONObject) JSONValue.parse(buffer);
-        cacheGroup[i] = jt;
-      } catch (Exception e) {
-        log.debug(ExceptionUtil.getStackTrace(e));
+    if(filesWanted!=null) {
+      JSONObject[] cacheGroup = new JSONObject[filesWanted.length];
+      for (int i = 0; i < filesWanted.length; i++) {
+        String buffer = getContents(filesWanted[i]);
+        try {
+          JSONObject jt = (JSONObject) JSONValue.parse(buffer);
+          cacheGroup[i] = jt;
+        } catch (Exception e) {
+          log.debug(ExceptionUtil.getStackTrace(e));
+        }
      }
+      String widgetList = convertObjectsToWidgetList(cacheGroup);
+      setContents(source + "/workspace_plugin.cache", widgetList);
    }
-    String widgetList = convertObjectsToWidgetList(cacheGroup);
-    setContents(source + "/workspace_plugin.cache", widgetList);
  }
 }
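
The Workspace change is one instance of a sweep through this commit replacing default-charset I/O (FileReader, FileWriter, new String(byte[]), String.getBytes()) with explicit UTF-8. A self-contained sketch of the reader/writer pairing being adopted, assuming nothing beyond java.io and java.nio.charset:

    import java.io.*;
    import java.nio.charset.Charset;

    public class Utf8Io {
      private static final Charset UTF8 = Charset.forName("UTF-8");

      public static String read(File f) throws IOException {
        // InputStreamReader with an explicit charset, unlike FileReader,
        // decodes the same bytes identically on every platform.
        StringBuilder sb = new StringBuilder();
        BufferedReader in = new BufferedReader(
            new InputStreamReader(new FileInputStream(f), UTF8));
        try {
          String line;
          while ((line = in.readLine()) != null) {
            sb.append(line).append('\n');
          }
        } finally {
          in.close();
        }
        return sb.toString();
      }

      public static void write(File f, String contents) throws IOException {
        BufferedWriter out = new BufferedWriter(
            new OutputStreamWriter(new FileOutputStream(f), UTF8));
        try {
          out.write(contents);
        } finally {
          out.close();
        }
      }
    }
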
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/hicc/bean/BarOptions.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/hicc/bean/BarOptions.java b/src/main/java/org/apache/hadoop/chukwa/hicc/bean/BarOptions.java
index 12c6b12..4a42c2b 100644
--- a/src/main/java/org/apache/hadoop/chukwa/hicc/bean/BarOptions.java
+++ b/src/main/java/org/apache/hadoop/chukwa/hicc/bean/BarOptions.java
@@ -28,4 +28,44 @@ public class BarOptions extends SeriesOptions {
   public BarOptions() {
     fill = true;
   }
+
+  public boolean getZero() {
+    return zero;
+  }
+
+  public void setZero(boolean zero) {
+    this.zero = zero;
+  }
+
+  public boolean getStepByStep() {
+    return stepByStep;
+  }
+
+  public void setStepByStep(boolean stepByStep) {
+    this.stepByStep = stepByStep;
+  }
+
+  public int getBarWidth() {
+    return barWidth;
+  }
+
+  public void setBarWidth(int barWidth) {
+    this.barWidth = barWidth;
+  }
+
+  public String getAlign() {
+    return align;
+  }
+
+  public void setAlign(String align) {
+    this.align = align;
+  }
+
+  public boolean getHorizontal() {
+    return this.horizontal;
+  }
+
+  public void setHorizontal(boolean horizontal) {
+    this.horizontal = horizontal;
+  }
 }
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/hicc/bean/LineOptions.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/hicc/bean/LineOptions.java b/src/main/java/org/apache/hadoop/chukwa/hicc/bean/LineOptions.java
index 3cbb44d..38ff526 100644
--- a/src/main/java/org/apache/hadoop/chukwa/hicc/bean/LineOptions.java
+++ b/src/main/java/org/apache/hadoop/chukwa/hicc/bean/LineOptions.java
@@ -20,6 +20,21 @@ package org.apache.hadoop.chukwa.hicc.bean;
 public class LineOptions extends SeriesOptions {
   public boolean zero;
   public boolean steps;
-
+
+  public boolean getZero() {
+    return zero;
+  }
+
+  public void setZero(boolean zero) {
+    this.zero = zero;
+  }
+
+  public boolean getSteps() {
+    return steps;
+  }
+
+  public void setSteps(boolean steps) {
+    this.steps = steps;
+  }
 }
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/hicc/bean/PointOptions.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/hicc/bean/PointOptions.java b/src/main/java/org/apache/hadoop/chukwa/hicc/bean/PointOptions.java
index 8d9ae6c..caeede6 100644
--- a/src/main/java/org/apache/hadoop/chukwa/hicc/bean/PointOptions.java
+++ b/src/main/java/org/apache/hadoop/chukwa/hicc/bean/PointOptions.java
@@ -20,8 +20,24 @@ package org.apache.hadoop.chukwa.hicc.bean;
 public class PointOptions extends SeriesOptions {
   public int radius;
   public String symbol = "circle";
-
+
   public PointOptions() {
     radius = 5;
   }
+
+  public int getRadius() {
+    return radius;
+  }
+
+  public void setRadius(int radius) {
+    this.radius = radius;
+  }
+
+  public String getSymbol() {
+    return symbol;
+  }
+
+  public void setSymbol(String symbol) {
+    this.symbol = symbol;
+  }
 }
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/hicc/bean/SeriesOptions.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/hicc/bean/SeriesOptions.java b/src/main/java/org/apache/hadoop/chukwa/hicc/bean/SeriesOptions.java
index 1cdb40f..b793b77 100644
--- a/src/main/java/org/apache/hadoop/chukwa/hicc/bean/SeriesOptions.java
+++ b/src/main/java/org/apache/hadoop/chukwa/hicc/bean/SeriesOptions.java
@@ -22,12 +22,36 @@ public class SeriesOptions {
   public boolean fill = false;
   public int lineWidth;
   public String fillColor;
-
+
   public boolean getFill() {
     return fill;
   }
-
+
   public void setFill(boolean fill) {
     this.fill = fill;
   }
+
+  public boolean getShow() {
+    return show;
+  }
+
+  public void setShow(boolean show) {
+    this.show = show;
+  }
+
+  public int getLineWidth() {
+    return lineWidth;
+  }
+
+  public void setLineWidth(int lineWidth) {
+    this.lineWidth = lineWidth;
+  }
+
+  public String getFillColor() {
+    return fillColor;
+  }
+
+  public void setFillColor(String fillColor) {
+    this.fillColor = fillColor;
+  }
 }
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/ChukwaDailyRollingFileAppender.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/ChukwaDailyRollingFileAppender.java b/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/ChukwaDailyRollingFileAppender.java
index e1ade29..72b6dfd 100644
--- a/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/ChukwaDailyRollingFileAppender.java
+++ b/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/ChukwaDailyRollingFileAppender.java
@@ -381,7 +381,9 @@ public class ChukwaDailyRollingFileAppender extends FileAppender {
 
     File target = new File(scheduledFilename);
     if (target.exists()) {
-      target.delete();
+      if(!target.delete()) {
+        LogLog.warn("Unable to remove: "+target.getAbsolutePath());
+      };
     }
 
     File file = new File(fileName);
@@ -405,11 +407,11 @@ public class ChukwaDailyRollingFileAppender extends FileAppender {
     cleanUp();
   }
 
-  public String getCleanUpRegex() {
+  public synchronized String getCleanUpRegex() {
     return cleanUpRegex;
   }
 
-  public void setCleanUpRegex(String cleanUpRegex) {
+  protected synchronized void setCleanUpRegex(String cleanUpRegex) {
     this.cleanUpRegex = cleanUpRegex;
   }
 
@@ -439,16 +441,19 @@ public class ChukwaDailyRollingFileAppender extends FileAppender {
       String[] dirFiles = dirList.list(new LogFilter(actualFileName, regex));
 
       List<String> files = new ArrayList<String>();
-      for (String file : dirFiles) {
-        files.add(file);
+      if(dirFiles!=null) {
+        for (String file : dirFiles) {
+          files.add(file);
+        }
       }
       Collections.sort(files);
 
       while (files.size() > maxBackupIndex) {
         String file = files.remove(0);
         File f = new File(directoryName + "/" + file);
-        f.delete();
-        LogLog.debug("Removing: " + file);
+        if(!f.delete()) {
+          LogLog.warn("Cannot remove: " + file);
+        }
       }
     } catch (Exception e) {
       errorHandler
@@ -456,7 +461,7 @@ public class ChukwaDailyRollingFileAppender extends FileAppender {
     }
   }
 
-  private class LogFilter implements FilenameFilter {
+  private static class LogFilter implements FilenameFilter {
     private Pattern p = null;
     private String logFile = null;
 
@@ -484,12 +489,10 @@ public class ChukwaDailyRollingFileAppender extends FileAppender {
    */
   @Override
   protected boolean checkEntryConditions() {
-    if (!hasBeenActivated) {
-      synchronized(chukwaLock) {
-        if (!hasBeenActivated) {
-          hasBeenActivated = true;
-          activateOptions();
-        }
+    synchronized(chukwaLock) {
+      if (!hasBeenActivated) {
+        hasBeenActivated = true;
+        activateOptions();
       }
     }
     return super.checkEntryConditions();
@@ -577,8 +580,7 @@ public class ChukwaDailyRollingFileAppender extends FileAppender {
             + ", starting at offset:" + currentLength);
       } else {
         log.debug("Chukwa adaptor not added, addFile(" + log4jFileName
-            + ") returned " + adaptorID
-            + ", current offset:" + currentLength);
+            + ") returned, current offset: " + currentLength);
       }
     }
 
@@ -737,4 +739,14 @@ class RollingCalendar extends GregorianCalendar {
     }
     return getTime();
   }
+
+  @Override
+  public boolean equals(Object o) {
+    return super.equals(o);
+  }
+
+  @Override
+  public int hashCode() {
+    return super.hashCode();
+  }
 }
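
Two defensive idioms recur in the appender hunks above: File.delete() reports failure through its boolean return rather than an exception, and File.list() returns null (not an empty array) when the path is unreadable or not a directory. A standalone sketch of both checks:

    import java.io.File;

    public class SafeFileOps {
      // Guard against the null that File.list() returns on error.
      static int countEntries(File dir) {
        String[] entries = dir.list();
        return (entries == null) ? 0 : entries.length;
      }

      // Surface delete failures instead of silently dropping them.
      static void deleteOrWarn(File f) {
        if (!f.delete()) {
          System.err.println("Cannot remove: " + f.getAbsolutePath());
        }
      }
    }
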
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/ChukwaTaskLogAppender.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/ChukwaTaskLogAppender.java b/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/ChukwaTaskLogAppender.java
new file mode 100644
index 0000000..b811757
--- /dev/null
+++ b/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/ChukwaTaskLogAppender.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.inputtools.log4j;
+
+import org.apache.hadoop.chukwa.datacollection.controller.ChukwaAgentController;
+import org.apache.hadoop.chukwa.datacollection.controller.ClientFinalizer;
+import org.apache.hadoop.chukwa.util.RecordConstants;
+import org.apache.log4j.Logger;
+import org.apache.log4j.spi.LoggingEvent;
+
+public class ChukwaTaskLogAppender extends
+    org.apache.hadoop.mapred.TaskLogAppender {
+  static Logger log = Logger.getLogger(ChukwaTaskLogAppender.class);
+  static final String adaptorType = ChukwaAgentController.CharFileTailUTF8NewLineEscaped;
+  ChukwaAgentController chukwaClient;
+  String recordType = null;
+  static boolean chukwaClientIsNull = true;
+  static final Object chukwaLock = new Object();
+  private ClientFinalizer clientFinalizer = null;
+
+  public String getRecordType() {
+    if (recordType != null)
+      return recordType;
+    else
+      return "unknown";
+  }
+
+  public void setRecordType(String recordType) {
+    this.recordType = recordType;
+  }
+
+  public void subAppend(LoggingEvent event) {
+    this.qw.write(RecordConstants.escapeAllButLastRecordSeparator("\n",this.layout.format(event)));
+    // Make sure only one thread can do this
+    // and use the boolean to avoid the first level locking
+    if (chukwaClientIsNull) {
+      synchronized (chukwaLock) {
+        if (chukwaClient == null) {
+          String log4jFileName = getFile();
+          String recordType = getRecordType();
+          long currentLength = 0L;
+          chukwaClient = new ChukwaAgentController();
+          chukwaClientIsNull = false;
+          String adaptorID = chukwaClient.add(ChukwaAgentController.CharFileTailUTF8NewLineEscaped,
+              recordType,currentLength + " " + log4jFileName, currentLength);
+
+          // Setup a shutdownHook for the controller
+          clientFinalizer = new ClientFinalizer(chukwaClient);
+          Runtime.getRuntime().addShutdownHook(clientFinalizer);
+          if (adaptorID != null) {
+            log.debug("Added file tailing adaptor to chukwa agent for file "
+                + log4jFileName + ", adaptorId:" + adaptorID
+                + " using this recordType :" + recordType
+                + ", starting at offset:" + currentLength);
+          } else {
+            log.debug("Chukwa adaptor not added, addFile(" + log4jFileName
+                + ") returned, current offset:" + currentLength);
+          }
+        }
+      }
+    }
+  }
+}
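
The subAppend() above keeps the appender's double-checked initialization: a static boolean is read outside the lock so the common logging path skips synchronization, and the authoritative null check happens under the lock. Reduced to its skeleton (illustrative names; the non-volatile flag is tolerable here only because a stale read merely costs one extra lock acquisition):

    public class LazyInit {
      private static boolean initialized = false;   // racy fast-path flag
      private static final Object lock = new Object();
      private static Object client = null;

      static Object get() {
        if (!initialized) {            // cheap check, may be stale
          synchronized (lock) {
            if (client == null) {      // authoritative check, under the lock
              client = new Object();
              initialized = true;
            }
          }
        }
        return client;
      }
    }
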
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/TaskLogAppender.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/TaskLogAppender.java b/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/TaskLogAppender.java
deleted file mode 100644
index cbf2575..0000000
--- a/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/TaskLogAppender.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.chukwa.inputtools.log4j;
-
-import org.apache.hadoop.chukwa.datacollection.controller.ChukwaAgentController;
-import org.apache.hadoop.chukwa.datacollection.controller.ClientFinalizer;
-import org.apache.hadoop.chukwa.util.RecordConstants;
-import org.apache.log4j.Logger;
-import org.apache.log4j.spi.LoggingEvent;
-
-public class TaskLogAppender extends
-    org.apache.hadoop.mapred.TaskLogAppender {
-  static Logger log = Logger.getLogger(TaskLogAppender.class);
-  static final String adaptorType = ChukwaAgentController.CharFileTailUTF8NewLineEscaped;
-  ChukwaAgentController chukwaClient;
-  String recordType = null;
-  static boolean chukwaClientIsNull = true;
-  static final Object chukwaLock = new Object();
-  private ClientFinalizer clientFinalizer = null;
-
-  public String getRecordType() {
-    if (recordType != null)
-      return recordType;
-    else
-      return "unknown";
-  }
-
-  public void setRecordType(String recordType) {
-    this.recordType = recordType;
-  }
-
-  public void subAppend(LoggingEvent event) {
-    this.qw.write(RecordConstants.escapeAllButLastRecordSeparator("\n",this.layout.format(event)));
-    // Make sure only one thread can do this
-    // and use the boolean to avoid the first level locking
-    if (chukwaClientIsNull) {
-      synchronized (chukwaLock) {
-        if (chukwaClient == null) {
-          String log4jFileName = getFile();
-          String recordType = getRecordType();
-          long currentLength = 0L;
-          chukwaClient = new ChukwaAgentController();
-          chukwaClientIsNull = false;
-          String adaptorID = chukwaClient.add(ChukwaAgentController.CharFileTailUTF8NewLineEscaped,
-              recordType,currentLength + " " + log4jFileName, currentLength);
-
-          // Setup a shutdownHook for the controller
-          clientFinalizer = new ClientFinalizer(chukwaClient);
-          Runtime.getRuntime().addShutdownHook(clientFinalizer);
-          if (adaptorID != null) {
-            log.debug("Added file tailing adaptor to chukwa agent for file "
-                + log4jFileName + ", adaptorId:" + adaptorID
-                + " using this recordType :" + recordType
-                + ", starting at offset:" + currentLength);
-          } else {
-            log.debug("Chukwa adaptor not added, addFile(" + log4jFileName
-                + ") returned " + adaptorID
-                + ", current offset:" + currentLength);
-          }
-        }
-      }
-    }
-  }
-}
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/DataConfig.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/DataConfig.java b/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/DataConfig.java
index 244a55e..72265d3 100644
--- a/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/DataConfig.java
+++ b/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/DataConfig.java
@@ -30,7 +30,7 @@ import java.io.File;
 import java.io.FilenameFilter;
 
 public class DataConfig {
-  private static Configuration config;
+  private Configuration config;
   final static String MDL_XML = "mdl.xml";
 
   private Log log = LogFactory.getLog(DataConfig.class);
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/ErStreamHandler.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/ErStreamHandler.java b/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/ErStreamHandler.java
index 63396aa..892916b 100644
--- a/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/ErStreamHandler.java
+++ b/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/ErStreamHandler.java
@@ -21,6 +21,8 @@ package org.apache.hadoop.chukwa.inputtools.mdl;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.BufferedReader;
+import java.nio.charset.Charset;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
@@ -40,7 +42,7 @@ public class ErStreamHandler extends Thread {
 
   public void run() {
     try {
-      InputStreamReader inpStrd = new InputStreamReader(inpStr);
+      InputStreamReader inpStrd = new InputStreamReader(inpStr, Charset.forName("UTF-8"));
       BufferedReader buffRd = new BufferedReader(inpStrd);
       String line = null;
       StringBuffer sb = new StringBuffer();
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/LoaderServer.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/LoaderServer.java b/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/LoaderServer.java
index 986b8e9..c492214 100644
--- a/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/LoaderServer.java
+++ b/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/LoaderServer.java
@@ -21,7 +21,8 @@ package org.apache.hadoop.chukwa.inputtools.mdl;
 
 import java.io.*;
 import java.lang.management.ManagementFactory;
-import java.nio.channels.*;
+import java.nio.charset.Charset;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
@@ -29,8 +30,7 @@ public class LoaderServer {
 
   String name;
   private static Log log = LogFactory.getLog(LoaderServer.class);
-  private static FileLock lock = null;
-  private static FileOutputStream pidFileOutput = null;
+  private FileOutputStream pidFileOutput = null;
 
   public LoaderServer(String name) {
     this.name = name;
@@ -46,24 +46,14 @@ public class LoaderServer {
         .append(".pid");
     try {
       File pidFile = new File(pidFilesb.toString());
-      pidFileOutput = new FileOutputStream(pidFile);
-      pidFileOutput.write(pid.getBytes());
+      pidFileOutput.write(pid.getBytes(Charset.forName("UTF-8")));
       pidFileOutput.flush();
-      FileChannel channel = pidFileOutput.getChannel();
-      LoaderServer.lock = channel.tryLock();
-      if (LoaderServer.lock != null) {
-        log.info("Initlization succeeded...");
-      } else {
-        throw (new IOException());
-      }
     } catch (IOException ex) {
       System.out.println("Initializaiton failed: can not write pid file.");
       log.error("Initialization failed...");
       log.error(ex.getMessage());
-      System.exit(-1);
       throw ex;
-
     }
   }
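
One thing to watch in the LoaderServer hunk above: along with the FileLock logic, it removes the `pidFileOutput = new FileOutputStream(pidFile)` assignment while keeping the write and flush, and the field now starts as null, so unless the stream is opened elsewhere the first write would raise a NullPointerException. For contrast, a minimal pid-file write with the stream explicitly opened and closed (illustrative, not the committed code):

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.nio.charset.Charset;

    public class PidFile {
      static void write(File pidFile, String pid) throws IOException {
        FileOutputStream out = new FileOutputStream(pidFile);
        try {
          out.write(pid.getBytes(Charset.forName("UTF-8")));
          out.flush();
        } finally {
          out.close();
        }
      }
    }
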
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/inputtools/plugin/ExecPlugin.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/inputtools/plugin/ExecPlugin.java b/src/main/java/org/apache/hadoop/chukwa/inputtools/plugin/ExecPlugin.java
index 021597f..d10df99 100644
--- a/src/main/java/org/apache/hadoop/chukwa/inputtools/plugin/ExecPlugin.java
+++ b/src/main/java/org/apache/hadoop/chukwa/inputtools/plugin/ExecPlugin.java
@@ -23,6 +23,8 @@ import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
+import java.nio.charset.Charset;
+
 import org.json.simple.JSONObject;
 
 /**
@@ -33,8 +35,8 @@ import org.json.simple.JSONObject;
  *
  */
 public abstract class ExecPlugin implements IPlugin {
-  public final int statusOK = 100;
-  public final int statusKO = -100;
+  public final static int statusOK = 100;
+  public final static int statusKO = -100;
 
   Process process = null;
 
@@ -107,7 +109,6 @@ class OutputReader extends Thread {
   private Process process = null;
   private Output outputType = null;
   public StringBuilder output = new StringBuilder();
-  public boolean isOk = true;
 
   public OutputReader(Process process, Output outputType) {
     this.process = process;
@@ -127,19 +128,18 @@ class OutputReader extends Thread {
           break;
       }
 
-
-      InputStreamReader isr = new InputStreamReader(is);
-      BufferedReader br = new BufferedReader(isr);
-      while ((line = br.readLine()) != null) {
-        // System.out.println("========>>>>>>>["+line+"]");
-        output.append(line).append("\n");
+      if(is!=null) {
+        InputStreamReader isr = new InputStreamReader(is, Charset.forName("UTF-8"));
+        BufferedReader br = new BufferedReader(isr);
+        while ((line = br.readLine()) != null) {
+          // System.out.println("========>>>>>>>["+line+"]");
+          output.append(line).append("\n");
+        }
+        br.close();
       }
-      br.close();
     } catch (IOException e) {
-      isOk = false;
       e.printStackTrace();
     } catch (Throwable e) {
-      isOk = false;
       e.printStackTrace();
     }
   }
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/rest/bean/ParametersBean.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/rest/bean/ParametersBean.java b/src/main/java/org/apache/hadoop/chukwa/rest/bean/ParametersBean.java
index 00d066b..23daf28 100755
--- a/src/main/java/org/apache/hadoop/chukwa/rest/bean/ParametersBean.java
+++ b/src/main/java/org/apache/hadoop/chukwa/rest/bean/ParametersBean.java
@@ -82,7 +82,7 @@ public class ParametersBean {
     }
     if(json.containsKey("edit")) {
       if(json.get("edit").getClass().equals(String.class)) {
-        edit=(new Integer((String)json.get("edit"))).intValue();
+        edit=Integer.parseInt((String)json.get("edit"));
       } else if(json.get("edit").getClass().equals(Long.class)) {
         edit=((Long)json.get("edit")).intValue();
       }
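
ParametersBean's switch from `new Integer(String).intValue()` to `Integer.parseInt` matches the boxing cleanups in DumpArchive and DumpChunks further down: parseInt yields a primitive with no wrapper allocation, and Integer.valueOf can serve boxed values from the small-integer cache, whereas the Integer constructor always allocates. A quick illustration:

    public class Boxing {
      public static void main(String[] args) {
        int n = Integer.parseInt("42");       // primitive, no wrapper at all
        Integer boxed = Integer.valueOf(42);  // may reuse the -128..127 cache
        System.out.println(n == boxed);       // true after unboxing
      }
    }
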
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/rest/bean/ViewBean.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/rest/bean/ViewBean.java b/src/main/java/org/apache/hadoop/chukwa/rest/bean/ViewBean.java
index 3dd63f5..20efde2 100755
--- a/src/main/java/org/apache/hadoop/chukwa/rest/bean/ViewBean.java
+++ b/src/main/java/org/apache/hadoop/chukwa/rest/bean/ViewBean.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.chukwa.rest.bean;
 
+import java.nio.charset.Charset;
 import java.text.ParseException;
 
 import javax.xml.bind.annotation.XmlElement;
@@ -29,7 +30,6 @@ import org.apache.commons.logging.LogFactory;
 import org.json.simple.JSONArray;
 import org.json.simple.JSONObject;
 import org.json.simple.parser.JSONParser;
-
 import org.apache.hadoop.chukwa.util.ExceptionUtil;
 
 @XmlRootElement
@@ -49,7 +49,7 @@ public class ViewBean {
   public ViewBean(byte[] buffer) throws ParseException {
     JSONParser parser = new JSONParser();
     try {
-      JSONObject json = (JSONObject) parser.parse(new String(buffer));
+      JSONObject json = (JSONObject) parser.parse(new String(buffer, Charset.forName("UTF-8")));
       if(json.containsKey("description")) {
         this.description = (String) json.get("description");
       } else {
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/rest/bean/WidgetBean.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/rest/bean/WidgetBean.java b/src/main/java/org/apache/hadoop/chukwa/rest/bean/WidgetBean.java
index bbfd1fa..269720c 100755
--- a/src/main/java/org/apache/hadoop/chukwa/rest/bean/WidgetBean.java
+++ b/src/main/java/org/apache/hadoop/chukwa/rest/bean/WidgetBean.java
@@ -58,10 +58,10 @@ public class WidgetBean {
       this.categories=(String) json.get("categories");
       this.url=(String) json.get("url");
       this.description=(String) json.get("description");
-      if(json.get("refresh").getClass().equals("String")) {
+      if(json.get("refresh").getClass().getName().equals("String")) {
         int refresh = Integer.parseInt((String) json.get("refresh"));
         this.refresh = refresh;
-      } else if(json.get("refresh").getClass().equals("Long")) {
+      } else if(json.get("refresh").getClass().getName().equals("Long")) {
         this.refresh = ((Long) json.get("refresh")).intValue();
       }
       try {
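
A caveat on the WidgetBean hunk above: the old `getClass().equals("String")` compared a Class to a String and was always false; comparing `getClass().getName()` to "String" still cannot match, because getName() returns the fully qualified "java.lang.String". The conventional type test for json-simple values, which stores quoted values as String and bare numbers as Long, would be instanceof; a hedged sketch:

    import org.json.simple.JSONObject;

    public class RefreshField {
      static int refreshOf(JSONObject json) {
        Object v = json.get("refresh");
        if (v instanceof String) {
          return Integer.parseInt((String) v);
        } else if (v instanceof Long) {
          return ((Long) v).intValue();
        }
        return 0; // illustrative default
      }
    }
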
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/rest/resource/ClientTrace.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/rest/resource/ClientTrace.java b/src/main/java/org/apache/hadoop/chukwa/rest/resource/ClientTrace.java
index 1857925..ca97be1 100644
--- a/src/main/java/org/apache/hadoop/chukwa/rest/resource/ClientTrace.java
+++ b/src/main/java/org/apache/hadoop/chukwa/rest/resource/ClientTrace.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.chukwa.rest.resource;
 
+import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
@@ -71,7 +72,7 @@ public class ClientTrace {
         if(c!=null && c.getData()!=null) {
           String action = "";
           long size = 0;
-          String data = new String(c.getData());
+          String data = new String(c.getData(), Charset.forName("UTF-8"));
           String[] entries = data.split("\n");
           for(String entry : entries) {
             Matcher m = pattern.matcher(entry);
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/util/AdaptorNamingUtils.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/util/AdaptorNamingUtils.java b/src/main/java/org/apache/hadoop/chukwa/util/AdaptorNamingUtils.java
index fdb94da..2337405 100644
--- a/src/main/java/org/apache/hadoop/chukwa/util/AdaptorNamingUtils.java
+++ b/src/main/java/org/apache/hadoop/chukwa/util/AdaptorNamingUtils.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.chukwa.util;
 
+import java.nio.charset.Charset;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 
@@ -28,9 +29,9 @@ public class AdaptorNamingUtils {
     MessageDigest md;
     md = MessageDigest.getInstance("MD5");
 
-    md.update(adaptorClassName.getBytes());
-    md.update(dataType.getBytes());
-    md.update(params.getBytes());
+    md.update(adaptorClassName.getBytes(Charset.forName("UTF-8")));
+    md.update(dataType.getBytes(Charset.forName("UTF-8")));
+    md.update(params.getBytes(Charset.forName("UTF-8")));
     StringBuilder sb = new StringBuilder();
     sb.append("adaptor_");
     byte[] bytes = md.digest();
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/util/ClassUtils.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/util/ClassUtils.java b/src/main/java/org/apache/hadoop/chukwa/util/ClassUtils.java
index 833c3a4..11a17ed 100644
--- a/src/main/java/org/apache/hadoop/chukwa/util/ClassUtils.java
+++ b/src/main/java/org/apache/hadoop/chukwa/util/ClassUtils.java
@@ -93,12 +93,14 @@ public class ClassUtils {
     if (directory.exists()) {
       // Get the list of the files contained in the package
       String[] files = directory.list();
-      for (String file : files) {
-        // we are only interested in .class files
-        if (file.endsWith(".class")) {
-          // removes the .class extension
-          classes.add(Class.forName(pckgname + '.'
+      if (files != null ) {
+        for (String file : files) {
+          // we are only interested in .class files
+          if (file.endsWith(".class")) {
+            // removes the .class extension
+            classes.add(Class.forName(pckgname + '.'
               + file.substring(0, file.length() - 6)));
+          }
         }
       }
     } else {
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/util/CopySequenceFile.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/util/CopySequenceFile.java b/src/main/java/org/apache/hadoop/chukwa/util/CopySequenceFile.java
index 7935256..ba33850 100644
--- a/src/main/java/org/apache/hadoop/chukwa/util/CopySequenceFile.java
+++ b/src/main/java/org/apache/hadoop/chukwa/util/CopySequenceFile.java
@@ -19,6 +19,8 @@
 
 package org.apache.hadoop.chukwa.util;
 
+import java.io.IOException;
+
 import org.apache.hadoop.chukwa.ChukwaArchiveKey;
 import org.apache.hadoop.chukwa.ChunkImpl;
 import org.apache.hadoop.conf.Configuration;
@@ -93,7 +95,7 @@ public class CopySequenceFile {
         e.printStackTrace();
       }
 
-    } catch(Exception e) {
+    } catch(IOException e) {
       log.warn("Error during .chukwa file recovery",e);
       e.printStackTrace();
     }
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/util/CreateRecordFile.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/util/CreateRecordFile.java b/src/main/java/org/apache/hadoop/chukwa/util/CreateRecordFile.java
index 7f4f93b..0a2ab46 100644
--- a/src/main/java/org/apache/hadoop/chukwa/util/CreateRecordFile.java
+++ b/src/main/java/org/apache/hadoop/chukwa/util/CreateRecordFile.java
@@ -34,10 +34,12 @@ import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Reporter;
 
+import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.File;
 import java.io.BufferedReader;
-import java.io.FileReader;
+import java.io.InputStreamReader;
+import java.nio.charset.Charset;
 
 /**
  * Helper class used to create sequence files of Chukwa records
@@ -65,14 +67,15 @@ public class CreateRecordFile {
                                SequenceFile.CompressionType.NONE, null);
     long lastSeqID = 0;
     String line;
-    BufferedReader reader = new BufferedReader(new FileReader(inputFile));
+    FileInputStream fis = new FileInputStream(inputFile);
+    BufferedReader reader = new BufferedReader(new InputStreamReader(fis, Charset.forName("UTF-8")));
 
     // for each line, create a chunk and an arckive key, pass it to the
     // processor, then write it to the sequence file.
     while ((line = reader.readLine()) != null) {
 
       ChunkImpl chunk = new ChunkImpl(dataType, streamName,
-          line.length() + lastSeqID, line.getBytes(), null);
+          line.length() + lastSeqID, line.getBytes(Charset.forName("UTF-8")), null);
       lastSeqID += line.length();
       chunk.addTag("cluster=\"" + clusterName + "\"");
 
@@ -112,7 +115,7 @@ public class CreateRecordFile {
                                 ClassNotFoundException,
                                 IllegalAccessException, InstantiationException {
 
-    if((args.length < 0 && args[0].contains("-h")) || args.length < 2) {
+    if(args.length == 0 || (args.length==1 && args[0].contains("-h"))) {
       usage();
     }
 
@@ -129,7 +132,7 @@ public class CreateRecordFile {
     if (args.length > 4)
       streamName = args[4];
 
     if (args.length > 5) {
-      Class clazz = null;
+      Class<?> clazz = null;
       try {
         clazz = Class.forName(args[5]);
       }
@@ -165,7 +168,7 @@ public class CreateRecordFile {
   }
 
   public static void usage() {
-    System.out.println("Usage: java " + TempFileUtil.class.toString().split(" ")[1] + " <inputFile> <outputFile> [<clusterName> <dataType> <streamName> <processorClass> [confFile]]");
+    System.out.println("Usage: java " + CreateRecordFile.class.toString().split(" ")[1] + " <inputFile> <outputFile> [<clusterName> <dataType> <streamName> <processorClass> [confFile]]");
     System.out.println("Description: Takes a plain text input file and generates a Hadoop sequence file contaning ChukwaRecordKey,ChukwaRecord entries");
     System.out.println("Parameters: inputFile - Text input file to read");
     System.out.println("            outputFile - Sequence file to create");
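
On the main() hunk above: the old guard's first clause, `args.length < 0`, could never be true (an array length is never negative), so that branch of the help check was dead; the rewritten condition tests the zero-argument and lone `-h` cases explicitly. Per the corrected usage string, an invocation looks like this (paths illustrative):

    java org.apache.hadoop.chukwa.util.CreateRecordFile input.log output.seq
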
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/util/DumpArchive.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/util/DumpArchive.java b/src/main/java/org/apache/hadoop/chukwa/util/DumpArchive.java
index 9b5802d..743a31c 100644
--- a/src/main/java/org/apache/hadoop/chukwa/util/DumpArchive.java
+++ b/src/main/java/org/apache/hadoop/chukwa/util/DumpArchive.java
@@ -21,7 +21,9 @@ package org.apache.hadoop.chukwa.util;
 import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.nio.charset.Charset;
 import java.util.*;
+
 import org.apache.hadoop.chukwa.ChukwaArchiveKey;
 import org.apache.hadoop.chukwa.ChunkImpl;
 import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
@@ -109,7 +111,7 @@ public class DumpArchive {
       if(oldC != null)
         counts.put(entryKey, oldC + 1);
       else
-        counts.put(entryKey, new Integer(1));
+        counts.put(entryKey, Integer.valueOf(1));
 
       if(!summarize) {
         System.out.println("\nTimePartition: " + key.getTimePartition());
@@ -123,7 +125,7 @@ public class DumpArchive {
         System.out.println("Source : " + chunk.getSource());
         System.out.println("Application : " + chunk.getStreamName());
         System.out.println("SeqID : " + chunk.getSeqID());
-        System.out.println("Data : " + new String(chunk.getData()));
+        System.out.println("Data : " + new String(chunk.getData(), Charset.forName("UTF-8")));
       }
     }
   } catch (Exception e) {
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/util/DumpChunks.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/util/DumpChunks.java b/src/main/java/org/apache/hadoop/chukwa/util/DumpChunks.java
index d07f8d2..552bffe 100644
--- a/src/main/java/org/apache/hadoop/chukwa/util/DumpChunks.java
+++ b/src/main/java/org/apache/hadoop/chukwa/util/DumpChunks.java
@@ -216,8 +216,8 @@ public class DumpChunks {
       Long b = byteCounts.get(streamName);
       byteCounts.put(streamName, b + chunk.getLength());
     } else {
-      matchCounts.put(streamName, new Integer(1));
-      byteCounts.put(streamName, new Long(chunk.getLength()));
+      matchCounts.put(streamName, Integer.valueOf(1));
+      byteCounts.put(streamName, Long.valueOf(chunk.getLength()));
     }
   }
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/util/Filter.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/util/Filter.java b/src/main/java/org/apache/hadoop/chukwa/util/Filter.java
index 066b65a..b938deb 100644
--- a/src/main/java/org/apache/hadoop/chukwa/util/Filter.java
+++ b/src/main/java/org/apache/hadoop/chukwa/util/Filter.java
@@ -17,10 +17,12 @@
  */
 package org.apache.hadoop.chukwa.util;
 
+import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.regex.Pattern;
 import java.util.regex.PatternSyntaxException;
+
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.hadoop.chukwa.Chunk;
 import org.apache.hadoop.chukwa.extraction.engine.RecordUtil;
@@ -56,7 +58,7 @@ public class Filter {
       String cluster = RecordUtil.getClusterName(chunk);
       return p.matcher(cluster).matches();
     } else if(targ.equals("content")) {
-      String content = new String(chunk.getData());
+      String content = new String(chunk.getData(), Charset.forName("UTF-8"));
       return p.matcher(content).matches();
     } else if(targ.startsWith("tags.")) {
       String tagName = targ.substring("tags.".length());
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/util/HierarchyDataType.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/util/HierarchyDataType.java b/src/main/java/org/apache/hadoop/chukwa/util/HierarchyDataType.java
index c9ea8e5..5a6a68b 100644
--- a/src/main/java/org/apache/hadoop/chukwa/util/HierarchyDataType.java
+++ b/src/main/java/org/apache/hadoop/chukwa/util/HierarchyDataType.java
@@ -145,7 +145,7 @@ public class HierarchyDataType {
       results = datasource.replaceFirst("/", "");
     }
     if (results.endsWith("/")) {
-      results = results.substring(0, -1);
+      results = results.substring(0, results.length()-1);
     }
     return results;
   }
@@ -171,4 +171,4 @@ public class HierarchyDataType {
   public static String getHierarchyDataTypeDirectory(String datasource) {
     return datasource.replace(CHUKWA_CONSTANT.HIERARCHY_CONNECTOR, "/");
   }
-}
\ No newline at end of file
+}
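
The HierarchyDataType fix above corrects a substring slip: in Java, `substring(0, -1)` always throws StringIndexOutOfBoundsException (a negative end index is illegal; this is not Python slicing), while `substring(0, length()-1)` drops exactly the final character. A tiny check:

    public class TrimSlash {
      static String stripTrailingSlash(String s) {
        return s.endsWith("/") ? s.substring(0, s.length() - 1) : s;
      }

      public static void main(String[] args) {
        System.out.println(stripTrailingSlash("a/b/")); // prints a/b
        // "a/b/".substring(0, -1) would throw StringIndexOutOfBoundsException
      }
    }
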
"/tmp"), "chukwaTest"); - FileOutputStream fos = new FileOutputStream(tmpOutput); - Random r = new Random(); - byte[] randomData = new byte[length]; - r.nextBytes(randomData); - randomData[length - 1] = '\n';// need data to end with \n since default + FileOutputStream fos = null; + try { + fos = new FileOutputStream(tmpOutput); + Random r = new Random(); + byte[] randomData = new byte[length]; + r.nextBytes(randomData); + randomData[length - 1] = '\n';// need data to end with \n since default // tailer uses that - fos.write(randomData); - fos.flush(); - fos.close(); + fos.write(randomData); + fos.flush(); + } finally { + if(fos != null) { + fos.close(); + } + } return tmpOutput; } @@ -58,7 +66,7 @@ public class TempFileUtil { + r.nextInt() + "\n"; ChunkImpl c = new ChunkImpl("HadoopLogProcessor", "test", - line.length() + lastSeqID, line.getBytes(), null); + line.length() + lastSeqID, line.getBytes(Charset.forName("UTF-8")), null); lastSeqID += line.length(); c.addTag("cluster=\"foocluster\""); return c; @@ -99,8 +107,7 @@ public class TempFileUtil { public static File makeTestFile(String name, int size,File baseDir) throws IOException { File tmpOutput = new File(baseDir, name); FileOutputStream fos = new FileOutputStream(tmpOutput); - - PrintWriter pw = new PrintWriter(fos); + PrintWriter pw = new PrintWriter(new OutputStreamWriter(fos, Charset.forName("UTF-8"))); for (int i = 0; i < size; ++i) { pw.print(i + " "); pw.println("abcdefghijklmnopqrstuvwxyz"); http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/util/XssFilter.java ---------------------------------------------------------------------- diff --git a/src/main/java/org/apache/hadoop/chukwa/util/XssFilter.java b/src/main/java/org/apache/hadoop/chukwa/util/XssFilter.java index 0fcaba5..1cf1267 100644 --- a/src/main/java/org/apache/hadoop/chukwa/util/XssFilter.java +++ b/src/main/java/org/apache/hadoop/chukwa/util/XssFilter.java @@ -18,14 +18,7 @@ package org.apache.hadoop.chukwa.util; -import java.util.Enumeration; -import java.util.regex.Pattern; -import java.util.regex.Matcher; -import java.util.List; -import java.util.Map; -import java.util.ArrayList; import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpSession; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -33,12 +26,9 @@ import org.jsoup.Jsoup; import org.jsoup.safety.Whitelist; import org.owasp.esapi.ESAPI; -import javax.ws.rs.core.MultivaluedMap; - public class XssFilter { private HttpServletRequest request = null; private static Log LOG = LogFactory.getLog(XssFilter.class); - private HttpSession session = null; public XssFilter() { } @@ -74,25 +64,6 @@ public class XssFilter { } /** - * Apply the XSS filter to the parameters - * @param parameters - * @param type - */ - private void cleanParams( MultivaluedMap<String, String> parameters ) { - for( Map.Entry<String, List<String>> params : parameters.entrySet() ) { - String key = params.getKey(); - List<String> values = params.getValue(); - - List<String> cleanValues = new ArrayList<String>(); - for( String value : values ) { - cleanValues.add( filter( value ) ); - } - - parameters.put( key, cleanValues ); - } - } - - /** * Strips any potential XSS threats out of the value * @param value * @return http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java ---------------------------------------------------------------------- diff --git 
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/chukwa/util/XssFilter.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/util/XssFilter.java b/src/main/java/org/apache/hadoop/chukwa/util/XssFilter.java
index 0fcaba5..1cf1267 100644
--- a/src/main/java/org/apache/hadoop/chukwa/util/XssFilter.java
+++ b/src/main/java/org/apache/hadoop/chukwa/util/XssFilter.java
@@ -18,14 +18,7 @@
 
 package org.apache.hadoop.chukwa.util;
 
-import java.util.Enumeration;
-import java.util.regex.Pattern;
-import java.util.regex.Matcher;
-import java.util.List;
-import java.util.Map;
-import java.util.ArrayList;
 import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpSession;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -33,12 +26,9 @@ import org.jsoup.Jsoup;
 import org.jsoup.safety.Whitelist;
 import org.owasp.esapi.ESAPI;
 
-import javax.ws.rs.core.MultivaluedMap;
-
 public class XssFilter {
   private HttpServletRequest request = null;
   private static Log LOG = LogFactory.getLog(XssFilter.class);
-  private HttpSession session = null;
 
   public XssFilter() {
   }
@@ -74,25 +64,6 @@ public class XssFilter {
   }
 
   /**
-   * Apply the XSS filter to the parameters
-   * @param parameters
-   * @param type
-   */
-  private void cleanParams( MultivaluedMap<String, String> parameters ) {
-    for( Map.Entry<String, List<String>> params : parameters.entrySet() ) {
-      String key = params.getKey();
-      List<String> values = params.getValue();
-
-      List<String> cleanValues = new ArrayList<String>();
-      for( String value : values ) {
-        cleanValues.add( filter( value ) );
-      }
-
-      parameters.put( key, cleanValues );
-    }
-  }
-
-  /**
    * Strips any potential XSS threats out of the value
    * @param value
    * @return
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/main/java/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java b/src/main/java/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java
index 9e2100e..6ab1e39 100644
--- a/src/main/java/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java
+++ b/src/main/java/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java
@@ -297,14 +297,12 @@ public abstract class AbstractMetricsContext implements MetricsContext {
    * Emits the records.
    */
  private synchronized void emitRecords() throws IOException {
-    for (String recordName : bufferedData.keySet()) {
-      RecordMap recordMap = bufferedData.get(recordName);
-      synchronized (recordMap) {
-        Set<Entry<TagMap, MetricMap>> entrySet = recordMap.entrySet ();
-        for (Entry<TagMap, MetricMap> entry : entrySet) {
-          OutputRecord outRec = new OutputRecord(entry.getKey(), entry.getValue());
-          emitRecord(contextName, recordName, outRec);
-        }
+    for (Entry<String, RecordMap> record : bufferedData.entrySet()) {
+      String recordName = record.getKey();
+      RecordMap recordMap = record.getValue();
+      for (Entry<TagMap, MetricMap> entry : record.getValue().entrySet()) {
+        OutputRecord outRec = new OutputRecord(entry.getKey(), entry.getValue());
+        emitRecord(contextName, recordName, outRec);
       }
     }
     flush();
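
The emitRecords() rewrite above replaces iteration over keySet() plus a get() per key with a single pass over entrySet(), saving one hash lookup per record. The general pattern:

    import java.util.HashMap;
    import java.util.Map;

    public class EntrySetScan {
      public static void main(String[] args) {
        Map<String, Integer> m = new HashMap<String, Integer>();
        m.put("a", 1);
        m.put("b", 2);
        // One traversal, no extra m.get(key) per element.
        for (Map.Entry<String, Integer> e : m.entrySet()) {
          System.out.println(e.getKey() + "=" + e.getValue());
        }
      }
    }
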
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/test/java/org/apache/hadoop/chukwa/ChunkImplTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/hadoop/chukwa/ChunkImplTest.java b/src/test/java/org/apache/hadoop/chukwa/ChunkImplTest.java
index c1cf37f..94e626d 100644
--- a/src/test/java/org/apache/hadoop/chukwa/ChunkImplTest.java
+++ b/src/test/java/org/apache/hadoop/chukwa/ChunkImplTest.java
@@ -36,6 +36,7 @@ public class ChunkImplTest extends TestCase {
       DataInputBuffer ib = new DataInputBuffer();
       ib.reset(ob.getData(), c.getSerializedSizeEstimate());
       int version = ib.readInt();
+      ib.close();
       assertEquals(version, ChunkImpl.PROTOCOL_VERSION);
     } catch (IOException e) {
       e.printStackTrace();
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/TestDirTailingAdaptor.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/TestDirTailingAdaptor.java b/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/TestDirTailingAdaptor.java
index 5748c9b..f72c06d 100644
--- a/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/TestDirTailingAdaptor.java
+++ b/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/TestDirTailingAdaptor.java
@@ -24,7 +24,6 @@ import java.util.Map;
 import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
 import org.apache.hadoop.conf.*;
 import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
 
 import junit.framework.TestCase;
 
 public class TestDirTailingAdaptor extends TestCase {
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/test/java/org/apache/hadoop/chukwa/datacollection/agent/rest/TestAdaptorController.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/hadoop/chukwa/datacollection/agent/rest/TestAdaptorController.java b/src/test/java/org/apache/hadoop/chukwa/datacollection/agent/rest/TestAdaptorController.java
index fa2d5f1..fda08c4 100644
--- a/src/test/java/org/apache/hadoop/chukwa/datacollection/agent/rest/TestAdaptorController.java
+++ b/src/test/java/org/apache/hadoop/chukwa/datacollection/agent/rest/TestAdaptorController.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.chukwa.datacollection.agent.rest;
 
+import junit.framework.Assert;
 import junit.framework.TestCase;
 
 import org.apache.commons.logging.Log;
@@ -35,6 +36,8 @@ import javax.servlet.ServletException;
 import javax.servlet.Servlet;
 import javax.ws.rs.core.MediaType;
 
+import java.io.File;
+import java.io.FilenameFilter;
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
 
@@ -55,6 +58,19 @@ public class TestAdaptorController extends TestCase {
   String adaptor;
 
   protected void setUp() throws Exception {
+    String path = System.getenv("CHUKWA_LOG_DIR");
+    String[] checkpointNames = new File(path).list(new FilenameFilter() {
+      public boolean accept(File dir, String name) {
+        String checkPointBaseName = "chukwa_agent_checkpoint";
+        return name.startsWith(checkPointBaseName);
+      }
+    });
+    for(String cpn : checkpointNames) {
+      File checkpoint = new File(path+"/"+cpn);
+      if(!checkpoint.delete()) {
+        Assert.fail("Fail to clean up existing check point file: "+ cpn);
+      }
+    }
     agent = ChukwaAgent.getAgent();
     agent.start();
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/test/java/org/apache/hadoop/chukwa/datacollection/collector/TestDelayedAcks.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/hadoop/chukwa/datacollection/collector/TestDelayedAcks.java b/src/test/java/org/apache/hadoop/chukwa/datacollection/collector/TestDelayedAcks.java
index b067f16..fd0b512 100644
--- a/src/test/java/org/apache/hadoop/chukwa/datacollection/collector/TestDelayedAcks.java
+++ b/src/test/java/org/apache/hadoop/chukwa/datacollection/collector/TestDelayedAcks.java
@@ -18,14 +18,12 @@
 package org.apache.hadoop.chukwa.datacollection.collector;
 
 import java.io.File;
-import java.io.IOException;
 import java.util.*;
 import java.util.regex.*;
 
 import org.apache.hadoop.chukwa.*;
 import org.apache.hadoop.chukwa.datacollection.adaptor.AdaptorShutdownPolicy;
 import org.apache.hadoop.chukwa.datacollection.adaptor.TestDirTailingAdaptor;
 import org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.FileTailingAdaptor;
-import org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.TestRawAdaptor;
 import org.apache.hadoop.chukwa.datacollection.agent.AdaptorResetThread;
 import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
 import org.apache.hadoop.chukwa.datacollection.collector.servlet.CommitCheckServlet;
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/test/java/org/apache/hadoop/chukwa/datacollection/collector/TestFailedCollectorAck.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/hadoop/chukwa/datacollection/collector/TestFailedCollectorAck.java b/src/test/java/org/apache/hadoop/chukwa/datacollection/collector/TestFailedCollectorAck.java
index de34563..ad3be6d 100644
--- a/src/test/java/org/apache/hadoop/chukwa/datacollection/collector/TestFailedCollectorAck.java
+++ b/src/test/java/org/apache/hadoop/chukwa/datacollection/collector/TestFailedCollectorAck.java
@@ -51,7 +51,7 @@ public class TestFailedCollectorAck extends TestCase {
 
     Configuration conf = new Configuration();
     String outputDirectory = TestDelayedAcks.buildConf(conf);
-    SeqFileWriter.ENABLE_ROTATION_ON_CLOSE = false;
+    SeqFileWriter.setEnableRotationOnClose(false);
     File sinkA = new File(outputDirectory, "chukwa_sink_A");
     sinkA.mkdir();
     File sinkB = new File(outputDirectory, "chukwa_sink_B");
@@ -83,7 +83,7 @@ public class TestFailedCollectorAck extends TestCase {
       Thread.sleep(10 * 1000);
       collector1_s.stop();
       Thread.sleep(10 * 1000);
-      SeqFileWriter.ENABLE_ROTATION_ON_CLOSE = true;
+      SeqFileWriter.setEnableRotationOnClose(true);
 
       String[] stat = agent.getAdaptorList().get("adaptor_constSend").split(" ");
       long bytesCommitted = Long.valueOf(stat[stat.length -1]);
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/test/java/org/apache/hadoop/chukwa/datacollection/writer/TestHBaseWriter.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/hadoop/chukwa/datacollection/writer/TestHBaseWriter.java b/src/test/java/org/apache/hadoop/chukwa/datacollection/writer/TestHBaseWriter.java
index 1623301..10769b8 100644
--- a/src/test/java/org/apache/hadoop/chukwa/datacollection/writer/TestHBaseWriter.java
+++ b/src/test/java/org/apache/hadoop/chukwa/datacollection/writer/TestHBaseWriter.java
@@ -17,8 +17,6 @@
  */
 package org.apache.hadoop.chukwa.datacollection.writer;
 
-
-import java.io.IOException;
 import java.util.ArrayList;
 
 import junit.framework.Assert;
@@ -32,11 +30,10 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.LocalHBaseCluster;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.log4j.Logger;
 
@@ -53,7 +50,6 @@ public class TestHBaseWriter extends TestCase{
   private byte[] table = Bytes.toBytes("Test");
   private byte[] test = Bytes.toBytes("1234567890 Key Value");
   private ChukwaConfiguration cc;
-  private LocalHBaseCluster cluster;
   long timestamp = 1234567890;
 
   public TestHBaseWriter() {
@@ -93,7 +89,7 @@ public class TestHBaseWriter extends TestCase{
       if(hbw.add(chunks)!=ChukwaWriter.COMMIT_OK) {
         Assert.fail("Commit status is not OK.");
       }
-      HTable testTable = new HTable(conf, table);
+      Table testTable = util.getConnection().getTable(TableName.valueOf(table));
       ResultScanner scanner = testTable.getScanner(columnFamily, qualifier);
       for(Result res : scanner) {
         Assert.assertEquals(new String(expectedValue), new String(res.getValue(columnFamily, qualifier)));
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7f662e8c/src/test/resources/tasklog-log4j.properties
----------------------------------------------------------------------
diff --git a/src/test/resources/tasklog-log4j.properties b/src/test/resources/tasklog-log4j.properties
index 1968788..56b83a8 100644
--- a/src/test/resources/tasklog-log4j.properties
+++ b/src/test/resources/tasklog-log4j.properties
@@ -14,7 +14,7 @@
 # limitations under the License.
 
 log4j.rootLogger=INFO, R
-log4j.appender.R=org.apache.hadoop.chukwa.inputtools.log4j.TaskLogAppender
+log4j.appender.R=org.apache.hadoop.chukwa.inputtools.log4j.ChukwaTaskLogAppender
 log4j.appender.R.recordType=TaskLog
 log4j.appender.R.taskId=attempt_200905220200_13470_r_000000_0
 log4j.appender.R.totalLogFileSize=5000
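
The TestHBaseWriter change tracks the HBase 1.x client API: the HTable constructor was deprecated in favor of obtaining a lightweight Table handle from a shared, heavyweight Connection (the test gets its Connection from HBaseTestingUtility). Outside a test harness the lookup would be along these lines (table name illustrative):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Table;

    public class TableLookup {
      static Table open(Configuration conf) throws IOException {
        // Connections are expensive and meant to be shared;
        // Table handles are cheap and per-use.
        Connection conn = ConnectionFactory.createConnection(conf);
        return conn.getTable(TableName.valueOf("Test"));
      }
    }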
