Repository: chukwa
Updated Branches:
  refs/heads/master fb022bfd2 -> 6def7b64d


http://git-wip-us.apache.org/repos/asf/chukwa/blob/7ae68398/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/Top.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/Top.java b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/Top.java
deleted file mode 100644
index f0c7c15..0000000
--- a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/Top.java
+++ /dev/null
@@ -1,171 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.chukwa.extraction.demux.processor.mapper;
-
-
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import org.apache.hadoop.chukwa.datacollection.writer.hbase.Annotation.Table;
-import org.apache.hadoop.chukwa.datacollection.writer.hbase.Annotation.Tables;
-import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
-import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
-import org.apache.hadoop.mapred.OutputCollector;
-import org.apache.hadoop.mapred.Reporter;
-import org.apache.log4j.Logger;
-
-@Tables(annotations={
-@Table(name="SystemMetrics",columnFamily="SystemMetrics"),
-@Table(name="SystemMetrics",columnFamily="Top")
-})
-public class Top extends AbstractProcessor {
-  static Logger log = Logger.getLogger(Top.class);
-  public final String reduceType = "SystemMetrics";
-  public final String recordType = this.getClass().getName();
-
-  private static String regex = "([0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2},[0-9]{3}) (.*?) (.*?): ";
-  private static Pattern p = null;
-
-  private Matcher matcher = null;
-  private SimpleDateFormat sdf = null;
-
-  public Top() {
-    // TODO move that to config
-    sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm");
-    p = Pattern.compile(regex);
-  }
-
-  @Override
-  protected void parse(String recordEntry,
-      OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter)
-      throws Throwable {
-
-    log.debug("Top record: [" + recordEntry + "] type[" + chunk.getDataType()
-        + "]");
-
-    matcher = p.matcher(recordEntry);
-    while (matcher.find()) {
-      log.debug("Top Processor Matches");
-
-      try {
-        Date d = sdf.parse(matcher.group(1).trim());
-
-        ChukwaRecord record = new ChukwaRecord();
-        String[] lines = recordEntry.split("\n");
-        int i = 0;
-        if (lines.length < 2) {
-          return;
-        }
-        String summaryString = "";
-        while (!lines[i].equals("")) {
-          summaryString = summaryString + lines[i] + "\n";
-          i++;
-        }
-        i++;
-        record = new ChukwaRecord();
-        key = new ChukwaRecordKey();
-        parseSummary(record, summaryString);
-        this.buildGenericRecord(record, null, d.getTime(), reduceType);
-        output.collect(key, record);
-
-        StringBuffer buffer = new StringBuffer();
-        // FIXME please validate this
-        while (i < lines.length) {
-          record = null;
-          buffer.append(lines[i] + "\n");
-          i++;
-
-        }
-        record = new ChukwaRecord();
-        key = new ChukwaRecordKey();
-        this.buildGenericRecord(record, buffer.toString(), d.getTime(), recordType);
-        // Output Top info to database
-        output.collect(key, record);
-
-        // End of parsing
-      } catch (Exception e) {
-        e.printStackTrace();
-        throw e;
-      }
-    }
-  }
-
-  public void parseSummary(ChukwaRecord record, String header) {
-    HashMap<String, Object> keyValues = new HashMap<String, Object>();
-    String[] headers = header.split("\n");
-    Pattern p = Pattern.compile("top - (.*?) up (.*?),\\s+(\\d+) users");
-    Matcher matcher = p.matcher(headers[0]);
-    if (matcher.find()) {
-      record.add("uptime", matcher.group(2));
-      record.add("users", matcher.group(3));
-    }
-    p = Pattern
-        .compile("Tasks:\\s+(\\d+) total,\\s+(\\d+) running,\\s+(\\d+) sleeping,\\s+(\\d+) stopped,\\s+(\\d+) zombie");
-    matcher = p.matcher(headers[1]);
-    if (matcher.find()) {
-      record.add("tasks_total", matcher.group(1));
-      record.add("tasks_running", matcher.group(2));
-      record.add("tasks_sleeping", matcher.group(3));
-      record.add("tasks_stopped", matcher.group(4));
-      record.add("tasks_zombie", matcher.group(5));
-    }
-    p = Pattern
-        .compile("Cpu\\(s\\):\\s*(.*?)%\\s*us,\\s*(.*?)%\\s*sy,\\s*(.*?)%\\s*ni,\\s*(.*?)%\\s*id,\\s*(.*?)%\\s*wa,\\s*(.*?)%\\s*hi,\\s*(.*?)%\\s*si");
-    matcher = p.matcher(headers[2]);
-    if (matcher.find()) {
-      record.add("cpu_user%", matcher.group(1));
-      record.add("cpu_sys%", matcher.group(2));
-      record.add("cpu_nice%", matcher.group(3));
-      record.add("cpu_wait%", matcher.group(4));
-      record.add("cpu_hi%", matcher.group(5));
-      record.add("cpu_si%", matcher.group(6));
-    }
-    p = Pattern
-        .compile("Mem:\\s+(.*?)k total,\\s+(.*?)k used,\\s+(.*?)k free,\\s+(.*?)k buffers");
-    matcher = p.matcher(headers[3]);
-    if (matcher.find()) {
-      record.add("mem_total", matcher.group(1));
-      record.add("mem_used", matcher.group(2));
-      record.add("mem_free", matcher.group(3));
-      record.add("mem_buffers", matcher.group(4));
-    }
-    p = Pattern
-        .compile("Swap:\\s+(.*?)k total,\\s+(.*?)k used,\\s+(.*?)k free,\\s+(.*?)k cached");
-    matcher = p.matcher(headers[4]);
-    if (matcher.find()) {
-      record.add("swap_total", matcher.group(1));
-      record.add("swap_used", matcher.group(2));
-      record.add("swap_free", matcher.group(3));
-      record.add("swap_cached", matcher.group(4));
-    }
-    Iterator<String> ki = keyValues.keySet().iterator();
-    while (ki.hasNext()) {
-      String key = ki.next();
-      log.debug(key + ":" + keyValues.get(key));
-    }
-  }
-
-  public String getDataType() {
-    return recordType;
-  }
-}

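For reference, the removed Top processor keyed its summary parsing off stock
top(1) header lines. A minimal standalone sketch of the uptime/users pattern
used by parseSummary() above; the sample header line is illustrative, not
taken from this patch:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class TopHeaderSketch {
      public static void main(String[] args) {
        // Hypothetical top(1) summary line of the shape parseSummary() expected
        String header = "top - 14:02:10 up 12 days,  3 users,  load average: 0.10, 0.08, 0.01";
        Matcher m = Pattern.compile("top - (.*?) up (.*?),\\s+(\\d+) users").matcher(header);
        if (m.find()) {
          System.out.println("uptime=" + m.group(2)); // prints: uptime=12 days
          System.out.println("users=" + m.group(3));  // prints: users=3
        }
      }
    }
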
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7ae68398/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/Torque.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/Torque.java b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/Torque.java
deleted file mode 100644
index 735948c..0000000
--- a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/Torque.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.chukwa.extraction.demux.processor.mapper;
-
-
-import java.io.IOException;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
-import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
-import org.apache.hadoop.mapred.OutputCollector;
-import org.apache.hadoop.mapred.Reporter;
-import org.apache.log4j.Logger;
-
-public class Torque extends AbstractProcessor {
-
-  static Logger log = Logger.getLogger(Torque.class);
-  private SimpleDateFormat sdf = null;
-
-  public Torque() {
-    // TODO move that to config
-    sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS");
-  }
-
-  @Override
-  protected void parse(String recordEntry,
-      OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter)
-      throws Throwable {
-    try {
-      String dStr = recordEntry.substring(0, 23);
-      int start = 24;
-      int idx = recordEntry.indexOf(' ', start);
-      start = idx + 1;
-      idx = recordEntry.indexOf(' ', start);
-      String body = recordEntry.substring(idx + 1);
-      body = body.replaceAll("\n", "");
-      Date d = sdf.parse(dStr);
-      String[] kvpairs = body.split(", ");
-
-      ChukwaRecord record = new ChukwaRecord();
-      String kvpair = null;
-      String[] halves = null;
-      boolean containRecord = false;
-      for (int i = 0; i < kvpairs.length; ++i) {
-        kvpair = kvpairs[i];
-        if (kvpair.indexOf("=") >= 0) {
-          halves = kvpair.split("=");
-          record.add(halves[0], halves[1]);
-          containRecord = true;
-        }
-      }
-      if (record.containsField("Machine")) {
-        buildGenericRecord(record, null, d.getTime(), "HodMachine");
-      } else {
-        buildGenericRecord(record, null, d.getTime(), "HodJob");
-      }
-      if (containRecord) {
-        output.collect(key, record);
-      }
-    } catch (ParseException e) {
-      e.printStackTrace();
-      log.warn("Wrong format in Torque [" + recordEntry + "]", e);
-      throw e;
-    } catch (IOException e) {
-      e.printStackTrace();
-      log.warn("Unable to collect output in Torque [" + recordEntry + "]", e);
-      throw e;
-    }
-
-  }
-
-  public String getDataType() {
-    return Torque.class.getName();
-  }
-
-}

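The removed Torque processor expected a 23-character timestamp prefix followed
by a body of comma-separated key=value pairs, emitting a HodMachine record
when a Machine field was present and a HodJob record otherwise. A minimal
sketch of that body split; the record line is made up for illustration:

    public class TorqueBodySketch {
      public static void main(String[] args) {
        // Hypothetical body in the "key=value, key=value" shape Torque.parse() split on
        String body = "Machine=node01.example.com, Jobs=3, State=free";
        for (String kvpair : body.split(", ")) {
          if (kvpair.indexOf('=') >= 0) {
            String[] halves = kvpair.split("=");
            System.out.println(halves[0] + " -> " + halves[1]);
          }
        }
      }
    }
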
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7ae68398/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/YWatch.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/YWatch.java b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/YWatch.java
deleted file mode 100644
index 7da9baa..0000000
--- a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/YWatch.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.chukwa.extraction.demux.processor.mapper;
-
-
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
-import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
-import org.apache.hadoop.mapred.OutputCollector;
-import org.apache.hadoop.mapred.Reporter;
-import org.apache.log4j.Logger;
-import org.json.JSONException;
-import org.json.JSONObject;
-
-public class YWatch extends AbstractProcessor {
-  static Logger log = Logger.getLogger(YWatch.class);
-
-  private static final String ywatchType = "YWatch";
-
-  private static String regex = null;
-
-  private static Pattern p = null;
-
-  private Matcher matcher = null;
-
-  public YWatch() {
-    // TODO move that to config
-    regex = "([0-9]{4}\\-[0-9]{2}\\-[0-9]{2} [0-9]{2}\\:[0-9]{2}:[0-9]{2},[0-9]{3}) (INFO|DEBUG|ERROR|WARN) (.*?): (.*)";
-    p = Pattern.compile(regex);
-    matcher = p.matcher("-");
-  }
-
-  @SuppressWarnings("unchecked")
-  @Override
-  protected void parse(String recordEntry,
-      OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter)
-      throws Throwable {
-    if (log.isDebugEnabled()) {
-      log.debug("YWatchProcessor record: [" + recordEntry + "] type["
-          + chunk.getDataType() + "]");
-    }
-
-    matcher.reset(recordEntry);
-    if (matcher.matches()) {
-      log.info("YWatchProcessor Matches");
-
-      try {
-        String body = matcher.group(4);
-
-        try {
-          JSONObject json = new JSONObject(body);
-
-          String poller = json.getString("poller");
-          String host = json.getString("host");
-          String metricName = json.getString("metricName");
-
-          // Data
-          JSONObject jsonData = json.getJSONObject("data")
-              .getJSONObject("data");
-
-          String jsonTs = null;
-          long ts = Long.parseLong(jsonTs);
-
-          String jsonValue = null;
-          Iterator<String> it = jsonData.keys();
-
-          ChukwaRecord record = null;
-
-          while (it.hasNext()) {
-            jsonTs = it.next();
-            jsonValue = jsonData.getString(jsonTs);
-
-            record = new ChukwaRecord();
-            key = new ChukwaRecordKey();
-            this.buildGenericRecord(record, null, ts, "Ywatch");
-            record.add("poller", poller);
-            record.add("host", host);
-            record.add("metricName", metricName);
-            record.add("value", jsonValue);
-            output.collect(key, record);
-            log.info("YWatchProcessor output 1 metric");
-          }
-
-        } catch (IOException e) {
-          log.warn("Unable to collect output in YWatchProcessor ["
-              + recordEntry + "]", e);
-          e.printStackTrace();
-        } catch (JSONException e) {
-          e.printStackTrace();
-          log.warn("Wrong format in YWatchProcessor [" + recordEntry + "]", e);
-        }
-
-      } catch (Exception e) {
-        e.printStackTrace();
-        throw e;
-      }
-    }
-  }
-
-  public String getDataType() {
-    return YWatch.ywatchType;
-  }
-}

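Worth noting: the removed YWatch parser called Long.parseLong(jsonTs) while
jsonTs was still null, which throws NumberFormatException on the first
matching record; the timestamp was presumably meant to be parsed from each
data key inside the loop. A standalone sketch of that per-key parse, using a
hypothetical payload shaped like the nested data object YWatch read:

    import java.util.Iterator;
    import org.json.JSONException;
    import org.json.JSONObject;

    public class YWatchDataSketch {
      @SuppressWarnings("unchecked")
      public static void main(String[] args) throws JSONException {
        // Hypothetical inner "data" object: timestamp keys mapped to metric values
        JSONObject jsonData = new JSONObject("{\"1241568021982\":\"42\"}");
        Iterator<String> it = jsonData.keys();
        while (it.hasNext()) {
          String jsonTs = it.next();
          long ts = Long.parseLong(jsonTs); // the timestamp comes from the key itself
          System.out.println(ts + " -> " + jsonData.getString(jsonTs));
        }
      }
    }
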
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7ae68398/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/YwatchInvalidEntry.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/YwatchInvalidEntry.java b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/YwatchInvalidEntry.java
deleted file mode 100644
index ff240f7..0000000
--- a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/YwatchInvalidEntry.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.chukwa.extraction.demux.processor.mapper;
-
-
-public class YwatchInvalidEntry extends Exception {
-
-  /**
-        * 
-        */
-  private static final long serialVersionUID = 7074989443687516732L;
-
-  public YwatchInvalidEntry() {
-  }
-
-  public YwatchInvalidEntry(String message) {
-    super(message);
-  }
-
-  public YwatchInvalidEntry(Throwable cause) {
-    super(cause);
-  }
-
-  public YwatchInvalidEntry(String message, Throwable cause) {
-    super(message, cause);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/chukwa/blob/7ae68398/src/main/java/org/apache/hadoop/chukwa/hicc/rest/HeatmapController.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/hicc/rest/HeatmapController.java b/src/main/java/org/apache/hadoop/chukwa/hicc/rest/HeatmapController.java
index 050d941..4c7d5c5 100644
--- a/src/main/java/org/apache/hadoop/chukwa/hicc/rest/HeatmapController.java
+++ b/src/main/java/org/apache/hadoop/chukwa/hicc/rest/HeatmapController.java
@@ -40,37 +40,36 @@ public class HeatmapController {
   static Logger log = Logger.getLogger(HeatmapController.class);
 
   @GET
-  @Path("{table}/{family}/{column}")
+  @Path("{metricGroup}/{metric}")
   @Produces(MediaType.APPLICATION_JSON)
-  public Heatmap getHeatmap(@Context HttpServletRequest request, 
-                 @PathParam("table") String table, 
-                 @PathParam("family") String family, 
-                 @PathParam("column") String column, 
-                 @QueryParam("start") String start, 
-                 @QueryParam("end") String end, 
-                 @QueryParam("max") @DefaultValue("1.0") double max,
-                 @QueryParam("scale") @DefaultValue("100") double scale,
-                 @QueryParam("height") @DefaultValue("400") int height) {
-         SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
-         Heatmap heatmap = null;
-         long startTime = 0;
-           long endTime = 0;
-           TimeHandler time = new TimeHandler(request);
-           try {
-             if(start!=null) {
-               startTime = sdf.parse(start).getTime();
-             } else {
-               startTime = time.getStartTime();
-             }
-             if(end!=null) {
-               endTime = sdf.parse(end).getTime();
-             } else {
-               endTime = time.getEndTime();
-             }
-             heatmap = ChukwaHBaseStore.getHeatmap(table, family, column, startTime, endTime, max, scale, height);
-           }catch(Throwable e) {
-                   log.error(ExceptionUtil.getStackTrace(e));
-           }
-         return heatmap;
+  public Heatmap getHeatmap(@Context HttpServletRequest request,
+      @PathParam("metricGroup") String metricGroup,
+      @PathParam("metric") String metric, @QueryParam("start") String start,
+      @QueryParam("end") String end,
+      @QueryParam("max") @DefaultValue("1.0") double max,
+      @QueryParam("scale") @DefaultValue("100") double scale,
+      @QueryParam("height") @DefaultValue("400") int height) {
+    SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
+    Heatmap heatmap = null;
+    long startTime = 0;
+    long endTime = 0;
+    TimeHandler time = new TimeHandler(request);
+    try {
+      if (start != null) {
+        startTime = sdf.parse(start).getTime();
+      } else {
+        startTime = time.getStartTime();
+      }
+      if (end != null) {
+        endTime = sdf.parse(end).getTime();
+      } else {
+        endTime = time.getEndTime();
+      }
+      heatmap = ChukwaHBaseStore.getHeatmap(metricGroup, metric, startTime,
+          endTime, max, scale, height);
+    } catch (Throwable e) {
+      log.error(ExceptionUtil.getStackTrace(e));
+    }
+    return heatmap;
   }
 }

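With this change a heatmap is addressed by metric group and metric instead of
HBase table/family/column coordinates. An illustrative request; the
/hicc/v1/heatmap prefix and the metric names are assumptions, not taken from
this patch, while start/end use the yyyyMMddHHmmss format parsed above and
max, scale, and height fall back to their @DefaultValue settings when omitted:

    GET /hicc/v1/heatmap/SystemMetrics/cpu.combined?start=20150101000000&end=20150102000000
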
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7ae68398/src/main/java/org/apache/hadoop/chukwa/hicc/rest/MetricsController.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/hicc/rest/MetricsController.java b/src/main/java/org/apache/hadoop/chukwa/hicc/rest/MetricsController.java
index 9f41ed8..61f8247 100644
--- a/src/main/java/org/apache/hadoop/chukwa/hicc/rest/MetricsController.java
+++ b/src/main/java/org/apache/hadoop/chukwa/hicc/rest/MetricsController.java
@@ -23,7 +23,6 @@ import java.util.Set;
 
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpSession;
-import javax.ws.rs.DefaultValue;
 import javax.ws.rs.GET;
 import javax.ws.rs.Path;
 import javax.ws.rs.PathParam;
@@ -42,9 +41,9 @@ import org.json.simple.JSONArray;
 public class MetricsController {
 
   @GET
-  @Path("series/{table}/{family}/{column}/rowkey/{rkey}")
+  @Path("series/{metric}/{source}")
   @Produces("application/json")
-  public String getSeries(@Context HttpServletRequest request, @PathParam("table") String table, @PathParam("family") String family, @PathParam("column") String column, @PathParam("rkey") String rkey, @QueryParam("start") String start, @QueryParam("end") String end) {
+  public String getSeries(@Context HttpServletRequest request, @PathParam("metric") String metric, @PathParam("source") String source, @QueryParam("start") String start, @QueryParam("end") String end) {
     SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
     String buffer = "";
     Series series;
@@ -62,13 +61,8 @@ public class MetricsController {
       } else {
         endTime = time.getEndTime();
       }
-      if(rkey!=null) {
-        series = ChukwaHBaseStore.getSeries(table, rkey, family, column, startTime, endTime, true);
-        buffer = series.toString();
-      } else {
-        throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST)
-            .entity("No row key defined.").build());
-      }
+      series = ChukwaHBaseStore.getSeries(metric, source, startTime, endTime);
+      buffer = series.toString();
     } catch (ParseException e) {
      throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST)
          .entity("Start/End date parse error.  Format: yyyyMMddHHmmss.").build());
@@ -77,16 +71,14 @@ public class MetricsController {
   }
 
   @GET
-  @Path("series/{table}/{column}/session/{sessionKey}")
+  @Path("series/{metricGroup}/{metric}/session/{sessionKey}")
   @Produces("application/json")
-  public String getSeriesBySessionAttribute(@Context HttpServletRequest request, @PathParam("table") String table, @PathParam("column") String column, @PathParam("sessionKey") String skey, @QueryParam("start") String start, @QueryParam("end") String end) {
+  public String getSeriesBySessionAttribute(@Context HttpServletRequest request, @PathParam("metricGroup") String metricGroup, @PathParam("metric") String metric, @PathParam("sessionKey") String skey, @QueryParam("start") String start, @QueryParam("end") String end) {
     SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
     String buffer = "";
     long startTime = 0;
     long endTime = 0;
     TimeHandler time = new TimeHandler(request);
-    String family = column.split(":")[0];
-    String qualifier = column.split(":")[1];
     try {
       if(start!=null) {
         startTime = sdf.parse(start).getTime();
@@ -100,13 +92,13 @@ public class MetricsController {
       }
       if(skey!=null) {
           HttpSession session = request.getSession();
-          String[] rkeys = (session.getAttribute(skey).toString()).split(",");
+          String[] sourcekeys = (session.getAttribute(skey).toString()).split(",");
           JSONArray seriesList = new JSONArray();
-          for(String rowKey : rkeys) {
-               if (rowKey == null || rowKey.equals("")) {
+          for(String source : sourcekeys) {
+               if (source == null || source.equals("")) {
                        continue;
                }
-            Series output = ChukwaHBaseStore.getSeries(table, rowKey, family, qualifier, startTime, endTime, true);
+            Series output = ChukwaHBaseStore.getSeries(metricGroup, metric, source, startTime, endTime);
             seriesList.add(output.toJSONObject());
           }
           buffer = seriesList.toString();
@@ -125,83 +117,33 @@ public class MetricsController {
   @Path("schema")
   @Produces("application/json")
   public String getTables() {
-    Set<String> tableNames = ChukwaHBaseStore.getTableNames();
-    JSONArray tables = new JSONArray();
-    for(String table : tableNames) {
-      tables.add(table);
+    Set<String> metricGroups = ChukwaHBaseStore.getMetricGroups();
+    JSONArray groups = new JSONArray();
+    for(String metric : metricGroups) {
+      groups.add(metric);
     }
-    return tables.toString();
+    return groups.toString();
   }
   
   @GET
-  @Path("schema/{table}")
+  @Path("schema/{metricGroup}")
   @Produces("application/json")
-  public String getFamilies(@PathParam("table") String tableName) {
-    Set<String> familyNames = ChukwaHBaseStore.getFamilyNames(tableName);
-    JSONArray families = new JSONArray();
-    for(String family : familyNames) {
-      families.add(family);
+  public String getFamilies(@PathParam("metricGroup") String metricGroup) {
+    Set<String> metricNames = ChukwaHBaseStore.getMetricNames(metricGroup);
+    JSONArray metrics = new JSONArray();
+    for(String metric : metricNames) {
+      metrics.add(metric);
     }
-    return families.toString();
+    return metrics.toString();
   }
-  
+    
   @GET
-  @Path("schema/{table}/{family}")
+  @Path("source/{metricGroup}")
   @Produces("application/json")
-  public String getColumnNames(@Context HttpServletRequest request, @PathParam("table") String tableName, @PathParam("family") String family, @QueryParam("start") String start, @QueryParam("end") String end, @DefaultValue("false") @QueryParam("fullScan") boolean fullScan) {
-    SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
-    long startTime = 0;
-    long endTime = 0;
-    TimeHandler time = new TimeHandler(request);
-    try {
-      if(start!=null) {
-        startTime = sdf.parse(start).getTime();
-      } else {
-        startTime = time.getStartTime();
-      }
-      if(end!=null) {
-        endTime = sdf.parse(end).getTime();
-      } else {
-        endTime = time.getEndTime();
-      }
-    } catch(ParseException e) {
-      throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST)
-          .entity("Start/End date parse error.  Format: yyyyMMddHHmmss.").build());
-    }
-    Set<String> columnNames = ChukwaHBaseStore.getColumnNames(tableName, family, startTime, endTime, fullScan);
-    JSONArray columns = new JSONArray();
-    for(String column : columnNames) {
-      columns.add(column);
-    }
-    return columns.toString();
-  }
-  
-  @GET
-  @Path("rowkey/{table}/{family}/{column}")
-  @Produces("application/json")
-  public String getRowNames(@Context HttpServletRequest request, @PathParam("table") String tableName, @PathParam("family") String family, @PathParam("column") String column, @QueryParam("start") String start, @QueryParam("end") String end, @QueryParam("fullScan") @DefaultValue("false") boolean fullScan) {
-    SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
-    long startTime = 0;
-    long endTime = 0;
-    TimeHandler time = new TimeHandler(request);
-    try {
-      if(start!=null) {
-        startTime = sdf.parse(start).getTime();
-      } else {
-        startTime = time.getStartTime();
-      }
-      if(end!=null) {
-        endTime = sdf.parse(end).getTime();
-      } else {
-        endTime = time.getEndTime();
-      }
-    } catch(ParseException e) {
-      throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST)
-          .entity("Start/End date parse error.  Format: yyyyMMddHHmmss.").build());
-    }
-    Set<String> columnNames = ChukwaHBaseStore.getRowNames(tableName, family, column, startTime, endTime, fullScan);
+  public String getSourceNames(@Context HttpServletRequest request, @PathParam("metricGroup") String metricGroup) {
+    Set<String> sourceNames = ChukwaHBaseStore.getSourceNames(metricGroup);
     JSONArray rows = new JSONArray();
-    for(String row : columnNames) {
+    for(String row : sourceNames) {
       rows.add(row);
     }
     return rows.toString();

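Taken together, the MetricsController endpoints now speak in metric groups,
metrics, and sources rather than tables, families, columns, and row keys.
Illustrative requests; the /hicc/v1/metrics prefix matches the URLs
graph_explorer.jsp builds below, but the group, metric, and source names are
placeholders:

    GET /hicc/v1/metrics/schema                        -> list metric groups
    GET /hicc/v1/metrics/schema/SystemMetrics          -> list metrics in a group
    GET /hicc/v1/metrics/source/SystemMetrics          -> list sources for a group
    GET /hicc/v1/metrics/series/cpu.combined/host1.example.com?start=20150101000000&end=20150102000000
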
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7ae68398/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/Log4JMetricsContext.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/Log4JMetricsContext.java b/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/Log4JMetricsContext.java
index 438b161..a80778f 100644
--- a/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/Log4JMetricsContext.java
+++ b/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/Log4JMetricsContext.java
@@ -106,7 +106,7 @@ public class Log4JMetricsContext extends AbstractMetricsContext {
       try {
         json.put("contextName", contextName);
         json.put("recordName", recordName);
-        json.put("chukwa_timestamp", System.currentTimeMillis());
+        json.put("timestamp", System.currentTimeMillis());
         json.put("period", period);
         for (String tagName : outRec.getTagNames()) {
           json.put(tagName, outRec.getTag(tagName));

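After this rename, the metrics JSON emitted through log4j carries a plain
timestamp key in place of chukwa_timestamp, e.g. (trimmed; key order follows
the put() calls above):

    {"contextName":"chunkQueue","recordName":"chunkQueue","timestamp":1241568021982,"period":60,...}
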
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7ae68398/src/main/web/hicc/jsp/graph_explorer.jsp
----------------------------------------------------------------------
diff --git a/src/main/web/hicc/jsp/graph_explorer.jsp b/src/main/web/hicc/jsp/graph_explorer.jsp
index 02b706b..5804f25 100644
--- a/src/main/web/hicc/jsp/graph_explorer.jsp
+++ b/src/main/web/hicc/jsp/graph_explorer.jsp
@@ -123,10 +123,9 @@
     function getRows() {
       var size = $('#row option').size();
       $('#row').find('option').remove();
-      var column = $('#column').val();
-      $('#column :selected').each(function(i, selected) {
-        var tfColumn = $(selected).val();
-        var url = encodeURI("/hicc/v1/metrics/rowkey/"+tfColumn);
+      $('#table :selected').each(function(i, selected) {
+        var metricGroup = $(selected).val();
+        var url = encodeURI("/hicc/v1/metrics/source/"+metricGroup);
         $.ajax({ url: url, dataType: "json", success: function(data){
           for(var i in data) {
             var test = $('#row').find('option[value="'+data[i]+'"]').val();
@@ -145,9 +144,9 @@
       }
       var url = [];
       var idx = 0;
-      $('#column :selected').each(function(i, selected) {
+      $('#family :selected').each(function(i, selected) {
         $('#row :selected').each(function(j, rowSelected) {
-          url[idx++] = encodeURI("/hicc/v1/metrics/series/" + $(selected).val() + "/rowkey/" + $(rowSelected).val());
+          url[idx++] = encodeURI("/hicc/v1/metrics/series/" + $(selected).val() + "/" + $(rowSelected).val());
         }); 
       });
       var title = $('#title').val();
@@ -239,7 +238,7 @@
       var family = $("#family").val();
 
       /* loop through series to construct URLs */
-      $('#column :selected').each(function(i, selected) {
+      $('#family :selected').each(function(i, selected) {
         var option = {};
         option.label = $('#table').val() + "." + 
           family + "." + 
@@ -247,7 +246,7 @@
           $('#row').val();
         var values = encodeURI("/hicc/v1/metrics/series/" + 
              $(selected).val() + 
-             "/rowkey/" + $('#row').val());
+             "/" + $('#row').val());
         option.value = values;
         selections.value[idx] = values;
         selections.options[idx++] = option;
@@ -306,22 +305,17 @@
         </tr>
         <tr>
           <td>
-            Table<br>
+            Metric Groups<br>
             <select id="table" size="10" onMouseUp="getFamilies()" style="min-width: 100px;" class="select">
             </select>
           </td>
           <td>
-            Column Family<br>
-            <select id="family" multiple size="10" style="min-width: 110px;" onMouseUp="getColumns()">
-            </select>
-          </td>
-          <td>
-            Column<br>
-            <select id="column" multiple size="10" style="min-width: 100px;" onMouseUp="getRows()">
+            Metrics<br>
+            <select id="family" multiple size="10" style="min-width: 110px;" onMouseUp="getRows()">
             </select>
           </td>
           <td>
-            Row<br>
+            Sources<br>
             <select id="row" size="10" style="min-width: 100px;">
             </select>
           </td>

http://git-wip-us.apache.org/repos/asf/chukwa/blob/7ae68398/src/main/web/hicc/jsp/host_selector_dropdown.jsp
----------------------------------------------------------------------
diff --git a/src/main/web/hicc/jsp/host_selector_dropdown.jsp b/src/main/web/hicc/jsp/host_selector_dropdown.jsp
index 606104a..f8e316b 100644
--- a/src/main/web/hicc/jsp/host_selector_dropdown.jsp
+++ b/src/main/web/hicc/jsp/host_selector_dropdown.jsp
@@ -72,7 +72,7 @@
         }
       } catch (NullPointerException e) {
       }
-      Set<String> machines = ChukwaHBaseStore.getHostnames(cluster, time.getStartTime(), time.getEndTime(), false);
+      Set<String> machines = ChukwaHBaseStore.getSourceNames("ChukwaMetrics");
       for(String machine : machines) {
         if(hosts.containsKey(machine)) {
           out.println("<option selected>"+machine+"</option>");

http://git-wip-us.apache.org/repos/asf/chukwa/blob/7ae68398/src/test/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestLog4JMetricsContextChukwaRecord.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestLog4JMetricsContextChukwaRecord.java b/src/test/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestLog4JMetricsContextChukwaRecord.java
index 2297bea..349977e 100644
--- a/src/test/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestLog4JMetricsContextChukwaRecord.java
+++ b/src/test/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestLog4JMetricsContextChukwaRecord.java
@@ -27,16 +27,16 @@ import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
 
 public class TestLog4JMetricsContextChukwaRecord extends TestCase {
   private static String[] chukwaQueueLog = { 
-      "2009-05-06 00:00:21,982 INFO chukwa.metrics.chunkQueue: {\"removedChunk\":1,\"recordName\":\"chunkQueue\",\"queueSize\":94,\"chukwa_timestamp\":1241568021982,\"removedChunk_raw\":0,\"dataSize\":10373608,\"fullQueue\":1,\"addedChunk_rate\":0,\"addedChunk_raw\":0,\"period\":60,\"addedChunk\":95,\"hostName\":\"test.com\",\"removedChunk_rate\":0,\"contextName\":\"chunkQueue\"}",
-      "2009-05-06 00:01:21,981 INFO chukwa.metrics.chunkQueue: {\"removedChunk\":1,\"recordName\":\"chunkQueue\",\"queueSize\":94,\"chukwa_timestamp\":1241568081981,\"removedChunk_raw\":0,\"dataSize\":10373608,\"fullQueue\":1,\"addedChunk_rate\":0,\"addedChunk_raw\":0,\"period\":60,\"addedChunk\":95,\"hostName\":\"test.com\",\"removedChunk_rate\":0,\"contextName\":\"chunkQueue\"}",
-      "2009-05-06 00:02:21,982 INFO chukwa.metrics.chunkQueue: {\"removedChunk\":1,\"recordName\":\"chunkQueue\",\"queueSize\":94,\"chukwa_timestamp\":1241568141982,\"removedChunk_raw\":0,\"dataSize\":10373608,\"fullQueue\":1,\"addedChunk_rate\":0,\"addedChunk_raw\":0,\"period\":60,\"addedChunk\":95,\"hostName\":\"test.com\",\"removedChunk_rate\":0,\"contextName\":\"chunkQueue\"}",
+      "2009-05-06 00:00:21,982 INFO chukwa.metrics.chunkQueue: {\"removedChunk\":1,\"recordName\":\"chunkQueue\",\"queueSize\":94,\"timestamp\":1241568021982,\"removedChunk_raw\":0,\"dataSize\":10373608,\"fullQueue\":1,\"addedChunk_rate\":0,\"addedChunk_raw\":0,\"period\":60,\"addedChunk\":95,\"hostName\":\"test.com\",\"removedChunk_rate\":0,\"contextName\":\"chunkQueue\"}",
+      "2009-05-06 00:01:21,981 INFO chukwa.metrics.chunkQueue: {\"removedChunk\":1,\"recordName\":\"chunkQueue\",\"queueSize\":94,\"timestamp\":1241568081981,\"removedChunk_raw\":0,\"dataSize\":10373608,\"fullQueue\":1,\"addedChunk_rate\":0,\"addedChunk_raw\":0,\"period\":60,\"addedChunk\":95,\"hostName\":\"test.com\",\"removedChunk_rate\":0,\"contextName\":\"chunkQueue\"}",
+      "2009-05-06 00:02:21,982 INFO chukwa.metrics.chunkQueue: {\"removedChunk\":1,\"recordName\":\"chunkQueue\",\"queueSize\":94,\"timestamp\":1241568141982,\"removedChunk_raw\":0,\"dataSize\":10373608,\"fullQueue\":1,\"addedChunk_rate\":0,\"addedChunk_raw\":0,\"period\":60,\"addedChunk\":95,\"hostName\":\"test.com\",\"removedChunk_rate\":0,\"contextName\":\"chunkQueue\"}",
   };
   
   private static String[] chukwaAgentLog = {
-    "2009-05-06 23:33:35,213 INFO chukwa.metrics.chukwaAgent: {\"addedAdaptor_rate\":0,\"addedAdaptor_raw\":0,\"recordName\":\"chukwaAgent\",\"chukwa_timestamp\":1241652815212,\"removedAdaptor_rate\":0,\"removedAdaptor\":0,\"period\":60,\"adaptorCount\":4,\"removedAdaptor_raw\":0,\"process\":\"ChukwaAgent\",\"addedAdaptor\":4,\"hostName\":\"test.com\",\"contextName\":\"chukwaAgent\"}",
-    "2009-05-06 23:34:35,211 INFO chukwa.metrics.chukwaAgent: {\"addedAdaptor_rate\":0,\"addedAdaptor_raw\":0,\"recordName\":\"chukwaAgent\",\"chukwa_timestamp\":1241652875211,\"removedAdaptor_rate\":0,\"removedAdaptor\":0,\"period\":60,\"adaptorCount\":4,\"removedAdaptor_raw\":0,\"process\":\"ChukwaAgent\",\"addedAdaptor\":4,\"hostName\":\"test.com\",\"contextName\":\"chukwaAgent\"}",
-    "2009-05-06 23:35:35,212 INFO chukwa.metrics.chukwaAgent: {\"addedAdaptor_rate\":0,\"addedAdaptor_raw\":0,\"recordName\":\"chukwaAgent\",\"chukwa_timestamp\":1241652935212,\"removedAdaptor_rate\":0,\"removedAdaptor\":0,\"period\":60,\"adaptorCount\":4,\"removedAdaptor_raw\":0,\"process\":\"ChukwaAgent\",\"addedAdaptor\":4,\"hostName\":\"test.com\",\"contextName\":\"chukwaAgent\"}",
-    "2009-05-06 23:39:35,215 INFO chukwa.metrics.chukwaAgent: {\"addedAdaptor_rate\":0,\"addedAdaptor_raw\":0,\"recordName\":\"chukwaAgent\",\"chukwa_timestamp\":1241653175214,\"removedAdaptor_rate\":0,\"removedAdaptor\":0,\"period\":60,\"adaptorCount\":4,\"removedAdaptor_raw\":0,\"process\":\"ChukwaAgent\",\"addedAdaptor\":4,\"hostName\":\"test.com\",\"contextName\":\"CA\"}",
+    "2009-05-06 23:33:35,213 INFO chukwa.metrics.chukwaAgent: {\"addedAdaptor_rate\":0,\"addedAdaptor_raw\":0,\"recordName\":\"chukwaAgent\",\"timestamp\":1241652815212,\"removedAdaptor_rate\":0,\"removedAdaptor\":0,\"period\":60,\"adaptorCount\":4,\"removedAdaptor_raw\":0,\"process\":\"ChukwaAgent\",\"addedAdaptor\":4,\"hostName\":\"test.com\",\"contextName\":\"chukwaAgent\"}",
+    "2009-05-06 23:34:35,211 INFO chukwa.metrics.chukwaAgent: {\"addedAdaptor_rate\":0,\"addedAdaptor_raw\":0,\"recordName\":\"chukwaAgent\",\"timestamp\":1241652875211,\"removedAdaptor_rate\":0,\"removedAdaptor\":0,\"period\":60,\"adaptorCount\":4,\"removedAdaptor_raw\":0,\"process\":\"ChukwaAgent\",\"addedAdaptor\":4,\"hostName\":\"test.com\",\"contextName\":\"chukwaAgent\"}",
+    "2009-05-06 23:35:35,212 INFO chukwa.metrics.chukwaAgent: {\"addedAdaptor_rate\":0,\"addedAdaptor_raw\":0,\"recordName\":\"chukwaAgent\",\"timestamp\":1241652935212,\"removedAdaptor_rate\":0,\"removedAdaptor\":0,\"period\":60,\"adaptorCount\":4,\"removedAdaptor_raw\":0,\"process\":\"ChukwaAgent\",\"addedAdaptor\":4,\"hostName\":\"test.com\",\"contextName\":\"chukwaAgent\"}",
+    "2009-05-06 23:39:35,215 INFO chukwa.metrics.chukwaAgent: {\"addedAdaptor_rate\":0,\"addedAdaptor_raw\":0,\"recordName\":\"chukwaAgent\",\"timestamp\":1241653175214,\"removedAdaptor_rate\":0,\"removedAdaptor\":0,\"period\":60,\"adaptorCount\":4,\"removedAdaptor_raw\":0,\"process\":\"ChukwaAgent\",\"addedAdaptor\":4,\"hostName\":\"test.com\",\"contextName\":\"CA\"}",
   };
 
   public void testLog4JMetricsContextChukwaRecord() throws Throwable {
@@ -44,7 +44,7 @@ public class TestLog4JMetricsContextChukwaRecord extends TestCase {
      Log4JMetricsContextChukwaRecord rec = new Log4JMetricsContextChukwaRecord(chukwaQueueLog[0]);
       ChukwaRecord chukwaRecord = rec.getChukwaRecord();
       assertEquals("chunkQueue", rec.getRecordType());
-      assertEquals("1241568021982", chukwaRecord.getValue("chukwa_timestamp"));
+      assertEquals("1241568021982", chukwaRecord.getValue("timestamp"));
       assertEquals((1241568021982l/60000)*60000, rec.getTimestamp());
       assertEquals("94", chukwaRecord.getValue("queueSize"));
     }

http://git-wip-us.apache.org/repos/asf/chukwa/blob/7ae68398/src/test/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestPsOutput.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestPsOutput.java b/src/test/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestPsOutput.java
deleted file mode 100644
index 11424c5..0000000
--- a/src/test/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestPsOutput.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.chukwa.extraction.demux.processor.mapper;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.chukwa.extraction.demux.processor.mapper.Ps.InvalidPsRecord;
-import org.apache.hadoop.chukwa.extraction.demux.processor.mapper.Ps.PsOutput;
-
-import junit.framework.TestCase;
-
-public class TestPsOutput extends TestCase {
-
-  public void testGetRecordList() throws IOException, InvalidPsRecord {
-    // below is from command
-    // "ps axo pid,user,vsize,size,pcpu,pmem,time,start_time,start,cmd"
-    String output = "  PID USER        VSZ    SZ %CPU %MEM     TIME START  STARTED CMD\n"
-        + "    1 root       2064   284  0.0  0.0 00:00:02  2008   Dec 29 init [5]\n"
-        + "    2 root          0     0  0.0  0.0 00:00:01  2008   Dec 29 [migration/0]\n"
-        + "20270 chzhang    4248   588  0.0  0.0 00:00:00 15:32 15:32:36 ps axo pid,user,vsize,size,pcpu,pmem,time,start_time,start,cmd\n"
-        + "28371 angelac2   7100  1716  0.0  0.0 00:00:00 Feb27   Feb 27 /usr/libexec/gconfd-2 5\n";
-
-    PsOutput pso = new PsOutput(output);
-    ArrayList<HashMap<String, String>> processes = pso.getProcessList();
-    assertEquals(4, processes.size());
-    assertEquals("Dec29", processes.get(0).get("STARTED"));
-    assertEquals("15:32:36", processes.get(2).get("STARTED"));
-    assertEquals(
-        "ps axo pid,user,vsize,size,pcpu,pmem,time,start_time,start,cmd",
-        processes.get(2).get("CMD"));
-  }
-
-}
