http://git-wip-us.apache.org/repos/asf/incubator-sdap-mudrod/blob/39379fa9/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionExtractor.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionExtractor.java
 
b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionExtractor.java
new file mode 100644
index 0000000..ad97bef
--- /dev/null
+++ 
b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionExtractor.java
@@ -0,0 +1,531 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License"); you
+ * may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sdap.mudrod.weblog.structure;
+
+import org.apache.sdap.mudrod.driver.ESDriver;
+import org.apache.sdap.mudrod.driver.SparkDriver;
+import org.apache.sdap.mudrod.main.MudrodConstants;
+import org.apache.spark.api.java.JavaPairRDD;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.Optional;
+import org.apache.spark.api.java.function.FlatMapFunction;
+import org.apache.spark.api.java.function.Function;
+import org.apache.spark.api.java.function.Function2;
+import org.apache.spark.api.java.function.PairFunction;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.search.SearchHit;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import scala.Tuple2;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+/**
+ * ClassName: SessionExtractor Function: Extract sessions details from
+ * reconstructed sessions.
+ */
+public class SessionExtractor implements Serializable {
+
+  private static final Logger LOG = 
LoggerFactory.getLogger(SessionExtractor.class);
+
+  /**
+   *
+   */
+  private static final long serialVersionUID = 1L;
+
+  public SessionExtractor() {
+    // default constructor
+  }
+
+  /**
+   * extractClickStreamFromES:Extract click streams from logs stored in
+   * Elasticsearch
+   *
+   * @param props
+   *          the Mudrod configuration
+   * @param es
+   *          the Elasticsearch drive
+   * @param spark
+   *          the spark driver
+   * @return clickstream list in JavaRDD format {@link ClickStream}
+   */
+  public JavaRDD<ClickStream> extractClickStreamFromES(Properties props, 
ESDriver es, SparkDriver spark) {
+    switch (props.getProperty(MudrodConstants.PROCESS_TYPE)) {
+      case "sequential":
+        List<ClickStream> queryList = this.getClickStreamList(props, es);
+        return spark.sc.parallelize(queryList);
+      case "parallel":
+        return getClickStreamListInParallel(props, spark, es);
+      default:
+      LOG.error("Error finding processing type for '{}'. Please check your 
config.xml.", props.getProperty(MudrodConstants.PROCESS_TYPE));
+    }
+    return null;
+  }
+
+  /**
+   * getClickStreamList:Extract click streams from logs stored in 
Elasticsearch.
+   *
+   * @param props
+   *          the Mudrod configuration
+   * @param es
+   *          the Elasticsearch driver
+   * @return clickstream list {@link ClickStream}
+   */
+  protected List<ClickStream> getClickStreamList(Properties props, ESDriver 
es) {
+    List<String> logIndexList = 
es.getIndexListWithPrefix(props.getProperty(MudrodConstants.LOG_INDEX));
+
+    List<ClickStream> result = new ArrayList<>();
+    for (int n = 0; n < logIndexList.size(); n++) {
+      String logIndex = logIndexList.get(n);
+      List<String> sessionIdList;
+      try {
+        sessionIdList = this.getSessions(props, es, logIndex);
+        Session session = new Session(props, es);
+        int sessionNum = sessionIdList.size();
+        for (int i = 0; i < sessionNum; i++) {
+          String[] sArr = sessionIdList.get(i).split(",");
+          List<ClickStream> datas = session.getClickStreamList(sArr[1], 
sArr[2], sArr[0]);
+          result.addAll(datas);
+        }
+      } catch (Exception e) {
+        LOG.error("Error during extraction of Clickstreams from log index. 
{}", e);
+      }
+    }
+
+    return result;
+  }
+
+  protected JavaRDD<ClickStream> getClickStreamListInParallel(Properties 
props, SparkDriver spark, ESDriver es) {
+
+    List<String> logIndexList = 
es.getIndexListWithPrefix(props.getProperty(MudrodConstants.LOG_INDEX));
+
+    LOG.info("Retrieved {}", logIndexList.toString());
+
+    List<String> sessionIdList = new ArrayList<>();
+    for (int n = 0; n < logIndexList.size(); n++) {
+      String logIndex = logIndexList.get(n);
+      List<String> tmpsessionList = this.getSessions(props, es, logIndex);
+      sessionIdList.addAll(tmpsessionList);
+    }
+
+    JavaRDD<String> sessionRDD = spark.sc.parallelize(sessionIdList, 16);
+
+    JavaRDD<ClickStream> clickStreamRDD = sessionRDD.mapPartitions(new 
FlatMapFunction<Iterator<String>, ClickStream>() {
+      /**
+       *
+       */
+      private static final long serialVersionUID = 1L;
+
+      @Override
+      public Iterator<ClickStream> call(Iterator<String> arg0) throws 
Exception {
+        ESDriver tmpES = new ESDriver(props);
+        tmpES.createBulkProcessor();
+
+        Session session = new Session(props, tmpES);
+        List<ClickStream> clickstreams = new ArrayList<>();
+        while (arg0.hasNext()) {
+          String s = arg0.next();
+          String[] sArr = s.split(",");
+          List<ClickStream> clicks = session.getClickStreamList(sArr[1], 
sArr[2], sArr[0]);
+          clickstreams.addAll(clicks);
+        }
+        tmpES.destroyBulkProcessor();
+        tmpES.close();
+        return clickstreams.iterator();
+      }
+    });
+
+    LOG.info("Clickstream number: {}", clickStreamRDD.count());
+
+    return clickStreamRDD;
+  }
+
+  // This function is reserved and not being used for now
+
+  /**
+   * loadClickStremFromTxt:Load click stream form txt file
+   *
+   * @param clickthroughFile
+   *          txt file
+   * @param sc
+   *          the spark context
+   * @return clickstream list in JavaRDD format {@link ClickStream}
+   */
+  public JavaRDD<ClickStream> loadClickStremFromTxt(String clickthroughFile, 
JavaSparkContext sc) {
+    return sc.textFile(clickthroughFile).flatMap(new FlatMapFunction<String, 
ClickStream>() {
+      /**
+       *
+       */
+      private static final long serialVersionUID = 1L;
+
+      @SuppressWarnings("unchecked")
+      @Override
+      public Iterator<ClickStream> call(String line) throws Exception {
+        List<ClickStream> clickthroughs = (List<ClickStream>) 
ClickStream.parseFromTextLine(line);
+        return (Iterator<ClickStream>) clickthroughs;
+      }
+    });
+  }
+
+  /**
+   * bulidDataQueryRDD: convert click stream list to data set queries pairs.
+   *
+   * @param clickstreamRDD:
+   *          click stream data
+   * @param downloadWeight:
+   *          weight of download behavior
+   * @return JavaPairRDD, key is short name of data set, and values are queries
+   */
+  public JavaPairRDD<String, List<String>> 
bulidDataQueryRDD(JavaRDD<ClickStream> clickstreamRDD, int downloadWeight) {
+    return clickstreamRDD.mapToPair(new PairFunction<ClickStream, String, 
List<String>>() {
+      /**
+       *
+       */
+      private static final long serialVersionUID = 1L;
+
+      @Override
+      public Tuple2<String, List<String>> call(ClickStream click) throws 
Exception {
+        List<String> query = new ArrayList<>();
+        // important! download behavior is given higher weights
+        // than viewing
+        // behavior
+        boolean download = click.isDownload();
+        int weight = 1;
+        if (download) {
+          weight = downloadWeight;
+        }
+        for (int i = 0; i < weight; i++) {
+          query.add(click.getKeyWords());
+        }
+
+        return new Tuple2<>(click.getViewDataset(), query);
+      }
+    }).reduceByKey(new Function2<List<String>, List<String>, List<String>>() {
+      /**
+       *
+       */
+      private static final long serialVersionUID = 1L;
+
+      @Override
+      public List<String> call(List<String> v1, List<String> v2) throws 
Exception {
+        List<String> list = new ArrayList<>();
+        list.addAll(v1);
+        list.addAll(v2);
+        return list;
+      }
+    });
+  }
+
+  /**
+   * getSessions: Get sessions from logs
+   *
+   * @param props
+   *          the Mudrod configuration
+   * @param es
+   *          the Elasticsearch driver
+   * @param logIndex
+   *          a log index name
+   * @return list of session names
+   */
+  protected List<String> getSessions(Properties props, ESDriver es, String 
logIndex) {
+
+    String cleanupPrefix = 
props.getProperty(MudrodConstants.CLEANUP_TYPE_PREFIX);
+    String sessionStatPrefix = 
props.getProperty(MudrodConstants.SESSION_STATS_PREFIX);
+
+    List<String> sessions = new ArrayList<>();
+    SearchResponse scrollResp = 
es.getClient().prepareSearch(logIndex).setTypes(sessionStatPrefix).setScroll(new
 
TimeValue(60000)).setQuery(QueryBuilders.matchAllQuery()).setSize(100).execute()
+            .actionGet();
+    while (true) {
+      for (SearchHit hit : scrollResp.getHits().getHits()) {
+        Map<String, Object> session = hit.getSource();
+        String sessionID = (String) session.get("SessionID");
+        sessions.add(sessionID + "," + logIndex + "," + cleanupPrefix);
+      }
+
+      scrollResp = 
es.getClient().prepareSearchScroll(scrollResp.getScrollId()).setScroll(new 
TimeValue(600000)).execute().actionGet();
+      if (scrollResp.getHits().getHits().length == 0) {
+        break;
+      }
+    }
+
+    return sessions;
+  }
+
+  public JavaPairRDD<String, Double> bulidUserItermRDD(JavaRDD<ClickStream> 
clickstreamRDD) {
+    return clickstreamRDD.mapToPair(new PairFunction<ClickStream, String, 
Double>() {
+      /**
+       *
+       */
+      private static final long serialVersionUID = 1L;
+
+      @Override
+      public Tuple2<String, Double> call(ClickStream click) throws Exception {
+        double rate = 1;
+        boolean download = click.isDownload();
+        if (download) {
+          rate = 2;
+        }
+
+        String sessionID = click.getSessionID();
+        String user = sessionID.split("@")[0];
+
+        return new Tuple2<>(user + "," + click.getViewDataset(), rate);
+      }
+    }).reduceByKey(new Function2<Double, Double, Double>() {
+      /**
+       *
+       */
+      private static final long serialVersionUID = 1L;
+
+      @Override
+      public Double call(Double v1, Double v2) throws Exception {
+        return v1 >= v2 ? v1 : v2;
+
+      }
+    });
+  }
+
+  public JavaPairRDD<String, Double> bulidSessionItermRDD(JavaRDD<ClickStream> 
clickstreamRDD) {
+    JavaPairRDD<String, String> sessionItemRDD = clickstreamRDD.mapToPair(new 
PairFunction<ClickStream, String, String>() {
+      /**
+       *
+       */
+      private static final long serialVersionUID = 1L;
+
+      @Override
+      public Tuple2<String, String> call(ClickStream click) throws Exception {
+
+        String sessionID = click.getSessionID();
+        return new Tuple2<>(sessionID, click.getViewDataset());
+      }
+    }).distinct();
+
+    // remove some sessions
+    JavaPairRDD<String, Double> sessionItemNumRDD = 
sessionItemRDD.keys().mapToPair(new PairFunction<String, String, Double>() {
+      /**
+       *
+       */
+      private static final long serialVersionUID = 1L;
+
+      @Override
+      public Tuple2<String, Double> call(String item) throws Exception {
+        return new Tuple2<>(item, 1.0);
+      }
+    }).reduceByKey(new Function2<Double, Double, Double>() {
+      /**
+       *
+       */
+      private static final long serialVersionUID = 1L;
+
+      @Override
+      public Double call(Double v1, Double v2) throws Exception {
+        return v1 + v2;
+      }
+    }).filter(new Function<Tuple2<String, Double>, Boolean>() {
+      /**
+       *
+       */
+      private static final long serialVersionUID = 1L;
+
+      @Override
+      public Boolean call(Tuple2<String, Double> arg0) throws Exception {
+        Boolean b = true;
+        if (arg0._2 < 2) {
+          b = false;
+        }
+        return b;
+      }
+    });
+
+    return sessionItemNumRDD.leftOuterJoin(sessionItemRDD).mapToPair(new 
PairFunction<Tuple2<String, Tuple2<Double, Optional<String>>>, String, 
Double>() {
+      /**
+       *
+       */
+      private static final long serialVersionUID = 1L;
+
+      @Override
+      public Tuple2<String, Double> call(Tuple2<String, Tuple2<Double, 
Optional<String>>> arg0) throws Exception {
+
+        Tuple2<Double, Optional<String>> test = arg0._2;
+        Optional<String> optStr = test._2;
+        String item = "";
+        if (optStr.isPresent()) {
+          item = optStr.get();
+        }
+        return new Tuple2<>(arg0._1 + "," + item, 1.0);
+      }
+
+    });
+  }
+
+  public JavaPairRDD<String, List<String>> bulidSessionDatasetRDD(Properties 
props, ESDriver es, SparkDriver spark) {
+
+    List<String> result = new ArrayList<>();
+    List<String> logIndexList = 
es.getIndexListWithPrefix(props.getProperty(MudrodConstants.LOG_INDEX));
+    for (int n = 0; n < logIndexList.size(); n++) {
+      String logIndex = logIndexList.get(n);
+      SearchResponse scrollResp = 
es.getClient().prepareSearch(logIndex).setTypes(props.getProperty(MudrodConstants.SESSION_STATS_PREFIX)).setScroll(new
 TimeValue(60000)).setQuery(QueryBuilders.matchAllQuery())
+              .setSize(100).execute().actionGet();
+      while (true) {
+        for (SearchHit hit : scrollResp.getHits().getHits()) {
+          Map<String, Object> session = hit.getSource();
+          String sessionID = (String) session.get("SessionID");
+          String views = (String) session.get("views");
+          if (views != null && !"".equals(views)) {
+            String sessionItems = sessionID + ":" + views;
+            result.add(sessionItems);
+          }
+        }
+
+        scrollResp = 
es.getClient().prepareSearchScroll(scrollResp.getScrollId()).setScroll(new 
TimeValue(600000)).execute().actionGet();
+        if (scrollResp.getHits().getHits().length == 0) {
+          break;
+        }
+      }
+    }
+
+    JavaRDD<String> sessionRDD = spark.sc.parallelize(result);
+
+    return sessionRDD.mapToPair(new PairFunction<String, String, 
List<String>>() {
+      private static final long serialVersionUID = 1L;
+
+      @Override
+      public Tuple2<String, List<String>> call(String sessionitem) throws 
Exception {
+        String[] splits = sessionitem.split(":");
+        String sessionId = splits[0];
+        List<String> itemList = new ArrayList<>();
+
+        String items = splits[1];
+        String[] itemArr = items.split(",");
+        int size = itemArr.length;
+        for (int i = 0; i < size; i++) {
+          String item = itemArr[i];
+          if (!itemList.contains(item))
+            itemList.add(itemArr[i]);
+        }
+
+        return new Tuple2<>(sessionId, itemList);
+      }
+    });
+  }
+
+  /**
+   * extractClickStreamFromES:Extract click streams from logs stored in
+   * Elasticsearch
+   *
+   * @param props
+   *          the Mudrod configuration
+   * @param es
+   *          the Elasticsearch drive
+   * @param spark
+   *          the spark driver
+   * @return clickstream list in JavaRDD format {@link ClickStream}
+   */
+  public JavaRDD<RankingTrainData> extractRankingTrainData(Properties props, 
ESDriver es, SparkDriver spark) {
+
+    List<RankingTrainData> queryList = this.extractRankingTrainData(props, es);
+    return spark.sc.parallelize(queryList);
+
+  }
+
+  /**
+   * getClickStreamList:Extract click streams from logs stored in 
Elasticsearch.
+   *
+   * @param props
+   *          the Mudrod configuration
+   * @param es
+   *          the Elasticsearch driver
+   * @return clickstream list {@link ClickStream}
+   */
+  protected List<RankingTrainData> extractRankingTrainData(Properties props, 
ESDriver es) {
+    List<String> logIndexList = 
es.getIndexListWithPrefix(props.getProperty(MudrodConstants.LOG_INDEX));
+
+    LOG.info(logIndexList.toString());
+
+    List<RankingTrainData> result = new ArrayList<>();
+    for (int n = 0; n < logIndexList.size(); n++) {
+      String logIndex = logIndexList.get(n);
+      List<String> sessionIdList;
+      try {
+        sessionIdList = this.getSessions(props, es, logIndex);
+        Session session = new Session(props, es);
+        int sessionNum = sessionIdList.size();
+        for (int i = 0; i < sessionNum; i++) {
+          String[] sArr = sessionIdList.get(i).split(",");
+          List<RankingTrainData> datas = session.getRankingTrainData(sArr[1], 
sArr[2], sArr[0]);
+          result.addAll(datas);
+        }
+      } catch (Exception e) {
+        LOG.error("Error which extracting ranking train data: {}", e);
+      }
+    }
+
+    return result;
+  }
+
+  protected JavaRDD<RankingTrainData> 
extractRankingTrainDataInParallel(Properties props, SparkDriver spark, ESDriver 
es) {
+
+    List<String> logIndexList = 
es.getIndexListWithPrefix(props.getProperty(MudrodConstants.LOG_INDEX));
+
+    LOG.info(logIndexList.toString());
+
+    List<String> sessionIdList = new ArrayList<>();
+    for (int n = 0; n < logIndexList.size(); n++) {
+      String logIndex = logIndexList.get(n);
+      List<String> tmpsessionList = this.getSessions(props, es, logIndex);
+      sessionIdList.addAll(tmpsessionList);
+    }
+
+    JavaRDD<String> sessionRDD = spark.sc.parallelize(sessionIdList, 16);
+
+    JavaRDD<RankingTrainData> clickStreamRDD = sessionRDD.mapPartitions(new 
FlatMapFunction<Iterator<String>, RankingTrainData>() {
+      /**
+       *
+       */
+      private static final long serialVersionUID = 1L;
+
+      @Override
+      public Iterator<RankingTrainData> call(Iterator<String> arg0) throws 
Exception {
+        ESDriver tmpES = new ESDriver(props);
+        tmpES.createBulkProcessor();
+
+        Session session = new Session(props, tmpES);
+        List<RankingTrainData> clickstreams = new ArrayList<>();
+        while (arg0.hasNext()) {
+          String s = arg0.next();
+          String[] sArr = s.split(",");
+          List<RankingTrainData> clicks = session.getRankingTrainData(sArr[1], 
sArr[2], sArr[0]);
+          clickstreams.addAll(clicks);
+        }
+        tmpES.destroyBulkProcessor();
+        tmpES.close();
+        return clickstreams.iterator();
+      }
+    });
+
+    LOG.info("Clickstream number: {}", clickStreamRDD.count());
+
+    return clickStreamRDD;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-sdap-mudrod/blob/39379fa9/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionNode.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionNode.java 
b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionNode.java
new file mode 100644
index 0000000..5e43f3e
--- /dev/null
+++ 
b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionNode.java
@@ -0,0 +1,344 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License"); you
+ * may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sdap.mudrod.weblog.structure;
+
+import java.io.UnsupportedEncodingException;
+import java.net.URLDecoder;
+import java.util.*;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
/**
 * ClassName: SessionNode Function: Functions related to a node in a session
 * tree structure.
 */
public class SessionNode {
  // id: Node ID
  protected String id;
  // value: Node value
  protected String value;
  // parent: Parent node of this node
  protected SessionNode parent;
  // children: Child nodes of this node
  protected List<SessionNode> children = new ArrayList<>();
  // time: request time of node
  protected String time;
  // request: request url of this node
  protected String request;
  // referer: previous request url of this node
  protected String referer;
  // seq: sequence of this node
  protected int seq;
  // key: type of this node extracted from url, including three types -
  // dataset,datasetlist,ftp
  protected String key;
  // logType: log types of this node, including two types - po.dacc, ftp
  protected String logType;
  // search: query extracted from this node
  protected String search;
  // filter: filter facets extracted from this node
  protected Map<String, String> filter;
  // datasetId: viewed/downloaded data set ID
  protected String datasetId;

  public SessionNode() {

  }

  /**
   * Creates a new instance of SessionNode.
   *
   * @param request: request url
   * @param logType: including two types - po.dacc, ftp
   * @param referer: previous request url
   * @param time:    request time of node
   * @param seq:     sequence of this node
   */
  public SessionNode(String request, String logType, String referer, String time, int seq) {
    this.logType = logType;
    this.time = time;
    this.seq = seq;
    this.setRequest(request);
    this.setReferer(referer);
    this.setKey(request, logType);
  }

  /**
   * setReferer: Set previous request url of this node. The host prefix is
   * stripped so referers are stored as site-relative paths; null becomes "".
   *
   * @param referer previous request url
   */
  public void setReferer(String referer) {
    if (referer == null) {
      this.referer = "";
      return;
    }
    // FIX: the original literal was garbled by an HTML-entity artifact (";)
    this.referer = referer.toLowerCase().replace("http://podaac.jpl.nasa.gov", "");
  }

  /**
   * setRequest: Set request url of this node. PO.DAAC web-log requests are
   * additionally parsed to extract the path and data set ID.
   *
   * @param req request url
   */
  public void setRequest(String req) {
    this.request = req;
    if (this.logType.equals("PO.DAAC")) {
      this.parseRequest(req);
    }
  }

  /**
   * getChildren: Get child nodes of this node
   *
   * @return child nodes
   */
  public List<SessionNode> getChildren() {
    return this.children;
  }

  /**
   * setChildren: Set child nodes of this node
   *
   * @param children child nodes of this node
   */
  public void setChildren(List<SessionNode> children) {
    this.children = children;
  }

  /**
   * addChildren: Add a children node
   *
   * @param node session node
   */
  public void addChildren(SessionNode node) {
    this.children.add(node);
  }

  /**
   * getId: Get node ID
   *
   * @return node ID of this node
   */
  public String getId() {
    return this.id;
  }

  /**
   * bSame: Compare this node with another node. Two nodes are considered the
   * same when their request urls are equal.
   *
   * @param node {@link SessionNode}
   * @return boolean value, true means the two nodes are the same
   */
  public Boolean bSame(SessionNode node) {
    return this.request.equals(node.request);
  }

  /**
   * setKey: Set request type which contains three categories -
   * dataset,datasetlist,ftp
   *
   * @param request request url
   * @param logType url type
   */
  public void setKey(String request, String logType) {
    this.key = "";
    String datasetlist = "/datasetlist?";
    String dataset = "/dataset/";
    if (logType.equals("ftp")) {
      this.key = "ftp";
    } else if (logType.equals("root")) {
      this.key = "root";
    } else {
      if (request.contains(datasetlist)) {
        this.key = "datasetlist";
      } else if (request.contains(dataset) /* || request.contains(granule) */) {
        this.key = "dataset";
      }
    }
  }

  /**
   * getKey: Get request type which contains three categories -
   * dataset,datasetlist,ftp
   *
   * @return request url type of this node
   */
  public String getKey() {
    return this.key;
  }

  /**
   * getRequest: Get node request
   *
   * @return request url of this node
   */
  public String getRequest() {
    return this.request;
  }

  /**
   * getReferer: Get previous request url of this node
   *
   * @return previous request url of this node
   */
  public String getReferer() {
    return this.referer;
  }

  /**
   * getParent: Get parent node of this node
   *
   * @return parent node of this node
   */
  public SessionNode getParent() {
    return this.parent;
  }

  /**
   * setParent: Set parent node of this node
   *
   * @param parent the previous request node of this node
   */
  public void setParent(SessionNode parent) {
    this.parent = parent;
  }

  /**
   * getSearch: Get query of this node
   *
   * @return search query of this node
   */
  public String getSearch() {
    return this.search;
  }

  /**
   * getFilter: Get filter facets of this node
   *
   * @return filter values of this node
   */
  public Map<String, String> getFilter() {
    return this.filter;
  }

  /**
   * getDatasetId: Get data set ID of this node
   *
   * @return viewing/downloading data set of this node
   */
  public String getDatasetId() {
    return this.datasetId;
  }

  /**
   * getSeq: Get sequence of this node
   *
   * @return request sequence of this node
   */
  public int getSeq() {
    return this.seq;
  }

  /**
   * getFilterStr: Get filter facets of this node as a comma-separated
   * "key=value" string; empty string when no filters are set.
   *
   * @return filters values of this node
   */
  public String getFilterStr() {
    // guard against a never-initialized filter map (set only by parseFilterParams)
    if (this.filter == null || this.filter.isEmpty()) {
      return "";
    }

    StringBuilder sb = new StringBuilder();
    for (Map.Entry<String, String> entry : this.filter.entrySet()) {
      if (sb.length() > 0) {
        sb.append(',');
      }
      sb.append(entry.getKey()).append('=').append(entry.getValue());
    }
    return sb.toString();
  }

  /**
   * parseRequest: Parse request to extract the url path from a raw web-log
   * line of the form "GET <path> HTTP/x", then extract the data set ID when
   * the path points at a dataset page. The stored request is lower-cased.
   *
   * @param request request url of this node
   */
  public void parseRequest(String request) {
    Pattern pattern = Pattern.compile("get (.*?) http/*");
    Matcher matcher = pattern.matcher(request.trim().toLowerCase());
    while (matcher.find()) {
      request = matcher.group(1);
    }
    if (request.contains("/dataset/")) {
      this.parseDatasetId(request);
    }

    this.request = request.toLowerCase();
  }

  /**
   * parseFilterParams: Parse filter facets information from the "ids"/"values"
   * query parameters (colon-separated, URL-encoded lists) and the current
   * search query.
   *
   * @param params filter key value pairs of this node
   */
  private void parseFilterParams(Map<String, String> params) {
    this.filter = new HashMap<>();
    if (params.containsKey("ids")) {
      String idsStr = params.get("ids");
      if (!idsStr.equals("")) {
        String[] ids = decodeUtf8(idsStr).split(":");
        String valueStr = params.get("values");
        if (valueStr != null) {
          String[] values = decodeUtf8(valueStr).split(":");
          // guard against mismatched ids/values lengths
          int size = Math.min(ids.length, values.length);
          for (int i = 0; i < size; i++) {
            this.filter.put(ids[i], values[i]);
          }
        }
      }
    }

    // search may never have been extracted for this node
    if (this.search != null && !this.search.equals("")) {
      this.filter.put("search", this.search);
    }
  }

  /**
   * decodeUtf8: URL-decode a string as UTF-8, replacing the deprecated
   * single-argument URLDecoder.decode.
   *
   * @param s encoded string
   * @return decoded string; the input itself if UTF-8 is unsupported (cannot
   *         happen on a conforming JVM)
   */
  private static String decodeUtf8(String s) {
    try {
      return URLDecoder.decode(s, "UTF-8");
    } catch (UnsupportedEncodingException e) {
      // UTF-8 support is mandated by the Java platform specification
      return s;
    }
  }

  /**
   * parseDatasetId: Parse request to extract data set ID, i.e. the path
   * segment after "/dataset/" (query string excluded).
   *
   * @param request request url
   */
  public void parseDatasetId(String request) {
    request = decodeUtf8(request);
    String[] twoparts = request.split("[?]");
    String[] parts = twoparts[0].split("/");
    if (parts.length <= 2) {
      // no dataset segment present, leave datasetId unset
      return;
    }
    this.datasetId = parts[2];
  }
}

http://git-wip-us.apache.org/repos/asf/incubator-sdap-mudrod/blob/39379fa9/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionTree.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionTree.java 
b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionTree.java
new file mode 100644
index 0000000..db0d372
--- /dev/null
+++ 
b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionTree.java
@@ -0,0 +1,522 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License"); you
+ * may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sdap.mudrod.weblog.structure;
+
+import com.google.gson.Gson;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+
+import org.apache.sdap.mudrod.discoveryengine.MudrodAbstract;
+import org.apache.sdap.mudrod.driver.ESDriver;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.UnsupportedEncodingException;
+import java.util.*;
+import java.util.concurrent.ExecutionException;
+
/**
 * ClassName: SessionTree Function: Convert the request list in a session to a
 * tree of search ("datasetlist"), view ("dataset") and download ("ftp") nodes.
 */
public class SessionTree extends MudrodAbstract {

  /**
   * Serialization UID.
   */
  private static final long serialVersionUID = 1L;
  private static final Logger LOG = LoggerFactory.getLogger(SessionTree.class);
  // size: number of nodes currently inserted into the session tree
  public int size = 0;
  // root: root node of session tree
  protected SessionNode root = null;
  // binsert: becomes true once the first "datasetlist" request is seen;
  // nothing is inserted before that
  public boolean binsert = false;
  // tmpnode: the most recently inserted node (insertion cursor)
  public SessionNode tmpnode;
  // latestDatasetnode: the latest inserted node whose key is "dataset"
  public SessionNode latestDatasetnode;
  // sessionID: session ID
  private String sessionID;
  // cleanupType: session type (index type) in Elasticsearch
  private String cleanupType;
+
+  /**
+   * Creates a new instance of SessionTree.
+   *
+   * @param props:       the Mudrod configuration
+   * @param es:          the Elasticsearch drive
+   * @param rootData:    root node of the tree
+   * @param sessionID:   session ID
+   * @param cleanupType: session type
+   */
+  public SessionTree(Properties props, ESDriver es, SessionNode rootData, 
String sessionID, String cleanupType) {
+    super(props, es, null);
+    root = new SessionNode("root", "root", "", "", 0);
+    tmpnode = root;
+    this.sessionID = sessionID;
+    this.cleanupType = cleanupType;
+  }
+
+  /**
+   * Creates a new instance of SessionTree.
+   *
+   * @param props:       the Mudrod configuration
+   * @param es:          the Elasticsearch drive
+   * @param sessionID:   session ID
+   * @param cleanupType: session type
+   */
+  public SessionTree(Properties props, ESDriver es, String sessionID, String 
cleanupType) {
+    super(props, es, null);
+    root = new SessionNode("root", "root", "", "", 0);
+    root.setParent(root);
+    tmpnode = root;
+    this.sessionID = sessionID;
+    this.cleanupType = cleanupType;
+  }
+
+  /**
+   * insert: insert a node into the session tree.
+   *
+   * @param node {@link SessionNode}
+   * @return session node
+   */
+  public SessionNode insert(SessionNode node) {
+    // begin with datasetlist
+    if (node.getKey().equals("datasetlist")) {
+      this.binsert = true;
+    }
+    if (!this.binsert) {
+      return null;
+    }
+    // remove unrelated node
+    if (!node.getKey().equals("datasetlist") && 
!node.getKey().equals("dataset") && !node.getKey().equals("ftp")) {
+      return null;
+    }
+    // remove dumplicated click
+    if (node.getRequest().equals(tmpnode.getRequest())) {
+      return null;
+    }
+    // search insert node
+    SessionNode parentnode = this.searchParentNode(node);
+    if (parentnode == null) {
+      return null;
+    }
+    node.setParent(parentnode);
+    parentnode.addChildren(node);
+
+    // record insert node
+    tmpnode = node;
+    if ("dataset".equals(node.getKey())) {
+      latestDatasetnode = node;
+    }
+
+    size++;
+    return node;
+  }
+
+  /**
+   * printTree: Print session tree
+   *
+   * @param node root node of the session tree
+   */
+  public void printTree(SessionNode node) {
+    LOG.info("node: {} \n", node.getRequest());
+    if (node.children.isEmpty()) {
+      for (int i = 0; i < node.children.size(); i++) {
+        printTree(node.children.get(i));
+      }
+    }
+  }
+
+  /**
+   * TreeToJson: Convert the session tree to Json object
+   *
+   * @param node node of the session tree
+   * @return tree content in Json format
+   */
+  public JsonObject treeToJson(SessionNode node) {
+    Gson gson = new Gson();
+    JsonObject json = new JsonObject();
+
+    json.addProperty("seq", node.getSeq());
+    if ("datasetlist".equals(node.getKey())) {
+      json.addProperty("icon", "./resources/images/searching.png");
+      json.addProperty("name", node.getRequest());
+    } else if ("dataset".equals(node.getKey())) {
+      json.addProperty("icon", "./resources/images/viewing.png");
+      json.addProperty("name", node.getDatasetId());
+    } else if ("ftp".equals(node.getKey())) {
+      json.addProperty("icon", "./resources/images/downloading.png");
+      json.addProperty("name", node.getRequest());
+    } else if ("root".equals(node.getKey())) {
+      json.addProperty("name", "");
+      json.addProperty("icon", "./resources/images/users.png");
+    }
+
+    if (!node.children.isEmpty()) {
+      List<JsonObject> jsonChildren = new ArrayList<>();
+      for (int i = 0; i < node.children.size(); i++) {
+        JsonObject jsonChild = treeToJson(node.children.get(i));
+        jsonChildren.add(jsonChild);
+      }
+      JsonElement jsonElement = gson.toJsonTree(jsonChildren);
+      json.add("children", jsonElement);
+    }
+
+    return json;
+  }
+
  /**
   * getClickStreamList: Derive click-stream records from the session tree.
   * For every viewed dataset whose parent is a search ("datasetlist") node,
   * one {@link ClickStream} is emitted per analyzed query term, flagged with
   * whether the dataset was subsequently downloaded.
   *
   * @return list of {@link ClickStream} records for this session
   */
  public List<ClickStream> getClickStreamList() {

    List<ClickStream> clickthroughs = new ArrayList<>();
    List<SessionNode> viewnodes = this.getViewNodes(this.root);
    for (int i = 0; i < viewnodes.size(); i++) {

      SessionNode viewnode = viewnodes.get(i);
      SessionNode parent = viewnode.getParent();
      List<SessionNode> children = viewnode.getChildren();

      // Only dataset views reached directly from a search count as
      // click-throughs.
      if (!"datasetlist".equals(parent.getKey())) {
        continue;
      }

      // Extract the search terms from the parent request and run them
      // through the Elasticsearch analyzer.
      RequestUrl requestURL = new RequestUrl();
      String viewquery = "";
      try {
        String infoStr = requestURL.getSearchInfo(viewnode.getRequest());
        viewquery = es.customAnalyzing(props.getProperty("indexName"), infoStr);
      } catch (UnsupportedEncodingException | InterruptedException | ExecutionException e) {
        LOG.warn("Exception getting search info. Ignoring...", e);
      }

      String dataset = viewnode.getDatasetId();
      // A view counts as a download when any of its children is an "ftp"
      // request.
      boolean download = false;
      for (int j = 0; j < children.size(); j++) {
        SessionNode child = children.get(j);
        if ("ftp".equals(child.getKey())) {
          download = true;
          break;
        }
      }

      if (viewquery != null && !"".equals(viewquery)) {
        // The analyzed query may contain several comma-separated terms;
        // emit one record per term.
        String[] queries = viewquery.trim().split(",");
        if (queries.length > 0) {
          for (int k = 0; k < queries.length; k++) {
            ClickStream data = new ClickStream(queries[k], dataset, download);
            data.setSessionId(this.sessionID);
            data.setType(this.cleanupType);
            clickthroughs.add(data);
          }
        }
      }
    }

    return clickthroughs;
  }
+
+  /**
+   * searchParentNode:Get parent node of a session node
+   *
+   * @param node {@link SessionNode}
+   * @return node {@link SessionNode}
+   */
+  private SessionNode searchParentNode(SessionNode node) {
+
+    String nodeKey = node.getKey();
+
+    if ("datasetlist".equals(nodeKey)) {
+      if ("-".equals(node.getReferer())) {
+        return root;
+      } else {
+        SessionNode tmp = this.findLatestRefer(tmpnode, node.getReferer());
+        if (tmp == null) {
+          return root;
+        } else {
+          return tmp;
+        }
+      }
+    } else if ("dataset".equals(nodeKey)) {
+      if ("-".equals(node.getReferer())) {
+        return null;
+      } else {
+        return this.findLatestRefer(tmpnode, node.getReferer());
+      }
+    } else if ("ftp".equals(nodeKey)) {
+      return latestDatasetnode;
+    }
+
+    return tmpnode;
+  }
+
  /**
   * findLatestRefer: Walk upwards from the given node and find the most
   * recent node whose visiting url is equal to the refer url of a session
   * node.
   *
   * @param node:  {@link SessionNode} to start the upward walk from
   * @param refer: referer request url to match
   * @return the matching node, or null when the root is reached without a
   *         match
   */
  private SessionNode findLatestRefer(SessionNode node, String refer) {
    while (true) {
      // Reached the top of the tree without finding a match.
      if ("root".equals(node.getKey())) {
        return null;
      }
      SessionNode parentNode = node.getParent();
      if (refer.equals(parentNode.getRequest())) {
        return parentNode;
      }

      // Also scan the parent's children before moving one level up.
      SessionNode tmp = this.iterChild(parentNode, refer);
      if (tmp == null) {
        node = parentNode;
        continue;
      } else {
        return tmp;
      }
    }
  }
+
+  /**
+   * iterChild:
+   *
+   * @param start
+   * @param refer
+   * @return
+   */
+  private SessionNode iterChild(SessionNode start, String refer) {
+    List<SessionNode> children = start.getChildren();
+    for (int i = children.size() - 1; i >= 0; i--) {
+      SessionNode tmp = children.get(i);
+      if (tmp.getChildren().isEmpty()) {
+        if (refer.equals(tmp.getRequest())) {
+          return tmp;
+        } else {
+          continue;
+        }
+      } else {
+        iterChild(tmp, refer);
+      }
+    }
+
+    return null;
+  }
+
+  /**
+   * check:
+   *
+   * @param children
+   * @param str
+   * @return
+   */
+  private boolean check(List<SessionNode> children, String str) {
+    for (int i = 0; i < children.size(); i++) {
+      if (children.get(i).key.equals(str)) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  /**
+   * insertHelperChildren:
+   *
+   * @param entry
+   * @param children
+   * @return
+   */
+  private boolean insertHelperChildren(SessionNode entry, List<SessionNode> 
children) {
+    for (int i = 0; i < children.size(); i++) {
+      boolean result = insertHelper(entry, children.get(i));
+      if (result) {
+        return result;
+      }
+    }
+    return false;
+
+  }
+
+  /**
+   * insertHelper:
+   *
+   * @param entry
+   * @param node
+   * @return
+   */
+  private boolean insertHelper(SessionNode entry, SessionNode node) {
+    if ("datasetlist".equals(entry.key) || "dataset".equals(entry.key)) {
+      if ("datasetlist".equals(node.key)) {
+        if (node.children.isEmpty()) {
+          node.children.add(entry);
+          return true;
+        } else {
+          boolean flag = check(node.children, "datasetlist");
+          if (!flag) {
+            node.children.add(entry);
+            return true;
+          } else {
+            insertHelperChildren(entry, node.children);
+          }
+        }
+      } else {
+        insertHelperChildren(entry, node.children);
+      }
+    } else if ("ftp".equals(entry.key)) {
+      if ("dataset".equals(node.key)) {
+        if (node.children.isEmpty()) {
+          node.children.add(entry);
+          return true;
+        } else {
+          boolean flag = check(node.children, "dataset");
+          if (!flag) {
+            node.children.add(entry);
+            return true;
+          } else {
+            insertHelperChildren(entry, node.children);
+          }
+        }
+      } else {
+        insertHelperChildren(entry, node.children);
+      }
+    }
+
+    return false;
+  }
+
+  /**
+   * getViewNodes: Get a session node's child nodes whose key is "dataset".
+   *
+   * @param node
+   * @return a list of session node
+   */
+  private List<SessionNode> getViewNodes(SessionNode node) {
+
+    List<SessionNode> viewnodes = new ArrayList<>();
+    if ("dataset".equals(node.getKey())) {
+      viewnodes.add(node);
+    }
+
+    if (!node.children.isEmpty()) {
+      for (int i = 0; i < node.children.size(); i++) {
+        SessionNode childNode = node.children.get(i);
+        viewnodes.addAll(getViewNodes(childNode));
+      }
+    }
+
+    return viewnodes;
+  }
+
  /**
   * getQueryNodes: Collect all nodes in the subtree whose key is
   * "datasetlist", i.e. the search/query requests of the session.
   *
   * @param node root of the subtree to scan
   * @return list of query session nodes
   */
  private List<SessionNode> getQueryNodes(SessionNode node) {
    return this.getNodes(node, "datasetlist");
  }
+
+  private List<SessionNode> getNodes(SessionNode node, String nodeKey) {
+
+    List<SessionNode> nodes = new ArrayList<>();
+    if (node.getKey().equals(nodeKey)) {
+      nodes.add(node);
+    }
+
+    if (!node.children.isEmpty()) {
+      for (int i = 0; i < node.children.size(); i++) {
+        SessionNode childNode = node.children.get(i);
+        nodes.addAll(getNodes(childNode, nodeKey));
+      }
+    }
+
+    return nodes;
+  }
+
  /**
   * Obtain the ranking training data.
   *
   * @param indexName   the index from which to obtain the data
   * @param sessionID   a valid session identifier
   * @return {@link ClickStream}
   * @throws UnsupportedEncodingException if there is an error whilst
   *                                      processing the ranking training data.
   */
  public List<RankingTrainData> getRankingTrainData(String indexName, String sessionID) throws UnsupportedEncodingException {
    // NOTE(review): the sessionID parameter is never read; this.sessionID is
    // recorded on the training data instead — confirm intent.

    List<RankingTrainData> trainDatas = new ArrayList<>();

    List<SessionNode> queryNodes = this.getQueryNodes(this.root);
    for (int i = 0; i < queryNodes.size(); i++) {
      SessionNode querynode = queryNodes.get(i);
      List<SessionNode> children = querynode.getChildren();

      // For each dataset viewed under this query, record whether it was
      // downloaded (has an "ftp" child) and count the downloads.
      LinkedHashMap<String, Boolean> datasetOpt = new LinkedHashMap<>();
      int ndownload = 0;
      for (int j = 0; j < children.size(); j++) {
        SessionNode node = children.get(j);
        if ("dataset".equals(node.getKey())) {
          Boolean bDownload = false;
          List<SessionNode> nodeChildren = node.getChildren();
          int childSize = nodeChildren.size();
          for (int k = 0; k < childSize; k++) {
            if ("ftp".equals(nodeChildren.get(k).getKey())) {
              bDownload = true;
              ndownload += 1;
              break;
            }
          }
          datasetOpt.put(node.datasetId, bDownload);
        }
      }

      // method 1: The priority of download data are higher. Emit one
      // training pair per (downloaded, not-downloaded) dataset combination
      // and per query term.
      if (datasetOpt.size() > 1 && ndownload > 0) {
        // query
        RequestUrl requestURL = new RequestUrl();
        String queryUrl = querynode.getRequest();
        String infoStr = requestURL.getSearchInfo(queryUrl);
        String query = null;
        try {
          query = es.customAnalyzing(props.getProperty("indexName"), infoStr);
        } catch (InterruptedException | ExecutionException e) {
          throw new RuntimeException("Error performing custom analyzing", e);
        }
        Map<String, String> filter = RequestUrl.getFilterInfo(queryUrl);

        for (String datasetA : datasetOpt.keySet()) {
          Boolean bDownloadA = datasetOpt.get(datasetA);
          if (bDownloadA) {
            for (String datasetB : datasetOpt.keySet()) {
              Boolean bDownloadB = datasetOpt.get(datasetB);
              if (!bDownloadB) {

                String[] queries = query.split(",");
                for (int l = 0; l < queries.length; l++) {
                  RankingTrainData trainData = new RankingTrainData(queries[l], datasetA, datasetB);

                  trainData.setSessionId(this.sessionID);
                  trainData.setIndex(indexName);
                  trainData.setFilter(filter);
                  trainDatas.add(trainData);
                }
              }
            }
          }
        }
      }
    }

    return trainDatas;
  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-sdap-mudrod/blob/39379fa9/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/WebLog.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/WebLog.java 
b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/WebLog.java
new file mode 100644
index 0000000..93a9747
--- /dev/null
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/WebLog.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License"); you 
+ * may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sdap.mudrod.weblog.structure;
+
+import java.io.Serializable;
+
/**
 * This class represents an Apache access log line. See
 * http://httpd.apache.org/docs/2.2/logs.html for more details.
 */
public class WebLog implements Serializable {

  // Explicit UID for the Serializable contract (the original relied on the
  // JVM-computed default, which changes with any edit to the class).
  private static final long serialVersionUID = 1L;

  // Month abbreviations in calendar order; index + 1 is the month number.
  private static final String[] MONTHS = { "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec" };

  // Field names keep their original, non-standard capitalization —
  // presumably log records are bound to them by name during
  // (de)serialization; confirm before renaming.
  String LogType;
  String IP;
  String Time;
  String Request;
  double Bytes;

  public WebLog() {

  }

  /** @return the log type of this record */
  public String getLogType() {
    return this.LogType;
  }

  /** @return the client IP address */
  public String getIP() {
    return this.IP;
  }

  /** @return the raw timestamp string of the log line */
  public String getTime() {
    return this.Time;
  }

  /** @return the requested URL */
  public String getRequest() {
    return this.Request;
  }

  /** @return the transferred size in bytes */
  public double getBytes() {
    return this.Bytes;
  }

  /**
   * SwithtoNum: Replace the first month abbreviation found in a timestamp
   * with its month number (e.g. "Jan" becomes "1").
   *
   * @param time timestamp string such as "01/Jan/2017:00:00:00"
   * @return the timestamp with the month abbreviation replaced
   */
  public static String SwithtoNum(String time) {
    // Only the first matching month is replaced, mirroring the original
    // if/else-if chain.
    for (int month = 0; month < MONTHS.length; month++) {
      if (time.contains(MONTHS[month])) {
        time = time.replace(MONTHS[month], String.valueOf(month + 1));
        break;
      }
    }
    return time;
  }

  /**
   * checknull: Check that a serialized record string is not the empty
   * object literal.
   *
   * @param s serialized record
   * @return false when the record is exactly "{}", true otherwise
   */
  public static boolean checknull(String s) {
    return !s.equals("{}");
  }

}

http://git-wip-us.apache.org/repos/asf/incubator-sdap-mudrod/blob/39379fa9/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/package-info.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/package-info.java 
b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/package-info.java
new file mode 100644
index 0000000..4ab063b
--- /dev/null
+++ 
b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/package-info.java
@@ -0,0 +1,17 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License"); you 
+ * may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * This package includes data structure needed for web log analysis
+ */
+package org.apache.sdap.mudrod.weblog.structure;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-sdap-mudrod/blob/39379fa9/core/src/main/resources/config.xml
----------------------------------------------------------------------
diff --git a/core/src/main/resources/config.xml 
b/core/src/main/resources/config.xml
index 62ac7a6..5a5bcee 100644
--- a/core/src/main/resources/config.xml
+++ b/core/src/main/resources/config.xml
@@ -63,7 +63,7 @@
     The ontology service implementation. Possible values include
      EsipPortal - EsipPortalOntology
      EsipCOR - EsipCOROntology
-     Local - gov.nasa.jpl.mudrod.ontology.process.Local
+     Local - org.apache.sdap.mudrod.ontology.process.Local
      -->
     <para name="mudrod.ontology.implementation">Local</para>
 

http://git-wip-us.apache.org/repos/asf/incubator-sdap-mudrod/blob/39379fa9/core/src/test/java/gov/nasa/jpl/mudrod/main/TestMudrodEngine.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/gov/nasa/jpl/mudrod/main/TestMudrodEngine.java 
b/core/src/test/java/gov/nasa/jpl/mudrod/main/TestMudrodEngine.java
deleted file mode 100644
index ff15415..0000000
--- a/core/src/test/java/gov/nasa/jpl/mudrod/main/TestMudrodEngine.java
+++ /dev/null
@@ -1,303 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License"); you 
- * may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package gov.nasa.jpl.mudrod.main;
-
-import static org.junit.Assert.fail;
-
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Test;
-
-/**
- * Initial test case for {@link gov.nasa.jpl.mudrod.main.MudrodEngine}, 
currently no 
- * tests are implemented, this is merely in place to get the JaCoCo test 
reporting to
- * work.
- */
-public class TestMudrodEngine {
-
-  /**
-   * @throws java.lang.Exception
-   */
-  @BeforeClass
-  public static void setUpBeforeClass() throws Exception {
-  }
-
-  /**
-   * @throws java.lang.Exception
-   */
-  @AfterClass
-  public static void tearDownAfterClass() throws Exception {
-  }
-
-  /**
-   * @throws java.lang.Exception
-   */
-  @Before
-  public void setUp() throws Exception {
-  }
-
-  /**
-   * @throws java.lang.Exception
-   */
-  @After
-  public void tearDown() throws Exception {
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.main.MudrodEngine#MudrodEngine()}.
-   */
-  @Ignore
-  @Test
-  public final void testMudrodEngine() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.main.MudrodEngine#startESDriver()}.
-   */
-  @Ignore
-  @Test
-  public final void testStartESDriver() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.main.MudrodEngine#startSparkDriver()}.
-   */
-  @Ignore
-  @Test
-  public final void testStartSparkDriver() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link gov.nasa.jpl.mudrod.main.MudrodEngine#getConfig()}.
-   */
-  @Ignore
-  @Test
-  public final void testGetConfig() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.main.MudrodEngine#getESDriver()}.
-   */
-  @Ignore
-  @Test
-  public final void testGetESDriver() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.main.MudrodEngine#setESDriver(gov.nasa.jpl.mudrod.driver.ESDriver)}.
-   */
-  @Ignore
-  @Test
-  public final void testSetESDriver() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.main.MudrodEngine#loadConfig()}.
-   */
-  @Ignore
-  @Test
-  public final void testLoadConfig() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.main.MudrodEngine#startLogIngest()}.
-   */
-  @Ignore
-  @Test
-  public final void testStartLogIngest() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.main.MudrodEngine#startMetaIngest()}.
-   */
-  @Ignore
-  @Test
-  public final void testStartMetaIngest() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.main.MudrodEngine#startFullIngest()}.
-   */
-  @Ignore
-  @Test
-  public final void testStartFullIngest() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.main.MudrodEngine#startProcessing()}.
-   */
-  @Ignore
-  @Test
-  public final void testStartProcessing() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link gov.nasa.jpl.mudrod.main.MudrodEngine#end()}.
-   */
-  @Ignore
-  @Test
-  public final void testEnd() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.main.MudrodEngine#main(java.lang.String[])}.
-   */
-  @Ignore
-  @Test
-  public final void testMain() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.main.MudrodEngine#getSparkDriver()}.
-   */
-  @Ignore
-  @Test
-  public final void testGetSparkDriver() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.main.MudrodEngine#setSparkDriver(gov.nasa.jpl.mudrod.driver.SparkDriver)}.
-   */
-  @Ignore
-  @Test
-  public final void testSetSparkDriver() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link java.lang.Object#Object()}.
-   */
-  @Ignore
-  @Test
-  public final void testObject() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link java.lang.Object#getClass()}.
-   */
-  @Ignore
-  @Test
-  public final void testGetClass() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link java.lang.Object#hashCode()}.
-   */
-  @Ignore
-  @Test
-  public final void testHashCode() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link java.lang.Object#equals(java.lang.Object)}.
-   */
-  @Ignore
-  @Test
-  public final void testEquals() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link java.lang.Object#clone()}.
-   */
-  @Ignore
-  @Test
-  public final void testClone() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link java.lang.Object#toString()}.
-   */
-  @Ignore
-  @Test
-  public final void testToString() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link java.lang.Object#notify()}.
-   */
-  @Ignore
-  @Test
-  public final void testNotify() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link java.lang.Object#notifyAll()}.
-   */
-  @Ignore
-  @Test
-  public final void testNotifyAll() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link java.lang.Object#wait(long)}.
-   */
-  @Ignore
-  @Test
-  public final void testWaitLong() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link java.lang.Object#wait(long, int)}.
-   */
-  @Ignore
-  @Test
-  public final void testWaitLongInt() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link java.lang.Object#wait()}.
-   */
-  @Ignore
-  @Test
-  public final void testWait() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link java.lang.Object#finalize()}.
-   */
-  @Ignore
-  @Test
-  public final void testFinalize() {
-    fail("Not yet implemented"); // TODO
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-sdap-mudrod/blob/39379fa9/core/src/test/java/gov/nasa/jpl/mudrod/ontology/process/TestLocalOntology.java
----------------------------------------------------------------------
diff --git 
a/core/src/test/java/gov/nasa/jpl/mudrod/ontology/process/TestLocalOntology.java
 
b/core/src/test/java/gov/nasa/jpl/mudrod/ontology/process/TestLocalOntology.java
deleted file mode 100644
index 994f4ed..0000000
--- 
a/core/src/test/java/gov/nasa/jpl/mudrod/ontology/process/TestLocalOntology.java
+++ /dev/null
@@ -1,226 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License"); you
- * may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package gov.nasa.jpl.mudrod.ontology.process;
-
-import static org.junit.Assert.*;
-
-import java.io.IOException;
-
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Test;
-
-/**
- * Test cases for {@link gov.nasa.jpl.mudrod.ontology.process.LocalOntology}
- */
-public class TestLocalOntology {
-
-  /**
-   * @throws java.lang.Exception
-   */
-  @BeforeClass
-  public static void setUpBeforeClass() throws Exception {
-  }
-
-  /**
-   * @throws java.lang.Exception
-   */
-  @AfterClass
-  public static void tearDownAfterClass() throws Exception {
-  }
-
-  private LocalOntology lOnt;
-
-  /**
-   * @throws java.lang.Exception
-   */
-  @Before
-  public void setUp() throws Exception {
-    lOnt = new LocalOntology();
-  }
-
-  /**
-   * @throws java.lang.Exception
-   */
-  @After
-  public void tearDown() throws Exception {
-    lOnt = null;
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.ontology.process.LocalOntology#LocalOntology()}.
-   */
-  @Test
-  public final void testLocalOntology() {
-    assertNotNull("Test setUp should create a new instance of LocalOntology.", 
lOnt);
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.ontology.process.LocalOntology#getInstance()}.
-   */
-  @Test
-  public final void testGetInstance() {
-    assertSame("Ontology instance should be of type LocalOntology", 
LocalOntology.class, lOnt.getInstance().getClass());
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.ontology.process.LocalOntology#load()}.
-   * @throws IOException 
-   */
-  @Ignore
-  @Test
-  public final void testLoad() throws IOException {
-    lOnt.load();
-    assertTrue("Resource list should have a minimum of one resource.", 
lOnt.getLoadedOntologyResources().size() == 1);
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.ontology.process.LocalOntology#load(java.lang.String[])}.
-   */
-  @Ignore
-  @Test
-  public final void testLoadStringArray() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.ontology.process.LocalOntology#getParser()}.
-   */
-  @Ignore
-  @Test
-  public final void testGetParser() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.ontology.process.LocalOntology#getModel()}.
-   */
-  @Ignore
-  @Test
-  public final void testGetModel() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.ontology.process.LocalOntology#merge(gov.nasa.jpl.mudrod.ontology.Ontology)}.
-   */
-  @Ignore
-  @Test
-  public final void testMerge() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.ontology.process.LocalOntology#subclasses(java.lang.String)}.
-   */
-  @Ignore
-  @Test
-  public final void testSubclasses() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.ontology.process.LocalOntology#synonyms(java.lang.String)}.
-   */
-  @Ignore
-  @Test
-  public final void testSynonyms() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.ontology.process.LocalOntology#addSearchTerm(java.lang.String,
 org.apache.jena.ontology.OntResource)}.
-   */
-  @Ignore
-  @Test
-  public final void testAddSearchTerm() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.ontology.process.LocalOntology#retrieve(java.lang.String)}.
-   */
-  @Ignore
-  @Test
-  public final void testRetrieve() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.ontology.process.LocalOntology#renderHierarchy(java.io.PrintStream,
 org.apache.jena.ontology.OntClass, java.util.List, int)}.
-   */
-  @Ignore
-  @Test
-  public final void testRenderHierarchy() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.ontology.process.LocalOntology#renderClassDescription(java.io.PrintStream,
 org.apache.jena.ontology.OntClass, int)}.
-   */
-  @Ignore
-  @Test
-  public final void testRenderClassDescription() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.ontology.process.LocalOntology#renderRestriction(java.io.PrintStream,
 org.apache.jena.ontology.Restriction)}.
-   */
-  @Ignore
-  @Test
-  public final void testRenderRestriction() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.ontology.process.LocalOntology#renderURI(java.io.PrintStream,
 org.apache.jena.shared.PrefixMapping, java.lang.String)}.
-   */
-  @Ignore
-  @Test
-  public final void testRenderURI() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.ontology.process.LocalOntology#renderAnonymous(java.io.PrintStream,
 org.apache.jena.rdf.model.Resource, java.lang.String)}.
-   */
-  @Ignore
-  @Test
-  public final void testRenderAnonymous() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.ontology.process.LocalOntology#indent(java.io.PrintStream, 
int)}.
-   */
-  @Ignore
-  @Test
-  public final void testIndent() {
-    fail("Not yet implemented"); // TODO
-  }
-
-  /**
-   * Test method for {@link 
gov.nasa.jpl.mudrod.ontology.process.LocalOntology#main(java.lang.String[])}.
-   */
-  @Ignore
-  @Test
-  public final void testMain() {
-    fail("Not yet implemented"); // TODO
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-sdap-mudrod/blob/39379fa9/core/src/test/java/org/apache/sdap/mudrod/main/TestMudrodEngine.java
----------------------------------------------------------------------
diff --git 
a/core/src/test/java/org/apache/sdap/mudrod/main/TestMudrodEngine.java 
b/core/src/test/java/org/apache/sdap/mudrod/main/TestMudrodEngine.java
new file mode 100644
index 0000000..80bd52b
--- /dev/null
+++ b/core/src/test/java/org/apache/sdap/mudrod/main/TestMudrodEngine.java
@@ -0,0 +1,303 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License"); you 
+ * may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sdap.mudrod.main;
+
+import static org.junit.Assert.fail;
+
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+
+/**
+ * Initial test case for {@link org.apache.sdap.mudrod.main.MudrodEngine}, 
currently no 
+ * tests are implemented, this is merely in place to get the JaCoCo test 
reporting to
+ * work.
+ */
+public class TestMudrodEngine {
+
+  /**
+   * @throws java.lang.Exception
+   */
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+  }
+
+  /**
+   * @throws java.lang.Exception
+   */
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+  }
+
+  /**
+   * @throws java.lang.Exception
+   */
+  @Before
+  public void setUp() throws Exception {
+  }
+
+  /**
+   * @throws java.lang.Exception
+   */
+  @After
+  public void tearDown() throws Exception {
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.main.MudrodEngine#MudrodEngine()}.
+   */
+  @Ignore
+  @Test
+  public final void testMudrodEngine() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.main.MudrodEngine#startESDriver()}.
+   */
+  @Ignore
+  @Test
+  public final void testStartESDriver() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.main.MudrodEngine#startSparkDriver()}.
+   */
+  @Ignore
+  @Test
+  public final void testStartSparkDriver() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.main.MudrodEngine#getConfig()}.
+   */
+  @Ignore
+  @Test
+  public final void testGetConfig() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.main.MudrodEngine#getESDriver()}.
+   */
+  @Ignore
+  @Test
+  public final void testGetESDriver() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.main.MudrodEngine#setESDriver(org.apache.sdap.mudrod.driver.ESDriver)}.
+   */
+  @Ignore
+  @Test
+  public final void testSetESDriver() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.main.MudrodEngine#loadConfig()}.
+   */
+  @Ignore
+  @Test
+  public final void testLoadConfig() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.main.MudrodEngine#startLogIngest()}.
+   */
+  @Ignore
+  @Test
+  public final void testStartLogIngest() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.main.MudrodEngine#startMetaIngest()}.
+   */
+  @Ignore
+  @Test
+  public final void testStartMetaIngest() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.main.MudrodEngine#startFullIngest()}.
+   */
+  @Ignore
+  @Test
+  public final void testStartFullIngest() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.main.MudrodEngine#startProcessing()}.
+   */
+  @Ignore
+  @Test
+  public final void testStartProcessing() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link org.apache.sdap.mudrod.main.MudrodEngine#end()}.
+   */
+  @Ignore
+  @Test
+  public final void testEnd() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.main.MudrodEngine#main(java.lang.String[])}.
+   */
+  @Ignore
+  @Test
+  public final void testMain() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.main.MudrodEngine#getSparkDriver()}.
+   */
+  @Ignore
+  @Test
+  public final void testGetSparkDriver() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.main.MudrodEngine#setSparkDriver(org.apache.sdap.mudrod.driver.SparkDriver)}.
+   */
+  @Ignore
+  @Test
+  public final void testSetSparkDriver() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link java.lang.Object#Object()}.
+   */
+  @Ignore
+  @Test
+  public final void testObject() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link java.lang.Object#getClass()}.
+   */
+  @Ignore
+  @Test
+  public final void testGetClass() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link java.lang.Object#hashCode()}.
+   */
+  @Ignore
+  @Test
+  public final void testHashCode() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link java.lang.Object#equals(java.lang.Object)}.
+   */
+  @Ignore
+  @Test
+  public final void testEquals() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link java.lang.Object#clone()}.
+   */
+  @Ignore
+  @Test
+  public final void testClone() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link java.lang.Object#toString()}.
+   */
+  @Ignore
+  @Test
+  public final void testToString() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link java.lang.Object#notify()}.
+   */
+  @Ignore
+  @Test
+  public final void testNotify() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link java.lang.Object#notifyAll()}.
+   */
+  @Ignore
+  @Test
+  public final void testNotifyAll() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link java.lang.Object#wait(long)}.
+   */
+  @Ignore
+  @Test
+  public final void testWaitLong() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link java.lang.Object#wait(long, int)}.
+   */
+  @Ignore
+  @Test
+  public final void testWaitLongInt() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link java.lang.Object#wait()}.
+   */
+  @Ignore
+  @Test
+  public final void testWait() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link java.lang.Object#finalize()}.
+   */
+  @Ignore
+  @Test
+  public final void testFinalize() {
+    fail("Not yet implemented"); // TODO
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-sdap-mudrod/blob/39379fa9/core/src/test/java/org/apache/sdap/mudrod/ontology/process/TestLocalOntology.java
----------------------------------------------------------------------
diff --git 
a/core/src/test/java/org/apache/sdap/mudrod/ontology/process/TestLocalOntology.java
 
b/core/src/test/java/org/apache/sdap/mudrod/ontology/process/TestLocalOntology.java
new file mode 100644
index 0000000..192a29b
--- /dev/null
+++ 
b/core/src/test/java/org/apache/sdap/mudrod/ontology/process/TestLocalOntology.java
@@ -0,0 +1,227 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License"); you
+ * may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sdap.mudrod.ontology.process;
+
+import static org.junit.Assert.*;
+
+import java.io.IOException;
+
+import org.apache.sdap.mudrod.ontology.process.LocalOntology;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+
+/**
+ * Test cases for {@link org.apache.sdap.mudrod.ontology.process.LocalOntology}
+ */
+public class TestLocalOntology {
+
+  /**
+   * @throws java.lang.Exception
+   */
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+  }
+
+  /**
+   * @throws java.lang.Exception
+   */
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+  }
+
+  private LocalOntology lOnt;
+
+  /**
+   * @throws java.lang.Exception
+   */
+  @Before
+  public void setUp() throws Exception {
+    lOnt = new LocalOntology();
+  }
+
+  /**
+   * @throws java.lang.Exception
+   */
+  @After
+  public void tearDown() throws Exception {
+    lOnt = null;
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.ontology.process.LocalOntology#LocalOntology()}.
+   */
+  @Test
+  public final void testLocalOntology() {
+    assertNotNull("Test setUp should create a new instance of LocalOntology.", 
lOnt);
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.ontology.process.LocalOntology#getInstance()}.
+   */
+  @Test
+  public final void testGetInstance() {
+    assertSame("Ontology instance should be of type LocalOntology", 
LocalOntology.class, lOnt.getInstance().getClass());
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.ontology.process.LocalOntology#load()}.
+   * @throws IOException 
+   */
+  @Ignore
+  @Test
+  public final void testLoad() throws IOException {
+    lOnt.load();
+    assertTrue("Resource list should have a minimum of one resource.", 
lOnt.getLoadedOntologyResources().size() == 1);
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.ontology.process.LocalOntology#load(java.lang.String[])}.
+   */
+  @Ignore
+  @Test
+  public final void testLoadStringArray() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.ontology.process.LocalOntology#getParser()}.
+   */
+  @Ignore
+  @Test
+  public final void testGetParser() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.ontology.process.LocalOntology#getModel()}.
+   */
+  @Ignore
+  @Test
+  public final void testGetModel() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.ontology.process.LocalOntology#merge(org.apache.sdap.mudrod.ontology.Ontology)}.
+   */
+  @Ignore
+  @Test
+  public final void testMerge() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.ontology.process.LocalOntology#subclasses(java.lang.String)}.
+   */
+  @Ignore
+  @Test
+  public final void testSubclasses() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.ontology.process.LocalOntology#synonyms(java.lang.String)}.
+   */
+  @Ignore
+  @Test
+  public final void testSynonyms() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.ontology.process.LocalOntology#addSearchTerm(java.lang.String,
 org.apache.jena.ontology.OntResource)}.
+   */
+  @Ignore
+  @Test
+  public final void testAddSearchTerm() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.ontology.process.LocalOntology#retrieve(java.lang.String)}.
+   */
+  @Ignore
+  @Test
+  public final void testRetrieve() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.ontology.process.LocalOntology#renderHierarchy(java.io.PrintStream,
 org.apache.jena.ontology.OntClass, java.util.List, int)}.
+   */
+  @Ignore
+  @Test
+  public final void testRenderHierarchy() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.ontology.process.LocalOntology#renderClassDescription(java.io.PrintStream,
 org.apache.jena.ontology.OntClass, int)}.
+   */
+  @Ignore
+  @Test
+  public final void testRenderClassDescription() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.ontology.process.LocalOntology#renderRestriction(java.io.PrintStream,
 org.apache.jena.ontology.Restriction)}.
+   */
+  @Ignore
+  @Test
+  public final void testRenderRestriction() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.ontology.process.LocalOntology#renderURI(java.io.PrintStream,
 org.apache.jena.shared.PrefixMapping, java.lang.String)}.
+   */
+  @Ignore
+  @Test
+  public final void testRenderURI() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.ontology.process.LocalOntology#renderAnonymous(java.io.PrintStream,
 org.apache.jena.rdf.model.Resource, java.lang.String)}.
+   */
+  @Ignore
+  @Test
+  public final void testRenderAnonymous() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.ontology.process.LocalOntology#indent(java.io.PrintStream,
 int)}.
+   */
+  @Ignore
+  @Test
+  public final void testIndent() {
+    fail("Not yet implemented"); // TODO
+  }
+
+  /**
+   * Test method for {@link 
org.apache.sdap.mudrod.ontology.process.LocalOntology#main(java.lang.String[])}.
+   */
+  @Ignore
+  @Test
+  public final void testMain() {
+    fail("Not yet implemented"); // TODO
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-sdap-mudrod/blob/39379fa9/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 9bf3f7f..249519a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -17,18 +17,16 @@
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
     <modelVersion>4.0.0</modelVersion>
 
-    <groupId>gov.nasa.jpl.mudrod</groupId>
+    <groupId>org.apache.sdap.mudrod</groupId>
     <artifactId>mudrod-parent</artifactId>
     <version>0.0.1-SNAPSHOT</version>
     <packaging>pom</packaging>
 
     <name>Mudrod :: Parent</name>
-    <url>https://github.com/mudrod/mudrod.git</url>
+    <url>https://github.com/apache/incubator-sdap-mudrod.git</url>
     <description>MUDROD: Mining and Utilizing Dataset Relevancy from
-        Oceanographic Datasets
-        to Improve Data Discovery and Access. MUDROD is a semantic discovery 
and
-        search project
-        funded by NASA AIST (NNX15AM85G).
+        Oceanographic Datasets to Improve Data Discovery and Access. 
+        MUDROD is a semantic discovery and search project.
     </description>
 
     <licenses>

Reply via email to