SINGA-84 Header Files Rearrange

Project: http://git-wip-us.apache.org/repos/asf/incubator-singa/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-singa/commit/239ed217
Tree: http://git-wip-us.apache.org/repos/asf/incubator-singa/tree/239ed217
Diff: http://git-wip-us.apache.org/repos/asf/incubator-singa/diff/239ed217

Branch: refs/heads/master
Commit: 239ed21787da6a0d12390f626e153b33b3a0f19f
Parents: dc7f199
Author: ijingo <[email protected]>
Authored: Wed Oct 7 21:15:21 2015 +0800
Committer: ijingo <[email protected]>
Committed: Wed Oct 7 21:34:56 2015 +0800

----------------------------------------------------------------------
 .gitignore                                      |   1 +
 Makefile.am                                     | 105 +--
 examples/cifar10/create_data.cc                 |   3 +-
 examples/cifar10/run.sh                         |  70 --
 include/comm/msg.h                              | 238 -------
 include/comm/socket.h                           | 174 -----
 include/driver.h                                | 226 -------
 include/io/hdfs_store.h                         |  22 -
 include/io/imagefolder_store.h                  |  21 -
 include/io/kvfile.h                             | 182 ------
 include/io/kvfile_store.h                       |  55 --
 include/io/store.h                              | 105 ---
 include/io/textfile_store.h                     |  56 --
 include/neuralnet/connection_layer.h            | 156 -----
 include/neuralnet/input_layer.h                 | 303 ---------
 include/neuralnet/layer.h                       | 294 ---------
 include/neuralnet/loss_layer.h                  |  74 ---
 include/neuralnet/neuralnet.h                   | 118 ----
 include/neuralnet/neuron_layer.h                | 248 -------
 include/neuralnet/output_layer.h                |  27 -
 include/server.h                                | 133 ----
 include/singa.h                                 |  37 --
 include/singa/comm/msg.h                        | 238 +++++++
 include/singa/comm/socket.h                     | 174 +++++
 include/singa/driver.h                          | 226 +++++++
 include/singa/io/hdfs_store.h                   |  22 +
 include/singa/io/imagefolder_store.h            |  21 +
 include/singa/io/kvfile.h                       | 182 ++++++
 include/singa/io/kvfile_store.h                 |  55 ++
 include/singa/io/store.h                        | 105 +++
 include/singa/io/textfile_store.h               |  56 ++
 .../singa/neuralnet/connection_layer/bridge.h   | 106 +++
 .../singa/neuralnet/connection_layer/concate.h  |  48 ++
 .../singa/neuralnet/connection_layer/slice.h    |  54 ++
 .../singa/neuralnet/connection_layer/split.h    |  52 ++
 .../singa/neuralnet/input_layer/csv_record.h    |  72 ++
 include/singa/neuralnet/input_layer/data.h      |  76 +++
 .../neuralnet/input_layer/image_preprocess.h    |  63 ++
 include/singa/neuralnet/input_layer/label.h     |  59 ++
 include/singa/neuralnet/input_layer/lmdb_data.h |  75 +++
 include/singa/neuralnet/input_layer/mnist.h     |  62 ++
 include/singa/neuralnet/input_layer/parser.h    |  65 ++
 include/singa/neuralnet/input_layer/prefetch.h  |  65 ++
 .../singa/neuralnet/input_layer/proto_record.h  |  73 +++
 include/singa/neuralnet/input_layer/rgb_image.h |  66 ++
 .../singa/neuralnet/input_layer/shard_data.h    |  65 ++
 .../singa/neuralnet/input_layer/store_input.h   | 105 +++
 include/singa/neuralnet/layer.h                 | 288 ++++++++
 include/singa/neuralnet/loss_layer/euclidean.h  |  47 ++
 include/singa/neuralnet/loss_layer/softmax.h    |  63 ++
 include/singa/neuralnet/neuralnet.h             | 118 ++++
 .../singa/neuralnet/neuron_layer/convolution.h  |  72 ++
 include/singa/neuralnet/neuron_layer/dropout.h  |  51 ++
 .../neuralnet/neuron_layer/inner_product.h      |  55 ++
 include/singa/neuralnet/neuron_layer/lrn.h      |  60 ++
 include/singa/neuralnet/neuron_layer/pooling.h  |  60 ++
 include/singa/neuralnet/neuron_layer/rbm.h      |  99 +++
 include/singa/neuralnet/neuron_layer/relu.h     |  44 ++
 include/singa/neuralnet/neuron_layer/sigmoid.h  |  51 ++
 include/singa/neuralnet/neuron_layer/stanh.h    |  47 ++
 .../singa/neuralnet/output_layer/output_layer.h |  27 +
 include/singa/server.h                          | 133 ++++
 include/singa/singa.h                           |  37 ++
 include/singa/stub.h                            | 109 ++++
 include/singa/utils/blob.h                      | 238 +++++++
 include/singa/utils/cluster.h                   | 163 +++++
 include/singa/utils/cluster_rt.h                | 190 ++++++
 include/singa/utils/common.h                    | 155 +++++
 include/singa/utils/data_shard.h                | 171 +++++
 include/singa/utils/factory.h                   | 100 +++
 include/singa/utils/graph.h                     | 118 ++++
 include/singa/utils/image_transform.h           |  35 +
 include/singa/utils/param.h                     | 397 +++++++++++
 include/singa/utils/singleton.h                 |  52 ++
 include/singa/utils/tinydir.h                   | 562 ++++++++++++++++
 include/singa/utils/tokenizer.h                 |  64 ++
 include/singa/utils/updater.h                   | 145 +++++
 include/singa/worker.h                          | 313 +++++++++
 include/stub.h                                  | 109 ----
 include/utils/blob.h                            | 198 ------
 include/utils/cluster.h                         | 163 -----
 include/utils/cluster_rt.h                      | 190 ------
 include/utils/common.h                          | 155 -----
 include/utils/data_shard.h                      | 171 -----
 include/utils/factory.h                         | 100 ---
 include/utils/graph.h                           | 118 ----
 include/utils/image_transform.h                 |  35 -
 include/utils/param.h                           | 397 -----------
 include/utils/singleton.h                       |  52 --
 include/utils/tinydir.h                         | 562 ----------------
 include/utils/tokenizer.h                       |  64 --
 include/utils/updater.h                         | 145 -----
 include/worker.h                                | 311 ---------
 src/comm/msg.cc                                 |   2 +-
 src/comm/socket.cc                              |   2 +-
 src/driver.cc                                   |  45 +-
 src/io/kvfile.cc                                |   2 +-
 src/io/kvfile_store.cc                          |   2 +-
 src/io/store.cc                                 |   6 +-
 src/io/textfile_store.cc                        |   2 +-
 src/main.cc                                     |   2 +-
 src/neuralnet/connection_layer.cc               | 138 ----
 src/neuralnet/connection_layer/bridge.cc        |  34 +
 src/neuralnet/connection_layer/concate.cc       |  55 ++
 src/neuralnet/connection_layer/slice.cc         |  86 +++
 src/neuralnet/connection_layer/split.cc         |  44 ++
 src/neuralnet/input_layer.cc                    | 597 -----------------
 src/neuralnet/input_layer/csv_record.cc         |  77 +++
 src/neuralnet/input_layer/image_preprocess.cc   |  76 +++
 src/neuralnet/input_layer/label.cc              |  56 ++
 src/neuralnet/input_layer/lmdb_data.cc          | 143 ++++
 src/neuralnet/input_layer/mnist.cc              |  86 +++
 src/neuralnet/input_layer/parser.cc             |  35 +
 src/neuralnet/input_layer/prefetch.cc           |  46 ++
 src/neuralnet/input_layer/proto_record.cc       |  81 +++
 src/neuralnet/input_layer/rgb_image.cc          | 131 ++++
 src/neuralnet/input_layer/shard_data.cc         |  80 +++
 src/neuralnet/input_layer/store_input.cc        | 133 ++++
 src/neuralnet/layer.cc                          |   6 +-
 src/neuralnet/loss_layer.cc                     | 137 ----
 src/neuralnet/loss_layer/euclidean.cc           |  77 +++
 src/neuralnet/loss_layer/softmax.cc             | 100 +++
 src/neuralnet/neuralnet.cc                      |   4 +-
 src/neuralnet/neuron_layer.cc                   | 652 -------------------
 src/neuralnet/neuron_layer/convolution.cc       | 169 +++++
 src/neuralnet/neuron_layer/dropout.cc           |  64 ++
 src/neuralnet/neuron_layer/inner_product.cc     |  96 +++
 src/neuralnet/neuron_layer/lrn.cc               |  74 +++
 src/neuralnet/neuron_layer/pooling.cc           | 123 ++++
 src/neuralnet/neuron_layer/rbm.cc               | 187 ++++++
 src/neuralnet/neuron_layer/relu.cc              |  53 ++
 src/neuralnet/neuron_layer/sigmoid.cc           |  53 ++
 src/neuralnet/neuron_layer/stanh.cc             |  53 ++
 src/neuralnet/output_layer.cc                   |  26 -
 src/neuralnet/output_layer/output_layer.cc      |  26 +
 src/server.cc                                   |  12 +-
 src/stub.cc                                     |  10 +-
 src/test/test_cluster.cc                        |   2 +-
 src/test/test_common.cc                         |   2 +-
 src/test/test_csv_record_layer.cc               |   4 +-
 src/test/test_msg.cc                            |   2 +-
 src/test/test_neuralnet.cc                      |   2 +-
 src/test/test_paramslicer.cc                    |   2 +-
 src/test/test_proto_record_layer.cc             |   6 +-
 src/test/test_shard.cc                          |   2 +-
 src/test/test_store.cc                          |   2 +-
 src/utils/blob.cc                               |   2 +-
 src/utils/cluster.cc                            |   2 +-
 src/utils/cluster_rt.cc                         |   4 +-
 src/utils/common.cc                             |   2 +-
 src/utils/data_shard.cc                         |   2 +-
 src/utils/graph.cc                              |   2 +-
 src/utils/image_transform.cc                    |   2 +-
 src/utils/param.cc                              |   8 +-
 src/utils/tool.cc                               |   6 +-
 src/utils/updater.cc                            |   6 +-
 src/worker.cc                                   |   8 +-
 157 files changed, 8649 insertions(+), 6969 deletions(-)
----------------------------------------------------------------------
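The practical effect of the rearrangement is that every public SINGA header now lives under include/singa/, so include directives throughout the tree (and in downstream code) gain a "singa/" prefix; the one-line edits to the src/*.cc and src/test/*.cc files listed above are essentially these path updates. A minimal before/after sketch (the specific headers are chosen for illustration only):

    // before this commit: headers sat directly under include/
    // #include "comm/msg.h"
    // #include "utils/param.h"
    // #include "neuralnet/layer.h"

    // after this commit: everything is namespaced under include/singa/
    #include "singa/comm/msg.h"
    #include "singa/utils/param.h"
    #include "singa/neuralnet/layer.h"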


http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/239ed217/.gitignore
----------------------------------------------------------------------
diff --git a/.gitignore b/.gitignore
index 7ac9bc2..972d2ca 100644
--- a/.gitignore
+++ b/.gitignore
@@ -49,3 +49,4 @@ aclocal.m4
 Makefile.in
 thirdparty/*
 !thirdparty/install.sh
+!include/singa

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/239ed217/Makefile.am
----------------------------------------------------------------------
diff --git a/Makefile.am b/Makefile.am
index a1496bd..5c341a3 100644
--- a/Makefile.am
+++ b/Makefile.am
@@ -40,43 +40,64 @@ SINGA_SRCS := src/driver.cc \
               src/worker.cc \
               src/stub.cc \
               src/neuralnet/layer.cc \
-              src/neuralnet/connection_layer.cc \
-              src/neuralnet/input_layer.cc \
-              src/neuralnet/loss_layer.cc \
-              src/neuralnet/neuron_layer.cc \
-              src/neuralnet/output_layer.cc \
+              src/neuralnet/connection_layer/bridge.cc \
+              src/neuralnet/connection_layer/concate.cc \
+              src/neuralnet/connection_layer/slice.cc \
+              src/neuralnet/connection_layer/split.cc \
+              src/neuralnet/input_layer/parser.cc \
+              src/neuralnet/input_layer/csv_record.cc \
+              src/neuralnet/input_layer/image_preprocess.cc \
+              src/neuralnet/input_layer/label.cc \
+              src/neuralnet/input_layer/lmdb_data.cc \
+              src/neuralnet/input_layer/mnist.cc \
+              src/neuralnet/input_layer/prefetch.cc \
+              src/neuralnet/input_layer/proto_record.cc \
+              src/neuralnet/input_layer/rgb_image.cc \
+              src/neuralnet/input_layer/shard_data.cc \
+              src/neuralnet/input_layer/store_input.cc \
+              src/neuralnet/loss_layer/euclidean.cc \
+              src/neuralnet/loss_layer/softmax.cc \
+              src/neuralnet/neuron_layer/convolution.cc \
+              src/neuralnet/neuron_layer/dropout.cc \
+              src/neuralnet/neuron_layer/inner_product.cc \
+              src/neuralnet/neuron_layer/lrn.cc \
+              src/neuralnet/neuron_layer/pooling.cc \
+              src/neuralnet/neuron_layer/rbm.cc \
+              src/neuralnet/neuron_layer/relu.cc \
+              src/neuralnet/neuron_layer/sigmoid.cc \
+              src/neuralnet/neuron_layer/stanh.cc \
               src/neuralnet/neuralnet.cc \
               src/comm/socket.cc \
               src/comm/msg.cc \
-                                                       src/io/kvfile.cc \
-                                                       src/io/kvfile_store.cc \
-                                                       src/io/textfile_store.cc \
-                                                       src/io/store.cc
-
-SINGA_HDRS := include/singa.h \
-              include/utils/cluster.h \
-              include/utils/cluster_rt.h \
-              include/utils/param.h \
-              include/utils/common.h \
-              include/utils/factory.h \
-              include/utils/data_shard.h \
-              include/utils/singleton.h \
-              include/utils/graph.h \
-              include/utils/blob.h \
-              include/utils/updater.h \
-              include/utils/tinydir.h \
-              include/utils/tokenizer.h \
-              include/utils/image_transform.h \
-              include/server.h \
-              include/worker.h \
-              include/stub.h \
-              include/neuralnet/layer.h \
-              include/neuralnet/connection_layer.h \
-              include/neuralnet/input_layer.h \
-              include/neuralnet/loss_layer.h \
-              include/neuralnet/neuron_layer.h \
-              include/neuralnet/output_layer.h \
-              include/neuralnet/neuralnet.h \
+              src/io/kvfile.cc \
+              src/io/kvfile_store.cc \
+              src/io/textfile_store.cc \
+              src/io/store.cc
+
+SINGA_HDRS := include/singa/singa.h \
+              include/singa/utils/cluster.h \
+              include/singa/utils/cluster_rt.h \
+              include/singa/utils/param.h \
+              include/singa/utils/common.h \
+              include/singa/utils/factory.h \
+              include/singa/utils/data_shard.h \
+              include/singa/utils/singleton.h \
+              include/singa/utils/graph.h \
+              include/singa/utils/blob.h \
+              include/singa/utils/updater.h \
+              include/singa/utils/tinydir.h \
+              include/singa/utils/tokenizer.h \
+              include/singa/utils/image_transform.h \
+              include/singa/server.h \
+              include/singa/worker.h \
+              include/singa/stub.h \
+              include/singa/neuralnet/layer.h \
+              include/singa/neuralnet/connection_layer.h \
+              include/singa/neuralnet/input_layer.h \
+              include/singa/neuralnet/loss_layer.h \
+              include/singa/neuralnet/neuron_layer.h \
+              include/singa/neuralnet/output_layer.h \
+              include/singa/neuralnet/neuralnet.h \
               include/mshadow/tensor_expr.h \
               include/mshadow/tensor_container.h \
               include/mshadow/tensor_expr_ext.h \
@@ -85,12 +106,12 @@ SINGA_HDRS := include/singa.h \
               include/mshadow/cxxnet_op.h \
               include/mshadow/tensor_base.h \
               include/mshadow/tensor_random.h \
-              include/comm/msg.h \
-              include/comm/socket.h
-                                                       include/io/store.h \
-                                                       include/io/kvfile.h \
-                                                       include/io/kvfile_store.h \
-                                                       include/io/textfile_store.h
+              include/singa/comm/msg.h \
+              include/singa/comm/socket.h \
+              include/singa/io/store.h \
+              include/singa/io/kvfile.h \
+              include/singa/io/kvfile_store.h \
+              include/singa/io/textfile_store.h
 
 GTEST_SRCS := include/gtest/gtest-all.cc
 GTEST_HRDS := include/gtest/gtest.h
@@ -197,6 +218,6 @@ $(PROTO_HDRS) $(PROTO_SRCS): $(PROTOS)
        mkdir -p $(top_srcdir)/tool/pb2/
        touch $(top_srcdir)/tool/pb2/__init__.py
       protoc --proto_path=$(top_srcdir)/src/proto --python_out=$(top_srcdir)/tool/pb2 $(PROTOS)
-       mkdir -p $(top_srcdir)/include/proto/
-       cp $(top_srcdir)/src/proto/*.pb.h $(top_srcdir)/include/proto/
+       mkdir -p $(top_srcdir)/include/singa/proto/
+       cp $(top_srcdir)/src/proto/*.pb.h $(top_srcdir)/include/singa/proto/
        @echo
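The protoc rule now also copies the generated *.pb.h files into include/singa/proto/, so includes of generated headers follow the same prefix. Both file names below are taken from headers touched by this commit (common.pb.h appears in create_data.cc, job.pb.h in driver.h):

    #include "singa/proto/common.pb.h"
    #include "singa/proto/job.pb.h"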

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/239ed217/examples/cifar10/create_data.cc
----------------------------------------------------------------------
diff --git a/examples/cifar10/create_data.cc b/examples/cifar10/create_data.cc
index 5fddd1d..303c589 100644
--- a/examples/cifar10/create_data.cc
+++ b/examples/cifar10/create_data.cc
@@ -36,7 +36,8 @@
 #include <cstdint>
 #include <iostream>
 
-#include "./singa.h"
+#include "singa/io/store.h"
+#include "singa/proto/common.pb.h"
 
 using std::string;
 

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/239ed217/examples/cifar10/run.sh
----------------------------------------------------------------------
diff --git a/examples/cifar10/run.sh b/examples/cifar10/run.sh
deleted file mode 100755
index 53c81c1..0000000
--- a/examples/cifar10/run.sh
+++ /dev/null
@@ -1,70 +0,0 @@
-#
-#/**
-# * Copyright 2015 The Apache Software Foundation
-# *
-# * Licensed to the Apache Software Foundation (ASF) under one
-# * or more contributor license agreements.  See the NOTICE file
-# * distributed with this work for additional information
-# * regarding copyright ownership.  The ASF licenses this file
-# * to you under the Apache License, Version 2.0 (the
-# * "License"); you may not use this file except in compliance
-# * with the License.  You may obtain a copy of the License at
-# *
-# *     http://www.apache.org/licenses/LICENSE-2.0
-# *
-# * Unless required by applicable law or agreed to in writing, software
-# * distributed under the License is distributed on an "AS IS" BASIS,
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# * See the License for the specific language governing permissions and
-# * limitations under the License.
-# */
-#!/bin/bash
-
-if [ $# -ne 2 ];then
-  echo "Usage: run.sh [start|stop] num_procs"
-  exit
-fi
-
-netconf=conv.conf
-
-script_path=`readlink -f $0`
-script_dir=`dirname $script_path`
-example_dir=`dirname $script_dir`
-singa_dir=`dirname $example_dir`
-exec_path=${singa_dir}/build/singa
-host_path=$script_dir/hostfile
-ssh_options="-oStrictHostKeyChecking=no \
--oUserKnownHostsFile=/dev/null \
--oLogLevel=quiet"
-
-hosts=(`cat $host_path |cut -d ' ' -f 1`)
-if [ $1 == "start" ]
-then
-  count=0
-  for i in ${hosts[@]}
-  do
-    cmd="touch $singa_dir/$count.lock;\
-      $exec_path \
-      -procsID=$count \
-      -hostfile=$host_path \
-      -cluster_conf=$script_dir/cluster.conf \
-      -model_conf=$script_dir/$netconf; rm -f $singa_dir/$count.lock"
-    echo $cmd
-    ssh $ssh_options $i $cmd &
-    count=$(($count+1))
-    if [ $count -eq $2 ]
-    then
-      exit
-    fi
-  done
-elif [ $1 == "stop" ]
-then
-  for (( idx=$2-1 ; idx>=0 ; idx-- ))
-  do
-    echo "ssh ${hosts[$idx]} \"kill singa\""
-    ssh $ssh_options ${hosts[$idx]} "killall -q singa"
-    sleep 1
-  done
-fi
-
-

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/239ed217/include/comm/msg.h
----------------------------------------------------------------------
diff --git a/include/comm/msg.h b/include/comm/msg.h
deleted file mode 100644
index 50a9b81..0000000
--- a/include/comm/msg.h
+++ /dev/null
@@ -1,238 +0,0 @@
-/************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
-
-#ifndef SINGA_COMM_MSG_H_
-#define SINGA_COMM_MSG_H_
-
-// TODO(wangwei): make it a compiler argument
-#define USE_ZMQ
-
-#include <utility>
-#ifdef USE_ZMQ
-#include <czmq.h>
-#endif
-
-namespace singa {
-/**
- * Wrapper to generate message address
- * @param grp worker/server group id
- * @param id_or_proc worker/server id or procs id
- * @param type msg type
- */
-inline int Addr(int grp, int id_or_proc, int type) {
-  return (grp << 16) | (id_or_proc << 8) | type;
-}
-
-/**
- * Parse group id from addr.
- *
- * @return group id
- */
-inline int AddrGrp(int addr) {
-  return addr >> 16;
-}
-
-/**
- * Parse worker/server id from addr.
- *
- * @return id
- */
-inline int AddrID(int addr) {
-  static const int mask = (1 << 8) - 1;
-  return (addr >> 8) & mask;
-}
-
-/**
- * Parse worker/server procs from addr.
- *
- * @return procs id
- */
-inline int AddrProc(int addr) {
-  return AddrID(addr);
-}
-
-/**
- * Parse msg type from addr
- * @return msg type
- */
-inline int AddrType(int addr) {
-  static const int mask = (1 << 8) -1;
-  return addr & mask;
-}
-
-/**
- * Msg used to transfer Param info (gradient or value), feature blob, etc
- * between workers, stubs and servers.
- *
- * Each msg has a source addr and dest addr identified by a unique integer.
- * It is also associated with a target field (value and version) for ease of
- * getting some meta info (e.g., parameter id) from the msg.
- *
- * Other data is added into the message as frames.
- */
-class Msg {
- public:
-  ~Msg();
-  Msg();
-  /**
-   * Construct the msg providing source and destination addr.
-   */
-  Msg(int src, int dst);
-  /**
-   * Copy constructor.
-   */
-  Msg(const Msg& msg);
-  /**
-   * Swap the src/dst addr
-   */
-  void SwapAddr();
-  /**
-   * Add a frame (a chunk of bytes) into the message
-   */
-  void AddFrame(const void* addr, int nBytes);
-  /**
-   * @return num of bytes of the current frame.
-   */
-  int FrameSize();
-  /**
-   * @return the pointer to the current frame data.
-   */
-  void* FrameData();
-  /**
-   * @return the data of the current frame as c string
-   */
-  char* FrameStr();
-  /**
-   * Move the cursor to the first frame.
-   */
-  void FirstFrame();
-  /**
-   * Move the cursor to the last frame.
-   */
-  void LastFrame();
-  /**
-   * Move the cursor to the next frame
-   * @return true if the next frame is not NULL; otherwise false
-   */
-  bool NextFrame();
-  /**
-   *  Add a 'format' frame to the msg (like CZMQ's zsock_send).
-   *
-   *  The format is a string that defines the type of each field.
-   *  The format can contain any of these characters, each corresponding to
-   *  one or two arguments:
-   *  i = int (signed)
-   *  1 = uint8_t
-   *  2 = uint16_t
-   *  4 = uint32_t
-   *  8 = uint64_t
-   *  p = void * (sends the pointer value, only meaningful over inproc)
-   *  s = char**
-   *
-   *  Returns size of the added content.
-   */
-  int AddFormatFrame(const char *format, ...);
-  /**
-   *  Parse the current frame added using AddFormatFrame(const char*, ...).
-   *
-   *  The format is a string that defines the type of each field.
-   *  The format can contain any of these characters, each corresponding to
-   *  one or two arguments:
-   *  i = int (signed)
-   *  1 = uint8_t
-   *  2 = uint16_t
-   *  4 = uint32_t
-   *  8 = uint64_t
-   *  p = void * (sends the pointer value, only meaningful over inproc)
-   *  s = char**
-   *
-   *  Returns size of the parsed content.
-   */
-  int ParseFormatFrame(const char* format, ...);
-
-#ifdef USE_ZMQ
-  void ParseFromZmsg(zmsg_t* msg);
-  zmsg_t* DumpToZmsg();
-#endif
-
-  /**
-   * @return msg size in terms of bytes, ignore meta info.
-   */
-  int size() const;
-  /**
-   * Set source addr.
-   * @param addr unique identify one worker/server/stub in the current job
-   */
-  inline void set_src(int addr) { src_ = addr; }
-  /**
-   * @return source addr.
-   */
-  inline int src() const { return src_; }
-  /**
-   * Set destination addr.
-   * @param addr unique identify one worker/server/stub in the current job
-   */
-  inline void set_dst(int addr) { dst_ = addr; }
-  /**
-   * @return dst addr.
-   */
-  inline int dst() const { return dst_; }
-  /**
-   * Set msg type, e.g., kPut, kGet, kUpdate, kRequest
-   */
-  inline void set_type(int type) { type_ = type; }
-  /**
-   * @return msg type.
-   */
-  inline int type() const { return type_; }
-  /**
-   * Set msg target.
-   *
-   * One msg has a target to identify some entity in worker/server/stub.
-   * The target is associated with a version, e.g., Param version.
-   */
-  inline void set_trgt(int val, int version) {
-    trgt_val_ = val;
-    trgt_version_ = version;
-  }
-  inline int trgt_val() const { return trgt_val_; }
-  inline int trgt_version() const { return trgt_version_; }
-
- protected:
-  int src_ = 0;
-  int dst_ = 0;
-  int type_ = 0;
-  int trgt_val_ = 0;
-  int trgt_version_ = 0;
-#ifdef USE_ZMQ
-  zmsg_t* msg_ = nullptr;
-  zframe_t *frame_ = nullptr;
-#endif
-};
-
-inline void DeleteMsg(Msg** msg) {
-  delete *msg;
-  *msg = nullptr;
-}
-
-}  // namespace singa
-
-#endif  // SINGA_COMM_MSG_H_
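Per the diffstat, msg.h moves to include/singa/comm/msg.h with the same 238 lines; only its path changes. As a quick illustration of the address helpers documented above, a hypothetical encode/decode round trip (the numeric values are arbitrary):

    #include "singa/comm/msg.h"

    void AddrSketch() {
      // Pack (group, id, type) into a single int and unpack it again.
      int addr = singa::Addr(3, 7, 2);       // (3 << 16) | (7 << 8) | 2
      int grp  = singa::AddrGrp(addr);       // == 3
      int id   = singa::AddrID(addr);        // == 7
      int type = singa::AddrType(addr);      // == 2

      singa::Msg* msg = new singa::Msg(/*src=*/addr, /*dst=*/singa::Addr(grp, id, type));
      msg->AddFrame("payload", 7);           // frames carry the actual data
      singa::DeleteMsg(&msg);                // frees msg and nulls the pointer
    }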

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/239ed217/include/comm/socket.h
----------------------------------------------------------------------
diff --git a/include/comm/socket.h b/include/comm/socket.h
deleted file mode 100644
index f2ffb4d..0000000
--- a/include/comm/socket.h
+++ /dev/null
@@ -1,174 +0,0 @@
-/************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
-
-#ifndef SINGA_COMM_SOCKET_H_
-#define SINGA_COMM_SOCKET_H_
-
-#ifdef USE_ZMQ
-#include <czmq.h>
-#endif
-#include <map>
-#include <string>
-#include <vector>
-#include "comm/msg.h"
-
-namespace singa {
-
-const std::string kInprocRouterEndpoint = "inproc://router";
-
-class SocketInterface {
- public:
-  virtual ~SocketInterface() {}
-  /**
-    * Send a message to connected socket(s), non-blocking. The message
-    * will be deallocated after sending, thus should not be used after
-    * calling Send();
-    *
-    * @param msg The message to be sent
-    * @return 1 for success queuing the message for sending, 0 for failure
-    */
-  virtual int Send(Msg** msg) = 0;
-  /**
-    * Receive a message from any connected socket.
-    *
-    * @return a message pointer if success; nullptr if failure
-    */
-  virtual Msg* Receive() = 0;
-  /**
-   * @return Identifier of the implementation dependent socket. E.g., zsock_t*
-   * for ZeroMQ implementation and rank for MPI implementation.
-   */
-  virtual void* InternalID() const = 0;
-};
-
-class Poller {
- public:
-  Poller();
-  explicit Poller(SocketInterface* socket);
-  /**
-    * Add a socket for polling; Multiple sockets can be polled together by
-    * adding them into the same poller.
-    */
-  void Add(SocketInterface* socket);
-  /**
-    * Poll for all sockets added into this poller.
-    * @param timeout Stop after this number of milliseconds
-    * @return pointer to the socket if it has one message in the receiving
-    * queue; nullptr if there is no message in any socket.
-    */
-  SocketInterface* Wait(int duration);
-
-  /**
-   * @return true if the poller is terminated due to process interrupt
-   */
-  virtual bool Terminated();
-
- protected:
-#ifdef USE_ZMQ
-  zpoller_t *poller_;
-  std::map<zsock_t*, SocketInterface*> zsock2Socket_;
-#endif
-};
-
-class Dealer : public SocketInterface {
- public:
-  /*
-   * @param id Local dealer ID within a procs if the dealer is from worker or
-   * server thread, starts from 1 (0 is used by the router); or the connected
-   * remote procs ID for inter-process dealers from the stub thread.
-   */
-  Dealer();
-  explicit Dealer(int id);
-  ~Dealer() override;
-  /**
-    * Setup the connection with the router.
-    *
-    * @param endpoint Identifier of the router. For intra-process
-    * connection, the endpoint follows the format of ZeroMQ, i.e.,
-    * starting with "inproc://"; in Singa, since each process has one
-    * router, hence we can fix the endpoint to be "inproc://router" for
-    * intra-process. For inter-process, the endpoint follows ZeroMQ's
-    * format, i.e., IP:port, where IP is the connected process.
-    * @return 1 if the connection is set up successfully; 0 otherwise
-    */
-  int Connect(const std::string& endpoint);
-  int Send(Msg** msg) override;
-  Msg* Receive() override;
-  void* InternalID() const override;
-
- protected:
-  int id_ = -1;
-#ifdef USE_ZMQ
-  zsock_t* dealer_ = nullptr;
-  zpoller_t* poller_ = nullptr;
-#endif
-};
-
-class Router : public SocketInterface {
- public:
-  Router();
-  /**
-   * There is only one router per procs, hence its local id is 0 and is not set
-   * explicitly.
-   *
-   * @param bufsize Buffer at most this number of messages
-   */
-  explicit Router(int bufsize);
-  ~Router() override;
-  /**
-   * Setup the connection with dealers.
-   *
-   * It automatically binds to the endpoint for intra-process communication,
-   * i.e., "inproc://router".
-   *
-   * @param endpoint The identifier for the Dealer socket in other process
-   * to connect. It has the format IP:Port, where IP is the host machine.
-   * If endpoint is empty, it means that all connections are
-   * intra-process connection.
-   * @return number of connected dealers.
-   */
-  int Bind(const std::string& endpoint);
-  /**
-   * If the destination socket has not connected yet, buffer the message.
-   */
-  int Send(Msg** msg) override;
-  Msg* Receive() override;
-  void* InternalID() const override;
-
- protected:
-  int nBufmsg_ = 0;
-  int bufsize_ = 100;
-#ifdef USE_ZMQ
-  zsock_t* router_ = nullptr;
-  zpoller_t* poller_ = nullptr;
-  std::map<int, zframe_t*> id2addr_;
-  std::map<int, std::vector<zmsg_t*>> bufmsg_;
-#endif
-};
-
-#ifdef USE_MPI
-// TODO(wangsheng): add intra-process communication using shared queue
-std::vector<SafeQueue*> MPIQueues;
-#endif
-
-}  // namespace singa
-
-#endif  // SINGA_COMM_SOCKET_H_
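socket.h likewise moves as-is to include/singa/comm/socket.h (174 lines out, 174 in). A heavily simplified, hypothetical intra-process sketch of the Dealer/Router pairing declared above; it assumes a ZeroMQ build (USE_ZMQ) and skips all error handling:

    #include "singa/comm/msg.h"
    #include "singa/comm/socket.h"

    void CommSketch() {
      singa::Router router;
      router.Bind("");                       // empty endpoint: intra-process connections only
      singa::Dealer dealer(1);               // local id; 0 is reserved for the router
      dealer.Connect(singa::kInprocRouterEndpoint);

      singa::Msg* msg = new singa::Msg(singa::Addr(0, 1, 0), singa::Addr(0, 0, 0));
      dealer.Send(&msg);                     // Send() deallocates the message
      singa::Msg* got = router.Receive();    // nullptr on failure
      singa::DeleteMsg(&got);
    }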

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/239ed217/include/driver.h
----------------------------------------------------------------------
diff --git a/include/driver.h b/include/driver.h
deleted file mode 100644
index 9ae4b27..0000000
--- a/include/driver.h
+++ /dev/null
@@ -1,226 +0,0 @@
-/************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
-
-#ifndef SINGA_DRIVER_H_
-#define SINGA_DRIVER_H_
-
-#include <vector>
-#include "proto/job.pb.h"
-#include "proto/singa.pb.h"
-#include "utils/factory.h"
-#include "utils/param.h"
-#include "utils/singleton.h"
-#include "utils/updater.h"
-#include "neuralnet/layer.h"
-#include "./worker.h"
-#include "./server.h"
-
-namespace singa {
-using std::vector;
-class Driver {
- public:
-  /**
-   * Init SINGA
-   * - init glog
-   * - parse job id and job conf from cmd line
-   * - register built-in layer, worker, updater, param subclasses.
-   *
-   * May be used for MPI init if it is used for message passing.
-   */
-  void Init(int argc, char** argv);
-  /**
-   * Update job configuration and call Train(const JobProto&) to start the
-   * training.
-   *
-   * It sets up the logging path and checkpoint files (if resuming), and checks
-   * the existence of the workspace folder.
-   *
-   * @param[in] resume if true resume the training from the latest checkpoint
-   * files.
-   * @param[in] job_conf job configuration.
-   */
-  void Train(bool resume, const JobProto& job_conf);
-  /**
-   * Create workers and servers to conduct the training.
-   *
-   * @param[in] job_conf job configuration with all necessary fields set (e.g.,
-   * by Train(bool, const JobProto&).
-   */
-  void Train(const JobProto& job_conf);
-  /**
-   * Setting the checkpoint field of the job configuration to resume training.
-   *
-   * The checkpoint folder will be searched to get the files for the latest
-   * checkpoint, which will be added into the checkpoint field. The workers
-   * would then load the values of params from the checkpoint files.
-   *
-   * @param job_conf job configuration
-   */
-  void SetupForResume(JobProto* job_conf);
-  /**
-   * Create server instances.
-   *
-   * @param[in] job_conf job configuration.
-   * @param[in] net training neural network.
-   * @return server instances
-   */
-  const vector<Server*> CreateServers(const JobProto& job_conf, NeuralNet* net);
-  /**
-   * Create worker instances.
-   * @param[in] job_conf job configuration.
-   * @param[in] net training neural network.
-   * @return worker instances
-   */
-  const vector<Worker*> CreateWorkers(const JobProto& job_conf, NeuralNet* net);
-
-
-  /*********** Subclasses registers *************************/
-  /**
-   * Register a Layer subclass.
-   *
-   * @param type layer type ID. If called to register built-in subclasses,
-   * it is from LayerType; if called to register user-defined
-   * subclass, it is a string;
-   * @return 0 if success; otherwise -1.
-   */
-  template<typename Subclass, typename Type>
-  int RegisterLayer(const Type& type);
-  /**
-   * Register an Updater subclass.
-   *
-   * @param type ID of the subclass. If called to register built-in subclasses,
-   * it is from UpdaterType; if called to register user-defined
-   * subclass, it is a string;
-   * @return 0 if success; otherwise -1.
-   */
-  template<typename Subclass, typename Type>
-  int RegisterUpdater(const Type& type);
-  /**
-   * Register a learning rate generator subclasses.
-   *
-   * @param type ID of the subclass. If called to register built-in subclasses,
-   * it is from ChangeMethod; if called to register user-defined
-   * subclass, it is a string;
-   * @return 0 if success; otherwise -1.
-   */
-  template<typename Subclass, typename Type>
-  int RegisterLRGenerator(const Type& type);
-  /**
-   * Register a Worker subclass.
-   *
-   * @param type ID of the subclass. If called to register built-in subclasses,
-   * it is from TrainOneBatchAlg; if called to register user-defined
-   * subclass, it is a string;
-   * @return 0 if success; otherwise -1.
-   */
-  template<typename Subclass, typename Type>
-  int RegisterWorker(const Type& type);
-  /**
-   * Register a Param subclass.
-   * @param type ID of the subclass. If called to register built-in subclasses,
-   * it is from ParamType; if called to register user-defined
-   * subclass, it is a string;
-   *
-   * @return 0 if success; otherwise -1.
-   */
-  template<typename Subclass, typename Type>
-  int RegisterParam(const Type& type);
-  /**
-   * Register ParamGenerator subclasses for initializing Param objects.
-   *
-   * @param type ID of the subclass. If called to register built-in subclasses,
-   * it is from InitMethod; if called to register user-defined
-   * subclass, it is a string;
-   * @return 0 if success; otherwise -1.
-   */
-  template<typename Subclass, typename Type>
-  int RegisterParamGenerator(const Type& type);
-
-  /****************** Access function ********************/
-  /**
-   * @return job ID which is generated by zookeeper and passed in by the
-   * launching script.
-   */
-  inline int job_id() const { return job_id_; }
-  /**
-   * @return job conf path which is passed by users at the command line. It
-   * should at least contain the cluster configuration.
-   */
-  inline JobProto job_conf() const { return job_conf_; }
-
- private:
-  int job_id_;
-  JobProto job_conf_;
-  SingaProto singa_conf_;
-};
-
-/************* Implementation of template functions*************************
-* Must put the implementation in driver.h file instead of driver.cc.
-* Otherwise there would be linking errors caused by unknown registration
-* functions, because these functions cannot be generated merely based on their
-* declarations in driver.h.
-*/
-
-template<typename Subclass, typename Type>
-int Driver::RegisterLayer(const Type& type) {
-  auto factory = Singleton<Factory<singa::Layer>>::Instance();
-  factory->Register(type, CreateInstance(Subclass, Layer));
-  return 1;
-}
-
-template<typename Subclass, typename Type>
-int Driver::RegisterParam(const Type& type) {
-  auto factory = Singleton<Factory<singa::Param>>::Instance();
-  factory->Register(type, CreateInstance(Subclass, Param));
-  return 1;
-}
-
-template<typename Subclass, typename Type>
-int Driver::RegisterParamGenerator(const Type& type) {
-  auto factory = Singleton<Factory<singa::ParamGenerator>>::Instance();
-  factory->Register(type, CreateInstance(Subclass, ParamGenerator));
-  return 1;
-}
-
-template<typename Subclass, typename Type>
-int Driver::RegisterUpdater(const Type& type) {
-  auto factory = Singleton<Factory<singa::Updater>>::Instance();
-  factory->Register(type, CreateInstance(Subclass, Updater));
-  return 1;
-}
-
-template<typename Subclass, typename Type>
-int Driver::RegisterLRGenerator(const Type& type) {
-  auto factory = Singleton<Factory<singa::LRGenerator>>::Instance();
-  factory->Register(type, CreateInstance(Subclass, LRGenerator));
-  return 1;
-}
-
-template<typename Subclass, typename Type>
-int Driver::RegisterWorker(const Type& type) {
-  auto factory = Singleton<Factory<singa::Worker>>::Instance();
-  factory->Register(type, CreateInstance(Subclass, Worker));
-  return 1;
-}
-
-}  // namespace singa
-
-#endif  // SINGA_DRIVER_H_
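driver.h moves to include/singa/driver.h with its template registration helpers intact. A hedged sketch of registering a user-defined subclass through this API; MyLayer is a hypothetical singa::Layer subclass assumed to be defined elsewhere in user code:

    #include <string>
    #include "singa/driver.h"

    int main(int argc, char** argv) {
      singa::Driver driver;
      driver.Init(argc, argv);        // glog, job id/conf parsing, built-in registrations
      // Hypothetical user-defined layer registered under a string type id.
      driver.RegisterLayer<MyLayer, std::string>("kMyLayer");
      driver.Train(false, driver.job_conf());   // false: do not resume from checkpoints
      return 0;
    }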

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/239ed217/include/io/hdfs_store.h
----------------------------------------------------------------------
diff --git a/include/io/hdfs_store.h b/include/io/hdfs_store.h
deleted file mode 100644
index f85615b..0000000
--- a/include/io/hdfs_store.h
+++ /dev/null
@@ -1,22 +0,0 @@
-/************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
-
-// TODO(wangwei) use hdfs as data storage

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/239ed217/include/io/imagefolder_store.h
----------------------------------------------------------------------
diff --git a/include/io/imagefolder_store.h b/include/io/imagefolder_store.h
deleted file mode 100644
index c05d92d..0000000
--- a/include/io/imagefolder_store.h
+++ /dev/null
@@ -1,21 +0,0 @@
-/************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
-// TODO(wangwei) store images in a disk folder

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/239ed217/include/io/kvfile.h
----------------------------------------------------------------------
diff --git a/include/io/kvfile.h b/include/io/kvfile.h
deleted file mode 100644
index 27dd35e..0000000
--- a/include/io/kvfile.h
+++ /dev/null
@@ -1,182 +0,0 @@
-/************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
-
-#ifndef SINGA_IO_KVFILE_H_
-#define SINGA_IO_KVFILE_H_
-
-#include <fstream>
-#include <string>
-#include <unordered_set>
-
-#define USE_PROTOBUF 1
-
-#ifdef USE_PROTOBUF
-#include <google/protobuf/message.h>
-#endif
-
-namespace singa {
-namespace io {
-
-/**
- * KVFile stores training/validation/test tuples.
- * Every worker node should have a KVFile for training data (validation/test
- * KVFile is optional).
- * KVFile consists of a set of unordered tuples. Each tuple is
- * encoded as [key_len key val_len val] (key_len and val_len are of type
-   * uint32), which indicate the bytes of the key and value respectively.
- *
- * When KVFile is created, it will remove the last tuple if the value size
- * and key size do not match because the last write crashed.
- *
- * TODO(wangwei) split one KVFile into multiple KVFile s.
- *
- */
-class KVFile {
- public:
-  enum Mode {
-    // read only mode used in training
-    kRead = 0,
-    // write mode used in creating KVFile (will overwrite previous one)
-    kCreate = 1,
-    // append mode, e.g. used when previous creating crashes
-    kAppend = 2
-  };
-
-  /**
-   * KVFile constructor.
-   *
-   * @param path path to the disk KVFile, it can be
-   *  - a path to local disk file.
-   *  - a path to local directory. This is to be compatible with the older
-   *    version (DataShard). The KVFile is shard.dat under that directory
-   *  - a hdfs file starting with "hdfs://"
-   * @param mode KVFile open mode, KVFile::kRead, KVFile::kWrite or
-   * KVFile::kAppend
-   * @param bufsize Cache bufsize bytes data for every disk op (read or write),
-   * default is 10MB.
-   */
-  KVFile(const std::string& path, Mode mode, int bufsize = 10485760);
-  ~KVFile();
-
-#ifdef USE_PROTOBUF
-  /**
-   * read next tuple from the KVFile.
-   *
-   * @param key Tuple key
-   * @param val Record of type Message
-   * @return false if the read is unsuccessful, e.g., the tuple was not inserted
-   *         completely.
-   */
-  bool Next(std::string* key, google::protobuf::Message* val);
-  /**
-   * Append one tuple to the KVFile.
-   *
-   * @param key e.g., image path
-   * @param val
-   * @return false if unsuccessful, e.g., inserted before
-   */
-  bool Insert(const std::string& key, const google::protobuf::Message& tuple);
-#endif
-  /**
-   * read next tuple from the KVFile.
-   *
-   * @param key Tuple key
-   * @param val Record of type string
-   * @return false if unsuccessful, e.g., the tuple was not inserted completely.
-   */
-  bool Next(std::string* key, std::string* val);
-  /**
-   * Append one tuple to the KVFile.
-   *
-   * @param key e.g., image path
-   * @param val
-   * @return false if unsuccessful, e.g., inserted before
-   */
-  bool Insert(const std::string& key, const std::string& tuple);
-  /**
-   * Move the read pointer to the head of the KVFile file.
-   * Used for repeated reading.
-   */
-  void SeekToFirst();
-  /**
-   * Flush buffered data to disk.
-   * Used only for kCreate or kAppend.
-   */
-  void Flush();
-  /**
-   * Iterate through all tuples to get the num of all tuples.
-   *
-   * @return num of tuples
-   */
-  int Count();
-  /**
-   * @return path to KVFile file
-   */
-  inline std::string path() { return path_; }
-
- protected:
-  /**
-   * Read the next key and prepare buffer for reading value.
-   *
-   * @param key
-   * @return length (i.e., bytes) of value field.
-   */
-  int Next(std::string* key);
-  /**
-   * Setup the disk pointer to the right position for append in case that
-   * the previous write crashes.
-   *
-   * @param path KVFile path.
-   * @return offset (end pos) of the last success written record.
-   */
-  int PrepareForAppend(const std::string& path);
-  /**
-   * Read data from disk if the current data in the buffer is not a full field.
-   *
-   * @param size size of the next field.
-   */
-  bool PrepareNextField(int size);
-
- private:
-  std::string path_ = "";
-  Mode mode_;
-  //!< either ifstream or ofstream
-  std::fstream fdat_;
-  //!< to avoid replicated record
-  std::unordered_set<std::string> keys_;
-  //!< internal buffer
-  char* buf_ = nullptr;
-  //!< offset inside the buf_
-  int offset_ = 0;
-  //!< allocated bytes for the buf_
-  int capacity_ = 0;
-  //!< bytes in buf_, used in reading
-  int bufsize_ = 0;
-};
-}  // namespace io
-
-/**
- * @deprecated {ShardData is deprecated! Use KVFile}.
-using ShardData = KVFile;
-*/
-}  // namespace singa
-
-#endif  // SINGA_IO_KVFILE_H_
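kvfile.h moves unchanged to include/singa/io/kvfile.h (182 lines each way). A minimal sketch of reading the [key_len key val_len val] tuples described above; the shard path is a hypothetical argument supplied by the caller:

    #include <string>
    #include "singa/io/kvfile.h"

    void DumpTuples(const std::string& shard_path) {
      singa::io::KVFile shard(shard_path, singa::io::KVFile::kRead);
      std::string key, val;
      while (shard.Next(&key, &val)) {
        // key is e.g. an image path; val holds the serialized record
      }
      shard.SeekToFirst();            // rewind if a second pass is needed
    }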

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/239ed217/include/io/kvfile_store.h
----------------------------------------------------------------------
diff --git a/include/io/kvfile_store.h b/include/io/kvfile_store.h
deleted file mode 100644
index bcd70eb..0000000
--- a/include/io/kvfile_store.h
+++ /dev/null
@@ -1,55 +0,0 @@
-/************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
-
-#ifndef SINGA_IO_KVFILE_STORE_H_
-#define SINGA_IO_KVFILE_STORE_H_
-
-#include <string>
-#include "io/store.h"
-#include "io/kvfile.h"
-
-namespace singa {
-namespace io {
-
-/**
- * Use the KVFile as the data storage.
- *
- * KVFile is a binary file. Each tuple is stored as byte string.
- */
-class KVFileStore : public Store {
- public:
-  ~KVFileStore() { Close();}
-  bool Open(const std::string& source, Mode mode) override;
-  void Close() override;
-  bool Read(std::string* key, std::string* value) override;
-  void SeekToFirst() override;
-  bool Write(const std::string& key, const std::string& value) override;
-  void Flush() override;
-
- private:
-  KVFile* file_ = nullptr;
-  Mode mode_;
-};
-
-}  // namespace io
-}  // namespace singa
-
-#endif  // SINGA_IO_KVFILE_STORE_H_

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/239ed217/include/io/store.h
----------------------------------------------------------------------
diff --git a/include/io/store.h b/include/io/store.h
deleted file mode 100644
index 15afb6a..0000000
--- a/include/io/store.h
+++ /dev/null
@@ -1,105 +0,0 @@
-/************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
-
-#ifndef SINGA_IO_STORE_H_
-#define SINGA_IO_STORE_H_
-
-#include <string>
-
-namespace singa {
-namespace io {
-
-using std::string;
-enum Mode { kCreate, kRead, kAppend };
-
-/**
- * General key-value store that provides functions for reading and writing
- * tuples.
- *
- * Subclasses implement the functions for a specific data storage, e.g., CSV
- * file, HDFS, image folder, singa::io::SFile, leveldb, lmdb, etc.
- */
-class Store {
- public:
-  Store() { }
-  /**
-   * In case that users forget to call Close() to release resources, e.g.,
-   * memory, you can release them here.
-   */
-  virtual ~Store() { }
-  /**
-   * @param[in] source path to the storage, could be a file path, folder path
-   * or hdfs path, or even a http url.
-   * @param[in] mode
-   * @return true if open successfully, otherwise false.
-   */
-  virtual bool Open(const std::string& source, Mode mode) = 0;
-  /**
-   * Release resources.
-   */
-  virtual void Close() = 0;
-  /**
-   * Read a tuple.
-   *
-   * @param[out] key
-   * @param[out] value
-   * @return true if read successfully, otherwise false.
-   */
-  virtual bool Read(std::string* key, std::string* value) = 0;
-  /**
-   * Seek the read header to the first tuple.
-   */
-  virtual void SeekToFirst() = 0;
-  /**
-   * Write a tuple.
-   *
-   * @param[in] key
-   * @param[in] value
-   * @return true if success, otherwise false.
-   */
-  virtual bool Write(const std::string& key, const std::string& value) = 0;
-  /**
-   * Flush writing buffer if it has.
-   */
-  virtual void Flush() {}
-};
-
-/**
- * Create a Store object.
- *
- * @param[in] backend identifier for a specific backend. Two backends are
- * included currently, i.e., "kvfile", "textfile"
- * @return a pointer to the newly created Store.
- */
-Store* CreateStore(const string& backend);
-/**
- * Create and open a Store object.
- *
- * @param[in] backend, @see CreateStore().
- * @param[in] path
- * @param[in] mode kRead or kCreate or kAppend
- */
-Store* OpenStore(const string& backend, const string& path, Mode mode);
-
-}  // namespace io
-}  // namespace singa
-
-#endif  // SINGA_IO_STORE_H_
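store.h also moves wholesale to include/singa/io/store.h. A short sketch of the CreateStore/OpenStore factory functions declared above, using the "textfile" backend named in the comment; the path argument is hypothetical:

    #include <string>
    #include "singa/io/store.h"

    void ReadAll(const std::string& path) {
      singa::io::Store* store =
          singa::io::OpenStore("textfile", path, singa::io::kRead);
      std::string key, value;
      while (store->Read(&key, &value)) {
        // one tuple per line for the text-file backend
      }
      store->Close();
      delete store;
    }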

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/239ed217/include/io/textfile_store.h
----------------------------------------------------------------------
diff --git a/include/io/textfile_store.h b/include/io/textfile_store.h
deleted file mode 100644
index 5450f00..0000000
--- a/include/io/textfile_store.h
+++ /dev/null
@@ -1,56 +0,0 @@
-/************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
-
-#ifndef SINGA_IO_TEXTFILE_STORE_H_
-#define SINGA_IO_TEXTFILE_STORE_H_
-
-#include <fstream>
-#include <string>
-#include "io/store.h"
-
-namespace singa {
-namespace io {
-/**
- * Use text file as the data storage, one line per tuple.
- *
- * It is used for storing CSV format data where the key is the line No. and
- * the value is the line.
- */
-class TextFileStore : public Store {
- public:
-  ~TextFileStore() { Close(); }
-  bool Open(const std::string& source, Mode mode) override;
-  void Close() override;
-  bool Read(std::string* key, std::string* value) override;
-  void SeekToFirst() override;
-  bool Write(const std::string& key, const std::string& value) override;
-  void Flush() override;
-
- private:
-  int lineNo_ = 0;
-  std::fstream* fs_ = nullptr;
-  Mode mode_;
-};
-
-}  // namespace io
-}  // namespace singa
-
-#endif  // SINGA_IO_TEXTFILE_STORE_H_

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/239ed217/include/neuralnet/connection_layer.h
----------------------------------------------------------------------
diff --git a/include/neuralnet/connection_layer.h b/include/neuralnet/connection_layer.h
deleted file mode 100644
index 1976fb9..0000000
--- a/include/neuralnet/connection_layer.h
+++ /dev/null
@@ -1,156 +0,0 @@
-/************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
-
-#ifndef SINGA_NEURALNET_CONNECTION_LAYER_H_
-#define SINGA_NEURALNET_CONNECTION_LAYER_H_
-
-#include <vector>
-#include "neuralnet/layer.h"
-
-/**
- * \file this file includes the declarations of layers that inherit the
- * base ConnectionLayer.
- */
-namespace singa {
-class BridgeLayer : virtual public ConnectionLayer {
- public:
-  void set_ready(bool a) {
-    ready_ = a;
-  }
-  bool ready() const {
-    return ready_;
-  }
-  virtual bool is_bridgesrclayer() const {
-    return false;
-  }
-  virtual bool is_bridgedstlayer() const {
-    return false;
-  }
-
- protected:
-  //!< true if received grad from BridgeDstLayer
-  bool ready_;
-};
-
-/**
- * For receiving data from a layer on another thread, which may reside on
- * another node due to layer/data partition.
- */
-class BridgeDstLayer : public BridgeLayer {
- public:
-  void Setup(const LayerProto& conf, const vector<Layer*>& srclayers) override;
-  void ComputeFeature(int flag, const vector<Layer*>& srclayers) override {
-    // reset ready_ for next iteration.
-    ready_ = false;
-  }
-  void ComputeGradient(int flag,  const vector<Layer*>& srclayers) override {}
-  bool is_bridgedstlayer() const {
-    return true;
-  }
-};
-
-/**
- * For sending data to a layer on another thread, which may reside on another
- * node due to layer/data partition.
- */
-class BridgeSrcLayer : public BridgeLayer {
- public:
-  void Setup(const LayerProto& conf, const vector<Layer*>& srclayers) override {
-    CHECK_GE(srclayers.size(), 1);
-    srclayer_ = srclayers.at(0);
-  }
-  void ComputeFeature(int flag, const vector<Layer*>& srclayers) override {}
-  void ComputeGradient(int flag,  const vector<Layer*>& srclayers) override {
-    ready_ = false;
-  }
-  const Blob<float>& data(const Layer* from) const override {
-    return srclayer_->data(this);
-  }
-  Blob<float>* mutable_data(const Layer* from) override {
-    return srclayer_->mutable_data(this);
-  }
-  const Blob<float>& grad(const Layer* from) const override {
-    return srclayer_->grad(this);
-  }
-  Blob<float>* mutable_grad(const Layer* from) override {
-    return srclayer_->mutable_grad(this);
-  }
-  bool is_bridgesrclayer() const override {
-    return true;
-  }
-
- private:
-  Layer* srclayer_;
-};
-
-
-/**
- * Connect multiple (src) layers with a single (dst) layer.
- *
- * It concatenates the feature Blobs (i.e., matrices) of the src layers along
- * one dimension. The concatenated feature Blob is fed into the dst layer.
- */
-class ConcateLayer : public ConnectionLayer {
- public:
-  void Setup(const LayerProto& proto, const vector<Layer*>& srclayers) override;
-  void ComputeFeature(int flag, const vector<Layer*>& srclayers) override;
-  void ComputeGradient(int flag, const vector<Layer*>& srclayers) override;
-};
-
-/**
- * Connect a single (src) layer with multiple (dst) layers.
- *
- * It slices the feature Blob (i.e., matrix) of the src layer along one
- * dimension. The sliced feature Blobs are fed into the dst layers.
- */
-class SliceLayer : public ConnectionLayer {
- public:
-  void Setup(const LayerProto& proto, const vector<Layer*>& srclayers) override;
-  void ComputeFeature(int flag, const vector<Layer*>& srclayers) override;
-  void ComputeGradient(int flag, const vector<Layer*>& srclayers) override;
-
- private:
-  std::vector<Blob<float>> datavec_;
-  std::vector<Blob<float>> gradvec_;
-  int slice_dim_;
-  int slice_num_;
-};
-
-/**
- * Connect a single (src) layer with multiple dst layers.
- *
- * It replicates the feature Blob of the src layer.
- * Each replicated feature Blob will be fed into one dst layer.
- * It aggregates the gradients set by all dst layers and sets the sum as the
- * gradient of the src layer.
- */
-class SplitLayer : public ConnectionLayer {
- public:
-  void Setup(const LayerProto& proto, const vector<Layer*>& srclayers) override;
-  void ComputeFeature(int flag, const vector<Layer*>& srclayers) override;
-  void ComputeGradient(int flag, const vector<Layer*>& srclayers) override;
-
- protected:
-  Blob<float> grads_;
-};
-
-}  // namespace singa
-
-#endif  // SINGA_NEURALNET_CONNECTION_LAYER_H_
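
The data-flow semantics documented above can be illustrated with plain
std::vector arithmetic; this only sketches the concatenate/split math and is
not the Blob-based implementation in the corresponding source files:

    #include <cstddef>
    #include <vector>

    // SplitLayer: the feature is replicated to N dst layers, and the gradients
    // they produce are summed before being passed back to the src layer.
    std::vector<float> SumGrads(const std::vector<std::vector<float>>& grads) {
      std::vector<float> sum(grads.front().size(), 0.f);
      for (const auto& g : grads)
        for (std::size_t i = 0; i < g.size(); ++i)
          sum[i] += g[i];
      return sum;
    }

    // ConcateLayer (feature dimension): the per-instance feature vectors of
    // two src layers are joined into one longer vector fed to the dst layer.
    std::vector<float> ConcatFeatures(const std::vector<float>& a,
                                      const std::vector<float>& b) {
      std::vector<float> out(a);
      out.insert(out.end(), b.begin(), b.end());
      return out;
    }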

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/239ed217/include/neuralnet/input_layer.h
----------------------------------------------------------------------
diff --git a/include/neuralnet/input_layer.h b/include/neuralnet/input_layer.h
deleted file mode 100644
index 2e8725e..0000000
--- a/include/neuralnet/input_layer.h
+++ /dev/null
@@ -1,303 +0,0 @@
-/************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
-
-#ifndef SINGA_NEURALNET_INPUT_LAYER_H_
-#define SINGA_NEURALNET_INPUT_LAYER_H_
-
-#include <string>
-#include <vector>
-#include "io/store.h"
-#include "neuralnet/layer.h"
-#include "utils/data_shard.h"
-/**
- * \file this file includes the declarations of input layers that inherit the
- * base InputLayer to load input features.
- *
- * The feature loading phase can be implemented using a single layer or
- * separated into DataLayer (for loading features as records) and ParserLayer
- * (for parsing features from records). SINGA has provided some subclasses of
- * DataLayer and ParserLayer.
- *
- * Data prefetching can be implemented as a sub-class of InputLayer.
- * SINGA provides a built-in PrefetchLayer which embeds DataLayer and
- * ParserLayer.
- */
-namespace singa {
-using std::string;
-using std::vector;
-
-/************************Start of new input layers***************************/
-/**
- * Base class for loading data from Store.
- */
-class StoreInputLayer : virtual public InputLayer {
- public:
-  ~StoreInputLayer();
-  void Setup(const LayerProto& proto, const vector<Layer*>& srclayers) override;
-  void ComputeFeature(int flag, const vector<Layer*>& srclayers) override;
-
-  ConnectionType dst_layer_connection() const override { return kOneToMany; }
-
- protected:
-  /**
-   * Parse the (key, val) tuple to get the feature (and label).
-   * Subclasses must implement this function.
-   * @param[in] k parse this tuple as the k-th instance of one mini-batch.
-   * @param[in] flag used to guide the parsing, e.g., kDeploy phase should not
-   * parse labels from the tuple.
-   * @param[in] key
-   * @param[in] val
-   */
-  virtual bool Parse(int k, int flag, const string& key, const string& val) = 0;
-
- protected:
-  int batchsize_ = 1;
-  int random_skip_ = 0;
-  io::Store* store_ = nullptr;
-};
-
-/**
- * Base layer for parsing a key-value tuple as a feature vector with fixed
- * length. The feature shape is indicated by users in the configuration.
- * Each tuple may have a label.
- */
-class SingleLabelRecordLayer : public StoreInputLayer {
- public:
-  void Setup(const LayerProto& proto, const vector<Layer*>& srclayers) override;
-  void ComputeFeature(int flag, const vector<Layer*>& srclayers) override;
-
- protected:
-  /**
-   * Load a single record (tuple), e.g., the mean or standard deviation vector.
-   */
-  virtual void LoadRecord(const string& backend, const string& path,
-      Blob<float>* to) = 0;
-
- protected:
-  /**
-   * Feature standardization by processing each feature dimension via
-   * @f$ y = (x - mu)/ std @f$
-   * <a href="http://ufldl.stanford.edu/wiki/index.php/Data_Preprocessing">
-   * UFLDL</a>
-   */
-  Blob<float> mean_, std_;
-};
-
-/**
- * Specific layer that parses the value string loaded by Store into a
- * SingleLabelImageRecord.
- */
-class ProtoRecordLayer : public SingleLabelRecordLayer {
- public:
-  void Setup(const LayerProto& proto, const vector<Layer*>& srclayers) override;
-
- protected:
-  /**
-   * Parse key as instance ID and val into SingleLabelImageRecord.
-   * @copydetails StoreInputLayer::Parse()
-   */
-  bool Parse(int k, int flag, const string& key, const string& val) override;
-  void LoadRecord(const string& backend,
-                  const string& path,
-                  Blob<float>* to) override;
-
- private:
-  // TODO(wangwei) decode the image
-  bool encoded_;
-};
-
-/**
- * Specific layer that parses the value string loaded by Store as a line from
- * a CSV file.
- *
- * It assumes the first column is the label, unless has_label_ is configured
- * to false or the data is used in deploy mode.
- */
-class CSVRecordLayer : public SingleLabelRecordLayer {
- public:
-  void Setup(const LayerProto& proto, const vector<Layer*>& srclayers) override;
-
- protected:
-  bool Parse(int k, int flag, const string& key, const string& val) override;
-  void LoadRecord(const string& backend,
-                  const string& path,
-                  Blob<float>* to) override;
-
- private:
-  std::string sep_;
-  bool has_label_;
-};
-
-/**
- * Do preprocessing for images, including cropping, mirroring, resizing.
- */
-class ImagePreprocessLayer : public InputLayer {
- public:
-  void Setup(const LayerProto& proto, const vector<Layer*>& srclayers) override;
-  void ComputeFeature(int flag, const vector<Layer*>& srclayers);
-
- private:
-  bool mirror_ = false;
-  int cropsize_ = 0;
-  int resize_ = 0;
-  float scale_ = 1;
-};
-
-/************************End of new input layers***************************/
-/**
- * Base layer for reading ::Record from local Shard, HDFS, lmdb, etc.
- */
-class DataLayer: virtual public InputLayer {
- public:
-  Blob<float>* mutable_data(const Layer* layer) override { return nullptr; }
-  ConnectionType dst_layer_connection() const override {
-    return kOneToMany;
-  }
-
-  inline int batchsize() const { return batchsize_; }
-  virtual const Record& sample() const {
-    return sample_;
-  }
-  /**
-   * @return the loaded records
-   */
-  virtual const std::vector<Record>& records() const {
-    return records_;
-  }
-
- protected:
-  int random_skip_;
-  int batchsize_;
-  Record sample_;
-  std::vector<Record> records_;
-};
-/**
- * Layer for loading Record from DataShard.
- *
- * It is derived from DataLayer.
- */
-class ShardDataLayer : public DataLayer {
- public:
-  ~ShardDataLayer();
-
-  void Setup(const LayerProto& proto, const vector<Layer*>& srclayers) override;
-  void ComputeFeature(int flag, const vector<Layer*>& srclayers) override;
-
- private:
-  DataShard* shard_;
-};
-
-#ifdef USE_LMDB
-#include <lmdb.h>
-class LMDBDataLayer : public DataLayer {
- public:
-  ~LMDBDataLayer();
-
-  void Setup(const LayerProto& proto, const vector<Layer*>& srclayers) override;
-  void OpenLMDB(const std::string& path);
-  void ComputeFeature(int flag, const vector<Layer*>& srclayers) override;
-  void ConvertCaffeDatumToRecord(const CaffeDatum& datum,
-                                 SingleLabelImageRecord* record);
-
- private:
-  MDB_env* mdb_env_;
-  MDB_dbi mdb_dbi_;
-  MDB_txn* mdb_txn_;
-  MDB_cursor* mdb_cursor_;
-  MDB_val mdb_key_, mdb_value_;
-};
-#endif
-
-/**
- * Base layer for parsing the input records into Blobs.
- */
-class ParserLayer : public InputLayer {
- public:
-  void ComputeFeature(int flag, const vector<Layer*>& srclayers) override;
-  void ComputeGradient(int flag, const vector<Layer*>& srclayers) override {}
-  ConnectionType dst_layer_connection() const override {
-    return kOneToMany;
-  }
-  /**
-   * Parse records from DataLayer into blob.
-   */
-  virtual void ParseRecords(int flag, const std::vector<Record>& records,
-      Blob<float>* blob) = 0;
-};
-
-/**
- * Derived from ParserLayer to parse the label from SingleLabelImageRecord.
- */
-class LabelLayer : public ParserLayer {
- public:
-  void Setup(const LayerProto& proto, const vector<Layer*>& srclayers) override;
-  void ParseRecords(int flag, const std::vector<Record>& records,
-                    Blob<float>* blob) override;
-};
-
-/**
- * Derived from ParserLayer to parse the MNIST feature from
- * SingleLabelImageRecord.
- */
-class MnistLayer : public ParserLayer {
- public:
-  void Setup(const LayerProto& proto, const vector<Layer*>& srclayers) override;
-  void ParseRecords(int flag, const std::vector<Record>& records,
-                    Blob<float>* blob) override;
-
- protected:
-  float norm_a_, norm_b_;
-};
-/**
- * Derived from ParserLayer to parse RGB image feature from
- * SingleLabelImageRecord.
- */
-class RGBImageLayer : public ParserLayer {
- public:
-  void Setup(const LayerProto& proto, const vector<Layer*>& srclayers) override;
-  void ParseRecords(int flag, const std::vector<Record>& records,
-                    Blob<float>* blob) override;
-
- private:
-  float scale_;
-  int cropsize_;
-  bool mirror_;
-  Blob<float> mean_;
-};
-/**
- * Layer for prefetching data records and parsing them.
- *
- * The data loading and parsing work is done by internal DataLayer and
- * ParserLayer respectively. This layer controls the prefetching thread, i.e.,
- * creating and joining the prefetching thread.
- */
-class PrefetchLayer : public Layer {
- public:
-  ~PrefetchLayer();
-  void ComputeFeature(int flag, const vector<Layer*>& srclayers) override;
-  void ComputeGradient(int flag, const vector<Layer*>& srclayers) override {}
-
- protected:
-  std::thread thread_;
-};
-
-}  // namespace singa
-
-#endif  // SINGA_NEURALNET_INPUT_LAYER_H_
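
The parsing convention described for CSVRecordLayer (first column is the label
unless has_label_ is false) can be sketched with standard C++ only; the
separator, error handling, and function name below are illustrative, not
SINGA's actual Parse() implementation:

    #include <cstdlib>
    #include <sstream>
    #include <string>
    #include <vector>

    // Split one CSV value string into an integer label and float features.
    bool ParseCsvTuple(const std::string& val, bool has_label,
                       int* label, std::vector<float>* feature) {
      std::stringstream ss(val);
      std::string field;
      bool first = true;
      while (std::getline(ss, field, ',')) {
        if (first && has_label)
          *label = std::atoi(field.c_str());
        else
          feature->push_back(std::strtof(field.c_str(), nullptr));
        first = false;
      }
      return !feature->empty();
    }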

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/239ed217/include/neuralnet/layer.h
----------------------------------------------------------------------
diff --git a/include/neuralnet/layer.h b/include/neuralnet/layer.h
deleted file mode 100644
index 5ed0c7e..0000000
--- a/include/neuralnet/layer.h
+++ /dev/null
@@ -1,294 +0,0 @@
-/************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
-
-#ifndef SINGA_NEURALNET_LAYER_H_
-#define SINGA_NEURALNET_LAYER_H_
-
-#include <map>
-#include <string>
-#include <thread>
-#include <vector>
-#include "proto/common.pb.h"
-#include "proto/job.pb.h"
-#include "utils/common.h"
-#include "utils/blob.h"
-#include "utils/param.h"
-
-namespace singa {
-using std::vector;
-// TODO(wangwei) make AuxType a template argument for Layer.
-using AuxType = int;
-/**
- * Base layer class.
- *
- * Subclasses should implement at least
- * Layer::ComputeFeature() and Layer::ComputeGradient()
- * functions in accordance with the NeuralNet::TrainOneBatch function.
- */
-class Layer {
- public:
-  /**
-   * Create a sub-layer instance based on proto.type();
-   *
-   * @param proto configuration of the layer instance.
-   * @return pointer to the newly created layer instance.
-   */
-  static Layer* Create(const LayerProto& proto);
-
-  Layer() {}
-  virtual ~Layer() {}
-  /**
-   * Setup layer properties.
-   *
-   * Set up members, e.g., shapes of Param objects, based on the layer
-   * configuration and connected layers.
-   * It should check the partition setting when setting up the properties.
-   *
-   * @param conf layer configuration.
-   * @param srclayers source layers that connect to this layer.
-   */
-  virtual void Setup(const LayerProto& conf, const vector<Layer*>& srclayers) {
-    layer_conf_ = conf;
-  }
-  /**
-   * Compute features of this layer based on connected layers.
-   *
-   * @param[in] flag set by the TrainOneBatch function, e.g., to indicate the
-   * running phase (kForward|kTrain, kForward|kTest, etc).
-   * @param[in] srclayers source layers that connect to this layer.
-   */
-  virtual void ComputeFeature(int flag, const vector<Layer*>& srclayers) = 0;
-  /**
-   * Compute gradients for parameters associated with this layer.
-   * It may also compute the gradients of the loss w.r.t the source layers.
-   *
-   * \copydetails ComputeFeature().
-   */
-  virtual void ComputeGradient(int flag, const vector<Layer*>& srclayers) = 0;
-  /**
-   * Layers that have parameters must override this function to return all Param
-   * objects associated with this layer.
-   *
-   * @return parameters associated with this layer.
-   */
-  virtual const std::vector<Param*> GetParams() const {
-    return std::vector<Param*> {};
-  }
-  /**
-   * Return the connection type between one neuron of this layer and its source
-   * layer.
-   *
-   * Currently support two connection types: kOneToOne, and kOneToAll.
-   * - kOneToOne indicates the neuron depends on only one neuron from src layer.
-   * - kOneToAll indicates the neuron depends on all neurons from src layer.
-   * TODO(wangwei) support kOneToMany.
-   *
-   * @param[in] k index of source layer, current only support k = 0.
-   * @return connection type.
-   */
-  virtual ConnectionType src_neuron_connection(int k) const {
-    // CHECK_LT(k, srclayers_.size());
-    return kOneToOne;
-  }
-  /**
-   * Return the connection type of this layer and all dst layers.
-   *
-   * Currently support two connection types: kOneToOne, and kOneToMany.
-   * - kOneToOne indicates the users implement the ComputeFeature and
-   * ComputeGradient function considering only one dst layer. In this case,
-   * a SplitLayer will be added automatically to connect this layer with all
-   * dest layer.
-   * - kOneToMany indicates this layer has already considered multiple dst
-   *   layers in the implementation.
-   *
-   * @return connection type default is kOneToOne.
-   */
-  virtual ConnectionType dst_layer_connection() const {
-    return kOneToOne;
-  }
-  /**
-   * To display layer info, e.g., aggregated loss/accuracy, or norm of feature
-   * vector and norm of parameters.
-   *
-   * @param[in] debug whether print the debug info
-   * @param[in] flag used to get the calling phase, e.g., forward of training
-   * (kForward | kTrain).
-   * @return info string about this layer, which is printed into the log.
-   */
-  virtual const std::string ToString(bool debug, int flag);
-  /**
-   * @return partition dimension of this layer,
-   * - -1 for no partition.
-   * -  0 for partition on the data dimension, i.e., partitioning the mini-batch
-   *    into sub-mini-batches.
-   * -  1 for partition this layer on feature dimension, i.e., the feature
-   *    vector of each instance is partitioned into sub-vectors.
-   */
-  inline int partition_dim() const {
-    CHECK_LE(layer_conf_.partition_dim(), 1);
-    return layer_conf_.partition_dim();
-  }
-  /**
-   * @return the partition ID (i.e., the worker ID to which this layer is
-   * dispatched) of this layer, which is a sublayer partitioned from the
-   * original layer.
-   */
-  inline int partition_id() const { return layer_conf_.partition_id(); }
-  /**
-   * @return total number of partitions (i.e., sub-layers) of the original
-   * layer of this layer.
-   */
-  inline int num_partitions() const { return layer_conf_.num_partitions(); }
-  /**
-   * @return the type of this layer, only valid for built-in layer (types).
-   */
-  inline LayerType type() const { return layer_conf_.type(); }
-  /**
-   * @return user-defined layer type.
-   */
-  inline const std::string& user_type() const {
-    return layer_conf_.user_type();
-  }
-  /**
-   * Return name of this layer
-   */
-  inline const std::string& name() const { return layer_conf_.name(); }
-  /**
-   * @param[in] from pointer to one of the dst layers. Some layers have more
-   * than one data Blob. In this case, this argument identifies the layer
-   * that is requesting the data Blob.
-   * @return a const ref for Blob storing feature values of this layer.
-   */
-  virtual const Blob<float>& data(const Layer* from) const {
-    return data_;
-  }
-  /**
-   * @see data().
-   * @return the pointer to the Blob storing feature values of this layer.
-   */
-  virtual Blob<float>* mutable_data(const Layer* from) {
-    return &data_;
-  }
-  /**
-   * @return auxiliary data, e.g., image label.
-   */
-  virtual const vector<AuxType>& aux_data(const Layer* from = nullptr) const {
-    return aux_data_;
-  }
-  /**
-   * @see data().
-   * @return the const ref of the Blob for the gradient of this layer, mainly
-   * used in BP algorithm.
-   */
-  virtual const Blob<float>& grad(const Layer* from) const {
-    return grad_;
-  }
-  /**
-   * @see data().
-   * @return a pointer to the Blob storing gradients of this layer, mainly
-   * used in BP algorithm.
-   */
-  virtual Blob<float>* mutable_grad(const Layer* from) {
-    return &grad_;
-  }
-
- protected:
-  LayerProto layer_conf_;
-  Blob<float> data_, grad_;
-  vector<AuxType> aux_data_;
-};
-
-/**
- * Base layer for connecting layers when neural net is partitioned.
- */
-class ConnectionLayer : virtual public Layer {
-  // defined as a layer category
-};
-
-/**
- * Base layer for getting input data. May include layers for loading records,
- * parsing records.
- */
-class InputLayer : virtual public Layer {
- public:
-  void ComputeGradient(int flag, const vector<Layer*>& srclayers) override {}
-  Blob<float>* mutable_grad(const Layer* layer) override {
-    // LOG(FATAL) << "Input layer has no gradient blob";
-    return nullptr;
-  }
-  const Blob<float>& grad(const Layer* from) const override {
-    // LOG(FATAL) << "Input layer has no gradient blob";
-    return grad_;
-  }
-};
-
-
-/**
- * Base layer for calculating loss and doing BackPropagation.
- */
-class LossLayer : virtual public Layer {
- public:
-  const std::string ToString(bool debug, int flag) override;
-  Blob<float>* mutable_grad(const Layer* layer) override {
-    LOG(FATAL) << "Loss layer has no gradient blob";
-    return nullptr;
-  }
-  const Blob<float>& grad(const Layer* from) const override {
-    LOG(FATAL) << "Loss layer has no gradient blob";
-    return grad_;
-  }
- protected:
-  Metric metric_;
-};
-
-/**
- * Base layer for feature transformation, e.g., ConvolutionLayer, PoolingLayer,
- * etc.
- */
-class NeuronLayer : virtual public Layer {
-  // defined as a layer category
-};
-
-/**
- * Base layer for collecting features into disk file, HTTP stream, etc.
- */
-class OutpuLayer : virtual public Layer {
- public:
-  void ComputeGradient(int flag, const vector<Layer*>& srclayers) override {}
-  Blob<float>* mutable_grad(const Layer* layer) override {
-    LOG(FATAL) << "Output layer has no gradient blob";
-    return nullptr;
-  }
-  const Blob<float>& grad(const Layer* from) const override {
-    LOG(FATAL) << "Output layer has no gradient blob";
-    return grad_;
-  }
-};
-
-}  // namespace singa
-
-#include "neuralnet/connection_layer.h"
-#include "neuralnet/input_layer.h"
-#include "neuralnet/loss_layer.h"
-#include "neuralnet/neuron_layer.h"
-#include "neuralnet/output_layer.h"
-
-#endif  // SINGA_NEURALNET_LAYER_H_
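
A minimal sketch of a user-defined layer against the interface above (the
header moves to include/singa/neuralnet/layer.h in this commit); ReshapeLike()
and CopyFrom() are assumed members of Blob<float>, and factory registration is
omitted:

    #include <vector>
    #include "singa/neuralnet/layer.h"

    namespace singa {

    // Identity layer: forwards the src feature and passes the gradient back.
    class IdentityLayer : public NeuronLayer {
     public:
      void Setup(const LayerProto& conf,
                 const vector<Layer*>& srclayers) override {
        Layer::Setup(conf, srclayers);
        data_.ReshapeLike(srclayers.at(0)->data(this));  // assumed Blob API
        grad_.ReshapeLike(data_);
      }
      void ComputeFeature(int flag, const vector<Layer*>& srclayers) override {
        data_.CopyFrom(srclayers.at(0)->data(this));     // assumed Blob API
      }
      void ComputeGradient(int flag, const vector<Layer*>& srclayers) override {
        srclayers.at(0)->mutable_grad(this)->CopyFrom(grad_);
      }
    };

    }  // namespace singa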

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/239ed217/include/neuralnet/loss_layer.h
----------------------------------------------------------------------
diff --git a/include/neuralnet/loss_layer.h b/include/neuralnet/loss_layer.h
deleted file mode 100644
index a48a8e7..0000000
--- a/include/neuralnet/loss_layer.h
+++ /dev/null
@@ -1,74 +0,0 @@
-/************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
-
-#ifndef SINGA_NEURALNET_LOSS_LAYER_H_
-#define SINGA_NEURALNET_LOSS_LAYER_H_
-
-#include <vector>
-#include "neuralnet/layer.h"
-
-/**
- * @file this file includes the declarations of layers that inherit the base
- * LossLayer for measuring the objective training loss.
- */
-namespace singa {
-using std::vector;
-/**
- * Squared Euclidean loss as @f$0.5 ||p - t||^2@f$, where p is the prediction
- * and t is the ground truth.
- */
-class EuclideanLossLayer : public LossLayer {
- public:
-  void Setup(const LayerProto& conf, const vector<Layer*>& srclayers) override;
-  void ComputeFeature(int flag, const vector<Layer*>& srclayers) override;
-  void ComputeGradient(int flag, const vector<Layer*>& srclayers) override;
-};
-
-/**
- * Cross-entropy loss applied to the probabilities computed from Softmax.
- * @f$ L_i = -\log P_{t_i} @f$, where @f$ t_i \in [0, C) @f$ is the label of
- * the i-th object and C is the total number of classes.
- */
-class SoftmaxLossLayer : public LossLayer {
- public:
-  void Setup(const LayerProto& conf, const vector<Layer*>& srclayers) override;
-  void ComputeFeature(int flag, const vector<Layer*>& srclayers) override;
-  void ComputeGradient(int flag, const vector<Layer*>& srclayers) override;
-
-  /**
-   * Softmax is not recommended for partition because it requires the whole
-   * src layer for normalization.
-   */
-  ConnectionType src_neuron_connection(int k) const override {
-    // CHECK_LT(k, srclayers_.size());
-    return kOneToAll;
-  }
-
- private:
-  int batchsize_;
-  int dim_;
-  float scale_;
-  int topk_;
-};
-
-}  // namespace singa
-
-#endif  // SINGA_NEURALNET_LOSS_LAYER_H_
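
The cross-entropy definition above can be checked with a small standalone
function; this illustrates the math only and is not SoftmaxLossLayer's code:

    #include <algorithm>
    #include <cmath>
    #include <vector>

    // Numerically stable -log softmax(logits)[label], i.e., L_i = -log P_{t_i}.
    float SoftmaxCrossEntropy(const std::vector<float>& logits, int label) {
      float max_logit = *std::max_element(logits.begin(), logits.end());
      float sum = 0.f;
      for (float v : logits) sum += std::exp(v - max_logit);
      return -(logits[label] - max_logit - std::log(sum));
    }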

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/239ed217/include/neuralnet/neuralnet.h
----------------------------------------------------------------------
diff --git a/include/neuralnet/neuralnet.h b/include/neuralnet/neuralnet.h
deleted file mode 100644
index a202f44..0000000
--- a/include/neuralnet/neuralnet.h
+++ /dev/null
@@ -1,118 +0,0 @@
-/************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
-
-#ifndef SINGA_NEURALNET_NEURALNET_H_
-#define SINGA_NEURALNET_NEURALNET_H_
-
-#include <string>
-#include <vector>
-#include <unordered_map>
-
-#include "neuralnet/layer.h"
-#include "proto/job.pb.h"
-#include "utils/factory.h"
-#include "utils/graph.h"
-
-namespace singa {
-/**
- * The neural network is constructed from user configurations in NetProto.
- *
- * Some layers, e.g., SplitLayer and BridgeSrcLayer/BridgeDstLayer
- * will be added implicitly to partition the neural network.
- * TODO(wangwei) create wrappers for popular models, e.g., MLP, CNN.
- */
-class NeuralNet {
- public:
-  /**
-   * Create the neural network for training, test or validation.
-   *
-   * Parameters of the test/validation net can be shared with those of the
-   * training net after setup (done outside of this function).
-   *
-   * @param net_conf proto for the neural network
-   * @param phase test/training/validation
-   * @param npartitions num of partitions, do partitioning if num > 1
-   * @return pointer to a neural net
-   */
-  static NeuralNet* Create(const NetProto& net_conf, Phase phase,
-                           int npartitions);
-
-  /**
-   * Construct the net structure from the protocol buffer configuration.
-   * @param net_conf neural net config
-   * @param num_partitions number of partitions; 1 for no partitioning.
-   */
-  NeuralNet(NetProto net_conf, int num_partitions);
-  ~NeuralNet();
-  /**
-   * To display the adjacency layers
-  std::string ToAdjacency();
-   */
-  /**
-   * Share memory of parameter values from other neuralnet
-   */
-  void ShareParamsFrom(NeuralNet* other);
-  inline const std::vector<Layer*>& layers() const { return layers_; }
-  inline const std::vector<Param*>& params() const { return params_; }
-  inline Layer* name2layer(std::string name) const {
-    CHECK(name2layer_.find(name) != name2layer_.end())
-      << "No layer with name " << name;
-    return name2layer_.at(name);
-  }
-  inline const std::vector<Layer*>& srclayers(const Layer* layer) const {
-    CHECK(src_map_.find(layer) != src_map_.end())
-      << "layer (" << layer->name() << " ) has no source layers";
-    return src_map_.at(layer);
-  }
-  inline Param* paramid2param(int id) const { return paramid2param_.at(id); }
-
- protected:
-  /**
-   * Create a neural net graph, one node for each layer.
-   *
-   * Partition the graph if num_partitions > 1; each layer is sliced
-   * according to its own partition setting.
-   * @param netproto neural net config
-   * @param num_partitions number of partitions
-   * @return neural net graph
-   */
-  Graph* CreateGraph(const NetProto& netproto, int num_partitions);
-  /**
-   * Create neural net from graph, one layer per node.
-   */
-  void CreateNetFromGraph(Graph* graph, int num_partitions);
-  /**
-   * prepare data structures, e.g., params_, layers_, etc.
-   */
-  void PrepareDataStructures();
-
- protected:
-  std::vector<Layer*> layers_;
-  std::vector<Param*> params_;
-
-  std::unordered_map<std::string, Layer*> name2layer_;
-  std::unordered_map<int, Param*> paramid2param_;
-  std::unordered_map<const Layer*, std::vector<Layer*>> src_map_;
-};
-
-}  // namespace singa
-
-#endif  // SINGA_NEURALNET_NEURALNET_H_
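
A usage sketch of the factory declared above; kTrain is assumed to be a valid
Phase value, and NetProto is assumed to be visible via the generated
proto/job.pb.h header that neuralnet.h includes:

    #include <iostream>
    #include "singa/neuralnet/neuralnet.h"

    void BuildAndInspect(const singa::NetProto& net_conf) {
      // One partition, i.e., no partitioning of the net.
      singa::NeuralNet* net =
          singa::NeuralNet::Create(net_conf, singa::kTrain, 1);
      for (singa::Layer* layer : net->layers())
        std::cout << layer->name() << "\n";
      delete net;
    }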

