Repository: incubator-singa
Updated Branches:
  refs/heads/master 71819abab -> bd2e3453c


SINGA-106 Add dummy layer for test purposes

The dummy layer can be used as an input, neuron, or output layer to construct a
simple neuralnet when the focus is on testing one specific layer.

* Use as neuron layer (default):
  In ComputeFeature, it copies the source layer's data into its own data blob.
  In ComputeGradient, it copies its own grad blob into the source layer's grad.

* Use as input layer:
  It creates the data and grad blobs with the configured shape.
  In ComputeFeature, it fills data_ with random values in [0, 1).
  To use (see the config sketch below):
  - set dummy_conf.input to true
  - add dummy_conf.shape

* Use as output layer:
  In ComputeGradient, it fills grad_ with random values in [0, 1).
  To use:
  - set dummy_conf.output to true
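
For illustration, a minimal job.conf fragment that wires a dummy input and a
dummy output around a layer under test could look as follows. The kDummy type
name, the layer names, and the srclayers wiring are illustrative assumptions;
only the dummy_conf fields (input, output, shape) are defined by this commit:

  layer {
    name: "dummy_input"
    type: kDummy            # assumed type name for the dummy layer
    dummy_conf {
      input: true
      shape: 10
      shape: 20
    }
  }
  layer {
    name: "layer_under_test"
    srclayers: "dummy_input"
    ...
  }
  layer {
    name: "dummy_output"
    type: kDummy
    srclayers: "layer_under_test"
    dummy_conf {
      output: true
    }
  }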


Project: http://git-wip-us.apache.org/repos/asf/incubator-singa/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-singa/commit/4664b6bb
Tree: http://git-wip-us.apache.org/repos/asf/incubator-singa/tree/4664b6bb
Diff: http://git-wip-us.apache.org/repos/asf/incubator-singa/diff/4664b6bb

Branch: refs/heads/master
Commit: 4664b6bb351c6a48a19c52b5c559dde76bf3002b
Parents: 71819ab
Author: WANG Sheng <[email protected]>
Authored: Thu Dec 3 16:39:57 2015 +0800
Committer: WANG Sheng <[email protected]>
Committed: Thu Dec 3 17:32:57 2015 +0800

----------------------------------------------------------------------
 Makefile.am                                    |  2 +
 include/singa/neuralnet/neuron_layer/dummy.h   | 51 +++++++++++++
 include/singa/neuralnet/neuron_layer/sigmoid.h |  3 -
 src/neuralnet/neuron_layer/dummy.cc            | 72 +++++++++++++++++++
 src/proto/job.proto                            |  9 +++
 src/test/test_connection_layers.cc             | 79 +++++++++++++++++++++
 6 files changed, 213 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/4664b6bb/Makefile.am
----------------------------------------------------------------------
diff --git a/Makefile.am b/Makefile.am
index b863c2e..0a2c544 100644
--- a/Makefile.am
+++ b/Makefile.am
@@ -47,6 +47,7 @@ SINGA_SRCS := src/driver.cc \
               src/neuralnet/loss_layer/softmax.cc \
               src/neuralnet/neuron_layer/argsort.cc \
               src/neuralnet/neuron_layer/convolution.cc \
+              src/neuralnet/neuron_layer/dummy.cc \
               src/neuralnet/neuron_layer/dropout.cc \
               src/neuralnet/neuron_layer/inner_product.cc \
               src/neuralnet/neuron_layer/lrn.cc \
@@ -119,6 +120,7 @@ TEST_SRCS := include/gtest/gtest_main.cc \
                                                 src/test/test_paramslicer.cc \
                                                 src/test/test_kvfile.cc \
                                                 src/test/test_store.cc \
+                                                src/test/test_connection_layers.cc \
                                                 src/test/test_record_input_layer.cc \
                                                 src/test/test_csv_input_layer.cc
 

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/4664b6bb/include/singa/neuralnet/neuron_layer/dummy.h
----------------------------------------------------------------------
diff --git a/include/singa/neuralnet/neuron_layer/dummy.h b/include/singa/neuralnet/neuron_layer/dummy.h
new file mode 100644
index 0000000..3177b7e
--- /dev/null
+++ b/include/singa/neuralnet/neuron_layer/dummy.h
@@ -0,0 +1,51 @@
+/************************************************************
+*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*   http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing,
+* software distributed under the License is distributed on an
+* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+* KIND, either express or implied.  See the License for the
+* specific language governing permissions and limitations
+* under the License.
+*
+*************************************************************/
+
+#ifndef SINGA_NEURALNET_NEURON_LAYER_DUMMY_H_
+#define SINGA_NEURALNET_NEURON_LAYER_DUMMY_H_
+
+#include <random>
+#include <vector>
+#include "singa/neuralnet/layer.h"
+#include "singa/proto/job.pb.h"
+
+namespace singa {
+/**
+ * This layer is a dummy layer that does no real work.
+ * It is used for testing purposes only.
+ *
+ * Used as an input layer, it generates random data;
+ * used as an output layer, it generates random grad;
+ * used as a neuron layer, it replicates data and grad.
+ */
+class DummyLayer: public Layer {
+ public:
+  void Setup(const LayerProto& proto, const vector<Layer*>& srclayers) override;
+  void ComputeFeature(int flag, const vector<Layer*>& srclayers) override;
+  void ComputeGradient(int flag, const vector<Layer*>& srclayers) override;
+ private:
+  bool input_ = false;  // use as input layer
+  bool output_ = false;  // use as output layer
+};
+
+}  // namespace singa
+
+#endif  // SINGA_NEURALNET_NEURON_LAYER_DUMMY_H_

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/4664b6bb/include/singa/neuralnet/neuron_layer/sigmoid.h
----------------------------------------------------------------------
diff --git a/include/singa/neuralnet/neuron_layer/sigmoid.h b/include/singa/neuralnet/neuron_layer/sigmoid.h
index 88f15e9..3cf80e7 100644
--- a/include/singa/neuralnet/neuron_layer/sigmoid.h
+++ b/include/singa/neuralnet/neuron_layer/sigmoid.h
@@ -34,9 +34,6 @@ namespace singa {
  */
 class SigmoidLayer: public Layer {
  public:
-  using Layer::ComputeFeature;
-  using Layer::ComputeGradient;
-
   void Setup(const LayerProto& proto, const vector<Layer*>& srclayers) override;
   void ComputeFeature(int flag, const vector<Layer*>& srclayers) override;
   void ComputeGradient(int flag, const vector<Layer*>& srclayers) override;

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/4664b6bb/src/neuralnet/neuron_layer/dummy.cc
----------------------------------------------------------------------
diff --git a/src/neuralnet/neuron_layer/dummy.cc b/src/neuralnet/neuron_layer/dummy.cc
new file mode 100644
index 0000000..2ef702d
--- /dev/null
+++ b/src/neuralnet/neuron_layer/dummy.cc
@@ -0,0 +1,72 @@
+/************************************************************
+*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*   http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing,
+* software distributed under the License is distributed on an
+* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+* KIND, either express or implied.  See the License for the
+* specific language governing permissions and limitations
+* under the License.
+*
+*************************************************************/
+
+#include "singa/neuralnet/neuron_layer/dummy.h"
+#include <glog/logging.h>
+
+namespace singa {
+
+void DummyLayer::Setup(const LayerProto& proto,
+                       const vector<Layer*>& srclayers) {
+  Layer::Setup(proto, srclayers);
+  if (proto.dummy_conf().input()) {  // use as input layer
+    CHECK_EQ(srclayers.size(), 0);
+    input_ = true;
+    vector<int> shape;
+    for (int s : proto.dummy_conf().shape()) shape.push_back(s);
+    data_.Reshape(shape);
+    grad_.ReshapeLike(data_);
+  } else {
+    CHECK_EQ(srclayers.size(), 1);
+    data_.ReshapeLike(srclayers[0]->data(this));
+    grad_.ReshapeLike(srclayers[0]->grad(this));
+  }
+  if (proto.dummy_conf().output()) {  // use as output layer
+    output_ = true;
+  }
+}
+
+std::random_device rd;
+std::mt19937 gen(rd());
+std::uniform_real_distribution<> dis(0, 1);
+
+void DummyLayer::ComputeFeature(int flag, const vector<Layer*>& srclayers) {
+  if (input_) {
+    // fill data_ with random values in [0, 1)
+    for (int i = 0; i < data_.count(); ++i)
+      data_.mutable_cpu_data()[i] = dis(gen);
+  }
+  if (srclayers.size() > 0)
+    data_.CopyFrom(srclayers[0]->data(this));
+}
+
+void DummyLayer::ComputeGradient(int flag, const vector<Layer*>& srclayers) {
+  if (output_) {
+    // fill grad_ with random values in [0, 1)
+    for (int i = 0; i < data_.count(); ++i)
+      grad_.mutable_cpu_data()[i] = dis(gen);
+  }
+  if (srclayers.size() > 0) {
+    srclayers[0]->mutable_grad(this)->CopyFrom(grad_);
+  }
+}
+
+}  // namespace singa

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/4664b6bb/src/proto/job.proto
----------------------------------------------------------------------
diff --git a/src/proto/job.proto b/src/proto/job.proto
index 6308c2e..d3f51b2 100644
--- a/src/proto/job.proto
+++ b/src/proto/job.proto
@@ -194,6 +194,8 @@ message LayerProto {
   optional ConvolutionProto convolution_conf = 30;
   // configuration for concatenation layer
   optional ConcateProto concate_conf = 31;
+  // configuration for dummy layer
+  optional DummyProto dummy_conf = 53;
   // configuration for dropout layer
   optional DropoutProto dropout_conf = 33;
   // configuration for inner product layer
@@ -394,6 +396,13 @@ message MnistProto {
   optional int32 elastic_freq = 36 [default = 0];
 }
 
+message DummyProto {
+  optional bool input = 1 [default = false];   // use as input layer
+  optional bool output = 2 [default = false];  // use as output layer
+  // shape of data and grad blobs
+  repeated int32 shape = 3;
+}
+
 // Message that stores parameters used by DropoutLayer
 message DropoutProto {
   // dropout ratio

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/4664b6bb/src/test/test_connection_layers.cc
----------------------------------------------------------------------
diff --git a/src/test/test_connection_layers.cc b/src/test/test_connection_layers.cc
new file mode 100644
index 0000000..3d931b3
--- /dev/null
+++ b/src/test/test_connection_layers.cc
@@ -0,0 +1,79 @@
+/************************************************************
+*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*   http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing,
+* software distributed under the License is distributed on an
+* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+* KIND, either express or implied.  See the License for the
+* specific language governing permissions and limitations
+* under the License.
+*
+*************************************************************/
+
+#include "gtest/gtest.h"
+#include "singa/neuralnet/connection_layer/bridge.h"
+#include "singa/neuralnet/neuron_layer/dummy.h"
+#include "singa/proto/job.pb.h"
+
+using namespace singa;
+
+TEST(ConnectionLayerTest, DummyTest) {
+  // use dummy as input layer
+  LayerProto proto_in;
+  vector<Layer*> src_in;
+  proto_in.set_name("dummy_input");
+  proto_in.mutable_dummy_conf()->set_input(true);
+  proto_in.mutable_dummy_conf()->add_shape(10);
+  proto_in.mutable_dummy_conf()->add_shape(20);
+  DummyLayer in;
+  in.Setup(proto_in, src_in);
+  ASSERT_EQ(in.data(nullptr).shape(0), 10);
+  ASSERT_EQ(in.data(nullptr).shape(1), 20);
+  in.ComputeFeature(0, src_in);
+ 
+  // use dummy as neuron layer
+  LayerProto proto_neu;
+  vector<Layer*> src_neu;
+  src_neu.push_back(static_cast<Layer*>(&in));
+  proto_neu.set_name("dummy_neuron");
+  proto_neu.mutable_dummy_conf();
+  DummyLayer neu;
+  neu.Setup(proto_neu, src_neu);
+  ASSERT_EQ(neu.data(nullptr).shape(0), 10);
+  ASSERT_EQ(neu.data(nullptr).shape(1), 20);
+  neu.ComputeFeature(0, src_neu);
+  ASSERT_EQ(in.data(nullptr).count(), neu.data(nullptr).count());
+  for (int i = 0; i < in.data(nullptr).count(); ++i)
+    ASSERT_EQ(in.data(nullptr).cpu_data()[i], neu.data(nullptr).cpu_data()[i]);
+
+  // use dummy as output layer
+  LayerProto proto_out;
+  vector<Layer*> src_out;
+  src_out.push_back(static_cast<Layer*>(&neu));
+  proto_out.set_name("dummy_output");
+  proto_out.mutable_dummy_conf()->set_output(true);
+  DummyLayer out;
+  out.Setup(proto_out, src_out);
+  ASSERT_EQ(out.data(nullptr).shape(0), 10);
+  ASSERT_EQ(out.data(nullptr).shape(1), 20);
+  out.ComputeFeature(0, src_out);
+  ASSERT_EQ(in.data(nullptr).count(), out.data(nullptr).count());
+  for (int i = 0; i < in.data(nullptr).count(); ++i)
+    ASSERT_EQ(in.data(nullptr).cpu_data()[i], out.data(nullptr).cpu_data()[i]);
+ 
+  // test for computing gradient
+  out.ComputeGradient(0, src_out);
+  neu.ComputeGradient(0, src_neu);
+  in.ComputeGradient(0, src_in);
+  for (int i = 0; i < in.grad(nullptr).count(); ++i)
+    ASSERT_EQ(in.grad(nullptr).cpu_data()[i], out.grad(nullptr).cpu_data()[i]);
+}
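
For reference, a minimal sketch of the intended workflow: using dummy layers to
exercise another layer, here SigmoidLayer from this repository. That
SigmoidLayer needs no layer-specific configuration is an assumption; the sketch
otherwise follows the same conventions and APIs as the test above:

  #include "singa/neuralnet/neuron_layer/dummy.h"
  #include "singa/neuralnet/neuron_layer/sigmoid.h"
  #include "singa/proto/job.pb.h"

  using namespace singa;

  void TestSigmoidWithDummies() {
    // dummy input layer feeding random data of shape 10x20
    LayerProto proto_in;
    proto_in.set_name("dummy_input");
    proto_in.mutable_dummy_conf()->set_input(true);
    proto_in.mutable_dummy_conf()->add_shape(10);
    proto_in.mutable_dummy_conf()->add_shape(20);
    DummyLayer in;
    vector<Layer*> src_in;
    in.Setup(proto_in, src_in);
    in.ComputeFeature(0, src_in);

    // the layer under test (assumed to need no extra configuration)
    LayerProto proto_sig;
    proto_sig.set_name("sigmoid");
    SigmoidLayer sig;
    vector<Layer*> src_sig{&in};
    sig.Setup(proto_sig, src_sig);
    sig.ComputeFeature(0, src_sig);

    // dummy output layer pushing random grad back into the sigmoid
    LayerProto proto_out;
    proto_out.set_name("dummy_output");
    proto_out.mutable_dummy_conf()->set_output(true);
    DummyLayer out;
    vector<Layer*> src_out{&sig};
    out.Setup(proto_out, src_out);
    out.ComputeGradient(0, src_out);
    sig.ComputeGradient(0, src_sig);
    // in.grad(nullptr) now holds the gradient w.r.t. the random input
  }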
