SINGA-10 Add Support for Recurrent Neural Networks (RNN)

Add README.md with instructions for the RNNLM example;
Revise the following files to remove warnings reported by the code checker, cpplint.py:
   create_shard.cc, rnnlm.h, rnnlm.cc, rnnlm.proto
Add the license paragraph to the following files:
   create_shard.cc, rnnlm.h, rnnlm.cc, rnnlm.proto, Makefile.example


Project: http://git-wip-us.apache.org/repos/asf/incubator-singa/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-singa/commit/c1c6a2ed
Tree: http://git-wip-us.apache.org/repos/asf/incubator-singa/tree/c1c6a2ed
Diff: http://git-wip-us.apache.org/repos/asf/incubator-singa/diff/c1c6a2ed

Branch: refs/heads/master
Commit: c1c6a2ed6c39991bdaf36629b47dc89e8ac67233
Parents: e8e07f1
Author: chonho <[email protected]>
Authored: Wed Sep 16 17:43:37 2015 +0800
Committer: Wei Wang <[email protected]>
Committed: Fri Sep 18 16:46:41 2015 +0800

----------------------------------------------------------------------
 examples/rnnlm/Makefile.example |  21 ++++++
 examples/rnnlm/README.md        |  52 +++++++++++++++
 examples/rnnlm/create_shard.cc  | 123 ++++++++++++++++++++---------------
 examples/rnnlm/main.cc          |  24 ++++++-
 examples/rnnlm/rnnlm.cc         |  56 +++++++++++-----
 examples/rnnlm/rnnlm.h          |  32 +++++++--
 examples/rnnlm/rnnlm.proto      |  21 ++++++
 7 files changed, 255 insertions(+), 74 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/c1c6a2ed/examples/rnnlm/Makefile.example
----------------------------------------------------------------------
diff --git a/examples/rnnlm/Makefile.example b/examples/rnnlm/Makefile.example
index 83e2522..0e2333f 100644
--- a/examples/rnnlm/Makefile.example
+++ b/examples/rnnlm/Makefile.example
@@ -1,3 +1,24 @@
+#/************************************************************
+#*
+#* Licensed to the Apache Software Foundation (ASF) under one
+#* or more contributor license agreements.  See the NOTICE file
+#* distributed with this work for additional information
+#* regarding copyright ownership.  The ASF licenses this file
+#* to you under the Apache License, Version 2.0 (the
+#* "License"); you may not use this file except in compliance
+#* with the License.  You may obtain a copy of the License at
+#*
+#*   http://www.apache.org/licenses/LICENSE-2.0
+#*
+#* Unless required by applicable law or agreed to in writing,
+#* software distributed under the License is distributed on an
+#* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+#* KIND, either express or implied.  See the License for the
+#* specific language governing permissions and limitations
+#* under the License.
+#*
+#*************************************************************/
+
 MSHADOW_FLAGS :=-DMSHADOW_USE_CUDA=0 -DMSHADOW_USE_CBLAS=1 -DMSHADOW_USE_MKL=0
 
 libs :=singa glog protobuf

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/c1c6a2ed/examples/rnnlm/README.md
----------------------------------------------------------------------
diff --git a/examples/rnnlm/README.md b/examples/rnnlm/README.md
new file mode 100644
index 0000000..9e83686
--- /dev/null
+++ b/examples/rnnlm/README.md
@@ -0,0 +1,52 @@
+This example trains the [RNN model](http://www.fit.vutbr.cz/research/groups/speech/publi/2010/mikolov_interspeech2010_IS100722.pdf) proposed by Tomas Mikolov for [language modeling](https://en.wikipedia.org/wiki/Language_model) over a text dataset containing 71350 words, provided at [RNNLM Toolkit](https://f25ea9ccb7d3346ce6891573d543960492b92c30.googledrive.com/host/0ByxdPXuxLPS5RFM5dVNvWVhTd0U).
+The training objective (loss) is to minimize the [perplexity per word](https://en.wikipedia.org/wiki/Perplexity), which is equivalent to maximizing the probability of predicting the next word given the current word in a sentence.
+The purpose of this example is to show users how to implement and use their own layers for RNN in SINGA.
+The example RNN model consists of six layers, namely RnnDataLayer, WordLayer, RnnLabelLayer, EmbeddingLayer, HiddenLayer, and OutputLayer.
+
+## File description
+
+The files in this folder include:
+
+* rnnlm.proto, definition of the configuration protocol of the layers.
+* rnnlm.h, declaration of the layers.
+* rnnlm.cc, definition of the layers.
+* main.cc, main function that registers the layers.
+* Makefile.example, Makefile for compiling all source code in this folder.
+* job.conf, the job configuration for training the RNN language model.
+
+
+## Data preparation
+
+To use the RNNLM dataset, we can download it and create the DataShard by typing
+
+    # in rnnlm/ folder
+    cp Makefile.example Makefile
+    make download
+    make create
+
+## Compilation
+
+The *Makefile.example* contains instructions for compiling the source code.
+
+    # in rnnlm/ folder
+    cp Makefile.example Makefile
+    make rnnlm
+
+It will generate an executable file *rnnlm.bin*.
+
+## Running
+
+Make sure that the example job configuration file, *job.conf*, is present.
+
+Before running SINGA, we need to export `LD_LIBRARY_PATH` so that it
+includes libsinga.so, as follows.
+
+    # at the root folder of SINGA
+    export LD_LIBRARY_PATH=.libs:$LD_LIBRARY_PATH
+
+Then, we can run SINGA as follows. 
+
+    # at the root folder of SINGA
+    ./bin/singa-run.sh -exec examples/rnnlm/rnnlm.bin -conf examples/rnnlm/job.conf
+
+You will see the values of loss and ppl at each training step.

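The perplexity objective in the README above reduces to a short formula: PPL = exp(-(1/N) * sum_i log p(w_i | context)). A minimal standalone C++ sketch (not part of this commit; the helper name is hypothetical):

    // Hypothetical helper illustrating the README's objective: per-word
    // perplexity over the probabilities the model assigned to each word.
    #include <cmath>
    #include <vector>

    double PerplexityPerWord(const std::vector<double>& word_probs) {
      double log_sum = 0.0;
      for (double p : word_probs)
        log_sum += std::log(p);  // accumulate log-likelihood
      return std::exp(-log_sum / word_probs.size());
    }

Minimizing this quantity is the same as maximizing the average log-probability of the next word, which is the equivalence the README states.
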
http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/c1c6a2ed/examples/rnnlm/create_shard.cc
----------------------------------------------------------------------
diff --git a/examples/rnnlm/create_shard.cc b/examples/rnnlm/create_shard.cc
index f337350..4da365b 100644
--- a/examples/rnnlm/create_shard.cc
+++ b/examples/rnnlm/create_shard.cc
@@ -38,15 +38,8 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 //    http://www.rnnlm.org/
 //
 // Usage:
-//    create_shard.bin -train train_file -class_size [-debug] [-valid valid_file] [-test test_file]
-
-#include "utils/data_shard.h"
-#include "utils/common.h"
-#include "proto/common.pb.h"
-#include "singa.h"
-#include "rnnlm.pb.h"
-
-#define MAX_STRING 100
+//    create_shard.bin -train [train_file] -valid [valid_file]
+//                     -test [test_file] -class_size [# of classes]
 
 #include <cstring>
 #include <cstdlib>
@@ -55,7 +48,18 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #include <algorithm>
 #include <fstream>
 
-using namespace std;
+#include "utils/data_shard.h"
+#include "utils/common.h"
+#include "proto/common.pb.h"
+#include "./rnnlm.pb.h"
+
+#define MAX_STRING 100
+#define BUFFER_LEN 32
+#define NL_STRING  "</s>"
+
+using std::string;
+using std::max;
+using std::min;
 using singa::DataShard;
 
 struct vocab_word {
@@ -101,26 +105,28 @@ int searchVocab(char *word) {
     if (vocab_hash[hash] == -1) return -1;
     if (!strcmp(word, vocab[vocab_hash[hash]].word)) return vocab_hash[hash];
 
-    for (a = 0; a < vocab_size; a++) {                //search in vocabulary
+    for (a = 0; a < vocab_size; a++) {   // search in vocabulary
         if (!strcmp(word, vocab[a].word)) {
             vocab_hash[hash] = a;
             return a;
         }
     }
 
-    return -1;                            //return OOV if not found
+    return -1;   // return OOV if not found
 }
 
 int addWordToVocab(char *word) {
     unsigned int hash;
 
-    strcpy(vocab[vocab_size].word, word);
+    snprintf(vocab[vocab_size].word, strlen(word)+1, "%s", word);
     vocab[vocab_size].cn = 0;
     vocab_size++;
 
-    if (vocab_size + 2 >= vocab_max_size) {        //reallocate memory if needed
+    if (vocab_size + 2 >= vocab_max_size) {   // reallocate memory if needed
         vocab_max_size += 100;
-        vocab = (struct vocab_word *) realloc(vocab, vocab_max_size * sizeof(struct vocab_word));
+        vocab = (struct vocab_word *) realloc(
+                               vocab,
+                               vocab_max_size * sizeof(struct vocab_word));
     }
 
     hash = getWordHash(word);
@@ -144,17 +150,19 @@ void readWord(char *word, FILE *fin) {
             }
 
             if (ch == '\n') {
-                strcpy(word, (char *) "</s>");
+                snprintf(word, strlen(NL_STRING) + 1,
+                         "%s", const_cast<char *>(NL_STRING));
                 return;
+            } else {
+                continue;
             }
-            else continue;
         }
 
-        word[a] = char(ch);
+        word[a] = static_cast<char>(ch);
         a++;
 
         if (a >= MAX_STRING) {
-            //printf("Too long word found!\n");   //truncate too long words
+            // printf("Too long word found!\n");   //truncate too long words
             a--;
         }
     }
@@ -167,7 +175,8 @@ void sortVocab() {
 
     for (a = 1; a < vocab_size; a++) {
         max = a;
-        for (b = a + 1; b < vocab_size; b++) if (vocab[max].cn < vocab[b].cn) max = b;
+        for (b = a + 1; b < vocab_size; b++)
+            if (vocab[max].cn < vocab[b].cn) max = b;
 
         swap = vocab[max];
         vocab[max] = vocab[a];
@@ -186,7 +195,7 @@ int learnVocabFromTrainFile() {
 
     vocab_size = 0;
 
-    addWordToVocab((char *) "</s>");
+    addWordToVocab(const_cast<char *>(NL_STRING));
 
     train_wcn = 0;
     while (1) {
@@ -199,7 +208,9 @@ int learnVocabFromTrainFile() {
         if (i == -1) {
             a = addWordToVocab(word);
             vocab[a].cn = 1;
-        } else vocab[i].cn++;
+        } else {
+            vocab[i].cn++;
+        }
     }
 
     sortVocab();
@@ -209,8 +220,6 @@ int learnVocabFromTrainFile() {
         printf("Words in train file: %d\n", train_wcn);
     }
 
-    //train_words = train_wcn;
-
     fclose(fin);
     return 0;
 }
@@ -224,17 +233,18 @@ int splitClasses() {
     a = 0;
     b = 0;
 
-    class_start = (int *) calloc(class_size, sizeof(int));
+    class_start = reinterpret_cast<int *>(calloc(class_size, sizeof(int)));
     memset(class_start, 0x7f, sizeof(int) * class_size);
-    class_end = (int *) calloc(class_size, sizeof(int));
+    class_end = reinterpret_cast<int *>(calloc(class_size, sizeof(int)));
     memset(class_end, 0, sizeof(int) * class_size);
 
     if (old_classes) {    // old classes
-        for (i = 0; i < vocab_size; i++) b += vocab[i].cn;
+        for (i = 0; i < vocab_size; i++)
+            b += vocab[i].cn;
         for (i = 0; i < vocab_size; i++) {
-            df += vocab[i].cn / (double) b;
+            df += vocab[i].cn / static_cast<double>(b);
             if (df > 1) df = 1;
-            if (df > (a + 1) / (double) class_size) {
+            if (df > (a + 1) / static_cast<double>(class_size)) {
                 vocab[i].class_index = a;
                 if (a < class_size - 1) a++;
             } else {
@@ -242,12 +252,14 @@ int splitClasses() {
             }
         }
     } else {            // new classes
-        for (i = 0; i < vocab_size; i++) b += vocab[i].cn;
-        for (i = 0; i < vocab_size; i++) dd += sqrt(vocab[i].cn / (double) b);
+        for (i = 0; i < vocab_size; i++)
+            b += vocab[i].cn;
+        for (i = 0; i < vocab_size; i++)
+            dd += sqrt(vocab[i].cn / static_cast<double>(b));
         for (i = 0; i < vocab_size; i++) {
-            df += sqrt(vocab[i].cn / (double) b) / dd;
+            df += sqrt(vocab[i].cn / static_cast<double>(b)) / dd;
             if (df > 1) df = 1;
-            if (df > (a + 1) / (double) class_size) {
+            if (df > (a + 1) / static_cast<double>(class_size)) {
                 vocab[i].class_index = a;
                 if (a < class_size - 1) a++;
             } else {
@@ -257,7 +269,7 @@ int splitClasses() {
     }
 
     // after dividing classes, update class start and class end information
-    for(i = 0; i < vocab_size; i++)  {
+    for (i = 0; i < vocab_size; i++)  {
         a = vocab[i].class_index;
         class_start[a] = min(i, class_start[a]);
         class_end[a] = max(i + 1, class_end[a]);
@@ -266,13 +278,14 @@ int splitClasses() {
 }
 
 int init_class() {
-    //debug_mode = 1;
+    // debug_mode = 1;
     debug_mode = 0;
     vocab_max_size = 100;  // largest length value for each word
     vocab_size = 0;
-    vocab = (struct vocab_word *) calloc(vocab_max_size, sizeof(struct vocab_word));
+    vocab = (struct vocab_word *) calloc(vocab_max_size,
+                                         sizeof(struct vocab_word));
     vocab_hash_size = 100000000;
-    vocab_hash = (int *) calloc(vocab_hash_size, sizeof(int));
+    vocab_hash = reinterpret_cast<int *>(calloc(vocab_hash_size, sizeof(int)));
     old_classes = 1;
 
     // read vocab
@@ -288,11 +301,13 @@ int create_shard(const char *input_file, const char *output_file) {
     DataShard dataShard(output_file, DataShard::kCreate);
     singa::WordRecord wordRecord;
 
-    char word[MAX_STRING], str_buffer[32];
     FILE *fin;
     int a, i;
     fin = fopen(input_file, "rb");
+
     int wcnt = 0;
+    char str_buffer[BUFFER_LEN];
+    char word[MAX_STRING];
     while (1) {
         readWord(word, fin);
         if (feof(fin)) break;
@@ -306,7 +321,7 @@ int create_shard(const char *input_file, const char *output_file) {
             wordRecord.set_class_index(class_idx);
             wordRecord.set_class_start(class_start[class_idx]);
             wordRecord.set_class_end(class_end[class_idx]);
-            int length = snprintf(str_buffer, 32, "%05d", wcnt++);
+            int length = snprintf(str_buffer, BUFFER_LEN, "%05d", wcnt++);
             dataShard.Insert(string(str_buffer, length), wordRecord);
         }
     }
@@ -319,7 +334,9 @@ int create_shard(const char *input_file, const char *output_file) {
 int argPos(char *str, int argc, char **argv) {
     int a;
 
-    for (a = 1; a < argc; a++) if (!strcmp(str, argv[a])) return a;
+    for (a = 1; a < argc; a++)
+        if (!strcmp(str, argv[a]))
+            return a;
 
     return -1;
 }
@@ -328,23 +345,23 @@ int main(int argc, char **argv) {
     int i;
     FILE *f;
 
-    //set debug mode
-    i = argPos((char *) "-debug", argc, argv);
+    // set debug mode
+    i = argPos(const_cast<char *>("-debug"), argc, argv);
     if (i > 0) {
         debug_mode = 1;
         if (debug_mode > 0)
             printf("debug mode: %d\n", debug_mode);
     }
 
-    //search for train file
-    i = argPos((char *) "-train", argc, argv);
+    // search for train file
+    i = argPos(const_cast<char *>("-train"), argc, argv);
     if (i > 0) {
         if (i + 1 == argc) {
             printf("ERROR: training data file not specified!\n");
             return 0;
         }
 
-        strcpy(train_file, argv[i + 1]);
+        snprintf(train_file, strlen(argv[i + 1])+1, "%s", argv[i + 1]);
 
         if (debug_mode > 0)
             printf("train file: %s\n", train_file);
@@ -359,15 +376,15 @@ int main(int argc, char **argv) {
         printf("ERROR: training data must be set.\n");
     }
 
-    //search for valid file
-    i = argPos((char *) "-valid", argc, argv);
+    // search for valid file
+    i = argPos(const_cast<char *>("-valid"), argc, argv);
     if (i > 0) {
         if (i + 1 == argc) {
             printf("ERROR: validating data file not specified!\n");
             return 0;
         }
 
-        strcpy(valid_file, argv[i + 1]);
+        snprintf(valid_file, strlen(argv[i + 1])+1, "%s", argv[i + 1]);
 
         if (debug_mode > 0)
             printf("valid file: %s\n", valid_file);
@@ -381,15 +398,15 @@ int main(int argc, char **argv) {
         valid_mode = 1;
     }
 
-    //search for test file
-    i = argPos((char *) "-test", argc, argv);
+    // search for test file
+    i = argPos(const_cast<char *>("-test"), argc, argv);
     if (i > 0) {
         if (i + 1 == argc) {
             printf("ERROR: testing data file not specified!\n");
             return 0;
         }
 
-        strcpy(test_file, argv[i + 1]);
+        snprintf(test_file, strlen(argv[i + 1])+1, "%s", argv[i + 1]);
 
         if (debug_mode > 0)
             printf("test file: %s\n", test_file);
@@ -403,8 +420,8 @@ int main(int argc, char **argv) {
         test_mode = 1;
     }
 
-    //search for class size
-    i = argPos((char *) "-class_size", argc, argv);
+    // search for class size
+    i = argPos(const_cast<char *>("-class_size"), argc, argv);
     if (i > 0) {
         if (i + 1 == argc) {
             printf("ERROR: class size not specified!\n");

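Most of the cpplint-driven edits above swap strcpy for snprintf and C-style casts for C++ casts. A standalone sketch of the bounded-copy idea (not from the commit): note that the robust form bounds by the destination's capacity, whereas sizing by strlen(src)+1, as the commit does, quiets the checker but still assumes the source fits the destination.

    // Standalone sketch: snprintf bounded by the destination's size cannot
    // overrun the buffer; it truncates and always NUL-terminates.
    #include <cstdio>

    int main() {
      char dst[8];
      const char* src = "a-string-longer-than-dst";
      snprintf(dst, sizeof(dst), "%s", src);  // writes at most 7 chars + NUL
      printf("%s\n", dst);                    // prints "a-strin"
      return 0;
    }
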
http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/c1c6a2ed/examples/rnnlm/main.cc
----------------------------------------------------------------------
diff --git a/examples/rnnlm/main.cc b/examples/rnnlm/main.cc
index 3cb59f1..bf2a67a 100644
--- a/examples/rnnlm/main.cc
+++ b/examples/rnnlm/main.cc
@@ -1,3 +1,23 @@
+/************************************************************
+*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*   http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing,
+* software distributed under the License is distributed on an
+* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+* KIND, either express or implied.  See the License for the
+* specific language governing permissions and limitations
+* under the License.
+*
+*************************************************************/
 #include <string>
 #include "singa.h"
 #include "rnnlm.h"
@@ -7,11 +27,11 @@ int main(int argc, char **argv) {
   singa::Driver driver;
   driver.Init(argc, argv);
 
-  //if -resume in argument list, set resume to true; otherwise false
+  // if -resume in argument list, set resume to true; otherwise false
   int resume_pos = singa::ArgPos(argc, argv, "-resume");
   bool resume = (resume_pos != -1);
 
-  //  register all layers for rnnlm
+  // register all layers for rnnlm
   driver.RegisterLayer<singa::EmbeddingLayer, std::string>("kEmbedding");
   driver.RegisterLayer<singa::HiddenLayer, std::string>("kHidden");
   driver.RegisterLayer<singa::OutputLayer, std::string>("kOutput");

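main.cc above registers each custom layer under a string type name ("kEmbedding", "kHidden", ...) so the job configuration can instantiate layers by name. A hypothetical sketch of the idea behind such registration (SINGA's actual Driver internals differ; all names below are illustrative): a map from type name to a factory function.

    // Illustrative name-to-factory registry, not SINGA's implementation.
    #include <functional>
    #include <map>
    #include <memory>
    #include <string>

    struct Layer { virtual ~Layer() = default; };
    struct EmbeddingLayer : Layer {};

    std::map<std::string, std::function<std::unique_ptr<Layer>()>> registry;

    template <typename T>
    void RegisterLayer(const std::string& type) {
      registry[type] = [] { return std::unique_ptr<Layer>(new T()); };
    }

    int main() {
      RegisterLayer<EmbeddingLayer>("kEmbedding");
      std::unique_ptr<Layer> layer = registry["kEmbedding"]();  // create by name
      return 0;
    }
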
http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/c1c6a2ed/examples/rnnlm/rnnlm.cc
----------------------------------------------------------------------
diff --git a/examples/rnnlm/rnnlm.cc b/examples/rnnlm/rnnlm.cc
index 0ad29a6..9cad077 100644
--- a/examples/rnnlm/rnnlm.cc
+++ b/examples/rnnlm/rnnlm.cc
@@ -1,7 +1,31 @@
-#include "rnnlm.h"
-#include "rnnlm.pb.h"
+/************************************************************
+*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*   http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing,
+* software distributed under the License is distributed on an
+* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+* KIND, either express or implied.  See the License for the
+* specific language governing permissions and limitations
+* under the License.
+*
+*************************************************************/
+#include <string>
+#include <algorithm>
+#include <vector>
 #include "mshadow/tensor.h"
+#include "mshadow/tensor_expr.h"
 #include "mshadow/cxxnet_op.h"
+#include "rnnlm.h"
+#include "rnnlm.pb.h"
 
 namespace singa {
 using namespace mshadow;
@@ -11,7 +35,7 @@ using mshadow::Shape;
 using mshadow::Shape1;
 using mshadow::Shape2;
 using mshadow::Tensor;
-
+// using mshadow::TensorContainer;
 
 inline Tensor<cpu, 2> RTensor2(Blob<float>* blob) {
   const vector<int>& shape = blob->shape();
@@ -35,10 +59,12 @@ RnnDataLayer::~RnnDataLayer() {
 
 void RnnDataLayer::Setup(const LayerProto& proto, int npartitions) {
   Layer::Setup(proto, npartitions);
-  shard_ = new DataShard(proto.GetExtension(input_conf).path(), DataShard::kRead);
+  shard_ = new DataShard(
+               proto.GetExtension(input_conf).path(),
+               DataShard::kRead);
   string key;
   max_window_ = proto.GetExtension(input_conf).max_window();
-  records_.resize(max_window_ + 1);  // # of records in data layer is max_window_ + 1
+  records_.resize(max_window_ + 1);  // resize to # of records in data layer
   window_ = 0;
   shard_->Next(&key, &records_[window_]);
 }
@@ -50,11 +76,11 @@ void RnnDataLayer::ComputeFeature(int flag, Metric *perf) {
   for (int i = 1; i <= max_window_; i++) {
     string key;
     if (shard_->Next(&key, &records_[i])) {
-      if(records_[i].word_index() == 0) {
+      if (records_[i].word_index() == 0) {
         window_ = i;  // +1 ??
         break;
       }
-    } else{
+    } else {
       shard_->SeekToFirst();
       CHECK(shard_->Next(&key, &records_[i]));
     }
@@ -65,9 +91,7 @@ void RnnDataLayer::ComputeFeature(int flag, Metric *perf) {
 void WordLayer::Setup(const LayerProto& proto, int npartitions) {
   Layer::Setup(proto, npartitions);
   CHECK_EQ(srclayers_.size(), 1);
-  LOG(ERROR) << srclayers_[0]->name();
   int max_window = static_cast<RnnDataLayer*>(srclayers_[0])->max_window();
-  LOG(ERROR) << "clee " << max_window;
   data_.Reshape(vector<int>{max_window});
 }
 
@@ -75,7 +99,7 @@ void WordLayer::ComputeFeature(int flag, Metric *perf) {
   auto records = static_cast<RnnDataLayer*>(srclayers_[0])->records();
   float *word = data_.mutable_cpu_data();
   window_ = static_cast<RNNLayer*>(srclayers_[0])->window();
-  for(int i = 0; i < window_; i++) {
+  for (int i = 0; i < window_; i++) {
     word[i] = records[i].word_index();
   }
 }
@@ -226,14 +250,14 @@ void OutputLayer::ComputeFeature(int flag, Metric* perf) {
   auto class_weight = RTensor2(class_weight_->mutable_data());
   const float * label = srclayers_[1]->data(this).cpu_data();
 
-  float loss = 0.f, ppl =0.f;
+  float loss = 0.f, ppl = 0.f;
   for (int t = 0; t < window_; t++) {
     int start = static_cast<int>(label[t * 4 + 0]);
     int end = static_cast<int>(label[t * 4 + 1]);
 
     auto wordWeight = word_weight.Slice(start, end);
     CHECK_GT(end, start);
-    pword_[t].Reshape(vector<int>{end-start});
+    pword_[t].Reshape(std::vector<int>{end-start});
     auto pword = RTensor1(&pword_[t]);
     pword = dot(src[t], wordWeight.T());
     Softmax(pword, pword);
@@ -279,12 +303,14 @@ void OutputLayer::ComputeGradient(int flag, Metric* perf) {
     pword[wid - start] -= 1.0;
 
     // gL/gword_weight
-    gword_weight.Slice(start, end) += dot(pword.FlatTo2D().T(), src[t].FlatTo2D());
+    gword_weight.Slice(start, end) += dot(pword.FlatTo2D().T(),
+                                          src[t].FlatTo2D());
     // gL/gclass_weight
-    gclass_weight += dot(pclass[t].FlatTo2D().T(), src[t].FlatTo2D());
+    gclass_weight += dot(pclass[t].FlatTo2D().T(),
+                         src[t].FlatTo2D());
 
     gsrc[t] = dot(pword, word_weight.Slice(start, end));
     gsrc[t] += dot(pclass[t], class_weight);
   }
 }
-}
+}   // end of namespace singa

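OutputLayer above factorizes the softmax over the vocabulary: p(word | h) = p(class | h) * p(word | class, h), where each class owns the contiguous vocabulary slice [class_start, class_end) produced by create_shard.cc. A hedged sketch of that factorization on plain vectors (the commit's real code uses mshadow tensors and weight slices; these function signatures are illustrative):

    // Illustrative class-factorized softmax on std::vector.
    #include <algorithm>
    #include <cmath>
    #include <vector>

    std::vector<float> Softmax(std::vector<float> v) {
      float mx = *std::max_element(v.begin(), v.end());
      float sum = 0.f;
      for (float& x : v) { x = std::exp(x - mx); sum += x; }  // stable exp
      for (float& x : v) x /= sum;
      return v;
    }

    // p(word wid | h): wid's class is cid, owning the vocab slice
    // [start, start + word_scores.size()).
    float WordProb(const std::vector<float>& class_scores,
                   const std::vector<float>& word_scores,
                   int cid, int wid, int start) {
      return Softmax(class_scores)[cid] * Softmax(word_scores)[wid - start];
    }

With |C| classes of roughly |V|/|C| words each, one prediction costs O(|C| + |V|/|C|) instead of O(|V|), which is the motivation for class-based RNNLM outputs.
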
http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/c1c6a2ed/examples/rnnlm/rnnlm.h
----------------------------------------------------------------------
diff --git a/examples/rnnlm/rnnlm.h b/examples/rnnlm/rnnlm.h
index 888ebe7..d9032e6 100644
--- a/examples/rnnlm/rnnlm.h
+++ b/examples/rnnlm/rnnlm.h
@@ -1,5 +1,28 @@
-#include "singa.h"
-#include "rnnlm.pb.h"
+/************************************************************
+*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*   http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing,
+* software distributed under the License is distributed on an
+* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+* KIND, either express or implied.  See the License for the
+* specific language governing permissions and limitations
+* under the License.
+*
+*************************************************************/
+#ifndef EXAMPLES_RNNLM_RNNLM_H_
+#define EXAMPLES_RNNLM_RNNLM_H_
+#include <vector>
+#include "./singa.h"
+#include "./rnnlm.pb.h"
 namespace singa {
 
 /**
@@ -131,8 +154,9 @@ class OutputLayer : public RNNLayer {
   }
 
  private:
-  vector<Blob<float>> pword_;
+  std::vector<Blob<float>> pword_;
   Blob<float> pclass_;
   Param* word_weight_, *class_weight_;
 };
-}
+}  // namespace singa
+#endif  // EXAMPLES_RNNLM_RNNLM_H_

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/c1c6a2ed/examples/rnnlm/rnnlm.proto
----------------------------------------------------------------------
diff --git a/examples/rnnlm/rnnlm.proto b/examples/rnnlm/rnnlm.proto
index 01580c1..32d2556 100644
--- a/examples/rnnlm/rnnlm.proto
+++ b/examples/rnnlm/rnnlm.proto
@@ -1,3 +1,24 @@
+/************************************************************
+*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*   http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing,
+* software distributed under the License is distributed on an
+* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+* KIND, either express or implied.  See the License for the
+* specific language governing permissions and limitations
+* under the License.
+*
+*************************************************************/
+
 package singa;
 import "job.proto";
 
