This is an automated email from the ASF dual-hosted git repository.

niketanpansare pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/systemml.git


The following commit(s) were added to refs/heads/master by this push:
     new 392f3d2  [MINOR][DOC] Updated Deep Learning documentation
392f3d2 is described below

commit 392f3d2c8a9d7fd9f1c05454636536d5b4d9e155
Author: Niketan Pansare <npan...@us.ibm.com>
AuthorDate: Fri Mar 22 19:47:00 2019 -0700

    [MINOR][DOC] Updated Deep Learning documentation
    
    - Also, fixed javadoc errors.
---
 docs/deep-learning.md                                                | 1 +
 src/main/java/org/apache/sysml/api/ScriptExecutorUtils.java          | 1 +
 .../sysml/runtime/instructions/gpu/context/GPUMemoryManager.java     | 5 +++--
 src/main/python/systemml/mllearn/keras2caffe.py                      | 2 +-
 4 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/docs/deep-learning.md b/docs/deep-learning.md
index 2dbb4bb..968c959 100644
--- a/docs/deep-learning.md
+++ b/docs/deep-learning.md
@@ -207,6 +207,7 @@ keras_model.add(Flatten())
 keras_model.add(Dense(512, activation='relu'))
 keras_model.add(Dropout(0.5))
 keras_model.add(Dense(10, activation='softmax'))
+keras_model.compile(loss='categorical_crossentropy', optimizer=SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True))
 keras_model.summary()
 
 # Scale the input features
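
Note for readers of the docs/deep-learning.md change above: the added compile() line assumes SGD has been imported from keras.optimizers earlier in the documented example. A minimal standalone sketch of that step (Keras 2.x assumed; the one-layer model below is only illustrative and not part of the docs):

    from keras.models import Sequential
    from keras.layers import Dense
    from keras.optimizers import SGD

    # Toy model, just to exercise the compile step shown in the doc change.
    model = Sequential([Dense(10, activation='softmax', input_shape=(784,))])
    model.compile(loss='categorical_crossentropy',
                  optimizer=SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True))
    model.summary()
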
diff --git a/src/main/java/org/apache/sysml/api/ScriptExecutorUtils.java b/src/main/java/org/apache/sysml/api/ScriptExecutorUtils.java
index c9d1a5d..5e59204 100644
--- a/src/main/java/org/apache/sysml/api/ScriptExecutorUtils.java
+++ b/src/main/java/org/apache/sysml/api/ScriptExecutorUtils.java
@@ -104,6 +104,7 @@ public class ScriptExecutorUtils {
         * @param api API used to execute the runtime program
         * @param performHOPRewrites should perform hop rewrites
         * @param maintainSymbolTable whether or not all values should be maintained in the symbol table after execution.
+        * @param init whether to initialize hadoop execution
         * @return compiled runtime program
         */
        public static Program compileRuntimeProgram(String script, Map<String,String> nsscripts, Map<String, String> args, String[] allArgs,
diff --git a/src/main/java/org/apache/sysml/runtime/instructions/gpu/context/GPUMemoryManager.java b/src/main/java/org/apache/sysml/runtime/instructions/gpu/context/GPUMemoryManager.java
index ce22a7e..cf579ec 100644
--- a/src/main/java/org/apache/sysml/runtime/instructions/gpu/context/GPUMemoryManager.java
+++ b/src/main/java/org/apache/sysml/runtime/instructions/gpu/context/GPUMemoryManager.java
@@ -517,14 +517,15 @@ public class GPUMemoryManager {
        }
        
        /**
-        * Clears up the memory used by non-dirty pointers.
+        * Clears up the memory used by non-dirty pointers except output and locked matrix objects.
+        * 
+        * @param outputMatrixObjects list of output matrix objects
         */
        public void clearTemporaryMemory(HashSet<MatrixObject> outputMatrixObjects) {
                Set<Pointer> donotClearPointers =  new HashSet<>();
                // First clean up all GPU objects except:
                // 1. Output matrix objects
                // 2. GPU objects that are currently being used (i.e. locked)
-               // 3. Matrix object are 
                Set<GPUObject> allGPUObjects = new HashSet<>(matrixMemoryManager.getGpuObjects());
                for (GPUObject gpuObj : allGPUObjects) {
                        boolean isOutput = outputMatrixObjects.contains(gpuObj.mat);
diff --git a/src/main/python/systemml/mllearn/keras2caffe.py b/src/main/python/systemml/mllearn/keras2caffe.py
index 39a9755..19cde10 100755
--- a/src/main/python/systemml/mllearn/keras2caffe.py
+++ b/src/main/python/systemml/mllearn/keras2caffe.py
@@ -296,7 +296,7 @@ def getDropoutParam(layer):
         if not supported:
             raise Exception('noise_shape=' + str(layer.noise_shape) + ' is not supported for Dropout layer with input_shape='
                             + str(layer.input_shape))
-    return {'dropout_ratio': l.rate}
+    return {'dropout_ratio': layer.rate}
 
 layerParamMapping = {
     keras.layers.InputLayer: lambda l:
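
The keras2caffe.py change above fixes a NameError: getDropoutParam receives its argument as `layer`, but the returned dict referenced an undefined name `l`. A minimal sketch of the attribute the corrected line reads (plain Keras 2.x assumed; not part of the patch):

    import keras

    # Dropout stores its drop probability as .rate; the fixed line maps this
    # value to the Caffe-style 'dropout_ratio' parameter.
    dropout = keras.layers.Dropout(rate=0.5)
    print(dropout.rate)  # 0.5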
