Repository: systemml
Updated Branches:
  refs/heads/master 614adecaf -> cffefca30


http://git-wip-us.apache.org/repos/asf/systemml/blob/cffefca3/src/test/java/org/apache/sysml/test/integration/functions/paramserv/ParamservSparkNNTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/sysml/test/integration/functions/paramserv/ParamservSparkNNTest.java b/src/test/java/org/apache/sysml/test/integration/functions/paramserv/ParamservSparkNNTest.java
new file mode 100644
index 0000000..2441116
--- /dev/null
+++ b/src/test/java/org/apache/sysml/test/integration/functions/paramserv/ParamservSparkNNTest.java
@@ -0,0 +1,47 @@
+package org.apache.sysml.test.integration.functions.paramserv;
+
+import org.apache.sysml.api.DMLException;
+import org.apache.sysml.api.DMLScript;
+import org.apache.sysml.test.integration.AutomatedTestBase;
+import org.apache.sysml.test.integration.TestConfiguration;
+import org.junit.Test;
+
+public class ParamservSparkNNTest extends AutomatedTestBase {
+
+       private static final String TEST_NAME1 = "paramserv-spark-nn-bsp-batch-dc";
+
+       private static final String TEST_DIR = "functions/paramserv/";
+       private static final String TEST_CLASS_DIR = TEST_DIR + ParamservSparkNNTest.class.getSimpleName() + "/";
+
+       @Override
+       public void setUp() {
+               addTestConfiguration(TEST_NAME1, new TestConfiguration(TEST_CLASS_DIR, TEST_NAME1, new String[] {}));
+       }
+
+       @Test
+       public void testParamservBSPBatchDisjointContiguous() {
+               runDMLTest(TEST_NAME1);
+       }
+
+       private void runDMLTest(String testname) {
+               DMLScript.RUNTIME_PLATFORM oldRtplatform = AutomatedTestBase.rtplatform;
+               boolean oldUseLocalSparkConfig = DMLScript.USE_LOCAL_SPARK_CONFIG;
+               AutomatedTestBase.rtplatform = DMLScript.RUNTIME_PLATFORM.SPARK;
+               DMLScript.USE_LOCAL_SPARK_CONFIG = true;
+
+               try {
+                       TestConfiguration config = getTestConfiguration(testname);
+                       loadTestConfiguration(config);
+                       programArgs = new String[] { "-explain" };
+                       String HOME = SCRIPT_DIR + TEST_DIR;
+                       fullDMLScriptName = HOME + testname + ".dml";
+                       // The test is not yet complete, so the resulting NPE is expected
+                       runTest(true, true, DMLException.class, null, -1);
+               } finally {
+                       AutomatedTestBase.rtplatform = oldRtplatform;
+                       DMLScript.USE_LOCAL_SPARK_CONFIG = oldUseLocalSparkConfig;
+               }
+
+       }
+
+}
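
For reference, the save/restore of the runtime platform around the Spark run above is the usual pattern in these tests. Below is a minimal sketch of the same pattern factored into a reusable helper method; the class and method names are hypothetical and not part of this commit:

import org.apache.sysml.api.DMLScript;
import org.apache.sysml.test.integration.AutomatedTestBase;

// Hypothetical sketch (not part of this commit): the save/restore pattern from
// ParamservSparkNNTest, factored into a helper on a test extending AutomatedTestBase.
public class SparkModeTestSketch extends AutomatedTestBase {

	@Override
	public void setUp() {
		// no test configurations are needed for this sketch
	}

	protected void runInSparkMode(Runnable body) {
		DMLScript.RUNTIME_PLATFORM oldRtplatform = AutomatedTestBase.rtplatform;
		boolean oldUseLocalSparkConfig = DMLScript.USE_LOCAL_SPARK_CONFIG;
		AutomatedTestBase.rtplatform = DMLScript.RUNTIME_PLATFORM.SPARK;
		DMLScript.USE_LOCAL_SPARK_CONFIG = true;
		try {
			body.run();
		} finally {
			AutomatedTestBase.rtplatform = oldRtplatform;
			DMLScript.USE_LOCAL_SPARK_CONFIG = oldUseLocalSparkConfig;
		}
	}
}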

http://git-wip-us.apache.org/repos/asf/systemml/blob/cffefca3/src/test/java/org/apache/sysml/test/integration/functions/paramserv/SerializationTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/sysml/test/integration/functions/paramserv/SerializationTest.java b/src/test/java/org/apache/sysml/test/integration/functions/paramserv/SerializationTest.java
new file mode 100644
index 0000000..2a08ca6
--- /dev/null
+++ b/src/test/java/org/apache/sysml/test/integration/functions/paramserv/SerializationTest.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.sysml.test.integration.functions.paramserv;
+
+import java.util.Arrays;
+
+import org.apache.sysml.runtime.controlprogram.caching.MatrixObject;
+import org.apache.sysml.runtime.controlprogram.paramserv.ParamservUtils;
+import org.apache.sysml.runtime.instructions.cp.IntObject;
+import org.apache.sysml.runtime.instructions.cp.ListObject;
+import org.apache.sysml.runtime.util.DataConverter;
+import org.apache.sysml.runtime.util.ProgramConverter;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class SerializationTest {
+
+       @Test
+       public void serializeUnnamedListObject() {
+               MatrixObject mo1 = generateDummyMatrix(10);
+               MatrixObject mo2 = generateDummyMatrix(20);
+               IntObject io = new IntObject(30);
+               ListObject lo = new ListObject(Arrays.asList(mo1, mo2, io));
+               String serial = ProgramConverter.serializeDataObject("key", lo);
+               Object[] obj = ProgramConverter.parseDataObject(serial);
+               ListObject actualLO = (ListObject) obj[1];
+               MatrixObject actualMO1 = (MatrixObject) actualLO.slice(0);
+               MatrixObject actualMO2 = (MatrixObject) actualLO.slice(1);
+               IntObject actualIO = (IntObject) actualLO.slice(2);
+               Assert.assertArrayEquals(mo1.acquireRead().getDenseBlockValues(), actualMO1.acquireRead().getDenseBlockValues(), 0);
+               Assert.assertArrayEquals(mo2.acquireRead().getDenseBlockValues(), actualMO2.acquireRead().getDenseBlockValues(), 0);
+               Assert.assertEquals(io.getLongValue(), actualIO.getLongValue());
+       }
+
+       @Test
+       public void serializeNamedListObject() {
+               MatrixObject mo1 = generateDummyMatrix(10);
+               MatrixObject mo2 = generateDummyMatrix(20);
+               IntObject io = new IntObject(30);
+               ListObject lo = new ListObject(Arrays.asList(mo1, mo2, io), Arrays.asList("e1", "e2", "e3"));
+
+               String serial = ProgramConverter.serializeDataObject("key", lo);
+               Object[] obj = ProgramConverter.parseDataObject(serial);
+               ListObject actualLO = (ListObject) obj[1];
+               MatrixObject actualMO1 = (MatrixObject) actualLO.slice(0);
+               MatrixObject actualMO2 = (MatrixObject) actualLO.slice(1);
+               IntObject actualIO = (IntObject) actualLO.slice(2);
+               Assert.assertEquals(lo.getNames(), actualLO.getNames());
+               Assert.assertArrayEquals(mo1.acquireRead().getDenseBlockValues(), actualMO1.acquireRead().getDenseBlockValues(), 0);
+               Assert.assertArrayEquals(mo2.acquireRead().getDenseBlockValues(), actualMO2.acquireRead().getDenseBlockValues(), 0);
+               Assert.assertEquals(io.getLongValue(), actualIO.getLongValue());
+       }
+
+       private MatrixObject generateDummyMatrix(int size) {
+               double[] dl = new double[size];
+               for (int i = 0; i < size; i++) {
+                       dl[i] = i;
+               }
+               MatrixObject result = ParamservUtils.newMatrixObject(DataConverter.convertToMatrixBlock(dl, true));
+               result.exportData();
+               return result;
+       }
+}
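
As a usage note, the round-trip exercised above reduces to a serialize/parse pair. The sketch below reuses only calls that already appear in this test; the list contents and names are arbitrary, and a plain main method stands in for the JUnit harness:

import java.util.Arrays;

import org.apache.sysml.runtime.controlprogram.caching.MatrixObject;
import org.apache.sysml.runtime.controlprogram.paramserv.ParamservUtils;
import org.apache.sysml.runtime.instructions.cp.IntObject;
import org.apache.sysml.runtime.instructions.cp.ListObject;
import org.apache.sysml.runtime.util.DataConverter;
import org.apache.sysml.runtime.util.ProgramConverter;

public class ListObjectRoundTripSketch {
	public static void main(String[] args) {
		// Build a small named list: one dense matrix plus one scalar
		MatrixObject mo = ParamservUtils.newMatrixObject(
			DataConverter.convertToMatrixBlock(new double[] {0, 1, 2, 3, 4}, true));
		mo.exportData(); // write the data out so it can be re-read after parsing
		IntObject io = new IntObject(42);
		ListObject lo = new ListObject(Arrays.asList(mo, io), Arrays.asList("W", "iter"));

		// Serialize to a string and parse it back; index 1 of the parsed array holds the Data object
		String serial = ProgramConverter.serializeDataObject("key", lo);
		Object[] parsed = ProgramConverter.parseDataObject(serial);
		ListObject restored = (ListObject) parsed[1];
		System.out.println("restored names: " + restored.getNames());
	}
}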

http://git-wip-us.apache.org/repos/asf/systemml/blob/cffefca3/src/test/java/org/apache/sysml/test/integration/functions/parfor/ParForAdversarialLiteralsTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/sysml/test/integration/functions/parfor/ParForAdversarialLiteralsTest.java b/src/test/java/org/apache/sysml/test/integration/functions/parfor/ParForAdversarialLiteralsTest.java
index a4eab06..40f5a1b 100644
--- a/src/test/java/org/apache/sysml/test/integration/functions/parfor/ParForAdversarialLiteralsTest.java
+++ b/src/test/java/org/apache/sysml/test/integration/functions/parfor/ParForAdversarialLiteralsTest.java
@@ -23,7 +23,7 @@ import java.util.HashMap;
 
 import org.junit.Test;
 
-import org.apache.sysml.runtime.controlprogram.parfor.ProgramConverter;
+import org.apache.sysml.lops.Lop;
 import org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex;
 import org.apache.sysml.test.integration.AutomatedTestBase;
 import org.apache.sysml.test.integration.TestConfiguration;
@@ -123,7 +123,7 @@ public class ParForAdversarialLiteralsTest extends AutomatedTestBase
                // This is for running the junit test the new way, i.e., construct the arguments directly
                String HOME = SCRIPT_DIR + TEST_DIR;
                String IN = "A";
-               String OUT = (testName.equals(TEST_NAME1a)||testName.equals(TEST_NAME1b))?ProgramConverter.CP_ROOT_THREAD_ID:"B";
+               String OUT = (testName.equals(TEST_NAME1a)||testName.equals(TEST_NAME1b))?Lop.CP_ROOT_THREAD_ID:"B";
 
                fullDMLScriptName = HOME + TEST_NAME + ".dml";
                programArgs = new String[]{"-args", input(IN),
@@ -132,7 +132,7 @@ public class ParForAdversarialLiteralsTest extends AutomatedTestBase
                fullRScriptName = HOME + TEST_NAME + ".R";
                rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + expectedDir();
                
-        double[][] A = getRandomMatrix(rows, cols, 0, 1, sparsity, 7);
+               double[][] A = getRandomMatrix(rows, cols, 0, 1, sparsity, 7);
                writeInputMatrix("A", A, false);
 
                boolean exceptionExpected = false;
@@ -141,8 +141,7 @@ public class ParForAdversarialLiteralsTest extends AutomatedTestBase
                //compare matrices
                HashMap<CellIndex, Double> dmlin = TestUtils.readDMLMatrixFromHDFS(input(IN));
                HashMap<CellIndex, Double> dmlout = readDMLMatrixFromHDFS(OUT); 
-                               
-               TestUtils.compareMatrices(dmlin, dmlout, eps, "DMLin", "DMLout");
+               
+               TestUtils.compareMatrices(dmlin, dmlout, eps, "DMLin", "DMLout");
        }
-       
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/systemml/blob/cffefca3/src/test/scripts/functions/paramserv/mnist_lenet_paramserv.dml
----------------------------------------------------------------------
diff --git a/src/test/scripts/functions/paramserv/mnist_lenet_paramserv.dml b/src/test/scripts/functions/paramserv/mnist_lenet_paramserv.dml
index acafc88..84095ec 100644
--- a/src/test/scripts/functions/paramserv/mnist_lenet_paramserv.dml
+++ b/src/test/scripts/functions/paramserv/mnist_lenet_paramserv.dml
@@ -36,7 +36,7 @@ source("nn/optim/sgd_nesterov.dml") as sgd_nesterov
 train = function(matrix[double] X, matrix[double] Y,
                  matrix[double] X_val, matrix[double] Y_val,
                  int C, int Hin, int Win, int epochs, int workers,
-                 string utype, string freq, int batchsize, string scheme)
+                 string utype, string freq, int batchsize, string scheme, string mode)
     return (matrix[double] W1, matrix[double] b1,
             matrix[double] W2, matrix[double] b2,
             matrix[double] W3, matrix[double] b3,
@@ -108,7 +108,7 @@ train = function(matrix[double] X, matrix[double] Y,
  params = list(lr=lr, mu=mu, decay=decay, C=C, Hin=Hin, Win=Win, Hf=Hf, Wf=Wf, stride=stride, pad=pad, lambda=lambda, F1=F1, F2=F2, N3=N3)
 
   # Use paramserv function
-  modelList2 = paramserv(model=modelList, features=X, labels=Y, val_features=X_val, val_labels=Y_val, upd="./src/test/scripts/functions/paramserv/mnist_lenet_paramserv.dml::gradients", agg="./src/test/scripts/functions/paramserv/mnist_lenet_paramserv.dml::aggregation", mode="LOCAL", utype=utype, freq=freq, epochs=epochs, batchsize=batchsize, k=workers, scheme=scheme, hyperparams=params, checkpointing="NONE")
+  modelList2 = paramserv(model=modelList, features=X, labels=Y, val_features=X_val, val_labels=Y_val, upd="./src/test/scripts/functions/paramserv/mnist_lenet_paramserv.dml::gradients", agg="./src/test/scripts/functions/paramserv/mnist_lenet_paramserv.dml::aggregation", mode=mode, utype=utype, freq=freq, epochs=epochs, batchsize=batchsize, k=workers, scheme=scheme, hyperparams=params, checkpointing="NONE")
 
   W1 = as.matrix(modelList2["W1"])
   b1 = as.matrix(modelList2["b1"])

http://git-wip-us.apache.org/repos/asf/systemml/blob/cffefca3/src/test/scripts/functions/paramserv/paramserv-nn-asp-batch.dml
----------------------------------------------------------------------
diff --git a/src/test/scripts/functions/paramserv/paramserv-nn-asp-batch.dml b/src/test/scripts/functions/paramserv/paramserv-nn-asp-batch.dml
index 2279d58..ba22942 100644
--- a/src/test/scripts/functions/paramserv/paramserv-nn-asp-batch.dml
+++ b/src/test/scripts/functions/paramserv/paramserv-nn-asp-batch.dml
@@ -42,7 +42,7 @@ workers = 2
 batchsize = 32
 
 # Train
-[W1, b1, W2, b2, W3, b3, W4, b4] = mnist_lenet::train(X, Y, X_val, Y_val, C, Hin, Win, epochs, workers, "ASP", "BATCH", batchsize,"DISJOINT_CONTIGUOUS")
+[W1, b1, W2, b2, W3, b3, W4, b4] = mnist_lenet::train(X, Y, X_val, Y_val, C, Hin, Win, epochs, workers, "ASP", "BATCH", batchsize,"DISJOINT_CONTIGUOUS", "LOCAL")
 
 # Compute validation loss & accuracy
 probs_val = mnist_lenet::predict(X_val, C, Hin, Win, batchsize, W1, b1, W2, b2, W3, b3, W4, b4)

http://git-wip-us.apache.org/repos/asf/systemml/blob/cffefca3/src/test/scripts/functions/paramserv/paramserv-nn-asp-epoch.dml
----------------------------------------------------------------------
diff --git a/src/test/scripts/functions/paramserv/paramserv-nn-asp-epoch.dml b/src/test/scripts/functions/paramserv/paramserv-nn-asp-epoch.dml
index 1824083..c8c6a2f 100644
--- a/src/test/scripts/functions/paramserv/paramserv-nn-asp-epoch.dml
+++ b/src/test/scripts/functions/paramserv/paramserv-nn-asp-epoch.dml
@@ -42,7 +42,7 @@ workers = 2
 batchsize = 32
 
 # Train
-[W1, b1, W2, b2, W3, b3, W4, b4] = mnist_lenet::train(X, Y, X_val, Y_val, C, Hin, Win, epochs, workers, "ASP", "EPOCH", batchsize, "DISJOINT_CONTIGUOUS")
+[W1, b1, W2, b2, W3, b3, W4, b4] = mnist_lenet::train(X, Y, X_val, Y_val, C, Hin, Win, epochs, workers, "ASP", "EPOCH", batchsize, "DISJOINT_CONTIGUOUS", "LOCAL")
 
 # Compute validation loss & accuracy
 probs_val = mnist_lenet::predict(X_val, C, Hin, Win, batchsize, W1, b1, W2, b2, W3, b3, W4, b4)

http://git-wip-us.apache.org/repos/asf/systemml/blob/cffefca3/src/test/scripts/functions/paramserv/paramserv-nn-bsp-batch-dc.dml
----------------------------------------------------------------------
diff --git a/src/test/scripts/functions/paramserv/paramserv-nn-bsp-batch-dc.dml b/src/test/scripts/functions/paramserv/paramserv-nn-bsp-batch-dc.dml
index 2e09de4..78fc1c4 100644
--- a/src/test/scripts/functions/paramserv/paramserv-nn-bsp-batch-dc.dml
+++ b/src/test/scripts/functions/paramserv/paramserv-nn-bsp-batch-dc.dml
@@ -42,7 +42,7 @@ workers = 2
 batchsize = 32
 
 # Train
-[W1, b1, W2, b2, W3, b3, W4, b4] = mnist_lenet::train(X, Y, X_val, Y_val, C, Hin, Win, epochs, workers, "BSP", "BATCH", batchsize, "DISJOINT_CONTIGUOUS")
+[W1, b1, W2, b2, W3, b3, W4, b4] = mnist_lenet::train(X, Y, X_val, Y_val, C, Hin, Win, epochs, workers, "BSP", "BATCH", batchsize, "DISJOINT_CONTIGUOUS", "LOCAL")
 
 # Compute validation loss & accuracy
 probs_val = mnist_lenet::predict(X_val, C, Hin, Win, batchsize, W1, b1, W2, b2, W3, b3, W4, b4)

http://git-wip-us.apache.org/repos/asf/systemml/blob/cffefca3/src/test/scripts/functions/paramserv/paramserv-nn-bsp-batch-dr.dml
----------------------------------------------------------------------
diff --git a/src/test/scripts/functions/paramserv/paramserv-nn-bsp-batch-dr.dml b/src/test/scripts/functions/paramserv/paramserv-nn-bsp-batch-dr.dml
index 8444952..9191b5a 100644
--- a/src/test/scripts/functions/paramserv/paramserv-nn-bsp-batch-dr.dml
+++ b/src/test/scripts/functions/paramserv/paramserv-nn-bsp-batch-dr.dml
@@ -42,7 +42,7 @@ workers = 2
 batchsize = 32
 
 # Train
-[W1, b1, W2, b2, W3, b3, W4, b4] = mnist_lenet::train(X, Y, X_val, Y_val, C, Hin, Win, epochs, workers, "BSP", "BATCH", batchsize, "DISJOINT_RANDOM")
+[W1, b1, W2, b2, W3, b3, W4, b4] = mnist_lenet::train(X, Y, X_val, Y_val, C, Hin, Win, epochs, workers, "BSP", "BATCH", batchsize, "DISJOINT_RANDOM", "LOCAL")
 
 # Compute validation loss & accuracy
 probs_val = mnist_lenet::predict(X_val, C, Hin, Win, batchsize, W1, b1, W2, b2, W3, b3, W4, b4)

http://git-wip-us.apache.org/repos/asf/systemml/blob/cffefca3/src/test/scripts/functions/paramserv/paramserv-nn-bsp-batch-drr.dml
----------------------------------------------------------------------
diff --git a/src/test/scripts/functions/paramserv/paramserv-nn-bsp-batch-drr.dml b/src/test/scripts/functions/paramserv/paramserv-nn-bsp-batch-drr.dml
index ccb7ffc..ec18cb4 100644
--- a/src/test/scripts/functions/paramserv/paramserv-nn-bsp-batch-drr.dml
+++ b/src/test/scripts/functions/paramserv/paramserv-nn-bsp-batch-drr.dml
@@ -42,7 +42,7 @@ workers = 4
 batchsize = 32
 
 # Train
-[W1, b1, W2, b2, W3, b3, W4, b4] = mnist_lenet::train(X, Y, X_val, Y_val, C, Hin, Win, epochs, workers, "BSP", "BATCH", batchsize, "DISJOINT_ROUND_ROBIN")
+[W1, b1, W2, b2, W3, b3, W4, b4] = mnist_lenet::train(X, Y, X_val, Y_val, C, Hin, Win, epochs, workers, "BSP", "BATCH", batchsize, "DISJOINT_ROUND_ROBIN", "LOCAL")
 
 # Compute validation loss & accuracy
 probs_val = mnist_lenet::predict(X_val, C, Hin, Win, batchsize, W1, b1, W2, b2, W3, b3, W4, b4)

http://git-wip-us.apache.org/repos/asf/systemml/blob/cffefca3/src/test/scripts/functions/paramserv/paramserv-nn-bsp-batch-or.dml
----------------------------------------------------------------------
diff --git a/src/test/scripts/functions/paramserv/paramserv-nn-bsp-batch-or.dml b/src/test/scripts/functions/paramserv/paramserv-nn-bsp-batch-or.dml
index 4afc56b..928dde2 100644
--- a/src/test/scripts/functions/paramserv/paramserv-nn-bsp-batch-or.dml
+++ b/src/test/scripts/functions/paramserv/paramserv-nn-bsp-batch-or.dml
@@ -42,7 +42,7 @@ workers = 2
 batchsize = 32
 
 # Train
-[W1, b1, W2, b2, W3, b3, W4, b4] = mnist_lenet::train(X, Y, X_val, Y_val, C, Hin, Win, epochs, workers, "BSP", "BATCH", batchsize, "OVERLAP_RESHUFFLE")
+[W1, b1, W2, b2, W3, b3, W4, b4] = mnist_lenet::train(X, Y, X_val, Y_val, C, Hin, Win, epochs, workers, "BSP", "BATCH", batchsize, "OVERLAP_RESHUFFLE", "LOCAL")
 
 # Compute validation loss & accuracy
 probs_val = mnist_lenet::predict(X_val, C, Hin, Win, batchsize, W1, b1, W2, b2, W3, b3, W4, b4)

http://git-wip-us.apache.org/repos/asf/systemml/blob/cffefca3/src/test/scripts/functions/paramserv/paramserv-nn-bsp-epoch.dml
----------------------------------------------------------------------
diff --git a/src/test/scripts/functions/paramserv/paramserv-nn-bsp-epoch.dml b/src/test/scripts/functions/paramserv/paramserv-nn-bsp-epoch.dml
index c542286..8605984 100644
--- a/src/test/scripts/functions/paramserv/paramserv-nn-bsp-epoch.dml
+++ b/src/test/scripts/functions/paramserv/paramserv-nn-bsp-epoch.dml
@@ -42,7 +42,7 @@ workers = 2
 batchsize = 32
 
 # Train
-[W1, b1, W2, b2, W3, b3, W4, b4] = mnist_lenet::train(X, Y, X_val, Y_val, C, Hin, Win, epochs, workers, "BSP", "EPOCH", batchsize,"DISJOINT_CONTIGUOUS")
+[W1, b1, W2, b2, W3, b3, W4, b4] = mnist_lenet::train(X, Y, X_val, Y_val, C, Hin, Win, epochs, workers, "BSP", "EPOCH", batchsize,"DISJOINT_CONTIGUOUS", "LOCAL")
 
 # Compute validation loss & accuracy
 probs_val = mnist_lenet::predict(X_val, C, Hin, Win, batchsize, W1, b1, W2, b2, W3, b3, W4, b4)

http://git-wip-us.apache.org/repos/asf/systemml/blob/cffefca3/src/test/scripts/functions/paramserv/paramserv-spark-nn-bsp-batch-dc.dml
----------------------------------------------------------------------
diff --git a/src/test/scripts/functions/paramserv/paramserv-spark-nn-bsp-batch-dc.dml b/src/test/scripts/functions/paramserv/paramserv-spark-nn-bsp-batch-dc.dml
new file mode 100644
index 0000000..31d44aa
--- /dev/null
+++ b/src/test/scripts/functions/paramserv/paramserv-spark-nn-bsp-batch-dc.dml
@@ -0,0 +1,53 @@
+#-------------------------------------------------------------
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#-------------------------------------------------------------
+
+source("src/test/scripts/functions/paramserv/mnist_lenet_paramserv.dml") as mnist_lenet
+source("nn/layers/cross_entropy_loss.dml") as cross_entropy_loss
+
+# Generate the training data
+[images, labels, C, Hin, Win] = mnist_lenet::generate_dummy_data()
+n = nrow(images)
+
+# Generate the training data
+[X, Y, C, Hin, Win] = mnist_lenet::generate_dummy_data()
+
+# Split into training and validation
+val_size = n * 0.1
+X = images[(val_size+1):n,]
+X_val = images[1:val_size,]
+Y = labels[(val_size+1):n,]
+Y_val = labels[1:val_size,]
+
+# Arguments
+epochs = 10
+workers = 2
+batchsize = 16
+
+# Train
+[W1, b1, W2, b2, W3, b3, W4, b4] = mnist_lenet::train(X, Y, X_val, Y_val, C, Hin, Win, epochs, workers, "BSP", "BATCH", batchsize, "DISJOINT_CONTIGUOUS", "REMOTE_SPARK")
+
+# Compute validation loss & accuracy
+probs_val = mnist_lenet::predict(X_val, C, Hin, Win, batchsize, W1, b1, W2, b2, W3, b3, W4, b4)
+loss_val = cross_entropy_loss::forward(probs_val, Y_val)
+accuracy_val = mean(rowIndexMax(probs_val) == rowIndexMax(Y_val))
+
+# Output results
+print("Val Loss: " + loss_val + ", Val Accuracy: " + accuracy_val)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/systemml/blob/cffefca3/src/test_suites/java/org/apache/sysml/test/integration/functions/paramserv/ZPackageSuite.java
----------------------------------------------------------------------
diff --git a/src/test_suites/java/org/apache/sysml/test/integration/functions/paramserv/ZPackageSuite.java b/src/test_suites/java/org/apache/sysml/test/integration/functions/paramserv/ZPackageSuite.java
index 26ea638..d1b3a6d 100644
--- a/src/test_suites/java/org/apache/sysml/test/integration/functions/paramserv/ZPackageSuite.java
+++ b/src/test_suites/java/org/apache/sysml/test/integration/functions/paramserv/ZPackageSuite.java
@@ -29,9 +29,10 @@ import org.junit.runners.Suite;
        LocalDataPartitionerTest.class,
        SparkDataPartitionerTest.class,
        ParamservSyntaxTest.class,
+       SerializationTest.class,
        ParamservRecompilationTest.class,
        ParamservRuntimeNegativeTest.class,
-       ParamservNNTest.class
+       ParamservLocalNNTest.class
 })
 
 
