Repository: systemml
Updated Branches:
  refs/heads/master c89d3be80 -> 1049f5e56


[MINOR] Remove unused argument from fm.init DML function

Closes #804.
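
For reference, the change drops the now-unused row count n from the init
signature. Below is a minimal DML sketch of the updated call site, mirroring
the grad_check test; the source() path and the small n/d values are
illustrative assumptions, not part of this commit:

  source("nn/layers/fm.dml") as fm

  n = 4   # examples (no longer passed to init)
  d = 10  # features
  k = 2   # factorization dimensionality
  X = rand(rows=n, cols=d)
  [w0, W, V] = fm::init(d, k)  # previously fm::init(n, d, k)
  out = fm::forward(X, w0, W, V)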


Project: http://git-wip-us.apache.org/repos/asf/systemml/repo
Commit: http://git-wip-us.apache.org/repos/asf/systemml/commit/1049f5e5
Tree: http://git-wip-us.apache.org/repos/asf/systemml/tree/1049f5e5
Diff: http://git-wip-us.apache.org/repos/asf/systemml/diff/1049f5e5

Branch: refs/heads/master
Commit: 1049f5e56070b648d2f1fa54f1af658206f0114e
Parents: c89d3be
Author: Janardhan Pulivarthi <[email protected]>
Authored: Wed Jul 18 18:47:49 2018 -0700
Committer: Matthias Boehm <[email protected]>
Committed: Wed Jul 18 18:47:49 2018 -0700

----------------------------------------------------------------------
 scripts/nn/layers/fm.dml          | 2 +-
 scripts/nn/test/grad_check.dml    | 2 +-
 scripts/staging/fm-binclass.dml   | 2 +-
 scripts/staging/fm-regression.dml | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/systemml/blob/1049f5e5/scripts/nn/layers/fm.dml
----------------------------------------------------------------------
diff --git a/scripts/nn/layers/fm.dml b/scripts/nn/layers/fm.dml
index 228ec41..ccbc31d 100644
--- a/scripts/nn/layers/fm.dml
+++ b/scripts/nn/layers/fm.dml
@@ -109,7 +109,7 @@ backward = function(matrix[double] dout, matrix[double] X, matrix[double] w0, ma
   # dV = mean(dout) * (t(X) %*% X %*%V) - g_V2
 }
 
-init = function(int n, int d, int k)
+init = function(int d, int k)
     return (matrix[double] w0, matrix[double] W, matrix[double] V) {
   /*
    * This function initializes the parameters.

http://git-wip-us.apache.org/repos/asf/systemml/blob/1049f5e5/scripts/nn/test/grad_check.dml
----------------------------------------------------------------------
diff --git a/scripts/nn/test/grad_check.dml b/scripts/nn/test/grad_check.dml
index be34408..a5da859 100644
--- a/scripts/nn/test/grad_check.dml
+++ b/scripts/nn/test/grad_check.dml
@@ -1142,7 +1142,7 @@ fm = function() {
   k = 2 # factorization dimensionality
   X = rand(rows=n, cols=d)
   y = rand(rows=n, cols=1)
-  [w0, W, V] = fm::init(n, d, k)
+  [w0, W, V] = fm::init(d, k)
 
   # Compute analytical gradients of loss wrt parameters
   out = fm::forward(X, w0, W, V)

http://git-wip-us.apache.org/repos/asf/systemml/blob/1049f5e5/scripts/staging/fm-binclass.dml
----------------------------------------------------------------------
diff --git a/scripts/staging/fm-binclass.dml b/scripts/staging/fm-binclass.dml
index f777544..4d6a57f 100644
--- a/scripts/staging/fm-binclass.dml
+++ b/scripts/staging/fm-binclass.dml
@@ -56,7 +56,7 @@ train = function(matrix[double] X, matrix[double] y, matrix[double] X_val, matri
     k = 2; # factorization dimensionality, only(=2) possible for now.
 
     # 1.initialize fm core
-    [w0, W, V] = fm::init(n, d, k);
+    [w0, W, V] = fm::init(d, k);
 
     # 2.initialize adam optimizer
     ## Default values for some parameters

http://git-wip-us.apache.org/repos/asf/systemml/blob/1049f5e5/scripts/staging/fm-regression.dml
----------------------------------------------------------------------
diff --git a/scripts/staging/fm-regression.dml b/scripts/staging/fm-regression.dml
index 92333a3..875a6da 100644
--- a/scripts/staging/fm-regression.dml
+++ b/scripts/staging/fm-regression.dml
@@ -55,7 +55,7 @@ train = function(matrix[double] X, matrix[double] y, matrix[double] X_val, matri
                 #   only (=2) possible
 
     # 1.initialize fm core
-    [w0, W, V] = fm::init(n, d, k);
+    [w0, W, V] = fm::init(d, k);
 
     # 2.initialize adam optimizer
     ## Default values for some parameters
