IGNITE-5217: Gradient descent for OLS linear regression

this closes #3308


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/b2060855
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/b2060855
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/b2060855

Branch: refs/heads/master
Commit: b20608557d96b120f2b944a4aad6e07ecafb407e
Parents: a394687
Author: dmitrievanthony <[email protected]>
Authored: Thu Dec 28 19:08:49 2017 +0300
Committer: Yury Babak <[email protected]>
Committed: Thu Dec 28 19:08:49 2017 +0300

----------------------------------------------------------------------
 .../DistributedRegressionExample.java           | 149 ---
 .../DistributedRegressionModelExample.java      | 134 ---
 ...tedLinearRegressionExampleWithQRTrainer.java | 136 +++
 ...edLinearRegressionExampleWithSGDTrainer.java | 137 +++
 .../ml/regression/linear/package-info.java      |  22 +
 .../matrix/SparseBlockDistributedMatrix.java    |   5 +-
 .../ml/optimization/BarzilaiBorweinUpdater.java |  51 ++
 .../ignite/ml/optimization/GradientDescent.java | 201 +++++
 .../ml/optimization/GradientFunction.java       |  31 +
 .../LeastSquaresGradientFunction.java           |  33 +
 .../ignite/ml/optimization/SimpleUpdater.java   |  45 +
 .../apache/ignite/ml/optimization/Updater.java  |  30 +
 .../ignite/ml/optimization/package-info.java    |  22 +
 .../util/SparseDistributedMatrixMapReducer.java |  84 ++
 .../ml/optimization/util/package-info.java      |  22 +
 .../AbstractMultipleLinearRegression.java       | 378 --------
 .../regressions/MultipleLinearRegression.java   |  71 --
 .../OLSMultipleLinearRegression.java            | 257 ------
 .../OLSMultipleLinearRegressionModel.java       |  77 --
 .../OLSMultipleLinearRegressionModelFormat.java |  46 -
 .../OLSMultipleLinearRegressionTrainer.java     |  62 --
 .../regressions/RegressionsErrorMessages.java   |  28 -
 .../linear/LinearRegressionModel.java           | 107 +++
 .../linear/LinearRegressionQRTrainer.java       |  72 ++
 .../linear/LinearRegressionSGDTrainer.java      |  67 ++
 .../ml/regressions/linear/package-info.java     |  22 +
 .../org/apache/ignite/ml/LocalModelsTest.java   |  40 +-
 .../ml/optimization/GradientDescentTest.java    |  64 ++
 .../SparseDistributedMatrixMapReducerTest.java  | 135 +++
 .../AbstractMultipleLinearRegressionTest.java   | 164 ----
 ...tedBlockOLSMultipleLinearRegressionTest.java | 901 ------------------
 ...tributedOLSMultipleLinearRegressionTest.java | 903 -------------------
 .../OLSMultipleLinearRegressionModelTest.java   |  53 --
 .../OLSMultipleLinearRegressionTest.java        | 820 -----------------
 .../ml/regressions/RegressionsTestSuite.java    |  20 +-
 .../linear/ArtificialRegressionDatasets.java    | 404 +++++++++
 ...istributedLinearRegressionQRTrainerTest.java |  36 +
 ...stributedLinearRegressionSGDTrainerTest.java |  35 +
 ...istributedLinearRegressionQRTrainerTest.java |  36 +
 ...stributedLinearRegressionSGDTrainerTest.java |  35 +
 .../GenericLinearRegressionTrainerTest.java     | 206 +++++
 ...wareAbstractLinearRegressionTrainerTest.java | 124 +++
 .../linear/LinearRegressionModelTest.java       |  66 ++
 .../LocalLinearRegressionQRTrainerTest.java     |  36 +
 .../LocalLinearRegressionSGDTrainerTest.java    |  35 +
 .../resources/datasets/regression/README.md     |  98 ++
 .../resources/datasets/regression/boston.csv    | 506 +++++++++++
 .../resources/datasets/regression/diabetes.csv  | 442 +++++++++
 .../jdbc/JdbcPutIndexedValue8Benchmark.java     |   3 -
 ...iteOLSMultipleLinearRegressionBenchmark.java |  67 +-
 parent/pom.xml                                  |   1 +
 51 files changed, 3388 insertions(+), 4131 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/b2060855/examples/src/main/ml/org/apache/ignite/examples/ml/regression/DistributedRegressionExample.java
----------------------------------------------------------------------
diff --git a/examples/src/main/ml/org/apache/ignite/examples/ml/regression/DistributedRegressionExample.java b/examples/src/main/ml/org/apache/ignite/examples/ml/regression/DistributedRegressionExample.java
deleted file mode 100644
index 3e65527..0000000
--- a/examples/src/main/ml/org/apache/ignite/examples/ml/regression/DistributedRegressionExample.java
+++ /dev/null
@@ -1,149 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.examples.ml.regression;
-
-import java.util.Arrays;
-import org.apache.ignite.Ignite;
-import org.apache.ignite.Ignition;
-import org.apache.ignite.examples.ml.math.matrix.SparseDistributedMatrixExample;
-import org.apache.ignite.ml.math.StorageConstants;
-import org.apache.ignite.ml.math.Tracer;
-import org.apache.ignite.ml.math.impls.matrix.SparseDistributedMatrix;
-import org.apache.ignite.ml.regressions.OLSMultipleLinearRegression;
-import org.apache.ignite.thread.IgniteThread;
-
-/**
- * Run linear regression over distributed matrix.
- *
- * TODO: IGNITE-6222, Currently works only in local mode.
- *
- * @see OLSMultipleLinearRegression
- */
-public class DistributedRegressionExample {
-    /** Run example. */
-    public static void main(String[] args) throws InterruptedException {
-        System.out.println();
-        System.out.println(">>> Linear regression over sparse distributed 
matrix API usage example started.");
-        // Start ignite grid.
-        try (Ignite ignite = 
Ignition.start("examples/config/example-ignite.xml")) {
-            System.out.println(">>> Ignite grid started.");
-            // Create IgniteThread, we must work with SparseDistributedMatrix 
inside IgniteThread
-            // because we create ignite cache internally.
-            IgniteThread igniteThread = new 
IgniteThread(ignite.configuration().getIgniteInstanceName(), 
SparseDistributedMatrixExample.class.getSimpleName(), () -> {
-
-                double[] data = {
-                    8, 78, 284, 9.100000381, 109,
-                    9.300000191, 68, 433, 8.699999809, 144,
-                    7.5, 70, 739, 7.199999809, 113,
-                    8.899999619, 96, 1792, 8.899999619, 97,
-                    10.19999981, 74, 477, 8.300000191, 206,
-                    8.300000191, 111, 362, 10.89999962, 124,
-                    8.800000191, 77, 671, 10, 152,
-                    8.800000191, 168, 636, 9.100000381, 162,
-                    10.69999981, 82, 329, 8.699999809, 150,
-                    11.69999981, 89, 634, 7.599999905, 134,
-                    8.5, 149, 631, 10.80000019, 292,
-                    8.300000191, 60, 257, 9.5, 108,
-                    8.199999809, 96, 284, 8.800000191, 111,
-                    7.900000095, 83, 603, 9.5, 182,
-                    10.30000019, 130, 686, 8.699999809, 129,
-                    7.400000095, 145, 345, 11.19999981, 158,
-                    9.600000381, 112, 1357, 9.699999809, 186,
-                    9.300000191, 131, 544, 9.600000381, 177,
-                    10.60000038, 80, 205, 9.100000381, 127,
-                    9.699999809, 130, 1264, 9.199999809, 179,
-                    11.60000038, 140, 688, 8.300000191, 80,
-                    8.100000381, 154, 354, 8.399999619, 103,
-                    9.800000191, 118, 1632, 9.399999619, 101,
-                    7.400000095, 94, 348, 9.800000191, 117,
-                    9.399999619, 119, 370, 10.39999962, 88,
-                    11.19999981, 153, 648, 9.899999619, 78,
-                    9.100000381, 116, 366, 9.199999809, 102,
-                    10.5, 97, 540, 10.30000019, 95,
-                    11.89999962, 176, 680, 8.899999619, 80,
-                    8.399999619, 75, 345, 9.600000381, 92,
-                    5, 134, 525, 10.30000019, 126,
-                    9.800000191, 161, 870, 10.39999962, 108,
-                    9.800000191, 111, 669, 9.699999809, 77,
-                    10.80000019, 114, 452, 9.600000381, 60,
-                    10.10000038, 142, 430, 10.69999981, 71,
-                    10.89999962, 238, 822, 10.30000019, 86,
-                    9.199999809, 78, 190, 10.69999981, 93,
-                    8.300000191, 196, 867, 9.600000381, 106,
-                    7.300000191, 125, 969, 10.5, 162,
-                    9.399999619, 82, 499, 7.699999809, 95,
-                    9.399999619, 125, 925, 10.19999981, 91,
-                    9.800000191, 129, 353, 9.899999619, 52,
-                    3.599999905, 84, 288, 8.399999619, 110,
-                    8.399999619, 183, 718, 10.39999962, 69,
-                    10.80000019, 119, 540, 9.199999809, 57,
-                    10.10000038, 180, 668, 13, 106,
-                    9, 82, 347, 8.800000191, 40,
-                    10, 71, 345, 9.199999809, 50,
-                    11.30000019, 118, 463, 7.800000191, 35,
-                    11.30000019, 121, 728, 8.199999809, 86,
-                    12.80000019, 68, 383, 7.400000095, 57,
-                    10, 112, 316, 10.39999962, 57,
-                    6.699999809, 109, 388, 8.899999619, 94
-                };
-
-                final int nobs = 53;
-                final int nvars = 4;
-
-                System.out.println(">>> Create new SparseDistributedMatrix 
inside IgniteThread.");
-                // Create SparseDistributedMatrix, new cache will be created 
automagically.
-                SparseDistributedMatrix distributedMatrix = new 
SparseDistributedMatrix(0, 0,
-                    StorageConstants.ROW_STORAGE_MODE, 
StorageConstants.RANDOM_ACCESS_MODE);
-
-                System.out.println(">>> Create new linear regression object");
-                OLSMultipleLinearRegression regression = new 
OLSMultipleLinearRegression();
-                regression.newSampleData(data, nobs, nvars, distributedMatrix);
-                System.out.println();
-
-                System.out.println(">>> Estimates the regression parameters 
b:");
-                
System.out.println(Arrays.toString(regression.estimateRegressionParameters()));
-
-                System.out.println(">>> Estimates the residuals, ie u = y - 
X*b:");
-                
System.out.println(Arrays.toString(regression.estimateResiduals()));
-
-                System.out.println(">>> Standard errors of the regression 
parameters:");
-                
System.out.println(Arrays.toString(regression.estimateRegressionParametersStandardErrors()));
-
-                System.out.println(">>> Estimates the variance of the 
regression parameters, ie Var(b):");
-                
Tracer.showAscii(regression.estimateRegressionParametersVariance());
-
-                System.out.println(">>> Estimates the standard error of the 
regression:");
-                
System.out.println(regression.estimateRegressionStandardError());
-
-                System.out.println(">>> R-Squared statistic:");
-                System.out.println(regression.calculateRSquared());
-
-                System.out.println(">>> Adjusted R-squared statistic:");
-                System.out.println(regression.calculateAdjustedRSquared());
-
-                System.out.println(">>> Returns the variance of the 
regressand, ie Var(y):");
-                System.out.println(regression.estimateErrorVariance());
-            });
-
-            igniteThread.start();
-
-            igniteThread.join();
-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/ignite/blob/b2060855/examples/src/main/ml/org/apache/ignite/examples/ml/regression/DistributedRegressionModelExample.java
----------------------------------------------------------------------
diff --git a/examples/src/main/ml/org/apache/ignite/examples/ml/regression/DistributedRegressionModelExample.java b/examples/src/main/ml/org/apache/ignite/examples/ml/regression/DistributedRegressionModelExample.java
deleted file mode 100644
index 38de97e..0000000
--- a/examples/src/main/ml/org/apache/ignite/examples/ml/regression/DistributedRegressionModelExample.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.examples.ml.regression;
-
-import org.apache.ignite.Ignite;
-import org.apache.ignite.Ignition;
-import org.apache.ignite.examples.ml.math.matrix.SparseDistributedMatrixExample;
-import org.apache.ignite.ml.math.StorageConstants;
-import org.apache.ignite.ml.math.Tracer;
-import org.apache.ignite.ml.math.Vector;
-import org.apache.ignite.ml.math.impls.matrix.SparseDistributedMatrix;
-import org.apache.ignite.ml.math.impls.vector.SparseDistributedVector;
-import org.apache.ignite.ml.regressions.OLSMultipleLinearRegressionModel;
-import org.apache.ignite.ml.regressions.OLSMultipleLinearRegressionTrainer;
-import org.apache.ignite.thread.IgniteThread;
-
-/**
- * Run linear regression model over distributed matrix.
- *
- * @see OLSMultipleLinearRegressionModel
- */
-public class DistributedRegressionModelExample {
-    /** Run example. */
-    public static void main(String[] args) throws InterruptedException {
-        System.out.println();
-        System.out.println(">>> Linear regression model over sparse 
distributed matrix API usage example started.");
-        // Start ignite grid.
-        try (Ignite ignite = 
Ignition.start("examples/config/example-ignite.xml")) {
-            System.out.println(">>> Ignite grid started.");
-            // Create IgniteThread, we must work with SparseDistributedMatrix 
inside IgniteThread
-            // because we create ignite cache internally.
-            IgniteThread igniteThread = new 
IgniteThread(ignite.configuration().getIgniteInstanceName(),
-                SparseDistributedMatrixExample.class.getSimpleName(), () -> {
-                double[] data = {
-                    8, 78, 284, 9.100000381, 109,
-                    9.300000191, 68, 433, 8.699999809, 144,
-                    7.5, 70, 739, 7.199999809, 113,
-                    8.899999619, 96, 1792, 8.899999619, 97,
-                    10.19999981, 74, 477, 8.300000191, 206,
-                    8.300000191, 111, 362, 10.89999962, 124,
-                    8.800000191, 77, 671, 10, 152,
-                    8.800000191, 168, 636, 9.100000381, 162,
-                    10.69999981, 82, 329, 8.699999809, 150,
-                    11.69999981, 89, 634, 7.599999905, 134,
-                    8.5, 149, 631, 10.80000019, 292,
-                    8.300000191, 60, 257, 9.5, 108,
-                    8.199999809, 96, 284, 8.800000191, 111,
-                    7.900000095, 83, 603, 9.5, 182,
-                    10.30000019, 130, 686, 8.699999809, 129,
-                    7.400000095, 145, 345, 11.19999981, 158,
-                    9.600000381, 112, 1357, 9.699999809, 186,
-                    9.300000191, 131, 544, 9.600000381, 177,
-                    10.60000038, 80, 205, 9.100000381, 127,
-                    9.699999809, 130, 1264, 9.199999809, 179,
-                    11.60000038, 140, 688, 8.300000191, 80,
-                    8.100000381, 154, 354, 8.399999619, 103,
-                    9.800000191, 118, 1632, 9.399999619, 101,
-                    7.400000095, 94, 348, 9.800000191, 117,
-                    9.399999619, 119, 370, 10.39999962, 88,
-                    11.19999981, 153, 648, 9.899999619, 78,
-                    9.100000381, 116, 366, 9.199999809, 102,
-                    10.5, 97, 540, 10.30000019, 95,
-                    11.89999962, 176, 680, 8.899999619, 80,
-                    8.399999619, 75, 345, 9.600000381, 92,
-                    5, 134, 525, 10.30000019, 126,
-                    9.800000191, 161, 870, 10.39999962, 108,
-                    9.800000191, 111, 669, 9.699999809, 77,
-                    10.80000019, 114, 452, 9.600000381, 60,
-                    10.10000038, 142, 430, 10.69999981, 71,
-                    10.89999962, 238, 822, 10.30000019, 86,
-                    9.199999809, 78, 190, 10.69999981, 93,
-                    8.300000191, 196, 867, 9.600000381, 106,
-                    7.300000191, 125, 969, 10.5, 162,
-                    9.399999619, 82, 499, 7.699999809, 95,
-                    9.399999619, 125, 925, 10.19999981, 91,
-                    9.800000191, 129, 353, 9.899999619, 52,
-                    3.599999905, 84, 288, 8.399999619, 110,
-                    8.399999619, 183, 718, 10.39999962, 69,
-                    10.80000019, 119, 540, 9.199999809, 57,
-                    10.10000038, 180, 668, 13, 106,
-                    9, 82, 347, 8.800000191, 40,
-                    10, 71, 345, 9.199999809, 50,
-                    11.30000019, 118, 463, 7.800000191, 35,
-                    11.30000019, 121, 728, 8.199999809, 86,
-                    12.80000019, 68, 383, 7.400000095, 57,
-                    10, 112, 316, 10.39999962, 57,
-                    6.699999809, 109, 388, 8.899999619, 94
-                };
-
-                final int nobs = 53;
-                final int nvars = 4;
-
-                System.out.println(">>> Create new SparseDistributedMatrix 
inside IgniteThread.");
-                // Create SparseDistributedMatrix, new cache will be created 
automagically.
-                SparseDistributedMatrix distributedMatrix = new 
SparseDistributedMatrix(0, 0,
-                    StorageConstants.ROW_STORAGE_MODE, 
StorageConstants.RANDOM_ACCESS_MODE);
-
-                System.out.println(">>> Create new linear regression trainer 
object.");
-                OLSMultipleLinearRegressionTrainer trainer
-                    = new OLSMultipleLinearRegressionTrainer(0, nobs, nvars, 
distributedMatrix);
-                System.out.println(">>> Perform the training to get the 
model.");
-                OLSMultipleLinearRegressionModel mdl = trainer.train(data);
-                System.out.println();
-
-                Vector val = new SparseDistributedVector(nobs).assign((i) -> 
data[i * (nvars + 1)]);
-
-                System.out.println(">>> The input data:");
-                Tracer.showAscii(val);
-
-                System.out.println(">>> Trained model prediction results:");
-                Tracer.showAscii(mdl.apply(val));
-            });
-
-            igniteThread.start();
-
-            igniteThread.join();
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/ignite/blob/b2060855/examples/src/main/ml/org/apache/ignite/examples/ml/regression/linear/DistributedLinearRegressionExampleWithQRTrainer.java
----------------------------------------------------------------------
diff --git a/examples/src/main/ml/org/apache/ignite/examples/ml/regression/linear/DistributedLinearRegressionExampleWithQRTrainer.java b/examples/src/main/ml/org/apache/ignite/examples/ml/regression/linear/DistributedLinearRegressionExampleWithQRTrainer.java
new file mode 100644
index 0000000..98ff2a2
--- /dev/null
+++ b/examples/src/main/ml/org/apache/ignite/examples/ml/regression/linear/DistributedLinearRegressionExampleWithQRTrainer.java
@@ -0,0 +1,136 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.examples.ml.regression.linear;
+
+import java.util.Arrays;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.examples.ml.math.matrix.SparseDistributedMatrixExample;
+import org.apache.ignite.ml.Trainer;
+import org.apache.ignite.ml.math.Matrix;
+import org.apache.ignite.ml.math.Vector;
+import org.apache.ignite.ml.math.impls.matrix.SparseDistributedMatrix;
+import org.apache.ignite.ml.math.impls.vector.SparseDistributedVector;
+import org.apache.ignite.ml.regressions.linear.LinearRegressionModel;
+import org.apache.ignite.ml.regressions.linear.LinearRegressionQRTrainer;
+import org.apache.ignite.thread.IgniteThread;
+
+/**
+ * Run linear regression model over distributed matrix.
+ *
+ * @see LinearRegressionQRTrainer
+ */
+public class DistributedLinearRegressionExampleWithQRTrainer {
+    /** */
+    private static final double[][] data = {
+        {8, 78, 284, 9.100000381, 109},
+        {9.300000191, 68, 433, 8.699999809, 144},
+        {7.5, 70, 739, 7.199999809, 113},
+        {8.899999619, 96, 1792, 8.899999619, 97},
+        {10.19999981, 74, 477, 8.300000191, 206},
+        {8.300000191, 111, 362, 10.89999962, 124},
+        {8.800000191, 77, 671, 10, 152},
+        {8.800000191, 168, 636, 9.100000381, 162},
+        {10.69999981, 82, 329, 8.699999809, 150},
+        {11.69999981, 89, 634, 7.599999905, 134},
+        {8.5, 149, 631, 10.80000019, 292},
+        {8.300000191, 60, 257, 9.5, 108},
+        {8.199999809, 96, 284, 8.800000191, 111},
+        {7.900000095, 83, 603, 9.5, 182},
+        {10.30000019, 130, 686, 8.699999809, 129},
+        {7.400000095, 145, 345, 11.19999981, 158},
+        {9.600000381, 112, 1357, 9.699999809, 186},
+        {9.300000191, 131, 544, 9.600000381, 177},
+        {10.60000038, 80, 205, 9.100000381, 127},
+        {9.699999809, 130, 1264, 9.199999809, 179},
+        {11.60000038, 140, 688, 8.300000191, 80},
+        {8.100000381, 154, 354, 8.399999619, 103},
+        {9.800000191, 118, 1632, 9.399999619, 101},
+        {7.400000095, 94, 348, 9.800000191, 117},
+        {9.399999619, 119, 370, 10.39999962, 88},
+        {11.19999981, 153, 648, 9.899999619, 78},
+        {9.100000381, 116, 366, 9.199999809, 102},
+        {10.5, 97, 540, 10.30000019, 95},
+        {11.89999962, 176, 680, 8.899999619, 80},
+        {8.399999619, 75, 345, 9.600000381, 92},
+        {5, 134, 525, 10.30000019, 126},
+        {9.800000191, 161, 870, 10.39999962, 108},
+        {9.800000191, 111, 669, 9.699999809, 77},
+        {10.80000019, 114, 452, 9.600000381, 60},
+        {10.10000038, 142, 430, 10.69999981, 71},
+        {10.89999962, 238, 822, 10.30000019, 86},
+        {9.199999809, 78, 190, 10.69999981, 93},
+        {8.300000191, 196, 867, 9.600000381, 106},
+        {7.300000191, 125, 969, 10.5, 162},
+        {9.399999619, 82, 499, 7.699999809, 95},
+        {9.399999619, 125, 925, 10.19999981, 91},
+        {9.800000191, 129, 353, 9.899999619, 52},
+        {3.599999905, 84, 288, 8.399999619, 110},
+        {8.399999619, 183, 718, 10.39999962, 69},
+        {10.80000019, 119, 540, 9.199999809, 57},
+        {10.10000038, 180, 668, 13, 106},
+        {9, 82, 347, 8.800000191, 40},
+        {10, 71, 345, 9.199999809, 50},
+        {11.30000019, 118, 463, 7.800000191, 35},
+        {11.30000019, 121, 728, 8.199999809, 86},
+        {12.80000019, 68, 383, 7.400000095, 57},
+        {10, 112, 316, 10.39999962, 57},
+        {6.699999809, 109, 388, 8.899999619, 94}
+    };
+
+    /** Run example. */
+    public static void main(String[] args) throws InterruptedException {
+        System.out.println();
+        System.out.println(">>> Linear regression model over sparse 
distributed matrix API usage example started.");
+        // Start ignite grid.
+        try (Ignite ignite = 
Ignition.start("examples/config/example-ignite.xml")) {
+            System.out.println(">>> Ignite grid started.");
+            // Create IgniteThread, we must work with SparseDistributedMatrix 
inside IgniteThread
+            // because we create ignite cache internally.
+            IgniteThread igniteThread = new 
IgniteThread(ignite.configuration().getIgniteInstanceName(),
+                SparseDistributedMatrixExample.class.getSimpleName(), () -> {
+
+                // Create SparseDistributedMatrix, new cache will be created automagically.
+                System.out.println(">>> Create new SparseDistributedMatrix inside IgniteThread.");
+                SparseDistributedMatrix distributedMatrix = new SparseDistributedMatrix(data);
+
+                System.out.println(">>> Create new linear regression trainer object.");
+                Trainer<LinearRegressionModel, Matrix> trainer = new LinearRegressionQRTrainer();
+
+                System.out.println(">>> Perform the training to get the model.");
+                LinearRegressionModel model = trainer.train(distributedMatrix);
+                System.out.println(">>> Linear regression model: " + model);
+
+                System.out.println(">>> ---------------------------------");
+                System.out.println(">>> | Prediction\t| Ground Truth\t|");
+                System.out.println(">>> ---------------------------------");
+                for (double[] observation : data) {
+                    Vector inputs = new SparseDistributedVector(Arrays.copyOfRange(observation, 1, observation.length));
+                    double prediction = model.apply(inputs);
+                    double groundTruth = observation[0];
+                    System.out.printf(">>> | %.4f\t\t| %.4f\t\t|\n", prediction, groundTruth);
+                }
+                System.out.println(">>> ---------------------------------");
+            });
+
+            igniteThread.start();
+
+            igniteThread.join();
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/b2060855/examples/src/main/ml/org/apache/ignite/examples/ml/regression/linear/DistributedLinearRegressionExampleWithSGDTrainer.java
----------------------------------------------------------------------
diff --git a/examples/src/main/ml/org/apache/ignite/examples/ml/regression/linear/DistributedLinearRegressionExampleWithSGDTrainer.java b/examples/src/main/ml/org/apache/ignite/examples/ml/regression/linear/DistributedLinearRegressionExampleWithSGDTrainer.java
new file mode 100644
index 0000000..3f61762
--- /dev/null
+++ b/examples/src/main/ml/org/apache/ignite/examples/ml/regression/linear/DistributedLinearRegressionExampleWithSGDTrainer.java
@@ -0,0 +1,137 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.examples.ml.regression.linear;
+
+import java.util.Arrays;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.examples.ml.math.matrix.SparseDistributedMatrixExample;
+import org.apache.ignite.ml.Trainer;
+import org.apache.ignite.ml.math.Matrix;
+import org.apache.ignite.ml.math.Vector;
+import org.apache.ignite.ml.math.impls.matrix.SparseDistributedMatrix;
+import org.apache.ignite.ml.math.impls.vector.SparseDistributedVector;
+import org.apache.ignite.ml.regressions.linear.LinearRegressionModel;
+import org.apache.ignite.ml.regressions.linear.LinearRegressionQRTrainer;
+import org.apache.ignite.ml.regressions.linear.LinearRegressionSGDTrainer;
+import org.apache.ignite.thread.IgniteThread;
+
+/**
+ * Run linear regression model over distributed matrix.
+ *
+ * @see LinearRegressionSGDTrainer
+ */
+public class DistributedLinearRegressionExampleWithSGDTrainer {
+    /** */
+    private static final double[][] data = {
+        {8, 78, 284, 9.100000381, 109},
+        {9.300000191, 68, 433, 8.699999809, 144},
+        {7.5, 70, 739, 7.199999809, 113},
+        {8.899999619, 96, 1792, 8.899999619, 97},
+        {10.19999981, 74, 477, 8.300000191, 206},
+        {8.300000191, 111, 362, 10.89999962, 124},
+        {8.800000191, 77, 671, 10, 152},
+        {8.800000191, 168, 636, 9.100000381, 162},
+        {10.69999981, 82, 329, 8.699999809, 150},
+        {11.69999981, 89, 634, 7.599999905, 134},
+        {8.5, 149, 631, 10.80000019, 292},
+        {8.300000191, 60, 257, 9.5, 108},
+        {8.199999809, 96, 284, 8.800000191, 111},
+        {7.900000095, 83, 603, 9.5, 182},
+        {10.30000019, 130, 686, 8.699999809, 129},
+        {7.400000095, 145, 345, 11.19999981, 158},
+        {9.600000381, 112, 1357, 9.699999809, 186},
+        {9.300000191, 131, 544, 9.600000381, 177},
+        {10.60000038, 80, 205, 9.100000381, 127},
+        {9.699999809, 130, 1264, 9.199999809, 179},
+        {11.60000038, 140, 688, 8.300000191, 80},
+        {8.100000381, 154, 354, 8.399999619, 103},
+        {9.800000191, 118, 1632, 9.399999619, 101},
+        {7.400000095, 94, 348, 9.800000191, 117},
+        {9.399999619, 119, 370, 10.39999962, 88},
+        {11.19999981, 153, 648, 9.899999619, 78},
+        {9.100000381, 116, 366, 9.199999809, 102},
+        {10.5, 97, 540, 10.30000019, 95},
+        {11.89999962, 176, 680, 8.899999619, 80},
+        {8.399999619, 75, 345, 9.600000381, 92},
+        {5, 134, 525, 10.30000019, 126},
+        {9.800000191, 161, 870, 10.39999962, 108},
+        {9.800000191, 111, 669, 9.699999809, 77},
+        {10.80000019, 114, 452, 9.600000381, 60},
+        {10.10000038, 142, 430, 10.69999981, 71},
+        {10.89999962, 238, 822, 10.30000019, 86},
+        {9.199999809, 78, 190, 10.69999981, 93},
+        {8.300000191, 196, 867, 9.600000381, 106},
+        {7.300000191, 125, 969, 10.5, 162},
+        {9.399999619, 82, 499, 7.699999809, 95},
+        {9.399999619, 125, 925, 10.19999981, 91},
+        {9.800000191, 129, 353, 9.899999619, 52},
+        {3.599999905, 84, 288, 8.399999619, 110},
+        {8.399999619, 183, 718, 10.39999962, 69},
+        {10.80000019, 119, 540, 9.199999809, 57},
+        {10.10000038, 180, 668, 13, 106},
+        {9, 82, 347, 8.800000191, 40},
+        {10, 71, 345, 9.199999809, 50},
+        {11.30000019, 118, 463, 7.800000191, 35},
+        {11.30000019, 121, 728, 8.199999809, 86},
+        {12.80000019, 68, 383, 7.400000095, 57},
+        {10, 112, 316, 10.39999962, 57},
+        {6.699999809, 109, 388, 8.899999619, 94}
+    };
+
+    /** Run example. */
+    public static void main(String[] args) throws InterruptedException {
+        System.out.println();
+        System.out.println(">>> Linear regression model over sparse 
distributed matrix API usage example started.");
+        // Start ignite grid.
+        try (Ignite ignite = 
Ignition.start("examples/config/example-ignite.xml")) {
+            System.out.println(">>> Ignite grid started.");
+            // Create IgniteThread, we must work with SparseDistributedMatrix 
inside IgniteThread
+            // because we create ignite cache internally.
+            IgniteThread igniteThread = new 
IgniteThread(ignite.configuration().getIgniteInstanceName(),
+                SparseDistributedMatrixExample.class.getSimpleName(), () -> {
+
+                // Create SparseDistributedMatrix, new cache will be created automagically.
+                System.out.println(">>> Create new SparseDistributedMatrix inside IgniteThread.");
+                SparseDistributedMatrix distributedMatrix = new SparseDistributedMatrix(data);
+
+                System.out.println(">>> Create new linear regression trainer object.");
+                Trainer<LinearRegressionModel, Matrix> trainer = new LinearRegressionSGDTrainer(100_000, 1e-12);
+
+                System.out.println(">>> Perform the training to get the model.");
+                LinearRegressionModel model = trainer.train(distributedMatrix);
+                System.out.println(">>> Linear regression model: " + model);
+
+                System.out.println(">>> ---------------------------------");
+                System.out.println(">>> | Prediction\t| Ground Truth\t|");
+                System.out.println(">>> ---------------------------------");
+                for (double[] observation : data) {
+                    Vector inputs = new SparseDistributedVector(Arrays.copyOfRange(observation, 1, observation.length));
+                    double prediction = model.apply(inputs);
+                    double groundTruth = observation[0];
+                    System.out.printf(">>> | %.4f\t\t| %.4f\t\t|\n", prediction, groundTruth);
+                }
+                System.out.println(">>> ---------------------------------");
+            });
+
+            igniteThread.start();
+
+            igniteThread.join();
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/b2060855/examples/src/main/ml/org/apache/ignite/examples/ml/regression/linear/package-info.java
----------------------------------------------------------------------
diff --git a/examples/src/main/ml/org/apache/ignite/examples/ml/regression/linear/package-info.java b/examples/src/main/ml/org/apache/ignite/examples/ml/regression/linear/package-info.java
new file mode 100644
index 0000000..d0441a4
--- /dev/null
+++ b/examples/src/main/ml/org/apache/ignite/examples/ml/regression/linear/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * <!-- Package description. -->
+ * ML linear regression examples.
+ */
+package org.apache.ignite.examples.ml.regression.linear;

http://git-wip-us.apache.org/repos/asf/ignite/blob/b2060855/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/matrix/SparseBlockDistributedMatrix.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/matrix/SparseBlockDistributedMatrix.java b/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/matrix/SparseBlockDistributedMatrix.java
index 2d822d2..d387d21 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/matrix/SparseBlockDistributedMatrix.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/matrix/SparseBlockDistributedMatrix.java
@@ -38,7 +38,6 @@ import org.apache.ignite.ml.math.functions.IgniteDoubleFunction;
 import org.apache.ignite.ml.math.impls.storage.matrix.BlockMatrixStorage;
 import org.apache.ignite.ml.math.impls.storage.matrix.BlockVectorStorage;
 import org.apache.ignite.ml.math.impls.vector.SparseBlockDistributedVector;
-import org.apache.ignite.ml.math.impls.vector.SparseDistributedVector;
 import org.apache.ignite.ml.math.impls.vector.VectorBlockEntry;
 
 /**
@@ -229,7 +228,7 @@ public class SparseBlockDistributedMatrix extends AbstractMatrix implements Stor
     @Override public Vector getCol(int col) {
         checkColumnIndex(col);
 
-        Vector res = new SparseDistributedVector(rowSize());
+        Vector res = new SparseBlockDistributedVector(rowSize());
 
         for (int i = 0; i < rowSize(); i++)
             res.setX(i, getX(i, col));
@@ -240,7 +239,7 @@ public class SparseBlockDistributedMatrix extends AbstractMatrix implements Stor
     @Override public Vector getRow(int row) {
         checkRowIndex(row);
 
-        Vector res = new SparseDistributedVector(columnSize());
+        Vector res = new SparseBlockDistributedVector(columnSize());
 
         for (int i = 0; i < columnSize(); i++)
             res.setX(i, getX(row, i));

http://git-wip-us.apache.org/repos/asf/ignite/blob/b2060855/modules/ml/src/main/java/org/apache/ignite/ml/optimization/BarzilaiBorweinUpdater.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/optimization/BarzilaiBorweinUpdater.java b/modules/ml/src/main/java/org/apache/ignite/ml/optimization/BarzilaiBorweinUpdater.java
new file mode 100644
index 0000000..2190d86
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/optimization/BarzilaiBorweinUpdater.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.optimization;
+
+import org.apache.ignite.ml.math.Vector;
+
+/**
+ * Updater based on the Barzilai-Borwein method, which adaptively chooses the learning rate.
+ */
+public class BarzilaiBorweinUpdater implements Updater {
+    /** */
+    private static final long serialVersionUID = 5046575099408708472L;
+
+    /**
+     * Learning rate used on the first iteration.
+     */
+    private static final double INITIAL_LEARNING_RATE = 1.0;
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override public Vector compute(Vector oldWeights, Vector oldGradient, Vector weights, Vector gradient, int iteration) {
+        double learningRate = computeLearningRate(oldWeights != null ? oldWeights.copy() : null, oldGradient != null ? oldGradient.copy() : null, weights.copy(), gradient.copy());
+        return weights.copy().minus(gradient.copy().times(learningRate));
+    }
+
+    /** Computes the Barzilai-Borwein learning rate from the previous and current weights and gradients. */
+    private double computeLearningRate(Vector oldWeights, Vector oldGradient, Vector weights, Vector gradient) {
+        if (oldWeights == null || oldGradient == null)
+            return INITIAL_LEARNING_RATE;
+        else {
+            Vector gradientDiff = gradient.minus(oldGradient);
+            return weights.minus(oldWeights).dot(gradientDiff) / Math.pow(gradientDiff.kNorm(2.0), 2.0);
+        }
+    }
+}
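
The step size produced by computeLearningRate above is the classical
Barzilai-Borwein (BB1) rule; writing w_k for the current weights and g_k for
the current gradient, the code evaluates

    \gamma_k = \frac{(w_k - w_{k-1})^\top (g_k - g_{k-1})}{\lVert g_k - g_{k-1} \rVert_2^2}

and falls back to INITIAL_LEARNING_RATE = 1.0 on the first iteration, when no
previous weights or gradient are available.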

http://git-wip-us.apache.org/repos/asf/ignite/blob/b2060855/modules/ml/src/main/java/org/apache/ignite/ml/optimization/GradientDescent.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/optimization/GradientDescent.java b/modules/ml/src/main/java/org/apache/ignite/ml/optimization/GradientDescent.java
new file mode 100644
index 0000000..f02bcb3
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/optimization/GradientDescent.java
@@ -0,0 +1,201 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.optimization;
+
+import org.apache.ignite.ml.math.Matrix;
+import org.apache.ignite.ml.math.StorageConstants;
+import org.apache.ignite.ml.math.Vector;
+import org.apache.ignite.ml.math.functions.IgniteFunction;
+import org.apache.ignite.ml.math.impls.matrix.SparseDistributedMatrix;
+import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector;
+import org.apache.ignite.ml.math.impls.vector.FunctionVector;
+import org.apache.ignite.ml.optimization.util.SparseDistributedMatrixMapReducer;
+
+/**
+ * Gradient descent optimizer.
+ */
+public class GradientDescent {
+    /**
+     * Function which computes the gradient of the loss function at any given point.
+     */
+    private final GradientFunction lossGradient;
+
+    /**
+     * Weights updater applied on every gradient descent step to decide how weights should be changed.
+     */
+    private final Updater updater;
+
+    /**
+     * Max number of gradient descent iterations.
+     */
+    private int maxIterations = 1000;
+
+    /**
+     * Convergence tolerance, the condition which decides iteration termination.
+     */
+    private double convergenceTol = 1e-8;
+
+    /**
+     * Creates a new gradient descent instance based on the given loss function gradient and updater.
+     *
+     * @param lossGradient Function which computes the gradient of the loss function at any given point
+     * @param updater Weights updater applied on every gradient descent step to decide how weights should be changed
+     */
+    public GradientDescent(GradientFunction lossGradient, Updater updater) {
+        this.lossGradient = lossGradient;
+        this.updater = updater;
+    }
+
+    /**
+     * Sets max number of gradient descent iterations.
+     *
+     * @param maxIterations Max number of gradient descent iterations
+     * @return This gradient descent instance
+     */
+    public GradientDescent withMaxIterations(int maxIterations) {
+        assert maxIterations >= 0;
+
+        this.maxIterations = maxIterations;
+
+        return this;
+    }
+
+    /**
+     * Sets convergence tolerance.
+     *
+     * @param convergenceTol Condition which decides iteration termination
+     * @return This gradient descent instance
+     */
+    public GradientDescent withConvergenceTol(double convergenceTol) {
+        assert convergenceTol >= 0;
+
+        this.convergenceTol = convergenceTol;
+
+        return this;
+    }
+
+    /**
+     * Computes the point where the loss function takes its minimal value.
+     *
+     * @param data Input parameters of the loss function
+     * @param initWeights Initial weights
+     * @return Point where the loss function takes its minimal value
+     */
+    public Vector optimize(Matrix data, Vector initWeights) {
+        Vector weights = initWeights, oldWeights = null, oldGradient = null;
+        IgniteFunction<Vector, Vector> gradientFunction = getLossGradientFunction(data);
+
+        for (int iteration = 0; iteration < maxIterations; iteration++) {
+            Vector gradient = gradientFunction.apply(weights);
+            Vector newWeights = updater.compute(oldWeights, oldGradient, weights, gradient, iteration);
+
+            if (isConverged(weights, newWeights))
+                return newWeights;
+            else {
+                oldGradient = gradient;
+                oldWeights = weights;
+                weights = newWeights;
+            }
+        }
+        return weights;
+    }
+
+    /**
+     * Calculates the gradient based on a distributed matrix using {@link SparseDistributedMatrixMapReducer}.
+     *
+     * @param data Distributed matrix
+     * @param weights Point to calculate gradient
+     * @return Gradient
+     */
+    private Vector calculateDistributedGradient(SparseDistributedMatrix data, Vector weights) {
+        SparseDistributedMatrixMapReducer mapReducer = new SparseDistributedMatrixMapReducer(data);
+        return mapReducer.mapReduce(
+            (matrix, args) -> {
+                Matrix inputs = extractInputs(matrix);
+                Vector groundTruth = extractGroundTruth(matrix);
+
+                return lossGradient.compute(inputs, groundTruth, args);
+            },
+            gradients -> {
+                int cnt = 0;
+                Vector resGradient = new DenseLocalOnHeapVector(data.columnSize());
+
+                for (Vector gradient : gradients) {
+                    if (gradient != null) {
+                        resGradient = resGradient.plus(gradient);
+                        cnt++;
+                    }
+                }
+                return resGradient.divide(cnt);
+            },
+            weights);
+    }
+
+    /**
+     * Tests if gradient descent process converged.
+     *
+     * @param weights Weights
+     * @param newWeights New weights
+     * @return {@code true} if process has converged, otherwise {@code false}
+     */
+    private boolean isConverged(Vector weights, Vector newWeights) {
+        if (convergenceTol == 0)
+            return false;
+        else {
+            double solutionVectorDiff = weights.minus(newWeights).kNorm(2.0);
+            return solutionVectorDiff < convergenceTol * Math.max(newWeights.kNorm(2.0), 1.0);
+        }
+    }
+
+    /**
+     * Extracts first column with ground truth from the data set matrix.
+     *
+     * @param data data to build model
+     * @return Ground truth vector
+     */
+    private Vector extractGroundTruth(Matrix data) {
+        return data.getCol(0);
+    }
+
+    /**
+     * Extracts all inputs from data set matrix and updates matrix so that first column contains value 1.0.
+     *
+     * @param data data to build model
+     * @return Inputs matrix
+     */
+    private Matrix extractInputs(Matrix data) {
+        data = data.copy();
+        data.assignColumn(0, new FunctionVector(data.rowSize(), row -> 1.0));
+        return data;
+    }
+
+    /** Curries the gradient function by fixing the data matrix. */
+    private IgniteFunction<Vector, Vector> getLossGradientFunction(Matrix data) {
+        if (data instanceof SparseDistributedMatrix) {
+            SparseDistributedMatrix distributedMatrix = (SparseDistributedMatrix) data;
+
+            if (distributedMatrix.getStorage().storageMode() == StorageConstants.ROW_STORAGE_MODE)
+                return weights -> calculateDistributedGradient(distributedMatrix, weights);
+        }
+
+        Matrix inputs = extractInputs(data);
+        Vector groundTruth = extractGroundTruth(data);
+
+        return weights -> lossGradient.compute(inputs, groundTruth, weights);
+    }
+}
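
A minimal local usage sketch of the optimizer above. The class and method
names are the ones introduced by this commit; the toy data set and the 0.01
learning rate are illustrative assumptions, not part of the patch.
GradientDescent expects column 0 of the data matrix to hold the ground truth
and the remaining columns to hold the features:

    import org.apache.ignite.ml.math.Vector;
    import org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix;
    import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector;
    import org.apache.ignite.ml.optimization.GradientDescent;
    import org.apache.ignite.ml.optimization.LeastSquaresGradientFunction;
    import org.apache.ignite.ml.optimization.SimpleUpdater;

    public class GradientDescentSketch {
        public static void main(String[] args) {
            // Each row is {y, x}; the optimizer replaces column 0 with the intercept term 1.0.
            DenseLocalOnHeapMatrix data = new DenseLocalOnHeapMatrix(new double[][] {
                {2.0, 1.0},
                {4.0, 2.0},
                {6.0, 3.0}
            });

            GradientDescent gradientDescent =
                new GradientDescent(new LeastSquaresGradientFunction(), new SimpleUpdater(0.01))
                    .withMaxIterations(10_000)
                    .withConvergenceTol(1e-8);

            // Initial weights: one per column (intercept plus one feature), starting at zero.
            Vector weights = gradientDescent.optimize(data, new DenseLocalOnHeapVector(data.columnSize()));

            // Expected to approach [0, 2], since y = 2x exactly.
            System.out.println("Weights: " + weights);
        }
    }

Because the matrix is local, optimize takes the in-memory gradient path; only
a SparseDistributedMatrix with row storage mode goes through
SparseDistributedMatrixMapReducer.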

http://git-wip-us.apache.org/repos/asf/ignite/blob/b2060855/modules/ml/src/main/java/org/apache/ignite/ml/optimization/GradientFunction.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/optimization/GradientFunction.java b/modules/ml/src/main/java/org/apache/ignite/ml/optimization/GradientFunction.java
new file mode 100644
index 0000000..7dc6674
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/optimization/GradientFunction.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.optimization;
+
+import java.io.Serializable;
+import org.apache.ignite.ml.math.Matrix;
+import org.apache.ignite.ml.math.Vector;
+
+/**
+ * Function which computes gradient of the loss function at any given point.
+ */
+@FunctionalInterface
+public interface GradientFunction extends Serializable {
+    /** Computes the gradient of the loss function over {@code inputs} and {@code groundTruth} at the given point. */
+    Vector compute(Matrix inputs, Vector groundTruth, Vector point);
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/b2060855/modules/ml/src/main/java/org/apache/ignite/ml/optimization/LeastSquaresGradientFunction.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/optimization/LeastSquaresGradientFunction.java b/modules/ml/src/main/java/org/apache/ignite/ml/optimization/LeastSquaresGradientFunction.java
new file mode 100644
index 0000000..4d90e3b
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/optimization/LeastSquaresGradientFunction.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.optimization;
+
+import org.apache.ignite.ml.math.Matrix;
+import org.apache.ignite.ml.math.Vector;
+
+/**
+ * Function which computes the gradient of the least squares loss function.
+ */
+public class LeastSquaresGradientFunction implements GradientFunction {
+    /**
+     * {@inheritDoc}
+     */
+    @Override public Vector compute(Matrix inputs, Vector groundTruth, Vector pnt) {
+        return inputs.transpose().times(inputs.times(pnt).minus(groundTruth));
+    }
+}
\ No newline at end of file
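
In matrix notation, the gradient returned by compute above is the usual least
squares gradient, up to the constant factor 2 that the updaters absorb into
the learning rate:

    \nabla_w L(w) = X^\top (X w - y)

where X is the inputs matrix, y the ground truth vector and w the point at
which the gradient is evaluated.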

http://git-wip-us.apache.org/repos/asf/ignite/blob/b2060855/modules/ml/src/main/java/org/apache/ignite/ml/optimization/SimpleUpdater.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/optimization/SimpleUpdater.java b/modules/ml/src/main/java/org/apache/ignite/ml/optimization/SimpleUpdater.java
new file mode 100644
index 0000000..0f6d520
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/optimization/SimpleUpdater.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.optimization;
+
+import org.apache.ignite.ml.math.Vector;
+
+/**
+ * Simple updater with fixed learning rate which doesn't guarantee convergence.
+ */
+public class SimpleUpdater implements Updater {
+    /** */
+    private static final long serialVersionUID = 6417716224818162225L;
+
+    /** Fixed learning rate. */
+    private final double learningRate;
+
+    /** Constructs the updater with the given fixed positive learning rate. */
+    public SimpleUpdater(double learningRate) {
+        assert learningRate > 0;
+
+        this.learningRate = learningRate;
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override public Vector compute(Vector oldWeights, Vector oldGradient, Vector weights, Vector gradient, int iteration) {
+        return weights.minus(gradient.times(learningRate));
+    }
+}
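
The update computed above is plain fixed-rate gradient descent,

    w_{k+1} = w_k - \eta g_k,

where \eta is the configured learning rate. Unlike BarzilaiBorweinUpdater,
the rate never adapts, so convergence depends on choosing \eta small enough
for the data at hand.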

http://git-wip-us.apache.org/repos/asf/ignite/blob/b2060855/modules/ml/src/main/java/org/apache/ignite/ml/optimization/Updater.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/optimization/Updater.java b/modules/ml/src/main/java/org/apache/ignite/ml/optimization/Updater.java
new file mode 100644
index 0000000..83405d7
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/optimization/Updater.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.optimization;
+
+import java.io.Serializable;
+import org.apache.ignite.ml.math.Vector;
+
+/**
+ * Weights updater applied on every gradient descent step to decide how weights should be changed.
+ */
+@FunctionalInterface
+public interface Updater extends Serializable {
+    /** Computes new weights based on the previous and current weights and gradients. */
+    Vector compute(Vector oldWeights, Vector oldGradient, Vector weights, Vector gradient, int iteration);
+}
\ No newline at end of file
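
Since Updater is a @FunctionalInterface, custom step rules can also be passed
to GradientDescent as lambdas. A hypothetical sketch (the decaying schedule is
purely illustrative, not part of this commit):

    // Hypothetical step size decaying as 0.1 / (iteration + 1).
    Updater decaying = (oldWeights, oldGradient, weights, gradient, iteration) ->
        weights.minus(gradient.times(0.1 / (iteration + 1)));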

http://git-wip-us.apache.org/repos/asf/ignite/blob/b2060855/modules/ml/src/main/java/org/apache/ignite/ml/optimization/package-info.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/optimization/package-info.java b/modules/ml/src/main/java/org/apache/ignite/ml/optimization/package-info.java
new file mode 100644
index 0000000..96b0acf
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/optimization/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * <!-- Package description. -->
+ * Contains implementations of optimization algorithms and related classes.
+ */
+package org.apache.ignite.ml.optimization;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ignite/blob/b2060855/modules/ml/src/main/java/org/apache/ignite/ml/optimization/util/SparseDistributedMatrixMapReducer.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/optimization/util/SparseDistributedMatrixMapReducer.java b/modules/ml/src/main/java/org/apache/ignite/ml/optimization/util/SparseDistributedMatrixMapReducer.java
new file mode 100644
index 0000000..7a5f90b
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/optimization/util/SparseDistributedMatrixMapReducer.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.optimization.util;
+
+import java.util.Collection;
+import java.util.Map;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.cache.affinity.Affinity;
+import org.apache.ignite.cluster.ClusterNode;
+import org.apache.ignite.ml.math.Matrix;
+import org.apache.ignite.ml.math.distributed.keys.RowColMatrixKey;
+import org.apache.ignite.ml.math.functions.IgniteBiFunction;
+import org.apache.ignite.ml.math.functions.IgniteFunction;
+import org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix;
+import org.apache.ignite.ml.math.impls.matrix.SparseDistributedMatrix;
+import org.apache.ignite.ml.math.impls.storage.matrix.SparseDistributedMatrixStorage;
+
+/**
+ * Wrapper of {@link SparseDistributedMatrix} which allows performing a computation on every node containing a part of
+ * the distributed matrix, collecting the results and then reducing them.
+ */
+public class SparseDistributedMatrixMapReducer {
+    /** */
+    private final SparseDistributedMatrix distributedMatrix;
+
+    /** */
+    public SparseDistributedMatrixMapReducer(
+        SparseDistributedMatrix distributedMatrix) {
+        this.distributedMatrix = distributedMatrix;
+    }
+
+    /** Runs {@code mapper} on every node holding a part of the matrix and reduces the collected results. */
+    public <R, T> R mapReduce(IgniteBiFunction<Matrix, T, R> mapper, IgniteFunction<Collection<R>, R> reducer, T args) {
+        Ignite ignite = Ignition.localIgnite();
+        SparseDistributedMatrixStorage storage = (SparseDistributedMatrixStorage)distributedMatrix.getStorage();
+
+        int colSize = distributedMatrix.columnSize();
+
+        Collection<R> results = ignite
+            .compute(ignite.cluster().forDataNodes(storage.cacheName()))
+            .broadcast(arguments -> {
+                Ignite locIgnite = Ignition.localIgnite();
+
+                Affinity<RowColMatrixKey> affinity = locIgnite.affinity(storage.cacheName());
+                ClusterNode locNode = locIgnite.cluster().localNode();
+
+                Map<ClusterNode, Collection<RowColMatrixKey>> keys = affinity.mapKeysToNodes(storage.getAllKeys());
+                Collection<RowColMatrixKey> locKeys = keys.get(locNode);
+
+                if (locKeys != null) {
+                    int idx = 0;
+                    Matrix locMatrix = new DenseLocalOnHeapMatrix(locKeys.size(), colSize);
+
+                    for (RowColMatrixKey key : locKeys) {
+                        Map<Integer, Double> row = storage.cache().get(key);
+
+                        for (Map.Entry<Integer,Double> cell : row.entrySet())
+                            locMatrix.set(idx, cell.getKey(), cell.getValue());
+
+                        idx++;
+                    }
+                    return mapper.apply(locMatrix, arguments);
+                }
+                return null;
+            }, args);
+        return reducer.apply(results);
+    }
+}
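
A usage sketch for the map-reducer above (a fragment assumed to run inside an
Ignite node that already holds a SparseDistributedMatrix named matrix; the
matrix construction is elided). Note that the reducer must tolerate null
results, since the mapper closure returns null on nodes that store no part of
the matrix:

    SparseDistributedMatrixMapReducer mapReducer = new SparseDistributedMatrixMapReducer(matrix);

    // Map: row count of the local block; reduce: sum the per-node counts,
    // skipping nulls from nodes without local keys.
    Integer totalRows = mapReducer.mapReduce(
        (locMatrix, args) -> locMatrix.rowSize(),
        results -> {
            int sum = 0;
            for (Integer cnt : results)
                if (cnt != null)
                    sum += cnt;
            return sum;
        },
        null);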

http://git-wip-us.apache.org/repos/asf/ignite/blob/b2060855/modules/ml/src/main/java/org/apache/ignite/ml/optimization/util/package-info.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/optimization/util/package-info.java b/modules/ml/src/main/java/org/apache/ignite/ml/optimization/util/package-info.java
new file mode 100644
index 0000000..cb01ab6
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/optimization/util/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * <!-- Package description. -->
+ * Contains utility classes used in the optimization package.
+ */
+package org.apache.ignite.ml.optimization.util;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ignite/blob/b2060855/modules/ml/src/main/java/org/apache/ignite/ml/regressions/AbstractMultipleLinearRegression.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/regressions/AbstractMultipleLinearRegression.java b/modules/ml/src/main/java/org/apache/ignite/ml/regressions/AbstractMultipleLinearRegression.java
deleted file mode 100644
index 5bc92c9..0000000
--- a/modules/ml/src/main/java/org/apache/ignite/ml/regressions/AbstractMultipleLinearRegression.java
+++ /dev/null
@@ -1,378 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.ml.regressions;
-
-import org.apache.ignite.ml.math.Matrix;
-import org.apache.ignite.ml.math.Vector;
-import org.apache.ignite.ml.math.exceptions.CardinalityException;
-import org.apache.ignite.ml.math.exceptions.InsufficientDataException;
-import org.apache.ignite.ml.math.exceptions.MathIllegalArgumentException;
-import org.apache.ignite.ml.math.exceptions.NoDataException;
-import org.apache.ignite.ml.math.exceptions.NonSquareMatrixException;
-import org.apache.ignite.ml.math.exceptions.NullArgumentException;
-import org.apache.ignite.ml.math.functions.Functions;
-import org.apache.ignite.ml.math.util.MatrixUtil;
-
-/**
- * This class is based on the corresponding class from Apache Common Math lib.
- * Abstract base class for implementations of MultipleLinearRegression.
- */
-public abstract class AbstractMultipleLinearRegression implements MultipleLinearRegression {
-    /** X sample data. */
-    private Matrix xMatrix;
-
-    /** Y sample data. */
-    private Vector yVector;
-
-    /** Whether or not the regression model includes an intercept.  True means no intercept. */
-    private boolean noIntercept = false;
-
-    /**
-     * @return the X sample data.
-     */
-    protected Matrix getX() {
-        return xMatrix;
-    }
-
-    /**
-     * @return the Y sample data.
-     */
-    protected Vector getY() {
-        return yVector;
-    }
-
-    /**
-     * @return true if the model has no intercept term; false otherwise
-     */
-    public boolean isNoIntercept() {
-        return noIntercept;
-    }
-
-    /**
-     * @param noIntercept true means the model is to be estimated without an intercept term
-     */
-    public void setNoIntercept(boolean noIntercept) {
-        this.noIntercept = noIntercept;
-    }
-
-    /**
-     * <p>Loads model x and y sample data from a flat input array, overriding any previous sample.
-     * </p>
-     * <p>Assumes that rows are concatenated with y values first in each row.  For example, an input
-     * <code>data</code> array containing the sequence of values (1, 2, 3, 4, 5, 6, 7, 8, 9) with
-     * <code>nobs = 3</code> and <code>nvars = 2</code> creates a regression
-     * independent variables, as below:
-     * <pre>
-     *   y   x[0]  x[1]
-     *   --------------
-     *   1     2     3
-     *   4     5     6
-     *   7     8     9
-     * </pre>
-     * </p>
-     * <p>Note that there is no need to add an initial unitary column (column of 1's) when
-     * specifying a model including an intercept term.  If {@link #isNoIntercept()} is <code>true</code>,
-     * the X matrix will be created without an initial column of "1"s; otherwise this column will
-     * be added.
-     * </p>
-     * <p>Throws IllegalArgumentException if any of the following preconditions fail:
-     * <ul><li><code>data</code> cannot be null</li>
-     * <li><code>data.length = nobs * (nvars + 1)</li>
-     * <li><code>nobs > nvars</code></li></ul>
-     * </p>
-     *
-     * @param data input data array
-     * @param nobs number of observations (rows)
-     * @param nvars number of independent variables (columns, not counting y)
-     * @param like matrix(maybe empty) indicating how data should be stored
-     * @throws NullArgumentException if the data array is null
-     * @throws CardinalityException if the length of the data array is not equal to <code>nobs * (nvars + 1)</code>
-     * @throws InsufficientDataException if <code>nobs</code> is less than <code>nvars + 1</code>
-     */
-    public void newSampleData(double[] data, int nobs, int nvars, Matrix like) {
-        if (data == null)
-            throw new NullArgumentException();
-        if (data.length != nobs * (nvars + 1))
-            throw new CardinalityException(nobs * (nvars + 1), data.length);
-        if (nobs <= nvars)
-            throw new InsufficientDataException(RegressionsErrorMessages.INSUFFICIENT_OBSERVED_POINTS_IN_SAMPLE);
-        double[] y = new double[nobs];
-        final int cols = noIntercept ? nvars : nvars + 1;
-        double[][] x = new double[nobs][cols];
-        int pointer = 0;
-        for (int i = 0; i < nobs; i++) {
-            y[i] = data[pointer++];
-            if (!noIntercept)
-                x[i][0] = 1.0d;
-            for (int j = noIntercept ? 0 : 1; j < cols; j++)
-                x[i][j] = data[pointer++];
-        }
-        xMatrix = MatrixUtil.like(like, nobs, cols).assign(x);
-        yVector = MatrixUtil.likeVector(like, y.length).assign(y);
-    }
-
-    /**
-     * Loads new y sample data, overriding any previous data.
-     *
-     * @param y the array representing the y sample
-     * @throws NullArgumentException if y is null
-     * @throws NoDataException if y is empty
-     */
-    protected void newYSampleData(Vector y) {
-        if (y == null)
-            throw new NullArgumentException();
-        if (y.size() == 0)
-            throw new NoDataException();
-        // TODO: IGNITE-5826, Should we copy here?
-        yVector = y;
-    }
-
-    /**
-     * <p>Loads new x sample data, overriding any previous data.
-     * </p>
-     * The input <code>x</code> array should have one row for each sample
-     * observation, with columns corresponding to independent variables.
-     * For example, if <pre>
-     * <code> x = new double[][] {{1, 2}, {3, 4}, {5, 6}} </code></pre>
-     * then <code>setXSampleData(x) </code> results in a model with two independent
-     * variables and 3 observations:
-     * <pre>
-     *   x[0]  x[1]
-     *   ----------
-     *     1    2
-     *     3    4
-     *     5    6
-     * </pre>
-     * </p>
-     * <p>Note that there is no need to add an initial unitary column (column of 1's) when
-     * specifying a model including an intercept term.
-     * </p>
-     *
-     * @param x the rectangular array representing the x sample
-     * @throws NullArgumentException if x is null
-     * @throws NoDataException if x is empty
-     * @throws CardinalityException if x is not rectangular
-     */
-    protected void newXSampleData(Matrix x) {
-        if (x == null)
-            throw new NullArgumentException();
-        if (x.rowSize() == 0)
-            throw new NoDataException();
-        if (noIntercept)
-            // TODO: IGNITE-5826, Should we copy here?
-            xMatrix = x;
-        else { // Augment design matrix with initial unitary column
-            xMatrix = MatrixUtil.like(x, x.rowSize(), x.columnSize() + 1);
-            xMatrix.viewColumn(0).map(Functions.constant(1.0));
-            xMatrix.viewPart(0, x.rowSize(), 1, x.columnSize()).assign(x);
-        }
-    }
-
-    /**
-     * Validates sample data.  Checks that
-     * <ul><li>Neither x nor y is null or empty;</li>
-     * <li>The length (i.e. number of rows) of x equals the length of y</li>
-     * <li>x has at least one more row than it has columns (i.e. there is
-     * sufficient data to estimate regression coefficients for each of the
-     * columns in x plus an intercept.</li>
-     * </ul>
-     *
-     * @param x the n x k matrix representing the x data
-     * @param y the n-sized vector representing the y data
-     * @throws NullArgumentException if {@code x} or {@code y} is null
-     * @throws CardinalityException if {@code x} and {@code y} do not have the same length
-     * @throws NoDataException if {@code x} or {@code y} are zero-length
-     * @throws MathIllegalArgumentException if the number of rows of {@code x} is not larger than the number of columns
-     * + 1
-     */
-    protected void validateSampleData(Matrix x, Vector y) throws MathIllegalArgumentException {
-        if ((x == null) || (y == null))
-            throw new NullArgumentException();
-        if (x.rowSize() != y.size())
-            throw new CardinalityException(y.size(), x.rowSize());
-        if (x.rowSize() == 0) {  // Must be no y data either
-            throw new NoDataException();
-        }
-        if (x.columnSize() + 1 > x.rowSize()) {
-            throw new MathIllegalArgumentException(
-                RegressionsErrorMessages.NOT_ENOUGH_DATA_FOR_NUMBER_OF_PREDICTORS,
-                x.rowSize(), x.columnSize());
-        }
-    }
-
-    /**
-     * Validates that the x data and covariance matrix have the same
-     * number of rows and that the covariance matrix is square.
-     *
-     * @param x the [n,k] array representing the x sample
-     * @param covariance the [n,n] array representing the covariance matrix
-     * @throws CardinalityException if the number of rows in x is not equal to the number of rows in covariance
-     * @throws NonSquareMatrixException if the covariance matrix is not square
-     */
-    protected void validateCovarianceData(double[][] x, double[][] covariance) {
-        if (x.length != covariance.length)
-            throw new CardinalityException(x.length, covariance.length);
-        if (covariance.length > 0 && covariance.length != covariance[0].length)
-            throw new NonSquareMatrixException(covariance.length, covariance[0].length);
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override public double[] estimateRegressionParameters() {
-        Vector b = calculateBeta();
-        return b.getStorage().data();
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override public double[] estimateResiduals() {
-        Vector b = calculateBeta();
-        Vector e = yVector.minus(xMatrix.times(b));
-        return e.getStorage().data();
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override public Matrix estimateRegressionParametersVariance() {
-        return calculateBetaVariance();
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override public double[] estimateRegressionParametersStandardErrors() {
-        Matrix betaVariance = estimateRegressionParametersVariance();
-        double sigma = calculateErrorVariance();
-        int len = betaVariance.rowSize();
-        double[] res = new double[len];
-        for (int i = 0; i < len; i++)
-            res[i] = Math.sqrt(sigma * betaVariance.getX(i, i));
-        return res;
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override public double estimateRegressandVariance() {
-        return calculateYVariance();
-    }
-
-    /**
-     * Estimates the variance of the error.
-     *
-     * @return estimate of the error variance
-     */
-    public double estimateErrorVariance() {
-        return calculateErrorVariance();
-
-    }
-
-    /**
-     * Estimates the standard error of the regression.
-     *
-     * @return regression standard error
-     */
-    public double estimateRegressionStandardError() {
-        return Math.sqrt(estimateErrorVariance());
-    }
-
-    /**
-     * Calculates the beta of multiple linear regression in matrix notation.
-     *
-     * @return beta
-     */
-    protected abstract Vector calculateBeta();
-
-    /**
-     * Calculates the beta variance of multiple linear regression in matrix
-     * notation.
-     *
-     * @return beta variance
-     */
-    protected abstract Matrix calculateBetaVariance();
-
-    /**
-     * Calculates the variance of the y values.
-     *
-     * @return Y variance
-     */
-    protected double calculateYVariance() {
-        // Compute initial estimate using definitional formula
-        int vSize = yVector.size();
-        double xbar = yVector.sum() / vSize;
-        // Compute correction factor in second pass
-        final double corr = yVector.foldMap((val, acc) -> acc + val - xbar, Functions.IDENTITY, 0.0);
-        final double mean = xbar - corr;
-        return yVector.foldMap(Functions.PLUS, val -> (val - mean) * (val - mean), 0.0) / (vSize - 1);
-    }
-
-    /**
-     * <p>Calculates the variance of the error term.</p>
-     * Uses the formula <pre>
-     * var(u) = u &middot; u / (n - k)
-     * </pre>
-     * where n and k are the row and column dimensions of the design
-     * matrix X.
-     *
-     * @return error variance estimate
-     */
-    protected double calculateErrorVariance() {
-        Vector residuals = calculateResiduals();
-        return residuals.dot(residuals) /
-            (xMatrix.rowSize() - xMatrix.columnSize());
-    }
-
-    /**
-     * Calculates the residuals of multiple linear regression in matrix
-     * notation.
-     *
-     * <pre>
-     * u = y - X * b
-     * </pre>
-     *
-     * @return The residuals [n,1] matrix
-     */
-    protected Vector calculateResiduals() {
-        Vector b = calculateBeta();
-        return yVector.minus(xMatrix.times(b));
-    }
-
-    /** {@inheritDoc} */
-    @Override public boolean equals(Object o) {
-        if (this == o)
-            return true;
-        if (o == null || getClass() != o.getClass())
-            return false;
-
-        AbstractMultipleLinearRegression that = (AbstractMultipleLinearRegression)o;
-
-        return noIntercept == that.noIntercept && xMatrix.equals(that.xMatrix);
-    }
-
-    /** {@inheritDoc} */
-    @Override public int hashCode() {
-        int res = xMatrix.hashCode();
-
-        res = 31 * res + (noIntercept ? 1 : 0);
-
-        return res;
-    }
-}

http://git-wip-us.apache.org/repos/asf/ignite/blob/b2060855/modules/ml/src/main/java/org/apache/ignite/ml/regressions/MultipleLinearRegression.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/regressions/MultipleLinearRegression.java b/modules/ml/src/main/java/org/apache/ignite/ml/regressions/MultipleLinearRegression.java
deleted file mode 100644
index 2fc4dde..0000000
--- a/modules/ml/src/main/java/org/apache/ignite/ml/regressions/MultipleLinearRegression.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.ml.regressions;
-
-import org.apache.ignite.ml.math.Matrix;
-
-/**
- * This class is based on the corresponding class from Apache Common Math lib. The multiple linear regression can be
- * represented in matrix-notation.
- * <pre>
- *  y=X*b+u
- * </pre>
- * where y is an <code>n-vector</code> <b>regressand</b>, X is a <code>[n,k]</code> matrix whose <code>k</code> columns
- * are called <b>regressors</b>, b is <code>k-vector</code> of <b>regression parameters</b> and <code>u</code> is an
- * <code>n-vector</code> of <b>error terms</b> or <b>residuals</b>.
- * <p>
- * The notation is quite standard in literature, cf eg <a href="http://www.econ.queensu.ca/ETM">Davidson and MacKinnon,
- * Econometrics Theory and Methods, 2004</a>. </p>
- */
-public interface MultipleLinearRegression {
-    /**
-     * Estimates the regression parameters b.
-     *
-     * @return The [k,1] array representing b
-     */
-    public double[] estimateRegressionParameters();
-
-    /**
-     * Estimates the variance of the regression parameters, ie Var(b).
-     *
-     * @return The k x k matrix representing the variance of b
-     */
-    public Matrix estimateRegressionParametersVariance();
-
-    /**
-     * Estimates the residuals, ie u = y - X*b.
-     *
-     * @return The [n,1] array representing the residuals
-     */
-    public double[] estimateResiduals();
-
-    /**
-     * Returns the variance of the regressand, ie Var(y).
-     *
-     * @return The double representing the variance of y
-     */
-    public double estimateRegressandVariance();
-
-    /**
-     * Returns the standard errors of the regression parameters.
-     *
-     * @return standard errors of estimated regression parameters
-     */
-    public double[] estimateRegressionParametersStandardErrors();
-
-}
