IGNITE-7876: Adopt SVM Linear Binary Classification Model and Trainer to the 
new Partitioned Dataset

This closes #3607


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/318ffe50
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/318ffe50
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/318ffe50

Branch: refs/heads/master
Commit: 318ffe50fc0edb1d31f05044be8367bd578b6a88
Parents: 29d56ae
Author: zaleslaw <zaleslaw....@gmail.com>
Authored: Mon Mar 12 15:28:11 2018 +0300
Committer: Yury Babak <yba...@gridgain.com>
Committed: Mon Mar 12 15:28:11 2018 +0300

----------------------------------------------------------------------
 .../ml/svm/SVMBinaryClassificationExample.java  |  131 --
 .../ml/svm/SVMMultiClassificationExample.java   |  130 --
 .../src/main/resources/datasets/titanic.txt     | 1309 ------------------
 .../apache/ignite/ml/structures/Dataset.java    |   10 +
 .../ignite/ml/structures/LabeledDataset.java    |   16 +-
 .../svm/SVMLinearBinaryClassificationModel.java |    4 +-
 .../SVMLinearBinaryClassificationTrainer.java   |   87 +-
 .../SVMLinearMultiClassClassificationModel.java |   89 --
 ...VMLinearMultiClassClassificationTrainer.java |  160 ---
 .../ignite/ml/svm/SVMPartitionContext.java      |   28 +
 .../ml/svm/SVMPartitionDataBuilderOnHeap.java   |   86 ++
 .../org/apache/ignite/ml/LocalModelsTest.java   |   28 -
 .../org/apache/ignite/ml/svm/BaseSVMTest.java   |   58 -
 .../ignite/ml/svm/SVMBinaryTrainerTest.java     |   74 +
 .../org/apache/ignite/ml/svm/SVMModelTest.java  |   15 -
 .../org/apache/ignite/ml/svm/SVMTestSuite.java  |   13 +-
 ...inearSVMBinaryClassificationTrainerTest.java |   35 -
 ...inearSVMBinaryClassificationTrainerTest.java |  141 --
 ...inearSVMBinaryClassificationTrainerTest.java |   38 -
 ...rSVMMultiClassClassificationTrainerTest.java |   35 -
 ...rSVMMultiClassClassificationTrainerTest.java |   76 -
 ...rSVMMultiClassClassificationTrainerTest.java |   38 -
 22 files changed, 272 insertions(+), 2329 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/examples/src/main/java/org/apache/ignite/examples/ml/svm/SVMBinaryClassificationExample.java
----------------------------------------------------------------------
diff --git 
a/examples/src/main/java/org/apache/ignite/examples/ml/svm/SVMBinaryClassificationExample.java
 
b/examples/src/main/java/org/apache/ignite/examples/ml/svm/SVMBinaryClassificationExample.java
deleted file mode 100644
index e256276..0000000
--- 
a/examples/src/main/java/org/apache/ignite/examples/ml/svm/SVMBinaryClassificationExample.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.examples.ml.svm;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.file.Path;
-import org.apache.ignite.Ignite;
-import org.apache.ignite.Ignition;
-import org.apache.ignite.examples.ExampleNodeStartup;
-import org.apache.ignite.internal.util.IgniteUtils;
-import org.apache.ignite.ml.Trainer;
-import org.apache.ignite.ml.structures.LabeledDataset;
-import org.apache.ignite.ml.structures.LabeledDatasetTestTrainPair;
-import org.apache.ignite.ml.structures.preprocessing.LabeledDatasetLoader;
-import org.apache.ignite.ml.structures.preprocessing.LabellingMachine;
-import org.apache.ignite.ml.structures.preprocessing.Normalizer;
-import org.apache.ignite.ml.svm.SVMLinearBinaryClassificationModel;
-import org.apache.ignite.ml.svm.SVMLinearBinaryClassificationTrainer;
-import org.apache.ignite.thread.IgniteThread;
-
-/**
- * <p>
- * Example of using {@link 
org.apache.ignite.ml.svm.SVMLinearBinaryClassificationModel} with Titanic 
dataset.</p>
- * <p>
- * Note that in this example we cannot guarantee order in which nodes return 
results of intermediate
- * computations and therefore algorithm can return different results.</p>
- * <p>
- * Remote nodes should always be started with special configuration file which
- * enables P2P class loading: {@code 'ignite.{sh|bat} 
examples/config/example-ignite.xml'}.</p>
- * <p>
- * Alternatively you can run {@link ExampleNodeStartup} in another JVM which 
will start node
- * with {@code examples/config/example-ignite.xml} configuration.</p>
- */
-public class SVMBinaryClassificationExample {
-    /** Separator. */
-    private static final String SEPARATOR = ",";
-
-    /** Path to the Iris dataset. */
-    private static final String TITANIC_DATASET = 
"examples/src/main/resources/datasets/titanic.txt";
-
-    /**
-     * Executes example.
-     *
-     * @param args Command line arguments, none required.
-     */
-    public static void main(String[] args) throws InterruptedException {
-        System.out.println(">>> SVM Binary classification example started.");
-        // Start ignite grid.
-        try (Ignite ignite = 
Ignition.start("examples/config/example-ignite.xml")) {
-            System.out.println(">>> Ignite grid started.");
-
-            IgniteThread igniteThread = new 
IgniteThread(ignite.configuration().getIgniteInstanceName(),
-                SVMBinaryClassificationExample.class.getSimpleName(), () -> {
-
-                try {
-                    // Prepare path to read
-                    File file = IgniteUtils.resolveIgnitePath(TITANIC_DATASET);
-                    if (file == null)
-                        throw new RuntimeException("Can't find file: " + 
TITANIC_DATASET);
-
-                    Path path = file.toPath();
-
-                    // Read dataset from file
-                    LabeledDataset dataset = 
LabeledDatasetLoader.loadFromTxtFile(path, SEPARATOR, true, false);
-
-                    // Normalize dataset
-                    Normalizer.normalizeWithMiniMax(dataset);
-
-                    // Random splitting of the given data as 70% train and 30% 
test datasets
-                    LabeledDatasetTestTrainPair split = new 
LabeledDatasetTestTrainPair(dataset, 0.3);
-
-                    System.out.println("\n>>> Amount of observations in train 
dataset " + split.train().rowSize());
-                    System.out.println("\n>>> Amount of observations in test 
dataset " + split.test().rowSize());
-
-                    LabeledDataset test = split.test();
-                    LabeledDataset train = split.train();
-
-                    System.out.println("\n>>> Create new linear binary SVM 
trainer object.");
-                    Trainer<SVMLinearBinaryClassificationModel, 
LabeledDataset> trainer = new SVMLinearBinaryClassificationTrainer();
-
-                    System.out.println("\n>>> Perform the training to get the 
model.");
-                    SVMLinearBinaryClassificationModel mdl = 
trainer.train(train);
-
-                    System.out.println("\n>>> SVM classification model: " + 
mdl);
-
-                    // Clone labels
-                    final double[] labels = test.labels();
-
-                    // Save predicted classes to test dataset
-                    LabellingMachine.assignLabels(test, mdl);
-
-                    // Calculate amount of errors on test dataset
-                    int amountOfErrors = 0;
-                    for (int i = 0; i < test.rowSize(); i++) {
-                        if (test.label(i) != labels[i])
-                            amountOfErrors++;
-                    }
-
-                    System.out.println("\n>>> Absolute amount of errors " + 
amountOfErrors);
-                    System.out.println("\n>>> Prediction percentage " + (1 - 
amountOfErrors / (double) test.rowSize()));
-
-                } catch (IOException e) {
-                    e.printStackTrace();
-                    System.out.println("\n>>> Unexpected exception, check 
resources: " + e);
-                } finally {
-                    System.out.println("\n>>> SVM binary classification 
example completed.");
-                }
-
-            });
-
-            igniteThread.start();
-            igniteThread.join();
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/examples/src/main/java/org/apache/ignite/examples/ml/svm/SVMMultiClassificationExample.java
----------------------------------------------------------------------
diff --git 
a/examples/src/main/java/org/apache/ignite/examples/ml/svm/SVMMultiClassificationExample.java
 
b/examples/src/main/java/org/apache/ignite/examples/ml/svm/SVMMultiClassificationExample.java
deleted file mode 100644
index 7aee5d2..0000000
--- 
a/examples/src/main/java/org/apache/ignite/examples/ml/svm/SVMMultiClassificationExample.java
+++ /dev/null
@@ -1,130 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.examples.ml.svm;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.file.Path;
-import org.apache.ignite.Ignite;
-import org.apache.ignite.Ignition;
-import org.apache.ignite.examples.ExampleNodeStartup;
-import org.apache.ignite.internal.util.IgniteUtils;
-import org.apache.ignite.ml.Trainer;
-import org.apache.ignite.ml.structures.LabeledDataset;
-import org.apache.ignite.ml.structures.LabeledDatasetTestTrainPair;
-import org.apache.ignite.ml.structures.preprocessing.LabeledDatasetLoader;
-import org.apache.ignite.ml.structures.preprocessing.LabellingMachine;
-import org.apache.ignite.ml.structures.preprocessing.Normalizer;
-import org.apache.ignite.ml.svm.SVMLinearMultiClassClassificationModel;
-import org.apache.ignite.ml.svm.SVMLinearMultiClassClassificationTrainer;
-import org.apache.ignite.thread.IgniteThread;
-
-/**
- * <p>
- * Example of using {@link 
org.apache.ignite.ml.svm.SVMLinearMultiClassClassificationModel} with Iris 
dataset.</p>
- * <p>
- * Note that in this example we cannot guarantee order in which nodes return 
results of intermediate
- * computations and therefore algorithm can return different results.</p>
- * <p>
- * Remote nodes should always be started with special configuration file which
- * enables P2P class loading: {@code 'ignite.{sh|bat} 
examples/config/example-ignite.xml'}.</p>
- * <p>
- * Alternatively you can run {@link ExampleNodeStartup} in another JVM which 
will start node
- * with {@code examples/config/example-ignite.xml} configuration.</p>
- */
-public class SVMMultiClassificationExample {
-    /** Separator. */
-    private static final String SEPARATOR = "\t";
-
-    /** Path to the Iris dataset. */
-    private static final String IRIS_DATASET = 
"examples/src/main/resources/datasets/iris.txt";
-
-    /**
-     * Executes example.
-     *
-     * @param args Command line arguments, none required.
-     */
-    public static void main(String[] args) throws InterruptedException {
-        System.out.println(">>> SVM Multi-Class classification example 
started.");
-        // Start ignite grid.
-        try (Ignite ignite = 
Ignition.start("examples/config/example-ignite.xml")) {
-            System.out.println(">>> Ignite grid started.");
-
-            IgniteThread igniteThread = new 
IgniteThread(ignite.configuration().getIgniteInstanceName(),
-                SVMMultiClassificationExample.class.getSimpleName(), () -> {
-
-                try {
-                    // Prepare path to read
-                    File file = IgniteUtils.resolveIgnitePath(IRIS_DATASET);
-                    if (file == null)
-                        throw new RuntimeException("Can't find file: " + 
IRIS_DATASET);
-
-                    Path path = file.toPath();
-
-                    // Read dataset from file
-                    LabeledDataset dataset = 
LabeledDatasetLoader.loadFromTxtFile(path, SEPARATOR, true, false);
-
-                    // Normalize dataset
-                    Normalizer.normalizeWithMiniMax(dataset);
-
-                    // Random splitting of the given data as 70% train and 30% 
test datasets
-                    LabeledDatasetTestTrainPair split = new 
LabeledDatasetTestTrainPair(dataset, 0.3);
-
-                    System.out.println("\n>>> Amount of observations in train 
dataset " + split.train().rowSize());
-                    System.out.println("\n>>> Amount of observations in test 
dataset " + split.test().rowSize());
-
-                    LabeledDataset test = split.test();
-                    LabeledDataset train = split.train();
-
-                    System.out.println("\n>>> Create new linear multi-class 
SVM trainer object.");
-                    Trainer<SVMLinearMultiClassClassificationModel, 
LabeledDataset> trainer = new SVMLinearMultiClassClassificationTrainer();
-
-                    System.out.println("\n>>> Perform the training to get the 
model.");
-                    SVMLinearMultiClassClassificationModel mdl = 
trainer.train(train);
-
-                    System.out.println("\n>>> SVM classification model: " + 
mdl);
-
-                    // Clone labels
-                    final double[] labels = test.labels();
-
-                    // Save predicted classes to test dataset
-                    LabellingMachine.assignLabels(test, mdl);
-
-                    // Calculate amount of errors on test dataset
-                    int amountOfErrors = 0;
-                    for (int i = 0; i < test.rowSize(); i++) {
-                        if (test.label(i) != labels[i])
-                            amountOfErrors++;
-                    }
-
-                    System.out.println("\n>>> Absolute amount of errors " + 
amountOfErrors);
-                    System.out.println("\n>>> Prediction percentage " + (1 - 
amountOfErrors / (double) test.rowSize()));
-
-                } catch (IOException e) {
-                    e.printStackTrace();
-                    System.out.println("\n>>> Unexpected exception, check 
resources: " + e);
-                } finally {
-                    System.out.println("\n>>> SVM Multi-Class classification 
example completed.");
-                }
-            });
-
-            igniteThread.start();
-            igniteThread.join();
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/examples/src/main/resources/datasets/titanic.txt
----------------------------------------------------------------------
diff --git a/examples/src/main/resources/datasets/titanic.txt 
b/examples/src/main/resources/datasets/titanic.txt
deleted file mode 100644
index fa2dc70..0000000
--- a/examples/src/main/resources/datasets/titanic.txt
+++ /dev/null
@@ -1,1309 +0,0 @@
-1,1,2,29,0,0,211.3375
-1,1,1,0.9167,1,2,151.55
--1,1,2,2,1,2,151.55
--1,1,1,30,1,2,151.55
--1,1,2,25,1,2,151.55
-1,1,1,48,0,0,26.55
-1,1,2,63,1,0,77.9583
--1,1,1,39,0,0,0
-1,1,2,53,2,0,51.4792
--1,1,1,71,0,0,49.5042
--1,1,1,47,1,0,227.525
-1,1,2,18,1,0,227.525
-1,1,2,24,0,0,69.3
-1,1,2,26,0,0,78.85
-1,1,1,80,0,0,30
--1,1,1,,0,0,25.925
--1,1,1,24,0,1,247.5208
-1,1,2,50,0,1,247.5208
-1,1,2,32,0,0,76.2917
--1,1,1,36,0,0,75.2417
-1,1,1,37,1,1,52.5542
-1,1,2,47,1,1,52.5542
-1,1,1,26,0,0,30
-1,1,2,42,0,0,227.525
-1,1,2,29,0,0,221.7792
--1,1,1,25,0,0,26
-1,1,1,25,1,0,91.0792
-1,1,2,19,1,0,91.0792
-1,1,2,35,0,0,135.6333
-1,1,1,28,0,0,26.55
--1,1,1,45,0,0,35.5
-1,1,1,40,0,0,31
-1,1,2,30,0,0,164.8667
-1,1,2,58,0,0,26.55
--1,1,1,42,0,0,26.55
-1,1,2,45,0,0,262.375
-1,1,2,22,0,1,55
-1,1,1,,0,0,26.55
--1,1,1,41,0,0,30.5
--1,1,1,48,0,0,50.4958
--1,1,1,,0,0,39.6
-1,1,2,44,0,0,27.7208
-1,1,2,59,2,0,51.4792
-1,1,2,60,0,0,76.2917
-1,1,2,41,0,0,134.5
--1,1,1,45,0,0,26.55
--1,1,1,,0,0,31
-1,1,1,42,0,0,26.2875
-1,1,2,53,0,0,27.4458
-1,1,1,36,0,1,512.3292
-1,1,2,58,0,1,512.3292
--1,1,1,33,0,0,5
--1,1,1,28,0,0,47.1
--1,1,1,17,0,0,47.1
-1,1,1,11,1,2,120
-1,1,2,14,1,2,120
-1,1,1,36,1,2,120
-1,1,2,36,1,2,120
--1,1,1,49,0,0,26
-1,1,2,,0,0,27.7208
--1,1,1,36,1,0,78.85
-1,1,2,76,1,0,78.85
--1,1,1,46,1,0,61.175
-1,1,2,47,1,0,61.175
-1,1,1,27,1,0,53.1
-1,1,2,33,1,0,53.1
-1,1,2,36,0,0,262.375
-1,1,2,30,0,0,86.5
-1,1,1,45,0,0,29.7
-1,1,2,,0,1,55
--1,1,1,,0,0,0
--1,1,1,27,1,0,136.7792
-1,1,2,26,1,0,136.7792
-1,1,2,22,0,0,151.55
--1,1,1,,0,0,52
--1,1,1,47,0,0,25.5875
-1,1,2,39,1,1,83.1583
--1,1,1,37,1,1,83.1583
-1,1,2,64,0,2,83.1583
-1,1,2,55,2,0,25.7
--1,1,1,,0,0,26.55
--1,1,1,70,1,1,71
-1,1,2,36,0,2,71
-1,1,2,64,1,1,26.55
--1,1,1,39,1,0,71.2833
-1,1,2,38,1,0,71.2833
-1,1,1,51,0,0,26.55
-1,1,1,27,0,0,30.5
-1,1,2,33,0,0,151.55
--1,1,1,31,1,0,52
-1,1,2,27,1,2,52
-1,1,1,31,1,0,57
-1,1,2,17,1,0,57
-1,1,1,53,1,1,81.8583
-1,1,1,4,0,2,81.8583
-1,1,2,54,1,1,81.8583
--1,1,1,50,1,0,106.425
-1,1,2,27,1,1,247.5208
-1,1,2,48,1,0,106.425
-1,1,2,48,1,0,39.6
-1,1,1,49,1,0,56.9292
--1,1,1,39,0,0,29.7
-1,1,2,23,0,1,83.1583
-1,1,2,38,0,0,227.525
-1,1,2,54,1,0,78.2667
--1,1,2,36,0,0,31.6792
--1,1,1,,0,0,221.7792
-1,1,2,,0,0,31.6833
-1,1,2,,0,0,110.8833
-1,1,1,36,0,0,26.3875
--1,1,1,30,0,0,27.75
-1,1,2,24,3,2,263
-1,1,2,28,3,2,263
-1,1,2,23,3,2,263
--1,1,1,19,3,2,263
--1,1,1,64,1,4,263
-1,1,2,60,1,4,263
-1,1,2,30,0,0,56.9292
--1,1,1,,0,0,26.55
-1,1,1,50,2,0,133.65
-1,1,1,43,1,0,27.7208
-1,1,2,,1,0,133.65
-1,1,2,22,0,2,49.5
-1,1,1,60,1,1,79.2
-1,1,2,48,1,1,79.2
--1,1,1,,0,0,0
--1,1,1,37,1,0,53.1
-1,1,2,35,1,0,53.1
--1,1,1,47,0,0,38.5
-1,1,2,35,0,0,211.5
-1,1,2,22,0,1,59.4
-1,1,2,45,0,1,59.4
--1,1,1,24,0,0,79.2
-1,1,1,49,1,0,89.1042
-1,1,2,,1,0,89.1042
--1,1,1,71,0,0,34.6542
-1,1,1,53,0,0,28.5
-1,1,2,19,0,0,30
--1,1,1,38,0,1,153.4625
-1,1,2,58,0,1,153.4625
-1,1,1,23,0,1,63.3583
-1,1,2,45,0,1,63.3583
--1,1,1,46,0,0,79.2
-1,1,1,25,1,0,55.4417
-1,1,2,25,1,0,55.4417
-1,1,1,48,1,0,76.7292
-1,1,2,49,1,0,76.7292
--1,1,1,,0,0,42.4
--1,1,1,45,1,0,83.475
-1,1,2,35,1,0,83.475
--1,1,1,40,0,0,0
-1,1,1,27,0,0,76.7292
-1,1,1,,0,0,30
-1,1,2,24,0,0,83.1583
--1,1,1,55,1,1,93.5
-1,1,2,52,1,1,93.5
--1,1,1,42,0,0,42.5
--1,1,1,,0,0,51.8625
--1,1,1,55,0,0,50
-1,1,2,16,0,1,57.9792
-1,1,2,44,0,1,57.9792
-1,1,2,51,1,0,77.9583
--1,1,1,42,1,0,52
-1,1,2,35,1,0,52
-1,1,1,35,0,0,26.55
-1,1,1,38,1,0,90
--1,1,1,,0,0,30.6958
-1,1,2,35,1,0,90
-1,1,2,38,0,0,80
--1,1,2,50,0,0,28.7125
-1,1,1,49,0,0,0
--1,1,1,46,0,0,26
--1,1,1,50,0,0,26
--1,1,1,32.5,0,0,211.5
--1,1,1,58,0,0,29.7
--1,1,1,41,1,0,51.8625
-1,1,2,,1,0,51.8625
-1,1,1,42,1,0,52.5542
-1,1,2,45,1,0,52.5542
--1,1,1,,0,0,26.55
-1,1,2,39,0,0,211.3375
-1,1,2,49,0,0,25.9292
-1,1,2,30,0,0,106.425
-1,1,1,35,0,0,512.3292
--1,1,1,,0,0,27.7208
--1,1,1,42,0,0,26.55
-1,1,2,55,0,0,27.7208
-1,1,2,16,0,1,39.4
-1,1,2,51,0,1,39.4
--1,1,1,29,0,0,30
-1,1,2,21,0,0,77.9583
--1,1,1,30,0,0,45.5
-1,1,2,58,0,0,146.5208
-1,1,2,15,0,1,211.3375
--1,1,1,30,0,0,26
-1,1,2,16,0,0,86.5
-1,1,1,,0,0,29.7
--1,1,1,19,1,0,53.1
-1,1,2,18,1,0,53.1
-1,1,2,24,0,0,49.5042
--1,1,1,46,0,0,75.2417
--1,1,1,54,0,0,51.8625
-1,1,1,36,0,0,26.2875
--1,1,1,28,1,0,82.1708
-1,1,2,,1,0,82.1708
--1,1,1,65,0,0,26.55
--1,1,1,44,2,0,90
-1,1,2,33,1,0,90
-1,1,2,37,1,0,90
-1,1,1,30,1,0,57.75
--1,1,1,55,0,0,30.5
--1,1,1,47,0,0,42.4
--1,1,1,37,0,1,29.7
-1,1,2,31,1,0,113.275
-1,1,2,23,1,0,113.275
--1,1,1,58,0,2,113.275
-1,1,2,19,0,2,26.2833
--1,1,1,64,0,0,26
-1,1,2,39,0,0,108.9
-1,1,1,,0,0,25.7417
-1,1,2,22,0,1,61.9792
--1,1,1,65,0,1,61.9792
--1,1,1,28.5,0,0,27.7208
--1,1,1,,0,0,0
--1,1,1,45.5,0,0,28.5
--1,1,1,23,0,0,93.5
--1,1,1,29,1,0,66.6
-1,1,2,22,1,0,66.6
--1,1,1,18,1,0,108.9
-1,1,2,17,1,0,108.9
-1,1,2,30,0,0,93.5
-1,1,1,52,0,0,30.5
--1,1,1,47,0,0,52
-1,1,2,56,0,1,83.1583
--1,1,1,38,0,0,0
-1,1,1,,0,0,39.6
--1,1,1,22,0,0,135.6333
--1,1,1,,0,0,227.525
-1,1,2,43,0,1,211.3375
--1,1,1,31,0,0,50.4958
-1,1,1,45,0,0,26.55
--1,1,1,,0,0,50
-1,1,2,33,0,0,27.7208
--1,1,1,46,0,0,79.2
--1,1,1,36,0,0,40.125
-1,1,2,33,0,0,86.5
--1,1,1,55,1,0,59.4
-1,1,2,54,1,0,59.4
--1,1,1,33,0,0,26.55
-1,1,1,13,2,2,262.375
-1,1,2,18,2,2,262.375
-1,1,2,21,2,2,262.375
--1,1,1,61,1,3,262.375
-1,1,2,48,1,3,262.375
-1,1,1,,0,0,30.5
-1,1,2,24,0,0,69.3
-1,1,1,,0,0,26
-1,1,2,35,1,0,57.75
-1,1,2,30,0,0,31
-1,1,1,34,0,0,26.55
-1,1,2,40,0,0,153.4625
-1,1,1,35,0,0,26.2875
--1,1,1,50,1,0,55.9
-1,1,2,39,1,0,55.9
-1,1,1,56,0,0,35.5
-1,1,1,28,0,0,35.5
--1,1,1,56,0,0,26.55
--1,1,1,56,0,0,30.6958
--1,1,1,24,1,0,60
--1,1,1,,0,0,26
-1,1,2,18,1,0,60
-1,1,1,24,1,0,82.2667
-1,1,2,23,1,0,82.2667
-1,1,1,6,0,2,134.5
-1,1,1,45,1,1,134.5
-1,1,2,40,1,1,134.5
--1,1,1,57,1,0,146.5208
-1,1,2,,1,0,146.5208
-1,1,1,32,0,0,30.5
--1,1,1,62,0,0,26.55
-1,1,1,54,1,0,55.4417
-1,1,2,43,1,0,55.4417
-1,1,2,52,1,0,78.2667
--1,1,1,,0,0,27.7208
-1,1,2,62,0,0,80
--1,1,1,67,1,0,221.7792
--1,1,2,63,1,0,221.7792
--1,1,1,61,0,0,32.3208
-1,1,2,48,0,0,25.9292
-1,1,2,18,0,2,79.65
--1,1,1,52,1,1,79.65
-1,1,2,39,1,1,79.65
-1,1,1,48,1,0,52
-1,1,2,,1,0,52
--1,1,1,49,1,1,110.8833
-1,1,1,17,0,2,110.8833
-1,1,2,39,1,1,110.8833
-1,1,2,,0,0,79.2
-1,1,1,31,0,0,28.5375
--1,1,1,40,0,0,27.7208
--1,1,1,61,0,0,33.5
--1,1,1,47,0,0,34.0208
-1,1,2,35,0,0,512.3292
--1,1,1,64,1,0,75.25
-1,1,2,60,1,0,75.25
--1,1,1,60,0,0,26.55
--1,1,1,54,0,1,77.2875
--1,1,1,21,0,1,77.2875
-1,1,2,55,0,0,135.6333
-1,1,2,31,0,2,164.8667
--1,1,1,57,1,1,164.8667
-1,1,2,45,1,1,164.8667
--1,1,1,50,1,1,211.5
--1,1,1,27,0,2,211.5
-1,1,2,50,1,1,211.5
-1,1,2,21,0,0,26.55
--1,1,1,51,0,1,61.3792
-1,1,1,21,0,1,61.3792
--1,1,1,,0,0,35
-1,1,2,31,0,0,134.5
-1,1,1,,0,0,35.5
--1,1,1,62,0,0,26.55
-1,1,2,36,0,0,135.6333
--1,2,1,30,1,0,24
-1,2,2,28,1,0,24
--1,2,1,30,0,0,13
--1,2,1,18,0,0,11.5
--1,2,1,25,0,0,10.5
--1,2,1,34,1,0,26
-1,2,2,36,1,0,26
--1,2,1,57,0,0,13
--1,2,1,18,0,0,11.5
--1,2,1,23,0,0,10.5
-1,2,2,36,0,0,13
--1,2,1,28,0,0,10.5
--1,2,1,51,0,0,12.525
-1,2,1,32,1,0,26
-1,2,2,19,1,0,26
--1,2,1,28,0,0,26
-1,2,1,1,2,1,39
-1,2,2,4,2,1,39
-1,2,2,12,2,1,39
-1,2,2,36,0,3,39
-1,2,1,34,0,0,13
-1,2,2,19,0,0,13
--1,2,1,23,0,0,13
--1,2,1,26,0,0,13
--1,2,1,42,0,0,13
--1,2,1,27,0,0,13
-1,2,2,24,0,0,13
-1,2,2,15,0,2,39
--1,2,1,60,1,1,39
-1,2,2,40,1,1,39
-1,2,2,20,1,0,26
--1,2,1,25,1,0,26
-1,2,2,36,0,0,13
--1,2,1,25,0,0,13
--1,2,1,42,0,0,13
-1,2,2,42,0,0,13
-1,2,1,0.8333,0,2,29
-1,2,1,26,1,1,29
-1,2,2,22,1,1,29
-1,2,2,35,0,0,21
--1,2,1,,0,0,0
--1,2,1,19,0,0,13
--1,2,2,44,1,0,26
--1,2,1,54,1,0,26
--1,2,1,52,0,0,13.5
--1,2,1,37,1,0,26
--1,2,2,29,1,0,26
-1,2,2,25,1,1,30
-1,2,2,45,0,2,30
--1,2,1,29,1,0,26
-1,2,2,28,1,0,26
--1,2,1,29,0,0,10.5
--1,2,1,28,0,0,13
-1,2,1,24,0,0,10.5
-1,2,2,8,0,2,26.25
--1,2,1,31,1,1,26.25
-1,2,2,31,1,1,26.25
-1,2,2,22,0,0,10.5
--1,2,2,30,0,0,13
--1,2,2,,0,0,21
--1,2,1,21,0,0,11.5
--1,2,1,,0,0,0
-1,2,1,8,1,1,36.75
--1,2,1,18,0,0,73.5
-1,2,2,48,0,2,36.75
-1,2,2,28,0,0,13
--1,2,1,32,0,0,13
--1,2,1,17,0,0,73.5
--1,2,1,29,1,0,27.7208
-1,2,2,24,1,0,27.7208
--1,2,1,25,0,0,31.5
--1,2,1,18,0,0,73.5
-1,2,2,18,0,1,23
-1,2,2,34,0,1,23
--1,2,1,54,0,0,26
-1,2,1,8,0,2,32.5
--1,2,1,42,1,1,32.5
-1,2,2,34,1,1,32.5
-1,2,2,27,1,0,13.8583
-1,2,2,30,1,0,13.8583
--1,2,1,23,0,0,13
--1,2,1,21,0,0,13
--1,2,1,18,0,0,13
--1,2,1,40,1,0,26
-1,2,2,29,1,0,26
--1,2,1,18,0,0,10.5
--1,2,1,36,0,0,13
--1,2,1,,0,0,0
--1,2,2,38,0,0,13
--1,2,1,35,0,0,26
--1,2,1,38,1,0,21
--1,2,1,34,1,0,21
-1,2,2,34,0,0,13
--1,2,1,16,0,0,26
--1,2,1,26,0,0,10.5
--1,2,1,47,0,0,10.5
--1,2,1,21,1,0,11.5
--1,2,1,21,1,0,11.5
--1,2,1,24,0,0,13.5
--1,2,1,24,0,0,13
--1,2,1,34,0,0,13
--1,2,1,30,0,0,13
--1,2,1,52,0,0,13
--1,2,1,30,0,0,13
-1,2,1,0.6667,1,1,14.5
-1,2,2,24,0,2,14.5
--1,2,1,44,0,0,13
-1,2,2,6,0,1,33
--1,2,1,28,0,1,33
-1,2,1,62,0,0,10.5
--1,2,1,30,0,0,10.5
-1,2,2,7,0,2,26.25
--1,2,1,43,1,1,26.25
-1,2,2,45,1,1,26.25
-1,2,2,24,1,2,65
-1,2,2,24,1,2,65
--1,2,1,49,1,2,65
-1,2,2,48,1,2,65
-1,2,2,55,0,0,16
--1,2,1,24,2,0,73.5
--1,2,1,32,2,0,73.5
--1,2,1,21,2,0,73.5
--1,2,2,18,1,1,13
-1,2,2,20,2,1,23
--1,2,1,23,2,1,11.5
--1,2,1,36,0,0,13
-1,2,2,54,1,3,23
--1,2,1,50,0,0,13
--1,2,1,44,1,0,26
-1,2,2,29,1,0,26
--1,2,1,21,0,0,73.5
-1,2,1,42,0,0,13
--1,2,1,63,1,0,26
--1,2,2,60,1,0,26
--1,2,1,33,0,0,12.275
-1,2,2,17,0,0,10.5
--1,2,1,42,1,0,27
-1,2,2,24,2,1,27
--1,2,1,47,0,0,15
--1,2,1,24,2,0,31.5
--1,2,1,22,2,0,31.5
--1,2,1,32,0,0,10.5
-1,2,2,23,0,0,13.7917
--1,2,1,34,1,0,26
-1,2,2,24,1,0,26
--1,2,2,22,0,0,21
-1,2,2,,0,0,12.35
--1,2,1,35,0,0,12.35
-1,2,2,45,0,0,13.5
--1,2,1,57,0,0,12.35
--1,2,1,,0,0,0
--1,2,1,31,0,0,10.5
--1,2,2,26,1,1,26
--1,2,1,30,1,1,26
--1,2,1,,0,0,10.7083
-1,2,2,1,1,2,41.5792
-1,2,2,3,1,2,41.5792
--1,2,1,25,1,2,41.5792
-1,2,2,22,1,2,41.5792
-1,2,2,17,0,0,12
-1,2,2,,0,0,33
-1,2,2,34,0,0,10.5
--1,2,1,36,0,0,12.875
--1,2,1,24,0,0,10.5
--1,2,1,61,0,0,12.35
--1,2,1,50,1,0,26
-1,2,2,42,1,0,26
--1,2,2,57,0,0,10.5
--1,2,1,,0,0,15.0458
-1,2,1,1,0,2,37.0042
--1,2,1,31,1,1,37.0042
-1,2,2,24,1,1,37.0042
--1,2,1,,0,0,15.5792
--1,2,1,30,0,0,13
--1,2,1,40,0,0,16
--1,2,1,32,0,0,13.5
--1,2,1,30,0,0,13
--1,2,1,46,0,0,26
-1,2,2,13,0,1,19.5
-1,2,2,41,0,1,19.5
-1,2,1,19,0,0,10.5
--1,2,1,39,0,0,13
--1,2,1,48,0,0,13
--1,2,1,70,0,0,10.5
--1,2,1,27,0,0,13
--1,2,1,54,0,0,14
--1,2,1,39,0,0,26
--1,2,1,16,0,0,10.5
--1,2,1,62,0,0,9.6875
--1,2,1,32.5,1,0,30.0708
-1,2,2,14,1,0,30.0708
-1,2,1,2,1,1,26
-1,2,1,3,1,1,26
--1,2,1,36.5,0,2,26
--1,2,1,26,0,0,13
--1,2,1,19,1,1,36.75
--1,2,1,28,0,0,13.5
-1,2,1,20,0,0,13.8625
-1,2,2,29,0,0,10.5
--1,2,1,39,0,0,13
-1,2,1,22,0,0,10.5
-1,2,1,,0,0,13.8625
--1,2,1,23,0,0,10.5
-1,2,1,29,0,0,13.8583
--1,2,1,28,0,0,10.5
--1,2,1,,0,0,0
-1,2,2,50,0,1,26
--1,2,1,19,0,0,10.5
--1,2,1,,0,0,15.05
--1,2,1,41,0,0,13
-1,2,2,21,0,1,21
-1,2,2,19,0,0,26
--1,2,1,43,0,1,21
-1,2,2,32,0,0,13
--1,2,1,34,0,0,13
-1,2,1,30,0,0,12.7375
--1,2,1,27,0,0,15.0333
-1,2,2,2,1,1,26
-1,2,2,8,1,1,26
-1,2,2,33,0,2,26
--1,2,1,36,0,0,10.5
--1,2,1,34,1,0,21
-1,2,2,30,3,0,21
-1,2,2,28,0,0,13
--1,2,1,23,0,0,15.0458
-1,2,1,0.8333,1,1,18.75
-1,2,1,3,1,1,18.75
-1,2,2,24,2,3,18.75
-1,2,2,50,0,0,10.5
--1,2,1,19,0,0,10.5
-1,2,2,21,0,0,10.5
--1,2,1,26,0,0,13
--1,2,1,25,0,0,13
--1,2,1,27,0,0,26
-1,2,2,25,0,1,26
-1,2,2,18,0,2,13
-1,2,2,20,0,0,36.75
-1,2,2,30,0,0,13
--1,2,1,59,0,0,13.5
-1,2,2,30,0,0,12.35
--1,2,1,35,0,0,10.5
-1,2,2,40,0,0,13
--1,2,1,25,0,0,13
--1,2,1,41,0,0,15.0458
--1,2,1,25,0,0,10.5
--1,2,1,18.5,0,0,13
--1,2,1,14,0,0,65
-1,2,2,50,0,0,10.5
--1,2,1,23,0,0,13
-1,2,2,28,0,0,12.65
-1,2,2,27,0,0,10.5
--1,2,1,29,1,0,21
--1,2,2,27,1,0,21
--1,2,1,40,0,0,13
-1,2,2,31,0,0,21
--1,2,1,30,1,0,21
--1,2,1,23,1,0,10.5
-1,2,2,31,0,0,21
--1,2,1,,0,0,0
-1,2,2,12,0,0,15.75
-1,2,2,40,0,0,15.75
-1,2,2,32.5,0,0,13
--1,2,1,27,1,0,26
-1,2,2,29,1,0,26
-1,2,1,2,1,1,23
-1,2,2,4,1,1,23
-1,2,2,29,0,2,23
-1,2,2,0.9167,1,2,27.75
-1,2,2,5,1,2,27.75
--1,2,1,36,1,2,27.75
-1,2,2,33,1,2,27.75
--1,2,1,66,0,0,10.5
--1,2,1,,0,0,12.875
-1,2,1,31,0,0,13
-1,2,1,,0,0,13
-1,2,2,26,0,0,13.5
--1,2,2,24,0,0,13
--1,3,1,42,0,0,7.55
--1,3,1,13,0,2,20.25
--1,3,1,16,1,1,20.25
-1,3,2,35,1,1,20.25
-1,3,2,16,0,0,7.65
-1,3,1,25,0,0,7.65
-1,3,1,20,0,0,7.925
-1,3,2,18,0,0,7.2292
--1,3,1,30,0,0,7.25
--1,3,1,26,0,0,8.05
--1,3,2,40,1,0,9.475
-1,3,1,0.8333,0,1,9.35
-1,3,2,18,0,1,9.35
-1,3,1,26,0,0,18.7875
--1,3,1,26,0,0,7.8875
--1,3,1,20,0,0,7.925
--1,3,1,24,0,0,7.05
--1,3,1,25,0,0,7.05
--1,3,1,35,0,0,8.05
--1,3,1,18,0,0,8.3
--1,3,1,32,0,0,22.525
-1,3,2,19,1,0,7.8542
--1,3,1,4,4,2,31.275
--1,3,2,6,4,2,31.275
--1,3,2,2,4,2,31.275
-1,3,2,17,4,2,7.925
--1,3,2,38,4,2,7.775
--1,3,2,9,4,2,31.275
--1,3,2,11,4,2,31.275
--1,3,1,39,1,5,31.275
-1,3,1,27,0,0,7.7958
--1,3,1,26,0,0,7.775
--1,3,2,39,1,5,31.275
--1,3,1,20,0,0,7.8542
--1,3,1,26,0,0,7.8958
--1,3,1,25,1,0,17.8
--1,3,2,18,1,0,17.8
--1,3,1,24,0,0,7.775
--1,3,1,35,0,0,7.05
--1,3,1,5,4,2,31.3875
--1,3,1,9,4,2,31.3875
-1,3,1,3,4,2,31.3875
--1,3,1,13,4,2,31.3875
-1,3,2,5,4,2,31.3875
--1,3,1,40,1,5,31.3875
-1,3,1,23,0,0,7.7958
-1,3,2,38,1,5,31.3875
-1,3,2,45,0,0,7.225
--1,3,1,21,0,0,7.225
--1,3,1,23,0,0,7.05
--1,3,2,17,0,0,14.4583
--1,3,1,30,0,0,7.225
--1,3,1,23,0,0,7.8542
-1,3,2,13,0,0,7.2292
--1,3,1,20,0,0,7.225
--1,3,1,32,1,0,15.85
-1,3,2,33,3,0,15.85
-1,3,2,0.75,2,1,19.2583
-1,3,2,0.75,2,1,19.2583
-1,3,2,5,2,1,19.2583
-1,3,2,24,0,3,19.2583
-1,3,2,18,0,0,8.05
--1,3,1,40,0,0,7.225
--1,3,1,26,0,0,7.8958
-1,3,1,20,0,0,7.2292
--1,3,2,18,0,1,14.4542
--1,3,2,45,0,1,14.4542
--1,3,2,27,0,0,7.8792
--1,3,1,22,0,0,8.05
--1,3,1,19,0,0,8.05
--1,3,1,26,0,0,7.775
--1,3,1,22,0,0,9.35
--1,3,1,,0,0,7.2292
--1,3,1,20,0,0,4.0125
-1,3,1,32,0,0,56.4958
--1,3,1,21,0,0,7.775
--1,3,1,18,0,0,7.75
--1,3,1,26,0,0,7.8958
--1,3,1,6,1,1,15.2458
--1,3,2,9,1,1,15.2458
--1,3,1,,0,0,7.225
--1,3,2,,0,2,15.2458
--1,3,2,,0,2,7.75
--1,3,1,40,1,1,15.5
--1,3,2,32,1,1,15.5
--1,3,1,21,0,0,16.1
-1,3,2,22,0,0,7.725
--1,3,2,20,0,0,7.8542
--1,3,1,29,1,0,7.0458
--1,3,1,22,1,0,7.25
--1,3,1,22,0,0,7.7958
--1,3,1,35,0,0,8.05
--1,3,2,18.5,0,0,7.2833
-1,3,1,21,0,0,7.8208
--1,3,1,19,0,0,6.75
--1,3,2,18,0,0,7.8792
--1,3,2,21,0,0,8.6625
--1,3,2,30,0,0,8.6625
--1,3,1,18,0,0,8.6625
--1,3,1,38,0,0,8.6625
--1,3,1,17,0,0,8.6625
--1,3,1,17,0,0,8.6625
--1,3,2,21,0,0,7.75
--1,3,1,21,0,0,7.75
--1,3,1,21,0,0,8.05
--1,3,1,,1,0,14.4583
--1,3,2,,1,0,14.4583
--1,3,1,28,0,0,7.7958
--1,3,1,24,0,0,7.8542
-1,3,2,16,0,0,7.75
--1,3,2,37,0,0,7.75
--1,3,1,28,0,0,7.25
--1,3,1,24,0,0,8.05
--1,3,1,21,0,0,7.7333
-1,3,1,32,0,0,56.4958
--1,3,1,29,0,0,8.05
--1,3,1,26,1,0,14.4542
--1,3,1,18,1,0,14.4542
--1,3,1,20,0,0,7.05
-1,3,1,18,0,0,8.05
--1,3,1,24,0,0,7.25
--1,3,1,36,0,0,7.4958
--1,3,1,24,0,0,7.4958
--1,3,1,31,0,0,7.7333
--1,3,1,31,0,0,7.75
-1,3,2,22,0,0,7.75
--1,3,2,30,0,0,7.6292
--1,3,1,70.5,0,0,7.75
--1,3,1,43,0,0,8.05
--1,3,1,35,0,0,7.8958
--1,3,1,27,0,0,7.8958
--1,3,1,19,0,0,7.8958
--1,3,1,30,0,0,8.05
-1,3,1,9,1,1,15.9
-1,3,1,3,1,1,15.9
-1,3,2,36,0,2,15.9
--1,3,1,59,0,0,7.25
--1,3,1,19,0,0,8.1583
-1,3,2,17,0,1,16.1
--1,3,1,44,0,1,16.1
--1,3,1,17,0,0,8.6625
--1,3,1,22.5,0,0,7.225
-1,3,1,45,0,0,8.05
--1,3,2,22,0,0,10.5167
--1,3,1,19,0,0,10.1708
-1,3,2,30,0,0,6.95
-1,3,1,29,0,0,7.75
--1,3,1,0.3333,0,2,14.4
--1,3,1,34,1,1,14.4
--1,3,2,28,1,1,14.4
--1,3,1,27,0,0,7.8958
--1,3,1,25,0,0,7.8958
--1,3,1,24,2,0,24.15
--1,3,1,22,0,0,8.05
--1,3,1,21,2,0,24.15
--1,3,1,17,2,0,8.05
--1,3,1,,1,0,16.1
-1,3,2,,1,0,16.1
-1,3,1,36.5,1,0,17.4
-1,3,2,36,1,0,17.4
-1,3,1,30,0,0,9.5
--1,3,1,16,0,0,9.5
-1,3,1,1,1,2,20.575
-1,3,2,0.1667,1,2,20.575
--1,3,1,26,1,2,20.575
-1,3,2,33,1,2,20.575
--1,3,1,25,0,0,7.8958
--1,3,1,,0,0,7.8958
--1,3,1,,0,0,7.8958
--1,3,1,22,0,0,7.25
--1,3,1,36,0,0,7.25
-1,3,2,19,0,0,7.8792
--1,3,1,17,0,0,7.8958
--1,3,1,42,0,0,8.6625
--1,3,1,43,0,0,7.8958
--1,3,1,,0,0,7.2292
--1,3,1,32,0,0,7.75
-1,3,1,19,0,0,8.05
-1,3,2,30,0,0,12.475
--1,3,2,24,0,0,7.75
-1,3,2,23,0,0,8.05
--1,3,1,33,0,0,7.8958
--1,3,1,65,0,0,7.75
-1,3,1,24,0,0,7.55
--1,3,1,23,1,0,13.9
-1,3,2,22,1,0,13.9
--1,3,1,18,0,0,7.775
--1,3,1,16,0,0,7.775
--1,3,1,45,0,0,6.975
--1,3,1,,0,0,7.225
--1,3,1,39,0,2,7.2292
--1,3,1,17,1,1,7.2292
--1,3,1,15,1,1,7.2292
--1,3,1,47,0,0,7.25
-1,3,2,5,0,0,12.475
--1,3,1,,0,0,7.225
--1,3,1,40.5,0,0,15.1
--1,3,1,40.5,0,0,7.75
-1,3,1,,0,0,7.05
--1,3,1,18,0,0,7.7958
--1,3,2,,0,0,7.75
--1,3,1,,0,0,7.75
--1,3,1,,0,0,6.95
--1,3,1,26,0,0,7.8792
--1,3,1,,0,0,7.75
-1,3,1,,0,0,56.4958
--1,3,2,21,2,2,34.375
--1,3,2,9,2,2,34.375
--1,3,1,,0,0,8.05
--1,3,1,18,2,2,34.375
--1,3,1,16,1,3,34.375
--1,3,2,48,1,3,34.375
--1,3,1,,0,0,7.75
--1,3,1,,0,0,7.25
--1,3,1,25,0,0,7.7417
--1,3,1,,0,0,14.5
--1,3,1,,0,0,7.8958
--1,3,1,22,0,0,8.05
-1,3,2,16,0,0,7.7333
-1,3,2,,0,0,7.75
-1,3,1,9,0,2,20.525
--1,3,1,33,1,1,20.525
--1,3,1,41,0,0,7.85
-1,3,2,31,1,1,20.525
--1,3,1,38,0,0,7.05
--1,3,1,9,5,2,46.9
--1,3,1,1,5,2,46.9
--1,3,1,11,5,2,46.9
--1,3,2,10,5,2,46.9
--1,3,2,16,5,2,46.9
--1,3,1,14,5,2,46.9
--1,3,1,40,1,6,46.9
--1,3,2,43,1,6,46.9
--1,3,1,51,0,0,8.05
--1,3,1,32,0,0,8.3625
--1,3,1,,0,0,8.05
--1,3,1,20,0,0,9.8458
--1,3,1,37,2,0,7.925
--1,3,1,28,2,0,7.925
--1,3,1,19,0,0,7.775
--1,3,2,24,0,0,8.85
--1,3,2,17,0,0,7.7333
--1,3,1,,1,0,19.9667
--1,3,1,,1,0,19.9667
--1,3,1,28,1,0,15.85
-1,3,2,24,1,0,15.85
--1,3,1,20,0,0,9.5
--1,3,1,23.5,0,0,7.2292
--1,3,1,41,2,0,14.1083
--1,3,1,26,1,0,7.8542
--1,3,1,21,0,0,7.8542
-1,3,2,45,1,0,14.1083
--1,3,2,,0,0,7.55
--1,3,1,25,0,0,7.25
--1,3,1,,0,0,6.8583
--1,3,1,11,0,0,18.7875
-1,3,2,,0,0,7.75
-1,3,1,27,0,0,6.975
-1,3,1,,0,0,56.4958
--1,3,2,18,0,0,6.75
-1,3,2,26,0,0,7.925
--1,3,2,23,0,0,7.925
-1,3,2,22,0,0,8.9625
--1,3,1,28,0,0,7.8958
--1,3,2,28,0,0,7.775
--1,3,2,,0,0,7.75
-1,3,2,2,0,1,12.2875
-1,3,2,22,1,1,12.2875
--1,3,1,43,0,0,6.45
--1,3,1,28,0,0,22.525
-1,3,2,27,0,0,7.925
--1,3,1,,0,0,7.75
-1,3,2,,0,0,8.05
--1,3,1,42,0,0,7.65
-1,3,1,,0,0,7.8875
--1,3,1,30,0,0,7.2292
--1,3,1,,0,0,7.8958
--1,3,2,27,1,0,7.925
--1,3,2,25,1,0,7.925
--1,3,1,,0,0,7.8958
-1,3,1,29,0,0,7.8958
-1,3,1,21,0,0,7.7958
--1,3,1,,0,0,7.05
--1,3,1,20,0,0,7.8542
--1,3,1,48,0,0,7.8542
--1,3,1,17,1,0,7.0542
-1,3,2,,0,0,7.75
-1,3,1,,0,0,8.1125
--1,3,1,34,0,0,6.4958
-1,3,1,26,0,0,7.775
--1,3,1,22,0,0,7.7958
--1,3,1,33,0,0,8.6542
--1,3,1,31,0,0,7.775
--1,3,1,29,0,0,7.8542
-1,3,1,4,1,1,11.1333
-1,3,2,1,1,1,11.1333
--1,3,1,49,0,0,0
--1,3,1,33,0,0,7.775
--1,3,1,19,0,0,0
-1,3,2,27,0,2,11.1333
--1,3,1,,1,2,23.45
--1,3,2,,1,2,23.45
--1,3,1,,1,2,23.45
--1,3,2,,1,2,23.45
--1,3,1,23,0,0,7.8958
-1,3,1,32,0,0,7.8542
--1,3,1,27,0,0,7.8542
--1,3,2,20,1,0,9.825
--1,3,2,21,1,0,9.825
-1,3,1,32,0,0,7.925
--1,3,1,17,0,0,7.125
--1,3,1,21,0,0,8.4333
--1,3,1,30,0,0,7.8958
-1,3,1,21,0,0,7.7958
--1,3,1,33,0,0,7.8542
--1,3,1,22,0,0,7.5208
-1,3,2,4,0,1,13.4167
-1,3,1,39,0,1,13.4167
--1,3,1,,0,0,7.2292
--1,3,1,18.5,0,0,7.2292
--1,3,1,,0,0,7.75
--1,3,1,,0,0,7.25
-1,3,2,,0,0,7.75
-1,3,2,,0,0,7.75
--1,3,1,34.5,0,0,7.8292
--1,3,1,44,0,0,8.05
-1,3,1,,0,0,7.75
--1,3,1,,1,0,14.4542
--1,3,2,,1,0,14.4542
--1,3,1,,1,0,7.75
--1,3,1,,1,0,7.75
--1,3,1,,0,0,7.7375
--1,3,2,22,2,0,8.6625
--1,3,1,26,2,0,8.6625
-1,3,2,4,0,2,22.025
-1,3,1,29,3,1,22.025
-1,3,2,26,1,1,22.025
--1,3,2,1,1,1,12.1833
--1,3,1,18,1,1,7.8542
--1,3,2,36,0,2,12.1833
--1,3,1,,0,0,7.8958
-1,3,1,25,0,0,7.2292
--1,3,1,,0,0,7.225
--1,3,2,37,0,0,9.5875
--1,3,1,,0,0,7.8958
-1,3,1,,0,0,56.4958
--1,3,1,,0,0,56.4958
-1,3,2,22,0,0,7.25
--1,3,1,,0,0,7.75
-1,3,1,26,0,0,56.4958
--1,3,1,29,0,0,9.4833
--1,3,1,29,0,0,7.775
--1,3,1,22,0,0,7.775
-1,3,1,22,0,0,7.225
--1,3,1,,3,1,25.4667
--1,3,2,,3,1,25.4667
--1,3,2,,3,1,25.4667
--1,3,2,,3,1,25.4667
--1,3,2,,0,4,25.4667
--1,3,1,32,0,0,7.925
--1,3,1,34.5,0,0,6.4375
--1,3,2,,1,0,15.5
--1,3,1,,1,0,15.5
--1,3,1,36,0,0,0
--1,3,1,39,0,0,24.15
--1,3,1,24,0,0,9.5
--1,3,2,25,0,0,7.775
--1,3,2,45,0,0,7.75
--1,3,1,36,1,0,15.55
--1,3,2,30,1,0,15.55
-1,3,1,20,1,0,7.925
--1,3,1,,0,0,7.8792
--1,3,1,28,0,0,56.4958
--1,3,1,,0,0,7.55
--1,3,1,30,1,0,16.1
--1,3,2,26,1,0,16.1
--1,3,1,,0,0,7.8792
--1,3,1,20.5,0,0,7.25
-1,3,1,27,0,0,8.6625
--1,3,1,51,0,0,7.0542
-1,3,2,23,0,0,7.8542
-1,3,1,32,0,0,7.5792
--1,3,1,,0,0,7.8958
--1,3,1,,0,0,7.55
-1,3,2,,0,0,7.75
-1,3,1,24,0,0,7.1417
--1,3,1,22,0,0,7.125
--1,3,2,,0,0,7.8792
--1,3,1,,0,0,7.75
--1,3,1,,0,0,8.05
--1,3,1,29,0,0,7.925
-1,3,1,,0,0,7.2292
--1,3,2,30.5,0,0,7.75
-1,3,2,,0,0,7.7375
--1,3,1,,0,0,7.2292
--1,3,1,35,0,0,7.8958
--1,3,1,33,0,0,7.8958
-1,3,2,,0,0,7.225
--1,3,1,,0,0,7.8958
-1,3,2,,0,0,7.75
-1,3,1,,0,0,7.75
-1,3,2,,2,0,23.25
-1,3,2,,2,0,23.25
-1,3,1,,2,0,23.25
-1,3,2,,0,0,7.7875
--1,3,1,,0,0,15.5
-1,3,2,,0,0,7.8792
-1,3,2,15,0,0,8.0292
--1,3,2,35,0,0,7.75
--1,3,1,,0,0,7.75
--1,3,1,24,1,0,16.1
--1,3,2,19,1,0,16.1
--1,3,2,,0,0,7.75
--1,3,2,,0,0,8.05
--1,3,2,,0,0,8.05
--1,3,1,55.5,0,0,8.05
--1,3,1,,0,0,7.75
-1,3,1,21,0,0,7.775
--1,3,1,,0,0,8.05
--1,3,1,24,0,0,7.8958
--1,3,1,21,0,0,7.8958
--1,3,1,28,0,0,7.8958
--1,3,1,,0,0,7.8958
-1,3,2,,0,0,7.8792
--1,3,1,25,0,0,7.65
-1,3,1,6,0,1,12.475
-1,3,2,27,0,1,12.475
--1,3,1,,0,0,8.05
-1,3,2,,1,0,24.15
--1,3,1,,1,0,24.15
--1,3,1,,0,0,8.4583
--1,3,1,34,0,0,8.05
--1,3,1,,0,0,7.75
-1,3,1,,0,0,7.775
-1,3,1,,1,1,15.2458
-1,3,1,,1,1,15.2458
-1,3,2,,0,2,15.2458
-1,3,2,,0,0,7.2292
--1,3,1,,0,0,8.05
-1,3,2,,0,0,7.7333
-1,3,2,24,0,0,7.75
--1,3,1,,0,0,8.05
-1,3,2,,1,0,15.5
-1,3,2,,1,0,15.5
-1,3,2,,0,0,15.5
--1,3,1,18,0,0,7.75
--1,3,1,22,0,0,7.8958
-1,3,2,15,0,0,7.225
-1,3,2,1,0,2,15.7417
-1,3,1,20,1,1,15.7417
-1,3,2,19,1,1,15.7417
--1,3,1,33,0,0,8.05
--1,3,1,,0,0,7.8958
--1,3,1,,0,0,7.2292
--1,3,2,,0,0,7.75
--1,3,1,,0,0,7.8958
-1,3,1,12,1,0,11.2417
-1,3,2,14,1,0,11.2417
--1,3,2,29,0,0,7.925
--1,3,1,28,0,0,8.05
-1,3,2,18,0,0,7.775
-1,3,2,26,0,0,7.8542
--1,3,1,21,0,0,7.8542
--1,3,1,41,0,0,7.125
-1,3,1,39,0,0,7.925
--1,3,1,21,0,0,7.8
--1,3,1,28.5,0,0,7.2292
-1,3,2,22,0,0,7.75
--1,3,1,61,0,0,6.2375
--1,3,1,,1,0,15.5
--1,3,1,,0,0,7.8292
-1,3,2,,1,0,15.5
--1,3,1,,0,0,7.7333
--1,3,1,,0,0,7.75
--1,3,1,,0,0,7.75
--1,3,1,23,0,0,9.225
--1,3,2,,0,0,7.75
-1,3,2,,0,0,7.75
-1,3,2,,0,0,7.8792
-1,3,2,22,0,0,7.775
-1,3,1,,0,0,7.75
-1,3,2,,0,0,7.8292
-1,3,1,9,0,1,3.1708
--1,3,1,28,0,0,22.525
--1,3,1,42,0,1,8.4042
--1,3,1,,0,0,7.3125
--1,3,2,31,0,0,7.8542
--1,3,1,28,0,0,7.8542
-1,3,1,32,0,0,7.775
--1,3,1,20,0,0,9.225
--1,3,2,23,0,0,8.6625
--1,3,2,20,0,0,8.6625
--1,3,1,20,0,0,8.6625
--1,3,1,16,0,0,9.2167
-1,3,2,31,0,0,8.6833
--1,3,2,,0,0,7.6292
--1,3,1,2,3,1,21.075
--1,3,1,6,3,1,21.075
--1,3,2,3,3,1,21.075
--1,3,2,8,3,1,21.075
--1,3,2,29,0,4,21.075
--1,3,1,1,4,1,39.6875
--1,3,1,7,4,1,39.6875
--1,3,1,2,4,1,39.6875
--1,3,1,16,4,1,39.6875
--1,3,1,14,4,1,39.6875
--1,3,2,41,0,5,39.6875
--1,3,1,21,0,0,8.6625
--1,3,1,19,0,0,14.5
--1,3,1,,0,0,8.7125
--1,3,1,32,0,0,7.8958
--1,3,1,0.75,1,1,13.775
--1,3,2,3,1,1,13.775
--1,3,2,26,0,2,13.775
--1,3,1,,0,0,7
--1,3,1,,0,0,7.775
--1,3,1,,0,0,8.05
--1,3,1,21,0,0,7.925
--1,3,1,25,0,0,7.925
--1,3,1,22,0,0,7.25
-1,3,1,25,1,0,7.775
-1,3,1,,1,1,22.3583
-1,3,2,,1,1,22.3583
-1,3,2,,0,2,22.3583
--1,3,2,,0,0,8.1375
--1,3,1,24,0,0,8.05
--1,3,2,28,0,0,7.8958
--1,3,1,19,0,0,7.8958
--1,3,1,,0,0,7.8958
--1,3,1,25,1,0,7.775
--1,3,2,18,0,0,7.775
-1,3,1,32,0,0,8.05
--1,3,1,,0,0,7.8958
--1,3,1,17,0,0,8.6625
--1,3,1,24,0,0,8.6625
--1,3,1,,0,0,7.8958
--1,3,2,,0,0,8.1125
--1,3,1,,0,0,7.2292
--1,3,1,,0,0,7.25
--1,3,1,38,0,0,7.8958
--1,3,1,21,0,0,8.05
--1,3,1,10,4,1,29.125
--1,3,1,4,4,1,29.125
--1,3,1,7,4,1,29.125
--1,3,1,2,4,1,29.125
--1,3,1,8,4,1,29.125
--1,3,2,39,0,5,29.125
--1,3,2,22,0,0,39.6875
--1,3,1,35,0,0,7.125
-1,3,2,,0,0,7.7208
--1,3,1,,0,0,14.5
--1,3,2,,0,0,14.5
--1,3,1,50,1,0,14.5
--1,3,2,47,1,0,14.5
--1,3,1,,0,0,8.05
--1,3,1,,0,0,7.775
--1,3,2,2,1,1,20.2125
--1,3,1,18,1,1,20.2125
--1,3,2,41,0,2,20.2125
-1,3,2,,0,0,8.05
--1,3,1,50,0,0,8.05
--1,3,1,16,0,0,8.05
-1,3,1,,0,0,7.75
--1,3,1,,0,0,24.15
--1,3,1,,0,0,7.2292
--1,3,1,25,0,0,7.225
--1,3,1,,0,0,7.225
--1,3,1,,0,0,7.7292
--1,3,1,,0,0,7.575
--1,3,1,38.5,0,0,7.25
--1,3,1,,8,2,69.55
--1,3,1,14.5,8,2,69.55
--1,3,2,,8,2,69.55
--1,3,2,,8,2,69.55
--1,3,2,,8,2,69.55
--1,3,2,,8,2,69.55
--1,3,1,,8,2,69.55
--1,3,1,,8,2,69.55
--1,3,1,,8,2,69.55
--1,3,1,,1,9,69.55
--1,3,2,,1,9,69.55
--1,3,1,24,0,0,9.325
-1,3,2,21,0,0,7.65
--1,3,1,39,0,0,7.925
--1,3,1,,2,0,21.6792
--1,3,1,,2,0,21.6792
--1,3,1,,2,0,21.6792
-1,3,2,1,1,1,16.7
-1,3,2,24,0,2,16.7
-1,3,2,4,1,1,16.7
-1,3,1,25,0,0,9.5
--1,3,1,20,0,0,8.05
--1,3,1,24.5,0,0,8.05
--1,3,1,,0,0,7.725
--1,3,1,,0,0,7.8958
--1,3,1,,0,0,7.75
-1,3,1,29,0,0,9.5
--1,3,1,,0,0,15.1
-1,3,2,,0,0,7.7792
--1,3,1,,0,0,8.05
--1,3,1,,0,0,8.05
--1,3,1,22,0,0,7.2292
--1,3,1,,0,0,8.05
--1,3,1,40,0,0,7.8958
--1,3,1,21,0,0,7.925
-1,3,2,18,0,0,7.4958
--1,3,1,4,3,2,27.9
--1,3,1,10,3,2,27.9
--1,3,2,9,3,2,27.9
--1,3,2,2,3,2,27.9
--1,3,1,40,1,4,27.9
--1,3,2,45,1,4,27.9
--1,3,1,,0,0,7.8958
--1,3,1,,0,0,8.05
--1,3,1,,0,0,8.6625
--1,3,1,,0,0,7.75
-1,3,2,,0,0,7.7333
--1,3,1,19,0,0,7.65
--1,3,1,30,0,0,8.05
--1,3,1,,0,0,8.05
--1,3,1,32,0,0,8.05
--1,3,1,,0,0,7.8958
--1,3,1,33,0,0,8.6625
-1,3,2,23,0,0,7.55
--1,3,1,21,0,0,8.05
--1,3,1,60.5,0,0,0
--1,3,1,19,0,0,7.8958
--1,3,2,22,0,0,9.8375
-1,3,1,31,0,0,7.925
--1,3,1,27,0,0,8.6625
--1,3,2,2,0,1,10.4625
--1,3,2,29,1,1,10.4625
-1,3,1,16,0,0,8.05
-1,3,1,44,0,0,7.925
--1,3,1,25,0,0,7.05
--1,3,1,74,0,0,7.775
-1,3,1,14,0,0,9.225
--1,3,1,24,0,0,7.7958
-1,3,1,25,0,0,7.7958
--1,3,1,34,0,0,8.05
-1,3,1,0.4167,0,1,8.5167
--1,3,1,,1,0,6.4375
--1,3,1,,0,0,6.4375
--1,3,1,,0,0,7.225
-1,3,2,16,1,1,8.5167
--1,3,1,,0,0,8.05
--1,3,1,,1,0,16.1
-1,3,2,,1,0,16.1
--1,3,1,32,0,0,7.925
--1,3,1,,0,0,7.75
--1,3,1,,0,0,7.8958
--1,3,1,30.5,0,0,8.05
--1,3,1,44,0,0,8.05
--1,3,1,,0,0,7.2292
-1,3,1,25,0,0,0
--1,3,1,,0,0,7.2292
-1,3,1,7,1,1,15.2458
-1,3,2,9,1,1,15.2458
-1,3,2,29,0,2,15.2458
--1,3,1,36,0,0,7.8958
-1,3,2,18,0,0,9.8417
-1,3,2,63,0,0,9.5875
--1,3,1,,1,1,14.5
--1,3,1,11.5,1,1,14.5
--1,3,1,40.5,0,2,14.5
--1,3,2,10,0,2,24.15
--1,3,1,36,1,1,24.15
--1,3,2,30,1,1,24.15
--1,3,1,,0,0,9.5
--1,3,1,33,0,0,9.5
--1,3,1,28,0,0,9.5
--1,3,1,28,0,0,9.5
--1,3,1,47,0,0,9
--1,3,2,18,2,0,18
--1,3,1,31,3,0,18
--1,3,1,16,2,0,18
--1,3,2,31,1,0,18
-1,3,1,22,0,0,7.225
--1,3,1,20,0,0,7.8542
--1,3,2,14,0,0,7.8542
--1,3,1,22,0,0,7.8958
--1,3,1,22,0,0,9
--1,3,1,,0,0,8.05
--1,3,1,,0,0,7.55
--1,3,1,,0,0,8.05
--1,3,1,32.5,0,0,9.5
-1,3,2,38,0,0,7.2292
--1,3,1,51,0,0,7.75
--1,3,1,18,1,0,6.4958
--1,3,1,21,1,0,6.4958
-1,3,2,47,1,0,7
--1,3,1,,0,0,8.7125
--1,3,1,,0,0,7.55
--1,3,1,,0,0,8.05
--1,3,1,28.5,0,0,16.1
--1,3,1,21,0,0,7.25
--1,3,1,27,0,0,8.6625
--1,3,1,,0,0,7.25
--1,3,1,36,0,0,9.5
--1,3,1,27,1,0,14.4542
-1,3,2,15,1,0,14.4542
--1,3,1,45.5,0,0,7.225
--1,3,1,,0,0,7.225
--1,3,1,,0,0,14.4583
--1,3,2,14.5,1,0,14.4542
--1,3,2,,1,0,14.4542
--1,3,1,26.5,0,0,7.225
--1,3,1,27,0,0,7.225
--1,3,1,29,0,0,7.875

http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/main/java/org/apache/ignite/ml/structures/Dataset.java
----------------------------------------------------------------------
diff --git 
a/modules/ml/src/main/java/org/apache/ignite/ml/structures/Dataset.java 
b/modules/ml/src/main/java/org/apache/ignite/ml/structures/Dataset.java
index 89ff1b4..cbed297 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/structures/Dataset.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/structures/Dataset.java
@@ -86,6 +86,16 @@ public class Dataset<Row extends DatasetRow> implements 
Serializable, Externaliz
     }
 
     /**
+     * Creates new Dataset by given data.
+     *
+     * @param data Should be initialized with one vector at least.
+     */
+    public Dataset(Row[] data) {
+        this.data = data;
+        this.rowSize = data.length;
+    }
+
+    /**
      * Creates new Dataset and initialized with empty data structure.
      *
      * @param rowSize Amount of instances. Should be > 0.

http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/main/java/org/apache/ignite/ml/structures/LabeledDataset.java
----------------------------------------------------------------------
diff --git 
a/modules/ml/src/main/java/org/apache/ignite/ml/structures/LabeledDataset.java 
b/modules/ml/src/main/java/org/apache/ignite/ml/structures/LabeledDataset.java
index 0028a16..3239116 100644
--- 
a/modules/ml/src/main/java/org/apache/ignite/ml/structures/LabeledDataset.java
+++ 
b/modules/ml/src/main/java/org/apache/ignite/ml/structures/LabeledDataset.java
@@ -27,7 +27,7 @@ import 
org.apache.ignite.ml.math.impls.vector.SparseDistributedVector;
 /**
  * Class for set of labeled vectors.
  */
-public class LabeledDataset<L, Row extends LabeledVector> extends Dataset<Row> 
{
+public class LabeledDataset<L, Row extends LabeledVector> extends Dataset<Row> 
implements AutoCloseable {
     /**
      * Default constructor (required by Externalizable).
      */
@@ -70,6 +70,15 @@ public class LabeledDataset<L, Row extends LabeledVector> 
extends Dataset<Row> {
         initializeDataWithLabeledVectors();
     }
 
+    /**
+     * Creates new Labeled Dataset by given data.
+     *
+     * @param data Should be initialized with one vector at least.
+     */
+    public LabeledDataset(Row[] data) {
+        super(data);
+    }
+
     /** */
     private void initializeDataWithLabeledVectors() {
         data = (Row[])new LabeledVector[rowSize];
@@ -207,4 +216,9 @@ public class LabeledDataset<L, Row extends LabeledVector> 
extends Dataset<Row> {
 
         return res;
     }
+
+    /** Closes LabeledDataset. */
+    @Override public void close() throws Exception {
+
+    }
 }

http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearBinaryClassificationModel.java
----------------------------------------------------------------------
diff --git 
a/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearBinaryClassificationModel.java
 
b/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearBinaryClassificationModel.java
index 6d93402..4fd2e0e 100644
--- 
a/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearBinaryClassificationModel.java
+++ 
b/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearBinaryClassificationModel.java
@@ -146,7 +146,9 @@ public class SVMLinearBinaryClassificationModel implements 
Model<Vector, Double>
             return true;
         if (o == null || getClass() != o.getClass())
             return false;
+
         SVMLinearBinaryClassificationModel mdl = 
(SVMLinearBinaryClassificationModel)o;
+
         return Double.compare(mdl.intercept, intercept) == 0
             && Double.compare(mdl.threshold, threshold) == 0
             && Boolean.compare(mdl.isKeepingRawLabels, isKeepingRawLabels) == 0
@@ -176,7 +178,7 @@ public class SVMLinearBinaryClassificationModel implements 
Model<Vector, Double>
             return builder.toString();
         }
 
-        return "LinearRegressionModel{" +
+        return "SVMModel{" +
             "weights=" + weights +
             ", intercept=" + intercept +
             '}';

http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearBinaryClassificationTrainer.java
----------------------------------------------------------------------
diff --git 
a/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearBinaryClassificationTrainer.java
 
b/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearBinaryClassificationTrainer.java
index ee3b6e8..e745ca9 100644
--- 
a/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearBinaryClassificationTrainer.java
+++ 
b/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearBinaryClassificationTrainer.java
@@ -18,10 +18,13 @@
 package org.apache.ignite.ml.svm;
 
 import java.util.concurrent.ThreadLocalRandom;
-import org.apache.ignite.ml.Trainer;
+import org.apache.ignite.ml.DatasetTrainer;
+import org.apache.ignite.ml.dataset.Dataset;
+import org.apache.ignite.ml.dataset.DatasetBuilder;
+import org.apache.ignite.ml.dataset.PartitionDataBuilder;
 import org.apache.ignite.ml.math.Vector;
+import org.apache.ignite.ml.math.functions.IgniteBiFunction;
 import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector;
-import org.apache.ignite.ml.math.impls.vector.SparseDistributedVector;
 import org.apache.ignite.ml.structures.LabeledDataset;
 import org.apache.ignite.ml.structures.LabeledVector;
 import org.jetbrains.annotations.NotNull;
@@ -32,33 +35,50 @@ import org.jetbrains.annotations.NotNull;
  * and +1 labels for two classes and makes binary classification. </p> The 
paper about this algorithm could be found
  * here https://arxiv.org/abs/1409.1458.
  */
-public class SVMLinearBinaryClassificationTrainer implements 
Trainer<SVMLinearBinaryClassificationModel, LabeledDataset> {
+public class SVMLinearBinaryClassificationTrainer<K, V>
+    implements DatasetTrainer<K, V, SVMLinearBinaryClassificationModel> {
     /** Amount of outer SDCA algorithm iterations. */
-    private int amountOfIterations = 20;
+    private int amountOfIterations = 200;
 
     /** Amount of local SDCA algorithm iterations. */
-    private int amountOfLocIterations = 50;
+    private int amountOfLocIterations = 100;
 
     /** Regularization parameter. */
-    private double lambda = 0.2;
+    private double lambda = 0.4;
 
-    /** This flag enables distributed mode for this algorithm. */
-    private boolean isDistributed;
+    /** Dataset. */
+    private Dataset<SVMPartitionContext, LabeledDataset<Double, 
LabeledVector>> dataset;
 
     /**
-     * Returns model based on data
+     * Trains model based on the specified data.
      *
-     * @param data data to build model
-     * @return model
+     * @param datasetBuilder   Dataset builder.
+     * @param featureExtractor Feature extractor.
+     * @param lbExtractor      Label extractor.
+     * @param cols             Number of columns.
+     * @return Model.
      */
-    @Override public SVMLinearBinaryClassificationModel train(LabeledDataset 
data) {
-        isDistributed = data.isDistributed();
+    @Override public SVMLinearBinaryClassificationModel fit(DatasetBuilder<K, 
V> datasetBuilder, IgniteBiFunction<K, V, double[]> featureExtractor, 
IgniteBiFunction<K, V, Double> lbExtractor, int cols) {
 
-        final int weightVectorSizeWithIntercept = data.colSize() + 1;
+        assert datasetBuilder != null;
+
+        PartitionDataBuilder<K, V, SVMPartitionContext, LabeledDataset<Double, 
LabeledVector>> partDataBuilder = new SVMPartitionDataBuilderOnHeap<>(
+            featureExtractor,
+            lbExtractor,
+            cols
+        );
+
+        this.dataset = datasetBuilder.build(
+            (upstream, upstreamSize) -> new SVMPartitionContext(),
+            partDataBuilder
+        );
+
+
+        final int weightVectorSizeWithIntercept = cols + 1;
         Vector weights = 
initializeWeightsWithZeros(weightVectorSizeWithIntercept);
 
         for (int i = 0; i < this.getAmountOfIterations(); i++) {
-            Vector deltaWeights = calculateUpdates(data, weights);
+            Vector deltaWeights = calculateUpdates(weights);
             weights = weights.plus(deltaWeights); // creates new vector
         }
 
@@ -67,34 +87,32 @@ public class SVMLinearBinaryClassificationTrainer 
implements Trainer<SVMLinearBi
 
     /** */
     @NotNull private Vector initializeWeightsWithZeros(int vectorSize) {
-        if (isDistributed)
-            return new SparseDistributedVector(vectorSize);
-        else
             return new DenseLocalOnHeapVector(vectorSize);
     }
 
     /** */
-    private Vector calculateUpdates(LabeledDataset data, Vector weights) {
-        Vector copiedWeights = weights.copy();
-        Vector deltaWeights = initializeWeightsWithZeros(weights.size());
-
-        final int amountOfObservation = data.rowSize();
+    private Vector calculateUpdates(Vector weights) {
+        return dataset.compute(data -> {
+            Vector copiedWeights = weights.copy();
+            Vector deltaWeights = initializeWeightsWithZeros(weights.size());
+            final int amountOfObservation = data.rowSize();
 
-        Vector tmpAlphas = initializeWeightsWithZeros(amountOfObservation);
-        Vector deltaAlphas = initializeWeightsWithZeros(amountOfObservation);
+            Vector tmpAlphas = initializeWeightsWithZeros(amountOfObservation);
+            Vector deltaAlphas = 
initializeWeightsWithZeros(amountOfObservation);
 
-        for (int i = 0; i < this.getAmountOfLocIterations(); i++) {
-            int randomIdx = 
ThreadLocalRandom.current().nextInt(amountOfObservation);
+            for (int i = 0; i < this.getAmountOfLocIterations(); i++) {
+                int randomIdx = 
ThreadLocalRandom.current().nextInt(amountOfObservation);
 
-            Deltas deltas = getDeltas(data, copiedWeights, 
amountOfObservation, tmpAlphas, randomIdx);
+                Deltas deltas = getDeltas(data, copiedWeights, 
amountOfObservation, tmpAlphas, randomIdx);
 
-            copiedWeights = copiedWeights.plus(deltas.deltaWeights); // 
creates new vector
-            deltaWeights = deltaWeights.plus(deltas.deltaWeights);  // creates 
new vector
+                copiedWeights = copiedWeights.plus(deltas.deltaWeights); // 
creates new vector
+                deltaWeights = deltaWeights.plus(deltas.deltaWeights);  // 
creates new vector
 
-            tmpAlphas.set(randomIdx, tmpAlphas.get(randomIdx) + 
deltas.deltaAlpha);
-            deltaAlphas.set(randomIdx, deltaAlphas.get(randomIdx) + 
deltas.deltaAlpha);
-        }
-        return deltaWeights;
+                tmpAlphas.set(randomIdx, tmpAlphas.get(randomIdx) + 
deltas.deltaAlpha);
+                deltaAlphas.set(randomIdx, deltaAlphas.get(randomIdx) + 
deltas.deltaAlpha);
+            }
+            return deltaWeights;
+        }, (a, b) -> a == null ? b : a.plus(b));
     }
 
     /** */
@@ -225,6 +243,7 @@ public class SVMLinearBinaryClassificationTrainer 
implements Trainer<SVMLinearBi
         this.amountOfLocIterations = amountOfLocIterations;
         return this;
     }
+
 }
 
 /** This is a helper class to handle pair results which are returned from the 
calculation method. */

http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearMultiClassClassificationModel.java
----------------------------------------------------------------------
diff --git 
a/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearMultiClassClassificationModel.java
 
b/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearMultiClassClassificationModel.java
deleted file mode 100644
index fd91595..0000000
--- 
a/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearMultiClassClassificationModel.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.ml.svm;
-
-import java.io.Serializable;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Objects;
-import java.util.TreeMap;
-import org.apache.ignite.ml.Exportable;
-import org.apache.ignite.ml.Exporter;
-import org.apache.ignite.ml.Model;
-import org.apache.ignite.ml.math.Vector;
-
-/** Base class for multi-classification model for set of SVM classifiers. */
-public class SVMLinearMultiClassClassificationModel implements Model<Vector, 
Double>, Exportable<SVMLinearMultiClassClassificationModel>, Serializable {
-    /** List of models associated with each class. */
-    private Map<Double, SVMLinearBinaryClassificationModel> models;
-
-    /** */
-    public SVMLinearMultiClassClassificationModel() {
-        this.models = new HashMap<>();
-    }
-
-    /** {@inheritDoc} */
-    @Override public Double apply(Vector input) {
-        TreeMap<Double, Double> maxMargins = new TreeMap<>();
-
-        models.forEach((k, v) -> maxMargins.put(input.dot(v.weights()) + 
v.intercept(), k));
-
-        return maxMargins.lastEntry().getValue();
-    }
-
-    /** {@inheritDoc} */
-    @Override public <P> void 
saveModel(Exporter<SVMLinearMultiClassClassificationModel, P> exporter, P path) 
{
-        exporter.save(this, path);
-    }
-
-    /** {@inheritDoc} */
-    @Override public boolean equals(Object o) {
-        if (this == o)
-            return true;
-        if (o == null || getClass() != o.getClass())
-            return false;
-        SVMLinearMultiClassClassificationModel mdl = 
(SVMLinearMultiClassClassificationModel)o;
-        return Objects.equals(models, mdl.models);
-    }
-
-    /** {@inheritDoc} */
-    @Override public int hashCode() {
-        return Objects.hash(models);
-    }
-
-    /** {@inheritDoc} */
-    @Override public String toString() {
-        StringBuilder wholeStr = new StringBuilder();
-
-        models.forEach((clsLb, mdl) -> {
-            wholeStr.append("The class with label " + clsLb + " has 
classifier: " + mdl.toString() + System.lineSeparator());
-        });
-
-        return wholeStr.toString();
-    }
-
-    /**
-     * Adds a specific SVM binary classifier to the bunch of same classifiers.
-     *
-     * @param clsLb The class label for the added model.
-     * @param mdl The model.
-     */
-    public void add(double clsLb, SVMLinearBinaryClassificationModel mdl) {
-        models.put(clsLb, mdl);
-    }
-}

http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearMultiClassClassificationTrainer.java
----------------------------------------------------------------------
diff --git 
a/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearMultiClassClassificationTrainer.java
 
b/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearMultiClassClassificationTrainer.java
deleted file mode 100644
index 669e2e3..0000000
--- 
a/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearMultiClassClassificationTrainer.java
+++ /dev/null
@@ -1,160 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.ml.svm;
-
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import org.apache.ignite.ml.Trainer;
-import org.apache.ignite.ml.structures.LabeledDataset;
-
-/**
- * Base class for a soft-margin SVM linear multiclass-classification trainer 
based on the communication-efficient
- * distributed dual coordinate ascent algorithm (CoCoA) with hinge-loss 
function.
- *
- * All common parameters are shared with bunch of binary classification 
trainers.
- */
-public class SVMLinearMultiClassClassificationTrainer implements 
Trainer<SVMLinearMultiClassClassificationModel, LabeledDataset> {
-    /** Amount of outer SDCA algorithm iterations. */
-    private int amountOfIterations = 20;
-
-    /** Amount of local SDCA algorithm iterations. */
-    private int amountOfLocIterations = 50;
-
-    /** Regularization parameter. */
-    private double lambda = 0.2;
-
-    /**
-     * Returns model based on data.
-     *
-     * @param data data to build model.
-     * @return model.
-     */
-    @Override public SVMLinearMultiClassClassificationModel 
train(LabeledDataset data) {
-        List<Double> classes = getClassLabels(data);
-
-        SVMLinearMultiClassClassificationModel multiClsMdl = new 
SVMLinearMultiClassClassificationModel();
-
-        classes.forEach(clsLb -> {
-            LabeledDataset binarizedDataset = binarizeLabels(data, clsLb);
-
-            SVMLinearBinaryClassificationTrainer trainer = new 
SVMLinearBinaryClassificationTrainer()
-                .withAmountOfIterations(this.amountOfIterations())
-                .withAmountOfLocIterations(this.amountOfLocIterations())
-                .withLambda(this.lambda());
-
-            multiClsMdl.add(clsLb, trainer.train(binarizedDataset));
-        });
-
-        return multiClsMdl;
-    }
-
-    /**
-     * Copies the given data and changes class labels in +1 for chosen class 
and in -1 for the rest classes.
-     *
-     * @param data Data to transform.
-     * @param clsLb Chosen class in schema One-vs-Rest.
-     * @return Copy of dataset with new labels.
-     */
-    private LabeledDataset binarizeLabels(LabeledDataset data, double clsLb) {
-        final LabeledDataset ds = data.copy();
-
-        for (int i = 0; i < ds.rowSize(); i++)
-            ds.setLabel(i, ds.label(i) == clsLb ? 1.0 : -1.0);
-
-        return ds;
-    }
-
-    /** Iterates among dataset and collects class labels. */
-    private List<Double> getClassLabels(LabeledDataset data) {
-        final Set<Double> clsLabels = new HashSet<>();
-
-        for (int i = 0; i < data.rowSize(); i++)
-            clsLabels.add(data.label(i));
-
-        List<Double> res = new ArrayList<>();
-        res.addAll(clsLabels);
-
-        return res;
-    }
-
-    /**
-     * Set up the regularization parameter.
-     *
-     * @param lambda The regularization parameter. Should be more than 0.0.
-     * @return Trainer with new lambda parameter value.
-     */
-    public SVMLinearMultiClassClassificationTrainer withLambda(double lambda) {
-        assert lambda > 0.0;
-        this.lambda = lambda;
-        return this;
-    }
-
-    /**
-     * Gets the regularization lambda.
-     *
-     * @return The parameter value.
-     */
-    public double lambda() {
-        return lambda;
-    }
-
-    /**
-     * Gets the amount of outer iterations of SCDA algorithm.
-     *
-     * @return The parameter value.
-     */
-    public int amountOfIterations() {
-        return amountOfIterations;
-    }
-
-    /**
-     * Set up the amount of outer iterations of SCDA algorithm.
-     *
-     * @param amountOfIterations The parameter value.
-     * @return Trainer with new amountOfIterations parameter value.
-     */
-    public SVMLinearMultiClassClassificationTrainer withAmountOfIterations(int 
amountOfIterations) {
-        this.amountOfIterations = amountOfIterations;
-        return this;
-    }
-
-    /**
-     * Gets the amount of local iterations of SCDA algorithm.
-     *
-     * @return The parameter value.
-     */
-    public int amountOfLocIterations() {
-        return amountOfLocIterations;
-    }
-
-    /**
-     * Set up the amount of local iterations of SCDA algorithm.
-     *
-     * @param amountOfLocIterations The parameter value.
-     * @return Trainer with new amountOfLocIterations parameter value.
-     */
-    public SVMLinearMultiClassClassificationTrainer 
withAmountOfLocIterations(int amountOfLocIterations) {
-        this.amountOfLocIterations = amountOfLocIterations;
-        return this;
-    }
-}
-
-
-

http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMPartitionContext.java
----------------------------------------------------------------------
diff --git 
a/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMPartitionContext.java 
b/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMPartitionContext.java
new file mode 100644
index 0000000..0aee0fb
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMPartitionContext.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.svm;
+
+import java.io.Serializable;
+
/**
 * Partition context of the SVM classification algorithm.
 * <p>
 * The context is currently stateless (no fields besides the serialization id): the SVM trainer
 * keeps no per-partition state between computations, so this class exists only to satisfy the
 * context type parameter of the partitioned dataset API.
 */
public class SVMPartitionContext implements Serializable {
    /** Serialization version id. */
    private static final long serialVersionUID = -7212307112344430126L;
}

http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMPartitionDataBuilderOnHeap.java
----------------------------------------------------------------------
diff --git 
a/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMPartitionDataBuilderOnHeap.java
 
b/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMPartitionDataBuilderOnHeap.java
new file mode 100644
index 0000000..ad85758
--- /dev/null
+++ 
b/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMPartitionDataBuilderOnHeap.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.svm;
+
+import java.io.Serializable;
+import java.util.Iterator;
+import org.apache.ignite.ml.dataset.PartitionDataBuilder;
+import org.apache.ignite.ml.dataset.UpstreamEntry;
+import org.apache.ignite.ml.math.functions.IgniteBiFunction;
+import org.apache.ignite.ml.structures.LabeledDataset;
+import org.apache.ignite.ml.structures.LabeledVector;
+
+/**
+ * SVM partition data builder that builds {@link LabeledDataset}.
+ *
+ * @param <K> Type of a key in <tt>upstream</tt> data.
+ * @param <V> Type of a value in <tt>upstream</tt> data.
+ * @param <C> Type of a partition <tt>context</tt>.
+ */
+public class SVMPartitionDataBuilderOnHeap<K, V, C extends Serializable>
+    implements PartitionDataBuilder<K, V, C, LabeledDataset<Double, 
LabeledVector>> {
+    /** */
+    private static final long serialVersionUID = -7820760153954269227L;
+
+    /** Extractor of X matrix row. */
+    private final IgniteBiFunction<K, V, double[]> xExtractor;
+
+    /** Extractor of Y vector value. */
+    private final IgniteBiFunction<K, V, Double> yExtractor;
+
+    /** Number of columns. */
+    private final int cols;
+
+    /**
+     * Constructs a new instance of SVM partition data builder.
+     *
+     * @param xExtractor Extractor of X matrix row.
+     * @param yExtractor Extractor of Y vector value.
+     * @param cols       Number of columns.
+     */
+    public SVMPartitionDataBuilderOnHeap(IgniteBiFunction<K, V, double[]> 
xExtractor,
+                                         IgniteBiFunction<K, V, Double> 
yExtractor, int cols) {
+        this.xExtractor = xExtractor;
+        this.yExtractor = yExtractor;
+        this.cols = cols;
+    }
+
+    /** {@inheritDoc} */
+    @Override public LabeledDataset<Double, LabeledVector> 
build(Iterator<UpstreamEntry<K, V>> upstreamData, long upstreamDataSize,
+                                                                 C ctx) {
+
+        double[][] x = new double[Math.toIntExact(upstreamDataSize)][cols];
+        double[] y = new double[Math.toIntExact(upstreamDataSize)];
+
+        int ptr = 0;
+        while (upstreamData.hasNext()) {
+            UpstreamEntry<K, V> entry = upstreamData.next();
+            double[] row = xExtractor.apply(entry.getKey(), entry.getValue());
+
+            assert row.length == cols : "X extractor must return exactly " + 
cols + " columns";
+
+            x[ptr] = row;
+
+            y[ptr] = yExtractor.apply(entry.getKey(), entry.getValue());
+
+            ptr++;
+        }
+
+        return new LabeledDataset<>(x, y);
+    }
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/test/java/org/apache/ignite/ml/LocalModelsTest.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/LocalModelsTest.java 
b/modules/ml/src/test/java/org/apache/ignite/ml/LocalModelsTest.java
index 57d93d6..421a19f 100644
--- a/modules/ml/src/test/java/org/apache/ignite/ml/LocalModelsTest.java
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/LocalModelsTest.java
@@ -32,7 +32,6 @@ import 
org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector;
 import org.apache.ignite.ml.regressions.linear.LinearRegressionModel;
 import org.apache.ignite.ml.structures.LabeledDataset;
 import org.apache.ignite.ml.svm.SVMLinearBinaryClassificationModel;
-import org.apache.ignite.ml.svm.SVMLinearMultiClassClassificationModel;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -98,33 +97,6 @@ public class LocalModelsTest {
 
 
     /** */
-    @Test
-    public void importExportSVMMulticlassClassificationModelTest() throws 
IOException {
-        executeModelTest(mdlFilePath -> {
-
-
-            SVMLinearBinaryClassificationModel binaryMdl1 = new 
SVMLinearBinaryClassificationModel(new DenseLocalOnHeapVector(new double[]{1, 
2}), 3);
-            SVMLinearBinaryClassificationModel binaryMdl2 = new 
SVMLinearBinaryClassificationModel(new DenseLocalOnHeapVector(new double[]{2, 
3}), 4);
-            SVMLinearBinaryClassificationModel binaryMdl3 = new 
SVMLinearBinaryClassificationModel(new DenseLocalOnHeapVector(new double[]{3, 
4}), 5);
-
-            SVMLinearMultiClassClassificationModel mdl = new 
SVMLinearMultiClassClassificationModel();
-            mdl.add(1, binaryMdl1);
-            mdl.add(2, binaryMdl2);
-            mdl.add(3, binaryMdl3);
-
-            Exporter<SVMLinearMultiClassClassificationModel, String> exporter 
= new FileExporter<>();
-            mdl.saveModel(exporter, mdlFilePath);
-
-            SVMLinearMultiClassClassificationModel load = 
exporter.load(mdlFilePath);
-
-            Assert.assertNotNull(load);
-            Assert.assertEquals("", mdl, load);
-
-            return null;
-        });
-    }
-
-    /** */
     private void executeModelTest(Function<String, Void> code) throws 
IOException {
         Path mdlPath = Files.createTempFile(null, null);
 

http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/test/java/org/apache/ignite/ml/svm/BaseSVMTest.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/svm/BaseSVMTest.java 
b/modules/ml/src/test/java/org/apache/ignite/ml/svm/BaseSVMTest.java
deleted file mode 100644
index 424118d..0000000
--- a/modules/ml/src/test/java/org/apache/ignite/ml/svm/BaseSVMTest.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.ml.svm;
-
-import org.apache.ignite.Ignite;
-import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
-
-/**
- * Base class for SVM tests.
- */
-public class BaseSVMTest extends GridCommonAbstractTest {
-    /** Count of nodes. */
-    private static final int NODE_COUNT = 4;
-
-    /** Grid instance. */
-    protected Ignite ignite;
-
-    /**
-     * Default constructor.
-     */
-    public BaseSVMTest() {
-        super(false);
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override protected void beforeTest() throws Exception {
-        ignite = grid(NODE_COUNT);
-    }
-
-    /** {@inheritDoc} */
-    @Override protected void beforeTestsStarted() throws Exception {
-        for (int i = 1; i <= NODE_COUNT; i++)
-            startGrid(i);
-    }
-
-    /** {@inheritDoc} */
-    @Override protected void afterTestsStopped() throws Exception {
-        stopAllGrids();
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMBinaryTrainerTest.java
----------------------------------------------------------------------
diff --git 
a/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMBinaryTrainerTest.java 
b/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMBinaryTrainerTest.java
new file mode 100644
index 0000000..353915c
--- /dev/null
+++ 
b/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMBinaryTrainerTest.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.svm;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.ThreadLocalRandom;
+import org.apache.ignite.ml.TestUtils;
+import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder;
+import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector;
+import org.junit.Test;
+
+/**
+ * Tests for {@link SVMLinearBinaryClassificationTrainer}.
+ */
+public class SVMBinaryTrainerTest {
+    /** Fixed size of Dataset. */
+    private static final int AMOUNT_OF_OBSERVATIONS = 1000;
+
+    /** Fixed size of columns in Dataset. */
+    private static final int AMOUNT_OF_FEATURES = 2;
+
+    /**
+     * Test trainer on classification model y = x.
+     */
+    @Test
+    public void testTrainWithTheLinearlySeparableCase() {
+        Map<Integer, double[]> data = new HashMap<>();
+
+
+        ThreadLocalRandom rndX = ThreadLocalRandom.current();
+        ThreadLocalRandom rndY = ThreadLocalRandom.current();
+
+        for (int i = 0; i < AMOUNT_OF_OBSERVATIONS; i++) {
+            double x = rndX.nextDouble(-1000, 1000);
+            double y = rndY.nextDouble(-1000, 1000);
+            double[] vec = new double[AMOUNT_OF_FEATURES + 1];
+            vec[0] = y - x > 0 ? 1 : -1; // assign label.
+            vec[1] = x;
+            vec[2] = y;
+            data.put(i, vec);
+        }
+
+
+        SVMLinearBinaryClassificationTrainer<Integer, double[]> trainer = new 
SVMLinearBinaryClassificationTrainer<>();
+
+        SVMLinearBinaryClassificationModel mdl = trainer.fit(
+            new LocalDatasetBuilder<>(data, 10),
+            (k, v) -> Arrays.copyOfRange(v, 1, v.length),
+            (k, v) -> v[0],
+            AMOUNT_OF_FEATURES);
+
+        double precision = 1e-2;
+
+        TestUtils.assertEquals(-1, mdl.apply(new DenseLocalOnHeapVector(new 
double[]{100, 10})), precision);
+        TestUtils.assertEquals(1, mdl.apply(new DenseLocalOnHeapVector(new 
double[]{10, 100})), precision);
+    }
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMModelTest.java
----------------------------------------------------------------------
diff --git 
a/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMModelTest.java 
b/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMModelTest.java
index 35b6644..2533466 100644
--- a/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMModelTest.java
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMModelTest.java
@@ -59,21 +59,6 @@ public class SVMModelTest {
 
     /** */
     @Test
-    public void testPredictWithMultiClasses() {
-        Vector weights1 = new DenseLocalOnHeapVector(new double[]{10.0, 0.0});
-        Vector weights2 = new DenseLocalOnHeapVector(new double[]{0.0, 10.0});
-        Vector weights3 = new DenseLocalOnHeapVector(new double[]{-1.0, -1.0});
-        SVMLinearMultiClassClassificationModel mdl = new 
SVMLinearMultiClassClassificationModel();
-        mdl.add(1, new SVMLinearBinaryClassificationModel(weights1, 
0.0).withRawLabels(true));
-        mdl.add(2, new SVMLinearBinaryClassificationModel(weights2, 
0.0).withRawLabels(true));
-        mdl.add(2, new SVMLinearBinaryClassificationModel(weights3, 
0.0).withRawLabels(true));
-
-        Vector observation = new DenseLocalOnHeapVector(new double[]{1.0, 
1.0});
-        TestUtils.assertEquals( 1.0, mdl.apply(observation), PRECISION);
-    }
-
-    /** */
-    @Test
     public void testPredictWithErasedLabels() {
         Vector weights = new DenseLocalOnHeapVector(new double[]{1.0, 1.0});
         SVMLinearBinaryClassificationModel mdl = new 
SVMLinearBinaryClassificationModel(weights, 1.0);

http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMTestSuite.java
----------------------------------------------------------------------
diff --git 
a/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMTestSuite.java 
b/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMTestSuite.java
index 853a43f..dd87fec 100644
--- a/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMTestSuite.java
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMTestSuite.java
@@ -17,23 +17,16 @@
 
 package org.apache.ignite.ml.svm;
 
-import 
org.apache.ignite.ml.svm.binary.DistributedLinearSVMBinaryClassificationTrainerTest;
-import 
org.apache.ignite.ml.svm.binary.LocalLinearSVMBinaryClassificationTrainerTest;
-import 
org.apache.ignite.ml.svm.multi.DistributedLinearSVMMultiClassClassificationTrainerTest;
-import 
org.apache.ignite.ml.svm.multi.LocalLinearSVMMultiClassClassificationTrainerTest;
 import org.junit.runner.RunWith;
 import org.junit.runners.Suite;
 
 /**
- * Test suite for all tests located in org.apache.ignite.ml.regressions.* 
package.
+ * Test suite for all tests located in org.apache.ignite.ml.svm.* package.
  */
 @RunWith(Suite.class)
 @Suite.SuiteClasses({
-    LocalLinearSVMBinaryClassificationTrainerTest.class,
-    DistributedLinearSVMBinaryClassificationTrainerTest.class,
-    LocalLinearSVMMultiClassClassificationTrainerTest.class,
-    DistributedLinearSVMMultiClassClassificationTrainerTest.class,
-    SVMModelTest.class
+    SVMModelTest.class,
+    SVMBinaryTrainerTest.class
 })
 public class SVMTestSuite {
     // No-op.

http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/test/java/org/apache/ignite/ml/svm/binary/DistributedLinearSVMBinaryClassificationTrainerTest.java
----------------------------------------------------------------------
diff --git 
a/modules/ml/src/test/java/org/apache/ignite/ml/svm/binary/DistributedLinearSVMBinaryClassificationTrainerTest.java
 
b/modules/ml/src/test/java/org/apache/ignite/ml/svm/binary/DistributedLinearSVMBinaryClassificationTrainerTest.java
deleted file mode 100644
index 1be1d1c..0000000
--- 
a/modules/ml/src/test/java/org/apache/ignite/ml/svm/binary/DistributedLinearSVMBinaryClassificationTrainerTest.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.ml.svm.binary;
-
-import org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix;
-import org.apache.ignite.ml.regressions.linear.LinearRegressionSGDTrainer;
-import org.apache.ignite.ml.svm.SVMLinearBinaryClassificationTrainer;
-
-/**
- * Tests for {@link LinearRegressionSGDTrainer} on {@link 
DenseLocalOnHeapMatrix}.
- */
-public class DistributedLinearSVMBinaryClassificationTrainerTest extends 
GenericLinearSVMBinaryClassificationTrainerTest {
-    /** */
-    public DistributedLinearSVMBinaryClassificationTrainerTest() {
-        super(
-            new SVMLinearBinaryClassificationTrainer(),
-            true,
-            1e-2);
-    }
-}

http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/test/java/org/apache/ignite/ml/svm/binary/GenericLinearSVMBinaryClassificationTrainerTest.java
----------------------------------------------------------------------
diff --git 
a/modules/ml/src/test/java/org/apache/ignite/ml/svm/binary/GenericLinearSVMBinaryClassificationTrainerTest.java
 
b/modules/ml/src/test/java/org/apache/ignite/ml/svm/binary/GenericLinearSVMBinaryClassificationTrainerTest.java
deleted file mode 100644
index f390557..0000000
--- 
a/modules/ml/src/test/java/org/apache/ignite/ml/svm/binary/GenericLinearSVMBinaryClassificationTrainerTest.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.ml.svm.binary;
-
-import java.util.concurrent.ThreadLocalRandom;
-import org.apache.ignite.internal.util.IgniteUtils;
-import org.apache.ignite.ml.TestUtils;
-import org.apache.ignite.ml.Trainer;
-import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector;
-import org.apache.ignite.ml.structures.LabeledDataset;
-import org.apache.ignite.ml.structures.LabeledVector;
-import org.apache.ignite.ml.svm.BaseSVMTest;
-import org.apache.ignite.ml.svm.SVMLinearBinaryClassificationModel;
-import org.junit.Test;
-
-/**
- * Base class for all linear regression trainers.
- */
-public class GenericLinearSVMBinaryClassificationTrainerTest extends 
BaseSVMTest {
-    /** Fixed size of Dataset. */
-    private static final int AMOUNT_OF_OBSERVATIONS = 100;
-
-    /** Fixed size of columns in Dataset. */
-    private static final int AMOUNT_OF_FEATURES = 2;
-
-    /** */
-    private final Trainer<SVMLinearBinaryClassificationModel, LabeledDataset> 
trainer;
-
-    /** */
-    private boolean isDistributed;
-
-    /** */
-    private final double precision;
-
-    /** */
-    GenericLinearSVMBinaryClassificationTrainerTest(
-        Trainer<SVMLinearBinaryClassificationModel, LabeledDataset> trainer,
-        boolean isDistributed,
-        double precision) {
-        super();
-        this.trainer = trainer;
-        this.precision = precision;
-        this.isDistributed = isDistributed;
-    }
-
-    /**
-     * Test trainer on classification model y = x.
-     */
-    @Test
-    public void testTrainWithTheLinearlySeparableCase() {
-        if (isDistributed)
-            
IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName());
-
-        LabeledDataset dataset = new LabeledDataset<Double, 
LabeledVector>(AMOUNT_OF_OBSERVATIONS, AMOUNT_OF_FEATURES, isDistributed);
-
-        ThreadLocalRandom rndX = ThreadLocalRandom.current();
-        ThreadLocalRandom rndY = ThreadLocalRandom.current();
-        for (int i = 0; i < AMOUNT_OF_OBSERVATIONS; i++) {
-            double x = rndX.nextDouble(-1000, 1000);
-            double y = rndY.nextDouble(-1000, 1000);
-            dataset.features(i).set(0, x);
-            dataset.features(i).set(1, y);
-            double lb = y - x > 0 ? 1 : -1;
-            dataset.setLabel(i, lb);
-        }
-
-        SVMLinearBinaryClassificationModel mdl = trainer.train(dataset);
-
-        TestUtils.assertEquals(-1, mdl.apply(new DenseLocalOnHeapVector(new 
double[] {100, 10})), precision);
-        TestUtils.assertEquals(1, mdl.apply(new DenseLocalOnHeapVector(new 
double[] {10, 100})), precision);
-    }
-
-    /**
-     * Test trainer on classification model y = x. Amount of generated points 
is increased 10 times.
-     */
-    @Test
-    public void testTrainWithTheLinearlySeparableCase10() {
-        if (isDistributed)
-            
IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName());
-
-        LabeledDataset dataset = new LabeledDataset<Double, 
LabeledVector>(AMOUNT_OF_OBSERVATIONS * 10, AMOUNT_OF_FEATURES, isDistributed);
-
-        ThreadLocalRandom rndX = ThreadLocalRandom.current();
-        ThreadLocalRandom rndY = ThreadLocalRandom.current();
-        for (int i = 0; i < AMOUNT_OF_OBSERVATIONS * 10; i++) {
-            double x = rndX.nextDouble(-1000, 1000);
-            double y = rndY.nextDouble(-1000, 1000);
-            dataset.features(i).set(0, x);
-            dataset.features(i).set(1, y);
-            double lb = y - x > 0 ? 1 : -1;
-            dataset.setLabel(i, lb);
-        }
-
-        SVMLinearBinaryClassificationModel mdl = trainer.train(dataset);
-
-        TestUtils.assertEquals(-1, mdl.apply(new DenseLocalOnHeapVector(new 
double[] {100, 10})), precision);
-        TestUtils.assertEquals(1, mdl.apply(new DenseLocalOnHeapVector(new 
double[] {10, 100})), precision);
-    }
-
-    /**
-     * Test trainer on classification model y = x. Amount of generated points 
is increased 100 times.
-     */
-    @Test
-    public void testTrainWithTheLinearlySeparableCase100() {
-        if (isDistributed)
-            
IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName());
-
-        LabeledDataset dataset = new LabeledDataset<Double, 
LabeledVector>(AMOUNT_OF_OBSERVATIONS * 100, AMOUNT_OF_FEATURES, isDistributed);
-
-        ThreadLocalRandom rndX = ThreadLocalRandom.current();
-        ThreadLocalRandom rndY = ThreadLocalRandom.current();
-        for (int i = 0; i < AMOUNT_OF_OBSERVATIONS * 100; i++) {
-            double x = rndX.nextDouble(-1000, 1000);
-            double y = rndY.nextDouble(-1000, 1000);
-            dataset.features(i).set(0, x);
-            dataset.features(i).set(1, y);
-            double lb = y - x > 0 ? 1 : -1;
-            dataset.setLabel(i, lb);
-        }
-
-        SVMLinearBinaryClassificationModel mdl = trainer.train(dataset);
-
-        TestUtils.assertEquals(-1, mdl.apply(new DenseLocalOnHeapVector(new 
double[] {100, 10})), precision);
-        TestUtils.assertEquals(1, mdl.apply(new DenseLocalOnHeapVector(new 
double[] {10, 100})), precision);
-    }
-}

Reply via email to