http://git-wip-us.apache.org/repos/asf/ignite/blob/26e40528/modules/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorImplementationsTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorImplementationsTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorImplementationsTest.java new file mode 100644 index 0000000..27fffb0 --- /dev/null +++ b/modules/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorImplementationsTest.java @@ -0,0 +1,849 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.math.primitives.vector; + +import java.util.Arrays; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiConsumer; +import java.util.function.BiFunction; +import java.util.function.Consumer; +import java.util.function.Function; +import org.apache.ignite.IgniteException; +import org.apache.ignite.ml.math.ExternalizeTest; +import org.apache.ignite.ml.math.exceptions.CardinalityException; +import org.apache.ignite.ml.math.exceptions.UnsupportedOperationException; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.apache.ignite.ml.math.primitives.vector.impl.SparseVector; +import org.apache.ignite.ml.math.primitives.vector.impl.DelegatingVector; +import org.apache.ignite.ml.math.primitives.vector.impl.VectorizedViewMatrix; +import org.junit.Assert; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +/** See also: {@link AbstractVectorTest} and {@link VectorToMatrixTest}. 
*/ +public class VectorImplementationsTest { // TODO: IGNTIE-5723, split this to smaller cohesive test classes + /** */ + @Test + public void setGetTest() { + consumeSampleVectors((v, desc) -> mutateAtIdxTest(v, desc, (vec, idx, val) -> { + vec.set(idx, val); + + return val; + })); + } + + /** */ + @Test + public void setXTest() { + consumeSampleVectors((v, desc) -> mutateAtIdxTest(v, desc, (vec, idx, val) -> { + vec.setX(idx, val); + + return val; + })); + } + + /** */ + @Test + public void incrementTest() { + consumeSampleVectors((v, desc) -> mutateAtIdxTest(v, desc, (vec, idx, val) -> { + double old = vec.get(idx); + + vec.increment(idx, val); + + return old + val; + })); + } + + /** */ + @Test + public void incrementXTest() { + consumeSampleVectors((v, desc) -> mutateAtIdxTest(v, desc, (vec, idx, val) -> { + double old = vec.getX(idx); + + vec.incrementX(idx, val); + + return old + val; + })); + } + + /** */ + @Test + public void operateXOutOfBoundsTest() { + consumeSampleVectors((v, desc) -> { + if (v instanceof SparseVector) + return; // TODO: IGNTIE-5723, find out if it's OK to skip by instances here + + boolean expECaught = false; + + try { + v.getX(-1); + } + catch (ArrayIndexOutOfBoundsException | IgniteException e) { + expECaught = true; + } + + if (!getXOutOfBoundsOK(v)) + assertTrue("Expect exception at negative index getX in " + desc, expECaught); + + expECaught = false; + + try { + v.setX(-1, 0); + } + catch (ArrayIndexOutOfBoundsException | IgniteException e) { + expECaught = true; + } + + assertTrue("Expect exception at negative index setX in " + desc, expECaught); + + expECaught = false; + + try { + v.incrementX(-1, 1); + } + catch (ArrayIndexOutOfBoundsException | IgniteException e) { + expECaught = true; + } + + assertTrue("Expect exception at negative index incrementX in " + desc, expECaught); + + expECaught = false; + + try { + v.getX(v.size()); + } + catch (ArrayIndexOutOfBoundsException | IgniteException e) { + expECaught = true; + } + + if 
(!getXOutOfBoundsOK(v)) + assertTrue("Expect exception at too large index getX in " + desc, expECaught); + + expECaught = false; + + try { + v.setX(v.size(), 1); + } + catch (ArrayIndexOutOfBoundsException | IgniteException e) { + expECaught = true; + } + + assertTrue("Expect exception at too large index setX in " + desc, expECaught); + + expECaught = false; + + try { + v.incrementX(v.size(), 1); + } + catch (ArrayIndexOutOfBoundsException | IgniteException e) { + expECaught = true; + } + + assertTrue("Expect exception at too large index incrementX in " + desc, expECaught); + }); + } + + /** */ + @Test + public void sizeTest() { + final AtomicReference<Integer> expSize = new AtomicReference<>(0); + + consumeSampleVectors( + expSize::set, + (v, desc) -> Assert.assertEquals("Expected size for " + desc, + (int)expSize.get(), v.size()) + ); + } + + /** */ + @Test + public void getElementTest() { + consumeSampleVectors((v, desc) -> new ElementsChecker(v, desc).assertCloseEnough(v)); + } + + /** */ + @Test + public void copyTest() { + consumeSampleVectors((v, desc) -> new ElementsChecker(v, desc).assertCloseEnough(v.copy())); + } + + /** */ + @Test + public void divideTest() { + operationTest((val, operand) -> val / operand, Vector::divide); + } + + /** */ + @Test + public void likeTest() { + for (int card : new int[] {1, 2, 4, 8, 16, 32, 64, 128}) + consumeSampleVectors((v, desc) -> { + Class<? extends Vector> expType = expLikeType(v); + + if (expType == null) { + try { + v.like(card); + } + catch (UnsupportedOperationException uoe) { + return; + } + + fail("Expected exception wasn't caught for " + desc); + + return; + } + + Vector vLike = v.like(card); + + assertNotNull("Expect non-null like vector for " + expType.getSimpleName() + " in " + desc, vLike); + assertEquals("Expect size equal to cardinality at " + desc, card, vLike.size()); + + Class<? 
extends Vector> actualType = vLike.getClass(); + + assertTrue("Actual vector type " + actualType.getSimpleName() + + " should be assignable from expected type " + expType.getSimpleName() + " in " + desc, + actualType.isAssignableFrom(expType)); + }); + } + + /** */ + @Test + public void minusTest() { + operationVectorTest((operand1, operand2) -> operand1 - operand2, Vector::minus); + } + + /** */ + @Test + public void plusVectorTest() { + operationVectorTest((operand1, operand2) -> operand1 + operand2, Vector::plus); + } + + /** */ + @Test + public void plusDoubleTest() { + operationTest((val, operand) -> val + operand, Vector::plus); + } + + /** */ + @Test + public void timesVectorTest() { + operationVectorTest((operand1, operand2) -> operand1 * operand2, Vector::times); + } + + /** */ + @Test + public void timesDoubleTest() { + operationTest((val, operand) -> val * operand, Vector::times); + } + + /** */ + @Test + public void viewPartTest() { + consumeSampleVectors((v, desc) -> { + final int size = v.size(); + final double[] ref = new double[size]; + final int delta = size > 32 ? 
3 : 1; // IMPL NOTE this is for faster test execution + + final ElementsChecker checker = new ElementsChecker(v, ref, desc); + + for (int off = 0; off < size; off += delta) + for (int len = 1; len < size - off; len += delta) + checker.assertCloseEnough(v.viewPart(off, len), Arrays.copyOfRange(ref, off, off + len)); + }); + } + + /** */ + @Test + public void sumTest() { + toDoubleTest( + ref -> Arrays.stream(ref).sum(), + Vector::sum); + } + + /** */ + @Test + public void minValueTest() { + toDoubleTest( + ref -> Arrays.stream(ref).min().getAsDouble(), + Vector::minValue); + } + + /** */ + @Test + public void maxValueTest() { + toDoubleTest( + ref -> Arrays.stream(ref).max().getAsDouble(), + Vector::maxValue); + } + + /** */ + @Test + public void sortTest() { + consumeSampleVectors((v, desc) -> { + if (readOnly() || !v.isArrayBased()) { + boolean expECaught = false; + + try { + v.sort(); + } + catch (UnsupportedOperationException uoe) { + expECaught = true; + } + + assertTrue("Expected exception was not caught for sort in " + desc, expECaught); + + return; + } + + final int size = v.size(); + final double[] ref = new double[size]; + + new ElementsChecker(v, ref, desc).assertCloseEnough(v.sort(), Arrays.stream(ref).sorted().toArray()); + }); + } + + /** */ + @Test + public void metaAttributesTest() { + consumeSampleVectors((v, desc) -> { + assertNotNull("Null meta storage in " + desc, v.getMetaStorage()); + + final String key = "test key"; + final String val = "test value"; + final String details = "key [" + key + "] for " + desc; + + v.setAttribute(key, val); + assertTrue("Expect to have meta attribute for " + details, v.hasAttribute(key)); + assertEquals("Unexpected meta attribute value for " + details, val, v.getAttribute(key)); + + v.removeAttribute(key); + assertFalse("Expect not to have meta attribute for " + details, v.hasAttribute(key)); + assertNull("Unexpected meta attribute value for " + details, v.getAttribute(key)); + }); + } + + /** */ + @Test + public 
void assignDoubleTest() { + consumeSampleVectors((v, desc) -> { + if (readOnly()) + return; + + for (double val : new double[] {0, -1, 0, 1}) { + v.assign(val); + + for (int idx = 0; idx < v.size(); idx++) { + final Metric metric = new Metric(val, v.get(idx)); + + assertTrue("Not close enough at index " + idx + ", val " + val + ", " + metric + + ", " + desc, metric.closeEnough()); + } + } + }); + } + + /** */ + @Test + public void assignDoubleArrTest() { + consumeSampleVectors((v, desc) -> { + if (readOnly()) + return; + + final int size = v.size(); + final double[] ref = new double[size]; + + final ElementsChecker checker = new ElementsChecker(v, ref, desc); + + for (int idx = 0; idx < size; idx++) + ref[idx] = -ref[idx]; + + v.assign(ref); + + checker.assertCloseEnough(v, ref); + + assignDoubleArrWrongCardinality(v, desc); + }); + } + + /** */ + @Test + public void assignVectorTest() { + consumeSampleVectors((v, desc) -> { + if (readOnly()) + return; + + final int size = v.size(); + final double[] ref = new double[size]; + + final ElementsChecker checker = new ElementsChecker(v, ref, desc); + + for (int idx = 0; idx < size; idx++) + ref[idx] = -ref[idx]; + + v.assign(new DenseVector(ref)); + + checker.assertCloseEnough(v, ref); + + assignVectorWrongCardinality(v, desc); + }); + } + + /** */ + @Test + public void assignFunctionTest() { + consumeSampleVectors((v, desc) -> { + if (readOnly()) + return; + + final int size = v.size(); + final double[] ref = new double[size]; + + final ElementsChecker checker = new ElementsChecker(v, ref, desc); + + for (int idx = 0; idx < size; idx++) + ref[idx] = -ref[idx]; + + v.assign((idx) -> ref[idx]); + + checker.assertCloseEnough(v, ref); + }); + } + + /** */ + @Test + public void minElementTest() { + consumeSampleVectors((v, desc) -> { + final ElementsChecker checker = new ElementsChecker(v, desc); + + final Vector.Element minE = v.minElement(); + + final int minEIdx = minE.index(); + + assertTrue("Unexpected index from 
minElement " + minEIdx + ", " + desc, + minEIdx >= 0 && minEIdx < v.size()); + + final Metric metric = new Metric(minE.get(), v.minValue()); + + assertTrue("Not close enough minElement at index " + minEIdx + ", " + metric + + ", " + desc, metric.closeEnough()); + + checker.assertNewMinElement(v); + }); + } + + /** */ + @Test + public void maxElementTest() { + consumeSampleVectors((v, desc) -> { + final ElementsChecker checker = new ElementsChecker(v, desc); + + final Vector.Element maxE = v.maxElement(); + + final int minEIdx = maxE.index(); + + assertTrue("Unexpected index from minElement " + minEIdx + ", " + desc, + minEIdx >= 0 && minEIdx < v.size()); + + final Metric metric = new Metric(maxE.get(), v.maxValue()); + + assertTrue("Not close enough maxElement at index " + minEIdx + ", " + metric + + ", " + desc, metric.closeEnough()); + + checker.assertNewMaxElement(v); + }); + } + + /** */ + @Test + public void externalizeTest() { + (new ExternalizeTest<Vector>() { + /** {@inheritDoc} */ + @Override public void externalizeTest() { + consumeSampleVectors((v, desc) -> externalizeTest(v)); + } + }).externalizeTest(); + } + + /** */ + @Test + public void hashCodeTest() { + consumeSampleVectors((v, desc) -> assertTrue("Zero hash code for " + desc, v.hashCode() != 0)); + } + + /** */ + private boolean getXOutOfBoundsOK(Vector v) { + // TODO: IGNTIE-5723, find out if this is indeed OK + return false; + } + + /** */ + private void mutateAtIdxTest(Vector v, String desc, MutateAtIdx operation) { + if (readOnly()) { + if (v.size() < 1) + return; + + boolean expECaught = false; + + try { + operation.apply(v, 0, 1); + } + catch (UnsupportedOperationException uoe) { + expECaught = true; + } + + assertTrue("Expect exception at attempt to mutate element in " + desc, expECaught); + + return; + } + + for (double val : new double[] {0, -1, 0, 1}) + for (int idx = 0; idx < v.size(); idx++) { + double exp = operation.apply(v, idx, val); + + final Metric metric = new Metric(exp, 
v.get(idx)); + + assertTrue("Not close enough at index " + idx + ", val " + val + ", " + metric + + ", " + desc, metric.closeEnough()); + } + } + + /** */ + private Class<? extends Vector> expLikeType(Vector v) { + Class<? extends Vector> clazz = v.getClass(); + + if (clazz.isAssignableFrom(VectorizedViewMatrix.class) || clazz.isAssignableFrom(DelegatingVector.class)) + return DenseVector.class; // IMPL NOTE per fixture + + return clazz; + } + + /** */ + private void toDoubleTest(Function<double[], Double> calcRef, Function<Vector, Double> calcVec) { + consumeSampleVectors((v, desc) -> { + final int size = v.size(); + final double[] ref = new double[size]; + + new ElementsChecker(v, ref, desc); // IMPL NOTE this initialises vector and reference array + + final Metric metric = new Metric(calcRef.apply(ref), calcVec.apply(v)); + + assertTrue("Not close enough at " + desc + + ", " + metric, metric.closeEnough()); + }); + } + + /** */ + private void operationVectorTest(BiFunction<Double, Double, Double> operation, + BiFunction<Vector, Vector, Vector> vecOperation) { + consumeSampleVectors((v, desc) -> { + // TODO : IGNTIE-5723, find out if more elaborate testing scenario is needed or it's okay as is. 
+ final int size = v.size(); + final double[] ref = new double[size]; + + final ElementsChecker checker = new ElementsChecker(v, ref, desc); + final Vector operand = v.copy(); + + for (int idx = 0; idx < size; idx++) + ref[idx] = operation.apply(ref[idx], ref[idx]); + + checker.assertCloseEnough(vecOperation.apply(v, operand), ref); + + assertWrongCardinality(v, desc, vecOperation); + }); + } + + /** */ + private void assignDoubleArrWrongCardinality(Vector v, String desc) { + boolean expECaught = false; + + try { + v.assign(new double[v.size() + 1]); + } + catch (CardinalityException ce) { + expECaught = true; + } + + assertTrue("Expect exception at too large size in " + desc, expECaught); + + if (v.size() < 2) + return; + + expECaught = false; + + try { + v.assign(new double[v.size() - 1]); + } + catch (CardinalityException ce) { + expECaught = true; + } + + assertTrue("Expect exception at too small size in " + desc, expECaught); + } + + /** */ + private void assignVectorWrongCardinality(Vector v, String desc) { + boolean expECaught = false; + + try { + v.assign(new DenseVector(v.size() + 1)); + } + catch (CardinalityException ce) { + expECaught = true; + } + + assertTrue("Expect exception at too large size in " + desc, expECaught); + + if (v.size() < 2) + return; + + expECaught = false; + + try { + v.assign(new DenseVector(v.size() - 1)); + } + catch (CardinalityException ce) { + expECaught = true; + } + + assertTrue("Expect exception at too small size in " + desc, expECaught); + } + + /** */ + private void assertWrongCardinality( + Vector v, String desc, BiFunction<Vector, Vector, Vector> vecOperation) { + boolean expECaught = false; + + try { + vecOperation.apply(v, new DenseVector(v.size() + 1)); + } + catch (CardinalityException ce) { + expECaught = true; + } + + assertTrue("Expect exception at too large size in " + desc, expECaught); + + if (v.size() < 2) + return; + + expECaught = false; + + try { + vecOperation.apply(v, new DenseVector(v.size() - 1)); + } 
+ catch (CardinalityException ce) { + expECaught = true; + } + + assertTrue("Expect exception at too small size in " + desc, expECaught); + } + + /** */ + private void operationTest(BiFunction<Double, Double, Double> operation, + BiFunction<Vector, Double, Vector> vecOperation) { + for (double val : new double[] {0, 0.1, 1, 2, 10}) + consumeSampleVectors((v, desc) -> { + final int size = v.size(); + final double[] ref = new double[size]; + + final ElementsChecker checker = new ElementsChecker(v, ref, "val " + val + ", " + desc); + + for (int idx = 0; idx < size; idx++) + ref[idx] = operation.apply(ref[idx], val); + + checker.assertCloseEnough(vecOperation.apply(v, val), ref); + }); + } + + /** */ + private void consumeSampleVectors(BiConsumer<Vector, String> consumer) { + consumeSampleVectors(null, consumer); + } + + /** */ + private void consumeSampleVectors(Consumer<Integer> paramsConsumer, BiConsumer<Vector, String> consumer) { + new VectorImplementationsFixtures().consumeSampleVectors(paramsConsumer, consumer); + } + + /** */ + private static boolean readOnly() { + return false; + } + + /** */ + private interface MutateAtIdx { + /** */ + double apply(Vector v, int idx, double val); + } + + /** */ + static class ElementsChecker { + /** */ + private final String fixtureDesc; + + /** */ + private final double[] refReadOnly; + + /** */ + private final boolean nonNegative; + + /** */ + ElementsChecker(Vector v, double[] ref, String fixtureDesc, boolean nonNegative) { + this.fixtureDesc = fixtureDesc; + + this.nonNegative = nonNegative; + + refReadOnly = readOnly() && ref == null ? 
new double[v.size()] : null; + + init(v, ref); + } + + /** */ + ElementsChecker(Vector v, double[] ref, String fixtureDesc) { + this(v, ref, fixtureDesc, false); + } + + /** */ + ElementsChecker(Vector v, String fixtureDesc) { + this(v, null, fixtureDesc); + } + + /** */ + void assertCloseEnough(Vector obtained, double[] exp) { + final int size = obtained.size(); + + for (int i = 0; i < size; i++) { + final Vector.Element e = obtained.getElement(i); + + if (refReadOnly != null && exp == null) + exp = refReadOnly; + + final Metric metric = new Metric(exp == null ? generated(i) : exp[i], e.get()); + + assertEquals("Unexpected vector index at " + fixtureDesc, i, e.index()); + assertTrue("Not close enough at index " + i + ", size " + size + ", " + metric + + ", " + fixtureDesc, metric.closeEnough()); + } + } + + /** */ + void assertCloseEnough(Vector obtained) { + assertCloseEnough(obtained, null); + } + + /** */ + void assertNewMinElement(Vector v) { + if (readOnly()) + return; + + int exp = v.size() / 2; + + v.set(exp, -(v.size() * 2 + 1)); + + assertEquals("Unexpected minElement index at " + fixtureDesc, exp, v.minElement().index()); + } + + /** */ + void assertNewMaxElement(Vector v) { + if (readOnly()) + return; + + int exp = v.size() / 2; + + v.set(exp, v.size() * 2 + 1); + + assertEquals("Unexpected minElement index at " + fixtureDesc, exp, v.maxElement().index()); + } + + /** */ + private void init(Vector v, double[] ref) { + if (readOnly()) { + initReadonly(v, ref); + + return; + } + + for (Vector.Element e : v.all()) { + int idx = e.index(); + + // IMPL NOTE introduce negative values because their absence + // blocked catching an ugly bug in AbstractVector#kNorm + int val = generated(idx); + + e.set(val); + + if (ref != null) + ref[idx] = val; + } + } + + /** */ + private void initReadonly(Vector v, double[] ref) { + if (refReadOnly != null) + for (Vector.Element e : v.all()) + refReadOnly[e.index()] = e.get(); + + if (ref != null) + for (Vector.Element e : 
v.all()) + ref[e.index()] = e.get(); + } + + /** */ + private int generated(int idx) { + return nonNegative || (idx & 1) == 0 ? idx : -idx; + } + } + + /** */ + static class Metric { //TODO: IGNITE-5824, consider if softer tolerance (like say 0.1 or 0.01) would make sense here + /** */ + private final double exp; + + /** */ + private final double obtained; + + /** **/ + Metric(double exp, double obtained) { + this.exp = exp; + this.obtained = obtained; + } + + /** */ + boolean closeEnough() { + return new Double(exp).equals(obtained) || closeEnoughToZero(); + } + + /** {@inheritDoc} */ + @Override public String toString() { + return "Metric{" + "expected=" + exp + + ", obtained=" + obtained + + '}'; + } + + /** */ + private boolean closeEnoughToZero() { + return (new Double(exp).equals(0.0) && new Double(obtained).equals(-0.0)) + || (new Double(exp).equals(-0.0) && new Double(obtained).equals(0.0)); + } + } +}
http://git-wip-us.apache.org/repos/asf/ignite/blob/26e40528/modules/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorNormTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorNormTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorNormTest.java new file mode 100644 index 0000000..d71ec48 --- /dev/null +++ b/modules/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorNormTest.java @@ -0,0 +1,238 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.math.primitives.vector; + +import java.util.function.BiConsumer; +import java.util.function.BiFunction; +import java.util.function.Function; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.junit.Test; + +import static org.junit.Assert.assertTrue; + +/** */ +public class VectorNormTest { + /** */ + @Test + public void normalizeTest() { + normalizeTest(2, (val, len) -> val / len, Vector::normalize); + } + + /** */ + @Test + public void normalizePowerTest() { + for (double pow : new double[] {0, 0.5, 1, 2, 2.5, Double.POSITIVE_INFINITY}) + normalizeTest(pow, (val, norm) -> val / norm, (v) -> v.normalize(pow)); + } + + /** */ + @Test + public void logNormalizeTest() { + normalizeTest(2, (val, len) -> Math.log1p(val) / (len * Math.log(2)), Vector::logNormalize); + } + + /** */ + @Test + public void logNormalizePowerTest() { + for (double pow : new double[] {1.1, 2, 2.5}) + normalizeTest(pow, (val, norm) -> Math.log1p(val) / (norm * Math.log(pow)), (v) -> v.logNormalize(pow)); + } + + /** */ + @Test + public void kNormTest() { + for (double pow : new double[] {0, 0.5, 1, 2, 2.5, Double.POSITIVE_INFINITY}) + toDoubleTest(pow, ref -> new Norm(ref, pow).calculate(), v -> v.kNorm(pow)); + } + + /** */ + @Test + public void getLengthSquaredTest() { + toDoubleTest(2.0, ref -> new Norm(ref, 2).sumPowers(), Vector::getLengthSquared); + } + + /** */ + @Test + public void getDistanceSquaredTest() { + consumeSampleVectors((v, desc) -> { + new VectorImplementationsTest.ElementsChecker(v, desc); // IMPL NOTE this initialises vector + + final int size = v.size(); + final Vector vOnHeap = new DenseVector(size); + + invertValues(v, vOnHeap); + + for (int idx = 0; idx < size; idx++) { + final double exp = v.get(idx); + final int idxMirror = size - 1 - idx; + + assertTrue("On heap vector difference at " + desc + ", idx " + idx, + exp - vOnHeap.get(idxMirror) == 0); + } + + final double exp = 
vOnHeap.minus(v).getLengthSquared(); // IMPL NOTE this won't mutate vOnHeap + final VectorImplementationsTest.Metric metric = new VectorImplementationsTest.Metric(exp, v.getDistanceSquared(vOnHeap)); + + assertTrue("On heap vector not close enough at " + desc + ", " + metric, + metric.closeEnough()); + }); + } + + /** */ + @Test + public void dotTest() { + consumeSampleVectors((v, desc) -> { + new VectorImplementationsTest.ElementsChecker(v, desc); // IMPL NOTE this initialises vector + + final int size = v.size(); + final Vector v1 = new DenseVector(size); + + invertValues(v, v1); + + final double actual = v.dot(v1); + + double exp = 0; + + for (Vector.Element e : v.all()) + exp += e.get() * v1.get(e.index()); + + final VectorImplementationsTest.Metric metric = new VectorImplementationsTest.Metric(exp, actual); + + assertTrue("Dot product not close enough at " + desc + ", " + metric, + metric.closeEnough()); + }); + } + + /** */ + private void invertValues(Vector src, Vector dst) { + final int size = src.size(); + + for (Vector.Element e : src.all()) { + final int idx = size - 1 - e.index(); + final double val = e.get(); + + dst.set(idx, val); + } + } + + /** */ + private void toDoubleTest(Double val, Function<double[], Double> calcRef, Function<Vector, Double> calcVec) { + consumeSampleVectors((v, desc) -> { + final int size = v.size(); + final double[] ref = new double[size]; + + new VectorImplementationsTest.ElementsChecker(v, ref, desc); // IMPL NOTE this initialises vector and reference array + + final double exp = calcRef.apply(ref); + final double obtained = calcVec.apply(v); + final VectorImplementationsTest.Metric metric = new VectorImplementationsTest.Metric(exp, obtained); + + assertTrue("Not close enough at " + desc + + (val == null ? 
"" : ", value " + val) + ", " + metric, metric.closeEnough()); + }); + } + + /** */ + private void normalizeTest(double pow, BiFunction<Double, Double, Double> operation, + Function<Vector, Vector> vecOperation) { + consumeSampleVectors((v, desc) -> { + final int size = v.size(); + final double[] ref = new double[size]; + final boolean nonNegative = pow != (int)pow; + + final VectorImplementationsTest.ElementsChecker checker = new VectorImplementationsTest.ElementsChecker(v, ref, desc + ", pow = " + pow, nonNegative); + final double norm = new Norm(ref, pow).calculate(); + + for (int idx = 0; idx < size; idx++) + ref[idx] = operation.apply(ref[idx], norm); + + checker.assertCloseEnough(vecOperation.apply(v), ref); + }); + } + + /** */ + private void consumeSampleVectors(BiConsumer<Vector, String> consumer) { + new VectorImplementationsFixtures().consumeSampleVectors(null, consumer); + } + + /** */ + private static class Norm { + /** */ + private final double[] arr; + + /** */ + private final Double pow; + + /** */ + Norm(double[] arr, double pow) { + this.arr = arr; + this.pow = pow; + } + + /** */ + double calculate() { + if (pow.equals(0.0)) + return countNonZeroes(); // IMPL NOTE this is beautiful if you think of it + + if (pow.equals(Double.POSITIVE_INFINITY)) + return maxAbs(); + + return Math.pow(sumPowers(), 1 / pow); + } + + /** */ + double sumPowers() { + if (pow.equals(0.0)) + return countNonZeroes(); + + double norm = 0; + + for (double val : arr) + norm += pow == 1 ? 
Math.abs(val) : Math.pow(val, pow); + + return norm; + } + + /** */ + private int countNonZeroes() { + int cnt = 0; + + final Double zero = 0.0; + + for (double val : arr) + if (!zero.equals(val)) + cnt++; + + return cnt; + } + + /** */ + private double maxAbs() { + double res = 0; + + for (double val : arr) { + final double abs = Math.abs(val); + + if (abs > res) + res = abs; + } + + return res; + } + } +} http://git-wip-us.apache.org/repos/asf/ignite/blob/26e40528/modules/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorToMatrixTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorToMatrixTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorToMatrixTest.java new file mode 100644 index 0000000..09733b8 --- /dev/null +++ b/modules/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorToMatrixTest.java @@ -0,0 +1,259 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.ml.math.primitives.vector; + +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.function.BiConsumer; +import org.apache.ignite.ml.math.primitives.matrix.Matrix; +import org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.apache.ignite.ml.math.primitives.matrix.impl.SparseMatrix; +import org.apache.ignite.ml.math.primitives.vector.impl.SparseVector; +import org.apache.ignite.ml.math.primitives.vector.impl.DelegatingVector; +import org.apache.ignite.ml.math.primitives.vector.impl.VectorizedViewMatrix; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +/** Tests for methods of Vector that involve Matrix. */ +public class VectorToMatrixTest { + /** */ + private static final Map<Class<? extends Vector>, Class<? extends Matrix>> typesMap = typesMap(); + + /** */ + @Test + public void testHaveLikeMatrix() { + for (Class<? extends Vector> key : typesMap.keySet()) { + Class<? extends Matrix> val = typesMap.get(key); + + if (val == null) + System.out.println("Missing test for implementation of likeMatrix for " + key.getSimpleName()); + } + } + + /** */ + @Test + public void testLikeMatrix() { + consumeSampleVectors((v, desc) -> { + if (!availableForTesting(v)) + return; + + final Matrix matrix = v.likeMatrix(1, 1); + + Class<? extends Vector> key = v.getClass(); + + Class<? extends Matrix> expMatrixType = typesMap.get(key); + + assertNotNull("Expect non-null matrix for " + key.getSimpleName() + " in " + desc, matrix); + + Class<? 
extends Matrix> actualMatrixType = matrix.getClass(); + + assertTrue("Expected matrix type " + expMatrixType.getSimpleName() + + " should be assignable from actual type " + actualMatrixType.getSimpleName() + " in " + desc, + expMatrixType.isAssignableFrom(actualMatrixType)); + + for (int rows : new int[] {1, 2}) + for (int cols : new int[] {1, 2}) { + final Matrix actualMatrix = v.likeMatrix(rows, cols); + + String details = "rows " + rows + " cols " + cols; + + assertNotNull("Expect non-null matrix for " + details + " in " + desc, + actualMatrix); + + assertEquals("Unexpected number of rows in " + desc, rows, actualMatrix.rowSize()); + + assertEquals("Unexpected number of cols in " + desc, cols, actualMatrix.columnSize()); + } + }); + } + + /** */ + @Test + public void testToMatrix() { + consumeSampleVectors((v, desc) -> { + if (!availableForTesting(v)) + return; + + fillWithNonZeroes(v); + + final Matrix matrixRow = v.toMatrix(true); + + final Matrix matrixCol = v.toMatrix(false); + + for (Vector.Element e : v.all()) + assertToMatrixValue(desc, matrixRow, matrixCol, e.get(), e.index()); + }); + } + + /** */ + @Test + public void testToMatrixPlusOne() { + consumeSampleVectors((v, desc) -> { + if (!availableForTesting(v)) + return; + + fillWithNonZeroes(v); + + for (double zeroVal : new double[] {-1, 0, 1, 2}) { + final Matrix matrixRow = v.toMatrixPlusOne(true, zeroVal); + + final Matrix matrixCol = v.toMatrixPlusOne(false, zeroVal); + + final Metric metricRow0 = new Metric(zeroVal, matrixRow.get(0, 0)); + + assertTrue("Not close enough row like " + metricRow0 + " at index 0 in " + desc, + metricRow0.closeEnough()); + + final Metric metricCol0 = new Metric(zeroVal, matrixCol.get(0, 0)); + + assertTrue("Not close enough cols like " + metricCol0 + " at index 0 in " + desc, + metricCol0.closeEnough()); + + for (Vector.Element e : v.all()) + assertToMatrixValue(desc, matrixRow, matrixCol, e.get(), e.index() + 1); + } + }); + } + + /** */ + @Test + public void 
testCross() { + consumeSampleVectors((v, desc) -> { + if (!availableForTesting(v)) + return; + + fillWithNonZeroes(v); + + for (int delta : new int[] {-1, 0, 1}) { + final int size2 = v.size() + delta; + + if (size2 < 1) + return; + + final Vector v2 = new DenseVector(size2); + + for (Vector.Element e : v2.all()) + e.set(size2 - e.index()); + + assertCross(v, v2, desc); + } + }); + } + + /** */ + private void assertCross(Vector v1, Vector v2, String desc) { + assertNotNull(v1); + assertNotNull(v2); + + final Matrix res = v1.cross(v2); + + assertNotNull("Cross matrix is expected to be not null in " + desc, res); + + assertEquals("Unexpected number of rows in cross Matrix in " + desc, v1.size(), res.rowSize()); + + assertEquals("Unexpected number of cols in cross Matrix in " + desc, v2.size(), res.columnSize()); + + for (int row = 0; row < v1.size(); row++) + for (int col = 0; col < v2.size(); col++) { + final Metric metric = new Metric(v1.get(row) * v2.get(col), res.get(row, col)); + + assertTrue("Not close enough cross " + metric + " at row " + row + " at col " + col + + " in " + desc, metric.closeEnough()); + } + } + + /** */ + private void assertToMatrixValue(String desc, Matrix matrixRow, Matrix matrixCol, double exp, int idx) { + final Metric metricRow = new Metric(exp, matrixRow.get(0, idx)); + + assertTrue("Not close enough row like " + metricRow + " at index " + idx + " in " + desc, + metricRow.closeEnough()); + + final Metric metricCol = new Metric(exp, matrixCol.get(idx, 0)); + + assertTrue("Not close enough cols like " + metricCol + " at index " + idx + " in " + desc, + metricCol.closeEnough()); + } + + /** */ + private void fillWithNonZeroes(Vector sample) { + for (Vector.Element e : sample.all()) + e.set(1 + e.index()); + } + + /** */ + private boolean availableForTesting(Vector v) { + assertNotNull("Error in test: vector is null", v); + + final boolean availableForTesting = typesMap.get(v.getClass()) != null; + + final Matrix actualLikeMatrix =
v.likeMatrix(1, 1); + + assertTrue("Need to enable matrix testing for vector type " + v.getClass().getSimpleName(), + availableForTesting || actualLikeMatrix == null); + + return availableForTesting; + } + + /** */ + private void consumeSampleVectors(BiConsumer<Vector, String> consumer) { + new VectorImplementationsFixtures().consumeSampleVectors(null, consumer); + } + + /** */ + private static Map<Class<? extends Vector>, Class<? extends Matrix>> typesMap() { + return new LinkedHashMap<Class<? extends Vector>, Class<? extends Matrix>>() {{ + put(DenseVector.class, DenseMatrix.class); + put(SparseVector.class, SparseMatrix.class); + put(VectorizedViewMatrix.class, DenseMatrix.class); // IMPL NOTE per fixture + put(DelegatingVector.class, DenseMatrix.class); // IMPL NOTE per fixture + // IMPL NOTE check for presence of all implementations here will be done in testHaveLikeMatrix via Fixture + }}; + } + + /** */ + private static class Metric { //TODO: IGNITE-5824, consider if softer tolerance (like say 0.1 or 0.01) would make sense here. 
+ /** */ + private final double exp; + + /** */ + private final double obtained; + + /** */ + Metric(double exp, double obtained) { + this.exp = exp; + this.obtained = obtained; + } + + /** */ + boolean closeEnough() { + return Double.compare(exp, obtained) == 0; + } + + /** {@inheritDoc} */ + @Override public String toString() { + return "Metric{" + "expected=" + exp + + ", obtained=" + obtained + + '}'; + } + } +} http://git-wip-us.apache.org/repos/asf/ignite/blob/26e40528/modules/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorViewTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorViewTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorViewTest.java new file mode 100644 index 0000000..7471540 --- /dev/null +++ b/modules/ml/src/test/java/org/apache/ignite/ml/math/primitives/vector/VectorViewTest.java @@ -0,0 +1,163 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.ignite.ml.math.primitives.vector; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.function.BiConsumer; +import java.util.stream.IntStream; +import org.apache.ignite.ml.math.exceptions.UnsupportedOperationException; +import org.apache.ignite.ml.math.primitives.MathTestConstants; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; +import org.apache.ignite.ml.math.primitives.vector.impl.VectorView; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +/** + * Unit tests for {@link VectorView}. 
+ */ +public class VectorViewTest { + /** */ + private static final int OFFSET = 10; + + /** */ + private static final int VIEW_LENGTH = 80; + + /** */ + private static final String EXTERNALIZE_TEST_FILE_NAME = "externalizeTest"; + + /** */ + private VectorView testVector; + + /** */ + private DenseVector parentVector; + + /** */ + private double[] parentData; + + /** */ + @Before + public void setup() { + parentVector = new DenseVector(MathTestConstants.STORAGE_SIZE); + + IntStream.range(0, MathTestConstants.STORAGE_SIZE).forEach(idx -> parentVector.set(idx, Math.random())); + + parentData = parentVector.getStorage().data().clone(); + + testVector = new VectorView(parentVector, OFFSET, VIEW_LENGTH); + } + + /** */ + @AfterClass + public static void cleanup() throws IOException { + Files.deleteIfExists(Paths.get(EXTERNALIZE_TEST_FILE_NAME)); + } + + /** */ + @Test + public void testCopy() throws Exception { + Vector cp = testVector.copy(); + + assertTrue(MathTestConstants.VAL_NOT_EQUALS, cp.equals(testVector)); + } + + /** */ + @Test(expected = UnsupportedOperationException.class) + public void testLike() throws Exception { + for (int card : new int[] {1, 2, 4, 8, 16, 32, 64, 128}) + consumeSampleVectors((v, desc) -> { + Vector vLike = new VectorView(v, 0, 1).like(card); + + Class<? extends Vector> expType = v.getClass(); + + assertNotNull("Expect non-null like vector for " + expType.getSimpleName() + " in " + desc, vLike); + + assertEquals("Expect size equal to cardinality at " + desc, card, vLike.size()); + + Class<? extends Vector> actualType = vLike.getClass(); + + assertTrue("Expected matrix type " + expType.getSimpleName() + + " should be assignable from actual type " + actualType.getSimpleName() + " in " + desc, + expType.isAssignableFrom(actualType)); + + }); + } + + /** See also {@link VectorToMatrixTest#testLikeMatrix()}. 
*/ + @Test + public void testLikeMatrix() { + consumeSampleVectors((v, desc) -> { + boolean expECaught = false; + + try { + assertNull("Null view instead of exception in " + desc, new VectorView(v, 0, 1).likeMatrix(1, 1)); + } + catch (UnsupportedOperationException uoe) { + expECaught = true; + } + + assertTrue("Expected exception was not caught in " + desc, expECaught); + }); + } + + /** */ + @Test + public void testWriteReadExternal() throws Exception { + assertNotNull("Unexpected null parent data", parentData); + + File f = new File(EXTERNALIZE_TEST_FILE_NAME); + + try { + ObjectOutputStream objOutputStream = new ObjectOutputStream(new FileOutputStream(f)); + + objOutputStream.writeObject(testVector); + + objOutputStream.close(); + + ObjectInputStream objInputStream = new ObjectInputStream(new FileInputStream(f)); + + VectorView readVector = (VectorView)objInputStream.readObject(); + + objInputStream.close(); + + assertTrue(MathTestConstants.VAL_NOT_EQUALS, testVector.equals(readVector)); + } + catch (ClassNotFoundException | IOException e) { + fail(e.getMessage()); + } + } + + /** */ + private void consumeSampleVectors(BiConsumer<Vector, String> consumer) { + new VectorImplementationsFixtures().consumeSampleVectors(null, consumer); + } + +} http://git-wip-us.apache.org/repos/asf/ignite/blob/26e40528/modules/ml/src/test/java/org/apache/ignite/ml/nn/MLPConstInitializer.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/nn/MLPConstInitializer.java b/modules/ml/src/test/java/org/apache/ignite/ml/nn/MLPConstInitializer.java index 0a10682..729fc30 100644 --- a/modules/ml/src/test/java/org/apache/ignite/ml/nn/MLPConstInitializer.java +++ b/modules/ml/src/test/java/org/apache/ignite/ml/nn/MLPConstInitializer.java @@ -17,8 +17,8 @@ package org.apache.ignite.ml.nn; -import org.apache.ignite.ml.math.Matrix; -import org.apache.ignite.ml.math.Vector; +import 
org.apache.ignite.ml.math.primitives.matrix.Matrix; +import org.apache.ignite.ml.math.primitives.vector.Vector; import org.apache.ignite.ml.nn.initializers.MLPInitializer; /** http://git-wip-us.apache.org/repos/asf/ignite/blob/26e40528/modules/ml/src/test/java/org/apache/ignite/ml/nn/MLPTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/nn/MLPTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/nn/MLPTest.java index 3072abb..5c09e43 100644 --- a/modules/ml/src/test/java/org/apache/ignite/ml/nn/MLPTest.java +++ b/modules/ml/src/test/java/org/apache/ignite/ml/nn/MLPTest.java @@ -18,12 +18,12 @@ package org.apache.ignite.ml.nn; import org.apache.ignite.ml.TestUtils; -import org.apache.ignite.ml.math.Matrix; +import org.apache.ignite.ml.math.primitives.matrix.Matrix; import org.apache.ignite.ml.math.Tracer; -import org.apache.ignite.ml.math.Vector; +import org.apache.ignite.ml.math.primitives.vector.Vector; import org.apache.ignite.ml.math.functions.IgniteTriFunction; -import org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix; -import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector; +import org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; import org.apache.ignite.ml.nn.architecture.MLPArchitecture; import org.apache.ignite.ml.optimization.LossFunctions; import org.junit.Assert; @@ -44,9 +44,9 @@ public class MLPTest { int input = 2; - Matrix predict = mlp.apply(new DenseLocalOnHeapMatrix(new double[][] {{input}})); + Matrix predict = mlp.apply(new DenseMatrix(new double[][] {{input}})); - Assert.assertEquals(predict, new DenseLocalOnHeapMatrix(new double[][] {{Activators.SIGMOID.apply(input)}})); + Assert.assertEquals(predict, new DenseMatrix(new double[][] {{Activators.SIGMOID.apply(input)}})); } /** @@ -60,16 +60,16 @@ public class MLPTest { MultilayerPerceptron mlp = 
new MultilayerPerceptron(conf, new MLPConstInitializer(1, 2)); - mlp.setWeights(1, new DenseLocalOnHeapMatrix(new double[][] {{20.0, 20.0}, {-20.0, -20.0}})); - mlp.setBiases(1, new DenseLocalOnHeapVector(new double[] {-10.0, 30.0})); + mlp.setWeights(1, new DenseMatrix(new double[][] {{20.0, 20.0}, {-20.0, -20.0}})); + mlp.setBiases(1, new DenseVector(new double[] {-10.0, 30.0})); - mlp.setWeights(2, new DenseLocalOnHeapMatrix(new double[][] {{20.0, 20.0}})); - mlp.setBiases(2, new DenseLocalOnHeapVector(new double[] {-30.0})); + mlp.setWeights(2, new DenseMatrix(new double[][] {{20.0, 20.0}})); + mlp.setBiases(2, new DenseVector(new double[] {-30.0})); - Matrix input = new DenseLocalOnHeapMatrix(new double[][] {{0.0, 0.0}, {0.0, 1.0}, {1.0, 0.0}, {1.0, 1.0}}); + Matrix input = new DenseMatrix(new double[][] {{0.0, 0.0}, {0.0, 1.0}, {1.0, 0.0}, {1.0, 1.0}}); Matrix predict = mlp.apply(input); - Matrix truth = new DenseLocalOnHeapMatrix(new double[][] {{0.0}, {1.0}, {1.0}, {0.0}}); + Matrix truth = new DenseMatrix(new double[][] {{0.0}, {1.0}, {1.0}, {0.0}}); TestUtils.checkIsInEpsilonNeighbourhood(predict.getRow(0), truth.getRow(0), 1E-4); } @@ -99,8 +99,8 @@ public class MLPTest { MultilayerPerceptron stackedMLP = mlp1.add(mlp2); - Matrix predict = mlp.apply(new DenseLocalOnHeapMatrix(new double[][] {{1}, {2}, {3}, {4}}).transpose()); - Matrix stackedPredict = stackedMLP.apply(new DenseLocalOnHeapMatrix(new double[][] {{1}, {2}, {3}, {4}}).transpose()); + Matrix predict = mlp.apply(new DenseMatrix(new double[][] {{1}, {2}, {3}, {4}}).transpose()); + Matrix stackedPredict = stackedMLP.apply(new DenseMatrix(new double[][] {{1}, {2}, {3}, {4}}).transpose()); Assert.assertEquals(predict, stackedPredict); } @@ -131,15 +131,15 @@ public class MLPTest { int firstLayerNeuronsCnt = 2; int secondLayerNeurons = 1; - DenseLocalOnHeapVector paramsVector = new DenseLocalOnHeapVector(new double[] { + DenseVector paramsVector = new DenseVector(new double[] { 1.0, 2.0, 3.0, 4.0, 
5.0, 6.0, // First layer weight matrix. 7.0, 8.0, // Second layer weight matrix. 9.0 // Second layer biases. }); - DenseLocalOnHeapMatrix firstLayerWeights = new DenseLocalOnHeapMatrix(new double[][] {{1.0, 2.0, 3.0}, {4.0, 5.0, 6.0}}); - DenseLocalOnHeapMatrix secondLayerWeights = new DenseLocalOnHeapMatrix(new double[][] {{7.0, 8.0}}); - DenseLocalOnHeapVector secondLayerBiases = new DenseLocalOnHeapVector(new double[] {9.0}); + DenseMatrix firstLayerWeights = new DenseMatrix(new double[][] {{1.0, 2.0, 3.0}, {4.0, 5.0, 6.0}}); + DenseMatrix secondLayerWeights = new DenseMatrix(new double[][] {{7.0, 8.0}}); + DenseVector secondLayerBiases = new DenseVector(new double[] {9.0}); MLPArchitecture conf = new MLPArchitecture(inputSize). withAddedLayer(firstLayerNeuronsCnt, false, Activators.SIGMOID). @@ -176,9 +176,9 @@ public class MLPTest { double x0 = 1.0; double x1 = 3.0; - Matrix inputs = new DenseLocalOnHeapMatrix(new double[][] {{x0, x1}}).transpose(); + Matrix inputs = new DenseMatrix(new double[][] {{x0, x1}}).transpose(); double ytt = 1.0; - Matrix truth = new DenseLocalOnHeapMatrix(new double[][] {{ytt}}).transpose(); + Matrix truth = new DenseMatrix(new double[][] {{ytt}}).transpose(); Vector grad = mlp.differentiateByParameters(LossFunctions.MSE, inputs, truth); http://git-wip-us.apache.org/repos/asf/ignite/blob/26e40528/modules/ml/src/test/java/org/apache/ignite/ml/nn/MLPTrainerIntegrationTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/nn/MLPTrainerIntegrationTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/nn/MLPTrainerIntegrationTest.java index bac6e5f..27ddc6d 100644 --- a/modules/ml/src/test/java/org/apache/ignite/ml/nn/MLPTrainerIntegrationTest.java +++ b/modules/ml/src/test/java/org/apache/ignite/ml/nn/MLPTrainerIntegrationTest.java @@ -25,11 +25,11 @@ import org.apache.ignite.configuration.CacheConfiguration; import 
org.apache.ignite.internal.util.IgniteUtils; import org.apache.ignite.internal.util.typedef.X; import org.apache.ignite.ml.TestUtils; -import org.apache.ignite.ml.math.Matrix; +import org.apache.ignite.ml.math.primitives.matrix.Matrix; import org.apache.ignite.ml.math.Tracer; -import org.apache.ignite.ml.math.VectorUtils; -import org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix; -import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; import org.apache.ignite.ml.nn.architecture.MLPArchitecture; import org.apache.ignite.ml.optimization.LossFunctions; import org.apache.ignite.ml.optimization.updatecalculators.NesterovParameterUpdate; @@ -142,7 +142,7 @@ public class MLPTrainerIntegrationTest extends GridCommonAbstractTest { (k, v) -> new double[]{ v.lb} ); - Matrix predict = mlp.apply(new DenseLocalOnHeapMatrix(new double[][]{ + Matrix predict = mlp.apply(new DenseMatrix(new double[][]{ {0.0, 0.0}, {0.0, 1.0}, {1.0, 0.0}, @@ -151,9 +151,9 @@ public class MLPTrainerIntegrationTest extends GridCommonAbstractTest { Tracer.showAscii(predict); - X.println(new DenseLocalOnHeapVector(new double[]{0.0}).minus(predict.getRow(0)).kNorm(2) + ""); + X.println(new DenseVector(new double[]{0.0}).minus(predict.getRow(0)).kNorm(2) + ""); - TestUtils.checkIsInEpsilonNeighbourhood(new DenseLocalOnHeapVector(new double[]{0.0}), predict.getRow(0), 1E-1); + TestUtils.checkIsInEpsilonNeighbourhood(new DenseVector(new double[]{0.0}), predict.getRow(0), 1E-1); } finally { xorCache.destroy(); http://git-wip-us.apache.org/repos/asf/ignite/blob/26e40528/modules/ml/src/test/java/org/apache/ignite/ml/nn/MLPTrainerTest.java ---------------------------------------------------------------------- diff --git 
a/modules/ml/src/test/java/org/apache/ignite/ml/nn/MLPTrainerTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/nn/MLPTrainerTest.java index 7f18465..5e61fe6 100644 --- a/modules/ml/src/test/java/org/apache/ignite/ml/nn/MLPTrainerTest.java +++ b/modules/ml/src/test/java/org/apache/ignite/ml/nn/MLPTrainerTest.java @@ -23,10 +23,10 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.ignite.ml.TestUtils; -import org.apache.ignite.ml.math.Matrix; -import org.apache.ignite.ml.math.VectorUtils; -import org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix; -import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector; +import org.apache.ignite.ml.math.primitives.matrix.Matrix; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; import org.apache.ignite.ml.nn.architecture.MLPArchitecture; import org.apache.ignite.ml.optimization.LossFunctions; import org.apache.ignite.ml.optimization.updatecalculators.NesterovParameterUpdate; @@ -145,14 +145,14 @@ public class MLPTrainerTest { (k, v) -> v[1] ); - Matrix predict = mlp.apply(new DenseLocalOnHeapMatrix(new double[][]{ + Matrix predict = mlp.apply(new DenseMatrix(new double[][]{ {0.0, 0.0}, {0.0, 1.0}, {1.0, 0.0}, {1.0, 1.0} })); - TestUtils.checkIsInEpsilonNeighbourhood(new DenseLocalOnHeapVector(new double[]{0.0}), predict.getRow(0), 1E-1); + TestUtils.checkIsInEpsilonNeighbourhood(new DenseVector(new double[]{0.0}), predict.getRow(0), 1E-1); } } http://git-wip-us.apache.org/repos/asf/ignite/blob/26e40528/modules/ml/src/test/java/org/apache/ignite/ml/nn/performance/MLPTrainerMnistIntegrationTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/nn/performance/MLPTrainerMnistIntegrationTest.java 
b/modules/ml/src/test/java/org/apache/ignite/ml/nn/performance/MLPTrainerMnistIntegrationTest.java index 5a26171..e11a829 100644 --- a/modules/ml/src/test/java/org/apache/ignite/ml/nn/performance/MLPTrainerMnistIntegrationTest.java +++ b/modules/ml/src/test/java/org/apache/ignite/ml/nn/performance/MLPTrainerMnistIntegrationTest.java @@ -23,9 +23,9 @@ import org.apache.ignite.IgniteCache; import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.internal.util.IgniteUtils; -import org.apache.ignite.ml.math.Matrix; -import org.apache.ignite.ml.math.VectorUtils; -import org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix; +import org.apache.ignite.ml.math.primitives.matrix.Matrix; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix; import org.apache.ignite.ml.nn.Activators; import org.apache.ignite.ml.nn.MLPTrainer; import org.apache.ignite.ml.nn.MultilayerPerceptron; @@ -114,7 +114,7 @@ public class MLPTrainerMnistIntegrationTest extends GridCommonAbstractTest { int incorrectAnswers = 0; for (MnistUtils.MnistLabeledImage e : MnistMLPTestUtil.loadTestSet(1_000)) { - Matrix input = new DenseLocalOnHeapMatrix(new double[][]{e.getPixels()}); + Matrix input = new DenseMatrix(new double[][]{e.getPixels()}); Matrix outputMatrix = mdl.apply(input); int predicted = (int) VectorUtils.vec2Num(outputMatrix.getRow(0)); http://git-wip-us.apache.org/repos/asf/ignite/blob/26e40528/modules/ml/src/test/java/org/apache/ignite/ml/nn/performance/MLPTrainerMnistTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/nn/performance/MLPTrainerMnistTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/nn/performance/MLPTrainerMnistTest.java index 269082a..e2c905f 100644 --- 
a/modules/ml/src/test/java/org/apache/ignite/ml/nn/performance/MLPTrainerMnistTest.java +++ b/modules/ml/src/test/java/org/apache/ignite/ml/nn/performance/MLPTrainerMnistTest.java @@ -20,9 +20,9 @@ package org.apache.ignite.ml.nn.performance; import java.io.IOException; import java.util.HashMap; import java.util.Map; -import org.apache.ignite.ml.math.Matrix; -import org.apache.ignite.ml.math.VectorUtils; -import org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix; +import org.apache.ignite.ml.math.primitives.matrix.Matrix; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix; import org.apache.ignite.ml.nn.Activators; import org.apache.ignite.ml.nn.MLPTrainer; import org.apache.ignite.ml.nn.MultilayerPerceptron; @@ -84,7 +84,7 @@ public class MLPTrainerMnistTest { int incorrectAnswers = 0; for (MnistUtils.MnistLabeledImage e : MnistMLPTestUtil.loadTestSet(10_000)) { - Matrix input = new DenseLocalOnHeapMatrix(new double[][]{e.getPixels()}); + Matrix input = new DenseMatrix(new double[][]{e.getPixels()}); Matrix outputMatrix = mdl.apply(input); int predicted = (int) VectorUtils.vec2Num(outputMatrix.getRow(0)); http://git-wip-us.apache.org/repos/asf/ignite/blob/26e40528/modules/ml/src/test/java/org/apache/ignite/ml/nn/performance/MnistMLPTestUtil.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/nn/performance/MnistMLPTestUtil.java b/modules/ml/src/test/java/org/apache/ignite/ml/nn/performance/MnistMLPTestUtil.java index d68b355..dd934ab 100644 --- a/modules/ml/src/test/java/org/apache/ignite/ml/nn/performance/MnistMLPTestUtil.java +++ b/modules/ml/src/test/java/org/apache/ignite/ml/nn/performance/MnistMLPTestUtil.java @@ -24,7 +24,7 @@ import java.util.Properties; import java.util.Random; import java.util.stream.Stream; import org.apache.ignite.lang.IgniteBiTuple; -import 
org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; import org.apache.ignite.ml.util.MnistUtils; /** */ @@ -42,13 +42,13 @@ public class MnistMLPTestUtil { private static final String PROP_TEST_LABELS = "mnist.test.labels"; /** */ - static IgniteBiTuple<Stream<DenseLocalOnHeapVector>, Stream<DenseLocalOnHeapVector>> loadMnist(int samplesCnt) throws IOException { + static IgniteBiTuple<Stream<DenseVector>, Stream<DenseVector>> loadMnist(int samplesCnt) throws IOException { Properties props = loadMNISTProperties(); - Stream<DenseLocalOnHeapVector> trainingMnistStream = MnistUtils.mnistAsStream(props.getProperty(PROP_TRAINING_IMAGES), + Stream<DenseVector> trainingMnistStream = MnistUtils.mnistAsStream(props.getProperty(PROP_TRAINING_IMAGES), props.getProperty(PROP_TRAINING_LABELS), new Random(123L), samplesCnt); - Stream<DenseLocalOnHeapVector> testMnistStream = MnistUtils.mnistAsStream(props.getProperty(PROP_TEST_IMAGES), + Stream<DenseVector> testMnistStream = MnistUtils.mnistAsStream(props.getProperty(PROP_TEST_IMAGES), props.getProperty(PROP_TEST_LABELS), new Random(123L), 10_000); return new IgniteBiTuple<>(trainingMnistStream, testMnistStream); http://git-wip-us.apache.org/repos/asf/ignite/blob/26e40528/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/binarization/BinarizationPreprocessorTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/binarization/BinarizationPreprocessorTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/binarization/BinarizationPreprocessorTest.java index a89b1aa..f37b502 100644 --- a/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/binarization/BinarizationPreprocessorTest.java +++ b/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/binarization/BinarizationPreprocessorTest.java @@ -17,7 +17,7 @@ package 
org.apache.ignite.ml.preprocessing.binarization; -import org.apache.ignite.ml.math.VectorUtils; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; import org.junit.Test; import static org.junit.Assert.assertArrayEquals; http://git-wip-us.apache.org/repos/asf/ignite/blob/26e40528/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/binarization/BinarizationTrainerTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/binarization/BinarizationTrainerTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/binarization/BinarizationTrainerTest.java index a7317a5..8b10aaa 100644 --- a/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/binarization/BinarizationTrainerTest.java +++ b/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/binarization/BinarizationTrainerTest.java @@ -22,7 +22,7 @@ import java.util.HashMap; import java.util.Map; import org.apache.ignite.ml.dataset.DatasetBuilder; import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder; -import org.apache.ignite.ml.math.VectorUtils; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; http://git-wip-us.apache.org/repos/asf/ignite/blob/26e40528/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/imputing/ImputerPreprocessorTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/imputing/ImputerPreprocessorTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/imputing/ImputerPreprocessorTest.java index 8482928..14f9a20 100644 --- a/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/imputing/ImputerPreprocessorTest.java +++ 
b/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/imputing/ImputerPreprocessorTest.java @@ -17,8 +17,8 @@ package org.apache.ignite.ml.preprocessing.imputing; -import org.apache.ignite.ml.math.Vector; -import org.apache.ignite.ml.math.VectorUtils; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; import org.junit.Test; import static org.junit.Assert.assertArrayEquals; http://git-wip-us.apache.org/repos/asf/ignite/blob/26e40528/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/imputing/ImputerTrainerTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/imputing/ImputerTrainerTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/imputing/ImputerTrainerTest.java index bbb9d07..006ac29 100644 --- a/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/imputing/ImputerTrainerTest.java +++ b/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/imputing/ImputerTrainerTest.java @@ -22,8 +22,8 @@ import java.util.HashMap; import java.util.Map; import org.apache.ignite.ml.dataset.DatasetBuilder; import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder; -import org.apache.ignite.ml.math.Vector; -import org.apache.ignite.ml.math.VectorUtils; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; http://git-wip-us.apache.org/repos/asf/ignite/blob/26e40528/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/minmaxscaling/MinMaxScalerPreprocessorTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/minmaxscaling/MinMaxScalerPreprocessorTest.java 
b/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/minmaxscaling/MinMaxScalerPreprocessorTest.java index aef1587..ce59112 100644 --- a/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/minmaxscaling/MinMaxScalerPreprocessorTest.java +++ b/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/minmaxscaling/MinMaxScalerPreprocessorTest.java @@ -17,8 +17,8 @@ package org.apache.ignite.ml.preprocessing.minmaxscaling; -import org.apache.ignite.ml.math.Vector; -import org.apache.ignite.ml.math.VectorUtils; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; import org.junit.Test; import static org.junit.Assert.assertArrayEquals; http://git-wip-us.apache.org/repos/asf/ignite/blob/26e40528/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/minmaxscaling/MinMaxScalerTrainerTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/minmaxscaling/MinMaxScalerTrainerTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/minmaxscaling/MinMaxScalerTrainerTest.java index 8d3681b..451f5e9 100644 --- a/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/minmaxscaling/MinMaxScalerTrainerTest.java +++ b/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/minmaxscaling/MinMaxScalerTrainerTest.java @@ -22,8 +22,8 @@ import java.util.HashMap; import java.util.Map; import org.apache.ignite.ml.dataset.DatasetBuilder; import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder; -import org.apache.ignite.ml.math.Vector; -import org.apache.ignite.ml.math.VectorUtils; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; 
http://git-wip-us.apache.org/repos/asf/ignite/blob/26e40528/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/normalization/NormalizationPreprocessorTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/normalization/NormalizationPreprocessorTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/normalization/NormalizationPreprocessorTest.java index a8bfd28..5811d3c 100644 --- a/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/normalization/NormalizationPreprocessorTest.java +++ b/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/normalization/NormalizationPreprocessorTest.java @@ -17,8 +17,8 @@ package org.apache.ignite.ml.preprocessing.normalization; -import org.apache.ignite.ml.math.Vector; -import org.apache.ignite.ml.math.VectorUtils; +import org.apache.ignite.ml.math.primitives.vector.Vector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; import org.apache.ignite.ml.preprocessing.binarization.BinarizationPreprocessor; import org.junit.Test; http://git-wip-us.apache.org/repos/asf/ignite/blob/26e40528/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/normalization/NormalizationTrainerTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/normalization/NormalizationTrainerTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/normalization/NormalizationTrainerTest.java index f6be0f5..b962701 100644 --- a/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/normalization/NormalizationTrainerTest.java +++ b/modules/ml/src/test/java/org/apache/ignite/ml/preprocessing/normalization/NormalizationTrainerTest.java @@ -22,7 +22,7 @@ import java.util.HashMap; import java.util.Map; import org.apache.ignite.ml.dataset.DatasetBuilder; import 
org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder; -import org.apache.ignite.ml.math.VectorUtils; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; import org.apache.ignite.ml.preprocessing.binarization.BinarizationTrainer; import org.junit.Test; import org.junit.runner.RunWith; http://git-wip-us.apache.org/repos/asf/ignite/blob/26e40528/modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionLSQRTrainerTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionLSQRTrainerTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionLSQRTrainerTest.java index f2f264b..f771dae 100644 --- a/modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionLSQRTrainerTest.java +++ b/modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionLSQRTrainerTest.java @@ -21,7 +21,7 @@ import java.util.Arrays; import java.util.HashMap; import java.util.Map; import java.util.Random; -import org.apache.ignite.ml.math.VectorUtils; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; http://git-wip-us.apache.org/repos/asf/ignite/blob/26e40528/modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionModelTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionModelTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionModelTest.java index 7ca9121..d5af7f3 100644 --- a/modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionModelTest.java +++ b/modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionModelTest.java @@ 
-18,9 +18,9 @@ package org.apache.ignite.ml.regressions.linear; import org.apache.ignite.ml.TestUtils; -import org.apache.ignite.ml.math.Vector; +import org.apache.ignite.ml.math.primitives.vector.Vector; import org.apache.ignite.ml.math.exceptions.CardinalityException; -import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; import org.apache.ignite.ml.regressions.logistic.binomial.LogisticRegressionModel; import org.apache.ignite.ml.regressions.logistic.multiclass.LogRegressionMultiClassModel; import org.junit.Test; @@ -35,48 +35,48 @@ public class LinearRegressionModelTest { /** */ @Test public void testPredict() { - Vector weights = new DenseLocalOnHeapVector(new double[]{2.0, 3.0}); + Vector weights = new DenseVector(new double[]{2.0, 3.0}); LinearRegressionModel mdl = new LinearRegressionModel(weights, 1.0); - Vector observation = new DenseLocalOnHeapVector(new double[]{1.0, 1.0}); + Vector observation = new DenseVector(new double[]{1.0, 1.0}); TestUtils.assertEquals(1.0 + 2.0 * 1.0 + 3.0 * 1.0, mdl.apply(observation), PRECISION); - observation = new DenseLocalOnHeapVector(new double[]{2.0, 1.0}); + observation = new DenseVector(new double[]{2.0, 1.0}); TestUtils.assertEquals(1.0 + 2.0 * 2.0 + 3.0 * 1.0, mdl.apply(observation), PRECISION); - observation = new DenseLocalOnHeapVector(new double[]{1.0, 2.0}); + observation = new DenseVector(new double[]{1.0, 2.0}); TestUtils.assertEquals(1.0 + 2.0 * 1.0 + 3.0 * 2.0, mdl.apply(observation), PRECISION); - observation = new DenseLocalOnHeapVector(new double[]{-2.0, 1.0}); + observation = new DenseVector(new double[]{-2.0, 1.0}); TestUtils.assertEquals(1.0 - 2.0 * 2.0 + 3.0 * 1.0, mdl.apply(observation), PRECISION); - observation = new DenseLocalOnHeapVector(new double[]{1.0, -2.0}); + observation = new DenseVector(new double[]{1.0, -2.0}); TestUtils.assertEquals(1.0 + 2.0 * 1.0 - 3.0 * 2.0, mdl.apply(observation), PRECISION); } /** 
*/ @Test public void testPredictWithMultiClasses() { - Vector weights1 = new DenseLocalOnHeapVector(new double[]{10.0, 0.0}); - Vector weights2 = new DenseLocalOnHeapVector(new double[]{0.0, 10.0}); - Vector weights3 = new DenseLocalOnHeapVector(new double[]{-1.0, -1.0}); + Vector weights1 = new DenseVector(new double[]{10.0, 0.0}); + Vector weights2 = new DenseVector(new double[]{0.0, 10.0}); + Vector weights3 = new DenseVector(new double[]{-1.0, -1.0}); LogRegressionMultiClassModel mdl = new LogRegressionMultiClassModel(); mdl.add(1, new LogisticRegressionModel(weights1, 0.0).withRawLabels(true)); mdl.add(2, new LogisticRegressionModel(weights2, 0.0).withRawLabels(true)); mdl.add(2, new LogisticRegressionModel(weights3, 0.0).withRawLabels(true)); - Vector observation = new DenseLocalOnHeapVector(new double[]{1.0, 1.0}); + Vector observation = new DenseVector(new double[]{1.0, 1.0}); TestUtils.assertEquals( 1.0, mdl.apply(observation), PRECISION); } /** */ @Test(expected = CardinalityException.class) public void testPredictOnAnObservationWithWrongCardinality() { - Vector weights = new DenseLocalOnHeapVector(new double[]{2.0, 3.0}); + Vector weights = new DenseVector(new double[]{2.0, 3.0}); LinearRegressionModel mdl = new LinearRegressionModel(weights, 1.0); - Vector observation = new DenseLocalOnHeapVector(new double[]{1.0}); + Vector observation = new DenseVector(new double[]{1.0}); mdl.apply(observation); } http://git-wip-us.apache.org/repos/asf/ignite/blob/26e40528/modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionSGDTrainerTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionSGDTrainerTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionSGDTrainerTest.java index 7c3cef1..ee38938 100644 --- 
a/modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionSGDTrainerTest.java +++ b/modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionSGDTrainerTest.java @@ -20,7 +20,7 @@ package org.apache.ignite.ml.regressions.linear; import java.util.Arrays; import java.util.HashMap; import java.util.Map; -import org.apache.ignite.ml.math.VectorUtils; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; import org.apache.ignite.ml.nn.UpdatesStrategy; import org.apache.ignite.ml.optimization.updatecalculators.RPropParameterUpdate; import org.apache.ignite.ml.optimization.updatecalculators.RPropUpdateCalculator; http://git-wip-us.apache.org/repos/asf/ignite/blob/26e40528/modules/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogRegMultiClassTrainerTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogRegMultiClassTrainerTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogRegMultiClassTrainerTest.java index b2d5e63..1d25524 100644 --- a/modules/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogRegMultiClassTrainerTest.java +++ b/modules/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogRegMultiClassTrainerTest.java @@ -22,8 +22,8 @@ import java.util.HashMap; import java.util.Map; import java.util.concurrent.ThreadLocalRandom; import org.apache.ignite.ml.TestUtils; -import org.apache.ignite.ml.math.VectorUtils; -import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector; +import org.apache.ignite.ml.math.primitives.vector.VectorUtils; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; import org.apache.ignite.ml.nn.UpdatesStrategy; import org.apache.ignite.ml.optimization.SmoothParametrized; import org.apache.ignite.ml.optimization.updatecalculators.SimpleGDParameterUpdate; @@ -93,7 +93,7 @@ public class 
LogRegMultiClassTrainerTest { (k, v) -> v[0] ); - TestUtils.assertEquals(-1, mdl.apply(new DenseLocalOnHeapVector(new double[]{100, 10})), PRECISION); - TestUtils.assertEquals(1, mdl.apply(new DenseLocalOnHeapVector(new double[]{10, 100})), PRECISION); + TestUtils.assertEquals(-1, mdl.apply(new DenseVector(new double[]{100, 10})), PRECISION); + TestUtils.assertEquals(1, mdl.apply(new DenseVector(new double[]{10, 100})), PRECISION); } } http://git-wip-us.apache.org/repos/asf/ignite/blob/26e40528/modules/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogisticRegressionModelTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogisticRegressionModelTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogisticRegressionModelTest.java index 1268a7d..679bd50 100644 --- a/modules/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogisticRegressionModelTest.java +++ b/modules/ml/src/test/java/org/apache/ignite/ml/regressions/logistic/LogisticRegressionModelTest.java @@ -18,9 +18,9 @@ package org.apache.ignite.ml.regressions.logistic; import org.apache.ignite.ml.TestUtils; -import org.apache.ignite.ml.math.Vector; +import org.apache.ignite.ml.math.primitives.vector.Vector; import org.apache.ignite.ml.math.exceptions.CardinalityException; -import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector; +import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector; import org.apache.ignite.ml.regressions.logistic.binomial.LogisticRegressionModel; import org.junit.Test; @@ -34,33 +34,33 @@ public class LogisticRegressionModelTest { /** */ @Test public void testPredict() { - Vector weights = new DenseLocalOnHeapVector(new double[]{2.0, 3.0}); + Vector weights = new DenseVector(new double[]{2.0, 3.0}); LogisticRegressionModel mdl = new LogisticRegressionModel(weights, 1.0).withRawLabels(true); - Vector observation 
= new DenseLocalOnHeapVector(new double[]{1.0, 1.0}); + Vector observation = new DenseVector(new double[]{1.0, 1.0}); TestUtils.assertEquals(sigmoid(1.0 + 2.0 * 1.0 + 3.0 * 1.0), mdl.apply(observation), PRECISION); - observation = new DenseLocalOnHeapVector(new double[]{2.0, 1.0}); + observation = new DenseVector(new double[]{2.0, 1.0}); TestUtils.assertEquals(sigmoid(1.0 + 2.0 * 2.0 + 3.0 * 1.0), mdl.apply(observation), PRECISION); - observation = new DenseLocalOnHeapVector(new double[]{1.0, 2.0}); + observation = new DenseVector(new double[]{1.0, 2.0}); TestUtils.assertEquals(sigmoid(1.0 + 2.0 * 1.0 + 3.0 * 2.0), mdl.apply(observation), PRECISION); - observation = new DenseLocalOnHeapVector(new double[]{-2.0, 1.0}); + observation = new DenseVector(new double[]{-2.0, 1.0}); TestUtils.assertEquals(sigmoid(1.0 - 2.0 * 2.0 + 3.0 * 1.0), mdl.apply(observation), PRECISION); - observation = new DenseLocalOnHeapVector(new double[]{1.0, -2.0}); + observation = new DenseVector(new double[]{1.0, -2.0}); TestUtils.assertEquals(sigmoid(1.0 + 2.0 * 1.0 - 3.0 * 2.0), mdl.apply(observation), PRECISION); } /** */ @Test(expected = CardinalityException.class) public void testPredictOnAnObservationWithWrongCardinality() { - Vector weights = new DenseLocalOnHeapVector(new double[]{2.0, 3.0}); + Vector weights = new DenseVector(new double[]{2.0, 3.0}); LogisticRegressionModel mdl = new LogisticRegressionModel(weights, 1.0); - Vector observation = new DenseLocalOnHeapVector(new double[]{1.0}); + Vector observation = new DenseVector(new double[]{1.0}); mdl.apply(observation); }
