Author: tommaso
Date: Tue Sep  1 12:26:55 2015
New Revision: 1700527

URL: http://svn.apache.org/r1700527
Log:
OPENNLP-713 - slightly enhanced some tests, made Hypothesis immutable

Modified:
    
opennlp/sandbox/nlp-utils/src/main/java/org/apache/opennlp/utils/regression/GradientDescentUtils.java
    
opennlp/sandbox/nlp-utils/src/main/java/org/apache/opennlp/utils/regression/Hypothesis.java
    
opennlp/sandbox/nlp-utils/src/main/java/org/apache/opennlp/utils/regression/LinearCombinationHypothesis.java
    
opennlp/sandbox/nlp-utils/src/test/java/org/apache/opennlp/utils/anomalydetection/AnomalyDetectionUtilsTest.java
    
opennlp/sandbox/nlp-utils/src/test/java/org/apache/opennlp/utils/regression/GradientDescentUtilsTest.java
    
opennlp/sandbox/nlp-utils/src/test/java/org/apache/opennlp/utils/regression/RegressionModelUtilsTest.java

Modified: 
opennlp/sandbox/nlp-utils/src/main/java/org/apache/opennlp/utils/regression/GradientDescentUtils.java
URL: 
http://svn.apache.org/viewvc/opennlp/sandbox/nlp-utils/src/main/java/org/apache/opennlp/utils/regression/GradientDescentUtils.java?rev=1700527&r1=1700526&r2=1700527&view=diff
==============================================================================
--- 
opennlp/sandbox/nlp-utils/src/main/java/org/apache/opennlp/utils/regression/GradientDescentUtils.java
 (original)
+++ 
opennlp/sandbox/nlp-utils/src/main/java/org/apache/opennlp/utils/regression/GradientDescentUtils.java
 Tue Sep  1 12:26:55 2015
@@ -33,14 +33,13 @@ public class GradientDescentUtils {
    * Calculates batch gradient descent on the give hypothesis, training set 
and learning rate alpha.
    * The algorithms iteratively adjusts the hypothesis parameters
    *
-   * @param hypothesis  the hypothesis representing the model used
    * @param trainingSet the training set used to fit the parameters
    * @param alpha       the learning rate alpha used to define how big the 
descent steps are
    */
-  public static void batchGradientDescent(Hypothesis hypothesis, TrainingSet 
trainingSet, double alpha) {
+  public static void batchGradientDescent(TrainingSet trainingSet, double 
alpha) {
     // set initial random weights
     double[] parameters = 
initializeRandomWeights(trainingSet.iterator().next().getInputs().length);
-    hypothesis.updateParameters(parameters);
+    Hypothesis hypothesis = new LinearCombinationHypothesis(parameters);
 
     int iterations = 0;
 
@@ -52,7 +51,7 @@ public class GradientDescentUtils {
       if (newCost > cost) {
         throw new RuntimeException("failed to converge at iteration " + 
iterations + " with cost going from " + cost + " to " + newCost);
       } else if (cost == newCost || newCost < THRESHOLD || iterations > 
MAX_ITERATIONS) {
-        System.out.println(cost + " with parameters " + 
Arrays.toString(parameters));
+        System.out.println(cost + " with parameters " + 
Arrays.toString(parameters) + "(" + iterations + " iterations)");
         break;
       }
 
@@ -63,7 +62,7 @@ public class GradientDescentUtils {
       parameters = RegressionModelUtils.batchLeastMeanSquareUpdate(parameters, 
alpha, trainingSet, hypothesis);
 
       // update weights in the hypothesis
-      hypothesis.updateParameters(parameters);
+      hypothesis = new LinearCombinationHypothesis(parameters);
 
       iterations++;
     }

Modified: 
opennlp/sandbox/nlp-utils/src/main/java/org/apache/opennlp/utils/regression/Hypothesis.java
URL: 
http://svn.apache.org/viewvc/opennlp/sandbox/nlp-utils/src/main/java/org/apache/opennlp/utils/regression/Hypothesis.java?rev=1700527&r1=1700526&r2=1700527&view=diff
==============================================================================
--- 
opennlp/sandbox/nlp-utils/src/main/java/org/apache/opennlp/utils/regression/Hypothesis.java
 (original)
+++ 
opennlp/sandbox/nlp-utils/src/main/java/org/apache/opennlp/utils/regression/Hypothesis.java
 Tue Sep  1 12:26:55 2015
@@ -31,10 +31,4 @@ public interface Hypothesis {
    */
   double calculateOutput(double[] inputs);
 
-  /**
-   * update the internal model's parameters.
-   *
-   * @param parameters an array of <code>double</code> containing the updated 
parameters
-   */
-  void updateParameters(double[] parameters);
 }

Modified: 
opennlp/sandbox/nlp-utils/src/main/java/org/apache/opennlp/utils/regression/LinearCombinationHypothesis.java
URL: 
http://svn.apache.org/viewvc/opennlp/sandbox/nlp-utils/src/main/java/org/apache/opennlp/utils/regression/LinearCombinationHypothesis.java?rev=1700527&r1=1700526&r2=1700527&view=diff
==============================================================================
--- 
opennlp/sandbox/nlp-utils/src/main/java/org/apache/opennlp/utils/regression/LinearCombinationHypothesis.java
 (original)
+++ 
opennlp/sandbox/nlp-utils/src/main/java/org/apache/opennlp/utils/regression/LinearCombinationHypothesis.java
 Tue Sep  1 12:26:55 2015
@@ -22,7 +22,11 @@ package org.apache.opennlp.utils.regress
  * Simplest {@link Hypothesis} which just linearly combines inputs with weights
  */
 public class LinearCombinationHypothesis implements Hypothesis {
-  private double[] weights;
+  private final double[] weights;
+
+  public LinearCombinationHypothesis(double... weights) {
+    this.weights = weights;
+  }
 
   @Override
   public double calculateOutput(double[] inputs) {
@@ -33,8 +37,4 @@ public class LinearCombinationHypothesis
     return output;
   }
 
-  @Override
-  public void updateParameters(double[] parameters) {
-    weights = parameters;
-  }
-}
+ }

Modified: 
opennlp/sandbox/nlp-utils/src/test/java/org/apache/opennlp/utils/anomalydetection/AnomalyDetectionUtilsTest.java
URL: 
http://svn.apache.org/viewvc/opennlp/sandbox/nlp-utils/src/test/java/org/apache/opennlp/utils/anomalydetection/AnomalyDetectionUtilsTest.java?rev=1700527&r1=1700526&r2=1700527&view=diff
==============================================================================
--- 
opennlp/sandbox/nlp-utils/src/test/java/org/apache/opennlp/utils/anomalydetection/AnomalyDetectionUtilsTest.java
 (original)
+++ 
opennlp/sandbox/nlp-utils/src/test/java/org/apache/opennlp/utils/anomalydetection/AnomalyDetectionUtilsTest.java
 Tue Sep  1 12:26:55 2015
@@ -23,6 +23,7 @@ import org.apache.opennlp.utils.Training
 import org.apache.opennlp.utils.TrainingSet;
 import org.junit.Test;
 
+import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
@@ -39,19 +40,17 @@ public class AnomalyDetectionUtilsTest {
     assertNotNull(mus);
     double[] sigmas = AnomalyDetectionUtils.fitSigmas(mus, trainingSet);
     assertNotNull(sigmas);
-    TrainingExample newInput = new TrainingExample(new double[]{1d, 2d, 1000d, 
123d, 0.1d}, 0d);
+    TrainingExample newInput = new TrainingExample(new 
double[]{0.4d,0.5d,0.5d,0.5d,0.2d}, 0d);
     double probability = 
AnomalyDetectionUtils.getGaussianProbability(newInput, mus, sigmas);
-    assertTrue("negative probability " + probability, 0 <= probability);
-    assertTrue("probability bigger than 1 " + probability, 1 >= probability);
+    assertEquals(0.5d, probability, 0.5d);
   }
 
   @Test
   public void testGaussianDistributionProbabilityFromTrainingSet() throws 
Exception {
     TrainingSet trainingSet = new TrainingSet();
     TestUtils.fillTrainingSet(trainingSet, 100, 5);
-    TrainingExample newInput = new TrainingExample(new double[]{1d, 2d, 1000d, 
123d, 0.1d}, 0d);
+    TrainingExample newInput = new TrainingExample(new 
double[]{0.4d,0.5d,0.5d,0.5d,0.2d}, 0d);
     double probability = 
AnomalyDetectionUtils.getGaussianProbability(newInput, trainingSet);
-    assertTrue("negative probability " + probability, 0 <= probability);
-    assertTrue("probability bigger than 1 " + probability, 1 >= probability);
+    assertEquals(0.5d, probability, 0.5d);
   }
 }

Modified: 
opennlp/sandbox/nlp-utils/src/test/java/org/apache/opennlp/utils/regression/GradientDescentUtilsTest.java
URL: 
http://svn.apache.org/viewvc/opennlp/sandbox/nlp-utils/src/test/java/org/apache/opennlp/utils/regression/GradientDescentUtilsTest.java?rev=1700527&r1=1700526&r2=1700527&view=diff
==============================================================================
--- 
opennlp/sandbox/nlp-utils/src/test/java/org/apache/opennlp/utils/regression/GradientDescentUtilsTest.java
 (original)
+++ 
opennlp/sandbox/nlp-utils/src/test/java/org/apache/opennlp/utils/regression/GradientDescentUtilsTest.java
 Tue Sep  1 12:26:55 2015
@@ -31,7 +31,7 @@ public class GradientDescentUtilsTest {
   public void testConvergence() throws Exception {
     TrainingSet trainingSet = new TrainingSet();
     TestUtils.fillTrainingSet(trainingSet, 100, 5);
-    GradientDescentUtils.batchGradientDescent(new 
LinearCombinationHypothesis(), trainingSet, 0.00002);
+    GradientDescentUtils.batchGradientDescent(trainingSet, 0.00002);
   }
 
 }

Modified: 
opennlp/sandbox/nlp-utils/src/test/java/org/apache/opennlp/utils/regression/RegressionModelUtilsTest.java
URL: 
http://svn.apache.org/viewvc/opennlp/sandbox/nlp-utils/src/test/java/org/apache/opennlp/utils/regression/RegressionModelUtilsTest.java?rev=1700527&r1=1700526&r2=1700527&view=diff
==============================================================================
--- 
opennlp/sandbox/nlp-utils/src/test/java/org/apache/opennlp/utils/regression/RegressionModelUtilsTest.java
 (original)
+++ 
opennlp/sandbox/nlp-utils/src/test/java/org/apache/opennlp/utils/regression/RegressionModelUtilsTest.java
 Tue Sep  1 12:26:55 2015
@@ -34,8 +34,7 @@ public class RegressionModelUtilsTest {
   public void testLMS() throws Exception {
     TrainingSet trainingSet = new TrainingSet();
     trainingSet.add(new TrainingExample(new double[]{10, 10}, 1));
-    LinearCombinationHypothesis hypothesis = new LinearCombinationHypothesis();
-    hypothesis.updateParameters(new double[]{1, 1});
+    LinearCombinationHypothesis hypothesis = new 
LinearCombinationHypothesis(1, 1);
     double[] updatedParameters = 
RegressionModelUtils.batchLeastMeanSquareUpdate(new double[]{1, 1}, 0.1, 
trainingSet, hypothesis);
     assertNotNull(updatedParameters);
     assertTrue(updatedParameters.length == 2);


Reply via email to