Author: tommaso
Date: Mon Dec 10 13:18:30 2012
New Revision: 1419427
URL: http://svn.apache.org/viewvc?rev=1419427&view=rev
Log:
fixed style to 2 spaces
Modified:
labs/yay/trunk/core/src/main/java/org/apache/yay/BasicPerceptron.java
labs/yay/trunk/core/src/main/java/org/apache/yay/LogisticRegressionCostFunction.java
labs/yay/trunk/core/src/main/java/org/apache/yay/MaxSelectionFunction.java
labs/yay/trunk/core/src/main/java/org/apache/yay/StepActivationFunction.java
labs/yay/trunk/core/src/main/java/org/apache/yay/neuron/BinaryThresholdNeuron.java
labs/yay/trunk/core/src/main/java/org/apache/yay/neuron/LinearNeuron.java
labs/yay/trunk/core/src/main/java/org/apache/yay/neuron/RectifiedLinearNeuron.java
labs/yay/trunk/core/src/main/java/org/apache/yay/neuron/SigmoidNeuron.java
labs/yay/trunk/core/src/main/java/org/apache/yay/utils/ExamplesFactory.java
labs/yay/trunk/core/src/test/java/org/apache/yay/BasicPerceptronTest.java
labs/yay/trunk/core/src/test/java/org/apache/yay/LogisticRegressionCostFunctionTest.java
labs/yay/trunk/core/src/test/java/org/apache/yay/VoidLearningStrategyTest.java
Modified: labs/yay/trunk/core/src/main/java/org/apache/yay/BasicPerceptron.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/main/java/org/apache/yay/BasicPerceptron.java?rev=1419427&r1=1419426&r2=1419427&view=diff
==============================================================================
--- labs/yay/trunk/core/src/main/java/org/apache/yay/BasicPerceptron.java (original)
+++ labs/yay/trunk/core/src/main/java/org/apache/yay/BasicPerceptron.java Mon Dec 10 13:18:30 2012
@@ -24,47 +24,50 @@ import org.apache.yay.neuron.BinaryThres
import org.apache.yay.utils.ConversionUtils;
/**
- * A perceptron {@link NeuralNetwork} implementation based on {@link org.apache.yay.neuron.BinaryThresholdNeuron}s
+ * A perceptron {@link NeuralNetwork} implementation based on
+ * {@link org.apache.yay.neuron.BinaryThresholdNeuron}s
*/
public class BasicPerceptron implements NeuralNetwork<Double, Double> {
- private final BinaryThresholdNeuron perceptronNeuron;
+ private final BinaryThresholdNeuron perceptronNeuron;
- private final double[] currentWeights;
+ private final double[] currentWeights;
- /**
- * Create a perceptron given its input weights.
- * Assume bias weight is given and all the input samples have a corresponding 1 input for that
- *
- * @param inputWeights the array of starting weights for the perceptron
- */
- public BasicPerceptron(double... inputWeights) {
- this.perceptronNeuron = new BinaryThresholdNeuron(0, inputWeights);
- this.currentWeights = inputWeights;
- }
-
- @Override
- public void learn(TrainingExample<Double, Double>... trainingExamples) throws LearningException {
- for (TrainingExample<Double, Double> example : trainingExamples) {
- Vector<Double> doubles = ConversionUtils.toVector(example.getFeatureVector());
- Double[] inputs = doubles.toArray(new Double[doubles.size()]);
- Double calculatedOutput = perceptronNeuron.elaborate(inputs);
- int diff = calculatedOutput.compareTo(example.getOutput());
- if (diff > 0) {
- for (int i = 0; i < currentWeights.length; i++) {
- currentWeights[i] += inputs[i];
- }
- } else if (diff < 0) {
- for (int i = 0; i < currentWeights.length; i++) {
- currentWeights[i] -= inputs[i];
- }
- }
- perceptronNeuron.updateWeights(currentWeights);
+ /**
+ * Create a perceptron given its input weights. Assume bias weight is given and all the input
+ * samples have a corresponding 1 input for that
+ *
+ * @param inputWeights
+ * the array of starting weights for the perceptron
+ */
+ public BasicPerceptron(double... inputWeights) {
+ this.perceptronNeuron = new BinaryThresholdNeuron(0, inputWeights);
+ this.currentWeights = inputWeights;
+ }
+
+ @Override
+ public void learn(TrainingExample<Double, Double>... trainingExamples) throws LearningException {
+ for (TrainingExample<Double, Double> example : trainingExamples) {
+ Vector<Double> doubles = ConversionUtils.toVector(example.getFeatureVector());
+ Double[] inputs = doubles.toArray(new Double[doubles.size()]);
+ Double calculatedOutput = perceptronNeuron.elaborate(inputs);
+ int diff = calculatedOutput.compareTo(example.getOutput());
+ if (diff > 0) {
+ for (int i = 0; i < currentWeights.length; i++) {
+ currentWeights[i] += inputs[i];
}
+ } else if (diff < 0) {
+ for (int i = 0; i < currentWeights.length; i++) {
+ currentWeights[i] -= inputs[i];
+ }
+ }
+ perceptronNeuron.updateWeights(currentWeights);
}
+ }
- @Override
- public Double predict(Example<Double> input) throws PredictionException {
- return perceptronNeuron.elaborate(ConversionUtils.toVector(input.getFeatureVector()).toArray(new Double[input.getFeatureVector().size()]));
- }
+ @Override
+ public Double predict(Example<Double> input) throws PredictionException {
+ return perceptronNeuron.elaborate(ConversionUtils.toVector(input.getFeatureVector()).toArray(
+ new Double[input.getFeatureVector().size()]));
+ }
}
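A minimal usage sketch of the class above, for orientation (it mirrors BasicPerceptronTest further down in this commit; createTrainingExample is that test's own helper, and the first weight/feature pair is the bias, driven by a constant 1 input):

  BasicPerceptron basicPerceptron = new BasicPerceptron(1, 2, 3, 4);
  basicPerceptron.learn(createTrainingExample(1d, 4d, 5d, 6d),
      createTrainingExample(0.1d, 9d, 4d, 1.9d));
  // learn() compares the neuron's output with each example's target: when the output is
  // higher it adds the inputs to currentWeights, when it is lower it subtracts them,
  // and then pushes the result back through updateWeights().
  Double output = basicPerceptron.predict(createTrainingExample(null, 1d, 6d, 0.4d));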
Modified: labs/yay/trunk/core/src/main/java/org/apache/yay/LogisticRegressionCostFunction.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/main/java/org/apache/yay/LogisticRegressionCostFunction.java?rev=1419427&r1=1419426&r2=1419427&view=diff
==============================================================================
--- labs/yay/trunk/core/src/main/java/org/apache/yay/LogisticRegressionCostFunction.java (original)
+++ labs/yay/trunk/core/src/main/java/org/apache/yay/LogisticRegressionCostFunction.java Mon Dec 10 13:18:30 2012
@@ -25,51 +25,52 @@ import java.util.Collection;
*/
public class LogisticRegressionCostFunction implements CostFunction<WeightsMatrix, Double> {
- private final Double lambda;
+ private final Double lambda;
- public LogisticRegressionCostFunction(Double lambda) {
- this.lambda = lambda;
- }
-
- @Override
- public Double calculateCost(Collection<TrainingExample<Double, Double>> trainingExamples,
- ActivationFunction<Double> hypothesis,
- WeightsMatrix... parameters) throws Exception {
-
- Double errorTerm = calculateErrorTerm(parameters, hypothesis, trainingExamples);
- Double regularizationTerm = calculateRegularizationTerm(parameters, trainingExamples);
- return errorTerm + regularizationTerm;
- }
-
- private Double calculateRegularizationTerm(WeightsMatrix[] parameters,
- Collection<TrainingExample<Double, Double>> trainingExamples) {
- Double res = 1d;
- for (WeightsMatrix layerMatrix : parameters) {
- for (int i = 0; i < layerMatrix.getColumnDimension(); i++) {
- double[] column = layerMatrix.getColumn(i);
- // starting from 1 to avoid including the bias unit in regularization
- for (int j = 1; j < column.length; j++) {
- res+= Math.pow(column[j], 2d);
- }
- }
+ public LogisticRegressionCostFunction(Double lambda) {
+ this.lambda = lambda;
+ }
+
+ @Override
+ public Double calculateCost(Collection<TrainingExample<Double, Double>> trainingExamples,
+ ActivationFunction<Double> hypothesis, WeightsMatrix... parameters) throws Exception {
+
+ Double errorTerm = calculateErrorTerm(parameters, hypothesis, trainingExamples);
+ Double regularizationTerm = calculateRegularizationTerm(parameters, trainingExamples);
+ return errorTerm + regularizationTerm;
+ }
+
+ private Double calculateRegularizationTerm(WeightsMatrix[] parameters,
+ Collection<TrainingExample<Double, Double>> trainingExamples) {
+ Double res = 1d;
+ for (WeightsMatrix layerMatrix : parameters) {
+ for (int i = 0; i < layerMatrix.getColumnDimension(); i++) {
+ double[] column = layerMatrix.getColumn(i);
+ // starting from 1 to avoid including the bias unit in regularization
+ for (int j = 1; j < column.length; j++) {
+ res += Math.pow(column[j], 2d);
}
- return (lambda / (2d * trainingExamples.size())) * res;
+ }
}
+ return (lambda / (2d * trainingExamples.size())) * res;
+ }
- private Double calculateErrorTerm(WeightsMatrix[] parameters, ActivationFunction<Double> hypothesis,
- Collection<TrainingExample<Double, Double>> trainingExamples) throws PredictionException, CreationException {
- Double res = 0d;
- NeuralNetwork<Double, Double> neuralNetwork = NeuralNetworkFactory.create(
- trainingExamples,parameters, new VoidLearningStrategy<Double, Double>(),
- new FeedForwardStrategy(hypothesis));
-
-
- for (TrainingExample<Double, Double> input : trainingExamples) {
- // TODO : handle this for multiple outputs (multi class classification)
- Double predictedOutput = neuralNetwork.predict(input);
- Double sampleOutput = input.getOutput();
- res += sampleOutput * Math.log(predictedOutput) + (1d - sampleOutput) * Math.log(1d - predictedOutput);
- }
- return (-1d / trainingExamples.size()) * res;
+ private Double calculateErrorTerm(WeightsMatrix[] parameters,
+ ActivationFunction<Double> hypothesis,
+ Collection<TrainingExample<Double, Double>> trainingExamples) throws PredictionException,
+ CreationException {
+ Double res = 0d;
+ NeuralNetwork<Double, Double> neuralNetwork = NeuralNetworkFactory.create(trainingExamples,
+ parameters, new VoidLearningStrategy<Double, Double>(), new FeedForwardStrategy(
+ hypothesis));
+
+ for (TrainingExample<Double, Double> input : trainingExamples) {
+ // TODO : handle this for multiple outputs (multi class classification)
+ Double predictedOutput = neuralNetwork.predict(input);
+ Double sampleOutput = input.getOutput();
+ res += sampleOutput * Math.log(predictedOutput) + (1d - sampleOutput)
+ * Math.log(1d - predictedOutput);
}
+ return (-1d / trainingExamples.size()) * res;
+ }
}
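For reference, the quantity calculateCost assembles above is the usual regularized logistic-regression cost (textbook form, m training examples, hypothesis h, bias weights excluded from the regularization sum as the comment in calculateRegularizationTerm notes; the code seeds that sum with 1d rather than 0d, so it carries an extra constant lambda/(2m) on top of this):

  J(\Theta) = -\frac{1}{m} \sum_{i=1}^{m} \left[ y^{(i)} \log h_\Theta(x^{(i)}) + (1 - y^{(i)}) \log\left(1 - h_\Theta(x^{(i)})\right) \right] + \frac{\lambda}{2m} \sum_{j \ge 1} \theta_j^2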
Modified: labs/yay/trunk/core/src/main/java/org/apache/yay/MaxSelectionFunction.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/main/java/org/apache/yay/MaxSelectionFunction.java?rev=1419427&r1=1419426&r2=1419427&view=diff
==============================================================================
--- labs/yay/trunk/core/src/main/java/org/apache/yay/MaxSelectionFunction.java (original)
+++ labs/yay/trunk/core/src/main/java/org/apache/yay/MaxSelectionFunction.java Mon Dec 10 13:18:30 2012
@@ -26,8 +26,8 @@ import java.util.Collections;
*/
public class MaxSelectionFunction implements SelectionFunction<Collection<Comparable>, Comparable> {
- @Override
- public Comparable selectOutput(Collection<Comparable> neuralNetworkOutput) {
- return Collections.max(neuralNetworkOutput);
- }
+ @Override
+ public Comparable selectOutput(Collection<Comparable> neuralNetworkOutput) {
+ return Collections.max(neuralNetworkOutput);
+ }
}
Modified: labs/yay/trunk/core/src/main/java/org/apache/yay/StepActivationFunction.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/main/java/org/apache/yay/StepActivationFunction.java?rev=1419427&r1=1419426&r2=1419427&view=diff
==============================================================================
--- labs/yay/trunk/core/src/main/java/org/apache/yay/StepActivationFunction.java (original)
+++ labs/yay/trunk/core/src/main/java/org/apache/yay/StepActivationFunction.java Mon Dec 10 13:18:30 2012
@@ -23,15 +23,15 @@ package org.apache.yay;
*/
public class StepActivationFunction implements ActivationFunction<Double> {
- private final double center;
+ private final double center;
- public StepActivationFunction(double center) {
- this.center = center;
- }
+ public StepActivationFunction(double center) {
+ this.center = center;
+ }
- @Override
- public Double apply(Double signal) {
- return signal >= center ? 1d : 0d;
- }
+ @Override
+ public Double apply(Double signal) {
+ return signal >= center ? 1d : 0d;
+ }
}
Modified: labs/yay/trunk/core/src/main/java/org/apache/yay/neuron/BinaryThresholdNeuron.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/main/java/org/apache/yay/neuron/BinaryThresholdNeuron.java?rev=1419427&r1=1419426&r2=1419427&view=diff
==============================================================================
--- labs/yay/trunk/core/src/main/java/org/apache/yay/neuron/BinaryThresholdNeuron.java (original)
+++ labs/yay/trunk/core/src/main/java/org/apache/yay/neuron/BinaryThresholdNeuron.java Mon Dec 10 13:18:30 2012
@@ -22,31 +22,31 @@ import org.apache.yay.BasicElaborationUn
import org.apache.yay.StepActivationFunction;
/**
- * A neuron which: <ul>
+ * A neuron which:
+ * <ul>
* <li>first computes a weighted sum of the inputs</li>
- * <li>then send out a fixed size of activity if the weighted sum exceeds a
- * threshold</li>
+ * <li>then send out a fixed size of activity if the weighted sum exceeds a threshold</li>
* </ul>
*/
public class BinaryThresholdNeuron extends BasicElaborationUnit<Double> {
- private double[] weights;
+ private double[] weights;
- public BinaryThresholdNeuron(double threshold, double... weights) {
- this.activationFunction = new StepActivationFunction(threshold);
- this.weights = weights;
- }
+ public BinaryThresholdNeuron(double threshold, double... weights) {
+ this.activationFunction = new StepActivationFunction(threshold);
+ this.weights = weights;
+ }
- public void updateWeights(double... weights) {
- this.weights = weights;
- }
+ public void updateWeights(double... weights) {
+ this.weights = weights;
+ }
- @Override
- protected Double combine(Double... inputs) {
- Double res = 0d;
- for (int i = 0; i < inputs.length; i++) {
- res += inputs[i] * weights[i];
- }
- return res;
+ @Override
+ protected Double combine(Double... inputs) {
+ Double res = 0d;
+ for (int i = 0; i < inputs.length; i++) {
+ res += inputs[i] * weights[i];
}
+ return res;
+ }
}
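Putting the two pieces together (assuming BasicElaborationUnit's elaborate(), which is not part of this diff, applies activationFunction to the value returned by combine()): combine() forms the weighted sum of the inputs and the StepActivationFunction shown earlier maps it to 0 or 1, so the unit computes, with t the threshold passed to the constructor:

  y = \begin{cases} 1 & \text{if } \sum_i w_i x_i \ge t \\ 0 & \text{otherwise} \end{cases}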
Modified: labs/yay/trunk/core/src/main/java/org/apache/yay/neuron/LinearNeuron.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/main/java/org/apache/yay/neuron/LinearNeuron.java?rev=1419427&r1=1419426&r2=1419427&view=diff
==============================================================================
--- labs/yay/trunk/core/src/main/java/org/apache/yay/neuron/LinearNeuron.java (original)
+++ labs/yay/trunk/core/src/main/java/org/apache/yay/neuron/LinearNeuron.java Mon Dec 10 13:18:30 2012
@@ -26,23 +26,23 @@ import org.apache.yay.IdentityActivation
*/
public class LinearNeuron extends BasicElaborationUnit<Double> {
- private final double[] weights;
- private final Double bias;
+ private final double[] weights;
- public LinearNeuron(Double bias, double... weights) {
- this.activationFunction = new IdentityActivationFunction<Double>();
- this.bias = bias;
- this.weights = weights;
- }
+ private final Double bias;
- @Override
- protected Double combine(Double... inputs) {
- Double res = bias;
- for (int i = 0; i < inputs.length; i++) {
- res += inputs[i] * weights[i];
- }
- return res;
- }
+ public LinearNeuron(Double bias, double... weights) {
+ this.activationFunction = new IdentityActivationFunction<Double>();
+ this.bias = bias;
+ this.weights = weights;
+ }
+ @Override
+ protected Double combine(Double... inputs) {
+ Double res = bias;
+ for (int i = 0; i < inputs.length; i++) {
+ res += inputs[i] * weights[i];
+ }
+ return res;
+ }
}
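Under the same assumption about BasicElaborationUnit, the identity activation leaves the combined value untouched, so a LinearNeuron with bias b simply outputs

  y = b + \sum_i w_i x_i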
Modified: labs/yay/trunk/core/src/main/java/org/apache/yay/neuron/RectifiedLinearNeuron.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/main/java/org/apache/yay/neuron/RectifiedLinearNeuron.java?rev=1419427&r1=1419426&r2=1419427&view=diff
==============================================================================
--- labs/yay/trunk/core/src/main/java/org/apache/yay/neuron/RectifiedLinearNeuron.java (original)
+++ labs/yay/trunk/core/src/main/java/org/apache/yay/neuron/RectifiedLinearNeuron.java Mon Dec 10 13:18:30 2012
@@ -21,18 +21,18 @@ package org.apache.yay.neuron;
import org.apache.yay.ActivationFunction;
/**
- * A rectified linear neuron which is similar to {@link LinearNeuron} but cuts
- * all the negative outputs to 0
+ * A rectified linear neuron which is similar to {@link LinearNeuron} but cuts all the negative
+ * outputs to 0
*/
public class RectifiedLinearNeuron extends LinearNeuron {
- public RectifiedLinearNeuron(Double bias, double... weights) {
- super(bias, weights);
- this.activationFunction = new ActivationFunction<Double>() {
- @Override
- public Double apply(Double signal) {
- return signal > 0 ? signal : 0;
- }
- };
- }
+ public RectifiedLinearNeuron(Double bias, double... weights) {
+ super(bias, weights);
+ this.activationFunction = new ActivationFunction<Double>() {
+ @Override
+ public Double apply(Double signal) {
+ return signal > 0 ? signal : 0;
+ }
+ };
+ }
}
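The anonymous activation installed above clips negative values to 0, so on top of the weighted sum inherited from LinearNeuron the output is

  y = \max\left(0,\; b + \sum_i w_i x_i\right)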
Modified: labs/yay/trunk/core/src/main/java/org/apache/yay/neuron/SigmoidNeuron.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/main/java/org/apache/yay/neuron/SigmoidNeuron.java?rev=1419427&r1=1419426&r2=1419427&view=diff
==============================================================================
--- labs/yay/trunk/core/src/main/java/org/apache/yay/neuron/SigmoidNeuron.java (original)
+++ labs/yay/trunk/core/src/main/java/org/apache/yay/neuron/SigmoidNeuron.java Mon Dec 10 13:18:30 2012
@@ -25,8 +25,8 @@ import org.apache.yay.SigmoidFunction;
*/
public class SigmoidNeuron extends LinearNeuron {
- public SigmoidNeuron(Double bias, double... weights) {
- super(bias, weights);
- this.activationFunction = new SigmoidFunction();
- }
+ public SigmoidNeuron(Double bias, double... weights) {
+ super(bias, weights);
+ this.activationFunction = new SigmoidFunction();
+ }
}
Modified: labs/yay/trunk/core/src/main/java/org/apache/yay/utils/ExamplesFactory.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/main/java/org/apache/yay/utils/ExamplesFactory.java?rev=1419427&r1=1419426&r2=1419427&view=diff
==============================================================================
--- labs/yay/trunk/core/src/main/java/org/apache/yay/utils/ExamplesFactory.java (original)
+++ labs/yay/trunk/core/src/main/java/org/apache/yay/utils/ExamplesFactory.java Mon Dec 10 13:18:30 2012
@@ -29,41 +29,41 @@ import org.apache.yay.TrainingExample;
*/
public class ExamplesFactory {
- public static TrainingExample<Double, Double> createDoubleTrainingExample(final Double output, final Double... featuresValues) {
- return new TrainingExample<Double, Double>() {
- @Override
- public Vector<Feature<Double>> getFeatureVector() {
- return doublesToFeatureVector(featuresValues);
- }
-
- @Override
- public Double getOutput() {
- return output;
- }
- };
- }
-
- public static Example<Double> createDoubleExample(final Double... featuresValues) {
- return new Example<Double>() {
- @Override
- public Vector<Feature<Double>> getFeatureVector() {
- return doublesToFeatureVector(featuresValues);
- }
- };
+ public static TrainingExample<Double, Double> createDoubleTrainingExample(final Double output,
+ final Double... featuresValues) {
+ return new TrainingExample<Double, Double>() {
+ @Override
+ public Vector<Feature<Double>> getFeatureVector() {
+ return doublesToFeatureVector(featuresValues);
+ }
+
+ @Override
+ public Double getOutput() {
+ return output;
+ }
+ };
+ }
+
+ public static Example<Double> createDoubleExample(final Double... featuresValues) {
+ return new Example<Double>() {
+ @Override
+ public Vector<Feature<Double>> getFeatureVector() {
+ return doublesToFeatureVector(featuresValues);
+ }
+ };
+ }
+
+ private static Vector<Feature<Double>> doublesToFeatureVector(Double[] featuresValues) {
+ Vector<Feature<Double>> features = new Vector<Feature<Double>>();
+ Feature<Double> byasFeature = new Feature<Double>();
+ byasFeature.setValue(1d);
+ features.add(byasFeature);
+ for (Double d : featuresValues) {
+ Feature<Double> feature = new Feature<Double>();
+ feature.setValue(d);
+ features.add(feature);
}
-
- private static Vector<Feature<Double>> doublesToFeatureVector(Double[] featuresValues) {
- Vector<Feature<Double>> features = new Vector<Feature<Double>>();
- Feature<Double> byasFeature = new Feature<Double>();
- byasFeature.setValue(1d);
- features.add(byasFeature);
- for (Double d : featuresValues) {
- Feature<Double> feature = new Feature<Double>();
- feature.setValue(d);
- features.add(feature);
- }
- return features;
- }
-
+ return features;
+ }
}
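A small illustration of what the factory above produces (sketch only, values borrowed from the OR test further down): doublesToFeatureVector() prepends a constant bias feature before the supplied values, so

  TrainingExample<Double, Double> example = ExamplesFactory.createDoubleTrainingExample(1d, 0d, 1d);
  // example.getOutput()        -> 1d
  // example.getFeatureVector() -> feature values [1d, 0d, 1d]
  //                               (the leading 1d is the bias feature added by doublesToFeatureVector)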
Modified: labs/yay/trunk/core/src/test/java/org/apache/yay/BasicPerceptronTest.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/test/java/org/apache/yay/BasicPerceptronTest.java?rev=1419427&r1=1419426&r2=1419427&view=diff
==============================================================================
--- labs/yay/trunk/core/src/test/java/org/apache/yay/BasicPerceptronTest.java (original)
+++ labs/yay/trunk/core/src/test/java/org/apache/yay/BasicPerceptronTest.java Mon Dec 10 13:18:30 2012
@@ -29,48 +29,47 @@ import static org.junit.Assert.assertEqu
*/
public class BasicPerceptronTest {
- @Test
- @SuppressWarnings("unchecked")
- public void testLearnPhase() throws Exception {
- BasicPerceptron basicPerceptron = new BasicPerceptron(1, 2, 3, 4);
- basicPerceptron.learn(createTrainingExample(1d, 4d, 5d, 6d),
- createTrainingExample(1d, 5d, 6d, 0.5d),
- createTrainingExample(0.1d, 9d, 4d, 1.9d),
- createTrainingExample(0.11d, 4d, 2.6d, 9.5d));
- }
+ @Test
+ @SuppressWarnings("unchecked")
+ public void testLearnPhase() throws Exception {
+ BasicPerceptron basicPerceptron = new BasicPerceptron(1, 2, 3, 4);
+ basicPerceptron.learn(createTrainingExample(1d, 4d, 5d, 6d),
+ createTrainingExample(1d, 5d, 6d, 0.5d), createTrainingExample(0.1d, 9d, 4d, 1.9d),
+ createTrainingExample(0.11d, 4d, 2.6d, 9.5d));
+ }
- @Test
- @SuppressWarnings("unchecked")
- public void testPredictionPhase() throws Exception {
- BasicPerceptron basicPerceptron = new BasicPerceptron(1, 2, 3, 4);
- basicPerceptron.learn(createTrainingExample(1d, 4d, 5d, 6d),
- createTrainingExample(1d, 5d, 6d, 0.5d),
- createTrainingExample(0.1d, 9d, 4d, 1.9d),
- createTrainingExample(0.11d, 4d, 2.6d, 9.5d));
- Double output = basicPerceptron.predict(createTrainingExample(null, 1d, 6d, 0.4d));
- assertEquals(Double.valueOf(1d), output);
- }
+ @Test
+ @SuppressWarnings("unchecked")
+ public void testPredictionPhase() throws Exception {
+ BasicPerceptron basicPerceptron = new BasicPerceptron(1, 2, 3, 4);
+ basicPerceptron.learn(createTrainingExample(1d, 4d, 5d, 6d),
+ createTrainingExample(1d, 5d, 6d, 0.5d), createTrainingExample(0.1d, 9d, 4d, 1.9d),
+ createTrainingExample(0.11d, 4d, 2.6d, 9.5d));
+ Double output = basicPerceptron.predict(createTrainingExample(null, 1d, 6d, 0.4d));
+ assertEquals(Double.valueOf(1d), output);
+ }
- private TrainingExample<Double, Double> createTrainingExample(final Double output, final Double... params) {
- return new TrainingExample<Double, Double>() {
- @Override
- public Vector<Feature<Double>> getFeatureVector() {
- Vector<Feature<Double>> features = new Vector<Feature<Double>>();
- Feature<Double> byasFeature = new Feature<Double>();
- byasFeature.setValue(1d);
- features.add(byasFeature);
- for (Double d : params) {
- Feature<Double> feature = new Feature<Double>();
- feature.setValue(d);
- features.add(feature);
- }
- return features;
- }
+ private TrainingExample<Double, Double> createTrainingExample(final Double output,
+ final Double... params) {
+ return new TrainingExample<Double, Double>() {
+ @Override
+ public Vector<Feature<Double>> getFeatureVector() {
+ Vector<Feature<Double>> features = new Vector<Feature<Double>>();
+ Feature<Double> byasFeature = new Feature<Double>();
+ byasFeature.setValue(1d);
+ features.add(byasFeature);
+ for (Double d : params) {
+ Feature<Double> feature = new Feature<Double>();
+ feature.setValue(d);
+ features.add(feature);
+ }
+ return features;
+ }
- @Override
- public Double getOutput() {
- return output;
- }
- };
- }
+ @Override
+ public Double getOutput() {
+ return output;
+ }
+ };
+ }
}
Modified: labs/yay/trunk/core/src/test/java/org/apache/yay/LogisticRegressionCostFunctionTest.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/test/java/org/apache/yay/LogisticRegressionCostFunctionTest.java?rev=1419427&r1=1419426&r2=1419427&view=diff
==============================================================================
--- labs/yay/trunk/core/src/test/java/org/apache/yay/LogisticRegressionCostFunctionTest.java (original)
+++ labs/yay/trunk/core/src/test/java/org/apache/yay/LogisticRegressionCostFunctionTest.java Mon Dec 10 13:18:30 2012
@@ -31,23 +31,24 @@ import static org.junit.Assert.assertTru
*/
public class LogisticRegressionCostFunctionTest {
- @Test
- public void testORParametersCost() throws Exception {
- CostFunction<WeightsMatrix, Double> costFunction = new LogisticRegressionCostFunction(0.1d);
- Collection<TrainingExample<Double, Double>> trainingExamples = new LinkedList<TrainingExample<Double, Double>>();
- TrainingExample<Double, Double> example1 = ExamplesFactory.createDoubleTrainingExample(1d, 0d, 1d);
- TrainingExample<Double, Double> example2 = ExamplesFactory.createDoubleTrainingExample(1d, 1d, 1d);
- TrainingExample<Double, Double> example3 = ExamplesFactory.createDoubleTrainingExample(0d, 1d, 1d);
- TrainingExample<Double, Double> example4 = ExamplesFactory.createDoubleTrainingExample(0d, 0d, 0d);
- trainingExamples.add(example1);
- trainingExamples.add(example2);
- trainingExamples.add(example3);
- trainingExamples.add(example4);
- double[][] weights = {{-10d, 20d, 20d}};
- WeightsMatrix singleOrLayerWeights = new WeightsMatrix(weights);
- WeightsMatrix[] orWeightsMatrixSet = new WeightsMatrix[]{singleOrLayerWeights};
- Double cost = costFunction.calculateCost(trainingExamples, new SigmoidFunction(), orWeightsMatrixSet);
- assertTrue("cost should not be negative", cost > 0d);
- }
+ @Test
+ public void testORParametersCost() throws Exception {
+ CostFunction<WeightsMatrix, Double> costFunction = new LogisticRegressionCostFunction(0.1d);
+ Collection<TrainingExample<Double, Double>> trainingExamples = new LinkedList<TrainingExample<Double, Double>>();
+ TrainingExample<Double, Double> example1 = ExamplesFactory.createDoubleTrainingExample(1d, 0d, 1d);
+ TrainingExample<Double, Double> example2 = ExamplesFactory.createDoubleTrainingExample(1d, 1d, 1d);
+ TrainingExample<Double, Double> example3 = ExamplesFactory.createDoubleTrainingExample(0d, 1d, 1d);
+ TrainingExample<Double, Double> example4 = ExamplesFactory.createDoubleTrainingExample(0d, 0d, 0d);
+ trainingExamples.add(example1);
+ trainingExamples.add(example2);
+ trainingExamples.add(example3);
+ trainingExamples.add(example4);
+ double[][] weights = { { -10d, 20d, 20d } };
+ WeightsMatrix singleOrLayerWeights = new WeightsMatrix(weights);
+ WeightsMatrix[] orWeightsMatrixSet = new WeightsMatrix[] { singleOrLayerWeights };
+ Double cost = costFunction.calculateCost(trainingExamples, new SigmoidFunction(),
+ orWeightsMatrixSet);
+ assertTrue("cost should not be negative", cost > 0d);
+ }
}
Modified: labs/yay/trunk/core/src/test/java/org/apache/yay/VoidLearningStrategyTest.java
URL: http://svn.apache.org/viewvc/labs/yay/trunk/core/src/test/java/org/apache/yay/VoidLearningStrategyTest.java?rev=1419427&r1=1419426&r2=1419427&view=diff
==============================================================================
--- labs/yay/trunk/core/src/test/java/org/apache/yay/VoidLearningStrategyTest.java (original)
+++ labs/yay/trunk/core/src/test/java/org/apache/yay/VoidLearningStrategyTest.java Mon Dec 10 13:18:30 2012
@@ -33,12 +33,13 @@ public class VoidLearningStrategyTest {
@Test
public void testNoLearning() throws Exception {
VoidLearningStrategy<String, String> learningStrategy = new VoidLearningStrategy<String, String>();
- Collection<TrainingExample<String, String>> trainingExamples = new LinkedList<TrainingExample<String, String>>();
- WeightsMatrix[] weightsMatrixSet = new WeightsMatrix[1];
- double[][] weights = {{1d, 2d,}, {2d, 4d}};
- weightsMatrixSet[0] = new WeightsMatrix(weights);
- WeightsMatrix[] learnedWeights = learningStrategy.learnWeights(weightsMatrixSet,trainingExamples);
- assertTrue(learnedWeights.equals(weightsMatrixSet));
+ Collection<TrainingExample<String, String>> trainingExamples = new LinkedList<TrainingExample<String, String>>();
+ WeightsMatrix[] weightsMatrixSet = new WeightsMatrix[1];
+ double[][] weights = { { 1d, 2d, }, { 2d, 4d } };
+ weightsMatrixSet[0] = new WeightsMatrix(weights);
+ WeightsMatrix[] learnedWeights = learningStrategy.learnWeights(weightsMatrixSet,
+ trainingExamples);
+ assertTrue(learnedWeights.equals(weightsMatrixSet));
}
}