Repository: spark
Updated Branches:
refs/heads/branch-1.0 bb90e87f6 -> 0972b6251
Update GradientDescentSuite.scala
use a faster way to construct an array
Author: baishuo(白硕)
Closes #588 from baishuo/master and squashes the following commits:
45b95fb [baishuo(白硕)] Update GradientDescentSuite.scala
c03b61c [baishuo(白硕)] Update GradientDescentSuite.scala
b666d27 [baishuo(白硕)] Update GradientDescentSuite.scala
(cherry picked from commit 0c19bb161b9b2b96c0c55d3ea09e81fd798cbec0)
Signed-off-by: Patrick Wendell
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/0972b625
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/0972b625
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/0972b625
Branch: refs/heads/branch-1.0
Commit: 0972b625199671b786e2659f870e2b3ff2cdb957
Parents: bb90e87
Author: baishuo(白硕)
Authored: Wed May 7 16:02:55 2014 -0700
Committer: Patrick Wendell
Committed: Wed May 7 16:03:02 2014 -0700
--
.../apache/spark/mllib/optimization/GradientDescentSuite.scala | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
--
http://git-wip-us.apache.org/repos/asf/spark/blob/0972b625/mllib/src/test/scala/org/apache/spark/mllib/optimization/GradientDescentSuite.scala
--
diff --git
a/mllib/src/test/scala/org/apache/spark/mllib/optimization/GradientDescentSuite.scala
b/mllib/src/test/scala/org/apache/spark/mllib/optimization/GradientDescentSuite.scala
index c4b4334..8a16284 100644
---
a/mllib/src/test/scala/org/apache/spark/mllib/optimization/GradientDescentSuite.scala
+++
b/mllib/src/test/scala/org/apache/spark/mllib/optimization/GradientDescentSuite.scala
@@ -81,11 +81,11 @@ class GradientDescentSuite extends FunSuite with
LocalSparkContext with ShouldMa
// Add a extra variable consisting of all 1.0's for the intercept.
val testData = GradientDescentSuite.generateGDInput(A, B, nPoints, 42)
val data = testData.map { case LabeledPoint(label, features) =>
- label -> Vectors.dense(1.0, features.toArray: _*)
+ label -> Vectors.dense(1.0 +: features.toArray)
}
val dataRDD = sc.parallelize(data, 2).cache()
-val initialWeightsWithIntercept = Vectors.dense(1.0, initialWeights: _*)
+val initialWeightsWithIntercept = Vectors.dense(1.0 +:
initialWeights.toArray)
val (_, loss) = GradientDescent.runMiniBatchSGD(
dataRDD,
@@ -111,7 +111,7 @@ class GradientDescentSuite extends FunSuite with
LocalSparkContext with ShouldMa
// Add a extra variable consisting of all 1.0's for the intercept.
val testData = GradientDescentSuite.generateGDInput(2.0, -1.5, 1, 42)
val data = testData.map { case LabeledPoint(label, features) =>
- label -> Vectors.dense(1.0, features.toArray: _*)
+ label -> Vectors.dense(1.0 +: features.toArray)
}
val dataRDD = sc.parallelize(data, 2).cache()