srowen closed pull request #18936: [SPARK-21688][ML][MLLIB] make native BLAS
the first choice for BLAS level 1 operations for dense data
URL: https://github.com/apache/spark/pull/18936
This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:
As this is a foreign pull request (from a fork), the diff is supplied
below (as it won't show otherwise due to GitHub magic):
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/linalg/BLAS.scala
b/mllib/src/main/scala/org/apache/spark/mllib/linalg/BLAS.scala
index cb97742245689..97fc1773e04b3 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/linalg/BLAS.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/linalg/BLAS.scala
@@ -65,7 +65,7 @@ private[spark] object BLAS extends Serializable with Logging {
*/
private def axpy(a: Double, x: DenseVector, y: DenseVector): Unit = {
val n = x.size
- f2jBLAS.daxpy(n, a, x.values, 1, y.values, 1)
+ nativeBLAS.daxpy(n, a, x.values, 1, y.values, 1)
}
/**
@@ -125,7 +125,7 @@ private[spark] object BLAS extends Serializable with
Logging {
*/
private def dot(x: DenseVector, y: DenseVector): Double = {
val n = x.size
- f2jBLAS.ddot(n, x.values, 1, y.values, 1)
+ nativeBLAS.ddot(n, x.values, 1, y.values, 1)
}
/**
@@ -222,7 +222,7 @@ private[spark] object BLAS extends Serializable with
Logging {
case sx: SparseVector =>
f2jBLAS.dscal(sx.values.length, a, sx.values, 1)
case dx: DenseVector =>
- f2jBLAS.dscal(dx.values.length, a, dx.values, 1)
+ nativeBLAS.dscal(dx.values.length, a, dx.values, 1)
case _ =>
throw new IllegalArgumentException(s"scal doesn't support vector type
${x.getClass}.")
}
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]