srowen commented on a change in pull request #32991:
URL: https://github.com/apache/spark/pull/32991#discussion_r655427006
##########
File path: mllib-local/src/main/scala/org/apache/spark/ml/impl/Utils.scala
##########
@@ -99,30 +97,44 @@ private[spark] object Utils {
/**
* Perform in-place softmax conversion.
*/
- def softmax(values: Array[Double]): Unit = {
+ def softmax(array: Array[Double]): Unit =
+ softmax(array, array.length, 0, 1, array)
+
+ /**
+ * Perform softmax conversion.
+ */
+ def softmax(
+ input: Array[Double],
+ n: Int,
+ offset: Int,
+ step: Int,
+ output: Array[Double]): Unit = {
var maxValue = Double.MinValue
- var i = 0
- while (i < values.length) {
- val value = values(i)
- if (value.isPosInfinity) {
- java.util.Arrays.fill(values, 0)
- values(i) = 1.0
+ var i = offset
+ val end = offset + step * n
+ while (i < end) {
+ val v = input(i)
+ if (v.isPosInfinity) {
+ var j = offset
+ while (j < end) { output(j) = 0.0; j += step }
+ output(i) = 1.0
return
- } else if (value > maxValue) {
- maxValue = value
+ } else if (v > maxValue) {
+ maxValue = v
}
- i += 1
+ i += step
}
var sum = 0.0
- i = 0
- while (i < values.length) {
- val exp = math.exp(values(i) - maxValue)
- values(i) = exp
+ i = offset
+ while (i < end) {
+ val exp = math.exp(input(i) - maxValue)
+ output(i) = exp
sum += exp
- i += 1
+ i += step
}
- BLAS.javaBLAS.dscal(values.length, 1.0 / sum, values, 1)
+ i = offset
+ while (i < end) { output(i) /= sum; i += step }
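(For context, a minimal standalone sketch, not part of the patch, of what the new `(input, n, offset, step, output)` parameters address: element k of the logical slice lives at `input(offset + k * step)`, so for example one column of a row-major matrix stored in a flat array can be softmaxed in place. The `+Inf` guard is omitted here for brevity.)

```scala
// A standalone sketch, not the Spark source: same parameter semantics as the
// new overload, with the +Inf guard omitted for brevity.
object StridedSoftmaxDemo {
  def softmax(input: Array[Double], n: Int, offset: Int, step: Int,
      output: Array[Double]): Unit = {
    val end = offset + step * n
    var max = Double.MinValue
    var i = offset
    while (i < end) { if (input(i) > max) max = input(i); i += step }
    var sum = 0.0
    i = offset
    while (i < end) { output(i) = math.exp(input(i) - max); sum += output(i); i += step }
    i = offset
    while (i < end) { output(i) /= sum; i += step }
  }

  def main(args: Array[String]): Unit = {
    // Softmax column 1 of a 2x3 row-major matrix stored flat:
    // offset = column index, step = row width, n = number of rows.
    val m = Array(1.0, 2.0, 3.0,
                  4.0, 5.0, 6.0)
    softmax(m, n = 2, offset = 1, step = 3, output = m)
    println(m.mkString(", "))  // indices 1 and 4 now hold ~0.047 and ~0.953
  }
}
```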
Review comment:
Oh OK, is it just that the answer is slightly different with dscal?
Would it be reasonable to loosen the tolerance instead? But this is fine.
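(To make the question concrete, a minimal sketch in plain Scala, not from the Spark test suite, of where the discrepancy would come from: `dscal(n, 1.0 / sum, values, 1)` multiplies each element by a once-rounded reciprocal, whereas the new loop divides directly, so the two can disagree in the last ulp.)

```scala
// A sketch of the rounding difference, not the Spark test suite.
// dscal(n, 1.0 / sum, values, 1) computes values(i) * (1.0 / sum): the
// reciprocal is rounded once, the product a second time. The new loop
// computes values(i) / sum with a single rounding, so results can differ
// by an ulp, which is enough to fail an exact-equality assertion.
object DscalVsDivide {
  def main(args: Array[String]): Unit = {
    val values = Array(0.1, 0.2, 0.3, 0.4)
    val sum = values.sum  // not exactly 1.0 in binary floating point

    val viaDscal = values.map(_ * (1.0 / sum))  // what dscal effectively did
    val viaLoop  = values.map(_ / sum)          // what the new loop does

    viaDscal.zip(viaLoop).foreach { case (a, b) =>
      println(s"$a vs $b  diff = ${math.abs(a - b)}")
    }
  }
}
```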