Github user jkbradley commented on a diff in the pull request:

    https://github.com/apache/spark/pull/2451#discussion_r17801735
  
    --- Diff: mllib/src/main/scala/org/apache/spark/mllib/linalg/Matrices.scala ---
    @@ -37,11 +44,197 @@ trait Matrix extends Serializable {
       private[mllib] def toBreeze: BM[Double]
     
       /** Gets the (i, j)-th element. */
    -  private[mllib] def apply(i: Int, j: Int): Double = toBreeze(i, j)
    +  private[mllib] def apply(i: Int, j: Int): Double
    +
    +  /** Return the index for the (i, j)-th element in the backing array. */
    +  private[mllib] def index(i: Int, j: Int): Int
    +
    +  /** Update element at (i, j) */
    +  private[mllib] def update(i: Int, j: Int, v: Double): Unit
    +
    +  /** Get a deep copy of the matrix. */
    +  def copy: Matrix
     
    +  /** Convenience method for `Matrix`-`Matrix` multiplication.
    +    * Note: `SparseMatrix`-`SparseMatrix` multiplication is not supported */
    +  def multiply(y: Matrix): DenseMatrix = {
    +    val C: DenseMatrix = DenseMatrix.zeros(numRows, y.numCols)
    +    BLAS.gemm(false, false, 1.0, this, y, 0.0, C)
    +    C
    +  }
    +
    +  /** Convenience method for `Matrix`-`DenseVector` multiplication. */
    +  def multiply(y: DenseVector): DenseVector = {
    +    val output = new DenseVector(new Array[Double](numRows))
    +    BLAS.gemv(1.0, this, y, 0.0, output)
    +    output
    +  }
    +
    +  /** Convenience method for `Matrix`^T^-`Matrix` multiplication.
    +    * Note: `SparseMatrix`-`SparseMatrix` multiplication is not supported */
    +  def transposeMultiply(y: Matrix): DenseMatrix = {
    +    val C: DenseMatrix = DenseMatrix.zeros(numCols, y.numCols)
    +    BLAS.gemm(true, false, 1.0, this, y, 0.0, C)
    +    C
    +  }
    +
    +  /** Convenience method for `Matrix`^T^-`DenseVector` multiplication. */
    +  def transposeMultiply(y: DenseVector): DenseVector = {
    +    val output = new DenseVector(new Array[Double](numCols))
    +    BLAS.gemv(true, 1.0, this, y, 0.0, output)
    +    output
    +  }
    +
    +  /** A human-readable representation of the matrix. */
       override def toString: String = toBreeze.toString()
    +
    +  private[mllib] def map(f: Double => Double): Matrix
    +
    +  private[mllib] def update(f: Double => Double): Matrix
    +
    +  private[mllib] def elementWiseOperateOnColumnsInPlace(f: (Double, Double) => Double,
    +                                                        y: Matrix): Matrix
    +
    +  private[mllib] def elementWiseOperateOnRowsInPlace(f: (Double, Double) => Double,
    +                                                     y: Matrix): Matrix
    +
    +  private[mllib] def elementWiseOperateInPlace(f: (Double, Double) => Double, y: Matrix): Matrix
    +
    +  private[mllib] def elementWiseOperateScalarInPlace(f: (Double, Double) => Double,
    +                                                     y: Double): Matrix
    +
    +  private[mllib] def operateInPlace(f: (Double, Double) => Double, y: Matrix): Matrix
    +
    +  private[mllib] def elementWiseOperateOnColumns(f: (Double, Double) => Double, y: Matrix): Matrix
    +
    +  private[mllib] def elementWiseOperateOnRows(f: (Double, Double) => Double, y: Matrix): Matrix
    +
    +  private[mllib] def elementWiseOperate(f: (Double, Double) => Double, y: Matrix): Matrix
    +
    +  private[mllib] def elementWiseOperateScalar(f: (Double, Double) => Double, y: Double): Matrix
    +
    +  private[mllib] def operate(f: (Double, Double) => Double, y: Matrix): Matrix
    +
    +  private[mllib] def *=(y: Matrix) = operateInPlace(_ * _, y)
    +
    +  private[mllib] def *(y: Matrix) = operate(_ * _, y)
    +
    +  private[mllib] def +=(y: Matrix) = operateInPlace(_ + _, y)
    +
    +  private[mllib] def +(y: Matrix) = operate(_ + _, y)
    +
    +  private[mllib] def -=(y: Matrix) = operateInPlace(_ - _, y)
    +
    +  private[mllib] def -(y: Matrix) = operate(_ - _, y)
    +
    +  private[mllib] def /=(y: Matrix) = operateInPlace(_ / _, y)
    +
    +  private[mllib] def /(y: Matrix) = operate(_ / _, y)
    +
    +  private[mllib] def *=(y: Double) = elementWiseOperateScalarInPlace(_ * _, y)
    +
    +  private[mllib] def +=(y: Double) = elementWiseOperateScalarInPlace(_ + _, y)
    +
    +  private[mllib] def -=(y: Double) = elementWiseOperateScalarInPlace(_ - _, y)
    +
    +  private[mllib] def /=(y: Double) = elementWiseOperateScalarInPlace(_ / _, y)
    +
    +  private[mllib] def *(y: Double) = elementWiseOperateScalar(_ * _, y)
    +
    +  private[mllib] def +(y: Double) = elementWiseOperateScalar(_ + _, y)
    +
    +  private[mllib] def -(y: Double) = elementWiseOperateScalar(_ - _, y)
    +
    +  private[mllib] def /(y: Double) = elementWiseOperateScalar(_ / _, y)
    +
    +  private[mllib] def neg: Matrix
    +
    +  private[mllib] def negInPlace: Matrix
    +
    +  /** Element-wise comparison of each entry against the scalar `v` using `f`
    +    * (e.g. less-than-or-equal-to). Outputs binary `DenseMatrix` */
    +  private[mllib] def compare(v: Double, f: (Double, Double) => Boolean): DenseMatrix
    +
    +  /** Returns the p-th norm for each column */
    +  private[mllib] def colNorms(p: Double): Matrix
    +
    +  private[mllib] def colSums: DenseMatrix = colSums(false)
    +
    +  private[mllib] def colSums(absolute: Boolean, skipRows: DenseMatrix = null): DenseMatrix = {
    +    val sums = new DenseMatrix(1, numCols, Array.fill(numCols)(0.0))
    +    var j = 0
    +    this match {
    +      case sparse: SparseMatrix =>
    +        while (j < numCols){
    +          var i = sparse.colPtrs(j)
    +          val indEnd = sparse.colPtrs(j + 1)
    +          while (i < indEnd){
    +            var v = sparse.values(i)
    +            if (absolute) v = math.abs(v)
    +            sums.values(j) += v
    +            i += 1
    +          }
    +          j += 1
    +        }
    +      case dense: DenseMatrix =>
    +        while (j < numCols){
    +          var i = 0
    +          while (i < numRows){
    +            if (skipRows == null) {
    +              var v = dense.values(index(i, j))
    +              if (absolute) v = math.abs(v)
    +              sums.values(j) += v
    +            } else {
    +              if (skipRows(i) != 1.0) {
    +                var v = dense.values(index(i, j))
    +                if (absolute) v = math.abs(v)
    +                sums.values(j) += v
    +              }
    +            }
    +
    +            i += 1
    +          }
    +          j += 1
    +        }
    +    }
    +    sums
    +  }
    +
    +  private[mllib] def rowSums: DenseMatrix = rowSums(false)
    --- End diff --
    
    Same as colSums: Why not return a DenseVector?
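    
    For illustration only, a rough sketch of that suggestion (not part of the PR; it drops the
    `skipRows` parameter and reuses the trait's `numRows`, `numCols`, and `index` members) could
    look like:
    
        private[mllib] def colSums(absolute: Boolean = false): DenseVector = {
          val sums = new Array[Double](numCols)
          var j = 0
          this match {
            case sparse: SparseMatrix =>
              while (j < numCols) {
                // Only walk the stored entries of column j.
                var i = sparse.colPtrs(j)
                val indEnd = sparse.colPtrs(j + 1)
                while (i < indEnd) {
                  val v = sparse.values(i)
                  sums(j) += (if (absolute) math.abs(v) else v)
                  i += 1
                }
                j += 1
              }
            case dense: DenseMatrix =>
              while (j < numCols) {
                var i = 0
                while (i < numRows) {
                  val v = dense.values(index(i, j))
                  sums(j) += (if (absolute) math.abs(v) else v)
                  i += 1
                }
                j += 1
              }
          }
          new DenseVector(sums)
        }
    
    `rowSums` could return a `DenseVector` of length `numRows` in the same way, which avoids
    wrapping what is really a vector of sums in a 1-row `DenseMatrix`.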


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to