Github user mengxr commented on a diff in the pull request:
https://github.com/apache/spark/pull/3200#discussion_r23582382
--- Diff: mllib/src/main/scala/org/apache/spark/mllib/linalg/distributed/BlockMatrix.scala ---
@@ -0,0 +1,242 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.mllib.linalg.distributed
+
+import breeze.linalg.{DenseMatrix => BDM}
+
+import org.apache.spark.{Logging, Partitioner}
+import org.apache.spark.mllib.linalg._
+import org.apache.spark.mllib.rdd.RDDFunctions._
+import org.apache.spark.rdd.RDD
+import org.apache.spark.storage.StorageLevel
+
+/**
+ * A grid partitioner, which stores every block in a separate partition.
+ *
+ * @param numRowBlocks Number of blocks that form the rows of the matrix.
+ * @param numColBlocks Number of blocks that form the columns of the matrix.
+ */
+private[mllib] class GridPartitioner(
+ val numRowBlocks: Int,
+ val numColBlocks: Int,
+ val numParts: Int) extends Partitioner {
+ // Having the number of partitions greater than the number of sub-matrices does not help
+ override val numPartitions = math.min(numParts, numRowBlocks * numColBlocks)
+
+ /**
+ * Returns the index of the partition the SubMatrix belongs to. Tries to achieve block-wise
+ * partitioning.
+ *
+ * @param key The key for the SubMatrix. Can be its position in the grid (its column-major
+ *            index) or a tuple of three integers that are the final row index after the
+ *            multiplication, the index of the block to multiply with, and the final column
+ *            index after the multiplication.
+ * @return The index of the partition which the SubMatrix belongs to.
+ */
+ override def getPartition(key: Any): Int = {
+ key match {
+ case (blockRowIndex: Int, blockColIndex: Int) =>
+ getBlockId(blockRowIndex, blockColIndex)
+ case (blockRowIndex: Int, innerIndex: Int, blockColIndex: Int) =>
+ getBlockId(blockRowIndex, blockColIndex)
+ case _ =>
+ throw new IllegalArgumentException(s"Unrecognized key. key: $key")
+ }
+ }
+
+ /** Partitions sub-matrices as blocks with neighboring sub-matrices. */
+ private def getBlockId(blockRowIndex: Int, blockColIndex: Int): Int = {
+ val totalBlocks = numRowBlocks * numColBlocks
+ // Gives the number of blocks that need to be in each partition
+ val partitionRatio = math.ceil(totalBlocks * 1.0 / numPartitions).toInt
+ // Number of neighboring blocks to take in each row
+ val subBlocksPerRow = math.ceil(numRowBlocks * 1.0 / partitionRatio).toInt
+ // Number of neighboring blocks to take in each column
+ val subBlocksPerCol = math.ceil(numColBlocks * 1.0 / partitionRatio).toInt
+ // Coordinates of the block
+ val i = blockRowIndex / subBlocksPerRow
+ val j = blockColIndex / subBlocksPerCol
+ val blocksPerRow = math.ceil(numRowBlocks * 1.0 / subBlocksPerRow).toInt
+ j * blocksPerRow + i
+ }
+
+ /** Checks whether the partitioners have the same characteristics */
+ override def equals(obj: Any): Boolean = {
+ obj match {
+ case r: GridPartitioner =>
+ (this.numRowBlocks == r.numRowBlocks) && (this.numColBlocks == r.numColBlocks) &&
+ (this.numPartitions == r.numPartitions)
+ case _ =>
+ false
+ }
+ }
+}
+
+/**
+ * Represents a distributed matrix in blocks of local matrices.
+ *
+ * @param rdd The RDD of SubMatrices (local matrices) that form this matrix
+ * @param nRows Number of rows of this matrix
+ * @param nCols Number of columns of this matrix
+ * @param numRowBlocks Number of blocks that form the rows of this matrix
+ * @param numColBlocks Number of blocks that form the columns of this matrix
+ * @param rowsPerBlock Number of rows that make up each block. The blocks forming the final
+ *                     rows are not required to have the given number of rows
+ * @param colsPerBlock Number of columns that make up each block. The blocks forming the final
+ *                     columns are not required to have the given number of columns
+ */
+class BlockMatrix(
+ val rdd: RDD[((Int, Int), Matrix)],
+ private var nRows: Long,
+ private var nCols: Long,
+ val numRowBlocks: Int,
+ val numColBlocks: Int,
+ val rowsPerBlock: Int,
+ val colsPerBlock: Int) extends DistributedMatrix with Logging {
+
+ private type SubMatrix = ((Int, Int), Matrix) // ((blockRowIndex, blockColIndex), matrix)
+
+ /**
+ * Alternate constructor for BlockMatrix without the input of the number of rows and columns.
+ *
+ * @param rdd The RDD of SubMatrices (local matrices) that form this matrix
+ * @param numRowBlocks Number of blocks that form the rows of this matrix
+ * @param numColBlocks Number of blocks that form the columns of this matrix
+ * @param rowsPerBlock Number of rows that make up each block. The blocks forming the final
+ *                     rows are not required to have the given number of rows
+ * @param colsPerBlock Number of columns that make up each block. The blocks forming the final
+ *                     columns are not required to have the given number of columns
+ */
+ def this(
+ rdd: RDD[((Int, Int), Matrix)],
+ numRowBlocks: Int,
+ numColBlocks: Int,
+ rowsPerBlock: Int,
+ colsPerBlock: Int) = {
+ this(rdd, 0L, 0L, numRowBlocks, numColBlocks, rowsPerBlock, colsPerBlock)
+ }
+
+ private[mllib] var partitioner: GridPartitioner =
+ new GridPartitioner(numRowBlocks, numColBlocks, rdd.partitions.length)
+
+ private lazy val dims: (Long, Long) = getDim
+
+ override def numRows(): Long = {
+ if (nRows <= 0L) nRows = dims._1
+ nRows
+ }
+
+ override def numCols(): Long = {
+ if (nCols <= 0L) nCols = dims._2
+ nCols
+ }
+
+ /** Returns the dimensions of the matrix. */
+ private def getDim: (Long, Long) = {
+ case class MatrixMetaData(var rowIndex: Int, var colIndex: Int,
+ var numRows: Int, var numCols: Int)
+ // picks the sizes of the matrix with the maximum indices
+ def pickSizeByGreaterIndex(example: MatrixMetaData, base: MatrixMetaData): MatrixMetaData = {
--- End diff ---
If we modify `base` in-place, we should put `base` as the first parameter.
This is the convention used in Spark aggregation.
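For reference, this matches the shape of `RDD.aggregate`, whose signature is roughly:
~~~
def aggregate[U](zeroValue: U)(seqOp: (U, T) => U, combOp: (U, U) => U): U
~~~
Both `seqOp` and `combOp` take the (mutable) accumulator as their first argument.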
We should simplify this code block. Essentially we want to find the largest row index and column index:
~~~
// key is (blockRowIndex, blockColIndex); use Long math to avoid Int overflow
val (rows, cols) = rdd.map { case ((blockRowIndex, blockColIndex), mat) =>
  (blockRowIndex.toLong * rowsPerBlock + mat.numRows,
    blockColIndex.toLong * colsPerBlock + mat.numCols)
}.reduce((x0, x1) => (math.max(x0._1, x1._1), math.max(x0._2, x1._2)))
(math.max(rows, nRows), math.max(cols, nCols))
~~~
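As a quick local sanity check (made-up block sizes, plain Scala collections standing in for the RDD):
~~~
// 2 x 2 grid of blocks: ((blockRowIndex, blockColIndex), (numRows, numCols))
val blocks = Seq(((0, 0), (10, 10)), ((1, 0), (7, 10)), ((0, 1), (10, 4)), ((1, 1), (7, 4)))
val (rowsPerBlock, colsPerBlock) = (10, 10)
val (rows, cols) = blocks.map { case ((i, j), (m, n)) =>
  (i.toLong * rowsPerBlock + m, j.toLong * colsPerBlock + n)
}.reduce((x0, x1) => (math.max(x0._1, x1._1), math.max(x0._2, x1._2)))
// rows == 17L (10 full rows + 7 in the last block row), cols == 14L
~~~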