Github user mengxr commented on a diff in the pull request:

    https://github.com/apache/spark/pull/5048#discussion_r26869614
  
    --- Diff: mllib/src/main/scala/org/apache/spark/mllib/linalg/Matrices.scala 
---
    @@ -102,6 +108,86 @@ sealed trait Matrix extends Serializable {
       private[spark] def foreachActive(f: (Int, Int, Double) => Unit)
     }
     
    +@DeveloperApi
    +private[spark] class MatrixUDT extends UserDefinedType[Matrix] {
    +
    +  override def sqlType: StructType = {
    +    // type: 0 = sparse, 1 = dense
    +    // the dense matrix is built by numRows, numCols, values and 
isTransposed, all of which are
    +    // set as not nullable, except values since in the future, support for 
binary matrices might
    +    // be added for which values are not needed.
    +    // the sparse matrix needs colPtrs and rowIndices, which are set as
    +    // null, while building the dense matrix.
    +    StructType(Seq(
    +      StructField("type", ByteType, nullable = false),
    +      StructField("numRows", IntegerType, nullable = false),
    +      StructField("numCols", IntegerType, nullable = false),
    +      StructField("colPtrs", ArrayType(IntegerType, containsNull = false), 
nullable = true),
    +      StructField("rowIndices", ArrayType(IntegerType, containsNull = 
false), nullable = true),
    +      StructField("values", ArrayType(DoubleType, containsNull = false), 
nullable = true),
    +      StructField("isTransposed", BooleanType, nullable = false)
    +      ))
    +  }
    +
    +  override def serialize(obj: Any): Row = {
    +    val row = new GenericMutableRow(7)
    +    obj match {
    +      case sm: SparseMatrix =>
    +        row.setByte(0, 0)
    +        row.setInt(1, sm.numRows)
    +        row.setInt(2, sm.numCols)
    +        row.update(3, sm.colPtrs.toSeq)
    +        row.update(4, sm.rowIndices.toSeq)
    +        row.update(5, sm.values.toSeq)
    +        row.setBoolean(6, sm.isTransposed)
    +
    +      case dm: DenseMatrix =>
    +        row.setByte(0, 1)
    +        row.setInt(1, dm.numRows)
    +        row.setInt(2, dm.numCols)
    +        row.setNullAt(3)
    +        row.setNullAt(4)
    +        row.update(5, dm.values.toSeq)
    +        row.setBoolean(6, dm.isTransposed)
    +    }
    +    row
    +  }
    +
    +  override def deserialize(datum: Any): Matrix = {
    +    datum match {
    +      // TODO: something wrong with UDT serialization, should never happen.
    +      case m: Matrix => m
    +      case row: Row =>
    +        require(row.length == 7,
    +          s"MatrixUDT.deserialize given row with length ${row.length} but 
requires length == 7")
    +        val tpe = row.getByte(0)
    +        val numRows = row.getInt(1)
    +        val numCols = row.getInt(2)
    +        val values = row.getAs[Iterable[Double]](5).toArray
    +        val isTransposed = row.getBoolean(6)
    +        tpe match {
    +          case 0 =>
    +            val colPtrs = row.getAs[Iterable[Int]](3).toArray
    +            val rowIndices = row.getAs[Iterable[Int]](4).toArray
    +            new SparseMatrix(numRows, numCols, colPtrs, rowIndices, 
values, isTransposed)
    +          case 1 =>
    +            new DenseMatrix(numRows, numCols, values, isTransposed)
    +        }
    +    }
    +  }
    +
    +  override def userClass: Class[Matrix] = classOf[Matrix]
    +
    +  override def equals(o: Any): Boolean = {
    --- End diff --
    
    Since we implement `equals`, let's also add a matching `hashCode` (the equals/hashCode contract requires it); it can simply return a predefined random integer. See
    
    
https://github.com/apache/spark/blob/master/mllib/src/main/scala/org/apache/spark/mllib/linalg/Vectors.scala#L186
    
    (and you can pick a value ;)


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at [email protected] or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to