Github user yanboliang commented on a diff in the pull request:

    https://github.com/apache/spark/pull/19525#discussion_r155413347
  
    --- Diff: mllib/src/main/scala/org/apache/spark/ml/param/params.scala ---
    @@ -122,17 +124,33 @@ private[ml] object Param {
     
       /** Decodes a param value from JSON. */
       def jsonDecode[T](json: String): T = {
    -    parse(json) match {
    +    val jValue = parse(json)
    +    jValue match {
           case JString(x) =>
             x.asInstanceOf[T]
           case JObject(v) =>
             val keys = v.map(_._1)
    -        assert(keys.contains("type") && keys.contains("values"),
    -          s"Expect a JSON serialized vector but cannot find fields 'type' 
and 'values' in $json.")
    -        JsonVectorConverter.fromJson(json).asInstanceOf[T]
    +        if (keys.contains("class")) {
    +          implicit val formats = DefaultFormats
    +          val className = (jValue \ "class").extract[String]
    +          className match {
    +            case JsonMatrixConverter.className =>
    +              val checkFields = Array("numRows", "numCols", "values", 
"isTransposed")
    +              require(checkFields.forall(keys.contains), s"Expect a JSON 
serialized Matrix" +
    +                s" but cannot find fields ${checkFields.mkString(", ")} in 
$json.")
    +              JsonMatrixConverter.fromJson(json).asInstanceOf[T]
    +
    +            case s => throw new SparkException(s"unrecognized class $s in 
$json")
    +          }
    +        } else { // Vector does not have class info in json
    --- End diff --
    
    I'd suggest adding more comments here to clarify why a vector doesn't have 
_class_ info in JSON; it would facilitate code maintenance.


---

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to