Github user mengxr commented on a diff in the pull request:
https://github.com/apache/spark/pull/9848#discussion_r45527882
--- Diff:
mllib/src/main/scala/org/apache/spark/ml/tuning/CrossValidator.scala ---
@@ -131,6 +137,181 @@ class CrossValidator(override val uid: String)
extends Estimator[CrossValidatorM
}
copied
}
+
+ // Currently, this only works if all [[Param]]s in
[[estimatorParamMaps]] are simple types.
+ // E.g., this may fail if a [[Param]] is an instance of an [[Estimator]].
+ // However, this case should be unusual.
+ @Since("1.6.0")
+ override def write: MLWriter = new
CrossValidator.CrossValidatorWriter(this)
+}
+
+@Since("1.6.0")
+object CrossValidator extends MLReadable[CrossValidator] {
+
+ @Since("1.6.0")
+ override def read: MLReader[CrossValidator] = new CrossValidatorReader
+
+ @Since("1.6.0")
+ override def load(path: String): CrossValidator = super.load(path)
+
+ private[CrossValidator] class CrossValidatorWriter(instance:
CrossValidator) extends MLWriter {
+
+ SharedReadWrite.validateParams(instance)
+
+ override protected def saveImpl(path: String): Unit =
+ SharedReadWrite.saveImpl(path, instance, sc, JNothing)
+ }
+
+ private class CrossValidatorReader extends MLReader[CrossValidator] {
+
+ /** Checked against metadata when loading model */
+ private val className = classOf[CrossValidator].getName
+
+ override def load(path: String): CrossValidator = {
+ val (metadata, estimator, evaluator, estimatorParamMaps, numFolds) =
+ SharedReadWrite.load(path, sc, className)
+ new CrossValidator(metadata.uid)
+ .setEstimator(estimator)
+ .setEvaluator(evaluator)
+ .setEstimatorParamMaps(estimatorParamMaps)
+ .setNumFolds(numFolds)
+ }
+ }
+
+ private object CrossValidatorReader {
+ /**
+ * Examine the given estimator (which may be a compound estimator) and
extract a mapping
+ * from UIDs to corresponding [[Params]] instances.
+ */
+ def getUidMap(instance: Params): Map[String, Params] = {
+ val subStages: Array[Params] = instance match {
+ case p: Pipeline => p.getStages.asInstanceOf[Array[Params]]
+ case pm: PipelineModel => pm.stages.asInstanceOf[Array[Params]]
+ case v: ValidatorParams => Array(v.getEstimator, v.getEvaluator)
+ case _: Params => Array()
+ }
+ val subStageMaps =
subStages.map(getUidMap).foldLeft(Map.empty[String, Params])(_ ++ _)
+ Map(instance.uid -> instance) ++ subStageMaps
+ }
+ }
+
+ private[tuning] object SharedReadWrite {
+
+ /**
+ * Check that [[CrossValidator.evaluator]] and
[[CrossValidator.estimator]] are Writable.
+ * This does not check [[CrossValidator.estimatorParamMaps]].
+ */
+ def validateParams(instance: ValidatorParams): Unit = {
+ def checkElement(elem: Params, name: String): Unit = elem match {
+ case stage: MLWritable => // good
+ case other =>
+ throw new UnsupportedOperationException("CrossValidator write
will fail " +
+ s" because it contains $name which does not implement
Writable." +
+ s" Non-Writable $name: ${other.uid} of type ${other.getClass}")
+ }
+ checkElement(instance.getEvaluator, "evaluator")
+ checkElement(instance.getEstimator, "estimator")
+ // Check to make sure all Params apply to this estimator. Throw an
error if any do not.
+ // Extraneous Params would cause problems when loading the
estimatorParamMaps.
+ val uidToInstance: Map[String, Params] =
CrossValidatorReader.getUidMap(instance)
+ instance.getEstimatorParamMaps.foreach { case pMap: ParamMap =>
+ pMap.toSeq.foreach { case ParamPair(p, v) =>
+ require(uidToInstance.contains(p.parent), s"CrossValidator save
requires all Params in" +
+ s" estimatorParamMaps to apply to this CrossValidator, its
Estimator, or its" +
+ s" Evaluator. An extraneous Param was found: $p")
+ }
+ }
+ }
+
+ private[tuning] def saveImpl(
+ path: String,
+ instance: CrossValidatorParams,
+ sc: SparkContext,
+ extraMetadata: JValue): Unit = {
+ import org.json4s.JsonDSL._
+
+ val uid = instance.uid
+ val cls = instance.getClass.getName
+ val estimatorParamMapsJson = compact(render(
+ instance.getEstimatorParamMaps.map { case paramMap =>
+ paramMap.toSeq.map { case ParamPair(p, v) =>
+ Map("parent" -> p.parent, "name" -> p.name, "value" ->
p.jsonEncode(v))
+ }
+ }.toSeq
+ ))
+ val jsonParams = List(
+ "numFolds" ->
parse(instance.numFolds.jsonEncode(instance.getNumFolds)),
+ "estimatorParamMaps" -> parse(estimatorParamMapsJson)
+ )
+ val metadata = ("class" -> cls) ~
+ ("timestamp" -> System.currentTimeMillis()) ~
+ ("sparkVersion" -> sc.version) ~
+ ("uid" -> uid) ~
+ ("paramMap" -> jsonParams) ~
+ ("extraMetadata" -> extraMetadata)
--- End diff --
It is not easy to understand what `extraMetadata` means by looking at the
generated JSON file. Since this is `avgMetrics`, shall we call it `avgMetrics`
or save it as part of `data`?
---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes to do so, or if the feature is enabled but not working,
please contact infrastructure at [email protected] or file a JIRA
ticket with INFRA.
---
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]