Github user jkbradley commented on a diff in the pull request:

    https://github.com/apache/spark/pull/5926#discussion_r29738265
  
    --- Diff: python/pyspark/ml/tuning.py ---
    @@ -79,6 +85,173 @@ def build(self):
             return [dict(zip(keys, prod)) for prod in itertools.product(*grid_values)]
     
     
    +class CrossValidator(Estimator):
    +    """
    +    K-fold cross validation.
    +
    +    >>> from pyspark.ml.classification import LogisticRegression
    +    >>> from pyspark.ml.evaluation import BinaryClassificationEvaluator
    +    >>> from pyspark.mllib.linalg import Vectors
    +    >>> dataset = sqlContext.createDataFrame(
    +    ...     [(Vectors.dense([0.0, 1.0]), 0.0),
    +    ...      (Vectors.dense([1.0, 2.0]), 1.0),
    +    ...      (Vectors.dense([0.55, 3.0]), 0.0),
    +    ...      (Vectors.dense([0.45, 4.0]), 1.0),
    +    ...      (Vectors.dense([0.51, 5.0]), 1.0)] * 10,
    +    ...     ["features", "label"])
    +    >>> lr = LogisticRegression()
    +    >>> grid = ParamGridBuilder().addGrid(lr.maxIter, [0, 1, 5]).build()
    +    >>> evaluator = BinaryClassificationEvaluator()
    +    >>> cv = CrossValidator(estimator=lr, estimatorParamMaps=grid, evaluator=evaluator)
    +    >>> cvModel = cv.fit(dataset)
    +    >>> expected = lr.fit(dataset, {lr.maxIter: 5}).transform(dataset)
    +    >>> cvModel.transform(dataset).collect() == expected.collect()
    +    True
    +    """
    +
    +    # a placeholder to make it appear in the generated doc
    +    estimator = Param(Params._dummy(), "estimator", "estimator to be 
cross-validated")
    +
    +    # a placeholder to make it appear in the generated doc
    +    estimatorParamMaps = Param(Params._dummy(), "estimatorParamMaps", "estimator param maps")
    +
    +    # a placeholder to make it appear in the generated doc
    +    evaluator = Param(Params._dummy(), "evaluator", "evaluator for selection")
    +
    +    # a placeholder to make it appear in the generated doc
    +    numFolds = Param(Params._dummy(), "numFolds", "number of folds for cross validation")
    +
    +    @keyword_only
    +    def __init__(self, estimator=None, estimatorParamMaps=None, 
evaluator=None, numFolds=3):
    +        """
    +        __init__(self, estimator=None, estimatorParamMaps=None, 
evaluator=None, numFolds=3)
    +        """
    +        super(CrossValidator, self).__init__()
    +        #: param for estimator to be cross-validated
    +        self.estimator = Param(self, "estimator", "estimator to be cross-validated")
    +        #: param for estimator param maps
    +        self.estimatorParamMaps = Param(self, "estimatorParamMaps", "estimator param maps")
    +        #: param for evaluator for selection
    +        self.evaluator = Param(self, "evaluator", "evaluator for selection")
    +        #: param for number of folds for cross validation
    +        self.numFolds = Param(self, "numFolds", "number of folds for cross validation")
    +        self._setDefault(numFolds=3)
    +        kwargs = self.__init__._input_kwargs
    +        self._set(**kwargs)
    +
    +    @keyword_only
    +    def setParams(self, estimator=None, estimatorParamMaps=None, evaluator=None, numFolds=3):
    +        """
    +        setParams(self, estimator=None, estimatorParamMaps=None, evaluator=None, numFolds=3)
    +        Sets params for cross validator.
    +        """
    +        kwargs = self.setParams._input_kwargs
    +        return self._set(**kwargs)
    +
    +    def setEstimator(self, value):
    +        """
    +        Sets the value of :py:attr:`estimator`.
    +        """
    +        self.paramMap[self.estimator] = value
    +        return self
    +
    +    def getEstimator(self):
    +        """
    +        Gets the value of estimator or its default value.
    +        """
    +        return self.getOrDefault(self.estimator)
    +
    +    def setEstimatorParamMaps(self, value):
    +        """
    +        Sets the value of :py:attr:`estimatorParamMaps`.
    +        """
    +        self.paramMap[self.estimatorParamMaps] = value
    +        return self
    +
    +    def getEstimatorParamMaps(self):
    +        """
    +        Gets the value of estimatorParamMaps or its default value.
    +        """
    +        return self.getOrDefault(self.estimatorParamMaps)
    +
    +    def setEvaluator(self, value):
    +        """
    +        Sets the value of :py:attr:`evaluator`.
    +        """
    +        self.paramMap[self.evaluator] = value
    +        return self
    +
    +    def getEvaluator(self):
    +        """
    +        Gets the value of evaluator or its default value.
    +        """
    +        return self.getOrDefault(self.evaluator)
    +
    +    def setNumFolds(self, value):
    +        """
    +        Sets the value of :py:attr:`numFolds`.
    +        """
    +        self.paramMap[self.numFolds] = value
    +        return self
    +
    +    def getNumFolds(self):
    +        """
    +        Gets the value of numFolds or its default value.
    +        """
    +        return self.getOrDefault(self.numFolds)
    +
    +    def fit(self, dataset, params={}):
    --- End diff ---
    
    Good point; I guess I'm doing that right now as part of [https://issues.apache.org/jira/browse/SPARK-7380].
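
    For reference, here is a minimal plain-Python sketch of the k-fold selection loop
    that a CrossValidator's fit performs. It is illustrative only, not Spark's
    implementation: `select_by_cross_validation`, `fit_fn(train, params)`, and
    `eval_fn(model, test)` are hypothetical stand-ins for the estimator and evaluator
    params above.

        import random

        def select_by_cross_validation(rows, param_maps, fit_fn, eval_fn,
                                       num_folds=3, seed=0):
            # Shuffle once, then split into num_folds disjoint folds round-robin.
            shuffled = list(rows)
            random.Random(seed).shuffle(shuffled)
            folds = [shuffled[i::num_folds] for i in range(num_folds)]
            avg_metrics = []
            for params in param_maps:
                metrics = []
                for i in range(num_folds):
                    # Hold out fold i for evaluation, train on the rest.
                    test = folds[i]
                    train = [row for j, fold in enumerate(folds) if j != i for row in fold]
                    model = fit_fn(train, params)
                    metrics.append(eval_fn(model, test))
                avg_metrics.append(sum(metrics) / num_folds)
            # Refit the best-scoring param map on the full dataset.
            best = max(range(len(param_maps)), key=lambda i: avg_metrics[i])
            return fit_fn(shuffled, param_maps[best])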

