beliefer commented on code in PR #39017:
URL: https://github.com/apache/spark/pull/39017#discussion_r1046787533
##########
python/pyspark/sql/connect/dataframe.py:
##########
@@ -875,6 +877,60 @@ def to_jcols(
melt = unpivot
def randomSplit(
    self,
    weights: List[float],
    seed: Optional[int] = None,
) -> List["DataFrame"]:
    """Randomly splits this :class:`DataFrame` with the provided weights.

    .. versionadded:: 3.4.0

    Parameters
    ----------
    weights : list
        list of doubles as weights with which to split the :class:`DataFrame`.
        Weights will be normalized if they don't sum up to 1.0.
    seed : int, optional
        The seed for sampling.

    Returns
    -------
    list
        List of DataFrames.

    Raises
    ------
    ValueError
        If any weight is negative, or if the weights sum to a
        non-positive value (including the empty-list case).
    """
    for w in weights:
        if w < 0.0:
            raise ValueError("Weights must be positive. Found weight value: %s" % w)
    seed = seed if seed is not None else random.randint(0, sys.maxsize)
    total = sum(weights)
    if total <= 0:
        # Bug fix: report the non-positive sum itself ("total"), not the stale
        # loop variable "w" (which is the *last* weight, and is undefined when
        # "weights" is empty, turning the intended ValueError into a NameError).
        raise ValueError("Sum of weights must be positive, but got: %s" % total)
    proportions = [x / total for x in weights]
    # Cumulative normalized boundaries: [0.0, p1, p1+p2, ..., 1.0]; each
    # adjacent pair (lower, upper) delimits one split of the DataFrame.
    normalizedCumWeights = [0.0]
    for v in proportions:
        normalizedCumWeights.append(normalizedCumWeights[-1] + v)
    # Every Sample plan shares the same seed so the resulting splits are
    # disjoint and together cover the whole DataFrame.
    result = []
    for lowerBound, upperBound in zip(normalizedCumWeights, normalizedCumWeights[1:]):
        samplePlan = DataFrame.withPlan(
            plan.Sample(
                child=self._plan,
                lower_bound=lowerBound,
                upper_bound=upperBound,
                with_replacement=False,
                seed=int(seed),
            ),
            session=self._session,
        )
        result.append(samplePlan)

    return result
Review Comment:
`randomSplit` on `Dataset` returns multiple `Dataset`s; this method simply
mirrors that existing behavior.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]