Github user sethah commented on a diff in the pull request:
https://github.com/apache/spark/pull/15413#discussion_r85179044
--- Diff: mllib/src/main/scala/org/apache/spark/ml/clustering/GaussianMixture.scala ---
@@ -316,24 +319,129 @@ class GaussianMixture @Since("2.0.0") (
@Since("2.0.0")
def setSeed(value: Long): this.type = set(seed, value)
+ // number of samples per cluster to use when initializing Gaussians
+ private val nSamples = 5
+
@Since("2.0.0")
override def fit(dataset: Dataset[_]): GaussianMixtureModel = {
transformSchema(dataset.schema, logging = true)
- val rdd: RDD[OldVector] = dataset.select(col($(featuresCol))).rdd.map {
- case Row(point: Vector) => OldVectors.fromML(point)
+
+ val sc = dataset.sparkSession.sparkContext
+ val _k = $(k)
+
+ val instances: RDD[Vector] = dataset.select(col($(featuresCol))).rdd.map {
+ case Row(features: Vector) => features
+ }.cache()
+
+ // Extract the number of features.
+ val numFeatures = instances.first().size
+
+ val shouldDistributeGaussians = GaussianMixture.shouldDistributeGaussians(_k, numFeatures)
+
+ // Determine initial weights and corresponding Gaussians.
+ // We start with uniform weights, a random mean from the data, and
+ // diagonal covariance matrices using component variances
+ // derived from the samples.
+ // TODO: Support users supplied initial GMM.
+ val samples = instances.takeSample(withReplacement = true, _k * nSamples, $(seed))
+ val weights: Array[Double] = Array.fill(_k)(1.0 / _k)
+ /**
--- End diff ---
AFAIK we typically use `/* ... */` comments for longer comments inside
method bodies like this
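
For illustration, here is a minimal sketch of how that block could look as a `/* ... */` comment instead of `/** ... */`; the names `instances`, `_k`, `nSamples`, and `$(seed)` are taken from the quoted diff, not introduced here:

```scala
    /*
     * Determine initial weights and corresponding Gaussians.
     *
     * We start with uniform weights, a random mean from the data, and
     * diagonal covariance matrices using component variances derived
     * from the samples.
     */
    val samples = instances.takeSample(withReplacement = true, _k * nSamples, $(seed))
    val weights: Array[Double] = Array.fill(_k)(1.0 / _k)
```

Reserving `/** ... */` for API documentation on declarations and using `/* ... */` for explanatory notes inside method bodies keeps the comment from reading as scaladoc.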
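
Separately, a rough, self-contained sketch of the initialization idea the quoted comment describes: uniform weights, per-component means taken from the sampled points, and diagonal covariances built from per-feature sample variances. This is an illustration under those assumptions, not the PR's actual code; the object name and the toy data below are hypothetical.

```scala
object GmmInitSketch {
  def main(args: Array[String]): Unit = {
    val k = 2            // number of components (hypothetical value)
    val nSamples = 5     // samples drawn per component, as in the diff
    val dim = 2          // toy feature dimension
    val rng = new scala.util.Random(42L)
    // stand-in for the points returned by takeSample
    val samples = Array.fill(k * nSamples)(Array.fill(dim)(rng.nextGaussian()))

    val weights = Array.fill(k)(1.0 / k)   // uniform weights
    val gaussians = Array.tabulate(k) { i =>
      val slice = samples.slice(i * nSamples, (i + 1) * nSamples)
      // mean of this component's slice of the sample
      val mean = Array.tabulate(dim)(j => slice.map(_(j)).sum / nSamples)
      // diagonal covariance: per-feature variance of the slice
      val diagCov = Array.tabulate(dim)(j =>
        slice.map(x => math.pow(x(j) - mean(j), 2)).sum / nSamples)
      (mean, diagCov)
    }

    println(weights.mkString(", "))
    gaussians.foreach { case (m, v) =>
      println(s"mean = ${m.mkString(", ")}; diag(cov) = ${v.mkString(", ")}")
    }
  }
}
```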