Github user dongjoon-hyun commented on a diff in the pull request:
https://github.com/apache/spark/pull/22566#discussion_r220962782
--- Diff: sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeColumnCommand.scala ---
@@ -33,11 +33,13 @@ import org.apache.spark.sql.types._
 /**
  * Analyzes the given columns of the given table to generate statistics, which will be used in
- * query optimizations.
+ * query optimizations. Parameter `allColumns` may be specified to generate statistics of all the
+ * columns of a given table.
  */
 case class AnalyzeColumnCommand(
     tableIdent: TableIdentifier,
-    columnNames: Seq[String]) extends RunnableCommand {
+    columnNames: Option[Seq[String]],
+    allColumns: Boolean = false ) extends RunnableCommand {
--- End diff --
nit. `false )` -> `false)` (drop the space before the closing parenthesis).
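
For context, a minimal usage sketch of the new signature, assuming the parameter names shown in the diff above; the table and column names here are illustrative only:

```scala
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.execution.command.AnalyzeColumnCommand

// Analyze only the named columns (the pre-existing path).
val someColumns = AnalyzeColumnCommand(
  TableIdentifier("t1"),                      // illustrative table name
  columnNames = Some(Seq("key", "value")),
  allColumns = false)

// Analyze every column of the table; no explicit column list is given.
val everyColumn = AnalyzeColumnCommand(
  TableIdentifier("t1"),
  columnNames = None,
  allColumns = true)
```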
---