keypointt commented on a change in pull request #25354: [SPARK-28612][SQL] Add DataFrameWriterV2 API
URL: https://github.com/apache/spark/pull/25354#discussion_r316012778
##########
File path: sql/core/src/main/scala/org/apache/spark/sql/functions.scala
##########
@@ -3942,6 +3943,61 @@ object functions {
*/
def to_csv(e: Column): Column = to_csv(e, Map.empty[String, String].asJava)
+
+  /**
+   * A transform for timestamps and dates to partition data into years.
+   *
+   * @group partition_transforms
+   * @since 3.0.0
+   */
+  def years(e: Column): Column = withExpr { Years(e.expr) }
+
+  /**
+   * A transform for timestamps and dates to partition data into months.
+   *
+   * @group partition_transforms
+   * @since 3.0.0
+   */
+  def months(e: Column): Column = withExpr { Months(e.expr) }
+
+  /**
+   * A transform for timestamps and dates to partition data into days.
+   *
+   * @group partition_transforms
+   * @since 3.0.0
+   */
+  def days(e: Column): Column = withExpr { Days(e.expr) }
+
+  /**
+   * A transform for timestamps to partition data into hours.
+   *
+   * @group partition_transforms
+   * @since 3.0.0
+   */
+  def hours(e: Column): Column = withExpr { Hours(e.expr) }
+
+  /**
+   * A transform for any type that partitions by a hash of the input column.
+   *
+   * @group partition_transforms
+   * @since 3.0.0
+   */
+  def bucket(numBuckets: Column, e: Column): Column = withExpr {
+    numBuckets.expr match {
+      case lit @ Literal(_, IntegerType) =>
+        Bucket(lit, e.expr)
+      case _ =>
+        throw new AnalysisException(s"Invalid number of buckets: $numBuckets")
+    }
+  }
Review comment:
also add column information in exception msg for debugging, like `s"Invalid number of buckets: $numBuckets, for column: $e"`?
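A minimal sketch of what the suggested change could look like, keeping the `match` from the diff as-is; the exact message wording is of course up to the PR author:

```scala
def bucket(numBuckets: Column, e: Column): Column = withExpr {
  numBuckets.expr match {
    case lit @ Literal(_, IntegerType) =>
      Bucket(lit, e.expr)
    case _ =>
      // Also name the target column so the failing transform is easy to identify.
      throw new AnalysisException(
        s"Invalid number of buckets: $numBuckets, for column: $e")
  }
}
```

For context, these transforms are intended to be passed to `partitionedBy` on the DataFrameWriterV2 API this PR adds; a hypothetical usage (the table identifier and column names below are illustrative placeholders):

```scala
import org.apache.spark.sql.functions.{bucket, col, lit, years}

// `df` is an existing DataFrame; the table and columns are examples only.
df.writeTo("catalog.db.events")
  .partitionedBy(years(col("ts")), bucket(lit(16), col("id")))
  .create()
```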