maropu commented on a change in pull request #25410: [SPARK-28690][SQL] Add `date_part` function for timestamps/dates URL: https://github.com/apache/spark/pull/25410#discussion_r314198550
########## File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala ########## @@ -1963,3 +1963,64 @@ case class Epoch(child: Expression, timeZoneId: Option[String] = None) defineCodeGen(ctx, ev, c => s"$dtu.getEpoch($c, $zid)") } } + +@ExpressionDescription( + usage = "_FUNC_(field, source) - Extracts a part of the date/timestamp.", + arguments = """ + Arguments: + * field - selects which part of the source should be extracted. Supported string values are: + ["MILLENNIUM", "CENTURY", "DECADE", "YEAR", "QUARTER", "MONTH", + "WEEK", "DAY", "DAYOFWEEK", "DOW", "ISODOW", "DOY", + "HOUR", "MINUTE", "SECOND"] + * source - a date (or timestamp) column from where `field` should be extracted + """, + examples = """ + Examples: + > SELECT _FUNC_('YEAR', TIMESTAMP '2019-08-12 01:00:00.123456'); + 2019 + > SELECT _FUNC_('week', timestamp'2019-08-12 01:00:00.123456'); + 33 + > SELECT _FUNC_('doy', DATE'2019-08-12'); + 224 + """, + since = "3.0.0") +case class DatePart(field: Expression, source: Expression, child: Expression) + extends RuntimeReplaceable { + + def this(field: Expression, source: Expression) { + this(field, source, { + if (!field.foldable) { + throw new AnalysisException("The field parameter needs to be a foldable string value.") Review comment: I checked the MySQL behaviour and it doesn't support the case. So, it's ok to keep as it is.