Github user viirya commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21009#discussion_r180306495
  
    --- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala ---
    @@ -456,6 +448,49 @@ case class DayOfWeek(child: Expression) extends UnaryExpression with ImplicitCas
       }
     }
     
    +// scalastyle:off line.size.limit
    +@ExpressionDescription(
    +  usage = "_FUNC_(date) - Returns the day of the week for date/timestamp (0 = Monday, 1 = Tuesday, ..., 6 = Sunday).",
    +  examples = """
    +    Examples:
    +      > SELECT _FUNC_('2009-07-30');
    +       3
    +  """,
    +  since = "2.4.0")
    +// scalastyle:on line.size.limit
    +case class WeekDay(child: Expression) extends DayWeek {
    +
    +  override protected def nullSafeEval(date: Any): Any = {
    +    c.setTimeInMillis(date.asInstanceOf[Int] * 1000L * 3600L * 24L)
    +    (c.get(Calendar.DAY_OF_WEEK) + 5) % 7
    +  }
    +
    +  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    +    nullSafeCodeGen(ctx, ev, time => {
    +      val cal = classOf[Calendar].getName
    +      val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
    +      val c = "calWeekDay"
    +      ctx.addImmutableStateIfNotExists(cal, c,
    +        v => s"""$v = $cal.getInstance($dtu.getTimeZone("UTC"));""")
    +      s"""
    +        $c.setTimeInMillis($time * 1000L * 3600L * 24L);
    +        ${ev.value} = ($c.get($cal.DAY_OF_WEEK) + 5) % 7;
    +      """
    +    })
    +  }
    +}
    +
    +abstract class DayWeek extends UnaryExpression with ImplicitCastInputTypes {
    +
    +  override def inputTypes: Seq[AbstractDataType] = Seq(DateType)
    +
    +  override def dataType: DataType = IntegerType
    +
    +  @transient protected lazy val c: Calendar = {
    --- End diff --
    
    nit: maybe `calendar` instead of just `c`.
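    
    For illustration only, a minimal sketch of what the rename could look like. The diff above truncates the lazy val's body, so the UTC initializer here is inferred from the `$dtu.getTimeZone("UTC")` call in the codegen path; this is a sketch, not the actual PR code:
    
        import java.util.Calendar
        
        import org.apache.spark.sql.catalyst.expressions.{ImplicitCastInputTypes, UnaryExpression}
        import org.apache.spark.sql.catalyst.util.DateTimeUtils
        import org.apache.spark.sql.types.{AbstractDataType, DataType, DateType, IntegerType}
        
        abstract class DayWeek extends UnaryExpression with ImplicitCastInputTypes {
        
          override def inputTypes: Seq[AbstractDataType] = Seq(DateType)
        
          override def dataType: DataType = IntegerType
        
          // Renamed from `c` to `calendar`: a shared UTC calendar for the
          // interpreted (non-codegen) path. The generated code keeps its own
          // `calWeekDay` state, so only nullSafeEval call sites change, e.g.
          //   calendar.setTimeInMillis(date.asInstanceOf[Int] * 1000L * 3600L * 24L)
          //   (calendar.get(Calendar.DAY_OF_WEEK) + 5) % 7
          // Calendar.DAY_OF_WEEK is 1 (Sunday) .. 7 (Saturday), so the
          // `+ 5) % 7` remap yields 0 (Monday) .. 6 (Sunday).
          @transient protected lazy val calendar: Calendar =
            Calendar.getInstance(DateTimeUtils.getTimeZone("UTC"))
        }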


---
