alamb commented on code in PR #3034:
URL: https://github.com/apache/arrow-datafusion/pull/3034#discussion_r938810596
##########
datafusion/physical-expr/src/datetime_expressions.rs:
##########
@@ -275,6 +275,93 @@ pub fn date_trunc(args: &[ColumnarValue]) ->
Result<ColumnarValue> {
})
}
+fn date_bin_single(stride: i64, source: i64, origin: i64) -> i64 {
+ let time_diff = source - origin;
+ // distance to bin
+ let time_delta = time_diff - (time_diff % stride);
+
+ let time_delta = if time_diff < 0 && stride > 1 {
+ // The origin is later than the source timestamp, round down to the
previous bin
+ time_delta - stride
+ } else {
+ time_delta
+ };
+
+ origin + time_delta
+}
+
+/// DATE_BIN sql function
+pub fn date_bin(args: &[ColumnarValue]) -> Result<ColumnarValue> {
+ if args.len() != 3 {
+ return Err(DataFusionError::Execution(
+ "Expected three arguments for DATE_BIN".to_string(),
+ ));
+ }
+
+ let (stride, array, origin) = (&args[0], &args[1], &args[2]);
+
+ let stride = match stride {
+ ColumnarValue::Scalar(ScalarValue::IntervalDayTime(Some(v))) => {
+ let (days, ms) = IntervalDayTimeType::to_parts(*v);
+ let nanos = (Duration::days(days as i64) +
Duration::milliseconds(ms as i64))
+ .num_nanoseconds();
+ match nanos {
+ Some(v) => v,
+ _ => {
+ return Err(DataFusionError::Execution(
+ "stride of DATE_BIN is too large".to_string(),
+ ))
+ }
+ }
+ }
+ _ => {
+ return Err(DataFusionError::Execution(
+ "stride of DATE_BIN is an invalid type".to_string(),
+ ))
+ }
+ };
+
+ let origin = match origin {
+ ColumnarValue::Scalar(ScalarValue::TimestampNanosecond(Some(v), _)) =>
*v,
+ _ => {
+ return Err(DataFusionError::Execution(
+ "origin of DATE_BIN is an invalid type".to_string(),
+ ))
+ }
+ };
+
+ let f = |x: Option<i64>| x.map(|x| date_bin_single(stride, x, origin));
+
+ Ok(match array {
+ ColumnarValue::Scalar(ScalarValue::TimestampNanosecond(v, tz_opt)) => {
+ ColumnarValue::Scalar(ScalarValue::TimestampNanosecond(f(*v),
tz_opt.clone()))
+ }
+ ColumnarValue::Array(array) => match array.data_type() {
+ DataType::Timestamp(TimeUnit::Nanosecond, _) => {
+ let array = array
+ .as_any()
+ .downcast_ref::<TimestampNanosecondArray>()
+ .unwrap()
+ .iter()
+ .map(f)
+ .collect::<TimestampNanosecondArray>();
Review Comment:
👍
##########
datafusion/physical-expr/src/datetime_expressions.rs:
##########
@@ -275,6 +275,93 @@ pub fn date_trunc(args: &[ColumnarValue]) ->
Result<ColumnarValue> {
})
}
+fn date_bin_single(stride: i64, source: i64, origin: i64) -> i64 {
+ let time_diff = source - origin;
+ // distance to bin
+ let time_delta = time_diff - (time_diff % stride);
+
+ let time_delta = if time_diff < 0 && stride > 1 {
+ // The origin is later than the source timestamp, round down to the
previous bin
+ time_delta - stride
+ } else {
+ time_delta
+ };
+
+ origin + time_delta
+}
+
+/// DATE_BIN sql function
+pub fn date_bin(args: &[ColumnarValue]) -> Result<ColumnarValue> {
+ if args.len() != 3 {
+ return Err(DataFusionError::Execution(
+ "Expected three arguments for DATE_BIN".to_string(),
+ ));
+ }
+
+ let (stride, array, origin) = (&args[0], &args[1], &args[2]);
+
+ let stride = match stride {
+ ColumnarValue::Scalar(ScalarValue::IntervalDayTime(Some(v))) => {
+ let (days, ms) = IntervalDayTimeType::to_parts(*v);
+ let nanos = (Duration::days(days as i64) +
Duration::milliseconds(ms as i64))
+ .num_nanoseconds();
+ match nanos {
+ Some(v) => v,
+ _ => {
+ return Err(DataFusionError::Execution(
+ "stride of DATE_BIN is too large".to_string(),
+ ))
+ }
+ }
+ }
+ _ => {
+ return Err(DataFusionError::Execution(
+ "stride of DATE_BIN is an invalid type".to_string(),
+ ))
+ }
+ };
+
+ let origin = match origin {
Review Comment:
Same comment applies here as for stride above -- I recommend making the
error message more explicit, since there is a difference between passing a
constant of the wrong type (which the user can possibly do something about) and
trying to use an array (which the user cannot do anything about)
##########
datafusion/physical-expr/src/datetime_expressions.rs:
##########
@@ -275,6 +275,93 @@ pub fn date_trunc(args: &[ColumnarValue]) ->
Result<ColumnarValue> {
})
}
+fn date_bin_single(stride: i64, source: i64, origin: i64) -> i64 {
+ let time_diff = source - origin;
+ // distance to bin
+ let time_delta = time_diff - (time_diff % stride);
+
+ let time_delta = if time_diff < 0 && stride > 1 {
+ // The origin is later than the source timestamp, round down to the
previous bin
+ time_delta - stride
+ } else {
+ time_delta
+ };
+
+ origin + time_delta
+}
+
+/// DATE_BIN sql function
+pub fn date_bin(args: &[ColumnarValue]) -> Result<ColumnarValue> {
+ if args.len() != 3 {
+ return Err(DataFusionError::Execution(
+ "Expected three arguments for DATE_BIN".to_string(),
+ ));
+ }
+
+ let (stride, array, origin) = (&args[0], &args[1], &args[2]);
+
+ let stride = match stride {
+ ColumnarValue::Scalar(ScalarValue::IntervalDayTime(Some(v))) => {
+ let (days, ms) = IntervalDayTimeType::to_parts(*v);
+ let nanos = (Duration::days(days as i64) +
Duration::milliseconds(ms as i64))
+ .num_nanoseconds();
+ match nanos {
+ Some(v) => v,
+ _ => {
+ return Err(DataFusionError::Execution(
+ "stride of DATE_BIN is too large".to_string(),
+ ))
+ }
+ }
+ }
+ _ => {
+ return Err(DataFusionError::Execution(
+ "stride of DATE_BIN is an invalid type".to_string(),
+ ))
+ }
Review Comment:
I recommend making this error more explicit. Something like (untested):
```suggestion
        ColumnarValue::Scalar(v) => {
            return Err(DataFusionError::Execution(
                format!("{} is not supported as a DATE_BIN stride type", v.get_datatype()),
            ))
        }
        ColumnarValue::Array(_) => {
            return Err(DataFusionError::NotImplemented(
                "DATE_BIN only supports literal values for the stride argument, not arrays".to_string(),
            ))
        }
```
##########
datafusion/core/tests/sql/timestamp.rs:
##########
@@ -1065,3 +1065,79 @@ async fn cast_to_timestamp_micros_twice() -> Result<()> {
Ok(())
}
+
+#[tokio::test]
+async fn date_bin() {
+ let ctx = SessionContext::new();
+
+ let sql = "SELECT DATE_BIN(INTERVAL '15 minutes', TIMESTAMP '2022-08-03
14:38:50Z', TIMESTAMP '1970-01-01T00:00:00Z') AS res";
+ let results = execute_to_batches(&ctx, sql).await;
+ let expected = vec![
+ "+---------------------+",
+ "| res |",
+ "+---------------------+",
+ "| 2022-08-03 14:30:00 |",
+ "+---------------------+",
+ ];
+ assert_batches_eq!(expected, &results);
+
+ // Shift forward by 5 minutes
+ let sql = "SELECT DATE_BIN(INTERVAL '15 minutes', TIMESTAMP '2022-08-03
14:38:50Z', TIMESTAMP '1970-01-01T00:05:00Z') AS res";
+ let results = execute_to_batches(&ctx, sql).await;
+ let expected = vec![
+ "+---------------------+",
+ "| res |",
+ "+---------------------+",
+ "| 2022-08-03 14:35:00 |",
+ "+---------------------+",
+ ];
+ assert_batches_eq!(expected, &results);
+
+ // Shift backward by 5 minutes
+ let sql = "SELECT DATE_BIN(INTERVAL '15 minutes', TIMESTAMP '2022-08-03
14:38:50Z', TIMESTAMP '1970-01-01T23:55:00Z') AS res";
+ let results = execute_to_batches(&ctx, sql).await;
+ let expected = vec![
+ "+---------------------+",
+ "| res |",
+ "+---------------------+",
+ "| 2022-08-03 14:25:00 |",
+ "+---------------------+",
+ ];
+ assert_batches_eq!(expected, &results);
+
+ // origin after source, timestamp in previous bucket
+ let sql = "SELECT DATE_BIN(INTERVAL '15 minutes', TIMESTAMP '2022-08-03
14:38:50Z', TIMESTAMP '2022-08-03 14:40:00Z') AS res";
+ let results = execute_to_batches(&ctx, sql).await;
+ let expected = vec![
+ "+---------------------+",
+ "| res |",
+ "+---------------------+",
+ "| 2022-08-03 14:25:00 |",
+ "+---------------------+",
+ ];
+ assert_batches_eq!(expected, &results);
+
+ // stride by 7 days
+ let sql = "SELECT DATE_BIN(INTERVAL '7 days', TIMESTAMP '2022-08-03
14:38:50Z', TIMESTAMP '1970-01-01 00:00:00Z') AS res";
+ let results = execute_to_batches(&ctx, sql).await;
+ let expected = vec![
+ "+---------------------+",
+ "| res |",
+ "+---------------------+",
+ "| 2022-07-28 00:00:00 |",
+ "+---------------------+",
+ ];
+ assert_batches_eq!(expected, &results);
+
+ // origin shifts bins forward 1 day
+ let sql = "SELECT DATE_BIN(INTERVAL '7 days', TIMESTAMP '2022-08-03
14:38:50Z', TIMESTAMP '1970-01-02 00:00:00Z') AS res";
+ let results = execute_to_batches(&ctx, sql).await;
+ let expected = vec![
+ "+---------------------+",
+ "| res |",
+ "+---------------------+",
+ "| 2022-07-29 00:00:00 |",
+ "+---------------------+",
+ ];
+ assert_batches_eq!(expected, &results);
+}
Review Comment:
Also, when I tried with `origin` that was a column (rather than a scalar) I
got:
```sql
-- Try using a column value as the origin:
SELECT
DATE_BIN(INTERVAL '15' minute, CAST(time as TIMESTAMP), CAST(origin as
TIMESTAMP)) AS time,
val
FROM (
VALUES
('2021-06-10 17:05:00Z', '2001-01-01 00:02:30', 0.5),
('2021-06-10 17:19:10Z', '2001-01-01 00:02:30', 0.3)
) as t (time, origin, val);
ArrowError(ExternalError(Execution("origin of DATE_BIN is an invalid type")))
❯
```
(it is probably worth a test for that case too, even if we don't support it
in the initial implementation)
##########
datafusion/core/tests/sql/timestamp.rs:
##########
@@ -1065,3 +1065,79 @@ async fn cast_to_timestamp_micros_twice() -> Result<()> {
Ok(())
}
+
+#[tokio::test]
+async fn date_bin() {
+ let ctx = SessionContext::new();
+
+ let sql = "SELECT DATE_BIN(INTERVAL '15 minutes', TIMESTAMP '2022-08-03
14:38:50Z', TIMESTAMP '1970-01-01T00:00:00Z') AS res";
+ let results = execute_to_batches(&ctx, sql).await;
+ let expected = vec![
+ "+---------------------+",
+ "| res |",
+ "+---------------------+",
+ "| 2022-08-03 14:30:00 |",
+ "+---------------------+",
+ ];
+ assert_batches_eq!(expected, &results);
+
+ // Shift forward by 5 minutes
+ let sql = "SELECT DATE_BIN(INTERVAL '15 minutes', TIMESTAMP '2022-08-03
14:38:50Z', TIMESTAMP '1970-01-01T00:05:00Z') AS res";
+ let results = execute_to_batches(&ctx, sql).await;
+ let expected = vec![
+ "+---------------------+",
+ "| res |",
+ "+---------------------+",
+ "| 2022-08-03 14:35:00 |",
+ "+---------------------+",
+ ];
+ assert_batches_eq!(expected, &results);
+
+ // Shift backward by 5 minutes
+ let sql = "SELECT DATE_BIN(INTERVAL '15 minutes', TIMESTAMP '2022-08-03
14:38:50Z', TIMESTAMP '1970-01-01T23:55:00Z') AS res";
+ let results = execute_to_batches(&ctx, sql).await;
+ let expected = vec![
+ "+---------------------+",
+ "| res |",
+ "+---------------------+",
+ "| 2022-08-03 14:25:00 |",
+ "+---------------------+",
+ ];
+ assert_batches_eq!(expected, &results);
+
+ // origin after source, timestamp in previous bucket
+ let sql = "SELECT DATE_BIN(INTERVAL '15 minutes', TIMESTAMP '2022-08-03
14:38:50Z', TIMESTAMP '2022-08-03 14:40:00Z') AS res";
+ let results = execute_to_batches(&ctx, sql).await;
+ let expected = vec![
+ "+---------------------+",
+ "| res |",
+ "+---------------------+",
+ "| 2022-08-03 14:25:00 |",
+ "+---------------------+",
+ ];
+ assert_batches_eq!(expected, &results);
+
+ // stride by 7 days
+ let sql = "SELECT DATE_BIN(INTERVAL '7 days', TIMESTAMP '2022-08-03
14:38:50Z', TIMESTAMP '1970-01-01 00:00:00Z') AS res";
+ let results = execute_to_batches(&ctx, sql).await;
+ let expected = vec![
+ "+---------------------+",
+ "| res |",
+ "+---------------------+",
+ "| 2022-07-28 00:00:00 |",
+ "+---------------------+",
+ ];
+ assert_batches_eq!(expected, &results);
+
+ // origin shifts bins forward 1 day
+ let sql = "SELECT DATE_BIN(INTERVAL '7 days', TIMESTAMP '2022-08-03
14:38:50Z', TIMESTAMP '1970-01-02 00:00:00Z') AS res";
+ let results = execute_to_batches(&ctx, sql).await;
+ let expected = vec![
+ "+---------------------+",
+ "| res |",
+ "+---------------------+",
+ "| 2022-07-29 00:00:00 |",
+ "+---------------------+",
+ ];
+ assert_batches_eq!(expected, &results);
+}
Review Comment:
I recommend adding a test showing this function working on actual values (to
exercise the `Array` path not just the `ScalarValue`):
Perhaps the examples from
https://github.com/apache/arrow-datafusion/issues/3015 👍
```sql
SELECT
DATE_BIN(INTERVAL '15' minute, CAST(time as TIMESTAMP), TIMESTAMP
'2001-01-01') AS time,
val
FROM (
VALUES
('2021-06-10 17:05:00Z', 0.5),
('2021-06-10 17:19:10Z', 0.3)
) as t (time, val);
```
and
```sql
SELECT
DATE_BIN(INTERVAL '15' minute, CAST(time as TIMESTAMP), TIMESTAMP
'2001-01-01 00:02:30') AS time,
val
FROM (
VALUES
('2021-06-10 17:05:00Z', 0.5),
('2021-06-10 17:19:10Z', 0.3)
) as t (time, val);
```
##########
datafusion/physical-expr/src/datetime_expressions.rs:
##########
@@ -502,6 +589,140 @@ mod tests {
});
}
+ #[test]
+ fn test_date_bin_single() {
+ use chrono::Duration;
+
+ let cases = vec![
+ (
+ (
+ Duration::minutes(15),
+ "2004-04-09T02:03:04.123456789Z",
+ "2001-01-01T00:00:00",
+ ),
+ "2004-04-09T02:00:00Z",
+ ),
+ (
+ (
+ Duration::minutes(15),
+ "2004-04-09T02:03:04.123456789Z",
+ "2001-01-01T00:02:30",
+ ),
+ "2004-04-09T02:02:30Z",
+ ),
+ (
+ (
+ Duration::minutes(15),
+ "2004-04-09T02:03:04.123456789Z",
+ "2005-01-01T00:02:30",
+ ),
+ "2004-04-09T02:02:30Z",
+ ),
+ ];
+
+ cases
+ .iter()
+ .for_each(|((stride, source, origin), expected)| {
+ let stride1 = stride.num_nanoseconds().unwrap();
+ let source1 = string_to_timestamp_nanos(source).unwrap();
+ let origin1 = string_to_timestamp_nanos(origin).unwrap();
+
+ let expected1 = string_to_timestamp_nanos(expected).unwrap();
+ let result = date_bin_single(stride1, source1, origin1);
+ assert_eq!(result, expected1, "{} = {}", source, expected);
+ })
+ }
+
+ #[test]
+ fn test_date_bin() {
+ let res = date_bin(&[
+ ColumnarValue::Scalar(ScalarValue::IntervalDayTime(Some(1))),
+ ColumnarValue::Scalar(ScalarValue::TimestampNanosecond(Some(1),
None)),
+ ColumnarValue::Scalar(ScalarValue::TimestampNanosecond(Some(1),
None)),
+ ]);
+ assert!(res.is_ok());
+
+ let mut builder = TimestampNanosecondArray::builder(5);
+ builder.append_slice((1..6).collect::<Vec<i64>>().as_slice());
+ let timestamps = Arc::new(builder.finish());
Review Comment:
It doesn't matter for correctness, but here is an alternative potential
construction if you want to use a more functional style
```suggestion
let timestamps = Arc::new(
(1..6).map(Some)
.collect::<TimestampNanosecondArray>()
);
```
##########
datafusion/physical-expr/src/datetime_expressions.rs:
##########
@@ -502,6 +589,140 @@ mod tests {
});
}
+ #[test]
+ fn test_date_bin_single() {
+ use chrono::Duration;
+
+ let cases = vec![
+ (
+ (
+ Duration::minutes(15),
+ "2004-04-09T02:03:04.123456789Z",
+ "2001-01-01T00:00:00",
+ ),
+ "2004-04-09T02:00:00Z",
+ ),
+ (
+ (
+ Duration::minutes(15),
+ "2004-04-09T02:03:04.123456789Z",
+ "2001-01-01T00:02:30",
+ ),
+ "2004-04-09T02:02:30Z",
+ ),
+ (
Review Comment:
I recommend adding at least one test case that uses a stride other than 15 minutes
##########
datafusion/physical-expr/src/datetime_expressions.rs:
##########
@@ -275,6 +275,93 @@ pub fn date_trunc(args: &[ColumnarValue]) ->
Result<ColumnarValue> {
})
}
+fn date_bin_single(stride: i64, source: i64, origin: i64) -> i64 {
+ let time_diff = source - origin;
+ // distance to bin
+ let time_delta = time_diff - (time_diff % stride);
+
+ let time_delta = if time_diff < 0 && stride > 1 {
+ // The origin is later than the source timestamp, round down to the
previous bin
+ time_delta - stride
+ } else {
+ time_delta
+ };
+
+ origin + time_delta
+}
+
+/// DATE_BIN sql function
+pub fn date_bin(args: &[ColumnarValue]) -> Result<ColumnarValue> {
+ if args.len() != 3 {
+ return Err(DataFusionError::Execution(
+ "Expected three arguments for DATE_BIN".to_string(),
+ ));
+ }
+
+ let (stride, array, origin) = (&args[0], &args[1], &args[2]);
+
+ let stride = match stride {
+ ColumnarValue::Scalar(ScalarValue::IntervalDayTime(Some(v))) => {
+ let (days, ms) = IntervalDayTimeType::to_parts(*v);
+ let nanos = (Duration::days(days as i64) +
Duration::milliseconds(ms as i64))
+ .num_nanoseconds();
+ match nanos {
+ Some(v) => v,
+ _ => {
+ return Err(DataFusionError::Execution(
+ "stride of DATE_BIN is too large".to_string(),
+ ))
+ }
+ }
+ }
+ _ => {
+ return Err(DataFusionError::Execution(
+ "stride of DATE_BIN is an invalid type".to_string(),
+ ))
+ }
+ };
+
+ let origin = match origin {
+ ColumnarValue::Scalar(ScalarValue::TimestampNanosecond(Some(v), _)) =>
*v,
+ _ => {
+ return Err(DataFusionError::Execution(
+ "origin of DATE_BIN is an invalid type".to_string(),
+ ))
+ }
+ };
+
+ let f = |x: Option<i64>| x.map(|x| date_bin_single(stride, x, origin));
+
+ Ok(match array {
+ ColumnarValue::Scalar(ScalarValue::TimestampNanosecond(v, tz_opt)) => {
+ ColumnarValue::Scalar(ScalarValue::TimestampNanosecond(f(*v),
tz_opt.clone()))
+ }
+ ColumnarValue::Array(array) => match array.data_type() {
+ DataType::Timestamp(TimeUnit::Nanosecond, _) => {
+ let array = array
+ .as_any()
+ .downcast_ref::<TimestampNanosecondArray>()
+ .unwrap()
+ .iter()
+ .map(f)
+ .collect::<TimestampNanosecondArray>();
+
+ ColumnarValue::Array(Arc::new(array))
+ }
+ _ => {
+ return Err(DataFusionError::Execution(
+ "source argument of DATE_BIN must be a
timestamp".to_string(),
Review Comment:
```suggestion
                    format!("Unsupported source type for DATE_BIN, must be a timestamp but got {}", array.data_type())
```
##########
datafusion/physical-expr/src/datetime_expressions.rs:
##########
@@ -502,6 +589,140 @@ mod tests {
});
}
+ #[test]
+ fn test_date_bin_single() {
+ use chrono::Duration;
+
+ let cases = vec![
+ (
+ (
+ Duration::minutes(15),
+ "2004-04-09T02:03:04.123456789Z",
+ "2001-01-01T00:00:00",
+ ),
+ "2004-04-09T02:00:00Z",
+ ),
+ (
+ (
+ Duration::minutes(15),
+ "2004-04-09T02:03:04.123456789Z",
+ "2001-01-01T00:02:30",
+ ),
+ "2004-04-09T02:02:30Z",
+ ),
+ (
+ (
+ Duration::minutes(15),
+ "2004-04-09T02:03:04.123456789Z",
+ "2005-01-01T00:02:30",
+ ),
+ "2004-04-09T02:02:30Z",
+ ),
+ ];
+
+ cases
+ .iter()
+ .for_each(|((stride, source, origin), expected)| {
+ let stride1 = stride.num_nanoseconds().unwrap();
+ let source1 = string_to_timestamp_nanos(source).unwrap();
+ let origin1 = string_to_timestamp_nanos(origin).unwrap();
+
+ let expected1 = string_to_timestamp_nanos(expected).unwrap();
+ let result = date_bin_single(stride1, source1, origin1);
+ assert_eq!(result, expected1, "{} = {}", source, expected);
+ })
+ }
+
+ #[test]
+ fn test_date_bin() {
+ let res = date_bin(&[
+ ColumnarValue::Scalar(ScalarValue::IntervalDayTime(Some(1))),
+ ColumnarValue::Scalar(ScalarValue::TimestampNanosecond(Some(1),
None)),
+ ColumnarValue::Scalar(ScalarValue::TimestampNanosecond(Some(1),
None)),
+ ]);
+ assert!(res.is_ok());
+
+ let mut builder = TimestampNanosecondArray::builder(5);
+ builder.append_slice((1..6).collect::<Vec<i64>>().as_slice());
+ let timestamps = Arc::new(builder.finish());
+ let res = date_bin(&[
+ ColumnarValue::Scalar(ScalarValue::IntervalDayTime(Some(1))),
+ ColumnarValue::Array(timestamps),
+ ColumnarValue::Scalar(ScalarValue::TimestampNanosecond(Some(1),
None)),
+ ]);
+ assert!(res.is_ok());
+
+ //
+ // Fallible test cases
+ //
+
+ // invalid number of arguments
+ let res = date_bin(&[
+ ColumnarValue::Scalar(ScalarValue::IntervalDayTime(Some(1))),
+ ColumnarValue::Scalar(ScalarValue::TimestampNanosecond(Some(1),
None)),
+ ]);
+ assert!(matches!(
+ res,
+ Err(DataFusionError::Execution(x)) if x == "Expected three
arguments for DATE_BIN"
+ ));
Review Comment:
This works fine -- if you wanted, another alternative construction that is
often used in tests is something like:
```suggestion
assert_eq!(
res.unwrap_err().to_string(),
"Expected three arguments for DATE_BIN"
);
```
What you have is arguably more specific as it also tests the type of
`DataFusionError` but I wanted to bring the alternate style to your attention
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]