This is an automated email from the ASF dual-hosted git repository.
alamb pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow-datafusion.git
The following commit(s) were added to refs/heads/master by this push:
new 397110a [nit] fix document issue for `approx_distinct` (#1110)
397110a is described below
commit 397110ab6948ea80a14155d65acaf55e23fd624e
Author: Jiayu Liu <[email protected]>
AuthorDate: Tue Oct 12 23:49:28 2021 +0800
[nit] fix document issue for `approx_distinct` (#1110)
---
datafusion/src/physical_plan/expressions/approx_distinct.rs | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/datafusion/src/physical_plan/expressions/approx_distinct.rs b/datafusion/src/physical_plan/expressions/approx_distinct.rs
index 7a19b6c..ac7dcb3 100644
--- a/datafusion/src/physical_plan/expressions/approx_distinct.rs
+++ b/datafusion/src/physical_plan/expressions/approx_distinct.rs
@@ -88,6 +88,7 @@ impl AggregateExpr for ApproxDistinct {
let accumulator: Box<dyn Accumulator> = match &self.input_data_type {
// TODO u8, i8, u16, i16 shall really be done using bitmap, not HLL
// TODO support for boolean (trivial case)
+ // https://github.com/apache/arrow-datafusion/issues/1109
DataType::UInt8 => Box::new(NumericHLLAccumulator::<UInt8Type>::new()),
DataType::UInt16 => Box::new(NumericHLLAccumulator::<UInt16Type>::new()),
DataType::UInt32 => Box::new(NumericHLLAccumulator::<UInt32Type>::new()),
@@ -102,7 +103,7 @@ impl AggregateExpr for ApproxDistinct {
DataType::LargeBinary => Box::new(BinaryHLLAccumulator::<i64>::new()),
other => {
return Err(DataFusionError::NotImplemented(format!(
- "Support for count_distinct for data type {} is not implemented",
+ "Support for 'approx_distinct' for data type {} is not implemented",
other
)))
}