This is an automated email from the ASF dual-hosted git repository.
alamb pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/datafusion.git
The following commit(s) were added to refs/heads/main by this push:
new f7591fb7df Minor: Small comment changes in sql folder (#12838)
f7591fb7df is described below
commit f7591fb7df126bf1d71693b3b47707607242ccb7
Author: Jonathan Chen <[email protected]>
AuthorDate: Thu Oct 10 16:31:38 2024 -0400
Minor: Small comment changes in sql folder (#12838)
* Small changes in datafusion/sql folder
* more small changes
---
datafusion/sql/src/cte.rs | 4 ++--
datafusion/sql/src/expr/function.rs | 10 +++++-----
datafusion/sql/src/expr/identifier.rs | 28 ++++++++++++++--------------
datafusion/sql/src/expr/order_by.rs | 2 +-
datafusion/sql/src/expr/unary_op.rs | 4 ++--
datafusion/sql/src/expr/value.rs | 10 +++++-----
datafusion/sql/src/parser.rs | 6 +++---
datafusion/sql/src/planner.rs | 22 +++++++++++-----------
datafusion/sql/src/query.rs | 2 +-
datafusion/sql/src/relation/mod.rs | 4 ++--
datafusion/sql/src/select.rs | 30 +++++++++++++++---------------
datafusion/sql/src/statement.rs | 26 +++++++++++++-------------
datafusion/sql/src/unparser/dialect.rs | 8 ++++----
datafusion/sql/src/unparser/expr.rs | 6 +++---
datafusion/sql/src/unparser/rewrite.rs | 6 +++---
datafusion/sql/src/unparser/utils.rs | 2 +-
datafusion/sql/src/utils.rs | 28 ++++++++++++++--------------
datafusion/sql/src/values.rs | 2 +-
18 files changed, 100 insertions(+), 100 deletions(-)
diff --git a/datafusion/sql/src/cte.rs b/datafusion/sql/src/cte.rs
index 4c380f0b37..c288d6ca70 100644
--- a/datafusion/sql/src/cte.rs
+++ b/datafusion/sql/src/cte.rs
@@ -98,8 +98,8 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
}
};
- // Each recursive CTE consists from two parts in the logical plan:
- // 1. A static term (the left hand side on the SQL, where the
+ // Each recursive CTE consists of two parts in the logical plan:
+ // 1. A static term (the left-hand side on the SQL, where the
// referencing to the same CTE is not allowed)
//
// 2. A recursive term (the right hand side, and the recursive
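As a concrete illustration of the two terms named above, here is a minimal sketch of a recursive CTE run through DataFusion (assuming a DataFusion version with recursive CTE support enabled and a tokio runtime; the CTE name `nums` is illustrative):

    use datafusion::error::Result;
    use datafusion::prelude::*;

    #[tokio::main]
    async fn main() -> Result<()> {
        let ctx = SessionContext::new();
        let sql = r#"
            WITH RECURSIVE nums(n) AS (
                SELECT 1                            -- 1. static term (left-hand side)
                UNION ALL
                SELECT n + 1 FROM nums WHERE n < 5  -- 2. recursive term (right-hand side)
            )
            SELECT n FROM nums
        "#;
        ctx.sql(sql).await?.show().await?;
        Ok(())
    }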
diff --git a/datafusion/sql/src/expr/function.rs b/datafusion/sql/src/expr/function.rs
index 20a772cdd0..619eadcf0f 100644
--- a/datafusion/sql/src/expr/function.rs
+++ b/datafusion/sql/src/expr/function.rs
@@ -237,7 +237,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
}
}
- // user-defined function (UDF) should have precedence
+ // User-defined function (UDF) should have precedence
if let Some(fm) = self.context_provider.get_function_meta(&name) {
let args = self.function_args_to_expr(args, schema, planner_context)?;
return Ok(Expr::ScalarFunction(ScalarFunction::new_udf(fm, args)));
@@ -260,12 +260,12 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
);
}
- // then, window function
+ // Then, window function
if let Some(WindowType::WindowSpec(window)) = over {
let partition_by = window
.partition_by
.into_iter()
- // ignore window spec PARTITION BY for scalar values
+ // Ignore window spec PARTITION BY for scalar values
// as they do not change and thus do not generate new partitions
.filter(|e| !matches!(e, sqlparser::ast::Expr::Value { .. },))
.map(|e| self.sql_expr_to_logical_expr(e, schema, planner_context))
@@ -383,7 +383,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
&self,
name: &str,
) -> Result<WindowFunctionDefinition> {
- // check udaf first
+ // Check udaf first
let udaf = self.context_provider.get_aggregate_meta(name);
// Use the builtin window function instead of the user-defined aggregate function
if udaf.as_ref().is_some_and(|udaf| {
@@ -434,7 +434,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
}),
FunctionArg::Unnamed(FunctionArgExpr::QualifiedWildcard(object_name)) => {
let qualifier = self.object_name_to_table_reference(object_name)?;
- // sanity check on qualifier with schema
+ // Sanity check on qualifier with schema
let qualified_indices = schema.fields_indices_with_qualified(&qualifier);
if qualified_indices.is_empty() {
return plan_err!("Invalid qualifier {qualifier}");
diff --git a/datafusion/sql/src/expr/identifier.rs b/datafusion/sql/src/expr/identifier.rs
index b016309b69..e103f68fc9 100644
--- a/datafusion/sql/src/expr/identifier.rs
+++ b/datafusion/sql/src/expr/identifier.rs
@@ -115,9 +115,9 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
let search_result = search_dfschema(&ids, schema);
match search_result {
- // found matching field with spare identifier(s) for nested field(s) in structure
+ // Found matching field with spare identifier(s) for nested field(s) in structure
Some((field, qualifier, nested_names)) if !nested_names.is_empty() => {
- // found matching field with spare identifier(s) for nested field(s) in structure
+ // Found matching field with spare identifier(s) for nested field(s) in structure
for planner in self.context_provider.get_expr_planners() {
if let Ok(planner_result) = planner.plan_compound_identifier(
field,
@@ -134,21 +134,21 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
}
plan_err!("could not parse compound identifier from {ids:?}")
}
- // found matching field with no spare identifier(s)
+ // Found matching field with no spare identifier(s)
Some((field, qualifier, _nested_names)) => {
Ok(Expr::Column(Column::from((qualifier, field))))
}
None => {
- // return default where use all identifiers to not have a nested field
+ // Return default where use all identifiers to not have a nested field
// this len check is because at 5 identifiers will have to have a nested field
if ids.len() == 5 {
not_impl_err!("compound identifier: {ids:?}")
} else {
- // check the outer_query_schema and try to find a match
+ // Check the outer_query_schema and try to find a match
if let Some(outer) = planner_context.outer_query_schema() {
let search_result = search_dfschema(&ids, outer);
match search_result {
- // found matching field with spare identifier(s) for nested field(s) in structure
+ // Found matching field with spare identifier(s) for nested field(s) in structure
Some((field, qualifier, nested_names))
if !nested_names.is_empty() =>
{
@@ -158,15 +158,15 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
Column::from((qualifier, field)).quoted_flat_name()
)
}
- // found matching field with no spare identifier(s)
+ // Found matching field with no spare identifier(s)
Some((field, qualifier, _nested_names)) => {
- // found an exact match on a qualified name in the outer plan schema, so this is an outer reference column
+ // Found an exact match on a qualified name in the outer plan schema, so this is an outer reference column
Ok(Expr::OuterReferenceColumn(
field.data_type().clone(),
Column::from((qualifier, field)),
))
}
- // found no matching field, will return a default
+ // Found no matching field, will return a default
None => {
let s = &ids[0..ids.len()];
// safe unwrap as s can never be empty or exceed the bounds
@@ -177,7 +177,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
}
} else {
let s = &ids[0..ids.len()];
- // safe unwrap as s can never be empty or exceed the bounds
+ // Safe unwrap as s can never be empty or exceed the bounds
let (relation, column_name) = form_identifier(s).unwrap();
Ok(Expr::Column(Column::new(relation, column_name)))
}
@@ -311,15 +311,15 @@ fn search_dfschema<'ids, 'schema>(
fn generate_schema_search_terms(
ids: &[String],
) -> impl Iterator<Item = (Option<TableReference>, &String, &[String])> {
- // take at most 4 identifiers to form a Column to search with
+ // Take at most 4 identifiers to form a Column to search with
// - 1 for the column name
// - 0 to 3 for the TableReference
let bound = ids.len().min(4);
- // search terms from most specific to least specific
+ // Search terms from most specific to least specific
(0..bound).rev().map(|i| {
let nested_names_index = i + 1;
let qualifier_and_column = &ids[0..nested_names_index];
- // safe unwrap as qualifier_and_column can never be empty or exceed the bounds
+ // Safe unwrap as qualifier_and_column can never be empty or exceed the bounds
let (relation, column_name) = form_identifier(qualifier_and_column).unwrap();
(relation, column_name, &ids[nested_names_index..])
})
@@ -331,7 +331,7 @@ mod test {
#[test]
// testing according to documentation of generate_schema_search_terms function
- // where ensure generated search terms are in correct order with correct values
+ // where it ensures generated search terms are in correct order with correct values
fn test_generate_schema_search_terms() -> Result<()> {
type ExpectedItem = (
Option<TableReference>,
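The hunks above describe how compound identifiers are broken into search terms. A self-contained sketch of that ordering (a hypothetical stand-in, not the DataFusion function itself):

    /// At most 4 leading identifiers form the table reference plus the
    /// column name; anything left over is treated as nested field names.
    fn schema_search_terms(ids: &[String]) -> Vec<(&[String], &String, &[String])> {
        // 1 identifier for the column name, 0 to 3 for the table reference.
        let bound = ids.len().min(4);
        // Most specific (longest qualifier) first.
        (0..bound)
            .rev()
            .map(|i| (&ids[0..i], &ids[i], &ids[i + 1..]))
            .collect()
    }

    fn main() {
        let ids: Vec<String> = ["catalog", "schema", "table", "column", "nested"]
            .iter()
            .map(|s| s.to_string())
            .collect();
        for (qualifier, column, nested) in schema_search_terms(&ids) {
            println!("qualifier={qualifier:?} column={column} nested={nested:?}");
        }
    }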
diff --git a/datafusion/sql/src/expr/order_by.rs b/datafusion/sql/src/expr/order_by.rs
index 6a3a4d6ccb..0028980687 100644
--- a/datafusion/sql/src/expr/order_by.rs
+++ b/datafusion/sql/src/expr/order_by.rs
@@ -102,7 +102,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
expr_vec.push(Sort::new(
expr,
asc,
- // when asc is true, by default nulls last to be consistent with postgres
+ // When asc is true, by default nulls last to be consistent with postgres
// postgres rule: https://www.postgresql.org/docs/current/queries-order.html
nulls_first.unwrap_or(!asc),
))
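The `nulls_first.unwrap_or(!asc)` expression above encodes the PostgreSQL default: ascending sorts put NULLs last, descending sorts put NULLs first, unless the query overrides it. A tiny illustrative sketch of just that rule:

    /// PostgreSQL-style default: NULLS LAST for ASC, NULLS FIRST for DESC,
    /// unless the query specifies NULLS FIRST/LAST explicitly.
    fn nulls_first_default(asc: bool, explicit: Option<bool>) -> bool {
        explicit.unwrap_or(!asc)
    }

    fn main() {
        assert!(!nulls_first_default(true, None)); // ASC  -> NULLS LAST
        assert!(nulls_first_default(false, None)); // DESC -> NULLS FIRST
        assert!(nulls_first_default(true, Some(true))); // explicit wins
        println!("ok");
    }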
diff --git a/datafusion/sql/src/expr/unary_op.rs b/datafusion/sql/src/expr/unary_op.rs
index 2a341fb7c4..3c54705038 100644
--- a/datafusion/sql/src/expr/unary_op.rs
+++ b/datafusion/sql/src/expr/unary_op.rs
@@ -37,7 +37,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
}
UnaryOperator::Minus => {
match expr {
- // optimization: if it's a number literal, we apply the negative operator
+ // Optimization: if it's a number literal, we apply the negative operator
// here directly to calculate the new literal.
SQLExpr::Value(Value::Number(n, _)) => {
self.parse_sql_number(&n, true)
@@ -45,7 +45,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
SQLExpr::Interval(interval) => {
self.sql_interval_to_expr(true, interval)
}
- // not a literal, apply negative operator on expression
+ // Not a literal, apply negative operator on expression
_ => Ok(Expr::Negative(Box::new(self.sql_expr_to_logical_expr(
expr,
schema,
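The optimization in this hunk folds a unary minus into number literals at parse time instead of wrapping them in a `Negative` node. A simplified sketch of the idea using a toy expression type (not DataFusion's `Expr`):

    /// Toy expression type, for illustration only.
    #[derive(Debug)]
    enum Expr {
        Int(i64),
        Negative(Box<Expr>),
    }

    /// Apply unary minus: fold the sign into number literals directly,
    /// otherwise wrap the expression in a Negative node.
    fn negate(expr: Expr) -> Expr {
        match expr {
            Expr::Int(n) => Expr::Int(-n), // literal: compute the new literal here
            other => Expr::Negative(Box::new(other)), // not a literal: wrap it
        }
    }

    fn main() {
        println!("{:?}", negate(Expr::Int(5))); // Int(-5)
        println!("{:?}", negate(Expr::Negative(Box::new(Expr::Int(5)))));
    }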
diff --git a/datafusion/sql/src/expr/value.rs b/datafusion/sql/src/expr/value.rs
index be0909b584..7dc15de7ad 100644
--- a/datafusion/sql/src/expr/value.rs
+++ b/datafusion/sql/src/expr/value.rs
@@ -235,7 +235,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
let value = interval_literal(*interval.value, negative)?;
// leading_field really means the unit if specified
- // for example, "month" in `INTERVAL '5' month`
+ // For example, "month" in `INTERVAL '5' month`
let value = match interval.leading_field.as_ref() {
Some(leading_field) => format!("{value} {leading_field}"),
None => value,
@@ -323,9 +323,9 @@ const fn try_decode_hex_char(c: u8) -> Option<u8> {
fn parse_decimal_128(unsigned_number: &str, negative: bool) -> Result<Expr> {
// remove leading zeroes
let trimmed = unsigned_number.trim_start_matches('0');
- // parse precision and scale, remove decimal point if exists
+ // Parse precision and scale, remove decimal point if exists
let (precision, scale, replaced_str) = if trimmed == "." {
- // special cases for numbers such as “0.”, “000.”, and so on.
+ // Special cases for numbers such as “0.”, “000.”, and so on.
(1, 0, Cow::Borrowed("0"))
} else if let Some(i) = trimmed.find('.') {
(
@@ -334,7 +334,7 @@ fn parse_decimal_128(unsigned_number: &str, negative: bool) -> Result<Expr> {
Cow::Owned(trimmed.replace('.', "")),
)
} else {
- // no decimal point, keep as is
+ // No decimal point, keep as is
(trimmed.len(), 0, Cow::Borrowed(trimmed))
};
@@ -344,7 +344,7 @@ fn parse_decimal_128(unsigned_number: &str, negative: bool) -> Result<Expr> {
)))
})?;
- // check precision overflow
+ // Check precision overflow
if precision as u8 > DECIMAL128_MAX_PRECISION {
return Err(DataFusionError::from(ParserError(format!(
"Cannot parse {replaced_str} as i128 when building decimal:
precision overflow"
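Since the precision/scale bookkeeping is spread across several hunks, here is a standalone sketch of the counting it describes (a hypothetical helper mirroring the stated rules: strip leading zeros, treat a bare "." as zero, digits after the point become the scale):

    /// Returns (precision, scale, digits with the decimal point removed).
    fn precision_and_scale(unsigned_number: &str) -> (usize, usize, String) {
        // Remove leading zeroes.
        let trimmed = unsigned_number.trim_start_matches('0');
        if trimmed == "." || trimmed.is_empty() {
            // Special cases such as "0.", "000.", and so on.
            (1, 0, "0".to_string())
        } else if let Some(i) = trimmed.find('.') {
            // The point itself does not count toward the precision.
            (
                trimmed.len() - 1,
                trimmed.len() - i - 1,
                trimmed.replace('.', ""),
            )
        } else {
            // No decimal point, keep as is.
            (trimmed.len(), 0, trimmed.to_string())
        }
    }

    fn main() {
        assert_eq!(precision_and_scale("000."), (1, 0, "0".to_string()));
        assert_eq!(precision_and_scale("12.34"), (4, 2, "1234".to_string()));
        assert_eq!(precision_and_scale("0042"), (2, 0, "42".to_string()));
        println!("ok");
    }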
diff --git a/datafusion/sql/src/parser.rs b/datafusion/sql/src/parser.rs
index 6d130647a4..a68d849185 100644
--- a/datafusion/sql/src/parser.rs
+++ b/datafusion/sql/src/parser.rs
@@ -761,10 +761,10 @@ impl<'a> DFParser<'a> {
// Note that mixing both names and definitions is not allowed
let peeked = self.parser.peek_nth_token(2);
if peeked == Token::Comma || peeked == Token::RParen {
- // list of column names
+ // List of column names
builder.table_partition_cols = Some(self.parse_partitions()?)
} else {
- // list of column defs
+ // List of column defs
let (cols, cons) = self.parse_columns()?;
builder.table_partition_cols = Some(
cols.iter().map(|col| col.name.to_string()).collect(),
@@ -850,7 +850,7 @@ impl<'a> DFParser<'a> {
options.push((key, value));
let comma = self.parser.consume_token(&Token::Comma);
if self.parser.consume_token(&Token::RParen) {
- // allow a trailing comma, even though it's not in standard
+ // Allow a trailing comma, even though it's not in standard
break;
} else if !comma {
return self.expected(
diff --git a/datafusion/sql/src/planner.rs b/datafusion/sql/src/planner.rs
index 1220789e04..66e360a9ad 100644
--- a/datafusion/sql/src/planner.rs
+++ b/datafusion/sql/src/planner.rs
@@ -117,7 +117,7 @@ impl ValueNormalizer {
/// CTEs, Views, subqueries and PREPARE statements. The states include
/// Common Table Expression (CTE) provided with WITH clause and
/// Parameter Data Types provided with PREPARE statement and the query schema of the
-/// outer query plan
+/// outer query plan.
///
/// # Cloning
///
@@ -166,12 +166,12 @@ impl PlannerContext {
self
}
- // return a reference to the outer queries schema
+ // Return a reference to the outer query's schema
pub fn outer_query_schema(&self) -> Option<&DFSchema> {
self.outer_query_schema.as_ref().map(|s| s.as_ref())
}
- /// sets the outer query schema, returning the existing one, if
+ /// Sets the outer query schema, returning the existing one, if
/// any
pub fn set_outer_query_schema(
&mut self,
@@ -181,12 +181,12 @@ impl PlannerContext {
schema
}
- // return a clone of the outer FROM schema
+ // Return a clone of the outer FROM schema
pub fn outer_from_schema(&self) -> Option<Arc<DFSchema>> {
self.outer_from_schema.clone()
}
- /// sets the outer FROM schema, returning the existing one, if any
+ /// Sets the outer FROM schema, returning the existing one, if any
pub fn set_outer_from_schema(
&mut self,
mut schema: Option<DFSchemaRef>,
@@ -195,7 +195,7 @@ impl PlannerContext {
schema
}
- /// extends the FROM schema, returning the existing one, if any
+ /// Extends the FROM schema, returning the existing one, if any
pub fn extend_outer_from_schema(&mut self, schema: &DFSchemaRef) -> Result<()> {
match self.outer_from_schema.as_mut() {
Some(from_schema) => Arc::make_mut(from_schema).merge(schema),
@@ -209,7 +209,7 @@ impl PlannerContext {
&self.prepare_param_data_types
}
- /// returns true if there is a Common Table Expression (CTE) /
+ /// Returns true if there is a Common Table Expression (CTE) /
/// Subquery for the specified name
pub fn contains_cte(&self, cte_name: &str) -> bool {
self.ctes.contains_key(cte_name)
@@ -520,9 +520,9 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
| SQLDataType::CharVarying(_)
| SQLDataType::CharacterLargeObject(_)
| SQLDataType::CharLargeObject(_)
- // precision is not supported
+ // Precision is not supported
| SQLDataType::Timestamp(Some(_), _)
- // precision is not supported
+ // Precision is not supported
| SQLDataType::Time(Some(_), _)
| SQLDataType::Dec(_)
| SQLDataType::BigNumeric(_)
@@ -586,7 +586,7 @@ pub fn object_name_to_table_reference(
object_name: ObjectName,
enable_normalization: bool,
) -> Result<TableReference> {
- // use destructure to make it clear no fields on ObjectName are ignored
+ // Use destructure to make it clear no fields on ObjectName are ignored
let ObjectName(idents) = object_name;
idents_to_table_reference(idents, enable_normalization)
}
@@ -597,7 +597,7 @@ pub(crate) fn idents_to_table_reference(
enable_normalization: bool,
) -> Result<TableReference> {
struct IdentTaker(Vec<Ident>);
- /// take the next identifier from the back of idents, panic'ing if
+ /// Take the next identifier from the back of idents, panic'ing if
/// there are none left
impl IdentTaker {
fn take(&mut self, enable_normalization: bool) -> String {
diff --git a/datafusion/sql/src/query.rs b/datafusion/sql/src/query.rs
index 71328cfd01..125259d227 100644
--- a/datafusion/sql/src/query.rs
+++ b/datafusion/sql/src/query.rs
@@ -205,7 +205,7 @@ fn convert_usize_with_check(n: i64, arg_name: &str) -> Result<usize> {
/// Returns the order by expressions from the query.
fn to_order_by_exprs(order_by: Option<OrderBy>) -> Result<Vec<OrderByExpr>> {
let Some(OrderBy { exprs, interpolate }) = order_by else {
- // if no order by, return an empty array
+ // If no order by, return an empty array.
return Ok(vec![]);
};
if let Some(_interpolate) = interpolate {
diff --git a/datafusion/sql/src/relation/mod.rs b/datafusion/sql/src/relation/mod.rs
index f8ebb04f38..256cc58e71 100644
--- a/datafusion/sql/src/relation/mod.rs
+++ b/datafusion/sql/src/relation/mod.rs
@@ -70,7 +70,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
.build()?;
(plan, alias)
} else {
- // normalize name and alias
+ // Normalize name and alias
let table_ref = self.object_name_to_table_reference(name)?;
let table_name = table_ref.to_string();
let cte = planner_context.get_cte(&table_name);
@@ -163,7 +163,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
subquery: TableFactor,
planner_context: &mut PlannerContext,
) -> Result<LogicalPlan> {
- // At this point for a syntacitally valid query the outer_from_schema is
+ // At this point for a syntactically valid query the outer_from_schema is
// guaranteed to be set, so the `.unwrap()` call will never panic. This
// is the case because we only call this method for lateral table
// factors, and those can never be the first factor in a FROM list. This
diff --git a/datafusion/sql/src/select.rs b/datafusion/sql/src/select.rs
index c93d9e6fc4..c029fe2a23 100644
--- a/datafusion/sql/src/select.rs
+++ b/datafusion/sql/src/select.rs
@@ -52,7 +52,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
order_by: Vec<OrderByExpr>,
planner_context: &mut PlannerContext,
) -> Result<LogicalPlan> {
- // check for unsupported syntax first
+ // Check for unsupported syntax first
if !select.cluster_by.is_empty() {
return not_impl_err!("CLUSTER BY");
}
@@ -69,17 +69,17 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
return not_impl_err!("SORT BY");
}
- // process `from` clause
+ // Process `from` clause
let plan = self.plan_from_tables(select.from, planner_context)?;
let empty_from = matches!(plan, LogicalPlan::EmptyRelation(_));
- // process `where` clause
+ // Process `where` clause
let base_plan = self.plan_selection(select.selection, plan, planner_context)?;
- // handle named windows before processing the projection expression
+ // Handle named windows before processing the projection expression
check_conflicting_windows(&select.named_window)?;
match_window_definitions(&mut select.projection, &select.named_window)?;
- // process the SELECT expressions
+ // Process the SELECT expressions
let select_exprs = self.prepare_select_exprs(
&base_plan,
select.projection,
@@ -87,7 +87,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
planner_context,
)?;
- // having and group by clause may reference aliases defined in select projection
+ // Having and group by clause may reference aliases defined in select projection
let projected_plan = self.project(base_plan.clone(), select_exprs.clone())?;
// Place the fields of the base plan at the front so that when there are references
@@ -107,7 +107,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
)?;
let order_by_rex = normalize_sorts(order_by_rex, &projected_plan)?;
- // this alias map is resolved and looked up in both having exprs and group by exprs
+ // This alias map is resolved and looked up in both having exprs and group by exprs
let alias_map = extract_aliases(&select_exprs);
// Optionally the HAVING expression.
@@ -159,7 +159,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
planner_context,
)?;
- // aliases from the projection can conflict with same-named expressions in the input
+ // Aliases from the projection can conflict with same-named expressions in the input
let mut alias_map = alias_map.clone();
for f in base_plan.schema().fields() {
alias_map.remove(f.name());
@@ -192,7 +192,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
.collect()
};
- // process group by, aggregation or having
+ // Process group by, aggregation or having
let (plan, mut select_exprs_post_aggr, having_expr_post_aggr) = if !group_by_exprs
.is_empty()
|| !aggr_exprs.is_empty()
@@ -219,7 +219,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
plan
};
- // process window function
+ // Process window function
let window_func_exprs = find_window_exprs(&select_exprs_post_aggr);
let plan = if window_func_exprs.is_empty() {
@@ -227,7 +227,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
} else {
let plan = LogicalPlanBuilder::window_plan(plan, window_func_exprs.clone())?;
- // re-write the projection
+ // Re-write the projection
select_exprs_post_aggr = select_exprs_post_aggr
.iter()
.map(|expr| rebase_expr(expr, &window_func_exprs, &plan))
@@ -236,10 +236,10 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
plan
};
- // try process unnest expression or do the final projection
+ // Try processing unnest expression or do the final projection
let plan = self.try_process_unnest(plan, select_exprs_post_aggr)?;
- // process distinct clause
+ // Process distinct clause
let plan = match select.distinct {
None => Ok(plan),
Some(Distinct::Distinct) => {
@@ -304,7 +304,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
// Each expr in select_exprs can contains multiple unnest stage
// The transformation happen bottom up, one at a time for each iteration
- // Only exaust the loop if no more unnest transformation is found
+ // Only exhaust the loop if no more unnest transformation is found
for i in 0.. {
let mut unnest_columns = vec![];
// from which column used for projection, before the unnest happen
@@ -390,7 +390,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
..
} = agg;
- // process unnest of group_by_exprs, and input of agg will be rewritten
+ // Process unnest of group_by_exprs, and input of agg will be rewritten
// for example:
//
// ```
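The hunks above trace the SELECT planning order: FROM, then WHERE, then named windows and the projection, then GROUP BY/HAVING, window functions, unnest, and DISTINCT. A minimal end-to-end sketch that exercises several of those stages (assuming a tokio runtime; the inline VALUES table is illustrative):

    use datafusion::error::Result;
    use datafusion::prelude::*;

    #[tokio::main]
    async fn main() -> Result<()> {
        let ctx = SessionContext::new();
        // FROM -> WHERE -> projection -> GROUP BY -> HAVING, in the order
        // the planner processes them above.
        let df = ctx
            .sql(
                "SELECT v.k, COUNT(*) AS cnt \
                 FROM (VALUES ('a', 1), ('a', 2), ('b', 3)) AS v(k, n) \
                 WHERE n > 0 \
                 GROUP BY v.k \
                 HAVING COUNT(*) > 1",
            )
            .await?;
        df.show().await?;
        Ok(())
    }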
diff --git a/datafusion/sql/src/statement.rs b/datafusion/sql/src/statement.rs
index 656d72d07b..3111fab9a2 100644
--- a/datafusion/sql/src/statement.rs
+++ b/datafusion/sql/src/statement.rs
@@ -878,14 +878,14 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
}
None => None,
};
- // at the moment functions can't be qualified `schema.name`
+ // At the moment functions can't be qualified `schema.name`
let name = match &name.0[..] {
[] => exec_err!("Function should have name")?,
[n] => n.value.clone(),
[..] => not_impl_err!("Qualified functions are not supported")?,
};
//
- // convert resulting expression to data fusion expression
+ // Convert resulting expression to data fusion expression
//
let arg_types = args.as_ref().map(|arg| {
arg.iter().map(|t| t.data_type.clone()).collect::<Vec<_>>()
@@ -933,10 +933,10 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
func_desc,
..
} => {
- // according to postgresql documentation it can be only one function
+ // According to postgresql documentation it can be only one function
// specified in drop statement
if let Some(desc) = func_desc.first() {
- // at the moment functions can't be qualified `schema.name`
+ // At the moment functions can't be qualified `schema.name`
let name = match &desc.name.0[..] {
[] => exec_err!("Function should have name")?,
[n] => n.value.clone(),
@@ -1028,7 +1028,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
filter: Option<ShowStatementFilter>,
) -> Result<LogicalPlan> {
if self.has_table("information_schema", "tables") {
- // we only support the basic "SHOW TABLES"
+ // We only support the basic "SHOW TABLES"
// https://github.com/apache/datafusion/issues/3188
if db_name.is_some() || filter.is_some() || full || extended {
plan_err!("Unsupported parameters to SHOW TABLES")
@@ -1059,7 +1059,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
}
fn copy_to_plan(&self, statement: CopyToStatement) -> Result<LogicalPlan> {
- // determine if source is table or query and handle accordingly
+ // Determine if source is table or query and handle accordingly
let copy_source = statement.source;
let (input, input_schema, table_ref) = match copy_source {
CopyToSource::Relation(object_name) => {
@@ -1100,7 +1100,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
.to_string(),
)
};
- // try to infer file format from file extension
+ // Try to infer file format from file extension
let extension: &str = &Path::new(&statement.target)
.extension()
.ok_or_else(e)?
@@ -1406,11 +1406,11 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
let mut variable_lower = variable.to_lowercase();
if variable_lower == "timezone" || variable_lower == "time.zone" {
- // we could introduce alias in OptionDefinition if this string matching thing grows
+ // We could introduce alias in OptionDefinition if this string matching thing grows
variable_lower = "datafusion.execution.time_zone".to_string();
}
- // parse value string from Expr
+ // Parse value string from Expr
let value_string = match &value[0] {
SQLExpr::Identifier(i) => ident_to_string(i),
SQLExpr::Value(v) => match crate::utils::value_to_string(v) {
@@ -1419,7 +1419,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
}
Some(v) => v,
},
- // for capture signed number e.g. +8, -8
+ // For capture signed number e.g. +8, -8
SQLExpr::UnaryOp { op, expr } => match op {
UnaryOperator::Plus => format!("+{expr}"),
UnaryOperator::Minus => format!("-{expr}"),
@@ -1614,10 +1614,10 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
// Get insert fields and target table's value indices
//
- // if value_indices[i] = Some(j), it means that the value of the i-th target table's column is
+ // If value_indices[i] = Some(j), it means that the value of the i-th target table's column is
// derived from the j-th output of the source.
//
- // if value_indices[i] = None, it means that the value of the i-th target table's column is
+ // If value_indices[i] = None, it means that the value of the i-th target table's column is
// not provided, and should be filled with a default value later.
let (fields, value_indices) = if columns.is_empty() {
// Empty means we're inserting into all columns of the table
@@ -1749,7 +1749,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
let table_ref = self.object_name_to_table_reference(sql_table_name)?;
let _ = self.context_provider.get_table_source(table_ref)?;
- // treat both FULL and EXTENDED as the same
+ // Treat both FULL and EXTENDED as the same
let select_list = if full || extended {
"*"
} else {
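The value_indices convention documented above (Some(j) means the i-th table column is fed by the j-th source output; None means it is filled with a default later) can be sketched with a hypothetical helper:

    /// value_indices[i] = Some(j): the i-th table column comes from the
    /// j-th output of the source; None: fill with a default value later.
    fn value_indices(table_cols: &[&str], insert_cols: &[&str]) -> Vec<Option<usize>> {
        table_cols
            .iter()
            .map(|col| insert_cols.iter().position(|c| c == col))
            .collect()
    }

    fn main() {
        // INSERT INTO t (b, a) VALUES (...): column c gets a default.
        let indices = value_indices(&["a", "b", "c"], &["b", "a"]);
        assert_eq!(indices, vec![Some(1), Some(0), None]);
        println!("{indices:?}");
    }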
diff --git a/datafusion/sql/src/unparser/dialect.rs b/datafusion/sql/src/unparser/dialect.rs
index 609e6f2240..aef3b0dfab 100644
--- a/datafusion/sql/src/unparser/dialect.rs
+++ b/datafusion/sql/src/unparser/dialect.rs
@@ -162,7 +162,7 @@ impl Dialect for DefaultDialect {
fn identifier_quote_style(&self, identifier: &str) -> Option<char> {
let identifier_regex = Regex::new(r"^[a-zA-Z_][a-zA-Z0-9_]*$").unwrap();
let id_upper = identifier.to_uppercase();
- // special case ignore "ID", see https://github.com/sqlparser-rs/sqlparser-rs/issues/1382
+ // Special case ignore "ID", see https://github.com/sqlparser-rs/sqlparser-rs/issues/1382
// ID is a keyword in ClickHouse, but we don't want to quote it when unparsing SQL here
if (id_upper != "ID" && ALL_KEYWORDS.contains(&id_upper.as_str()))
|| !identifier_regex.is_match(identifier)
@@ -377,7 +377,7 @@ impl Default for CustomDialect {
}
impl CustomDialect {
- // create a CustomDialect
+ // Create a CustomDialect
#[deprecated(note = "please use `CustomDialectBuilder` instead")]
pub fn new(identifier_quote_style: Option<char>) -> Self {
Self {
@@ -541,7 +541,7 @@ impl CustomDialectBuilder {
self
}
- /// Customize the dialect to supports `NULLS FIRST` in `ORDER BY` clauses
+ /// Customize the dialect to support `NULLS FIRST` in `ORDER BY` clauses
pub fn with_supports_nulls_first_in_sort(
mut self,
supports_nulls_first_in_sort: bool,
@@ -620,7 +620,7 @@ impl CustomDialectBuilder {
self
}
- /// Customize the dialect to supports column aliases as part of alias table definition
+ /// Customize the dialect to support column aliases as part of alias table definition
pub fn with_supports_column_alias_in_table_alias(
mut self,
supports_column_alias_in_table_alias: bool,
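The "ID" special case above sits inside a quoting rule: quote an identifier when it is a keyword (other than ID) or when it is not a plain word. A simplified sketch, assuming the regex crate and a tiny stand-in keyword list instead of sqlparser's ALL_KEYWORDS:

    use regex::Regex;

    fn identifier_quote_style(identifier: &str) -> Option<char> {
        // Stand-in for sqlparser's ALL_KEYWORDS, for illustration only.
        const KEYWORDS: [&str; 3] = ["SELECT", "FROM", "WHERE"];
        let identifier_regex = Regex::new(r"^[a-zA-Z_][a-zA-Z0-9_]*$").unwrap();
        let id_upper = identifier.to_uppercase();
        // "ID" is a ClickHouse keyword, but it is deliberately left unquoted.
        if (id_upper != "ID" && KEYWORDS.contains(&id_upper.as_str()))
            || !identifier_regex.is_match(identifier)
        {
            Some('"')
        } else {
            None
        }
    }

    fn main() {
        assert_eq!(identifier_quote_style("select"), Some('"'));
        assert_eq!(identifier_quote_style("my col"), Some('"'));
        assert_eq!(identifier_quote_style("id"), None);
        println!("ok");
    }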
diff --git a/datafusion/sql/src/unparser/expr.rs b/datafusion/sql/src/unparser/expr.rs
index 537ac22744..b7491d1f88 100644
--- a/datafusion/sql/src/unparser/expr.rs
+++ b/datafusion/sql/src/unparser/expr.rs
@@ -82,7 +82,7 @@ pub fn sort_to_sql(sort: &Sort) -> Result<ast::OrderByExpr> {
}
const LOWEST: &BinaryOperator = &BinaryOperator::Or;
-// closest precedence we have to IS operator is BitwiseAnd (any other) in PG docs
+// Closest precedence we have to IS operator is BitwiseAnd (any other) in PG docs
// (https://www.postgresql.org/docs/7.2/sql-precedence.html)
const IS: &BinaryOperator = &BinaryOperator::BitwiseAnd;
@@ -698,7 +698,7 @@ impl Unparser<'_> {
match expr {
ast::Expr::Nested(_) | ast::Expr::Identifier(_) | ast::Expr::Value(_) => 100,
ast::Expr::BinaryOp { op, .. } => self.sql_op_precedence(op),
- // closest precedence we currently have to Between is PGLikeMatch
+ // Closest precedence we currently have to Between is PGLikeMatch
// (https://www.postgresql.org/docs/7.2/sql-precedence.html)
ast::Expr::Between { .. } => {
self.sql_op_precedence(&ast::BinaryOperator::PGLikeMatch)
@@ -1141,7 +1141,7 @@ impl Unparser<'_> {
return Ok(ast::Expr::Interval(interval));
}
- // calculate the best single interval to represent the provided days and microseconds
+ // Calculate the best single interval to represent the provided days and microseconds
let microseconds = microseconds + (days as i64 * 24 * 60 * 60 * 1_000_000);
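The folding above reduces days plus microseconds to one total before choosing a representation. A rough sketch of one way to pick a single unit (illustrative arithmetic only, not the unparser's exact logic):

    /// Fold whole days into microseconds, then pick the largest unit that
    /// divides the total evenly.
    fn best_single_interval(days: i32, microseconds: i64) -> (i64, &'static str) {
        let total_us = microseconds + days as i64 * 24 * 60 * 60 * 1_000_000;
        const UNITS: [(i64, &str); 4] = [
            (24 * 60 * 60 * 1_000_000, "DAY"),
            (60 * 60 * 1_000_000, "HOUR"),
            (60 * 1_000_000, "MINUTE"),
            (1_000_000, "SECOND"),
        ];
        for (factor, name) in UNITS {
            if total_us % factor == 0 {
                return (total_us / factor, name);
            }
        }
        (total_us, "MICROSECOND")
    }

    fn main() {
        assert_eq!(best_single_interval(1, 0), (1, "DAY"));
        assert_eq!(best_single_interval(0, 90 * 1_000_000), (90, "SECOND"));
        println!("ok");
    }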
diff --git a/datafusion/sql/src/unparser/rewrite.rs b/datafusion/sql/src/unparser/rewrite.rs
index 9b4eaca834..304a02f037 100644
--- a/datafusion/sql/src/unparser/rewrite.rs
+++ b/datafusion/sql/src/unparser/rewrite.rs
@@ -227,13 +227,13 @@ pub(super) fn subquery_alias_inner_query_and_columns(
return (plan, vec![]);
};
- // check if it's projection inside projection
+ // Check if it's projection inside projection
let Some(inner_projection) = find_projection(outer_projections.input.as_ref()) else {
return (plan, vec![]);
};
let mut columns: Vec<Ident> = vec![];
- // check if the inner projection and outer projection have a matching pattern like
+ // Check if the inner projection and outer projection have a matching pattern like
// Projection: j1.j1_id AS id
// Projection: j1.j1_id
for (i, inner_expr) in inner_projection.expr.iter().enumerate() {
@@ -241,7 +241,7 @@ pub(super) fn subquery_alias_inner_query_and_columns(
return (plan, vec![]);
};
- // inner projection schema fields store the projection name which is used in outer
+ // Inner projection schema fields store the projection name which is used in outer
// projection expr
let inner_expr_string = match inner_expr {
Expr::Column(_) => inner_expr.to_string(),
diff --git a/datafusion/sql/src/unparser/utils.rs b/datafusion/sql/src/unparser/utils.rs
index e05df8ba77..e8c4eca569 100644
--- a/datafusion/sql/src/unparser/utils.rs
+++ b/datafusion/sql/src/unparser/utils.rs
@@ -175,7 +175,7 @@ fn find_agg_expr<'a>(agg: &'a Aggregate, column: &Column) -> Result<Option<&'a E
Ordering::Less => Ok(grouping_expr.into_iter().nth(index)),
Ordering::Equal => {
internal_err!(
- "Tried to unproject column refereing to internal
grouping id"
+ "Tried to unproject column referring to internal
grouping id"
)
}
Ordering::Greater => {
diff --git a/datafusion/sql/src/utils.rs b/datafusion/sql/src/utils.rs
index d8ad964be2..787bc66343 100644
--- a/datafusion/sql/src/utils.rs
+++ b/datafusion/sql/src/utils.rs
@@ -203,7 +203,7 @@ pub(crate) fn resolve_aliases_to_exprs(
.data()
}
-/// given a slice of window expressions sharing the same sort key, find their common partition
+/// Given a slice of window expressions sharing the same sort key, find their common partition
/// keys.
pub fn window_expr_common_partition_keys(window_exprs: &[Expr]) -> Result<&[Expr]> {
let all_partition_keys = window_exprs
@@ -322,7 +322,7 @@ A full example of how the transformation works:
struct RecursiveUnnestRewriter<'a> {
input_schema: &'a DFSchemaRef,
root_expr: &'a Expr,
- // useful to detect which child expr is a part of/ not a part of unnest operation
+ // Useful to detect which child expr is a part of/ not a part of unnest operation
top_most_unnest: Option<Unnest>,
consecutive_unnest: Vec<Option<Unnest>>,
inner_projection_exprs: &'a mut Vec<Expr>,
@@ -399,14 +399,14 @@ impl<'a> RecursiveUnnestRewriter<'a> {
expr_in_unnest.clone().alias(placeholder_name.clone()),
);
- // let post_unnest_column = Column::from_name(post_unnest_name);
+ // Let post_unnest_column = Column::from_name(post_unnest_name);
let post_unnest_expr = col(post_unnest_name.clone()).alias(alias_name);
match self
.columns_unnestings
.iter_mut()
.find(|(inner_col, _)| inner_col == &placeholder_column)
{
- // there is not unnesting done on this column yet
+ // There is not unnesting done on this column yet
None => {
self.columns_unnestings.push((
Column::from_name(placeholder_name.clone()),
@@ -416,7 +416,7 @@ impl<'a> RecursiveUnnestRewriter<'a> {
}]),
));
}
- // some unnesting(at some level) has been done on this column
+ // Some unnesting(at some level) has been done on this column
// e.g select unnest(column3), unnest(unnest(column3))
Some((_, unnesting)) => match unnesting {
ColumnUnnestType::List(list) => {
@@ -512,7 +512,7 @@ impl<'a> TreeNodeRewriter for RecursiveUnnestRewriter<'a> {
if traversing_unnest == self.top_most_unnest.as_ref().unwrap() {
self.top_most_unnest = None;
}
- // find inside consecutive_unnest, the sequence of continous unnest exprs
+ // Find inside consecutive_unnest, the sequence of continous unnest exprs
// Get the latest consecutive unnest exprs
// and check if current upward traversal is the returning to the root expr
@@ -700,7 +700,7 @@ mod tests {
&mut inner_projection_exprs,
&original_expr,
)?;
- // only the bottom most unnest exprs are transformed
+ // Only the bottom most unnest exprs are transformed
assert_eq!(
transformed_exprs,
vec![col("unnest_placeholder(3d_col,depth=2)")
@@ -719,7 +719,7 @@ mod tests {
&unnest_placeholder_columns,
);
- // still reference struct_col in original schema but with alias,
+ // Still reference struct_col in original schema but with alias,
// to avoid colliding with the projection on the column itself if any
assert_eq!(
inner_projection_exprs,
@@ -751,7 +751,7 @@ mod tests {
],
&unnest_placeholder_columns,
);
- // still reference struct_col in original schema but with alias,
+ // Still reference struct_col in original schema but with alias,
// to avoid colliding with the projection on the column itself if any
assert_eq!(
inner_projection_exprs,
@@ -816,7 +816,7 @@ mod tests {
vec![("unnest_placeholder(struct_col)", "Struct")],
&unnest_placeholder_columns,
);
- // still reference struct_col in original schema but with alias,
+ // Still reference struct_col in original schema but with alias,
// to avoid colliding with the projection on the column itself if any
assert_eq!(
inner_projection_exprs,
@@ -841,7 +841,7 @@ mod tests {
],
&unnest_placeholder_columns,
);
- // only transform the unnest children
+ // Only transform the unnest children
assert_eq!(
transformed_exprs,
vec![col("unnest_placeholder(array_col,depth=1)")
@@ -849,8 +849,8 @@ mod tests {
.add(lit(1i64))]
);
- // keep appending to the current vector
- // still reference array_col in original schema but with alias,
+ // Keep appending to the current vector
+ // Still reference array_col in original schema but with alias,
// to avoid colliding with the projection on the column itself if any
assert_eq!(
inner_projection_exprs,
@@ -860,7 +860,7 @@ mod tests {
]
);
- // a nested structure struct[[]]
+ // A nested structure struct[[]]
let schema = Schema::new(vec![
Field::new(
"struct_col", // {array_col: [1,2,3]}
diff --git a/datafusion/sql/src/values.rs b/datafusion/sql/src/values.rs
index 9efb75bd60..cd33ddb3cf 100644
--- a/datafusion/sql/src/values.rs
+++ b/datafusion/sql/src/values.rs
@@ -31,7 +31,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
rows,
} = values;
- // values should not be based on any other schema
+ // Values should not be based on any other schema
let schema = DFSchema::empty();
let values = rows
.into_iter()