comphead commented on code in PR #18553: URL: https://github.com/apache/datafusion/pull/18553#discussion_r2507052142
##########
datafusion-examples/examples/custom_data_source/main.rs:
##########
@@ -0,0 +1,121 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+//! # These examples are all related to extending or defining how DataFusion reads data
+//!
+//! These examples demonstrate how DataFusion reads data.
+//!
+//! Each subcommand runs a corresponding example:
+//! - `csv_json_opener` — use low level FileOpener APIs to read CSV/JSON into Arrow RecordBatches
+//! - `csv_sql_streaming` — build and run a streaming query plan from a SQL statement against a local CSV file
+//! - `custom_datasource` — run queries against a custom datasource (TableProvider)
+//! - `custom_file_casts` — implement custom casting rules to adapt file schemas
+//! - `custom_file_format` — write data to a custom file format
+//! - `file_stream_provider` — run a query on FileStreamProvider which implements StreamProvider for reading and writing to arbitrary stream sources/sinks
+
+mod csv_json_opener;
+mod csv_sql_streaming;
+mod custom_datasource;
+mod custom_file_casts;
+mod custom_file_format;
+mod file_stream_provider;
+
+use std::str::FromStr;
+
+use datafusion::error::{DataFusionError, Result};
+
+enum ExampleKind {
+    CsvJsonOpener,
+    CsvSqlStreaming,
+    CustomDatasource,
+    CustomFileCasts,
+    CustomFileFormat,
+    FileStreamProvider,
+}
+
+impl AsRef<str> for ExampleKind {
+    fn as_ref(&self) -> &str {
+        match self {
+            Self::CsvJsonOpener => "csv_json_opener",
+            Self::CsvSqlStreaming => "csv_sql_streaming",
+            Self::CustomDatasource => "custom_datasource",
+            Self::CustomFileCasts => "custom_file_casts",
+            Self::CustomFileFormat => "custom_file_format",
+            Self::FileStreamProvider => "file_stream_provider",
+        }
+    }
+}
+
+impl FromStr for ExampleKind {
+    type Err = DataFusionError;
+
+    fn from_str(s: &str) -> Result<Self> {
+        match s {
+            "csv_json_opener" => Ok(Self::CsvJsonOpener),
+            "csv_sql_streaming" => Ok(Self::CsvSqlStreaming),
+            "custom_datasource" => Ok(Self::CustomDatasource),
+            "custom_file_casts" => Ok(Self::CustomFileCasts),
+            "custom_file_format" => Ok(Self::CustomFileFormat),
+            "file_stream_provider" => Ok(Self::FileStreamProvider),
+            _ => Err(DataFusionError::Execution(format!("Unknown example: {s}"))),
+        }
+    }
+}
+
+impl ExampleKind {
+    const ALL: [Self; 6] = [
+        Self::CsvJsonOpener,
+        Self::CsvSqlStreaming,
+        Self::CustomDatasource,
+        Self::CustomFileCasts,
+        Self::CustomFileFormat,
+        Self::FileStreamProvider,
+    ];
+
+    const EXAMPLE_NAME: &str = "custom_data_source";
+
+    fn variants() -> Vec<&'static str> {
+        Self::ALL.iter().map(|x| x.as_ref()).collect()
+    }
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {

Review Comment:
   Thanks @cj-zhukov, it would be awesome if the example overview also showed the cargo command line for running a specific example, or all of the examples.
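For context, here is a minimal sketch of how the dispatching `main` and the suggested cargo invocation might look. Only `ExampleKind`, `EXAMPLE_NAME`, and `variants()` come from the diff above; the per-module entry points (e.g. `csv_json_opener::main()`) and the exact error wording are illustrative assumptions, not the PR's actual code.

```rust
// Sketch only: wires the subcommand argument to ExampleKind and prints a
// usage line showing how to invoke the example binary with cargo.
#[tokio::main]
async fn main() -> Result<()> {
    // Shown when no subcommand is given, e.g.
    //   cargo run --example custom_data_source -- custom_datasource
    let usage = format!(
        "Usage: cargo run --example {} -- [{}]",
        ExampleKind::EXAMPLE_NAME,
        ExampleKind::variants().join("|")
    );

    // First CLI argument selects which example to run
    let arg = std::env::args()
        .nth(1)
        .ok_or_else(|| DataFusionError::Execution(usage))?;

    // The FromStr impl above turns the subcommand string into an ExampleKind
    match arg.parse::<ExampleKind>()? {
        // Per-module entry points below are hypothetical names for illustration
        ExampleKind::CsvJsonOpener => csv_json_opener::main().await?,
        ExampleKind::CsvSqlStreaming => csv_sql_streaming::main().await?,
        ExampleKind::CustomDatasource => custom_datasource::main().await?,
        ExampleKind::CustomFileCasts => custom_file_casts::main().await?,
        ExampleKind::CustomFileFormat => custom_file_format::main().await?,
        ExampleKind::FileStreamProvider => file_stream_provider::main().await?,
    }

    Ok(())
}
```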
