milenkovicm commented on code in PR #19071:
URL: https://github.com/apache/datafusion/pull/19071#discussion_r2594845994
##########
datafusion/proto/src/logical_plan/mod.rs:
##########
@@ -208,6 +218,197 @@ impl LogicalExtensionCodec for
DefaultLogicalExtensionCodec {
) -> Result<()> {
not_impl_err!("LogicalExtensionCodec is not provided")
}
+
+ fn try_decode_file_format(
+ &self,
+ buf: &[u8],
+ ctx: &TaskContext,
+ ) -> Result<Arc<dyn FileFormatFactory>> {
+ if buf.is_empty() {
+ return Ok(Arc::new(ArrowFormatFactory::new()));
+ }
+
+ if let Ok(wrapper) = FileFormatWrapper::try_decode(buf) {
+ let result = match wrapper.kind {
+ FileFormatKind::Csv => {
+ let codec = CsvLogicalExtensionCodec {};
+ codec
+ .try_decode_file_format(&wrapper.blob, ctx)
+ .map_err(|e| context!("Decoding CSV file format", e))
+ }
+ FileFormatKind::Json => {
+ let codec = JsonLogicalExtensionCodec {};
+ codec
+ .try_decode_file_format(&wrapper.blob, ctx)
+ .map_err(|e| context!("Decoding JSON file format", e))
+ }
+ #[cfg(feature = "parquet")]
+ FileFormatKind::Parquet => {
+ let codec = ParquetLogicalExtensionCodec {};
+ codec
+ .try_decode_file_format(&wrapper.blob, ctx)
+ .map_err(|e| context!("Decoding Parquet file format",
e))
+ }
+ #[cfg(not(feature = "parquet"))]
+ FileFormatKind::Parquet => {
+ internal_err!("Parquet feature is not enabled")
+ }
+ FileFormatKind::Arrow => {
+ let codec = ArrowLogicalExtensionCodec {};
+ codec
+ .try_decode_file_format(&wrapper.blob, ctx)
+ .map_err(|e| context!("Decoding Arrow file format", e))
+ }
+ };
+ if result.is_ok() {
+ return result;
+ }
+ }
+
+ fn try_decode_roundtrip(
+ ctx: &TaskContext,
+ buf: &[u8],
+ ) -> Option<Arc<dyn FileFormatFactory>> {
+ #[cfg(feature = "parquet")]
+ let candidates: &[&dyn LogicalExtensionCodec] = &[
+ &ParquetLogicalExtensionCodec {},
+ &CsvLogicalExtensionCodec {},
+ &JsonLogicalExtensionCodec {},
+ ];
+ #[cfg(not(feature = "parquet"))]
+ let candidates: &[&dyn LogicalExtensionCodec] =
+ &[&CsvLogicalExtensionCodec {}, &JsonLogicalExtensionCodec {}];
+
+ for codec in candidates {
+ if let Ok(ff) = codec.try_decode_file_format(buf, ctx) {
+ let mut re = Vec::new();
+ if codec
+ .try_encode_file_format(
+ &mut re,
+ Arc::<dyn FileFormatFactory>::clone(&ff),
+ )
+ .is_ok()
+ && re == buf
+ {
+ return Some(ff);
+ }
+ }
+ }
+ None
+ }
+
+ if let Some(ff) = try_decode_roundtrip(ctx, buf) {
+ return Ok(ff);
+ }
+
+ // If nothing matched, return a clear error rather than guessing
+ exec_err!(
+ "Unsupported FileFormatFactory bytes for
DefaultLogicalExtensionCodec ({} bytes)",
+ buf.len()
+ )
Review Comment:
What's the purpose of this code?
##########
datafusion/proto/tests/cases/roundtrip_logical_plan.rs:
##########
@@ -2838,6 +2840,153 @@ async fn roundtrip_arrow_scan() -> Result<()> {
Ok(())
}
+#[test]
+fn test_default_codec_encode_decode_all_formats() -> Result<()> {
+ let ctx = SessionContext::new();
+ let codec = DefaultLogicalExtensionCodec {};
+ let table_options =
+
TableOptions::default_from_session_config(ctx.state().config_options());
+
+ // Test CSV with custom options
+ let mut csv_format = table_options.csv.clone();
+ csv_format.delimiter = b'|';
+ csv_format.has_header = Some(true);
+ let csv_factory: Arc<dyn FileFormatFactory> =
+ Arc::new(CsvFormatFactory::new_with_options(csv_format.clone()));
+
+ // Test JSON with custom options
+ let mut json_format = table_options.json.clone();
+ json_format.compression = CompressionTypeVariant::GZIP;
+ let json_factory: Arc<dyn FileFormatFactory> =
+ Arc::new(JsonFormatFactory::new_with_options(json_format.clone()));
+
+ // Test Parquet with custom options
+ let mut parquet_format = table_options.parquet.clone();
+ parquet_format.global.bloom_filter_on_read = true;
+ let parquet_factory: Arc<dyn FileFormatFactory> = Arc::new(
+ ParquetFormatFactory::new_with_options(parquet_format.clone()),
+ );
+
+ // Test Arrow (no options)
+ let arrow_factory: Arc<dyn FileFormatFactory> =
Arc::new(ArrowFormatFactory::new());
+
+ for factory in [csv_factory, json_factory, parquet_factory, arrow_factory]
{
+ // Encode and decode
+ let mut encoded = Vec::new();
+ codec.try_encode_file_format(&mut encoded, factory.clone())?;
+ let decoded = codec.try_decode_file_format(&encoded,
ctx.task_ctx().as_ref())?;
+
+ // Verify type preservation
+ assert_eq!(
+ factory.get_ext(),
+ decoded.get_ext(),
+ "Format extension should match after roundtrip"
+ );
+ }
+
+ Ok(())
+}
+
+#[test]
+fn test_default_codec_legacy_empty_buffer() -> Result<()> {
+ let ctx = SessionContext::new();
+ let codec = DefaultLogicalExtensionCodec {};
+
+ let empty_buf: &[u8] = &[];
+ let decoded = codec.try_decode_file_format(empty_buf,
ctx.task_ctx().as_ref())?;
+
+ assert_eq!(
+ decoded.get_ext(),
+ "arrow",
+ "Empty buffer should decode to ArrowFormatFactory (extension should be
'arrow')"
+ );
+
+ // Also verify we can re-encode it and it encodes as Arrow format
+ let mut re_encoded = Vec::new();
+ codec.try_encode_file_format(&mut re_encoded, decoded.clone())?;
+
+ // Re-encoded Arrow format should decode back to Arrow
+ let re_decoded =
+ codec.try_decode_file_format(&re_encoded, ctx.task_ctx().as_ref())?;
+ assert_eq!(
+ re_decoded.get_ext(),
+ "arrow",
+ "Re-encoded Arrow format should decode back to Arrow"
+ );
+
+ Ok(())
+}
+
+#[test]
+fn test_default_codec_legacy_raw_bytes_roundtrip() -> Result<()> {
Review Comment:
This functionality was not implemented before, so how can we have a legacy
implementation?
##########
datafusion/proto/src/logical_plan/mod.rs:
##########
@@ -208,6 +218,197 @@ impl LogicalExtensionCodec for
DefaultLogicalExtensionCodec {
) -> Result<()> {
not_impl_err!("LogicalExtensionCodec is not provided")
}
+
+ fn try_decode_file_format(
+ &self,
+ buf: &[u8],
+ ctx: &TaskContext,
+ ) -> Result<Arc<dyn FileFormatFactory>> {
+ if buf.is_empty() {
+ return Ok(Arc::new(ArrowFormatFactory::new()));
+ }
+
+ if let Ok(wrapper) = FileFormatWrapper::try_decode(buf) {
+ let result = match wrapper.kind {
+ FileFormatKind::Csv => {
+ let codec = CsvLogicalExtensionCodec {};
+ codec
+ .try_decode_file_format(&wrapper.blob, ctx)
+ .map_err(|e| context!("Decoding CSV file format", e))
+ }
+ FileFormatKind::Json => {
+ let codec = JsonLogicalExtensionCodec {};
+ codec
+ .try_decode_file_format(&wrapper.blob, ctx)
+ .map_err(|e| context!("Decoding JSON file format", e))
+ }
+ #[cfg(feature = "parquet")]
+ FileFormatKind::Parquet => {
+ let codec = ParquetLogicalExtensionCodec {};
+ codec
+ .try_decode_file_format(&wrapper.blob, ctx)
+ .map_err(|e| context!("Decoding Parquet file format",
e))
+ }
+ #[cfg(not(feature = "parquet"))]
+ FileFormatKind::Parquet => {
+ internal_err!("Parquet feature is not enabled")
+ }
+ FileFormatKind::Arrow => {
+ let codec = ArrowLogicalExtensionCodec {};
+ codec
+ .try_decode_file_format(&wrapper.blob, ctx)
+ .map_err(|e| context!("Decoding Arrow file format", e))
+ }
+ };
+ if result.is_ok() {
+ return result;
+ }
+ }
+
+ fn try_decode_roundtrip(
+ ctx: &TaskContext,
+ buf: &[u8],
+ ) -> Option<Arc<dyn FileFormatFactory>> {
+ #[cfg(feature = "parquet")]
+ let candidates: &[&dyn LogicalExtensionCodec] = &[
+ &ParquetLogicalExtensionCodec {},
+ &CsvLogicalExtensionCodec {},
+ &JsonLogicalExtensionCodec {},
+ ];
+ #[cfg(not(feature = "parquet"))]
+ let candidates: &[&dyn LogicalExtensionCodec] =
+ &[&CsvLogicalExtensionCodec {}, &JsonLogicalExtensionCodec {}];
+
+ for codec in candidates {
+ if let Ok(ff) = codec.try_decode_file_format(buf, ctx) {
+ let mut re = Vec::new();
+ if codec
+ .try_encode_file_format(
+ &mut re,
+ Arc::<dyn FileFormatFactory>::clone(&ff),
+ )
+ .is_ok()
+ && re == buf
+ {
+ return Some(ff);
+ }
+ }
+ }
+ None
+ }
+
+ if let Some(ff) = try_decode_roundtrip(ctx, buf) {
+ return Ok(ff);
+ }
+
+ // If nothing matched, return a clear error rather than guessing
+ exec_err!(
+ "Unsupported FileFormatFactory bytes for
DefaultLogicalExtensionCodec ({} bytes)",
+ buf.len()
+ )
+ }
+
+ fn try_encode_file_format(
+ &self,
+ buf: &mut Vec<u8>,
+ node: Arc<dyn FileFormatFactory>,
+ ) -> Result<()> {
+ // CSV
+ if node.as_any().is::<CsvFormatFactory>() {
+ let mut blob = Vec::new();
+ CsvLogicalExtensionCodec {}
+ .try_encode_file_format(&mut blob, node)
+ .map_err(|e| context!("Encoding CSV file format", e))?;
+ return FileFormatWrapper::new(FileFormatKind::Csv,
blob).encode_into(buf);
+ }
+
+ // JSON
+ if node.as_any().is::<JsonFormatFactory>() {
+ let mut blob = Vec::new();
+ JsonLogicalExtensionCodec {}
+ .try_encode_file_format(&mut blob, node)
+ .map_err(|e| context!("Encoding JSON file format", e))?;
+ return FileFormatWrapper::new(FileFormatKind::Json,
blob).encode_into(buf);
+ }
+
+ // Parquet
+ #[cfg(feature = "parquet")]
+ if node.as_any().is::<ParquetFormatFactory>() {
+ let mut blob = Vec::new();
+ ParquetLogicalExtensionCodec {}
+ .try_encode_file_format(&mut blob, node)
+ .map_err(|e| context!("Encoding Parquet file format", e))?;
+ return FileFormatWrapper::new(FileFormatKind::Parquet, blob)
+ .encode_into(buf);
+ }
+
+ // Arrow: encode empty blob for compatibility
+ if node.as_any().is::<ArrowFormatFactory>() {
+ return FileFormatWrapper::new(FileFormatKind::Arrow, Vec::new())
+ .encode_into(buf);
+ }
+
+ not_impl_err!(
+ "Unsupported FileFormatFactory type for
DefaultLogicalExtensionCodec"
+ )
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+enum FileFormatKind {
+ Csv = 0,
+ Json = 1,
+ Parquet = 2,
+ Arrow = 3,
+}
+
+#[derive(Clone, PartialEq, ::prost::Message)]
+struct FileFormatProtoWrapper {
+ #[prost(int32, tag = "1")]
+ kind: i32,
+ #[prost(bytes, tag = "2")]
+ blob: Vec<u8>,
+}
Review Comment:
Would it make sense to have this as part of the proto file?
##########
datafusion/proto/tests/cases/roundtrip_logical_plan.rs:
##########
@@ -2838,6 +2840,153 @@ async fn roundtrip_arrow_scan() -> Result<()> {
Ok(())
}
+#[test]
+fn test_default_codec_encode_decode_all_formats() -> Result<()> {
+ let ctx = SessionContext::new();
+ let codec = DefaultLogicalExtensionCodec {};
+ let table_options =
+
TableOptions::default_from_session_config(ctx.state().config_options());
+
+ // Test CSV with custom options
+ let mut csv_format = table_options.csv.clone();
+ csv_format.delimiter = b'|';
+ csv_format.has_header = Some(true);
+ let csv_factory: Arc<dyn FileFormatFactory> =
+ Arc::new(CsvFormatFactory::new_with_options(csv_format.clone()));
+
+ // Test JSON with custom options
+ let mut json_format = table_options.json.clone();
+ json_format.compression = CompressionTypeVariant::GZIP;
Review Comment:
What's the purpose of setting this value if it is not checked later?
##########
datafusion/proto/tests/cases/roundtrip_logical_plan.rs:
##########
@@ -2838,6 +2840,153 @@ async fn roundtrip_arrow_scan() -> Result<()> {
Ok(())
}
+#[test]
+fn test_default_codec_encode_decode_all_formats() -> Result<()> {
+ let ctx = SessionContext::new();
+ let codec = DefaultLogicalExtensionCodec {};
+ let table_options =
+
TableOptions::default_from_session_config(ctx.state().config_options());
+
+ // Test CSV with custom options
+ let mut csv_format = table_options.csv.clone();
+ csv_format.delimiter = b'|';
+ csv_format.has_header = Some(true);
+ let csv_factory: Arc<dyn FileFormatFactory> =
+ Arc::new(CsvFormatFactory::new_with_options(csv_format.clone()));
+
+ // Test JSON with custom options
+ let mut json_format = table_options.json.clone();
+ json_format.compression = CompressionTypeVariant::GZIP;
+ let json_factory: Arc<dyn FileFormatFactory> =
+ Arc::new(JsonFormatFactory::new_with_options(json_format.clone()));
+
+ // Test Parquet with custom options
+ let mut parquet_format = table_options.parquet.clone();
+ parquet_format.global.bloom_filter_on_read = true;
Review Comment:
What's the purpose of setting this value if it is not checked later?
##########
datafusion/proto/tests/cases/roundtrip_logical_plan.rs:
##########
@@ -2838,6 +2840,153 @@ async fn roundtrip_arrow_scan() -> Result<()> {
Ok(())
}
+#[test]
+fn test_default_codec_encode_decode_all_formats() -> Result<()> {
+ let ctx = SessionContext::new();
+ let codec = DefaultLogicalExtensionCodec {};
+ let table_options =
+
TableOptions::default_from_session_config(ctx.state().config_options());
+
+ // Test CSV with custom options
+ let mut csv_format = table_options.csv.clone();
+ csv_format.delimiter = b'|';
+ csv_format.has_header = Some(true);
+ let csv_factory: Arc<dyn FileFormatFactory> =
+ Arc::new(CsvFormatFactory::new_with_options(csv_format.clone()));
+
+ // Test JSON with custom options
+ let mut json_format = table_options.json.clone();
+ json_format.compression = CompressionTypeVariant::GZIP;
+ let json_factory: Arc<dyn FileFormatFactory> =
+ Arc::new(JsonFormatFactory::new_with_options(json_format.clone()));
+
+ // Test Parquet with custom options
+ let mut parquet_format = table_options.parquet.clone();
+ parquet_format.global.bloom_filter_on_read = true;
+ let parquet_factory: Arc<dyn FileFormatFactory> = Arc::new(
+ ParquetFormatFactory::new_with_options(parquet_format.clone()),
+ );
+
+ // Test Arrow (no options)
+ let arrow_factory: Arc<dyn FileFormatFactory> =
Arc::new(ArrowFormatFactory::new());
+
+ for factory in [csv_factory, json_factory, parquet_factory, arrow_factory]
{
+ // Encode and decode
+ let mut encoded = Vec::new();
+ codec.try_encode_file_format(&mut encoded, factory.clone())?;
+ let decoded = codec.try_decode_file_format(&encoded,
ctx.task_ctx().as_ref())?;
+
+ // Verify type preservation
+ assert_eq!(
+ factory.get_ext(),
+ decoded.get_ext(),
+ "Format extension should match after roundtrip"
+ );
+ }
+
+ Ok(())
+}
+
+#[test]
+fn test_default_codec_legacy_empty_buffer() -> Result<()> {
Review Comment:
This functionality was not implemented before, so how can we have a legacy
implementation?
##########
datafusion/proto/tests/cases/roundtrip_logical_plan.rs:
##########
@@ -2838,6 +2840,153 @@ async fn roundtrip_arrow_scan() -> Result<()> {
Ok(())
}
+#[test]
+fn test_default_codec_encode_decode_all_formats() -> Result<()> {
+ let ctx = SessionContext::new();
+ let codec = DefaultLogicalExtensionCodec {};
+ let table_options =
+
TableOptions::default_from_session_config(ctx.state().config_options());
+
+ // Test CSV with custom options
+ let mut csv_format = table_options.csv.clone();
+ csv_format.delimiter = b'|';
+ csv_format.has_header = Some(true);
Review Comment:
What's the purpose of setting these values if they are not checked later?
##########
datafusion/proto/src/logical_plan/mod.rs:
##########
@@ -208,6 +218,197 @@ impl LogicalExtensionCodec for
DefaultLogicalExtensionCodec {
) -> Result<()> {
not_impl_err!("LogicalExtensionCodec is not provided")
}
+
+ fn try_decode_file_format(
+ &self,
+ buf: &[u8],
+ ctx: &TaskContext,
+ ) -> Result<Arc<dyn FileFormatFactory>> {
+ if buf.is_empty() {
+ return Ok(Arc::new(ArrowFormatFactory::new()));
+ }
+
+ if let Ok(wrapper) = FileFormatWrapper::try_decode(buf) {
+ let result = match wrapper.kind {
+ FileFormatKind::Csv => {
+ let codec = CsvLogicalExtensionCodec {};
+ codec
+ .try_decode_file_format(&wrapper.blob, ctx)
+ .map_err(|e| context!("Decoding CSV file format", e))
+ }
+ FileFormatKind::Json => {
+ let codec = JsonLogicalExtensionCodec {};
+ codec
+ .try_decode_file_format(&wrapper.blob, ctx)
+ .map_err(|e| context!("Decoding JSON file format", e))
+ }
+ #[cfg(feature = "parquet")]
+ FileFormatKind::Parquet => {
+ let codec = ParquetLogicalExtensionCodec {};
+ codec
+ .try_decode_file_format(&wrapper.blob, ctx)
+ .map_err(|e| context!("Decoding Parquet file format",
e))
+ }
+ #[cfg(not(feature = "parquet"))]
+ FileFormatKind::Parquet => {
+ internal_err!("Parquet feature is not enabled")
+ }
+ FileFormatKind::Arrow => {
+ let codec = ArrowLogicalExtensionCodec {};
+ codec
+ .try_decode_file_format(&wrapper.blob, ctx)
+ .map_err(|e| context!("Decoding Arrow file format", e))
+ }
+ };
+ if result.is_ok() {
+ return result;
+ }
+ }
+
+ fn try_decode_roundtrip(
+ ctx: &TaskContext,
+ buf: &[u8],
+ ) -> Option<Arc<dyn FileFormatFactory>> {
+ #[cfg(feature = "parquet")]
+ let candidates: &[&dyn LogicalExtensionCodec] = &[
+ &ParquetLogicalExtensionCodec {},
+ &CsvLogicalExtensionCodec {},
+ &JsonLogicalExtensionCodec {},
+ ];
+ #[cfg(not(feature = "parquet"))]
+ let candidates: &[&dyn LogicalExtensionCodec] =
+ &[&CsvLogicalExtensionCodec {}, &JsonLogicalExtensionCodec {}];
+
+ for codec in candidates {
+ if let Ok(ff) = codec.try_decode_file_format(buf, ctx) {
+ let mut re = Vec::new();
+ if codec
+ .try_encode_file_format(
+ &mut re,
+ Arc::<dyn FileFormatFactory>::clone(&ff),
+ )
+ .is_ok()
+ && re == buf
+ {
+ return Some(ff);
+ }
+ }
+ }
+ None
+ }
+
+ if let Some(ff) = try_decode_roundtrip(ctx, buf) {
+ return Ok(ff);
+ }
+
+ // If nothing matched, return a clear error rather than guessing
+ exec_err!(
+ "Unsupported FileFormatFactory bytes for
DefaultLogicalExtensionCodec ({} bytes)",
+ buf.len()
+ )
+ }
+
+ fn try_encode_file_format(
+ &self,
+ buf: &mut Vec<u8>,
+ node: Arc<dyn FileFormatFactory>,
+ ) -> Result<()> {
+ // CSV
+ if node.as_any().is::<CsvFormatFactory>() {
+ let mut blob = Vec::new();
+ CsvLogicalExtensionCodec {}
+ .try_encode_file_format(&mut blob, node)
+ .map_err(|e| context!("Encoding CSV file format", e))?;
+ return FileFormatWrapper::new(FileFormatKind::Csv,
blob).encode_into(buf);
+ }
+
+ // JSON
+ if node.as_any().is::<JsonFormatFactory>() {
+ let mut blob = Vec::new();
+ JsonLogicalExtensionCodec {}
+ .try_encode_file_format(&mut blob, node)
+ .map_err(|e| context!("Encoding JSON file format", e))?;
+ return FileFormatWrapper::new(FileFormatKind::Json,
blob).encode_into(buf);
+ }
+
+ // Parquet
+ #[cfg(feature = "parquet")]
+ if node.as_any().is::<ParquetFormatFactory>() {
+ let mut blob = Vec::new();
+ ParquetLogicalExtensionCodec {}
+ .try_encode_file_format(&mut blob, node)
+ .map_err(|e| context!("Encoding Parquet file format", e))?;
+ return FileFormatWrapper::new(FileFormatKind::Parquet, blob)
+ .encode_into(buf);
+ }
+
+ // Arrow: encode empty blob for compatibility
+ if node.as_any().is::<ArrowFormatFactory>() {
+ return FileFormatWrapper::new(FileFormatKind::Arrow, Vec::new())
+ .encode_into(buf);
+ }
+
+ not_impl_err!(
+ "Unsupported FileFormatFactory type for
DefaultLogicalExtensionCodec"
+ )
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+enum FileFormatKind {
+ Csv = 0,
+ Json = 1,
+ Parquet = 2,
+ Arrow = 3,
+}
+
+#[derive(Clone, PartialEq, ::prost::Message)]
+struct FileFormatProtoWrapper {
+ #[prost(int32, tag = "1")]
+ kind: i32,
+ #[prost(bytes, tag = "2")]
+ blob: Vec<u8>,
+}
+
+struct FileFormatWrapper {
+ kind: FileFormatKind,
+ blob: Vec<u8>,
+}
+
+impl FileFormatWrapper {
+ fn new(kind: FileFormatKind, blob: Vec<u8>) -> Self {
+ Self { kind, blob }
+ }
+
+ fn encode_into(self, buf: &mut Vec<u8>) -> Result<()> {
+ let proto = FileFormatProtoWrapper {
+ kind: self.kind as i32,
+ blob: self.blob,
+ };
+ proto
+ .encode(buf)
+ .map_err(|e| DataFusionError::Internal(e.to_string()))
+ }
+
+ fn try_decode(buf: &[u8]) -> Result<Self> {
+ let proto = FileFormatProtoWrapper::decode(buf)
+ .map_err(|e| DataFusionError::Internal(e.to_string()))?;
+ let kind = match proto.kind {
+ 0 => FileFormatKind::Csv,
+ 1 => FileFormatKind::Json,
+ 2 => FileFormatKind::Parquet,
+ 3 => FileFormatKind::Arrow,
+ _ => {
+ return Err(DataFusionError::Internal(
+ "Unknown file format kind".to_string(),
+ ))
+ }
+ };
+ Ok(Self {
+ kind,
+ blob: proto.blob,
+ })
+ }
Review Comment:
Do we need a structure in addition to the actual decode function?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]