rluvaton opened a new issue, #1867:
URL: https://github.com/apache/datafusion-comet/issues/1867
### Describe the bug
When I try to write to a Delta table with the Comet extension enabled, the native code panics.
(I have also occasionally hit the same panic when reading.)
```
25/06/08 20:01:28 ERROR Executor: Exception in task 0.0 in stage 1.0 (TID 14)
org.apache.comet.CometNativeException: StructBuilder (Schema { fields: [Field { name: "storageType", data_type: Utf8, nullable: true, dict_id: 0, dict_is_ordered: false, metadata: {} }, Field { name: "pathOrInlineDv", data_type: Utf8, nullable: true, dict_id: 0, dict_is_ordered: false, metadata: {} }, Field { name: "offset", data_type: Int32, nullable: true, dict_id: 0, dict_is_ordered: false, metadata: {} }, Field { name: "sizeInBytes", data_type: Int32, nullable: false, dict_id: 0, dict_is_ordered: false, metadata: {} }, Field { name: "cardinality", data_type: Int64, nullable: false, dict_id: 0, dict_is_ordered: false, metadata: {} }, Field { name: "maxRowIndex", data_type: Int64, nullable: true, dict_id: 0, dict_is_ordered: false, metadata: {} }], metadata: {} }) and field_builder with index 0 (Utf8) are of unequal lengths: (1 != 0).
        at std::backtrace::Backtrace::create(__internal__:0)
        at comet::errors::init::{{closure}}(__internal__:0)
        at std::panicking::rust_panic_with_hook(__internal__:0)
        at std::panicking::begin_panic_handler::{{closure}}(__internal__:0)
        at std::sys::backtrace::__rust_end_short_backtrace(__internal__:0)
        at _rust_begin_unwind(__internal__:0)
        at core::panicking::panic_fmt(__internal__:0)
        at core::panicking::panic_display(__internal__:0)
        at arrow_array::builder::struct_builder::StructBuilder::validate_content::{{closure}}::panic_cold_display(__internal__:0)
        at arrow_array::builder::struct_builder::StructBuilder::validate_content::{{closure}}(__internal__:0)
        at core::iter::traits::iterator::Iterator::for_each::call::{{closure}}(__internal__:0)
        at <core::iter::adapters::enumerate::Enumerate<I> as core::iter::traits::iterator::Iterator>::fold::enumerate::{{closure}}(__internal__:0)
        at <core::slice::iter::Iter<T> as core::iter::traits::iterator::Iterator>::fold(__internal__:0)
        at <core::iter::adapters::enumerate::Enumerate<I> as core::iter::traits::iterator::Iterator>::fold(__internal__:0)
        at core::iter::traits::iterator::Iterator::for_each(__internal__:0)
        at arrow_array::builder::struct_builder::StructBuilder::validate_content(__internal__:0)
        at arrow_array::builder::struct_builder::StructBuilder::finish(__internal__:0)
        at <arrow_array::builder::struct_builder::StructBuilder as arrow_array::builder::ArrayBuilder>::finish(__internal__:0)
        at <alloc::boxed::Box<dyn arrow_array::builder::ArrayBuilder> as arrow_array::builder::ArrayBuilder>::finish(__internal__:0)
        at arrow_array::builder::struct_builder::StructBuilder::finish::{{closure}}(__internal__:0)
        at core::iter::adapters::map::map_fold::{{closure}}(__internal__:0)
        at <core::slice::iter::IterMut<T> as core::iter::traits::iterator::Iterator>::fold(__internal__:0)
        at <core::iter::adapters::map::Map<I,F> as core::iter::traits::iterator::Iterator>::fold(__internal__:0)
        at core::iter::traits::iterator::Iterator::for_each(__internal__:0)
        at alloc::vec::Vec<T,A>::extend_trusted(__internal__:0)
        at <alloc::vec::Vec<T,A> as alloc::vec::spec_extend::SpecExtend<T,I>>::spec_extend(__internal__:0)
        at <alloc::vec::Vec<T> as alloc::vec::spec_from_iter_nested::SpecFromIterNested<T,I>>::from_iter(__internal__:0)
        at <alloc::vec::Vec<T> as alloc::vec::spec_from_iter::SpecFromIter<T,I>>::from_iter(__internal__:0)
        at <alloc::vec::Vec<T> as core::iter::traits::collect::FromIterator<T>>::from_iter(__internal__:0)
        at core::iter::traits::iterator::Iterator::collect(__internal__:0)
        at arrow_array::builder::struct_builder::StructBuilder::finish(__internal__:0)
        at <arrow_array::builder::struct_builder::StructBuilder as arrow_array::builder::ArrayBuilder>::finish(__internal__:0)
        at <alloc::boxed::Box<dyn arrow_array::builder::ArrayBuilder> as arrow_array::builder::ArrayBuilder>::finish(__internal__:0)
        at comet::execution::shuffle::row::builder_to_array(__internal__:0)
        at comet::execution::shuffle::row::process_sorted_row_partition::{{closure}}(__internal__:0)
        at core::iter::adapters::map::map_try_fold::{{closure}}(__internal__:0)
        at core::iter::traits::iterator::Iterator::try_fold(__internal__:0)
        at <core::iter::adapters::map::Map<I,F> as core::iter::traits::iterator::Iterator>::try_fold(__internal__:0)
        at <core::iter::adapters::GenericShunt<I,R> as core::iter::traits::iterator::Iterator>::try_fold(__internal__:0)
        at <core::iter::adapters::GenericShunt<I,R> as core::iter::traits::iterator::Iterator>::next(__internal__:0)
        at alloc::vec::Vec<T,A>::extend_desugared(__internal__:0)
        at <alloc::vec::Vec<T,A> as alloc::vec::spec_extend::SpecExtend<T,I>>::spec_extend(__internal__:0)
        at <alloc::vec::Vec<T> as alloc::vec::spec_from_iter_nested::SpecFromIterNested<T,I>>::from_iter(__internal__:0)
        at <alloc::vec::Vec<T> as alloc::vec::spec_from_iter::SpecFromIter<T,I>>::from_iter(__internal__:0)
        at <alloc::vec::Vec<T> as core::iter::traits::collect::FromIterator<T>>::from_iter(__internal__:0)
        at <core::result::Result<V,E> as core::iter::traits::collect::FromIterator<core::result::Result<A,E>>>::from_iter::{{closure}}(__internal__:0)
        at core::iter::adapters::try_process(__internal__:0)
        at <core::result::Result<V,E> as core::iter::traits::collect::FromIterator<core::result::Result<A,E>>>::from_iter(__internal__:0)
        at core::iter::traits::iterator::Iterator::collect(__internal__:0)
        at comet::execution::shuffle::row::process_sorted_row_partition(__internal__:0)
        at comet::execution::jni_api::Java_org_apache_comet_Native_writeSortedFileNative::{{closure}}(__internal__:0)
        at comet::errors::curry::{{closure}}(__internal__:0)
        at std::panicking::try::do_call(__internal__:0)
        at ___rust_try(__internal__:0)
        at std::panic::catch_unwind(__internal__:0)
        at comet::errors::try_unwrap_or_throw(__internal__:0)
        at _Java_org_apache_comet_Native_writeSortedFileNative(__internal__:0)
        at org.apache.comet.Native.writeSortedFileNative(Native Method)
        at org.apache.spark.sql.comet.execution.shuffle.SpillWriter.doSpilling(SpillWriter.java:187)
        at org.apache.spark.sql.comet.execution.shuffle.CometDiskBlockWriter$ArrowIPCWriter.doSpilling(CometDiskBlockWriter.java:405)
        at org.apache.spark.sql.comet.execution.shuffle.CometDiskBlockWriter.close(CometDiskBlockWriter.java:308)
        at org.apache.spark.sql.comet.execution.shuffle.CometBypassMergeSortShuffleWriter.write(CometBypassMergeSortShuffleWriter.java:222)
        at org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59)
        at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:104)
        at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:54)
        at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:166)
        at org.apache.spark.scheduler.Task.run(Task.scala:141)
        at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)
        at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)
        at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)
        at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)
        at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
        at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
        at java.base/java.lang.Thread.run(Thread.java:829)
```
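For context on what the panic means: `StructBuilder::finish` in arrow-rs validates that every child field builder has the same length as the struct builder itself, and panics with this "unequal lengths" message when a struct slot has been appended without a corresponding value (or null) being appended to each child builder. The schema in the message is the Delta log's `deletionVector` struct, so the mismatch seems to happen while Comet converts those rows to Arrow during the shuffle spill (`comet::execution::shuffle::row::builder_to_array` in the trace). Below is a minimal standalone sketch of the arrow-rs behavior, not Comet code, just to illustrate the failure mode:

```rust
// Minimal sketch against the arrow-rs API (arrow-array / arrow-schema crates);
// this is NOT Comet's code, it only reproduces the same class of panic.
use arrow_array::builder::StructBuilder;
use arrow_schema::{DataType, Field};

fn main() {
    // A struct with a single nullable Utf8 child, like "storageType" above.
    let mut builder = StructBuilder::from_fields(
        vec![Field::new("storageType", DataType::Utf8, true)],
        1,
    );

    // Append a struct-level slot without appending anything to the child
    // Utf8 builder: the struct now has length 1, the child has length 0.
    builder.append(true);

    // finish() validates the child lengths and panics with a message like
    // "StructBuilder ... and field_builder with index 0 (Utf8) are of
    // unequal lengths: (1 != 0)".
    let _ = builder.finish();
}
```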
### Steps to reproduce
Run the following:
```console
spark-shell \
  --jars $COMET_JAR \
  --packages io.delta:delta-spark_2.12:3.3.1 \
  --conf "spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension" \
  --conf "spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog" \
  --conf spark.driver.extraClassPath=$COMET_JAR \
  --conf spark.executor.extraClassPath=$COMET_JAR \
  --conf spark.plugins=org.apache.spark.CometPlugin \
  --conf spark.shuffle.manager=org.apache.spark.sql.comet.execution.shuffle.CometShuffleManager \
  --conf spark.comet.explainFallback.enabled=true \
  --conf spark.comet.exec.shuffle.fallbackToColumnar=true \
  --conf spark.comet.convert.parquet.enabled=true \
  --conf spark.comet.sparkToColumnar.enabled=true \
  --conf spark.comet.convert.json.enabled=true \
  --conf spark.memory.offHeap.enabled=true \
  --conf spark.memory.offHeap.size=16g
```
and then in the shell:
```scala
spark.range(1, 5).write.format("delta").save("/tmp/delta-table")
```
### Expected behavior
The write should complete successfully without a native panic.
### Additional context
I'm using Spark 3.5.3 and Comet 0.8.0.
<details><summary>All the logs</summary>
```console
$ /tmp export COMET_JAR=/tmp/comet-spark-spark3.5_2.12-0.8.0.jar
$ /tmp spark-shell \
--jars $COMET_JAR \
--packages io.delta:delta-spark_2.12:3.3.1 --conf
"spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension" --conf
"spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog"
--conf spark.driver.extraClassPath=$COMET_JAR \
--conf spark.executor.extraClassPath=$COMET_JAR \
--conf spark.plugins=org.apache.spark.CometPlugin \
--conf
spark.shuffle.manager=org.apache.spark.sql.comet.execution.shuffle.CometShuffleManager
\
--conf spark.comet.explainFallback.enabled=true --conf
spark.comet.exec.shuffle.fallbackToColumnar=true --conf
park.comet.convert.parquet.enabled=true --conf
spark.comet.sparkToColumnar.enabled=true --conf
spark.comet.convert.json.enabled=true \
--conf spark.memory.offHeap.enabled=true \
--conf spark.memory.offHeap.size=16g
Warning: Ignoring non-Spark config property:
park.comet.convert.parquet.enabled
25/06/08 20:01:17 WARN Utils: Your hostname, Razs-MacBook-Pro.local resolves
to a loopback address: 127.0.0.1; using 192.168.1.63 instead (on interface en0)
25/06/08 20:01:17 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
another address
:: loading settings :: url =
jar:file:/Users/rluvaton/.sdkman/candidates/spark/3.5.3/jars/ivy-2.5.1.jar!/org/apache/ivy/core/settings/ivysettings.xml
Ivy Default Cache set to: /Users/rluvaton/.ivy2/cache
The jars for the packages stored in: /Users/rluvaton/.ivy2/jars
io.delta#delta-spark_2.12 added as a dependency
:: resolving dependencies ::
org.apache.spark#spark-submit-parent-d4d882d2-41a0-403b-89ac-af49bfa49bd3;1.0
confs: [default]
found io.delta#delta-spark_2.12;3.3.1 in central
found io.delta#delta-storage;3.3.1 in central
found org.antlr#antlr4-runtime;4.9.3 in local-m2-cache
:: resolution report :: resolve 73ms :: artifacts dl 2ms
:: modules in use:
io.delta#delta-spark_2.12;3.3.1 from central in [default]
io.delta#delta-storage;3.3.1 from central in [default]
org.antlr#antlr4-runtime;4.9.3 from local-m2-cache in [default]
---------------------------------------------------------------------
| | modules || artifacts |
| conf | number| search|dwnlded|evicted|| number|dwnlded|
---------------------------------------------------------------------
| default | 3 | 0 | 0 | 0 || 3 | 0 |
---------------------------------------------------------------------
:: retrieving ::
org.apache.spark#spark-submit-parent-d4d882d2-41a0-403b-89ac-af49bfa49bd3
confs: [default]
0 artifacts copied, 3 already retrieved (0kB/2ms)
25/06/08 20:01:17 WARN NativeCodeLoader: Unable to load native-hadoop
library for your platform... using builtin-java classes where applicable
Setting default log level to "WARN".
To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use
setLogLevel(newLevel).
Spark context Web UI available at http://192.168.1.63:4040
Spark context available as 'sc' (master = local[*], app id =
local-1749402079854).
Spark session available as 'spark'.
Welcome to
____ __
/ __/__ ___ _____/ /__
_\ \/ _ \/ _ `/ __/ '_/
/___/ .__/\_,_/_/ /_/\_\ version 3.5.3
/_/
Using Scala version 2.12.18 (OpenJDK 64-Bit Server VM, Java 11.0.14.1)
Type in expressions to have them evaluated.
Type :help for more information.
scala> spark.range(1, 5).write.format("delta").save("/tmp/delta-table")
25/06/08 20:01:25 INFO core/src/lib.rs: Comet native library version 0.8.0
initialized
25/06/08 20:01:25 WARN CometSparkSessionExtensions$CometExecRule: Comet
cannot execute some parts of this plan natively (set
spark.comet.explainFallback.enabled=false to disable this logging):
Execute SaveIntoDataSourceCommand [COMET: Execute SaveIntoDataSourceCommand
is not supported]
+- SaveIntoDataSourceCommand
+- Range
25/06/08 20:01:27 WARN CometSparkSessionExtensions$CometExecRule: Comet
cannot execute some parts of this plan natively (set
spark.comet.explainFallback.enabled=false to disable this logging):
SerializeFromObject [COMET: SerializeFromObject is not supported]
+- MapElements [COMET: MapElements is not supported]
+- DeserializeToObject [COMET: DeserializeToObject is not supported]
+- Project [COMET: Project is not native because the following
children are not native (Filter)]
+- Filter [COMET: Filter is not native because the following
children are not native (Scan ExistingRDD)]
+- Scan ExistingRDD [COMET: Scan ExistingRDD is not supported]
25/06/08 20:01:27 WARN CometSparkSessionExtensions$CometExecRule: Comet
cannot execute some parts of this plan natively (set
spark.comet.explainFallback.enabled=false to disable this logging):
SerializeFromObject [COMET: SerializeFromObject is not supported]
+- MapPartitions [COMET: MapPartitions is not supported]
+- DeserializeToObject [COMET: DeserializeToObject is not supported]
+- CometProject
+- CometSort
+- CometSinkPlaceHolder [COMET: Exchange is not native because
the following children are not native (Project)]
+- CometColumnarExchange
+- Project [COMET: Project is not native because the
following children are not native (Scan json )]
+- Scan json [COMET: Unsupported file format JSON,
Unsupported schema
StructType(StructField(txn,StructType(StructField(appId,StringType,true),StructField(version,LongType,false),StructField(lastUpdated,LongType,true)),true),StructField(add,StructType(StructField(path,StringType,true),StructField(partitionValues,MapType(StringType,StringType,true),true),StructField(size,LongType,false),StructField(modificationTime,LongType,false),StructField(dataChange,BooleanType,false),StructField(stats,StringType,true),StructField(tags,MapType(StringType,StringType,true),true),StructField(deletionVector,StructType(StructField(storageType,StringType,true),StructField(pathOrInlineDv,StringType,true),StructField(offset,IntegerType,true),StructField(sizeInBytes,IntegerType,false),StructField(cardinality,LongType,false),StructField(maxRowIndex,LongType,true)),true),StructField(baseRowId,LongType,true),StructField(defaultRowCommitVersion,LongType,true),StructField(clusteringPro
vider,StringType,true)),true),StructField(remove,StructType(StructField(path,StringType,true),StructField(deletionTimestamp,LongType,true),StructField(dataChange,BooleanType,false),StructField(extendedFileMetadata,BooleanType,true),StructField(partitionValues,MapType(StringType,StringType,true),true),StructField(size,LongType,true),StructField(tags,MapType(StringType,StringType,true),true),StructField(deletionVector,StructType(StructField(storageType,StringType,true),StructField(pathOrInlineDv,StringType,true),StructField(offset,IntegerType,true),StructField(sizeInBytes,IntegerType,false),StructField(cardinality,LongType,false),StructField(maxRowIndex,LongType,true)),true),StructField(baseRowId,LongType,true),StructField(defaultRowCommitVersion,LongType,true),StructField(stats,StringType,true)),true),StructField(metaData,StructType(StructField(id,StringType,true),StructField(name,StringType,true),StructField(description,StringType,true),StructField(format,StructType(StructField(prov
ider,StringType,true),StructField(options,MapType(StringType,StringType,true),true)),true),StructField(schemaString,StringType,true),StructField(partitionColumns,ArrayType(StringType,true),true),StructField(configuration,MapType(StringType,StringType,true),true),StructField(createdTime,LongType,true)),true),StructField(protocol,StructType(StructField(minReaderVersion,IntegerType,false),StructField(minWriterVersion,IntegerType,false),StructField(readerFeatures,ArrayType(StringType,true),true),StructField(writerFeatures,ArrayType(StringType,true),true)),true),StructField(cdc,StructType(StructField(path,StringType,true),StructField(partitionValues,MapType(StringType,StringType,true),true),StructField(size,LongType,false),StructField(tags,MapType(StringType,StringType,true),true)),true),StructField(checkpointMetadata,StructType(StructField(version,LongType,false),StructField(tags,MapType(StringType,StringType,true),true)),true),StructField(sidecar,StructType(StructField(path,StringType,
true),StructField(sizeInBytes,LongType,false),StructField(modificationTime,LongType,false),StructField(tags,MapType(StringType,StringType,true),true)),true),StructField(domainMetadata,StructType(StructField(domain,StringType,true),StructField(configuration,StringType,true),StructField(removed,BooleanType,false)),true),StructField(commitInfo,StructType(StructField(version,LongType,true),StructField(inCommitTimestamp,LongType,true),StructField(timestamp,TimestampType,true),StructField(userId,StringType,true),StructField(userName,StringType,true),StructField(operation,StringType,true),StructField(operationParameters,MapType(StringType,StringType,true),true),StructField(job,StructType(StructField(jobId,StringType,true),StructField(jobName,StringType,true),StructField(jobRunId,StringType,true),StructField(runId,StringType,true),StructField(jobOwnerId,StringType,true),StructField(triggerType,StringType,true)),true),StructField(notebook,StructType(StructField(notebookId,StringType,true)),t
rue),StructField(clusterId,StringType,true),StructField(readVersion,LongType,true),StructField(isolationLevel,StringType,true),StructField(isBlindAppend,BooleanType,true),StructField(operationMetrics,MapType(StringType,StringType,true),true),StructField(userMetadata,StringType,true),StructField(tags,MapType(StringType,StringType,true),true),StructField(engineInfo,StringType,true),StructField(txnId,StringType,true)),true))
for native_comet, Scan json is not supported]
25/06/08 20:01:27 WARN CometSparkSessionExtensions$CometExecRule: Comet
cannot execute some parts of this plan natively (set
spark.comet.explainFallback.enabled=false to disable this logging):
SerializeFromObject [COMET: SerializeFromObject is not supported]
+- MapPartitions [COMET: MapPartitions is not supported]
+- DeserializeToObject [COMET: DeserializeToObject is not supported]
+- CometProject
+- CometSort
+- CometColumnarExchange [COMET: Exchange is not native because
the following children are not native (Project)]
+- Project [COMET: Project is not native because the
following children are not native (Scan json )]
+- Scan json [COMET: Unsupported file format JSON,
Unsupported schema
StructType(StructField(txn,StructType(StructField(appId,StringType,true),StructField(version,LongType,false),StructField(lastUpdated,LongType,true)),true),StructField(add,StructType(StructField(path,StringType,true),StructField(partitionValues,MapType(StringType,StringType,true),true),StructField(size,LongType,false),StructField(modificationTime,LongType,false),StructField(dataChange,BooleanType,false),StructField(stats,StringType,true),StructField(tags,MapType(StringType,StringType,true),true),StructField(deletionVector,StructType(StructField(storageType,StringType,true),StructField(pathOrInlineDv,StringType,true),StructField(offset,IntegerType,true),StructField(sizeInBytes,IntegerType,false),StructField(cardinality,LongType,false),StructField(maxRowIndex,LongType,true)),true),StructField(baseRowId,LongType,true),StructField(defaultRowCommitVersion,LongType,true),StructField(clusteringProvid
er,StringType,true)),true),StructField(remove,StructType(StructField(path,StringType,true),StructField(deletionTimestamp,LongType,true),StructField(dataChange,BooleanType,false),StructField(extendedFileMetadata,BooleanType,true),StructField(partitionValues,MapType(StringType,StringType,true),true),StructField(size,LongType,true),StructField(tags,MapType(StringType,StringType,true),true),StructField(deletionVector,StructType(StructField(storageType,StringType,true),StructField(pathOrInlineDv,StringType,true),StructField(offset,IntegerType,true),StructField(sizeInBytes,IntegerType,false),StructField(cardinality,LongType,false),StructField(maxRowIndex,LongType,true)),true),StructField(baseRowId,LongType,true),StructField(defaultRowCommitVersion,LongType,true),StructField(stats,StringType,true)),true),StructField(metaData,StructType(StructField(id,StringType,true),StructField(name,StringType,true),StructField(description,StringType,true),StructField(format,StructType(StructField(provide
r,StringType,true),StructField(options,MapType(StringType,StringType,true),true)),true),StructField(schemaString,StringType,true),StructField(partitionColumns,ArrayType(StringType,true),true),StructField(configuration,MapType(StringType,StringType,true),true),StructField(createdTime,LongType,true)),true),StructField(protocol,StructType(StructField(minReaderVersion,IntegerType,false),StructField(minWriterVersion,IntegerType,false),StructField(readerFeatures,ArrayType(StringType,true),true),StructField(writerFeatures,ArrayType(StringType,true),true)),true),StructField(cdc,StructType(StructField(path,StringType,true),StructField(partitionValues,MapType(StringType,StringType,true),true),StructField(size,LongType,false),StructField(tags,MapType(StringType,StringType,true),true)),true),StructField(checkpointMetadata,StructType(StructField(version,LongType,false),StructField(tags,MapType(StringType,StringType,true),true)),true),StructField(sidecar,StructType(StructField(path,StringType,tru
e),StructField(sizeInBytes,LongType,false),StructField(modificationTime,LongType,false),StructField(tags,MapType(StringType,StringType,true),true)),true),StructField(domainMetadata,StructType(StructField(domain,StringType,true),StructField(configuration,StringType,true),StructField(removed,BooleanType,false)),true),StructField(commitInfo,StructType(StructField(version,LongType,true),StructField(inCommitTimestamp,LongType,true),StructField(timestamp,TimestampType,true),StructField(userId,StringType,true),StructField(userName,StringType,true),StructField(operation,StringType,true),StructField(operationParameters,MapType(StringType,StringType,true),true),StructField(job,StructType(StructField(jobId,StringType,true),StructField(jobName,StringType,true),StructField(jobRunId,StringType,true),StructField(runId,StringType,true),StructField(jobOwnerId,StringType,true),StructField(triggerType,StringType,true)),true),StructField(notebook,StructType(StructField(notebookId,StringType,true)),true
),StructField(clusterId,StringType,true),StructField(readVersion,LongType,true),StructField(isolationLevel,StringType,true),StructField(isBlindAppend,BooleanType,true),StructField(operationMetrics,MapType(StringType,StringType,true),true),StructField(userMetadata,StringType,true),StructField(tags,MapType(StringType,StringType,true),true),StructField(engineInfo,StringType,true),StructField(txnId,StringType,true)),true))
for native_comet, Scan json is not supported]
25/06/08 20:01:27 WARN SparkStringUtils: Truncated the string representation
of a plan since it was too large. This behavior can be adjusted by setting
'spark.sql.debug.maxToStringFields'.
25/06/08 20:01:27 WARN CometSparkSessionExtensions$CometExecRule: Comet
cannot execute some parts of this plan natively (set
spark.comet.explainFallback.enabled=false to disable this logging):
CometColumnarExchange [COMET: Exchange is not native because the following
children are not native (Project)]
+- Project [COMET: Project is not native because the following children are
not native (Scan json )]
+- Scan json [COMET: Unsupported file format JSON, Unsupported schema
StructType(StructField(txn,StructType(StructField(appId,StringType,true),StructField(version,LongType,false),StructField(lastUpdated,LongType,true)),true),StructField(add,StructType(StructField(path,StringType,true),StructField(partitionValues,MapType(StringType,StringType,true),true),StructField(size,LongType,false),StructField(modificationTime,LongType,false),StructField(dataChange,BooleanType,false),StructField(stats,StringType,true),StructField(tags,MapType(StringType,StringType,true),true),StructField(deletionVector,StructType(StructField(storageType,StringType,true),StructField(pathOrInlineDv,StringType,true),StructField(offset,IntegerType,true),StructField(sizeInBytes,IntegerType,false),StructField(cardinality,LongType,false),StructField(maxRowIndex,LongType,true)),true),StructField(baseRowId,LongType,true),StructField(defaultRowCommitVersion,LongType,true),StructField(clusteringProvider,StringType,t
rue)),true),StructField(remove,StructType(StructField(path,StringType,true),StructField(deletionTimestamp,LongType,true),StructField(dataChange,BooleanType,false),StructField(extendedFileMetadata,BooleanType,true),StructField(partitionValues,MapType(StringType,StringType,true),true),StructField(size,LongType,true),StructField(tags,MapType(StringType,StringType,true),true),StructField(deletionVector,StructType(StructField(storageType,StringType,true),StructField(pathOrInlineDv,StringType,true),StructField(offset,IntegerType,true),StructField(sizeInBytes,IntegerType,false),StructField(cardinality,LongType,false),StructField(maxRowIndex,LongType,true)),true),StructField(baseRowId,LongType,true),StructField(defaultRowCommitVersion,LongType,true),StructField(stats,StringType,true)),true),StructField(metaData,StructType(StructField(id,StringType,true),StructField(name,StringType,true),StructField(description,StringType,true),StructField(format,StructType(StructField(provider,StringType,tr
ue),StructField(options,MapType(StringType,StringType,true),true)),true),StructField(schemaString,StringType,true),StructField(partitionColumns,ArrayType(StringType,true),true),StructField(configuration,MapType(StringType,StringType,true),true),StructField(createdTime,LongType,true)),true),StructField(protocol,StructType(StructField(minReaderVersion,IntegerType,false),StructField(minWriterVersion,IntegerType,false),StructField(readerFeatures,ArrayType(StringType,true),true),StructField(writerFeatures,ArrayType(StringType,true),true)),true),StructField(cdc,StructType(StructField(path,StringType,true),StructField(partitionValues,MapType(StringType,StringType,true),true),StructField(size,LongType,false),StructField(tags,MapType(StringType,StringType,true),true)),true),StructField(checkpointMetadata,StructType(StructField(version,LongType,false),StructField(tags,MapType(StringType,StringType,true),true)),true),StructField(sidecar,StructType(StructField(path,StringType,true),StructField(
sizeInBytes,LongType,false),StructField(modificationTime,LongType,false),StructField(tags,MapType(StringType,StringType,true),true)),true),StructField(domainMetadata,StructType(StructField(domain,StringType,true),StructField(configuration,StringType,true),StructField(removed,BooleanType,false)),true),StructField(commitInfo,StructType(StructField(version,LongType,true),StructField(inCommitTimestamp,LongType,true),StructField(timestamp,TimestampType,true),StructField(userId,StringType,true),StructField(userName,StringType,true),StructField(operation,StringType,true),StructField(operationParameters,MapType(StringType,StringType,true),true),StructField(job,StructType(StructField(jobId,StringType,true),StructField(jobName,StringType,true),StructField(jobRunId,StringType,true),StructField(runId,StringType,true),StructField(jobOwnerId,StringType,true),StructField(triggerType,StringType,true)),true),StructField(notebook,StructType(StructField(notebookId,StringType,true)),true),StructField(c
lusterId,StringType,true),StructField(readVersion,LongType,true),StructField(isolationLevel,StringType,true),StructField(isBlindAppend,BooleanType,true),StructField(operationMetrics,MapType(StringType,StringType,true),true),StructField(userMetadata,StringType,true),StructField(tags,MapType(StringType,StringType,true),true),StructField(engineInfo,StringType,true),StructField(txnId,StringType,true)),true))
for native_comet, Scan json is not supported]
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/10/temp_shuffle_87c686fa-c4f0-4bb0-ac05-247f93c12dc2
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/3e/temp_shuffle_394e89f9-941f-4165-998e-fd449293dc0c
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/19/temp_shuffle_0af43176-5af2-4b65-a716-4136f6d31dee
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/2a/temp_shuffle_bd037044-6f16-4cb0-90ef-a0a025ab370a
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/12/temp_shuffle_97e29524-a3c0-4a2e-a173-9c6621e2548f
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/20/temp_shuffle_01a9e621-ddba-4354-a3f8-7db8a124dc90
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/23/temp_shuffle_b3648463-dd4a-4035-9dec-e5e0ab656205
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/0b/temp_shuffle_9760cd6f-f2cb-47ed-9c5c-bf2856df33e4
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/33/temp_shuffle_985481ff-5036-440f-b4f2-8088494031d8
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/23/temp_shuffle_85483bf7-177e-4388-9208-040c7989fc9c
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/3c/temp_shuffle_479b9ecd-a549-4971-9f9e-98cf1d2515c4
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/04/temp_shuffle_7f8f710f-b525-4465-9ecb-fde962938682
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/18/temp_shuffle_194b9b7c-a473-4117-8048-40b3f306376f
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/26/temp_shuffle_61b048e1-eeeb-4fc4-b590-5d9f4416c028
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/1b/temp_shuffle_fec9d399-69fe-4263-a469-e9dbb97a8992
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/38/temp_shuffle_161eced1-b556-4041-bc9b-06f748e0e3aa
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/31/temp_shuffle_078c741b-240d-42a4-9f87-9d1bb7c2b333
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/25/temp_shuffle_42a923bd-f8e5-4f22-b130-412aa0c7ca99
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/39/temp_shuffle_63643777-44e5-4f63-9d20-05b265dcab32
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/23/temp_shuffle_686247b5-50a3-4939-96e6-a33560c632e3
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/2a/temp_shuffle_48c8ed68-e0cf-449e-b050-439b561b5abf
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/1a/temp_shuffle_45d674d9-1e4c-46f1-a3d8-c59f3f83fbcf
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/1d/temp_shuffle_a730f8e8-eb4c-4e81-811d-f5deb62e0b32
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/3b/temp_shuffle_c1624870-fb72-4fc0-8870-18832bc2474d
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/04/temp_shuffle_ffb0c879-f4bb-423a-b6f6-23a435999018
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/31/temp_shuffle_92fe6342-94a6-4fd0-9a27-96710e76b20a
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/32/temp_shuffle_b79d5557-3058-495e-8fcc-b3bf34c8b5b8
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/1a/temp_shuffle_6a1aba2c-d79e-4b06-86c4-e71cb1a32e00
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/08/temp_shuffle_6b4041a6-fcd4-4b66-a01a-9e4bfe71e38b
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/3d/temp_shuffle_48cb6161-d711-4977-8f4e-1c04e6004d7f
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/1a/temp_shuffle_f06acc88-b321-4eb0-b894-cc7e1c00bb35
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/1f/temp_shuffle_f7902417-bfa1-4e7d-9223-7cb3313d68dd
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/32/temp_shuffle_ea950ca9-aa6e-4b87-a511-5c00ef363214
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/32/temp_shuffle_8835a754-27e9-49f4-b6ba-be700cfd3e04
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/3a/temp_shuffle_7afaaa65-ec3c-422e-abe6-13a76e9b100a
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/2a/temp_shuffle_0ae83e65-75f4-4151-9319-906647af5692
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/28/temp_shuffle_72e51b86-c597-4812-86e5-8522d3afaf4d
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/14/temp_shuffle_7365e46b-6784-406f-8cce-b97f7b9683ce
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/22/temp_shuffle_1d93d55d-dee0-4422-b6dd-2d1fb6f7a4ef
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/3a/temp_shuffle_2b43f068-e833-4cd8-a5b5-f03e98b81a15
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/2d/temp_shuffle_b7835617-1c98-4d52-bd34-a558e4bba3b6
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/3d/temp_shuffle_b08c3361-5393-4312-8b58-58643acfe7cb
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/37/temp_shuffle_e2736ed3-1872-4985-9277-fc214b1067dd
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/2f/temp_shuffle_a7f1f255-b1ac-41ca-864b-1a73ca9f2af3
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/2d/temp_shuffle_cc098a26-9d46-45a4-979a-35ee700cbadb
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/3f/temp_shuffle_c098f8db-ea7b-43f0-9b1f-4219eb6d3156
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/1f/temp_shuffle_8e214ae9-a929-43ee-a73d-010a5c08cd43
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/14/temp_shuffle_5c760682-d862-47c5-adaf-fe461e95effe
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/3a/temp_shuffle_ae1d0a8b-c9fd-4b3f-b587-8e207b08ee7a
25/06/08 20:01:28 ERROR CometBypassMergeSortShuffleWriter: Error while
deleting file
/private/var/folders/hl/h6xbb3cj4wq_qyqyt790s5kw0000gn/T/blockmgr-269e31d6-bb56-4cf4-a75e-b169a9e72de0/17/temp_shuffle_10698b4c-11d3-4459-bc65-b03ca6263288
25/06/08 20:01:28 ERROR Executor: Exception in task 0.0 in stage 1.0 (TID 14)
org.apache.comet.CometNativeException: StructBuilder (Schema { fields:
[Field { name: "storageType", data_type: Utf8, nullable: true, dict_id: 0,
dict_is_ordered: false, metadata: {} }, Field { name: "pathOrInlineDv",
data_type: Utf8, nullable: true, dict_id: 0, dict_is_ordered: false, metadata:
{} }, Field { name: "offset", data_type: Int32, nullable: true, dict_id: 0,
dict_is_ordered: false, metadata: {} }, Field { name: "sizeInBytes", data_type:
Int32, nullable: false, dict_id: 0, dict_is_ordered: false, metadata: {} },
Field { name: "cardinality", data_type: Int64, nullable: false, dict_id: 0,
dict_is_ordered: false, metadata: {} }, Field { name: "maxRowIndex", data_type:
Int64, nullable: true, dict_id: 0, dict_is_ordered: false, metadata: {} }],
metadata: {} }) and field_builder with index 0 (Utf8) are of unequal lengths:
(1 != 0).
at std::backtrace::Backtrace::create(__internal__:0)
at comet::errors::init::{{closure}}(__internal__:0)
at std::panicking::rust_panic_with_hook(__internal__:0)
at std::panicking::begin_panic_handler::{{closure}}(__internal__:0)
at std::sys::backtrace::__rust_end_short_backtrace(__internal__:0)
at _rust_begin_unwind(__internal__:0)
at core::panicking::panic_fmt(__internal__:0)
at core::panicking::panic_display(__internal__:0)
at
arrow_array::builder::struct_builder::StructBuilder::validate_content::{{closure}}::panic_cold_display(__internal__:0)
at
arrow_array::builder::struct_builder::StructBuilder::validate_content::{{closure}}(__internal__:0)
at
core::iter::traits::iterator::Iterator::for_each::call::{{closure}}(__internal__:0)
at <core::iter::adapters::enumerate::Enumerate<I> as
core::iter::traits::iterator::Iterator>::fold::enumerate::{{closure}}(__internal__:0)
at <core::slice::iter::Iter<T> as
core::iter::traits::iterator::Iterator>::fold(__internal__:0)
at <core::iter::adapters::enumerate::Enumerate<I> as
core::iter::traits::iterator::Iterator>::fold(__internal__:0)
at core::iter::traits::iterator::Iterator::for_each(__internal__:0)
at
arrow_array::builder::struct_builder::StructBuilder::validate_content(__internal__:0)
at
arrow_array::builder::struct_builder::StructBuilder::finish(__internal__:0)
at <arrow_array::builder::struct_builder::StructBuilder as
arrow_array::builder::ArrayBuilder>::finish(__internal__:0)
at <alloc::boxed::Box<dyn arrow_array::builder::ArrayBuilder> as
arrow_array::builder::ArrayBuilder>::finish(__internal__:0)
at
arrow_array::builder::struct_builder::StructBuilder::finish::{{closure}}(__internal__:0)
at core::iter::adapters::map::map_fold::{{closure}}(__internal__:0)
at <core::slice::iter::IterMut<T> as
core::iter::traits::iterator::Iterator>::fold(__internal__:0)
at <core::iter::adapters::map::Map<I,F> as
core::iter::traits::iterator::Iterator>::fold(__internal__:0)
at core::iter::traits::iterator::Iterator::for_each(__internal__:0)
at alloc::vec::Vec<T,A>::extend_trusted(__internal__:0)
at <alloc::vec::Vec<T,A> as
alloc::vec::spec_extend::SpecExtend<T,I>>::spec_extend(__internal__:0)
at <alloc::vec::Vec<T> as
alloc::vec::spec_from_iter_nested::SpecFromIterNested<T,I>>::from_iter(__internal__:0)
at <alloc::vec::Vec<T> as
alloc::vec::spec_from_iter::SpecFromIter<T,I>>::from_iter(__internal__:0)
at <alloc::vec::Vec<T> as
core::iter::traits::collect::FromIterator<T>>::from_iter(__internal__:0)
at core::iter::traits::iterator::Iterator::collect(__internal__:0)
at
arrow_array::builder::struct_builder::StructBuilder::finish(__internal__:0)
at <arrow_array::builder::struct_builder::StructBuilder as
arrow_array::builder::ArrayBuilder>::finish(__internal__:0)
at <alloc::boxed::Box<dyn arrow_array::builder::ArrayBuilder> as
arrow_array::builder::ArrayBuilder>::finish(__internal__:0)
at comet::execution::shuffle::row::builder_to_array(__internal__:0)
at
comet::execution::shuffle::row::process_sorted_row_partition::{{closure}}(__internal__:0)
at
core::iter::adapters::map::map_try_fold::{{closure}}(__internal__:0)
at core::iter::traits::iterator::Iterator::try_fold(__internal__:0)
at <core::iter::adapters::map::Map<I,F> as
core::iter::traits::iterator::Iterator>::try_fold(__internal__:0)
at <core::iter::adapters::GenericShunt<I,R> as
core::iter::traits::iterator::Iterator>::try_fold(__internal__:0)
at <core::iter::adapters::GenericShunt<I,R> as
core::iter::traits::iterator::Iterator>::next(__internal__:0)
at alloc::vec::Vec<T,A>::extend_desugared(__internal__:0)
at <alloc::vec::Vec<T,A> as
alloc::vec::spec_extend::SpecExtend<T,I>>::spec_extend(__internal__:0)
at <alloc::vec::Vec<T> as
alloc::vec::spec_from_iter_nested::SpecFromIterNested<T,I>>::from_iter(__internal__:0)
at <alloc::vec::Vec<T> as
alloc::vec::spec_from_iter::SpecFromIter<T,I>>::from_iter(__internal__:0)
at <alloc::vec::Vec<T> as
core::iter::traits::collect::FromIterator<T>>::from_iter(__internal__:0)
at <core::result::Result<V,E> as
core::iter::traits::collect::FromIterator<core::result::Result<A,E>>>::from_iter::{{closure}}(__internal__:0)
at core::iter::adapters::try_process(__internal__:0)
at <core::result::Result<V,E> as
core::iter::traits::collect::FromIterator<core::result::Result<A,E>>>::from_iter(__internal__:0)
at core::iter::traits::iterator::Iterator::collect(__internal__:0)
at
comet::execution::shuffle::row::process_sorted_row_partition(__internal__:0)
at
comet::execution::jni_api::Java_org_apache_comet_Native_writeSortedFileNative::{{closure}}(__internal__:0)
at comet::errors::curry::{{closure}}(__internal__:0)
at std::panicking::try::do_call(__internal__:0)
at ___rust_try(__internal__:0)
at std::panic::catch_unwind(__internal__:0)
at comet::errors::try_unwrap_or_throw(__internal__:0)
at
_Java_org_apache_comet_Native_writeSortedFileNative(__internal__:0)
at org.apache.comet.Native.writeSortedFileNative(Native Method)
at
org.apache.spark.sql.comet.execution.shuffle.SpillWriter.doSpilling(SpillWriter.java:187)
at
org.apache.spark.sql.comet.execution.shuffle.CometDiskBlockWriter$ArrowIPCWriter.doSpilling(CometDiskBlockWriter.java:405)
at
org.apache.spark.sql.comet.execution.shuffle.CometDiskBlockWriter.close(CometDiskBlockWriter.java:308)
at
org.apache.spark.sql.comet.execution.shuffle.CometBypassMergeSortShuffleWriter.write(CometBypassMergeSortShuffleWriter.java:222)
at
org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59)
at
org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:104)
at
org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:54)
at
org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:166)
at org.apache.spark.scheduler.Task.run(Task.scala:141)
at
org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)
at
org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)
at
org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)
at
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
at java.base/java.lang.Thread.run(Thread.java:829)
```
</details>