openinx opened a new issue #2137:
URL: https://github.com/apache/iceberg/issues/2137


   After #1936 was committed, Travis CI reported a Hive connection leak 
issue in the Spark module: 
   
   ```
   > Task :iceberg-spark3-extensions:test
   
   org.apache.iceberg.spark.extensions.TestCopyOnWriteDelete > 
testDeleteWithSerializableIsolation[catalogName = spark_catalog, implementation 
= org.apache.iceberg.spark.SparkSessionCatalog, config = {type=hive, 
default-namespace=default, clients=1, parquet-enabled=false, 
cache-enabled=false}, format = avro, vectorized = false] FAILED
       java.lang.RuntimeException: Failed to get table info from metastore 
default.table
   
           Caused by:
           org.apache.thrift.transport.TTransportException: 
java.net.SocketException: Broken pipe (Write failed)
   
               Caused by:
               java.net.SocketException: Broken pipe (Write failed)
   
   org.apache.iceberg.spark.extensions.TestCopyOnWriteDelete > 
testDeleteFromEmptyTable[catalogName = spark_catalog, implementation = 
org.apache.iceberg.spark.SparkSessionCatalog, config = {type=hive, 
default-namespace=default, clients=1, parquet-enabled=false, 
cache-enabled=false}, format = avro, vectorized = false] FAILED
       java.lang.RuntimeException: Failed to get table info from metastore 
default.table
   
           Caused by:
           org.apache.thrift.transport.TTransportException: 
java.net.SocketException: Broken pipe (Write failed)
   
               Caused by:
               java.net.SocketException: Broken pipe (Write failed)
   
   org.apache.iceberg.spark.extensions.TestCopyOnWriteDelete > 
testExplain[catalogName = spark_catalog, implementation = 
org.apache.iceberg.spark.SparkSessionCatalog, config = {type=hive, 
default-namespace=default, clients=1, parquet-enabled=false, 
cache-enabled=false}, format = avro, vectorized = false] FAILED
       java.lang.RuntimeException: Failed to get table info from metastore 
default.table
   
           Caused by:
           org.apache.thrift.transport.TTransportException: 
java.net.SocketException: Broken pipe (Write failed)
   
               Caused by:
               java.net.SocketException: Broken pipe (Write failed)
   
   org.apache.iceberg.spark.extensions.TestCopyOnWriteDelete > 
testDeleteWithDynamicFileFiltering[catalogName = spark_catalog, implementation 
= org.apache.iceberg.spark.SparkSessionCatalog, config = {type=hive, 
default-namespace=default, clients=1, parquet-enabled=false, 
cache-enabled=false}, format = avro, vectorized = false] FAILED
       java.lang.RuntimeException: Failed to get table info from metastore 
default.table
   
           Caused by:
           org.apache.thrift.transport.TTransportException: 
java.net.SocketException: Broken pipe (Write failed)
   
               Caused by:
               java.net.SocketException: Broken pipe (Write failed)
   
   org.apache.iceberg.spark.extensions.TestCopyOnWriteDelete > 
testDeleteWithSnapshotIsolation[catalogName = spark_catalog, implementation = 
org.apache.iceberg.spark.SparkSessionCatalog, config = {type=hive, 
default-namespace=default, clients=1, parquet-enabled=false, 
cache-enabled=false}, format = avro, vectorized = false] FAILED
       java.lang.RuntimeException: Failed to get table info from metastore 
default.table
   
           Caused by:
           org.apache.thrift.transport.TTransportException: 
java.net.SocketException: Broken pipe (Write failed)
   
               Caused by:
               java.net.SocketException: Broken pipe (Write failed)
   
   org.apache.iceberg.spark.extensions.TestCopyOnWriteDelete > 
testDeleteWithNonDeterministicCondition[catalogName = spark_catalog, 
implementation = org.apache.iceberg.spark.SparkSessionCatalog, config = 
{type=hive, default-namespace=default, clients=1, parquet-enabled=false, 
cache-enabled=false}, format = avro, vectorized = false] FAILED
       java.lang.RuntimeException: Failed to get table info from metastore 
default.table
   
           Caused by:
           org.apache.thrift.transport.TTransportException: 
java.net.SocketException: Broken pipe (Write failed)
   
               Caused by:
               java.net.SocketException: Broken pipe (Write failed)
   
   org.apache.iceberg.spark.extensions.TestCopyOnWriteDelete > 
testDeleteUsingMetadataWithComplexCondition[catalogName = spark_catalog, 
implementation = org.apache.iceberg.spark.SparkSessionCatalog, config = 
{type=hive, default-namespace=default, clients=1, parquet-enabled=false, 
cache-enabled=false}, format = avro, vectorized = false] FAILED
       java.lang.RuntimeException: Failed to get table info from metastore 
default.table
   
           Caused by:
           org.apache.thrift.transport.TTransportException: 
java.net.SocketException: Broken pipe (Write failed)
   
               Caused by:
               java.net.SocketException: Broken pipe (Write failed)
   
   org.apache.iceberg.spark.extensions.TestCopyOnWriteDelete > 
testDeleteThatRequiresGroupingBeforeWrite[catalogName = spark_catalog, 
implementation = org.apache.iceberg.spark.SparkSessionCatalog, config = 
{type=hive, default-namespace=default, clients=1, parquet-enabled=false, 
cache-enabled=false}, format = avro, vectorized = false] FAILED
       java.lang.RuntimeException: Failed to get table info from metastore 
default.table
   
           Caused by:
           org.apache.thrift.transport.TTransportException: 
java.net.SocketException: Broken pipe (Write failed)
   
               Caused by:
               java.net.SocketException: Broken pipe (Write failed)
   
   org.apache.iceberg.spark.extensions.TestCopyOnWriteDelete > 
testDeleteWithConditionOnNestedColumn[catalogName = spark_catalog, 
implementation = org.apache.iceberg.spark.SparkSessionCatalog, config = 
{type=hive, default-namespace=default, clients=1, parquet-enabled=false, 
cache-enabled=false}, format = avro, vectorized = false] FAILED
       java.lang.RuntimeException: Failed to get table info from metastore 
default.table
   
           Caused by:
           org.apache.thrift.transport.TTransportException: 
java.net.SocketException: Broken pipe (Write failed)
   
               Caused by:
               java.net.SocketException: Broken pipe (Write failed)
   
   org.apache.iceberg.spark.extensions.TestCopyOnWriteDelete > 
testDeleteWithInSubquery[catalogName = spark_catalog, implementation = 
org.apache.iceberg.spark.SparkSessionCatalog, config = {type=hive, 
default-namespace=default, clients=1, parquet-enabled=false, 
cache-enabled=false}, format = avro, vectorized = false] FAILED
       java.lang.RuntimeException: Failed to get table info from metastore 
default.table
   
           Caused by:
           org.apache.thrift.transport.TTransportException: 
java.net.SocketException: Broken pipe (Write failed)
   
               Caused by:
               java.net.SocketException: Broken pipe (Write failed)
   
   org.apache.iceberg.spark.extensions.TestCopyOnWriteDelete > 
testDeleteWithNullConditions[catalogName = spark_catalog, implementation = 
org.apache.iceberg.spark.SparkSessionCatalog, config = {type=hive, 
default-namespace=default, clients=1, parquet-enabled=false, 
cache-enabled=false}, format = avro, vectorized = false] FAILED
       java.lang.RuntimeException: Failed to get table info from metastore 
default.table
   
           Caused by:
           org.apache.thrift.transport.TTransportException: 
java.net.SocketException: Broken pipe (Write failed)
   
               Caused by:
               java.net.SocketException: Broken pipe (Write failed)
   
   org.apache.iceberg.spark.extensions.TestCopyOnWriteDelete > 
testDeleteWithNotExistsSubquery[catalogName = spark_catalog, implementation = 
org.apache.iceberg.spark.SparkSessionCatalog, config = {type=hive, 
default-namespace=default, clients=1, parquet-enabled=false, 
cache-enabled=false}, format = avro, vectorized = false] FAILED
       java.lang.RuntimeException: Failed to get table info from metastore 
default.table
   
           Caused by:
           org.apache.thrift.transport.TTransportException: 
java.net.SocketException: Broken pipe (Write failed)
   
               Caused by:
               java.net.SocketException: Broken pipe (Write failed)
   
   org.apache.iceberg.spark.extensions.TestCopyOnWriteDelete > 
testDeleteWithNotInSubqueryNotSupported[catalogName = spark_catalog, 
implementation = org.apache.iceberg.spark.SparkSessionCatalog, config = 
{type=hive, default-namespace=default, clients=1, parquet-enabled=false, 
cache-enabled=false}, format = avro, vectorized = false] FAILED
       java.lang.RuntimeException: Failed to get table info from metastore 
default.table
   
           Caused by:
           org.apache.thrift.transport.TTransportException: 
java.net.SocketException: Broken pipe (Write failed)
   
               Caused by:
               java.net.SocketException: Broken pipe (Write failed)
   
   org.apache.iceberg.spark.extensions.TestCopyOnWriteDelete > 
testDeleteWithExistSubquery[catalogName = spark_catalog, implementation = 
org.apache.iceberg.spark.SparkSessionCatalog, config = {type=hive, 
default-namespace=default, clients=1, parquet-enabled=false, 
cache-enabled=false}, format = avro, vectorized = false] FAILED
       java.lang.RuntimeException: Failed to get table info from metastore 
default.table
   
           Caused by:
           org.apache.thrift.transport.TTransportException: 
java.net.SocketException: Broken pipe (Write failed)
   
               Caused by:
               java.net.SocketException: Broken pipe (Write failed)
   
   org.apache.iceberg.spark.extensions.TestCopyOnWriteDelete > 
testDeleteWithAlias[catalogName = spark_catalog, implementation = 
org.apache.iceberg.spark.SparkSessionCatalog, config = {type=hive, 
default-namespace=default, clients=1, parquet-enabled=false, 
cache-enabled=false}, format = avro, vectorized = false] FAILED
       java.lang.RuntimeException: Failed to get table info from metastore 
default.table
   
           Caused by:
           org.apache.thrift.transport.TTransportException: 
java.net.SocketException: Broken pipe (Write failed)
   
               Caused by:
               java.net.SocketException: Broken pipe (Write failed)
   
   org.apache.iceberg.spark.extensions.TestCopyOnWriteDelete > 
testDeleteWithoutCondition[catalogName = spark_catalog, implementation = 
org.apache.iceberg.spark.SparkSessionCatalog, config = {type=hive, 
default-namespace=default, clients=1, parquet-enabled=false, 
cache-enabled=false}, format = avro, vectorized = false] FAILED
       java.lang.RuntimeException: Failed to get table info from metastore 
default.table
   
           Caused by:
           org.apache.thrift.transport.TTransportException: 
java.net.SocketException: Broken pipe (Write failed)
   
               Caused by:
               java.net.SocketException: Broken pipe (Write failed)
   
   org.apache.iceberg.spark.extensions.TestCopyOnWriteDelete > 
testDeleteWithArbitraryPartitionPredicates[catalogName = spark_catalog, 
implementation = org.apache.iceberg.spark.SparkSessionCatalog, config = 
{type=hive, default-namespace=default, clients=1, parquet-enabled=false, 
cache-enabled=false}, format = avro, vectorized = false] FAILED
       java.lang.RuntimeException: Failed to get table info from metastore 
default.table
   
           Caused by:
           org.apache.thrift.transport.TTransportException: 
java.net.SocketException: Broken pipe (Write failed)
   
               Caused by:
               java.net.SocketException: Broken pipe (Write failed)
   
   org.apache.iceberg.spark.extensions.TestCopyOnWriteDelete > 
testDeleteWithScalarSubquery[catalogName = spark_catalog, implementation = 
org.apache.iceberg.spark.SparkSessionCatalog, config = {type=hive, 
default-namespace=default, clients=1, parquet-enabled=false, 
cache-enabled=false}, format = avro, vectorized = false] FAILED
       java.lang.RuntimeException: Failed to get table info from metastore 
default.table
   
           Caused by:
           org.apache.thrift.transport.TTransportException: 
java.net.SocketException: Broken pipe (Write failed)
   
               Caused by:
               java.net.SocketException: Broken pipe (Write failed)
   
   org.apache.iceberg.spark.extensions.TestCopyOnWriteDelete > 
testDeleteWithFoldableConditions[catalogName = spark_catalog, implementation = 
org.apache.iceberg.spark.SparkSessionCatalog, config = {type=hive, 
default-namespace=default, clients=1, parquet-enabled=false, 
cache-enabled=false}, format = avro, vectorized = false] FAILED
       java.lang.RuntimeException: Failed to get table info from metastore 
default.table
   
           Caused by:
           org.apache.thrift.transport.TTransportException: 
java.net.SocketException: Broken pipe (Write failed)
   
               Caused by:
               java.net.SocketException: Broken pipe (Write failed)
   
   org.apache.iceberg.spark.extensions.TestCopyOnWriteDelete > 
testDeleteNonExistingRecords[catalogName = spark_catalog, implementation = 
org.apache.iceberg.spark.SparkSessionCatalog, config = {type=hive, 
default-namespace=default, clients=1, parquet-enabled=false, 
cache-enabled=false}, format = avro, vectorized = false] FAILED
       java.lang.RuntimeException: Failed to get table info from metastore 
default.table
   
           Caused by:
           org.apache.thrift.transport.TTransportException: 
java.net.SocketException: Broken pipe (Write failed)
   
               Caused by:
               java.net.SocketException: Broken pipe (Write failed)
   
   org.apache.iceberg.spark.extensions.TestCopyOnWriteDelete > 
testDeleteWithMultiColumnInSubquery[catalogName = spark_catalog, implementation 
= org.apache.iceberg.spark.SparkSessionCatalog, config = {type=hive, 
default-namespace=default, clients=1, parquet-enabled=false, 
cache-enabled=false}, format = avro, vectorized = false] FAILED
       java.lang.RuntimeException: Failed to get table info from metastore 
default.table
   
           Caused by:
           org.apache.thrift.transport.TTransportException: 
java.net.SocketException: Broken pipe (Write failed)
   
               Caused by:
   [Thread-9] INFO org.apache.spark.util.ShutdownHookManager - Shutdown hook 
called
               java.net.SocketException: Broken pipe (Write failed)
   [Thread-9] INFO org.apache.spark.util.ShutdownHookManager - Deleting 
directory /tmp/spark-49ed0823-ab45-4f5a-ae40-ace956931708
   
   323 tests completed, 21 failed, 17 skipped
   ```


----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]



---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to