Pranav Yogi Lodha created IMPALA-14960:
------------------------------------------

             Summary: TestIcebergTable.test_drop_partition fails with 
IOException
                 Key: IMPALA-14960
                 URL: https://issues.apache.org/jira/browse/IMPALA-14960
             Project: IMPALA
          Issue Type: Bug
            Reporter: Pranav Yogi Lodha


h3. Error Message

impala.error.OperationalError: Query bc4fc6d3c47516e5:28cfe6a800000000 failed: 
RuntimeIOException: Failed to write json to file: 
hdfs://localhost:20500/test-warehouse/test_drop_partition_eac0813d.db/iceberg_bucket_partitions/metadata/00012-8955d464-33bf-4201-82fb-cf8255828ce7.metadata.json
 CAUSED BY: IOException: The stream is closed
h3. Stacktrace

query_test/test_iceberg.py:1542: in test_drop_partition 
self.run_test_case('QueryTest/iceberg-drop-partition', vector, self = 
<test_iceberg.TestIcebergTable object at 0x7f43ec21f910> unique_database = 
'test_drop_partition_eac0813d' vector = "table_format: parquet/none" | 
"protocol: hs2" | "exec_option: {'abort_on_error': 1, 'batch_size': 0, 
'disable_codegen': True, 'disable_codegen_rows_threshold': 0, 
'exec_single_node_rows_threshold': 0, 'num_nodes': 0, 'test_replan': 1}" 
common/impala_test_suite.py:971: in run_test_case result = exec_fn(query, 
impalad_clients, _ImpalaTestSuite__exec_in_hive = <function 
ImpalaTestSuite.run_test_case.<locals>.__exec_in_hive at 0x7f43ec232dc0> 
_ImpalaTestSuite__exec_in_impala = <function 
ImpalaTestSuite.run_test_case.<locals>.__exec_in_impala at 0x7f43ec2324c0> 
current_error = None encoding = None exec_fn = <function 
ImpalaTestSuite.run_test_case.<locals>.__exec_in_impala at 0x7f43ec2324c0> 
exec_options = {'abort_on_error': 1, 'batch_size': 0, 'disable_codegen': True, 
'disable_codegen_rows_threshold': 0, ...} failed_count = 0 impalad_client = 
<tests.common.impala_connection.ImpylaHS2Connection object at 0x7f43ecef5490> 
impalad_clients = [<tests.common.impala_connection.ImpylaHS2Connection object 
at 0x7f43ecef5490>] lineage_log_dir = '' multiple_impalad = False protocol = 
'hs2' query = 'ALTER TABLE iceberg_bucket_partitions DROP PARTITION (bucket(5, 
bucket_timestamp) = 1)' query_section = 'ALTER TABLE iceberg_bucket_partitions 
DROP PARTITION (bucket(5, bucket_timestamp) = 1);\n' result = None result_list 
= [] rt_profile_info = None sections = [defaultdict(<class 'str'>, {'QUERY': 
'# Creating tables for each partition transform class\nCREATE TABLE 
iceberg_iden...tity_int) VALUES (\'string-comma\', 568);\nINSERT INTO 
iceberg_mixed_partitions(identity_int) VALUES (NULL);\n'}), ...] self = 
<test_iceberg.TestIcebergTable object at 0x7f43ec21f910> table_format_info = 
<tests.common.test_dimensions.TableFormatInfo object at 0x7f440a758130> 
target_feng_real_impalad_clients = [] target_impalad_clients = 
[<tests.common.impala_connection.ImpylaHS2Connection object at 0x7f43ecef5490>] 
test_file_name = 'QueryTest/iceberg-drop-partition' test_file_vars = None 
test_section = defaultdict(<class 'str'>, {'QUERY': 'ALTER TABLE 
iceberg_bucket_partitions DROP PARTITION (bucket(5, bucket_timestamp) = 1);\n', 
'RESULTS': "'Dropped 1 partition(s)'\n"}) total_count = 29 use_db = 
'test_drop_partition_eac0813d' vector = "table_format: parquet/none" | 
"protocol: hs2" | "exec_option: {'abort_on_error': 1, 'batch_size': 0, 
'disable_codegen': True, 'disable_codegen_rows_threshold': 0, 
'exec_single_node_rows_threshold': 0, 'num_nodes': 0, 'test_replan': 1}" 
common/impala_test_suite.py:886: in __exec_in_impala result = 
self.__execute_query(target_impalad_client, query, user=user) impalad_clients = 
[<tests.common.impala_connection.ImpylaHS2Connection object at 0x7f43ecef5490>] 
protocol = 'hs2' query = 'ALTER TABLE iceberg_bucket_partitions DROP PARTITION 
(bucket(5, bucket_timestamp) = 1)' query_options_changed = [] result = None 
self = <test_iceberg.TestIcebergTable object at 0x7f43ec21f910> 
set_pattern_match = None target_impalad_client = 
<tests.common.impala_connection.ImpylaHS2Connection object at 0x7f43ecef5490> 
user = None vector = "table_format: parquet/none" | "protocol: hs2" | 
"exec_option: {'abort_on_error': 1, 'batch_size': 0, 'disable_codegen': True, 
'disable_codegen_rows_threshold': 0, 'exec_single_node_rows_threshold': 0, 
'num_nodes': 0, 'test_replan': 1}" common/impala_test_suite.py:1389: in 
__execute_query return impalad_client.execute(query, user=user) cls = <class 
'test_iceberg.TestIcebergTable'> impalad_client = 
<tests.common.impala_connection.ImpylaHS2Connection object at 0x7f43ecef5490> 
query = 'ALTER TABLE iceberg_bucket_partitions DROP PARTITION (bucket(5, 
bucket_timestamp) = 1)' query_options = None user = None 
common/impala_connection.py:692: in execute cursor.execute(sql_stmt, 
configuration=self.__query_options) cursor = 
<impala.hiveserver2.HiveServer2Cursor object at 0x7f43cfb84c40> 
fetch_exec_summary = False fetch_profile_after_close = False profile_format = 0 
result = None same_user = True self = 
<tests.common.impala_connection.ImpylaHS2Connection object at 0x7f43ecef5490> 
sql_stmt = 'ALTER TABLE iceberg_bucket_partitions DROP PARTITION (bucket(5, 
bucket_timestamp) = 1)' user = None 
/data/jenkins/workspace/impala-cdwh-2025.0.21.2-exhaustive-release/repos/Impala/infra/python/env-gcc10.4.0-py3/lib/python3.8/site-packages/impala/hiveserver2.py:394:
 in execute self._wait_to_finish() # make execute synchronous configuration = 
{'abort_on_error': '1', 'batch_size': '0', 'client_identifier': 
'query_test/test_iceberg.py::TestIcebergTable::test_dr...True;\'disable_codegen_rows_threshold\':0;\'exec_single_node_rows_threshold\':0;\'num',
 'disable_codegen': 'True', ...} operation = 'ALTER TABLE 
iceberg_bucket_partitions DROP PARTITION (bucket(5, bucket_timestamp) = 1)' 
parameters = None self = <impala.hiveserver2.HiveServer2Cursor object at 
0x7f43cfb84c40> 
/data/jenkins/workspace/impala-cdwh-2025.0.21.2-exhaustive-release/repos/Impala/infra/python/env-gcc10.4.0-py3/lib/python3.8/site-packages/impala/hiveserver2.py:484:
 in _wait_to_finish raise OperationalError(resp.errorMessage) E 
impala.error.OperationalError: Query bc4fc6d3c47516e5:28cfe6a800000000 failed: 
E RuntimeIOException: Failed to write json to file: 
hdfs://localhost:20500/test-warehouse/test_drop_partition_eac0813d.db/iceberg_bucket_partitions/metadata/00012-8955d464-33bf-4201-82fb-cf8255828ce7.metadata.json
 E CAUSED BY: IOException: The stream is closed loop_start = 1777980680.9962335 
operation_state = 'ERROR_STATE' req = 
TGetOperationStatusReq(operationHandle=TOperationHandle(operationId=THandleIdentifier(guid=b'\xe5\x16u\xc4\xd3\xc6O\xb...t=b'w\x8c\x96^\xf1\xf2C5\xbb\xd8\xbd\x93\x86\xca\xa8\x8f'),
 operationType=0, hasResultSet=True, modifiedRowCount=None)) resp = 
TGetOperationStatusResp(status=TStatus(statusCode=0, infoMessages=None, 
sqlState=None, errorCode=None, 
errorMessage=No...55d464-33bf-4201-82fb-cf8255828ce7.metadata.json\nCAUSED BY: 
IOException: The stream is closed\n\n', hasResultSet=None) rpc_time = 
0.002516508102416992 self = <impala.hiveserver2.HiveServer2Cursor object at 
0x7f43cfb84c40> sleep_time = 0.05 start_rpc_time = 1777980681.1468263



--
This message was sent by Atlassian Jira
(v8.20.10#820010)

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to