[
https://issues.apache.org/jira/browse/IMPALA-10633?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17313716#comment-17313716
]
Quanlong Huang commented on IMPALA-10633:
-----------------------------------------
The corresponding stack trace appears in catalogd.INFO:
{code:java}
I0401 02:52:49.257333 7178 TableLoadingMgr.java:71] Loading metadata for
table: test_avro_schema_resolution_83f84c2b.alltypesagg_alter_avro_name_comment
I0401 02:52:49.257416 10488 TableLoader.java:60] Loading metadata for:
test_avro_schema_resolution_83f84c2b.alltypesagg_alter_avro_name_comment
(background load)
I0401 02:52:49.257501 7178 TableLoadingMgr.java:73] Remaining items in queue:
0. Loads in progress: 1
I0401 02:52:49.265799 10488 HdfsTable.java:1161] Loading metadata for table
definition and all partition(s) of
test_avro_schema_resolution_83f84c2b.alltypesagg_alter_avro_name_comment
(background load)
W0401 02:52:49.292006 88410 DataStreamer.java:826] DataStreamer Exception
Java exception follows:
java.nio.channels.ClosedByInterruptException
at
java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:202)
at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:477)
at
org.apache.hadoop.net.SocketOutputStream$Writer.performIO(SocketOutputStream.java:63)
at
org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:142)
at
org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:159)
at
org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:117)
at
java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:82)
at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:140)
at java.io.DataOutputStream.flush(DataOutputStream.java:123)
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:775)
I0401 02:52:49.292676 10488 HdfsTable.java:2684] Load Valid Write Id List Done.
Time taken: 5.667us
I0401 02:52:49.292732 10488 HdfsTable.java:1200] Fetching partition metadata
from the Metastore:
test_avro_schema_resolution_83f84c2b.alltypesagg_alter_avro_name_comment
I0401 02:52:49.294183 10488 HdfsTable.java:1207] Fetched partition metadata
from the Metastore:
test_avro_schema_resolution_83f84c2b.alltypesagg_alter_avro_name_comment
I0401 02:52:49.297397 39067 jni-util.cc:286]
org.apache.impala.common.ImpalaRuntimeException: Error making 'createTable' RPC
to Hive Metastore:
at
org.apache.impala.service.CatalogOpExecutor.createIcebergTable(CatalogOpExecutor.java:2821)
at
org.apache.impala.service.CatalogOpExecutor.createTable(CatalogOpExecutor.java:2447)
at
org.apache.impala.service.CatalogOpExecutor.execDdlRequest(CatalogOpExecutor.java:397)
at org.apache.impala.service.JniCatalog.execDdl(JniCatalog.java:171)
Caused by: org.apache.iceberg.exceptions.RuntimeIOException: Failed to write
json to file:
hdfs://localhost:20500/test-warehouse/test_show_create_table_8b557a01.db/iceberg_nullable_test/metadata/8dbfe7dd-0770-40b4-a111-3e55cdce2eda.metadata.json
at
org.apache.iceberg.TableMetadataParser.internalWrite(TableMetadataParser.java:124)
at
org.apache.iceberg.TableMetadataParser.write(TableMetadataParser.java:110)
at
org.apache.iceberg.hadoop.HadoopTableOperations.commit(HadoopTableOperations.java:148)
at org.apache.iceberg.hadoop.HadoopTables.create(HadoopTables.java:145)
at org.apache.iceberg.Tables.create(Tables.java:41)
at
org.apache.impala.catalog.iceberg.IcebergHadoopTables.createTable(IcebergHadoopTables.java:66)
at
org.apache.impala.service.IcebergCatalogOpExecutor.createTable(IcebergCatalogOpExecutor.java:75)
at
org.apache.impala.service.CatalogOpExecutor.createIcebergTable(CatalogOpExecutor.java:2759)
... 3 more
Caused by: java.io.IOException: The stream is closed
at
org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:118)
at
java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:82)
at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:140)
at java.io.DataOutputStream.flush(DataOutputStream.java:123)
at java.io.FilterOutputStream.close(FilterOutputStream.java:158)
at
org.apache.hadoop.hdfs.DataStreamer.closeStream(DataStreamer.java:1000)
at
org.apache.hadoop.hdfs.DataStreamer.closeInternal(DataStreamer.java:848)
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:843)
Suppressed: java.io.IOException: The stream is closed
at
org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:118)
at
java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:82)
at
java.io.BufferedOutputStream.flush(BufferedOutputStream.java:140)
at java.io.FilterOutputStream.close(FilterOutputStream.java:158)
at java.io.FilterOutputStream.close(FilterOutputStream.java:159)
... 3 more
I0401 02:52:49.299058 10488 HdfsTable.java:763] Loaded file and block metadata
for test_avro_schema_resolution_83f84c2b.alltypesagg_alter_avro_name_comment
partitions: . Time taken: 659.300us
I0401 02:52:49.308449 10488 TableLoader.java:103] Loaded metadata for:
test_avro_schema_resolution_83f84c2b.alltypesagg_alter_avro_name_comment (51ms)
I0401 02:52:49.297439 39067 status.cc:129] ImpalaRuntimeException: Error making
'createTable' RPC to Hive Metastore:
CAUSED BY: RuntimeIOException: Failed to write json to file:
hdfs://localhost:20500/test-warehouse/test_show_create_table_8b557a01.db/iceberg_nullable_test/metadata/8dbfe7dd-0770-40b4-a111-3e55cdce2eda.metadata.json
CAUSED BY: IOException: The stream is closed
@ 0x1e52cd1 impala::Status::Status()
@ 0x2887c0f impala::JniUtil::GetJniExceptionMsg()
@ 0x1e34096 impala::JniCall::Call<>()
@ 0x1e32279 impala::JniUtil::CallJniMethod<>()
@ 0x1e3052e impala::Catalog::ExecDdl()
@ 0x1e0f4fb CatalogServiceThriftIf::ExecDdl()
@ 0x1eec32f impala::CatalogServiceProcessor::process_ExecDdl()
@ 0x1eec07d impala::CatalogServiceProcessor::dispatchCall()
@ 0x1df7ffb apache::thrift::TDispatchProcessor::process()
@ 0x2365838
apache::thrift::server::TAcceptQueueServer::Task::run()
@ 0x2359292 impala::ThriftThread::RunRunnable()
@ 0x235a8ce boost::_mfi::mf2<>::operator()()
@ 0x235a762 boost::_bi::list3<>::operator()<>()
@ 0x235a4a8 boost::_bi::bind_t<>::operator()()
@ 0x235a3ba
boost::detail::function::void_function_obj_invoker0<>::invoke()
@ 0x22ca99f boost::function0<>::operator()()
@ 0x2945e97 impala::Thread::SuperviseThread()
@ 0x294e7f8 boost::_bi::list5<>::operator()<>()
@ 0x294e71c boost::_bi::bind_t<>::operator()()
@ 0x294e6dd boost::detail::thread_data<>::run()
@ 0x41c7eb1 thread_proxy
@ 0x7f1dd19706b9 start_thread
@ 0x7f1dce3dd4dc clone
E0401 02:52:49.476990 39067 catalog-server.cc:136] ImpalaRuntimeException:
Error making 'createTable' RPC to Hive Metastore:
CAUSED BY: RuntimeIOException: Failed to write json to file:
hdfs://localhost:20500/test-warehouse/test_show_create_table_8b557a01.db/iceberg_nullable_test/metadata/8dbfe7dd-0770-40b4-a111-3e55cdce2eda.metadata.json
CAUSED BY: IOException: The stream is closed{code}
> TestShowCreateTable.test_show_create_table fails due to createTable HMS RPC
> failure
> -----------------------------------------------------------------------------------
>
> Key: IMPALA-10633
> URL: https://issues.apache.org/jira/browse/IMPALA-10633
> Project: IMPALA
> Issue Type: Bug
> Reporter: Quanlong Huang
> Priority: Critical
>
> This failure was observed in a nightly build:
> [https://jenkins.impala.io/job/ubuntu-16.04-from-scratch/13584/]
> {code:java}
> metadata.test_show_create_table.TestShowCreateTable.test_show_create_table[protocol:
> beeswax | table_format: text/none] (from pytest){code}
> *Stacktrace*
> {code:java}
> metadata/test_show_create_table.py:62: in test_show_create_table
> unique_database)
> metadata/test_show_create_table.py:122: in __run_show_create_table_test_case
> self.__exec(create_table_result)
> metadata/test_show_create_table.py:133: in __exec
> return self.execute_query_expect_success(self.client, sql_str)
> common/impala_test_suite.py:814: in wrapper
> return function(*args, **kwargs)
> common/impala_test_suite.py:822: in execute_query_expect_success
> result = cls.__execute_query(impalad_client, query, query_options, user)
> common/impala_test_suite.py:923: in __execute_query
> return impalad_client.execute(query, user=user)
> common/impala_connection.py:205: in execute
> return self.__beeswax_client.execute(sql_stmt, user=user)
> beeswax/impala_beeswax.py:187: in execute
> handle = self.__execute_query(query_string.strip(), user=user)
> beeswax/impala_beeswax.py:363: in __execute_query
> handle = self.execute_query_async(query_string, user=user)
> beeswax/impala_beeswax.py:357: in execute_query_async
> handle = self.__do_rpc(lambda: self.imp_service.query(query,))
> beeswax/impala_beeswax.py:520: in __do_rpc
> raise ImpalaBeeswaxException(self.__build_error_message(b), b)
> E ImpalaBeeswaxException: ImpalaBeeswaxException:
> E INNER EXCEPTION: <class 'beeswaxd.ttypes.BeeswaxException'>
> E MESSAGE: ImpalaRuntimeException: Error making 'createTable' RPC to Hive
> Metastore:
> E CAUSED BY: RuntimeIOException: Failed to write json to file:
> hdfs://localhost:20500/test-warehouse/test_show_create_table_8b557a01.db/iceberg_nullable_test/metadata/8dbfe7dd-0770-40b4-a111-3e55cdce2eda.metadata.json
> E CAUSED BY: IOException: The stream is closed
> {code}
--
This message was sent by Atlassian Jira
(v8.3.4#803005)
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]