weijunlu commented on issue #2723: URL: https://github.com/apache/drill/issues/2723#issuecomment-1366577871
2022-12-28 19:03:07,401 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:foreman] DEBUG o.a.d.e.p.s.h.DefaultSqlHandler - Drill Plan : {
  "head" : {
    "version" : 1,
    "generator" : { "type" : "InsertHandler", "info" : "" },
    "type" : "APACHE_DRILL_PHYSICAL",
    "options" : [ ],
    "queue" : 0,
    "hasResourcePlan" : false,
    "scannedPluginNames" : [ "mysql", "pg" ],
    "resultMode" : "EXEC"
  },
  "graph" : [ {
    "pop" : "jdbc-scan",
    "@id" : 1,
    "sql" : "INSERT INTO `public`.`t1` (`c1`, `c2`)\r\n(SELECT *\r\nFROM `test`.`t1`)",
    "columns" : [ "`ROWCOUNT`" ],
    "config" : {
      "type" : "jdbc",
      "driver" : "com.mysql.jdbc.Driver",
      "url" : "jdbc:mysql://localhost:3316",
      "username" : "root",
      "caseInsensitiveTableNames" : true,
      "writable" : true,
      "authMode" : "SHARED_USER",
      "writerBatchSize" : 10000,
      "enabled" : true
    },
    "userName" : "anonymous",
    "cost" : { "memoryCost" : 1.6777216E7, "outputRowCount" : 1.0E9 }
  }, {
    "pop" : "screen",
    "@id" : 0,
    "child" : 1,
    "initialAllocation" : 1000000,
    "maxAllocation" : 10000000000,
    "cost" : { "memoryCost" : 1.6777216E7, "outputRowCount" : 1.0E9 }
  } ]
}
2022-12-28 19:03:07,402 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:foreman] DEBUG o.a.d.e.p.f.SimpleParallelizer - Root fragment: handle { query_id { part1: 2041218684968999600 part2: -1153457303194072660 } major_fragment_id: 0 minor_fragment_id: 0 } leaf_fragment: true assignment { address: "DESKTOP-PHHB7LC" user_port: 31010 control_port: 31011 data_port: 31012 version: "2.0.0-SNAPSHOT" state: STARTUP } foreman { address: "DESKTOP-PHHB7LC" user_port: 31010 control_port: 31011 data_port: 31012 version: "2.0.0-SNAPSHOT" state: STARTUP } mem_initial: 1000000 mem_max: 10000000000 credentials { user_name: "anonymous" } context { query_start_time: 1672225387214 time_zone: 299 default_schema_name: "" session_id: "0b3af775-337f-4db3-8ce4-52e20d5c50ee" }
2022-12-28 19:03:07,403 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:foreman] TRACE o.a.drill.exec.work.foreman.Foreman - PlanFragments for query part1: 2041218684968999600 part2: -1153457303194072660
2022-12-28 19:03:07,403 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:foreman] DEBUG o.a.d.e.w.f.QueryStateProcessor - 1c53dd94-4277-9ab0-effe-18b1ab8989ac: State change requested PLANNING --> ENQUEUED
2022-12-28 19:03:07,403 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:foreman] DEBUG o.a.d.e.w.f.QueryStateProcessor - 1c53dd94-4277-9ab0-effe-18b1ab8989ac: State change requested ENQUEUED --> STARTING
2022-12-28 19:03:07,403 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:foreman] DEBUG o.a.d.exec.rpc.control.WorkEventBus - Adding fragment status listener for queryId 1c53dd94-4277-9ab0-effe-18b1ab8989ac.
2022-12-28 19:03:07,403 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:foreman] DEBUG o.a.d.e.work.foreman.FragmentsRunner - Submitting fragments to run.
2022-12-28 19:03:07,403 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:foreman] DEBUG o.a.d.exec.ops.FragmentContextImpl - Getting initial memory allocation of 1000000
2022-12-28 19:03:07,403 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:foreman] DEBUG o.a.d.exec.ops.FragmentContextImpl - Fragment max allocation: 10000000000
2022-12-28 19:03:07,403 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:foreman] DEBUG o.a.d.e.work.batch.IncomingBuffers - Came up with a list of 0 required fragments. Fragments {}
2022-12-28 19:03:07,403 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:foreman] DEBUG o.a.d.e.work.foreman.FragmentsRunner - Fragments running.
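For readability, the statement below is a hypothetical reconstruction of the Drill-level query that appears to have produced this plan. The plugin names are assumed from "scannedPluginNames" ("mysql", "pg") and the table names are taken from the pushed-down SQL in the jdbc-scan operator; it is a sketch, not the exact query text that was submitted.

-- Assumed reconstruction of the submitted Drill query (not confirmed by the log):
-- target table on the Postgres plugin `pg`, source table on the MySQL plugin `mysql`.
INSERT INTO pg.public.t1
SELECT * FROM mysql.test.t1;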
2022-12-28 19:03:07,403 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:foreman] DEBUG o.a.d.e.w.f.QueryStateProcessor - 1c53dd94-4277-9ab0-effe-18b1ab8989ac: State change requested STARTING --> RUNNING
2022-12-28 19:03:07,421 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:frag:0:0] DEBUG o.a.d.e.physical.impl.BaseRootExec - BaseRootExec(60762332) operators: org.apache.drill.exec.physical.impl.protocol.OperatorRecordBatch 654876346
2022-12-28 19:03:07,421 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:frag:0:0] DEBUG o.a.d.exec.physical.impl.ImplCreator - Took 17 ms to create RecordBatch tree
2022-12-28 19:03:07,421 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:frag:0:0] INFO o.a.d.e.w.fragment.FragmentExecutor - 1c53dd94-4277-9ab0-effe-18b1ab8989ac:0:0: State change requested AWAITING_ALLOCATION --> RUNNING
2022-12-28 19:03:07,421 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:frag:0:0] INFO o.a.d.e.w.f.FragmentStatusReporter - 1c53dd94-4277-9ab0-effe-18b1ab8989ac:0:0: State to report: RUNNING
2022-12-28 19:03:07,421 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:frag:0:0] DEBUG o.a.d.exec.work.foreman.QueryManager - New fragment status was provided to QueryManager of profile { state: RUNNING minor_fragment_id: 0 operator_profile { input_profile { records: 0 batches: 0 schemas: 0 } operator_id: 1 operator_type: 44 setup_nanos: 0 process_nanos: 9931000 peak_local_memory_allocated: 0 wait_nanos: 0 operator_type_name: "JDBC_SCAN" } operator_profile { input_profile { records: 0 batches: 0 schemas: 0 } operator_id: 0 operator_type: 13 setup_nanos: 0 process_nanos: 0 peak_local_memory_allocated: 0 wait_nanos: 0 operator_type_name: "SCREEN" } start_time: 1672225387403 end_time: 1672225387421 memory_used: 2000000 max_memory_used: 2000000 endpoint { address: "DESKTOP-PHHB7LC" user_port: 31010 control_port: 31011 data_port: 31012 version: "2.0.0-SNAPSHOT" state: STARTUP } } handle { query_id { part1: 2041218684968999600 part2: -1153457303194072660 } major_fragment_id: 0 minor_fragment_id: 0 }
2022-12-28 19:03:07,421 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:frag:0:0] DEBUG o.a.d.e.w.fragment.FragmentExecutor - Starting fragment 0:0 on DESKTOP-PHHB7LC:31010
2022-12-28 19:03:07,456 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:frag:0:0] INFO o.a.d.e.store.jdbc.JdbcBatchReader - User Error Occurred: The JDBC storage plugin failed while trying setup the SQL query. (Table 'public.t1' doesn't exist) org.apache.drill.common.exceptions.UserException: DATA_READ ERROR: The JDBC storage plugin failed while trying setup the SQL query.
Sql: INSERT INTO `public`.`t1` (`c1`, `c2`) (SELECT * FROM `test`.`t1`) [Error Id: 160a04a4-caff-42ae-ac68-06669b926853 ] at org.apache.drill.common.exceptions.UserException$Builder.build(UserException.java:675) at org.apache.drill.exec.store.jdbc.JdbcBatchReader.open(JdbcBatchReader.java:155) at org.apache.drill.exec.physical.impl.scan.framework.ManagedScanFramework.open(ManagedScanFramework.java:211) at org.apache.drill.exec.physical.impl.scan.framework.ShimBatchReader.open(ShimBatchReader.java:76) at org.apache.drill.exec.physical.impl.scan.ReaderState.open(ReaderState.java:224) at org.apache.drill.exec.physical.impl.scan.ScanOperatorExec.nextAction(ScanOperatorExec.java:286) at org.apache.drill.exec.physical.impl.scan.ScanOperatorExec.next(ScanOperatorExec.java:242) at org.apache.drill.exec.physical.impl.protocol.OperatorDriver.doNext(OperatorDriver.java:201) at org.apache.drill.exec.physical.impl.protocol.OperatorDriver.start(OperatorDriver.java:179) at org.apache.drill.exec.physical.impl.protocol.OperatorDriver.next(OperatorDriver.java:129) at org.apache.drill.exec.physical.impl.protocol.OperatorRecordBatch.next(OperatorRecordBatch.java:149) at org.apache.drill.exec.physical.impl.BaseRootExec.next(BaseRootExec.java:103) at org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.innerNext(ScreenCreator.java:81) at org.apache.drill.exec.physical.impl.BaseRootExec.next(BaseRootExec.java:93) at org.apache.drill.exec.work.fragment.FragmentExecutor.lambda$run$0(FragmentExecutor.java:321) at java.security.AccessController.doPrivileged(Native Method) at javax.security.auth.Subject.doAs(Subject.java:422) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1762) at org.apache.drill.exec.work.fragment.FragmentExecutor.run(FragmentExecutor.java:310) at org.apache.drill.common.SelfCleaningRunnable.run(SelfCleaningRunnable.java:38) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:750) Caused by: com.mysql.jdbc.exceptions.jdbc4.MySQLSyntaxErrorException: Table 'public.t1' doesn't exist at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) at java.lang.reflect.Constructor.newInstance(Constructor.java:423) at com.mysql.jdbc.Util.handleNewInstance(Util.java:403) at com.mysql.jdbc.Util.getInstance(Util.java:386) at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:944) at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3933) at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3869) at com.mysql.jdbc.MysqlIO.sendCommand(MysqlIO.java:2524) at com.mysql.jdbc.MysqlIO.sqlQueryDirect(MysqlIO.java:2675) at com.mysql.jdbc.ConnectionImpl.execSQL(ConnectionImpl.java:2465) at com.mysql.jdbc.PreparedStatement.executeInternal(PreparedStatement.java:1915) at com.mysql.jdbc.PreparedStatement.execute(PreparedStatement.java:1254) at com.zaxxer.hikari.pool.ProxyPreparedStatement.execute(ProxyPreparedStatement.java:44) at com.zaxxer.hikari.pool.HikariProxyPreparedStatement.execute(HikariProxyPreparedStatement.java) at org.apache.drill.exec.store.jdbc.JdbcBatchReader.open(JdbcBatchReader.java:136) ... 
21 common frames omitted 2022-12-28 19:03:07,457 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:frag:0:0] ERROR o.a.d.e.physical.impl.BaseRootExec - Batch dump started: dumping last 1 failed batches 2022-12-28 19:03:07,457 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:frag:0:0] ERROR o.a.d.e.p.i.p.OperatorRecordBatch - OperatorRecordBatch[batchAccessor=org.apache.drill.exec.physical.impl.protocol.VectorContainerAccessor@16ed85d5, lastOutcome=null] 2022-12-28 19:03:07,457 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:frag:0:0] ERROR o.a.d.e.physical.impl.BaseRootExec - Batch dump completed. 2022-12-28 19:03:07,457 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:frag:0:0] INFO o.a.d.e.w.fragment.FragmentExecutor - 1c53dd94-4277-9ab0-effe-18b1ab8989ac:0:0: State change requested RUNNING --> FAILED 2022-12-28 19:03:07,457 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:frag:0:0] DEBUG o.a.d.exec.ops.OperatorContextImpl - Closing context for org.apache.drill.exec.store.jdbc.JdbcSubScan 2022-12-28 19:03:07,457 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:frag:0:0] DEBUG o.a.d.e.physical.impl.BaseRootExec - closed operator 654876346 2022-12-28 19:03:07,457 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:frag:0:0] DEBUG o.a.d.exec.ops.OperatorContextImpl - Attempted to close Operator context for org.apache.drill.exec.store.jdbc.JdbcSubScan, but context is already closed 2022-12-28 19:03:07,457 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:frag:0:0] DEBUG o.a.d.exec.ops.OperatorContextImpl - Closing context for org.apache.drill.exec.physical.config.Screen 2022-12-28 19:03:07,457 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:frag:0:0] INFO o.a.d.e.w.fragment.FragmentExecutor - 1c53dd94-4277-9ab0-effe-18b1ab8989ac:0:0: State change requested FAILED --> FINISHED 2022-12-28 19:03:07,463 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:frag:0:0] DEBUG o.a.d.exec.work.foreman.QueryManager - New fragment status was provided to QueryManager of profile { state: FAILED error { error_id: "160a04a4-caff-42ae-ac68-06669b926853" endpoint { address: "DESKTOP-PHHB7LC" user_port: 31010 control_port: 31011 data_port: 31012 version: "2.0.0-SNAPSHOT" state: STARTUP } error_type: DATA_READ message: "DATA_READ ERROR: The JDBC storage plugin failed while trying setup the SQL query. \n\nSql: INSERT INTO `public`.`t1` (`c1`, `c2`)\r\n(SELECT *\r\nFROM `test`.`t1`)\r\nFragment: 0:0\r\n\r\n[Error Id: 160a04a4-caff-42ae-ac68-06669b926853 on DESKTOP-PHHB7LC:31010]" exception { exception_class: "com.mysql.jdbc.exceptions.jdbc4.MySQLSyntaxErrorException" message: "Table \'public.t1\' doesn\'t exist" stack_trace { class_name: "..." line_number: 0 method_name: "..." 
is_native_method: false } stack_trace { class_name: "com.mysql.jdbc.Util" file_name: "Util.java" line_number: 403 method_name: "handleNewInstance" is_native_method: false } stack_trace { class_name: "com.mysql.jdbc.Util" file_name: "Util.java" line_number: 386 method_name: "getInstance" is_native_method: false } stack_trace { class_name: "com.mysql.jdbc.SQLError" file_name: "SQLError.java" line_number: 944 method_name: "createSQLException" is_native_method: false } stack_trace { class_name: "com.mysql.jdbc.MysqlIO" file_name: "MysqlIO.java" line_number: 3933 method_name: "checkErrorPacket" is_native_method: false } stack_trace { class_name: "com.mysql.jdbc.MysqlIO" file_name: "MysqlIO.java" line_number: 3869 method_name: "checkErrorPacket" is_native_method: false } stack_trace { class_name: "com.mysql.jdbc.MysqlIO" file_name: "MysqlIO.java" line_number: 2524 method_name: "sendCommand" is_native_method: false } stack_trace { class_name: "com.mysql.jdbc.MysqlIO" file_name: "MysqlIO.java" line_number: 2675 method_name: "sqlQueryDirect" is_native_method: false } stack_trace { class_name: "com.mysql.jdbc.ConnectionImpl" file_name: "ConnectionImpl.java" line_number: 2465 method_name: "execSQL" is_native_method: false } stack_trace { class_name: "com.mysql.jdbc.PreparedStatement" file_name: "PreparedStatement.java" line_number: 1915 method_name: "executeInternal" is_native_method: false } stack_trace { class_name: "com.mysql.jdbc.PreparedStatement" file_name: "PreparedStatement.java" line_number: 1254 method_name: "execute" is_native_method: false } stack_trace { class_name: "com.zaxxer.hikari.pool.ProxyPreparedStatement" file_name: "ProxyPreparedStatement.java" line_number: 44 method_name: "execute" is_native_method: false } stack_trace { class_name: "com.zaxxer.hikari.pool.HikariProxyPreparedStatement" file_name: "HikariProxyPreparedStatement.java" line_number: -1 method_name: "execute" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.store.jdbc.JdbcBatchReader" file_name: "JdbcBatchReader.java" line_number: 136 method_name: "open" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.physical.impl.scan.framework.ManagedScanFramework" file_name: "ManagedScanFramework.java" line_number: 211 method_name: "open" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.physical.impl.scan.framework.ShimBatchReader" file_name: "ShimBatchReader.java" line_number: 76 method_name: "open" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.physical.impl.scan.ReaderState" file_name: "ReaderState.java" line_number: 224 method_name: "open" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.physical.impl.scan.ScanOperatorExec" file_name: "ScanOperatorExec.java" line_number: 286 method_name: "nextAction" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.physical.impl.scan.ScanOperatorExec" file_name: "ScanOperatorExec.java" line_number: 242 method_name: "next" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.physical.impl.protocol.OperatorDriver" file_name: "OperatorDriver.java" line_number: 201 method_name: "doNext" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.physical.impl.protocol.OperatorDriver" file_name: "OperatorDriver.java" line_number: 179 method_name: "start" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.physical.impl.protocol.OperatorDriver" file_name: "OperatorDriver.java" 
line_number: 129 method_name: "next" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.physical.impl.protocol.OperatorRecordBatch" file_name: "OperatorRecordBatch.java" line_number: 149 method_name: "next" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.physical.impl.BaseRootExec" file_name: "BaseRootExec.java" line_number: 103 method_name: "next" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot" file_name: "ScreenCreator.java" line_number: 81 method_name: "innerNext" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.physical.impl.BaseRootExec" file_name: "BaseRootExec.java" line_number: 93 method_name: "next" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.work.fragment.FragmentExecutor" file_name: "FragmentExecutor.java" line_number: 321 method_name: "lambda$run$0" is_native_method: false } stack_trace { class_name: "..." line_number: 0 method_name: "..." is_native_method: false } stack_trace { class_name: "org.apache.hadoop.security.UserGroupInformation" file_name: "UserGroupInformation.java" line_number: 1762 method_name: "doAs" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.work.fragment.FragmentExecutor" file_name: "FragmentExecutor.java" line_number: 310 method_name: "run" is_native_method: false } stack_trace { class_name: "org.apache.drill.common.SelfCleaningRunnable" file_name: "SelfCleaningRunnable.java" line_number: 38 method_name: "run" is_native_method: false } stack_trace { class_name: "..." line_number: 0 method_name: "..." is_native_method: false } } } minor_fragment_id: 0 operator_profile { input_profile { records: 0 batches: 0 schemas: 0 } operator_id: 1 operator_type: 44 setup_nanos: 0 process_nanos: 45495300 peak_local_memory_allocated: 0 wait_nanos: 0 operator_type_name: "JDBC_SCAN" } operator_profile { input_profile { records: 0 batches: 0 schemas: 0 } operator_id: 0 operator_type: 13 setup_nanos: 0 process_nanos: 2097 peak_local_memory_allocated: 0 wait_nanos: 1501 operator_type_name: "SCREEN" } start_time: 1672225387403 end_time: 1672225387457 memory_used: 0 max_memory_used: 2000000 endpoint { address: "DESKTOP-PHHB7LC" user_port: 31010 control_port: 31011 data_port: 31012 version: "2.0.0-SNAPSHOT" state: STARTUP } } handle { query_id { part1: 2041218684968999600 part2: -1153457303194072660 } major_fragment_id: 0 minor_fragment_id: 0 } 2022-12-28 19:03:07,469 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:frag:0:0] DEBUG o.a.d.e.w.f.QueryStateProcessor - 1c53dd94-4277-9ab0-effe-18b1ab8989ac: State change requested RUNNING --> FAILED 2022-12-28 19:03:07,471 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:frag:0:0] WARN o.a.d.exec.rpc.control.WorkEventBus - Fragment 1c53dd94-4277-9ab0-effe-18b1ab8989ac:0:0 manager is not found in the work bus. 2022-12-28 19:03:07,472 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:frag:0:0] DEBUG o.a.drill.exec.work.foreman.Foreman - 1c53dd94-4277-9ab0-effe-18b1ab8989ac: cleaning up. 2022-12-28 19:03:07,472 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:frag:0:0] DEBUG o.a.d.exec.rpc.control.WorkEventBus - Removing fragment status listener for queryId 1c53dd94-4277-9ab0-effe-18b1ab8989ac. 
2022-12-28 19:03:07,473 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:frag:0:0] TRACE o.a.drill.exec.rpc.user.UserServer - Sending result to client with query_state: FAILED query_id { part1: 2041218684968999600 part2: -1153457303194072660 } error { error_id: "160a04a4-caff-42ae-ac68-06669b926853" endpoint { address: "DESKTOP-PHHB7LC" user_port: 31010 control_port: 31011 data_port: 31012 version: "2.0.0-SNAPSHOT" state: STARTUP } error_type: DATA_READ message: "DATA_READ ERROR: The JDBC storage plugin failed while trying setup the SQL query. \n\nSql: INSERT INTO `public`.`t1` (`c1`, `c2`)\r\n(SELECT *\r\nFROM `test`.`t1`)\r\nFragment: 0:0\r\n\r\n[Error Id: 160a04a4-caff-42ae-ac68-06669b926853 on DESKTOP-PHHB7LC:31010]" exception { exception_class: "com.mysql.jdbc.exceptions.jdbc4.MySQLSyntaxErrorException" message: "Table \'public.t1\' doesn\'t exist" stack_trace { class_name: "..." line_number: 0 method_name: "..." is_native_method: false } stack_trace { class_name: "com.mysql.jdbc.Util" file_name: "Util.java" line_number: 403 method_name: "handleNewInstance" is_native_method: false } stack_trace { class_name: "com.mysql.jdbc.Util" file_name: "Util.java" line_number: 386 method_name: "getInstance" is_native_method: false } stack_trace { class_name: "com.mysql.jdbc.SQLError" file_name: "SQLError.java" line_number: 944 method_name: "createSQLException" is_native_method: false } stack_trace { class_name: "com.mysql.jdbc.MysqlIO" file_name: "MysqlIO.java" line_number: 3933 method_name: "checkErrorPacket" is_native_method: false } stack_trace { class_name: "com.mysql.jdbc.MysqlIO" file_name: "MysqlIO.java" line_number: 3869 method_name: "checkErrorPacket" is_native_method: false } stack_trace { class_name: "com.mysql.jdbc.MysqlIO" file_name: "MysqlIO.java" line_number: 2524 method_name: "sendCommand" is_native_method: false } stack_trace { class_name: "com.mysql.jdbc.MysqlIO" file_name: "MysqlIO.java" line_number: 2675 method_name: "sqlQueryDirect" is_native_method: false } stack_trace { class_name: "com.mysql.jdbc.ConnectionImpl" file_name: "ConnectionImpl.java" line_number: 2465 method_name: "execSQL" is_native_method: false } stack_trace { class_name: "com.mysql.jdbc.PreparedStatement" file_name: "PreparedStatement.java" line_number: 1915 method_name: "executeInternal" is_native_method: false } stack_trace { class_name: "com.mysql.jdbc.PreparedStatement" file_name: "PreparedStatement.java" line_number: 1254 method_name: "execute" is_native_method: false } stack_trace { class_name: "com.zaxxer.hikari.pool.ProxyPreparedStatement" file_name: "ProxyPreparedStatement.java" line_number: 44 method_name: "execute" is_native_method: false } stack_trace { class_name: "com.zaxxer.hikari.pool.HikariProxyPreparedStatement" file_name: "HikariProxyPreparedStatement.java" line_number: -1 method_name: "execute" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.store.jdbc.JdbcBatchReader" file_name: "JdbcBatchReader.java" line_number: 136 method_name: "open" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.physical.impl.scan.framework.ManagedScanFramework" file_name: "ManagedScanFramework.java" line_number: 211 method_name: "open" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.physical.impl.scan.framework.ShimBatchReader" file_name: "ShimBatchReader.java" line_number: 76 method_name: "open" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.physical.impl.scan.ReaderState" file_name: "ReaderState.java" line_number: 
224 method_name: "open" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.physical.impl.scan.ScanOperatorExec" file_name: "ScanOperatorExec.java" line_number: 286 method_name: "nextAction" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.physical.impl.scan.ScanOperatorExec" file_name: "ScanOperatorExec.java" line_number: 242 method_name: "next" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.physical.impl.protocol.OperatorDriver" file_name: "OperatorDriver.java" line_number: 201 method_name: "doNext" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.physical.impl.protocol.OperatorDriver" file_name: "OperatorDriver.java" line_number: 179 method_name: "start" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.physical.impl.protocol.OperatorDriver" file_name: "OperatorDriver.java" line_number: 129 method_name: "next" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.physical.impl.protocol.OperatorRecordBatch" file_name: "OperatorRecordBatch.java" line_number: 149 method_name: "next" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.physical.impl.BaseRootExec" file_name: "BaseRootExec.java" line_number: 103 method_name: "next" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot" file_name: "ScreenCreator.java" line_number: 81 method_name: "innerNext" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.physical.impl.BaseRootExec" file_name: "BaseRootExec.java" line_number: 93 method_name: "next" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.work.fragment.FragmentExecutor" file_name: "FragmentExecutor.java" line_number: 321 method_name: "lambda$run$0" is_native_method: false } stack_trace { class_name: "..." line_number: 0 method_name: "..." is_native_method: false } stack_trace { class_name: "org.apache.hadoop.security.UserGroupInformation" file_name: "UserGroupInformation.java" line_number: 1762 method_name: "doAs" is_native_method: false } stack_trace { class_name: "org.apache.drill.exec.work.fragment.FragmentExecutor" file_name: "FragmentExecutor.java" line_number: 310 method_name: "run" is_native_method: false } stack_trace { class_name: "org.apache.drill.common.SelfCleaningRunnable" file_name: "SelfCleaningRunnable.java" line_number: 38 method_name: "run" is_native_method: false } stack_trace { class_name: "..." line_number: 0 method_name: "..." is_native_method: false } } } 2022-12-28 19:03:07,476 [Client-1] DEBUG o.a.d.e.rpc.user.QueryResultHandler - resultArrived: queryState: FAILED, queryId = 1c53dd94-4277-9ab0-effe-18b1ab8989ac 2022-12-28 19:03:07,476 [Client-1] TRACE o.a.d.e.rpc.user.QueryResultHandler - For QueryId [part1: 2041218684968999600 part2: -1153457303194072660 ], retrieved results listener org.apache.drill.jdbc.impl.DrillCursor$ResultsListener@29741514 2022-12-28 19:03:07,476 [Client-1] DEBUG o.a.d.j.i.DrillCursor$ResultsListener - Received query failure: 2 {} org.apache.drill.common.exceptions.UserRemoteException: DATA_READ ERROR: The JDBC storage plugin failed while trying setup the SQL query. 
Sql: INSERT INTO `public`.`t1` (`c1`, `c2`) (SELECT * FROM `test`.`t1`) Fragment: 0:0 [Error Id: 160a04a4-caff-42ae-ac68-06669b926853 on DESKTOP-PHHB7LC:31010] at org.apache.drill.exec.rpc.user.QueryResultHandler.resultArrived(QueryResultHandler.java:125) at org.apache.drill.exec.rpc.user.UserClient.handle(UserClient.java:422) at org.apache.drill.exec.rpc.user.UserClient.handle(UserClient.java:96) at org.apache.drill.exec.rpc.RpcBus$InboundHandler.decode(RpcBus.java:271) at org.apache.drill.exec.rpc.RpcBus$InboundHandler.decode(RpcBus.java:241) at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:88) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357) at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:327) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:299) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357) at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919) at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:166) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:722) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:658) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:584) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:496) at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:986) at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74) at java.lang.Thread.run(Thread.java:750) Caused by: com.mysql.jdbc.exceptions.jdbc4.MySQLSyntaxErrorException: Table 'public.t1' doesn't exist at .......(:0) at com.mysql.jdbc.Util.handleNewInstance(Util.java:403) at com.mysql.jdbc.Util.getInstance(Util.java:386) 
at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:944) at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3933) at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3869) at com.mysql.jdbc.MysqlIO.sendCommand(MysqlIO.java:2524) at com.mysql.jdbc.MysqlIO.sqlQueryDirect(MysqlIO.java:2675) at com.mysql.jdbc.ConnectionImpl.execSQL(ConnectionImpl.java:2465) at com.mysql.jdbc.PreparedStatement.executeInternal(PreparedStatement.java:1915) at com.mysql.jdbc.PreparedStatement.execute(PreparedStatement.java:1254) at com.zaxxer.hikari.pool.ProxyPreparedStatement.execute(ProxyPreparedStatement.java:44) at com.zaxxer.hikari.pool.HikariProxyPreparedStatement.execute(HikariProxyPreparedStatement.java) at org.apache.drill.exec.store.jdbc.JdbcBatchReader.open(JdbcBatchReader.java:136) at org.apache.drill.exec.physical.impl.scan.framework.ManagedScanFramework.open(ManagedScanFramework.java:211) at org.apache.drill.exec.physical.impl.scan.framework.ShimBatchReader.open(ShimBatchReader.java:76) at org.apache.drill.exec.physical.impl.scan.ReaderState.open(ReaderState.java:224) at org.apache.drill.exec.physical.impl.scan.ScanOperatorExec.nextAction(ScanOperatorExec.java:286) at org.apache.drill.exec.physical.impl.scan.ScanOperatorExec.next(ScanOperatorExec.java:242) at org.apache.drill.exec.physical.impl.protocol.OperatorDriver.doNext(OperatorDriver.java:201) at org.apache.drill.exec.physical.impl.protocol.OperatorDriver.start(OperatorDriver.java:179) at org.apache.drill.exec.physical.impl.protocol.OperatorDriver.next(OperatorDriver.java:129) at org.apache.drill.exec.physical.impl.protocol.OperatorRecordBatch.next(OperatorRecordBatch.java:149) at org.apache.drill.exec.physical.impl.BaseRootExec.next(BaseRootExec.java:103) at org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.innerNext(ScreenCreator.java:81) at org.apache.drill.exec.physical.impl.BaseRootExec.next(BaseRootExec.java:93) at org.apache.drill.exec.work.fragment.FragmentExecutor.lambda$run$0(FragmentExecutor.java:321) at .......(:0) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1762) at org.apache.drill.exec.work.fragment.FragmentExecutor.run(FragmentExecutor.java:310) at org.apache.drill.common.SelfCleaningRunnable.run(SelfCleaningRunnable.java:38) at .......(:0) 2022-12-28 19:03:07,476 [Client-1] DEBUG o.a.d.j.i.DrillCursor$ResultsListener - [#2] Query listener closing. 2022-12-28 19:03:07,477 [Client-1] INFO o.a.d.j.i.DrillCursor$ResultsListener - [#2] Query failed: org.apache.drill.common.exceptions.UserRemoteException: DATA_READ ERROR: The JDBC storage plugin failed while trying setup the SQL query. 
Sql: INSERT INTO `public`.`t1` (`c1`, `c2`) (SELECT * FROM `test`.`t1`) Fragment: 0:0 [Error Id: 160a04a4-caff-42ae-ac68-06669b926853 on DESKTOP-PHHB7LC:31010] at org.apache.drill.exec.rpc.user.QueryResultHandler.resultArrived(QueryResultHandler.java:125) at org.apache.drill.exec.rpc.user.UserClient.handle(UserClient.java:422) at org.apache.drill.exec.rpc.user.UserClient.handle(UserClient.java:96) at org.apache.drill.exec.rpc.RpcBus$InboundHandler.decode(RpcBus.java:271) at org.apache.drill.exec.rpc.RpcBus$InboundHandler.decode(RpcBus.java:241) at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:88) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357) at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:327) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:299) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357) at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919) at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:166) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:722) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:658) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:584) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:496) at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:986) at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74) at java.lang.Thread.run(Thread.java:750) Caused by: com.mysql.jdbc.exceptions.jdbc4.MySQLSyntaxErrorException: Table 'public.t1' doesn't exist at .......(:0) at com.mysql.jdbc.Util.handleNewInstance(Util.java:403) at com.mysql.jdbc.Util.getInstance(Util.java:386) 
at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:944) at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3933) at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3869) at com.mysql.jdbc.MysqlIO.sendCommand(MysqlIO.java:2524) at com.mysql.jdbc.MysqlIO.sqlQueryDirect(MysqlIO.java:2675) at com.mysql.jdbc.ConnectionImpl.execSQL(ConnectionImpl.java:2465) at com.mysql.jdbc.PreparedStatement.executeInternal(PreparedStatement.java:1915) at com.mysql.jdbc.PreparedStatement.execute(PreparedStatement.java:1254) at com.zaxxer.hikari.pool.ProxyPreparedStatement.execute(ProxyPreparedStatement.java:44) at com.zaxxer.hikari.pool.HikariProxyPreparedStatement.execute(HikariProxyPreparedStatement.java) at org.apache.drill.exec.store.jdbc.JdbcBatchReader.open(JdbcBatchReader.java:136) at org.apache.drill.exec.physical.impl.scan.framework.ManagedScanFramework.open(ManagedScanFramework.java:211) at org.apache.drill.exec.physical.impl.scan.framework.ShimBatchReader.open(ShimBatchReader.java:76) at org.apache.drill.exec.physical.impl.scan.ReaderState.open(ReaderState.java:224) at org.apache.drill.exec.physical.impl.scan.ScanOperatorExec.nextAction(ScanOperatorExec.java:286) at org.apache.drill.exec.physical.impl.scan.ScanOperatorExec.next(ScanOperatorExec.java:242) at org.apache.drill.exec.physical.impl.protocol.OperatorDriver.doNext(OperatorDriver.java:201) at org.apache.drill.exec.physical.impl.protocol.OperatorDriver.start(OperatorDriver.java:179) at org.apache.drill.exec.physical.impl.protocol.OperatorDriver.next(OperatorDriver.java:129) at org.apache.drill.exec.physical.impl.protocol.OperatorRecordBatch.next(OperatorRecordBatch.java:149) at org.apache.drill.exec.physical.impl.BaseRootExec.next(BaseRootExec.java:103) at org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.innerNext(ScreenCreator.java:81) at org.apache.drill.exec.physical.impl.BaseRootExec.next(BaseRootExec.java:93) at org.apache.drill.exec.work.fragment.FragmentExecutor.lambda$run$0(FragmentExecutor.java:321) at .......(:0) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1762) at org.apache.drill.exec.work.fragment.FragmentExecutor.run(FragmentExecutor.java:310) at org.apache.drill.common.SelfCleaningRunnable.run(SelfCleaningRunnable.java:38) at .......(:0) 2022-12-28 19:03:07,477 [main] DEBUG o.a.d.j.i.DrillCursor$ResultsListener - [#2] Dequeued query failure exception: {}. org.apache.drill.common.exceptions.UserRemoteException: DATA_READ ERROR: The JDBC storage plugin failed while trying setup the SQL query. 
Sql: INSERT INTO `public`.`t1` (`c1`, `c2`) (SELECT * FROM `test`.`t1`) Fragment: 0:0 [Error Id: 160a04a4-caff-42ae-ac68-06669b926853 on DESKTOP-PHHB7LC:31010] at org.apache.drill.exec.rpc.user.QueryResultHandler.resultArrived(QueryResultHandler.java:125) at org.apache.drill.exec.rpc.user.UserClient.handle(UserClient.java:422) at org.apache.drill.exec.rpc.user.UserClient.handle(UserClient.java:96) at org.apache.drill.exec.rpc.RpcBus$InboundHandler.decode(RpcBus.java:271) at org.apache.drill.exec.rpc.RpcBus$InboundHandler.decode(RpcBus.java:241) at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:88) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357) at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:327) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:299) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357) at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919) at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:166) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:722) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:658) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:584) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:496) at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:986) at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74) at java.lang.Thread.run(Thread.java:750) Caused by: com.mysql.jdbc.exceptions.jdbc4.MySQLSyntaxErrorException: Table 'public.t1' doesn't exist at .......(:0) at com.mysql.jdbc.Util.handleNewInstance(Util.java:403) at com.mysql.jdbc.Util.getInstance(Util.java:386) 
at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:944) at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3933) at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3869) at com.mysql.jdbc.MysqlIO.sendCommand(MysqlIO.java:2524) at com.mysql.jdbc.MysqlIO.sqlQueryDirect(MysqlIO.java:2675) at com.mysql.jdbc.ConnectionImpl.execSQL(ConnectionImpl.java:2465) at com.mysql.jdbc.PreparedStatement.executeInternal(PreparedStatement.java:1915) at com.mysql.jdbc.PreparedStatement.execute(PreparedStatement.java:1254) at com.zaxxer.hikari.pool.ProxyPreparedStatement.execute(ProxyPreparedStatement.java:44) at com.zaxxer.hikari.pool.HikariProxyPreparedStatement.execute(HikariProxyPreparedStatement.java) at org.apache.drill.exec.store.jdbc.JdbcBatchReader.open(JdbcBatchReader.java:136) at org.apache.drill.exec.physical.impl.scan.framework.ManagedScanFramework.open(ManagedScanFramework.java:211) at org.apache.drill.exec.physical.impl.scan.framework.ShimBatchReader.open(ShimBatchReader.java:76) at org.apache.drill.exec.physical.impl.scan.ReaderState.open(ReaderState.java:224) at org.apache.drill.exec.physical.impl.scan.ScanOperatorExec.nextAction(ScanOperatorExec.java:286) at org.apache.drill.exec.physical.impl.scan.ScanOperatorExec.next(ScanOperatorExec.java:242) at org.apache.drill.exec.physical.impl.protocol.OperatorDriver.doNext(OperatorDriver.java:201) at org.apache.drill.exec.physical.impl.protocol.OperatorDriver.start(OperatorDriver.java:179) at org.apache.drill.exec.physical.impl.protocol.OperatorDriver.next(OperatorDriver.java:129) at org.apache.drill.exec.physical.impl.protocol.OperatorRecordBatch.next(OperatorRecordBatch.java:149) at org.apache.drill.exec.physical.impl.BaseRootExec.next(BaseRootExec.java:103) at org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.innerNext(ScreenCreator.java:81) at org.apache.drill.exec.physical.impl.BaseRootExec.next(BaseRootExec.java:93) at org.apache.drill.exec.work.fragment.FragmentExecutor.lambda$run$0(FragmentExecutor.java:321) at .......(:0) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1762) at org.apache.drill.exec.work.fragment.FragmentExecutor.run(FragmentExecutor.java:310) at org.apache.drill.common.SelfCleaningRunnable.run(SelfCleaningRunnable.java:38) at .......(:0) 2022-12-28 19:03:07,478 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:frag:0:0] DEBUG o.a.d.e.w.f.QueryStateProcessor - 1c53dd94-4277-9ab0-effe-18b1ab8989ac: State change requested FAILED --> COMPLETED 2022-12-28 19:03:07,478 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:frag:0:0] WARN o.a.d.e.w.f.QueryStateProcessor - Dropping request to move to COMPLETED state as query is already at FAILED state (which is terminal). 2022-12-28 19:03:07,478 [1c53dd94-4277-9ab0-effe-18b1ab8989ac:frag:0:0] DEBUG o.a.d.e.w.f.FragmentStatusReporter - Closing org.apache.drill.exec.work.fragment.FragmentStatusReporter@26ac4f8b 2022-12-28 19:03:07,480 [main] DEBUG o.a.d.j.i.DrillCursor$ResultsListener - [#2] Query listener closing. 2022-12-28 19:03:07,480 [main] DEBUG o.a.d.j.impl.DrillStatementRegistry - Removing from open-statements registry: org.apache.drill.jdbc.impl.DrillStatementImpl@71df3d2b
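One reading of the log above, offered as an assumption rather than a confirmed diagnosis: the INSERT targeting `public`.`t1` was pushed down over the MySQL connection (jdbc:mysql://localhost:3316, per the jdbc-scan config), where no `public` schema exists, so the MySQL server rejects it with "Table 'public.t1' doesn't exist". The sketch below is simply the pushed-down statement as the MySQL server received it (taken verbatim from the log); running it directly in a MySQL client against that server would presumably reproduce the same error, while the intended target table is expected to live on the Postgres side.

-- Statement as pushed down to jdbc:mysql://localhost:3316 (copied from the jdbc-scan above);
-- it fails there because the MySQL server has no `public` database/schema.
INSERT INTO `public`.`t1` (`c1`, `c2`)
(SELECT *
FROM `test`.`t1`);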