[
https://issues.apache.org/jira/browse/FLINK-30318?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17645907#comment-17645907
]
Samrat Deb edited comment on FLINK-30318 at 12/12/22 4:58 AM:
--------------------------------------------------------------
[~lsy], I'm on it! I will share the root cause once I find something.
There is one more interesting thing I can see: the ADD JAR command from the CLI does not load the custom jar (I have added some stack traces and thread dumps, and enabled debug logs, to gain more insight).
{code:java}
Flink SQL> ADD JAR '/home/hadoop/flink-connector-mysql-cdc-2.3.0.jar';
Hello Here----
java.lang.RuntimeException: e
    at org.apache.flink.table.api.internal.TableEnvironmentImpl.addJar(TableEnvironmentImpl.java:473)
    at org.apache.flink.table.api.internal.TableEnvironmentImpl.executeInternal(TableEnvironmentImpl.java:1177)
    at org.apache.flink.table.client.gateway.local.LocalExecutor.executeOperation(LocalExecutor.java:206)
    at org.apache.flink.table.client.cli.CliClient.executeOperation(CliClient.java:639)
    at org.apache.flink.table.client.cli.CliClient.callOperation(CliClient.java:473)
    at org.apache.flink.table.client.cli.CliClient.executeOperation(CliClient.java:372)
    at org.apache.flink.table.client.cli.CliClient.getAndExecuteStatements(CliClient.java:329)
    at org.apache.flink.table.client.cli.CliClient.executeInteractive(CliClient.java:280)
    at org.apache.flink.table.client.cli.CliClient.executeInInteractiveMode(CliClient.java:228)
    at org.apache.flink.table.client.SqlClient.openCli(SqlClient.java:151)
    at org.apache.flink.table.client.SqlClient.start(SqlClient.java:95)
    at org.apache.flink.table.client.SqlClient.startClient(SqlClient.java:187)
    at org.apache.flink.table.client.SqlClient.main(SqlClient.java:161)
2022-12-12 04:25:39,787 INFO  org.apache.flink.table.resource.ResourceManager [] - Added jar resource [file:/home/hadoop/flink-connector-mysql-cdc-2.3.0.jar] to class path.
2022-12-12 04:25:39,787 INFO  org.apache.flink.table.resource.ResourceManager [] - Register resource [/home/hadoop/flink-connector-mysql-cdc-2.3.0.jar] successfully.
[INFO] Execute statement succeed.{code}
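To be clear, the "Hello Here----" marker and the "java.lang.RuntimeException: e" trace above are not a real failure; they come from the debug instrumentation I mentioned adding, placed inside TableEnvironmentImpl#addJar to capture the exact call path of ADD JAR. As a minimal standalone sketch of that trick (class and method names here are hypothetical):
{code:java}
// Minimal, self-contained version of the debug trick used above
// (hypothetical names): print a marker, then the stack trace of a
// synthetic exception, to see the exact call path into this method.
public class DebugMarker {

    static void whereAmI() {
        System.out.println("Hello Here----");
        // Creating (not throwing) the exception is enough to capture
        // the current call stack.
        new RuntimeException("e").printStackTrace();
    }

    public static void main(String[] args) {
        whereAmI(); // prints the full call path, like the ADD JAR trace above
    }
}
{code}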
Then the SHOW JARS command:
{code:java}
Flink SQL> show jars;
+--------------------------------------------------+
| jars |
+--------------------------------------------------+
| /home/hadoop/flink-connector-mysql-cdc-2.3.0.jar |
+--------------------------------------------------+
1 row in set{code}
Creating a MySQL CDC table (the DDL succeeds even though the connector classes are not visible, because the connector factory is only looked up when a query against the table is planned):
{code:java}
Flink SQL> CREATE TABLE mysql_sample_table (
> id INT,
> name VARCHAR(50) NOT NULL,
> PRIMARY KEY(id) NOT ENFORCED
> ) WITH (
> 'connector' = 'mysql-cdc',
> 'hostname' = 'abcdcefgh.rds.amazonaws.com',
> 'port' = '3306',
> 'username' = 'user',
> 'password' = 'pass',
> 'database-name' = 'demo',
> 'table-name' = 'test_table'
> );
[INFO] Execute statement succeed.{code}
Then selecting from the table:
{code:java}
Flink SQL> select * from mysql_sample_table;
2022-12-12 04:26:30,302 WARN  org.apache.flink.table.client.cli.CliClient [] - Could not execute SQL statement.
org.apache.flink.table.client.gateway.SqlExecutionException: Failed to parse statement: select * from mysql_sample_table;
    at org.apache.flink.table.client.gateway.local.LocalExecutor.parseStatement(LocalExecutor.java:174) ~[flink-sql-client-1.16.0.jar:1.16.0]
    at org.apache.flink.table.client.cli.SqlCommandParserImpl.parseCommand(SqlCommandParserImpl.java:45) ~[flink-sql-client-1.16.0.jar:1.16.0]
    at org.apache.flink.table.client.cli.SqlMultiLineParser.parse(SqlMultiLineParser.java:71) ~[flink-sql-client-1.16.0.jar:1.16.0]
    at org.jline.reader.impl.LineReaderImpl.acceptLine(LineReaderImpl.java:2731) ~[jline-3.9.0.jar:?]
    at org.jline.reader.impl.LineReaderImpl.readLine(LineReaderImpl.java:585) ~[jline-3.9.0.jar:?]
    at org.apache.flink.table.client.cli.CliClient.getAndExecuteStatements(CliClient.java:295) [flink-sql-client-1.16.0.jar:1.16.0]
    at org.apache.flink.table.client.cli.CliClient.executeInteractive(CliClient.java:280) [flink-sql-client-1.16.0.jar:1.16.0]
    at org.apache.flink.table.client.cli.CliClient.executeInInteractiveMode(CliClient.java:228) [flink-sql-client-1.16.0.jar:1.16.0]
    at org.apache.flink.table.client.SqlClient.openCli(SqlClient.java:151) [flink-sql-client-1.16.0.jar:1.16.0]
    at org.apache.flink.table.client.SqlClient.start(SqlClient.java:95) [flink-sql-client-1.16.0.jar:1.16.0]
    at org.apache.flink.table.client.SqlClient.startClient(SqlClient.java:187) [flink-sql-client-1.16.0.jar:1.16.0]
    at org.apache.flink.table.client.SqlClient.main(SqlClient.java:161) [flink-sql-client-1.16.0.jar:1.16.0]
Caused by: org.apache.flink.table.api.ValidationException: Unable to create a source for reading table 'default_catalog.default_database.mysql_sample_table'.
Table options are:
'connector'='mysql-cdc'
'database-name'='demo'
'hostname'='abcdefgh.rds.amazonaws.com'
'password'='******'
'port'='3306'
'table-name'='test_table'
'username'='user'
    at org.apache.flink.table.factories.FactoryUtil.createDynamicTableSource(FactoryUtil.java:166) ~[flink-table-api-java-uber-1.16.0.jar:1.16.0]
    at org.apache.flink.table.factories.FactoryUtil.createDynamicTableSource(FactoryUtil.java:191) ~[flink-table-api-java-uber-1.16.0.jar:1.16.0]
    at org.apache.flink.table.planner.plan.schema.CatalogSourceTable.createDynamicTableSource(CatalogSourceTable.java:175) ~[?:?]
    at org.apache.flink.table.planner.plan.schema.CatalogSourceTable.toRel(CatalogSourceTable.java:115) ~[?:?]
    at org.apache.calcite.sql2rel.SqlToRelConverter.toRel(SqlToRelConverter.java:3619) ~[?:?]
    at org.apache.calcite.sql2rel.SqlToRelConverter.convertIdentifier(SqlToRelConverter.java:2559) ~[?:?]
    at org.apache.calcite.sql2rel.SqlToRelConverter.convertFrom(SqlToRelConverter.java:2175) ~[?:?]
    at org.apache.calcite.sql2rel.SqlToRelConverter.convertFrom(SqlToRelConverter.java:2095) ~[?:?]
    at org.apache.calcite.sql2rel.SqlToRelConverter.convertFrom(SqlToRelConverter.java:2038) ~[?:?]
    at org.apache.calcite.sql2rel.SqlToRelConverter.convertSelectImpl(SqlToRelConverter.java:669) ~[?:?]
    at org.apache.calcite.sql2rel.SqlToRelConverter.convertSelect(SqlToRelConverter.java:657) ~[?:?]
    at org.apache.calcite.sql2rel.SqlToRelConverter.convertQueryRecursive(SqlToRelConverter.java:3462) ~[?:?]
    at org.apache.calcite.sql2rel.SqlToRelConverter.convertQuery(SqlToRelConverter.java:570) ~[?:?]
    at org.apache.flink.table.planner.calcite.FlinkPlannerImpl.org$apache$flink$table$planner$calcite$FlinkPlannerImpl$$rel(FlinkPlannerImpl.scala:215) ~[?:?]
    at org.apache.flink.table.planner.calcite.FlinkPlannerImpl.rel(FlinkPlannerImpl.scala:191) ~[?:?]
    at org.apache.flink.table.planner.operations.SqlToOperationConverter.toQueryOperation(SqlToOperationConverter.java:1498) ~[?:?]
    at org.apache.flink.table.planner.operations.SqlToOperationConverter.convertSqlQuery(SqlToOperationConverter.java:1253) ~[?:?]
    at org.apache.flink.table.planner.operations.SqlToOperationConverter.convertValidatedSqlNode(SqlToOperationConverter.java:374) ~[?:?]
    at org.apache.flink.table.planner.operations.SqlToOperationConverter.convert(SqlToOperationConverter.java:262) ~[?:?]
    at org.apache.flink.table.planner.delegation.ParserImpl.parse(ParserImpl.java:106) ~[?:?]
    at org.apache.flink.table.client.gateway.local.LocalExecutor.parseStatement(LocalExecutor.java:172) ~[flink-sql-client-1.16.0.jar:1.16.0]
    ... 11 more
[ERROR] Could not execute SQL statement. Reason:
java.lang.ClassNotFoundException: com.ververica.cdc.debezium.utils.ResolvedSchemaUtils
{code}
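The root symptom is the ClassNotFoundException at the very end: com.ververica.cdc.debezium.utils.ResolvedSchemaUtils. Since the same jar works when dropped into flink's lib directory, the class should be present in the jar itself; a quick sanity check along the lines of the hypothetical sketch below (jar path taken from the session above) would confirm that, pointing to classloader wiring in the SQL client rather than packaging:
{code:java}
import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;

// Hypothetical sanity check: load the "missing" class directly from the
// jar. If this succeeds, the jar is intact and the ClassNotFoundException
// above is about which classloader the SQL parser uses, not packaging.
public class JarCheck {
    public static void main(String[] args) throws Exception {
        URL jar = new File("/home/hadoop/flink-connector-mysql-cdc-2.3.0.jar")
                .toURI().toURL();
        try (URLClassLoader cl = new URLClassLoader(new URL[] {jar}, null)) {
            Class<?> c = Class.forName(
                    "com.ververica.cdc.debezium.utils.ResolvedSchemaUtils", false, cl);
            System.out.println("Loaded " + c.getName() + " directly from the jar");
        }
    }
}
{code}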
I tried the same steps on Flink 1.15.2; there, the same commands do not fail.
One more thing: putting the custom jar in Flink's lib path does work on 1.16.0. Was that a part of the change in the Jira to support the ADD JAR feature for advanced functions?
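If it helps with triage, one way to tell whether the regression sits in the CLI or in the table environment itself would be a small programmatic repro along these lines (a hypothetical sketch; it assumes the Flink 1.16.0 Table API dependencies on the classpath and reuses the jar path from the session above):
{code:java}
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

// Hypothetical repro outside the SQL client: run ADD JAR through the
// TableEnvironment directly. If a query on the cdc table fails here too,
// the bug is in the table environment's resource handling, not the CLI.
public class AddJarRepro {
    public static void main(String[] args) {
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
        tEnv.executeSql("ADD JAR '/home/hadoop/flink-connector-mysql-cdc-2.3.0.jar'");
        tEnv.executeSql("SHOW JARS").print();
    }
}
{code}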
> sql-client failed to load jar passed with -j args in flink 1.16.0
> ------------------------------------------------------------------
>
> Key: FLINK-30318
> URL: https://issues.apache.org/jira/browse/FLINK-30318
> Project: Flink
> Issue Type: Bug
> Components: Table SQL / Client
> Affects Versions: 1.16.0
> Reporter: Samrat Deb
> Priority: Major
>
> In Flink 1.16.0, /usr/lib/flink/bin/sql-client.sh embedded -j hudi-flink-bundle_2.12-0.10.1.jar failed to load the jar passed through the argument.
>
> {code:java}
> /usr/lib/flink/bin/sql-client.sh embedded -j hudi-flink-bundle_2.12-0.10.1.jar
> Setting HBASE_CONF_DIR=/etc/hbase/conf because no HBASE_CONF_DIR was set.
> Using the result of 'hadoop classpath' to augment the Hadoop classpath:
> /etc/hadoop/conf:/usr/lib/hadoop/lib/:/usr/lib/hadoop/.//:/usr/lib/hadoop-hdfs/./:/usr/lib/hadoop-hdfs/lib/:/usr/lib/hadoop-hdfs/.//:/usr/lib/hadoop-mapreduce/.//:/usr/lib/hadoop-yarn/lib/:/usr/lib/hadoop-yarn/.//:/etc/tez/conf:/usr/lib/tez/hadoop-shim-0.10.2-amzn-0.jar:/usr/lib/tez/hadoop-shim-2.8-0.10.2-amzn-0.jar:/usr/lib/tez/lib:/usr/lib/tez/LICENSE:/usr/lib/tez/LICENSE-BSD-3clause:/usr/lib/tez/LICENSE-CDDLv1.1-GPLv2_withCPE:/usr/lib/tez/LICENSE-MIT:/usr/lib/tez/LICENSE-SIL_OpenFontLicense-v1.1:/usr/lib/tez/NOTICE:/usr/lib/tez/tez-api-0.10.2-amzn-0.jar:/usr/lib/tez/tez-aux-services-0.10.2-amzn-0.jar:/usr/lib/tez/tez-build-tools-0.10.2-amzn-0.jar:/usr/lib/tez/tez-common-0.10.2-amzn-0.jar:/usr/lib/tez/tez-dag-0.10.2-amzn-0.jar:/usr/lib/tez/tez-examples-0.10.2-amzn-0.jar:/usr/lib/tez/tez-history-parser-0.10.2-amzn-0.jar:/usr/lib/tez/tez-javadoc-tools-0.10.2-amzn-0.jar:/usr/lib/tez/tez-job-analyzer-0.10.2-amzn-0.jar:/usr/lib/tez/tez-mapreduce-0.10.2-amzn-0.jar:/usr/lib/tez/tez-protobuf-history-plugin-0.10.2-amzn-0.jar:/usr/lib/tez/tez-runtime-internals-0.10.2-amzn-0.jar:/usr/lib/tez/tez-runtime-library-0.10.2-amzn-0.jar:/usr/lib/tez/tez-tests-0.10.2-amzn-0.jar:/usr/lib/tez/tez-ui-0.10.2-amzn-0.war:/usr/lib/tez/tez-yarn-timeline-cache-plugin-0.10.2-amzn-0.jar:/usr/lib/tez/tez-yarn-timeline-history-0.10.2-amzn-0.jar:/usr/lib/tez/tez-yarn-timeline-history-with-acls-0.10.2-amzn-0.jar:/usr/lib/tez/tez-yarn-timeline-history-with-fs-0.10.2-amzn-0.jar:/usr/lib/tez/lib/async-http-client-2.12.3.jar:/usr/lib/tez/lib/commons-cli-1.2.jar:/usr/lib/tez/lib/commons-codec-1.11.jar:/usr/lib/tez/lib/commons-collections4-4.1.jar:/usr/lib/tez/lib/commons-io-2.8.0.jar:/usr/lib/tez/lib/commons-lang-2.6.jar:/usr/lib/tez/lib/guava-31.1-jre.jar:/usr/lib/tez/lib/hadoop-annotations.jar:/usr/lib/tez/lib/hadoop-auth.jar:/usr/lib/tez/lib/hadoop-hdfs-client-3.3.3-amzn-0.jar:/usr/lib/tez/lib/hadoop-mapreduce-client-common-3.3.3-amzn-0.jar:/usr/lib/tez/lib/hadoop-mapreduce-client-common.jar:/usr/lib/tez/lib/hadoop-mapreduce-client-core-3.3.3-amzn-0.jar:/usr/lib/tez/lib/hadoop-yarn-server-timeline-pluginstorage-3.3.3-amzn-0.jar:/usr/lib/tez/lib/hadoop-yarn-server-web-proxy.jar:/usr/lib/tez/lib/hhadoop-mapreduce-client-core.jar:/usr/lib/tez/lib/javax.servlet-api-3.1.0.jar:/usr/lib/tez/lib/jersey-client-1.19.jar:/usr/lib/tez/lib/jersey-json-1.19.jar:/usr/lib/tez/lib/jettison-1.3.4.jar:/usr/lib/tez/lib/jsr305-3.0.0.jar:/usr/lib/tez/lib/metrics-core-3.1.0.jar:/usr/lib/tez/lib/netty-all-4.1.72.Final.jar:/usr/lib/tez/lib/protobuf-java-2.5.0.jar:/usr/lib/tez/lib/RoaringBitmap-0.7.45.jar:/usr/lib/tez/lib/slf4j-api-1.7.36.jar:/usr/lib/hadoop-lzo/lib/hadoop-lzo-0.4.19.jar:/usr/lib/hadoop-lzo/lib/hadoop-lzo.jar:/usr/lib/hadoop-lzo/lib/native:/usr/share/aws/aws-java-sdk/aws-java-sdk-bundle-1.12.331.jar:/usr/share/aws/aws-java-sdk/LICENSE.txt:/usr/share/aws/aws-java-sdk/NOTICE.txt:/usr/share/aws/aws-java-sdk/README.md:/usr/share/aws/emr/emrfs/conf:/usr/share/aws/emr/emrfs/lib/animal-sniffer-annotations-1.14.jar:/usr/share/aws/emr/emrfs/lib/annotations-16.0.2.jar:/usr/share/aws/emr/emrfs/lib/aopalliance-1.0.jar:/usr/share/aws/emr/emrfs/lib/bcprov-ext-jdk15on-1.66.jar:/usr/share/aws/emr/emrfs/lib/checker-qual-2.5.2.jar:/usr/share/aws/emr/emrfs/lib/emrfs-hadoop-assembly-2.54.0.jar:/usr/share/aws/emr/emrfs/lib/error_prone_annotations-2.1.3.jar:/usr/share/aws/emr/emrfs/lib/findbugs-annotations-3.0.1.jar:/usr/share/aws/emr/emrfs/lib/ion-java-1.0.2.jar:/usr/share/aws/emr/emrfs/lib/j2objc-annotations-1.1.jar:/usr/share/aws/emr/emrfs/lib/javax.inject-1.jar:/usr/share/aws/emr/emrfs/lib/jmespath-java-1.12.331.jar:/usr/share/aws/emr/emrfs/lib/jsr305-3.0.2.jar:/usr/share/aws/emr/emrfs/auxlib/:/usr/share/aws/emr/ddb/lib/emr-ddb-hadoop.jar:/usr/share/aws/emr/goodies/lib/emr-hadoop-goodies.jar:/usr/share/aws/emr/kinesis/lib/emr-kinesis-hadoop.jar:/usr/share/aws/emr/cloudwatch-sink/lib/cloudwatch-sink-2.3.0.jar:/usr/share/aws/emr/cloudwatch-sink/lib/cloudwatch-sink.jar
> SLF4J: Class path contains multiple SLF4J bindings.
> SLF4J: Found binding in
> [jar:file:/usr/lib/flink/lib/log4j-slf4j-impl-2.17.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> SLF4J: Found binding in
> [jar:file:/usr/lib/hadoop/lib/slf4j-reload4j-1.7.36.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
> explanation.
> SLF4J: Actual binding is of type [org.apache.logging.slf4j.Log4jLoggerFactory]
> 2022-12-07 03:17:05,554 INFO org.apache.flink.yarn.cli.FlinkYarnSessionCli
> [] - Found Yarn properties file under
> /var/lib/flink/yarn/.yarn-properties-hadoop.
> 2022-12-07 03:17:05,554 INFO org.apache.flink.yarn.cli.FlinkYarnSessionCli
> [] - Found Yarn properties file under
> /var/lib/flink/yarn/.yarn-properties-hadoop.
> (Flink SQL client ASCII-art startup banner elided; it was mis-encoded as '?' characters in this paste.)
> Welcome! Enter 'HELP;' to list all available commands. 'QUIT;' to exit.
> Command history file path: /home/hadoop/.flink-sql-history
> Flink SQL> show jars;
> Empty set
> {code}
> This works only when the hudi-flink bundle jar is kept in the flink/lib/ path.
> 
> But the same command works with Flink 1.15.2:
> {code:java}
> /usr/lib/flink/bin/sql-client.sh embedded -j hudi-flink-bundle_2.12-0.10.1.jar
> Setting HBASE_CONF_DIR=/etc/hbase/conf because no HBASE_CONF_DIR was set.
> Using the result of 'hadoop classpath' to augment the Hadoop classpath:
> /etc/hadoop/conf:/usr/lib/hadoop/lib/:/usr/lib/hadoop/.//:/usr/lib/hadoop-hdfs/./:/usr/lib/hadoop-hdfs/lib/:/usr/lib/hadoop-hdfs/.//:/usr/lib/hadoop-mapreduce/.//:/usr/lib/hadoop-yarn/lib/:/usr/lib/hadoop-yarn/.//:/etc/tez/conf:/usr/lib/tez/hadoop-shim-0.10.2-amzn-0.jar:/usr/lib/tez/hadoop-shim-2.8-0.10.2-amzn-0.jar:/usr/lib/tez/lib:/usr/lib/tez/LICENSE:/usr/lib/tez/LICENSE-BSD-3clause:/usr/lib/tez/LICENSE-CDDLv1.1-GPLv2_withCPE:/usr/lib/tez/LICENSE-MIT:/usr/lib/tez/LICENSE-SIL_OpenFontLicense-v1.1:/usr/lib/tez/NOTICE:/usr/lib/tez/tez-api-0.10.2-amzn-0.jar:/usr/lib/tez/tez-aux-services-0.10.2-amzn-0.jar:/usr/lib/tez/tez-build-tools-0.10.2-amzn-0.jar:/usr/lib/tez/tez-common-0.10.2-amzn-0.jar:/usr/lib/tez/tez-dag-0.10.2-amzn-0.jar:/usr/lib/tez/tez-examples-0.10.2-amzn-0.jar:/usr/lib/tez/tez-history-parser-0.10.2-amzn-0.jar:/usr/lib/tez/tez-javadoc-tools-0.10.2-amzn-0.jar:/usr/lib/tez/tez-job-analyzer-0.10.2-amzn-0.jar:/usr/lib/tez/tez-mapreduce-0.10.2-amzn-0.jar:/usr/lib/tez/tez-protobuf-history-plugin-0.10.2-amzn-0.jar:/usr/lib/tez/tez-runtime-internals-0.10.2-amzn-0.jar:/usr/lib/tez/tez-runtime-library-0.10.2-amzn-0.jar:/usr/lib/tez/tez-tests-0.10.2-amzn-0.jar:/usr/lib/tez/tez-ui-0.10.2-amzn-0.war:/usr/lib/tez/tez-yarn-timeline-cache-plugin-0.10.2-amzn-0.jar:/usr/lib/tez/tez-yarn-timeline-history-0.10.2-amzn-0.jar:/usr/lib/tez/tez-yarn-timeline-history-with-acls-0.10.2-amzn-0.jar:/usr/lib/tez/tez-yarn-timeline-history-with-fs-0.10.2-amzn-0.jar:/usr/lib/tez/lib/async-http-client-2.12.3.jar:/usr/lib/tez/lib/commons-cli-1.2.jar:/usr/lib/tez/lib/commons-codec-1.11.jar:/usr/lib/tez/lib/commons-collections4-4.1.jar:/usr/lib/tez/lib/commons-io-2.8.0.jar:/usr/lib/tez/lib/commons-lang-2.6.jar:/usr/lib/tez/lib/guava-31.1-jre.jar:/usr/lib/tez/lib/hadoop-annotations.jar:/usr/lib/tez/lib/hadoop-auth.jar:/usr/lib/tez/lib/hadoop-hdfs-client-3.3.3-amzn-0.jar:/usr/lib/tez/lib/hadoop-mapreduce-client-common-3.3.3-amzn-0.jar:/usr/lib/tez/lib/hadoop-mapreduce-client-common.jar:/usr/lib/tez/lib/hadoop-mapreduce-client-core-3.3.3-amzn-0.jar:/usr/lib/tez/lib/hadoop-yarn-server-timeline-pluginstorage-3.3.3-amzn-0.jar:/usr/lib/tez/lib/hadoop-yarn-server-web-proxy.jar:/usr/lib/tez/lib/hhadoop-mapreduce-client-core.jar:/usr/lib/tez/lib/javax.servlet-api-3.1.0.jar:/usr/lib/tez/lib/jersey-client-1.19.jar:/usr/lib/tez/lib/jersey-json-1.19.jar:/usr/lib/tez/lib/jettison-1.3.4.jar:/usr/lib/tez/lib/jsr305-3.0.0.jar:/usr/lib/tez/lib/metrics-core-3.1.0.jar:/usr/lib/tez/lib/netty-all-4.1.72.Final.jar:/usr/lib/tez/lib/protobuf-java-2.5.0.jar:/usr/lib/tez/lib/RoaringBitmap-0.7.45.jar:/usr/lib/tez/lib/slf4j-api-1.7.36.jar:/usr/lib/hadoop-lzo/lib/hadoop-lzo-0.4.19.jar:/usr/lib/hadoop-lzo/lib/hadoop-lzo.jar:/usr/lib/hadoop-lzo/lib/native:/usr/share/aws/aws-java-sdk/aws-java-sdk-bundle-1.12.331.jar:/usr/share/aws/aws-java-sdk/LICENSE.txt:/usr/share/aws/aws-java-sdk/NOTICE.txt:/usr/share/aws/aws-java-sdk/README.md:/usr/share/aws/emr/emrfs/conf:/usr/share/aws/emr/emrfs/lib/animal-sniffer-annotations-1.14.jar:/usr/share/aws/emr/emrfs/lib/annotations-16.0.2.jar:/usr/share/aws/emr/emrfs/lib/aopalliance-1.0.jar:/usr/share/aws/emr/emrfs/lib/bcprov-ext-jdk15on-1.66.jar:/usr/share/aws/emr/emrfs/lib/checker-qual-2.5.2.jar:/usr/share/aws/emr/emrfs/lib/emrfs-hadoop-assembly-2.54.0.jar:/usr/share/aws/emr/emrfs/lib/error_prone_annotations-2.1.3.jar:/usr/share/aws/emr/emrfs/lib/findbugs-annotations-3.0.1.jar:/usr/share/aws/emr/emrfs/lib/ion-java-1.0.2.jar:/usr/share/aws/emr/emrfs/lib/j2objc-annotations-1.1.jar:/usr/share/aws/emr/emrfs/lib/javax.inject-1.jar:/usr/share/aws/emr/emrfs/lib/jmespath-java-1.12.331.jar:/usr/share/aws/emr/emrfs/lib/jsr305-3.0.2.jar:/usr/share/aws/emr/emrfs/auxlib/:/usr/share/aws/emr/ddb/lib/emr-ddb-hadoop.jar:/usr/share/aws/emr/goodies/lib/emr-hadoop-goodies.jar:/usr/share/aws/emr/kinesis/lib/emr-kinesis-hadoop.jar:/usr/share/aws/emr/cloudwatch-sink/lib/cloudwatch-sink-2.3.0.jar:/usr/share/aws/emr/cloudwatch-sink/lib/cloudwatch-sink.jar
> SLF4J: Class path contains multiple SLF4J bindings.
> SLF4J: Found binding in
> [jar:file:/usr/lib/flink/lib/log4j-slf4j-impl-2.17.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> SLF4J: Found binding in
> [jar:file:/usr/lib/hadoop/lib/slf4j-reload4j-1.7.36.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
> explanation.
> SLF4J: Actual binding is of type [org.apache.logging.slf4j.Log4jLoggerFactory]
> 2022-12-07 03:19:37,344 INFO org.apache.flink.yarn.cli.FlinkYarnSessionCli
> [] - Found Yarn properties file under
> /var/lib/flink/yarn/.yarn-properties-hadoop.
> 2022-12-07 03:19:37,344 INFO org.apache.flink.yarn.cli.FlinkYarnSessionCli
> [] - Found Yarn properties file under
> /var/lib/flink/yarn/.yarn-properties-hadoop.
> (Flink SQL client ASCII-art startup banner elided; it was mis-encoded as '?' characters in this paste.)
> Welcome! Enter 'HELP;' to list all available commands. 'QUIT;' to exit.
> Command history file path: /home/hadoop/.flink-sql-history
> Flink SQL> show jars;
> /usr/lib/flink/opt/flink-sql-client-1.15.2.jar
> /usr/lib/flink/opt/flink-python_2.12-1.15.2.jar
> /home/hadoop/hudi-flink-bundle_2.12-0.10.1.jar
> {code}
>
>
>