ymZhao1001 opened a new issue, #6914:
URL: https://github.com/apache/iceberg/issues/6914
### Apache Iceberg version
main (development)
### Query engine
Flink
### Please describe the bug 🐞
In the Flink SQL client:

CREATE CATALOG c1 WITH (
  'type'='iceberg',
  'catalog-type'='hive',
  'uri'='thrift://***:9083',
  'clients'='5',
  'property-version'='1',
  'warehouse'='hdfs://***:8020/iceberg/'
);
use catalog c1;
create database d1;
use d1;
CREATE TABLE t1 (
  ss_create_time TIMESTAMP(3),
  ss_update_time TIMESTAMP(3),
  ss_user_name STRING,
  ss_product_name STRING,
  ss_shop_name STRING,
  ss_city_name STRING,
  ss_country_name STRING,
  ss_sold_date_sk BIGINT,
  ss_sold_time_sk BIGINT,
  ss_item_sk BIGINT,
  ss_customer_sk BIGINT,
  ss_cdemo_sk BIGINT,
  ss_hdemo_sk BIGINT,
  ss_addr_sk BIGINT,
  ss_store_sk BIGINT,
  ss_promo_sk BIGINT,
  ss_ticket_number BIGINT,
  PRIMARY KEY (ss_ticket_number, ss_sold_date_sk, ss_sold_time_sk) NOT ENFORCED
) PARTITIONED BY (ss_sold_date_sk) WITH (
  'format-version'='2',
  'write.upsert.enabled'='true'
);
insert into t1 select * from default_catalog.default_database.datagen_src;
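For reproduction, datagen_src can be any source with the same schema as t1; a minimal DataGen sketch (the connector options here are assumptions, not from the original report):

-- hypothetical DataGen source mirroring t1's schema
CREATE TABLE default_catalog.default_database.datagen_src (
  ss_create_time TIMESTAMP(3),
  ss_update_time TIMESTAMP(3),
  ss_user_name STRING,
  ss_product_name STRING,
  ss_shop_name STRING,
  ss_city_name STRING,
  ss_country_name STRING,
  ss_sold_date_sk BIGINT,
  ss_sold_time_sk BIGINT,
  ss_item_sk BIGINT,
  ss_customer_sk BIGINT,
  ss_cdemo_sk BIGINT,
  ss_hdemo_sk BIGINT,
  ss_addr_sk BIGINT,
  ss_store_sk BIGINT,
  ss_promo_sk BIGINT,
  ss_ticket_number BIGINT
) WITH (
  'connector' = 'datagen',
  'rows-per-second' = '10'  -- throttle so the upsert stream stays small
);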
Then, in the Spark engine, evolve the partition spec:
ALTER TABLE t1 ADD PARTITION FIELD ss_sold_time_sk;
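For the ALTER to reach the same table, the Spark session needs an Iceberg catalog backed by the same Hive Metastore. A minimal configuration sketch (the Spark-side setup is not shown here; the catalog name c1 is assumed to mirror the Flink side):

# spark-defaults.conf (hypothetical)
spark.sql.catalog.c1      = org.apache.iceberg.spark.SparkCatalog
spark.sql.catalog.c1.type = hive
spark.sql.catalog.c1.uri  = thrift://***:9083

The table is then reachable as c1.d1.t1, or as t1 after USE c1.d1;.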
Then query the table from Flink:
select * from t1;
The query fails with the following exception:
org.apache.flink.table.api.TableException: Column 'ss_sold_time_sk' is NOT NULL, however, a null value is being written into it. You can set job configuration 'table.exec.sink.not-null-enforcer'='drop' to suppress this exception and drop such records silently.
    at org.apache.flink.table.runtime.operators.sink.SinkNotNullEnforcer.filter(SinkNotNullEnforcer.java:56)
    at org.apache.flink.table.runtime.operators.sink.SinkNotNullEnforcer.filter(SinkNotNullEnforcer.java:30)
    at org.apache.flink.streaming.api.operators.StreamFilter.processElement(StreamFilter.java:38)
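Per the hint in the exception message itself, the NOT NULL enforcer can be relaxed so the job drops the offending rows instead of failing:

-- workaround only: silently discards rows with NULL in NOT NULL columns;
-- it does not explain why ss_sold_time_sk reads back as NULL after the
-- partition spec change
SET 'table.exec.sink.not-null-enforcer' = 'drop';
select * from t1;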