trhlxc opened a new issue, #6962:
URL: https://github.com/apache/seatunnel/issues/6962

   ### Search before asking
   
   - [X] I had searched in the 
[issues](https://github.com/apache/seatunnel/issues?q=is%3Aissue+label%3A%22bug%22)
 and found no similar issues.
   
   
   ### What happened
   
   SQL Server table DDL:
   ```
   CREATE TABLE iplant.dbo.test (
        item varchar(50) COLLATE Chinese_PRC_CI_AI NOT NULL,
        createdate datetime NULL,
        createby varchar(50) COLLATE Chinese_PRC_CI_AI NULL,
        [date] datetime NULL,
        CONSTRAINT PK_test PRIMARY KEY (item)
   );
   ```
   Doris does not support keyword field names, so the generated Doris
   DDL is:
   ```
   CREATE TABLE ODS.ODS_TEST_test_CDC (
        item varchar(20) not NULL,
        createdate datetime DEFAULT NULL,
        createby varchar(50) DEFAULT NULL,
        d_date datetime DEFAULT NULL
   )
   ENGINE=OLAP
   UNIQUE KEY(item)
   COMMENT "测试"
   DISTRIBUTED BY HASH(`item`) BUCKETS 1
   PROPERTIES (
   "replication_allocation" = "tag.location.default: 1",
   "function_column.sequence_col" = 'd_date',
   "in_memory" = "false",
   "storage_format" = "V2"
   );
   ```
   After execution, the d_date field of the Doris table has no value.
   Using the FieldMapper transform:
   a single table works correctly, but with CDC multi-table, all the data 
goes into table A — data inserted into table B is also written to table A.
   
   ### SeaTunnel Version
   
   2.3.5
   
   ### SeaTunnel Config
   
   ```conf
   #Single table
   env {
     # You can set engine configuration here
     parallelism = 1
     job.mode = "STREAMING"
     checkpoint.interval = 5000
   }
   
   source {
        
     SqlServer-CDC {
        result_table_name = "Table1"
        username = "xxx"
        password = "xxx@read"
        startup.mode = "initial"
        incremental.parallelism = "1"
        database-names = ["iplant"]
        table-names = ["iplant.dbo.test"]
        base-url = "jdbc:sqlserver://xxx.xx.x.xxx:1433;databaseName=iplant"
        }
   }
   
   transform {
     FieldMapper {
       source_table_name = "Table1"
       result_table_name = "Table2"
       field_mapper = {
            item = item
                createdate = createdate
                createby = createby
           date = d_date
                
       }
     }
   }
   
   sink {
       Doris {
                # Doris 连接信息
                fenodes = "xxx.xx.x.61:8030"
                username = root
                password = ""
                database = "ODS"
                table = "ODS_TEST_${table_name}_CDC"
                source_table_name = "Table2"
                
                # 其他通用配置
                sink.enable-delete = "true"
                schema_save_mode = "CREATE_SCHEMA_WHEN_NOT_EXIST"
                data_save_mode = "APPEND_DATA"
                sink.label-prefix = "test_onetable"
                sink.enable-2pc = "true"
                
                # Doris 数据格式配置
                doris.config {
                format = "json"
                read_json_by_line = "true"
                }
        }  
   }
   
   
   # multi-table
   env {
     # You can set engine configuration here
     parallelism = 1
     job.mode = "STREAMING"
     checkpoint.interval = 5000
   }
   
   source {
        
     SqlServer-CDC {
        result_table_name = "Source1"
        username = "xx"
        password = "xx@read"
        startup.mode = "initial"
        incremental.parallelism = "1"
        database-names = ["iplant"]
        table-names = ["iplant.dbo.test"]
        base-url = "jdbc:sqlserver://xxxxx:1433;databaseName=iplant"
        }
     SqlServer-CDC {
        result_table_name = "Source2"
        username = "xx"
        password = "xxx@read"
        startup.mode = "initial"
        incremental.parallelism = "1"
        database-names = ["iplant"]
        table-names = ["iplant.dbo.test2"]
        base-url = "jdbc:sqlserver://xxxxx:1433;databaseName=iplant"
        }       
   }
   
   transform {
     # Source1映射
     FieldMapper {
       source_table_name = "Source1"
       result_table_name = "Trans1"
       field_mapper = {
            item = item
            createdate = createdate
            createby = createby
               date = d_date
                
       }
     }
     #Source2 映射
     FieldMapper {
       source_table_name = "Source2"
       result_table_name = "Trans2"
       field_mapper = {
            item = item
            createdate = createdate
            createby = createby
               date = d_date
                
       }
     }
   }
   
   sink {
       Doris {
                # Doris 连接信息
                fenodes = "xxxxx:8030"
                username = root
                password = ""
                database = "ODS"
                table = "ODS_TEST_${table_name}_CDC"
                source_table_name = ["Trans1","Trans2"]
                
                # 其他通用配置
                sink.enable-delete = "true"
                schema_save_mode = "CREATE_SCHEMA_WHEN_NOT_EXIST"
                data_save_mode = "APPEND_DATA"
                sink.label-prefix = "test_onetable"
                sink.enable-2pc = "true"
                
                # Doris 数据格式配置
                doris.config {
                format = "json"
                read_json_by_line = "true"
                }
        }  
   }
   ```
   
   
   ### Running Command
   
   ```shell
   ./bin/seatunnel.sh --config ./job/sqlserverTest1.config
   ```
   
   
   ### Error Exception
   
   ```log
   Hopefully there is a counterpart for keyword fields
   ```
   
   
   ### Zeta or Flink or Spark Version
   
   zeta
   
   ### Java or Scala Version
   
   java1.8
   
   ### Screenshots
   
   _No response_
   
   ### Are you willing to submit PR?
   
   - [ ] Yes I am willing to submit a PR!
   
   ### Code of Conduct
   
   - [X] I agree to follow this project's [Code of 
Conduct](https://www.apache.org/foundation/policies/conduct)
   


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to