yihua commented on issue #6572:
URL: https://github.com/apache/hudi/issues/6572#issuecomment-1264163133
This command works for me. I can see in beeline that the table and
partition paths are synced.
```
# Run HoodieDeltaStreamer locally (Spark 3.2, local[4]) with Hive sync in
# HMS mode; stdout/stderr are appended to ds.log.
# Requires: HUDI_DIR (Hudi source checkout with built bundles) and
# TEST_BASE_DIR (directory holding ds_cow.properties, schema.avsc, source data).
export SPARK_HOME=/Users/ethan/Work/lib/spark-3.2.2-bin-hadoop3.2
# Reuse the just-exported SPARK_HOME rather than repeating the full path.
"${SPARK_HOME}/bin/spark-submit" \
  --master local[4] \
  --driver-memory 4g --executor-memory 2g --num-executors 4 \
  --executor-cores 1 \
  --conf spark.serializer=org.apache.spark.serializer.KryoSerializer \
  --conf spark.sql.catalogImplementation=hive \
  --conf spark.driver.maxResultSize=1g \
  --conf spark.speculation=true \
  --conf spark.speculation.multiplier=1.0 \
  --conf spark.speculation.quantile=0.5 \
  --conf spark.ui.port=6680 \
  --conf spark.eventLog.enabled=true \
  --conf spark.eventLog.dir=/Users/ethan/Work/data/hudi/spark-logs \
  --jars "$HUDI_DIR/packaging/hudi-spark-bundle/target/hudi-spark3.2-bundle_2.12-0.12.0.jar" \
  --class org.apache.hudi.utilities.deltastreamer.HoodieDeltaStreamer \
  "$HUDI_DIR/packaging/hudi-utilities-bundle/target/hudi-utilities-bundle_2.12-0.12.0.jar" \
  --props "$TEST_BASE_DIR/ds_cow.properties" \
  --schemaprovider-class org.apache.hudi.utilities.schema.FilebasedSchemaProvider \
  --source-class org.apache.hudi.utilities.sources.ParquetDFSSource \
  --source-ordering-field ts \
  --target-base-path "file:$TEST_BASE_DIR/hudi_mysql_table2" \
  --target-table hudi_mysql_table2 \
  --table-type COPY_ON_WRITE \
  --op UPSERT \
  --enable-sync \
  --sync-tool-classes org.apache.hudi.hive.HiveSyncTool \
  --hoodie-conf hoodie.datasource.hive_sync.partition_extractor_class=org.apache.hudi.hive.MultiPartKeysValueExtractor \
  --hoodie-conf hoodie.datasource.write.hive_style_partitioning=true \
  --hoodie-conf hoodie.datasource.hive_sync.enable=true \
  --hoodie-conf hoodie.datasource.hive_sync.database=default \
  --hoodie-conf hoodie.datasource.hive_sync.table=hudi_mysql_table2 \
  --hoodie-conf hoodie.datasource.hive_sync.partition_fields=u_name \
  --hoodie-conf hoodie.datasource.hive_sync.use_jdbc=false \
  --hoodie-conf hoodie.datasource.hive_sync.metastore.uris=thrift://localhost:9083 \
  --hoodie-conf hoodie.datasource.hive_sync.mode=hms >> ds.log 2>&1
```
ds_cow.properties
```
# Record key and partition-path fields for the written table; both names
# ('key', 'u_name') must exist in schema.avsc below.
hoodie.datasource.write.recordkey.field=key
hoodie.datasource.write.partitionpath.field=u_name
# Schema provider props for FilebasedSchemaProvider.
# NOTE: /TEST_BASE_DIR is a placeholder — change to the absolute path of your
# test base directory (the $TEST_BASE_DIR used in the spark-submit command).
hoodie.deltastreamer.schemaprovider.source.schema.file=file:/TEST_BASE_DIR/schema.avsc
hoodie.deltastreamer.schemaprovider.target.schema.file=file:/TEST_BASE_DIR/schema.avsc
# DFS source root: directory of input files read by ParquetDFSSource
# (same /TEST_BASE_DIR placeholder as above).
hoodie.deltastreamer.source.dfs.root=file:/TEST_BASE_DIR/source
```
schema.avsc
```
{
"type" : "record",
"name" : "test_struct",
"namespace" : "test_namespace",
"fields" : [ {
"name" : "key",
"type" : [ "null", "string" ],
"default" : null
}, {
"name" : "partition",
"type" : [ "null", "string" ],
"default" : null
}, {
"name" : "ts",
"type" : [ "null", "long" ],
"default" : null
}, {
"name" : "textField",
"type" : [ "null", "string" ],
"default" : null
}, {
"name" : "decimalField",
"type" : [ "null", "float" ],
"default" : null
}, {
"name" : "longField",
"type" : [ "null", "long" ],
"default" : null
}, {
"name" : "arrayField",
"type" : [ "null", {
"type" : "array",
"items" : [ "int", "null" ]
} ],
"default" : null
}, {
"name" : "mapField",
"type" : [ "null", {
"type" : "map",
"values" : [ "int", "null" ]
} ],
"default" : null
}, {
"name" : "round",
"type" : [ "null", "int" ],
"default" : null
}, {
"name" : "u_name",
"type" : [ "null", "string" ],
"default" : null
} ]
}
```
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]