ad1happy2go commented on issue #10752:
URL: https://github.com/apache/hudi/issues/10752#issuecomment-1966339703

   @soumilshah1995 I made a few modifications and got it working with the code below. After 0.14.x, the class name changed to HoodieStreamer.
   
   ```
   import com.amazonaws.services.glue.GlueContext
   import com.amazonaws.services.glue.MappingSpec
   import com.amazonaws.services.glue.errors.CallSite
   import com.amazonaws.services.glue.util.GlueArgParser
   import com.amazonaws.services.glue.util.Job
   import com.amazonaws.services.glue.util.JsonOptions
   import org.apache.spark.SparkContext
   import scala.collection.JavaConverters._
   import org.apache.spark.sql.SparkSession
   import org.apache.spark.api.java.JavaSparkContext
   import org.apache.hudi.utilities.streamer.HoodieStreamer
   import org.apache.hudi.utilities.streamer.SchedulerConfGenerator
   import org.apache.hudi.utilities.UtilHelpers
   import com.beust.jcommander.JCommander;
   import com.beust.jcommander.Parameter;
   
   object GlueApp {
     
     def main(sysArgs: Array[String]) {
       val args = GlueArgParser.getResolvedOptions(sysArgs, Seq("JOB_NAME").toArray)
   
       val BUCKET = "rxusandbox-us-west-2"
   
       val config = Array(
         "--source-class", "org.apache.hudi.utilities.sources.JsonDFSSource",
         "--source-ordering-field", "ts",
         "--target-base-path", "s3://performance-benchmark-datasets-us-west-2/sandbox/aditya_sandbox/testds_glue/invoice",
         "--target-table", "invoice",
         "--table-type", "COPY_ON_WRITE",
         "--hoodie-conf", "hoodie.datasource.write.keygenerator.class=org.apache.hudi.keygen.SimpleKeyGenerator",
         "--hoodie-conf", "hoodie.datasource.write.recordkey.field=symbol",
         "--hoodie-conf", "hoodie.datasource.write.partitionpath.field=date",
         "--hoodie-conf", s"hoodie.streamer.source.dfs.root=s3://${BUCKET}/testcases/stocks/data/source",
         "--hoodie-conf", "hoodie.datasource.write.precombine.field=ts",
         "--schemaprovider-class", "org.apache.hudi.utilities.schema.FilebasedSchemaProvider",
         "--hoodie-conf", "hoodie.datasource.write.hive_style_partitioning=false",
         "--hoodie-conf", s"hoodie.deltastreamer.schemaprovider.source.schema.file=s3://${BUCKET}/testcases/stocks/data/schema.avsc",
         "--hoodie-conf", s"hoodie.deltastreamer.schemaprovider.target.schema.file=s3://${BUCKET}/testcases/stocks/data/schema.avsc",
         "--payload-class", "org.apache.hudi.common.model.DefaultHoodieRecordPayload",
         "--props", s"s3://${BUCKET}/testcases/stocks/configs/hoodie.properties"
       )
   
       val cfg = HoodieStreamer.getConfig(config)
       val additionalSparkConfigs = SchedulerConfGenerator.getSparkSchedulingConfigs(cfg)
       val jssc = UtilHelpers.buildSparkContext("delta-streamer-test", "jes", additionalSparkConfigs)
       val spark = jssc.sc
   
       val glueContext: GlueContext = new GlueContext(spark)
       Job.init(args("JOB_NAME"), glueContext, args.asJava)
   
       try {
           new HoodieStreamer(cfg, jssc).sync();
       } finally {
           jssc.stop();
       }
   
       Job.commit()
     }
   }
   ```
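   
   Not part of the original job, but as a quick sanity check you can read the table back through the Hudi datasource once the sync finishes (run it before `jssc.stop()` or in a separate session). This is just a minimal sketch: the path is the `--target-base-path` from above, the columns are the record key, partition path and ordering field configured there, and the variable names and app name are placeholders.
   
   ```
   import org.apache.spark.sql.SparkSession
   
   // Reuses the running Spark context if called before jssc.stop()
   val sparkSession = SparkSession.builder().appName("hudi-streamer-verify").getOrCreate()
   
   // Read the COPY_ON_WRITE table written by the streamer
   val invoiceDf = sparkSession.read
     .format("hudi")
     .load("s3://performance-benchmark-datasets-us-west-2/sandbox/aditya_sandbox/testds_glue/invoice")
   
   println(s"Records after sync: ${invoiceDf.count()}")
   invoiceDf.select("symbol", "date", "ts").show(10, truncate = false)
   ```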
   
   I used these jars: hudi-utilities-slim-bundle_2.12-0.14.1.jar, hudi-spark3.3-bundle_2.12-0.14.1.jar, jcommander-1.78.jar
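   (If you are reproducing this in Glue, the jars can be attached through the `--extra-jars` job parameter, pointing at copies uploaded to S3 in your own account; the exact S3 paths will depend on your setup.)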

