[ 
https://issues.apache.org/jira/browse/HUDI-9093?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Mansi Patel updated HUDI-9093:
------------------------------
    Description: 
HoodieClusteringJob {color:#1f2328}has two options defined with the same "-sc" 
short parameter.{color}
*{color:#1f2328}Source Code:{color}*
{code:java}
@Parameter(names = {"--spark-memory", "-sm"}, description = "spark memory to 
use", required = false)
public String sparkMemory = null;
@Parameter(names = {"--retry", "-rt"}, description = "number of retries")
public int retry = 0;
@Parameter(names = {"--skip-clean", "-sc"}, description = "do not trigger clean 
after clustering", required = false)
public Boolean skipClean = true;

@Parameter(names = {"--schedule", "-sc"}, description = "Schedule clustering 
@desperate soon please use \"--mode schedule\" instead")
public Boolean runSchedule = false;
 {code}
*{color:#1f2328}Spark-Submit command:{color}*
{code:java}
spark-submit \
  --class org.apache.hudi.utilities.HoodieClusteringJob \
   
packaging/hudi-utilities-bundle/target/hudi-utilities-bundle_2.12-1.1.0-SNAPSHOT.jar
 \
  --base-path <PATH> \
  --table-name <TABLE_NAME> \
  --instant-time 20250224233500970
{code}
{color:#1f2328}*Exception:*{color}
{code:java}
25/02/28 23:39:50 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another 
address
Exception in thread "main" 
org.apache.hudi.com.beust.jcommander.ParameterException: Found the option -sc 
multiple times
        at 
org.apache.hudi.com.beust.jcommander.JCommander.addDescription(JCommander.java:627)
        at 
org.apache.hudi.com.beust.jcommander.JCommander.createDescriptions(JCommander.java:594)
        at 
org.apache.hudi.com.beust.jcommander.JCommander.<init>(JCommander.java:249)
        at 
org.apache.hudi.utilities.HoodieClusteringJob.main(HoodieClusteringJob.java:149)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at 
org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
        at 
org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:1034)
        at 
org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:199)
        at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:222)
        at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:91)
        at 
org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1125)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1134)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
{code}
{color:#1f2328}*Issue Reference:*{color}

{color:#1f2328}[https://github.com/apache/hudi/issues/12832]{color}

  was:
{color:#1f2328}HoodieCompactor has two options defined with the same "-sc" 
short parameter.{color}

*{color:#1f2328}Source Code:{color}*
{code:java}
    @Parameter(names = {"--skip-clean", "-sc"}, description = "do not trigger 
clean after compaction", required = false)
    public Boolean skipClean = true;
    @Parameter(names = {"--schedule", "-sc"}, description = "Schedule 
compaction", required = false)
    public Boolean runSchedule = false;
    @Parameter(names = {"--mode", "-m"}, description = "Set job mode: Set 
\"schedule\" means make a compact plan; "
        + "Set \"execute\" means execute a compact plan at given instant which 
means --instant-time is needed here; "
        + "Set \"scheduleAndExecute\" means make a compact plan first and 
execute that plan immediately", required = false)
{code}
{color:#1f2328}{color}{color:#1f2328}[https://github.com/apache/hudi/blob/master/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieCompactor.java#L97]{color}

*{color:#1f2328}Spark-Submit command:{color}*
{code:java}
spark-submit --class org.apache.hudi.utilities.HoodieCompactor \
  
${HUDI_REPO}hudi/packaging/hudi-utilities-bundle/target/hudi-utilities-bundle_2.12-1.1.0-SNAPSHOT.jar
 \
--jars 
${HUDI_JARS}/hudi-spark3.4-bundle_2.12-1.0.0.jar,${HUDI_JARS}/hadoop-aws-3.3.4.jar,${HUDI_JARS}//aws-java-sdk-bundle-1.12.367.jar
 \
--conf spark.hadoop.fs.s3a.endpoint=http://127.0.0.1:19000 \
--conf spark.hadoop.fs.s3a.access.key=xxxxx \
--conf spark.hadoop.fs.s3a.secret.key=xxxxx \
--conf spark.hadoop.fs.s3a.path.style.access=true \
--conf spark.hadoop.fs.s3a.connection.ssl.enable=false \
--conf spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem \
--base-path s3a://test/hudi-test-topic \
--table-name hudi-test-topic \
--schema-file s3a://test/schema.avsc \
--instant-time 20250211112324137 \
--parallelism 2 \
--spark-memory 1g {code}
{color:#1f2328}*Exception:*{color}
{code:java}
Exception in thread "main" 
org.apache.hudi.com.beust.jcommander.ParameterException: Found the option -sc 
multiple times
    at 
org.apache.hudi.com.beust.jcommander.JCommander.addDescription(JCommander.java:627)
    at 
org.apache.hudi.com.beust.jcommander.JCommander.createDescriptions(JCommander.java:594)
    at 
org.apache.hudi.com.beust.jcommander.JCommander.<init>(JCommander.java:249)
    at org.apache.hudi.utilities.HoodieCompactor.main(HoodieCompactor.java:173)
    at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native 
Method)
    at 
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
    at 
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.base/java.lang.reflect.Method.invoke(Method.java:568)
    at 
org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
    at 
org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:1020)
    at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:192)
    at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:215)
    at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:91)
    at 
org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1111)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1120)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
25/02/12 17:06:22 INFO ShutdownHookManager: Shutdown hook called
25/02/12 17:06:22 INFO ShutdownHookManager: Deleting directory 
/private/var/folders/vy/55g0v9hj7k30l58rx3crpgj40000gn/T/spark-91863f32-7ad6-47bc-9277-cc5eae852871
 {code}
{color:#1f2328}*Issue Reference:*{color}

{color:#1f2328}https://github.com/apache/hudi/issues/12832{color}


> HoodieClusteringJob has been set two options with the same "-sc" parameter
> --------------------------------------------------------------------------
>
>                 Key: HUDI-9093
>                 URL: https://issues.apache.org/jira/browse/HUDI-9093
>             Project: Apache Hudi
>          Issue Type: Bug
>            Reporter: Mansi Patel
>            Assignee: Mansi Patel
>            Priority: Major
>              Labels: pull-request-available
>             Fix For: 1.1.0
>
>
> HoodieClusteringJob {color:#1f2328}has two options defined with the same 
> "-sc" short parameter.{color}
> *{color:#1f2328}Source Code:{color}*
> {code:java}
> @Parameter(names = {"--spark-memory", "-sm"}, description = "spark memory to 
> use", required = false)
> public String sparkMemory = null;
> @Parameter(names = {"--retry", "-rt"}, description = "number of retries")
> public int retry = 0;
> @Parameter(names = {"--skip-clean", "-sc"}, description = "do not trigger 
> clean after clustering", required = false)
> public Boolean skipClean = true;
> @Parameter(names = {"--schedule", "-sc"}, description = "Schedule clustering 
> @desperate soon please use \"--mode schedule\" instead")
> public Boolean runSchedule = false;
>  {code}
> *{color:#1f2328}Spark-Submit command:{color}*
> {code:java}
> spark-submit \
>   --class org.apache.hudi.utilities.HoodieClusteringJob \
>    
> packaging/hudi-utilities-bundle/target/hudi-utilities-bundle_2.12-1.1.0-SNAPSHOT.jar
>  \
>   --base-path <PATH> \
>   --table-name <TABLE_NAME> \
>   --instant-time 20250224233500970
> {code}
> {color:#1f2328}*Exception:*{color}
> {code:java}
> 25/02/28 23:39:50 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to 
> another address
> Exception in thread "main" 
> org.apache.hudi.com.beust.jcommander.ParameterException: Found the option -sc 
> multiple times
>         at 
> org.apache.hudi.com.beust.jcommander.JCommander.addDescription(JCommander.java:627)
>         at 
> org.apache.hudi.com.beust.jcommander.JCommander.createDescriptions(JCommander.java:594)
>         at 
> org.apache.hudi.com.beust.jcommander.JCommander.<init>(JCommander.java:249)
>         at 
> org.apache.hudi.utilities.HoodieClusteringJob.main(HoodieClusteringJob.java:149)
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>         at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>         at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:498)
>         at 
> org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
>         at 
> org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:1034)
>         at 
> org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:199)
>         at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:222)
>         at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:91)
>         at 
> org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1125)
>         at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1134)
>         at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
> {code}
> {color:#1f2328}*Issue Reference:*{color}
> {color:#1f2328}[https://github.com/apache/hudi/issues/12832]{color}



--
This message was sent by Atlassian Jira
(v8.20.10#820010)

Reply via email to