Github user andrewor14 commented on a diff in the pull request:

    https://github.com/apache/spark/pull/3916#discussion_r25292951
  
    --- Diff: core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala ---
    @@ -277,169 +286,140 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
         """.stripMargin
       }
     
    -  /**
    -   * Fill in values by parsing user options.
    -   * NOTE: Any changes here must be reflected in YarnClientSchedulerBackend.
    -   */
    -  private def parseOpts(opts: Seq[String]): Unit = {
    -    val EQ_SEPARATED_OPT="""(--[^=]+)=(.+)""".r
    -
    -    // Delineates parsing of Spark options from parsing of user options.
    -    parse(opts)
    -
    -    /**
    -     * NOTE: If you add or remove spark-submit options,
    -     * modify NOT ONLY this file but also utils.sh
    -     */
    -    def parse(opts: Seq[String]): Unit = opts match {
    -      case ("--name") :: value :: tail =>
    +  /** Fill in values by parsing user options. */
    +  override protected def handle(opt: String, value: String): Boolean = {
    +    opt match {
    +      case NAME =>
             name = value
    -        parse(tail)
     
    -      case ("--master") :: value :: tail =>
    +      case MASTER =>
             master = value
    -        parse(tail)
     
    -      case ("--class") :: value :: tail =>
    +      case CLASS =>
             mainClass = value
    -        parse(tail)
     
    -      case ("--deploy-mode") :: value :: tail =>
    +      case DEPLOY_MODE =>
             if (value != "client" && value != "cluster") {
               SparkSubmit.printErrorAndExit("--deploy-mode must be either 
\"client\" or \"cluster\"")
             }
             deployMode = value
    -        parse(tail)
     
    -      case ("--num-executors") :: value :: tail =>
    +      case NUM_EXECUTORS =>
             numExecutors = value
    -        parse(tail)
     
    -      case ("--total-executor-cores") :: value :: tail =>
    +      case TOTAL_EXECUTOR_CORES =>
             totalExecutorCores = value
    -        parse(tail)
     
    -      case ("--executor-cores") :: value :: tail =>
    +      case EXECUTOR_CORES =>
             executorCores = value
    -        parse(tail)
     
    -      case ("--executor-memory") :: value :: tail =>
    +      case EXECUTOR_MEMORY =>
             executorMemory = value
    -        parse(tail)
     
    -      case ("--driver-memory") :: value :: tail =>
    +      case DRIVER_MEMORY =>
             driverMemory = value
    -        parse(tail)
     
    -      case ("--driver-cores") :: value :: tail =>
    +      case DRIVER_CORES =>
             driverCores = value
    -        parse(tail)
     
    -      case ("--driver-class-path") :: value :: tail =>
    +      case DRIVER_CLASS_PATH =>
             driverExtraClassPath = value
    -        parse(tail)
     
    -      case ("--driver-java-options") :: value :: tail =>
    +      case DRIVER_JAVA_OPTIONS =>
             driverExtraJavaOptions = value
    -        parse(tail)
     
    -      case ("--driver-library-path") :: value :: tail =>
    +      case DRIVER_LIBRARY_PATH =>
             driverExtraLibraryPath = value
    -        parse(tail)
     
    -      case ("--properties-file") :: value :: tail =>
    +      case PROPERTIES_FILE =>
             propertiesFile = value
    -        parse(tail)
     
    -      case ("--kill") :: value :: tail =>
    +      case KILL_SUBMISSION =>
             submissionToKill = value
             if (action != null) {
               SparkSubmit.printErrorAndExit(s"Action cannot be both $action 
and $KILL.")
             }
             action = KILL
    -        parse(tail)
     
    -      case ("--status") :: value :: tail =>
    +      case STATUS =>
             submissionToRequestStatusFor = value
             if (action != null) {
               SparkSubmit.printErrorAndExit(s"Action cannot be both $action 
and $REQUEST_STATUS.")
             }
             action = REQUEST_STATUS
    -        parse(tail)
     
    -      case ("--supervise") :: tail =>
    +      case SUPERVISE =>
             supervise = true
    -        parse(tail)
     
    -      case ("--queue") :: value :: tail =>
    +      case QUEUE =>
             queue = value
    -        parse(tail)
     
    -      case ("--files") :: value :: tail =>
    +      case FILES =>
             files = Utils.resolveURIs(value)
    -        parse(tail)
     
    -      case ("--py-files") :: value :: tail =>
    +      case PY_FILES =>
             pyFiles = Utils.resolveURIs(value)
    -        parse(tail)
     
    -      case ("--archives") :: value :: tail =>
    +      case ARCHIVES =>
             archives = Utils.resolveURIs(value)
    -        parse(tail)
     
    -      case ("--jars") :: value :: tail =>
    +      case JARS =>
             jars = Utils.resolveURIs(value)
    -        parse(tail)
     
    -      case ("--packages") :: value :: tail =>
    +      case PACKAGES =>
             packages = value
    -        parse(tail)
     
    -      case ("--repositories") :: value :: tail =>
    +      case REPOSITORIES =>
             repositories = value
    -        parse(tail)
     
    -      case ("--conf" | "-c") :: value :: tail =>
    +      case CONF =>
             value.split("=", 2).toSeq match {
               case Seq(k, v) => sparkProperties(k) = v
              case _ => SparkSubmit.printErrorAndExit(s"Spark config without '=': $value")
             }
    -        parse(tail)
     
    -      case ("--proxy-user") :: value :: tail =>
    +      case PROXY_USER =>
             proxyUser = value
    -        parse(tail)
     
    -      case ("--help" | "-h") :: tail =>
    +      case HELP =>
             printUsageAndExit(0)
     
    -      case ("--verbose" | "-v") :: tail =>
    +      case VERBOSE =>
             verbose = true
    -        parse(tail)
     
    -      case ("--version") :: tail =>
    +      case VERSION =>
             SparkSubmit.printVersionAndExit()
     
    -      case EQ_SEPARATED_OPT(opt, value) :: tail =>
    -        parse(opt :: value :: tail)
    +      case _ =>
    +        throw new IllegalArgumentException(s"Unexpected argument '$opt'.")
    +    }
    +    true
    +  }
     
    -      case value :: tail if value.startsWith("-") =>
    -        SparkSubmit.printErrorAndExit(s"Unrecognized option '$value'.")
    +  /**
    +   * The first unrecognized option is treated as the "primary resource". Everything else is
    +   * treated as application arguments.
    --- End diff --
    
    This isn't really a javadoc. We should phrase it so it reads more like one (minor point).
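    
    For example, something along these lines would read more like a javadoc (just a sketch of one possible wording; the summary sentence is my assumption about what the method does, based on the surrounding diff, not the exact text to use):
    
        /**
         * Handles an option that is not recognized by the parser.
         *
         * The first unrecognized option is treated as the "primary resource"; everything
         * else is treated as application arguments.
         */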

