HI im using below code to submit a spark 2.3 application on kubernetes
cluster in scala using play framework

I have also tried as a simple scala program without using play framework

im trying to spark submit which was mentioned below programatically
https://spark.apache.org/docs/latest/running-on-kubernetes.html
<http://spark%202.3%20on%20kubernetes>


$ bin/spark-submit \

    --master k8s://https://<k8s-apiserver-host>:<k8s-apiserver-port> \

    --deploy-mode cluster \

    --name spark-pi \

    --class org.apache.spark.examples.SparkPi \

    --conf spark.executor.instances=5 \

    --conf spark.kubernetes.container.image=<spark-image> \

    local:///path/to/examples.jar



  def index = Action {

    try

    {

    val spark = new SparkLauncher()

      .setMaster("my k8 apiserver host")

      .setVerbose(true)

      .addSparkArg("--verbose")

      .setAppResource("hdfs://server/inputs/my.jar")

      .setConf("spark.app.name","myapp")

      .setConf("spark.executor.instances","5")

      .setConf("spark.kubernetes.container.image","mydockerimage")

      .setDeployMode("cluster")

      .startApplication(new SparkAppHandle.Listener(){

        def infoChanged(handle: SparkAppHandle): Unit = {

          System.out.println("Spark App Id [" + handle.getAppId + "] Info
Changed.  State [" + handle.getState + "]")

        }

       def stateChanged(handle: SparkAppHandle): Unit = {

          System.out.println("Spark App Id [" + handle.getAppId + "] State
Changed. State [" + handle.getState + "]")

          if (handle.getState.toString == "FINISHED") System.exit(0)

        }

      } )

    Ok(spark.getState().toString())

    }

    catch

    {

      case NonFatal(e)=>{

        println("failed with exception: " + e)

      }

    }

Ok

  }

Reply via email to