I tried to submit a Spark job with Oozie, but it failed with this message:

Main class [org.apache.oozie.action.hadoop.SparkMain], exit code [1]

Is there a misconfiguration on my part?
This is my XML configuration:

<!-- Oozie workflow: runs a single Spark action, then ends; any failure routes to the "fail" kill node. -->
<workflow-app xmlns="uri:oozie:workflow:0.5" name="tkg-cangkul">
    <start to="spark-node"/>
    <action name="spark-node">
        <spark xmlns="uri:oozie:spark-action:0.1">
            <job-tracker>${jobTracker}</job-tracker>
            <name-node>${nameNode}</name-node>
            <configuration>
                <!-- Queue for the Spark job itself -->
                <property>
                    <name>mapred.job.queue.name</name>
                    <value>default</value>
                </property>
                <!-- Queue for the Oozie launcher job -->
                <property>
                    <name>oozie.launcher.mapred.job.queue.name</name>
                    <value>user1</value>
                </property>
            </configuration>
            <master>yarn-cluster</master>
            <name>Spark</name>
            <class>cobaSpark.pack</class>
            <jar>hdfs://localhost:8020/user/apps/cobaSpark.jar</jar>
            <arg>/user/apps/sample1.txt</arg>
            <arg>/user/apps/oozie-spark/out</arg>
        </spark>
        <ok to="end"/>
        <error to="fail"/>
    </action>
    <kill name="fail">
        <message>Workflow failed, error
            message[${wf:errorMessage(wf:lastErrorNode())}]
        </message>
    </kill>
    <end name="end"/>
</workflow-app>

Reply via email to