SparkLauncher提交jar任务到spark-yarn
程序员文章站
2022-04-01 22:04:41
...
#spark目录下执行
./bin/spark-submit --class org.apache.spark.examples.SparkPi \
--master yarn \
--deploy-mode cluster \
/Users/zheng/spark/spark-3.0.0/examples/jars/spark-examples_2.12-3.0.0-preview2.jar
以下通过 SparkLauncher 以编程方式提交任务,效果与在终端执行 spark-submit 相同;其底层实现仍然是调用 spark-submit 进行提交。
package com.zheng.spark;
import org.apache.spark.launcher.SparkAppHandle;
import org.apache.spark.launcher.SparkLauncher;
import java.io.IOException;
/**
* Created by zheng on 2020-04-07.
*/
public class SparkClient {
    /**
     * Submits the SparkPi example jar to YARN in cluster mode via
     * {@link SparkLauncher} (equivalent to running spark-submit from a
     * terminal) and polls the returned handle until the YARN application id
     * becomes available or the application reaches a terminal state.
     *
     * @param args command-line arguments (unused)
     * @throws IOException if the spark-submit child process cannot be started
     */
    public static void main(String[] args) throws IOException {
        SparkLauncher sparkLauncher = new SparkLauncher();
        SparkAppHandle sparkAppHandle = sparkLauncher
                .setMaster("yarn")
                .setDeployMode("cluster")
                .setMainClass("org.apache.spark.examples.SparkPi")
                .setAppResource("/Users/zheng/spark/spark-3.0.0/examples/jars/spark-examples_2.12-3.0.0-preview2.jar")
                .startApplication();

        // NOTE(review): the original read the state once before the loop and
        // never refreshed it, so the loop condition could never change; it also
        // busy-spun without sleeping and would hang forever if the application
        // failed before an id was assigned. Fixed below: re-read the state on
        // every iteration, bail out on any terminal state, and back off briefly
        // between polls.
        String applicationId = null;
        while (true) {
            SparkAppHandle.State state = sparkAppHandle.getState();
            applicationId = sparkAppHandle.getAppId();
            if (applicationId != null) {
                System.out.println("applicationId:" + applicationId);
                break;
            }
            if (state.isFinal()) {
                // Application ended (e.g. FAILED/KILLED) before an id existed.
                System.out.println("application finished without an id, state:" + state);
                break;
            }
            try {
                Thread.sleep(100L); // avoid a tight busy-wait while polling
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // restore interrupt status
                break;
            }
        }
    }
}
上一篇: 记Spark提交任务到Yarn错误汇总
下一篇: spark-yarn提交任务