Apache Spark：SparkLauncher 句柄在独立集群（standalone cluster）模式下不更新状态
我试图在 Spring Web 应用程序中使用 Spark Launcher 库以编程方式提交 Spark 作业。
在 yarn-client、yarn-cluster 和 standalone client 模式下，一切正常。但是，当使用 standalone cluster 模式时，SparkAppHandle 的 getState() 将永远处于 UNKNOWN 状态。有什么建议吗？谢谢。
这是服务代码
import org.apache.spark.launcher.SparkAppHandle;
import org.apache.spark.launcher.SparkLauncher;
import org.springframework.stereotype.Service;
@Service
public class SparkServices {

    /**
     * Submits the packaged Spark application via {@link SparkLauncher} and blocks
     * until the launched application reaches a final state.
     *
     * <p>NOTE(review): in standalone <em>cluster</em> deploy mode the driver runs on a
     * worker node, so the launcher process may never receive state callbacks and the
     * handle can stay {@code UNKNOWN} forever — confirm against the Spark version in use.
     *
     * @param master the Spark master URL (e.g. {@code yarn}, {@code spark://host:7077})
     * @param mode   the deploy mode, {@code client} or {@code cluster}
     * @return a message containing the final {@link SparkAppHandle.State}
     * @throws Exception if launching fails or the polling sleep is interrupted
     */
    public String launchJob(String master, String mode) throws Exception {
        SparkAppHandle handle = new SparkLauncher()
                .setAppName("test1")
                .setSparkHome("/usr/local/spark")
                .setAppResource("hdfs://nn:9000/spark-application.jar")
                .setMainClass("my.App")
                .setMaster(master)
                .setDeployMode(mode)
                .setConf("spark.executor.instances", "2")
                .setConf("spark.driver.memory", "2g")
                .setConf("spark.driver.cores", "1")
                .setConf("spark.executor.memory", "2g")
                .setConf("spark.executor.cores", "1")
                .addAppArgs("hdfs://nn:9000/spark-project/files/")
                .setVerbose(true)
                .startApplication(new SparkAppHandle.Listener() {
                    @Override
                    public void stateChanged(SparkAppHandle sparkAppHandle) {
                        System.out.println("state >>> " + sparkAppHandle.getState());
                    }

                    @Override
                    public void infoChanged(SparkAppHandle sparkAppHandle) {
                        // Fixed: the original printed getState() here (copy-paste of
                        // stateChanged); infoChanged fires when app info such as the
                        // application id becomes available, so report that instead.
                        System.out.println("info >>> " + sparkAppHandle.getAppId());
                    }
                });

        // Poll until the application reaches a final state (FINISHED, FAILED, KILLED).
        // In standalone cluster mode this may loop indefinitely on UNKNOWN — see note above.
        while (!handle.getState().isFinal()) {
            System.out.println("state >>> " + handle.getState());
            Thread.sleep(10000);
        }
        return "finished with >>>" + handle.getState();
    }
}
以及控制器的代码
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RestController;
@RestController
public class TaskController {

    /** Service that performs the actual Spark job submission. */
    @Autowired
    private SparkServices sparkServices;

    /**
     * REST endpoint that launches a Spark job and waits for it to finish.
     *
     * @param master the Spark master URL, taken from the path
     * @param mode   the deploy mode ({@code client} or {@code cluster}), taken from the path
     * @return the final-state message produced by the service
     * @throws Exception propagated from the underlying launcher
     */
    @GetMapping("/sparkJobs/{master}/{mode}")
    public String sparkJob(@PathVariable("master") String master,
                           @PathVariable("mode") String mode) throws Exception {
        final String result = sparkServices.launchJob(master, mode);
        return result;
    }
}