// com.datastax.insight.core.driver.Launcher
package com.datastax.insight.core.driver;
import com.datastax.insight.core.conf.Configuration;
import com.datastax.util.io.FileUtil;
import com.datastax.util.lang.ProcessStreamHandler;
import com.datastax.util.lang.ProcessUtil;
import com.datastax.util.lang.StringUtil;
import com.google.common.base.Strings;
import org.apache.spark.launcher.SparkLauncher;
import java.io.File;
import java.nio.file.Paths;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import static com.datastax.insight.core.Consts.DELIMITER;
/**
 * Launches Spark applications through {@link SparkLauncher} and streams the
 * spawned process output into per-run log files under the configured log directory.
 *
 * User: Anders Hong
 * Date: 07/09/2016
 * Time: 16:29
 */
public class Launcher {

    /** Polling interval in milliseconds (kept public for backward compatibility; currently unused). */
    public static final int INTERVAL = 5000;

    /**
     * Builds and launches a Spark application described by {@code sparkAction},
     * blocking until the spawned process has been drained by {@link ProcessUtil}.
     *
     * @param configuration environment settings (Hadoop/Spark homes, conf dirs, log dir, queue, jars)
     * @param sparkAction   spark-submit parameters (app resource, main class, master, conf map, app args)
     * @param logId         identifier naming the {@code <logId>.log} and {@code <logId>.stdout} files
     * @return {@code true} when the stdout log exists and contains no "Exception" marker,
     *         {@code false} on any launch failure
     */
    public static boolean launch(Configuration configuration, SparkConfig sparkAction, String logId) {
        Map<String, String> env = buildEnvironment(configuration);
        try {
            SparkLauncher launcher = new SparkLauncher(env);
            if (!Strings.isNullOrEmpty(configuration.getJars())) {
                launcher = launcher.addJar(configuration.getJars());
            }
            // Guard against a missing/short argument list: the original code dereferenced
            // getAppArgs().get(7) unconditionally, which could NPE before the null check below.
            String appNameSuffix =
                    (sparkAction.getAppArgs() != null && sparkAction.getAppArgs().size() > 7)
                            ? sparkAction.getAppArgs().get(7)
                            : logId;
            launcher = launcher.setAppName("spark-app-" + appNameSuffix)
                    .setAppResource(sparkAction.getAppResource())
                    .setMainClass(sparkAction.getMainClass())
                    .setMaster(sparkAction.getMaster())
                    .setVerbose(sparkAction.isVerbose());
            if (sparkAction.getDeployMode() != null) {
                launcher = launcher.setDeployMode(sparkAction.getDeployMode());
            }
            // entrySet() avoids the second map lookup per key that keySet()+get() performed.
            for (Map.Entry<String, String> conf : sparkAction.getConfMap().entrySet()) {
                launcher = launcher.setConf(conf.getKey(), conf.getValue());
            }
            if (sparkAction.getAppArgs() != null) {
                for (String arg : sparkAction.getAppArgs()) {
                    launcher = launcher.addAppArgs(arg);
                }
            }
            if (configuration.getQueue() != null) {
                launcher = launcher.addSparkArg("--conf", "spark.yarn.queue" + "=" + configuration.getQueue());
            }

            Process process = launcher.launch();
            File logFile = Paths.get(configuration.getLogDir(), logId + ".log").toFile();
            // Local instance per call: SimpleDateFormat is not thread-safe, so do not hoist to a static field.
            DateFormat dFormat = new SimpleDateFormat("yy/MM/dd HH:mm:ss");
            // On YARN, lines are prefixed with our own timestamp + DELIMITER; otherwise pass them through.
            ProcessStreamHandler handler = "yarn".equals(sparkAction.getMaster())
                    ? line -> FileUtil.writeFile(logFile, dFormat.format(new Date()) + DELIMITER + line + "\r\n", true)
                    : line -> FileUtil.writeFile(logFile, line + "\r\n", true);
            new ProcessUtil(handler).unblock(process);
            System.out.println("launcher over");

            File stdoutLogFile = Paths.get(configuration.getLogDir(), logId + ".stdout").toFile();
            return flowResult(stdoutLogFile);
        } catch (Exception ex) {
            // NOTE(review): broad catch + printStackTrace kept to preserve the existing
            // "any failure means false" contract; consider a logger if one is available.
            ex.printStackTrace();
            return false;
        }
    }

    /** Collects the Hadoop/Spark environment variables handed to {@link SparkLauncher}. */
    private static Map<String, String> buildEnvironment(Configuration configuration) {
        Map<String, String> env = new HashMap<>();
        env.put("hadoop.home.dir", configuration.getHadoopHome());
        env.put("HADOOP_HOME", configuration.getHadoopHome());
        env.put("spark.home.dir", configuration.getSparkHome());
        env.put("SPARK_HOME", configuration.getSparkHome());
        env.put("HADOOP_CONF_DIR", configuration.getHadoopConfDir());
        if (!Strings.isNullOrEmpty(configuration.getYarnConfDir())) {
            env.put("YARN_CONF_DIR", configuration.getYarnConfDir());
        }
        env.put("HADOOP_USER_NAME", configuration.getHadoopUserName());
        return env;
    }

    /**
     * Runs {@link #launch} on a freshly started background thread and returns immediately.
     * The launch result is discarded; progress is observable only through the log files.
     */
    public static void newThread(Configuration configuration, SparkConfig sparkConfig, String logId) {
        new Thread(() -> launch(configuration, sparkConfig, logId)).start();
    }

    /**
     * Inspects the stdout log produced by the finished job.
     *
     * @param file the {@code <logId>.stdout} file
     * @return {@code false} when the file is missing, unreadable, or its content mentions
     *         "Exception"; {@code true} otherwise
     */
    private static boolean flowResult(File file) {
        if (!file.exists()) {
            return false;
        }
        String fileData = FileUtil.readFile(file);
        // A null read previously surfaced as an NPE swallowed by the caller's catch;
        // treat it as a failure explicitly instead.
        if (fileData == null || fileData.contains("Exception")) {
            return false;
        }
        return true;
    }
}