Java tutorial
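The class below is the master-side RPC service of the Swordfish scheduler: MasterServiceImpl implements the Thrift-generated MasterService.Iface. It queues workflow executions and dispatches them to executor servers, manages Quartz-based schedules, launches and cancels ad hoc and streaming jobs, and tracks executor registration and heartbeats.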
/*
 * Copyright (C) 2017 Baifendian Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.baifendian.swordfish.masterserver.master;

import com.baifendian.swordfish.common.utils.CommonUtil;
import com.baifendian.swordfish.dao.AdHocDao;
import com.baifendian.swordfish.dao.DaoFactory;
import com.baifendian.swordfish.dao.FlowDao;
import com.baifendian.swordfish.dao.StreamingDao;
import com.baifendian.swordfish.dao.enums.ExecType;
import com.baifendian.swordfish.dao.enums.FailurePolicyType;
import com.baifendian.swordfish.dao.enums.FlowStatus;
import com.baifendian.swordfish.dao.enums.NodeDepType;
import com.baifendian.swordfish.dao.enums.NotifyType;
import com.baifendian.swordfish.dao.model.AdHoc;
import com.baifendian.swordfish.dao.model.ExecutionFlow;
import com.baifendian.swordfish.dao.model.ProjectFlow;
import com.baifendian.swordfish.dao.model.Schedule;
import com.baifendian.swordfish.dao.model.StreamingResult;
import com.baifendian.swordfish.masterserver.config.MasterConfig;
import com.baifendian.swordfish.masterserver.exception.ExecException;
import com.baifendian.swordfish.masterserver.exception.MasterException;
import com.baifendian.swordfish.masterserver.exec.ExecutorClient;
import com.baifendian.swordfish.masterserver.exec.ExecutorServerInfo;
import com.baifendian.swordfish.masterserver.exec.ExecutorServerManager;
import com.baifendian.swordfish.masterserver.quartz.FlowScheduleJob;
import com.baifendian.swordfish.masterserver.quartz.QuartzManager;
import com.baifendian.swordfish.masterserver.utils.ResultDetailHelper;
import com.baifendian.swordfish.masterserver.utils.ResultHelper;
import com.baifendian.swordfish.rpc.ExecInfo;
import com.baifendian.swordfish.rpc.HeartBeatData;
import com.baifendian.swordfish.rpc.MasterService.Iface;
import com.baifendian.swordfish.rpc.RetInfo;
import com.baifendian.swordfish.rpc.RetResultInfo;
import com.baifendian.swordfish.rpc.ScheduleInfo;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.thrift.TException;
import org.quartz.CronExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Implementation of the master-side MasterService RPC interface. <p>
 */
public class MasterServiceImpl implements Iface {

  /**
   * logger
   */
  private final Logger logger = LoggerFactory.getLogger(getClass());

  /**
   * workflow DAO
   */
  private final FlowDao flowDao;

  /**
   * ad hoc query DAO
   */
  private final AdHocDao adHocDao;

  /**
   * streaming job DAO
   */
  private final StreamingDao streamingDao;

  /**
   * manages the registered executor servers
   */
  private ExecutorServerManager executorServerManager;

  /**
   * queue of workflow executions waiting to be dispatched
   */
  private final BlockingQueue<ExecFlowInfo> executionFlowQueue;

  /**
   * scheduled service that periodically checks the exec-service (executor) servers and streaming jobs
   */
  private ScheduledExecutorService checkService;

  /**
   * thread that takes executions from the queue and submits them to an executor
   */
  private Submit2ExecutorServerThread flowSubmit2ExecutorThread;

  /**
   * manages flow executions such as append-data (backfill) runs
   */
  private FlowExecManager flowExecManager;

  /**
   * checks executor server heartbeats
   */
  private ExecutorCheckThread executorCheckThread;

  /**
   * checks streaming job status
   */
  private StreamingCheckThread streamingCheckThread;

  public MasterServiceImpl() {
    this.flowDao = DaoFactory.getDaoInstance(FlowDao.class);
    this.adHocDao = DaoFactory.getDaoInstance(AdHocDao.class);
    this.streamingDao = DaoFactory.getDaoInstance(StreamingDao.class);
    this.executorServerManager = new ExecutorServerManager();
    this.executionFlowQueue = new LinkedBlockingQueue<>(MasterConfig.executionFlowQueueSize);
    this.checkService = Executors.newScheduledThreadPool(5);
  }

  /**
   * Initializes and starts the flow-exec related threads.
   */
  public void run() {
    flowExecManager = new FlowExecManager(this, flowDao);

    // initialize the schedule job with the execution queue and the flow DAO
    FlowScheduleJob.init(executionFlowQueue, flowDao);

    // start the thread that submits queued executions to executor servers
    flowSubmit2ExecutorThread = new Submit2ExecutorServerThread(executorServerManager, flowDao, executionFlowQueue);
    flowSubmit2ExecutorThread.setDaemon(true);
    flowSubmit2ExecutorThread.start();

    // create the executor heartbeat check thread
    executorCheckThread = new ExecutorCheckThread(executorServerManager, MasterConfig.heartBeatTimeoutInterval, flowSubmit2ExecutorThread);

    // periodically check executor heartbeats
    checkService.scheduleAtFixedRate(executorCheckThread, 10, MasterConfig.heartBeatCheckInterval, TimeUnit.SECONDS);

    // create the streaming job check thread
    streamingCheckThread = new StreamingCheckThread(streamingDao);

    // periodically check streaming jobs
    checkService.scheduleAtFixedRate(streamingCheckThread, 10, MasterConfig.streamingCheckInterval, TimeUnit.SECONDS);

    // after a restart, re-queue the executions that had not finished
    Thread t = new Thread(() -> {
      try {
        // wait a little longer than the heartbeat timeout so executors can re-register
        Thread.sleep(MasterConfig.heartBeatTimeoutInterval + 1000);

        List<ExecutionFlow> executionFlowList = flowDao.queryAllNoFinishFlow();
        logger.info("recovery exec flows: {}", executionFlowList.size());

        for (ExecutionFlow executionFlow : executionFlowList) {
          Pair<String, Integer> pair = CommonUtil.parseWorker(executionFlow.getWorker());
          ExecFlowInfo execFlowInfo = (pair == null)
              ? new ExecFlowInfo(executionFlow.getId())
              : new ExecFlowInfo(pair.getLeft(), pair.getRight(), executionFlow.getId());
          executionFlowQueue.add(execFlowInfo);
        }
      } catch (Exception e) {
        logger.error("Catch an exception", e);
      }
    });

    t.start();
  }

  /**
   * Stops the check service, the submit thread and the flow exec manager.
   */
  public void stop() {
    if (!checkService.isShutdown()) {
      checkService.shutdownNow();
    }

    flowSubmit2ExecutorThread.disable();

    try {
      flowSubmit2ExecutorThread.interrupt();
      flowSubmit2ExecutorThread.join();
    } catch (InterruptedException e) {
      logger.error("join thread exception", e);
    }

    flowExecManager.destroy();
  }

  /**
   * Adds a workflow execution to the dispatch queue.
   */
  public void addExecFlow(ExecFlowInfo execFlowInfo) {
    executionFlowQueue.add(execFlowInfo);
  }

  /**
   * Sets a schedule: registers the workflow's crontab trigger with Quartz.
   *
   * @see CronExpression
   */
  @Override
  public RetInfo setSchedule(int projectId, int flowId) throws TException {
    logger.info("set schedule, project id: {}, flow id: {}", projectId, flowId);

    try {
      Schedule schedule = flowDao.querySchedule(flowId);
      if (schedule == null) {
        return ResultHelper.createErrorResult("flow schedule info not exists");
      }

      // build the Quartz job and trigger from the schedule
      Date startDate = schedule.getStartDate();
      Date endDate = schedule.getEndDate();

      String jobName = FlowScheduleJob.genJobName(flowId);
      String jobGroupName = FlowScheduleJob.genJobGroupName(projectId);

      Map<String, Object> dataMap = FlowScheduleJob.genDataMap(projectId, flowId, schedule);

      QuartzManager.addJobAndTrigger(jobName, jobGroupName, FlowScheduleJob.class, startDate, endDate, schedule.getCrontab(), dataMap);
    } catch (Exception e) {
      logger.error(e.getMessage(), e);
      return ResultHelper.createErrorResult(e.getMessage());
    }

    return ResultHelper.SUCCESS;
  }

  /**
   * Deletes the schedule of a workflow.
   */
  @Override
  public RetInfo deleteSchedule(int projectId, int flowId) throws TException {
    logger.info("delete schedules of project id:{}, flow id:{}", projectId, flowId);

    try {
      String jobName = FlowScheduleJob.genJobName(flowId);
      String jobGroupName = FlowScheduleJob.genJobGroupName(projectId);

      QuartzManager.deleteJob(jobName, jobGroupName);
    } catch (Exception e) {
      logger.error(e.getMessage(), e);
      return ResultHelper.createErrorResult(e.getMessage());
    }

    return ResultHelper.SUCCESS;
  }

  /**
   * Deletes all schedules of a project.
   */
  @Override
  public RetInfo deleteSchedules(int projectId) throws TException {
    logger.info("delete schedules of project id:{}", projectId);

    try {
      String jobGroupName = FlowScheduleJob.genJobGroupName(projectId);
      QuartzManager.deleteJobs(jobGroupName);
    } catch (Exception e) {
      logger.error(e.getMessage(), e);
      return ResultHelper.createErrorResult(e.getMessage());
    }

    return ResultHelper.SUCCESS;
  }

  /**
   * Executes a workflow directly.
   *
   * @param runTime the run time of the execution
   */
  @Override
  public RetResultInfo execFlow(int projectId, int flowId, long runTime, ExecInfo execInfo) throws TException {
    logger.info("exec flow project id:{}, flow id:{}, run time:{}, exec info:{}", projectId, flowId, runTime, execInfo);

    ExecutionFlow executionFlow;

    try {
      ProjectFlow flow = flowDao.projectFlowFindById(flowId);

      if (flow == null) {
        logger.error("flow: {} is not exists", flowId);
        return new RetResultInfo(ResultHelper.createErrorResult("flow is not exists"), null);
      }

      // insert the execution record
      executionFlow = flowDao.scheduleFlowToExecution(projectId,
          flowId,
          flow.getOwnerId(),
          new Date(runTime),
          ExecType.DIRECT,
          FailurePolicyType.valueOfType(execInfo.failurePolicy),
          0,
          execInfo.getNodeName(),
          NodeDepType.valueOfType(execInfo.getNodeDep()),
          NotifyType.valueOfType(execInfo.getNotifyType()),
          execInfo.getNotifyMails(),
          execInfo.timeout);

      ExecFlowInfo execFlowInfo = new ExecFlowInfo(executionFlow.getId());

      logger.info("insert a flow to execution, exec id:{}, flow id:{}", executionFlow.getId(), flowId);

      // add the execution to the dispatch queue
      executionFlowQueue.add(execFlowInfo);
    } catch (Exception e) {
      logger.error(e.getMessage(), e);
      return new RetResultInfo(ResultHelper.createErrorResult(e.getMessage()), null);
    }

    return new RetResultInfo(ResultHelper.SUCCESS, Arrays.asList(executionFlow.getId()));
  }

  /**
   * Cancels a workflow execution.
   *
   * @param execId the workflow execution id
   * @return
   * @throws TException
   */
  @Override
  public RetInfo cancelExecFlow(int execId) throws TException {
    logger.info("receive cancel exec workflow request, id: {}", execId);

    try {
      ExecutionFlow executionFlow = flowDao.queryExecutionFlow(execId);
      if (executionFlow == null) {
        throw new MasterException("workflow exec id is not exists");
      }

      String worker = executionFlow.getWorker();
      if (worker == null) {
        throw new MasterException("worker is not exists");
      }

      String[] workerInfo = worker.split(":");
      if (workerInfo.length < 2) {
        throw new MasterException("worker is not validate format " + worker);
      }

      logger.info("cancel exec flow {} on worker {}", execId, worker);

      ExecutorClient executionClient = new ExecutorClient(workerInfo[0], Integer.valueOf(workerInfo[1]));
      return executionClient.cancelExecFlow(execId);
    } catch (Exception e) {
      logger.warn("executor report error", e);
      return ResultHelper.createErrorResult(e.getMessage());
    }
  }

  /**
   * Appends data runs (backfill) for a workflow.
   */
  @Override
  public RetResultInfo appendWorkFlow(int projectId, int flowId, ScheduleInfo scheduleInfo, ExecInfo execInfo) throws TException {
    logger.info("append workflow projectId:{}, flowId:{}, scheduleMeta:{}, execInfo:{}", projectId, flowId, scheduleInfo, execInfo);

    try {
      ProjectFlow flow = flowDao.projectFlowFindById(flowId);

      // check that the workflow exists
      if (flow == null) {
        logger.error("projectId:{}, flowId:{} workflow not exists", projectId, flowId);
        return ResultDetailHelper.createErrorResult("current workflow not exists");
      }

      String crontabStr = scheduleInfo.getCrontab();
      CronExpression cron = new CronExpression(crontabStr);

      Date startDateTime = new Date(scheduleInfo.getStartDate());
      Date endDateTime = new Date(scheduleInfo.getEndDate());

      // submit the append-data task
      flowExecManager.submitAddData(flow, cron, startDateTime, endDateTime, execInfo);
    } catch (Exception e) {
      logger.error(e.getMessage(), e);
      return ResultDetailHelper.createErrorResult(e.getMessage());
    }

    return ResultDetailHelper.createSuccessResult(Collections.emptyList());
  }

  /**
   * Executes an ad hoc query by its id.
   */
  @Override
  public RetInfo execAdHoc(int adHocId) {
    logger.info("receive exec ad hoc request, id: {}", adHocId);

    try {
      AdHoc adHoc = adHocDao.getAdHoc(adHocId);

      if (adHoc == null) {
        logger.error("ad hoc id {} not exists", adHocId);
        return ResultHelper.createErrorResult("ad hoc id not exists");
      }

      // if the query has already finished, something went wrong
      if (adHoc.getStatus().typeIsFinished()) {
        logger.error("ad hoc id {} finished unexpected", adHocId);
        return ResultHelper.createErrorResult("task finished unexpected");
      }

      adHoc.setStatus(FlowStatus.WAITING_RES);
      adHocDao.updateAdHocStatus(adHoc);

      ExecutorServerInfo executorServerInfo = executorServerManager.getExecutorServer();
      if (executorServerInfo == null) {
        throw new ExecException("can't found active executor server");
      }

      logger.info("exec ad hoc {} on server {}:{}", adHocId, executorServerInfo.getHost(), executorServerInfo.getPort());

      ExecutorClient executorClient = new ExecutorClient(executorServerInfo);
      executorClient.execAdHoc(adHocId);
    } catch (Exception e) {
      logger.error(e.getMessage(), e);

      // roll back: if the query is still waiting for resources, mark it failed
      AdHoc adHoc = adHocDao.getAdHoc(adHocId);
      if (adHoc != null && adHoc.getStatus() == FlowStatus.WAITING_RES) {
        adHoc.setStatus(FlowStatus.FAILED);
        adHoc.setEndTime(new Date());
        adHocDao.updateAdHocStatus(adHoc);
      }

      return ResultHelper.createErrorResult(e.getMessage());
    }

    return ResultHelper.SUCCESS;
  }

  /**
   * Executes a streaming job. <p>
   *
   * @param execId the streaming execution id
   */
  public RetInfo execStreamingJob(int execId) throws TException {
    logger.info("receive exec streaming job request, id: {}", execId);

    try {
      ExecutorServerInfo executorServerInfo = executorServerManager.getExecutorServer();
      if (executorServerInfo == null) {
        throw new ExecException("can't found active executor server");
      }

      StreamingResult streamingResult = streamingDao.queryStreamingExec(execId);

      if (streamingResult == null) {
        logger.error("streaming exec id {} not exists", execId);
        return ResultHelper.createErrorResult("streaming exec id not exists");
      }

      // if the job has already finished, something went wrong
      if (streamingResult.getStatus().typeIsFinished()) {
        logger.error("streaming exec id {} finished unexpected", execId);
        return ResultHelper.createErrorResult("task finished unexpected");
      }

      streamingResult.setStatus(FlowStatus.WAITING_RES);
      streamingResult
          .setWorker(String.format("%s:%s", executorServerInfo.getHost(), executorServerInfo.getPort()));
      streamingDao.updateResult(streamingResult);

      logger.info("exec streaming job {} on server {}:{}", execId, executorServerInfo.getHost(), executorServerInfo.getPort());

      ExecutorClient executionClient = new ExecutorClient(executorServerInfo.getHost(), executorServerInfo.getPort());
      return executionClient.execStreamingJob(execId);
    } catch (Exception e) {
      logger.error(e.getMessage(), e);

      // roll back: if the job is still waiting for resources, mark it failed
      StreamingResult streamingResult = streamingDao.queryStreamingExec(execId);
      if (streamingResult != null && streamingResult.getStatus() == FlowStatus.WAITING_RES) {
        streamingResult.setStatus(FlowStatus.FAILED);
        streamingResult.setEndTime(new Date());
        streamingDao.updateResult(streamingResult);
      }

      return ResultHelper.createErrorResult(e.getMessage());
    }
  }

  /**
   * Cancels a streaming job. <p>
   *
   * @param execId the streaming execution id
   */
  public RetInfo cancelStreamingJob(int execId) throws TException {
    logger.info("receive cancel streaming job request, id: {}", execId);

    try {
      StreamingResult streamingResult = streamingDao.queryStreamingExec(execId);
      if (streamingResult == null) {
        throw new MasterException("streaming exec id is not exists");
      }

      String worker = streamingResult.getWorker();
      if (worker == null) {
        throw new MasterException("worker is not exists");
      }

      Pair<String, Integer> pair = CommonUtil.parseWorker(worker);
      if (pair == null) {
        throw new MasterException("worker is not validate format " + worker);
      }

      logger.info("cancel exec streaming {} on worker {}", execId, worker);

      ExecutorClient executionClient = new ExecutorClient(pair.getLeft(), pair.getRight());
      return executionClient.cancelStreamingJob(execId);
    } catch (Exception e) {
      logger.warn("executor report error", e);
      return ResultHelper.createErrorResult(e.getMessage());
    }
  }

  /**
   * Activates a suspended streaming job.
   */
  @Override
  public RetInfo activateStreamingJob(int execId) throws TException {
    logger.info("receive activate streaming job request, id: {}", execId);

    try {
      StreamingResult streamingResult = streamingDao.queryStreamingExec(execId);
      if (streamingResult == null) {
        throw new MasterException("streaming exec id is not exists");
      }

      String worker = streamingResult.getWorker();
      if (worker == null) {
        throw new MasterException("worker is not exists");
      }

      Pair<String, Integer> pair = CommonUtil.parseWorker(worker);
      if (pair == null) {
        throw new MasterException("worker is not validate format " + worker);
      }

      logger.info("Activate exec streaming {} on worker {}", execId, worker);

      ExecutorClient executionClient = new ExecutorClient(pair.getLeft(), pair.getRight());
      return executionClient.activateStreamingJob(execId);
    } catch (Exception e) {
      logger.warn("executor report error", e);
      return ResultHelper.createErrorResult(e.getMessage());
    }
  }

  /**
   * Deactivates (suspends) a streaming job.
   */
  @Override
  public RetInfo deactivateStreamingJob(int execId) throws TException {
    logger.info("receive deactivate streaming job request, id: {}", execId);

    try {
      StreamingResult streamingResult = streamingDao.queryStreamingExec(execId);
      if (streamingResult == null) {
        throw new MasterException("streaming exec id is not exists");
      }

      String worker = streamingResult.getWorker();
      if (worker == null) {
        throw new MasterException("worker is not exists");
      }

      Pair<String, Integer> pair = CommonUtil.parseWorker(worker);
      if (pair == null) {
        throw new MasterException("worker is not validate format " + worker);
      }

      logger.info("Deactivate exec streaming {} on worker {}", execId, worker);

      ExecutorClient executionClient = new ExecutorClient(pair.getLeft(), pair.getRight());
      return executionClient.deactivateStreamingJob(execId);
    } catch (Exception e) {
      logger.warn("executor report error", e);
      return ResultHelper.createErrorResult(e.getMessage());
    }
  }

  /**
   * Registers an executor server.
   *
   * @param host executor host
   * @param port executor port
   */
  @Override
  public RetInfo registerExecutor(String host, int port, long registerTime) throws TException {
    logger.info("register executor server[{}:{}]", host, port);

    try {
      if (Math.abs(System.currentTimeMillis() - registerTime) > MasterConfig.masterExecutorMaxAllowTicketInterval) {
        logger.warn("master and executor clock ticket is more than {}", MasterConfig.masterExecutorMaxAllowTicketInterval);
        return ResultHelper.createErrorResult("master and executor clock ticket is more than " + MasterConfig.masterExecutorMaxAllowTicketInterval);
      }

      // build the heartbeat data from the register time
      HeartBeatData heartBeatData = new HeartBeatData();
      heartBeatData.setReportDate(registerTime);
      heartBeatData.setReceiveDate(System.currentTimeMillis());

      ExecutorServerInfo executorServerInfo = new ExecutorServerInfo();
      executorServerInfo.setHost(host);
      executorServerInfo.setPort(port);
      executorServerInfo.setHeartBeatData(heartBeatData);

      if (executorServerManager.containServer(executorServerInfo)) {
        executorServerManager.updateServer(executorServerInfo);
        // the executor re-registered, resubmit the executions that were assigned to it
        flowSubmit2ExecutorThread.resubmitExecFlow(executorServerInfo);
      } else {
        // a new executor, add it to the server manager
        executorServerManager.addServer(executorServerInfo);
      }
    } catch (Exception e) {
      logger.warn("executor register error", e);
      return ResultHelper.createErrorResult(e.getMessage());
    }

    return ResultHelper.SUCCESS;
  }

  // marks an executor server as down and resubmits the executions assigned to it
  @Override
  public RetInfo downExecutor(String host, int port) throws TException {
    logger.info("down executor server[{}:{}]", host, port);

    try {
      ExecutorServerInfo executorServerInfo = new ExecutorServerInfo();
      executorServerInfo.setHost(host);
      executorServerInfo.setPort(port);

      if (executorServerManager.containServer(executorServerInfo)) {
        executorServerManager.removeServer(executorServerInfo);
        // resubmit the executions that were assigned to the removed executor
        flowSubmit2ExecutorThread.resubmitExecFlow(executorServerInfo);
      }
    } catch (Exception e) {
      logger.warn("executor down error", e);
      return ResultHelper.createErrorResult(e.getMessage());
    }

    return ResultHelper.SUCCESS;
  }

  /**
   * Handles a heartbeat report from an executor server.
   *
   * @param host executor host
   * @param port executor port
   */
  @Override
  public RetInfo executorReport(String host, int port, HeartBeatData heartBeatData) throws TException {
    logger.info("executor server[{}:{}] report info {}", host, port, heartBeatData);

    try {
      // record the time the heartbeat was received
      heartBeatData.setReceiveDate(System.currentTimeMillis());

      ExecutorServerInfo executorServerInfo = new ExecutorServerInfo();
      executorServerInfo.setHost(host);
      executorServerInfo.setPort(port);
      executorServerInfo.setHeartBeatData(heartBeatData);

      executorServerManager.updateServer(executorServerInfo);
    } catch (Exception e) {
      logger.warn(String.format("executor report error, [%s:%d]", host, port), e);
      return ResultHelper.createErrorResult(e.getMessage());
    }

    return ResultHelper.SUCCESS;
  }
}
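For context, here is a minimal, hypothetical sketch of how MasterServiceImpl could be exposed over Thrift. It is not the project's actual bootstrap code: the bootstrap class name and the listen port (9090) are assumptions; the sketch relies only on the Thrift-generated MasterService.Processor and the standard libthrift server classes.

// Hypothetical bootstrap sketch -- not Swordfish's real startup code.
// Assumes the Thrift-generated MasterService.Processor and an arbitrary port.
import com.baifendian.swordfish.rpc.MasterService;
import org.apache.thrift.server.TServer;
import org.apache.thrift.server.TThreadPoolServer;
import org.apache.thrift.transport.TServerSocket;

public class MasterBootstrapSketch {

  public static void main(String[] args) throws Exception {
    // Create the service and start its background threads (submit, checks, recovery).
    MasterServiceImpl masterService = new MasterServiceImpl();
    masterService.run();

    // Expose the service over a blocking Thrift socket (port 9090 is an assumption).
    TServerSocket transport = new TServerSocket(9090);
    MasterService.Processor<MasterService.Iface> processor =
        new MasterService.Processor<>(masterService);
    TServer server = new TThreadPoolServer(
        new TThreadPoolServer.Args(transport).processor(processor));

    // Stop the background threads when the JVM shuts down.
    Runtime.getRuntime().addShutdownHook(new Thread(masterService::stop));

    // serve() blocks until the server is stopped.
    server.serve();
  }
}

The points this sketch illustrates are visible in the class itself: run() must be called before serving requests so the dispatch queue, submit thread, and check threads exist, and stop() should be called on shutdown to terminate them cleanly.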