com.baifendian.swordfish.execserver.runner.streaming.StreamingRunnerManager.java Source code

Introduction

Here is the source code for com.baifendian.swordfish.execserver.runner.streaming.StreamingRunnerManager.java.
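
StreamingRunnerManager is the execution server's manager for streaming jobs: it submits Spark Streaming and Storm jobs to a fixed-size worker pool, and it can cancel, activate, or deactivate a running job, persisting each status change through StreamingDao.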

Source

/*
 * Copyright (C) 2017 Baifendian Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *          http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.baifendian.swordfish.execserver.runner.streaming;

import static com.baifendian.swordfish.common.job.struct.node.JobType.SPARK_STREAMING;
import static com.baifendian.swordfish.common.job.struct.node.JobType.STORM;

import com.baifendian.swordfish.common.config.BaseConfig;
import com.baifendian.swordfish.common.hadoop.YarnRestClient;
import com.baifendian.swordfish.common.utils.DateUtils;
import com.baifendian.swordfish.dao.DaoFactory;
import com.baifendian.swordfish.dao.StreamingDao;
import com.baifendian.swordfish.dao.enums.FlowStatus;
import com.baifendian.swordfish.dao.model.StreamingResult;
import com.baifendian.swordfish.execserver.job.AbstractStormProcessJob;
import com.baifendian.swordfish.execserver.job.AbstractYarnJob;
import com.baifendian.swordfish.execserver.job.JobProps;
import com.baifendian.swordfish.execserver.utils.Constants;
import com.baifendian.swordfish.execserver.utils.JobLogger;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.text.MessageFormat;
import java.util.Date;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.configuration.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Manages the lifecycle of streaming jobs on the execution server: submission
 * to a worker pool, cancellation, activation and deactivation.
 */
public class StreamingRunnerManager {

    private static final Logger logger = LoggerFactory.getLogger(StreamingRunnerManager.class);

    private final ExecutorService streamingExecutorService;

    private final StreamingDao streamingDao;

    public StreamingRunnerManager(Configuration conf) {
        streamingDao = DaoFactory.getDaoInstance(StreamingDao.class);

        // Pool size comes from configuration, falling back to the default when the key is absent.
        int threads = conf.getInt(Constants.EXECUTOR_STREAMING_THREADS, Constants.defaultStreamingThreadNum);

        // Name the worker threads so they are easy to identify in thread dumps.
        ThreadFactory flowThreadFactory = new ThreadFactoryBuilder().setNameFormat("Exec-Server-StreamingRunner")
                .build();
        streamingExecutorService = Executors.newFixedThreadPool(threads, flowThreadFactory);
    }

    /**
     * Submit a streaming job to the worker pool.
     *
     * @param streamingResult the streaming job execution record
     */
    public void submitJob(StreamingResult streamingResult) {

        // Nothing to do if the job has already finished.
        if (streamingResult.getStatus().typeIsFinished()) {
            return;
        }

        // Build a unique job id from the exec id and the submission timestamp.
        String jobId = String.format("STREAMING_JOB_%s_%s", streamingResult.getExecId(),
                DateUtils.now(Constants.DATETIME_FORMAT));

        Date now = new Date();

        streamingResult.setStartTime(now);

        streamingResult.setStatus(FlowStatus.RUNNING);
        streamingResult.setJobId(jobId);

        streamingDao.updateResult(streamingResult);

        // Dedicated logger for this job, keyed by the job id.
        Logger jobLogger = new JobLogger(jobId);

        StreamingRunner runner = new StreamingRunner(streamingResult, jobLogger);
        streamingExecutorService.submit(runner);
    }

    /**
     * Cancel a job: kill its application(s) and update the stored status.
     *
     * @param streamingResult the streaming job execution record
     */
    public void cancelJob(StreamingResult streamingResult) throws Exception {
        JobProps props = new JobProps();

        // Rebuild the job context (working directory, proxy user, environment file,
        // job id) that was used when the job was submitted.
        props.setWorkDir(BaseConfig.getStreamingExecDir(streamingResult.getProjectId(),
                streamingResult.getStreamingId(), streamingResult.getExecId()));
        props.setProxyUser(streamingResult.getProxyUser());
        props.setEnvFile(BaseConfig.getSystemEnvPath());
        props.setJobAppId(streamingResult.getJobId());

        FlowStatus status = null;

        try {
            switch (streamingResult.getType()) {
            case SPARK_STREAMING: {
                AbstractYarnJob.cancelApplication(streamingResult.getAppLinkList(), props,
                        new JobLogger(streamingResult.getJobId()));

                // The last entry in the application list is the most recent application;
                // use it to query the final status from YARN.
                List<String> appLinkList = streamingResult.getAppLinkList();
                String appId = CollectionUtils.isEmpty(appLinkList) ? null
                        : appLinkList.get(appLinkList.size() - 1);

                status = YarnRestClient.getInstance().getApplicationStatus(appId);
                break;
            }
            case STORM: {
                if (CollectionUtils.isEmpty(streamingResult.getAppLinkList())) {
                    return;
                }

                AbstractStormProcessJob.cancelApplication(streamingResult.getAppLinkList().get(0));
                break;
            }
            default: {
                String msg = MessageFormat.format("Unsupported job type: {0}", streamingResult.getType());
                throw new Exception(msg);
            }
            }

            // Default to KILL when no final status could be obtained.
            if (status == null) {
                status = FlowStatus.KILL;
            }

            Date now = new Date();

            // Persist the end time and final status only once the application has finished.
            if (status.typeIsFinished()) {
                streamingResult.setStatus(status);
                streamingResult.setEndTime(now);

                streamingDao.updateResult(streamingResult);
            }
        } catch (Exception e) {
            logger.error(String.format("Cancel streaming job exception: %d", streamingResult.getExecId()), e);
            throw e;
        }
    }

    /**
     * Activate (resume) a deactivated job; only Storm topologies support this.
     */
    public void activateJob(StreamingResult streamingResult) throws Exception {
        try {
            switch (streamingResult.getType()) {
            case STORM:
                if (CollectionUtils.isEmpty(streamingResult.getAppLinkList())) {
                    return;
                }

                AbstractStormProcessJob.activateApplication(streamingResult.getAppLinkList().get(0));

                break;
            default:
                String msg = MessageFormat.format("Unsupported job type: {0}", streamingResult.getType());
                throw new Exception(msg);
            }

            streamingResult.setStatus(FlowStatus.RUNNING);
            streamingDao.updateResult(streamingResult);
        } catch (Exception e) {
            logger.error(String.format("Activate streaming job exception: %d", streamingResult.getExecId()), e);
            throw e;
        }
    }

    /**
     * Deactivate (pause) a job; only Storm topologies support this.
     */
    public void deactivateJob(StreamingResult streamingResult) throws Exception {
        try {
            switch (streamingResult.getType()) {
            case STORM:
                if (CollectionUtils.isEmpty(streamingResult.getAppLinkList())) {
                    return;
                }

                AbstractStormProcessJob.deactivateApplication(streamingResult.getAppLinkList().get(0));
                break;
            default:
                String msg = MessageFormat.format("Unsupported job type: {0}", streamingResult.getType());
                throw new Exception(msg);
            }

            streamingResult.setStatus(FlowStatus.INACTIVE);
            streamingDao.updateResult(streamingResult);
        } catch (Exception e) {
            logger.error(String.format("Deactivate streaming job exception: %d", streamingResult.getExecId()), e);
            throw e;
        }
    }

    /**
     * Shut down the worker pool, interrupting any jobs still running.
     */
    public void destory() {
        if (!streamingExecutorService.isShutdown()) {
            streamingExecutorService.shutdownNow();
        }
    }
}
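
Usage

Below is a minimal sketch of how an execution server might drive this class. It is illustrative only: it assumes the DAO layer behind DaoFactory is already configured, that StreamingResult has a no-arg constructor, and that FlowStatus defines an INIT state; none of those details appear in the listing above.

import com.baifendian.swordfish.dao.enums.FlowStatus;
import com.baifendian.swordfish.dao.model.StreamingResult;
import com.baifendian.swordfish.execserver.runner.streaming.StreamingRunnerManager;
import org.apache.commons.configuration.PropertiesConfiguration;

public class StreamingRunnerManagerDemo {

    public static void main(String[] args) throws Exception {
        // An empty Commons Configuration: the manager falls back to
        // Constants.defaultStreamingThreadNum for its pool size.
        StreamingRunnerManager manager = new StreamingRunnerManager(new PropertiesConfiguration());

        // In the execution server this record is loaded from the database;
        // the no-arg constructor and FlowStatus.INIT are assumptions here.
        StreamingResult result = new StreamingResult();
        result.setStatus(FlowStatus.INIT); // a non-finished status, so submitJob proceeds

        manager.submitJob(result);

        // On server shutdown, stop the pool and interrupt running jobs.
        manager.destory();
    }
}

Note that submitJob marks the record RUNNING and persists it before handing the work to a StreamingRunner, so a crash between the update and the actual launch leaves a RUNNING row with no live application.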