Example usage for the java.lang.InterruptedException(String) constructor

Introduction

On this page you can find example usage for the java.lang.InterruptedException(String) constructor.

Prototype

public InterruptedException(String s) 

Document

Constructs an InterruptedException with the specified detail message.
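
Before the collected real-world examples, here is a minimal, self-contained sketch of the constructor in isolation. The class name, method names, and message text are illustrative placeholders, not taken from any of the source files listed under Usage: a waiting loop turns its own timeout into an InterruptedException carrying a detail message, and the caller reads that message back via getMessage() and restores the interrupt status.

public class InterruptedExceptionDemo {

    // Waits until workIsDone() reports completion; gives up after timeoutMillis
    // by throwing an InterruptedException whose detail message explains why.
    static void awaitCompletion(long timeoutMillis) throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        while (!workIsDone()) {
            if (System.currentTimeMillis() > deadline) {
                throw new InterruptedException(
                        "Gave up waiting after " + timeoutMillis + " ms");
            }
            Thread.sleep(50); // may itself throw InterruptedException
        }
    }

    // Placeholder so the sketch compiles; real code would check actual work state.
    static boolean workIsDone() {
        return false;
    }

    public static void main(String[] args) {
        try {
            awaitCompletion(200);
        } catch (InterruptedException e) {
            // The detail message passed to the constructor is available here.
            System.err.println("Interrupted: " + e.getMessage());
            Thread.currentThread().interrupt(); // restore the interrupt status
        }
    }
}

The same pattern recurs throughout the examples below: timeouts, forced termination, and failed subtasks are surfaced to callers as an InterruptedException whose detail message describes the reason.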

Usage

From source file:com.amazonaws.services.kinesis.scaling.auto.AutoscalingController.java

public void startMonitors() {
    // run all the configured monitors in a thread pool
    try {
        int i = 0;
        for (AutoscalingConfiguration streamConfig : this.config) {
            StreamMonitor monitor;
            try {
                LOG.info(String.format("AutoscalingController creating Stream Monitor for Stream %s",
                        streamConfig.getStreamName()));
                monitor = new StreamMonitor(streamConfig);
                runningMonitors.put(i, monitor);
                monitorFutures.put(i, executor.submit(monitor));
                i++;
            } catch (Exception e) {
                LOG.error(e.getMessage(), e);
            }
        }

        // spin through all stream monitors to see if any failed
        while (true) {
            for (Map.Entry<Integer, Future<?>> entry : monitorFutures.entrySet()) {
                if (entry.getValue() == null) {
                    throw new InterruptedException("Null Monitor Future");
                } else {
                    if (entry.getValue().isDone()) {
                        if (runningMonitors.get(entry.getKey()).getException() != null) {
                            throw new InterruptedException(
                                    runningMonitors.get(entry.getKey()).getException().getMessage());
                        }
                    }
                }
            }

            Thread.sleep(60000);
        }
    } catch (InterruptedException e) {
        try {
            stopAll();

            // stop the executor service
            LOG.error(e.getMessage(), e);
            LOG.info("Terminating Thread Pool");
            executor.shutdown();
        } catch (Exception e1) {
            LOG.error(e1.getMessage(), e1);
        }
    }
}

From source file:com.longevitysoft.java.bigslice.server.SlicerSlic3r.java

@Override
public void slice(@Header(value = Constants.MSG_HEADER_SLICE_CONFIG_ARRAY_LIST) String configList,
        @Header(value = Constants.MSG_HEADER_CAMEL_FILE_ABSOLUTE_PATH) String filePath,
        @Header(value = Constants.MSG_HEADER_OUTPUT_PATH) String headerOutputFilename) {
    Endpoint epSlice = camel.getEndpoint(Constants.EP_NAME_JMS_QUEUE_PENDINGSTL);
    Exchange inEx = epSlice.createExchange(ExchangePattern.InOnly);
    // check if output filename header present
    StringBuilder configFileParam = new StringBuilder();
    if (null != configList) {
        configList = configList.trim();
        String[] configs = configList.split(",");
        LOG.debug("configs received in msg header");
        configFileParam.append(Constants.SPACE);
        for (String configName : configs) {
            configFileParam.append(Constants.SLIC3R_PARAM_NAME_LOAD).append(Constants.SPACE).append(configName);
        }
    }
    // check if output filename header present
    try {
        StringBuilder gcodeOutputFilename = new StringBuilder().append(Constants.PARENT_DIR)
                .append(Constants.FILEPATH_GCODEOUT); // slic3r uses the STL folder by default
        if (null != headerOutputFilename) {
            gcodeOutputFilename.append(Constants.SLASH)
                    .append(headerOutputFilename.replace(Constants.TILDE_BANG_TILDE,
                            Constants.SLIC3R_PARAM_VAL_INPUT_FILENAME_BASE + Constants.UNDERSCORE));
            // init directory-tree in output filename
            int lastSlash = headerOutputFilename.lastIndexOf("/");
            if (0 < lastSlash) {
                String outputTree = headerOutputFilename.substring(0, lastSlash);
                outputTree = outputTree.replace("/./", "/");
                File outDir = new File(buildOutputPath() + Constants.SLASH + outputTree);
                outDir.mkdirs();
            }
        } else {
            gcodeOutputFilename.append(Constants.SLASH).append(Constants.SLIC3R_PARAM_VAL_INPUT_FILENAME_BASE);
            SimpleDateFormat sdf = new SimpleDateFormat("yyyy-dd-MM__HH-mm-ss");
            gcodeOutputFilename.append("_TS").append(sdf.format(Calendar.getInstance().getTime()));
        }
        if (null == pathToExecutable) {
            throw new InterruptedException("no exec path found");
        }
        for (int i = 0; i < pathToExecutable.size(); i++) {
            String execPath = pathToExecutable.get(i);
            // inject executable name into gcode output filename
            String insToken = null;
            int insertPos = gcodeOutputFilename.indexOf(Constants.SLIC3R_PARAM_VAL_INPUT_FILENAME_BASE);
            if (0 < insertPos) {
                insertPos += Constants.SLIC3R_PARAM_VAL_INPUT_FILENAME_BASE.length();
                if (null != execOutputFilenameFilter) {
                    insToken = execOutputFilenameFilter.get(i);
                }
                if (null == insToken) {
                    insToken = sanitizeFilename(execPath);
                }
                insToken = Constants.UNDERSCORE + insToken;
            }
            // build exec string
            final StringBuilder execStr = new StringBuilder(execPath).append(configFileParam)
                    .append(Constants.SPACE).append(Constants.SLIC3R_CLI_PARAM_OUTPUT_FILENAME_FORMAT)
                    .append(gcodeOutputFilename.substring(0, insertPos)).append(insToken)
                    .append(gcodeOutputFilename.substring(insertPos, gcodeOutputFilename.length()))
                    .append(Constants.EXT_GCODE).append(Constants.SPACE).append(filePath);
            LOG.debug("executing-slic3r: " + execStr + Constants.NEWLINE);
            final String fPath = filePath;
            final StringBuilder gcodeOutFName = gcodeOutputFilename;
            executorService.execute(new Runnable() {
                @Override
                public void run() {
                    try {
                        Runtime rt = Runtime.getRuntime();
                        RuntimeExec rte = new RuntimeExec();
                        StreamWrapper error, output;
                        Process proc = rt.exec(execStr.toString());
                        error = rte.getStreamWrapper(proc.getErrorStream(), Constants.STREAM_NAME_ERROR);
                        output = rte.getStreamWrapper(proc.getInputStream(), Constants.STREAM_NAME_OUTPUT);
                        int exitVal = 0;
                        error.start();
                        output.start();
                        error.join(3000);
                        output.join(3000);
                        exitVal = proc.waitFor();
                        // TODO process exitVal for caller - decide what to do in
                        // http://camel.apache.org/exception-clause.html
                        LOG.info(new StringBuilder().append("stl-file-path: ").append(fPath)
                                .append(", output-file-path:").append(gcodeOutFName).append(Constants.NEWLINE)
                                .append(", proc-output: ").append(output.getMessage()).append(Constants.NEWLINE)
                                .append(", proc-error: ").append(error.getMessage()).toString());
                    } catch (Exception e) {
                        LOG.trace(e.toString());
                    }
                }
            });
        }
    } catch (InterruptedException e) {
        LOG.trace(e.toString());
    }
}

From source file:org.apache.camel.component.file.remote.FtpOperations.java

public boolean connect(RemoteFileConfiguration configuration) throws GenericFileOperationFailedException {
    log.trace("Connecting using FTPClient: {}", client);

    String host = configuration.getHost();
    int port = configuration.getPort();
    String username = configuration.getUsername();

    if (clientConfig != null) {
        log.trace("Configuring FTPClient with config: {}", clientConfig);
        client.configure(clientConfig);
    }

    if (log.isTraceEnabled()) {
        log.trace("Connecting to {} using connection timeout: {}", configuration.remoteServerInformation(),
                client.getConnectTimeout());
    }

    boolean connected = false;
    int attempt = 0;

    while (!connected) {
        try {
            if (log.isTraceEnabled() && attempt > 0) {
                log.trace("Reconnect attempt #{} connecting to {}", attempt,
                        configuration.remoteServerInformation());
            }
            client.connect(host, port);
            // must check reply code if we are connected
            int reply = client.getReplyCode();

            if (FTPReply.isPositiveCompletion(reply)) {
                // yes we could connect
                connected = true;
            } else {
                // throw an exception to force the retry logic in the catch exception block
                throw new GenericFileOperationFailedException(client.getReplyCode(), client.getReplyString(),
                        "Server refused connection");
            }
        } catch (Exception e) {
            // check if we are interrupted so we can break out
            if (Thread.currentThread().isInterrupted()) {
                throw new GenericFileOperationFailedException("Interrupted during connecting",
                        new InterruptedException("Interrupted during connecting"));
            }

            GenericFileOperationFailedException failed;
            if (e instanceof GenericFileOperationFailedException) {
                failed = (GenericFileOperationFailedException) e;
            } else {
                failed = new GenericFileOperationFailedException(client.getReplyCode(), client.getReplyString(),
                        e.getMessage(), e);
            }

            log.trace("Cannot connect due: {}", failed.getMessage());
            attempt++;
            if (attempt > endpoint.getMaximumReconnectAttempts()) {
                throw failed;
            }
            if (endpoint.getReconnectDelay() > 0) {
                try {
                    Thread.sleep(endpoint.getReconnectDelay());
                } catch (InterruptedException ie) {
                    // we could potentially also be interrupted during sleep
                    Thread.currentThread().interrupt();
                    throw new GenericFileOperationFailedException("Interrupted during sleeping", ie);
                }
            }
        }
    }

    // must enter passive mode directly after connect
    if (configuration.isPassiveMode()) {
        log.trace("Using passive mode connections");
        client.enterLocalPassiveMode();
    }

    // must set soTimeout after connect
    if (endpoint instanceof FtpEndpoint) {
        FtpEndpoint<?> ftpEndpoint = (FtpEndpoint<?>) endpoint;
        if (ftpEndpoint.getSoTimeout() > 0) {
            log.trace("Using SoTimeout=" + ftpEndpoint.getSoTimeout());
            try {
                client.setSoTimeout(ftpEndpoint.getSoTimeout());
            } catch (IOException e) {
                throw new GenericFileOperationFailedException(client.getReplyCode(), client.getReplyString(),
                        e.getMessage(), e);
            }
        }
    }

    try {
        boolean login;
        if (username != null) {
            log.trace("Attempting to login user: {} using password: {}", username, configuration.getPassword());
            login = client.login(username, configuration.getPassword());
        } else {
            log.trace("Attempting to login anonymous");
            login = client.login("anonymous", "");
        }
        log.trace("User {} logged in: {}", username != null ? username : "anonymous", login);
        if (!login) {
            throw new GenericFileOperationFailedException(client.getReplyCode(), client.getReplyString());
        }
        client.setFileType(configuration.isBinary() ? FTP.BINARY_FILE_TYPE : FTP.ASCII_FILE_TYPE);
    } catch (IOException e) {
        throw new GenericFileOperationFailedException(client.getReplyCode(), client.getReplyString(),
                e.getMessage(), e);
    }

    // site commands
    if (endpoint.getConfiguration().getSiteCommand() != null) {
        // commands can be separated using new line
        Iterator<?> it = ObjectHelper.createIterator(endpoint.getConfiguration().getSiteCommand(), "\n");
        while (it.hasNext()) {
            Object next = it.next();
            String command = endpoint.getCamelContext().getTypeConverter().convertTo(String.class, next);
            log.trace("Site command to send: {}", command);
            if (command != null) {
                boolean result = sendSiteCommand(command);
                if (!result) {
                    throw new GenericFileOperationFailedException(
                            "Site command: " + command + " returned false");
                }
            }
        }
    }

    return true;
}

From source file:org.onecmdb.ui.gwt.desktop.server.command.exec.MDRExecThread.java

public void run() {
    CiBean historyBean = null;
    String stdErr = null;
    String stdOut = null;
    ExecResult result = null;
    String prgPath = null;

    try {
        CiBean[] beans = getMDRBeans();
        CiBean mdrBean = beans[0];
        CiBean configBean = beans[1];
        /*
        final CiBean mdrBean = cmd.getCI("Ci", cmd.getMdr());
                   
        //final CiBean mdrBean = cmd.getCI("Ci", configBean.toStringValue("mdrRepository"));
        if (mdrBean == null) {
           throw new IllegalArgumentException("No MDR with name '" + cmd.getMdr() + "' found!");
        }
        log("MDR '" + cmd.getMdr() + "' loaded...");
                
        // Fetch Config.
        Collection<CiBean> configBeans = cmd.queryCI("MDR_ConfigEntry", "name", cmd.getConfig());
        if (configBeans.size() != 1) {
           throw new IllegalArgumentException("Found " + configBeans.size() + " config CI's with name <" + cmd.getConfig() + "> is not found.");
        }
                
        final CiBean configBean = configBeans.iterator().next();
        */
        log("MDR Config loaded...");

        historyBean = cmd.createHistory(configBean);

        log("History created [" + historyBean.getAlias() + "] ...");

        final HashMap<String, String> params = new HashMap<String, String>();
        log("Initialize Params START...");
        for (ValueBean v : configBean.getAttributeValues()) {
            String value = v.getValue();
            if (params.containsKey(v.getAlias())) {
                String oldValue = params.get(v.getAlias());
                value = oldValue + "," + value;
            }
            params.put(v.getAlias(), value);
            log("\t" + v.getAlias() + "=" + value);
        }
        params.put("mdr_history", historyBean.getAlias());
        params.put("onecmdb_token", cmd.getToken());

        log("Initialize Params END...");

        log("Exec Thread Started...");

        // Create exec.
        synchronized (this) {
            if (this.terminate) {
                throw new InterruptedException("Terminate...");
            }
            exec = new JavaExec(this);
            Properties p = ShellMapper.getShellProperties();
            if (p != null) {
                exec.setShellMap(p);
            }
        }
        exec.setProgramArgs(params);

        String mdrName = mdrBean.toStringValue("name");
        String configProgram = configBean.toStringValue("program");

        String startPath = cmd.getRoot() + "/" + mdrName;
        File startFile = new File(startPath);
        log("Start path set to '" + startPath + "'");

        if (!startFile.exists() || !startFile.isDirectory()) {
            throw new IllegalArgumentException(
                    "Start path '" + startPath + "' does not exist or is not a directory!");
        }

        if (configProgram.length() == 0) {
            throw new IllegalArgumentException("No 'program' specified in config!");
        }

        prgPath = startPath + "/" + configProgram;

        log("Program path set to '" + prgPath + "'");

        exec.setProgramPath(prgPath);
        exec.setStartDir(startPath);
        // Direct in/out/err...

        String logPath = startPath + "/logs/";
        File logFile = new File(logPath);
        if (!logFile.exists()) {
            logFile.mkdirs();
        }
        File prgFile = new File(prgPath);
        String prgName = prgFile.getName();

        // Time
        SimpleDateFormat fmt = new SimpleDateFormat("yyyyMMdd-HHmmss");
        String dateStr = fmt.format(new Date());
        stdErr = logPath + "stderr-" + prgName + "-" + dateStr;
        stdOut = logPath + "stdout-" + prgName + "-" + dateStr;
        synchronized (this) {
            if (this.terminate) {
                throw new InterruptedException("Terminate...");
            }
            streamHandler = new StreamHandler(this,
                    mdrBean.toStringValue("name") + "/" + configBean.toStringValue("program"));
        }

        streamHandler.setStderr(new FileWriter(new File(stdErr)));
        streamHandler.setStdout(new FileWriter(new File(stdOut)));
        streamHandler.setAutoClose(true);
        //streamHandler.setStdin(writer);

        log("Direct stdout to " + stdOut);
        log("Direct stderr to " + stdErr);

        exec.setStreamHandler(streamHandler);

        // Do exec.
        log("Start Exec...");
        long start = System.currentTimeMillis();
        result = exec.doExec();
        long stop = System.currentTimeMillis();
        log("Exec Ended [" + (stop - start) + "ms], result=" + result.toString());
    } catch (Throwable t) {
        log("Exec Exception: ");
        log(t);

        result = new ExecResult();
        result.setMessage("Internal Exception: " + t.getMessage());
        result.setRc(ExecResult.ERROR_EXCEPTION_INIT);
        cmd.setExecError(t);
    } finally {
        if (streamHandler != null) {
            streamHandler.terminate();
        }
        if (historyBean != null) {
            try {
                log("Update History....");
                // Reload historyBean:
                CiBean local = cmd.getCI(historyBean.getDerivedFrom(), historyBean.getAlias());
                CiBean base = local.copy();
                cmd.setValue(local, "exitCode", "" + result.getRc());
                cmd.setValue(local, "execMessage", result.getMessage());
                cmd.setValue(local, "stderr", stdErr);
                cmd.setValue(local, "stdout", stdOut);
                if (result.getRc() != 0) {
                    cmd.setValue(local, "status", MDRHistoryState.FAILED);
                } else {
                    String status = local.toStringValue("status");
                    if (status == null || status.length() == 0 || status.equals(MDRHistoryState.EXECUTING)) {
                        cmd.setValue(local, "status", MDRHistoryState.READY);
                    }
                }
                IRfcResult rfcResult = cmd.update(local, base);
                if (rfcResult.isRejected()) {
                    log("History update was rejected, cause " + rfcResult.getRejectCause());
                } else {
                    log("History update complete");
                }

            } catch (Throwable t) {
                logger.error("Can't update history " + historyBean.getAlias() + " for " + prgPath);
                log("History update failed");
                log(t);
                cmd.setExecError(t);
            }
        }
        log("Execution Completed");
        cmd.unregister(this);
    }
}

From source file:org.apache.camel.component.file.remote.SftpOperations.java

public boolean connect(RemoteFileConfiguration configuration) throws GenericFileOperationFailedException {
    if (isConnected()) {
        // already connected
        return true;
    }

    boolean connected = false;
    int attempt = 0;

    while (!connected) {
        try {
            if (LOG.isTraceEnabled() && attempt > 0) {
                LOG.trace("Reconnect attempt #" + attempt + " connecting to "
                        + configuration.remoteServerInformation());
            }

            if (channel == null || !channel.isConnected()) {
                if (session == null || !session.isConnected()) {
                    LOG.trace("Session isn't connected, trying to recreate and connect.");
                    session = createSession(configuration);
                    if (endpoint.getConfiguration().getConnectTimeout() > 0) {
                        LOG.trace("Connecting use connectTimeout: "
                                + endpoint.getConfiguration().getConnectTimeout() + " ...");
                        session.connect(endpoint.getConfiguration().getConnectTimeout());
                    } else {
                        LOG.trace("Connecting ...");
                        session.connect();
                    }
                }

                LOG.trace("Channel isn't connected, trying to recreate and connect.");
                channel = (ChannelSftp) session.openChannel("sftp");

                if (endpoint.getConfiguration().getConnectTimeout() > 0) {
                    LOG.trace("Connecting use connectTimeout: "
                            + endpoint.getConfiguration().getConnectTimeout() + " ...");
                    channel.connect(endpoint.getConfiguration().getConnectTimeout());
                } else {
                    LOG.trace("Connecting ...");
                    channel.connect();
                }
                LOG.info("Connected to " + configuration.remoteServerInformation());
            }

            // yes we could connect
            connected = true;
        } catch (Exception e) {
            // check if we are interrupted so we can break out
            if (Thread.currentThread().isInterrupted()) {
                throw new GenericFileOperationFailedException("Interrupted during connecting",
                        new InterruptedException("Interrupted during connecting"));
            }

            GenericFileOperationFailedException failed = new GenericFileOperationFailedException(
                    "Cannot connect to " + configuration.remoteServerInformation(), e);
            if (LOG.isTraceEnabled()) {
                LOG.trace("Cannot connect due: " + failed.getMessage());
            }
            attempt++;
            if (attempt > endpoint.getMaximumReconnectAttempts()) {
                throw failed;
            }
            if (endpoint.getReconnectDelay() > 0) {
                try {
                    Thread.sleep(endpoint.getReconnectDelay());
                } catch (InterruptedException ie) {
                    // we could potentially also be interrupted during sleep
                    Thread.currentThread().interrupt();
                    throw new GenericFileOperationFailedException("Interrupted during sleeping", ie);
                }
            }
        }
    }

    return true;
}

From source file:org.apache.giraph.rexster.utils.RexsterUtils.java

/**
 * Parse all the vertices from the JSON retrieved from Rexster. Inspired
 * by the implementation of the JSONObject class.
 *
 * @param  br           buffer over the HTTP response content
 * @return JSONTokener  tokener over the HTTP JSON. Null in case the results
 *                      array is empty.
 */
public static JSONTokener parseJSONEnvelope(BufferedReader br) throws InterruptedException {

    JSONTokener tokener = null;

    try {
        tokener = new JSONTokener(br);
        /* check that the JSON is well-formed by starting with a '{' */
        if (tokener.nextClean() != START_OBJECT) {
            LOG.error(String.format("A JSONObject text must begin with '%c'", START_OBJECT));
        }

        /* loop on the whole array */
        char c = '\0';
        String key = null;
        for (;;) {
            c = tokener.nextClean();
            switch (c) {
            case 0:
                LOG.error(String.format("A JSONObject text must end with '%c'", END_OBJECT));
                break;
            case END_OBJECT:
                return tokener;
            default:
                tokener.back();
                key = tokener.nextValue().toString();
            }

            c = tokener.nextClean();

            if (c != KEY_VALUE_SEPARATOR) {
                LOG.error(String.format("Expected a '%c' after a key", KEY_VALUE_SEPARATOR));
            }

            if (key != null && !key.equals("results")) {
                tokener.nextValue();
            } else {
                /* starting array */
                c = tokener.nextClean();
                if (c != START_ARRAY) {
                    LOG.error("'results' is expected to be an array");
                }

                /* check if the array is empty. If so, return null to signal that
                   no objects are available in the array, otherwise return the
                   tokener. */
                c = tokener.nextClean();
                if (c == END_ARRAY) {
                    return null;
                } else {
                    tokener.back();
                    return tokener;
                }
            }

            switch (tokener.nextClean()) {
            case ';':
            case ',':
                if (tokener.nextClean() == '}') {
                    return tokener;
                }
                tokener.back();
                break;
            case '}':
                return tokener;
            default:
                LOG.error("Expected a ',' or '}'");
            }
        }

    } catch (JSONException e) {
        LOG.error("Unable to parse the JSON with the vertices.\n" + e.getMessage());
        throw new InterruptedException(e.toString());
    }
}

From source file:org.openqa.selenium.os.UnixProcess.java

public void waitFor(long timeout) throws InterruptedException {
    long until = System.currentTimeMillis() + timeout;
    boolean timedOut = true;
    while (System.currentTimeMillis() < until) {
        if (handler.hasResult()) {
            timedOut = false;
            break;
        }
        Thread.sleep(50);
    }
    if (timedOut) {
        throw new InterruptedException(String.format("Process timed out after waiting for %d ms.", timeout));
    }
}

From source file:com.hi.datacleaner.ExternalCommandTransformer.java

private String[] getResult(List<String> commandTokens) throws IOException, InterruptedException {
    Process process = new ProcessBuilder(commandTokens).start();

    if (!process.waitFor(_timeout, TimeUnit.MILLISECONDS)) {
        process.destroy();
        throw new InterruptedException(
                "Process has been interrupted because of timeout (" + _timeout + "ms). ");
    }

    BufferedReader stdin = new BufferedReader(new InputStreamReader(process.getInputStream()));
    BufferedReader stderr = new BufferedReader(new InputStreamReader(process.getErrorStream()));

    StringBuilder result = new StringBuilder();
    String line;
    int linesCount = 0;

    while ((line = stdin.readLine()) != null) {
        linesCount++;
        result.append(line).append(_separator);
    }

    if (linesCount == 0) {
        result.append(ERROR);

        while ((line = stderr.readLine()) != null) {
            result.append(line).append(_separator);
        }
    }

    return new String[] { result.toString() };
}

From source file:io.druid.query.extraction.namespace.TestKafkaExtractionCluster.java

@BeforeClass
public static void setupStatic() throws Exception {
    zkTestServer = new TestingServer(-1, new File(tmpDir.getAbsolutePath() + "/zk"), true);
    zkClient = new ZkClient(zkTestServer.getConnectString(), 10000, 10000, ZKStringSerializer$.MODULE$);
    if (!zkClient.exists("/kafka")) {
        zkClient.create("/kafka", null, CreateMode.PERSISTENT);
    }

    log.info("---------------------------Started ZK---------------------------");

    final Properties serverProperties = new Properties();
    serverProperties.putAll(kafkaProperties);
    serverProperties.put("broker.id", "0");
    serverProperties.put("log.dir", tmpDir.getAbsolutePath() + "/log");
    serverProperties.put("log.cleaner.enable", "true");
    serverProperties.put("host.name", "127.0.0.1");
    serverProperties.put("zookeeper.connect", zkTestServer.getConnectString() + "/kafka");
    serverProperties.put("zookeeper.session.timeout.ms", "10000");
    serverProperties.put("zookeeper.sync.time.ms", "200");

    kafkaConfig = new KafkaConfig(serverProperties);

    final long time = DateTime.parse("2015-01-01").getMillis();
    kafkaServer = new KafkaServer(kafkaConfig, new Time() {

        @Override
        public long milliseconds() {
            return time;
        }

        @Override
        public long nanoseconds() {
            return milliseconds() * 1_000_000;
        }

        @Override
        public void sleep(long ms) {
            try {
                Thread.sleep(ms);
            } catch (InterruptedException e) {
                throw Throwables.propagate(e);
            }
        }
    });
    kafkaServer.startup();
    int sleepCount = 0;
    while (!kafkaServer.kafkaController().isActive()) {
        Thread.sleep(10);
        if (++sleepCount > 100) {
            throw new InterruptedException("Controller took too long to awaken");
        }
    }

    log.info("---------------------------Started Kafka Server---------------------------");

    ZkClient zkClient = new ZkClient(zkTestServer.getConnectString() + "/kafka", 10000, 10000,
            ZKStringSerializer$.MODULE$);
    try {
        final Properties topicProperties = new Properties();
        topicProperties.put("cleanup.policy", "compact");
        if (!AdminUtils.topicExists(zkClient, topicName)) {
            AdminUtils.createTopic(zkClient, topicName, 1, 1, topicProperties);
        }

        log.info("---------------------------Created topic---------------------------");

        Assert.assertTrue(AdminUtils.topicExists(zkClient, topicName));
    } finally {
        zkClient.close();
    }
    fnCache.clear();
    final Properties kafkaProducerProperties = makeProducerProperties();
    Producer<byte[], byte[]> producer = new Producer<byte[], byte[]>(
            new ProducerConfig(kafkaProducerProperties));
    try {
        producer.send(new KeyedMessage<byte[], byte[]>(topicName, StringUtils.toUtf8("abcdefg"),
                StringUtils.toUtf8("abcdefg")));
    } catch (Exception e) {
        throw Throwables.propagate(e);
    } finally {
        producer.close();
    }

    System.setProperty("druid.extensions.searchCurrentClassloader", "false");

    injector = Initialization.makeInjectorWithModules(
            GuiceInjectors.makeStartupInjectorWithModules(ImmutableList.<Module>of()),
            ImmutableList.of(new Module() {
                @Override
                public void configure(Binder binder) {
                    binder.bindConstant().annotatedWith(Names.named("serviceName")).to("test");
                    binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0);
                }
            }, new NamespacedExtractionModule(), new KafkaExtractionNamespaceModule() {
                @Override
                public Properties getProperties(@Json ObjectMapper mapper, Properties systemProperties) {
                    final Properties consumerProperties = new Properties(kafkaProperties);
                    consumerProperties.put("zookeeper.connect", zkTestServer.getConnectString() + "/kafka");
                    consumerProperties.put("zookeeper.session.timeout.ms", "10000");
                    consumerProperties.put("zookeeper.sync.time.ms", "200");
                    return consumerProperties;
                }
            }));
    renameManager = injector.getInstance(KafkaExtractionManager.class);

    log.info("--------------------------- placed default item via producer ---------------------------");
    extractionCacheManager = injector.getInstance(NamespaceExtractionCacheManager.class);
    extractionCacheManager.schedule(new KafkaExtractionNamespace(topicName, namespace));
    long start = System.currentTimeMillis();
    while (renameManager.getBackgroundTaskCount() < 1) {
        Thread.sleep(10); // wait for map populator to start up
        if (System.currentTimeMillis() > start + 60_000) {
            throw new ISE("renameManager took too long to start");
        }
    }
    log.info("--------------------------- started rename manager ---------------------------");
}

From source file:org.wso2.carbon.registry.governance.api.test.LifeCycleServiceTestCase.java

@Test(groups = {
        "wso2.greg" }, description = "check service life cycle promote/demote test scenarios", dependsOnMethods = {
                "testAddWSDL" })
public void testCheckLifeCycle() throws RegistryException, InterruptedException {
    String testStageState;
    if (FrameworkSettings.STRATOS.equalsIgnoreCase("true")) {
        testStageState = "Tested";
    } else {
        testStageState = "Testing";
    }

    try {
        registry.associateAspect(wsdl_path, lcName);
        assertEquals(registry.get(wsdl_path).getProperty(StateProperty), "Development",
                "Default Service Life Cycle Development State Fail:");
        Thread.sleep(sleepTime);

        registry.invokeAspect(wsdl_path, lcName, "Promote"); //Promote Life cycle to Tested State
        assertEquals(registry.get(wsdl_path).getProperty(StateProperty), testStageState,
                "Service Life Cycle Promote to Test state fail :");
        Thread.sleep(3000);

        registry.invokeAspect(wsdl_path, lcName, "Promote"); //Promote Life cycle to Production State
        assertEquals(registry.get(wsdl_path).getProperty(StateProperty), "Production",
                "Service Life Cycle Promote to Production state fail:");
        Thread.sleep(3000);

        registry.invokeAspect(wsdl_path, lcName, "Demote"); //Demote Life cycle to Tested State
        assertEquals(registry.get(wsdl_path).getProperty(StateProperty), testStageState,
                "Service Life Cycle Demote to Test State fail :");
        Thread.sleep(3000);

        registry.invokeAspect(wsdl_path, lcName, "Demote"); //Demote Life cycle to Development State
        assertEquals(registry.get(wsdl_path).getProperty(StateProperty), "Development",
                "Service Life Cycle Demote to initial state fail:");
        Thread.sleep(3000);
        deleteWSDL(); //Delete wsdl

        Assert.assertFalse(registry.resourceExists(wsdl_path), "WSDL delete failed");
        log.info("LifeCycleServiceTestClient testCheckLifeCycle() - Passed");
    } catch (RegistryException e) {
        log.error("Failed to Promote/Demote Life Cycle :" + e);
        throw new RegistryException("Failed to Promote/Demote Life Cycle :" + e);
    } catch (InterruptedException e) {
        log.error("Failed to Promote/Demote Life Cycle :" + e);
        throw new InterruptedException("Failed to Promote/Demote Life Cycle :" + e);
    }
}