Example usage for java.lang Thread sleep

List of usage examples for java.lang Thread sleep

Introduction

On this page you can find example usages of java.lang.Thread.sleep.

Prototype

public static native void sleep(long millis) throws InterruptedException;

Document

Causes the currently executing thread to sleep (temporarily cease execution) for the specified number of milliseconds, subject to the precision and accuracy of system timers and schedulers.
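
Before the project examples below, here is a minimal, self-contained sketch of the typical call pattern (not taken from any of the files on this page; the class name SleepSketch and the one-second delay are arbitrary choices). Since sleep declares InterruptedException, a caller must either propagate the exception or handle it; when it cannot be propagated, restoring the interrupt flag is the conventional response.

public class SleepSketch {
    public static void main(String[] args) {
        System.out.println("Pausing for about one second...");
        try {
            Thread.sleep(1000L); // suspend the current thread for roughly 1000 ms
        } catch (InterruptedException e) {
            // restore the interrupt status so callers can still observe the interruption
            Thread.currentThread().interrupt();
        }
        System.out.println("Done.");
    }
}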

Usage

From source file:FormatStorage2ColumnStorageMR.java

@SuppressWarnings("deprecation")
public static void main(String[] args) throws Exception {

    if (args.length != 2) {
        System.out.println("FormatStorage2ColumnStorageMR <input> <output>");
        System.exit(-1);
    }

    JobConf conf = new JobConf(FormatStorageMR.class);

    conf.setJobName("FormatStorage2ColumnStorageMR");

    conf.setNumMapTasks(1);
    conf.setNumReduceTasks(4);

    conf.setOutputKeyClass(LongWritable.class);
    conf.setOutputValueClass(Unit.Record.class);

    conf.setMapperClass(FormatStorageMapper.class);
    conf.setReducerClass(ColumnStorageReducer.class);

    conf.setInputFormat(FormatStorageInputFormat.class);
    conf.set("mapred.output.compress", "flase");

    Head head = new Head();
    initHead(head);

    head.toJobConf(conf);

    FileInputFormat.setInputPaths(conf, args[0]);
    Path outputPath = new Path(args[1]);
    FileOutputFormat.setOutputPath(conf, outputPath);

    FileSystem fs = outputPath.getFileSystem(conf);
    fs.delete(outputPath, true);

    JobClient jc = new JobClient(conf);
    RunningJob rj = null;
    rj = jc.submitJob(conf);

    String lastReport = "";
    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss,SSS");
    long reportTime = System.currentTimeMillis();
    long maxReportInterval = 3 * 1000;
    while (!rj.isComplete()) {
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            // ignored: simply re-check the job status on the next loop iteration
        }

        int mapProgress = Math.round(rj.mapProgress() * 100);
        int reduceProgress = Math.round(rj.reduceProgress() * 100);

        String report = " map = " + mapProgress + "%,  reduce = " + reduceProgress + "%";

        if (!report.equals(lastReport) || System.currentTimeMillis() >= reportTime + maxReportInterval) {

            String output = dateFormat.format(Calendar.getInstance().getTime()) + report;
            System.out.println(output);
            lastReport = report;
            reportTime = System.currentTimeMillis();
        }
    }

    System.exit(0);

}

From source file:MailHandlerDemo.java

/**
 * Runs the demo.
 *
 * @param args the command line arguments
 * @throws IOException if there is a problem.
 */
public static void main(String[] args) throws IOException {
    List<String> l = Arrays.asList(args);
    if (l.contains("/?") || l.contains("-?") || l.contains("-help")) {
        LOGGER.info("Usage: java MailHandlerDemo " + "[[-all] | [-body] | [-custom] | [-debug] | [-low] "
                + "| [-simple] | [-pushlevel] | [-pushfilter] " + "| [-pushnormal] | [-pushonly]] " + "\n\n"
                + "-all\t\t: Execute all demos.\n" + "-body\t\t: An email with all records and only a body.\n"
                + "-custom\t\t: An email with attachments and dynamic names.\n"
                + "-debug\t\t: Output basic debug information about the JVM " + "and log configuration.\n"
                + "-low\t\t: Generates multiple emails due to low capacity." + "\n"
                + "-simple\t\t: An email with all records with body and " + "an attachment.\n"
                + "-pushlevel\t: Generates high priority emails when the"
                + " push level is triggered and normal priority when " + "flushed.\n"
                + "-pushFilter\t: Generates high priority emails when the "
                + "push level and the push filter is triggered and normal " + "priority emails when flushed.\n"
                + "-pushnormal\t: Generates multiple emails when the "
                + "MemoryHandler push level is triggered.  All generated "
                + "email are sent as normal priority.\n" + "-pushonly\t: Generates multiple emails when the "
                + "MemoryHandler push level is triggered.  Generates high "
                + "priority emails when the push level is triggered and " + "normal priority when flushed.\n");
    } else {
        final boolean debug = init(l); //may create log messages.
        try {
            LOGGER.log(Level.FINEST, "This is the finest part of the demo.",
                    new MessagingException("Fake JavaMail issue."));
            LOGGER.log(Level.FINER, "This is the finer part of the demo.",
                    new NullPointerException("Fake bug."));
            LOGGER.log(Level.FINE, "This is the fine part of the demo.");
            LOGGER.log(Level.CONFIG, "Logging config file is {0}.", getConfigLocation());
            LOGGER.log(Level.INFO, "Your temp directory is {0}, " + "please wait...", getTempDir());

            try { //Waste some time for the custom formatter.
                Thread.sleep(3L * 1000L);
            } catch (InterruptedException ex) {
                Thread.currentThread().interrupt();
            }

            LOGGER.log(Level.WARNING, "This is a warning.",
                    new FileNotFoundException("Fake file chooser issue."));
            LOGGER.log(Level.SEVERE, "The end of the demo.", new IOException("Fake access denied issue."));
        } finally {
            closeHandlers();
        }

        //Force parse errors.  This does have side effects.
        if (debug && getConfigLocation() != null) {
            LogManager.getLogManager().readConfiguration();
        }
    }
}

From source file:com.datis.kafka.stream.PageViewUntypedDemo.java

public static void main(String[] args) throws Exception {
    Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-pageview-untyped");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, "localhost:2181");
    props.put(StreamsConfig.TIMESTAMP_EXTRACTOR_CLASS_CONFIG, JsonTimestampExtractor.class);

    // setting offset reset to earliest so that we can re-run the demo code with the same pre-loaded data
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

    KStreamBuilder builder = new KStreamBuilder();

    final Serializer<JsonNode> jsonSerializer = new JsonSerializer();
    final Deserializer<JsonNode> jsonDeserializer = new JsonDeserializer();
    final Serde<JsonNode> jsonSerde = Serdes.serdeFrom(jsonSerializer, jsonDeserializer);

    KStream<String, JsonNode> views = builder.stream(Serdes.String(), jsonSerde, "streams-pageview-input");

    KTable<String, JsonNode> users = builder.table(Serdes.String(), jsonSerde, "streams-userprofile-input");

    KTable<String, String> userRegions = users.mapValues(new ValueMapper<JsonNode, String>() {
        @Override
        public String apply(JsonNode record) {
            return record.get("region").textValue();
        }
    });

    KStream<JsonNode, JsonNode> regionCount = views
            .leftJoin(userRegions, new ValueJoiner<JsonNode, String, JsonNode>() {
                @Override
                public JsonNode apply(JsonNode view, String region) {
                    ObjectNode jNode = JsonNodeFactory.instance.objectNode();

                    return jNode.put("user", view.get("user").textValue())
                            .put("page", view.get("page").textValue())
                            .put("region", region == null ? "UNKNOWN" : region);
                }
            }).map(new KeyValueMapper<String, JsonNode, KeyValue<String, JsonNode>>() {
                @Override
                public KeyValue<String, JsonNode> apply(String user, JsonNode viewRegion) {
                    return new KeyValue<>(viewRegion.get("region").textValue(), viewRegion);
                }
            })
            .countByKey(TimeWindows.of("GeoPageViewsWindow", 7 * 24 * 60 * 60 * 1000L).advanceBy(1000),
                    Serdes.String())
            // TODO: we can merge this toStream().map(...) with a single toStream(...)
            .toStream().map(new KeyValueMapper<Windowed<String>, Long, KeyValue<JsonNode, JsonNode>>() {
                @Override
                public KeyValue<JsonNode, JsonNode> apply(Windowed<String> key, Long value) {
                    ObjectNode keyNode = JsonNodeFactory.instance.objectNode();
                    keyNode.put("window-start", key.window().start()).put("region", key.key());

                    ObjectNode valueNode = JsonNodeFactory.instance.objectNode();
                    valueNode.put("count", value);

                    return new KeyValue<>((JsonNode) keyNode, (JsonNode) valueNode);
                }
            });

    // write to the result topic
    regionCount.to(jsonSerde, jsonSerde, "streams-pageviewstats-untyped-output");

    KafkaStreams streams = new KafkaStreams(builder, props);
    streams.start();

    // usually the stream application would be running forever,
    // in this example we just let it run for some time and stop since the input data is finite.
    Thread.sleep(5000L);

    streams.close();
}

From source file:awskinesis.AmazonKinesisApplicationSample.java

public static void main(String[] args) throws Exception {
    init();

    if (args.length == 1 && "delete-resources".equals(args[0])) {
        deleteResources();
        return;
    }

    String workerId = InetAddress.getLocalHost().getCanonicalHostName() + ":" + UUID.randomUUID();
    KinesisClientLibConfiguration kinesisClientLibConfiguration = new KinesisClientLibConfiguration(
            SAMPLE_APPLICATION_NAME, SAMPLE_APPLICATION_STREAM_NAME, credentialsProvider, workerId)
                    .withRegionName("cn-north-1");
    kinesisClientLibConfiguration.withInitialPositionInStream(SAMPLE_APPLICATION_INITIAL_POSITION_IN_STREAM);

    IRecordProcessorFactory recordProcessorFactory = new AmazonKinesisApplicationRecordProcessorFactory();
    final Worker worker = new Worker(recordProcessorFactory, kinesisClientLibConfiguration);

    System.out.printf("Running %s to process stream %s as worker %s...\n", SAMPLE_APPLICATION_NAME,
            SAMPLE_APPLICATION_STREAM_NAME, workerId);

    int exitCode = 0;
    try {
        worker.run();
    } catch (Throwable t) {
        System.err.println("Caught throwable while processing data.");
        t.printStackTrace();
        exitCode = 1;
    }

    // add a shutdown hook to stop the server
    Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
        @Override
        public void run() {
            LOG.info("########### shoutdown begin....");
            worker.shutdown();

            try {
                Thread.sleep(10000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            LOG.info("########### shoutdown end....");
        }
    }));

    System.exit(exitCode);
}

From source file:eqtlmappingpipeline.util.ModuleEqtlNeutrophilReplication.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws IOException, LdCalculatorException {

    System.out.println(HEADER);
    System.out.println();
    System.out.flush(); //flush to make sure header is before errors
    try {
        Thread.sleep(25); //Allows flush to complete
    } catch (InterruptedException ex) {
        // ignored: the pause is only a best-effort wait for the flush above
    }

    CommandLineParser parser = new PosixParser();
    final CommandLine commandLine;
    try {
        commandLine = parser.parse(OPTIONS, args, true);
    } catch (ParseException ex) {
        System.err.println("Invalid command line arguments: " + ex.getMessage());
        System.err.println();
        new HelpFormatter().printHelp(" ", OPTIONS);
        System.exit(1);
        return;
    }

    final String[] genotypesBasePaths = commandLine.getOptionValues("g");
    final RandomAccessGenotypeDataReaderFormats genotypeDataType;
    final String replicationQtlFilePath = commandLine.getOptionValue("e");
    final String interactionQtlFilePath = commandLine.getOptionValue("i");
    final String outputFilePath = commandLine.getOptionValue("o");
    final double ldCutoff = Double.parseDouble(commandLine.getOptionValue("ld"));
    final int window = Integer.parseInt(commandLine.getOptionValue("w"));

    System.out.println("Genotype: " + Arrays.toString(genotypesBasePaths));
    System.out.println("Interaction file: " + interactionQtlFilePath);
    System.out.println("Replication file: " + replicationQtlFilePath);
    System.out.println("Output: " + outputFilePath);
    System.out.println("LD: " + ldCutoff);
    System.out.println("Window: " + window);

    try {
        if (commandLine.hasOption("G")) {
            genotypeDataType = RandomAccessGenotypeDataReaderFormats
                    .valueOf(commandLine.getOptionValue("G").toUpperCase());
        } else {
            if (genotypesBasePaths[0].endsWith(".vcf")) {
                System.err.println(
                        "Only vcf.gz is supported. Please see manual on how to do create a vcf.gz file.");
                System.exit(1);
                return;
            }
            try {
                genotypeDataType = RandomAccessGenotypeDataReaderFormats
                        .matchFormatToPath(genotypesBasePaths[0]);
            } catch (GenotypeDataException e) {
                System.err
                        .println("Unable to determine input 1 type based on specified path. Please specify -G");
                System.exit(1);
                return;
            }
        }
    } catch (IllegalArgumentException e) {
        System.err.println("Error parsing --genotypesFormat \"" + commandLine.getOptionValue("G")
                + "\" is not a valid input data format");
        System.exit(1);
        return;
    }

    final RandomAccessGenotypeData genotypeData;

    try {
        genotypeData = genotypeDataType.createFilteredGenotypeData(genotypesBasePaths, 100, null, null, null,
                0.8);
    } catch (TabixFileNotFoundException e) {
        LOGGER.fatal("Tabix file not found for input data at: " + e.getPath() + "\n"
                + "Please see README on how to create a tabix file");
        System.exit(1);
        return;
    } catch (IOException e) {
        LOGGER.fatal("Error reading input data: " + e.getMessage(), e);
        System.exit(1);
        return;
    } catch (IncompatibleMultiPartGenotypeDataException e) {
        LOGGER.fatal("Error combining the impute genotype data files: " + e.getMessage(), e);
        System.exit(1);
        return;
    } catch (GenotypeDataException e) {
        LOGGER.fatal("Error reading input data: " + e.getMessage(), e);
        System.exit(1);
        return;
    }

    ChrPosTreeMap<ArrayList<ReplicationQtl>> replicationQtls = new ChrPosTreeMap<>();

    CSVReader replicationQtlReader = new CSVReader(new FileReader(replicationQtlFilePath), '\t');
    replicationQtlReader.readNext();//skip header
    String[] replicationLine;
    while ((replicationLine = replicationQtlReader.readNext()) != null) {

        try {

            GeneticVariant variant = genotypeData.getSnpVariantByPos(replicationLine[REPLICATION_SNP_CHR_COL],
                    Integer.parseInt(replicationLine[REPLICATION_SNP_POS_COL]));
            if (variant == null) {
                continue;
            }

            ReplicationQtl replicationQtl = new ReplicationQtl(replicationLine[REPLICATION_SNP_CHR_COL],
                    Integer.parseInt(replicationLine[REPLICATION_SNP_POS_COL]),
                    replicationLine[REPLICATION_GENE_COL],
                    Double.parseDouble(replicationLine[REPLICATION_BETA_COL]),
                    variant.getAlternativeAlleles().get(0).getAlleleAsString());
            ArrayList<ReplicationQtl> posReplicationQtls = replicationQtls.get(replicationQtl.getChr(),
                    replicationQtl.getPos());
            if (posReplicationQtls == null) {
                posReplicationQtls = new ArrayList<>();
                replicationQtls.put(replicationQtl.getChr(), replicationQtl.getPos(), posReplicationQtls);
            }
            posReplicationQtls.add(replicationQtl);

        } catch (Exception e) {
            System.out.println(Arrays.toString(replicationLine));
            throw e;
        }
    }

    int interactionSnpNotInGenotypeData = 0;
    int noReplicationQtlsInWindow = 0;
    int noReplicationQtlsInLd = 0;
    int multipleReplicationQtlsInLd = 0;
    int replicationTopSnpNotInGenotypeData = 0;

    final CSVWriter outputWriter = new CSVWriter(new FileWriter(new File(outputFilePath)), '\t', '\0');
    final String[] outputLine = new String[14];
    int c = 0;
    outputLine[c++] = "Chr";
    outputLine[c++] = "Pos";
    outputLine[c++] = "SNP";
    outputLine[c++] = "Gene";
    outputLine[c++] = "Module";
    outputLine[c++] = "DiscoveryZ";
    outputLine[c++] = "ReplicationZ";
    outputLine[c++] = "DiscoveryZCorrected";
    outputLine[c++] = "ReplicationZCorrected";
    outputLine[c++] = "DiscoveryAlleleAssessed";
    outputLine[c++] = "ReplicationAlleleAssessed";
    outputLine[c++] = "bestLd";
    outputLine[c++] = "bestLd_dist";
    outputLine[c++] = "nextLd";
    outputWriter.writeNext(outputLine);

    HashSet<String> notFound = new HashSet<>();

    CSVReader interactionQtlReader = new CSVReader(new FileReader(interactionQtlFilePath), '\t');
    interactionQtlReader.readNext();//skip header
    String[] interactionQtlLine;
    while ((interactionQtlLine = interactionQtlReader.readNext()) != null) {

        String snp = interactionQtlLine[1];
        String chr = interactionQtlLine[2];
        int pos = Integer.parseInt(interactionQtlLine[3]);
        String gene = interactionQtlLine[4];
        String alleleAssessed = interactionQtlLine[9];
        String module = interactionQtlLine[12];
        double discoveryZ = Double.parseDouble(interactionQtlLine[10]);

        GeneticVariant interactionQtlVariant = genotypeData.getSnpVariantByPos(chr, pos);

        if (interactionQtlVariant == null) {
            System.err.println("Interaction QTL SNP not found in genotype data: " + chr + ":" + pos);
            ++interactionSnpNotInGenotypeData;
            continue;
        }

        ReplicationQtl bestMatch = null;
        double bestMatchR2 = Double.NaN;
        Ld bestMatchLd = null;
        double nextBestR2 = Double.NaN;

        ArrayList<ReplicationQtl> sameSnpQtls = replicationQtls.get(chr, pos);

        if (sameSnpQtls != null) {
            for (ReplicationQtl sameSnpQtl : sameSnpQtls) {
                if (sameSnpQtl.getGene().equals(gene)) {
                    bestMatch = sameSnpQtl;
                    bestMatchR2 = 1;
                }
            }
        }

        NavigableMap<Integer, ArrayList<ReplicationQtl>> potentionalReplicationQtls = replicationQtls
                .getChrRange(chr, pos - window, true, pos + window, true);

        for (ArrayList<ReplicationQtl> potentialReplicationQtls : potentionalReplicationQtls.values()) {

            for (ReplicationQtl potentialReplicationQtl : potentialReplicationQtls) {

                if (!potentialReplicationQtl.getGene().equals(gene)) {
                    continue;
                }

                GeneticVariant potentialReplicationQtlVariant = genotypeData
                        .getSnpVariantByPos(potentialReplicationQtl.getChr(), potentialReplicationQtl.getPos());

                if (potentialReplicationQtlVariant == null) {
                    notFound.add(potentialReplicationQtl.getChr() + ":" + potentialReplicationQtl.getPos());
                    ++replicationTopSnpNotInGenotypeData;
                    continue;
                }

                Ld ld = interactionQtlVariant.calculateLd(potentialReplicationQtlVariant);
                double r2 = ld.getR2();

                if (r2 > 1) {
                    r2 = 1;
                }

                if (bestMatch == null) {
                    bestMatch = potentialReplicationQtl;
                    bestMatchR2 = r2;
                    bestMatchLd = ld;
                } else if (r2 > bestMatchR2) {
                    bestMatch = potentialReplicationQtl;
                    nextBestR2 = bestMatchR2;
                    bestMatchR2 = r2;
                    bestMatchLd = ld;
                }

            }
        }

        double replicationZ = Double.NaN;
        double replicationZCorrected = Double.NaN;
        double discoveryZCorrected = Double.NaN;

        String replicationAlleleAssessed = null;

        if (bestMatch != null) {
            replicationZ = bestMatch.getBeta();
            replicationAlleleAssessed = bestMatch.getAssessedAllele();

            if (pos != bestMatch.getPos()) {

                String commonHap = null;
                double commonHapFreq = -1;
                for (Map.Entry<String, Double> hapFreq : bestMatchLd.getHaplotypesFreq().entrySet()) {

                    double f = hapFreq.getValue();

                    if (f > commonHapFreq) {
                        commonHapFreq = f;
                        commonHap = hapFreq.getKey();
                    }

                }

                String[] commonHapAlleles = StringUtils.split(commonHap, '/');

                discoveryZCorrected = commonHapAlleles[0].equals(alleleAssessed) ? discoveryZ : discoveryZ * -1;
                replicationZCorrected = commonHapAlleles[1].equals(replicationAlleleAssessed) ? replicationZ
                        : replicationZ * -1;

            } else {

                discoveryZCorrected = discoveryZ;
                replicationZCorrected = alleleAssessed.equals(replicationAlleleAssessed) ? replicationZ
                        : replicationZ * -1;

            }

        }

        c = 0;
        outputLine[c++] = chr;
        outputLine[c++] = String.valueOf(pos);
        outputLine[c++] = snp;
        outputLine[c++] = gene;
        outputLine[c++] = module;
        outputLine[c++] = String.valueOf(discoveryZ);
        outputLine[c++] = bestMatch == null ? "NA" : String.valueOf(replicationZ);
        outputLine[c++] = bestMatch == null ? "NA" : String.valueOf(discoveryZCorrected);
        outputLine[c++] = bestMatch == null ? "NA" : String.valueOf(replicationZCorrected);
        outputLine[c++] = alleleAssessed;
        outputLine[c++] = bestMatch == null ? "NA" : String.valueOf(bestMatch.getAssessedAllele());
        outputLine[c++] = String.valueOf(bestMatchR2);
        outputLine[c++] = bestMatch == null ? "NA" : String.valueOf(Math.abs(pos - bestMatch.getPos()));
        outputLine[c++] = String.valueOf(nextBestR2);
        outputWriter.writeNext(outputLine);

    }

    outputWriter.close();

    for (String e : notFound) {
        System.err.println("Not found: " + e);
    }

    System.out.println("interactionSnpNotInGenotypeData: " + interactionSnpNotInGenotypeData);
    System.out.println("noReplicationQtlsInWindow: " + noReplicationQtlsInWindow);
    System.out.println("noReplicationQtlsInLd: " + noReplicationQtlsInLd);
    System.out.println("multipleReplicationQtlsInLd: " + multipleReplicationQtlsInLd);
    System.out.println("replicationTopSnpNotInGenotypeData: " + replicationTopSnpNotInGenotypeData);

}

From source file:com.threeglav.sh.bauk.main.StreamHorizonEngine.java

public static void main(final String[] args) throws Exception {
    printRuntimeInfo();
    final long start = System.currentTimeMillis();
    LOG.info("To run in test mode set system parameter {}=true",
            BaukEngineConfigurationConstants.IDEMPOTENT_FEED_PROCESSING_PARAM_NAME);
    Runtime.getRuntime().addShutdownHook(new ShutdownHook());
    final BaukConfiguration conf = findConfiguration();
    if (conf != null) {
        ConfigurationProperties.setBaukProperties(conf.getProperties());
        final ConfigurationValidator configValidator = new ConfigurationValidator(conf);
        try {
            configValidator.validate();
        } catch (final Exception exc) {
            BaukUtil.logEngineMessageSync("Error while validating configuration: " + exc.getMessage());
            LOG.error("", exc);
            System.exit(-1);
        }
        remotingHandler = new RemotingServer();
        remotingHandler.start();
        createProcessingRoutes(conf);
        final boolean throughputTestingMode = ConfigurationProperties
                .getSystemProperty(BaukEngineConfigurationConstants.THROUGHPUT_TESTING_MODE_PARAM_NAME, false);
        if (throughputTestingMode) {
            BaukUtil.logEngineMessageSync(
                    "ENGINE IS RUNNING IN THROUGHPUT TESTING MODE! ONE INPUT FEED FILE PER THREAD WILL BE CACHED AND PROCESSED REPEATEDLY!!!");
        }
        instanceStartTime = System.currentTimeMillis();
        final boolean isMultiInstance = ConfigurationProperties.isConfiguredPartitionedMultipleInstances();
        if (isMultiInstance) {
            final int totalPartitionsCount = ConfigurationProperties.getSystemProperty(
                    BaukEngineConfigurationConstants.MULTI_INSTANCE_PARTITION_COUNT_PARAM_NAME, -1);
            final String myUniqueIdentifier = ConfigurationProperties.getBaukInstanceIdentifier();
            BaukUtil.logEngineMessageSync("Configured to run in multi-instance mode of " + totalPartitionsCount
                    + " instances in total. My unique identifier is " + myUniqueIdentifier);
        }
        BaukUtil.logEngineMessageSync("Finished initialization! Started counting uptime");
        startProcessing();
        final long total = System.currentTimeMillis() - start;
        final long totalSec = total / 1000;
        final boolean detectBaukInstances = ConfigurationProperties
                .getSystemProperty(BaukEngineConfigurationConstants.DETECT_OTHER_BAUK_INSTANCES, false);
        if (detectBaukInstances) {
            final int numberOfInstances = HazelcastCacheInstanceManager.getNumberOfBaukInstances();
            BaukUtil.logEngineMessage(
                    "Total number of detected running engine instances is " + numberOfInstances);
        }
        BaukUtil.logEngineMessage("Engine started successfully in " + total + "ms (" + totalSec
                + " seconds). Waiting for feed files...\n\n");
    } else {
        LOG.error(
                "Unable to find valid configuration file! Check your startup scripts and make sure system property {} points to valid feed configuration file. Aborting!",
                CONFIG_FILE_PROP_NAME);
        BaukUtil.logEngineMessage(
                "Unable to find valid configuration file! Check your startup scripts and make sure system property "
                        + CONFIG_FILE_PROP_NAME + " points to valid feed configuration file. Aborting!");
        System.exit(-1);
    }
    // block here, polling every 10 seconds, until shutdown is requested
    while (!BaukUtil.shutdownStarted()) {
        Thread.sleep(10000);
    }
}

From source file:com.moss.veracity.core.config.ConfigManager.java

public static void main(String[] args) throws Exception {

    BasicConfigurator.configure();

    ConfigManager manager = new ConfigManager(new File("test.xml"));

    Thread.sleep(3000l);

    manager.updateConfig(new Configuration());

    Thread.sleep(3000l);

    manager.close();
}

From source file:com.linkedin.helix.mock.storage.MockStorageProcess.java

public static void main(String[] args) throws Exception {
    String clusterName = "storage-cluster";
    String relayClusterName = "relay-cluster";
    String zkServerAddress = "localhost:2181";
    String host = "localhost";
    int port = 8900;
    if (args.length > 0) {
        CommandLine cmd = processCommandLineArgs(args);
        zkServerAddress = cmd.getOptionValue(zkServer);
        clusterName = cmd.getOptionValue(cluster);
        relayClusterName = cmd.getOptionValue(relayCluster);
        host = cmd.getOptionValue(hostAddress);
        String portString = cmd.getOptionValue(hostPort);
        port = Integer.parseInt(portString);
    }
    // Espresso_driver.py will consume this
    System.out.println("Mock storage started");
    MockStorageProcess process = new MockStorageProcess();
    process.start(host + "_" + port, zkServerAddress, clusterName, relayClusterName);

    Thread.sleep(10000000);
}

From source file:com.cloud.test.utils.TestClient.java

public static void main(String[] args) {
    String host = "http://localhost";
    String port = "8080";
    String testUrl = "/client/test";
    int numThreads = 1;

    try {
        // Parameters
        List<String> argsList = Arrays.asList(args);
        Iterator<String> iter = argsList.iterator();
        while (iter.hasNext()) {
            String arg = iter.next();
            // host
            if (arg.equals("-h")) {
                host = "http://" + iter.next();
            }

            if (arg.equals("-p")) {
                port = iter.next();
            }

            if (arg.equals("-t")) {
                numThreads = Integer.parseInt(iter.next());
            }

            if (arg.equals("-s")) {
                sleepTime = Long.parseLong(iter.next());
            }

            if (arg.equals("-c")) {
                cleanUp = Boolean.parseBoolean(iter.next());
                if (!cleanUp)
                    sleepTime = 0L; // no need to wait if we don't ever cleanup
            }

            if (arg.equals("-r")) {
                repeat = Boolean.parseBoolean(iter.next());
            }

            if (arg.equals("-u")) {
                numOfUsers = Integer.parseInt(iter.next());
            }

            if (arg.equals("-i")) {
                internet = Boolean.parseBoolean(iter.next());
            }
        }

        final String server = host + ":" + port + testUrl;
        s_logger.info("Starting test against server: " + server + " with " + numThreads + " thread(s)");
        if (cleanUp)
            s_logger.info("Clean up is enabled, each test will wait " + sleepTime + " ms before cleaning up");

        if (numOfUsers > 0) {
            s_logger.info("Pre-generating users for test of size : " + numOfUsers);
            users = new String[numOfUsers];
            Random ran = new Random();
            for (int i = 0; i < numOfUsers; i++) {
                users[i] = Math.abs(ran.nextInt()) + "-user";
            }
        }

        for (int i = 0; i < numThreads; i++) {
            new Thread(new Runnable() {
                public void run() {
                    do {
                        String username = null;
                        try {
                            long now = System.currentTimeMillis();
                            Random ran = new Random();
                            if (users != null) {
                                username = users[Math.abs(ran.nextInt()) % numOfUsers];
                            } else {
                                username = Math.abs(ran.nextInt()) + "-user";
                            }
                            NDC.push(username);

                            String url = server + "?email=" + username + "&password=" + username
                                    + "&command=deploy";
                            s_logger.info("Launching test for user: " + username + " with url: " + url);
                            HttpClient client = new HttpClient();
                            HttpMethod method = new GetMethod(url);
                            int responseCode = client.executeMethod(method);
                            boolean success = false;
                            String reason = null;
                            if (responseCode == 200) {
                                if (internet) {
                                    s_logger.info("Deploy successful...waiting 5 minute before SSH tests");
                                    Thread.sleep(300000L); // Wait 60 seconds so the linux VM can boot up.

                                    s_logger.info("Begin Linux SSH test");
                                    reason = sshTest(method.getResponseHeader("linuxIP").getValue());

                                    if (reason == null) {
                                        s_logger.info("Linux SSH test successful");
                                        s_logger.info("Begin Windows SSH test");
                                        reason = sshWinTest(method.getResponseHeader("windowsIP").getValue());
                                    }
                                }
                                if (reason == null) {
                                    if (internet) {
                                        s_logger.info("Windows SSH test successful");
                                    } else {
                                        s_logger.info("deploy test successful....now cleaning up");
                                        if (cleanUp) {
                                            s_logger.info(
                                                    "Waiting " + sleepTime + " ms before cleaning up vms");
                                            Thread.sleep(sleepTime);
                                        } else {
                                            success = true;
                                        }
                                    }
                                    if (users == null) {
                                        s_logger.info("Sending cleanup command");
                                        url = server + "?email=" + username + "&password=" + username
                                                + "&command=cleanup";
                                    } else {
                                        s_logger.info("Sending stop DomR / destroy VM command");
                                        url = server + "?email=" + username + "&password=" + username
                                                + "&command=stopDomR";
                                    }
                                    method = new GetMethod(url);
                                    responseCode = client.executeMethod(method);
                                    if (responseCode == 200) {
                                        success = true;
                                    } else {
                                        reason = method.getStatusText();
                                    }
                                } else {
                                    // Just stop but don't destroy the VMs/Routers
                                    s_logger.info("SSH test failed with reason '" + reason + "', stopping VMs");
                                    url = server + "?email=" + username + "&password=" + username
                                            + "&command=stop";
                                    responseCode = client.executeMethod(new GetMethod(url));
                                }
                            } else {
                                // Just stop but don't destroy the VMs/Routers
                                reason = method.getStatusText();
                                s_logger.info("Deploy test failed with reason '" + reason + "', stopping VMs");
                                url = server + "?email=" + username + "&password=" + username + "&command=stop";
                                client.executeMethod(new GetMethod(url));
                            }

                            if (success) {
                                s_logger.info("***** Completed test for user : " + username + " in "
                                        + ((System.currentTimeMillis() - now) / 1000L) + " seconds");
                            } else {
                                s_logger.info("##### FAILED test for user : " + username + " in "
                                        + ((System.currentTimeMillis() - now) / 1000L)
                                        + " seconds with reason : " + reason);
                            }
                        } catch (Exception e) {
                            s_logger.warn("Error in thread", e);
                            try {
                                HttpClient client = new HttpClient();
                                String url = server + "?email=" + username + "&password=" + username
                                        + "&command=stop";
                                client.executeMethod(new GetMethod(url));
                            } catch (Exception e1) {
                                // best-effort cleanup; ignore any failure here
                            }
                        } finally {
                            NDC.clear();
                        }
                    } while (repeat);
                }
            }).start();
        }
    } catch (Exception e) {
        s_logger.error(e);
    }
}

From source file:it.cnr.isti.labsedc.glimpse.MainMonitoring.java

/**
 * Read the properties and init the connections to the enterprise service bus
 *
 * @param args the systemSettings file
 */
public static void main(String[] args) {
    try {
        FileOutputStream fos = new FileOutputStream("glimpseLog.log");
        PrintStream ps = new PrintStream(fos);
        System.setErr(ps);

        Logger log = Logger.getLogger(MainMonitoring.class.getName());

        log.debug("Hello this is an debug message");
        log.info("Hello this is an info message");

        if (MainMonitoring.initProps(args[0]) && MainMonitoring.init()) {

            SplashScreen.Show();
            System.out.println("Please wait until setup is done...");

            //the buffer where the events are stored to be analyzed, in this version
            //the buffer object is not used because Drools has its own eventStream object
            EventsBuffer<GlimpseBaseEvent<?>> buffer = new EventsBufferImpl<GlimpseBaseEvent<?>>();

            //The complex event engine that will be used (in this case drools)
            ComplexEventProcessor engineOne = new ComplexEventProcessorImpl(Manager.Read(MANAGERPARAMETERFILE),
                    buffer, connFact, initConn);
            engineOne.start();

            try {
                Thread.sleep(3000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }

            RuleTemplateManager templateManager = new RuleTemplateManager(DROOLSRULEREQUESTTEMPLATE1,
                    DROOLSRULEREQUESTTEMPLATE2, DROOLSRULEREQUESTTEMPLATE3_1, DROOLSRULEREQUESTTEMPLATE3_2);

            //the component in charge to locate services and load specific rules.
            ServiceLocatorFactory.getServiceLocatorParseViolationReceivedFromBSM(engineOne, templateManager,
                    REGEXPATTERNFILEPATH).start();

            //start MailNotifier component
            MailNotification mailer = new MailNotification(Manager.Read(MAILNOTIFICATIONSETTINGSFILEPATH));
            mailer.start();

            //the manager of all the architecture
            GlimpseManager manager = new GlimpseManager(Manager.Read(MANAGERPARAMETERFILE), connFact, initConn,
                    engineOne.getRuleManager());
            manager.start();
        }
    } catch (Exception e) {
        System.out.println("USAGE: java -jar MainMonitoring.jar \"systemSettings\"");
    }
}