Example usage for java.util.Timer Timer()

Introduction

This page collects example usages of the java.util.Timer Timer() constructor.

Prototype

public Timer() 

Document

Creates a new timer.
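
Before the full examples below, here is a minimal, self-contained sketch of the constructor in use. The class name, task body, and one-second delay are illustrative assumptions, not taken from any of the source files listed on this page.

import java.util.Timer;
import java.util.TimerTask;

public class TimerSketch {
    public static void main(String[] args) {
        // Create a new timer backed by a single non-daemon thread
        Timer timer = new Timer();
        // Schedule a one-shot task to run after a one-second delay
        timer.schedule(new TimerTask() {
            @Override
            public void run() {
                System.out.println("Timer fired");
                // Cancel the timer so its thread stops and the JVM can exit
                timer.cancel();
            }
        }, 1000L);
    }
}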

Usage

From source file:UtilTimerDemo.java

public static void main(String[] args) {
    // Start and run a fixed-delay timer
    timer = new Timer();
    startTime = prevTime = System.currentTimeMillis();
    System.out.println("Fixed Delay Times");
    timer.schedule(new UtilTimerDemo(), DELAY, DELAY);

    // Sleep long enough to let the first timer finish
    try {
        Thread.sleep(DURATION * 2);
    } catch (Exception e) {
    }

    // Start and run a fixed-rate timer
    timer = new Timer();
    startTime = prevTime = System.currentTimeMillis();
    System.out.println("Fixed Rate Times");
    timer.scheduleAtFixedRate(new UtilTimerDemo(), DELAY, DELAY);
}

From source file:com.boozallen.cognition.kom.KafakOffsetMonitor.java

public static void main(String[] args) {
    CommandLineParser parser = new DefaultParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        System.err.println(e.getMessage());
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("KafakOffsetMonitor", options);
        System.exit(1);
    }

    KafakOffsetMonitor monitor = new KafakOffsetMonitor();
    monitor.zkHosts = cmd.getOptionValue("zkHosts");
    monitor.zkRoot = cmd.getOptionValue("zkRoot", "/storm-kafka");
    monitor.spoutId = cmd.getOptionValue("spoutId");
    monitor.logstashHost = cmd.getOptionValue("logstashHost");
    monitor.logstashPort = Integer.parseInt(cmd.getOptionValue("logstashPort"));

    int refresh = Integer.parseInt(cmd.getOptionValue("refresh", "15"));

    Timer timer = new Timer();
    int period = refresh * 1000;
    int delay = 0;
    timer.schedule(monitor, delay, period);
}

From source file:edu.upenn.egricelab.AlignerBoost.FilterSAMAlignPE.java

public static void main(String[] args) {
    if (args.length == 0) {
        printUsage();
        return;
    }
    try {
        parseOptions(args);
    } catch (IllegalArgumentException e) {
        System.err.println("Error: " + e.getMessage());
        printUsage();
        return;
    }

    // Read in the chromosome filter list, if specified
    if (chrFile != null) {
        chrFilter = new HashSet<String>();
        try {
            BufferedReader chrFilterIn = new BufferedReader(new FileReader(chrFile));
            String chr = null;
            while ((chr = chrFilterIn.readLine()) != null)
                chrFilter.add(chr);
            chrFilterIn.close();
            if (verbose > 0)
                System.err.println(
                        "Only looking at alignments on " + chrFilter.size() + " specified chromosomes");
        } catch (IOException e) {
            System.err.println("Error: " + e.getMessage());
            return;
        }
    }

    if (verbose > 0) {
        // Create the process monitor timer
        processMonitor = new Timer();
        // Create the status-reporting task
        statusTask = new ProcessStatusTask();
        // Schedule status updates at a fixed rate of statusFreq milliseconds
        processMonitor.scheduleAtFixedRate(statusTask, 0, statusFreq);
    }

    // Read in known SNP file, if specified
    if (knownSnpFile != null) {
        if (verbose > 0)
            System.err.println("Checking known SNPs from user specified VCF file");
        knownVCF = new VCFFileReader(new File(knownSnpFile));
    }

    SamReaderFactory readerFac = SamReaderFactory.makeDefault();
    SAMFileWriterFactory writerFac = new SAMFileWriterFactory();
    if (!isSilent)
        readerFac.validationStringency(ValidationStringency.LENIENT); // use LENIENT stringency
    else
        readerFac.validationStringency(ValidationStringency.SILENT); // use SILENT stringency

    SamReader in = readerFac.open(new File(inFile));
    SAMFileHeader inHeader = in.getFileHeader();
    if (inHeader.getGroupOrder() == GroupOrder.reference && inHeader.getSortOrder() == SortOrder.coordinate)
        System.err.println("Warning: Input file '" + inFile
                + "' might be sorted by coordinate and cannot be correctly processed!");

    SAMFileHeader header = inHeader.clone(); // copy the inFile header as outFile header
    // Add new programHeader
    SAMProgramRecord progRec = new SAMProgramRecord(progName);
    progRec.setProgramName(progName);
    progRec.setProgramVersion(progVer);
    progRec.setCommandLine(StringUtils.join(" ", args));
    header.addProgramRecord(progRec);
    //System.err.println(inFile + " groupOrder: " + in.getFileHeader().getGroupOrder() + " sortOrder: " + in.getFileHeader().getSortOrder());
    // reset the orders
    header.setGroupOrder(groupOrder);
    header.setSortOrder(sortOrder);

    // write SAMHeader
    String prevID = null;
    SAMRecord prevRecord = null;
    List<SAMRecord> alnList = new ArrayList<SAMRecord>();
    List<SAMRecordPair> alnPEList = null;

    // Estimate the fragment length distribution with a one-pass scan through the alignments
    SAMRecordIterator results = in.iterator();
    if (!NO_ESTIMATE) {
        if (verbose > 0) {
            System.err.println("Estimating insert fragment size distribution ...");
            statusTask.reset();
            statusTask.setInfo("alignments scanned");
        }
        long N = 0;
        double fragL_S = 0; // fragLen sum
        double fragL_SS = 0; // fragLen^2 sum
        while (results.hasNext()) {
            SAMRecord record = results.next();
            if (verbose > 0)
                statusTask.updateStatus();
            if (record.getFirstOfPairFlag() && !record.isSecondaryOrSupplementary()) {
                double fragLen = Math.abs(record.getInferredInsertSize());
                if (fragLen != 0 && fragLen >= MIN_FRAG_LEN && fragLen <= MAX_FRAG_LEN) { // only consider certain alignments
                    N++;
                    fragL_S += fragLen;
                    fragL_SS += fragLen * fragLen;
                }
                // stop estimating once enough alignments have been observed
                if (MAX_ESTIMATE_SCAN > 0 && N >= MAX_ESTIMATE_SCAN)
                    break;
            }
        }
        if (verbose > 0)
            statusTask.finish();
        // estimate fragment size
        if (N >= MIN_ESTIMATE_BASE) { // override command line values
            MEAN_FRAG_LEN = fragL_S / N;
            SD_FRAG_LEN = Math.sqrt((N * fragL_SS - fragL_S * fragL_S) / (N * (N - 1)));
            String estStr = String.format("Estimated fragment size distribution: N(%.1f, %.1f)", MEAN_FRAG_LEN,
                    SD_FRAG_LEN);
            if (verbose > 0)
                System.err.println(estStr);
            // also add the estimation to comment
            header.addComment(estStr);
        } else {
            System.err.println(
                    "Unable to estimate the fragment size distribution due to too few observed alignments");
            System.err.println(
                    "You have to specify the '--mean-frag-len' and '--sd-frag-len' on the command line and re-run this step");
            statusTask.cancel();
            processMonitor.cancel();
            return;
        }
        // Initialize the normal distribution model for fragment sizes
        normModel = new NormalDistribution(MEAN_FRAG_LEN, SD_FRAG_LEN);
        // reset the iterator, if necessary
        if (in.type() == SamReader.Type.SAM_TYPE) {
            try {
                in.close();
            } catch (IOException e) {
                System.err.println(e.getMessage());
            }
            in = readerFac.open(new File(inFile));
        }
        results.close();
        results = in.iterator();
    } // end of fragment-size estimation (skipped when NO_ESTIMATE is set)

    SAMFileWriter out = OUT_IS_SAM ? writerFac.makeSAMWriter(header, false, new File(outFile))
            : writerFac.makeBAMWriter(header, false, new File(outFile));

    // check each alignment again
    if (verbose > 0) {
        System.err.println("Filtering alignments ...");
        statusTask.reset();
        statusTask.setInfo("alignments processed");
    }
    while (results.hasNext()) {
        SAMRecord record = results.next();
        if (verbose > 0)
            statusTask.updateStatus();
        String ID = record.getReadName();
        // fix the read and quality string for this read if it is a secondary hit from multiple hits (used for BWA alignments)
        if (ID.equals(prevID) && record.getReadLength() == 0)
            SAMAlignFixer.fixSAMRecordRead(record, prevRecord);
        if (chrFilter != null && !chrFilter.contains(record.getReferenceName())) {
            prevID = ID;
            prevRecord = record;
            continue;
        }

        // fix the MD:Z string for aligners that emit an invalid format (e.g. seqAlto)
        if (fixMD)
            SAMAlignFixer.fixMisStr(record);

        // fix alignment, ignore if failed (unmapped or empty)
        if (!SAMAlignFixer.fixSAMRecord(record, knownVCF, DO_1DP)) {
            prevID = ID;
            prevRecord = record;
            continue;
        }
        if (!record.getReadPairedFlag()) {
            System.err.println("Error: alignment is not from a paired-end read at\n" + record.getSAMString());
            out.close();
            statusTask.cancel();
            processMonitor.cancel();
            return;
        }

        if (!ID.equals(prevID) && prevID != null || !results.hasNext()) { // a new (non-first) ID was met, or the end of the alignments was reached
            // create alnPEList from filtered alnList
            alnPEList = createAlnPEListFromAlnList(alnList);
            //System.err.printf("%d alignments for %s transformed to %d alnPairs%n", alnList.size(), prevID, alnPEList.size());
            int totalPair = alnPEList.size();
            // filter out highly unlikely PE hits
            filterPEHits(alnPEList, MIN_ALIGN_RATE, MIN_IDENTITY);
            // calculate posterior mapQ for each pair
            calcPEHitPostP(alnPEList, totalPair, MAX_HIT);
            // filter hits by mapQ
            if (MIN_MAPQ > 0)
                filterPEHits(alnPEList, MIN_MAPQ);

            // sort the list in DECREASING order using the reverse natural ordering
            Collections.sort(alnPEList, Collections.reverseOrder());
            // control max-best
            if (MAX_BEST != 0 && alnPEList.size() > MAX_BEST) { // potentially too many best hits
                int nBestStratum = 0;
                int bestMapQ = alnPEList.get(0).getPEMapQ(); // best mapQ from first PE
                for (SAMRecordPair pr : alnPEList)
                    if (pr.getPEMapQ() == bestMapQ)
                        nBestStratum++;
                    else
                        break; // the list is sorted, so we can stop searching
                if (nBestStratum > MAX_BEST)
                    alnPEList.clear();
            }
            // filter alignments with auxiliary filters
            if (!MAX_SENSITIVITY)
                filterPEHits(alnPEList, MAX_SEED_MIS, MAX_SEED_INDEL, MAX_ALL_MIS, MAX_ALL_INDEL);

            // report the remaining alignments, up to MAX_REPORT (0 means no limit)
            for (int i = 0; i < alnPEList.size() && (MAX_REPORT == 0 || i < MAX_REPORT); i++) {
                SAMRecordPair repPair = alnPEList.get(i);
                if (doUpdateBit)
                    repPair.setNotPrimaryAlignmentFlags(i != 0);
                int nReport = MAX_REPORT == 0 ? alnPEList.size() : Math.min(alnPEList.size(), MAX_REPORT);
                int nFiltered = alnPEList.size();
                if (repPair.fwdRecord != null) {
                    repPair.fwdRecord.setAttribute("NH", nReport);
                    repPair.fwdRecord.setAttribute("XN", nFiltered);
                    out.addAlignment(repPair.fwdRecord);
                }
                if (repPair.revRecord != null) {
                    repPair.revRecord.setAttribute("NH", nReport);
                    repPair.revRecord.setAttribute("XN", nFiltered);
                    out.addAlignment(repPair.revRecord);
                }
            }
            // reset list
            alnList.clear();
            alnPEList.clear();
        }
        // update
        if (!ID.equals(prevID)) {
            prevID = ID;
            prevRecord = record;
        }
        alnList.add(record);
    } // end while
    try {
        in.close();
        out.close();
    } catch (IOException e) {
        System.err.println(e.getMessage());
    }
    // Terminate the monitor task and monitor
    if (verbose > 0) {
        statusTask.cancel();
        statusTask.finish();
        processMonitor.cancel();
    }
}

From source file:edu.hawaii.soest.kilonalu.dvp2.DavisWxXMLSink.java

/**
 * Runs DavisWxXMLSink.
 * 
 * @param args  the command line arguments
 */
public static void main(String[] args) {

    final DavisWxXMLSink davisWxXMLSink = new DavisWxXMLSink();

    // Set up a simple logger that logs to the console
    PropertyConfigurator.configure(davisWxXMLSink.getLogConfigurationFile());

    if (davisWxXMLSink.parseArgs(args)) {

        // export data on a schedule

        TimerTask exportXML = new TimerTask() {
            public void run() {
                logger.debug("TimerTask.run() called.");
                davisWxXMLSink.export();
            }
        };

        Timer exportTimer = new Timer();
        // run the exportXML timer task immediately, then repeat every exportInterval milliseconds
        exportTimer.scheduleAtFixedRate(exportXML, new Date(), davisWxXMLSink.exportInterval);

    }
}

From source file:org.mitre.ptmatchadapter.PtmatchAdapter.java

public static void main(String... args) {
    // Create a one-time task that opens the config page in a browser after the
    // application has had a few seconds (9000 ms) to start up.
    new Timer().schedule(new TimerTask() {
        @Override
        public void run() {
            LOG.info("============= Open URL in Browser");
            // Open a Browser window for the user
            BareBonesBrowserLaunch.openURL("http://localhost:8082/index.html");
            LOG.info("============= After Open URL in Browser");
        }
    }, 9000);

    LOG.info("============= Call Fat Jar Router Main");
    // Call Fat Jar Router main last because it never returns
    FatJarRouter.main(args);
    LOG.info("============= Returned from Fat Jar Router Main");
}

From source file:com.inclouds.hbase.utils.RegionServerPoker.java

public static void main(String[] args) throws IOException {
    parseArgs(args);

    byte[] name = "usertable".getBytes();
    Configuration cfg = HBaseConfiguration.create();

    HTable table = new HTable(cfg, name);
    List<byte[]> keys = selectRandomKeys(table);

    LOG.info("Found keys:\n");
    for (byte[] k : keys) {
        LOG.info(new String(k));
    }

    Poker[] workers = new Poker[threads];
    for (int i = 0; i < threads; i++) {
        workers[i] = (reuseConfig == false) ? new Poker(i, keys) : new Poker(i, cfg, keys);
        workers[i].start();
    }

    // Start stats
    timer = new Timer();
    timer.schedule(new Stats(), 5000, 5000);
    // Join all workers
    for (int i = 0; i < threads; i++) {
        try {
            workers[i].join();
        } catch (InterruptedException e) {
            // TODO Auto-generated catch block
            //e.printStackTrace();
        }
    }

    LOG.info("Finished: " + ((double) completed.get() * 1000) / (endTime.get() - startTime.get()) + " RPS");
    System.exit(-1);
}

From source file:com.inclouds.hbase.utils.UserTableLoader.java

public static void main(String[] args) throws IOException {
    parseArgs(args);

    Configuration cfg = HBaseConfiguration.create();

    if (startRecordNumber == 0) {
        recreateTable(cfg);
    }

    // set current id to startRecordNumber
    completed.set(startRecordNumber);

    lastId = startRecordNumber + records;

    Worker[] workers = new Worker[threads];
    for (int i = 0; i < threads; i++) {
        workers[i] = (reuseConfig == false) ? new Worker(i) : new Worker(i, cfg);
        workers[i].start();
    }

    // Start stats
    timer = new Timer();
    timer.schedule(new Stats(), 5000, 5000);
    // Join all workers
    for (int i = 0; i < threads; i++) {
        try {
            workers[i].join();
        } catch (InterruptedException e) {
            // TODO Auto-generated catch block
            //e.printStackTrace();
        }
    }

    LOG.info("Finished: " + ((double) records * 1000) / (endTime.get() - startTime.get()) + " RPS");
    System.exit(-1);
}

From source file:edu.hawaii.soest.kilonalu.utilities.FileArchiverSink.java

/**
 * Runs FileArchiverSink.
 * 
 * @param args  the command line arguments
 */
public static void main(String[] args) {

    // Set up a simple logger that logs to the console
    BasicConfigurator.configure();

    final FileArchiverSink fileArchiverSink = new FileArchiverSink();

    if (fileArchiverSink.parseArgs(args)) {

        setupShutdownHook(fileArchiverSink);
        setupProgressListener(fileArchiverSink);

        // archive data on a schedule
        if (fileArchiverSink.getArchiveInterval() > 0) {
            // override the command line start and end times      
            fileArchiverSink.setupArchiveTime(fileArchiverSink);

            TimerTask archiveData = new TimerTask() {
                public void run() {
                    logger.debug("TimerTask.run() called.");

                    if (fileArchiverSink.validateSetup()) {
                        fileArchiverSink.export();
                        fileArchiverSink.setupArchiveTime(fileArchiverSink);
                    }
                }
            };

            Timer archiveTimer = new Timer();
            // run the archiveData timer task starting at the archive end time, repeating every archiveInterval seconds
            archiveTimer.scheduleAtFixedRate(archiveData, endArchiveCal.getTime(),
                    fileArchiverSink.getArchiveInterval() * 1000);

            // archive data once based on the start and end times  
        } else {
            fileArchiverSink.export();

        }
    }
}

From source file:com.inclouds.hbase.utils.RandomReadsPerf.java

public static void main(String[] args) throws IOException {
    parseArgs(args);

    latch = new CountDownLatch(threads);

    Configuration cfg = HBaseConfiguration.create();

    Worker[] workers = new Worker[threads];
    for (int i = 0; i < threads; i++) {
        workers[i] = (reuseConfig == false) ? new Worker(i, totalRead / threads)
                : new Worker(i, cfg, totalRead / threads);
        workers[i].start();
    }

    // Start stats
    timer = new Timer();
    timer.schedule(new Stats(), 5000, 5000);
    // Join all workers
    for (int i = 0; i < threads; i++) {
        try {
            workers[i].join();
        } catch (InterruptedException e) {
            // TODO Auto-generated catch block
            //e.printStackTrace();
        }
    }

    if (singlePass == false) {
        LOG.info("Finished: first pass : "
                + ((double) completed.get() * 1000) / (2 * (secondPassStartTime.get() - startTime.get()))
                + " RPS");
        LOG.info("Finished: second pass: "
                + ((double) completed.get() * 1000) / (2 * (endTime.get() - secondPassStartTime.get()))
                + " RPS");
    } else {
        LOG.info("Finished: " + ((double) completed.get() * 1000) / ((endTime.get() - startTime.get()))
                + " RPS");
    }
    System.exit(0);
}

From source file:Main.java

public static void timer(TimerTask t, int time) {
    new Timer().schedule(t, time);
}