Example usage for java.util.concurrent ExecutorService submit

List of usage examples for java.util.concurrent ExecutorService submit

Introduction

This page collects usage examples for java.util.concurrent ExecutorService submit.

Prototype

Future<?> submit(Runnable task);

Document

Submits a Runnable task for execution and returns a Future representing that task.
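
A minimal, self-contained sketch of this call (the class name SubmitRunnableDemo and the printed message are illustrative, not taken from any of the projects below): a Runnable is submitted to a single-thread pool, the returned Future<?> is used to wait for completion (its get() yields null for a Runnable), and the pool is shut down.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

public class SubmitRunnableDemo {
    public static void main(String[] args) throws Exception {
        ExecutorService executor = Executors.newSingleThreadExecutor();

        // submit(Runnable) returns a Future<?> whose get() yields null once the task completes normally
        Future<?> future = executor.submit(new Runnable() {
            @Override
            public void run() {
                System.out.println("task ran on " + Thread.currentThread().getName());
            }
        });

        future.get(); // blocks until the task finishes; task failures surface as ExecutionException

        executor.shutdown();
        executor.awaitTermination(5, TimeUnit.SECONDS);
    }
}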

Usage

From source file:com.btoddb.fastpersitentqueue.speedtest.SpeedTest.java

public static void main(String[] args) throws Exception {
    if (0 == args.length) {
        System.out.println();
        System.out.println("ERROR: must specify the config file path/name");
        System.out.println();
        System.exit(1);
    }

    SpeedTestConfig config = SpeedTestConfig.create(args[0]);

    System.out.println(config.toString());

    File theDir = new File(config.getDirectory(), "speed-" + UUID.randomUUID().toString());
    FileUtils.forceMkdir(theDir);

    Fpq queue = config.getFpq();
    queue.setJournalDirectory(new File(theDir, "journals"));
    queue.setPagingDirectory(new File(theDir, "pages"));

    try {
        queue.init();

        //
        // start workers
        //

        AtomicLong counter = new AtomicLong();
        AtomicLong pushSum = new AtomicLong();
        AtomicLong popSum = new AtomicLong();

        long startTime = System.currentTimeMillis();

        Set<SpeedPushWorker> pushWorkers = new HashSet<SpeedPushWorker>();
        for (int i = 0; i < config.getNumberOfPushers(); i++) {
            pushWorkers.add(new SpeedPushWorker(queue, config, counter, pushSum));
        }

        Set<SpeedPopWorker> popWorkers = new HashSet<SpeedPopWorker>();
        for (int i = 0; i < config.getNumberOfPoppers(); i++) {
            popWorkers.add(new SpeedPopWorker(queue, config, popSum));
        }

        ExecutorService pusherExecSrvc = Executors.newFixedThreadPool(
                config.getNumberOfPushers() + config.getNumberOfPoppers(), new ThreadFactory() {
                    @Override
                    public Thread newThread(Runnable runnable) {
                        Thread t = new Thread(runnable);
                        t.setName("SpeedTest-Pusher");
                        return t;
                    }
                });

        ExecutorService popperExecSrvc = Executors.newFixedThreadPool(
                config.getNumberOfPushers() + config.getNumberOfPoppers(), new ThreadFactory() {
                    @Override
                    public Thread newThread(Runnable runnable) {
                        Thread t = new Thread(runnable);
                        t.setName("SpeedTest-Popper");
                        return t;
                    }
                });

        long startPushing = System.currentTimeMillis();
        for (SpeedPushWorker sw : pushWorkers) {
            pusherExecSrvc.submit(sw);
        }

        long startPopping = System.currentTimeMillis();
        for (SpeedPopWorker sw : popWorkers) {
            popperExecSrvc.submit(sw);
        }

        //
        // wait to finish
        //

        long endTime = startTime + config.getDurationOfTest() * 1000;
        long endPushing = 0;
        long displayTimer = 0;
        while (0 == endPushing || !queue.isEmpty()) {
            // display status every second
            if (1000 < (System.currentTimeMillis() - displayTimer)) {
                System.out.println(String.format("status (%ds) : journals = %d : memory segments = %d",
                        (endTime - System.currentTimeMillis()) / 1000,
                        queue.getJournalMgr().getJournalIdMap().size(),
                        queue.getMemoryMgr().getSegments().size()));
                displayTimer = System.currentTimeMillis();
            }

            pusherExecSrvc.shutdown();
            if (pusherExecSrvc.awaitTermination(100, TimeUnit.MILLISECONDS)) {
                endPushing = System.currentTimeMillis();
                // tell poppers, all pushers are finished
                for (SpeedPopWorker sw : popWorkers) {
                    sw.stopWhenQueueEmpty();
                }
            }
        }

        long endPopping = System.currentTimeMillis();

        popperExecSrvc.shutdown();
        popperExecSrvc.awaitTermination(10, TimeUnit.SECONDS);

        long numberOfPushes = 0;
        for (SpeedPushWorker sw : pushWorkers) {
            numberOfPushes += sw.getNumberOfEntries();
        }

        long numberOfPops = 0;
        for (SpeedPopWorker sw : popWorkers) {
            numberOfPops += sw.getNumberOfEntries();
        }

        long pushDuration = endPushing - startPushing;
        long popDuration = endPopping - startPopping;

        System.out.println("push - pop checksum = " + pushSum.get() + " - " + popSum.get() + " = "
                + (pushSum.get() - popSum.get()));
        System.out.println("push duration = " + pushDuration);
        System.out.println("pop duration = " + popDuration);
        System.out.println();
        System.out.println("pushed = " + numberOfPushes);
        System.out.println("popped = " + numberOfPops);
        System.out.println();
        System.out.println("push entries/sec = " + numberOfPushes / (pushDuration / 1000f));
        System.out.println("pop entries/sec = " + numberOfPops / (popDuration / 1000f));
        System.out.println();
        System.out.println("journals created = " + queue.getJournalsCreated());
        System.out.println("journals removed = " + queue.getJournalsRemoved());
    } finally {
        if (null != queue) {
            queue.shutdown();
        }
        //            FileUtils.deleteDirectory(theDir);
    }
}

From source file:gobblin.restli.throttling.LocalStressTest.java

public static void main(String[] args) throws Exception {

    CommandLine cli = StressTestUtils.parseCommandLine(OPTIONS, args);

    int stressorThreads = Integer.parseInt(
            cli.getOptionValue(STRESSOR_THREADS.getOpt(), Integer.toString(DEFAULT_STRESSOR_THREADS)));
    int processorThreads = Integer.parseInt(
            cli.getOptionValue(PROCESSOR_THREADS.getOpt(), Integer.toString(DEFAULT_PROCESSOR_THREADS)));
    int artificialLatency = Integer.parseInt(
            cli.getOptionValue(ARTIFICIAL_LATENCY.getOpt(), Integer.toString(DEFAULT_ARTIFICIAL_LATENCY)));
    long targetQps = Integer.parseInt(cli.getOptionValue(QPS.getOpt(), Integer.toString(DEFAULT_TARGET_QPS)));

    Configuration configuration = new Configuration();
    StressTestUtils.populateConfigFromCli(configuration, cli);

    String resourceLimited = LocalStressTest.class.getSimpleName();

    Map<String, String> configMap = Maps.newHashMap();

    ThrottlingPolicyFactory factory = new ThrottlingPolicyFactory();
    SharedLimiterKey res1key = new SharedLimiterKey(resourceLimited);
    configMap.put(BrokerConfigurationKeyGenerator.generateKey(factory, res1key, null,
            ThrottlingPolicyFactory.POLICY_KEY), QPSPolicy.FACTORY_ALIAS);
    configMap.put(BrokerConfigurationKeyGenerator.generateKey(factory, res1key, null, QPSPolicy.QPS),
            Long.toString(targetQps));

    ThrottlingGuiceServletConfig guiceServletConfig = new ThrottlingGuiceServletConfig();
    guiceServletConfig.initialize(ConfigFactory.parseMap(configMap));
    LimiterServerResource limiterServer = guiceServletConfig.getInjector()
            .getInstance(LimiterServerResource.class);

    RateComputingLimiterContainer limiterContainer = new RateComputingLimiterContainer();

    Class<? extends Stressor> stressorClass = configuration.getClass(StressTestUtils.STRESSOR_CLASS,
            StressTestUtils.DEFAULT_STRESSOR_CLASS, Stressor.class);

    ExecutorService executorService = Executors.newFixedThreadPool(stressorThreads);

    SharedResourcesBroker broker = guiceServletConfig.getInjector().getInstance(
            Key.get(SharedResourcesBroker.class, Names.named(LimiterServerResource.BROKER_INJECT_NAME)));
    ThrottlingPolicy policy = (ThrottlingPolicy) broker.getSharedResource(new ThrottlingPolicyFactory(),
            new SharedLimiterKey(resourceLimited));
    ScheduledExecutorService reportingThread = Executors.newSingleThreadScheduledExecutor();
    reportingThread.scheduleAtFixedRate(new Reporter(limiterContainer, policy), 0, 15, TimeUnit.SECONDS);

    Queue<Future<?>> futures = new LinkedList<>();
    MockRequester requester = new MockRequester(limiterServer, artificialLatency, processorThreads);

    requester.start();
    for (int i = 0; i < stressorThreads; i++) {
        RestliServiceBasedLimiter restliLimiter = RestliServiceBasedLimiter.builder()
                .resourceLimited(resourceLimited).requestSender(requester).serviceIdentifier("stressor" + i)
                .build();

        Stressor stressor = stressorClass.newInstance();
        stressor.configure(configuration);
        futures.add(executorService
                .submit(new StressorRunner(limiterContainer.decorateLimiter(restliLimiter), stressor)));
    }
    int stressorFailures = 0;
    for (Future<?> future : futures) {
        try {
            future.get();
        } catch (ExecutionException ee) {
            stressorFailures++;
        }
    }
    requester.stop();

    executorService.shutdownNow();

    if (stressorFailures > 0) {
        log.error("There were " + stressorFailures + " failed stressor threads.");
    }
    System.exit(stressorFailures);
}

From source file:edu.msu.cme.rdp.kmer.cli.KmerCoverage.java

/**
 * This program maps the kmers from reads to kmers on each contig,
 * writes the mean, median coverage of each contig to a file
 * writes the kmer abundance to a file
 * @param args
 * @throws IOException 
 */
public static void main(String[] args) throws IOException, InterruptedException {
    int kmerSize = 45;
    final int maxThreads;
    final int maxTasks = 1000;
    final PrintStream match_reads_out;
    try {
        CommandLine cmdLine = new PosixParser().parse(options, args);
        args = cmdLine.getArgs();
        if (args.length < 5) {
            throw new Exception("Unexpected number of arguments");
        }
        kmerSize = Integer.parseInt(args[0]);
        if (kmerSize > Kmer.max_nucl_kmer_size) {
            throw new Exception("kmerSize should be less than " + Kmer.max_nucl_kmer_size);
        }
        if (cmdLine.hasOption("match_reads_out")) {
            match_reads_out = new PrintStream(cmdLine.getOptionValue("match_reads_out"));
        } else {
            match_reads_out = null;
        }
        if (cmdLine.hasOption("threads")) {
            maxThreads = Integer.valueOf(cmdLine.getOptionValue("threads"));
            if (maxThreads >= Runtime.getRuntime().availableProcessors()) {
                System.err.println(" Runtime.getRuntime().availableProcessors() "
                        + Runtime.getRuntime().availableProcessors());
            }

        } else {
            maxThreads = 1;
        }

        final KmerCoverage kmerCoverage = new KmerCoverage(kmerSize, new SequenceReader(new File(args[1])));

        final AtomicInteger outstandingTasks = new AtomicInteger();
        ExecutorService service = Executors.newFixedThreadPool(maxThreads);

        Sequence seq;

        // parse one file at a time
        for (int index = 4; index < args.length; index++) {

            SequenceReader reader = new SequenceReader(new File(args[index]));
            while ((seq = reader.readNextSequence()) != null) {
                if (seq.getSeqString().length() < kmerSize) {
                    continue;
                }
                final Sequence threadSeq = seq;

                Runnable r = new Runnable() {

                    public void run() {
                        try {
                            kmerCoverage.processReads(threadSeq, match_reads_out);
                            outstandingTasks.decrementAndGet();
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                };

                outstandingTasks.incrementAndGet();
                service.submit(r);

                // simple backpressure: busy-wait until the pool has worked off part of the backlog
                while (outstandingTasks.get() >= maxTasks)
                    ;

            }
            reader.close();
        }
        service.shutdown();
        service.awaitTermination(1, TimeUnit.DAYS);

        kmerCoverage.printCovereage(new FileOutputStream(new File(args[2])),
                new FileOutputStream(new File(args[3])));
        if (match_reads_out != null) {
            match_reads_out.close();
        }
    } catch (Exception e) {
        new HelpFormatter().printHelp(
                "KmerCoverage <kmerSize> <query_file> <coverage_out> <abundance_out> <reads_file> <reads_file>...\nmaximum kmerSize "
                        + Kmer.max_nucl_kmer_size,
                options);
        e.printStackTrace();
        System.exit(1);
    }
}

From source file:com.rk.grid.federation.FederatedCluster.java

/**
 * @param args
 * @throws Exception
 */
public static void main(String[] args) throws Exception {
    int port = Integer.parseInt(args[0]);
    String clusterName = args[1];
    String masterBrokerServiceName = args[2];
    int masterPort = Integer.parseInt(args[3]);
    String masterHost = args[4];

    IBroker<Object> masterBroker = null;
    for (int i = 0; i < 100; i++) {
        try {
            masterBroker = getConnection(masterBrokerServiceName, masterPort, masterHost);
            if (masterBroker != null)
                break;
        } catch (RemoteLookupFailureException e) {
            if (i % 100 == 0)
                System.out.println("Sleeping....");
        }
        Thread.sleep(100);
    }

    if (masterBroker == null)
        throw new RuntimeException("Unable to find master broker " + masterBrokerServiceName);

    BrokerInfo brokerInfo = masterBroker.getBrokerInfo();
    GridConfig gridConfig = brokerInfo.getConfig();
    List<String> jvmNodeParams = masterBroker.getBrokerInfo().getJvmNodeParams();
    GridExecutorService cluster = new GridExecutorService(port, jvmNodeParams, gridConfig, clusterName);
    cluster.getBroker().unPause();

    final TaskExecutor taskExecutor = new TaskExecutor(cluster);

    final IRemoteResultsHandler<Object> callback = masterBroker.getCallback();
    IWorkQueue<Object> workQueue = masterBroker.getWorkQueue();

    ExecutorService pool = Executors.newFixedThreadPool(3);

    masterBroker.unPause();

    while (!Thread.currentThread().isInterrupted()) {
        final IExecutable<?> executable = workQueue.take();

        if (executable == null)
            continue;

        if (executable.equals(IExecutable.POISON)) {
            break;
        }

        Callable<Object> callable = new Callable<Object>() {
            @Override
            public Object call() throws Exception {
                Future<ITaskResult<?>> future = taskExecutor.submit(executable);
                ITaskResult<?> iResult = future.get();

                String uid = executable.getUID();
                try {
                    callback.accept(new RemoteResult<Object>(iResult, uid));
                } catch (Throwable t) {
                    t.printStackTrace();
                    try {
                        callback.accept(new RemoteResult<Object>(
                                new RemoteExecutorException("Error executing remote task '" + uid + "'", t),
                                uid));
                    } catch (RemoteException e) {
                        throw new RuntimeException(e);
                    }
                }
                return null;
            }

        };

        pool.submit(callable);
    }
    pool.shutdown();
    taskExecutor.shutdown();
    System.out.println("Finished...!");
}

From source file:edu.msu.cme.rdp.kmer.KmerFilter.java

public static void main(String[] args) throws Exception {
    final KmerTrie kmerTrie;
    final SeqReader queryReader;
    final SequenceType querySeqType;
    final File queryFile;
    final KmerStartsWriter out;
    final boolean translQuery;
    final int wordSize;
    final int translTable;
    final boolean alignedSeqs;
    final List<String> refLabels = new ArrayList();
    final int maxThreads;

    try {
        CommandLine cmdLine = new PosixParser().parse(options, args);
        args = cmdLine.getArgs();

        if (args.length < 3) {
            throw new Exception("Unexpected number of arguments");
        }

        if (cmdLine.hasOption("out")) {
            out = new KmerStartsWriter(cmdLine.getOptionValue("out"));
        } else {
            out = new KmerStartsWriter(System.out);
        }

        if (cmdLine.hasOption("aligned")) {
            alignedSeqs = true;
        } else {
            alignedSeqs = false;
        }

        if (cmdLine.hasOption("transl-table")) {
            translTable = Integer.valueOf(cmdLine.getOptionValue("transl-table"));
        } else {
            translTable = 11;
        }

        if (cmdLine.hasOption("threads")) {
            maxThreads = Integer.valueOf(cmdLine.getOptionValue("threads"));
        } else {
            maxThreads = Runtime.getRuntime().availableProcessors();
        }

        queryFile = new File(args[1]);
        wordSize = Integer.valueOf(args[0]);
        SequenceType refSeqType = null;

        querySeqType = SeqUtils.guessSequenceType(queryFile);
        queryReader = new SequenceReader(queryFile);

        if (querySeqType == SequenceType.Protein) {
            throw new Exception("Expected nucl query sequences");
        }

        refSeqType = SeqUtils
                .guessSequenceType(new File(args[2].contains("=") ? args[2].split("=")[1] : args[2]));

        translQuery = refSeqType == SequenceType.Protein;

        if (translQuery && wordSize % 3 != 0) {
            throw new Exception("Word size must be a multiple of 3 for nucl ref seqs");
        }

        int trieWordSize;
        if (translQuery) {
            trieWordSize = wordSize / 3;
        } else {
            trieWordSize = wordSize;
        }
        kmerTrie = new KmerTrie(trieWordSize, translQuery);

        for (int index = 2; index < args.length; index++) {
            String refName;
            String refFileName = args[index];
            if (refFileName.contains("=")) {
                String[] lexemes = refFileName.split("=");
                refName = lexemes[0];
                refFileName = lexemes[1];
            } else {
                String tmpName = new File(refFileName).getName();
                if (tmpName.contains(".")) {
                    refName = tmpName.substring(0, tmpName.lastIndexOf("."));
                } else {
                    refName = tmpName;
                }
            }

            File refFile = new File(refFileName);

            if (refSeqType != SeqUtils.guessSequenceType(refFile)) {
                throw new Exception(
                        "Reference file " + refFile + " contains " + SeqUtils.guessFileFormat(refFile)
                                + " sequences but expected " + refSeqType + " sequences");
            }

            SequenceReader seqReader = new SequenceReader(refFile);
            Sequence seq;

            while ((seq = seqReader.readNextSequence()) != null) {
                if (seq.getSeqName().startsWith("#")) {
                    continue;
                }
                if (alignedSeqs) {
                    kmerTrie.addModelSequence(seq, refLabels.size());
                } else {
                    kmerTrie.addSequence(seq, refLabels.size());
                }
            }
            seqReader.close();

            refLabels.add(refName);
        }

    } catch (Exception e) {
        new HelpFormatter().printHelp("KmerSearch <word_size> <query_file> [name=]<ref_file> ...", options);
        System.err.println(e.getMessage());
        e.printStackTrace();
        System.exit(1);
        throw new RuntimeException("Stupid jvm"); //While this will never get thrown it is required to make sure javac doesn't get confused about uninitialized variables
    }

    long startTime = System.currentTimeMillis();
    long seqCount = 0;
    final int maxTasks = 25000;

    /*
     * if (args.length == 4) { maxThreads = Integer.valueOf(args[3]); } else {
     */

    //}

    System.err.println("Starting kmer mapping at " + new Date());
    System.err.println("*  Number of threads:       " + maxThreads);
    System.err.println("*  References:              " + refLabels);
    System.err.println("*  Reads file:              " + queryFile);
    System.err.println("*  Kmer length:             " + kmerTrie.getWordSize());

    final AtomicInteger processed = new AtomicInteger();
    final AtomicInteger outstandingTasks = new AtomicInteger();

    ExecutorService service = Executors.newFixedThreadPool(maxThreads);

    Sequence querySeq;

    while ((querySeq = queryReader.readNextSequence()) != null) {
        seqCount++;

        String seqString = querySeq.getSeqString();

        if (seqString.length() < 3) {
            System.err.println("Sequence " + querySeq.getSeqName() + "'s length is less than 3");
            continue;
        }

        final Sequence threadSeq = querySeq;

        Runnable r = new Runnable() {

            public void run() {
                try {
                    processSeq(threadSeq, refLabels, kmerTrie, out, wordSize, translQuery, translTable, false);
                    processSeq(threadSeq, refLabels, kmerTrie, out, wordSize, translQuery, translTable, true);
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }

                processed.incrementAndGet();
                outstandingTasks.decrementAndGet();
            }
        };

        outstandingTasks.incrementAndGet();
        service.submit(r);

        // simple backpressure: busy-wait until the pool has worked off part of the backlog
        while (outstandingTasks.get() >= maxTasks)
            ;

        if ((processed.get() + 1) % 1000000 == 0) {
            System.err.println("Processed " + processed + " sequences in "
                    + (System.currentTimeMillis() - startTime) + " ms");
        }
    }

    service.shutdown();
    service.awaitTermination(1, TimeUnit.DAYS);

    System.err.println("Finished Processed " + processed + " sequences in "
            + (System.currentTimeMillis() - startTime) + " ms");

    out.close();
}

From source file:org.apache.cxf.cwiki.SiteExporter.java

public static void main(String[] args) throws Exception {
    Authenticator.setDefault(new Authenticator() {
        protected PasswordAuthentication getPasswordAuthentication() {
            return new PasswordAuthentication(userName, password.toCharArray());
        }
    });
    ListIterator<String> it = Arrays.asList(args).listIterator();
    List<String> files = new ArrayList<String>();
    boolean forceAll = false;
    int maxThreads = -1;
    while (it.hasNext()) {
        String s = it.next();
        if ("-debug".equals(s)) {
            debug = true;
        } else if ("-user".equals(s)) {
            userName = it.next();
        } else if ("-password".equals(s)) {
            password = it.next();
        } else if ("-d".equals(s)) {
            rootOutputDir = new File(it.next());
        } else if ("-force".equals(s)) {
            forceAll = true;
        } else if ("-svn".equals(s)) {
            svn = true;
        } else if ("-commit".equals(s)) {
            commit = true;
        } else if ("-maxThreads".equals(s)) {
            maxThreads = Integer.parseInt(it.next());
        } else if (s != null && s.length() > 0) {
            files.add(s);
        }
    }

    List<SiteExporter> exporters = new ArrayList<SiteExporter>();
    for (String file : files) {
        exporters.add(new SiteExporter(file, forceAll));
    }
    List<SiteExporter> modified = new ArrayList<SiteExporter>();
    for (SiteExporter exporter : exporters) {
        if (exporter.initialize()) {
            modified.add(exporter);
        }
    }

    // render stuff only if needed
    if (!modified.isEmpty()) {
        setSiteExporters(exporters);

        if (maxThreads <= 0) {
            maxThreads = modified.size();
        }

        ExecutorService executor = Executors.newFixedThreadPool(maxThreads, new ThreadFactory() {
            public Thread newThread(Runnable r) {
                Thread t = new Thread(r);
                t.setDaemon(true);
                return t;
            }
        });
        List<Future<?>> futures = new ArrayList<Future<?>>(modified.size());
        for (SiteExporter exporter : modified) {
            futures.add(executor.submit(exporter));
        }
        for (Future<?> t : futures) {
            t.get();
        }
    }

    if (commit) {
        File file = FileUtils.createTempFile("svncommit", "txt");
        FileWriter writer = new FileWriter(file);
        writer.write(svnCommitMessage.toString());
        writer.close();
        callSvn(rootOutputDir, "commit", "-F", file.getAbsolutePath(), rootOutputDir.getAbsolutePath());
        svnCommitMessage.setLength(0);
    }
}

From source file:edu.msu.cme.rdp.kmer.cli.FastKmerFilter.java

public static void main(String[] args) throws Exception {
    final KmerSet<Set<RefKmer>> kmerSet;
    final SeqReader queryReader;
    final SequenceType querySeqType;
    final File queryFile;
    final KmerStartsWriter out;
    final boolean translQuery;
    final int wordSize;
    final int translTable;
    final boolean alignedSeqs;
    final List<String> refLabels = new ArrayList();
    final int maxThreads;
    final int trieWordSize;

    try {
        CommandLine cmdLine = new PosixParser().parse(options, args);
        args = cmdLine.getArgs();

        if (args.length < 3) {
            throw new Exception("Unexpected number of arguments");
        }

        if (cmdLine.hasOption("out")) {
            out = new KmerStartsWriter(cmdLine.getOptionValue("out"));
        } else {
            out = new KmerStartsWriter(System.out);
        }

        if (cmdLine.hasOption("aligned")) {
            alignedSeqs = true;
        } else {
            alignedSeqs = false;
        }

        if (cmdLine.hasOption("transl-table")) {
            translTable = Integer.valueOf(cmdLine.getOptionValue("transl-table"));
        } else {
            translTable = 11;
        }

        if (cmdLine.hasOption("threads")) {
            maxThreads = Integer.valueOf(cmdLine.getOptionValue("threads"));
        } else {
            maxThreads = Runtime.getRuntime().availableProcessors();
        }

        queryFile = new File(args[1]);
        wordSize = Integer.valueOf(args[0]);
        SequenceType refSeqType = null;

        querySeqType = SeqUtils.guessSequenceType(queryFile);
        queryReader = new SequenceReader(queryFile);

        if (querySeqType == SequenceType.Protein) {
            throw new Exception("Expected nucl query sequences");
        }

        refSeqType = SeqUtils
                .guessSequenceType(new File(args[2].contains("=") ? args[2].split("=")[1] : args[2]));

        translQuery = refSeqType == SequenceType.Protein;

        if (translQuery && wordSize % 3 != 0) {
            throw new Exception("Word size must be a multiple of 3 for nucl ref seqs");
        }

        if (translQuery) {
            trieWordSize = wordSize / 3;
        } else {
            trieWordSize = wordSize;
        }
        kmerSet = new KmerSet<Set<RefKmer>>();//new KmerTrie(trieWordSize, translQuery);

        for (int index = 2; index < args.length; index++) {
            String refName;
            String refFileName = args[index];
            if (refFileName.contains("=")) {
                String[] lexemes = refFileName.split("=");
                refName = lexemes[0];
                refFileName = lexemes[1];
            } else {
                String tmpName = new File(refFileName).getName();
                if (tmpName.contains(".")) {
                    refName = tmpName.substring(0, tmpName.lastIndexOf("."));
                } else {
                    refName = tmpName;
                }
            }

            File refFile = new File(refFileName);

            if (refSeqType != SeqUtils.guessSequenceType(refFile)) {
                throw new Exception(
                        "Reference file " + refFile + " contains " + SeqUtils.guessFileFormat(refFile)
                                + " sequences but expected " + refSeqType + " sequences");
            }

            SequenceReader seqReader = new SequenceReader(refFile);
            Sequence seq;

            while ((seq = seqReader.readNextSequence()) != null) {
                if (seq.getSeqName().startsWith("#")) {
                    continue;
                }

                KmerGenerator kmers;
                try {
                    if (translQuery) { //protein ref
                        kmers = new ProtKmerGenerator(seq.getSeqString(), trieWordSize, alignedSeqs);
                    } else {
                        kmers = new NuclKmerGenerator(seq.getSeqString(), trieWordSize, alignedSeqs);
                    }
                    while (kmers.hasNext()) {
                        Kmer temp = kmers.next();
                        long[] next = temp.getLongKmers();
                        Set<RefKmer> refKmers = kmerSet.get(next);
                        if (refKmers == null) {
                            refKmers = new HashSet();
                            kmerSet.add(next, refKmers);
                        }

                        RefKmer kmerRef = new RefKmer();
                        kmerRef.modelPos = kmers.getPosition();
                        kmerRef.refFileIndex = refLabels.size();
                        kmerRef.refSeqid = seq.getSeqName();
                        refKmers.add(kmerRef);
                    }
                } catch (IllegalArgumentException ex) {
                    //System.err.println(seq.getSeqName()+ " " + ex.getMessage());
                }
            }
            seqReader.close();

            refLabels.add(refName);
        }

    } catch (Exception e) {
        new HelpFormatter().printHelp(
                "KmerSearch <kmerSize> <query_file> [name=]<ref_file> ...\nkmerSize should be multiple of 3, (recommend 45, minimum 30, maximum 63) ",
                options);
        e.printStackTrace();
        System.exit(1);
        throw new RuntimeException("Stupid jvm"); //While this will never get thrown it is required to make sure javac doesn't get confused about uninitialized variables
    }

    long startTime = System.currentTimeMillis();
    long seqCount = 0;
    final int maxTasks = 25000;

    System.err.println("Starting kmer mapping at " + new Date());
    System.err.println("*  Number of threads:       " + maxThreads);
    System.err.println("*  References:              " + refLabels);
    System.err.println("*  Reads file:              " + queryFile);
    System.err.println("*  Kmer length:             " + trieWordSize);
    System.err.println("*  Kmer Refset Size:        " + kmerSet.size());

    final AtomicInteger processed = new AtomicInteger();
    final AtomicInteger outstandingTasks = new AtomicInteger();

    ExecutorService service = Executors.newFixedThreadPool(maxThreads);

    Sequence querySeq;

    while ((querySeq = queryReader.readNextSequence()) != null) {
        seqCount++;

        String seqString = querySeq.getSeqString();

        if ((!translQuery && seqString.length() < wordSize)
                || (translQuery && seqString.length() < wordSize + 2)) {
            //System.err.println(querySeq.getSeqName() + "\t" + seqString.length());
            continue;
        }

        final Sequence threadSeq = querySeq;

        Runnable r = new Runnable() {

            public void run() {
                try {
                    processSeq(threadSeq, refLabels, kmerSet, out, wordSize, translQuery, translTable, false);
                    processSeq(threadSeq, refLabels, kmerSet, out, wordSize, translQuery, translTable, true);

                    processed.incrementAndGet();
                    outstandingTasks.decrementAndGet();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        };

        outstandingTasks.incrementAndGet();
        service.submit(r);

        // simple backpressure: busy-wait until the pool has worked off part of the backlog
        while (outstandingTasks.get() >= maxTasks)
            ;

        if ((processed.get() + 1) % 1000000 == 0) {
            System.err.println("Processed " + processed + " sequences in "
                    + (System.currentTimeMillis() - startTime) + " ms");
        }
    }

    service.shutdown();
    service.awaitTermination(1, TimeUnit.DAYS);

    System.err.println("Finished Processed " + processed + " sequences in "
            + (System.currentTimeMillis() - startTime) + " ms");

    out.close();
}

From source file:de.huberlin.wbi.cuneiform.cmdline.main.Main.java

public static void main(String[] args)
        throws IOException, ParseException, InterruptedException, NotDerivableException {

    CommandLine cmd;
    Options opt;
    BaseRepl repl;
    BaseCreActor cre;
    Path sandbox;
    ExecutorService executor;
    TicketSrcActor ticketSrc;
    JsonSummary summary;
    Path summaryPath;
    Log statLog;
    int nthread;
    Path workDir;

    statLog = LogFactory.getLog("statLogger");

    executor = Executors.newCachedThreadPool();
    try {

        opt = getOptions();
        cmd = parse(args, opt);
        config(cmd);

        if (cmd.hasOption('h')) {

            System.out.println("CUNEIFORM - A Functional Workflow Language\nversion " + BaseRepl.LABEL_VERSION
                    + " build " + BaseRepl.LABEL_BUILD);
            new HelpFormatter().printHelp("java -jar cuneiform.jar [OPTION]*", opt);

            return;
        }

        if (cmd.hasOption('r'))
            Invocation.putLibPath(ForeignLambdaExpr.LANGID_R, cmd.getOptionValue('r'));

        if (cmd.hasOption('y'))
            Invocation.putLibPath(ForeignLambdaExpr.LANGID_PYTHON, cmd.getOptionValue('y'));

        if (cmd.hasOption('l'))
            sandbox = Paths.get(cmd.getOptionValue("l"));
        else
            sandbox = Paths.get(System.getProperty("user.home")).resolve(".cuneiform");

        sandbox = sandbox.toAbsolutePath();

        if (cmd.hasOption('c'))
            LocalThread.deleteIfExists(sandbox);

        if (cmd.hasOption('t'))
            nthread = Integer.valueOf(cmd.getOptionValue('t'));
        else
            nthread = Runtime.getRuntime().availableProcessors();

        if (cmd.hasOption('w'))
            workDir = Paths.get(cmd.getOptionValue('w'));
        else
            workDir = Paths.get(System.getProperty("user.dir"));

        workDir = workDir.toAbsolutePath();

        switch (platform) {

        case PLATFORM_LOCAL:

            if (!Files.exists(sandbox))
                Files.createDirectories(sandbox);

            cre = new LocalCreActor(sandbox, workDir, nthread);
            break;

        case PLATFORM_HTCONDOR:

            if (!Files.exists(sandbox))
                Files.createDirectories(sandbox);

            if (cmd.hasOption('m')) { // MAX_TRANSFER SIZE
                String maxTransferSize = cmd.getOptionValue('m');
                try {
                    cre = new CondorCreActor(sandbox, maxTransferSize);
                } catch (Exception e) {
                    System.out.println("INVALID '-m' option value: " + maxTransferSize
                            + "\n\nCUNEIFORM - A Functional Workflow Language\nversion "
                            + BaseRepl.LABEL_VERSION + " build " + BaseRepl.LABEL_BUILD);
                    new HelpFormatter().printHelp("java -jar cuneiform.jar [OPTION]*", opt);

                    return;
                }
            } else {
                cre = new CondorCreActor(sandbox);
            }

            break;

        default:
            throw new RuntimeException("Platform not recognized.");
        }

        // submit the long-running actors; shutdown() only stops new submissions, the actors keep running
        executor.submit(cre);
        ticketSrc = new TicketSrcActor(cre);
        executor.submit(ticketSrc);
        executor.shutdown();

        switch (format) {

        case FORMAT_CF:

            if (cmd.hasOption("i"))
                repl = new InteractiveRepl(ticketSrc, statLog);
            else
                repl = new CmdlineRepl(ticketSrc, statLog);
            break;

        case FORMAT_DAX:
            repl = new DaxRepl(ticketSrc, statLog);
            break;

        default:
            throw new RuntimeException("Format not recognized.");
        }

        if (cmd.hasOption("i")) {

            // run in interactive mode
            BaseRepl.run(repl);

            return;
        }

        // run in quiet mode

        if (inputFileVector.length > 0)

            for (Path f : inputFileVector)
                repl.interpret(readFile(f));

        else
            repl.interpret(readStdIn());

        Thread.sleep(3 * Actor.DELAY);
        while (repl.isBusy())
            Thread.sleep(Actor.DELAY);

        if (cmd.hasOption("s")) {

            summary = new JsonSummary(ticketSrc.getRunId(), sandbox, repl.getAns());
            summaryPath = Paths.get(cmd.getOptionValue("s"));
            summaryPath = summaryPath.toAbsolutePath();
            try (BufferedWriter writer = Files.newBufferedWriter(summaryPath, Charset.forName("UTF-8"))) {

                writer.write(summary.toString());
            }

        }

    } finally {
        executor.shutdownNow();
    }

}

From source file:tuit.java

@SuppressWarnings("ConstantConditions")
public static void main(String[] args) {
    System.out.println(licence);
    //Declare variables
    File inputFile;
    File outputFile;
    File tmpDir;
    File blastnExecutable;
    File properties;
    File blastOutputFile = null;
    //
    TUITPropertiesLoader tuitPropertiesLoader;
    TUITProperties tuitProperties;
    //
    String[] parameters = null;
    //
    Connection connection = null;
    MySQL_Connector mySQL_connector;
    //
    Map<Ranks, TUITCutoffSet> cutoffMap;
    //
    BLASTIdentifier blastIdentifier = null;
    //
    RamDb ramDb = null;

    CommandLineParser parser = new GnuParser();
    Options options = new Options();

    options.addOption(tuit.IN, "input<file>", true, "Input file (currently fasta-formatted only)");
    options.addOption(tuit.OUT, "output<file>", true, "Output file (in " + tuit.TUIT_EXT + " format)");
    options.addOption(tuit.P, "prop<file>", true, "Properties file (XML formatted)");
    options.addOption(tuit.V, "verbose", false, "Enable verbose output");
    options.addOption(tuit.B, "blast_output<file>", true, "Perform on a pre-BLASTed output");
    options.addOption(tuit.DEPLOY, "deploy", false, "Deploy the taxonomic databases");
    options.addOption(tuit.UPDATE, "update", false, "Update the taxonomic databases");
    options.addOption(tuit.USE_DB, "usedb", false, "Use RDBMS instead of RAM-based taxonomy");

    Option option = new Option(tuit.REDUCE, "reduce", true,
            "Pack identical (100% similar sequences) records in the given sample file");
    option.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(option);
    option = new Option(tuit.COMBINE, "combine", true,
            "Combine a set of given reduction files into an HMP Tree-compatible taxonomy");
    option.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(option);
    options.addOption(tuit.NORMALIZE, "normalize", false,
            "If used in combination with -combine ensures that the values are normalized by the root value");

    HelpFormatter formatter = new HelpFormatter();

    try {

        //Get TUIT directory
        final File tuitDir = new File(
                new File(tuit.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath())
                        .getParent());
        final File ramDbFile = new File(tuitDir, tuit.RAM_DB);

        //Setup logger
        Log.getInstance().setLogName("tuit.log");

        //Read command line
        final CommandLine commandLine = parser.parse(options, args, true);

        //Check if the REDUCE option is on
        if (commandLine.hasOption(tuit.REDUCE)) {

            final String[] fileList = commandLine.getOptionValues(tuit.REDUCE);
            for (String s : fileList) {
                final Path path = Paths.get(s);
                Log.getInstance().log(Level.INFO, "Processing " + path.toString() + "...");
                final NucleotideFastaSequenceReductor nucleotideFastaSequenceReductor = NucleotideFastaSequenceReductor
                        .fromPath(path);
                ReductorFileOperator.save(nucleotideFastaSequenceReductor,
                        path.resolveSibling(path.getFileName().toString() + ".rdc"));
            }

            Log.getInstance().log(Level.FINE, "Task done, exiting...");
            return;
        }

        //Check if COMBINE is on
        if (commandLine.hasOption(tuit.COMBINE)) {
            final boolean normalize = commandLine.hasOption(tuit.NORMALIZE);
            final String[] fileList = commandLine.getOptionValues(tuit.COMBINE);
            //TODO: implement a test for format here

            final List<TreeFormatter.TreeFormatterFormat.HMPTreesOutput> hmpTreesOutputs = new ArrayList<>();
            final TreeFormatter treeFormatter = TreeFormatter
                    .newInstance(new TreeFormatter.TuitLineTreeFormatterFormat());
            for (String s : fileList) {
                final Path path = Paths.get(s);
                Log.getInstance().log(Level.INFO, "Merging " + path.toString() + "...");
                treeFormatter.loadFromPath(path);
                final TreeFormatter.TreeFormatterFormat.HMPTreesOutput output = TreeFormatter.TreeFormatterFormat.HMPTreesOutput
                        .newInstance(treeFormatter.toHMPTree(normalize), s.substring(0, s.indexOf(".")));
                hmpTreesOutputs.add(output);
                treeFormatter.erase();
            }
            final Path destination;
            if (commandLine.hasOption(OUT)) {
                destination = Paths.get(commandLine.getOptionValue(tuit.OUT));
            } else {
                destination = Paths.get("merge.tcf");
            }
            CombinatorFileOperator.save(hmpTreesOutputs, treeFormatter, destination);
            Log.getInstance().log(Level.FINE, "Task done, exiting...");
            return;
        }

        if (!commandLine.hasOption(tuit.P)) {
            throw new ParseException("No properties file option found, exiting.");
        } else {
            properties = new File(commandLine.getOptionValue(tuit.P));
        }

        //Load properties
        tuitPropertiesLoader = TUITPropertiesLoader.newInstanceFromFile(properties);
        tuitProperties = tuitPropertiesLoader.getTuitProperties();

        //Create tmp directory and blastn executable
        tmpDir = new File(tuitProperties.getTMPDir().getPath());
        blastnExecutable = new File(tuitProperties.getBLASTNPath().getPath());

        //Check for deploy
        if (commandLine.hasOption(tuit.DEPLOY)) {
            if (commandLine.hasOption(tuit.USE_DB)) {
                NCBITablesDeployer.fastDeployNCBIDatabasesFromNCBI(connection, tmpDir);
            } else {
                NCBITablesDeployer.fastDeployNCBIRamDatabaseFromNCBI(tmpDir, ramDbFile);
            }

            Log.getInstance().log(Level.FINE, "Task done, exiting...");
            return;
        }
        //Check for update
        if (commandLine.hasOption(tuit.UPDATE)) {
            if (commandLine.hasOption(tuit.USE_DB)) {
                NCBITablesDeployer.updateDatabasesFromNCBI(connection, tmpDir);
            } else {
                //No need to specify a different way to update the database other than just deploy in case of the RAM database
                NCBITablesDeployer.fastDeployNCBIRamDatabaseFromNCBI(tmpDir, ramDbFile);
            }
            Log.getInstance().log(Level.FINE, "Task done, exiting...");
            return;
        }

        //Connect to the database
        if (commandLine.hasOption(tuit.USE_DB)) {
            mySQL_connector = MySQL_Connector.newDefaultInstance(
                    "jdbc:mysql://" + tuitProperties.getDBConnection().getUrl().trim() + "/",
                    tuitProperties.getDBConnection().getLogin().trim(),
                    tuitProperties.getDBConnection().getPassword().trim());
            mySQL_connector.connectToDatabase();
            connection = mySQL_connector.getConnection();
        } else {
            //Probe for ram database

            if (ramDbFile.exists() && ramDbFile.canRead()) {
                Log.getInstance().log(Level.INFO, "Loading RAM taxonomic map...");
                try {
                    ramDb = RamDb.loadSelfFromFile(ramDbFile);
                } catch (IOException ie) {
                    if (ie instanceof java.io.InvalidClassException)
                        throw new IOException("The RAM-based taxonomic database needs to be updated.");
                }

            } else {
                Log.getInstance().log(Level.SEVERE,
                        "The RAM database either has not been deployed, or is not accessible. "
                                + "Please use the --deploy option and check permissions on the TUIT directory. "
                                + "If you were looking to use the RDBMS as a taxonomic reference, please use the -usedb option.");
                return;
            }
        }

        if (commandLine.hasOption(tuit.B)) {
            blastOutputFile = new File(commandLine.getOptionValue(tuit.B));
            if (!blastOutputFile.exists() || !blastOutputFile.canRead()) {
                throw new Exception("BLAST output file either does not exist, or is not readable.");
            } else if (blastOutputFile.isDirectory()) {
                throw new Exception("BLAST output file points to a directory.");
            }
        }
        //Check vital parameters
        if (!commandLine.hasOption(tuit.IN)) {
            throw new ParseException("No input file option found, exiting.");
        } else {
            inputFile = new File(commandLine.getOptionValue(tuit.IN));
            Log.getInstance().setLogName(inputFile.getName().split("\\.")[0] + ".tuit.log");
        }
        //Correct the output file option if needed
        if (!commandLine.hasOption(tuit.OUT)) {
            outputFile = new File((inputFile.getPath()).split("\\.")[0] + tuit.TUIT_EXT);
        } else {
            outputFile = new File(commandLine.getOptionValue(tuit.OUT));
        }

        //Adjust the output level
        if (commandLine.hasOption(tuit.V)) {
            Log.getInstance().setLevel(Level.FINE);
            Log.getInstance().log(Level.INFO, "Using verbose output for the log");
        } else {
            Log.getInstance().setLevel(Level.INFO);
        }
        //Try all files
        if (inputFile != null) {
            if (!inputFile.exists() || !inputFile.canRead()) {
                throw new Exception("Input file either does not exist, or is not readable.");
            } else if (inputFile.isDirectory()) {
                throw new Exception("Input file points to a directory.");
            }
        }

        if (!properties.exists() || !properties.canRead()) {
            throw new Exception("Properties file either does not exist, or is not readable.");
        } else if (properties.isDirectory()) {
            throw new Exception("Properties file points to a directory.");
        }

        //Create blast parameters
        final StringBuilder stringBuilder = new StringBuilder();
        for (Database database : tuitProperties.getBLASTNParameters().getDatabase()) {
            stringBuilder.append(database.getUse());
            stringBuilder.append(" ");//Gonna insert an extra space for the last database
        }
        String remote;
        String entrez_query;
        if (tuitProperties.getBLASTNParameters().getRemote().getDelegate().equals("yes")) {
            remote = "-remote";
            entrez_query = "-entrez_query";
            parameters = new String[] { "-db", stringBuilder.toString(), remote, entrez_query,
                    tuitProperties.getBLASTNParameters().getEntrezQuery().getValue(), "-evalue",
                    tuitProperties.getBLASTNParameters().getExpect().getValue() };
        } else {
            if (!commandLine.hasOption(tuit.B)) {
                if (tuitProperties.getBLASTNParameters().getEntrezQuery().getValue().toUpperCase()
                        .startsWith("NOT")
                        || tuitProperties.getBLASTNParameters().getEntrezQuery().getValue().toUpperCase()
                                .startsWith("ALL")) {
                    parameters = new String[] { "-db", stringBuilder.toString(), "-evalue",
                            tuitProperties.getBLASTNParameters().getExpect().getValue(), "-negative_gilist",
                            TUITFileOperatorHelper.restrictToEntrez(tmpDir,
                                    tuitProperties.getBLASTNParameters().getEntrezQuery().getValue()
                                            .toUpperCase().replace("NOT", "OR"))
                                    .getAbsolutePath(),
                            "-num_threads", tuitProperties.getBLASTNParameters().getNumThreads().getValue() };
                } else if (tuitProperties.getBLASTNParameters().getEntrezQuery().getValue().toUpperCase()
                        .equals("")) {
                    parameters = new String[] { "-db", stringBuilder.toString(), "-evalue",
                            tuitProperties.getBLASTNParameters().getExpect().getValue(), "-num_threads",
                            tuitProperties.getBLASTNParameters().getNumThreads().getValue() };
                } else {
                    parameters = new String[] { "-db", stringBuilder.toString(), "-evalue",
                            tuitProperties.getBLASTNParameters().getExpect().getValue(),
                            /*"-gilist", TUITFileOperatorHelper.restrictToEntrez(
                            tmpDir, tuitProperties.getBLASTNParameters().getEntrezQuery().getValue()).getAbsolutePath(),*/ //TODO remove comment!!!!!
                            "-num_threads", tuitProperties.getBLASTNParameters().getNumThreads().getValue() };
                }
            }
        }
        //Prepare a cutoff Map
        if (tuitProperties.getSpecificationParameters() != null
                && tuitProperties.getSpecificationParameters().size() > 0) {
            cutoffMap = new HashMap<Ranks, TUITCutoffSet>(tuitProperties.getSpecificationParameters().size());
            for (SpecificationParameters specificationParameters : tuitProperties
                    .getSpecificationParameters()) {
                cutoffMap.put(Ranks.valueOf(specificationParameters.getCutoffSet().getRank()),
                        TUITCutoffSet.newDefaultInstance(
                                Double.parseDouble(
                                        specificationParameters.getCutoffSet().getPIdentCutoff().getValue()),
                                Double.parseDouble(specificationParameters.getCutoffSet()
                                        .getQueryCoverageCutoff().getValue()),
                                Double.parseDouble(
                                        specificationParameters.getCutoffSet().getAlpha().getValue())));
            }
        } else {
            cutoffMap = new HashMap<Ranks, TUITCutoffSet>();
        }
        final TUITFileOperatorHelper.OutputFormat format;
        if (tuitProperties.getBLASTNParameters().getOutputFormat().getFormat().equals("rdp")) {
            format = TUITFileOperatorHelper.OutputFormat.RDP_FIXRANK;
        } else {
            format = TUITFileOperatorHelper.OutputFormat.TUIT;
        }

        try (TUITFileOperator<NucleotideFasta> nucleotideFastaTUITFileOperator = NucleotideFastaTUITFileOperator
                .newInstance(format, cutoffMap);) {
            nucleotideFastaTUITFileOperator.setInputFile(inputFile);
            nucleotideFastaTUITFileOperator.setOutputFile(outputFile);
            final String cleanupString = tuitProperties.getBLASTNParameters().getKeepBLASTOuts().getKeep();
            final boolean cleanup;
            if (cleanupString.equals("no")) {
                Log.getInstance().log(Level.INFO, "Temporary BLAST files will be deleted.");
                cleanup = true;
            } else {
                Log.getInstance().log(Level.INFO, "Temporary BLAST files will be kept.");
                cleanup = false;
            }
            //Create blast identifier
            ExecutorService executorService = Executors.newSingleThreadExecutor();
            if (commandLine.hasOption(tuit.USE_DB)) {

                if (blastOutputFile == null) {
                    blastIdentifier = TUITBLASTIdentifierDB.newInstanceFromFileOperator(tmpDir,
                            blastnExecutable, parameters, nucleotideFastaTUITFileOperator, connection,
                            cutoffMap,
                            Integer.parseInt(
                                    tuitProperties.getBLASTNParameters().getMaxFilesInBatch().getValue()),
                            cleanup);

                } else {
                    try {
                        blastIdentifier = TUITBLASTIdentifierDB.newInstanceFromBLASTOutput(
                                nucleotideFastaTUITFileOperator, connection, cutoffMap, blastOutputFile,
                                Integer.parseInt(
                                        tuitProperties.getBLASTNParameters().getMaxFilesInBatch().getValue()),
                                cleanup);

                    } catch (JAXBException e) {
                        Log.getInstance().log(Level.SEVERE, "Error reading " + blastOutputFile.getName()
                                + ", please check input. The file must be XML formatted.");
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }

            } else {
                if (blastOutputFile == null) {
                    blastIdentifier = TUITBLASTIdentifierRAM.newInstanceFromFileOperator(tmpDir,
                            blastnExecutable, parameters, nucleotideFastaTUITFileOperator, cutoffMap,
                            Integer.parseInt(
                                    tuitProperties.getBLASTNParameters().getMaxFilesInBatch().getValue()),
                            cleanup, ramDb);

                } else {
                    try {
                        blastIdentifier = TUITBLASTIdentifierRAM.newInstanceFromBLASTOutput(
                                nucleotideFastaTUITFileOperator, cutoffMap, blastOutputFile,
                                Integer.parseInt(
                                        tuitProperties.getBLASTNParameters().getMaxFilesInBatch().getValue()),
                                cleanup, ramDb);

                    } catch (JAXBException e) {
                        Log.getInstance().log(Level.SEVERE, "Error reading " + blastOutputFile.getName()
                                + ", please check input. The file must be XML formatted.");
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
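            // hand the identifier to the single-thread executor and block on its Future until it finishes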
            Future<?> runnableFuture = executorService.submit(blastIdentifier);
            runnableFuture.get();
            executorService.shutdown();
        }
    } catch (ParseException pe) {
        Log.getInstance().log(Level.SEVERE, (pe.getMessage()));
        formatter.printHelp("tuit", options);
    } catch (SAXException saxe) {
        Log.getInstance().log(Level.SEVERE, saxe.getMessage());
    } catch (FileNotFoundException fnfe) {
        Log.getInstance().log(Level.SEVERE, fnfe.getMessage());
    } catch (TUITPropertyBadFormatException tpbfe) {
        Log.getInstance().log(Level.SEVERE, tpbfe.getMessage());
    } catch (ClassCastException cce) {
        Log.getInstance().log(Level.SEVERE, cce.getMessage());
    } catch (JAXBException jaxbee) {
        Log.getInstance().log(Level.SEVERE,
                "The properties file is not well formatted. Please ensure that the XML is consistent with the io.properties.dtd schema.");
    } catch (ClassNotFoundException cnfe) {
        //Probably won't happen unless the library deleted from the .jar
        Log.getInstance().log(Level.SEVERE, cnfe.getMessage());
        //cnfe.printStackTrace();
    } catch (SQLException sqle) {
        Log.getInstance().log(Level.SEVERE,
                "A database communication error occurred with the following message:\n" + sqle.getMessage());
        //sqle.printStackTrace();
        if (sqle.getMessage().contains("Access denied for user")) {
            Log.getInstance().log(Level.SEVERE, "Please use standard database login: "
                    + NCBITablesDeployer.login + " and password: " + NCBITablesDeployer.password);
        }
    } catch (Exception e) {
        Log.getInstance().log(Level.SEVERE, e.getMessage());
        e.printStackTrace();
    } finally {
        if (connection != null) {
            try {
                connection.close();
            } catch (SQLException sqle) {
                Log.getInstance().log(Level.SEVERE, "Problem closing the database connection: " + sqle);
            }
        }
        Log.getInstance().log(Level.FINE, "Task done, exiting...");
    }
}

From source file:Main.java

/**
 * Run the given runnable in a new thread.
 *
 * @param runnable The runnable to run in a new thread.
 */
public static void inNewThread(Runnable runnable) {
    ExecutorService executor = Executors.newSingleThreadExecutor();

    executor.submit(runnable);

    executor.shutdown();
}