Usage examples for java.util.concurrent.ExecutorService.shutdown()
void shutdown();
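shutdown() itself does not block and does not wait for previously submitted tasks to finish; most of the examples below therefore pair it with awaitTermination. A minimal, self-contained sketch of that typical sequence (the class name and tasks are illustrative, not taken from any source file on this page):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ShutdownSketch {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService pool = Executors.newFixedThreadPool(2);
        pool.submit(() -> System.out.println("task 1"));
        pool.submit(() -> System.out.println("task 2"));

        // shutdown() is non-blocking: it rejects new submissions but lets
        // already-submitted tasks run to completion.
        pool.shutdown();

        // Block (with a bound) until the submitted tasks have finished.
        if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
            pool.shutdownNow(); // give up and interrupt anything still running
        }
    }
}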
From source file:eu.itesla_project.offline.mpi.Master.java
public static void main(String[] args) throws Exception {
    try {
        CommandLineParser parser = new GnuParser();
        CommandLine line = parser.parse(OPTIONS, args);
        Mode mode = Mode.valueOf(line.getOptionValue("mode"));
        String simulationDbName = line.hasOption("simulation-db-name")
                ? line.getOptionValue("simulation-db-name")
                : OfflineConfig.DEFAULT_SIMULATION_DB_NAME;
        String rulesDbName = line.hasOption("rules-db-name")
                ? line.getOptionValue("rules-db-name")
                : OfflineConfig.DEFAULT_RULES_DB_NAME;
        String metricsDbName = line.hasOption("metrics-db-name")
                ? line.getOptionValue("metrics-db-name")
                : OfflineConfig.DEFAULT_METRICS_DB_NAME;
        Path tmpDir = Paths.get(line.getOptionValue("tmp-dir"));
        Class<?> statisticsFactoryClass = Class.forName(line.getOptionValue("statistics-factory-class"));
        Path statisticsDbDir = Paths.get(line.getOptionValue("statistics-db-dir"));
        String statisticsDbName = line.getOptionValue("statistics-db-name");
        int coresPerRank = Integer.parseInt(line.getOptionValue("cores"));
        Path stdOutArchive = line.hasOption("stdout-archive") ? Paths.get(line.getOptionValue("stdout-archive")) : null;
        String workflowId = line.hasOption("workflow") ? line.getOptionValue("workflow") : null;

        MpiExecutorContext mpiExecutorContext = new MultiStateNetworkAwareMpiExecutorContext();
        ScheduledExecutorService scheduledExecutorService = Executors.newScheduledThreadPool(1);
        ExecutorService offlineExecutorService = MultiStateNetworkAwareExecutors
                .newSizeLimitedThreadPool("OFFLINE_POOL", 100);
        try {
            MpiStatisticsFactory statisticsFactory = statisticsFactoryClass
                    .asSubclass(MpiStatisticsFactory.class).newInstance();
            try (MpiStatistics statistics = statisticsFactory.create(statisticsDbDir, statisticsDbName)) {
                try (ComputationManager computationManager = new MpiComputationManager(tmpDir, statistics,
                        mpiExecutorContext, coresPerRank, false, stdOutArchive)) {
                    OfflineConfig config = OfflineConfig.load();
                    try (LocalOfflineApplication application = new LocalOfflineApplication(config,
                            computationManager, simulationDbName, rulesDbName, metricsDbName,
                            scheduledExecutorService, offlineExecutorService)) {
                        switch (mode) {
                        case ui:
                            application.await();
                            break;
                        case simulations: {
                            if (workflowId == null) {
                                workflowId = application.createWorkflow(null,
                                        OfflineWorkflowCreationParameters.load());
                            }
                            application.startWorkflowAndWait(workflowId, OfflineWorkflowStartParameters.load());
                        }
                            break;
                        case rules: {
                            if (workflowId == null) {
                                throw new RuntimeException("Workflow '" + workflowId + "' not found");
                            }
                            application.computeSecurityRulesAndWait(workflowId);
                        }
                            break;
                        default:
                            throw new IllegalArgumentException("Invalid mode " + mode);
                        }
                    }
                }
            }
        } finally {
            mpiExecutorContext.shutdown();
            offlineExecutorService.shutdown();
            scheduledExecutorService.shutdown();
            offlineExecutorService.awaitTermination(15, TimeUnit.MINUTES);
            scheduledExecutorService.awaitTermination(15, TimeUnit.MINUTES);
        }
    } catch (ParseException e) {
        System.err.println(e.getMessage());
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("master", OPTIONS, true);
        System.exit(-1);
    } catch (Throwable t) {
        LOGGER.error(t.toString(), t);
        System.exit(-1);
    }
}
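The finally block above calls shutdown() on every pool before awaiting any of them, so the pools drain in parallel rather than one after another. A stripped-down sketch of just that ordering, assuming two arbitrary pools (the class and parameter names are illustrative, not from the Master.java source):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

class MultiPoolShutdown {
    static void shutdownAll(ExecutorService workers, ScheduledExecutorService scheduler)
            throws InterruptedException {
        // Request shutdown on every pool first so they all start draining...
        workers.shutdown();
        scheduler.shutdown();
        // ...then wait on each one with a bounded timeout.
        workers.awaitTermination(15, TimeUnit.MINUTES);
        scheduler.awaitTermination(15, TimeUnit.MINUTES);
    }
}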
From source file:edu.cornell.med.icb.R.RUtils.java
public static void main(final String[] args) throws ParseException, ConfigurationException {
    final Options options = new Options();
    final Option helpOption = new Option("h", "help", false, "Print this message");
    options.addOption(helpOption);

    final Option startupOption = new Option(Mode.startup.name(), Mode.startup.name(), false,
            "Start Rserve process");
    final Option shutdownOption = new Option(Mode.shutdown.name(), Mode.shutdown.name(), false,
            "Shutdown Rserve process");
    final Option validateOption = new Option(Mode.validate.name(), Mode.validate.name(), false,
            "Validate that Rserve processes are running");
    final OptionGroup optionGroup = new OptionGroup();
    optionGroup.addOption(startupOption);
    optionGroup.addOption(shutdownOption);
    optionGroup.addOption(validateOption);
    optionGroup.setRequired(true);
    options.addOptionGroup(optionGroup);

    final Option portOption = new Option("port", "port", true,
            "Use specified port to communicate with the Rserve process");
    portOption.setArgName("port");
    portOption.setType(int.class);
    options.addOption(portOption);

    final Option hostOption = new Option("host", "host", true,
            "Communicate with the Rserve process on the given host");
    hostOption.setArgName("hostname");
    hostOption.setType(String.class);
    options.addOption(hostOption);

    final Option userOption = new Option("u", "username", true, "Username to send to the Rserve process");
    userOption.setArgName("username");
    userOption.setType(String.class);
    options.addOption(userOption);

    final Option passwordOption = new Option("p", "password", true, "Password to send to the Rserve process");
    passwordOption.setArgName("password");
    passwordOption.setType(String.class);
    options.addOption(passwordOption);

    final Option configurationOption = new Option("c", "configuration", true,
            "Configuration file or url to read from");
    configurationOption.setArgName("configuration");
    configurationOption.setType(String.class);
    options.addOption(configurationOption);

    final Parser parser = new BasicParser();
    final CommandLine commandLine;
    try {
        commandLine = parser.parse(options, args);
    } catch (ParseException e) {
        usage(options);
        throw e;
    }

    int exitStatus = 0;
    if (commandLine.hasOption("h")) {
        usage(options);
    } else {
        Mode mode = null;
        for (final Mode potentialMode : Mode.values()) {
            if (commandLine.hasOption(potentialMode.name())) {
                mode = potentialMode;
                break;
            }
        }

        final ExecutorService threadPool = Executors.newCachedThreadPool();
        if (commandLine.hasOption("configuration")) {
            final String configurationFile = commandLine.getOptionValue("configuration");
            LOG.info("Reading configuration from " + configurationFile);
            XMLConfiguration configuration;
            try {
                final URL configurationURL = new URL(configurationFile);
                configuration = new XMLConfiguration(configurationURL);
            } catch (MalformedURLException e) {
                // resource is not a URL: attempt to get the resource from a file
                LOG.debug("Configuration is not a valid url");
                configuration = new XMLConfiguration(configurationFile);
            }
            configuration.setValidating(true);
            final int numberOfRServers = configuration.getMaxIndex("RConfiguration.RServer") + 1;
            boolean failed = false;
            for (int i = 0; i < numberOfRServers; i++) {
                final String server = "RConfiguration.RServer(" + i + ")";
                final String host = configuration.getString(server + "[@host]");
                final int port = configuration.getInt(server + "[@port]",
                        RConfigurationUtils.DEFAULT_RSERVE_PORT);
                final String username = configuration.getString(server + "[@username]");
                final String password = configuration.getString(server + "[@password]");
                final String command = configuration.getString(server + "[@command]", DEFAULT_RSERVE_COMMAND);
                if (executeMode(mode, threadPool, host, port, username, password, command) != 0) {
                    failed = true; // we have other hosts to check so keep a failed state
                }
            }
            if (failed) {
                exitStatus = 3;
            }
        } else {
            final String host = commandLine.getOptionValue("host", "localhost");
            final int port = Integer.valueOf(commandLine.getOptionValue("port", "6311"));
            final String username = commandLine.getOptionValue("username");
            final String password = commandLine.getOptionValue("password");
            exitStatus = executeMode(mode, threadPool, host, port, username, password, null);
        }
        threadPool.shutdown();
    }
    System.exit(exitStatus);
}
From source file:cloudworker.RemoteWorker.java
public static void main(String[] args) throws Exception {
    //Command interpreter
    CommandLineInterface cmd = new CommandLineInterface(args);
    final int poolSize = Integer.parseInt(cmd.getOptionValue("s"));
    long idle_time = Long.parseLong(cmd.getOptionValue("i")); //idle time = 60 sec

    init();
    System.out.println("Initialized one remote worker.\n");

    //Create thread pool
    ExecutorService threadPool = Executors.newFixedThreadPool(poolSize);
    BlockingExecutor blockingPool = new BlockingExecutor(threadPool, poolSize);

    //Get queue url
    GetQueueUrlResult urlResult = sqs.getQueueUrl("JobQueue");
    String jobQueueUrl = urlResult.getQueueUrl();

    // Receive messages
    //System.out.println("Receiving messages from JobQueue.\n");

    //...Check idle state
    boolean terminate = false;
    boolean startClock = true;
    long start_time = 0, end_time;

    JSONParser parser = new JSONParser();
    Runtime runtime = Runtime.getRuntime();
    String task_id = null;
    boolean runAnimoto = false;

    while (!terminate || idle_time == 0) {
        while (getQueueSize(sqs, jobQueueUrl) > 0) {
            //Batch retrieving messages
            ReceiveMessageRequest receiveMessageRequest = new ReceiveMessageRequest().withQueueUrl(jobQueueUrl)
                    .withMaxNumberOfMessages(10);
            List<Message> messages = sqs.receiveMessage(receiveMessageRequest).getMessages();

            for (Message message : messages) {
                //System.out.println("  Message");
                //System.out.println("    MessageId:     " + message.getMessageId());
                //System.out.println("    ReceiptHandle: " + message.getReceiptHandle());
                //System.out.println("    MD5OfBody:     " + message.getMD5OfBody());
                //System.out.println("    Body:          " + message.getBody());

                //Get task
                String messageBody = message.getBody();
                JSONObject json = (JSONObject) parser.parse(messageBody);
                task_id = json.get("task_id").toString();
                String task = json.get("task").toString();

                try {
                    //Check duplicate task
                    dynamoDB.addTask(task_id, task);

                    //Execute task, will be blocked if no more thread is currently available
                    blockingPool.submitTask(new Animoto(task_id, task, sqs));

                    // Delete the message
                    String messageRecieptHandle = message.getReceiptHandle();
                    sqs.deleteMessage(new DeleteMessageRequest(jobQueueUrl, messageRecieptHandle));
                } catch (ConditionalCheckFailedException ccf) {
                    //DO something...
                }
            }
            startClock = true;
        }

        //Start clock to measure idle time
        if (startClock) {
            startClock = false;
            start_time = System.currentTimeMillis();
        } else {
            end_time = System.currentTimeMillis();
            long elapsed_time = (end_time - start_time) / 1000;
            if (elapsed_time > idle_time) {
                terminate = true;
            }
        }
    }

    //System.out.println();
    threadPool.shutdown();

    // Wait until all threads are finished
    while (!threadPool.isTerminated()) {
    }

    //Terminate running instance
    cleanUpInstance();
}
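Note that the example above waits for termination by spinning in an empty loop on isTerminated(), which keeps one CPU core busy. A small sketch of an alternative that blocks in awaitTermination instead (this is not part of the original RemoteWorker code; the class and method names are illustrative):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;

class DrainQuietly {
    // Blocks until the pool drains instead of spinning on isTerminated().
    static void drain(ExecutorService threadPool) throws InterruptedException {
        threadPool.shutdown();
        while (!threadPool.awaitTermination(1, TimeUnit.MINUTES)) {
            // not finished yet; awaitTermination re-checks once per minute
        }
    }
}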
From source file:de.huberlin.wbi.cuneiform.cmdline.main.Main.java
public static void main(String[] args)
        throws IOException, ParseException, InterruptedException, NotDerivableException {

    CommandLine cmd;
    Options opt;
    BaseRepl repl;
    BaseCreActor cre;
    Path sandbox;
    ExecutorService executor;
    TicketSrcActor ticketSrc;
    JsonSummary summary;
    Path summaryPath;
    Log statLog;
    int nthread;
    Path workDir;

    statLog = LogFactory.getLog("statLogger");
    executor = Executors.newCachedThreadPool();
    try {
        opt = getOptions();
        cmd = parse(args, opt);
        config(cmd);

        if (cmd.hasOption('h')) {
            System.out.println("CUNEIFORM - A Functional Workflow Language\nversion " + BaseRepl.LABEL_VERSION
                    + " build " + BaseRepl.LABEL_BUILD);
            new HelpFormatter().printHelp("java -jar cuneiform.jar [OPTION]*", opt);
            return;
        }

        if (cmd.hasOption('r'))
            Invocation.putLibPath(ForeignLambdaExpr.LANGID_R, cmd.getOptionValue('r'));
        if (cmd.hasOption('y'))
            Invocation.putLibPath(ForeignLambdaExpr.LANGID_PYTHON, cmd.getOptionValue('y'));

        if (cmd.hasOption('l'))
            sandbox = Paths.get(cmd.getOptionValue("l"));
        else
            sandbox = Paths.get(System.getProperty("user.home")).resolve(".cuneiform");
        sandbox = sandbox.toAbsolutePath();

        if (cmd.hasOption('c'))
            LocalThread.deleteIfExists(sandbox);

        if (cmd.hasOption('t'))
            nthread = Integer.valueOf(cmd.getOptionValue('t'));
        else
            nthread = Runtime.getRuntime().availableProcessors();

        if (cmd.hasOption('w'))
            workDir = Paths.get(cmd.getOptionValue('w'));
        else
            workDir = Paths.get(System.getProperty("user.dir"));
        workDir = workDir.toAbsolutePath();

        switch (platform) {
        case PLATFORM_LOCAL:
            if (!Files.exists(sandbox))
                Files.createDirectories(sandbox);
            cre = new LocalCreActor(sandbox, workDir, nthread);
            break;
        case PLATFORM_HTCONDOR:
            if (!Files.exists(sandbox))
                Files.createDirectories(sandbox);
            if (cmd.hasOption('m')) { // MAX_TRANSFER SIZE
                String maxTransferSize = cmd.getOptionValue('m');
                try {
                    cre = new CondorCreActor(sandbox, maxTransferSize);
                } catch (Exception e) {
                    System.out.println("INVALID '-m' option value: " + maxTransferSize
                            + "\n\nCUNEIFORM - A Functional Workflow Language\nversion " + BaseRepl.LABEL_VERSION
                            + " build " + BaseRepl.LABEL_BUILD);
                    new HelpFormatter().printHelp("java -jar cuneiform.jar [OPTION]*", opt);
                    return;
                }
            } else {
                cre = new CondorCreActor(sandbox);
            }
            break;
        default:
            throw new RuntimeException("Platform not recognized.");
        }

        executor.submit(cre);
        ticketSrc = new TicketSrcActor(cre);
        executor.submit(ticketSrc);
        executor.shutdown();

        switch (format) {
        case FORMAT_CF:
            if (cmd.hasOption("i"))
                repl = new InteractiveRepl(ticketSrc, statLog);
            else
                repl = new CmdlineRepl(ticketSrc, statLog);
            break;
        case FORMAT_DAX:
            repl = new DaxRepl(ticketSrc, statLog);
            break;
        default:
            throw new RuntimeException("Format not recognized.");
        }

        if (cmd.hasOption("i")) {
            // run in interactive mode
            BaseRepl.run(repl);
            return;
        }

        // run in quiet mode
        if (inputFileVector.length > 0)
            for (Path f : inputFileVector)
                repl.interpret(readFile(f));
        else
            repl.interpret(readStdIn());

        Thread.sleep(3 * Actor.DELAY);
        while (repl.isBusy())
            Thread.sleep(Actor.DELAY);

        if (cmd.hasOption("s")) {
            summary = new JsonSummary(ticketSrc.getRunId(), sandbox, repl.getAns());
            summaryPath = Paths.get(cmd.getOptionValue("s"));
            summaryPath = summaryPath.toAbsolutePath();
            try (BufferedWriter writer = Files.newBufferedWriter(summaryPath, Charset.forName("UTF-8"))) {
                writer.write(summary.toString());
            }
        }
    } finally {
        executor.shutdownNow();
    }
}
From source file:tuit.java
@SuppressWarnings("ConstantConditions")
public static void main(String[] args) {
    System.out.println(licence);
    //Declare variables
    File inputFile;
    File outputFile;
    File tmpDir;
    File blastnExecutable;
    File properties;
    File blastOutputFile = null;
    //
    TUITPropertiesLoader tuitPropertiesLoader;
    TUITProperties tuitProperties;
    //
    String[] parameters = null;
    //
    Connection connection = null;
    MySQL_Connector mySQL_connector;
    //
    Map<Ranks, TUITCutoffSet> cutoffMap;
    //
    BLASTIdentifier blastIdentifier = null;
    //
    RamDb ramDb = null;

    CommandLineParser parser = new GnuParser();
    Options options = new Options();
    options.addOption(tuit.IN, "input<file>", true, "Input file (currently fasta-formatted only)");
    options.addOption(tuit.OUT, "output<file>", true, "Output file (in " + tuit.TUIT_EXT + " format)");
    options.addOption(tuit.P, "prop<file>", true, "Properties file (XML formatted)");
    options.addOption(tuit.V, "verbose", false, "Enable verbose output");
    options.addOption(tuit.B, "blast_output<file>", true, "Perform on a pre-BLASTed output");
    options.addOption(tuit.DEPLOY, "deploy", false, "Deploy the taxonomic databases");
    options.addOption(tuit.UPDATE, "update", false, "Update the taxonomic databases");
    options.addOption(tuit.USE_DB, "usedb", false, "Use RDBMS instead of RAM-based taxonomy");

    Option option = new Option(tuit.REDUCE, "reduce", true,
            "Pack identical (100% similar sequences) records in the given sample file");
    option.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(option);
    option = new Option(tuit.COMBINE, "combine", true,
            "Combine a set of given reduction files into an HMP Tree-compatible taxonomy");
    option.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(option);
    options.addOption(tuit.NORMALIZE, "normalize", false,
            "If used in combination with -combine ensures that the values are normalized by the root value");

    HelpFormatter formatter = new HelpFormatter();
    try {
        //Get TUIT directory
        final File tuitDir = new File(
                new File(tuit.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath())
                        .getParent());
        final File ramDbFile = new File(tuitDir, tuit.RAM_DB);

        //Setup logger
        Log.getInstance().setLogName("tuit.log");

        //Read command line
        final CommandLine commandLine = parser.parse(options, args, true);

        //Check if the REDUCE option is on
        if (commandLine.hasOption(tuit.REDUCE)) {
            final String[] fileList = commandLine.getOptionValues(tuit.REDUCE);
            for (String s : fileList) {
                final Path path = Paths.get(s);
                Log.getInstance().log(Level.INFO, "Processing " + path.toString() + "...");
                final NucleotideFastaSequenceReductor nucleotideFastaSequenceReductor = NucleotideFastaSequenceReductor
                        .fromPath(path);
                ReductorFileOperator.save(nucleotideFastaSequenceReductor,
                        path.resolveSibling(path.getFileName().toString() + ".rdc"));
            }
            Log.getInstance().log(Level.FINE, "Task done, exiting...");
            return;
        }

        //Check if COMBINE is on
        if (commandLine.hasOption(tuit.COMBINE)) {
            final boolean normalize = commandLine.hasOption(tuit.NORMALIZE);
            final String[] fileList = commandLine.getOptionValues(tuit.COMBINE);
            //TODO: implement a test for format here
            final List<TreeFormatter.TreeFormatterFormat.HMPTreesOutput> hmpTreesOutputs = new ArrayList<>();
            final TreeFormatter treeFormatter = TreeFormatter
                    .newInstance(new TreeFormatter.TuitLineTreeFormatterFormat());
            for (String s : fileList) {
                final Path path = Paths.get(s);
                Log.getInstance().log(Level.INFO, "Merging " + path.toString() + "...");
                treeFormatter.loadFromPath(path);
                final TreeFormatter.TreeFormatterFormat.HMPTreesOutput output = TreeFormatter.TreeFormatterFormat.HMPTreesOutput
                        .newInstance(treeFormatter.toHMPTree(normalize), s.substring(0, s.indexOf(".")));
                hmpTreesOutputs.add(output);
                treeFormatter.erase();
            }
            final Path destination;
            if (commandLine.hasOption(OUT)) {
                destination = Paths.get(commandLine.getOptionValue(tuit.OUT));
            } else {
                destination = Paths.get("merge.tcf");
            }
            CombinatorFileOperator.save(hmpTreesOutputs, treeFormatter, destination);
            Log.getInstance().log(Level.FINE, "Task done, exiting...");
            return;
        }

        if (!commandLine.hasOption(tuit.P)) {
            throw new ParseException("No properties file option found, exiting.");
        } else {
            properties = new File(commandLine.getOptionValue(tuit.P));
        }

        //Load properties
        tuitPropertiesLoader = TUITPropertiesLoader.newInstanceFromFile(properties);
        tuitProperties = tuitPropertiesLoader.getTuitProperties();

        //Create tmp directory and blastn executable
        tmpDir = new File(tuitProperties.getTMPDir().getPath());
        blastnExecutable = new File(tuitProperties.getBLASTNPath().getPath());

        //Check for deploy
        if (commandLine.hasOption(tuit.DEPLOY)) {
            if (commandLine.hasOption(tuit.USE_DB)) {
                NCBITablesDeployer.fastDeployNCBIDatabasesFromNCBI(connection, tmpDir);
            } else {
                NCBITablesDeployer.fastDeployNCBIRamDatabaseFromNCBI(tmpDir, ramDbFile);
            }
            Log.getInstance().log(Level.FINE, "Task done, exiting...");
            return;
        }
        //Check for update
        if (commandLine.hasOption(tuit.UPDATE)) {
            if (commandLine.hasOption(tuit.USE_DB)) {
                NCBITablesDeployer.updateDatabasesFromNCBI(connection, tmpDir);
            } else {
                //No need to specify a different way to update the database other than just deploy in case of the RAM database
                NCBITablesDeployer.fastDeployNCBIRamDatabaseFromNCBI(tmpDir, ramDbFile);
            }
            Log.getInstance().log(Level.FINE, "Task done, exiting...");
            return;
        }

        //Connect to the database
        if (commandLine.hasOption(tuit.USE_DB)) {
            mySQL_connector = MySQL_Connector.newDefaultInstance(
                    "jdbc:mysql://" + tuitProperties.getDBConnection().getUrl().trim() + "/",
                    tuitProperties.getDBConnection().getLogin().trim(),
                    tuitProperties.getDBConnection().getPassword().trim());
            mySQL_connector.connectToDatabase();
            connection = mySQL_connector.getConnection();
        } else {
            //Probe for ram database
            if (ramDbFile.exists() && ramDbFile.canRead()) {
                Log.getInstance().log(Level.INFO, "Loading RAM taxonomic map...");
                try {
                    ramDb = RamDb.loadSelfFromFile(ramDbFile);
                } catch (IOException ie) {
                    if (ie instanceof java.io.InvalidClassException)
                        throw new IOException("The RAM-based taxonomic database needs to be updated.");
                }
            } else {
                Log.getInstance().log(Level.SEVERE,
                        "The RAM database either has not been deployed, or is not accessible."
                                + "Please use the --deploy option and check permissions on the TUIT directory. "
                                + "If you were looking to use the RDBMS as a taxonomic reference, plese use the -usedb option.");
                return;
            }
        }

        if (commandLine.hasOption(tuit.B)) {
            blastOutputFile = new File(commandLine.getOptionValue(tuit.B));
            if (!blastOutputFile.exists() || !blastOutputFile.canRead()) {
                throw new Exception("BLAST output file either does not exist, or is not readable.");
            } else if (blastOutputFile.isDirectory()) {
                throw new Exception("BLAST output file points to a directory.");
            }
        }

        //Check vital parameters
        if (!commandLine.hasOption(tuit.IN)) {
            throw new ParseException("No input file option found, exiting.");
        } else {
            inputFile = new File(commandLine.getOptionValue(tuit.IN));
            Log.getInstance().setLogName(inputFile.getName().split("\\.")[0] + ".tuit.log");
        }

        //Correct the output file option if needed
        if (!commandLine.hasOption(tuit.OUT)) {
            outputFile = new File((inputFile.getPath()).split("\\.")[0] + tuit.TUIT_EXT);
        } else {
            outputFile = new File(commandLine.getOptionValue(tuit.OUT));
        }

        //Adjust the output level
        if (commandLine.hasOption(tuit.V)) {
            Log.getInstance().setLevel(Level.FINE);
            Log.getInstance().log(Level.INFO, "Using verbose output for the log");
        } else {
            Log.getInstance().setLevel(Level.INFO);
        }

        //Try all files
        if (inputFile != null) {
            if (!inputFile.exists() || !inputFile.canRead()) {
                throw new Exception("Input file either does not exist, or is not readable.");
            } else if (inputFile.isDirectory()) {
                throw new Exception("Input file points to a directory.");
            }
        }
        if (!properties.exists() || !properties.canRead()) {
            throw new Exception("Properties file either does not exist, or is not readable.");
        } else if (properties.isDirectory()) {
            throw new Exception("Properties file points to a directory.");
        }

        //Create blast parameters
        final StringBuilder stringBuilder = new StringBuilder();
        for (Database database : tuitProperties.getBLASTNParameters().getDatabase()) {
            stringBuilder.append(database.getUse());
            stringBuilder.append(" "); //Gonna insert an extra space for the last database
        }
        String remote;
        String entrez_query;
        if (tuitProperties.getBLASTNParameters().getRemote().getDelegate().equals("yes")) {
            remote = "-remote";
            entrez_query = "-entrez_query";
            parameters = new String[] { "-db", stringBuilder.toString(), remote, entrez_query,
                    tuitProperties.getBLASTNParameters().getEntrezQuery().getValue(), "-evalue",
                    tuitProperties.getBLASTNParameters().getExpect().getValue() };
        } else {
            if (!commandLine.hasOption(tuit.B)) {
                if (tuitProperties.getBLASTNParameters().getEntrezQuery().getValue().toUpperCase().startsWith("NOT")
                        || tuitProperties.getBLASTNParameters().getEntrezQuery().getValue().toUpperCase()
                                .startsWith("ALL")) {
                    parameters = new String[] { "-db", stringBuilder.toString(), "-evalue",
                            tuitProperties.getBLASTNParameters().getExpect().getValue(), "-negative_gilist",
                            TUITFileOperatorHelper.restrictToEntrez(tmpDir,
                                    tuitProperties.getBLASTNParameters().getEntrezQuery().getValue()
                                            .toUpperCase().replace("NOT", "OR")).getAbsolutePath(),
                            "-num_threads", tuitProperties.getBLASTNParameters().getNumThreads().getValue() };
                } else if (tuitProperties.getBLASTNParameters().getEntrezQuery().getValue().toUpperCase()
                        .equals("")) {
                    parameters = new String[] { "-db", stringBuilder.toString(), "-evalue",
                            tuitProperties.getBLASTNParameters().getExpect().getValue(), "-num_threads",
                            tuitProperties.getBLASTNParameters().getNumThreads().getValue() };
                } else {
                    parameters = new String[] { "-db", stringBuilder.toString(), "-evalue",
                            tuitProperties.getBLASTNParameters().getExpect().getValue(),
                            /*"-gilist", TUITFileOperatorHelper.restrictToEntrez(tmpDir,
                                    tuitProperties.getBLASTNParameters().getEntrezQuery().getValue()).getAbsolutePath(),*/
                            //TODO remove comment!!!!!
                            "-num_threads", tuitProperties.getBLASTNParameters().getNumThreads().getValue() };
                }
            }
        }

        //Prepare a cutoff Map
        if (tuitProperties.getSpecificationParameters() != null
                && tuitProperties.getSpecificationParameters().size() > 0) {
            cutoffMap = new HashMap<Ranks, TUITCutoffSet>(tuitProperties.getSpecificationParameters().size());
            for (SpecificationParameters specificationParameters : tuitProperties.getSpecificationParameters()) {
                cutoffMap.put(Ranks.valueOf(specificationParameters.getCutoffSet().getRank()),
                        TUITCutoffSet.newDefaultInstance(
                                Double.parseDouble(
                                        specificationParameters.getCutoffSet().getPIdentCutoff().getValue()),
                                Double.parseDouble(
                                        specificationParameters.getCutoffSet().getQueryCoverageCutoff().getValue()),
                                Double.parseDouble(
                                        specificationParameters.getCutoffSet().getAlpha().getValue())));
            }
        } else {
            cutoffMap = new HashMap<Ranks, TUITCutoffSet>();
        }
        final TUITFileOperatorHelper.OutputFormat format;
        if (tuitProperties.getBLASTNParameters().getOutputFormat().getFormat().equals("rdp")) {
            format = TUITFileOperatorHelper.OutputFormat.RDP_FIXRANK;
        } else {
            format = TUITFileOperatorHelper.OutputFormat.TUIT;
        }

        try (TUITFileOperator<NucleotideFasta> nucleotideFastaTUITFileOperator = NucleotideFastaTUITFileOperator
                .newInstance(format, cutoffMap);) {
            nucleotideFastaTUITFileOperator.setInputFile(inputFile);
            nucleotideFastaTUITFileOperator.setOutputFile(outputFile);
            final String cleanupString = tuitProperties.getBLASTNParameters().getKeepBLASTOuts().getKeep();
            final boolean cleanup;
            if (cleanupString.equals("no")) {
                Log.getInstance().log(Level.INFO, "Temporary BLAST files will be deleted.");
                cleanup = true;
            } else {
                Log.getInstance().log(Level.INFO, "Temporary BLAST files will be kept.");
                cleanup = false;
            }
            //Create blast identifier
            ExecutorService executorService = Executors.newSingleThreadExecutor();
            if (commandLine.hasOption(tuit.USE_DB)) {
                if (blastOutputFile == null) {
                    blastIdentifier = TUITBLASTIdentifierDB.newInstanceFromFileOperator(tmpDir, blastnExecutable,
                            parameters, nucleotideFastaTUITFileOperator, connection, cutoffMap,
                            Integer.parseInt(tuitProperties.getBLASTNParameters().getMaxFilesInBatch().getValue()),
                            cleanup);
                } else {
                    try {
                        blastIdentifier = TUITBLASTIdentifierDB.newInstanceFromBLASTOutput(
                                nucleotideFastaTUITFileOperator, connection, cutoffMap, blastOutputFile,
                                Integer.parseInt(tuitProperties.getBLASTNParameters().getMaxFilesInBatch().getValue()),
                                cleanup);
                    } catch (JAXBException e) {
                        Log.getInstance().log(Level.SEVERE, "Error reading " + blastOutputFile.getName()
                                + ", please check input. The file must be XML formatted.");
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            } else {
                if (blastOutputFile == null) {
                    blastIdentifier = TUITBLASTIdentifierRAM.newInstanceFromFileOperator(tmpDir, blastnExecutable,
                            parameters, nucleotideFastaTUITFileOperator, cutoffMap,
                            Integer.parseInt(tuitProperties.getBLASTNParameters().getMaxFilesInBatch().getValue()),
                            cleanup, ramDb);
                } else {
                    try {
                        blastIdentifier = TUITBLASTIdentifierRAM.newInstanceFromBLASTOutput(
                                nucleotideFastaTUITFileOperator, cutoffMap, blastOutputFile,
                                Integer.parseInt(tuitProperties.getBLASTNParameters().getMaxFilesInBatch().getValue()),
                                cleanup, ramDb);
                    } catch (JAXBException e) {
                        Log.getInstance().log(Level.SEVERE, "Error reading " + blastOutputFile.getName()
                                + ", please check input. The file must be XML formatted.");
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
            Future<?> runnableFuture = executorService.submit(blastIdentifier);
            runnableFuture.get();
            executorService.shutdown();
        }
    } catch (ParseException pe) {
        Log.getInstance().log(Level.SEVERE, (pe.getMessage()));
        formatter.printHelp("tuit", options);
    } catch (SAXException saxe) {
        Log.getInstance().log(Level.SEVERE, saxe.getMessage());
    } catch (FileNotFoundException fnfe) {
        Log.getInstance().log(Level.SEVERE, fnfe.getMessage());
    } catch (TUITPropertyBadFormatException tpbfe) {
        Log.getInstance().log(Level.SEVERE, tpbfe.getMessage());
    } catch (ClassCastException cce) {
        Log.getInstance().log(Level.SEVERE, cce.getMessage());
    } catch (JAXBException jaxbee) {
        Log.getInstance().log(Level.SEVERE,
                "The properties file is not well formatted. Please ensure that the XML is consistent with the io.properties.dtd schema.");
    } catch (ClassNotFoundException cnfe) {
        //Probably won't happen unless the library deleted from the .jar
        Log.getInstance().log(Level.SEVERE, cnfe.getMessage());
        //cnfe.printStackTrace();
    } catch (SQLException sqle) {
        Log.getInstance().log(Level.SEVERE,
                "A database communication error occurred with the following message:\n" + sqle.getMessage());
        //sqle.printStackTrace();
        if (sqle.getMessage().contains("Access denied for user")) {
            Log.getInstance().log(Level.SEVERE, "Please use standard database login: " + NCBITablesDeployer.login
                    + " and password: " + NCBITablesDeployer.password);
        }
    } catch (Exception e) {
        Log.getInstance().log(Level.SEVERE, e.getMessage());
        e.printStackTrace();
    } finally {
        if (connection != null) {
            try {
                connection.close();
            } catch (SQLException sqle) {
                Log.getInstance().log(Level.SEVERE, "Problem closing the database connection: " + sqle);
            }
        }
        Log.getInstance().log(Level.FINE, "Task done, exiting...");
    }
}
From source file:Main.java
static void awaitTermination(ExecutorService threadPool) {
    threadPool.shutdown();
    try {
        threadPool.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
}
From source file:ReplaceWorker.java
private static void awaitTermination(ExecutorService threadPool) {
    try {
        threadPool.shutdown();
        boolean awaitTermination = threadPool.awaitTermination(1, TimeUnit.SECONDS);
        System.out.println("terminted successfull: " + awaitTermination);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:Main.java
public static void stop(ExecutorService executor) {
    try {
        executor.shutdown();
        executor.awaitTermination(60, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
}
From source file:Main.java
public static void stop(ExecutorService executor) {
    try {
        executor.shutdown();
        executor.awaitTermination(5, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
        System.err.println("termination interrupted");
    } finally {
        if (!executor.isTerminated()) {
            System.err.println("killing non-finished tasks");
        }
        executor.shutdownNow();
    }
}
From source file:Main.java
public static void stop(ExecutorService executor) {
    try {
        executor.shutdown();
        executor.awaitTermination(60, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
        System.err.println("termination interrupted");
    } finally {
        if (!executor.isTerminated()) {
            System.err.println("killing non-finished tasks");
        }
        executor.shutdownNow();
    }
}
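The last three helpers approximate the two-phase shutdown pattern described in the ExecutorService Javadoc. For completeness, a sketch of the fuller version of that pattern, which also waits again after shutdownNow and restores the calling thread's interrupt status (class and method names are illustrative):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;

class TwoPhaseShutdown {
    static void shutdownAndAwaitTermination(ExecutorService pool) {
        pool.shutdown(); // stop accepting new tasks
        try {
            // Give running tasks a chance to finish.
            if (!pool.awaitTermination(60, TimeUnit.SECONDS)) {
                pool.shutdownNow(); // cancel lingering tasks
                // Wait again for tasks to respond to being cancelled.
                if (!pool.awaitTermination(60, TimeUnit.SECONDS)) {
                    System.err.println("Pool did not terminate");
                }
            }
        } catch (InterruptedException ie) {
            pool.shutdownNow();
            Thread.currentThread().interrupt(); // preserve the interrupt status
        }
    }
}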