List of usage examples for java.util.concurrent.Executors.newFixedThreadPool
public static ExecutorService newFixedThreadPool(int nThreads)
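newFixedThreadPool(nThreads) creates a pool that reuses a fixed number of threads working off a shared unbounded queue: at most nThreads tasks run at once, and further tasks wait in the queue until a worker is free. Before the source-file examples below, here is a minimal, self-contained usage sketch (the class name, task body and pool size are illustrative, not taken from any of the examples):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class FixedPoolDemo {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService pool = Executors.newFixedThreadPool(4); // at most 4 tasks run concurrently
        for (int i = 0; i < 10; i++) {
            final int id = i;
            pool.execute(() -> System.out.println("task " + id + " on " + Thread.currentThread().getName()));
        }
        pool.shutdown();                                // stop accepting new tasks
        pool.awaitTermination(1, TimeUnit.MINUTES);     // wait for queued tasks to finish
    }
}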
From source file:com.guns.media.tools.yuv.MediaTool.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws IOException {
    try {
        Options options = getOptions();
        CommandLine cmd = null;
        int offset = 0;
        CommandLineParser parser = new GnuParser();
        cmd = parser.parse(options, args);
        if (cmd.hasOption("help")) {
            printHelp(options);
            exit(1);
        }
        if (cmd.hasOption("offset")) {
            offset = new Integer(cmd.getOptionValue("offset"));
        }
        int frame = new Integer(cmd.getOptionValue("f"));
        BufferedInputStream if1 = new BufferedInputStream(new FileInputStream(cmd.getOptionValue("if1")));
        BufferedInputStream if2 = new BufferedInputStream(new FileInputStream(cmd.getOptionValue("if2")));
        DataStorage.create(new Integer(cmd.getOptionValue("f")));
        int width = new Integer(cmd.getOptionValue("w"));
        int height = new Integer(cmd.getOptionValue("h"));
        LookUp.initSSYUV(width, height);
        int nRead;
        int fRead;
        byte[] data = new byte[width * height + ((width * height) / 2)];
        byte[] data1 = new byte[width * height + ((width * height) / 2)];
        int frames = 0;
        long start_ms = System.currentTimeMillis() / 1000L;
        long end_ms = start_ms;
        if (offset > 0) {
            if1.skip(((width * height + ((width * height) / 2)) * offset));
        } else if (offset < 0) {
            if2.skip(((width * height + ((width * height) / 2)) * (-1 * offset)));
        }
        if (cmd.hasOption("psnr")) {
            ThreadPoolExecutor executor = (ThreadPoolExecutor) Executors.newCachedThreadPool();
            while ((nRead = if1.read(data)) != -1 && ((fRead = if2.read(data1)) != -1) && frames < frame) {
                byte[] data_out = data.clone();
                byte[] data1_out = data1.clone();
                PSNRCalculatorThread wt = new PSNRCalculatorThread(data_out, data1_out, frames, width, height);
                executor.execute(wt);
                frames++;
            }
            executor.shutdown();
            end_ms = System.currentTimeMillis();
            System.out.println("Frame Rate :" + frames * 1000 / ((end_ms - start_ms)));
            for (int i = 0; i < frames; i++) {
                System.out.println(i + "," + 10 * Math.log10((255 * 255) / (DataStorage.getFrame(i) / (width * height))));
            }
        }
        if (cmd.hasOption("sub")) {
            RandomAccessFile raf = new RandomAccessFile(cmd.getOptionValue("o"), "rw");
            ThreadPoolExecutor executor = (ThreadPoolExecutor) Executors.newCachedThreadPool();
            while ((nRead = if1.read(data)) != -1 && ((fRead = if2.read(data1)) != -1)) {
                byte[] data_out = data.clone();
                byte[] data1_out = data1.clone();
                ImageSubstractThread wt = new ImageSubstractThread(data_out, data1_out, frames, width, height, raf);
                executor.execute(wt);
                frames++;
            }
            executor.shutdown();
            end_ms = System.currentTimeMillis() / 1000L;
            System.out.println("Frame Rate :" + frames / ((end_ms - start_ms)));
            raf.close();
        }
        if (cmd.hasOption("ss") && !cmd.getOptionValue("o").matches("-")) {
            RandomAccessFile raf = new RandomAccessFile(cmd.getOptionValue("o"), "rw");
            ThreadPoolExecutor executor = (ThreadPoolExecutor) Executors.newCachedThreadPool();
            while ((nRead = if1.read(data)) != -1 && ((fRead = if2.read(data1)) != -1) && frames < frame) {
                byte[] data_out = data.clone();
                byte[] data1_out = data1.clone();
                SidebySideImageThread wt = new SidebySideImageThread(data_out, data1_out, frames, width, height, raf);
                frames++;
                executor.execute(wt);
            }
            executor.shutdown();
            end_ms = System.currentTimeMillis() / 1000L;
            while (!executor.isTerminated()) {
            }
            raf.close();
        }
        if (cmd.hasOption("ss") && cmd.getOptionValue("o").matches("-")) {
            PrintStream stdout = new PrintStream(System.out);
            while ((nRead = if1.read(data)) != -1 && ((fRead = if2.read(data1)) != -1) && frames < frame) {
                byte[] data_out = data.clone();
                byte[] data1_out = data1.clone();
                SidebySideImageThread wt = new SidebySideImageThread(data_out, data1_out, frames, width, height, stdout);
                frames++;
                wt.run();
            }
            end_ms = System.currentTimeMillis() / 1000L;
            System.out.println("Frame Rate :" + frames / ((end_ms - start_ms)));
            stdout.close();
        }
        if (cmd.hasOption("image")) {
            System.setProperty("java.awt.headless", "true");
            ThreadPoolExecutor executor = (ThreadPoolExecutor) Executors.newFixedThreadPool(10);
            while ((nRead = if1.read(data)) != -1 && ((fRead = if2.read(data1)) != -1)) {
                if (frames == frame) {
                    byte[] data_out = data.clone();
                    byte[] data1_out = data1.clone();
                    Frame f1 = new Frame(data_out, width, height, Frame.YUV420);
                    Frame f2 = new Frame(data1_out, width, height, Frame.YUV420);
                    ExtractImageThread wt = new ExtractImageThread(f1, frames, cmd.getOptionValue("if1") + "frame1-" + cmd.getOptionValue("o"));
                    ExtractImageThread wt1 = new ExtractImageThread(f2, frames, cmd.getOptionValue("if2") + "frame2-" + cmd.getOptionValue("o"));
                    executor.execute(wt);
                    executor.execute(wt1);
                }
                frames++;
            }
            executor.shutdown();
            end_ms = System.currentTimeMillis() / 1000L;
            System.out.println("Frame Rate :" + frames / ((end_ms - start_ms)));
        }
    } catch (ParseException ex) {
        Logger.getLogger(MediaTool.class.getName()).log(Level.SEVERE, null, ex);
    }
}
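Note that this example waits for completion by spinning on isTerminated(), which keeps a CPU core busy doing nothing. A gentler pattern, shown here as a sketch rather than as part of the original source (assuming a java.util.concurrent.TimeUnit import), replaces the spin loop with awaitTermination:

executor.shutdown();
try {
    // Block until all queued frames are processed, or give up after a timeout.
    if (!executor.awaitTermination(10, TimeUnit.MINUTES)) {
        executor.shutdownNow(); // cancel tasks still running after the timeout
    }
} catch (InterruptedException e) {
    executor.shutdownNow();
    Thread.currentThread().interrupt();
}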
From source file:com.aerospike.load.AerospikeLoad.java
public static void main(String[] args) throws IOException {
    Thread statPrinter = new Thread(new PrintStat(counters));
    try {
        log.info("Aerospike loader started");
        Options options = new Options();
        options.addOption("h", "host", true, "Server hostname (default: localhost)");
        options.addOption("p", "port", true, "Server port (default: 3000)");
        options.addOption("n", "namespace", true, "Namespace (default: test)");
        options.addOption("s", "set", true, "Set name. (default: null)");
        options.addOption("c", "config", true, "Column definition file name");
        options.addOption("wt", "write-threads", true, "Number of writer threads (default: Number of cores * 5)");
        options.addOption("rt", "read-threads", true, "Number of reader threads (default: Number of cores * 1)");
        options.addOption("l", "rw-throttle", true, "Throttling of reader to writer(default: 10k) ");
        options.addOption("tt", "transaction-timeout", true, "write transaction timeout in miliseconds(default: No timeout)");
        options.addOption("et", "expiration-time", true, "Expiration time of records in seconds (default: never expire)");
        options.addOption("T", "timezone", true, "Timezone of source where data dump is taken (default: local timezone)");
        options.addOption("ec", "abort-error-count", true, "Error count to abort (default: 0)");
        options.addOption("wa", "write-action", true, "Write action if key already exists (default: update)");
        options.addOption("v", "verbose", false, "Logging all");
        options.addOption("u", "usage", false, "Print usage.");
        CommandLineParser parser = new PosixParser();
        CommandLine cl = parser.parse(options, args, false);
        if (args.length == 0 || cl.hasOption("u")) {
            logUsage(options);
            return;
        }
        if (cl.hasOption("l")) {
            rwThrottle = Integer.parseInt(cl.getOptionValue("l"));
        } else {
            rwThrottle = Constants.READLOAD;
        }
        // Get all command line options
        params = Utils.parseParameters(cl);
        // Get client instance
        AerospikeClient client = new AerospikeClient(params.host, params.port);
        if (!client.isConnected()) {
            log.error("Client is not able to connect:" + params.host + ":" + params.port);
            return;
        }
        if (params.verbose) {
            log.setLevel(Level.DEBUG);
        }
        // Get available processors to calculate default number of threads
        int cpus = Runtime.getRuntime().availableProcessors();
        nWriterThreads = cpus * scaleFactor;
        nReaderThreads = cpus;
        // Get writer thread count
        if (cl.hasOption("wt")) {
            nWriterThreads = Integer.parseInt(cl.getOptionValue("wt"));
            nWriterThreads = (nWriterThreads > 0
                    ? (nWriterThreads > Constants.MAX_THREADS ? Constants.MAX_THREADS : nWriterThreads)
                    : 1);
            log.debug("Using writer Threads: " + nWriterThreads);
        }
        writerPool = Executors.newFixedThreadPool(nWriterThreads);
        // Get reader thread count
        if (cl.hasOption("rt")) {
            nReaderThreads = Integer.parseInt(cl.getOptionValue("rt"));
            nReaderThreads = (nReaderThreads > 0
                    ? (nReaderThreads > Constants.MAX_THREADS ? Constants.MAX_THREADS : nReaderThreads)
                    : 1);
            log.debug("Using reader Threads: " + nReaderThreads);
        }
        String columnDefinitionFileName = cl.getOptionValue("c", null);
        log.debug("Column definition files/directory: " + columnDefinitionFileName);
        if (columnDefinitionFileName == null) {
            log.error("Column definition files/directory not specified. use -c <file name>");
            return;
        }
        File columnDefinitionFile = new File(columnDefinitionFileName);
        if (!columnDefinitionFile.exists()) {
            log.error("Column definition files/directory does not exist: " + Utils.getFileName(columnDefinitionFileName));
            return;
        }
        // Get data file list
        String[] files = cl.getArgs();
        if (files.length == 0) {
            log.error("No data file Specified: add <file/dir name> to end of the command ");
            return;
        }
        List<String> allFileNames = new ArrayList<String>();
        allFileNames = Utils.getFileNames(files);
        if (allFileNames.size() == 0) {
            log.error("Given datafiles/directory does not exist");
            return;
        }
        for (int i = 0; i < allFileNames.size(); i++) {
            log.debug("File names:" + Utils.getFileName(allFileNames.get(i)));
            File file = new File(allFileNames.get(i));
            counters.write.recordTotal = counters.write.recordTotal + file.length();
        }
        // Remove column definition file from list
        allFileNames.remove(columnDefinitionFileName);
        log.info("Number of data files:" + allFileNames.size());
        // Process column definition file to get meta data and bin mapping.
        metadataColumnDefs = new ArrayList<ColumnDefinition>();
        binColumnDefs = new ArrayList<ColumnDefinition>();
        metadataConfigs = new HashMap<String, String>();
        if (Parser.processJSONColumnDefinitions(columnDefinitionFile, metadataConfigs, metadataColumnDefs, binColumnDefs, params)) {
            log.info("Config file processed.");
        } else {
            throw new Exception("Config file parsing Error");
        }
        // Add metadata of config to parameters
        String metadata;
        if ((metadata = metadataConfigs.get(Constants.INPUT_TYPE)) != null) {
            params.fileType = metadata;
            if (params.fileType.equals(Constants.CSV_FILE)) {
                // Version check
                metadata = metadataConfigs.get(Constants.VERSION);
                String[] vNumber = metadata.split("\\.");
                int v1 = Integer.parseInt(vNumber[0]);
                int v2 = Integer.parseInt(vNumber[1]);
                if ((v1 <= Constants.MajorV) && (v2 <= Constants.MinorV)) {
                    log.debug("Config version used:" + metadata);
                } else
                    throw new Exception("\"" + Constants.VERSION + ":" + metadata + "\" is not Supported");
                // Set delimiter
                if ((metadata = metadataConfigs.get(Constants.DELIMITER)) != null && metadata.length() == 1) {
                    params.delimiter = metadata.charAt(0);
                } else {
                    log.warn("\"" + Constants.DELIMITER + ":" + metadata + "\" is not properly specified in config file. Default is ','");
                }
                if ((metadata = metadataConfigs.get(Constants.IGNORE_FIRST_LINE)) != null) {
                    params.ignoreFirstLine = metadata.equals("true");
                } else {
                    log.warn("\"" + Constants.IGNORE_FIRST_LINE + ":" + metadata + "\" is not properly specified in config file. Default is false");
                }
                if ((metadata = metadataConfigs.get(Constants.COLUMNS)) != null) {
                    counters.write.colTotal = Integer.parseInt(metadata);
                } else {
                    throw new Exception("\"" + Constants.COLUMNS + ":" + metadata + "\" is not properly specified in config file");
                }
            } else {
                throw new Exception("\"" + params.fileType + "\" is not supported in config file");
            }
        } else {
            throw new Exception("\"" + Constants.INPUT_TYPE + "\" is not specified in config file");
        }
        // Add config input to column definitions
        if (params.fileType.equals(Constants.CSV_FILE)) {
            List<String> binName = null;
            if (params.ignoreFirstLine) {
                String line;
                BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(allFileNames.get(0)), "UTF8"));
                if ((line = br.readLine()) != null) {
                    binName = Parser.getCSVRawColumns(line, params.delimiter);
                    br.close();
                    if (binName.size() != counters.write.colTotal) {
                        throw new Exception("Number of column in config file and datafile are mismatch."
                                + " Datafile: " + Utils.getFileName(allFileNames.get(0))
                                + " Configfile: " + Utils.getFileName(columnDefinitionFileName));
                    }
                }
            }
            // Update columndefs for metadata
            for (int i = 0; i < metadataColumnDefs.size(); i++) {
                if (metadataColumnDefs.get(i).staticValue) {
                } else {
                    if (metadataColumnDefs.get(i).binValuePos < 0) {
                        if (metadataColumnDefs.get(i).columnName == null) {
                            if (metadataColumnDefs.get(i).jsonPath == null) {
                                log.error("dynamic metadata having improper info" + metadataColumnDefs.toString()); //TODO
                            } else {
                                //TODO check for json_path
                            }
                        } else {
                            if (params.ignoreFirstLine) {
                                if (binName.indexOf(metadataColumnDefs.get(i).binValueHeader) != -1) {
                                    metadataColumnDefs.get(i).binValuePos = binName.indexOf(metadataColumnDefs.get(i).binValueHeader);
                                } else {
                                    throw new Exception("binName missing in data file:" + metadataColumnDefs.get(i).binValueHeader);
                                }
                            }
                        }
                    } else {
                        if (params.ignoreFirstLine)
                            metadataColumnDefs.get(i).binValueHeader = binName.get(metadataColumnDefs.get(i).binValuePos);
                    }
                }
                if ((!metadataColumnDefs.get(i).staticValue) && (metadataColumnDefs.get(i).binValuePos < 0)) {
                    throw new Exception("Information for bin mapping is missing in config file:" + metadataColumnDefs.get(i));
                }
                if (metadataColumnDefs.get(i).srcType == null) {
                    throw new Exception("Source data type is not properly mentioned:" + metadataColumnDefs.get(i));
                }
                if (metadataColumnDefs.get(i).binNameHeader == Constants.SET
                        && !metadataColumnDefs.get(i).srcType.equals(SrcColumnType.STRING)) {
                    throw new Exception("Set name should be string type:" + metadataColumnDefs.get(i));
                }
                if (metadataColumnDefs.get(i).binNameHeader.equalsIgnoreCase(Constants.SET) && params.set != null) {
                    throw new Exception("Set name is given both in config file and commandline. Provide only once.");
                }
            }
            // Update columndefs for bins
            for (int i = 0; i < binColumnDefs.size(); i++) {
                if (binColumnDefs.get(i).staticName) {
                } else {
                    if (binColumnDefs.get(i).binNamePos < 0) {
                        if (binColumnDefs.get(i).columnName == null) {
                            if (binColumnDefs.get(i).jsonPath == null) {
                                log.error("dynamic bin having improper info"); //TODO
                            } else {
                                //TODO check for json_path
                            }
                        } else {
                            if (params.ignoreFirstLine) {
                                if (binName.indexOf(binColumnDefs.get(i).binNameHeader) != -1) {
                                    binColumnDefs.get(i).binNamePos = binName.indexOf(binColumnDefs.get(i).binNameHeader);
                                } else {
                                    throw new Exception("binName missing in data file:" + binColumnDefs.get(i).binNameHeader);
                                }
                            }
                        }
                    } else {
                        if (params.ignoreFirstLine)
                            binColumnDefs.get(i).binNameHeader = binName.get(binColumnDefs.get(i).binNamePos);
                    }
                }
                if (binColumnDefs.get(i).staticValue) {
                } else {
                    if (binColumnDefs.get(i).binValuePos < 0) {
                        if (binColumnDefs.get(i).columnName == null) {
                            if (binColumnDefs.get(i).jsonPath == null) {
                                log.error("dynamic bin having improper info"); //TODO
                            } else {
                                //TODO check for json_path
                            }
                        } else {
                            if (params.ignoreFirstLine) {
                                if (binName.contains(binColumnDefs.get(i).binValueHeader)) {
                                    binColumnDefs.get(i).binValuePos = binName.indexOf(binColumnDefs.get(i).binValueHeader);
                                } else if (!binColumnDefs.get(i).binValueHeader.toLowerCase().equals(Constants.SYSTEM_TIME)) {
                                    throw new Exception("Wrong column name mentioned in config file:" + binColumnDefs.get(i).binValueHeader);
                                }
                            }
                        }
                    } else {
                        if (params.ignoreFirstLine)
                            binColumnDefs.get(i).binValueHeader = binName.get(binColumnDefs.get(i).binValuePos);
                    }
                    // Check for missing entries in config file
                    if (binColumnDefs.get(i).binValuePos < 0 && binColumnDefs.get(i).binValueHeader == null) {
                        throw new Exception("Information missing(Value header or bin mapping) in config file:" + binColumnDefs.get(i));
                    }
                    // Check for proper data type in config file.
                    if (binColumnDefs.get(i).srcType == null) {
                        throw new Exception("Source data type is not properly mentioned:" + binColumnDefs.get(i));
                    }
                    // Check for valid destination type
                    if ((binColumnDefs.get(i).srcType.equals(SrcColumnType.TIMESTAMP)
                            || binColumnDefs.get(i).srcType.equals(SrcColumnType.BLOB))
                            && binColumnDefs.get(i).dstType == null) {
                        throw new Exception("Destination type is not mentioned: " + binColumnDefs.get(i));
                    }
                    // Check for encoding
                    if (binColumnDefs.get(i).dstType != null && binColumnDefs.get(i).encoding == null) {
                        throw new Exception("Encoding is not given for src-dst type conversion:" + binColumnDefs.get(i));
                    }
                    // Check for valid encoding
                    if (binColumnDefs.get(i).srcType.equals(SrcColumnType.BLOB)
                            && !binColumnDefs.get(i).encoding.equals(Constants.HEX_ENCODING)) {
                        throw new Exception("Wrong encoding for blob data:" + binColumnDefs.get(i));
                    }
                }
                // Check static bin name mapped to dynamic bin value
                if ((binColumnDefs.get(i).binNamePos == binColumnDefs.get(i).binValuePos)
                        && (binColumnDefs.get(i).binNamePos != -1)) {
                    throw new Exception("Static bin name mapped to dynamic bin value:" + binColumnDefs.get(i));
                }
                // Check for missing entries in config file
                if (binColumnDefs.get(i).binNameHeader == null
                        && binColumnDefs.get(i).binNameHeader.length() > Constants.BIN_NAME_LENGTH) {
                    throw new Exception("Information missing binName or large binName in config file:" + binColumnDefs.get(i));
                }
            }
        }
        log.info(params.toString());
        log.debug("MetadataConfig:" + metadataColumnDefs);
        log.debug("BinColumnDefs:" + binColumnDefs);
        // Start PrintStat thread
        statPrinter.start();
        // Reader pool size
        ExecutorService readerPool = Executors.newFixedThreadPool(
                nReaderThreads > allFileNames.size() ? allFileNames.size() : nReaderThreads);
        log.info("Reader pool size : " + nReaderThreads);
        // Submit all tasks to writer threadpool.
        for (String aFile : allFileNames) {
            log.debug("Submitting task for: " + aFile);
            readerPool.submit(new AerospikeLoad(aFile, client, params));
        }
        // Wait for reader pool to complete
        readerPool.shutdown();
        log.info("Shutdown down reader thread pool");
        while (!readerPool.isTerminated())
            ;
        log.info("Reader thread pool terminated");
        // Wait for writer pool to complete after getting all tasks from reader pool
        writerPool.shutdown();
        log.info("Shutdown down writer thread pool");
        while (!writerPool.isTerminated())
            ;
        log.info("Writer thread pool terminated");
        // Print final statistic of aerospike-loader.
        log.info("Final Statistics of importer: (Succesfull Writes = " + counters.write.writeCount.get() + ", "
                + "Errors=" + (counters.write.writeErrors.get() + counters.write.readErrors.get() + counters.write.processingErrors.get())
                + "(" + (counters.write.writeErrors.get()) + "-Write," + counters.write.readErrors.get() + "-Read,"
                + counters.write.processingErrors.get() + "-Processing)");
    } catch (Exception e) {
        log.error(e);
        if (log.isDebugEnabled()) {
            e.printStackTrace();
        }
    } finally {
        // Stop statistic printer thread.
        statPrinter.interrupt();
        log.info("Aerospike loader completed");
    }
}
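Two sizing details stand out in this example: the writer pool defaults to cores * scaleFactor, and the reader pool is capped at the number of input files so no idle reader threads are created. The capping expression can be written more directly with Math.min; this is only a sketch using the nReaderThreads and allFileNames variables from the code above:

// One reader thread per data file, but never more than the configured limit.
int readerPoolSize = Math.min(nReaderThreads, allFileNames.size());
ExecutorService readerPool = Executors.newFixedThreadPool(readerPoolSize);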
From source file:com.puzzle.module.send.sign.SignXml.java
public static void main(String[] args) throws Exception {
    ClientApi api = ClientApi.getSingletonClientApi();
    String xml = FileUtils.readFileToString(new File("D:\\work\\myeclipse2014\\workspace\\Ukey\\sign.xml"), "UTF-8");
    ExecutorService s = Executors.newFixedThreadPool(3); // declaration restored so s.execute() below compiles
    for (int i = 0; i < 1; i++) {
        s.execute(new signTest(xml));
    }
    // String signData = Base64.encodeBase64String(signedData);
    // FileUtils.writeStringToFile(new File("D:\\sign.txt"), signData, "UTF-8");
    // System.out.println(signData);
}
From source file:com.bt.aloha.batchtest.PerformanceBatchTest.java
public static void main(String[] args) throws Exception {
    MultistackApplicationContextManager manager = null;
    try {
        manager = new MultistackApplicationContextManager(new String[] { "batchTestApplicationContext.xml",
                "propertyListenerApplicationContext.performance.xml" }, null);
        manager.injectManagerIntoApplicatonContext1Beans();
    } catch (Exception e) {
        log.error(e);
        e.printStackTrace();
        System.exit(1);
    }
    log.info("Loading application context");
    boolean success = true;
    PerformanceMeasurmentDao dao = null;
    ClassPathXmlApplicationContext applicationContext = manager.getApplicationContext1();
    dao = (PerformanceMeasurmentDao) applicationContext.getBean("performanceMeasurementDaoBean");
    PerformanceBatchTest performanceBatchTest = (PerformanceBatchTest) applicationContext.getBean("performanceBatchTestBean");
    int start = performanceBatchTest.getNumberOfInitialConcurrentStarts();
    int max = performanceBatchTest.getNumberOfMaxConcurrentStarts();
    int inc = performanceBatchTest.getNumberOfConcurrentStartsIncrements();
    long runId = dao.generateId();
    performanceBatchTest.setApplicationContext(applicationContext);
    performanceBatchTest.resetDb();
    for (int currNumberOfAppThreads = start; currNumberOfAppThreads <= max; currNumberOfAppThreads += inc) {
        performanceBatchTest.init();
        performanceBatchTest.addBatchScenarios();
        performanceBatchTest.setNumberOfConcurrentStarts(currNumberOfAppThreads);
        logSystemInformation(performanceBatchTest);
        performanceBatchTest.setExecutorService(Executors.newFixedThreadPool(currNumberOfAppThreads));
        log.info("Running tests with " + currNumberOfAppThreads + " concurrent threads");
        performanceBatchTest.run();
        performanceBatchTest.currentMetrics
                .setThreadInfo(String.format(Metrics.TI_STRING, currNumberOfAppThreads, start, max, inc));
        performanceBatchTest.currentMetrics.setTestType(performanceBatchTest.getTestType());
        dao.record("Call", runId, performanceBatchTest.currentMetrics);
        performanceBatchTest.executorService.shutdownNow();
        performanceBatchTest.results.clear();
        success &= performanceBatchTest.overallSuccess;
    }
    applicationContext.destroy();
    if (dao != null) {
        List<Metrics> metrics = dao.findMetricsByRunId(runId);
        Map<Long, List<Metrics>> m = new HashMap<Long, List<Metrics>>();
        m.put(runId, metrics);
        Chart c = new Chart(m);
        c.saveChart(new File("unitPerSecond.jpg"), "UPS with Std deviation", "threads", "units per second");
        m = dao.findLastXMetricsForTestType(5, performanceBatchTest.getTestType());
        c = new Chart(m);
        c.saveCombinedChart(new File("unitPerSecond-historical.jpg"), "Runs Per Second", "threads", "runs per second",
                "Standard Deviation", "threads", "std. deviation");
    }
    try {
        // allow sipp to settle down (in terms of sending its responses and us being up to receive them)
        Thread.sleep(30000);
    } catch (Throwable tex) {
    }
    System.exit(success ? 0 : 1);
}
From source file:edu.berkeley.sparrow.examples.BBackend.java
public static void main(String[] args) throws IOException, TException {
    ipAddress = InetAddress.getLocalHost().toString();
    OptionParser parser = new OptionParser();
    parser.accepts("c", "configuration file").withRequiredArg().ofType(String.class);
    parser.accepts("help", "print help statement");
    OptionSet options = parser.parse(args);
    if (options.has("help")) {
        parser.printHelpOn(System.out);
        System.exit(-1);
    }
    // Logger configuration: log to the console
    BasicConfigurator.configure();
    Configuration conf = new PropertiesConfiguration();
    if (options.has("c")) {
        String configFile = (String) options.valueOf("c");
        try {
            conf = new PropertiesConfiguration(configFile);
        } catch (ConfigurationException e) {
        }
    }
    // Start backend server
    LOG.setLevel(Level.toLevel(conf.getString(LOG_LEVEL, DEFAULT_LOG_LEVEL)));
    LOG.debug("debug logging on");
    int listenPort = conf.getInt(LISTEN_PORT, DEFAULT_LISTEN_PORT);
    int nodeMonitorPort = conf.getInt(NODE_MONITOR_PORT, NodeMonitorThrift.DEFAULT_NM_THRIFT_PORT);
    batchingDelay = conf.getLong(BATCHING_DELAY, DEFAULT_BATCHING_DELAY);
    String nodeMonitorHost = conf.getString(NODE_MONITOR_HOST, DEFAULT_NODE_MONITOR_HOST);
    int workerThread = conf.getInt(WORKER_THREADS, DEFAULT_WORKER_THREADS);
    appClientAdress = InetAddress.getByName(conf.getString(APP_CLIENT_IP));
    appClientPortNumber = conf.getInt(APP_CLIENT_PORT_NUMBER, DEFAULT_APP_CLIENT_PORT_NUMBER);
    executor = Executors.newFixedThreadPool(workerThread);
    // Start logging of results
    resultLog = new SynchronizedWrite("ResultsBackend.txt");
    Thread resultLogTh = new Thread(resultLog);
    resultLogTh.start();
    BBackend protoBackend = new BBackend();
    BackendService.Processor<BackendService.Iface> processor =
            new BackendService.Processor<BackendService.Iface>(protoBackend);
    TServers.launchSingleThreadThriftServer(listenPort, processor);
    protoBackend.initialize(listenPort, nodeMonitorHost, nodeMonitorPort);
}
From source file:Main.java
public final static ExecutorService getComputingPool(float loadFactor) { return Executors.newFixedThreadPool((int) (Runtime.getRuntime().availableProcessors() / (1 - loadFactor))); }
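This helper sizes the pool from a blocking ratio: with N available processors and a loadFactor that presumably represents the fraction of time a task spends waiting rather than computing, N / (1 - loadFactor) threads keep the cores occupied. For example, 8 cores with loadFactor 0.75 yields 8 / (1 - 0.75) = 32 threads; callers presumably keep loadFactor well below 1.0, since the computed size grows without bound as it approaches 1. A hedged usage sketch (the task body is illustrative):

// Sketch: with 8 available processors and loadFactor 0.75, the pool gets 32 threads.
ExecutorService ioHeavyPool = getComputingPool(0.75f);
ioHeavyPool.execute(() -> {
    // a task that spends most of its time blocked on I/O
});
ioHeavyPool.shutdown();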
From source file:Main.java
public static ExecutorService newExecutor(int threadSize) { return Executors.newFixedThreadPool(threadSize); }
From source file:Main.java
public static ExecutorService getFixedThreadPool(int count) { return Executors.newFixedThreadPool(count); }
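One possible way to use such a wrapper with result-bearing tasks is submit() plus Future; this sketch assumes it runs where getFixedThreadPool is visible and the enclosing method declares throws Exception (imports of List, ArrayList and Future omitted), and the squaring task is purely illustrative:

ExecutorService pool = getFixedThreadPool(4);
List<Future<Integer>> results = new ArrayList<>();
for (int i = 0; i < 8; i++) {
    final int n = i;
    results.add(pool.submit(() -> n * n)); // Callable<Integer>: the task returns a value
}
for (Future<Integer> f : results) {
    System.out.println(f.get()); // blocks until the corresponding task finishes
}
pool.shutdown();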
From source file:Main.java
public static void newFixedThreadPool(Runnable runnable, int size) {
    ExecutorService fixedThreadPool = Executors.newFixedThreadPool(size);
    fixedThreadPool.execute(runnable);
}
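As written, this helper creates a fresh pool on every call and never shuts it down, so its non-daemon worker threads stay alive and keep the JVM running. If a throwaway pool is really wanted, shutting it down right after submitting the work avoids the leak; this is a sketch, renamed here so it does not clash with the original:

public static void runOnFixedThreadPool(Runnable runnable, int size) {
    ExecutorService fixedThreadPool = Executors.newFixedThreadPool(size);
    fixedThreadPool.execute(runnable);
    fixedThreadPool.shutdown(); // the already-submitted task still runs; the pool exits afterwards
}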
From source file:Main.java
private static void initService() { if (executorService == null) { executorService = Executors.newFixedThreadPool(maxThreadNum); } }
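The check-then-create above is fine if initService() is only ever called from one thread; if two threads can race into it, both may create a pool and one is leaked. A thread-safe variant, sketched here assuming the same executorService and maxThreadNum fields:

private static synchronized void initService() {
    if (executorService == null) {
        executorService = Executors.newFixedThreadPool(maxThreadNum);
    }
}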