List of usage examples for java.util.concurrent.Executors.newFixedThreadPool
public static ExecutorService newFixedThreadPool(int nThreads)
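Before the project-specific examples below, here is a minimal self-contained sketch of the typical lifecycle: create the pool, submit work, then shut the pool down. The class name FixedPoolDemo and the pool size of 4 are illustrative choices, not taken from any of the source files that follow.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

public class FixedPoolDemo {
    public static void main(String[] args) throws Exception {
        // Pool with exactly 4 worker threads; tasks beyond that queue up.
        ExecutorService pool = Executors.newFixedThreadPool(4);
        try {
            // submit() returns a Future; get() blocks until the task completes.
            Future<Integer> sum = pool.submit(() -> 1 + 2 + 3);
            System.out.println("sum = " + sum.get());
        } finally {
            // Always shut the pool down: its non-daemon worker threads
            // otherwise keep the JVM alive after main() returns.
            pool.shutdown();
            pool.awaitTermination(10, TimeUnit.SECONDS);
        }
    }
}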
From source file:com.aliyun.odps.volume.VolumeFSInputStream.java
public VolumeFSInputStream(String path, VolumeFSClient volumeClient, Long fileLength,
        Configuration conf) throws IOException {
    this.path = path;
    this.volumeFSClient = volumeClient;
    this.seekOptimization = conf.getBoolean(
            VolumeFileSystemConfigKeys.ODPS_VOLUME_SEEK_OPTIMIZATION_ENABLED, false);
    if (this.seekOptimization) {
        this.blockSize = conf.getLong(VolumeFileSystemConfigKeys.ODPS_VOLUME_BLOCK_SIZE,
                VolumeFSConstants.DEFAULT_VOLUME_BLOCK_SIZE);
    }
    this.fileLength = fileLength;
    this.closed = false;
    this.uuid = UUID.randomUUID().toString();
    buffer_block_dir = new File(conf.get(VolumeFileSystemConfigKeys.ODPS_VOLUME_BLOCK_BUFFER_DIR,
            VolumeFSConstants.DEFAULT_VOLUME_BLOCK_BUFFER_DIR));
    if (!buffer_block_dir.exists() && !buffer_block_dir.mkdirs()) {
        throw new IOException("Cannot create Volume block buffer directory: " + buffer_block_dir);
    }
    if (seekOptimization) {
        executorService = Executors.newFixedThreadPool(1);
    }
}
From source file:com.supermy.flume.interceptor.RuleThreadSearchAndReplaceInterceptor.java
@Override
public void initialize() {
    // executorService = Executors.newFixedThreadPool(threadNum);
    executorService = Executors.newFixedThreadPool(threadPool);
    // executorService = Executors.newCachedThreadPool();
    f = new File(searchReplaceDsl); // fixme
}
From source file:org.bpmscript.process.hibernate.SpringHibernateInstanceManagerTest.java
public void testInstanceManagerSeparateThread() throws Exception {
    ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext(
            "/org/bpmscript/endtoend/spring.xml");
    try {
        final IInstanceManager instanceManager = (IInstanceManager) context.getBean("instanceManager");
        final String pid1 = instanceManager.createInstance("parentVersion", "definitionId", "test",
                IJavascriptProcessDefinition.DEFINITION_TYPE_JAVASCRIPT, "one");
        IInstance instance = instanceManager.getInstance(pid1);
        assertNotNull(instance);
        instanceManager.createInstance("parentVersion", "definitionId", "test",
                IJavascriptProcessDefinition.DEFINITION_TYPE_JAVASCRIPT, "two");
        ExecutorService executorService = Executors.newFixedThreadPool(2);
        final AtomicReference<Queue<String>> results = new AtomicReference<Queue<String>>(
                new LinkedList<String>());
        Future<Object> future1 = executorService.submit(new Callable<Object>() {
            public Object call() throws Exception {
                return instanceManager.doWithInstance(pid1, new IInstanceCallback() {
                    public IExecutorResult execute(IInstance instance) throws Exception {
                        log.info("locking one");
                        Thread.sleep(2000);
                        results.get().add("one");
                        return new IgnoredResult("", "", "");
                    }
                });
            }
        });
        Thread.sleep(100);
        assertNotNull(future1.get());
    } finally {
        context.destroy();
    }
}
From source file:com.pinterest.terrapin.storage.HFileReaderTest.java
@BeforeClass
public static void setUp() throws Exception {
    int randomNum = (int) (Math.random() * Integer.MAX_VALUE);
    hfilePath = "/tmp/hfile-" + randomNum;
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    keyValueMap = Maps.newHashMapWithExpectedSize(10000);
    errorKeys = Sets.newHashSetWithExpectedSize(2000);
    StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, new CacheConfig(conf), fs, 4096)
            .withFilePath(new Path(hfilePath)).withCompression(Compression.Algorithm.NONE).build();
    // Add up to 10K values.
    for (int i = 0; i < 10000; i++) {
        byte[] key = String.format("%04d", i).getBytes();
        byte[] value = null;
        // Add a couple of empty values for testing and making sure we return them.
        if (i <= 1) {
            value = "".getBytes();
        } else {
            value = ("v" + (i + 1)).getBytes();
        }
        KeyValue kv = new KeyValue(key, Bytes.toBytes("cf"), Bytes.toBytes(""), value);
        writer.append(kv);
        keyValueMap.put(ByteBuffer.wrap(key), ByteBuffer.wrap(value));
        if (i >= 4000 && i < 6000) {
            errorKeys.add(ByteBuffer.wrap(key));
        }
    }
    writer.close();
    hfileReader = new TestHFileReader(fs, hfilePath, new CacheConfig(conf),
            new ExecutorServiceFuturePool(Executors.newFixedThreadPool(1)), errorKeys);
}
From source file:net.famzangl.minecraft.minebot.PlayerUpdateHandler.java
public PlayerUpdateHandler() {
    sendThread = Executors.newFixedThreadPool(2);
}
From source file:de.tbuchloh.kiskis.cracker.PasswordCracker.java
public String crackPassword() {
    final Long totalEstimation = _passwordCreator.estimateTotalCount();
    _progressListener.notifyTotalCount(totalEstimation);
    _progressListener.notifyStartTime(System.currentTimeMillis());
    final AtomicBoolean found = new AtomicBoolean(false);
    final Callable<String> callable = new Callable<String>() {
        @Override
        public String call() throws Exception {
            String guess;
            while (!found.get() && (guess = _passwordCreator.create()) != null) {
                _progressListener.notifyTry(guess);
                if (_tester.test(guess)) {
                    found.set(true);
                    return guess;
                }
            }
            return null;
        }
    };
    final int cpus = ManagementFactory.getOperatingSystemMXBean().getAvailableProcessors();
    LOG.info(String.format("Found %1$d cpus ...", cpus));
    final ExecutorService threadPool = Executors.newFixedThreadPool(cpus);
    final Collection<Callable<String>> tasks = new ArrayList<Callable<String>>();
    for (int i = 0; i < cpus; ++i) {
        tasks.add(callable);
    }
    try {
        final List<Future<String>> futures = threadPool.invokeAll(tasks);
        for (final Future<String> future : futures) {
            final String guessedPwd = future.get();
            if (guessedPwd != null) {
                return guessedPwd;
            }
        }
        return null;
    } catch (final InterruptedException e) {
        throw new KisKisRuntimeException("InterruptedException", e);
    } catch (final ExecutionException e) {
        throw new KisKisRuntimeException("ExecutionException", e);
    }
}
From source file:com.amazonaws.services.kinesis.multilang.StreamingRecordProcessorTest.java
@Before
public void prepare() throws IOException, InterruptedException, ExecutionException {
    // Fake command
    String command = "derp";
    systemExitCount = 0;

    // Mocks
    ExecutorService executor = Executors.newFixedThreadPool(3);
    final Process process = Mockito.mock(Process.class);
    messageWriter = Mockito.mock(MessageWriter.class);
    messageReader = Mockito.mock(MessageReader.class);
    errorReader = Mockito.mock(DrainChildSTDERRTask.class);
    recordProcessor = new MultiLangRecordProcessor(new ProcessBuilder(), executor, new ObjectMapper(),
            messageWriter, messageReader, errorReader) {
        // Just don't do anything when we exit.
        void exit() {
            systemExitCount += 1;
        }

        // Inject our mock process
        Process startProcess() {
            return process;
        }
    };
    // Our process will return mock streams
    InputStream inputStream = Mockito.mock(InputStream.class);
    InputStream errorStream = Mockito.mock(InputStream.class);
    OutputStream outputStream = Mockito.mock(OutputStream.class);
    Mockito.doReturn(inputStream).when(process).getInputStream();
    Mockito.doReturn(errorStream).when(process).getErrorStream();
    Mockito.doReturn(outputStream).when(process).getOutputStream();
    Mockito.doReturn(Mockito.mock(Future.class)).when(messageReader).drainSTDOUT();
    Future<Boolean> trueFuture = Mockito.mock(Future.class);
    Mockito.doReturn(true).when(trueFuture).get();
    Mockito.doReturn(trueFuture).when(messageWriter).writeInitializeMessage(Mockito.anyString());
    Mockito.doReturn(trueFuture).when(messageWriter).writeCheckpointMessageWithError(Mockito.anyString(),
            Mockito.any(Throwable.class));
    Mockito.doReturn(trueFuture).when(messageWriter).writeProcessRecordsMessage(Mockito.anyList());
    Mockito.doReturn(trueFuture).when(messageWriter).writeShutdownMessage(Mockito.any(ShutdownReason.class));
}
From source file:com.ottogroup.bi.spqr.pipeline.MicroPipelineManager.java
/**
 * Initializes the micro pipeline manager
 * @param processingNodeId identifier of node this manager lives on
 * @param componentRepository reference to {@link ComponentRepository} which provides access to all {@link MicroPipelineComponent}
 * @param maxNumberOfThreads max. number of threads assigned to {@link ExecutorService} (1 = single threaded, n = fixed number of threads, other = cached thread pool)
 * @throws RequiredInputMissingException
 */
public MicroPipelineManager(final String processingNodeId, final ComponentRepository componentRepository,
        final int maxNumberOfThreads) throws RequiredInputMissingException {

    //////////////////////////////////////////////////////////////////////////////
    // validate provided input
    if (componentRepository == null)
        throw new RequiredInputMissingException("Missing required component repository");
    if (StringUtils.isBlank(processingNodeId))
        throw new RequiredInputMissingException("Missing required processing node identifier");
    //////////////////////////////////////////////////////////////////////////////

    this.processingNodeId = StringUtils.lowerCase(StringUtils.trim(processingNodeId));
    this.microPipelineFactory = new MicroPipelineFactory(this.processingNodeId, componentRepository);

    if (maxNumberOfThreads == 1)
        this.executorService = Executors.newSingleThreadExecutor();
    else if (maxNumberOfThreads > 1)
        this.executorService = Executors.newFixedThreadPool(maxNumberOfThreads);
    else
        this.executorService = Executors.newCachedThreadPool();
}
From source file:com.taobao.pushit.server.PushitBroker.java
public PushitBroker(Properties props) throws IOException {
    int port = DEFAULT_PORT;
    int threadPoolSize = DEFAULT_THREAD_POOL_SIZE;
    if (!StringUtils.isBlank(props.getProperty("port"))) {
        port = Integer.parseInt(props.getProperty("port"));
    }
    if (!StringUtils.isBlank(props.getProperty("requestThreadPoolSize"))) {
        threadPoolSize = Integer.parseInt(props.getProperty("requestThreadPoolSize"));
    }
    requestExecutor = (ThreadPoolExecutor) Executors.newFixedThreadPool(threadPoolSize);
    ServerConfig serverConfig = new ServerConfig();
    serverConfig.setPort(port);
    serverConfig.setWireFormatType(new PushitWireFormatType());
    remotingServer = RemotingFactory.newRemotingServer(serverConfig);
    pushService = new PushService(remotingServer);
    String hosts = props.getProperty("cluster_hosts");
    if (StringUtils.isBlank(hosts)) {
        log.warn("cluster_hosts is blank"); // original warning text was non-ASCII and lost in extraction
    }
    clusterService = new ClusterService(hosts);
    remotingServer.registerProcessor(NotifyCommand.class, new NotifyCommandProecssor());
    remotingServer.registerProcessor(InterestCommand.class, new ClientInterestsProcessor());
    remotingServer.registerProcessor(BroadCastCommand.class, new BroadcastProcessor());
    remotingServer.addConnectionLifeCycleListener(new ConnectionClosedListener(remotingServer));
    // this.remotingServer.addConnectionLifeCycleListener(new ConnectionMetaDataTimerListener());
    int connectionThreshold = DEFAULT_CONN_THRESHOLD;
    int ipCountCheckInterval = DEFAULT_IPCOUNT_CHECK_INTERVAL;
    int ipCountThreshold = DEFAULT_IPCOUNT_THRESHOLD;
    if (!StringUtils.isBlank(props.getProperty("conn_threshold"))) {
        connectionThreshold = Integer.parseInt(props.getProperty("conn_threshold"));
    }
    if (!StringUtils.isBlank(props.getProperty("ip_check_interval"))) {
        ipCountCheckInterval = Integer.parseInt(props.getProperty("ip_check_interval"));
    }
    if (!StringUtils.isBlank(props.getProperty("ip_count_threshold"))) {
        ipCountThreshold = Integer.parseInt(props.getProperty("ip_count_threshold"));
    }
    this.remotingServer.addConnectionLifeCycleListener(
            new ConnectionNumberListener(connectionThreshold, ipCountThreshold, ipCountCheckInterval));
}
From source file:ivory.ltr.GreedyLearn.java
public void train(String featFile, String modelOutputFile, int numModels, String metricClassName,
        boolean pruneCorrelated, double correlationThreshold, boolean logFeatures, boolean productFeatures,
        boolean quotientFeatures, int numThreads) throws IOException, InterruptedException,
        ExecutionException, ConfigurationException, InstantiationException, IllegalAccessException,
        ClassNotFoundException {
    // read training instances
    Instances trainInstances = new Instances(featFile);

    // get feature map (mapping from feature names to feature number)
    Map<String, Integer> featureMap = trainInstances.getFeatureMap();

    // construct initial model
    Model initialModel = new Model();

    // initialize feature pools
    Map<Model, ArrayList<Feature>> featurePool = new HashMap<Model, ArrayList<Feature>>();
    featurePool.put(initialModel, new ArrayList<Feature>());

    // add simple features to feature pools
    for (String featureName : featureMap.keySet()) {
        featurePool.get(initialModel).add(new SimpleFeature(featureMap.get(featureName), featureName));
    }

    // eliminate document-independent features
    // (iterate over the initial model's pool, not the map itself)
    List<Feature> constantFeatures = new ArrayList<Feature>();
    for (int i = 0; i < featurePool.get(initialModel).size(); i++) {
        Feature f = featurePool.get(initialModel).get(i);
        if (trainInstances.featureIsConstant(f)) {
            System.err.println("Feature " + f.getName() + " is constant -- removing from feature pool!");
            constantFeatures.add(f);
        }
    }
    featurePool.get(initialModel).removeAll(constantFeatures);

    // initialize score tables
    Map<Model, ScoreTable> scoreTable = new HashMap<Model, ScoreTable>();
    scoreTable.put(initialModel, new ScoreTable(trainInstances));

    // initialize model queue
    List<Model> models = new ArrayList<Model>();
    models.add(initialModel);

    // set up threading
    ExecutorService threadPool = Executors.newFixedThreadPool(numThreads);
    Map<Model, ArrayList<ArrayList<Feature>>> featureBatches = new HashMap<Model, ArrayList<ArrayList<Feature>>>();
    featureBatches.put(initialModel, new ArrayList<ArrayList<Feature>>());
    for (int i = 0; i < numThreads; i++) {
        featureBatches.get(initialModel).add(new ArrayList<Feature>());
    }
    for (int i = 0; i < featurePool.get(initialModel).size(); i++) {
        featureBatches.get(initialModel).get(i % numThreads).add(featurePool.get(initialModel).get(i));
    }

    // greedily add features
    double curMetric = 0.0;
    double prevMetric = Double.NEGATIVE_INFINITY;
    int iter = 1;
    while (curMetric - prevMetric > TOLERANCE) {
        Map<ModelFeaturePair, AlphaMeasurePair> modelFeaturePairMeasures = new HashMap<ModelFeaturePair, AlphaMeasurePair>();

        // update models
        for (Model model : models) {
            List<Future<Map<Feature, AlphaMeasurePair>>> futures = new ArrayList<Future<Map<Feature, AlphaMeasurePair>>>();
            for (int i = 0; i < numThreads; i++) {
                // construct measure
                Measure metric = (Measure) Class.forName(metricClassName).newInstance();

                // line searcher
                LineSearch search = new LineSearch(model, featureBatches.get(model).get(i),
                        scoreTable.get(model), metric);
                Future<Map<Feature, AlphaMeasurePair>> future = threadPool.submit(search);
                futures.add(future);
            }
            for (int i = 0; i < numThreads; i++) {
                Map<Feature, AlphaMeasurePair> featAlphaMeasureMap = futures.get(i).get();
                for (Feature f : featAlphaMeasureMap.keySet()) {
                    AlphaMeasurePair featAlphaMeasure = featAlphaMeasureMap.get(f);
                    modelFeaturePairMeasures.put(new ModelFeaturePair(model, f), featAlphaMeasure);
                }
            }
        }

        // sort model-feature pairs
        List<ModelFeaturePair> modelFeaturePairs = new ArrayList<ModelFeaturePair>(
                modelFeaturePairMeasures.keySet());
        Collections.sort(modelFeaturePairs, new ModelFeatureComparator(modelFeaturePairMeasures));

        // preserve current list of models
        List<Model> oldModels = new ArrayList<Model>(models);

        // add best model feature pairs to pool
        models = new ArrayList<Model>();

        // Lidan: here consider top-K features, rather than just the best one
        for (int i = 0; i < numModels; i++) {
            Model model = modelFeaturePairs.get(i).model;
            Feature feature = modelFeaturePairs.get(i).feature;
            String bestFeatureName = feature.getName();
            AlphaMeasurePair bestAlphaMeasure = modelFeaturePairMeasures.get(modelFeaturePairs.get(i));

            System.err.println("Model = " + model);
            System.err.println("Best feature: " + bestFeatureName);
            System.err.println("Best alpha: " + bestAlphaMeasure.alpha);
            System.err.println("Best measure: " + bestAlphaMeasure.measure);

            Model newModel = new Model(model);
            models.add(newModel);

            ArrayList<ArrayList<Feature>> newFeatureBatch = new ArrayList<ArrayList<Feature>>();
            for (ArrayList<Feature> fb : featureBatches.get(model)) {
                newFeatureBatch.add(new ArrayList<Feature>(fb));
            }
            featureBatches.put(newModel, newFeatureBatch);
            featurePool.put(newModel, new ArrayList<Feature>(featurePool.get(model)));

            // add auxiliary features (for atomic features only)
            if (featureMap.containsKey(bestFeatureName)) {
                int bestFeatureIndex = featureMap.get(bestFeatureName);

                // add log features, if requested
                if (logFeatures) {
                    Feature logFeature = new LogFeature(bestFeatureIndex, "log(" + bestFeatureName + ")");
                    featureBatches.get(newModel).get(bestFeatureIndex % numThreads).add(logFeature);
                    featurePool.get(newModel).add(logFeature);
                }

                // add product features, if requested
                if (productFeatures) {
                    for (String featureNameB : featureMap.keySet()) {
                        int indexB = featureMap.get(featureNameB);
                        Feature prodFeature = new ProductFeature(bestFeatureIndex, indexB,
                                bestFeatureName + "*" + featureNameB);
                        featureBatches.get(newModel).get(indexB % numThreads).add(prodFeature);
                        featurePool.get(newModel).add(prodFeature);
                    }
                }

                // add quotient features, if requested
                if (quotientFeatures) {
                    for (String featureNameB : featureMap.keySet()) {
                        int indexB = featureMap.get(featureNameB);
                        Feature divFeature = new QuotientFeature(bestFeatureIndex, indexB,
                                bestFeatureName + "/" + featureNameB);
                        featureBatches.get(newModel).get(indexB % numThreads).add(divFeature);
                        featurePool.get(newModel).add(divFeature);
                    }
                }
            }

            // prune highly correlated features
            if (pruneCorrelated) {
                if (!newModel.containsFeature(feature)) {
                    List<Feature> correlatedFeatures = new ArrayList<Feature>();
                    for (Feature f : featurePool.get(newModel)) {
                        if (f == feature) {
                            continue;
                        }
                        double correl = trainInstances.getCorrelation(f, feature);
                        if (correl > correlationThreshold) {
                            System.err.println("Pruning highly correlated feature: " + f.getName());
                            correlatedFeatures.add(f);
                        }
                    }
                    for (ArrayList<Feature> batch : featureBatches.get(newModel)) {
                        batch.removeAll(correlatedFeatures);
                    }
                    featurePool.get(newModel).removeAll(correlatedFeatures);
                }
            }

            // update score table
            if (iter == 0) {
                scoreTable.put(newModel, scoreTable.get(model).translate(feature, 1.0, 1.0));
                newModel.addFeature(feature, 1.0);
            } else {
                scoreTable.put(newModel, scoreTable.get(model).translate(feature, bestAlphaMeasure.alpha,
                        1.0 / (1.0 + bestAlphaMeasure.alpha)));
                newModel.addFeature(feature, bestAlphaMeasure.alpha);
            }
        }

        for (Model model : oldModels) {
            featurePool.remove(model);
            featureBatches.remove(model);
            scoreTable.remove(model);
        }

        // update metrics
        prevMetric = curMetric;
        curMetric = modelFeaturePairMeasures.get(modelFeaturePairs.get(0)).measure;
        iter++;
    }

    // serialize model
    System.out.println("Final Model: " + models.get(0));
    models.get(0).write(modelOutputFile);
    threadPool.shutdown();
}