Usage examples for java.util.concurrent.ConcurrentLinkedQueue
public ConcurrentLinkedQueue(): creates an initially empty queue.
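Before the project examples below, a minimal self-contained sketch of the operations they all build on (class and variable names here are illustrative, not taken from any of the projects):

    import java.util.concurrent.ConcurrentLinkedQueue;

    public class BasicUsage {
        public static void main(String[] args) {
            // Unbounded, lock-free, thread-safe FIFO queue.
            ConcurrentLinkedQueue<String> queue = new ConcurrentLinkedQueue<>();
            queue.offer("a");            // offer() always succeeds (the queue is unbounded)
            queue.add("b");              // equivalent to offer() for this class
            String head = queue.peek();  // "a" -- inspect the head without removing it
            String taken = queue.poll(); // "a" -- remove the head, or null if empty
            // size() is O(n) and only a moving estimate under concurrency;
            // prefer isEmpty() for emptiness checks.
            System.out.println(head + " " + taken + " " + queue.isEmpty());
        }
    }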
From source file:org.apache.storm.daemon.DrpcServer.java
    protected ConcurrentLinkedQueue<DRPCRequest> acquireQueue(String function) {
        ConcurrentLinkedQueue<DRPCRequest> reqQueue = requestQueues.get(function);
        if (reqQueue == null) {
            reqQueue = new ConcurrentLinkedQueue<>();
            // putIfAbsent makes the insertion atomic; if another thread won
            // the race, keep the queue it installed.
            ConcurrentLinkedQueue<DRPCRequest> old = requestQueues.putIfAbsent(function, reqQueue);
            if (old != null) {
                reqQueue = old;
            }
        }
        return reqQueue;
    }
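The get / putIfAbsent / fall-back-to-the-winner dance above can be collapsed into a single atomic call with ConcurrentMap.computeIfAbsent. A hedged sketch, assuming the registry map is a ConcurrentHashMap (class and key names are illustrative):

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentLinkedQueue;
    import java.util.concurrent.ConcurrentMap;

    class QueueRegistry<T> {
        private final ConcurrentMap<String, ConcurrentLinkedQueue<T>> queues =
                new ConcurrentHashMap<>();

        // computeIfAbsent creates the queue at most once per key, atomically.
        ConcurrentLinkedQueue<T> acquireQueue(String key) {
            return queues.computeIfAbsent(key, k -> new ConcurrentLinkedQueue<>());
        }
    }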
From source file:com.ibm.crail.tools.CrailBenchmark.java
    void readRandom(String filename, int size, int loop, boolean buffered) throws Exception {
        System.out.println("readRandom, filename " + filename + ", size " + size + ", loop " + loop
                + ", buffered " + buffered);

        CrailBuffer buf = null;
        if (size == CrailConstants.BUFFER_SIZE) {
            buf = fs.allocateBuffer();
        } else if (size < CrailConstants.BUFFER_SIZE) {
            CrailBuffer _buf = fs.allocateBuffer();
            _buf.clear().limit(size);
            buf = _buf.slice();
        } else {
            buf = OffHeapBuffer.wrap(ByteBuffer.allocateDirect(size));
        }

        // warmup
        ConcurrentLinkedQueue<CrailBuffer> bufferQueue = new ConcurrentLinkedQueue<CrailBuffer>();
        bufferQueue.add(buf);
        warmUp(filename, warmup, bufferQueue);

        // benchmark
        System.out.println("starting benchmark...");
        fs.getStatistics().reset();
        CrailFile file = fs.lookup(filename).get().asFile();
        CrailBufferedInputStream bufferedStream = file.getBufferedInputStream(file.getCapacity());
        CrailInputStream directStream = file.getDirectInputStream(file.getCapacity());
        double sumbytes = 0;
        double ops = 0;
        long _range = file.getCapacity() - ((long) buf.capacity());
        double range = (double) _range;
        Random random = new Random();

        long start = System.currentTimeMillis();
        while (ops < loop) {
            if (buffered) {
                // buffered path: synchronous read through the buffered stream
                buf.clear();
                double _offset = range * random.nextDouble();
                long offset = (long) _offset;
                bufferedStream.seek(offset);
                double ret = (double) bufferedStream.read(buf.getByteBuffer());
                if (ret > 0) {
                    sumbytes = sumbytes + ret;
                    ops = ops + 1.0;
                } else {
                    break;
                }
            } else {
                // direct path: asynchronous read, block on the future for the result
                buf.clear();
                double _offset = range * random.nextDouble();
                long offset = (long) _offset;
                directStream.seek(offset);
                double ret = (double) directStream.read(buf).get().getLen();
                if (ret > 0) {
                    sumbytes = sumbytes + ret;
                    ops = ops + 1.0;
                } else {
                    break;
                }
            }
        }
        long end = System.currentTimeMillis();

        double executionTime = ((double) (end - start)) / 1000.0;
        double throughput = 0.0;
        double latency = 0.0;
        double sumbits = sumbytes * 8.0;
        if (executionTime > 0) {
            throughput = sumbits / executionTime / 1000.0 / 1000.0;
            latency = 1000000.0 * executionTime / ops;
        }
        bufferedStream.close();
        directStream.close();

        System.out.println("execution time " + executionTime);
        System.out.println("ops " + ops);
        System.out.println("sumbytes " + sumbytes);
        System.out.println("throughput " + throughput);
        System.out.println("latency " + latency);
        fs.getStatistics().print("close");
    }
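The example hands buffers to warmUp() through a ConcurrentLinkedQueue, which is the classic free-list shape. A minimal buffer-pool sketch along those lines, assuming plain NIO buffers rather than Crail's CrailBuffer (pool sizing and capacities are illustrative):

    import java.nio.ByteBuffer;
    import java.util.concurrent.ConcurrentLinkedQueue;

    class BufferPool {
        private final ConcurrentLinkedQueue<ByteBuffer> pool = new ConcurrentLinkedQueue<>();

        BufferPool(int buffers, int capacity) {
            for (int i = 0; i < buffers; i++) {
                pool.offer(ByteBuffer.allocateDirect(capacity));
            }
        }

        ByteBuffer acquire(int capacity) {
            ByteBuffer buf = pool.poll();
            // Fall back to a fresh allocation when the pool is empty.
            return buf != null ? buf : ByteBuffer.allocateDirect(capacity);
        }

        void release(ByteBuffer buf) {
            buf.clear();
            pool.offer(buf);
        }
    }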
From source file:com.nearinfinity.hbase.dsl.HBase.java
    private Queue<Delete> getDeletes(byte[] tableName) {
        Queue<Delete> queue = deletesMap.get(tableName);
        if (queue == null) {
            queue = new ConcurrentLinkedQueue<Delete>();
            // Note: this check-then-put is not atomic. Without external
            // synchronization, two racing callers can each create a queue
            // and one mapping is silently overwritten.
            deletesMap.put(tableName, queue);
        }
        return queue;
    }
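A hedged race-free variant of the same lookup using putIfAbsent. String keys are substituted for byte[] here because byte[] lacks value-based equals/hashCode (the HBase original presumably relies on a comparator-based map), and the payload type is simplified:

    import java.util.Queue;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentLinkedQueue;
    import java.util.concurrent.ConcurrentMap;

    class DeleteBuffer {
        private final ConcurrentMap<String, Queue<String>> deletesMap = new ConcurrentHashMap<>();

        Queue<String> getDeletes(String tableName) {
            Queue<String> queue = deletesMap.get(tableName);
            if (queue == null) {
                queue = new ConcurrentLinkedQueue<>();
                // putIfAbsent keeps the mapping atomic; keep whichever queue won.
                Queue<String> existing = deletesMap.putIfAbsent(tableName, queue);
                if (existing != null) {
                    queue = existing;
                }
            }
            return queue;
        }
    }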
From source file:ua.pp.serga.socketio.IOConnection.java
    /**
     * Transport connected.
     *
     * {@link IOTransport} calls this when a connection is established.
     */
    public void transportConnected() {
        setState(STATE_READY);
        if (reconnectTask != null) {
            reconnectTask.cancel();
            reconnectTask = null;
        }
        resetTimeout();
        synchronized (outputBuffer) {
            if (transport.canSendBulk()) {
                // Swap in a fresh queue so the drained snapshot stays stable
                // while producers keep writing.
                ConcurrentLinkedQueue<String> outputBuffer = this.outputBuffer;
                this.outputBuffer = new ConcurrentLinkedQueue<String>();
                try {
                    // DEBUG
                    String[] texts = outputBuffer.toArray(new String[outputBuffer.size()]);
                    logger.info("Bulk start:");
                    for (String text : texts) {
                        logger.info("> " + text);
                    }
                    logger.info("Bulk end");
                    // DEBUG END
                    transport.sendBulk(texts);
                } catch (IOException e) {
                    // Sending failed: restore the unsent messages.
                    this.outputBuffer = outputBuffer;
                }
            } else {
                String text;
                while ((text = outputBuffer.poll()) != null)
                    sendPlain(text);
            }
            this.keepAliveInQueue = false;
        }
    }
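The interesting idiom here is swapping the whole queue reference to get a stable snapshot for a bulk send, and restoring the old queue if the send fails. A minimal sketch of that swap-and-drain pattern, with sendBulk() as a stand-in for the real transport call:

    import java.util.concurrent.ConcurrentLinkedQueue;

    class BulkSender {
        private ConcurrentLinkedQueue<String> outputBuffer = new ConcurrentLinkedQueue<>();

        // enqueue() and flushBulk() share a monitor so a message can never be
        // added to a queue that has already been drained and discarded.
        synchronized void enqueue(String message) {
            outputBuffer.offer(message);
        }

        synchronized void flushBulk() {
            // Swap in a fresh queue; drain the old one as a stable snapshot.
            ConcurrentLinkedQueue<String> drained = outputBuffer;
            outputBuffer = new ConcurrentLinkedQueue<>();
            String[] batch = drained.toArray(new String[0]);
            try {
                sendBulk(batch);
            } catch (Exception e) {
                outputBuffer = drained; // restore unsent messages, as the example does
            }
        }

        private void sendBulk(String[] batch) throws Exception {
            for (String s : batch) {
                System.out.println("> " + s);
            }
        }
    }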
From source file:com.btoddb.fastpersitentqueue.JournalMgrIT.java
    @Test
    public void testThreading() throws IOException, ExecutionException {
        final int numEntries = 10000;
        final int numPushers = 3;
        int numPoppers = 3;
        final Random pushRand = new Random(1000L);
        final Random popRand = new Random(1000000L);
        final ConcurrentLinkedQueue<FpqEntry> events = new ConcurrentLinkedQueue<FpqEntry>();
        final AtomicInteger pusherFinishCount = new AtomicInteger();
        final AtomicInteger numPops = new AtomicInteger();
        final AtomicLong pushSum = new AtomicLong();
        final AtomicLong popSum = new AtomicLong();

        mgr.setMaxJournalFileSize(1000);
        mgr.init();

        ExecutorService execSrvc = Executors.newFixedThreadPool(numPushers + numPoppers);
        Set<Future> futures = new HashSet<Future>();

        // start pushing
        for (int i = 0; i < numPushers; i++) {
            Future future = execSrvc.submit(new Runnable() {
                @Override
                public void run() {
                    for (int i = 0; i < numEntries; i++) {
                        try {
                            long x = idGen.incrementAndGet();
                            FpqEntry entry = mgr.append(new FpqEntry(x, new byte[100]));
                            events.offer(entry);
                            pushSum.addAndGet(x);
                            if (x % 500 == 0) {
                                System.out.println("pushed ID = " + x);
                            }
                            Thread.sleep(pushRand.nextInt(5));
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                    pusherFinishCount.incrementAndGet();
                }
            });
            futures.add(future);
        }

        // start popping
        for (int i = 0; i < numPoppers; i++) {
            Future future = execSrvc.submit(new Runnable() {
                @Override
                public void run() {
                    while (pusherFinishCount.get() < numPushers || !events.isEmpty()) {
                        try {
                            FpqEntry entry;
                            while (null != (entry = events.poll())) {
                                if (entry.getId() % 500 == 0) {
                                    System.out.println("popped ID = " + entry.getId());
                                }
                                popSum.addAndGet(entry.getId());
                                numPops.incrementAndGet();
                                mgr.reportTake(entry);
                                Thread.sleep(popRand.nextInt(5));
                            }
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                }
            });
            futures.add(future);
        }

        boolean finished = false;
        while (!finished) {
            try {
                for (Future f : futures) {
                    f.get();
                }
                finished = true;
            } catch (InterruptedException e) {
                // ignore
                Thread.interrupted();
            }
        }

        assertThat(numPops.get(), is(numEntries * numPushers));
        assertThat(popSum.get(), is(pushSum.get()));
        assertThat(mgr.getJournalIdMap().entrySet(), hasSize(1));
        assertThat(FileUtils.listFiles(theDir, TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE), hasSize(1));
    }
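The poppers' termination condition is worth calling out: a consumer may only stop once every producer has finished and the queue has then been found empty, checked in that order. A condensed sketch of the same idiom (thread and item counts are illustrative):

    import java.util.concurrent.ConcurrentLinkedQueue;
    import java.util.concurrent.atomic.AtomicInteger;
    import java.util.concurrent.atomic.AtomicLong;

    class DrainUntilDone {
        static final int PRODUCERS = 3, ITEMS = 1000;
        static final ConcurrentLinkedQueue<Long> queue = new ConcurrentLinkedQueue<>();
        static final AtomicInteger finishedProducers = new AtomicInteger();
        static final AtomicLong pushed = new AtomicLong(), popped = new AtomicLong();

        public static void main(String[] args) throws InterruptedException {
            for (int p = 0; p < PRODUCERS; p++) {
                new Thread(() -> {
                    for (long i = 1; i <= ITEMS; i++) {
                        queue.offer(i);
                        pushed.addAndGet(i);
                    }
                    finishedProducers.incrementAndGet();
                }).start();
            }
            Thread consumer = new Thread(() -> {
                // Checking the finished-count BEFORE isEmpty() matters: once the
                // count reads PRODUCERS, no more offers can arrive, so a final
                // isEmpty() == true really means the queue is drained.
                while (finishedProducers.get() < PRODUCERS || !queue.isEmpty()) {
                    Long v = queue.poll();
                    if (v != null) {
                        popped.addAndGet(v);
                    }
                }
            });
            consumer.start();
            consumer.join();
            System.out.println(pushed.get() == popped.get()); // true
        }
    }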
From source file:net.dv8tion.jda.core.audio.AudioConnection.java
    private synchronized void setupReceiveThread() {
        if (receiveThread == null) {
            receiveThread = new Thread(AudioManagerImpl.AUDIO_THREADS, threadIdentifier + " Receiving Thread") {
                @Override
                public void run() {
                    try {
                        udpSocket.setSoTimeout(1000);
                    } catch (SocketException e) {
                        LOG.log(e);
                    }
                    while (!udpSocket.isClosed() && !this.isInterrupted()) {
                        DatagramPacket receivedPacket = new DatagramPacket(new byte[1920], 1920);
                        try {
                            udpSocket.receive(receivedPacket);

                            if (receiveHandler != null
                                    && (receiveHandler.canReceiveUser() || receiveHandler.canReceiveCombined())
                                    && webSocket.getSecretKey() != null) {
                                if (!couldReceive) {
                                    couldReceive = true;
                                    sendSilentPackets();
                                }
                                AudioPacket decryptedPacket = AudioPacket.decryptAudioPacket(receivedPacket,
                                        webSocket.getSecretKey());

                                int ssrc = decryptedPacket.getSSRC();
                                final long userId = ssrcMap.get(ssrc);
                                Decoder decoder = opusDecoders.get(ssrc);
                                if (userId == ssrcMap.getNoEntryValue()) {
                                    byte[] audio = decryptedPacket.getEncodedAudio();

                                    // If the bytes are silence, then this was caused by a user joining the voice
                                    // channel, and as such, we haven't yet received information to pair the SSRC
                                    // with the userId.
                                    if (!Arrays.equals(audio, silenceBytes))
                                        LOG.debug("Received audio data with an unknown SSRC id. Ignoring");

                                    continue;
                                }
                                if (decoder == null) {
                                    decoder = new Decoder(ssrc);
                                    opusDecoders.put(ssrc, decoder);
                                }
                                if (!decoder.isInOrder(decryptedPacket.getSequence())) {
                                    LOG.trace("Got out-of-order audio packet. Ignoring.");
                                    continue;
                                }

                                User user = getJDA().getUserById(userId);
                                if (user == null)
                                    LOG.warn("Received audio data with a known SSRC, but the userId associated"
                                            + " with the SSRC is unknown to JDA!");
                                else {
//                                    if (decoder.wasPacketLost(decryptedPacket.getSequence()))
//                                    {
//                                        LOG.debug("Packet(s) missed. Using Opus packetloss-compensation.");
//                                        short[] decodedAudio = decoder.decodeFromOpus(null);
//                                        receiveHandler.handleUserAudio(new UserAudio(user, decodedAudio));
//                                    }
                                    short[] decodedAudio = decoder.decodeFromOpus(decryptedPacket);

                                    // If decodedAudio is null, then the Opus decode failed, so throw away the packet.
                                    if (decodedAudio == null) {
                                        LOG.trace("Received audio data but Opus failed to properly decode,"
                                                + " instead it returned an error");
                                    } else {
                                        if (receiveHandler.canReceiveUser()) {
                                            receiveHandler.handleUserAudio(new UserAudio(user, decodedAudio));
                                        }
                                        if (receiveHandler.canReceiveCombined()) {
                                            Queue<Pair<Long, short[]>> queue = combinedQueue.get(user);
                                            if (queue == null) {
                                                queue = new ConcurrentLinkedQueue<>();
                                                combinedQueue.put(user, queue);
                                            }
                                            queue.add(Pair.<Long, short[]>of(System.currentTimeMillis(), decodedAudio));
                                        }
                                    }
                                }
                            } else if (couldReceive) {
                                couldReceive = false;
                                sendSilentPackets();
                            }
                        } catch (SocketTimeoutException e) {
                            // Ignore. We set a low timeout so that we won't block forever and can shut the loop down.
                        } catch (SocketException e) {
                            // The socket was closed while we were listening for the next packet.
                            // This is expected. Ignore the exception. The thread will exit during the next while
                            // iteration because udpSocket.isClosed() will return true.
                        } catch (Exception e) {
                            LOG.log(e);
                        }
                    }
                }
            };
            receiveThread.setDaemon(true);
            receiveThread.start();
        }

        if (receiveHandler.canReceiveCombined()) {
            setupCombinedExecutor();
        }
    }
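The combinedQueue branch buffers timestamped audio per user so a separate mixer can combine it later. A simplified sketch of that per-key buffering, using long[] pairs in place of JDA's Pair<Long, short[]>; the freshness cutoff in drainFresh() is an illustrative addition, not part of the original:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;
    import java.util.Queue;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentLinkedQueue;

    class CombinedAudioBuffer {
        private final Map<Long, Queue<long[]>> byUser = new ConcurrentHashMap<>();

        void add(long userId, long timestampMs, long frame) {
            // One queue per user, created atomically on first use.
            byUser.computeIfAbsent(userId, id -> new ConcurrentLinkedQueue<>())
                  .offer(new long[] { timestampMs, frame });
        }

        // Poll each user's queue, keeping only frames newer than the cutoff.
        List<long[]> drainFresh(long nowMs, long maxAgeMs) {
            List<long[]> fresh = new ArrayList<>();
            for (Queue<long[]> q : byUser.values()) {
                long[] entry;
                while ((entry = q.poll()) != null) {
                    if (nowMs - entry[0] <= maxAgeMs) {
                        fresh.add(entry);
                    }
                }
            }
            return fresh;
        }
    }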
From source file:org.languagetool.rules.spelling.suggestions.SuggestionChangesTest.java
    public void testChanges() throws IOException, InterruptedException {

        File configFile = new File(System.getProperty("config", "SuggestionChangesTestConfig.json"));
        ObjectMapper mapper = new ObjectMapper(new JsonFactory().enable(JsonParser.Feature.ALLOW_COMMENTS));
        SuggestionChangesTestConfig config = mapper.readValue(configFile, SuggestionChangesTestConfig.class);

        SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd_HH:mm:ss");
        String timestamp = dateFormat.format(new Date());
        Path loggingFile = Paths.get(config.logDir, String.format("suggestionChangesExperiment_%s.log", timestamp));
        Path datasetFile = Paths.get(config.logDir, String.format("suggestionChangesExperiment_%s.csv", timestamp));

        BufferedWriter writer = Files.newBufferedWriter(loggingFile);
        CSVPrinter datasetWriter = new CSVPrinter(Files.newBufferedWriter(datasetFile),
                CSVFormat.DEFAULT.withEscape('\\'));
        List<String> datasetHeader = new ArrayList<>(
                Arrays.asList("sentence", "correction", "covered", "replacement", "dataset_id"));

        SuggestionsChanges.init(config, writer);
        writer.write("Evaluation configuration: \n");
        String configContent = String.join("\n", Files.readAllLines(configFile.toPath()));
        writer.write(configContent);
        writer.write("\nRunning experiments: \n");
        int experimentId = 0;
        for (SuggestionChangesExperiment experiment : SuggestionsChanges.getInstance().getExperiments()) {
            experimentId++;
            writer.write(String.format("#%d: %s%n", experimentId, experiment));
            datasetHeader.add(String.format("experiment_%d_suggestions", experimentId));
            datasetHeader.add(String.format("experiment_%d_metadata", experimentId));
            datasetHeader.add(String.format("experiment_%d_suggestions_metadata", experimentId));
        }
        writer.newLine();
        datasetWriter.printRecord(datasetHeader);

        BlockingQueue<SuggestionTestData> tasks = new LinkedBlockingQueue<>(1000);
        ConcurrentLinkedQueue<Pair<SuggestionTestResultData, String>> results = new ConcurrentLinkedQueue<>();
        List<SuggestionTestThread> threads = new ArrayList<>();
        for (int i = 0; i < Runtime.getRuntime().availableProcessors(); i++) {
            SuggestionTestThread worker = new SuggestionTestThread(tasks, results);
            worker.start();
            threads.add(worker);
        }

        // Thread for writing results from worker threads into CSV
        Thread logger = new Thread(() -> {
            try {
                long messages = 0;
                //noinspection InfiniteLoopStatement
                while (true) {
                    Pair<SuggestionTestResultData, String> message = results.poll();
                    if (message != null) {
                        writer.write(message.getRight());

                        SuggestionTestResultData result = message.getLeft();
                        if (result != null && result.getSuggestions() != null
                                && !result.getSuggestions().isEmpty()
                                && result.getSuggestions().stream()
                                        .noneMatch(m -> m.getSuggestedReplacements() == null
                                                || m.getSuggestedReplacements().isEmpty())) {
                            // Only dereference result once it is known to be non-null.
                            int datasetId = 1 + config.datasets.indexOf(result.getInput().getDataset());
                            List<Object> record = new ArrayList<>(Arrays.asList(result.getInput().getSentence(),
                                    result.getInput().getCorrection(), result.getInput().getCovered(),
                                    result.getInput().getReplacement(), datasetId));

                            for (RuleMatch match : result.getSuggestions()) {
                                List<String> suggestions = match.getSuggestedReplacements();
                                record.add(mapper.writeValueAsString(suggestions));
                                // features extracted by SuggestionsOrdererFeatureExtractor
                                record.add(mapper.writeValueAsString(match.getFeatures()));
                                List<SortedMap<String, Float>> suggestionsMetadata = new ArrayList<>();
                                for (SuggestedReplacement replacement : match.getSuggestedReplacementObjects()) {
                                    suggestionsMetadata.add(replacement.getFeatures());
                                }
                                record.add(mapper.writeValueAsString(suggestionsMetadata));
                            }
                            datasetWriter.printRecord(record);
                        }

                        if (++messages % 1000 == 0) {
                            writer.flush();
                            System.out.printf("Evaluated %d corrections.%n", messages);
                        }
                    }
                }
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        });
        logger.setDaemon(true);
        logger.start();

        // format straight from database dump
        String[] header = { "id", "sentence", "correction", "language", "rule_id", "suggestion_pos",
                "accept_language", "country", "region", "created_at", "updated_at", "covered", "replacement",
                "text_session_id", "client" };

        int datasetId = 0;
        // read data, send to worker threads via queue
        for (SuggestionChangesDataset dataset : config.datasets) {
            writer.write(String.format("Evaluating dataset #%d: %s.%n", ++datasetId, dataset));
            CSVFormat format = CSVFormat.DEFAULT;
            if (dataset.type.equals("dump")) {
                format = format.withEscape('\\').withNullString("\\N").withHeader(header);
            } else if (dataset.type.equals("artificial")) {
                format = format.withEscape('\\').withFirstRecordAsHeader();
            }
            try (CSVParser parser = new CSVParser(new FileReader(dataset.path), format)) {
                for (CSVRecord record : parser) {
                    String lang = record.get("language");
                    String rule = dataset.type.equals("dump") ? record.get("rule_id") : "";
                    String covered = record.get("covered");
                    String replacement = record.get("replacement");
                    String sentence = record.get("sentence");
                    String correction = record.isSet("correction") ? record.get("correction") : "";
                    String acceptLanguage = dataset.type.equals("dump") ? record.get("accept_language") : "";

                    if (sentence == null || sentence.trim().isEmpty()) {
                        continue;
                    }
                    if (!config.language.equals(lang)) {
                        continue; // TODO handle auto maybe?
                    }
                    if (dataset.type.equals("dump") && !config.rule.equals(rule)) {
                        continue;
                    }

                    // correction column missing in export from doccano; workaround
                    if (dataset.enforceCorrect && !record.isSet("correction")) {
                        throw new IllegalStateException("enforceCorrect in dataset configuration enabled,"
                                + " but column 'correction' is not set for entry " + record);
                    }

                    if (dataset.type.equals("dump") && dataset.enforceAcceptLanguage) {
                        if (acceptLanguage != null) {
                            String[] entries = acceptLanguage.split(",", 2);
                            if (entries.length == 2) {
                                String userLanguage = entries[0]; // TODO: what to do with e.g. de-AT,de-DE;...
                                if (!config.language.equals(userLanguage)) {
                                    continue;
                                }
                            }
                        }
                    }
                    tasks.put(new SuggestionTestData(lang, sentence, covered, replacement, correction, dataset));
                }
            }
        }
        for (Thread t : threads) {
            t.join();
        }
        logger.join(10000L);
        logger.interrupt();
        datasetWriter.close();
    }
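The results hand-off is the core ConcurrentLinkedQueue usage here: worker threads offer finished entries and a daemon logger polls them off. A condensed sketch of that pattern; the sleep-on-empty back-off is an addition for the sketch, where the original simply spins:

    import java.util.concurrent.ConcurrentLinkedQueue;

    class ResultDrain {
        static final ConcurrentLinkedQueue<String> results = new ConcurrentLinkedQueue<>();

        public static void main(String[] args) throws InterruptedException {
            Thread logger = new Thread(() -> {
                while (true) {
                    String line = results.poll();
                    if (line != null) {
                        System.out.println(line);
                    } else {
                        try {
                            Thread.sleep(10); // back off instead of spinning hot
                        } catch (InterruptedException e) {
                            return;
                        }
                    }
                }
            });
            logger.setDaemon(true); // daemon: it must not keep the JVM alive
            logger.start();

            // Any thread can offer results without coordination.
            for (int i = 0; i < 5; i++) {
                results.offer("result " + i);
            }
            Thread.sleep(100); // give the logger time to drain before exit
        }
    }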
From source file:com.spotify.heroic.metadata.elasticsearch.MetadataBackendKV.java
    /**
     * Collect the result of a list of operations and convert into a
     * {@link com.spotify.heroic.metadata.DeleteSeries}.
     *
     * @return a {@link eu.toolchain.async.StreamCollector}
     */
    private StreamCollector<Void, DeleteSeries> newDeleteCollector() {
        return new StreamCollector<Void, DeleteSeries>() {
            final ConcurrentLinkedQueue<Throwable> errors = new ConcurrentLinkedQueue<>();

            @Override
            public void resolved(final Void result) throws Exception {
            }

            @Override
            public void failed(final Throwable cause) throws Exception {
                errors.add(cause);
            }

            @Override
            public void cancelled() throws Exception {
            }

            @Override
            public DeleteSeries end(final int resolved, final int failed, final int cancelled) throws Exception {
                final List<RequestError> errors = this.errors.stream().map(QueryError::fromThrowable)
                        .collect(Collectors.toList());
                return new DeleteSeries(errors, resolved, failed + cancelled);
            }
        };
    }
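Collecting failures from callbacks that fire on arbitrary threads is a natural fit for ConcurrentLinkedQueue, since add() needs no locking. A minimal sketch of the same idiom with the collector interface stripped away (class and method names are illustrative):

    import java.util.List;
    import java.util.concurrent.ConcurrentLinkedQueue;
    import java.util.stream.Collectors;

    class ErrorCollector {
        private final ConcurrentLinkedQueue<Throwable> errors = new ConcurrentLinkedQueue<>();

        // Safe to call from any callback thread, no synchronization required.
        void failed(Throwable cause) {
            errors.add(cause);
        }

        // Snapshot the accumulated failures once all operations have completed.
        List<String> summarize() {
            return errors.stream().map(Throwable::getMessage).collect(Collectors.toList());
        }
    }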
From source file:org.apache.streams.sysomos.provider.SysomosProvider.java
    private Queue<StreamsDatum> constructQueue() {
        return new ConcurrentLinkedQueue<>();
    }
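Returning the Queue interface rather than the concrete type keeps callers decoupled from the implementation. A hedged sketch of how such a factory could also offer a bounded alternative when back-pressure is wanted (class and method names are illustrative):

    import java.util.Queue;
    import java.util.concurrent.ConcurrentLinkedQueue;
    import java.util.concurrent.LinkedBlockingQueue;

    class QueueFactory {
        // Unbounded and lock-free; offer() never blocks or fails.
        static Queue<String> unbounded() {
            return new ConcurrentLinkedQueue<>();
        }

        // Bounded alternative for back-pressure; offer() returns false
        // once 'capacity' elements are queued.
        static Queue<String> bounded(int capacity) {
            return new LinkedBlockingQueue<>(capacity);
        }
    }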
From source file:com.nearinfinity.hbase.dsl.HBase.java
    private Queue<Put> getPuts(byte[] tableName) {
        Queue<Put> queue = putsMap.get(tableName);
        if (queue == null) {
            queue = new ConcurrentLinkedQueue<Put>();
            // Same non-atomic check-then-put as getDeletes() above; racing
            // callers can each install a queue, with one being overwritten.
            putsMap.put(tableName, queue);
        }
        return queue;
    }