List of usage examples for java.util.LinkedList.isEmpty()
boolean isEmpty();
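isEmpty() returns true when the list contains no elements, equivalent to size() == 0; most of the examples below use it as a loop guard, while (!list.isEmpty()), to drain a LinkedList serving as a stack, queue, or sliding buffer. A minimal, self-contained sketch of the contract (the IsEmptyDemo class and its variable names are illustrative, not taken from the sources below):

import java.util.LinkedList;

public class IsEmptyDemo {
    public static void main(String[] args) {
        LinkedList<String> tasks = new LinkedList<>();
        System.out.println(tasks.isEmpty()); // true: a new list has no elements

        tasks.add("compress");
        System.out.println(tasks.isEmpty()); // false: the list now holds one element

        // The drain idiom seen throughout the examples below:
        while (!tasks.isEmpty()) {
            System.out.println(tasks.removeFirst());
        }
        System.out.println(tasks.isEmpty()); // true again once the list is drained
    }
}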
From source file: com.icantrap.collections.dawg.DawgBuilder.java
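Here isEmpty() terminates a work-list traversal: DAWG nodes are pulled from the front of the list until it drains, each receiving a sequential index before the structure is serialized to an int array.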
/**
 * Builds the dawg based on the words added.
 *
 * @return the new Dawg instance
 */
public Dawg build() {
    compress();

    for (Node node : nodeList)
        node.index = -1;

    LinkedList<Node> stack = new LinkedList<Node>();
    nodeList.clear();
    stack.clear();
    stack.addLast(root);

    int index = 0;
    while (!stack.isEmpty()) {
        Node ptr = stack.removeFirst();
        if (-1 == ptr.index)
            ptr.index = index++;
        nodeList.add(ptr);
        for (Node nextChild : ptr.nextChildren)
            stack.addLast(nextChild);
        if (null != ptr.child)
            stack.addLast(ptr.child);
    }

    int[] ints = new int[index];
    for (Node node : nodeList)
        ints[node.index] = node.toInteger();

    return new Dawg(ints);
}
From source file: com.zjy.mongo.splitter.ShardChunkMongoSplitter.java
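In this Hadoop connector, isEmpty() detects when a shard's queue of input splits has been drained during the round-robin interleaving of MongoDB chunk splits, so exhausted shards can be dropped from the map.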
@Override
public List<InputSplit> calculateSplits() throws SplitFailedException {
    boolean targetShards = MongoConfigUtil.canReadSplitsFromShards(getConfiguration());
    DB configDB = getConfigDB();
    DBCollection chunksCollection = configDB.getCollection("chunks");

    MongoClientURI inputURI = MongoConfigUtil.getInputURI(getConfiguration());
    String inputNS = inputURI.getDatabase() + "." + inputURI.getCollection();
    DBCursor cur = chunksCollection.find(new BasicDBObject("ns", inputNS));

    int numChunks = 0;

    Map<String, String> shardsMap = null;
    if (targetShards) {
        try {
            shardsMap = getShardsMap();
        } catch (Exception e) {
            // Something went wrong when trying to read the shards data
            // from the config server, so abort the splitting.
            throw new SplitFailedException("Couldn't get shards information from config server", e);
        }
    }

    List<String> mongosHostNames = MongoConfigUtil.getInputMongosHosts(getConfiguration());
    if (targetShards && mongosHostNames.size() > 0) {
        throw new SplitFailedException(
                "Setting both mongo.input.split.read_from_shards and mongo.input.mongos_hosts"
                        + " does not make sense. ");
    }

    if (mongosHostNames.size() > 0) {
        LOG.info("Using multiple mongos instances (round robin) for reading input.");
    }

    Map<String, LinkedList<InputSplit>> shardToSplits = new HashMap<String, LinkedList<InputSplit>>();

    try {
        while (cur.hasNext()) {
            final BasicDBObject row = (BasicDBObject) cur.next();
            BasicDBObject chunkLowerBound = (BasicDBObject) row.get("min");
            BasicDBObject chunkUpperBound = (BasicDBObject) row.get("max");
            MongoInputSplit chunkSplit = createSplitFromBounds(chunkLowerBound, chunkUpperBound);
            chunkSplit.setInputURI(inputURI);
            String shard = (String) row.get("shard");
            if (targetShards) {
                // The job is configured to target shards, so replace the
                // mongos hostname with the host of the shard's servers.
                String shardHosts = shardsMap.get(shard);
                if (shardHosts == null) {
                    throw new SplitFailedException("Couldn't find shard ID: " + shard + " in config.shards.");
                }
                MongoClientURI newURI = rewriteURI(inputURI, shardHosts);
                chunkSplit.setInputURI(newURI);
            } else if (mongosHostNames.size() > 0) {
                // Multiple mongos hosts are specified, so choose a host name
                // in round-robin fashion and rewrite the URI using that
                // hostname. This evenly distributes the load to avoid
                // pegging a single mongos instance.
                String roundRobinHost = mongosHostNames.get(numChunks % mongosHostNames.size());
                MongoClientURI newURI = rewriteURI(inputURI, roundRobinHost);
                chunkSplit.setInputURI(newURI);
            }
            LinkedList<InputSplit> shardList = shardToSplits.get(shard);
            if (shardList == null) {
                shardList = new LinkedList<InputSplit>();
                shardToSplits.put(shard, shardList);
            }
            chunkSplit.setKeyField(MongoConfigUtil.getInputKey(getConfiguration()));
            shardList.add(chunkSplit);
            numChunks++;
        }
    } finally {
        MongoConfigUtil.close(configDB.getMongo());
    }

    final List<InputSplit> splits = new ArrayList<InputSplit>(numChunks);
    int splitIndex = 0;
    while (splitIndex < numChunks) {
        Set<String> shardSplitsToRemove = new HashSet<String>();
        for (Entry<String, LinkedList<InputSplit>> shardSplits : shardToSplits.entrySet()) {
            LinkedList<InputSplit> shardSplitsList = shardSplits.getValue();
            InputSplit split = shardSplitsList.pop();
            splits.add(splitIndex, split);
            splitIndex++;
            if (shardSplitsList.isEmpty()) {
                shardSplitsToRemove.add(shardSplits.getKey());
            }
        }
        for (String shardName : shardSplitsToRemove) {
            shardToSplits.remove(shardName);
        }
    }
    return splits;
}
From source file: com.kixeye.chassis.support.metrics.aws.MetricsCloudWatchReporter.java
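Here isEmpty() short-circuits the reporter: when no PutMetricDataRequest was built from the metric registries, nothing is sent to CloudWatch.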
@Override
@SuppressWarnings("rawtypes")
public void report(SortedMap<String, Gauge> gauges, SortedMap<String, Counter> counters,
        SortedMap<String, Histogram> histograms, SortedMap<String, Meter> meters,
        SortedMap<String, Timer> timers) {
    logger.info("Starting metrics publishing to AWS CloudWatch.");

    LinkedList<PutMetricDataRequest> requests = new LinkedList<>();
    addMetricData(gauges, counters, histograms, meters, timers, requests, new Date());

    if (requests.isEmpty()) {
        logger.debug("No metric data to send to AWS.");
        return;
    }

    for (PutMetricDataRequest request : requests) {
        try {
            for (MetricDatum datum : request.getMetricData()) {
                logger.debug("Sending metric " + datum);
            }
            cloudWatch.putMetricData(request);
        } catch (Exception e) {
            logger.error("Failed to log metrics to CloudWatch discarding metrics for this attempt...", e);
            return;
        }
    }

    logger.info("Finished metrics publishing to AWS CloudWatch.");
}
From source file: com.splicemachine.derby.stream.function.merge.AbstractMergeJoinFlatMapFunction.java
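In this Splice Machine merge join, isEmpty() lets the first qualifying hash column into the candidate list without a consecutiveness check; later columns are kept only if adjacent to the last entry.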
protected void initRightScan(PeekingIterator<LocatedRow> leftPeekingIterator) throws StandardException {
    ExecRow firstHashRow = joinOperation.getKeyRow(leftPeekingIterator.peek().getRow());
    ExecRow startPosition = joinOperation.getRightResultSet().getStartPosition();
    int[] columnOrdering = getColumnOrdering(joinOperation.getRightResultSet());
    int nCols = startPosition != null ? startPosition.nColumns() : 0;
    ExecRow scanStartOverride = null;
    int[] scanKeys = null;

    // If the start row of the right table scan has as many columns as the key
    // columns of the table, we cannot narrow down the scan space any further,
    // so return the right table scan start row.
    if (nCols == columnOrdering.length) {
        scanStartOverride = startPosition;
        scanKeys = columnOrdering;
    } else {
        int[] rightHashKeys = joinOperation.getRightHashKeys();
        // Find valid hash column values to narrow down the right scan.
        // The valid hash columns must:
        // 1) not be used as a start key for the inner table scan
        // 2) be consecutive
        // 3) be a key column
        LinkedList<Pair<Integer, Integer>> hashColumnIndexList = new LinkedList<>();
        for (int i = 0; i < rightHashKeys.length; ++i) {
            if (rightHashKeys[i] > nCols - 1) {
                if ((hashColumnIndexList.isEmpty()
                        || hashColumnIndexList.getLast().getValue() == rightHashKeys[i] - 1)
                        && isKeyColumn(columnOrdering, rightHashKeys[i])) {
                    hashColumnIndexList.add(new ImmutablePair<Integer, Integer>(i, rightHashKeys[i]));
                } else {
                    break;
                }
            }
        }

        scanStartOverride = new ValueRow(nCols + hashColumnIndexList.size());
        if (startPosition != null) {
            for (int i = 1; i <= startPosition.nColumns(); ++i) {
                scanStartOverride.setColumn(i, startPosition.getColumn(i));
            }
        }
        for (int i = 0; i < hashColumnIndexList.size(); ++i) {
            Pair<Integer, Integer> hashColumnIndex = hashColumnIndexList.get(i);
            int index = hashColumnIndex.getKey();
            scanStartOverride.setColumn(nCols + i + 1, firstHashRow.getColumn(index + 1));
        }

        // The scan key should include columns
        // 1) preceding the first hash column; these columns are in the form of "col=constant"
        // 2) all hash columns that are key columns
        scanKeys = new int[hashColumnIndexList.size() + rightHashKeys[0]];
        for (int i = 0; i < rightHashKeys[0]; ++i) {
            scanKeys[i] = i;
        }
        for (int i = 0; i < hashColumnIndexList.size(); ++i) {
            Pair<Integer, Integer> hashColumnIndex = hashColumnIndexList.get(i);
            int colPos = hashColumnIndex.getValue();
            scanKeys[rightHashKeys[0] + i] = colPos;
        }
    }
    ((BaseActivation) joinOperation.getActivation()).setScanStartOverride(scanStartOverride);
    ((BaseActivation) joinOperation.getActivation()).setScanKeys(scanKeys);
    if (startPosition != null) {
        ((BaseActivation) joinOperation.getActivation()).setScanStopOverride(startPosition);
    }
}
From source file: com.asakusafw.runtime.stage.launcher.LauncherOptionsParser.java
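A simple precondition check: isEmpty() guards removeFirst() so a missing application class name fails with a clear error message instead of a NoSuchElementException.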
private String consumeApplicationClassName(LinkedList<String> rest) {
    if (rest.isEmpty()) {
        throw new IllegalArgumentException("the first argument must be target application class name");
    }
    return rest.removeFirst();
}
From source file: com.github.lindenb.jvarkit.tools.misc.BamTile.java
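Here isEmpty() guards getLast() and drives the flush loop of a sliding buffer of SAM records, both at reference-sequence boundaries and at end of input.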
@Override
public Collection<Throwable> call(final String inputName) throws Exception {
    SAMRecordIterator iter = null;
    SamReader sfr = null;
    SAMFileWriter sfw = null;
    try {
        sfr = openSamReader(inputName);
        SAMFileHeader header1 = sfr.getFileHeader();
        if (header1 == null) {
            return wrapException("File header missing");
        }
        if (header1.getSortOrder() != SAMFileHeader.SortOrder.coordinate) {
            return wrapException("File header not sorted on coordinate");
        }
        SAMFileHeader header2 = header1.clone();
        header2.addComment(getName() + ":" + getVersion() + ":" + getProgramCommandLine());
        sfw = openSAMFileWriter(header2, true);

        SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(header1);
        iter = sfr.iterator();
        LinkedList<SAMRecord> buffer = new LinkedList<>();
        for (;;) {
            SAMRecord rec = null;
            if (iter.hasNext()) {
                rec = progress.watch(iter.next());
                if (rec.getReadUnmappedFlag())
                    continue;
                if (!buffer.isEmpty()) {
                    SAMRecord last = buffer.getLast();
                    if (last.getReferenceIndex() == rec.getReferenceIndex()
                            && last.getAlignmentStart() <= rec.getAlignmentStart()
                            && last.getAlignmentEnd() >= rec.getAlignmentEnd()) {
                        continue;
                    }
                }
            }
            if (rec == null
                    || (!buffer.isEmpty() && buffer.getLast().getReferenceIndex() != rec.getReferenceIndex())) {
                while (!buffer.isEmpty()) {
                    sfw.addAlignment(buffer.removeFirst());
                }
                if (rec == null)
                    break;
            }
            buffer.add(rec);
            if (buffer.size() > 2) {
                int index = buffer.size();
                SAMRecord prev = buffer.get(index - 3);
                SAMRecord curr = buffer.get(index - 2);
                SAMRecord next = buffer.get(index - 1);
                if (prev.getAlignmentEnd() >= next.getAlignmentStart()
                        || curr.getAlignmentEnd() <= prev.getAlignmentEnd()) {
                    buffer.remove(index - 2);
                } else if (curr.getAlignmentStart() == prev.getAlignmentStart()
                        && curr.getAlignmentEnd() > prev.getAlignmentEnd()) {
                    buffer.remove(index - 3);
                }
            }
            while (buffer.size() > 3) {
                sfw.addAlignment(buffer.removeFirst());
            }
        }
        progress.finish();
        LOG.info("done");
        return Collections.emptyList();
    } catch (Exception err) {
        return wrapException(err);
    } finally {
        CloserUtil.close(iter);
        CloserUtil.close(sfr);
        CloserUtil.close(sfw);
    }
}
From source file: org.springframework.cloud.contract.verifier.util.DelegatingJsonVerifiable.java
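A minimal drain loop: isEmpty() terminates the concatenation of buffered method-name fragments into a single string.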
private String createMethodString() {
    LinkedList<String> queue = new LinkedList<>(this.methodsBuffer);
    StringBuilder stringBuffer = new StringBuilder();
    while (!queue.isEmpty()) {
        stringBuffer.append(queue.remove());
    }
    return stringBuffer.toString();
}
From source file: org.codice.ddf.security.filter.delegate.DelegateServletFilter.java
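Here isEmpty() decides whether any delegate Filter services were found in the OSGi registry; if none were, the request falls through to the original filter chain.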
@Override
public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain)
        throws IOException, ServletException {
    HttpServletRequest httpRequest = (HttpServletRequest) servletRequest;

    if (contextPolicyManager != null) {
        String contextPath = !StringUtils.isBlank(httpRequest.getContextPath()) ? httpRequest.getContextPath()
                : httpRequest.getServletPath() + httpRequest.getPathInfo();
        if (contextPolicyManager.isWhiteListed(contextPath)) {
            LOGGER.debug(
                    "Current Context path {} has been white listed by the local policy, no authentication or authorization filters will be applied.",
                    contextPath);
            filterChain.doFilter(servletRequest, servletResponse);
            return;
        }
    }

    LinkedList<ServiceReference<Filter>> serviceRefs = new LinkedList<ServiceReference<Filter>>();
    try {
        serviceRefs.addAll(ctx.getServiceReferences(Filter.class, null));
    } catch (InvalidSyntaxException e) {
        LOGGER.warn("Could not lookup service references.", e);
    }

    if (!serviceRefs.isEmpty()) {
        LOGGER.debug("Found {} filter, now filtering...", serviceRefs.size());

        ProxyFilterChain chain = new ProxyFilterChain(filterChain);

        Iterator<ServiceReference<Filter>> reverseIterator = serviceRefs.descendingIterator();
        while (reverseIterator.hasNext()) {
            ServiceReference<Filter> curReference = reverseIterator.next();
            Filter curFilter = ctx.getService(curReference);
            if (!curFilter.getClass().toString().equals(this.getClass().toString())) {
                chain.addFilter(curFilter);
            }
            reverseIterator.remove();
        }

        chain.doFilter(servletRequest, servletResponse);
    } else {
        LOGGER.debug("Did not find any filters");
        filterChain.doFilter(servletRequest, servletResponse);
    }
}
From source file: ca.uhn.fhir.context.RuntimeResourceDefinition.java
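In this HAPI FHIR example, isEmpty() distinguishes the root element (empty path) from nested children when building the dotted element path.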
private void fillBasics(StructureElement theElement, BaseRuntimeElementDefinition<?> def,
        LinkedList<String> path, BaseRuntimeDeclaredChildDefinition theChild) {
    if (path.isEmpty()) {
        path.add(def.getName());
        theElement.setName(def.getName());
    } else {
        path.add(WordUtils.uncapitalize(theChild.getElementName()));
        theElement.setName(theChild.getElementName());
    }
    theElement.setPath(StringUtils.join(path, '.'));
}
From source file: mitm.djigzo.web.services.security.HMACFilterImpl.java
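Here isEmpty() drives a breadth-first traversal of the DOM: elements are dequeued until no descendants remain, and protected form fields get an HMAC checksum along the way.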
private void calculateHMACs(MarkupWriter writer, List<String> hmacs) {
    Document document = writer.getDocument();
    if (document != null) {
        Element root = document.getRootElement();
        if (root != null) {
            LinkedList<Element> queue = new LinkedList<Element>();
            queue.add(root);
            while (!queue.isEmpty()) {
                Element element = queue.removeFirst();
                if (element == null) {
                    continue;
                }
                String elementName = element.getAttribute("name");
                if (elementName != null) {
                    elementName = elementName.trim().toLowerCase();
                }
                if (protectedElements.contains(elementName)) {
                    /*
                     * It's a protected item so we should calculate the HMAC of the value
                     */
                    String value = element.getAttribute("value");
                    String hmac;
                    try {
                        hmac = calculateHMAC(value, true /* create ASO if not exist */);
                    } catch (InvalidKeyException e) {
                        throw new DjigzoRuntimeException(e);
                    } catch (NoSuchAlgorithmException e) {
                        throw new DjigzoRuntimeException(e);
                    }
                    if (hmac == null) {
                        throw new DjigzoRuntimeException("hmac is null.");
                    }
                    if (hmacs != null) {
                        hmacs.add(hmac);
                    } else {
                        /*
                         * Add the HMAC checksum as a hidden element
                         */
                        element.element("input", "type", "hidden", "name", HMAC_PARAMETER, "value", hmac);
                    }
                }
                for (Node n : element.getChildren()) {
                    Element child = null;
                    if (n instanceof Element) {
                        child = (Element) n;
                    }
                    if (child != null)
                        queue.addLast(child);
                }
            }
        }
    }
}