Example usage for java.util LinkedList peek

List of usage examples for java.util LinkedList peek

Introduction

On this page you can find example usages of java.util.LinkedList.peek().

Prototype

public E peek() 

Document

Retrieves, but does not remove, the head (first element) of this list.
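
Note that, per the Queue contract, peek() simply returns null when the list is empty, whereas element() throws a NoSuchElementException. A minimal sketch of that difference (the class name PeekVsElement is illustrative):

import java.util.LinkedList;
import java.util.NoSuchElementException;

public class PeekVsElement {

    public static void main(String[] args) {
        LinkedList<String> empty = new LinkedList<String>();

        // peek() on an empty list returns null instead of throwing
        System.out.println("peek() on empty list: " + empty.peek());

        // element() on an empty list throws NoSuchElementException
        try {
            empty.element();
        } catch (NoSuchElementException e) {
            System.out.println("element() threw: " + e);
        }
    }
}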

Usage

From source file:Main.java

import java.util.LinkedList;

public class Main {

    public static void main(String[] args) {

        // create a LinkedList
        LinkedList<String> list = new LinkedList<String>();

        // add some elements
        list.add("Hello");
        list.add("from java2s.com");
        list.add("10");

        // print the list
        System.out.println("LinkedList:" + list);

        // peek at the head of the list
        System.out.println("Head of the list:" + list.peek());
    }
}
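
Running this example prints the whole list and then its head, which peek() leaves in place:

LinkedList:[Hello, from java2s.com, 10]
Head of the list:Hello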

From source file:com.linuxbox.util.queueservice.mongodb.JavaQueueService.java

@Override
public synchronized QueueEntry dequeue(String identifier) throws QueueServiceException {
    LinkedList<QueueEntry> list = queue.get(identifier);
    if (list == null) {
        return null;
    }
    QueueEntry entry = list.removeFirst();
    if (list.peek() == null) {
        queue.remove(identifier);
    }
    dequeued.push(entry);
    return (entry);
}

From source file:com.addthis.hydra.data.query.QueryElementNode.java

public Iterator<DataTreeNode> getNodes(LinkedList<DataTreeNode> stack) {
    List<DataTreeNode> ret = null;
    if (up()) {
        ret = new ArrayList<>(1);
        ret.add(stack.get(1));
        return ret.iterator();
    }
    DataTreeNode parent = stack.peek();
    DataTreeNode defaultNode = null;
    if (defaultValue != null) {
        defaultNode = new VirtualTreeNode(defaultValue, defaultHits);
    }
    try {
        DataTreeNode tmp;
        if (path != null) {
            DataTreeNode refnode = followPath(parent.getTreeRoot(), path);
            return refnode != null ? new ReferencePathIterator(refnode, parent) : null;
        }
        if (trap != null) {
            for (String name : trap) {
                for (ClosableIterator<DataTreeNode> iter = parent.getIterator(); iter.hasNext();) {
                    tmp = iter.next();
                    if (regex()) {
                        if (tmp.getName().matches(name)) {
                            iter.close();
                            return null;
                        }
                    } else {
                        if (tmp.getName().equals(name)) {
                            iter.close();
                            return null;
                        }
                    }
                }
            }
        }
        if ((match == null) && (regex == null) && (data == null)) {
            Iterator<DataTreeNode> result = parent.getIterator();
            if (result.hasNext() || (defaultNode == null)) {
                return result;
            } else {
                return Iterators.singletonIterator(defaultNode);
            }
        }
        ret = new LinkedList<>();
        if (match != null) {
            if (regex()) {
                if (regexPatterns == null) {
                    regexPatterns = new Pattern[match.length];
                    for (int i = 0; i < match.length; i++) {
                        regexPatterns[i] = Pattern.compile(match[i]);
                    }
                }
                for (Iterator<DataTreeNode> iter = parent.getIterator(); iter.hasNext();) {
                    tmp = iter.next();
                    for (Pattern name : regexPatterns) {
                        if (name.matcher(tmp.getName()).matches() ^ not()) {
                            ret.add(tmp);
                        }
                    }
                }
            } else if (range()) {
                if (match.length == 0) {
                    return parent.getIterator();
                } else if (match.length == 1) {
                    return parent.getIterator(match[0]);
                } else {
                    ArrayList<Iterator<DataTreeNode>> metaIterator = new ArrayList<>();
                    for (String name : match) {
                        metaIterator.add(parent.getIterator(name));
                    }
                    return Iterators.concat(metaIterator.iterator());
                }
            } else if (rangeStrict()) {
                return parent.getIterator(match.length > 0 ? match[0] : null,
                        match.length > 1 ? match[1] : null);
            } else if (data == null) {
                return new LazyNodeMatch(parent, match, defaultNode);
            } else {
                for (String name : match) {
                    DataTreeNode find = parent.getNode(name);
                    if (find != null) {
                        ret.add(find);
                    }
                }
            }
        }
        if (data != null) {
            if (regex()) {
                if (parent.getDataMap() != null) {
                    for (Map.Entry<String, TreeNodeData> actor : parent.getDataMap().entrySet()) {
                        int memSize = CodecBin2.encodeBytes(actor.getValue()).length;
                        ReadTreeNode memNode = new ReadTreeNode(actor.getKey(), memSize);
                        ret.add(memNode);
                    }
                }
            } else {
                DataTreeNodeActor actor = parent.getData(data);
                if (actor != null) {
                    Collection<DataTreeNode> nodes = actor.onNodeQuery(dataKey);
                    if (nodes != null) {
                        ret.addAll(nodes);
                    }
                }
            }
        }
    } catch (RuntimeException ex) {
        throw ex;
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    if ((ret.size() == 0) && (defaultNode != null)) {
        return Iterators.singletonIterator(defaultNode);
    } else {
        return ret.iterator();
    }
}

From source file:jp.co.atware.solr.geta.GETAssocComponent.java

/**
 * Converts the result returned by GETAssoc, read from the given input stream, into a <code>NamedList</code>.
 * 
 * @param inputStream the GETAssoc response stream
 * @return the converted <code>NamedList</code>
 * @throws FactoryConfigurationError
 * @throws IOException
 */
protected NamedList<Object> convertResult(InputStream inputStream)
        throws FactoryConfigurationError, IOException {
    NamedList<Object> result = new NamedList<Object>();
    LinkedList<NamedList<Object>> stack = new LinkedList<NamedList<Object>>();
    stack.push(result);
    try {
        XMLStreamReader xml = XMLInputFactory.newInstance().createXMLStreamReader(inputStream);
        while (xml.hasNext()) {
            switch (xml.getEventType()) {
            case XMLStreamConstants.START_ELEMENT:
                NamedList<Object> element = new NamedList<Object>();
                stack.peek().add(xml.getName().toString(), element);
                stack.push(element);
                for (int i = 0; i < xml.getAttributeCount(); i++) {
                    String name = xml.getAttributeName(i).toString();
                    String value = xml.getAttributeValue(i);
                    ValueOf valueOf = valueTransMap.get(name);
                    if (valueOf != null) {
                        try {
                            element.add(name, valueOf.toValue(value));
                        } catch (NumberFormatException e) {
                            element.add(name, value);
                        }
                    } else {
                        element.add(name, value);
                    }
                }
                break;
            case XMLStreamConstants.END_ELEMENT:
                stack.pop();
                break;
            default:
                break;
            }
            xml.next();

        }
        xml.close();
    } catch (XMLStreamException e) {
        throw new IOException(e);
    }

    LOG.debug(result.toString());
    return result;
}

From source file:bamboo.trove.full.FullReindexWarcManager.java

private void checkPersistence() {
    // Persist progress back to the database if we can
    LinkedList<ToIndex> iHopeThisIsDone = (LinkedList<ToIndex>) allBatches.peek();
    if (iHopeThisIsDone == null)
        return;

    boolean itIsDone = false;
    boolean keepGoing = true;
    long warcId = 0;

    // Until we find something still active, keep trying
    while (keepGoing) {
        ToIndex warcToIndex = iHopeThisIsDone.peek();
        if (warcToIndex == null) {
            itIsDone = true;
            keepGoing = false;
            continue;
        }
        warcId = warcToIndex.getId();
        if (!warcToIndex.hasBeenRetrieved) {
            // We haven't indexed this far yet!
            keepGoing = false;
            continue;
        }

        // If it is still being tracked...
        if (warcTracking.containsKey(warcId)) {
            WarcProgressManager warc = warcTracking.get(warcId);
            // It might only be tracked because of errors... which are persisted separately
            if (warc.finished() && warc.hasErrors()) {
                iHopeThisIsDone.poll();

            } else {
                // There is work left in this batch. Stop checking
                keepGoing = false;
            }

            // Not tracked. This warc is done
        } else {
            iHopeThisIsDone.poll();
        }
    }

    // All warcs are completed in this batch
    if (itIsDone) {
        dao.updateLastId(warcId);
        persistedWarcId = warcId;
        log.info("Persisting progress for ID '{}'. Currently monitoring {} batches", warcId, allBatches.size());
        // Clear it from the head
        allBatches.poll();
    }
}

From source file:org.apache.storm.scheduler.IsolationScheduler.java

@Override
public void schedule(Topologies topologies, Cluster cluster) {
    Set<String> origBlacklist = cluster.getBlacklistedHosts();
    List<TopologyDetails> isoTopologies = isolatedTopologies(topologies.getTopologies());
    Set<String> isoIds = extractTopologyIds(isoTopologies);
    Map<String, Set<Set<ExecutorDetails>>> topologyWorkerSpecs = topologyWorkerSpecs(isoTopologies);
    Map<String, Map<Integer, Integer>> topologyMachineDistributions = topologyMachineDistributions(
            isoTopologies);
    Map<String, List<AssignmentInfo>> hostAssignments = hostAssignments(cluster);

    for (Map.Entry<String, List<AssignmentInfo>> entry : hostAssignments.entrySet()) {
        List<AssignmentInfo> assignments = entry.getValue();
        String topologyId = assignments.get(0).getTopologyId();
        Map<Integer, Integer> distribution = topologyMachineDistributions.get(topologyId);
        Set<Set<ExecutorDetails>> workerSpecs = topologyWorkerSpecs.get(topologyId);
        int numWorkers = assignments.size();

        if (isoIds.contains(topologyId) && checkAssignmentTopology(assignments, topologyId)
                && distribution.containsKey(numWorkers)
                && checkAssignmentWorkerSpecs(assignments, workerSpecs)) {
            decrementDistribution(distribution, numWorkers);
            for (AssignmentInfo ass : assignments) {
                workerSpecs.remove(ass.getExecutors());
            }
            cluster.blacklistHost(entry.getKey());
        } else {
            for (AssignmentInfo ass : assignments) {
                if (isoIds.contains(ass.getTopologyId())) {
                    cluster.freeSlot(ass.getWorkerSlot());
                }
            }
        }
    }

    Map<String, Set<WorkerSlot>> hostUsedSlots = hostToUsedSlots(cluster);
    LinkedList<HostAssignableSlots> hss = hostAssignableSlots(cluster);
    for (Map.Entry<String, Set<Set<ExecutorDetails>>> entry : topologyWorkerSpecs.entrySet()) {
        String topologyId = entry.getKey();
        Set<Set<ExecutorDetails>> executorSet = entry.getValue();
        List<Integer> workerNum = distributionToSortedAmounts(topologyMachineDistributions.get(topologyId));
        for (Integer num : workerNum) {
            HostAssignableSlots hostSlots = hss.peek();
            List<WorkerSlot> slot = hostSlots != null ? hostSlots.getWorkerSlots() : null;

            if (slot != null && slot.size() >= num) {
                hss.poll();
                cluster.freeSlots(hostUsedSlots.get(hostSlots.getHostName()));
                for (WorkerSlot tmpSlot : slot.subList(0, num)) {
                    Set<ExecutorDetails> executor = removeElemFromExecutorsSet(executorSet);
                    cluster.assign(tmpSlot, topologyId, executor);
                }
                cluster.blacklistHost(hostSlots.getHostName());
            }
        }
    }

    List<String> failedTopologyIds = extractFailedTopologyIds(topologyWorkerSpecs);
    if (failedTopologyIds.size() > 0) {
        LOG.warn("Unable to isolate topologies " + failedTopologyIds
                + ". No machine had enough worker slots to run the remaining workers for these topologies. "
                + "Clearing all other resources and will wait for enough resources for "
                + "isolated topologies before allocating any other resources.");
        // clear workers off all hosts that are not blacklisted
        Map<String, Set<WorkerSlot>> usedSlots = hostToUsedSlots(cluster);
        Set<Map.Entry<String, Set<WorkerSlot>>> entries = usedSlots.entrySet();
        for (Map.Entry<String, Set<WorkerSlot>> entry : entries) {
            if (!cluster.isBlacklistedHost(entry.getKey())) {
                cluster.freeSlots(entry.getValue());
            }
        }
    } else {
        // run default scheduler on non-isolated topologies
        Set<String> allocatedTopologies = allocatedTopologies(topologyWorkerSpecs);
        Topologies leftOverTopologies = leftoverTopologies(topologies, allocatedTopologies);
        DefaultScheduler.defaultSchedule(leftOverTopologies, cluster);
    }
    cluster.setBlacklistedHosts(origBlacklist);
}

From source file:org.commoncrawl.service.parser.server.ParseWorker.java

@Override
public Document buildDocument(InstructionsPool instructionsPool, FileOutputStream optionalOutputStream)
        throws IOException {

    //LOG.info("Build Document Called");
    List<Integer> operations = instructionsPool.operations;
    List<String> arguments = instructionsPool.arguments;
    LinkedList<Integer> nodeStack = new LinkedList<Integer>();
    LinkedList<BlockObjectInContext> blockStack = new LinkedList<BlockObjectInContext>();
    HTMLMeta meta = null;

    for (int i = 0; i < operations.size(); i++) {
        int domOperation = operations.get(i);
        String domArgument = arguments.get(i);
        //System.out.println("Operation :" + ParserInstruction.getOperationString(domOperation)+" Arg:~" + domArgument+"~");
        switch (domOperation) {
        // Open node :
        case ParserInstruction.OpenNode:
        case ParserInstruction.AddLeaf: {
            activeLink = null;
            blockInConstruction = null;
            String nodeName = domArgument.toLowerCase();

            // append a new-line at the start of a block-level tag ...
            if (domOperation == ParserInstruction.OpenNode && blockLevelHTMLTags.contains(nodeName)) {
                if (textAccumulator.length() != 0
                        && textAccumulator.charAt(textAccumulator.length() - 1) != '\n')
                    textAccumulator.append("\n");
            }

            if (nodeName.equals("meta")) {
                meta = new HTMLMeta();
            } else if (linkTypeToSrcMap.containsKey(nodeName)) {
                //LOG.info("Node:" + nodeName + " is of type Link. Adding to LinksUnderConst");
                activeLink = new LinkUnderConstruction(nodeName, blockStack.peek());
                linksUnderConstruction.push(activeLink);
            } else if (nodeName.equals("head")) {
                inHeadTag++;
            } else if (nodeName.equals("base")) {
                if (inHeadTag != 0) {
                    inBase++;
                }
            } else if (nodeName.equals("table") || nodeName.equals("div")) {
                blockInConstruction = new BlockObjectInContext(blockStack.peek(), nodeName, ++blockId);
                blockStack.push(blockInConstruction);
            } else if (nodeName.equals("tr") || nodeName.equals("th")) {
                BlockObjectInContext table = blockStack.peek();
                if (table != null) {
                    table.rowNumber++;
                    table.cellNumber = -1;
                }
            } else if (nodeName.equals("td")) {
                BlockObjectInContext table = blockStack.peek();
                if (table != null) {
                    table.cellNumber++;
                }
            }
            nodeStack.push(i);
        }
            break;
        // Close node :
        case ParserInstruction.CloseNode:
        case ParserInstruction.CloseLeaf: {
            int arguementPos = nodeStack.pop();
            String nodeName = arguments.get(arguementPos).toLowerCase();

            // append a new-line at the close of a block-level tag ...
            if (domOperation == ParserInstruction.CloseNode && blockLevelHTMLTags.contains(nodeName)) {
                if (textAccumulator.length() != 0
                        && textAccumulator.charAt(textAccumulator.length() - 1) != '\n')
                    textAccumulator.append("\n");
            }

            //LOG.info("Close Node Called on Node:" + nodeName);
            if (nodeName.equals("head")) {
                inHeadTag--;
            } else if (nodeName.equals("base")) {
                if (inHeadTag != 0) {
                    inBase--;
                }
            } else if (linkTypeToSrcMap.containsKey(nodeName)) {
                //LOG.info("Node:" + nodeName + " is a Link Type");
                LinkUnderConstruction linkPartial = linksUnderConstruction.pop();
                if (linkPartial != null) {
                    //LOG.info("POPed a partial LinkObject of type:" + linkPartial.type);
                    Link link = linkPartial.buildLink();
                    if (link != null) {
                        activeParseResult.getExtractedLinks().add(link);
                    }
                }
            } else if (nodeName.equals("table") || nodeName.equals("div")) {
                blockStack.pop();
            } else if (nodeName.equals("meta")) {
                if (meta != null) {
                    activeParseResult.getMetaTags().add(meta);
                    meta = null;
                }
            }
            if (textAccumulator.length() != 0
                    && !Character.isWhitespace(textAccumulator.charAt(textAccumulator.length() - 1))) {
                textAccumulator.append(" ");
            }

        }
            break;
        case ParserInstruction.AddText: {
            Integer arguementPos = nodeStack.peek();
            String nodeName = (arguementPos != null) ? arguments.get(arguementPos).toLowerCase() : null;
            LinkUnderConstruction link = linksUnderConstruction.peek();

            if (link != null) {
                if (link.linkText.length() != 0)
                    link.linkText += " ";
                link.linkText += domArgument.trim();
            }
            if (nodeName == null || !ignoreTextTagSet.contains(nodeName.toLowerCase())) {
                textAccumulator.append(domArgument);
            }

        }
            break;
        //        case ParserInstruction.AddContent:
        //          System.out.println("AddContent:"+domArgument);
        //          break;

        case ParserInstruction.WriteAttributeKey: {

            // grab key name .. 
            String key = domArgument.toLowerCase();

            // and lookahead one to grab attribute value ... 
            i++;

            if (i < operations.size() && operations.get(i) == ParserInstruction.WriteAttributeValue) {
                // grab value ... 
                String value = arguments.get(i);

                // if metatag capture key/value ... 
                if (meta != null) {
                    // create a new attribute object  
                    HTMLMetaAttribute attribute = new HTMLMetaAttribute();

                    attribute.setName(key);
                    attribute.setValue(value);

                    // append to meta tag 
                    meta.getAttributes().add(attribute);
                } else {
                    if (key.equals("href") && inBase != 0) {
                        if (value.length() != 0) {
                            try {
                                baseURL = new URL(value);
                            } catch (Exception e) {
                                LOG.error(CCStringUtils.stringifyException(e));
                                throw new IOException(e);
                            }
                        }
                    } else if (activeLink != null) {
                        if (linkTypeToSrcMap.get(activeLink.type).equalsIgnoreCase(key)) {
                            activeLink.linkURL = value;
                        } else {
                            activeLink.jsonObject.addProperty(key, value);
                        }
                    } else if (blockInConstruction != null) {
                        if (key.equals("class")) {
                            blockInConstruction.classId = value;
                        } else if (key.equals("id")) {
                            blockInConstruction.htmlId = value;
                        }
                    }
                }
            } else {
                // rewind and let outer control block deal with it 
                --i;
            }
        }
            break;

        case ParserInstruction.SetTitle: {
            activeParseResult.setTitle(domArgument);
        }
            break;
        //        case ParserInstruction.AddEntity:
        //          System.out.println("AddEntity:" + domArgument);
        //            break;
        //        case ParserInstruction.AddComment:
        //          System.out.println("AddComment:" + domArgument); 
        //            break;        case ParserInstruction.SetTitle:
        //          System.out.println("SetTitle:" + domArgument);
        //            break;
        //        }
        }
    }
    return null;
}

From source file:org.deegree.framework.xml.XMLFragment.java

/**
 * Reads the encoding of an XML document from its header. If no header is available,
 * <code>CharsetUtils.getSystemCharset()</code> will be returned.
 * 
 * @param pbis the pushback stream to read from
 * @return encoding of the XML document
 * @throws IOException
 */
private String readEncoding(PushbackInputStream pbis) throws IOException {
    byte[] b = new byte[80];
    String s = "";
    int rd = 0;

    LinkedList<byte[]> bs = new LinkedList<byte[]>();
    LinkedList<Integer> rds = new LinkedList<Integer>();
    while (rd < 80) {
        rds.addFirst(pbis.read(b));
        if (rds.peek() == -1) {
            rds.poll();
            break;
        }
        rd += rds.peek();
        s += new String(b, 0, rds.peek()).toLowerCase();
        bs.addFirst(b);
        b = new byte[80];
    }

    String encoding = CharsetUtils.getSystemCharset();
    if (s.indexOf("?>") > -1) {
        int p = s.indexOf("encoding=");
        if (p > -1) {
            StringBuffer sb = new StringBuffer();
            int k = p + 1 + "encoding=".length();
            while (s.charAt(k) != '"' && s.charAt(k) != '\'') {
                sb.append(s.charAt(k++));
            }
            encoding = sb.toString();
        }
    }
    while (!bs.isEmpty()) {
        pbis.unread(bs.poll(), 0, rds.poll());
    }

    return encoding;
}

From source file:org.eclipse.scada.configuration.recipe.lib.internal.DefaultExecutableFactory.java

private Method gatherMethods(final Class<?> clazz, final String methodName) {
    final LinkedList<Method> methods = new LinkedList<>();
    for (final Method m : clazz.getMethods()) {
        if (m.getName().equals(methodName)) {
            methods.add(m);
        }
    }
    if (methods.size() == 1) {
        return methods.peek();
    } else if (methods.isEmpty()) {
        throw new IllegalStateException(
                String.format("Method '%s' not found on class '%s'", methodName, clazz.getName()));
    } else {
        throw new IllegalStateException(String.format(
                "Method '%s' of class '%s' is polymorphic. This is now allowed for the recipe target classes.",
                methodName, clazz.getName()));
    }
}

From source file:org.graphipedia.wikipedia.parser.SimpleStaxParser.java

/**
 * Parses the elements in the XML file.
 * @param reader The XML stream.
 * @throws XMLStreamException when something goes wrong while parsing the XML file.
 */
private void parseElements(XMLStreamReader reader) throws XMLStreamException {
    LinkedList<String> elementStack = new LinkedList<String>();
    StringBuilder textBuffer = new StringBuilder();
    List<String> attributeValues = new ArrayList<String>();

    while (reader.hasNext()) {
        switch (reader.next()) {
        case XMLEvent.START_ELEMENT:
            String startElement = reader.getName().getLocalPart();
            elementStack.push(startElement);
            attributeValues = new ArrayList<String>();
            if (isInterestingWithAttributes(startElement)) {
                int noAttributes = reader.getAttributeCount();
                for (int i = 0; i < noAttributes; i += 1)
                    attributeValues.add(reader.getAttributeValue(i));
            }
            textBuffer.setLength(0);
            break;
        case XMLEvent.END_ELEMENT:
            String element = elementStack.pop();
            if (isInterestingWithAttributes(element)) {
                if (!handleElement(element, textBuffer.toString().trim(), attributeValues))
                    return;
            } else if (isInteresting(element)) {
                if (!handleElement(element, textBuffer.toString().trim()))
                    return;
            }
            break;
        case XMLEvent.CHARACTERS:
            if (isInteresting(elementStack.peek())) {
                textBuffer.append(reader.getText());
            }
            break;
        }
    }
}