Example usage for java.util Deque peek

List of usage examples for java.util Deque peek

Introduction

On this page you can find example usage of java.util.Deque#peek.

Prototype

E peek();

Source Link

Document

Retrieves, but does not remove, the head of the queue represented by this deque (in other words, the first element of this deque), or returns null if this deque is empty.

Usage

From source file:de.escalon.hypermedia.hydra.serialize.JacksonHydraSerializer.java

/**
 * Writes a JSON-LD {@code @context} object for the given bean when one is needed,
 * and pushes the bean's vocab onto the context stack.
 * <p>
 * A context is written when the bean's vocab differs from the enclosing vocab on
 * the stack (or there is no enclosing vocab yet), or when the bean defines its own
 * terms. The vocab is pushed unconditionally; the caller is expected to pop it.
 *
 * @param bean               the bean being serialized
 * @param jgen               generator to write the context fields to
 * @param serializerProvider provides the serialization config for mix-in lookup
 * @param deque              stack of vocabs for the enclosing contexts; top is current
 * @throws IOException if writing to the generator fails
 */
private void serializeContext(Object bean, JsonGenerator jgen, SerializerProvider serializerProvider,
        Deque<String> deque) throws IOException {
    try {
        // TODO use serializerProvider.getAttributes to hold a stack of contexts
        // and check if we need to write a context for the current bean at all
        // If it is in the same vocab: no context
        // If the terms are already defined in the context: no context

        SerializationConfig config = serializerProvider.getConfig();
        final Class<?> mixInClass = config.findMixInClassFor(bean.getClass());

        String vocab = getVocab(bean, mixInClass);
        Map<String, Object> terms = getTerms(bean, mixInClass);

        final String currentVocab = deque.peek();
        deque.push(vocab);

        // The vocab changes when there is no enclosing context yet, or when the
        // bean's vocab differs from the enclosing one.
        final boolean vocabChanged = currentVocab == null || !vocab.equals(currentVocab);
        // A context must be written when the vocab changes, or when the bean
        // defines terms of its own (even under an unchanged vocab).
        // TODO actually, need not repeat vocab in context if same
        final boolean mustWriteContext = vocabChanged || !terms.isEmpty();

        if (mustWriteContext) {
            // begin context
            // default context: schema.org vocab or vocab package annotation
            jgen.writeObjectFieldStart("@context");
            // TODO do not repeat vocab if already defined in current context
            if (vocabChanged) {
                jgen.writeStringField(AT_VOCAB, vocab);
            }

            for (Map.Entry<String, Object> termEntry : terms.entrySet()) {
                final Object termValue = termEntry.getValue();
                if (termValue instanceof String) {
                    jgen.writeStringField(termEntry.getKey(), termValue.toString());
                } else {
                    jgen.writeObjectField(termEntry.getKey(), termValue);
                }
            }

            jgen.writeEndObject(); // end context
        }

        // TODO build the context from @Vocab and @Term and @Expose and write it as local or external context with
        // TODO jsonld extension (using apt?)
        // TODO also allow manually created jsonld contexts
        // TODO how to define a context containing several context objects? @context is then an array of
        // TODO external context strings pointing to json-ld, and json objects containing terms
        // TODO another option: create custom vocabulary without reference to public vocabs
        // TODO support additionalType from goodrelations
    } catch (IOException e) {
        // Preserve the declared checked exception instead of hiding it in a RuntimeException.
        throw e;
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}

From source file:de.escalon.hypermedia.spring.hydra.LinkListSerializer.java

/**
 * Serializes a list of links as JSON-LD/Hydra fields of the current JSON object.
 * <p>
 * The links are first partitioned into plain links, affordances carrying action
 * descriptors, templated links, and affordances describing related collections.
 * Templated links are written as IriTemplates, collection affordances go into a
 * {@code hydra:collection} array (each with {@code hydra:manages} and optional
 * {@code hydra:operation} entries), affordances with actions get
 * {@code hydra:operation} arrays, and plain links become {@code @id} references.
 *
 * @param bean the links to serialize (parameter name per the serializer contract)
 * @param jgen generator to write fields to; assumes an object context is open
 * @param serializerProvider carries the JSON-LD context stack attribute
 * @throws IOException if writing to the generator fails
 */
@Override
public void serialize(List<Link> links, JsonGenerator jgen, SerializerProvider serializerProvider)
        throws IOException {

    try {
        // Partition the incoming links into four groups, each rendered differently.
        Collection<Link> simpleLinks = new ArrayList<Link>();
        Collection<Affordance> affordances = new ArrayList<Affordance>();
        Collection<Link> templatedLinks = new ArrayList<Link>();
        Collection<Affordance> collectionAffordances = new ArrayList<Affordance>();
        Link selfRel = null;
        for (Link link : links) {
            if (link instanceof Affordance) {
                final Affordance affordance = (Affordance) link;
                final List<ActionDescriptor> actionDescriptors = affordance.getActionDescriptors();
                if (!actionDescriptors.isEmpty()) {
                    // TODO: consider to use Link href for template even if it is not compatible
                    if (affordance.getUriTemplateComponents().hasVariables()) {
                        // TODO resolve rel against context
                        // Search templates and single-cardinality templates are rendered
                        // as IriTemplates; the rest describe related collections.
                        if ("hydra:search".equals(affordance.getRel())
                                || Cardinality.SINGLE == affordance.getCardinality()) {
                            templatedLinks.add(affordance);
                        } else {
                            collectionAffordances.add(affordance);
                        }
                    } else {
                        // if all required variables are satisfied, the url can be used as identifier
                        // by stripping optional variables
                        if (!affordance.isSelfRel() && Cardinality.COLLECTION == affordance.getCardinality()) {
                            collectionAffordances.add(affordance);
                        } else {
                            affordances.add(affordance);
                        }
                    }
                } else {
                    // Affordance without actions degrades to a plain or templated link.
                    if (affordance.isTemplated()) {
                        templatedLinks.add(affordance);
                    } else {
                        simpleLinks.add(affordance);
                    }
                }
            } else if (link.isTemplated()) {
                templatedLinks.add(link);
            } else {
                simpleLinks.add(link);
            }
            // Remember the self link; it is reused as subject/object of hydra:manages.
            if ("self".equals(link.getRel())) {
                selfRel = link;
            }
        }

        for (Link templatedLink : templatedLinks) {
            // templated affordance might turn out to have all variables satisfied or
            // only optional unsatisfied variables
            ActionDescriptor actionDescriptorForHttpGet = getActionDescriptorForHttpGet(templatedLink);
            // TODO handle rev here
            String rel = templatedLink.getRel();
            writeIriTemplate(rel, templatedLink.getHref(), templatedLink.getVariableNames(),
                    actionDescriptorForHttpGet, jgen);
        }
        // The current vocab (top of the context stack) decides which terms may be
        // written unprefixed by the action-descriptor writer.
        @SuppressWarnings("unchecked")
        Deque<LdContext> contextStack = (Deque<LdContext>) serializerProvider
                .getAttribute(JacksonHydraSerializer.KEY_LD_CONTEXT);
        String currentVocab = (contextStack != null && !contextStack.isEmpty()) ? contextStack.peek().vocab
                : null;

        // related collections
        if (!collectionAffordances.isEmpty()) {

            jgen.writeArrayFieldStart("hydra:collection");

            for (Affordance collectionAffordance : collectionAffordances) {
                jgen.writeStartObject();
                jgen.writeStringField(JsonLdKeywords.AT_TYPE, "hydra:Collection");
                PartialUriTemplateComponents templateComponents = collectionAffordance
                        .getUriTemplateComponents();
                // Only emit @id when the URI is fully resolvable (no unsatisfied
                // required variables, base not templated).
                if (!collectionAffordance.isBaseUriTemplated()
                        && !collectionAffordance.hasUnsatisfiedRequiredVariables()) {
                    String collectionUri = templateComponents.getBaseUri() + templateComponents.getQueryHead();
                    jgen.writeStringField(JsonLdKeywords.AT_ID, collectionUri);
                }
                if (templateComponents.hasVariables()) {
                    ActionDescriptor actionDescriptorForHttpGet = getActionDescriptorForHttpGet(
                            collectionAffordance);
                    writeIriTemplate("hydra:search", templateComponents.toString(),
                            templateComponents.getVariableNames(), actionDescriptorForHttpGet, jgen);
                }
                jgen.writeObjectFieldStart("hydra:manages");
                // do we have a collection holder which is not owner of the affordance?
                TypedResource collectionHolder = collectionAffordance.getCollectionHolder();
                // rev inverts the relation: the holder (or self) is the object;
                // rel keeps it: the holder (or self) is the subject.
                if (collectionAffordance.getRev() != null) {
                    jgen.writeStringField("hydra:property", collectionAffordance.getRev());
                    if (collectionHolder != null) {
                        // can't use writeObjectField, it won't inherit the context stack
                        writeCollectionHolder("hydra:object", collectionHolder, jgen);
                    } else if (selfRel != null) {
                        jgen.writeStringField("hydra:object", selfRel.getHref());
                    }
                } else if (collectionAffordance.getRel() != null) {
                    jgen.writeStringField("hydra:property", collectionAffordance.getRel());
                    if (collectionHolder != null) {
                        // can't use writeObjectField, it won't inherit the context stack
                        writeCollectionHolder("hydra:subject", collectionHolder, jgen);
                    } else if (selfRel != null) {
                        jgen.writeStringField("hydra:subject", selfRel.getHref());
                    }
                }
                jgen.writeEndObject(); // end manages

                List<ActionDescriptor> actionDescriptors = collectionAffordance.getActionDescriptors();
                if (!actionDescriptors.isEmpty()) {
                    jgen.writeArrayFieldStart("hydra:operation");
                }
                writeActionDescriptors(jgen, currentVocab, actionDescriptors);
                if (!actionDescriptors.isEmpty()) {
                    jgen.writeEndArray(); // end hydra:operation
                }

                jgen.writeEndObject(); // end collection
            }
            jgen.writeEndArray();
        }

        for (Affordance affordance : affordances) {
            final String rel = affordance.getRel();
            List<ActionDescriptor> actionDescriptors = affordance.getActionDescriptors();

            if (!actionDescriptors.isEmpty()) {
                // self operations are written directly on the enclosing object;
                // any other rel gets its own nested object.
                if (!Link.REL_SELF.equals(rel)) {
                    jgen.writeObjectFieldStart(rel); // begin rel
                }
                jgen.writeStringField(JsonLdKeywords.AT_ID, affordance.getHref());
                jgen.writeArrayFieldStart("hydra:operation");
            }

            writeActionDescriptors(jgen, currentVocab, actionDescriptors);

            if (!actionDescriptors.isEmpty()) {
                jgen.writeEndArray(); // end hydra:operation

                if (!Link.REL_SELF.equals(rel)) {
                    jgen.writeEndObject(); // end rel
                }
            }
        }

        for (Link simpleLink : simpleLinks) {
            final String rel = simpleLink.getRel();
            if (Link.REL_SELF.equals(rel)) {
                jgen.writeStringField("@id", simpleLink.getHref());
            } else {
                // IANA-registered rels are prefixed to keep them resolvable in JSON-LD.
                String linkAttributeName = IanaRels.isIanaRel(rel) ? IANA_REL_PREFIX + rel : rel;
                jgen.writeObjectFieldStart(linkAttributeName);
                jgen.writeStringField("@id", simpleLink.getHref());
                jgen.writeEndObject();
            }
        }
    } catch (IntrospectionException e) {
        throw new RuntimeException(e);
    }
}

From source file:edu.berkeley.compbio.phyloutils.HugenholtzTaxonomyService.java

/**
 * Resolves a multi-level taxonomic name to the single tree node it denotes.
 * <p>
 * Walks the levels from the most specific to the most general. At each level the
 * node ids matching that level's name are checked against the heads of the paths
 * accumulated so far; a path survives a round only if one of the level's nodes is
 * an ancestor of its head. Levels with no matching node are skipped with a debug
 * message.
 *
 * @param taxa taxonomy names ordered from the most general to the most specific level
 * @return the id of the unique matching node
 * @throws NoSuchNodeException when the classification path does not match the tree,
 *         or no unique match can be established by the time all levels are consumed
 */
@NotNull
private synchronized Integer getUniqueNodeForMultilevelName(String[] taxa) throws NoSuchNodeException {
    // Iterate most-specific-first; copy into a new list so the caller's array
    // is never reordered (ArrayList copies, so the earlier taxa.clone() was redundant).
    List<String> reverseTaxa = new ArrayList<String>(Arrays.asList(taxa));
    Collections.reverse(reverseTaxa);

    Set<Deque<Integer>> paths = null;
    HashMultimap<String, Integer> nameToIdsMap = (HashMultimap<String, Integer>) nameToIdsMapStub.get();
    HashMultimap<String, Integer> extraNameToIdsMap = (HashMultimap<String, Integer>) extraNameToIdsMapStub
            .get();

    for (String s : reverseTaxa) {
        // Prefer the primary name index; fall back to the extra index.
        Collection<Integer> matchingNodes = nameToIdsMap.get(s);
        if (matchingNodes.isEmpty()) {
            matchingNodes = extraNameToIdsMap.get(s);
        }
        if (matchingNodes.isEmpty()) {
            logger.debug("IGNORING Node " + s + " not found in " + DSStringUtils.join(taxa, "; "));
        } else {
            if (paths == null) {
                // First level with matches: each matching node seeds its own path.
                paths = new HashSet<Deque<Integer>>(matchingNodes.size());
                for (Integer node : matchingNodes) {
                    Deque<Integer> l = new LinkedList<Integer>();
                    l.add(node);
                    paths.add(l);
                }
            } else {
                BasicRootedPhylogeny<Integer> theIntegerTree = (BasicRootedPhylogeny<Integer>) theIntegerTreeStub
                        .get();

                // Keep only the paths that gain an ancestor at this level.
                // NOTE(review): if several matchingNodes are ancestors of the same
                // head, they are all prepended to the same path object — confirm
                // that is intended rather than forking the path per ancestor.
                Set<Deque<Integer>> okPaths = new HashSet<Deque<Integer>>();
                for (Deque<Integer> path : paths) {
                    Integer descendant = path.peek();
                    for (Integer ancestor : matchingNodes) {
                        if (theIntegerTree.isDescendant(ancestor, descendant)) {
                            path.addFirst(ancestor);
                            okPaths.add(path);
                        }
                    }
                }
                paths = okPaths; // ditch any paths that didn't have an ancestor added this round
            }

            if (paths.isEmpty()) {
                // we get here only if
                //  a) there was more than one live path on the last round
                //  b) none of those paths are descendants of the matches at the current level
                throw new NoSuchNodeException(
                        "Requested classification path does not match tree: " + DSStringUtils.join(taxa, "; "));
            }

            // if all the paths converge on exactly one node, call it a match, even if
            // higher levels of the tree don't match.
            // NOTE(review): this tests matchingNodes.size(), not paths.size() — confirm intended.
            if (matchingNodes.size() == 1) {
                return commonAncestor(paths);
            }
        }
    }
    throw new NoSuchNodeException("Multiple distinct matching paths: " + DSStringUtils.join(taxa, "; "));
}

From source file:net.solarnetwork.util.JavaBeanXmlSerializer.java

/**
 * Parse XML into a simple Map structure.
 * // w ww  .jav  a  2s . co m
 * @param in
 *        the input stream to parse
 * @return a Map of the XML
 */
/**
 * Parse XML into a simple Map structure.
 * <p>
 * Each element becomes a nested map attached to its parent via
 * {@code putMapValue}; a stack of currently open elements tracks nesting.
 *
 * @param in
 *        the input stream to parse
 * @return a Map of the XML, or null if the document contains no elements
 */
public Map<String, Object> parseXml(InputStream in) {
    Deque<Map<String, Object>> openElements = new LinkedList<Map<String, Object>>();
    Map<String, Object> root = null;
    XMLStreamReader reader = startParse(in);
    try {
        boolean done = false;
        while (!done) {
            int event = reader.next();
            if (event == XMLStreamConstants.END_DOCUMENT) {
                done = true;
            } else if (event == XMLStreamConstants.START_ELEMENT) {
                String localName = reader.getLocalName();
                Map<String, Object> element = new LinkedHashMap<String, Object>();
                if (openElements.isEmpty()) {
                    // First element seen: it is the document root.
                    root = element;
                } else {
                    // Attach the new element to its parent before descending.
                    putMapValue(openElements.peek(), localName, element);
                }
                openElements.push(element);
                parseElement(openElements.peek(), reader);
            } else if (event == XMLStreamConstants.END_ELEMENT) {
                openElements.pop();
            }
        }
    } catch (XMLStreamException e) {
        throw new RuntimeException(e);
    } finally {
        endParse(reader);
    }
    return root;
}

From source file:edu.upenn.cis.orchestra.workloadgenerator.Generator.java

/**
 * Finds all simple cycles in the directed peer graph induced by the mappings.
 * <p>
 * Each mapping contributes one edge from its first element to its second. For each
 * peer {@code i}, a depth-first search explores only nodes {@code > i} (plus the
 * return edge to {@code i}), so every cycle is reported exactly once, anchored at
 * its smallest node.
 *
 * @param cycles   out-parameter; cleared, then filled with the node sequences of
 *                 the cycles found (each starts and ends with its smallest node)
 * @param mappings directed edges; element 0 is the source peer index, element 1
 *                 the target peer index
 */
public void findSimpleCycles(List<List<Integer>> cycles, List<List<Object>> mappings) {
    // First, index the edges: edges.get(a) lists all targets reachable from a.
    List<List<Integer>> edges = new ArrayList<List<Integer>>();
    for (int i = 0; i < _peers.size(); i++) {
        edges.add(new ArrayList<Integer>());
    }

    for (List<Object> thisMapping : mappings) {
        edges.get((Integer) thisMapping.get(0)).add((Integer) thisMapping.get(1));
    }

    // Sort targets so cycles are discovered in a deterministic order.
    for (List<Integer> thisEdge : edges) {
        Collections.sort(thisEdge);
    }

    // Find simple cycles as follows:
    // - Handle the peers in order
    // - Find simple cycles where the smallest node in the cycle is the peer
    cycles.clear();
    for (int i = 0; i < _peers.size(); i++) {
        Deque<List<Integer>> paths = new ArrayDeque<List<Integer>>();
        List<Integer> seed = new ArrayList<Integer>();
        seed.add(i);
        paths.push(seed);
        while (!paths.isEmpty()) {
            List<Integer> path = paths.pop();
            // Expand from the last node on the path.
            for (Integer j : edges.get(path.get(path.size() - 1))) {
                if (j.equals(i)) {
                    // Back at the anchor node: record the completed cycle.
                    List<Integer> cycle = new ArrayList<Integer>(path);
                    cycle.add(j);
                    cycles.add(cycle);
                } else if (j > i && !path.contains(j)) {
                    // Only descend to larger, unvisited nodes to keep cycles simple
                    // and uniquely anchored at their smallest node.
                    List<Integer> newPath = new ArrayList<Integer>(path);
                    newPath.add(j);
                    paths.push(newPath);
                }
            }
        }
    }
}

From source file:jetbrains.exodus.entitystore.PersistentEntityStoreImpl.java

/**
 * Returns the transaction at the top of the current thread's transaction stack,
 * or {@code null} when the thread has no open transactions.
 */
@Override
@Nullable
public PersistentStoreTransaction getCurrentTransaction() {
    final Deque<PersistentStoreTransaction> stack = txns.get(Thread.currentThread());
    if (stack == null) {
        return null;
    }
    return stack.peek();
}

From source file:jetbrains.exodus.entitystore.PersistentEntityStoreImpl.java

/**
 * Removes the given transaction from the top of the current thread's transaction
 * stack, dropping the stack itself once it becomes empty.
 * <p>
 * Transactions must finish in LIFO order: only the topmost (most recently started)
 * transaction of the thread may be deregistered.
 *
 * @param txn the transaction being finished
 * @throws EntityStoreException if the thread has no open transactions, or txn is
 *         not the topmost one (a nested transaction is still open)
 */
void deregisterTransaction(@NotNull final PersistentStoreTransaction txn) {
    final Thread currentThread = Thread.currentThread();
    final Deque<PersistentStoreTransaction> stack = txns.get(currentThread);
    if (stack == null) {
        throw new EntityStoreException("Transaction was already finished");
    }
    // Identity comparison: the very same transaction object must be on top.
    if (stack.peek() != txn) {
        throw new EntityStoreException("Can't finish transaction: nested transaction is not finished");
    }
    stack.pop();
    if (stack.isEmpty()) {
        // No transactions left for this thread; drop the per-thread entry.
        txns.remove(currentThread);
    }
}

From source file:nl.knaw.huc.di.tag.tagml.importer.TAGMLListener.java

/**
 * Handles a close tag: removes the corresponding markup from the open-markup
 * stacks of all layers it participates in, closes it in the document, and — for a
 * suspend prefix — records it as suspended per layer.
 *
 * @param ctx the parsed markup-name context of the close tag
 * @return the markup that was closed, or null when an error was reported
 */
private TAGMarkup removeFromOpenMarkup(MarkupNameContext ctx) {
    // Build the fully qualified tag name: prefix + name + suffix (+ layer suffix).
    String markupName = ctx.name().getText();
    String extendedMarkupName = markupName;
    extendedMarkupName = withPrefix(ctx, extendedMarkupName);
    extendedMarkupName = withSuffix(ctx, extendedMarkupName);

    boolean isSuspend = ctx.prefix() != null && ctx.prefix().getText().equals(TAGML.SUSPEND_PREFIX);

    Set<String> layers = deduceLayers(ctx, markupName, extendedMarkupName);

    // Only non-default layers are spelled out in the layer suffix.
    boolean layerSuffixNeeded = !(layers.size() == 1 && layers.iterator().next().equals(TAGML.DEFAULT_LAYER));
    String foundLayerSuffix = layerSuffixNeeded
            ? TAGML.DIVIDER
                    + layers.stream().filter(l -> !TAGML.DEFAULT_LAYER.equals(l)).sorted().collect(joining(","))
            : "";

    extendedMarkupName += foundLayerSuffix;
    removeFromMarkupStack2(extendedMarkupName, state.allOpenMarkup);
    TAGMarkup markup = null;
    for (String l : layers) {
        state.openMarkup.putIfAbsent(l, new ArrayDeque<>());
        Deque<TAGMarkup> markupStack = state.openMarkup.get(l);
        markup = removeFromMarkupStack(extendedMarkupName, markupStack);
        if (markup == null) {
            // The tag is not on top of this layer's stack: either it was never
            // opened (error), or tags are improperly nested (error unless this
            // is a suspend, which may close markup deeper in the stack).
            AtomicReference<String> emn = new AtomicReference<>(extendedMarkupName);
            boolean markupIsOpen = markupStack.stream().map(TAGMarkup::getExtendedTag)
                    .anyMatch(et -> emn.get().equals(et));
            if (!markupIsOpen) {
                errorListener.addError("%s Close tag <%s] found without corresponding open tag.",
                        errorPrefix(ctx), extendedMarkupName);
                return null;
            } else if (!isSuspend) {
                TAGMarkup expected = markupStack.peek();
                // Markup opened before a text-variation branch must not be
                // closed inside that branch.
                if (expected.hasTag(BRANCH)) {
                    errorListener.addBreakingError(
                            "%s Markup [%s> opened before branch %s, should not be closed in a branch.",
                            errorPrefix(ctx), extendedMarkupName, currentTextVariationState().branch + 1);
                }
                String hint = l.isEmpty() ? " Use separate layers to allow for overlap." : "";
                errorListener.addBreakingError("%s Close tag <%s] found, expected %s.%s", errorPrefix(ctx),
                        extendedMarkupName, closeTag(expected), hint);
                return null;
            } else {
                // Suspend: remove the markup from anywhere in the stack.
                markup = removeFromMarkupStack2(extendedMarkupName, markupStack);
            }
        }
        document.closeMarkupInLayer(markup, l);
    }
    // for the last closing tag, close the markup for the default layer
    if (!layers.contains(DEFAULT_LAYER) && markup.getLayers().contains(DEFAULT_LAYER)) {
        Deque<TAGMarkup> markupDeque = state.openMarkup.get(DEFAULT_LAYER);
        removeFromMarkupStack(extendedMarkupName, markupDeque);
        document.closeMarkupInLayer(markup, DEFAULT_LAYER);
    }

    PrefixContext prefixNode = ctx.prefix();
    if (prefixNode != null) {
        String prefixNodeText = prefixNode.getText();
        if (prefixNodeText.equals(OPTIONAL_PREFIX)) {
            // optional
            // TODO

        } else if (prefixNodeText.equals(SUSPEND_PREFIX)) {
            // suspend: remember the markup per layer so it can be resumed later
            for (String l : layers) {
                state.suspendedMarkup.putIfAbsent(l, new ArrayDeque<>());
                state.suspendedMarkup.get(l).add(markup);
            }
        }
    }
    // Closing the root markup ends the document.
    state.eof = (markup.getDbId().equals(state.rootMarkupId));
    if (isSuspend && state.eof) {
        TAGMarkup rootMarkup = store.getMarkup(state.rootMarkupId);
        errorListener.addBreakingError("%s The root markup %s cannot be suspended.", errorPrefix(ctx),
                rootMarkup);
    }
    return markup;
}

From source file:nl.knaw.huc.di.tag.tagml.importer.TAGMLListener.java

/**
 * Pops and returns the top of the markup stack when its extended tag matches.
 * <p>
 * Returns {@code null} when the stack is missing or empty, or when the topmost
 * markup does not carry the given extended tag (the stack is left untouched).
 *
 * @param extendedTag the fully qualified tag to close
 * @param markupStack the open-markup stack of one layer; may be null
 * @return the matching markup that was removed, or null
 */
private TAGMarkup removeFromMarkupStack(String extendedTag, Deque<TAGMarkup> markupStack) {
    if (markupStack == null || markupStack.isEmpty()) {
        return null;
    }
    final TAGMarkup top = markupStack.peek();
    if (!extendedTag.equals(top.getExtendedTag())) {
        return null;
    }
    markupStack.pop();
    currentTextVariationState().removeOpenMarkup(top);
    return top;
}

From source file:no.sesat.search.http.filters.SiteLocatorFilter.java

/**
 * Tries to acquire the lock on behalf of the request, but only once the request
 * has reached the front of the deque, giving up after WAIT_TIME milliseconds.
 * <p>
 * On success the lock is left held by the caller; on timeout (returned
 * {@code timeLeft <= 0}) the lock is not held.
 *
 * @param request  the request waiting for its turn
 * @param deque    FIFO of requests waiting on the same lock; the front goes first
 * @param lock     the lock to acquire
 * @param timeLeft overwritten on every round; the entry value is unused
 * @return the remaining time budget in milliseconds (non-positive on timeout)
 */
private static long tryLock(final HttpServletRequest request, final Deque<ServletRequest> deque,
        final Lock lock, long timeLeft) {

    final long start = System.currentTimeMillis();

    try {
        do {
            timeLeft = WAIT_TIME - (System.currentTimeMillis() - start);

            // let's sleep. sleeping too long results in 409 response
            if (0 >= timeLeft || !lock.tryLock(timeLeft, TimeUnit.MILLISECONDS)) {
                // either the budget is exhausted or the lock could not be
                // acquired in time: the wait is over, without the lock
                break;

            } else if (deque.peek() != request) {
                // we've acquired the lock but we're not at front of deque
                // release the lock and try again
                lock.unlock();
            }
        } while (deque.peek() != request);

    } catch (InterruptedException ie) {
        // Restore the interrupt status so callers up the stack can observe it.
        Thread.currentThread().interrupt();
        LOG.error("Failed using user's lock", ie);
    }

    return timeLeft;
}