Example usage for java.util LinkedList descendingIterator

List of usage examples for java.util LinkedList descendingIterator

Introduction

On this page you can find example usage for java.util LinkedList descendingIterator.

Prototype

public Iterator<E> descendingIterator() 

Source Link

Usage

From source file:Main.java

/**
 * Demonstrates {@link LinkedList#descendingIterator()} by printing a
 * list's elements in reverse insertion order.
 *
 * @param args unused command-line arguments
 */
public static void main(String[] args) {
    // create a LinkedList and add some elements
    LinkedList<String> list = new LinkedList<String>();
    list.add("Hello");
    list.add("from java2s.com");
    list.add("10");

    // print the list in insertion order
    System.out.println("LinkedList:" + list);

    // descendingIterator() walks the list tail-to-head;
    // Iterator<String> avoids the raw type of the original.
    Iterator<String> x = list.descendingIterator();
    while (x.hasNext()) {
        System.out.println(x.next());
    }
}

From source file:net.codestory.http.templating.helpers.EachReverseHelperSource.java

/**
 * Returns an iterator over {@code values} in reverse order.
 *
 * @param values the elements to iterate over backwards
 * @return an iterator yielding the elements last-to-first
 */
private static <T> Iterator<T> reverse(Iterable<T> values) {
    LinkedList<T> backwards = new LinkedList<>();
    // Prepending each element leaves the list in reverse input order,
    // so a plain forward iterator yields the elements last-to-first.
    for (T value : values) {
        backwards.addFirst(value);
    }
    return backwards.iterator();
}

From source file:org.omg.bpmn.miwg.util.xml.XPathUtil.java

/**
 * Builds an absolute XPath string from the given node.
 * /*from  w  ww  .ja v  a2 s  .  c o  m*/
 * @param node
 * @param context namespace context used to determine correct namespace prefixes, see
 *            {@link SignavioNamespaceContext}
 * @return an XPath string or null (if no node was given)
 */
/**
 * Builds an absolute XPath string from the given node.
 *
 * @param node the node to describe; may be {@code null}
 * @param context namespace context used to determine correct namespace prefixes, see
 *            {@link SignavioNamespaceContext}
 * @return an XPath string or null (if no node was given)
 */
public static String getXPathString(Node node, NamespaceContext context) {
    if (node == null) {
        return null;
    }

    // Collect path steps from the node up to (but excluding) the document node.
    LinkedList<String> xpathSteps = new LinkedList<String>();
    xpathSteps.add(getNodeString(node, context));

    // For XPath purposes an attribute's "parent" is its owner element.
    Node current;
    if (node instanceof Attr) {
        current = ((Attr) node).getOwnerElement();
    } else {
        current = node.getParentNode();
    }

    while (current != node.getOwnerDocument()) {
        xpathSteps.add(getNodeString(current, context));
        current = current.getParentNode();
    }

    // Steps were collected leaf-first, so emit them in reverse for a
    // root-first path. StringBuilder replaces StringBuffer: no concurrent
    // access here, so its synchronization was pure overhead.
    StringBuilder buff = new StringBuilder();
    Iterator<String> it = xpathSteps.descendingIterator();
    while (it.hasNext()) {
        buff.append('/').append(it.next());
    }
    return buff.toString();
}

From source file:org.eclipse.gyrex.jobs.internal.schedules.ScheduleImpl.java

/**
 * Recursively verifies that following the preceding entries of {@code entry}
 * never revisits an entry already on the execution sequence.
 *
 * @param entry the schedule entry whose predecessors are checked
 * @param executionSequence ids visited so far; extended in place as the check recurses
 * @param precedingEntries ids of the entries preceding {@code entry}
 * @throws IllegalArgumentException if a preceding entry closes a loop
 */
public static void checkExecutionSequenceForLoops(final IScheduleEntry entry,
        final LinkedList<String> executionSequence, final Collection<String> precedingEntries)
        throws IllegalArgumentException {
    for (final String precedingEntryId : precedingEntries) {
        // Revisiting an id already on the sequence means the schedule loops.
        if (executionSequence.contains(precedingEntryId)) {
            throw new IllegalArgumentException(String.format(
                    "Found loop in schedule %s for preceding entry %s of entry %s in execution sequence %s.",
                    entry.getSchedule().getId(), precedingEntryId, entry.getId(),
                    StringUtils.join(executionSequence.descendingIterator(), "->")));
        }

        // No loop yet: record this id and walk the predecessor's own predecessors.
        executionSequence.add(precedingEntryId);
        final ScheduleImpl schedule = (ScheduleImpl) entry.getSchedule();
        final ScheduleEntryImpl precedingEntry = schedule.getEntry(precedingEntryId);
        checkExecutionSequenceForLoops(precedingEntry, executionSequence,
                precedingEntry.getPrecedingEntries());
    }
}

From source file:org.apache.htrace.core.JavaPropertyConfiguration.java

/**
 * Creates a configuration whose prefix array holds the given prefixes
 * copied in reverse of their list order.
 *
 * @param prefixes the configuration prefixes to store, reversed
 */
private JavaPropertyConfiguration(LinkedList<String> prefixes) {
    this.prefixes = new String[prefixes.size()];
    // Walk the list tail-to-head so the array ends up in reverse list order.
    Iterator<String> it = prefixes.descendingIterator();
    int next = 0;
    while (it.hasNext()) {
        this.prefixes[next] = it.next();
        next++;
    }
}

From source file:com.offbynull.voip.audio.gateways.io.OutputWriteRunnable.java

/**
 * Continuously drains queued audio buffers and writes the latest data to
 * the open output device. Loops until any exception escapes (logged and
 * the thread stops).
 */
@Override
public void run() {
    LOG.info("Output thread started: {}", openOutputDevice);
    try {
        byte[] internalBuffer = new byte[bufferSize];

        while (true) {
            // Grab everything queued so far and walk it tail-to-head.
            // NOTE(review): presumably the tail holds the newest data, so the
            // most recent audio wins when there is excess — confirm dumpQueue's
            // append order.
            LinkedList<OutputData> readBuffers = dumpQueue();
            Iterator<OutputData> readBuffersIt = readBuffers.descendingIterator();
            int remainingAmount = internalBuffer.length;
            int requiredAmount = 0;
            int copyAmount = 0;
            while (remainingAmount > 0 && readBuffersIt.hasNext()) {
                OutputData readBuffer = readBuffersIt.next();
                byte[] readBufferData = readBuffer.getData();
                requiredAmount = readBufferData.length;

                // Fill internalBuffer back-to-front; when a read buffer does
                // not fit entirely, copy only its tail (copyFrom > 0).
                copyAmount = Math.min(remainingAmount, requiredAmount);
                int copyFrom = requiredAmount - copyAmount;
                int copyTo = remainingAmount - copyAmount;

                System.arraycopy(readBufferData, copyFrom, internalBuffer, copyTo, copyAmount);

                remainingAmount -= copyAmount;
            }

            if (copyAmount != requiredAmount || readBuffersIt.hasNext()) { // more than 1 buffer or some data not copied, show a warning
                LOG.info("Excess data read: {} buffers -- only playing last {} bytes", readBuffers.size(),
                        bufferSize);
            }

            try {
                // Write only the filled tail: data starts at offset
                // remainingAmount and runs to the end of internalBuffer.
                openOutputDevice.write(internalBuffer, remainingAmount,
                        internalBuffer.length - remainingAmount);
            } catch (IllegalArgumentException iae) {
                LOG.warn("Output buffer potentially malformed: {}", iae.toString());
            }
        }
    } catch (Exception e) {
        LOG.info("Output thread stopped: {}", e.toString());
    }
}

From source file:org.apache.mahout.freqtermsets.PFPGrowth.java

/**
 * Loads earlier frequency lists and fills the two output maps with a
 * bidirectional term/id mapping, assigning each term a murmur3-derived
 * int id and resolving hash collisions deterministically.
 *
 * @param context job context supplying the Hadoop configuration
 * @param params job parameters used to pick the time-weight function
 * @param intervalStart start of the interval whose older F-lists are read
 * @param idStringMapOut output map: id -> term (filled by this method)
 * @param stringIdMapOut output map: term -> id (filled by this method)
 * @throws IOException if reading the cached F-lists fails
 */
public static void loadEarlierFHashMaps(JobContext context, Parameters params, long intervalStart,
        OpenIntObjectHashMap<String> idStringMapOut, OpenObjectIntHashMap<String> stringIdMapOut)
        throws IOException {
    // I resist the urge to cache this list because I don't know what exactly would happen
    // when the job is run in hadoop where every job has its own JVM.. will static
    // fields somehow leak? Can I be sure that the static WeakHashMap used as a cache is mine?
    // FINALLY.. the list would be loaded only twice, once for mapper, and once for reducer

    OpenObjectLongHashMap<String> prevFLists = PFPGrowth.readOlderCachedFLists(context.getConfiguration(),
            intervalStart, TimeWeightFunction.getDefault(params));

    // keysSortedByValue fills 'terms'; iterate it tail-to-head.
    LinkedList<String> terms = Lists.newLinkedList();
    prevFLists.keysSortedByValue(terms);
    Iterator<String> termsIter = terms.descendingIterator();
    while (termsIter.hasNext()) {

        String t = termsIter.next();
        // Base id: murmur3 hash of the term's UTF-8 form.
        int id = Hashing.murmur3_32().hashString(t, Charset.forName("UTF-8")).asInt();
        int c = 0;
        // On collision, rehash with successive characters as the seed;
        // once the term is exhausted, fall back to linear probing (++id).
        while (idStringMapOut.containsKey(id)) {
            // Best effort
            if (c < t.length()) {
                id = Hashing.murmur3_32((int) t.charAt(c++)).hashString(t, Charset.forName("UTF-8")).asInt();
            } else {
                ++id;
            }
        }

        // Record the mapping in both directions.
        idStringMapOut.put(id, t);
        stringIdMapOut.put(t, id);
    }
}

From source file:org.apache.mahout.freqtermsets.PFPGrowth.java

/**
 * Fills {@code fMap} with term frequencies according to the current run
 * mode and returns the total number of term occurrences loaded.
 *
 * @param conf Hadoop configuration used to locate the cached F-lists
 * @param fMap output map: term -> frequency (filled by this method)
 * @return the sum of all loaded frequencies
 * @throws IOException if reading the cached F-lists fails
 */
public static long readTermFreqHashMap(Configuration conf, OpenObjectLongHashMap<String> fMap)
        throws IOException {

    long totalNterms = 0;

    switch (runMode) {
    // Batch and SlidingWin deliberately share the same handling.
    case Batch:
    case SlidingWin:
        for (Pair<String, Long> e : readCachedFList(conf)) {
            fMap.put(e.getFirst(), e.getSecond());
            totalNterms += e.getSecond();
        }
        break;
    case BlockUpdate:

        // Re-read older F-lists weighted relative to the current window start.
        Parameters params = new Parameters(conf.get(PFPGrowth.PFP_PARAMETERS, ""));
        long currWindowStart = Long.parseLong(params.get(PFPGrowth.PARAM_INTERVAL_START));

        OpenObjectLongHashMap<String> prevFLists = readOlderCachedFLists(conf, currWindowStart,
                TimeWeightFunction.getDefault(params));
        // keysSortedByValue fills 'terms'; walk it tail-to-head.
        LinkedList<String> terms = Lists.newLinkedList();
        prevFLists.keysSortedByValue(terms);
        Iterator<String> termsIter = terms.descendingIterator();

        while (termsIter.hasNext()) {
            String t = termsIter.next();
            long freq = prevFLists.get(t);
            fMap.put(t, freq);
            totalNterms += freq;
        }
        break;
    }
    return totalNterms;
}

From source file:org.apache.hadoop.hbase.regionserver.CompactionPipeline.java

/**
 * Checks that {@code suffix} is a true suffix of the pipeline: walking
 * both lists backwards, every suffix segment must be the very same
 * object as the corresponding pipeline segment.
 *
 * @param suffix candidate suffix segments
 * @return true if the pipeline ends with exactly these segments
 */
private boolean validateSuffixList(LinkedList<ImmutableSegment> suffix) {
    if (suffix.isEmpty()) {
        // empty suffix is always valid
        return true;
    }

    // Compare from the tail of each list.
    Iterator<ImmutableSegment> pipelineBackwardIterator = pipeline.descendingIterator();
    Iterator<ImmutableSegment> suffixBackwardIterator = suffix.descendingIterator();
    // while replaces the original "for (; cond;)" idiom; the loop
    // variables are now scoped to a single iteration.
    while (suffixBackwardIterator.hasNext()) {
        if (!pipelineBackwardIterator.hasNext()) {
            // a suffix longer than pipeline is invalid
            return false;
        }
        ImmutableSegment suffixCurrent = suffixBackwardIterator.next();
        ImmutableSegment pipelineCurrent = pipelineBackwardIterator.next();
        // Identity comparison on purpose: the segments must be the same
        // instances, not merely equal.
        if (suffixCurrent != pipelineCurrent) {
            // non-matching suffix
            return false;
        }
    }
    // suffix matches pipeline suffix
    return true;
}

From source file:net.dv8tion.jda.core.MessageHistory.java

/**
 * Retrieves up to {@code amount} messages sent after the newest message
 * currently stored in this history.
 *
 * @param amount number of messages to retrieve (documented range: 1-100)
 * @return a RestAction yielding the retrieved messages
 * @throws IllegalArgumentException if amount is outside the allowed range
 * @throws IllegalStateException if no marker message is stored yet
 */
public RestAction<List<Message>> retrieveFuture(int amount) {
    // NOTE(review): the message says 1-100, yet amount == 0 passes this
    // check — confirm whether 0 should also throw.
    if (amount > 100 || amount < 0)
        throw new IllegalArgumentException(
                "Message retrieval limit is between 1 and 100 messages. No more, no less. Limit provided: "
                        + amount);

    if (history.isEmpty())
        throw new IllegalStateException(
                "No messageId  is stored to use as the marker between the future and past."
                        + "Either use MessageHistory(MessageChannel, String) or make a call to retrievePast(int) first.");

    Route.CompiledRoute route = Route.Messages.GET_MESSAGE_HISTORY_AFTER.compile(channel.getId(),
            Integer.toString(amount), history.firstKey());
    return new RestAction<List<Message>>(api, route, null) {
        @Override
        protected void handleResponse(Response response, Request request) {
            if (!response.isOk()) {
                request.onFailure(response);
                // FIX: without this return, a failed response was still
                // parsed below and onSuccess fired after onFailure.
                return;
            }

            EntityBuilder builder = EntityBuilder.get(api);
            LinkedList<Message> msgs = new LinkedList<>();
            JSONArray historyJson = response.getArray();

            for (int i = 0; i < historyJson.length(); i++)
                msgs.add(builder.createMessage(historyJson.getJSONObject(i)));

            // Walk the batch oldest-first; presumably history.put(0, ...)
            // inserts at the front — confirm against MessageHistory's storage.
            for (Iterator<Message> it = msgs.descendingIterator(); it.hasNext();) {
                Message m = it.next();
                history.put(0, m.getId(), m);
            }

            request.onSuccess(msgs);
        }
    };
}