Example usage for java.lang Iterable Iterable

List of usage examples for java.lang Iterable Iterable

Introduction

On this page you can find example usage for java.lang.Iterable.

Prototype

Iterable
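
A minimal, self-contained sketch of the pattern used throughout the examples below: an existing Iterator (or a factory for one) is exposed through an anonymous Iterable so that callers can drive it with a for-each loop.

import java.util.Arrays;
import java.util.Iterator;

public class IterableDemo {
    public static void main(String[] args) {
        final Iterable<String> letters = new Iterable<String>() {
            @Override
            public Iterator<String> iterator() {
                return Arrays.asList("a", "b", "c").iterator();
            }
        };

        for (final String letter : letters) {
            System.out.println(letter);
        }
    }
}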

Usage

From source file:org.apache.cayenne.map.ObjEntity.java

/**
 * Returns an Iterable instance over expression path components based on
 * this entity.
 * 
 * @since 3.0
 */
@Override
@SuppressWarnings("unchecked")
public Iterable<PathComponent<ObjAttribute, ObjRelationship>> resolvePath(final Expression pathExp,
        final Map aliasMap) {

    if (pathExp.getType() == Expression.OBJ_PATH) {

        return new Iterable<PathComponent<ObjAttribute, ObjRelationship>>() {

            public Iterator iterator() {
                return new PathComponentIterator(ObjEntity.this, (String) pathExp.getOperand(0), aliasMap);
            }
        };
    }

    throw new ExpressionException(
            "Invalid expression type: '" + pathExp.expName() + "',  OBJ_PATH is expected.");
}
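
A hedged usage sketch for the method above: entity stands for some ObjEntity, and the Expression.fromString factory and PathComponent.getName() accessor are assumptions about the Cayenne 3.x API rather than part of the snippet.

Expression pathExp = Expression.fromString("department.employees");
for (PathComponent<ObjAttribute, ObjRelationship> component : entity.resolvePath(pathExp, Collections.emptyMap())) {
    System.out.println(component.getName());
}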

From source file:org.orekit.models.earth.tessellation.EllipsoidTessellator.java

/** Get an iterator over mesh nodes indices.
 * @param minIndex minimum node index
 * @param maxIndex maximum node index
 * @param truncateLast true if we can reduce last tile
 * @return iterator over mesh nodes indices
 */
private Iterable<Range> nodesIndices(final int minIndex, final int maxIndex, final boolean truncateLast) {

    final int first;
    if (truncateLast) {

        // truncate last tile rather than balance tiles around the zone of interest
        first = minIndex;

    } else {

        // balance tiles around the zone of interest rather than truncate last tile

        // number of tiles needed to cover the full indices range
        final int range = maxIndex - minIndex;
        final int nbTiles = (range + quantization - 1) / quantization;

        // extra nodes that must be added to complete the tiles
        final int extraNodes = nbTiles * quantization - range;

        // balance the extra nodes before min index and after maxIndex
        final int extraBefore = (extraNodes + 1) / 2;

        first = minIndex - extraBefore;

    }

    return new Iterable<Range>() {

        /** {@inheritDoc} */
        @Override
        public Iterator<Range> iterator() {
            return new Iterator<Range>() {

                private int nextLower = first;

                /** {@inheritDoc} */
                @Override
                public boolean hasNext() {
                    return nextLower < maxIndex;
                }

                /** {@inheritDoc} */
                @Override
                public Range next() {

                    if (nextLower >= maxIndex) {
                        throw new NoSuchElementException();
                    }
                    final int lower = nextLower;

                    nextLower += quantization;
                    if (truncateLast && nextLower > maxIndex && lower < maxIndex) {
                        // truncate last tile
                        nextLower = maxIndex;
                    }

                    return new Range(lower, nextLower);

                }

                /** {@inheritDoc} */
                @Override
                public void remove() {
                    throw new UnsupportedOperationException();
                }

            };
        }
    };

}
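
To see what the balancing branch produces, here is a standalone sketch (plain Java, not Orekit API) that reproduces the arithmetic above for arbitrary example values and prints the tile boundaries the anonymous iterator would return.

public class BalancedTilesDemo {
    public static void main(String[] args) {
        final int quantization = 10;
        final int minIndex = 0;
        final int maxIndex = 27;

        // same arithmetic as the truncateLast == false branch above
        final int range = maxIndex - minIndex;                          // 27
        final int nbTiles = (range + quantization - 1) / quantization;  // 3 tiles
        final int extraNodes = nbTiles * quantization - range;          // 3 extra nodes to complete the tiles
        final int extraBefore = (extraNodes + 1) / 2;                   // 2 of them placed before minIndex
        final int first = minIndex - extraBefore;                       // -2

        // walk the same loop the anonymous Iterator performs
        for (int lower = first; lower < maxIndex; lower += quantization) {
            System.out.println(lower + " .. " + (lower + quantization)); // -2 .. 8, 8 .. 18, 18 .. 28
        }
    }
}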

From source file:org.apache.accumulo.pig.Bytes.java

/**
 * Iterate over keys within the passed inclusive range.
 */
public static Iterable<byte[]> iterateOnSplits(final byte[] a, final byte[] b, final int num) {
    byte[] aPadded;
    byte[] bPadded;
    if (a.length < b.length) {
        aPadded = padTail(a, b.length - a.length);
        bPadded = b;
    } else if (b.length < a.length) {
        aPadded = a;
        bPadded = padTail(b, a.length - b.length);
    } else {
        aPadded = a;
        bPadded = b;
    }
    if (compareTo(aPadded, bPadded) >= 0) {
        throw new IllegalArgumentException("b <= a");
    }
    if (num <= 0) {
        throw new IllegalArgumentException("num cannot be < 0");
    }
    byte[] prependHeader = { 1, 0 };
    final BigInteger startBI = new BigInteger(add(prependHeader, aPadded));
    final BigInteger stopBI = new BigInteger(add(prependHeader, bPadded));
    final BigInteger diffBI = stopBI.subtract(startBI);
    final BigInteger splitsBI = BigInteger.valueOf(num + 1);
    if (diffBI.compareTo(splitsBI) < 0) {
        return null;
    }
    final BigInteger intervalBI;
    try {
        intervalBI = diffBI.divide(splitsBI);
    } catch (Exception e) {
        LOG.error("Exception caught during division", e);
        return null;
    }

    final Iterator<byte[]> iterator = new Iterator<byte[]>() {
        private int i = -1;

        @Override
        public boolean hasNext() {
            return i < num + 1;
        }

        @Override
        public byte[] next() {
            i++;
            if (i == 0) {
                return a;
            }
            if (i == num + 1) {
                return b;
            }

            BigInteger curBI = startBI.add(intervalBI.multiply(BigInteger.valueOf(i)));
            byte[] padded = curBI.toByteArray();
            if (padded[1] == 0) {
                padded = tail(padded, padded.length - 2);
            } else {
                padded = tail(padded, padded.length - 1);
            }
            return padded;
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    };

    return new Iterable<byte[]>() {
        @Override
        public Iterator<byte[]> iterator() {
            return iterator;
        }
    };
}
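
A hedged usage sketch: split the range between two row keys into three interior boundaries. The null check matters because the method returns null when the range is too small to divide; the key values are illustrative only (imports: java.nio.charset.StandardCharsets, java.util.Arrays).

byte[] start = "row_a".getBytes(StandardCharsets.UTF_8);
byte[] stop = "row_z".getBytes(StandardCharsets.UTF_8);

Iterable<byte[]> splits = Bytes.iterateOnSplits(start, stop, 3);
if (splits != null) {
    for (byte[] split : splits) {
        // yields start, the three interior split points, then stop
        System.out.println(Arrays.toString(split));
    }
}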

From source file:org.apache.nifi.controller.repository.StandardProcessSession.java

private void updateProvenanceRepo(final Checkpoint checkpoint) {
    // Update Provenance Repository
    final ProvenanceEventRepository provenanceRepo = context.getProvenanceRepository();

    // We need to de-dupe the events that we've created and those reported to the provenance reporter,
    // in case the Processor developer submitted the same events to the reporter. We use a LinkedHashSet
    // for this so that duplicates are dropped while the events are still submitted in the proper order.
    final Set<ProvenanceEventRecord> recordsToSubmit = new LinkedHashSet<>();
    final Map<String, Set<ProvenanceEventType>> eventTypesPerFlowFileId = new HashMap<>();

    final Set<ProvenanceEventRecord> processorGenerated = checkpoint.reportedEvents;

    // We first want to submit FORK events because if the Processor is going to create events against
    // a FlowFile, that FlowFile needs to be shown to be created first.
    // However, if the Processor has generated a FORK event, we don't want to use the Framework-created one --
    // we prefer to use the event generated by the Processor. We can determine this by checking if the Set of events generated
    // by the Processor contains any of the FORK events that we generated
    for (final Map.Entry<FlowFile, ProvenanceEventBuilder> entry : checkpoint.forkEventBuilders.entrySet()) {
        final ProvenanceEventBuilder builder = entry.getValue();
        final FlowFile flowFile = entry.getKey();

        updateEventContentClaims(builder, flowFile, checkpoint.records.get(flowFile));
        final ProvenanceEventRecord event = builder.build();

        if (!event.getChildUuids().isEmpty() && !isSpuriousForkEvent(event, checkpoint.removedFlowFiles)) {
            // If framework generated the event, add it to the 'recordsToSubmit' Set.
            if (!processorGenerated.contains(event)) {
                recordsToSubmit.add(event);
            }

            // Register the FORK event for each child and each parent.
            for (final String childUuid : event.getChildUuids()) {
                addEventType(eventTypesPerFlowFileId, childUuid, event.getEventType());
            }
            for (final String parentUuid : event.getParentUuids()) {
                addEventType(eventTypesPerFlowFileId, parentUuid, event.getEventType());
            }
        }
    }

    // Now add any Processor-reported events.
    for (final ProvenanceEventRecord event : processorGenerated) {
        if (isSpuriousForkEvent(event, checkpoint.removedFlowFiles)) {
            continue;
        }

        // Check if the event indicates that the FlowFile was routed to the same
        // connection from which it was pulled (and only this connection). If so, discard the event.
        if (isSpuriousRouteEvent(event, checkpoint.records)) {
            continue;
        }

        recordsToSubmit.add(event);
        addEventType(eventTypesPerFlowFileId, event.getFlowFileUuid(), event.getEventType());
    }

    // Finally, add any other events that we may have generated.
    for (final List<ProvenanceEventRecord> eventList : checkpoint.generatedProvenanceEvents.values()) {
        for (final ProvenanceEventRecord event : eventList) {
            if (isSpuriousForkEvent(event, checkpoint.removedFlowFiles)) {
                continue;
            }

            recordsToSubmit.add(event);
            addEventType(eventTypesPerFlowFileId, event.getFlowFileUuid(), event.getEventType());
        }
    }

    // Check if content or attributes changed. If so, register the appropriate events.
    for (final StandardRepositoryRecord repoRecord : checkpoint.records.values()) {
        final ContentClaim original = repoRecord.getOriginalClaim();
        final ContentClaim current = repoRecord.getCurrentClaim();

        boolean contentChanged = false;
        if (original == null && current != null) {
            contentChanged = true;
        }
        if (original != null && current == null) {
            contentChanged = true;
        }
        if (original != null && current != null && !original.equals(current)) {
            contentChanged = true;
        }

        final FlowFileRecord curFlowFile = repoRecord.getCurrent();
        final String flowFileId = curFlowFile.getAttribute(CoreAttributes.UUID.key());
        boolean eventAdded = false;

        if (checkpoint.removedFlowFiles.contains(flowFileId)) {
            continue;
        }

        final boolean newFlowFile = repoRecord.getOriginal() == null;
        if (contentChanged && !newFlowFile) {
            recordsToSubmit
                    .add(provenanceReporter.build(curFlowFile, ProvenanceEventType.CONTENT_MODIFIED).build());
            addEventType(eventTypesPerFlowFileId, flowFileId, ProvenanceEventType.CONTENT_MODIFIED);
            eventAdded = true;
        }

        if (checkpoint.createdFlowFiles.contains(flowFileId)) {
            final Set<ProvenanceEventType> registeredTypes = eventTypesPerFlowFileId.get(flowFileId);
            boolean creationEventRegistered = false;
            if (registeredTypes != null) {
                if (registeredTypes.contains(ProvenanceEventType.CREATE)
                        || registeredTypes.contains(ProvenanceEventType.FORK)
                        || registeredTypes.contains(ProvenanceEventType.JOIN)
                        || registeredTypes.contains(ProvenanceEventType.RECEIVE)
                        || registeredTypes.contains(ProvenanceEventType.FETCH)) {
                    creationEventRegistered = true;
                }
            }

            if (!creationEventRegistered) {
                recordsToSubmit.add(provenanceReporter.build(curFlowFile, ProvenanceEventType.CREATE).build());
                eventAdded = true;
            }
        }

        if (!eventAdded && !repoRecord.getUpdatedAttributes().isEmpty()) {
            // We generate an ATTRIBUTES_MODIFIED event only if no other event has been
            // created for the FlowFile. We do this because all events contain both the
            // newest and the original attributes, so generating an ATTRIBUTES_MODIFIED
            // event is redundant if another already exists.
            if (!eventTypesPerFlowFileId.containsKey(flowFileId)) {
                recordsToSubmit.add(
                        provenanceReporter.build(curFlowFile, ProvenanceEventType.ATTRIBUTES_MODIFIED).build());
                addEventType(eventTypesPerFlowFileId, flowFileId, ProvenanceEventType.ATTRIBUTES_MODIFIED);
            }
        }
    }

    // We want to submit the 'recordsToSubmit' collection, followed by the auto-terminated events to the Provenance Repository.
    // We want to do this with a single call to ProvenanceEventRepository#registerEvents because it may be much more efficient
    // to do so.
    // However, we want to modify the events in 'recordsToSubmit' to obtain the data from the most recent version of the FlowFiles
    // (except for SEND events); see the note below for why.
    // Therefore, we create an Iterable that can iterate over each of these events, modifying them as needed, and returning them
    // in the appropriate order. This prevents an unnecessary step of creating an intermediate List and adding all of those values
    // to the List.
    // This is done in a similar vein to how Java 8's streams work, iterating over the events and returning a processed version
    // one-at-a-time as opposed to iterating over the entire Collection and putting the results in another Collection. However,
    // we don't want to change the Framework to require Java 8 at this time, because it's not yet as prevalent as we would desire
    final Map<String, FlowFileRecord> flowFileRecordMap = new HashMap<>();
    for (final StandardRepositoryRecord repoRecord : checkpoint.records.values()) {
        final FlowFileRecord flowFile = repoRecord.getCurrent();
        flowFileRecordMap.put(flowFile.getAttribute(CoreAttributes.UUID.key()), flowFile);
    }

    final List<ProvenanceEventRecord> autoTermEvents = checkpoint.autoTerminatedEvents;
    final Iterable<ProvenanceEventRecord> iterable = new Iterable<ProvenanceEventRecord>() {
        final Iterator<ProvenanceEventRecord> recordsToSubmitIterator = recordsToSubmit.iterator();
        final Iterator<ProvenanceEventRecord> autoTermIterator = autoTermEvents == null ? null
                : autoTermEvents.iterator();

        @Override
        public Iterator<ProvenanceEventRecord> iterator() {
            return new Iterator<ProvenanceEventRecord>() {
                @Override
                public boolean hasNext() {
                    return recordsToSubmitIterator.hasNext()
                            || autoTermIterator != null && autoTermIterator.hasNext();
                }

                @Override
                public ProvenanceEventRecord next() {
                    if (recordsToSubmitIterator.hasNext()) {
                        final ProvenanceEventRecord rawEvent = recordsToSubmitIterator.next();

                        // Update the Provenance Event Record with all of the info that we know about the event.
                        // For SEND events, we do not want to update the FlowFile info on the Event, because the event should
                        // reflect the FlowFile as it was sent to the remote system. However, for other events, we want to use
                        // the representation of the FlowFile as it is committed, as this is the only way in which it really
                        // exists in our system -- all other representations are volatile representations that have not been
                        // exposed.
                        return enrich(rawEvent, flowFileRecordMap, checkpoint.records,
                                rawEvent.getEventType() != ProvenanceEventType.SEND);
                    } else if (autoTermIterator != null && autoTermIterator.hasNext()) {
                        return enrich(autoTermIterator.next(), flowFileRecordMap, checkpoint.records, true);
                    }

                    throw new NoSuchElementException();
                }

                @Override
                public void remove() {
                    throw new UnsupportedOperationException();
                }
            };
        }
    };

    provenanceRepo.registerEvents(iterable);
}
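
The comment block above describes a lazy, concatenating Iterable: two underlying iterators are walked in order and each element is transformed only when it is requested. The following is a generic, self-contained sketch of that pattern, not NiFi API; the String events and the enrich stand-in are assumptions for illustration. Unlike the snippet above, it creates fresh iterators on each iterator() call, so it can be traversed more than once.

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;

public class ChainedIterableDemo {

    // Stand-in for the per-element enrichment performed by the session.
    static String enrich(final String event) {
        return event + " (enriched)";
    }

    static Iterable<String> chain(final List<String> first, final List<String> second) {
        return new Iterable<String>() {
            @Override
            public Iterator<String> iterator() {
                final Iterator<String> firstIterator = first.iterator();
                final Iterator<String> secondIterator = second == null ? null : second.iterator();

                return new Iterator<String>() {
                    @Override
                    public boolean hasNext() {
                        return firstIterator.hasNext() || secondIterator != null && secondIterator.hasNext();
                    }

                    @Override
                    public String next() {
                        if (firstIterator.hasNext()) {
                            return enrich(firstIterator.next());
                        } else if (secondIterator != null && secondIterator.hasNext()) {
                            return enrich(secondIterator.next());
                        }
                        throw new NoSuchElementException();
                    }

                    @Override
                    public void remove() {
                        throw new UnsupportedOperationException();
                    }
                };
            }
        };
    }

    public static void main(String[] args) {
        // elements are enriched lazily, one at a time, in submission order
        for (final String event : chain(Arrays.asList("CREATE", "FORK"), Arrays.asList("DROP"))) {
            System.out.println(event);
        }
    }
}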

From source file:org.regenstrief.util.Util.java

public final static <E> Iterable<E> wrap(final Iterator<E> iter) {
    return new Iterable<E>() {
        @Override
        public Iterator<E> iterator() {
            return iter;
        }
    };
}
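
For instance, a plain Iterator, which cannot drive a for-each loop on its own, can be wrapped and iterated directly. Because the same Iterator instance is handed back on every iterator() call, the resulting Iterable is effectively single-pass.

Iterator<String> names = Arrays.asList("alice", "bob").iterator();
for (String name : Util.wrap(names)) {
    System.out.println(name);
}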

From source file:com.gargoylesoftware.htmlunit.html.DomNode.java

/**
 * @return an Iterable over the children of this node
 */
public final Iterable<DomNode> getChildren() {
    return new Iterable<DomNode>() {
        public Iterator<DomNode> iterator() {
            return new ChildIterator();
        }
    };
}
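
A hedged usage sketch (obtaining the HtmlPage via WebClient is omitted for brevity): print the node name of each direct child of the page body.

static void printChildren(final HtmlPage page) {
    for (final DomNode child : page.getBody().getChildren()) {
        System.out.println(child.getNodeName());
    }
}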

From source file:org.nuclos.common.collection.CollectionUtils.java

/**
 * Returns an enumeration as an iterable.
 * @param enumeration the enumeration
 * @return an iterable wrapping the enumeration (note: remove is not supported)
 * @precondition enumeration != null
 * @postcondition result != null
 */
public static <T> Iterable<T> asIterable(final Enumeration<T> enumeration) {
    return new Iterable<T>() {
        @Override
        public Iterator<T> iterator() {
            return new Iterator<T>() {
                @Override
                public boolean hasNext() {
                    return enumeration.hasMoreElements();
                }

                @Override
                public T next() {
                    return enumeration.nextElement();
                }

                @Override
                public void remove() {
                    throw new UnsupportedOperationException();
                }
            };
        }
    };
}
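
A short usage sketch, assuming the Nuclos CollectionUtils above is on the classpath: adapt a legacy Enumeration so it can be used in a for-each loop.

Enumeration<String> colors = Collections.enumeration(Arrays.asList("red", "green", "blue"));
for (String color : CollectionUtils.asIterable(colors)) {
    System.out.println(color);
}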

From source file:com.gargoylesoftware.htmlunit.html.DomNode.java

/**
 * Returns an {@link Iterable} that will recursively iterate over all of this node's descendants,
 * including {@link DomText} elements, {@link DomComment} elements, etc. If you want to iterate
 * only over {@link HtmlElement} descendants, please use {@link #getHtmlElementDescendants()}.
 * @return an {@link Iterable} that will recursively iterate over all of this node's descendants
 */
public final Iterable<DomNode> getDescendants() {
    return new Iterable<DomNode>() {
        public Iterator<DomNode> iterator() {
            return new DescendantElementsIterator<DomNode>(DomNode.class);
        }
    };
}
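
A hedged usage sketch: count every descendant node of a given DomNode, including text and comment nodes.

static int countDescendants(final DomNode node) {
    int count = 0;
    for (final DomNode descendant : node.getDescendants()) {
        count++;
    }
    return count;
}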

From source file:org.nuclos.common.collection.CollectionUtils.java

/**
 * Returns an iterator as an iterable. The returned iterable can only be used once.
 */
public static <T> Iterable<T> asIterable(final Iterator<T> iterator) {
    return new Iterable<T>() {
        boolean consumed = false;

        @Override
        public Iterator<T> iterator() {
            if (consumed)
                throw new IllegalStateException("Iterator already used");
            consumed = true;
            return iterator;
        }
    };
}
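
A short usage sketch illustrating the single-use guard: the first traversal succeeds, a second one fails fast.

Iterable<Integer> numbers = CollectionUtils.asIterable(Arrays.asList(1, 2, 3).iterator());
for (int n : numbers) {
    System.out.println(n);
}
// Iterating 'numbers' a second time throws IllegalStateException("Iterator already used").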

From source file:com.gargoylesoftware.htmlunit.html.DomNode.java

/**
 * Returns an {@link Iterable} that will recursively iterate over all of this node's {@link HtmlElement}
 * descendants. If you want to iterate over all descendants (including {@link DomText} elements,
 * {@link DomComment} elements, etc.), please use {@link #getDescendants()}.
 * @return an {@link Iterable} that will recursively iterate over all of this node's {@link HtmlElement}
 *         descendants
 */
public final Iterable<HtmlElement> getHtmlElementDescendants() {
    return new Iterable<HtmlElement>() {
        public Iterator<HtmlElement> iterator() {
            return new DescendantElementsIterator<HtmlElement>(HtmlElement.class);
        }
    };
}
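
A hedged usage sketch: collect the tag names of every element descendant, skipping text and comment nodes.

static List<String> descendantTagNames(final DomNode node) {
    final List<String> names = new ArrayList<>();
    for (final HtmlElement element : node.getHtmlElementDescendants()) {
        names.add(element.getTagName());
    }
    return names;
}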