Usage examples for com.google.common.collect.Iterators.transform
public static <F, T> Iterator<T> transform(final Iterator<F> fromIterator, final Function<? super F, ? extends T> function)
From source file: org.fcrepo.kernel.impl.rdf.impl.HierarchyRdfContext.java
private Iterator<Triple> childrenContext() throws RepositoryException { final Iterator<javax.jcr.Node> niceChildren = Iterators.filter(new NodeIterator(node().getNodes()), not(nastyChildren));//w ww .ja v a 2 s . com final Iterator<javax.jcr.Node> salientChildren; if (options.hasOffset()) { final int offset = options.getOffset(); Iterators.advance(niceChildren, offset); } if (options.hasLimit()) { salientChildren = Iterators.limit(niceChildren, options.getLimit()); } else { salientChildren = niceChildren; } return Iterators.concat(Iterators.transform(salientChildren, child2triples())); }
From source file: org.locationtech.geogig.plumbing.DeepCopy.java
private void copyObjects(final ObjectStore from, final ObjectStore to, final Supplier<Iterator<Node>> nodesToMove, final Set<ObjectId> metadataIds) { Iterable<ObjectId> ids = new Iterable<ObjectId>() { final Function<Node, ObjectId> asId = (node) -> { Optional<ObjectId> metadataId = node.getMetadataId(); if (metadataId.isPresent()) { metadataIds.add(node.getMetadataId().get()); }/*from w ww . j a v a 2 s. c om*/ ObjectId id = node.getObjectId(); return id; }; @Override public Iterator<ObjectId> iterator() { Iterator<Node> iterator = nodesToMove.get(); Iterator<ObjectId> ids = Iterators.transform(iterator, asId); return ids; } }; // store objects into the target db and remove them from the origin db in one shot to.putAll(from.getAll(ids)); }
From source file: org.apache.jackrabbit.oak.jcr.delegate.AuthorizableDelegator.java
/**
 * Lists the groups this authorizable belongs to, performed inside the
 * session lock. Every group coming back from the underlying delegate is
 * wrapped so callers only ever see delegator instances bound to this session.
 */
@Override
public Iterator<Group> memberOf() throws RepositoryException {
    return sessionDelegate.perform(new SessionOperation<Iterator<Group>>("memberOf") {
        @Nonnull
        @Override
        public Iterator<Group> perform() throws RepositoryException {
            // Re-wrap each group lazily as the caller iterates.
            final Function<Group, Group> wrapGroup = new Function<Group, Group>() {
                @Nullable
                @Override
                public Group apply(@Nullable Group group) {
                    return GroupDelegator.wrap(sessionDelegate, group);
                }
            };
            return Iterators.transform(delegate.memberOf(), wrapGroup);
        }
    });
}
From source file: org.locationtech.geogig.remotes.pack.PreparePackOp.java
private Set<ObjectId> resolveHeadCommits(List<RefRequest> refs, boolean isTags, Predicate<? super RefRequest> filter, Function<? super RefRequest, ? extends ObjectId> function) { Iterable<ObjectId> ids = transform(filter(refs, filter), function); if (isTags) { Iterator<RevTag> tags = objectDatabase().getAll(ids, NOOP_LISTENER, RevTag.class); ids = newArrayList(Iterators.transform(tags, (t) -> t.getCommitId())); }// w w w . j a va 2 s . c om return Sets.newHashSet(ids); }
From source file: org.locationtech.geogig.geotools.plumbing.ExportDiffOp.java
/**
 * Executes the export operation using the parameters that have been specified.
 * Computes the diff between {@code oldRef} and {@code newRef} under {@code path},
 * turns the selected side of each diff entry into features, and writes them to
 * the target feature store — transactionally when {@code transactional} is set.
 *
 * @return the target feature store the features were added to
 */
@Override
protected SimpleFeatureStore _call() {
    final SimpleFeatureStore targetStore = getTargetStore();
    // Export either the old or the new side of the diff, per the 'old' flag.
    final String refspec = old ? oldRef : newRef;
    final RevTree rootTree = resolveRootTree(refspec);
    final NodeRef typeTreeRef = resolTypeTreeRef(refspec, path, rootTree);
    final ObjectId defaultMetadataId = typeTreeRef.getMetadataId();
    final ProgressListener progressListener = getProgressListener();
    progressListener.started();
    progressListener.setDescription("Exporting diffs for path '" + path + "'... ");
    // Lazy collection: the diff is (re)computed each time features() is called.
    FeatureCollection<SimpleFeatureType, SimpleFeature> asFeatureCollection = new BaseFeatureCollection<SimpleFeatureType, SimpleFeature>() {
        @Override
        public FeatureIterator<SimpleFeature> features() {
            Iterator<DiffEntry> diffs = command(DiffOp.class).setOldVersion(oldRef).setNewVersion(newRef)
                    .setFilter(path).call();
            final Iterator<SimpleFeature> plainFeatures = getFeatures(diffs, old, objectDatabase(),
                    defaultMetadataId, progressListener);
            // Apply the configured per-feature transform; it may drop features
            // by returning Optional.absent().
            Iterator<Optional<Feature>> transformed = Iterators.transform(plainFeatures,
                    ExportDiffOp.this.function);
            // Unwrap the optionals (absent -> null) and filter the nulls out,
            // so only surviving features reach the store.
            Iterator<SimpleFeature> filtered = Iterators.filter(
                    Iterators.transform(transformed, new Function<Optional<Feature>, SimpleFeature>() {
                        @Override
                        public SimpleFeature apply(Optional<Feature> input) {
                            return (SimpleFeature) (input.isPresent() ? input.get() : null);
                        }
                    }), Predicates.notNull());
            return new DelegateFeatureIterator<SimpleFeature>(filtered);
        }
    };
    // add the feature collection to the feature store
    final Transaction transaction;
    if (transactional) {
        transaction = new DefaultTransaction("create");
    } else {
        transaction = Transaction.AUTO_COMMIT;
    }
    try {
        targetStore.setTransaction(transaction);
        try {
            targetStore.addFeatures(asFeatureCollection);
            transaction.commit();
        } catch (final Exception e) {
            // Roll back only for a real transaction; AUTO_COMMIT has nothing to undo.
            if (transactional) {
                transaction.rollback();
            }
            Throwables.propagateIfInstanceOf(e, GeoToolsOpException.class);
            throw new GeoToolsOpException(e, StatusCode.UNABLE_TO_ADD);
        } finally {
            // Always release the transaction, success or failure.
            transaction.close();
        }
    } catch (IOException e) {
        throw new GeoToolsOpException(e, StatusCode.UNABLE_TO_ADD);
    }
    progressListener.complete();
    return targetStore;
}
From source file: org.locationtech.geogig.api.plumbing.DeepCopy.java
private void copyObjects(final ObjectDatabase from, final ObjectDatabase to, final Supplier<Iterator<Node>> nodesToMove, final Set<ObjectId> metadataIds) { Iterable<ObjectId> ids = new Iterable<ObjectId>() { final Function<Node, ObjectId> asId = new Function<Node, ObjectId>() { @Override/*w ww. ja v a 2 s .c o m*/ public ObjectId apply(Node input) { Optional<ObjectId> metadataId = input.getMetadataId(); if (metadataId.isPresent()) { metadataIds.add(input.getMetadataId().get()); } ObjectId id = input.getObjectId(); return id; } }; @Override public Iterator<ObjectId> iterator() { Iterator<Node> iterator = nodesToMove.get(); Iterator<ObjectId> ids = Iterators.transform(iterator, asId); return ids; } }; // store objects into the target db and remove them from the origin db in one shot to.putAll(from.getAll(ids)); }
From source file: io.druid.query.groupby.epinephelinae.SpillingGrouper.java
@Override public Iterator<Entry<KeyType>> iterator(final boolean sorted) { final List<Iterator<Entry<KeyType>>> iterators = new ArrayList<>(1 + files.size()); iterators.add(grouper.iterator(sorted)); for (final File file : files) { final MappingIterator<Entry<KeyType>> fileIterator = read(file, keySerde.keyClazz()); iterators.add(Iterators.transform(fileIterator, new Function<Entry<KeyType>, Entry<KeyType>>() { @Override/*w w w .j av a 2 s . co m*/ public Entry<KeyType> apply(Entry<KeyType> entry) { final Object[] deserializedValues = new Object[entry.getValues().length]; for (int i = 0; i < deserializedValues.length; i++) { deserializedValues[i] = aggregatorFactories[i].deserialize(entry.getValues()[i]); if (deserializedValues[i] instanceof Integer) { // Hack to satisfy the groupBy unit tests; perhaps we could do better by adjusting Jackson config. deserializedValues[i] = ((Integer) deserializedValues[i]).longValue(); } } return new Entry<>(entry.getKey(), deserializedValues); } })); closeables.add(fileIterator); } return Groupers.mergeIterators(iterators, sorted); }
From source file: org.apache.mahout.math.hadoop.stochasticsvd.SSVDHelper.java
/**
 * Opens an iterator over the (key, vector) rows of a distributed row matrix
 * stored as sequence files matching {@code glob}, unwrapping each
 * {@link VectorWritable} into its backing {@link Vector}.
 *
 * @param fs         file system (currently unused; kept for API compatibility)
 * @param glob       glob pattern selecting the sequence files to read
 * @param conf       Hadoop configuration used to open the files
 * @param closeables the underlying iterator is pushed onto this deque so the
 *                   caller can close it (LIFO) when done
 * @return an iterator of (row key, dense/sparse vector) pairs
 * @throws IOException if the sequence files cannot be opened
 */
public static Iterator<Pair<Writable, Vector>> drmIterator(FileSystem fs, Path glob, Configuration conf,
        Deque<Closeable> closeables) throws IOException {
    SequenceFileDirIterator<Writable, VectorWritable> ret =
            new SequenceFileDirIterator<Writable, VectorWritable>(glob, PathType.GLOB,
                    PathFilters.logsCRCFilter(), PARTITION_COMPARATOR, true, conf);
    // Register immediately so the iterator is closed even if a caller bails early.
    closeables.addFirst(ret);
    return Iterators.transform(ret, new Function<Pair<Writable, VectorWritable>, Pair<Writable, Vector>>() {
        @Override
        public Pair<Writable, Vector> apply(Pair<Writable, VectorWritable> p) {
            // Was a raw 'new Pair(...)': parameterize to avoid the unchecked warning.
            return new Pair<Writable, Vector>(p.getFirst(), p.getSecond().get());
        }
    });
}
From source file: edu.illinois.cs.cogcomp.bigdata.mapdb.MapDB.java
public static <K extends Comparable<K>, V> BTreeMap<K, V> batchCreate(final NavigableMap<K, V> map, BTreeMapMaker maker) {//w w w.j ava2s .c o m if (map.keySet().iterator().next() instanceof String) throw new IllegalArgumentException("Unicode String comparator is inconsistent."); return maker.pumpSource(Iterators.transform(map.descendingMap().entrySet().iterator(), new Function<Entry<K, V>, Tuple2<K, V>>() { public Tuple2<K, V> apply(Entry<K, V> e) { return new Tuple2<K, V>(e.getKey(), e.getValue()); } })).make(); }
From source file: org.fcrepo.kernel.impl.rdf.impl.NodeTypeRdfContext.java
/**
 * Convert a NodeType into an RDF stream by capturing the supertypes, node
 * definitions, and property definitions of the type as RDFS triples.
 *
 * @param nodeType the node type
 * @throws RepositoryException if repository exception occurred
 */
public NodeTypeRdfContext(final NodeType nodeType) throws RepositoryException {
    super();
    final Node nodeTypeResource = getResource(nodeType).asNode();
    final String nodeTypeName = nodeType.getName();
    LOGGER.trace("Adding triples for nodeType: {} with URI: {}", nodeTypeName, nodeTypeResource.getURI());
    // Each declared supertype becomes an rdfs:subClassOf triple.
    concat(Collections2.transform(copyOf(nodeType.getDeclaredSupertypes()), new Function<NodeType, Triple>() {
        @Override
        public Triple apply(final NodeType input) {
            final Node supertypeNode;
            try {
                supertypeNode = getResource(input).asNode();
                LOGGER.trace("Adding triple for nodeType: {} with subclass: {}", nodeTypeName,
                        supertypeNode.getURI());
                return create(nodeTypeResource, subClassOf.asNode(), supertypeNode);
            } catch (final RepositoryException e) {
                // Function.apply cannot throw checked exceptions; rethrow unchecked.
                throw propagate(e);
            }
        }
    }));
    // Declared child-node definitions (minus wildcard/residual ones) as triples.
    concat(Iterators
            .concat(Iterators.transform(
                    Iterators.filter(forArray(nodeType.getDeclaredChildNodeDefinitions()),
                            not(isWildcardResidualDefinition)),
                    new NodeDefinitionToTriples(nodeTypeResource))));
    // Declared property definitions (minus wildcard/residual ones) as triples.
    concat(Iterators
            .concat(Iterators.transform(
                    Iterators.filter(forArray(nodeType.getDeclaredPropertyDefinitions()),
                            not(isWildcardResidualDefinition)),
                    new PropertyDefinitionToTriples(nodeTypeResource))));
    // Finally, type the resource as a Class and attach its human-readable label.
    concat(create(nodeTypeResource, type.asNode(), Class.asNode()),
            create(nodeTypeResource, label.asNode(), createLiteral(nodeTypeName)));
}