Example usage for com.google.common.collect Iterables getLast

List of usage examples for com.google.common.collect Iterables getLast

Introduction

On this page you can find example usages of com.google.common.collect Iterables getLast.

Prototype

@Nullable
public static <T> T getLast(Iterable<? extends T> iterable, @Nullable T defaultValue) 

Document

Returns the last element of iterable or defaultValue if the iterable is empty.
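
A minimal, self-contained sketch (not taken from any of the projects below) illustrating the documented behavior for both a non-empty and an empty iterable:

import com.google.common.collect.Iterables;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class GetLastDemo {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("alpha", "beta", "gamma");
        // Non-empty iterable: the last element is returned and the default is ignored.
        System.out.println(Iterables.getLast(names, "none"));          // gamma

        // Empty iterable: the supplied default value is returned instead.
        List<String> empty = Collections.emptyList();
        System.out.println(Iterables.getLast(empty, "none"));          // none

        // The default may itself be null, in which case an empty iterable yields null.
        System.out.println(Iterables.getLast(empty, (String) null));   // null
    }
}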

Usage

From source file:org.apache.aurora.scheduler.storage.mem.MemJobUpdateStore.java

private static JobUpdateState synthesizeUpdateState(JobUpdateDetails update) {
    JobUpdateState state = new JobUpdateState();

    JobUpdateEvent firstEvent = Iterables.getFirst(update.getUpdateEvents(), null);
    if (firstEvent != null) {
        state.setCreatedTimestampMs(firstEvent.getTimestampMs());
    }

    JobUpdateEvent lastEvent = Iterables.getLast(update.getUpdateEvents(), null);
    if (lastEvent != null) {
        state.setStatus(lastEvent.getStatus());
        state.setLastModifiedTimestampMs(lastEvent.getTimestampMs());
    }

    JobInstanceUpdateEvent lastInstanceEvent = Iterables.getLast(update.getInstanceEvents(), null);
    if (lastInstanceEvent != null) {
        state.setLastModifiedTimestampMs(
                Longs.max(state.getLastModifiedTimestampMs(), lastInstanceEvent.getTimestampMs()));
    }

    return state;
}

From source file:org.eclipse.elk.tree.p3place.NodePlacer.java

/**
 * In this first postorder walk, every node of the tree is assigned a preliminary x-coordinate
 * (held in property PRELIM). In addition, internal nodes are given modifiers, which will be
 * used to move their offspring to the right (held in property MODIFIER).
 *
 * @param cN
 *            the root level of the tree
 * @param level
 *            the index of the passed level
 */
private void firstWalk(final TNode cN, final int level) {
    cN.setProperty(Properties.MODIFIER, 0d);
    TNode lS = cN.getProperty(Properties.LEFTSIBLING);

    if (cN.isLeaf()) {
        if (lS != null) {
            /**
             * Determine the preliminary x-coordinate based on: the preliminary x-coordinate of
             * the left sibling, the separation between sibling nodes, and the mean size of the left
             * sibling and current node.
             */
            double p = lS.getProperty(Properties.PRELIM) + spacing + meanNodeWidth(lS, cN);
            cN.setProperty(Properties.PRELIM, p);
        } else {
            /** No sibling on the left to worry about. */
            cN.setProperty(Properties.PRELIM, 0d);
        }
    } else {
        /**
         * This Node is not a leaf, so call this procedure recursively for each of its
         * offspring.
         */
        for (TNode child : cN.getChildren()) {
            firstWalk(child, level + 1);
        }

        /**
         * Set the prelim and modifier for this node by determining the midpoint of its
         * offspring and the mean node size of the node and its left sibling.
         */
        TNode lM = Iterables.getFirst(cN.getChildren(), null);
        TNode rM = Iterables.getLast(cN.getChildren(), null);
        double midPoint = (rM.getProperty(Properties.PRELIM) + lM.getProperty(Properties.PRELIM)) / 2f;

        if (lS != null) {
            /** This node has a left sibling, so its offspring must be shifted to the right. */
            double p = lS.getProperty(Properties.PRELIM) + spacing + meanNodeWidth(lS, cN);
            cN.setProperty(Properties.PRELIM, p);
            cN.setProperty(Properties.MODIFIER, cN.getProperty(Properties.PRELIM) - midPoint);
            /** Shift the offspring of this node to the right. */
            apportion(cN, level);
        } else {
            /** No sibling on the left to worry about. */
            cN.setProperty(Properties.PRELIM, midPoint);
        }
    }
}

From source file:org.apache.metron.indexing.dao.InMemoryDao.java

private static boolean isMatch(String query, Map<String, Object> doc) {
    if (query == null) {
        return false;
    }
    if (query.equals("*")) {
        return true;
    }
    if (query.contains(":")) {
        Iterable<String> splits = Splitter.on(":").split(query.trim());
        String field = Iterables.getFirst(splits, "");
        String val = Iterables.getLast(splits, "");

        // Immediately quit if there's no value to find
        if (val == null) {
            return false;
        }

        // Check if we're looking into a nested field.  The '|' is arbitrarily chosen.
        String nestingField = null;
        if (field.contains("|")) {
            Iterable<String> fieldSplits = Splitter.on('|').split(field);
            nestingField = Iterables.getFirst(fieldSplits, null);
            field = Iterables.getLast(fieldSplits, null);
        }
        if (nestingField == null) {
            // Just grab directly
            Object o = doc.get(field);
            return val.equals(o);
        } else {
            // We need to look into a nested field for the value
            @SuppressWarnings("unchecked")
            List<Map<String, Object>> nestedList = (List<Map<String, Object>>) doc.get(nestingField);
            if (nestedList == null) {
                return false;
            } else {
                for (Map<String, Object> nestedEntry : nestedList) {
                    if (val.equals(nestedEntry.get(field))) {
                        return true;
                    }
                }
            }
        }
    }
    return false;
}
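
A side note on this pattern (an observation, not part of the Metron source): because getLast returns only the final token of the split, a value that itself contains ':' would be truncated. When that matters, a hypothetical variant using Splitter.limit(2) keeps everything after the first delimiter together:

Iterable<String> splits = Splitter.on(':').limit(2).split("url:http://example.com");
String field = Iterables.getFirst(splits, "");  // "url"
String val = Iterables.getLast(splits, "");     // "http://example.com"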

From source file:org.eclipse.elk.alg.mrtree.p3place.NodePlacer.java

/**
 * In this first postorder walk, every node of the tree is assigned a preliminary x-coordinate
 * (held in property PRELIM). In addition, internal nodes are given modifiers, which will be
 * used to move their offspring to the right (held in property MODIFIER).
 *
 * @param cN
 *            the root level of the tree
 * @param level
 *            the index of the passed level
 */
private void firstWalk(final TNode cN, final int level) {
    cN.setProperty(InternalProperties.MODIFIER, 0d);
    TNode lS = cN.getProperty(InternalProperties.LEFTSIBLING);

    if (cN.isLeaf()) {
        if (lS != null) {
            /**
             * Determine the preliminary x-coordinate based on: the preliminary x-coordinate of
             * the left sibling, the separation between sibling nodes, and the mean size of the left
             * sibling and current node.
             */
            double p = lS.getProperty(InternalProperties.PRELIM) + spacing + meanNodeWidth(lS, cN);
            cN.setProperty(InternalProperties.PRELIM, p);
        } else {
            /** No sibling on the left to worry about. */
            cN.setProperty(InternalProperties.PRELIM, 0d);
        }
    } else {
        /**
         * This Node is not a leaf, so call this procedure recursively for each of its
         * offspring.
         */
        for (TNode child : cN.getChildren()) {
            firstWalk(child, level + 1);
        }

        /**
         * Set the prelim and modifier for this node by determining the midpoint of its
         * offspring and the mean node size of the node and its left sibling.
         */
        TNode lM = Iterables.getFirst(cN.getChildren(), null);
        TNode rM = Iterables.getLast(cN.getChildren(), null);
        double midPoint = (rM.getProperty(InternalProperties.PRELIM)
                + lM.getProperty(InternalProperties.PRELIM)) / 2f;

        if (lS != null) {
            /** This node has a left sibling, so its offspring must be shifted to the right. */
            double p = lS.getProperty(InternalProperties.PRELIM) + spacing + meanNodeWidth(lS, cN);
            cN.setProperty(InternalProperties.PRELIM, p);
            cN.setProperty(InternalProperties.MODIFIER, cN.getProperty(InternalProperties.PRELIM) - midPoint);
            /** Shift the offspring of this node to the right. */
            apportion(cN, level);
        } else {
            /** No sibling on the left to worry about. */
            cN.setProperty(InternalProperties.PRELIM, midPoint);
        }
    }
}

From source file:org.apache.mailbox.tools.indexer.ReIndexerImpl.java

private ImpactingMessageEvent findMostRelevant(Collection<ImpactingMessageEvent> messageEvents) {
    for (ImpactingMessageEvent impactingMessageEvent : messageEvents) {
        if (impactingMessageEvent.getType().equals(ImpactingEventType.Deletion)) {
            return impactingMessageEvent;
        }
    }
    return Iterables.getLast(messageEvents, null);
}

From source file:org.onebusaway.nyc.vehicle_tracking.impl.inference.BlockStateSamplingStrategyImpl.java

@Override
public BlockStateObservation samplePriorScheduleState(BlockInstance blockInstance, Observation obs) {

    /*
     * Our initial block proposals will yield 0 d.a.b. in some cases. It could
     * be that there is no snapped position for a block, yet it isn't actually
     * deadheading-before; it could be deadheading-during. That is why we sample
     * schedule deviations around the current obs time when the obs time is
     * after the block's start.
     */
    final double currentTime = (obs.getTime() - blockInstance.getServiceDate()) / 1000;

    /*
     * Get the location for the current time, then sample a location
     * based on that time and the travel time to that location (for when
     * we're not anywhere nearby).
     */
    final int startSchedTime = Iterables.getFirst(blockInstance.getBlock().getStopTimes(), null).getStopTime()
            .getArrivalTime();
    final int endSchedTime = Iterables.getLast(blockInstance.getBlock().getStopTimes(), null).getStopTime()
            .getDepartureTime();

    final double timeToGetToCurrentTimeLoc;
    if (currentTime > startSchedTime && currentTime < endSchedTime && obs.getPreviousObservation() != null) {
        final ScheduledBlockLocation blockLocation = _scheduledBlockLocationService
                .getScheduledBlockLocationFromScheduledTime(blockInstance.getBlock(), (int) currentTime);

        /*
         * If the current time puts us in deadhead-during between trips, then
         * it's possible that the block location will be at the start
         * of the previous trip (potentially very far away), so we skip
         * these situations.
         */
        if (JourneyStateTransitionModel.isLocationOnATrip(blockLocation)) {
            final double impliedVelocity = obs.getDistanceMoved() / obs.getTimeDelta();
            timeToGetToCurrentTimeLoc = TurboButton.distance(blockLocation.getLocation(), obs.getLocation())
                    / impliedVelocity;
        } else {
            timeToGetToCurrentTimeLoc = 0d;
        }
    } else {
        timeToGetToCurrentTimeLoc = 0d;
    }

    /*
     * TODO Note that we're using the non-run-matching prior distribution.
     * Should we?
     */
    final StudentTDistribution schedDist = ScheduleLikelihood.getSchedDevNonRunDist();
    final double schedTimeError = 60d * schedDist.sample(ParticleFactoryImpl.getLocalRng());
    double newSchedTime = currentTime + timeToGetToCurrentTimeLoc
            + Math.max(schedTimeError, -timeToGetToCurrentTimeLoc / 3d);

    if (Double.isInfinite(newSchedTime))
        return null;

    BlockStateObservation schedState;
    if (newSchedTime < startSchedTime) {
        schedState = _blocksFromObservationService.getBlockStateObservationFromDist(obs, blockInstance, 0.0);
    } else if (endSchedTime < newSchedTime) {
        return null;
    } else {
        /**
         * Important note about prior distribution sampling: to reduce/remove
         * confusion caused by deadhead states having no pre-defined trajectory,
         * we simply don't allow prior sampling of deadhead states for certain
         * situations.
         */
        schedState = _blocksFromObservationService.getBlockStateObservationFromTime(obs, blockInstance,
                (int) newSchedTime);
        if (!schedState.isOnTrip()) {
            return null;
        }
    }

    return orientationCheck(null, schedState, obs);
}

From source file:am.ik.categolj3.api.git.GitStore.java

Pair<Author, Author> getAuthor(Path path) {
    Path p = gitProperties.getBaseDir().toPath().relativize(path);
    try {
        Iterable<RevCommit> commits = git.log().addPath(p.toString().replace("\\", "/")).call();
        RevCommit updated = Iterables.getFirst(commits, null);
        RevCommit created = Iterables.getLast(commits, updated);
        return new Pair<>(author(created), author(updated));
    } catch (GitAPIException e) {
        throw new IllegalStateException(e);
    }
}
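
An observation on this snippet (not stated in the GitStore source): a git log conventionally iterates commits newest-first, so getFirst yields the most recent commit and getLast the oldest. Passing updated as the default to getLast means an empty log leaves both created and updated null without any explicit check, while a single-commit log makes them the same commit. A hypothetical illustration of the empty-log case:

Iterable<RevCommit> noCommits = Collections.emptyList();
RevCommit newest = Iterables.getFirst(noCommits, null);   // null
RevCommit oldest = Iterables.getLast(noCommits, newest);  // falls back to newest, i.e. null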

From source file:com.imaginarycode.minecraft.redisbungee.util.UUIDTranslator.java

public final String getNameFromUuid(@NonNull UUID player, boolean expensiveLookups) {
    // If the player is online, give them their UUID.
    // Remember, local data > remote data.
    if (ProxyServer.getInstance().getPlayer(player) != null)
        return ProxyServer.getInstance().getPlayer(player).getName();

    // Check if it exists in the map
    CachedUUIDEntry cachedUUIDEntry = uuidToNameMap.get(player);
    if (cachedUUIDEntry != null) {
        if (!cachedUUIDEntry.expired())
            return cachedUUIDEntry.getName();
        else
            uuidToNameMap.remove(player);
    }

    // Okay, it wasn't locally cached. Let's try Redis.
    try (Jedis jedis = plugin.getPool().getResource()) {
        String stored = jedis.hget("uuid-cache", player.toString());
        if (stored != null) {
            // Found an entry value. Deserialize it.
            CachedUUIDEntry entry = RedisBungee.getGson().fromJson(stored, CachedUUIDEntry.class);

            // Check for expiry:
            if (entry.expired()) {
                jedis.hdel("uuid-cache", player.toString());
                // Doesn't hurt to remove the named entry as well.
                // TODO: Since UUIDs are fixed, we could look up the name and see if the UUID matches.
                jedis.hdel("uuid-cache", entry.getName());
            } else {
                nameToUuidMap.put(entry.getName().toLowerCase(), entry);
                uuidToNameMap.put(player, entry);
                return entry.getName();
            }
        }

        if (!expensiveLookups || !ProxyServer.getInstance().getConfig().isOnlineMode())
            return null;

        // That didn't work. Let's ask Mojang. This call may fail, because Mojang is insane.
        String name;
        try {
            List<String> nameHist = NameFetcher.nameHistoryFromUuid(player);
            name = Iterables.getLast(nameHist, null);
        } catch (Exception e) {
            plugin.getLogger().log(Level.SEVERE, "Unable to fetch name from Mojang for " + player, e);
            return null;
        }

        if (name != null) {
            persistInfo(name, player, jedis);
            return name;
        }

        return null;
    } catch (JedisException e) {
        plugin.getLogger().log(Level.SEVERE, "Unable to fetch name for " + player, e);
        return null;
    }
}

From source file:org.kalypso.ogc.sensor.timeseries.base.CacheTimeSeriesVisitor.java

@Override
public DateRange getDateRange() {
    final Set<Date> keys = m_values.keySet();
    if (keys.isEmpty())
        return null;

    final Date from = Iterables.getFirst(keys, null);
    final Date to = Iterables.getLast(keys, null);

    return new DateRange(from, to);
}
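
This only yields a sensible range if m_values iterates its keys in chronological order (for example, a sorted map); getFirst and getLast simply take the first and last elements of the iteration. A standalone sketch of the same idea, assuming a TreeMap keyed by date and the DateRange type used above:

NavigableMap<Date, Double> values = new TreeMap<>();
values.put(new Date(1000L), 1.0);
values.put(new Date(2000L), 2.0);

Date from = Iterables.getFirst(values.keySet(), null); // earliest key
Date to = Iterables.getLast(values.keySet(), null);    // latest key
DateRange range = new DateRange(from, to);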

From source file:com.cinchapi.concourse.server.storage.temp.Limbo.java

/**
 * Return a time series that contains the values stored for {@code key} in
 * {@code record} at each modification timestamp between {@code start}
 * (inclusive) and {@code end} (exclusive).
 *
 * @param key the field name
 * @param record the record id
 * @param start the start timestamp (inclusive)
 * @param end the end timestamp (exclusive)
 * @param context the prior context
 * @return a {@link Map mapping} from modification timestamp to a non-empty
 *         {@link Set} of values that were contained at that timestamp
 */
public Map<Long, Set<TObject>> chronologize(String key, long record, long start, long end,
        Map<Long, Set<TObject>> context) {
    Set<TObject> snapshot = Iterables.getLast(context.values(), Sets.<TObject>newLinkedHashSet());
    if (snapshot.isEmpty() && !context.isEmpty()) {
        // CON-474: Empty set is placed in the context if it was the last
        // snapshot known to the database
        context.remove(Time.NONE);
    }
    for (Iterator<Write> it = iterator(); it.hasNext();) {
        Write write = it.next();
        long timestamp = write.getVersion();
        if (timestamp >= end) {
            break;
        } else {
            Text writeKey = write.getKey();
            long writeRecord = write.getRecord().longValue();
            Action action = write.getType();
            if (writeKey.toString().equals(key) && writeRecord == record) {
                snapshot = Sets.newLinkedHashSet(snapshot);
                Value writeValue = write.getValue();
                if (action == Action.ADD) {
                    snapshot.add(writeValue.getTObject());
                } else if (action == Action.REMOVE) {
                    snapshot.remove(writeValue.getTObject());
                }
                if (timestamp >= start && !snapshot.isEmpty()) {
                    context.put(timestamp, snapshot);
                }
            }
        }
    }
    return context;
}
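
The call worth highlighting for this page is the first line of the method: getLast with a non-null default seeds the rolling snapshot from the most recent set in the prior context, or from a fresh empty set when there is no prior context. The same pattern in a minimal, hypothetical form:

Map<Long, Set<String>> context = new LinkedHashMap<>();
context.put(1L, ImmutableSet.of("a"));
context.put(2L, ImmutableSet.of("a", "b"));

// Most recent snapshot if the context has one, otherwise a fresh empty set.
Set<String> snapshot = Iterables.getLast(context.values(), Sets.<String>newLinkedHashSet());
// snapshot is now ["a", "b"]; with an empty context it would be the new empty LinkedHashSet.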