Usage examples for java.util.TreeMap.headMap(K toKey, boolean inclusive)
public NavigableMap<K, V> headMap(K toKey, boolean inclusive)
From source file:Main.java
public static void main(String[] args) { TreeMap<Integer, String> treemap = new TreeMap<Integer, String>(); TreeMap<Integer, String> treemapheadincl = new TreeMap<Integer, String>(); // populating tree map treemap.put(2, "two"); treemap.put(1, "one"); treemap.put(3, "three"); treemap.put(6, "six"); treemap.put(5, "from java2s.com"); // getting head map inclusive 3 treemapheadincl = treemap.headMap(3, true); System.out.println("Checking values of the map"); System.out.println("Value is: " + treemapheadincl); }
From source file:Main.java
public static void main(String[] a) { TreeMap<String, String> map = new TreeMap<String, String>(); map.put("key1", "value1"); map.put("key2", "value2"); map.put("key3", "value3"); if (!map.isEmpty()) { String last = map.lastKey(); boolean first = true; do {// www . ja va 2 s . c o m if (!first) { System.out.print(", "); } System.out.print(last); last = map.headMap(last, true).lastKey(); first = false; } while (last != map.firstKey()); System.out.println(); } }
From source file:org.orekit.models.earth.GeoMagneticFieldFactory.java
/** Gets a geomagnetic field model for the given year. In case the specified
 * year does not match an existing model epoch, the resulting field is
 * generated by either time-transforming an existing model using its secular
 * variation coefficients, or by linear interpolating two existing models.
 * @param type the type of the field (e.g. WMM or IGRF)
 * @param models all loaded field models, sorted by their epoch
 * @param year the epoch of the resulting field model
 * @return a {@link GeoMagneticField} model for the given year
 * @throws OrekitException if the specified year is out of range of the
 *         available models
 */
private static GeoMagneticField getModel(final FieldModel type,
                                         final TreeMap<Integer, GeoMagneticField> models,
                                         final double year)
    throws OrekitException {

    // models are keyed by epoch scaled by 100 and truncated to an int
    final int epochKey = (int) (year * 100d);

    // all models whose epoch is at or before the requested year
    final SortedMap<Integer, GeoMagneticField> modelsUpToYear = models.headMap(epochKey, true);
    if (modelsUpToYear.isEmpty()) {
        // no model old enough to cover the requested year
        throw new OrekitException(OrekitMessages.NON_EXISTENT_GEOMAGNETIC_MODEL, type.name(), year);
    }

    // start from the most recent model not newer than the requested year
    GeoMagneticField field = models.get(modelsUpToYear.lastKey());
    if (field.getEpoch() < year) {
        if (field.supportsTimeTransform()) {
            // extrapolate forward using the model's secular variation coefficients
            field = field.transformModel(year);
        } else {
            // interpolate between this model and the next one after the year
            final SortedMap<Integer, GeoMagneticField> modelsAfterYear = models.tailMap(epochKey, false);
            if (modelsAfterYear.isEmpty()) {
                throw new OrekitException(OrekitMessages.NON_EXISTENT_GEOMAGNETIC_MODEL, type.name(), year);
            }
            final GeoMagneticField upperModel = models.get(modelsAfterYear.firstKey());
            if (upperModel != field) {
                field = field.transformModel(upperModel, year);
            }
        }
    }
    return field;
}
From source file:org.apache.ambari.server.controller.metrics.timeline.cache.TimelineMetricCacheEntryFactory.java
private void updateExistingMetricValues(TimelineMetrics existingMetrics, Long requestedStartTime, Long requestedEndTime, boolean removeAll) { for (TimelineMetric existingMetric : existingMetrics.getMetrics()) { if (removeAll) { existingMetric.setMetricValues(new TreeMap<Long, Double>()); } else {/* w w w . ja va 2 s.co m*/ TreeMap<Long, Double> existingMetricValues = existingMetric.getMetricValues(); LOG.trace( "Existing metric: " + existingMetric.getMetricName() + " # " + existingMetricValues.size()); // Retain only the values that are within the [requestStartTime, requestedEndTime] window existingMetricValues.headMap(requestedStartTime, false).clear(); existingMetricValues.tailMap(requestedEndTime, false).clear(); } } }
From source file:org.jamocha.dn.compiler.pathblocks.PathBlocks.java
/**
 * Converts the computed block set into executable path rules.
 *
 * First, blocks containing an existential closure filter instance are moved into the
 * bucket of the largest characteristic number used by their existential proxies, so that
 * existential parts are constructed late enough. Then the shared filter network is built
 * block by block, and finally one {@code PathRule} is emitted per original rule.
 *
 * @param rules          all rules (and existential proxies, tagged via {@code Either.right})
 * @param resultBlockSet the block set produced by the path-blocks optimization
 * @return the list of constructed path rules
 */
protected static List<PathRule> createOutput(final List<Either<Rule, ExistentialProxy>> rules,
        final PathBlockSet resultBlockSet) {
    // characteristic number of a block: flat filter instances per rule/proxy
    // (integer division -- NOTE(review): presumably an intentional coarse measure)
    final Function<? super Block, ? extends Integer> characteristicNumber = block -> block
            .getFlatFilterInstances().size() / block.getRulesOrProxies().size();
    // bucket the blocks by characteristic number, smallest first
    final TreeMap<Integer, CursorableLinkedList<Block>> blockMap = resultBlockSet.getBlocks().stream()
            .collect(groupingBy(characteristicNumber, TreeMap::new, toCollection(CursorableLinkedList::new)));
    // iterate over all the filter proxies ever used
    for (final FilterProxy filterProxy : FilterProxy.getFilterProxies()) {
        final Set<ExistentialProxy> existentialProxies = filterProxy.getProxies();
        // determine the largest characteristic number of the blocks containing filter instances
        // of one of the existential proxies (choice is arbitrary, since the filters and the
        // conflicts are identical if they belong to the same filter).
        final OptionalInt optMax = resultBlockSet.getRuleInstanceToBlocks()
                .computeIfAbsent(Either.right(existentialProxies.iterator().next()), newHashSet()).stream()
                .mapToInt(composeToInt(characteristicNumber, Integer::intValue)).max();
        if (!optMax.isPresent())
            continue;
        final int eCN = optMax.getAsInt();
        // get the list to append the blocks using the existential closure filter INSTANCE to
        final CursorableLinkedList<Block> targetList = blockMap.get(eCN);
        // for every existential part
        for (final ExistentialProxy existentialProxy : existentialProxies) {
            final FilterInstance exClosure = existentialProxy.getExistentialClosure();
            // create a list storing the blocks to move
            final List<Block> toMove = new ArrayList<>();
            // only buckets with characteristic number <= eCN can need moving
            for (final CursorableLinkedList<Block> blockList : blockMap.headMap(eCN, true).values()) {
                // iterate over the blocks in the current list
                for (final ListIterator<Block> iterator = blockList.listIterator(); iterator.hasNext();) {
                    final Block current = iterator.next();
                    // if the current block uses the current existential closure filter
                    // INSTANCE, it has to be moved
                    if (current.getFlatFilterInstances().contains(exClosure)) {
                        iterator.remove();
                        toMove.add(current);
                    }
                }
            }
            // append the blocks to be moved (they were only removed so far)
            targetList.addAll(toMove);
        }
    }
    // filter instances whose network nodes have already been built
    final Set<FilterInstance> constructedFIs = new HashSet<>();
    // per rule/proxy: which filter instances were joined together into one component
    final Map<Either<Rule, ExistentialProxy>, Map<FilterInstance, Set<FilterInstance>>> ruleToJoinedWith = new HashMap<>();
    // maps a joined set of filter instances to the shared list representing it
    final Map<Set<FilterInstance>, PathFilterList> joinedWithToComponent = new HashMap<>();
    // at this point, the network can be constructed
    for (final CursorableLinkedList<Block> blockList : blockMap.values()) {
        for (final Block block : blockList) {
            final List<Either<Rule, ExistentialProxy>> blockRules = Lists
                    .newArrayList(block.getRulesOrProxies());
            final Set<List<FilterInstance>> filterInstanceColumns = Block
                    .getFilterInstanceColumns(block.getFilters(), block.getRuleToFilterToRow(), blockRules);
            // since we are considering blocks, it is either the case that all filter
            // instances of the column have been constructed or none of them have
            final PathSharedListWrapper sharedListWrapper = new PathSharedListWrapper(blockRules.size());
            // one shared sibling list per rule of the block, keyed by the rule
            final Map<Either<Rule, ExistentialProxy>, PathSharedList> ruleToSharedList = IntStream
                    .range(0, blockRules.size()).boxed()
                    .collect(toMap(blockRules::get, sharedListWrapper.getSharedSiblings()::get));
            final List<List<FilterInstance>> columnsToConstruct, columnsAlreadyConstructed;
            {
                // a column is "already constructed" if any of its instances was built before
                final Map<Boolean, List<List<FilterInstance>>> partition = filterInstanceColumns.stream()
                        .collect(partitioningBy(column -> Collections.disjoint(column, constructedFIs)));
                columnsAlreadyConstructed = partition.get(Boolean.FALSE);
                columnsToConstruct = partition.get(Boolean.TRUE);
            }
            if (!columnsAlreadyConstructed.isEmpty()) {
                // re-use the previously built components for the constructed columns
                final Map<PathSharedList, LinkedHashSet<PathFilterList>> sharedPart = new HashMap<>();
                for (final List<FilterInstance> column : columnsAlreadyConstructed) {
                    for (final FilterInstance fi : column) {
                        sharedPart
                                .computeIfAbsent(ruleToSharedList.get(fi.getRuleOrProxy()), newLinkedHashSet())
                                .add(joinedWithToComponent
                                        .get(ruleToJoinedWith.get(fi.getRuleOrProxy()).get(fi)));
                    }
                }
                sharedListWrapper.addSharedColumns(sharedPart);
            }
            // build the remaining columns, converting each filter instance
            for (final List<FilterInstance> column : columnsToConstruct) {
                sharedListWrapper.addSharedColumn(column.stream().collect(
                        toMap(fi -> ruleToSharedList.get(fi.getRuleOrProxy()), FilterInstance::convert)));
            }
            constructedFIs.addAll(block.getFlatFilterInstances());
            // record, per rule, which instances are now joined and which component holds them
            for (final Entry<Either<Rule, ExistentialProxy>, Map<Filter, FilterInstancesSideBySide>> entry : block
                    .getRuleToFilterToRow().entrySet()) {
                final Either<Rule, ExistentialProxy> rule = entry.getKey();
                final Set<FilterInstance> joined = entry.getValue().values().stream()
                        .flatMap(sbs -> sbs.getInstances().stream()).collect(toSet());
                final Map<FilterInstance, Set<FilterInstance>> joinedWithMapForThisRule = ruleToJoinedWith
                        .computeIfAbsent(rule, newHashMap());
                joined.forEach(fi -> joinedWithMapForThisRule.put(fi, joined));
                joinedWithToComponent.put(joined, ruleToSharedList.get(rule));
            }
        }
    }
    // finally, emit one PathRule per original (non-proxy) rule
    final List<PathRule> pathRules = new ArrayList<>();
    for (final Either<Rule, ExistentialProxy> either : rules) {
        if (either.isRight()) {
            // existential proxies do not become rules of their own
            continue;
        }
        // gather the components of the rule itself and of all its existential proxies
        final List<PathFilterList> pathFilterLists = Stream
                .concat(either.left().get().existentialProxies.values().stream().map(p -> Either.right(p)),
                        Stream.of(either))
                .flatMap(e -> ruleToJoinedWith.getOrDefault(e, Collections.emptyMap()).values().stream()
                        .distinct())
                .map(joinedWithToComponent::get).collect(toList());
        pathRules.add(either.left().get().getOriginal().toPathRule(PathFilterList.toSimpleList(pathFilterLists),
                pathFilterLists.size() > 1 ? InitialFactPathsFinder.gather(pathFilterLists)
                        : Collections.emptySet()));
    }
    return pathRules;
}