Example usage for java.util SortedMap entrySet

List of usage examples for java.util SortedMap entrySet

Introduction

On this page you can find usage examples for java.util SortedMap entrySet.

Prototype

Set<Map.Entry<K, V>> entrySet();

Document

Returns a Set view of the mappings contained in this map.
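
As a minimal, self-contained sketch (not taken from the projects listed below), the snippet here iterates a TreeMap, the standard SortedMap implementation, through entrySet(); because the map is sorted, the entries are visited in ascending key order:

import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

public class SortedMapEntrySetExample {
    public static void main(String[] args) {
        // TreeMap keeps its keys in natural (ascending) order.
        SortedMap<String, Integer> wordCounts = new TreeMap<String, Integer>();
        wordCounts.put("banana", 2);
        wordCounts.put("apple", 5);
        wordCounts.put("cherry", 1);

        // entrySet() returns a Set view of the mappings; iterating it here
        // prints apple, banana, cherry in that order.
        for (Map.Entry<String, Integer> entry : wordCounts.entrySet()) {
            System.out.println(entry.getKey() + " = " + entry.getValue());
        }
    }
}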

Usage

From source file:org.optaplanner.benchmark.impl.result.PlannerBenchmarkResult.java

private List<List<SolverBenchmarkResult>> createSameRankingListList(BenchmarkReport benchmarkReport,
        List<SolverBenchmarkResult> rankableSolverBenchmarkResultList) {
    List<List<SolverBenchmarkResult>> sameRankingListList = new ArrayList<List<SolverBenchmarkResult>>(
            rankableSolverBenchmarkResultList.size());
    if (benchmarkReport.getSolverRankingComparator() != null) {
        Comparator<SolverBenchmarkResult> comparator = Collections
                .reverseOrder(benchmarkReport.getSolverRankingComparator());
        Collections.sort(rankableSolverBenchmarkResultList, comparator);
        List<SolverBenchmarkResult> sameRankingList = null;
        SolverBenchmarkResult previousSolverBenchmarkResult = null;
        for (SolverBenchmarkResult solverBenchmarkResult : rankableSolverBenchmarkResultList) {
            if (previousSolverBenchmarkResult == null
                    || comparator.compare(previousSolverBenchmarkResult, solverBenchmarkResult) != 0) {
                // New rank
                sameRankingList = new ArrayList<SolverBenchmarkResult>();
                sameRankingListList.add(sameRankingList);
            }
            sameRankingList.add(solverBenchmarkResult);
            previousSolverBenchmarkResult = solverBenchmarkResult;
        }
    } else if (benchmarkReport.getSolverRankingWeightFactory() != null) {
        SortedMap<Comparable, List<SolverBenchmarkResult>> rankedMap = new TreeMap<Comparable, List<SolverBenchmarkResult>>(
                Collections.reverseOrder());
        for (SolverBenchmarkResult solverBenchmarkResult : rankableSolverBenchmarkResultList) {
            Comparable rankingWeight = benchmarkReport.getSolverRankingWeightFactory()
                    .createRankingWeight(rankableSolverBenchmarkResultList, solverBenchmarkResult);
            List<SolverBenchmarkResult> sameRankingList = rankedMap.get(rankingWeight);
            if (sameRankingList == null) {
                sameRankingList = new ArrayList<SolverBenchmarkResult>();
                rankedMap.put(rankingWeight, sameRankingList);
            }
            sameRankingList.add(solverBenchmarkResult);
        }
        for (Map.Entry<Comparable, List<SolverBenchmarkResult>> entry : rankedMap.entrySet()) {
            sameRankingListList.add(entry.getValue());
        }
    } else {
        throw new IllegalStateException("Ranking is impossible"
                + " because solverRankingComparator and solverRankingWeightFactory are null.");
    }
    return sameRankingListList;
}

From source file:jenkins.scm.impl.subversion.SubversionSCMSource.java

void fetch(@NonNull TaskListener listener, @NonNull final SVNRepositoryView repository, long rev,
        @NonNull final String repoPath, @NonNull SortedSet<List<String>> paths, @NonNull List<String> prefix,
        @NonNull List<String> realPath, @NonNull SortedSet<List<String>> excludedPaths,
        @CheckForNull SCMSourceCriteria branchCriteria, @NonNull SCMHeadObserver observer)
        throws IOException, SVNException {
    String svnPath = SVNPathUtil.append(repoPath, StringUtils.join(realPath, '/'));
    assert prefix.size() == realPath.size();
    assert wildcardStartsWith(realPath, prefix);
    SortedMap<List<String>, SortedSet<List<String>>> includePaths = groupPaths(paths, prefix);
    listener.getLogger().println("Checking directory " + svnPath + (rev > -1 ? "@" + rev : "@HEAD"));
    SVNRepositoryView.NodeEntry node = repository.getNode(svnPath, rev);
    if (!SVNNodeKind.DIR.equals(node.getType()) || node.getChildren() == null) {
        return;
    }
    for (Map.Entry<List<String>, SortedSet<List<String>>> entry : includePaths.entrySet()) {
        for (List<String> path : entry.getValue()) {
            String name = path.get(prefix.size());
            SVNRepositoryView.ChildEntry[] children = node.getChildren().clone();
            Arrays.sort(children, new Comparator<SVNRepositoryView.ChildEntry>() {
                public int compare(SVNRepositoryView.ChildEntry o1, SVNRepositoryView.ChildEntry o2) {
                    long diff = o2.getRevision() - o1.getRevision();
                    return diff < 0 ? -1 : diff > 0 ? 1 : 0;
                }
            });
            for (final SVNRepositoryView.ChildEntry svnEntry : children) {
                if (svnEntry.getType() == SVNNodeKind.DIR && isMatch(svnEntry.getName(), name)) {
                    List<String> childPrefix = copyAndAppend(prefix, name);
                    List<String> childRealPath = copyAndAppend(realPath, svnEntry.getName());
                    if (wildcardStartsWith(childRealPath, excludedPaths)) {
                        continue;
                    }
                    if (path.equals(childPrefix)) {
                        final String childPath = StringUtils.join(childRealPath, '/');
                        final String candidateRootPath = SVNPathUtil.append(repoPath, childPath);
                        final long candidateRevision = svnEntry.getRevision();
                        final long lastModified = svnEntry.getLastModified();
                        listener.getLogger().println(
                                "Checking candidate branch " + candidateRootPath + "@" + candidateRevision);
                        if (branchCriteria == null || branchCriteria.isHead(new SCMSourceCriteria.Probe() {
                            @Override
                            public String name() {
                                return childPath;
                            }

                            @Override
                            public long lastModified() {
                                return lastModified;
                            }

                            @Override
                            public boolean exists(@NonNull String path) throws IOException {
                                try {
                                    return repository.checkPath(SVNPathUtil.append(candidateRootPath, path),
                                            candidateRevision) != SVNNodeKind.NONE;
                                } catch (SVNException e) {
                                    throw new IOException(e);
                                }
                            }
                        }, listener)) {
                            listener.getLogger().println("Met criteria");
                            SCMHead head = new SCMHead(childPath);
                            observer.observe(head, new SCMRevisionImpl(head, svnEntry.getRevision()));
                            if (!observer.isObserving()) {
                                return;
                            }
                        } else {
                            listener.getLogger().println("Does not meet criteria");
                        }
                    } else {
                        fetch(listener, repository, svnEntry.getRevision(), repoPath, paths, childPrefix,
                                childRealPath, excludedPaths, branchCriteria, observer);
                    }
                }
            }
        }
    }
}

From source file:org.calrissian.accumulorecipes.commons.hadoop.GroupedKeyRangePartitioner.java

private synchronized Text[] getCutPoints() throws IOException {
    if (cutPointArray == null) {

        Path[] cf = DistributedCache.getLocalCacheFiles(conf);
        if (cf != null) {
            Map<String, String> curFilesAndGroups = getCurFilesAndGroups();
            SortedMap<String, SortedSet<String>> cutPointMap = new TreeMap<String, SortedSet<String>>();
            for (Path path : cf) {
                String group = null;
                for (Map.Entry<String, String> groupSplits : curFilesAndGroups.entrySet()) {
                    if (path.toString().endsWith(groupSplits.getKey()))
                        group = groupSplits.getValue();
                }

                if (group != null) {
                    Scanner in = new Scanner(new BufferedReader(new FileReader(path.toString())));

                    try {
                        while (in.hasNextLine()) {
                            String split = new String(Base64.decodeBase64(in.nextLine().getBytes()));

                            SortedSet<String> splits = cutPointMap.get(group);
                            if (splits == null) {
                                splits = new TreeSet<String>();
                                cutPointMap.put(group, splits);
                            }
                            // Register the decoded cut point under its group.
                            splits.add(split);
                        }

                        SortedSet<Text> treeSet = new TreeSet<Text>();
                        for (Map.Entry<String, SortedSet<String>> entry : cutPointMap.entrySet()) {
                            treeSet.add(new Text(entry.getKey() + NULL_BYTE + NULL_BYTE));

                            for (String string : entry.getValue())
                                treeSet.add(new Text(entry.getKey() + NULL_BYTE + string));

                            treeSet.add(new Text(entry.getKey() + NULL_BYTE + END_BYTE));
                        }

                        cutPointArray = treeSet.toArray(new Text[] {});
                    } finally {
                        in.close();
                    }

                    break;
                } else {
                    throw new FileNotFoundException(
                            "A file was not found in distribution cache files: " + path.toString());
                }
            }
        }
    }
    return cutPointArray;
}

From source file:org.apache.falcon.resource.proxy.ExtensionManagerProxy.java

private void submitEntities(String extensionName, String jobName, SortedMap<EntityType, List<Entity>> entityMap,
        InputStream configStream, HttpServletRequest request)
        throws FalconException, IOException, JAXBException {
    List<Entity> feeds = entityMap.get(EntityType.FEED);
    List<Entity> processes = entityMap.get(EntityType.PROCESS);
    validateFeeds(feeds);
    validateProcesses(processes);
    List<String> feedNames = new ArrayList<>();
    List<String> processNames = new ArrayList<>();

    ExtensionMetaStore metaStore = ExtensionStore.getMetaStore();
    byte[] configBytes = null;
    if (configStream != null) {
        configBytes = IOUtils.toByteArray(configStream);
    }
    for (Map.Entry<EntityType, List<Entity>> entry : entityMap.entrySet()) {
        for (final Entity entity : entry.getValue()) {
            if (entity.getEntityType().equals(EntityType.FEED)) {
                feedNames.add(entity.getName());
            } else {
                processNames.add(entity.getName());
            }
        }
    }
    metaStore.storeExtensionJob(jobName, extensionName, feedNames, processNames, configBytes);

    for (Map.Entry<EntityType, List<Entity>> entry : entityMap.entrySet()) {
        for (final Entity entity : entry.getValue()) {
            final HttpServletRequest bufferedRequest = getEntityStream(entity, entity.getEntityType(), request);
            final Set<String> colos = getApplicableColos(entity.getEntityType().toString(), entity);
            entityProxyUtil.proxySubmit(entity.getEntityType().toString(), bufferedRequest, entity, colos);
            if (!embeddedMode) {
                super.submit(bufferedRequest, entity.getEntityType().toString(), currentColo);
            }
        }
    }
}

From source file:jenkins.scm.impl.subversion.SubversionSCMSource.java

/**
 * Groups a set of path segments based on a supplied prefix.
 *
 * @param pathSegments the input path segments.
 * @param prefix       the prefix to group on.
 * @return a map, all keys will {@link #startsWith(java.util.List, java.util.List)} the input prefix and be longer
 *         than the input prefix, all values will {@link #startsWith(java.util.List,
 *         java.util.List)} their corresponding key.
 */
@NonNull
static SortedMap<List<String>, SortedSet<List<String>>> groupPaths(
        @NonNull SortedSet<List<String>> pathSegments, @NonNull List<String> prefix) {
    // ensure pre-condition is valid and ensure we are using a copy
    pathSegments = filterPaths(pathSegments, prefix);

    SortedMap<List<String>, SortedSet<List<String>>> result = new TreeMap<List<String>, SortedSet<List<String>>>(
            COMPARATOR);
    while (!pathSegments.isEmpty()) {
        List<String> longestPrefix = null;
        int longestIndex = -1;
        for (List<String> pathSegment : pathSegments) {
            if (longestPrefix == null) {
                longestPrefix = pathSegment;
                longestIndex = indexOfNextWildcard(pathSegment, prefix.size());

            } else {
                int index = indexOfNextWildcard(pathSegment, prefix.size());
                if (index > longestIndex) {
                    longestPrefix = pathSegment;
                    longestIndex = index;
                }
            }
        }
        assert longestPrefix != null;
        longestPrefix = new ArrayList<String>(longestPrefix.subList(0, longestIndex));
        SortedSet<List<String>> group = filterPaths(pathSegments, longestPrefix);
        result.put(longestPrefix, group);
        pathSegments.removeAll(group);
    }
    String optimization;
    while (null != (optimization = getOptimizationPoint(result.keySet(), prefix.size()))) {
        List<String> optimizedPrefix = copyAndAppend(prefix, optimization);
        SortedSet<List<String>> optimizedGroup = new TreeSet<List<String>>(COMPARATOR);
        for (Iterator<Map.Entry<List<String>, SortedSet<List<String>>>> iterator = result.entrySet()
                .iterator(); iterator.hasNext();) {
            Map.Entry<List<String>, SortedSet<List<String>>> entry = iterator.next();
            if (startsWith(entry.getKey(), optimizedPrefix)) {
                iterator.remove();
                optimizedGroup.addAll(entry.getValue());
            }
        }
        result.put(optimizedPrefix, optimizedGroup);
    }
    return result;
}

From source file:org.apache.hadoop.hbase.master.TestCatalogJanitor.java

/**
 * Make sure parent with specified end key gets cleaned up even if daughter is cleaned up before it.
 *
 * @param rootDir the test case name, used as the HBase testing utility root
 * @param lastEndKey the end key of the split parent
 * @throws IOException
 * @throws InterruptedException
 */
private void parentWithSpecifiedEndKeyCleanedEvenIfDaughterGoneFirst(final String rootDir,
        final byte[] lastEndKey) throws IOException, InterruptedException {
    HBaseTestingUtility htu = new HBaseTestingUtility();
    setRootDirAndCleanIt(htu, rootDir);
    Server server = new MockServer(htu);
    MasterServices services = new MockMasterServices(server);
    CatalogJanitor janitor = new CatalogJanitor(server, services);
    final HTableDescriptor htd = createHTableDescriptor();

    // Create regions: aaa->{lastEndKey}, aaa->ccc, aaa->bbb, bbb->ccc, etc.

    // Parent
    HRegionInfo parent = new HRegionInfo(htd.getTableName(), Bytes.toBytes("aaa"), lastEndKey);
    // Sleep a second else the encoded name on these regions comes out
    // same for all with same start key and made in same second.
    Thread.sleep(1001);

    // Daughter a
    HRegionInfo splita = new HRegionInfo(htd.getTableName(), Bytes.toBytes("aaa"), Bytes.toBytes("ccc"));
    Thread.sleep(1001);
    // Make daughters of daughter a; splitaa and splitab.
    HRegionInfo splitaa = new HRegionInfo(htd.getTableName(), Bytes.toBytes("aaa"), Bytes.toBytes("bbb"));
    HRegionInfo splitab = new HRegionInfo(htd.getTableName(), Bytes.toBytes("bbb"), Bytes.toBytes("ccc"));

    // Daughter b
    HRegionInfo splitb = new HRegionInfo(htd.getTableName(), Bytes.toBytes("ccc"), lastEndKey);
    Thread.sleep(1001);
    // Make daughters of daughter b; splitba and splitbb.
    HRegionInfo splitba = new HRegionInfo(htd.getTableName(), Bytes.toBytes("ccc"), Bytes.toBytes("ddd"));
    HRegionInfo splitbb = new HRegionInfo(htd.getTableName(), Bytes.toBytes("ddd"), lastEndKey);

    // First test that our Comparator works right up in CatalogJanitor.
    // Just for kicks.
    SortedMap<HRegionInfo, Result> regions = new TreeMap<HRegionInfo, Result>(
            new CatalogJanitor.SplitParentFirstComparator());
    // Now make sure that this regions map sorts as we expect it to.
    regions.put(parent, createResult(parent, splita, splitb));
    regions.put(splitb, createResult(splitb, splitba, splitbb));
    regions.put(splita, createResult(splita, splitaa, splitab));
    // Assert it's properly sorted.
    int index = 0;
    for (Map.Entry<HRegionInfo, Result> e : regions.entrySet()) {
        if (index == 0) {
            assertTrue(e.getKey().getEncodedName().equals(parent.getEncodedName()));
        } else if (index == 1) {
            assertTrue(e.getKey().getEncodedName().equals(splita.getEncodedName()));
        } else if (index == 2) {
            assertTrue(e.getKey().getEncodedName().equals(splitb.getEncodedName()));
        }
        index++;
    }

    // Now play around with the cleanParent function.  Create a ref from splita
    // up to the parent.
    Path splitaRef = createReferences(services, htd, parent, splita, Bytes.toBytes("ccc"), false);
    // Make sure actual super parent sticks around because splita has a ref.
    assertFalse(janitor.cleanParent(parent, regions.get(parent)));

    // splitba and splitbb do not have dirs in fs.  That means that if
    // we test splitb, it should get cleaned up.
    assertTrue(janitor.cleanParent(splitb, regions.get(splitb)));

    // Now remove ref from splita to parent... so parent can be let go and so
    // the daughter splita can be split (can't split if still references).
    // BUT make the timing such that the daughter gets cleaned up before we
    // can get a chance to let go of the parent.
    FileSystem fs = FileSystem.get(htu.getConfiguration());
    assertTrue(fs.delete(splitaRef, true));
    // Create the refs from daughters of splita.
    Path splitaaRef = createReferences(services, htd, splita, splitaa, Bytes.toBytes("bbb"), false);
    Path splitabRef = createReferences(services, htd, splita, splitab, Bytes.toBytes("bbb"), true);

    // Test splita.  It should stick around because of the references from splitab, etc.
    assertFalse(janitor.cleanParent(splita, regions.get(splita)));

    // Now clean up parent daughter first.  Remove references from its daughters.
    assertTrue(fs.delete(splitaaRef, true));
    assertTrue(fs.delete(splitabRef, true));
    assertTrue(janitor.cleanParent(splita, regions.get(splita)));

    // Super parent should get cleaned up now that both splita and splitb are gone.
    assertTrue(janitor.cleanParent(parent, regions.get(parent)));

    services.stop("test finished");
    janitor.join();
}

From source file:org.apache.maven.archetype.ui.generation.DefaultArchetypeSelectionQueryer.java

private Archetype selectVersion(Map<String, List<Archetype>> catalogs, String groupId, String artifactId)
        throws PrompterException {
    SortedMap<ArtifactVersion, Archetype> archetypeVersionsMap = new TreeMap<ArtifactVersion, Archetype>();

    for (Map.Entry<String, List<Archetype>> entry : catalogs.entrySet()) {
        for (Archetype archetype : entry.getValue()) {
            if (!groupId.equals(archetype.getGroupId()) || !artifactId.equals(archetype.getArtifactId())) {
                continue;
            }

            ArtifactVersion version = new DefaultArtifactVersion(archetype.getVersion());

            // don't override the first catalog containing a defined version of the artifact
            if (!archetypeVersionsMap.containsKey(version)) {
                archetypeVersionsMap.put(version, archetype);
            }
        }
    }

    if (archetypeVersionsMap.size() == 1) {
        return archetypeVersionsMap.values().iterator().next();
    }

    // let the user choose between available versions
    StringBuilder query = new StringBuilder("Choose " + groupId + ":" + artifactId + " version: \n");

    List<String> answers = new ArrayList<String>();
    Map<String, Archetype> answerMap = new HashMap<String, Archetype>();

    int counter = 1;
    String mapKey = null;

    for (Map.Entry<ArtifactVersion, Archetype> entry : archetypeVersionsMap.entrySet()) {
        ArtifactVersion version = entry.getKey();
        Archetype archetype = entry.getValue();

        mapKey = String.valueOf(counter);

        query.append(mapKey + ": " + version + "\n");

        answers.add(mapKey);

        answerMap.put(mapKey, archetype);

        counter++;
    }

    query.append("Choose a number: ");

    Archetype archetype = null;

    do {
        String answer = prompter.prompt(query.toString(), answers, mapKey);

        archetype = answerMap.get(answer);
    } while (archetype == null);

    return archetype;
}

From source file:com.cafbit.netlib.dns.DNSMessage.java

public String toString() {
    StringBuilder sb = new StringBuilder();

    // questions
    for (DNSQuestion q : questions) {
        sb.append("\nQuestion: " + q.toString() + "\n");
    }

    // group answers by name
    SortedMap<String, List<DNSAnswer>> answersByName = new TreeMap<String, List<DNSAnswer>>();

    for (DNSAnswer a : answers) {
        List<DNSAnswer> list;
        if (answersByName.containsKey(a.name)) {
            list = answersByName.get(a.name);
        } else {
            list = new LinkedList<DNSAnswer>();
            answersByName.put(a.name, list);
        }
        list.add(a);
    }

    for (DNSAnswer a : authorities) {
        List<DNSAnswer> list;
        if (answersByName.containsKey(a.name)) {
            list = answersByName.get(a.name);
        } else {
            list = new LinkedList<DNSAnswer>();
            answersByName.put(a.name, list);
        }
        list.add(a);
    }

    for (DNSAnswer a : addrecords) {
        List<DNSAnswer> list;
        if (answersByName.containsKey(a.name)) {
            list = answersByName.get(a.name);
        } else {
            list = new LinkedList<DNSAnswer>();
            answersByName.put(a.name, list);
        }
        list.add(a);
    }

    for (Map.Entry<String, List<DNSAnswer>> entry : answersByName.entrySet()) {
        sb.append(entry.getKey() + "\n");
        for (DNSAnswer a : entry.getValue()) {
            sb.append("  " + a.type.toString() + " " + a.getRdataString() + "\n");
        }
    }

    return sb.toString();
}