Example usage for java.util NavigableMap get

List of usage examples for java.util NavigableMap get

Introduction

On this page you can find usage examples for java.util NavigableMap get.

Prototype

V get(Object key);

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
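
Before the project examples below, here is a minimal standalone sketch (not taken from any of the listed projects) showing how get behaves for a present and an absent key:

import java.util.NavigableMap;
import java.util.TreeMap;

public class NavigableMapGetExample {
    public static void main(String[] args) {
        // TreeMap is the standard java.util implementation of NavigableMap
        NavigableMap<String, Integer> map = new TreeMap<>();
        map.put("a", 1);
        map.put("b", 2);

        System.out.println(map.get("a")); // prints 1
        System.out.println(map.get("c")); // prints null: no mapping for "c"
    }
}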

Usage

From source file:org.lilyproject.repository.impl.HBaseTypeManager.java

private List<RecordType> extractRecordType(SchemaId id, Long version, Result result)
        throws RecordTypeNotFoundException {
    NavigableMap<byte[], byte[]> nonVersionableColumnFamily = result.getFamilyMap(TypeCf.DATA.bytes);
    QName name = decodeName(nonVersionableColumnFamily.get(TypeColumn.RECORDTYPE_NAME.bytes));
    List<KeyValue> existingVersions = result.getColumn(TypeCf.DATA.bytes, TypeColumn.VERSION.bytes);
    Long existingMaxVersion = Bytes.toLong(result.getValue(TypeCf.DATA.bytes, TypeColumn.VERSION.bytes));

    if (version != null) {
        if (existingMaxVersion < version) {
            throw new RecordTypeNotFoundException(id, version);
        }
        RecordType recordType = newRecordType(id, name);
        recordType.setVersion(version);
        extractFieldTypeEntries(result, version, recordType);
        extractSupertypes(result, version, recordType);
        return Lists.newArrayList(recordType);
    } else {
        List<RecordType> recordTypes = Lists.newArrayList();
        for (KeyValue existingVersion : existingVersions) {
            long oneOfTheExistingVersions = Bytes.toLong(existingVersion.getValue());
            RecordType recordType = newRecordType(id, name);
            recordType.setVersion(oneOfTheExistingVersions);
            extractFieldTypeEntries(result, oneOfTheExistingVersions, recordType);
            extractSupertypes(result, oneOfTheExistingVersions, recordType);
            recordTypes.add(recordType);
        }
        return recordTypes;
    }
}

From source file:org.lilyproject.repository.impl.HBaseTypeManager.java

private void extractFieldTypeEntries(Result result, Long version, RecordType recordType) {
    if (version != null) {
        NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> allVersionsMap = result.getMap();
        NavigableMap<byte[], NavigableMap<Long, byte[]>> fieldTypeEntriesVersionsMap = allVersionsMap
                .get(TypeCf.FIELDTYPE_ENTRY.bytes);
        if (fieldTypeEntriesVersionsMap != null) {
            for (Entry<byte[], NavigableMap<Long, byte[]>> entry : fieldTypeEntriesVersionsMap.entrySet()) {
                SchemaId fieldTypeId = new SchemaIdImpl(entry.getKey());
                Entry<Long, byte[]> ceilingEntry = entry.getValue().ceilingEntry(version);
                if (ceilingEntry != null) {
                    FieldTypeEntry fieldTypeEntry = decodeFieldTypeEntry(ceilingEntry.getValue(), fieldTypeId);
                    if (fieldTypeEntry != null) {
                        recordType.addFieldTypeEntry(fieldTypeEntry);
                    }
                }
            }
        }
    } else {
        NavigableMap<byte[], byte[]> versionableMap = result.getFamilyMap(TypeCf.FIELDTYPE_ENTRY.bytes);
        if (versionableMap != null) {
            for (Entry<byte[], byte[]> entry : versionableMap.entrySet()) {
                SchemaId fieldTypeId = new SchemaIdImpl(entry.getKey());
                FieldTypeEntry fieldTypeEntry = decodeFieldTypeEntry(entry.getValue(), fieldTypeId);
                if (fieldTypeEntry != null) {
                    recordType.addFieldTypeEntry(fieldTypeEntry);
                }
            }
        }
    }
}

From source file:org.lilyproject.repository.impl.HBaseTypeManager.java

private void extractSupertypes(Result result, Long version, RecordType recordType) {
    if (version != null) {
        NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> allVersionsMap = result.getMap();
        NavigableMap<byte[], NavigableMap<Long, byte[]>> supertypeVersionsMap = allVersionsMap
                .get(TypeCf.SUPERTYPE.bytes);
        if (supertypeVersionsMap != null) {
            for (Entry<byte[], NavigableMap<Long, byte[]>> entry : supertypeVersionsMap.entrySet()) {
                SchemaId supertypeId = new SchemaIdImpl(entry.getKey());
                Entry<Long, byte[]> ceilingEntry = entry.getValue().ceilingEntry(version);
                if (ceilingEntry != null) {
                    if (!isDeletedField(ceilingEntry.getValue())) {
                        recordType.addSupertype(supertypeId, Bytes.toLong(ceilingEntry.getValue()));
                }
                }
            }
        }
    } else {
        NavigableMap<byte[], byte[]> supertypeMap = result.getFamilyMap(TypeCf.SUPERTYPE.bytes);
        if (supertypeMap != null) {
            for (Entry<byte[], byte[]> entry : supertypeMap.entrySet()) {
                if (!isDeletedField(entry.getValue())) {
                    recordType.addSupertype(new SchemaIdImpl(entry.getKey()), Bytes.toLong(entry.getValue()));
                }
            }
        }
    }
}

From source file:org.lilyproject.repository.impl.HBaseTypeManager.java

private FieldType extractFieldType(SchemaId id, Result result)
        throws RepositoryException, InterruptedException {
    NavigableMap<byte[], byte[]> nonVersionableColumnFamily = result.getFamilyMap(TypeCf.DATA.bytes);
    QName name = decodeName(nonVersionableColumnFamily.get(TypeColumn.FIELDTYPE_NAME.bytes));
    ValueType valueType = decodeValueType(nonVersionableColumnFamily.get(TypeColumn.FIELDTYPE_VALUETYPE.bytes));
    Scope scope = Scope
            .valueOf(Bytes.toString(nonVersionableColumnFamily.get(TypeColumn.FIELDTYPE_SCOPE.bytes)));
    return new FieldTypeImpl(id, valueType, name, scope);
}

From source file:org.lilyproject.tools.recordrowvisualizer.RecordRowVisualizer.java

@Override
public int run(CommandLine cmd) throws Exception {
    int result = super.run(cmd);
    if (result != 0) {
        return result;
    }

    String recordIdString = cmd.getOptionValue(recordIdOption.getOpt());
    if (recordIdString == null) {
        System.out.println("Specify record id with -" + recordIdOption.getOpt());
        return 1;
    }

    String tableName;
    if (cmd.hasOption(tableOption.getOpt())) {
        tableName = cmd.getOptionValue(tableOption.getOpt());
    } else {
        tableName = Table.RECORD.name;
    }

    IdGenerator idGenerator = new IdGeneratorImpl();
    RecordId recordId = idGenerator.fromString(recordIdString);

    recordRow = new RecordRow();
    recordRow.recordId = recordId;

    // HBase record table
    Configuration conf = HBaseConfiguration.create();
    conf.set("hbase.zookeeper.quorum", zkConnectionString);
    HTableInterface table = new HTable(conf, tableName);

    // Type manager
    zk = new StateWatchingZooKeeper(zkConnectionString, zkSessionTimeout);
    typeMgr = new HBaseTypeManager(idGenerator, conf, zk, new HBaseTableFactoryImpl(conf));

    Get get = new Get(recordId.toBytes());
    get.setMaxVersions();
    Result row = table.get(get);

    NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> root = row.getMap();

    readColumns(root.get(RecordCf.DATA.bytes));

    byte[][] treatedColumnFamilies = { RecordCf.DATA.bytes };

    for (byte[] cf : root.keySet()) {
        if (!isInArray(cf, treatedColumnFamilies)) {
            recordRow.unknownColumnFamilies.add(Bytes.toString(cf));
        }
    }

    executeTemplate("recordrow2html.ftl", Collections.<String, Object>singletonMap("row", recordRow),
            System.out);

    return 0;
}

From source file:org.mahasen.util.SearchUtil.java

/**
 * Looks up the property tree identified by propertyTreeId in the DHT and collects the
 * resource ids whose property values fall within the given range (bounds inclusive).
 *
 * @param propertyTreeId id of the property tree to look up
 * @param initialValue   lower bound of the property value range (inclusive)
 * @param lastValue      upper bound of the property value range (inclusive)
 * @return the resource ids found within the range
 * @throws InterruptedException
 * @throws MahasenException if no property tree is found for the given id
 */
private Vector<Id> getResourceIdVector(Id propertyTreeId, String initialValue, String lastValue)
        throws InterruptedException, MahasenException {

    Vector<Id> resultantIds = new Vector<Id>();
    TreeMap propertyTree = mahasenManager.lookupPropertyTreeDHT(propertyTreeId);

    if (propertyTree == null) {
        throw new MahasenException("Property not found");
    } else {

        if (propertyTree.firstKey() instanceof String) {

            System.out.println("this is the property tree " + propertyTree);
            NavigableMap<String, Vector<Id>> resultMap = propertyTree.subMap(initialValue.toLowerCase(), true,
                    lastValue.toLowerCase(), true);

            Iterator keys = resultMap.keySet().iterator();

            while (keys.hasNext()) {
                resultantIds.addAll(resultMap.get(keys.next()));
            }

        } else if (propertyTree.firstKey() instanceof Integer) {

            System.out.println("this is the property tree " + propertyTree);
            NavigableMap<Integer, Vector<Id>> resultMap = propertyTree.subMap(Integer.valueOf(initialValue),
                    true, Integer.valueOf(lastValue), true);

            Iterator keys = resultMap.keySet().iterator();

            while (keys.hasNext()) {
                resultantIds.addAll(resultMap.get(keys.next()));
            }
        }
    }

    return resultantIds;
}

From source file:org.opencb.opencga.storage.hadoop.variant.VariantHbaseDBAdaptor.java

@Override
public QueryResult<Variant> getAllVariantsByRegionAndStudy(Region region, String sourceId,
        QueryOptions options) {
    Long start, end, dbstart, dbend;
    start = System.currentTimeMillis();
    QueryResult<Variant> queryResult = new QueryResult<>(
            String.format("%s:%d-%d", region.getChromosome(), region.getStart(), region.getEnd()));
    List<Variant> results = new LinkedList<>();

    boolean includeSamples;
    boolean includeStats;
    boolean includeEffects;
    if (!options.containsKey("samples") && !options.containsKey("stats") && !options.containsKey("effects")) {
        includeSamples = true;
        includeStats = true;
        includeEffects = true;
    } else {
        includeSamples = options.containsKey("samples") && options.getBoolean("samples");
        includeStats = options.containsKey("stats") && options.getBoolean("stats");
        includeEffects = options.containsKey("effects") && options.getBoolean("effects");
    }

    try {
        String startRow = buildRowkey(region.getChromosome(), Long.toString(region.getStart()));
        String stopRow = buildRowkey(region.getChromosome(), Long.toString(region.getEnd()));
        HTable table = new HTable(admin.getConfiguration(), tableName);
        dbstart = System.currentTimeMillis();
        Scan regionScan = new Scan(startRow.getBytes(), stopRow.getBytes());
        ResultScanner scanres = table.getScanner(regionScan);
        dbend = System.currentTimeMillis();
        queryResult.setDbTime(dbend - dbstart);

        // Iterate over results and, optionally, their samples and statistics
        for (Result result : scanres) {
            String[] rowkeyParts = new String(result.getRow(), CHARSET_UTF_8).split("_");
            String chromosome = rowkeyParts[0].replaceFirst("^0+(?!$)", "");
            int position = Integer.parseInt(rowkeyParts[1]);

            // Get basic result fields from Protocol Buffers message
            NavigableMap<byte[], byte[]> infoMap = result.getFamilyMap("i".getBytes());
            byte[] byteInfo = infoMap.get((sourceId + "_data").getBytes());
            VariantFieldsProtos.VariantInfo protoInfo = VariantFieldsProtos.VariantInfo.parseFrom(byteInfo);
            String reference = protoInfo.getReference();
            String alternate = StringUtils.join(protoInfo.getAlternateList(), ",");
            String format = StringUtils.join(protoInfo.getFormatList(), ":");
            Variant variant = new Variant(chromosome, position, position, reference, alternate);

            // Set samples if requested
            if (includeSamples) {
                NavigableMap<byte[], byte[]> sampleMap = result.getFamilyMap("d".getBytes());
                Map<String, Map<String, String>> resultSampleMap = new HashMap<>();

                // Set samples
                for (byte[] s : sampleMap.keySet()) {
                    String sampleName = (new String(s, CHARSET_UTF_8)).replaceAll(sourceId + "_", "");
                    VariantFieldsProtos.VariantSample sample = VariantFieldsProtos.VariantSample
                            .parseFrom(sampleMap.get(s));
                    String sample1 = sample.getSample();
                    String[] values = sample1.split(":");
                    String[] fields = format.split(":");
                    Map<String, String> singleSampleMap = new HashMap<>();
                    for (int i = 0; i < fields.length; i++) {
                        singleSampleMap.put(fields[i], values[i]);
                    }
                    // TODO 
                    //                        variant.addSampleData(sampleName, singleSampleMap);
                }
            }

            // Set stats if requested
            if (includeStats) {
                byte[] byteStats = infoMap.get((sourceId + "_stats").getBytes());
                VariantFieldsProtos.VariantStats protoStats = VariantFieldsProtos.VariantStats
                        .parseFrom(byteStats);
                VariantStats variantStats = new VariantStats(chromosome, position, reference, alternate,
                        protoStats.getMaf(), protoStats.getMgf(), protoStats.getMafAllele(),
                        protoStats.getMgfGenotype(), protoStats.getMissingAlleles(),
                        protoStats.getMissingGenotypes(), protoStats.getMendelianErrors(),
                        protoStats.getIsIndel(), protoStats.getCasesPercentDominant(),
                        protoStats.getControlsPercentDominant(), protoStats.getCasesPercentRecessive(),
                        protoStats.getControlsPercentRecessive());
                variant.setStats(variantStats);
            }

            if (includeEffects) {
                QueryResult<VariantEffect> queryEffects = getEffectsByVariant(variant, options);
                variant.setEffect(queryEffects.getResult());
            }

            results.add(variant);
        }
    } catch (IOException e) {
        System.err.println(e.getClass().getName() + ": " + e.getMessage());
    }
    queryResult.setResult(results);
    queryResult.setNumResults(results.size());
    end = System.currentTimeMillis();
    queryResult.setTime(end - start);
    return queryResult;
}

From source file:org.opencb.opencga.storage.hadoop.variant.VariantHbaseDBAdaptor.java

public List<Variant> getRecordSimpleStats(String study, int missing_gt, float maf, String maf_allele) {
    BasicDBObject compare = new BasicDBObject("studies.stats.allele_maf", maf_allele)
            .append("studies.stats.MAF", maf).append("studies.stats.missing", missing_gt);
    List<Get> hbaseQuery = new ArrayList<>();
    DBCollection collection = db.getCollection("variants");
    Iterator<DBObject> result = collection.find(compare);
    String chromosome = new String();
    while (result.hasNext()) {
        DBObject variant = result.next();
        String position = variant.get("_id").toString();
        //hbase query construction
        Get get = new Get(position.getBytes());
        hbaseQuery.add(get);
    }
    //Complete results, from HBase

    tableName = study;
    effectTableName = tableName + "effect";
    Map<String, Variant> resultsMap = new HashMap<>();

    try {
        HTable table = new HTable(admin.getConfiguration(), tableName);
        HTable effectTable = new HTable(admin.getConfiguration(), effectTableName);
        Result[] hbaseResultEffect = effectTable.get(hbaseQuery);
        Result[] hbaseResultStats = table.get(hbaseQuery);

        //            List<Variant> results = new LinkedList<>();
        for (Result r : hbaseResultStats) {
            String position = new String(r.getRow(), CHARSET_UTF_8);
            String[] aux = position.split("_");
            String inner_position = aux[1];
            String chr = aux[0];
            //position parsing
            if (chr.startsWith("0")) {
                chr = chr.substring(1);
            }
            while (inner_position.startsWith("0")) {
                inner_position = inner_position.substring(1);
            }
            List<VariantFieldsProtos.VariantSample> samples = new LinkedList<>();
            NavigableMap<byte[], byte[]> infoMap = r.getFamilyMap("i".getBytes());
            byte[] byteStats = infoMap.get((study + "_stats").getBytes());
            VariantFieldsProtos.VariantStats stats = VariantFieldsProtos.VariantStats.parseFrom(byteStats);
            byte[] byteInfo = infoMap.get((study + "_data").getBytes());
            VariantFieldsProtos.VariantInfo info = VariantFieldsProtos.VariantInfo.parseFrom(byteInfo);
            String alternate = StringUtils.join(info.getAlternateList(), ", ");
            String reference = info.getReference();
            Variant partialResult = new Variant(chr, Integer.parseInt(inner_position),
                    Integer.parseInt(inner_position), reference, alternate);
            String format = StringUtils.join(info.getFormatList(), ":");
            NavigableMap<byte[], byte[]> sampleMap = r.getFamilyMap("d".getBytes());
            Map<String, Map<String, String>> resultSampleMap = new HashMap<>();
            //                StringBuilder sampleRaw = new StringBuilder();
            for (byte[] s : sampleMap.keySet()) {
                String qual = (new String(s, CHARSET_UTF_8)).replaceAll(study + "_", "");
                VariantFieldsProtos.VariantSample sample = VariantFieldsProtos.VariantSample
                        .parseFrom(sampleMap.get(s));
                String sample1 = sample.getSample();
                String[] values = sample1.split(":");
                String[] fields = format.split(":");
                Map<String, String> singleSampleMap = new HashMap<>();
                for (int i = 0; i < fields.length; i++) {
                    singleSampleMap.put(fields[i], values[i]);
                }
                resultSampleMap.put(qual, singleSampleMap);

            }
            VariantStats variantStats = new VariantStats(chr, Integer.parseInt(inner_position),
                    reference, alternate, stats.getMaf(), stats.getMgf(), stats.getMafAllele(),
                    stats.getMgfGenotype(), stats.getMissingAlleles(), stats.getMissingGenotypes(),
                    stats.getMendelianErrors(), stats.getIsIndel(), stats.getCasesPercentDominant(),
                    stats.getControlsPercentDominant(), stats.getCasesPercentRecessive(),
                    stats.getControlsPercentRecessive());
            partialResult.setStats(variantStats);
            resultsMap.put(new String(r.getRow(), CHARSET_UTF_8), partialResult);
        }

        for (Result r : hbaseResultEffect) {
            if (!r.isEmpty()) {
                NavigableMap<byte[], byte[]> effectMap = r.getFamilyMap("e".getBytes());
                Variant partialResult = resultsMap.get(new String(r.getRow(), CHARSET_UTF_8));
                System.out.println("Recuperado " + partialResult.toString());
                String s = partialResult.getReference() + "_" + partialResult.getAlternate();
                VariantEffectProtos.EffectInfo effectInfo = VariantEffectProtos.EffectInfo
                        .parseFrom(effectMap.get(s.getBytes()));
                VariantEffect variantEffect = new VariantEffect(partialResult.getChromosome(),
                        (int) partialResult.getStart(), partialResult.getReference(),
                        partialResult.getAlternate(), effectInfo.getFeatureId(), effectInfo.getFeatureName(),
                        effectInfo.getFeatureType(), effectInfo.getFeatureBiotype(),
                        effectInfo.getFeatureChromosome(), effectInfo.getFeatureStart(),
                        effectInfo.getFeatureEnd(), effectInfo.getFeatureStrand(), effectInfo.getSnpId(),
                        effectInfo.getAncestral(), effectInfo.getAlternative(), effectInfo.getGeneId(),
                        effectInfo.getTranscriptId(), effectInfo.getGeneName(), effectInfo.getConsequenceType(),
                        effectInfo.getConsequenceTypeObo(), effectInfo.getConsequenceTypeDesc(),
                        effectInfo.getConsequenceTypeType(), effectInfo.getAaPosition(),
                        effectInfo.getAminoacidChange(), effectInfo.getCodonChange());
                resultsMap.put(new String(r.getRow(), CHARSET_UTF_8), partialResult);
            }
        }
    } catch (InvalidProtocolBufferException e) {
        System.err.println(e.getClass().getName() + ": " + e.getMessage());
    } catch (IOException e) {
        System.err.println(e.getClass().getName() + ": " + e.getMessage());
    }

    List<Variant> results = new ArrayList<>(resultsMap.values());
    return results;
}

From source file:org.openstreetmap.osmosis.hbase.utility.MockHTable.java

private static List<KeyValue> toKeyValue(byte[] row,
        NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowdata, long timestampStart,
        long timestampEnd, int maxVersions) {
    LOG.info("Maxversions: " + maxVersions);
    List<KeyValue> ret = new ArrayList<KeyValue>();
    for (byte[] family : rowdata.keySet())
        for (byte[] qualifier : rowdata.get(family).keySet()) {
            int versionsAdded = 0;
            //LOG.info("num cells: " + rowdata.get(family).get(qualifier).descendingMap().entrySet().size());
            for (Map.Entry<Long, byte[]> tsToVal : rowdata.get(family).get(qualifier).descendingMap()
                    .entrySet()) {
                if (versionsAdded++ == maxVersions)
                    break;
                Long timestamp = tsToVal.getKey();
                if (timestamp < timestampStart)
                    continue;
                if (timestamp > timestampEnd)
                    continue;
                byte[] value = tsToVal.getValue();
                ret.add(new KeyValue(row, family, qualifier, timestamp, value));
            }
        }
    return ret;
}

From source file:org.structnetalign.merge.BronKerboschMergeJob.java

@Override
public List<NavigableSet<Integer>> call() throws Exception {

    logger.info("Searching for cliques on job " + index + " containing " + graph.getVertexCount()
            + " vertices and " + graph.getHomologyCount() + " homology edges");

    // find the cliques
    BronKerboschCliqueFinder<Integer, HomologyEdge> finder = new BronKerboschCliqueFinder<>();

    // these cliques are ordered from largest to smallest
    Collection<Set<Integer>> cliques = finder.transform(graph.getHomology());

    // just report the cliques we're using
    logger.info("Job " + index + ": " + "Found " + cliques.size() + " maximal cliques");
    int i = 1;
    for (Set<Integer> clique : cliques) {
        logger.debug("Job " + index + ": " + "Clique " + i + ": " + clique);
        i++;
    }

    // partition the cliques by sets of interactions
    // we call these (maximal) degenerate sets
    List<NavigableSet<Integer>> simpleDegenerateSets = new ArrayList<NavigableSet<Integer>>();
    for (Set<Integer> clique : cliques) {
        NavigableMap<String, NavigableSet<Integer>> degenerateSetMap = new TreeMap<>();
        for (int v : clique) {
            Collection<Integer> neighbors = graph.getInteractionNeighbors(v);
            String hash = hashVertexInteractions(neighbors);
            NavigableSet<Integer> degenerateSet = degenerateSetMap.get(hash);
            if (degenerateSet == null) {
                degenerateSet = new TreeSet<>();
                degenerateSetMap.put(hash, degenerateSet);
            }
            degenerateSet.add(v);
            logger.trace("Job " + index + ": " + "Found " + hash + " --> " + degenerateSetMap.get(hash));
        }
        for (NavigableSet<Integer> set : degenerateSetMap.values()) {
            simpleDegenerateSets.add(set);
        }
    }

    /*
     * Now sort the degenerate sets from largest to smallest.
     * Take into account the edge case where the sizes are the same.
     */
    Comparator<NavigableSet<Integer>> comparator = new Comparator<NavigableSet<Integer>>() {
        @Override
        public int compare(NavigableSet<Integer> clique1, NavigableSet<Integer> clique2) {
            if (CollectionUtils.isEqualCollection(clique1, clique2))
                return 0;
            if (clique1.size() < clique2.size()) {
                return 1;
            } else if (clique1.size() > clique2.size()) {
                return -1;
            } else {
                Iterator<Integer> iter1 = clique1.iterator();
                Iterator<Integer> iter2 = clique2.iterator();
                while (iter1.hasNext()) { // we know they're the same size
                    int v1 = iter1.next();
                    int v2 = iter2.next();
                    if (v1 < v2) {
                        return 1;
                    } else if (v1 > v2) {
                        return -1;
                    }
                }
            }
            // they're the same throughout, so they're equal
            return 0;
        }
    };
    List<NavigableSet<Integer>> sortedDegenerateSets = new ArrayList<>(simpleDegenerateSets.size());
    sortedDegenerateSets.addAll(simpleDegenerateSets);
    Collections.sort(sortedDegenerateSets, comparator);

    /*
     * Now we want to return only the maximal maximal degenerate sets.
     */

    TreeSet<String> verticesAlreadyUsed = new TreeSet<String>();

    List<NavigableSet<Integer>> finalDegenerateSets = new ArrayList<>(sortedDegenerateSets.size());

    int nTrivial = 0;
    int nWeak = 0; // a degenerate set is weak if it contains a vertex that is added first

    forcliques: for (NavigableSet<Integer> set : sortedDegenerateSets) {

        // discard trivial degenerate sets
        if (set.size() < 2) {
            nTrivial++;
            continue;
        }

        // verify that we haven't already used any vertex in this degenerate set
        for (int v : set) {
            String hash = NetworkUtils.hash(v); // use MD5 for safety
            if (verticesAlreadyUsed.contains(hash)) {
                // discard this degenerate set and do NOT say we've used any of these vertices
                nWeak++;
                continue forcliques;
            }
        }

        // we haven't used any vertex in this degenerate set
        // now add all of these vertices
        // do NOT add before, or we'll add vertices we haven't used yet
        for (int v : set) {
            String hash = NetworkUtils.hash(v);
            verticesAlreadyUsed.add(hash);
        }
        finalDegenerateSets.add(set); // keep this degenerate set
    }

    logger.info("Job " + index + ": " + "Found " + finalDegenerateSets.size()
            + " strong nontrivial maximal degenerate sets found (" + nTrivial + " trivial and " + nWeak
            + " weak)");

    return finalDegenerateSets;
}