Example usage for java.util SortedSet contains

List of usage examples for java.util SortedSet contains

Introduction

On this page you can find example usage for java.util.SortedSet.contains.

Prototype

boolean contains(Object o);

Document

Returns true if this set contains the specified element.
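
Example

A minimal, self-contained sketch (class name illustrative) of how contains behaves on a TreeSet: the lookup is O(log n) and consults the set's ordering (natural ordering or the supplied Comparator) rather than equals.

import java.util.SortedSet;
import java.util.TreeSet;

public class SortedSetContainsDemo {
    public static void main(String[] args) {
        // Natural ordering: membership is decided by compareTo().
        SortedSet<String> names = new TreeSet<String>();
        names.add("alpha");
        names.add("beta");
        System.out.println(names.contains("alpha")); // true
        System.out.println(names.contains("gamma")); // false

        // With a Comparator, contains() consults the comparator, not equals():
        // a case-insensitive set treats "BETA" and "beta" as the same element.
        SortedSet<String> caseInsensitive = new TreeSet<String>(String.CASE_INSENSITIVE_ORDER);
        caseInsensitive.add("beta");
        System.out.println(caseInsensitive.contains("BETA")); // true
    }
}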

Usage

From source file: org.codehaus.mojo.license.ThirdPartyReportMojo.java

private Collection<ThirdPartyDetails> createThirdPartyDetails()
        throws IOException, ThirdPartyToolException, ProjectBuildingException, MojoFailureException {

    // load dependencies of the project
    SortedMap<String, MavenProject> projectDependencies = getHelper().loadDependencies(this);

    // create licenseMap from it
    LicenseMap licenseMap = getHelper().createLicenseMap(projectDependencies);

    // Get unsafe dependencies (dependencies with no license in pom)
    SortedSet<MavenProject> dependenciesWithNoLicense = getHelper().getProjectsWithNoLicense(licenseMap);

    // compute safe dependencies (with pom licenses)
    Set<MavenProject> dependenciesWithPomLicense = new TreeSet<MavenProject>(
            MojoHelper.newMavenProjectComparator());
    dependenciesWithPomLicense.addAll(projectDependencies.values());

    if (CollectionUtils.isNotEmpty(dependenciesWithNoLicense)) {
        // there are some unsafe dependencies; remove them from the safe dependencies
        dependenciesWithPomLicense.removeAll(dependenciesWithNoLicense);

        if (useMissingFile) {
            // Resolve unsafe dependencies using missing files; this updates licenseMap and the unsafe dependencies
            getHelper().createUnsafeMapping(licenseMap, missingFile, useRepositoryMissingFiles,
                    dependenciesWithNoLicense, projectDependencies);
        }
    }

    // The LicenseMap is now complete; let's merge licenses if necessary
    getHelper().mergeLicenses(licenseMerges, licenseMap);

    // let's build third-party details for each dependency
    Collection<ThirdPartyDetails> details = new ArrayList<ThirdPartyDetails>();

    for (Map.Entry<MavenProject, String[]> entry : licenseMap.toDependencyMap().entrySet()) {
        MavenProject dependency = entry.getKey();
        String[] licenses = entry.getValue();
        ThirdPartyDetails detail = new DefaultThirdPartyDetails(dependency);
        details.add(detail);
        if (dependenciesWithPomLicense.contains(dependency)) {

            // these are POM licenses
            detail.setPomLicenses(licenses);
        } else if (!dependenciesWithNoLicense.contains(dependency)) {

            // these are third-party licenses
            detail.setThirdPartyLicenses(licenses);
        }
    }
    return details;
}
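
Note on the contains call above: dependenciesWithPomLicense is a TreeSet built with MojoHelper.newMavenProjectComparator(), so membership is decided by that comparator rather than by MavenProject.equals(); two projects that the comparator orders as equal are treated as the same element.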

From source file: org.dellapenna.research.ldr.Popolazione.java

/**
 * Checks whether there are incompatibilities between the vector positions,
 * creating a kind of mutation.
 *
 * @param quadratiPosL1 original positions of the first individual
 * @param quadratiPosL2 original positions of the second individual
 * @param newQuadratiL2 new positions compatible with the individual under consideration.
 */
private void checkIncompatibilita(ArrayList<Integer> quadratiPosL1, ArrayList<Integer> quadratiPosL2,
        ArrayList<Integer> newQuadratiL2) {

    // Sets of positions for L1 and L2
    Set<Integer> elementiL1 = new TreeSet<Integer>();
    Set<Integer> elementiL2 = new TreeSet<Integer>();
    SortedSet<Integer> elementiNewL2 = new TreeSet<Integer>();

    Integer newPos = null;

    // copy the two position vectors into sets
    for (Integer pos : quadratiPosL1) {
        elementiL1.add(pos);
    }

    for (Integer pos : quadratiPosL2) {
        elementiL2.add(pos);
    }

    // TODO: needs improvement; it can create keys that end up equal to those of L1
    // for every element of L2
    for (Integer pos2 : quadratiPosL2) {
        newPos = pos2;
        // if pos2 is contained in L1
        if (elementiL1.contains(pos2)) {
            // update the position held in pos2
            do {
                // avoid zero and include the size; important: size must match the length of the deformable line.
                newPos = ((newPos + 2) % lungLinea) + 1;
                // until we find an element contained neither in L1 nor in L2 (nor already in the new set)
            } while ((elementiL1.contains(newPos)) || (elementiL2.contains(newPos))
                    || (elementiNewL2.contains(newPos)));

            elementiNewL2.add(newPos);
        } else {
            elementiNewL2.add(newPos);
        }

    }

    for (Integer pos : elementiNewL2) {
        newQuadratiL2.add(pos);
    }

}
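
The do/while above uses three contains checks to probe for a position that is free in L1, in L2, and in the set of already-assigned new positions; since all three sets are TreeSets, each probe costs O(log n).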

From source file: org.torproject.collector.bridgedescs.BridgeSnapshotReader.java

/**
 * Reads the half-hourly snapshots of bridge descriptors from Bifroest.
 */
public BridgeSnapshotReader(BridgeDescriptorParser bdp, File bridgeDirectoriesDir, File statsDirectory)
        throws ConfigurationException {

    if (bdp == null || bridgeDirectoriesDir == null || statsDirectory == null) {
        throw new IllegalArgumentException();
    }

    SortedSet<String> parsed = new TreeSet<String>();
    File bdDir = bridgeDirectoriesDir;
    File pbdFile = new File(statsDirectory, "parsed-bridge-directories");
    boolean modified = false;
    if (bdDir.exists()) {
        if (pbdFile.exists()) {
            logger.debug("Reading file " + pbdFile.getAbsolutePath() + "...");
            try {
                BufferedReader br = new BufferedReader(new FileReader(pbdFile));
                String line = null;
                while ((line = br.readLine()) != null) {
                    parsed.add(line);
                }
                br.close();
                logger.debug("Finished reading file " + pbdFile.getAbsolutePath() + ".");
            } catch (IOException e) {
                logger.warn("Failed reading file " + pbdFile.getAbsolutePath() + "!", e);
                return;
            }
        }
        logger.debug("Importing files in directory " + bridgeDirectoriesDir + "/...");
        Set<String> descriptorImportHistory = new HashSet<String>();
        int parsedFiles = 0;
        int skippedFiles = 0;
        int parsedStatuses = 0;
        int parsedServerDescriptors = 0;
        int skippedServerDescriptors = 0;
        int parsedExtraInfoDescriptors = 0;
        int skippedExtraInfoDescriptors = 0;
        Stack<File> filesInInputDir = new Stack<File>();
        filesInInputDir.add(bdDir);
        while (!filesInInputDir.isEmpty()) {
            File pop = filesInInputDir.pop();
            if (pop.isDirectory()) {
                for (File f : pop.listFiles()) {
                    filesInInputDir.add(f);
                }
            } else if (!parsed.contains(pop.getName())) {
                try {
                    FileInputStream in = new FileInputStream(pop);
                    if (in.available() > 0) {
                        TarArchiveInputStream tais = null;
                        if (pop.getName().endsWith(".tar.gz")) {
                            GzipCompressorInputStream gcis = new GzipCompressorInputStream(in);
                            tais = new TarArchiveInputStream(gcis);
                        } else if (pop.getName().endsWith(".tar")) {
                            tais = new TarArchiveInputStream(in);
                        } else {
                            continue;
                        }
                        BufferedInputStream bis = new BufferedInputStream(tais);
                        String fn = pop.getName();
                        String[] fnParts = fn.split("-");
                        if (fnParts.length != 5) {
                            logger.warn("Invalid bridge descriptor tarball file name: " + fn + ".  Skipping.");
                            continue;
                        }
                        String authorityPart = String.format("%s-%s-", fnParts[0], fnParts[1]);
                        String datePart = String.format("%s-%s-%s", fnParts[2], fnParts[3], fnParts[4]);
                        String authorityFingerprint;
                        switch (authorityPart) {
                        case "from-tonga-":
                            authorityFingerprint = "4A0CCD2DDC7995083D73F5D667100C8A5831F16D";
                            break;
                        case "from-bifroest-":
                            authorityFingerprint = "1D8F3A91C37C5D1C4C19B1AD1D0CFBE8BF72D8E1";
                            break;
                        default:
                            logger.warn("Did not recognize the bridge authority that " + "generated " + fn
                                    + ".  Skipping.");
                            continue;
                        }
                        String dateTime = datePart.substring(0, 10) + " " + datePart.substring(11, 13) + ":"
                                + datePart.substring(13, 15) + ":" + datePart.substring(15, 17);
                        while ((tais.getNextTarEntry()) != null) {
                            ByteArrayOutputStream baos = new ByteArrayOutputStream();
                            int len;
                            byte[] data = new byte[1024];
                            while ((len = bis.read(data, 0, 1024)) >= 0) {
                                baos.write(data, 0, len);
                            }
                            byte[] allData = baos.toByteArray();
                            if (allData.length == 0) {
                                continue;
                            }
                            String fileDigest = Hex.encodeHexString(DigestUtils.sha(allData));
                            String ascii = new String(allData, "US-ASCII");
                            BufferedReader br3 = new BufferedReader(new StringReader(ascii));
                            String firstLine = null;
                            while ((firstLine = br3.readLine()) != null) {
                                if (firstLine.startsWith("@")) {
                                    continue;
                                } else {
                                    break;
                                }
                            }
                            if (firstLine == null) {
                                continue;
                            }
                            if (firstLine.startsWith("published ") || firstLine.startsWith("flag-thresholds ")
                                    || firstLine.startsWith("r ")) {
                                bdp.parse(allData, dateTime, authorityFingerprint);
                                parsedStatuses++;
                            } else if (descriptorImportHistory.contains(fileDigest)) {
                                /* Skip server descriptors or extra-info descriptors if
                                 * we parsed them before. */
                                skippedFiles++;
                                continue;
                            } else {
                                int start = -1;
                                int sig = -1;
                                int end = -1;
                                String startToken = firstLine.startsWith("router ") ? "router " : "extra-info ";
                                String sigToken = "\nrouter-signature\n";
                                String endToken = "\n-----END SIGNATURE-----\n";
                                while (end < ascii.length()) {
                                    start = ascii.indexOf(startToken, end);
                                    if (start < 0) {
                                        break;
                                    }
                                    sig = ascii.indexOf(sigToken, start);
                                    if (sig < 0) {
                                        break;
                                    }
                                    sig += sigToken.length();
                                    end = ascii.indexOf(endToken, sig);
                                    if (end < 0) {
                                        break;
                                    }
                                    end += endToken.length();
                                    byte[] descBytes = new byte[end - start];
                                    System.arraycopy(allData, start, descBytes, 0, end - start);
                                    String descriptorDigest = Hex.encodeHexString(DigestUtils.sha(descBytes));
                                    if (!descriptorImportHistory.contains(descriptorDigest)) {
                                        bdp.parse(descBytes, dateTime, authorityFingerprint);
                                        descriptorImportHistory.add(descriptorDigest);
                                        if (firstLine.startsWith("router ")) {
                                            parsedServerDescriptors++;
                                        } else {
                                            parsedExtraInfoDescriptors++;
                                        }
                                    } else {
                                        if (firstLine.startsWith("router ")) {
                                            skippedServerDescriptors++;
                                        } else {
                                            skippedExtraInfoDescriptors++;
                                        }
                                    }
                                }
                            }
                            descriptorImportHistory.add(fileDigest);
                            parsedFiles++;
                        }
                        bis.close();
                    }
                    in.close();

                    /* Let's give some memory back, or we'll run out of it. */
                    System.gc();

                    parsed.add(pop.getName());
                    modified = true;
                } catch (IOException e) {
                    logger.warn("Could not parse bridge snapshot " + pop.getName() + "!", e);
                    continue;
                }
            }
        }
        logger.debug("Finished importing files in directory " + bridgeDirectoriesDir
                + "/.  In total, we parsed " + parsedFiles + " files (skipped " + skippedFiles + ") containing "
                + parsedStatuses + " statuses, " + parsedServerDescriptors + " server descriptors (skipped "
                + skippedServerDescriptors + "), and " + parsedExtraInfoDescriptors + " extra-info descriptors "
                + "(skipped " + skippedExtraInfoDescriptors + ").");
        if (!parsed.isEmpty() && modified) {
            logger.debug("Writing file " + pbdFile.getAbsolutePath() + "...");
            pbdFile.getParentFile().mkdirs();
            try (BufferedWriter bw = new BufferedWriter(new FileWriter(pbdFile))) {
                for (String f : parsed) {
                    bw.append(f + "\n");
                }
                logger.debug("Finished writing file " + pbdFile.getAbsolutePath() + ".");
            } catch (IOException e) {
                logger.warn("Failed writing file " + pbdFile.getAbsolutePath() + "!", e);
            }
        }
    }
}
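
Here parsed.contains(pop.getName()) acts as a persisted import history: the names of already-processed tarballs are kept in the sorted set and written back to parsed-bridge-directories, which (because the set is sorted) stays in lexicographic order across runs.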

From source file: com.gargoylesoftware.htmlunit.html.HtmlPage.java

private void removeElement(final Map<String, SortedSet<DomElement>> map, final DomElement element,
        final String attribute, final boolean recurse) {
    // first try real attributes
    String value = element.getAttribute(attribute);

    if (DomElement.ATTRIBUTE_NOT_DEFINED == value && !(element instanceof HtmlApplet)) {
        // second, try JavaScript attributes
        // ...but applets are a bit special so ignore them
        final ScriptableObject scriptObject = element.getScriptableObject();
        // we have to make sure the scriptObject has a slot for the given attribute.
        // just using get() may use e.g. getWithPreemption().
        if (scriptObject.has(attribute, scriptObject)) {
            final Object jsValue = scriptObject.get(attribute, scriptObject);
            if (jsValue != null && jsValue != Scriptable.NOT_FOUND && jsValue instanceof String) {
                value = (String) jsValue;
            }
        }
    }

    if (!StringUtils.isEmpty(value)) {
        final SortedSet<DomElement> elements = map.remove(value);
        if (elements != null && (elements.size() != 1 || !elements.contains(element))) {
            elements.remove(element);
            map.put(value, elements);
        }
    }
    if (recurse) {
        for (final DomElement child : element.getChildElements()) {
            removeElement(map, child, attribute, true);
        }
    }
}
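
The condition around contains re-inserts the set into the map only when it will remain non-empty: if the set's sole member is the element being removed, the whole map entry stays removed.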

From source file: org.codehaus.mojo.license.AbstractThirdPartyReportMojo.java

Collection<ThirdPartyDetails> createThirdPartyDetails(MavenProject project, boolean loadArtifacts)
        throws IOException, ThirdPartyToolException, ProjectBuildingException, MojoFailureException,
        DependenciesToolException {

    if (loadArtifacts) {
        dependenciesTool.loadProjectArtifacts(localRepository, project.getRemoteArtifactRepositories(),
                project);
    }

    ThirdPartyHelper thirdPartyHelper = new DefaultThirdPartyHelper(project, encoding, verbose,
            dependenciesTool, thirdPartyTool, localRepository, project.getRemoteArtifactRepositories(),
            getLog());
    // load dependencies of the project
    SortedMap<String, MavenProject> projectDependencies = thirdPartyHelper.loadDependencies(this);

    // create licenseMap from it
    LicenseMap licenseMap = thirdPartyHelper.createLicenseMap(projectDependencies);

    // Get unsafe dependencies (dependencies with no license in pom)
    SortedSet<MavenProject> dependenciesWithNoLicense = thirdPartyHelper.getProjectsWithNoLicense(licenseMap);

    // compute safe dependencies (with pom licenses)
    Set<MavenProject> dependenciesWithPomLicense = new TreeSet<MavenProject>(
            MojoHelper.newMavenProjectComparator());
    dependenciesWithPomLicense.addAll(projectDependencies.values());

    if (CollectionUtils.isNotEmpty(dependenciesWithNoLicense)) {
        // there are some unsafe dependencies; remove them from the safe dependencies
        dependenciesWithPomLicense.removeAll(dependenciesWithNoLicense);

        if (useMissingFile) {
            // Resolve unsafe dependencies using missing files; this updates licenseMap and the unsafe dependencies
            thirdPartyHelper.createUnsafeMapping(licenseMap, missingFile, useRepositoryMissingFiles,
                    dependenciesWithNoLicense, projectDependencies);
        }
    }

    // The LicenseMap is now complete; let's merge licenses if necessary
    thirdPartyHelper.mergeLicenses(licenseMerges, licenseMap);

    // Add override licenses
    thirdPartyTool.overrideLicenses(licenseMap, projectDependencies, encoding, overrideFile);

    // let's build third-party details for each dependency
    Collection<ThirdPartyDetails> details = new ArrayList<ThirdPartyDetails>();

    for (Map.Entry<MavenProject, String[]> entry : licenseMap.toDependencyMap().entrySet()) {
        MavenProject dependency = entry.getKey();
        String[] licenses = entry.getValue();
        ThirdPartyDetails detail = new DefaultThirdPartyDetails(dependency);
        details.add(detail);
        if (dependenciesWithPomLicense.contains(dependency)) {

            // these are POM licenses
            detail.setPomLicenses(licenses);
        } else if (!dependenciesWithNoLicense.contains(dependency)) {

            // these are third-party licenses
            detail.setThirdPartyLicenses(licenses);
        }
    }
    return details;
}

From source file: org.dllearner.reasoning.SPARQLReasoner.java

public boolean isSuperClassOf(OWLClass sup, OWLClass sub, boolean direct) {
    String query = direct ? SPARQLQueryUtils.SELECT_SUPERCLASS_OF_QUERY
            : SPARQLQueryUtils.SELECT_SUPERCLASS_OF_QUERY_RDFS;
    query = String.format(query, sub.toStringID());
    ResultSet rs = executeSelectQuery(query);
    SortedSet<OWLClass> superClasses = asOWLEntities(EntityType.CLASS, rs, "var1");
    return superClasses.contains(sup);
}
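
contains here is a membership test over the materialized superclass set; as the TODO in the hasTypeImpl example from the same class suggests (see below), a single ASK query could answer this without retrieving all superclasses, at the cost of one query round trip per check.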

From source file: org.apache.cassandra.db.CompactionManager.java

/**
 * This function goes over each file and removes the keys that the node is not responsible for
 * and only keeps keys that this node is responsible for.
 *
 * @throws IOException
 */
private void doCleanupCompaction(ColumnFamilyStore cfs, Collection<SSTableReader> sstables,
        NodeId.OneShotRenewer renewer) throws IOException {
    assert !cfs.isIndex();
    Table table = cfs.table;
    Collection<Range> ranges = StorageService.instance.getLocalRanges(table.name);
    boolean isCommutative = cfs.metadata.getDefaultValidator().isCommutative();
    if (ranges.isEmpty()) {
        logger.info("Cleanup cannot run before a node has joined the ring");
        return;
    }

    for (SSTableReader sstable : sstables) {
        long startTime = System.currentTimeMillis();
        long totalkeysWritten = 0;

        int expectedBloomFilterSize = Math.max(DatabaseDescriptor.getIndexInterval(),
                (int) (SSTableReader.getApproximateKeyCount(Arrays.asList(sstable))));
        if (logger.isDebugEnabled())
            logger.debug("Expected bloom filter size : " + expectedBloomFilterSize);

        SSTableWriter writer = null;
        try {
            logger.info("Cleaning up " + sstable);
            // Calculate the expected compacted filesize
            long expectedRangeFileSize = cfs.getExpectedCompactedFileSize(Arrays.asList(sstable)) / 2;
            String compactionFileLocation = table.getDataFileLocation(expectedRangeFileSize);
            if (compactionFileLocation == null)
                throw new IOException("disk full");

            SSTableScanner scanner = sstable.getDirectScanner(CompactionIterator.FILE_BUFFER_SIZE);
            SortedSet<ByteBuffer> indexedColumns = cfs.getIndexedColumns();
            CleanupInfo ci = new CleanupInfo(sstable, scanner);
            executor.beginCompaction(ci);
            try {
                while (scanner.hasNext()) {
                    SSTableIdentityIterator row = (SSTableIdentityIterator) scanner.next();
                    if (Range.isTokenInRanges(row.getKey().token, ranges)) {
                        writer = maybeCreateWriter(cfs, compactionFileLocation, expectedBloomFilterSize, writer,
                                Collections.singletonList(sstable));
                        writer.append(getCompactedRow(row, sstable.descriptor, false));
                        totalkeysWritten++;
                    } else {
                        cfs.invalidateCachedRow(row.getKey());
                        if (!indexedColumns.isEmpty() || isCommutative) {
                            while (row.hasNext()) {
                                IColumn column = row.next();
                                if (column instanceof CounterColumn)
                                    renewer.maybeRenew((CounterColumn) column);
                                if (indexedColumns.contains(column.name()))
                                    Table.cleanupIndexEntry(cfs, row.getKey().key, column);
                            }
                        }
                    }
                }
            } finally {
                scanner.close();
                executor.finishCompaction(ci);
            }
        } finally {
            cfs.getDataTracker().unmarkCompacting(Arrays.asList(sstable));
        }

        List<SSTableReader> results = new ArrayList<SSTableReader>();
        if (writer != null) {
            SSTableReader newSstable = writer.closeAndOpenReader(sstable.maxDataAge);
            results.add(newSstable);

            String format = "Cleaned up to %s.  %,d to %,d (~%d%% of original) bytes for %,d keys.  Time: %,dms.";
            long dTime = System.currentTimeMillis() - startTime;
            long startsize = sstable.length();
            long endsize = newSstable.length();
            double ratio = (double) endsize / (double) startsize;
            logger.info(String.format(format, writer.getFilename(), startsize, endsize, (int) (ratio * 100),
                    totalkeysWritten, dTime));
        }

        // flush to ensure we don't lose the tombstones on a restart, since they are not commitlog'd
        for (ByteBuffer columnName : cfs.getIndexedColumns()) {
            try {
                cfs.getIndexedColumnFamilyStore(columnName).forceBlockingFlush();
            } catch (ExecutionException e) {
                throw new RuntimeException(e);
            } catch (InterruptedException e) {
                throw new AssertionError(e);
            }
        }
        cfs.replaceCompactedSSTables(Arrays.asList(sstable), results);
    }
}
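
In the cleanup loop, indexedColumns.contains(column.name()) decides per column whether a secondary index entry must be cleaned up for a key this node no longer owns.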

From source file: org.dllearner.reasoning.SPARQLReasoner.java

@Override
public boolean hasTypeImpl(OWLClassExpression description, OWLIndividual individual) {
    if (description.isOWLThing()) { // owl:Thing -> TRUE
        return true;
    } else if (description.isOWLNothing()) { // owl:Nothing -> FALSE
        return false;
    } else if (!description.isAnonymous()) { // atomic classes
        String query = String.format("ASK {<%s> a <%s>}", individual.toStringID(),
                description.asOWLClass().toStringID());
        boolean result = executeAskQuery(query);
        return result;
    } else { // complex class expressions
        //TODO use ASK queries
        SortedSet<OWLIndividual> individuals = getIndividuals(description, Collections.singleton(individual));
        return individuals.contains(individual);
        //         String queryBody = converter.convert("?ind", description);
        //         queryBody = queryBody.replace("?ind", "<" + individual.toStringID() + ">");
        //         String query = "ASK {" + queryBody + "}";
        //         // FIXME universal and cardinality restrictions do not work with ASK queries
    }
}
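
getIndividuals is invoked with Collections.singleton(individual), which presumably restricts retrieval to that one candidate; the contains call then reduces to a boolean test of whether the individual matched the class expression.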

From source file: com.gargoylesoftware.htmlunit.html.HtmlPage.java

private void addElement(final Map<String, SortedSet<DomElement>> map, final DomElement element,
        final String attribute, final boolean recurse) {
    // first try real attributes
    String value = element.getAttribute(attribute);

    if (DomElement.ATTRIBUTE_NOT_DEFINED == value && !(element instanceof HtmlApplet)) {
        // second, try JavaScript attributes
        // ...but applets are a bit special so ignore them
        final ScriptableObject scriptObject = element.getScriptableObject();
        // we have to make sure the scriptObject has a slot for the given attribute.
        // just using get() may use e.g. getWithPreemption().
        if (scriptObject.has(attribute, scriptObject)) {
            final Object jsValue = scriptObject.get(attribute, scriptObject);
            if (jsValue != null && jsValue != Scriptable.NOT_FOUND && jsValue instanceof String) {
                value = (String) jsValue;
            }
        }
    }

    if (DomElement.ATTRIBUTE_NOT_DEFINED != value) {
        SortedSet<DomElement> elements = map.get(value);
        if (elements == null) {
            elements = new TreeSet<>(documentPositionComparator);
            elements.add(element);
            map.put(value, elements);
        } else if (!elements.contains(element)) {
            elements.add(element);
        }
    }
    if (recurse) {
        for (final DomElement child : element.getChildElements()) {
            addElement(map, child, attribute, true);
        }
    }
}
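
Both add and contains on these sets consult documentPositionComparator rather than DomElement.equals(), so two distinct elements that compare as equal by document position would be treated as duplicates; the explicit contains check simply avoids a redundant add (TreeSet.add is already a no-op for an element the comparator considers present).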

From source file: org.apache.cassandra.db.compaction.CompactionManager.java

/**
 * This function goes over each file and removes the keys that the node is not responsible for
 * and only keeps keys that this node is responsible for.
 *
 * @throws IOException
 */
private void doCleanupCompaction(ColumnFamilyStore cfs, Collection<SSTableReader> sstables,
        NodeId.OneShotRenewer renewer) throws IOException {
    assert !cfs.isIndex();
    Table table = cfs.table;
    Collection<Range> ranges = StorageService.instance.getLocalRanges(table.name);
    boolean isCommutative = cfs.metadata.getDefaultValidator().isCommutative();
    if (ranges.isEmpty()) {
        logger.info("Cleanup cannot run before a node has joined the ring");
        return;
    }

    for (SSTableReader sstable : sstables) {
        CompactionController controller = new CompactionController(cfs, Collections.singletonList(sstable),
                getDefaultGcBefore(cfs), false);
        long startTime = System.currentTimeMillis();

        long totalkeysWritten = 0;

        int expectedBloomFilterSize = Math.max(DatabaseDescriptor.getIndexInterval(),
                (int) (SSTableReader.getApproximateKeyCount(Arrays.asList(sstable))));
        if (logger.isDebugEnabled())
            logger.debug("Expected bloom filter size : " + expectedBloomFilterSize);

        SSTableWriter writer = null;

        logger.info("Cleaning up " + sstable);
        // Calculate the expected compacted filesize
        long expectedRangeFileSize = cfs.getExpectedCompactedFileSize(Arrays.asList(sstable)) / 2;
        String compactionFileLocation = table.getDataFileLocation(expectedRangeFileSize);
        if (compactionFileLocation == null)
            throw new IOException("disk full");

        SSTableScanner scanner = sstable.getDirectScanner(CompactionIterator.FILE_BUFFER_SIZE);
        SortedSet<ByteBuffer> indexedColumns = cfs.getIndexedColumns();
        CleanupInfo ci = new CleanupInfo(sstable, scanner);
        executor.beginCompaction(ci);
        try {
            while (scanner.hasNext()) {
                SSTableIdentityIterator row = (SSTableIdentityIterator) scanner.next();
                if (Range.isTokenInRanges(row.getKey().token, ranges)) {
                    AbstractCompactedRow compactedRow = controller.getCompactedRow(row);
                    if (compactedRow.isEmpty())
                        continue;
                    writer = maybeCreateWriter(cfs, compactionFileLocation, expectedBloomFilterSize, writer,
                            Collections.singletonList(sstable));
                    writer.append(compactedRow);
                    totalkeysWritten++;
                } else {
                    cfs.invalidateCachedRow(row.getKey());
                    if (!indexedColumns.isEmpty() || isCommutative) {
                        while (row.hasNext()) {
                            IColumn column = row.next();
                            if (column instanceof CounterColumn)
                                renewer.maybeRenew((CounterColumn) column);
                            if (indexedColumns.contains(column.name()))
                                Table.cleanupIndexEntry(cfs, row.getKey().key, column);
                        }
                    }
                }
            }
        } finally {
            scanner.close();
            executor.finishCompaction(ci);
        }

        List<SSTableReader> results = new ArrayList<SSTableReader>();
        if (writer != null) {
            SSTableReader newSstable = writer.closeAndOpenReader(sstable.maxDataAge);
            results.add(newSstable);

            String format = "Cleaned up to %s.  %,d to %,d (~%d%% of original) bytes for %,d keys.  Time: %,dms.";
            long dTime = System.currentTimeMillis() - startTime;
            long startsize = sstable.length();
            long endsize = newSstable.length();
            double ratio = (double) endsize / (double) startsize;
            logger.info(String.format(format, writer.getFilename(), startsize, endsize, (int) (ratio * 100),
                    totalkeysWritten, dTime));
        }

        // flush to ensure we don't lose the tombstones on a restart, since they are not commitlog'd
        for (ByteBuffer columnName : cfs.getIndexedColumns()) {
            try {
                cfs.getIndexedColumnFamilyStore(columnName).forceBlockingFlush();
            } catch (ExecutionException e) {
                throw new RuntimeException(e);
            } catch (InterruptedException e) {
                throw new AssertionError(e);
            }
        }
        cfs.replaceCompactedSSTables(Arrays.asList(sstable), results);
    }
}