Example usage for java.util SortedSet remove

Introduction

This page collects example usages of the java.util SortedSet remove method, drawn from open-source projects.

Prototype

boolean remove(Object o);

Document

Removes the specified element from this set if it is present (optional operation).
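
Before the project examples below, here is a minimal, self-contained sketch of the remove contract (the class name and element values are illustrative, not taken from any of the projects): remove returns true only when the element was actually present, and a TreeSet locates the element via the set's ordering rather than equals.

import java.util.SortedSet;
import java.util.TreeSet;

public class SortedSetRemoveDemo {
    public static void main(String[] args) {
        SortedSet<String> names = new TreeSet<String>();
        names.add("alice");
        names.add("bob");

        // remove returns true when the element was present and is taken out
        System.out.println(names.remove("alice")); // true
        // and false when the element was not in the set
        System.out.println(names.remove("carol")); // false
        System.out.println(names); // [bob]
    }
}

Note that TreeSet tests membership with compareTo (or a supplied Comparator) rather than equals, so remove can behave surprisingly when the two are inconsistent.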

Usage

From source file:org.apache.ctakes.ytex.kernel.evaluator.CorpusKernelEvaluatorImpl.java

public void evaluateKernelOnCorpus(final Map<Long, Node> instanceIDMap, int nMod, int nSlice,
        boolean evalTest) {
    KernelEvaluation kernelEvaluationTmp = new KernelEvaluation();
    kernelEvaluationTmp.setExperiment(this.getExperiment());
    kernelEvaluationTmp.setFoldId(this.getFoldId());
    kernelEvaluationTmp.setLabel(this.getLabel());
    kernelEvaluationTmp.setCorpusName(this.getName());
    kernelEvaluationTmp.setParam1(getParam1());
    kernelEvaluationTmp.setParam2(getParam2());
    final KernelEvaluation kernelEvaluation = this.kernelEvaluationDao.storeKernelEval(kernelEvaluationTmp);
    final List<Long> documentIds = new ArrayList<Long>();
    final List<Long> testDocumentIds = new ArrayList<Long>();
    loadDocumentIds(documentIds, testDocumentIds, instanceIDQuery);
    if (!evalTest) {
        // throw away the test ids if we're not going to evaluate them
        testDocumentIds.clear();
    }
    int nStart = 0;
    int nEnd = documentIds.size();
    int total = documentIds.size();
    if (nMod > 0) {
        nMod = Math.min(total, nMod);
    }
    if (nMod > 0 && nSlice > nMod) {
        log.info("more slices than documents, skipping slice: " + nSlice);
        return;
    }
    if (nMod > 0) {
        int sliceSize = total / nMod;
        nStart = sliceSize * (nSlice - 1);
        if (nSlice != nMod)
            nEnd = nStart + sliceSize;
    }
    for (int i = nStart; i < nEnd; i++) {
        // left hand side of kernel evaluation
        final long instanceId1 = documentIds.get(i);
        if (log.isInfoEnabled())
            log.info("evaluating kernel for instance_id1 = " + instanceId1);
        // list of instance ids right hand side of kernel evaluation
        final SortedSet<Long> rightDocumentIDs = new TreeSet<Long>(testDocumentIds);
        if (i < documentIds.size()) {
            // rightDocumentIDs.addAll(documentIds.subList(i + 1,
            // documentIds.size() - 1));
            rightDocumentIDs.addAll(documentIds.subList(i, documentIds.size()));
        }
        // remove instances already evaluated
        for (KernelEvaluationInstance kEval : this.kernelEvaluationDao
                .getAllKernelEvaluationsForInstance(kernelEvaluation, instanceId1)) {
            rightDocumentIDs.remove(
                    instanceId1 == kEval.getInstanceId1() ? kEval.getInstanceId2() : kEval.getInstanceId1());
        }
        // kernel evaluations for this instance are done in a single tx
        // hibernate can batch insert these
        txTemplate.execute(new TransactionCallback<Object>() {

            @Override
            public Object doInTransaction(TransactionStatus arg0) {
                evalInstance(instanceIDMap, kernelEvaluation, instanceId1, rightDocumentIDs);
                return null;
            }
        });

    }
}

From source file:org.apache.flume.channel.file.Log.java

/**
 * Write the current checkpoint object and then swap objects so that
 * the next checkpoint occurs on the other checkpoint directory.
 *
 * Synchronization is not required because this method acquires a
 * write lock. So this method gets exclusive access to all the
 * data structures this method accesses.
 * @param force  a flag to force the writing of checkpoint
 * @throws IOException if we are unable to write the checkpoint out to disk
 */
private Boolean writeCheckpoint(Boolean force) throws Exception {
    boolean checkpointCompleted = false;
    long usableSpace = checkpointDir.getUsableSpace();
    if (usableSpace <= minimumRequiredSpace) {
        throw new IOException("Usable space exhaused, only " + usableSpace + " bytes remaining, required "
                + minimumRequiredSpace + " bytes");
    }
    boolean lockAcquired = tryLockExclusive();
    if (!lockAcquired) {
        return false;
    }
    SortedSet<Integer> logFileRefCountsAll = null, logFileRefCountsActive = null;
    try {
        if (queue.checkpoint(force)) {
            long logWriteOrderID = queue.getLogWriteOrderID();

            //Since the active files might also be in the queue's fileIDs,
            //we need to either move each one to a new set or remove each one
            //as we do here. Otherwise we cannot make sure every element in
            //the fileID set from the queue has been updated.
            //Since copying the whole set is cheaper than inserting element by
            //element, make a copy of the set first so that we can use it later.
            logFileRefCountsAll = queue.getFileIDs();
            logFileRefCountsActive = new TreeSet<Integer>(logFileRefCountsAll);

            int numFiles = logFiles.length();
            for (int i = 0; i < numFiles; i++) {
                LogFile.Writer logWriter = logFiles.get(i);
                int logFileID = logWriter.getLogFileID();
                File logFile = logWriter.getFile();
                LogFile.MetaDataWriter writer = LogFileFactory.getMetaDataWriter(logFile, logFileID);
                try {
                    writer.markCheckpoint(logWriter.position(), logWriteOrderID);
                } finally {
                    writer.close();
                }
                logFileRefCountsAll.remove(logFileID);
                LOGGER.info("Updated checkpoint for file: " + logFile + " position: " + logWriter.position()
                        + " logWriteOrderID: " + logWriteOrderID);
            }

            // Update any inactive data files as well
            Iterator<Integer> idIterator = logFileRefCountsAll.iterator();
            while (idIterator.hasNext()) {
                int id = idIterator.next();
                LogFile.RandomReader reader = idLogFileMap.remove(id);
                File file = reader.getFile();
                reader.close();
                LogFile.MetaDataWriter writer = LogFileFactory.getMetaDataWriter(file, id);
                try {
                    writer.markCheckpoint(logWriteOrderID);
                } finally {
                    writer.close();
                }
                reader = LogFileFactory.getRandomReader(file, encryptionKeyProvider);
                idLogFileMap.put(id, reader);
                LOGGER.debug("Updated checkpoint for file: " + file + "logWriteOrderID " + logWriteOrderID);
                idIterator.remove();
            }
            Preconditions.checkState(logFileRefCountsAll.size() == 0,
                    "Could not update all data file timestamps: " + logFileRefCountsAll);
            //Add files from all log directories
            for (int index = 0; index < logDirs.length; index++) {
                logFileRefCountsActive.add(logFiles.get(index).getLogFileID());
            }
            checkpointCompleted = true;
        }
    } finally {
        unlockExclusive();
    }
    //Do the deletes outside the checkpointWriterLock
    //Delete logic is expensive.
    if (open && checkpointCompleted) {
        removeOldLogs(logFileRefCountsActive);
    }
    //Since the exception is not caught, this will not be returned if
    //an exception is thrown from the try.
    return true;
}

From source file:org.apache.hadoop.hbase.regionserver.Memcache.java

private boolean remove(final NavigableSet<KeyValue> set, final KeyValue kv) {
    SortedSet<KeyValue> s = set.tailSet(kv);
    if (s.isEmpty()) {
        return false;
    }
    boolean removed = false;
    for (KeyValue k : s) {
        if (this.comparatorIgnoreType.compare(k, kv) == 0) {
            // Same r/c/ts.  Remove it.
            s.remove(k);
            removed = true;
            continue;
        }
        break;
    }
    return removed;
}

From source file:org.apache.hadoop.yarn.util.Log4jWarningErrorMetricsAppender.java

private void updateMessageDetails(String message, Long eventTimeSeconds,
        Map<String, SortedMap<Long, Integer>> map, SortedMap<Long, Integer> timestampsCount,
        SortedSet<PurgeElement> purgeInformation) {
    synchronized (lock) {
        if (map.containsKey(message)) {
            SortedMap<Long, Integer> tmp = map.get(message);
            Long lastMessageTime = tmp.lastKey();
            int value = 1;
            if (tmp.containsKey(eventTimeSeconds)) {
                value = tmp.get(eventTimeSeconds) + 1;
            }
            tmp.put(eventTimeSeconds, value);
            purgeInformation.remove(new PurgeElement(message, lastMessageTime));
        } else {
            SortedMap<Long, Integer> value = new TreeMap<>();
            value.put(eventTimeSeconds, 1);
            map.put(message, value);
            if (map.size() > maxUniqueMessages * 2) {
                cleanupTimer.cancel();
                cleanupTimer = new Timer();
                cleanupTimer.schedule(new ErrorAndWarningsCleanup(), 0);
            }
        }
        purgeInformation.add(new PurgeElement(message, eventTimeSeconds));
        int newValue = 1;
        if (timestampsCount.containsKey(eventTimeSeconds)) {
            newValue = timestampsCount.get(eventTimeSeconds) + 1;
        }
        timestampsCount.put(eventTimeSeconds, newValue);
    }
}

From source file:org.artifactory.repo.index.MavenIndexerServiceImpl.java

private IndexerDescriptor getAndCheckDescriptor() {
    IndexerDescriptor descriptor = centralConfig.getDescriptor().getIndexer();
    if (descriptor != null) {
        SortedSet<RepoBaseDescriptor> set = new TreeSet<RepoBaseDescriptor>();
        if (descriptor.getExcludedRepositories() == null) {
            //Auto exclude all remote and virtual repos
            set.addAll(repositoryService.getRemoteRepoDescriptors());
            set.addAll(getAllVirtualReposExceptGlobal());
        } else {
            set.addAll(descriptor.getExcludedRepositories());
            // Always remove globalVirtual one
            VirtualRepoDescriptor dummyGlobal = new VirtualRepoDescriptor();
            dummyGlobal.setKey(VirtualRepoDescriptor.GLOBAL_VIRTUAL_REPO_KEY);
            set.remove(dummyGlobal);
        }
        descriptor.setExcludedRepositories(set);
    }
    return descriptor;
}

From source file:org.codehaus.mojo.license.api.DefaultThirdPartyTool.java

private void resolveUnsafe(SortedSet<MavenProject> unsafeDependencies, LicenseMap licenseMap,
        Map<String, MavenProject> unsafeProjects, SortedProperties unsafeMappings, SortedProperties result) {
    for (String id : unsafeProjects.keySet()) {

        if (unsafeMappings.containsKey(id)) {

            String license = (String) unsafeMappings.get(id);
            if (StringUtils.isEmpty(license)) {

                // an empty license means it has not been filled in; skip it
                continue;
            }

            // found a resolved unsafe dependency in the missing third party file
            MavenProject resolvedProject = unsafeProjects.get(id);
            unsafeDependencies.remove(resolvedProject);

            // push the resolved license back to the result
            result.put(id, license.trim());

            addLicense(licenseMap, resolvedProject, license);
        }
    }
}

From source file:org.codehaus.mojo.license.api.DefaultThirdPartyTool.java

/**
 * {@inheritDoc}
 */
public SortedProperties loadUnsafeMapping(LicenseMap licenseMap, SortedMap<String, MavenProject> artifactCache,
        String encoding, File missingFile) throws IOException {
    SortedSet<MavenProject> unsafeDependencies = getProjectsWithNoLicense(licenseMap, false);

    SortedProperties unsafeMappings = new SortedProperties(encoding);

    if (missingFile.exists()) {
        // there are some unsafe dependencies

        getLogger().info("Load missing file " + missingFile);

        // load the missing file
        unsafeMappings.load(missingFile);
    }

    // get from the missing file, all unknown dependencies
    List<String> unknownDependenciesId = new ArrayList<String>();

    // coming from maven-license-plugin, we used the full g/a/v/c/t. Now we remove classifier and type
    // since the GAV is good enough to qualify the license of any of its artifacts...
    Map<String, String> migrateKeys = migrateMissingFileKeys(unsafeMappings.keySet());

    for (Object o : migrateKeys.keySet()) {
        String id = (String) o;
        String migratedId = migrateKeys.get(id);

        MavenProject project = artifactCache.get(migratedId);
        if (project == null) {
            // now we are sure this is an unknown dependency
            unknownDependenciesId.add(id);
        } else {
            if (!id.equals(migratedId)) {

                // migrates id to migratedId
                getLogger().info("Migrates [" + id + "] to [" + migratedId + "] in the missing file.");
                Object value = unsafeMappings.get(id);
                unsafeMappings.remove(id);
                unsafeMappings.put(migratedId, value);
            }
        }
    }

    if (!unknownDependenciesId.isEmpty()) {

        // there are some unknown dependencies in the missing file, remove them
        for (String id : unknownDependenciesId) {
            getLogger().warn(
                    "dependency [" + id + "] does not exist in project, remove it from the missing file.");
            unsafeMappings.remove(id);
        }

        unknownDependenciesId.clear();
    }

    // push back loaded dependencies
    for (Object o : unsafeMappings.keySet()) {
        String id = (String) o;

        MavenProject project = artifactCache.get(id);
        if (project == null) {
            getLogger().warn("dependency [" + id + "] does not exist in project.");
            continue;
        }

        String license = (String) unsafeMappings.get(id);
        if (StringUtils.isEmpty(license)) {

            // an empty license means it has not been filled in; skip it
            continue;
        }

        // add license in map
        addLicense(licenseMap, project, license);

        // remove unknown license
        unsafeDependencies.remove(project);
    }

    if (unsafeDependencies.isEmpty()) {

        // no more unknown licenses in the map
        licenseMap.remove(LicenseMap.UNKNOWN_LICENSE_MESSAGE);
    } else {

        // add a "with no value license" for missing dependencies
        for (MavenProject project : unsafeDependencies) {
            String id = MojoHelper.getArtifactId(project.getArtifact());
            if (getLogger().isDebugEnabled()) {
                getLogger().debug("dependency [" + id + "] has no license, add it in the missing file.");
            }
            unsafeMappings.setProperty(id, "");
        }
    }
    return unsafeMappings;
}

From source file:org.codehaus.mojo.license.DefaultThirdPartyTool.java

/**
 * {@inheritDoc}
 */
@Override
public SortedProperties loadThirdPartyDescriptorsForUnsafeMapping(String encoding,
        Collection<MavenProject> projects, SortedSet<MavenProject> unsafeDependencies, LicenseMap licenseMap,
        ArtifactRepository localRepository, List<ArtifactRepository> remoteRepositories)
        throws ThirdPartyToolException, IOException {

    SortedProperties result = new SortedProperties(encoding);
    Map<String, MavenProject> unsafeProjects = new HashMap<String, MavenProject>();
    for (MavenProject unsafeDependency : unsafeDependencies) {
        String id = MojoHelper.getArtifactId(unsafeDependency.getArtifact());
        unsafeProjects.put(id, unsafeDependency);
    }

    for (MavenProject mavenProject : projects) {

        if (CollectionUtils.isEmpty(unsafeDependencies)) {

            // no more unsafe dependencies to find
            break;
        }

        File thirdPartyDescriptor = resolvThirdPartyDescriptor(mavenProject, localRepository,
                remoteRepositories);

        if (thirdPartyDescriptor != null && thirdPartyDescriptor.exists()
                && thirdPartyDescriptor.length() > 0) {

            if (getLogger().isInfoEnabled()) {
                getLogger().info("Detects third party descriptor " + thirdPartyDescriptor);
            }

            // there is a third party file detected from the given dependency
            SortedProperties unsafeMappings = new SortedProperties(encoding);

            if (thirdPartyDescriptor.exists()) {

                getLogger().debug("Load missing file " + thirdPartyDescriptor);

                // load the missing file
                unsafeMappings.load(thirdPartyDescriptor);
            }

            for (String id : unsafeProjects.keySet()) {

                if (unsafeMappings.containsKey(id)) {

                    String license = (String) unsafeMappings.get(id);
                    if (StringUtils.isEmpty(license)) {

                        // an empty license means it has not been filled in; skip it
                        continue;
                    }

                    // found a resolved unsafe dependency in the missing third party file
                    MavenProject resolvedProject = unsafeProjects.get(id);
                    unsafeDependencies.remove(resolvedProject);

                    // push the resolved license back to the result
                    result.put(id, license.trim());

                    addLicense(licenseMap, resolvedProject, license);
                }
            }
        }
    }
    return result;
}

From source file:org.codehaus.mojo.license.LicenseMap.java

protected SortedProperties loadUnsafeMapping(SortedMap<String, MavenProject> artifactCache, String encoding,
        File missingFile) throws IOException, ProjectBuildingException {

    SortedSet<MavenProject> unsafeDependencies = getUnsafeDependencies();

    SortedProperties unsafeMappings = new SortedProperties(encoding);

    // there are some unsafe dependencies
    if (missingFile.exists()) {

        getLog().info("Load missing file " + missingFile);

        // load the missing file
        unsafeMappings.load(missingFile);
    }

    // get from the missing file, all unknown dependencies
    List<String> unknownDependenciesId = new ArrayList<String>();

    // migrate unsafe mappings (before version 3.0 we did not have the type of
    // the dependency in the missing file; now we must deal with it, so check it)

    List<String> migrateId = new ArrayList<String>();
    //        SortedMap<String, MavenProject> artifactCache = AbstractAddThirdPartyMojo.getArtifactCache();
    for (Object o : unsafeMappings.keySet()) {
        String id = (String) o;
        MavenProject project = artifactCache.get(id);
        if (project == null) {
            // try with the --jar type
            project = artifactCache.get(id + "--jar");
            if (project == null) {

                // now we are sure this is an unknown dependency
                unknownDependenciesId.add(id);
            } else {

                // this dependency must be migrated
                migrateId.add(id);
            }
        }
    }

    if (!unknownDependenciesId.isEmpty()) {

        // there are some unknown dependencies in the missing file, remove them
        for (String id : unknownDependenciesId) {
            getLog().warn(
                    "dependency [" + id + "] does not exists in project, remove it from the missing file.");
            unsafeMappings.remove(id);
        }

        unknownDependenciesId.clear();
    }

    if (!migrateId.isEmpty()) {

        // there is some dependencies to migrate in the missing file
        for (String id : migrateId) {
            String newId = id + "--jar";
            getLog().info("Migrate " + id + " to " + newId + " in the missing file.");
            Object value = unsafeMappings.get(id);
            unsafeMappings.remove(id);
            unsafeMappings.put(newId, value);
        }

        migrateId.clear();
    }

    // push back loaded dependencies
    for (Object o : unsafeMappings.keySet()) {
        String id = (String) o;

        MavenProject project = artifactCache.get(id);
        if (project == null) {
            getLog().warn("dependency [" + id + "] does not exists in project.");
            continue;
        }

        String license = (String) unsafeMappings.get(id);
        if (StringUtils.isEmpty(license)) {

            // an empty license means it has not been filled in; skip it
            continue;
        }

        // add license in map
        License l = new License();
        l.setName(license.trim());
        l.setUrl(license.trim());

        // add license
        addLicense(project, Arrays.asList(l));

        // remove unknown license
        unsafeDependencies.remove(project);
    }

    if (unsafeDependencies.isEmpty()) {

        // no more unknown licenses in the map
        remove(getUnknownLicenseMessage());
    } else {

        // add a "with no value license" for missing dependencies
        for (MavenProject project : unsafeDependencies) {
            String id = ArtifactHelper.getArtifactId(project.getArtifact());
            unsafeMappings.setProperty(id, "");
        }
    }
    return unsafeMappings;
}

From source file:org.dllearner.reasoning.SPARQLReasoner.java

private OWLClassExpression computeDomain(OWLProperty property) {
    String query = String.format("SELECT ?domain WHERE {" + "<%s> <%s> ?domain. FILTER(isIRI(?domain))" + "}",
            property.toStringID(), RDFS.domain.getURI());

    try (QueryExecution qe = qef.createQueryExecution(query)) {
        ResultSet rs = qe.execSelect();
        SortedSet<OWLClassExpression> domains = new TreeSet<>();
        while (rs.hasNext()) {
            QuerySolution qs = rs.next();
            domains.add(df.getOWLClass(IRI.create(qs.getResource("domain").getURI())));
        }
        domains.remove(df.getOWLThing());
        if (domains.size() == 1) {
            return domains.first();
        } else if (domains.size() > 1) {
            return df.getOWLObjectIntersectionOf(domains);
        }
        return df.getOWLThing();
    } catch (Exception e) {
        logger.error("Failed to compute the domain for " + property + ".", e);
    }
    return null;
}