Example usage for java.util SortedSet remove

Introduction

This page lists example usages of java.util.SortedSet.remove, collected from open-source projects.

Prototype

boolean remove(Object o);

Document

Removes the specified element from this set if it is present (optional operation).
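
A minimal, self-contained sketch of the contract (not taken from the examples below): remove returns true only when the element was actually present.

import java.util.SortedSet;
import java.util.TreeSet;

public class SortedSetRemoveDemo {
    public static void main(String[] args) {
        // TreeSet is the standard SortedSet implementation
        SortedSet<String> names = new TreeSet<>();
        names.add("alice");
        names.add("bob");

        System.out.println(names.remove("alice")); // true: element was present
        System.out.println(names.remove("carol")); // false: nothing to remove

        System.out.println(names); // [bob]
    }
}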

Usage

From source file: org.bedework.timezones.common.leveldb.LdbCachedData.java

private void updateFromDiffEntry(final String dtstamp, final AliasMaps amaps, final DiffListEntry dle)
        throws TzException {
    try {
        open();

        final String id = dle.tzid;

        if (!dle.aliasChangeOnly) {
            TzDbSpec dbspec = getSpec(id);

            if (dbspec != null) {
                if (dle.add) {
                    throw new TzException("Inconsistent change list");
                }
            } else {
                if (!dle.add) {
                    throw new TzException("Inconsistent change list");
                }
                dbspec = new TzDbSpec();
                dbspec.setName(id);
            }

            dbspec.setDtstamp(dtstamp);
            dbspec.setSource(cfg.getPrimaryUrl());
            dbspec.setActive(true);
            dbspec.setVtimezone(TzServerUtil.getCalHdr() + dle.tzSpec + TzServerUtil.getCalTlr());

            // XXX Localized names?

            putTzSpec(dbspec);
        }

        if (Util.isEmpty(dle.aliases)) {
            return;
        }

        final SortedSet<String> aliases = amaps.byTzid.get(id);

        for (final String a : dle.aliases) {
            TzAlias alias = getTzAlias(a);

            if (alias == null) {
                alias = new TzAlias(a);
            }

            alias.addTargetId(id);

            putTzAlias(alias);

            aliases.remove(a);
        }

        /* remaining aliases should be deleted */
        for (final String alias : aliases) {
            final TzAlias tza = getTzAlias(alias);
            removeTzAlias(tza);
        }
    } catch (final TzException tze) {
        throw tze;
    } catch (final Throwable t) {
        throw new TzException(t);
    } finally {
        close();
    }
}
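
The alias loop above follows a mark-and-sweep pattern: every alias confirmed by the change list is removed from the SortedSet, so whatever survives the loop is exactly the set of stale aliases to delete. A schematic sketch of the same idea with plain strings (the data here is illustrative, not Bedework API):

import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;

public class StaleAliasSweep {
    public static void main(String[] args) {
        // Aliases currently stored for a timezone id (hypothetical data)
        SortedSet<String> stored = new TreeSet<>(List.of("EST", "US/Eastern", "America/NewYork"));
        // Aliases the upstream change list still reports
        List<String> current = List.of("EST", "US/Eastern");

        for (String a : current) {
            stored.remove(a); // mark as seen
        }

        // Whatever remains was not confirmed upstream and should be deleted
        System.out.println("stale: " + stored); // stale: [America/NewYork]
    }
}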

From source file: net.sourceforge.fenixedu.domain.Lesson.java

public SortedSet<YearMonthDay> getAllPossibleDatesToInsertSummary() {

    HourMinuteSecond now = new HourMinuteSecond();
    YearMonthDay currentDate = new YearMonthDay();
    SortedSet<YearMonthDay> datesToInsert = getAllLessonDatesUntil(currentDate);

    for (Summary summary : getAssociatedSummaries()) {
        YearMonthDay summaryDate = summary.getSummaryDateYearMonthDay();
        datesToInsert.remove(summaryDate);
    }

    for (Iterator<YearMonthDay> iter = datesToInsert.iterator(); iter.hasNext();) {
        YearMonthDay date = iter.next();
        if (!isTimeValidToInsertSummary(now, date)) {
            iter.remove();
        }
    }

    return datesToInsert;
}
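
Note the two removal styles in this method: datesToInsert.remove(summaryDate) is safe in the first loop because it iterates a different collection (the summaries), while the second loop must go through Iterator.remove because it is walking datesToInsert itself. A minimal sketch of the distinction, with hypothetical data rather than Lesson dates:

import java.util.Iterator;
import java.util.SortedSet;
import java.util.TreeSet;

public class RemoveWhileIterating {
    public static void main(String[] args) {
        SortedSet<Integer> days = new TreeSet<>();
        for (int i = 1; i <= 10; i++) {
            days.add(i);
        }

        // Safe: removal goes through the iterator, not the set
        for (Iterator<Integer> iter = days.iterator(); iter.hasNext();) {
            if (iter.next() % 2 == 0) {
                iter.remove();
            }
        }
        // Calling days.remove(...) inside that loop instead would throw
        // ConcurrentModificationException on the next call to iter.next()
        System.out.println(days); // [1, 3, 5, 7, 9]
    }
}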

From source file: net.sourceforge.fenixedu.domain.Lesson.java

private SortedSet<YearMonthDay> getAllLessonInstancesDatesToCreate(YearMonthDay startDate, YearMonthDay endDate,
        Boolean createLessonInstances) {

    if (startDate != null && endDate != null && !startDate.isAfter(endDate) && createLessonInstances) {

        SortedSet<YearMonthDay> possibleLessonDates = getAllValidLessonDatesWithoutInstancesDates(startDate,
                endDate);
        List<LessonInstance> allLessonInstancesUntil = getAllLessonInstancesUntil(endDate.toLocalDate());

        for (LessonInstance lessonInstance : allLessonInstancesUntil) {
            possibleLessonDates.remove(lessonInstance.getDay());
        }

        return possibleLessonDates;
    }
    return new TreeSet<YearMonthDay>();
}

From source file: org.bedework.timezones.common.leveldb.LdbCachedData.java

/** Call the primary server and get a list of data that's changed since we last
 * looked. Then fetch each changed timezone and update the db.
 *
 * <p>We try not to keep the db locked for long periods</p>
 *
 * @return true if we successfully contacted the server
 * @throws TzException
 */
private synchronized boolean updateFromPrimary() throws TzException {
    if (debug) {
        trace("Updating from primary");
    }

    try {
        if (cfg.getPrimaryServer()) {
            // We are a primary. No update needed
            if (debug) {
                trace("We are a primary: exit");
            }

            return true; // good enough
        }

        if (cfg.getPrimaryUrl() == null) {
            warn("No primary URL: exit");

            return true; // good enough
        }

        /* Get the list of changed tzs from the primary */

        final Timezones tzs = new TimezonesImpl();
        tzs.init(cfg.getPrimaryUrl());

        final String changedSince = cfg.getDtstamp();

        final long startTime = System.currentTimeMillis();
        long fetchTime = 0;

        final TimezoneListType tzl;

        try {
            tzl = tzs.getList(changedSince);
        } catch (final TzUnknownHostException tuhe) {
            error("Unknown host exception contacting " + cfg.getPrimaryUrl());
            return false;
        } catch (final Throwable t) {
            error("Exception contacting " + cfg.getPrimaryUrl());
            error(t);
            return false;
        }

        final String svrCs = tzl.getDtstamp();

        if ((changedSince == null) || !svrCs.equals(changedSince)) {
            cfg.setDtstamp(svrCs);

            TzServerUtil.saveConfig();
        }

        primaryFetches++;
        lastFetchCt = tzl.getTimezones().size();

        String isAre = "are";
        String theS = "s";

        if (lastFetchCt == 1) {
            isAre = "is";
            theS = "";
        }

        info("There " + isAre + " " + lastFetchCt + " timezone" + theS + " to fetch");

        final List<TzEntry> tzEntries = new ArrayList<>();

        /* First go through the returned list and get our own spec.
           Need the db for that.
         */
        try {
            open();

            for (final TimezoneType sum : tzl.getTimezones()) {
                final TzEntry entry = new TzEntry();

                entry.id = sum.getTzid();
                entry.sum = sum;
                if (debug) {
                    trace("Get db spec for timezone " + entry.id);
                }

                entry.dbspec = getSpec(entry.id);

                tzEntries.add(entry);
            }
        } finally {
            close();
        }

        /* Now fetch the timezones from the primary - no db needed
         */

        for (final TzEntry entry : tzEntries) {
            if (debug) {
                trace("Fetching timezone " + entry.id);
            }

            String etag = null;
            if (entry.dbspec != null) {
                etag = entry.dbspec.getEtag();
            }

            final long startFetch = System.currentTimeMillis();
            final TaggedTimeZone ttz = tzs.getTimeZone(entry.id, etag);

            fetchTime += System.currentTimeMillis() - startFetch;

            if ((ttz != null) && (ttz.vtz == null)) {
                // No change
                continue;
            }

            if (ttz == null) {
                warn("Received timezone id " + entry.id + " but not available.");
                continue;
            }

            entry.ttz = ttz;
        }

        /* Go through the entries and try to update.
         * If ttz is null no update needed.
         * If dbspec is null it's an add.
         */

        final AliasMaps amaps = buildAliasMaps();

        try {
            open();

            for (final TzEntry entry : tzEntries) {
                if (debug) {
                    trace("Processing timezone " + entry.id);
                }

                if (entry.ttz == null) {
                    if (debug) {
                        trace("No change.");
                    }
                    continue;
                }

                final boolean add = entry.dbspec == null;

                if (add) {
                    // Create a new one
                    entry.dbspec = new TzDbSpec();
                }

                entry.dbspec.setName(entry.id);
                entry.dbspec.setEtag(entry.ttz.etag);
                entry.dbspec.setDtstamp(DateTimeUtil.rfcDateTimeUTC(entry.sum.getLastModified()));
                entry.dbspec.setSource(cfg.getPrimaryUrl());
                entry.dbspec.setActive(true);
                entry.dbspec.setVtimezone(entry.ttz.vtz);

                if (!Util.isEmpty(entry.sum.getLocalNames())) {
                    final Set<LocalizedString> dns;

                    if (add) {
                        dns = new TreeSet<>();
                        entry.dbspec.setDisplayNames(dns);
                    } else {
                        dns = entry.dbspec.getDisplayNames();
                        dns.clear(); // XXX not good - forces delete and recreate
                    }

                    for (final LocalNameType ln : entry.sum.getLocalNames()) {
                        final LocalizedString ls = new LocalizedString(ln.getLang(), ln.getValue());

                        dns.add(ls);
                    }
                }

                putTzSpec(entry.dbspec);

                /* Get all aliases for this id */
                final SortedSet<String> aliases = amaps.byTzid.get(entry.id);

                if (!Util.isEmpty(entry.sum.getAliases())) {
                    for (final String a : entry.sum.getAliases()) {
                        TzAlias tza = amaps.byAlias.get(a);

                        if (tza == null) {
                            tza = new TzAlias(a);
                        }

                        tza.addTargetId(entry.id);

                        putTzAlias(tza);

                        /* We've seen this alias. Remove from the list */
                        if (aliases != null) {
                            aliases.remove(a);
                        }
                    }
                }

                if (aliases != null) {
                    /* remaining aliases should be deleted */
                    for (final String alias : aliases) {
                        final TzAlias tza = getTzAlias(alias);
                        removeTzAlias(tza);
                    }
                }
            }
        } finally {
            close();
        }

        info("Total time: " + TzServerUtil.printableTime(System.currentTimeMillis() - startTime));
        info("Fetch time: " + TzServerUtil.printableTime(fetchTime));
        lastFetchStatus = "Success";
    } catch (final TzException tze) {
        lastFetchStatus = "Failed";
        throw tze;
    } catch (final Throwable t) {
        lastFetchStatus = "Failed";
        throw new TzException(t);
    }

    return true;
}

From source file: com.gargoylesoftware.htmlunit.html.HtmlPage.java

private void removeElement(final Map<String, SortedSet<DomElement>> map, final DomElement element,
        final String attribute, final boolean recurse) {
    // first try real attributes
    String value = element.getAttribute(attribute);

    if (DomElement.ATTRIBUTE_NOT_DEFINED == value && !(element instanceof HtmlApplet)) {
        // second try are JavaScript attributes
        // ...but applets are a bit special so ignore them
        final ScriptableObject scriptObject = element.getScriptableObject();
        // we have to make sure the scriptObject has a slot for the given attribute.
        // just using get() may use e.g. getWithPreemption().
        if (scriptObject.has(attribute, scriptObject)) {
            final Object jsValue = scriptObject.get(attribute, scriptObject);
            if (jsValue != null && jsValue != Scriptable.NOT_FOUND && jsValue instanceof String) {
                value = (String) jsValue;
            }
        }
    }

    if (!StringUtils.isEmpty(value)) {
        final SortedSet<DomElement> elements = map.remove(value);
        if (elements != null && (elements.size() != 1 || !elements.contains(element))) {
            elements.remove(element);
            map.put(value, elements);
        }
    }
    if (recurse) {
        for (final DomElement child : element.getChildElements()) {
            removeElement(map, child, attribute, true);
        }
    }
}
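
Two different remove methods meet in this example: Map.remove(key) returns the removed value (here the whole SortedSet of elements), while SortedSet.remove(element) returns a boolean. A tiny sketch of the contrast, with hypothetical names:

import java.util.Map;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;

public class MapVsSetRemove {
    public static void main(String[] args) {
        Map<String, SortedSet<String>> index = new TreeMap<>();
        SortedSet<String> ids = new TreeSet<>();
        ids.add("e1");
        ids.add("e2");
        index.put("name", ids);

        // Map.remove returns the mapped value, or null if absent
        SortedSet<String> removed = index.remove("name");
        // SortedSet.remove reports whether the element was present
        boolean wasPresent = removed.remove("e1");

        System.out.println(removed + " " + wasPresent); // [e2] true
    }
}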

From source file: org.jahia.tools.maven.plugins.LegalArtifactAggregator.java

private void processJarFile(InputStream inputStream, String jarFilePath, JarMetadata contextJarMetadata,
        boolean processMavenPom, int level, boolean lookForNotice, boolean lookForLicense,
        boolean processingSources) throws IOException {
    // if we don't need to find either a license or notice, don't process the jar at all
    if (!lookForLicense && !lookForNotice) {
        return;
    }

    final String indent = getIndent(level);
    output(indent, "Processing JAR " + jarFilePath + "...", false, true);

    // JarFile realJarFile = new JarFile(jarFile);
    JarInputStream jarInputStream = new JarInputStream(inputStream);
    String bundleLicense = null;
    Manifest manifest = jarInputStream.getManifest();
    if (manifest != null && manifest.getMainAttributes() != null) {
        bundleLicense = manifest.getMainAttributes().getValue("Bundle-License");
        if (bundleLicense != null) {
            output(indent, "Found Bundle-License attribute with value:" + bundleLicense);
            KnownLicense knownLicense = getKnowLicenseByName(bundleLicense);
            // this data is not reliable, especially on the ServiceMix repackaged bundles
        }
    }
    String pomFilePath = null;
    byte[] pomByteArray = null;

    final String jarFileName = getJarFileName(jarFilePath);
    if (contextJarMetadata == null) {
        contextJarMetadata = jarDatabase.get(jarFileName);
        if (contextJarMetadata == null) {
            // compute project name
            contextJarMetadata = new JarMetadata(jarFilePath, jarFileName);
        }
        jarDatabase.put(jarFileName, contextJarMetadata);
    }

    Notice notice;
    JarEntry curJarEntry = null;
    while ((curJarEntry = jarInputStream.getNextJarEntry()) != null) {

        if (!curJarEntry.isDirectory()) {
            final String fileName = curJarEntry.getName();
            if (lookForNotice && isNotice(fileName, jarFilePath)) {

                output(indent, "Processing notice found in " + curJarEntry + "...");

                InputStream noticeInputStream = jarInputStream;
                List<String> noticeLines = IOUtils.readLines(noticeInputStream);
                notice = new Notice(noticeLines);

                Map<String, Notice> notices = contextJarMetadata.getNoticeFiles();
                if (notices == null) {
                    notices = new TreeMap<>();
                    notices.put(fileName, notice);
                    output(indent, "Found first notice " + curJarEntry);
                } else if (!notices.containsValue(notice)) {
                    output(indent, "Found additional notice " + curJarEntry);
                    notices.put(fileName, notice);
                } else {
                    output(indent, "Duplicated notice in " + curJarEntry);
                    notices.put(fileName, notice);
                    duplicatedNotices.add(jarFilePath);
                }

                // IOUtils.closeQuietly(noticeInputStream);
            } else if (processMavenPom && fileName.endsWith("pom.xml")) {
                // remember pom file path in case we need it
                pomFilePath = curJarEntry.getName();
                ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
                IOUtils.copy(jarInputStream, byteArrayOutputStream);
                pomByteArray = byteArrayOutputStream.toByteArray();

            } else if (lookForLicense && isLicense(fileName, jarFilePath)) {

                output(indent, "Processing license found in " + curJarEntry + "...");
                InputStream licenseInputStream = jarInputStream;
                List<String> licenseLines = IOUtils.readLines(licenseInputStream);

                LicenseFile licenseFile = new LicenseFile(jarFilePath, fileName, jarFilePath, licenseLines);

                resolveKnownLicensesByText(licenseFile);

                if (StringUtils.isNotBlank(licenseFile.getAdditionalLicenseText())
                        && StringUtils.isNotBlank(licenseFile.getAdditionalLicenseText().trim())) {
                    KnownLicense knownLicense = new KnownLicense();
                    knownLicense.setId(FilenameUtils.getBaseName(jarFilePath) + "-additional-terms");
                    knownLicense
                            .setName("Additional license terms from " + FilenameUtils.getBaseName(jarFilePath));
                    List<TextVariant> textVariants = new ArrayList<>();
                    TextVariant textVariant = new TextVariant();
                    textVariant.setId("default");
                    textVariant.setDefaultVariant(true);
                    textVariant.setText(Pattern.quote(licenseFile.getAdditionalLicenseText()));
                    textVariants.add(textVariant);
                    knownLicense.setTextVariants(textVariants);
                    knownLicense.setTextToUse(licenseFile.getAdditionalLicenseText());
                    knownLicense.setViral(licenseFile.getText().toLowerCase().contains("gpl"));
                    knownLicenses.getLicenses().put(knownLicense.getId(), knownLicense);
                    licenseFile.getKnownLicenses().add(knownLicense);
                    licenseFile.getKnownLicenseKeys().add(knownLicense.getId());
                }

                for (KnownLicense knownLicense : licenseFile.getKnownLicenses()) {
                    SortedSet<LicenseFile> licenseFiles = knownLicensesFound.get(knownLicense);
                    if (licenseFiles != null) {
                        if (!licenseFiles.contains(licenseFile)) {
                            licenseFiles.add(licenseFile);
                        }
                        knownLicensesFound.put(knownLicense, licenseFiles);
                    } else {
                        licenseFiles = new TreeSet<>();
                        licenseFiles.add(licenseFile);
                        knownLicensesFound.put(knownLicense, licenseFiles);
                    }
                }

                Map<String, LicenseFile> licenseFiles = contextJarMetadata.getLicenseFiles();
                if (licenseFiles == null) {
                    licenseFiles = new TreeMap<>();
                }
                if (licenseFiles.containsKey(fileName)) {
                    // warning we already have a license file here, what should we do ?
                    output(indent, "License file already exists for " + jarFilePath + " will override it !",
                            true, false);
                    licenseFiles.remove(fileName);
                }
                licenseFiles.put(fileName, licenseFile);

                // IOUtils.closeQuietly(licenseInputStream);

            } else if (fileName.endsWith(".jar")) {
                InputStream embeddedJarInputStream = jarInputStream;
                ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
                IOUtils.copy(embeddedJarInputStream, byteArrayOutputStream);
                final JarMetadata embeddedJarMetadata = new JarMetadata(jarFilePath, getJarFileName(fileName));

                if (embeddedJarMetadata != null) {
                    embeddedJarMetadata.setJarContents(byteArrayOutputStream.toByteArray());
                    contextJarMetadata.getEmbeddedJars().add(embeddedJarMetadata);
                }
            } else if (fileName.endsWith(".class")) {
                String className = fileName.substring(0, fileName.length() - ".class".length()).replaceAll("/",
                        ".");
                int lastPoint = className.lastIndexOf(".");
                String packageName = null;
                if (lastPoint > 0) {
                    packageName = className.substring(0, lastPoint);
                    SortedSet<String> currentJarPackages = jarDatabase
                            .get(FilenameUtils.getBaseName(jarFilePath)).getPackages();
                    if (currentJarPackages == null) {
                        currentJarPackages = new TreeSet<>();
                    }
                    currentJarPackages.add(packageName);
                }
            }

        }
        jarInputStream.closeEntry();
    }

    jarInputStream.close();
    jarInputStream = null;

    if (!contextJarMetadata.getEmbeddedJars().isEmpty()) {
        for (JarMetadata embeddedJarMetadata : contextJarMetadata.getEmbeddedJars()) {
            if (embeddedJarMetadata.getJarContents() != null) {
                ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(
                        embeddedJarMetadata.getJarContents());
                processJarFile(byteArrayInputStream, contextJarMetadata.toString(), null, true, level, true,
                        true, processingSources);
            } else {
                output(indent, "Couldn't find dependency for embedded JAR " + contextJarMetadata, true, false);
            }
        }
    }

    if (processMavenPom) {
        if (pomFilePath == null) {
            output(indent, "No POM found in " + jarFilePath);
        } else {
            output(indent, "Processing POM found at " + pomFilePath + " in " + jarFilePath + "...");
            ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(pomByteArray);
            processJarPOM(byteArrayInputStream, pomFilePath, jarFilePath, contextJarMetadata, lookForNotice,
                    lookForLicense, contextJarMetadata.getEmbeddedJars(), level + 1, processingSources);
        }
    }

    if (lookForLicense || lookForNotice) {
        if (lookForLicense) {
            output(indent, "No license found in " + jarFilePath);
        }
        if (lookForNotice) {
            output(indent, "No notice found in " + jarFilePath);
        }

        if (pomFilePath == null && lookForLicense && lookForNotice) {
            if (StringUtils.isBlank(contextJarMetadata.getVersion())) {
                output(indent, "Couldn't resolve version for JAR " + contextJarMetadata
                        + ", can't query Maven Central repository without version !");
            } else {
                List<Artifact> mavenCentralArtifacts = findArtifactInMavenCentral(contextJarMetadata.getName(),
                        contextJarMetadata.getVersion(), contextJarMetadata.getClassifier());
                if (mavenCentralArtifacts != null && mavenCentralArtifacts.size() == 1) {
                    Artifact mavenCentralArtifact = mavenCentralArtifacts.get(0);
                    Artifact resolvedArtifact = resolveArtifact(mavenCentralArtifact, level);
                    if (resolvedArtifact != null) {
                        // we have a copy of the local artifact, let's request the sources for it.
                        if (!processingSources && !"sources".equals(contextJarMetadata.getClassifier())) {
                            final Artifact artifact = new DefaultArtifact(resolvedArtifact.getGroupId(),
                                    resolvedArtifact.getArtifactId(), "sources", "jar",
                                    resolvedArtifact.getVersion());
                            File sourceJar = getArtifactFile(artifact, level);
                            if (sourceJar != null && sourceJar.exists()) {
                                FileInputStream sourceJarInputStream = new FileInputStream(sourceJar);
                                processJarFile(sourceJarInputStream, sourceJar.getPath(), contextJarMetadata,
                                        false, level + 1, lookForNotice, lookForLicense, true);
                                IOUtils.closeQuietly(sourceJarInputStream);
                            }
                        } else {
                            // we are already processing a sources artifact, we need to load the pom artifact to extract information from there
                            final Artifact artifact = new DefaultArtifact(resolvedArtifact.getGroupId(),
                                    resolvedArtifact.getArtifactId(), null, "pom",
                                    resolvedArtifact.getVersion());
                            File artifactPom = getArtifactFile(artifact, level);
                            if (artifactPom != null && artifactPom.exists()) {
                                output(indent, "Processing POM for " + artifact + "...");
                                processPOM(lookForNotice, lookForLicense, jarFilePath, contextJarMetadata,
                                        contextJarMetadata.getEmbeddedJars(), level + 1,
                                        new FileInputStream(artifactPom), processingSources);
                            }
                        }
                    } else {
                        output(indent, "===>  Couldn't resolve artifact " + mavenCentralArtifact
                                + " in Maven Central. Please resolve license and notice files manually!", false,
                                true);
                    }
                } else {
                    output(indent, "===>  Couldn't find nor POM, license or notice. Please check manually!",
                            false, true);
                }
            }
        }
    }

    output(indent, "Done processing JAR " + jarFilePath + ".", false, true);

}

From source file: org.alfresco.repo.domain.node.AbstractNodeDAOImpl.java

/**
 * Bulk-fetch the nodes for a given store.  All nodes passed in are fetched.
 */
private void cacheNodesNoBatch(List<Node> nodes) {
    // Get the nodes
    SortedSet<Long> aspectNodeIds = new TreeSet<Long>();
    SortedSet<Long> propertiesNodeIds = new TreeSet<Long>();
    Map<Long, NodeVersionKey> nodeVersionKeysFromCache = new HashMap<Long, NodeVersionKey>(nodes.size() * 2); // Keep for quick lookup
    for (Node node : nodes) {
        Long nodeId = node.getId();
        NodeVersionKey nodeVersionKey = node.getNodeVersionKey();
        node.lock(); // Prevent unexpected edits of values going into the cache
        nodesCache.setValue(nodeId, node);
        if (propertiesCache.getValue(nodeVersionKey) == null) {
            propertiesNodeIds.add(nodeId);
        }
        if (aspectsCache.getValue(nodeVersionKey) == null) {
            aspectNodeIds.add(nodeId);
        }
        nodeVersionKeysFromCache.put(nodeId, nodeVersionKey);
    }

    if (logger.isDebugEnabled()) {
        logger.debug("Pre-loaded " + propertiesNodeIds.size() + " properties");
        logger.debug("Pre-loaded " + propertiesNodeIds.size() + " aspects");
    }

    Map<NodeVersionKey, Set<QName>> nodeAspects = selectNodeAspects(aspectNodeIds);
    for (Map.Entry<NodeVersionKey, Set<QName>> entry : nodeAspects.entrySet()) {
        NodeVersionKey nodeVersionKeyFromDb = entry.getKey();
        Long nodeId = nodeVersionKeyFromDb.getNodeId();
        Set<QName> qnames = entry.getValue();
        setNodeAspectsCached(nodeId, qnames);
        aspectNodeIds.remove(nodeId);
    }
    // Cache the absence of aspects too!
    for (Long nodeId : aspectNodeIds) {
        setNodeAspectsCached(nodeId, Collections.<QName>emptySet());
    }

    // First ensure all content data are pre-cached, so we don't have to load them individually when converting properties
    contentDataDAO.cacheContentDataForNodes(propertiesNodeIds);

    // Now bulk load the properties
    Map<NodeVersionKey, Map<NodePropertyKey, NodePropertyValue>> propsByNodeId = selectNodeProperties(
            propertiesNodeIds);
    for (Map.Entry<NodeVersionKey, Map<NodePropertyKey, NodePropertyValue>> entry : propsByNodeId.entrySet()) {
        Long nodeId = entry.getKey().getNodeId();
        Map<NodePropertyKey, NodePropertyValue> propertyValues = entry.getValue();
        Map<QName, Serializable> props = nodePropertyHelper.convertToPublicProperties(propertyValues);
        setNodePropertiesCached(nodeId, props);
    }
}

From source file: org.aika.network.neuron.lattice.AndNode.java

public static SortedMap<InputNode, Node> computeParents(Set<InputNode> inputs) {
    HashSet<Node> visited = new HashSet<>();
    SortedMap<InputNode, Node> parents = new TreeMap<>();

    for (InputNode a : inputs) {
        SortedSet<InputNode> childInputs = new TreeSet<>(inputs);
        childInputs.remove(a);
        if (!a.computeAndParents(childInputs, parents, visited)) {
            return null;
        }
    }

    return parents;
}
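
The TreeSet copy constructor plus remove computes "inputs minus a" without mutating the caller's set. A standalone sketch of that idiom (not Aika code):

import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;

public class SetMinusOne {
    static SortedSet<String> allBut(Set<String> inputs, String excluded) {
        // Copy first so the caller's set is left untouched
        SortedSet<String> copy = new TreeSet<>(inputs);
        copy.remove(excluded);
        return copy;
    }

    public static void main(String[] args) {
        Set<String> inputs = Set.of("a", "b", "c");
        System.out.println(allBut(inputs, "b")); // [a, c]
    }
}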

From source file: org.apache.accumulo.server.gc.SimpleGarbageCollector.java

/**
 * This method removes candidates from the candidate list under two conditions: 1. They are in the same folder as a bulk processing file, if that option is
 * selected 2. They are still in use in the file column family in the METADATA table
 */
public void confirmDeletes(SortedSet<String> candidates) throws AccumuloException {

    Scanner scanner;
    if (offline) {
        try {
            scanner = new OfflineMetadataScanner();
        } catch (IOException e) {
            throw new IllegalStateException("Unable to create offline metadata scanner", e);
        }
    } else {
        try {
            scanner = new IsolatedScanner(instance.getConnector(credentials)
                    .createScanner(Constants.METADATA_TABLE_NAME, Constants.NO_AUTHS));
        } catch (AccumuloSecurityException ex) {
            throw new AccumuloException(ex);
        } catch (TableNotFoundException ex) {
            throw new AccumuloException(ex);
        }
    }

    // skip candidates that are in a bulk processing folder
    if (checkForBulkProcessingFiles) {

        log.debug("Checking for bulk processing flags");

        scanner.setRange(Constants.METADATA_BLIP_KEYSPACE);

        // WARNING: This block is IMPORTANT
        // You MUST REMOVE candidates that are in the same folder as a bulk
        // processing flag!

        for (Entry<Key, Value> entry : scanner) {
            String blipPath = entry.getKey().getRow().toString()
                    .substring(Constants.METADATA_BLIP_FLAG_PREFIX.length());
            Iterator<String> tailIter = candidates.tailSet(blipPath).iterator();
            int count = 0;
            while (tailIter.hasNext()) {
                if (tailIter.next().startsWith(blipPath)) {
                    count++;
                    tailIter.remove();
                } else {
                    break;
                }
            }

            if (count > 0)
                log.debug("Folder has bulk processing flag: " + blipPath);

        }
    }

    // skip candidates that are still in use in the file column family in
    // the metadata table
    scanner.clearColumns();
    scanner.fetchColumnFamily(Constants.METADATA_DATAFILE_COLUMN_FAMILY);
    scanner.fetchColumnFamily(Constants.METADATA_SCANFILE_COLUMN_FAMILY);
    ColumnFQ.fetch(scanner, Constants.METADATA_DIRECTORY_COLUMN);

    TabletIterator tabletIterator = new TabletIterator(scanner, Constants.METADATA_KEYSPACE, false, true);

    while (tabletIterator.hasNext()) {
        Map<Key, Value> tabletKeyValues = tabletIterator.next();

        for (Entry<Key, Value> entry : tabletKeyValues.entrySet()) {
            if (entry.getKey().getColumnFamily().equals(Constants.METADATA_DATAFILE_COLUMN_FAMILY)
                    || entry.getKey().getColumnFamily().equals(Constants.METADATA_SCANFILE_COLUMN_FAMILY)) {

                String cf = entry.getKey().getColumnQualifier().toString();
                String delete;
                if (cf.startsWith("../")) {
                    delete = cf.substring(2);
                } else {
                    String table = new String(KeyExtent.tableOfMetadataRow(entry.getKey().getRow()));
                    delete = "/" + table + cf;
                }
                // WARNING: This line is EXTREMELY IMPORTANT.
                // You MUST REMOVE candidates that are still in use
                if (candidates.remove(delete))
                    log.debug("Candidate was still in use in the METADATA table: " + delete);

                String path = delete.substring(0, delete.lastIndexOf('/'));
                if (candidates.remove(path))
                    log.debug("Candidate was still in use in the METADATA table: " + path);
            } else if (Constants.METADATA_DIRECTORY_COLUMN.hasColumns(entry.getKey())) {
                String table = new String(KeyExtent.tableOfMetadataRow(entry.getKey().getRow()));
                String delete = "/" + table + entry.getValue().toString();
                if (candidates.remove(delete))
                    log.debug("Candidate was still in use in the METADATA table: " + delete);
            } else
                throw new AccumuloException(
                        "Scanner over metadata table returned unexpected column : " + entry.getKey());
        }
    }
}
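
The bulk-flag pruning above relies on tailSet returning a live view of the candidate set: iterating candidates.tailSet(blipPath) starts at the first element >= blipPath, and iterator.remove() deletes from the underlying set, not from a copy. A minimal sketch of that write-through behavior (the paths are made up):

import java.util.Iterator;
import java.util.SortedSet;
import java.util.TreeSet;

public class TailSetPrune {
    public static void main(String[] args) {
        SortedSet<String> candidates = new TreeSet<>();
        candidates.add("/t1/bulk/file1");
        candidates.add("/t1/bulk/file2");
        candidates.add("/t2/file3");

        String prefix = "/t1/bulk";
        // The tailSet view writes through: removing via its iterator
        // also removes from 'candidates'
        Iterator<String> it = candidates.tailSet(prefix).iterator();
        while (it.hasNext()) {
            if (it.next().startsWith(prefix)) {
                it.remove();
            } else {
                break; // sorted order: past the prefix, nothing else matches
            }
        }
        System.out.println(candidates); // [/t2/file3]
    }
}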

From source file: org.apache.cassandra.db.Table.java

/**
 * This method adds the row to the Commit Log associated with this table.
 * Once this happens the data associated with the individual column families
 * is also written to the column family store's memtable.
 */
public void apply(RowMutation mutation, Object serializedMutation, boolean writeCommitLog) throws IOException {
    HashMap<ColumnFamilyStore, Memtable> memtablesToFlush = new HashMap<ColumnFamilyStore, Memtable>(2);

    // write the mutation to the commitlog and memtables
    flusherLock.readLock().lock();
    try {
        if (writeCommitLog)
            CommitLog.instance().add(mutation, serializedMutation);

        DecoratedKey key = StorageService.getPartitioner().decorateKey(mutation.key());
        for (ColumnFamily cf : mutation.getColumnFamilies()) {
            ColumnFamilyStore cfs = columnFamilyStores.get(cf.id());
            if (cfs == null) {
                logger.error("Attempting to mutate non-existant column family " + cf.id());
                continue;
            }

            SortedSet<byte[]> mutatedIndexedColumns = null;
            for (byte[] column : cfs.getIndexedColumns()) {
                if (cf.getColumnNames().contains(column)) {
                    if (mutatedIndexedColumns == null)
                        mutatedIndexedColumns = new TreeSet<byte[]>(FBUtilities.byteArrayComparator);
                    mutatedIndexedColumns.add(column);
                }
            }

            if (mutatedIndexedColumns == null) {
                // just update the actual value, no extra synchronization
                applyCF(cfs, key, cf, memtablesToFlush);
            } else {
                synchronized (indexLockFor(mutation.key())) {
                    // read old indexed values
                    QueryFilter filter = QueryFilter.getNamesFilter(key,
                            new QueryPath(cfs.getColumnFamilyName()), mutatedIndexedColumns);
                    ColumnFamily oldIndexedColumns = cfs.getColumnFamily(filter);

                    // ignore obsolete column updates
                    if (oldIndexedColumns != null) {
                        for (IColumn oldColumn : oldIndexedColumns) {
                            if (cfs.metadata.reconciler
                                    .reconcile((Column) oldColumn, (Column) cf.getColumn(oldColumn.name()))
                                    .equals(oldColumn)) {
                                cf.remove(oldColumn.name());
                                mutatedIndexedColumns.remove(oldColumn.name());
                                oldIndexedColumns.remove(oldColumn.name());
                            }
                        }
                    }

                    // apply the mutation
                    applyCF(cfs, key, cf, memtablesToFlush);

                    // add new index entries
                    for (byte[] columnName : mutatedIndexedColumns) {
                        IColumn column = cf.getColumn(columnName);
                        DecoratedKey<LocalToken> valueKey = cfs.getIndexKeyFor(columnName, column.value());
                        ColumnFamily cfi = cfs.newIndexedColumnFamily(columnName);
                        cfi.addColumn(new Column(mutation.key(), ArrayUtils.EMPTY_BYTE_ARRAY, column.clock()));
                        applyCF(cfs.getIndexedColumnFamilyStore(columnName), valueKey, cfi, memtablesToFlush);
                    }

                    // remove the old index entries
                    if (oldIndexedColumns != null) {
                        int localDeletionTime = (int) (System.currentTimeMillis() / 1000);
                        for (Map.Entry<byte[], IColumn> entry : oldIndexedColumns.getColumnsMap().entrySet()) {
                            byte[] columnName = entry.getKey();
                            IColumn column = entry.getValue();
                            DecoratedKey<LocalToken> valueKey = cfs.getIndexKeyFor(columnName, column.value());
                            ColumnFamily cfi = cfs.newIndexedColumnFamily(columnName);
                            cfi.deleteColumn(mutation.key(), localDeletionTime, column.clock());
                            applyCF(cfs.getIndexedColumnFamilyStore(columnName), valueKey, cfi,
                                    memtablesToFlush);
                        }
                    }
                }
            }

            ColumnFamily cachedRow = cfs.getRawCachedRow(key);
            if (cachedRow != null)
                cachedRow.addAll(cf);
        }
    } finally {
        flusherLock.readLock().unlock();
    }

    // flush memtables that got filled up.  usually mTF will be empty and this will be a no-op
    for (Map.Entry<ColumnFamilyStore, Memtable> entry : memtablesToFlush.entrySet())
        entry.getKey().maybeSwitchMemtable(entry.getValue(), writeCommitLog);
}