Example usage for java.util TreeMap remove

List of usage examples for java.util TreeMap remove

Introduction

This page lists example usages of java.util.TreeMap.remove(Object key), collected from open-source projects.

Prototype

public V remove(Object key) 

Document

Removes the mapping for this key from this TreeMap if present.
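
A minimal standalone sketch of the contract above (not drawn from the projects listed under Usage): remove returns the value previously mapped to the key, or null if the key was absent.

import java.util.TreeMap;

public class TreeMapRemoveExample {
    public static void main(String[] args) {
        TreeMap<String, Integer> scores = new TreeMap<>();
        scores.put("alice", 10);
        scores.put("bob", 7);

        // remove(Object key) returns the old value, or null when the key is not present
        Integer removed = scores.remove("alice");   // 10
        Integer missing = scores.remove("charlie"); // null

        System.out.println(removed); // 10
        System.out.println(missing); // null
        System.out.println(scores);  // {bob=7}
    }
}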

Usage

From source file:com.irccloud.android.data.EventsDataSource.java

public void pruneEvents(int bid) {
    synchronized (events) {
        TreeMap<Long, Event> e = events.get(bid);
        while (e != null && e.size() > 50 && e.firstKey() != null) {
            e.remove(e.firstKey());
        }
    }
}

From source file:org.opennms.netmgt.config.WmiPeerFactory.java

/**
 * Combine specific and range elements so that WMIPeerFactory has to spend
 * less time iterating all these elements.
 * TODO This really should be pulled up into PeerFactory somehow, but I'm not sure how
 * (given that "Definition" is different for both SNMP and WMI). Maybe some sort of
 * visitor methodology would work. The basic logic should be fine as it's all IP
 * address manipulation.
 *
 * @throws UnknownHostException
 */
static void optimize() throws UnknownHostException {

    // First pass: Remove empty definition elements
    for (Iterator<Definition> definitionsIterator = m_config.getDefinitionCollection()
            .iterator(); definitionsIterator.hasNext();) {
        Definition definition = definitionsIterator.next();
        if (definition.getSpecificCount() == 0 && definition.getRangeCount() == 0) {

            LOG.debug("optimize: Removing empty definition element");
            definitionsIterator.remove();
        }
    }

    // Second pass: Replace single IP range elements with specific elements
    for (Definition definition : m_config.getDefinitionCollection()) {
        synchronized (definition) {
            for (Iterator<Range> rangesIterator = definition.getRangeCollection().iterator(); rangesIterator
                    .hasNext();) {
                Range range = rangesIterator.next();
                if (range.getBegin().equals(range.getEnd())) {
                    definition.addSpecific(range.getBegin());
                    rangesIterator.remove();
                }
            }
        }
    }

    // Third pass: Sort specific and range elements for improved XML
    // readability and then combine them into fewer elements where possible
    for (Iterator<Definition> defIterator = m_config.getDefinitionCollection().iterator(); defIterator
            .hasNext();) {
        Definition definition = defIterator.next();

        // Sort specifics
        final TreeMap<InetAddress, String> specificsMap = new TreeMap<InetAddress, String>(
                new InetAddressComparator());
        for (String specific : definition.getSpecificCollection()) {
            specificsMap.put(InetAddressUtils.getInetAddress(specific), specific.trim());
        }

        // Sort ranges
        final TreeMap<InetAddress, Range> rangesMap = new TreeMap<InetAddress, Range>(
                new InetAddressComparator());
        for (Range range : definition.getRangeCollection()) {
            rangesMap.put(InetAddressUtils.getInetAddress(range.getBegin()), range);
        }

        // Combine consecutive specifics into ranges
        InetAddress priorSpecific = null;
        Range addedRange = null;
        for (final InetAddress specific : specificsMap.keySet()) {
            if (priorSpecific == null) {
                priorSpecific = specific;
                continue;
            }

            if (BigInteger.ONE.equals(InetAddressUtils.difference(specific, priorSpecific))
                    && InetAddressUtils.inSameScope(specific, priorSpecific)) {
                if (addedRange == null) {
                    addedRange = new Range();
                    addedRange.setBegin(InetAddressUtils.toIpAddrString(priorSpecific));
                    rangesMap.put(priorSpecific, addedRange);
                    specificsMap.remove(priorSpecific);
                }

                addedRange.setEnd(InetAddressUtils.toIpAddrString(specific));
                specificsMap.remove(specific);
            } else {
                addedRange = null;
            }

            priorSpecific = specific;
        }

        // Move specifics to ranges
        for (final InetAddress specific : new ArrayList<InetAddress>(specificsMap.keySet())) {
            for (final InetAddress begin : new ArrayList<InetAddress>(rangesMap.keySet())) {
                if (!InetAddressUtils.inSameScope(begin, specific)) {
                    continue;
                }

                if (InetAddressUtils.toInteger(begin).subtract(BigInteger.ONE)
                        .compareTo(InetAddressUtils.toInteger(specific)) > 0) {
                    continue;
                }

                Range range = rangesMap.get(begin);

                final InetAddress end = InetAddressUtils.getInetAddress(range.getEnd());

                if (InetAddressUtils.toInteger(end).add(BigInteger.ONE)
                        .compareTo(InetAddressUtils.toInteger(specific)) < 0) {
                    continue;
                }

                if (InetAddressUtils.toInteger(specific).compareTo(InetAddressUtils.toInteger(begin)) >= 0
                        && InetAddressUtils.toInteger(specific)
                                .compareTo(InetAddressUtils.toInteger(end)) <= 0) {
                    specificsMap.remove(specific);
                    break;
                }

                if (InetAddressUtils.toInteger(begin).subtract(BigInteger.ONE)
                        .equals(InetAddressUtils.toInteger(specific))) {
                    rangesMap.remove(begin);
                    rangesMap.put(specific, range);
                    range.setBegin(InetAddressUtils.toIpAddrString(specific));
                    specificsMap.remove(specific);
                    break;
                }

                if (InetAddressUtils.toInteger(end).add(BigInteger.ONE)
                        .equals(InetAddressUtils.toInteger(specific))) {
                    range.setEnd(InetAddressUtils.toIpAddrString(specific));
                    specificsMap.remove(specific);
                    break;
                }
            }
        }

        // Combine consecutive ranges
        Range priorRange = null;
        InetAddress priorBegin = null;
        InetAddress priorEnd = null;
        for (final Iterator<InetAddress> rangesIterator = rangesMap.keySet().iterator(); rangesIterator
                .hasNext();) {
            final InetAddress beginAddress = rangesIterator.next();
            final Range range = rangesMap.get(beginAddress);
            final InetAddress endAddress = InetAddressUtils.getInetAddress(range.getEnd());

            if (priorRange != null) {
                if (InetAddressUtils.inSameScope(beginAddress, priorEnd)
                        && InetAddressUtils.difference(beginAddress, priorEnd).compareTo(BigInteger.ONE) <= 0) {
                    priorBegin = new InetAddressComparator().compare(priorBegin, beginAddress) < 0 ? priorBegin
                            : beginAddress;
                    priorRange.setBegin(InetAddressUtils.toIpAddrString(priorBegin));
                    priorEnd = new InetAddressComparator().compare(priorEnd, endAddress) > 0 ? priorEnd
                            : endAddress;
                    priorRange.setEnd(InetAddressUtils.toIpAddrString(priorEnd));

                    rangesIterator.remove();
                    continue;
                }
            }

            priorRange = range;
            priorBegin = beginAddress;
            priorEnd = endAddress;
        }

        // Update changes made to sorted maps
        definition.setSpecific(specificsMap.values().toArray(new String[0]));
        definition.setRange(rangesMap.values().toArray(new Range[0]));
    }
}

From source file:com.serphacker.serposcope.db.google.GoogleSerpRescanDB.java

public void rescan(Integer specificRunId, Collection<GoogleTarget> targets, Collection<GoogleSearch> searches,
        boolean updateSummary) {
    LOG.debug("SERP rescan (bulk) : starting");
    long _start = System.currentTimeMillis();
    Map<Integer, Integer> searchCountByGroup = searchDB.countByGroup();
    Run specPrevRun = null;
    Map<Integer, GoogleTargetSummary> specPrevRunSummaryByTarget = new HashMap<>();

    if (specificRunId != null) {
        specPrevRun = runDB.findPrevious(specificRunId);
        if (specPrevRun != null) {
            specPrevRunSummaryByTarget = targetSummaryDB.list(specPrevRun.getId()).stream()
                    .collect(Collectors.toMap(GoogleTargetSummary::getTargetId, Function.identity()));
        }
    }

    List<GoogleRank> ranks = new ArrayList<>();
    for (GoogleTarget target : targets) {

        Map<Integer, GoogleTargetSummary> summaryByRunId = new HashMap<>();
        GoogleTargetSummary specificPreviousSummary = specPrevRunSummaryByTarget.get(target.getId());
        if (specificPreviousSummary != null) {
            summaryByRunId.put(specPrevRun.getId(), specificPreviousSummary);
        }

        for (GoogleSearch search : searches) {
            final MutableInt previousRunId = new MutableInt(0);
            final MutableInt previousRank = new MutableInt(GoogleRank.UNRANKED);
            GoogleBest searchBest = new GoogleBest(target.getGroupId(), target.getId(), search.getId(),
                    GoogleRank.UNRANKED, null, null);

            if (specPrevRun != null) {
                previousRunId.setValue(specPrevRun.getId());
                previousRank.setValue(
                        rankDB.get(specPrevRun.getId(), target.getGroupId(), target.getId(), search.getId()));
                GoogleBest specificBest = rankDB.getBest(target.getGroupId(), target.getId(), search.getId());
                if (specificBest != null) {
                    searchBest = specificBest;
                }
            }
            final GoogleBest best = searchBest;

            serpDB.stream(specificRunId, specificRunId, search.getId(), (GoogleSerp res) -> {

                int rank = GoogleRank.UNRANKED;
                String rankedUrl = null;
                for (int i = 0; i < res.getEntries().size(); i++) {
                    if (target.match(res.getEntries().get(i).getUrl())) {
                        rankedUrl = res.getEntries().get(i).getUrl();
                        rank = i + 1;
                        break;
                    }
                }

                // only update last run
                GoogleRank gRank = new GoogleRank(res.getRunId(), target.getGroupId(), target.getId(),
                        search.getId(), rank, previousRank.shortValue(), rankedUrl);
                ranks.add(gRank);
                if (ranks.size() > 2000) {
                    rankDB.insert(ranks);
                    ranks.clear();
                }

                if (updateSummary) {
                    GoogleTargetSummary summary = summaryByRunId.get(res.getRunId());
                    if (summary == null) {
                        summaryByRunId.put(res.getRunId(),
                                summary = new GoogleTargetSummary(target.getGroupId(), target.getId(),
                                        res.getRunId(), 0));
                    }
                    summary.addRankCandidat(gRank);
                }

                if (rank != GoogleRank.UNRANKED && rank <= best.getRank()) {
                    best.setRank((short) rank);
                    best.setUrl(rankedUrl);
                    best.setRunDay(res.getRunDay());
                }

                previousRunId.setValue(res.getRunId());
                previousRank.setValue(rank);
            });

            if (best.getRank() != GoogleRank.UNRANKED) {
                rankDB.insertBest(best);
            }
        }

        // fill previous summary score
        if (updateSummary) {
            TreeMap<Integer, GoogleTargetSummary> summaries = new TreeMap<>(summaryByRunId);

            GoogleTargetSummary previousSummary = null;
            for (Map.Entry<Integer, GoogleTargetSummary> entry : summaries.entrySet()) {
                GoogleTargetSummary summary = entry.getValue();
                summary.computeScoreBP(searchCountByGroup.getOrDefault(summary.getGroupId(), 0));
                if (previousSummary != null) {
                    summary.setPreviousScoreBP(previousSummary.getScoreBP());
                }
                previousSummary = summary;
            }

            if (specPrevRun != null) {
                summaries.remove(specPrevRun.getId());
            }

            if (!summaries.isEmpty()) {
                targetSummaryDB.insert(summaries.values());
            }
        }
    }

    if (!ranks.isEmpty()) {
        rankDB.insert(ranks);
        ranks.clear();
    }

    LOG.debug("SERP rescan : done, duration = {}",
            DurationFormatUtils.formatDurationHMS(System.currentTimeMillis() - _start));
}

From source file:org.apache.hadoop.hdfs.client.ShortCircuitCache.java

/**
 * Remove a replica from an evictable map.
 *
 * @param replica   The replica to remove.
 * @param map       The map to remove it from.
 */
private void removeEvictable(ShortCircuitReplica replica, TreeMap<Long, ShortCircuitReplica> map) {
    Long evictableTimeNs = replica.getEvictableTimeNs();
    Preconditions.checkNotNull(evictableTimeNs);
    ShortCircuitReplica removed = map.remove(evictableTimeNs);
    Preconditions.checkState(removed == replica, "failed to make " + replica + " unevictable");
    replica.setEvictableTimeNs(null);
}

From source file:org.apache.hadoop.hdfs.shortcircuit.ShortCircuitCache.java

/**
 * Remove a replica from an evictable map.
 *
 * @param replica   The replica to remove.
 * @param map       The map to remove it from.
 */
private void removeEvictable(ShortCircuitReplica replica, TreeMap<Long, ShortCircuitReplica> map) {
    Long evictableTimeNs = replica.getEvictableTimeNs();
    Preconditions.checkNotNull(evictableTimeNs);
    ShortCircuitReplica removed = map.remove(evictableTimeNs);
    Preconditions.checkState(removed == replica, "failed to make %s unevictable", replica);
    replica.setEvictableTimeNs(null);
}

From source file:com.amalto.workbench.utils.XSDAnnotationsStructure.java

public void removeSchematron(String pattern) {
    TreeMap<String, String> infos = getSchematrons();
    for (Entry<String, String> entry : infos.entrySet()) {
        if (pattern.equals(entry.getValue())) {
            infos.remove(entry.getKey());
            break;
        }
    }
    setSchematrons(infos.values());
}

From source file:com.amalto.workbench.utils.XSDAnnotationsStructure.java

public void removeWorkflow(String pattern) {
    TreeMap<String, String> infos = getSchematrons();
    for (Entry<String, String> entry : infos.entrySet()) {
        if (pattern.equals(entry.getValue())) {
            infos.remove(entry.getKey());
            break;
        }
    }
    setSchematrons(infos.values());
}

From source file:org.apache.nutch.crawl.CrawlDbReader.java

public Object query(Map<String, String> args, Configuration conf, String type, String crawlId)
        throws Exception {

    Map<String, Object> results = new HashMap<>();
    String crawlDb = crawlId + "/crawldb";

    if (type.equalsIgnoreCase("stats")) {
        boolean sort = false;
        if (args.containsKey("sort")) {
            if (args.get("sort").equalsIgnoreCase("true"))
                sort = true;
        }
        TreeMap<String, Writable> stats = processStatJobHelper(crawlDb, NutchConfiguration.create(), sort);
        LongWritable totalCnt = (LongWritable) stats.get("T");
        stats.remove("T");
        results.put("totalUrls", String.valueOf(totalCnt.get()));
        Map<String, Object> statusMap = new HashMap<>();

        for (Map.Entry<String, Writable> entry : stats.entrySet()) {
            String k = entry.getKey();
            long val = 0L;
            double fval = 0.0;
            if (entry.getValue() instanceof LongWritable) {
                val = ((LongWritable) entry.getValue()).get();
            } else if (entry.getValue() instanceof FloatWritable) {
                fval = ((FloatWritable) entry.getValue()).get();
            } else if (entry.getValue() instanceof BytesWritable) {
                continue;
            }
            if (k.equals("scn")) {
                results.put("minScore", String.valueOf(fval));
            } else if (k.equals("scx")) {
                results.put("maxScore", String.valueOf(fval));
            } else if (k.equals("sct")) {
                results.put("avgScore", String.valueOf((fval / totalCnt.get())));
            } else if (k.startsWith("status")) {
                String[] st = k.split(" ");
                int code = Integer.parseInt(st[1]);
                if (st.length > 2) {
                    @SuppressWarnings("unchecked")
                    Map<String, Object> individualStatusInfo = (Map<String, Object>) statusMap
                            .get(String.valueOf(code));
                    Map<String, String> hostValues;
                    if (individualStatusInfo.containsKey("hostValues")) {
                        hostValues = (Map<String, String>) individualStatusInfo.get("hostValues");
                    } else {
                        hostValues = new HashMap<>();
                        individualStatusInfo.put("hostValues", hostValues);
                    }
                    hostValues.put(st[2], String.valueOf(val));
                } else {
                    Map<String, Object> individualStatusInfo = new HashMap<>();

                    individualStatusInfo.put("statusValue", CrawlDatum.getStatusName((byte) code));
                    individualStatusInfo.put("count", String.valueOf(val));

                    statusMap.put(String.valueOf(code), individualStatusInfo);
                }
            } else {
                results.put(k, String.valueOf(val));
            }
        }
        results.put("status", statusMap);
        return results;
    }
    if (type.equalsIgnoreCase("dump")) {
        String output = args.get("out_dir");
        String format = "normal";
        String regex = null;
        Integer retry = null;
        String status = null;
        String expr = null;
        Float sample = null;
        if (args.containsKey("format")) {
            format = args.get("format");
        }
        if (args.containsKey("regex")) {
            regex = args.get("regex");
        }
        if (args.containsKey("retry")) {
            retry = Integer.parseInt(args.get("retry"));
        }
        if (args.containsKey("status")) {
            status = args.get("status");
        }
        if (args.containsKey("expr")) {
            expr = args.get("expr");
        }
        if (args.containsKey("sample")) {
            sample = Float.parseFloat(args.get("sample"));
        }
        processDumpJob(crawlDb, output, conf, format, regex, status, retry, expr, sample);
        File dumpFile = new File(output + "/part-00000");
        return dumpFile;
    }
    if (type.equalsIgnoreCase("topN")) {
        String output = args.get("out_dir");
        long topN = Long.parseLong(args.get("nnn"));
        float min = 0.0f;
        if (args.containsKey("min")) {
            min = Float.parseFloat(args.get("min"));
        }
        processTopNJob(crawlDb, topN, min, output, conf);
        File dumpFile = new File(output + "/part-00000");
        return dumpFile;
    }

    if (type.equalsIgnoreCase("url")) {
        String url = args.get("url");
        CrawlDatum res = get(crawlDb, url, conf);
        results.put("status", res.getStatus());
        results.put("fetchTime", new Date(res.getFetchTime()));
        results.put("modifiedTime", new Date(res.getModifiedTime()));
        results.put("retriesSinceFetch", res.getRetriesSinceFetch());
        results.put("retryInterval", res.getFetchInterval());
        results.put("score", res.getScore());
        results.put("signature", StringUtil.toHexString(res.getSignature()));
        Map<String, String> metadata = new HashMap<>();
        if (res.getMetaData() != null) {
            for (Entry<Writable, Writable> e : res.getMetaData().entrySet()) {
                metadata.put(String.valueOf(e.getKey()), String.valueOf(e.getValue()));
            }
        }
        results.put("metadata", metadata);

        return results;
    }
    return results;
}

From source file:org.apache.sysml.runtime.compress.PlanningBinPacker.java

private TreeMap<Float, List<List<Integer>>> packFirstFit(List<Integer> items, List<Float> itemWeights) {
    // when searching for a bin, the first bin in the list is used
    TreeMap<Float, List<List<Integer>>> bins = new TreeMap<Float, List<List<Integer>>>();
    // first bin
    bins.put(_binWeight, createBinList());
    int numItems = items.size();
    for (int i = 0; i < numItems; i++) {
        float itemWeight = itemWeights.get(i);
        Map.Entry<Float, List<List<Integer>>> entry = bins.ceilingEntry(itemWeight);
        if (entry == null) {
            // new bin
            float newBinWeight = _binWeight - itemWeight;
            List<List<Integer>> binList = bins.get(newBinWeight);
            if (binList == null) {
                bins.put(newBinWeight, createBinList(items.get(i)));
            } else {
                List<Integer> newBin = new ArrayList<Integer>();
                newBin.add(items.get(i));
                binList.add(newBin);
            }
        } else {
            // add to the first bin in the list
            List<Integer> assignedBin = entry.getValue().remove(0);
            assignedBin.add(items.get(i));
            if (entry.getValue().size() == 0)
                bins.remove(entry.getKey());
            float newBinWeight = entry.getKey() - itemWeight;
            List<List<Integer>> newBinsList = bins.get(newBinWeight);
            if (newBinsList == null) {
                // new bin
                bins.put(newBinWeight, createBinList(assignedBin));
            } else {
                newBinsList.add(assignedBin);
            }
        }
    }
    return bins;
}

From source file:ca.sqlpower.sqlobject.TestSQLTable.java

public void testGetDerivedInstance() throws Exception {
    SQLTable derivedTable;
    SQLTable table1;
    // Check to make sure it can be added to a playpen like database
    SQLDatabase pp = new SQLDatabase();
    pp.setPlayPenDatabase(true);
    pp.setParent(new StubSQLObject());
    assertNotNull(table1 = db.getTableByName("REGRESSION_TEST1"));
    derivedTable = table1.createInheritingInstance(pp);

    TreeMap<String, Object> derivedPropertyMap = new TreeMap<String, Object>(BeanUtils.describe(derivedTable));
    TreeMap<String, Object> table1PropertyMap = new TreeMap<String, Object>(BeanUtils.describe(table1));

    table1PropertyMap.remove("parent");
    table1PropertyMap.remove("SQLParent");
    table1PropertyMap.remove("schemaName");
    table1PropertyMap.remove("schema");
    table1PropertyMap.remove("parentDatabase");
    table1PropertyMap.remove("shortDisplayName");
    table1PropertyMap.remove("UUID");
    table1PropertyMap.remove("workspaceContainer");
    table1PropertyMap.remove("runnableDispatcher");
    table1PropertyMap.remove("SPListeners");

    for (Map.Entry<String, Object> property : table1PropertyMap.entrySet()) {
        assertEquals("Property \"" + property.getKey() + "\" has changed;", property.getValue(),
                derivedPropertyMap.get(property.getKey()));
    }

}