Example usage for java.util SortedMap get

List of usage examples for java.util SortedMap get

Introduction

On this page you can find usage examples for java.util SortedMap get.

Prototype

V get(Object key);

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
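
As a quick, self-contained illustration (not taken from any of the source files below), the sketch shows the two outcomes the contract above describes: a mapped value for a present key and null for an absent one.

import java.util.SortedMap;
import java.util.TreeMap;

public class SortedMapGetDemo {
    public static void main(String[] args) {
        SortedMap<String, Integer> scores = new TreeMap<>();
        scores.put("alice", 10);
        scores.put("bob", 7);

        // Present key: get returns the mapped value.
        Integer alice = scores.get("alice"); // 10

        // Absent key: get returns null instead of throwing.
        Integer carol = scores.get("carol"); // null

        System.out.println(alice + ", " + carol);
    }
}

Because get returns null for missing keys, callers typically guard the lookup with containsKey, a null check, or getOrDefault, as several of the examples below do.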

Usage

From source file:hudson.plugins.jobConfigHistory.FileHistoryDaoTest.java

@Test
public void testGetRevisions() throws Exception {
    when(mockedNode.getNodeName()).thenReturn("slave1");
    createNodeRevision("2014-01-18_10-12-34", mockedNode);
    createNodeRevision("2014-01-19_10-12-34", mockedNode);
    createNodeRevision("2014-01-20_10-12-34", mockedNode);
    createNodeRevision("2014-01-20_10-21-34", mockedNode);
    SortedMap<String, HistoryDescr> revisions = sutWithUserAndNoDuplicateHistory.getRevisions(mockedNode);
    assertNotNull("Revisiosn 2014-01-18_10-12-34 should be returned.", revisions.get("2014-01-18_10-12-34"));
    assertNotNull("Revisiosn 2014-01-19_10-12-34 should be returned.", revisions.get("2014-01-19_10-12-34"));
    assertNotNull("Revisiosn 2014-01-20_10-12-34 should be returned.", revisions.get("2014-01-20_10-12-34"));
    assertNotNull("Revisiosn 2014-01-20_10-21-34 should be returned.", revisions.get("2014-01-20_10-21-34"));
}

From source file:edu.umd.cfar.lamp.viper.util.Range.java

/**
 * @see java.util.Set#contains(java.lang.Object)
 */
public boolean contains(Object o) {
    if (spans.size() == 0) {
        return false;
    } else if (o instanceof Comparable) {
        SortedMap m = spans.headMap(o);
        if (m.size() > 0) {
            Comparable e = (Comparable) m.get(m.lastKey());
            if (e.compareTo(o) > 0) {
                return true;
            }
        }
        m = spans.tailMap(o);
        if (m.size() > 0) {
            Comparable s = (Comparable) m.firstKey();
            return s.compareTo(o) == 0;
        }
        return false;
    } else {
        return withinRange((Interval) o);
    }
}

From source file:org.libreplan.business.planner.entities.TaskElement.java

private void addToResult(SortedMap<LocalDate, EffortDuration> result, LocalDate date, EffortDuration duration) {
    EffortDuration current = result.get(date) != null ? result.get(date) : zero();
    result.put(date, current.plus(duration));
}
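
A note on the pattern above: since Java 8, Map#getOrDefault (which SortedMap inherits) expresses the same null-check-with-fallback lookup directly. A minimal sketch of the equivalent method, assuming zero() and EffortDuration.plus behave as the original code implies:

private void addToResult(SortedMap<LocalDate, EffortDuration> result, LocalDate date, EffortDuration duration) {
    // getOrDefault returns zero() when no mapping exists, otherwise the mapped duration.
    EffortDuration current = result.getOrDefault(date, zero());
    result.put(date, current.plus(duration));
}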

From source file:voldemort.routing.ConsistentRoutingStrategy.java

public ConsistentRoutingStrategy(HashFunction hash, Collection<Node> nodes, int numReplicas) {
    this.numReplicas = numReplicas;
    this.hash = hash;
    // sanity check that we don't assign the same partition to multiple nodes
    SortedMap<Integer, Node> m = new TreeMap<Integer, Node>();
    for (Node n : nodes) {
        for (Integer partition : n.getPartitionIds()) {
            if (m.containsKey(partition))
                throw new IllegalArgumentException(
                        "Duplicate partition id " + partition + " in cluster configuration " + nodes);
            m.put(partition, n);
        }
    }

    this.partitionToNode = new Node[m.size()];
    for (int i = 0; i < m.size(); i++) {
        if (!m.containsKey(i))
            throw new IllegalArgumentException("Invalid configuration, missing partition " + i);
        this.partitionToNode[i] = m.get(i);
    }
}

From source file:org.apache.hcatalog.hcatmix.load.HCatMapper.java

@Override
public void map(LongWritable longWritable, Text text, OutputCollector<LongWritable, IntervalResult> collector,
        final Reporter reporter) throws IOException {
    LOG.info(MessageFormat.format("Input: {0}={1}", longWritable, text));
    final List<Future<SortedMap<Long, IntervalResult>>> futures = new ArrayList<Future<SortedMap<Long, IntervalResult>>>();

    // Initialize tasks
    List<org.apache.hcatalog.hcatmix.load.tasks.Task> tasks;
    try {
        tasks = initializeTasks(jobConf);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }

    ThreadCreatorTimer createNewThreads = new ThreadCreatorTimer(new TimeKeeper(timeKeeper), tasks,
            threadIncrementCount, futures, reporter);

    // Create timer thread to automatically keep on increasing threads at fixed interval
    Timer newThreadCreator = new Timer(true);
    newThreadCreator.scheduleAtFixedRate(createNewThreads, 0, threadIncrementIntervalInMillis);

    // Sleep and let the tasks get expired
    long remainingTime = timeKeeper.getRemainingTimeIncludingBuffer();
    final long sleepPeriod = 2000;
    for (long i = remainingTime; i > 0; i = i - sleepPeriod) {
        try {
            Thread.sleep(sleepPeriod);
            reporter.progress();
        } catch (InterruptedException e) {
            LOG.error("Got interrupted while sleeping for timer thread to finish");
        }
    }

    newThreadCreator.cancel();
    LOG.info("Time is over, will collect the futures now. Total number of threads: " + futures.size());
    SortedMap<Long, IntervalResult> stopWatchAggregatedTimeSeries = new TreeMap<Long, IntervalResult>();

    // Merge the corresponding time interval results received from all the threads for each time interval
    for (TaskExecutor taskExecutor : createNewThreads.getTaskExecutors()) {
        try {
            SortedMap<Long, IntervalResult> threadTimeSeries = taskExecutor.getTimeSeriesResult();
            for (Map.Entry<Long, IntervalResult> entry : threadTimeSeries.entrySet()) {
                Long timeStamp = entry.getKey();
                IntervalResult intervalResult = entry.getValue();

                if (stopWatchAggregatedTimeSeries.containsKey(timeStamp)) {
                    stopWatchAggregatedTimeSeries.get(timeStamp).addIntervalResult(intervalResult);
                } else {
                    stopWatchAggregatedTimeSeries.put(timeStamp, intervalResult);
                }
                LOG.info(MessageFormat.format("{0}: Added {1} stopwatches. Current stopwatch number: {2}",
                        timeStamp, intervalResult.getStopWatchList().size(),
                        stopWatchAggregatedTimeSeries.get(timeStamp).getStopWatchList().size()));
            }
        } catch (Exception e) {
            LOG.error("Error while getting thread results", e);
        }
    }

    // Output the consolidated result for this map along with the number of threads against time
    LOG.info("Collected all the statistics for #threads: " + createNewThreads.getThreadCount());
    SortedMap<Long, Integer> threadCountTimeSeries = createNewThreads.getThreadCountTimeSeries();
    int threadCount = 0;
    for (Map.Entry<Long, IntervalResult> entry : stopWatchAggregatedTimeSeries.entrySet()) {
        long timeStamp = entry.getKey();
        IntervalResult intervalResult = entry.getValue();
        if (threadCountTimeSeries.containsKey(timeStamp)) {
            threadCount = threadCountTimeSeries.get(timeStamp);
        }
        intervalResult.setThreadCount(threadCount);
        collector.collect(new LongWritable(timeStamp), intervalResult);
    }
}

From source file:org.formix.dsx.serialization.XmlSerializer.java

private Object getValue(XmlElement elem, Class<?> paramType, SortedMap<String, Method> parentMethods,
        Object parent) throws XmlException {
    try {
        SortedMap<String, Method> methods = this.createMethodMap(paramType);

        Method paramTypeValueOfMethod = null;
        if (methods.containsKey("valueOf-String"))
            paramTypeValueOfMethod = methods.get("valueOf-String");

        if (elem.getChilds().size() == 0) {
            if (Collection.class.isAssignableFrom(paramType)) {
                return this.getCollectionValue(elem, paramType, parentMethods, parent);
            } else {
                return null;
            }
        }

        Object value = null;
        Iterator<XmlContent> contentIterator = elem.getChilds().iterator();
        XmlContent firstChild = contentIterator.next();
        while (firstChild.toString().trim().equals("") && contentIterator.hasNext()) {
            // skip blank lines
            firstChild = contentIterator.next();
        }

        if (paramType.equals(String.class)) {
            XmlText text = (XmlText) firstChild;
            value = text.getText();

        } else if (paramType.equals(Timestamp.class)) {
            value = new Timestamp(this.parseDate(firstChild.toString()).getTime());

        } else if (paramType.equals(Date.class)) {
            value = this.parseDate(firstChild.toString());

        } else if (Calendar.class.isAssignableFrom(paramType)) {
            value = new GregorianCalendar();
            ((GregorianCalendar) value).setTime(this.parseDate(firstChild.toString()));

        } else if (paramTypeValueOfMethod != null) {
            value = paramTypeValueOfMethod.invoke(null, new Object[] { firstChild.toString() });

        } else if (Collection.class.isAssignableFrom(paramType)) {
            value = this.getCollectionValue(elem, paramType, parentMethods, parent);

        } else if (Map.class.isAssignableFrom(paramType)) {
            throw new XmlException("The Map deserialization is not yet implemented.");
        } else if (this.isSetter(parentMethods, elem.getName()) && (firstChild instanceof XmlElement)) {
            XmlElement elemFirstChild = (XmlElement) firstChild;
            Class<?> specifiedType = this.getType(this.capitalize(elemFirstChild.getName()));
            value = this.deserialize(elemFirstChild, specifiedType);
        } else {
            value = this.deserialize(elem, paramType);
        }

        return value;
    } catch (Exception e) {
        throw new XmlException("Problem getting value of " + elem + " of type " + paramType.getName(), e);
    }
}

From source file:org.mitre.ccv.canopy.CcvCanopyCluster.java

/**
 * Sets the thresholds 1 and 2 using MaxLike profile.
 *
 * Issues/Pitfalls:
 * <ol>
 * <li>t2 might be too small and nothing is removed from the list</li>
 * <li>t1 might be too large and everything is added to a canopy</li>
 * </ol>
 * @todo: figure out how to select threshold1 (not too big, not too small)
 */
public double[] autoThreshold() throws Exception {
    LOG.info("autoThreshold: Generating distance distribution");
    //SortedMap<Double, Integer> sortMap = new TreeMap<Double, Integer>(new ReverseDoubleComparator());
    SortedMap<Double, Integer> sortMap = new TreeMap<Double, Integer>();
    // generate all the pairwise distances
    final int size = completeMatrix.getMatrix().getColumnDimension();
    for (int i = 0; i < size; ++i) {
        for (int j = i + 1; j < size; ++j) {
            // only calculate one triangle, not the full matrix
            Double d = this.cheapMetric.distance(i, j);
            //set.add(this.cheapMetric.distance(i, j));
            if (sortMap.containsKey(d)) {
                sortMap.put(d, sortMap.get(d) + 1);
            } else {
                sortMap.put(d, 1);
            }
        }
    }

    /**
    * $gnuplot
    * > set nokey
    * > set xlabel "Pairwise distance"
    * > set ylabel "Number of samples"
    * > plot "output.txt" using 1:2
    */
    /* */
    for (Iterator<Entry<Double, Integer>> i = sortMap.entrySet().iterator(); i.hasNext();) {
        Entry<Double, Integer> entry = i.next();

        //System.out.printf("%f\t%d\n", entry.getKey(), entry.getValue());
    }
    /* */

    /**
     * How many bins per sample do we want?
     * Using the two end cases at lower and upper bounds.
     */
    TH1D hist = new TH1D(completeMatrix.getMatrix().getColumnDimension() * 2, sortMap.firstKey(),
            sortMap.lastKey());
    LOG.info(String.format("autoThreshold: Packing into histogram with %d bins (%f, %f)", hist.getBins().length,
            hist.getLower(), hist.getUpper()));
    hist.pack(sortMap);
    int[] bins = hist.getBins();
    if (LOG.isDebugEnabled()) {
        if (hist.getNumberOverflows() != 0) {
            LOG.debug(
                    String.format("autoThreshold: Have %d overflows in histogram!", hist.getNumberOverflows()));
        }
        if (hist.getNumberUnderflows() != 0) {
            LOG.debug(String.format("autoThreshold: Have %d underflows in histogram!",
                    hist.getNumberUnderflows()));
        }
    }

    // print out histogram bins
    for (int i = 0; i < bins.length; i++) {
        //System.out.printf("%f\t%d\n", hist.getBinCenter(i), hist.getBinContent(i));
    }
    TSpectrum spectrum = new TSpectrum(); // use default values (sigma = 1, threshold = 0.5)
    int numFound = spectrum.search(hist);
    LOG.info(String.format("autoThreshold: Found %d peaks", numFound));
    if (numFound == 0) {
        LOG.fatal("autoThreshold: No peaks found in data!");
        throw new Exception();
    }
    double xpeaks[] = spectrum.getPostionX();
    double[] rtn = new double[2]; // t1, t2
    if (numFound == 1) {
        int bin = hist.findBin(xpeaks[0]);
        // is this in the top or bottom half?
        // @todo: there must be a better way than this hack
        if (bin > 0) {
            bin--;
        }
        rtn[0] = hist.getBinCenter(bin); // threshold1 is only peak
        rtn[1] = (hist.getLower() + rtn[0]) / 2;
        return rtn;
    }

    // more than one peak
    /**
     * Several possible options:
     * - select t1 first, then find a good t2
     * - select t2 first, then find a good t1
     * 
     * make sure that there are enough samples below t2 and above t1
             
    if (xpeaks[0] > xpeaks[1]) {
    // what about sigma value: how many are between these two
    rtn[0] = xpeaks[0]; // t1
    rtn[1] = xpeaks[1];  //t2
    } else {
    rtn[0] = xpeaks[1];
    rtn[1] = xpeaks[0];
    }
    */

    // find the peak with the smallest x value; this will be the basis for t2
    double minPeakX = hist.getUpper();
    int minPeakI = -1;
    for (int i = 0; i < numFound; i++) {
        final double x = xpeaks[i];
        if (x < minPeakX) {
            minPeakX = x;
            minPeakI = i;
        }
    }
    //System.err.printf("minPeakX=%f (%d)\n", minPeakX, minPeakI);

    // find the next peak above the smallest one
    // (picking this should probably take the mean and standard deviation
    // of the distribution of entries into account)
    double min2PeakX = hist.getUpper();
    int min2PeakI = -1;
    for (int i = 0; i < numFound; i++) {
        final double x = xpeaks[i];
        if (i != minPeakI && x < min2PeakX) { // should check that it isn't equal or within sigma
            min2PeakX = x;
            min2PeakI = i;
        }
    }
    //System.err.printf("min2PeakX=%f (%d)\n", min2PeakX, min2PeakI);
    /**
    if (minPeakI + 1 < min2PeakI - 1) {
    rtn[0] = hist.getBinCenter(min2PeakI - 1);         // t1
    rtn[1] = hist.getBinCenter(minPeakI + 1);          // t2
    } else {
    // really close not good - these should be the centers
    LOG.info("autoThreshold: t1 and t2 are possbily from adjacent bins!");
    rtn[0] = min2PeakX;
    rtn[1] = minPeakX;
    }
    int t2bin = hist.findBin(minPeakX);
    if (t2bin - 1 > 0 ) {
    rtn[1] = hist.getBinCenter(t2bin - 1); // don't want the first bin?
    } else {
    rtn[1] = minPeakX;
    }
    int t1bin = hist.findBin(min2PeakX);
    if (t1bin + 1 < bins.length - 1) {  // don't want the last bin?
    rtn[0] = hist.getBinCenter(t1bin + 1);
    } else {
    rtn[0] = min2PeakX;
    }*/

    rtn[0] = min2PeakX;
    rtn[1] = minPeakX;

    /*
    double t1 = hist.getUpper();
    double t2 = hist.getLower(); */
    // print out what we found
    for (int p = 0; p < numFound; p++) {
        double xp = xpeaks[p];
        int bin = hist.findBin(xp);
        int yp = hist.getBinContent(bin); // double yp
        System.err.printf("%d\t%f\t%d\n", bin, xp, yp);
        // if(yp- Math.sqrt(yp) < fline.eval(xp)) continue
    }

    return rtn;
}

From source file:at.illecker.hama.rootbeer.examples.matrixmultiplication.compositeinput.cpu.MatrixMultiplicationBSPCpu.java

@Override
public void cleanup(BSPPeer<IntWritable, TupleWritable, IntWritable, VectorWritable, MatrixRowMessage> peer)
        throws IOException {

    // MasterTask accumulates result
    if (peer.getPeerName().equals(masterTask)) {

        // SortedMap because the final matrix rows should be in order
        SortedMap<Integer, Vector> accumlatedRows = new TreeMap<Integer, Vector>();
        MatrixRowMessage currentMatrixRowMessage = null;

        // Collect messages
        while ((currentMatrixRowMessage = peer.getCurrentMessage()) != null) {
            int rowIndex = currentMatrixRowMessage.getRowIndex();
            Vector rowValues = currentMatrixRowMessage.getRowValues().get();

            if (isDebuggingEnabled) {
                logger.writeChars("bsp,gotMsg,key=" + rowIndex + ",value=" + rowValues.toString() + "\n");
            }

            if (accumlatedRows.containsKey(rowIndex)) {
                accumlatedRows.get(rowIndex).assign(rowValues, Functions.PLUS);
            } else {
                accumlatedRows.put(rowIndex, new RandomAccessSparseVector(rowValues));
            }
        }

        // Write accumulated results
        for (Map.Entry<Integer, Vector> row : accumlatedRows.entrySet()) {
            if (isDebuggingEnabled) {
                logger.writeChars(
                        "bsp,write,key=" + row.getKey() + ",value=" + row.getValue().toString() + "\n");
            }
            peer.write(new IntWritable(row.getKey()), new VectorWritable(row.getValue()));
        }

    }
}

From source file:net.sourceforge.fenixedu.presentationTier.Action.publico.ViewHomepageDA.java

public ActionForward listAlumni(ActionMapping mapping, ActionForm actionForm, HttpServletRequest request,
        HttpServletResponse response) throws Exception {
    final SortedMap<Degree, SortedSet<Homepage>> homepages = new TreeMap<Degree, SortedSet<Homepage>>(
            Degree.COMPARATOR_BY_DEGREE_TYPE_AND_NAME_AND_ID);
    for (final Registration registration : rootDomainObject.getRegistrationsSet()) {

        if (registration.getActiveState().getStateType().equals(RegistrationStateType.CONCLUDED)) {

            final Degree degree = registration.getActiveStudentCurricularPlan().getDegreeCurricularPlan()
                    .getDegree();

            final SortedSet<Homepage> degreeHomepages;
            if (homepages.containsKey(degree)) {
                degreeHomepages = homepages.get(degree);
            } else {
                degreeHomepages = new TreeSet<Homepage>(Homepage.HOMEPAGE_COMPARATOR_BY_NAME);
                homepages.put(degree, degreeHomepages);
            }

            final Homepage homepage = registration.getPerson().getHomepage();
            if (homepage != null && homepage.getActivated()) {
                degreeHomepages.add(homepage);
            }
        }

    }

    request.setAttribute("homepages", homepages);

    final String selectedPage = request.getParameter("selectedPage");
    if (selectedPage != null) {
        request.setAttribute("selectedPage", selectedPage);
    }

    return mapping.findForward("list-homepages-alumni");
}

From source file:com.restfb.util.InsightUtilsTest.java

@Test
public void executeInsightQueriesByMetricByDate1() throws IOException {
    // Note that the query passed to the FacebookClient WebRequestor is ignored,
    // so the arguments of executeInsightQueriesByDate
    // (String pageObjectId, Set<String> metrics, Period period)
    // are effectively ignored. In this test we are validating that the
    // WebRequestor's json is properly processed.
    SortedMap<String, SortedMap<Date, Object>> results = executeInsightQueriesByMetricByDate(
            createFixedResponseFacebookClient("multiResponse_2metrics_1date.json"), TEST_PAGE_OBJECT,
            toStringSet("page_fans", "page_fans_gender"), Period.DAY, Collections.singleton(d20101205_0000pst));
    Assert.assertNotNull(results);
    assertEquals(2, results.size());

    SortedMap<Date, Object> metricResult = results.get("page_fans");
    assertEquals(1, metricResult.size());
    assertEquals(3777, metricResult.get(d20101205_0000pst));

    metricResult = results.get("page_fans_gender");
    assertEquals(1, metricResult.size());
    Object metricValue = metricResult.get(d20101205_0000pst);
    Assert.assertTrue(metricValue instanceof JsonObject);
    JsonObject o = (JsonObject) metricValue;
    assertEquals(58, o.getInt("U"));
    assertEquals(1656, o.getInt("F"));
    assertEquals(2014, o.getInt("M"));
}