Example usage for java.util TreeMap lastKey

List of usage examples for java.util TreeMap lastKey

Introduction

On this page you can find example usages of the java.util.TreeMap.lastKey() method.

Prototype

public K lastKey() 

Source Link

Usage

From source file:com.opengamma.analytics.math.statistics.descriptive.ModeCalculator.java

/**
 * Computes the mode (the most frequently occurring value) of the data.
 *
 * @param x The array of data, not null or empty
 * @return The mode; for a single-element array, that element
 * @throws MathException if no value in the array is repeated
 */
@Override
public Double evaluate(final double[] x) {
    Validate.notNull(x, "x");
    Validate.isTrue(x.length > 0, "x cannot be empty");
    if (x.length == 1) {
        return x[0];
    }
    final double[] x1 = Arrays.copyOf(x, x.length);
    Arrays.sort(x1);
    // Maps run length -> value of that run. Keys are sorted ascending, so
    // lastEntry() is the value with the highest count (ties: the run with
    // the larger value wins, since later puts overwrite the same key).
    final TreeMap<Integer, Double> counts = new TreeMap<Integer, Double>();
    int count = 1;
    for (int i = 1; i < x1.length; i++) {
        if (Math.abs(x1[i] - x1[i - 1]) < EPS) {
            count++;
        } else {
            counts.put(count, x1[i - 1]);
            count = 1;
        }
    }
    // BUG FIX: record the final run as well. Previously a mode located at the
    // end of the sorted array was dropped — e.g. {1, 2, 2} incorrectly threw
    // "no repeated values" instead of returning 2.
    counts.put(count, x1[x1.length - 1]);
    if (counts.lastKey() == 1) {
        throw new MathException("Could not find mode for array; no repeated values");
    }
    return counts.lastEntry().getValue();
}

From source file:edu.synth.state.SyntHelperState.java

/**
 * Loads the observation data file, derives the synthesis range from its
 * smallest and largest keys, and rewrites the file with uniformly
 * formatted "key value" lines.
 *
 * @throws IOException if the observation file cannot be read or written
 */
public void handleObsData() throws IOException {
    final File obsFile = new File(Constants.OBS_DATA);
    final TreeMap<Double, Double> obs = FileWorker.getSortedDoubleData(obsFile);

    // Synthesis range: floor of the smallest key, ceiling of the largest.
    final int rangeStart = new BigDecimal(obs.firstKey()).setScale(0, RoundingMode.DOWN).intValue();
    final int rangeEnd = new BigDecimal(obs.lastKey()).setScale(0, RoundingMode.UP).intValue();
    synthSettings.setStartSynth(rangeStart);
    synthSettings.setEndSynth(rangeEnd);

    // Re-serialise every (key, value) pair with four decimal places.
    final List<String> lines = new ArrayList<String>();
    for (final Entry<Double, Double> entry : obs.entrySet()) {
        lines.add(String.format(Locale.ENGLISH, "%1.4f %1.4f", entry.getKey(), entry.getValue()));
    }
    FileWorker.write(obsFile, lines);
}

From source file:main.java.workload.WorkloadExecutor.java

/**
 * Streams a single transaction into the workload: either "births" a brand-new
 * transaction (with probability {@code Global.percentageChangeInWorkload}, or
 * when none of this type exists yet) or repeats an existing one, updating its
 * exponentially averaged period. Once the observation window has filled, it
 * also recomputes the impact-of-distributed-transactions metric (IDt) and
 * manages the repartitioning cooling-off period.
 *
 * @param db      database the transaction tuples are drawn from
 * @param cluster cluster used for span/incidence bookkeeping
 * @param wrl     workload definition supplying the tuple sets per type
 * @param wb      workload batch that owns the transaction map and hypergraph
 * @return the streamed (new or repeated) transaction
 */
public static Transaction streamOneTransaction(Database db, Cluster cluster, Workload wrl, WorkloadBatch wb) {

    Set<Integer> trTupleSet = null;
    Set<Integer> trDataSet = null;

    int min = 0, i = 0, n = 0, tr_id = 0;
    // Draw the transaction type from the configured distribution
    int type = trDistribution.sample();

    Transaction tr = null;

    if (!wb.getTrMap().containsKey(type))
        wb.getTrMap().put(type, new TreeMap<Integer, Transaction>());

    // Decides between birth of a new transaction and repetition of an old one
    double rand_val = Global.rand.nextDouble();
    int toBeRemovedKey = -1;

    /**
     *  Implementing the new Workload Generation model 
     *  (Finalised as per November 20, 2014 and later improved on February 13-14, 2015)      
     */
    ++Global.global_trCount;

    // Transaction birth
    if (wb.getTrMap().get(type).isEmpty() || rand_val <= Global.percentageChangeInWorkload) {

        trTupleSet = wrl.getTrTupleSet(db, type);
        trDataSet = Workload.getTrDataSet(db, cluster, wb, trTupleSet);

        ++Global.global_trSeq;
        tr = new Transaction(Global.global_trSeq, type, trDataSet, Sim.time());

        // Add the incident transaction id
        wb.addIncidentTrId(cluster, trDataSet, Global.global_trSeq);

        // Add the newly created Transaction in the Workload Transaction map   
        wb.getTrMap().get(type).put(tr.getTr_id(), tr);

        // A new transaction starts with the maximum period (i.e. lowest frequency)
        double initial_period = (double) WorkloadExecutor.uNmax; // initialisation         
        tr.setTr_period(initial_period);

        perfm.Period.put(tr.getTr_id(), initial_period);
        Time.put(tr.getTr_id(), Sim.time());

        // Transaction repetition and retention of old transaction
    } else {

        ArrayList<Integer> idx2_id = new ArrayList<Integer>();
        ArrayList<Integer> idx_value = new ArrayList<Integer>();
        ArrayList<Integer> uT = new ArrayList<Integer>();

        // View of idx ordered by value, so the most recently indexed
        // transactions come first (ordering defined by ValueComparator)
        TreeMap<Integer, Integer> idx2 = new TreeMap<Integer, Integer>(new ValueComparator<Integer>(idx));
        idx2.putAll(idx);

        min = Math.min(idx.size(), uNmax); // uNmax or uNmaxT

        // Collect the first `min` transaction ids in value order
        i = 0;
        Iterator<Entry<Integer, Integer>> itr = idx2.entrySet().iterator();
        while (i < min) {
            idx2_id.add(itr.next().getKey());
            ++i;
        }

        // Deleting old Transactions: evict the last-ordered entry once the
        // index grows beyond the retention bound
        if (idx2.size() > min) {
            toBeRemovedKey = idx2.lastKey();

            Transaction tr_old = wb.getTransaction(toBeRemovedKey);
            tr_old.calculateSpans(cluster);

            wb.removeTransaction(cluster, tr_old);
            idx.remove(toBeRemovedKey);
        }

        // Map the retained ids to their index values ...
        i = 0;
        while (i < idx2_id.size()) {
            idx_value.add(idx.get(idx2_id.get(i)));
            ++i;
        }

        // ... and the index values back to transaction ids via T
        i = 0;
        while (i < idx_value.size()) {
            uT.add(T.get(idx_value.get(i) - 1));
            ++i;
        }

        // Pick one of the retained transactions uniformly at random
        if (uT.size() == 1)
            n = 0;
        else
            n = Global.rand.nextInt(uT.size());

        tr_id = uT.get(n);

        tr = wb.getTransaction(tr_id);
        tr.setProcessed(false);

        // Exponentially weighted average of the repetition period
        double prev_period = perfm.Period.get(tr.getTr_id());
        double prev_time = Time.get(tr.getTr_id());

        double new_period = Global.expAvgWt * prev_period + (1 - Global.expAvgWt) * (Sim.time() - prev_time);

        tr.setTr_period(new_period);

        perfm.Period.remove(tr.getTr_id());
        perfm.Period.put(tr.getTr_id(), new_period);

        Time.remove(tr.getTr_id());
        Time.put(tr.getTr_id(), Sim.time());

    } // end-if-else()

    // Calculate latest Span
    tr.calculateSpans(cluster);

    // Update Idt
    tr.calculateIdt();

    if (perfm.Span.containsKey(tr.getTr_id()))
        perfm.Span.remove(tr.getTr_id());

    perfm.Span.put(tr.getTr_id(), tr.getTr_serverSpanCost());

    // Create an index entry for each newly created Transaction      
    idx.put(tr.getTr_id(), Global.global_trCount);
    T.add(tr.getTr_id());

    // Observation-window statistics (only once the window has filled)
    if (Global.global_trCount > Global.observationWindow) {

        _i = Global.global_trCount; // _i ~ Sim.time() 
        _W = Global.observationWindow; // _W ~ time 

        HashSet<Integer> unq = new HashSet<Integer>(T);
        // BUG FIX: the original loop read `for (int _n = (_i - _W); n <= _i; n++)`,
        // i.e. it tested and incremented `n` (the repetition index chosen above)
        // instead of `_n`, re-adding T.get(_i - _W) a data-dependent number of
        // times. The upper bound is exclusive because T holds exactly
        // Global.global_trCount entries (valid indices 0 .. _i - 1).
        for (int _n = (_i - _W); _n < _i; _n++) {
            unq.add(T.get(_n));
        }

        // Captures the number of total unique transaction for this observation window
        perfm.Unqlen.put((_i - _W), unq.size());

        // Calculate the impact of distributed transaction per transaction basis               
        double sum_of_span_by_period = 0.0;
        sum_of_one_by_period = 0.0;

        Iterator<Integer> unq_itr = unq.iterator();
        while (unq_itr.hasNext()) {
            int unq_T = unq_itr.next();

            int span = perfm.Span.get(unq_T);
            double period = perfm.Period.get(unq_T);

            double span_by_period = span / period; // Frequency = 1/Period (f=1/t) per unit time (i.e. 1 second)
            double one_by_period = 1 / period; // Frequency = 1/Period (f=1/t) per unit time (i.e. 1 second)

            sum_of_span_by_period += span_by_period;
            sum_of_one_by_period += one_by_period;
        }

        double i_dt = (sum_of_span_by_period) / (Global.servers * sum_of_one_by_period);
        perfm.I_Dt.put((_i - _W), i_dt);

        // NaN occurs when sum_of_one_by_period is 0 — treat as "no impact"
        if (Double.isNaN(i_dt))
            currentIDt = 0;
        else
            currentIDt = i_dt;

        // Reset repartitioning cooling off period
        if (WorkloadExecutor.repartitioningCoolingOff
                && Sim.time() >= WorkloadExecutor.RepartitioningCoolingOffPeriod) {

            WorkloadExecutor.repartitioningCoolingOff = false;

            Global.LOGGER.info("-----------------------------------------------------------------------------");
            Global.LOGGER.info("Simulation time: " + Sim.time() / (double) Global.observationWindow + " hrs");
            Global.LOGGER.info("Repartitioning cooling off period ends.");
            Global.LOGGER
                    .info("System will now check whether another repartitioning is required at this moment.");
            Global.LOGGER.info("Current IDt: " + currentIDt);
            Global.LOGGER.info("User defined IDt threshold: " + Global.userDefinedIDtThreshold);

            if (currentIDt < Global.userDefinedIDtThreshold) {
                Global.LOGGER.info("Repartitioning is not required at this moment.");

                //This is to disable on-demand atomic repartitioning for A-ARHC only
                if (Global.adaptive) {
                    Global.LOGGER.info("Disabling on-demand atomic repartitioning for A-ARHC ...");
                    WorkloadExecutor.isAdaptive = false;
                }

                Global.LOGGER.info("Continuing transaction processing ...");
            }
        }

        perfm.time.put((_i - _W), Sim.time());
    }

    // Add a hyperedge to workload hypergraph
    wb.addHGraphEdge(cluster, tr);

    // Collect transactional streams if data stream mining is enabled
    if (Global.streamCollection)
        Global.dsm.collectStream(cluster, tr);

    return tr;
}

From source file:ubic.gemma.model.association.coexpression.GeneCoexpressionNodeDegreeValueObject.java

/**
 * Converts a sparse support -> node-degree map into a dense int array,
 * where index {@code s} holds the count for support {@code s} (0 when the
 * support is absent from the map).
 *
 * @param nodedeg sparse counts keyed by support level; may be empty
 * @return dense array of length {@code lastKey() + 1}, or an empty array
 *         for an empty map
 */
private int[] asIntArray(TreeMap<Integer, Integer> nodedeg) {
    IntArrayList list = new IntArrayList();
    if (nodedeg.isEmpty())
        return this.toPrimitive(list);
    // lastKey() is the largest support present, which fixes the array length.
    int maxSupport = nodedeg.lastKey();
    list.setSize(maxSupport + 1);
    for (int s = 0; s <= maxSupport; s++) {
        // getOrDefault replaces the previous containsKey/get double lookup;
        // a primitive loop counter avoids repeated Integer boxing.
        list.set(s, nodedeg.getOrDefault(s, 0));
    }
    return this.toPrimitive(list);
}

From source file:ubic.gemma.model.association.coexpression.GeneCoexpressionNodeDegreeValueObject.java

/**
 * Converts a sparse index -> value map into a dense double array; indices
 * missing from the map keep the list's default value of 0.0.
 *
 * @param map sparse values keyed by non-negative index; may be empty
 * @return dense array addressable up to {@code lastKey()}, or an empty
 *         array for an empty map
 */
private double[] asDoubleArray(TreeMap<Integer, Double> map) {
    final DoubleArrayList dense = new DoubleArrayList();
    if (map.isEmpty())
        return this.toPrimitive(dense);
    // Grow the backing list so the largest key is addressable.
    dense.setSize(Math.max(dense.size(), map.lastKey() + 1));
    map.forEach((index, value) -> dense.set(index, value));
    return this.toPrimitive(dense);
}

From source file:com.linuxbox.enkive.imap.mongo.MongoImapAccountCreator.java

/**
 * Adds all archived messages for the given user between {@code fromDate} and
 * {@code toDate} to the corresponding per-month IMAP mailbox documents,
 * assigning each new message the next free UID.
 *
 * @param username mailbox owner
 * @param fromDate start of the date range (inclusive)
 * @param toDate   end of the date range
 * @throws MessageSearchException if searching for message ids fails
 */
@Override
public void addImapMessages(String username, Date fromDate, Date toDate) throws MessageSearchException {

    Calendar startTime = Calendar.getInstance();
    startTime.setTime(fromDate);
    Calendar endTime = Calendar.getInstance();
    endTime.setTime(toDate);

    // Process the date range one calendar month at a time
    while (startTime.before(endTime)) {
        Calendar endOfMonth = (Calendar) startTime.clone();
        endOfMonth.set(Calendar.DAY_OF_MONTH, endOfMonth.getActualMaximum(Calendar.DAY_OF_MONTH));

        if (endOfMonth.after(endTime))
            endOfMonth = (Calendar) endTime.clone();

        // Need to get all messages to add
        Set<String> messageIdsToAdd = getMailboxMessageIds(username, startTime.getTime(), endOfMonth.getTime());
        // Need to add messages
        // Get top UID, add from there
        if (!messageIdsToAdd.isEmpty()) {
            // Need to check if folder exists, if not create it and add to
            // user mailbox list
            DBObject mailboxObject = getMessagesFolder(username, startTime.getTime());

            @SuppressWarnings("unchecked")
            HashMap<String, String> mailboxMsgIds = (HashMap<String, String>) mailboxObject
                    .get(MongoEnkiveImapConstants.MESSAGEIDS);

            // Determine the highest UID already in use.
            // BUG FIX: the previous code copied the keys into a
            // TreeMap<String, String> and used lastKey(), which orders numeric
            // UIDs lexicographically ("9" > "10") and therefore reused UIDs
            // once a mailbox held more than nine messages. Compare numerically.
            // (Keys are produced by this method via Long.toString, so they are
            // parseable — as the old Long.valueOf(lastKey) already assumed.)
            long i = 0;
            for (String uid : mailboxMsgIds.keySet()) {
                i = Math.max(i, Long.parseLong(uid));
            }

            for (String msgId : messageIdsToAdd) {
                i++;
                mailboxMsgIds.put(((Long.toString(i))), msgId);
            }
            mailboxObject.put(MongoEnkiveImapConstants.MESSAGEIDS, mailboxMsgIds);

            imapCollection.findAndModify(new BasicDBObject("_id", mailboxObject.get("_id")), mailboxObject);
        }
        // Advance to the first day of the next month
        startTime.set(Calendar.DAY_OF_MONTH, 1);
        startTime.add(Calendar.MONTH, 1);
    }
}

From source file:edu.utexas.cs.tactex.subscriptionspredictors.LWRCustOldAppache.java

/**
 * @param candidateEval/*from   w ww  .  j  ava2s.  c om*/
 * @param e2n
 * @return
 */
@Override
public Double predictNumSubs(double candidateEval, TreeMap<Double, Double> e2n, CustomerInfo customer,
        int timeslot) {
    // tree map guarantees that keys are unique
    // so we are suppose to be able to run LWR
    // if there are at least 3 entries (even 2)

    // LWR, run n-fold cross validation with different bandwidth

    double min = e2n.firstKey();
    double max = e2n.lastKey();
    ArrayRealVector xVec = createNormalizedXVector(e2n.keySet(), min, max);
    ArrayRealVector yVec = createYVector(e2n.values());

    double bestTau = Double.MAX_VALUE;
    double bestMSE = Double.MAX_VALUE;

    ArrayList<Double> candidateTaus = new ArrayList<Double>();
    //candidateTaus.add(0.025 * SQUEEZE);
    candidateTaus.add(0.05);// * SQUEEZE);
    candidateTaus.add(0.1);// * SQUEEZE);
    candidateTaus.add(0.2);// * SQUEEZE);
    candidateTaus.add(0.3);// * SQUEEZE);
    candidateTaus.add(0.4);// * SQUEEZE);
    candidateTaus.add(0.5);// * SQUEEZE);
    candidateTaus.add(0.6);// * SQUEEZE);
    candidateTaus.add(0.7);// * SQUEEZE);
    candidateTaus.add(0.8);// * SQUEEZE);
    candidateTaus.add(0.9);// * SQUEEZE);
    candidateTaus.add(1.0);// * SQUEEZE);
    for (Double tau : candidateTaus) {
        Double mse = CrossValidationError(tau, xVec, yVec);
        if (null == mse) {
            log.error(" cp cross-validation failed, return null");
            return null;
        }
        if (mse < bestMSE) {
            bestMSE = mse;
            bestTau = tau;
        }
    }
    log.info(" cp LWR bestTau " + bestTau);
    double x0 = candidateEval;
    Double prediction = LWRPredict(xVec, yVec, normalizeX(x0, min, max), bestTau);
    if (null == prediction) {
        log.error("LWR passed CV but cannot predict on new point. falling back to interpolateOrNN()");
        log.error("e2n: " + e2n.toString());
        log.error("candidateEval " + candidateEval);
        return null;
    }
    // cast to int, and cannot be negative
    return Math.max(0, (double) (int) (double) prediction);
}

From source file:de.unidue.langtech.teaching.rp.detector.LanguageDetectorWeb1T.java

/**
 * Records a "vote" for the most probable language of the current token:
 * looks up the language with the highest probability in {@code langProbs}
 * and increments its running count in {@code textLogProbability}.
 *
 * @param langProbs          probability -> language, sorted ascending, so the
 *                           last entry is the most probable language; must be
 *                           non-empty (lastKey throws NoSuchElementException
 *                           otherwise, as before)
 * @param textLogProbability per-language vote counts, updated in place
 */
private void setTextProbability(TreeMap<Double, String> langProbs, Map<String, Double> textLogProbability) {

    System.out.println("LangProb: " + langProbs);
    System.out.println("Highest Prob: " + langProbs.lastEntry());
    // Resolve the winning language once; previously langProbs.get(langProbs.lastKey())
    // was evaluated twice (three tree lookups in total).
    final String topLanguage = langProbs.get(langProbs.lastKey());

    Double previousValue = textLogProbability.get(topLanguage);
    textLogProbability.put(topLanguage, previousValue == null ? 1.0 : 1 + previousValue);
    System.out.println("TextLogProb: " + textLogProbability);

}

From source file:de.suse.swamp.core.container.WorkflowManager.java

/**
 * Convenience method which evaluates recursively which templates are subworkflows of the given template
 * @return - List of templateNames/*  www . ja v a  2 s  .c o m*/
 */
public List getSubwfTypes(String wfTempName, List templates) {
    for (Iterator it = workflowTempls.values().iterator(); it.hasNext();) {
        // iterate over ordered list with versions of a template
        TreeMap versions = (TreeMap) it.next();
        WorkflowTemplate template = (WorkflowTemplate) versions.get(versions.lastKey());
        if (template.getParentWfName() != null && template.getParentWfName().equals(wfTempName)) {
            templates.add(template.getName());
            getSubwfTypes(template.getName(), templates);
        }
    }
    return templates;
}

From source file:com.romeikat.datamessie.core.sync.service.template.withIdAndVersion.EntityWithIdAndVersionSynchronizer.java

/**
 * Synchronizes one batch of entities from the LHS (source) store to the RHS
 * (target) store, starting at {@code firstId}.
 *
 * @param taskExecution task context used for progress reporting, checkpointing
 *                      and cancellation
 * @param firstId       lowest id to load in this batch; advanced in place to
 *                      {@code lastKey() + 1} so the caller can loop
 * @return {@code true} if a batch was processed, {@code false} when no ids
 *         remain at or after {@code firstId} (both sessions are closed in
 *         either case)
 * @throws TaskCancelledException if the task was cancelled at the checkpoint
 */
private boolean createOrUpdateBatch(final TaskExecution taskExecution, final MutableLong firstId)
        throws TaskCancelledException {
    final TaskExecutionWork work = taskExecution.startWork();

    // Load the next batch of (id -> version) pairs from the LHS, at most
    // batchSizeIds entries, ordered by id (TreeMap)
    final TreeMap<Long, Long> lhsIdsWithVersion = dao
            .getIdsWithVersion(lhsSessionProvider.getStatelessSession(), firstId.getValue(), batchSizeIds);
    if (lhsIdsWithVersion.isEmpty()) {
        // Nothing left to synchronize: release both sessions and signal done
        lhsSessionProvider.closeStatelessSession();
        rhsSessionProvider.closeStatelessSession();
        return false;
    }

    // Feedback: report the id range covered by this batch
    final long lastId = lhsIdsWithVersion.lastKey();
    final String msg = String.format("Processing batch from %s to %s",
            LongConverter.INSTANCE.convertToString(firstId.getValue()),
            LongConverter.INSTANCE.convertToString(lastId));
    taskExecution.reportWorkStart(work, msg);

    // Create or update RHS entities to match the LHS versions
    createOrUpdate(lhsIdsWithVersion, lhsSessionProvider.getStatelessSession(),
            rhsSessionProvider.getStatelessSession(), taskExecution);
    // Advance the cursor past this batch for the caller's next invocation
    firstId.setValue(lastId + 1);

    // Sessions are closed per batch; checkpoint may throw TaskCancelledException
    lhsSessionProvider.closeStatelessSession();
    rhsSessionProvider.closeStatelessSession();
    taskExecution.reportWorkEnd(work);
    taskExecution.checkpoint();
    return true;
}