Example usage for java.util TreeMap get

List of usage examples for java.util TreeMap get

Introduction

This page collects example usages of java.util.TreeMap.get(Object key), drawn from open-source projects.

Prototype

public V get(Object key) 

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
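
A minimal, self-contained sketch of this contract (the class name TreeMapGetDemo and its sample data are illustrative, not taken from the projects below):

import java.util.TreeMap;

public class TreeMapGetDemo {
    public static void main(String[] args) {
        TreeMap<String, Integer> counts = new TreeMap<String, Integer>();
        counts.put("apple", 3);
        counts.put("banana", 5);

        // Key present: get returns the mapped value.
        System.out.println(counts.get("apple"));  // 3

        // Key absent: get returns null instead of throwing.
        System.out.println(counts.get("cherry")); // null
    }
}

Note that TreeMap permits null values, so a null return is ambiguous; use containsKey to distinguish "no mapping" from "mapped to null".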

Usage

From source file:org.apache.hadoop.hive.ql.exec.ComputationBalancerReducer.java

void flushTableStatsInfo(FSDataOutputStream out) throws Exception {
    out.writeBytes("TableInformation\n");
    out.writeBytes("stat_num_records" + ToolBox.hiveDelimiter + stat_num_records + "\n");
    ArrayList<TreeMap<String, Integer>> _a = ToolBox.<Integer>aggregateKey(infoDict, ToolBox.hiveDelimiter, 2);

    LOG.debug("stat_num_records:  " + stat_num_records);

    for (TreeMap<String, Integer> treeMap : _a) {
        if (treeMap == null)
            continue;
        for (String _s : treeMap.keySet()) {
            out.writeBytes(_s + ToolBox.hiveDelimiter + treeMap.get(_s) + "\n");
            LOG.debug(_s + ToolBox.hiveDelimiter + treeMap.get(_s));
            if (_s.startsWith(StatsCollectionOperator.FIELDLENGTH_ATTR)) {
                double _tmp = (double) treeMap.get(_s) / (double) stat_num_records;
                String _avg = StatsCollectionOperator.AVGFIELDWIDTH
                        + _s.substring(StatsCollectionOperator.FIELDLENGTH_ATTR.length());
                out.writeBytes(_avg + ToolBox.hiveDelimiter + (long) _tmp + "\n");
            }

            if (_s.startsWith(StatsCollectionOperator.NULLCOUNTER_ATTR)) {
                double _tmp = (double) treeMap.get(_s) / (double) stat_num_records;
                String _avg = StatsCollectionOperator.NULLFAC_ATTR
                        + _s.substring(StatsCollectionOperator.NULLCOUNTER_ATTR.length());
                out.writeBytes(_avg + ToolBox.hiveDelimiter + _tmp + "\n");
            }
        }
    }
}

From source file:de.suse.swamp.core.container.WorkflowManager.java

/**
 * Convenience method which evaluates recursively which templates are subworkflows of the given template
 * @return - List of templateNames
 */
public List getSubwfTypes(String wfTempName, List templates) {
    for (Iterator it = workflowTempls.values().iterator(); it.hasNext();) {
        // iterate over ordered list with versions of a template
        TreeMap versions = (TreeMap) it.next();
        WorkflowTemplate template = (WorkflowTemplate) versions.get(versions.lastKey());
        if (template.getParentWfName() != null && template.getParentWfName().equals(wfTempName)) {
            templates.add(template.getName());
            getSubwfTypes(template.getName(), templates);
        }
    }
    return templates;
}

From source file:net.spfbl.core.Peer.java

public static void dropExpired() {
    String origin = null;
    for (Peer peer : getSet()) {
        long time = System.currentTimeMillis();
        if (peer.isExpired7()) {
            if (peer.drop()) {
                Server.log(time, Core.Level.INFO, "PEERH", origin, peer.getAddress(), "EXPIRED");
            }
        } else {
            try {
                peer.refreshReputationMax();
                TreeMap<String, Binomial> reputationMap = peer.getReputationMap();
                for (String key : reputationMap.keySet()) {
                    time = System.currentTimeMillis();
                    Binomial binomial = reputationMap.get(key);
                    if (binomial.isExpired3()) {
                        binomial = peer.dropReputation(key);
                        if (binomial != null) {
                            Server.log(time, Core.Level.INFO, "PEERR", peer.getAddress(), key, "EXPIRED");
                        }
                    }
                }
            } catch (Exception ex) {
                Server.logError(ex);
            }
        }
    }
}

From source file:monasca.thresh.infrastructure.persistence.AlarmDAOImpl.java

private byte[] calculateDimensionSHA1(final Map<String, String> dimensions) {
    // Calculate dimensions sha1 hash id.
    final StringBuilder dimensionIdStringToHash = new StringBuilder("");
    if (dimensions != null) {
        // Sort the dimensions on name and value.
        TreeMap<String, String> dimensionTreeMap = new TreeMap<>(dimensions);
        for (String dimensionName : dimensionTreeMap.keySet()) {
            if (dimensionName != null && !dimensionName.isEmpty()) {
                String dimensionValue = dimensionTreeMap.get(dimensionName);
                if (dimensionValue != null && !dimensionValue.isEmpty()) {
                    dimensionIdStringToHash.append(trunc(dimensionName, MAX_COLUMN_LENGTH));
                    dimensionIdStringToHash.append(trunc(dimensionValue, MAX_COLUMN_LENGTH));
                }
            }
        }
    }

    final byte[] dimensionIdSha1Hash = DigestUtils.sha(dimensionIdStringToHash.toString());
    return dimensionIdSha1Hash;
}

From source file:org.apache.hadoop.chukwa.inputtools.mdl.TorqueInfoProcessor.java

private boolean loadTraceJobData(String hodId) throws IOException, SQLException {
    TreeMap<String, String> aJobData = currentHodJobs.get(hodId);
    String userId = aJobData.get("userId");
    String process = aJobData.get("process");

    StringBuffer sb = new StringBuffer();
    sb.append(torqueBinDir).append("/tracejob -n 10 -l -m -s ").append(hodId);
    String[] traceJobCommand = new String[3];
    traceJobCommand[0] = "ssh";
    traceJobCommand[1] = torqueServer;
    traceJobCommand[2] = sb.toString();

    String command = traceJobCommand[0] + " " + traceJobCommand[1] + " " + traceJobCommand[2];
    ProcessBuilder pb = new ProcessBuilder(traceJobCommand);

    Process p = pb.start();

    Timer timeout = new Timer();
    TorqueTimerTask torqueTimerTask = new TorqueTimerTask(p, command);
    timeout.schedule(torqueTimerTask, TorqueTimerTask.timeoutInterval * 1000);

    BufferedReader result = new BufferedReader(new InputStreamReader(p.getInputStream()));
    ErStreamHandler errorHandler = new ErStreamHandler(p.getErrorStream(), command, false);
    errorHandler.start();
    String line = null;
    String exit_status = null;
    String hosts = null;
    long timeQueued = -1;
    long startTimeValue = -1;
    long endTimeValue = -1;
    boolean findResult = false;

    while ((line = result.readLine()) != null && !findResult) {
        if (line.indexOf("end") >= 0 && line.indexOf("Exit_status") >= 0 && line.indexOf("qtime") >= 0) {
            TreeMap<String, String> jobData = new TreeMap<String, String>();
            String[] items = line.split("\\s+");
            for (int i = 0; i < items.length; i++) {
                String[] items2 = items[i].split("=");
                if (items2.length >= 2) {
                    jobData.put(items2[0], items2[1]);
                }

            }
            String startTime = jobData.get("ctime");
            startTimeValue = Long.valueOf(startTime);
            startTimeValue = startTimeValue - startTimeValue % (60);
            Timestamp startTimedb = new Timestamp(startTimeValue * 1000);

            String queueTime = jobData.get("qtime");
            long queueTimeValue = Long.valueOf(queueTime);

            String sTime = jobData.get("start");
            long sTimeValue = Long.valueOf(sTime);

            timeQueued = sTimeValue - queueTimeValue;

            String endTime = jobData.get("end");
            endTimeValue = Long.valueOf(endTime);
            endTimeValue = endTimeValue - endTimeValue % (60);
            Timestamp endTimedb = new Timestamp(endTimeValue * 1000);

            exit_status = jobData.get("Exit_status");
            hosts = jobData.get("exec_host");
            String[] items2 = hosts.split("[+]");
            int num = 0;
            for (int i = 0; i < items2.length; i++) {
                String machinetemp = items2[i];
                if (machinetemp.length() >= 3) {
                    String machine = items2[i].substring(0, items2[i].length() - 2);
                    StringBuffer data = new StringBuffer();
                    data.append("HodId=").append(hodId);
                    data.append(", Machine=").append(machine);
                    if (domain != null) {
                        data.append(".").append(domain);
                    }
                    log.info(data.toString());
                    num++;
                }
            }

            StringBuffer data = new StringBuffer();
            data.append("HodID=").append(hodId);
            data.append(", UserId=").append(userId);
            data.append(", Status=").append(exit_status);
            data.append(", TimeQueued=").append(timeQueued);
            data.append(", StartTime=").append(startTimedb);
            data.append(", EndTime=").append(endTimedb);
            data.append(", NumOfMachines=").append(num);
            log.info(data.toString());
            findResult = true;
            log.debug(" hod info for job " + hodId + " has been loaded ");
        } // if

    } // while

    try {
        errorHandler.join();
    } catch (InterruptedException ie) {
        log.error(ie.getMessage());
    }

    timeout.cancel();
    boolean tracedone = false;
    if (!findResult) {

        String traceCheckCount = aJobData.get("traceCheckCount");
        int traceCheckCountValue = Integer.valueOf(traceCheckCount);
        traceCheckCountValue = traceCheckCountValue + 1;
        aJobData.put("traceCheckCount", String.valueOf(traceCheckCountValue));

        log.debug("did not find tracejob info for job " + hodId + ", after " + traceCheckCountValue
                + " times checking");
        if (traceCheckCountValue >= 2) {
            tracedone = true;
        }
    }
    boolean finished = findResult || tracedone;
    return finished;
}

From source file:org.apache.hadoop.chukwa.inputtools.mdl.TorqueInfoProcessor.java

private boolean loadQstatData(String hodId) throws IOException, SQLException {
    TreeMap<String, String> aJobData = currentHodJobs.get(hodId);
    String userId = aJobData.get("userId");

    StringBuffer sb = new StringBuffer();
    sb.append(torqueBinDir).append("/qstat -f -1 ").append(hodId);
    String[] qstatCommand = new String[3];
    qstatCommand[0] = "ssh";
    qstatCommand[1] = torqueServer;
    qstatCommand[2] = sb.toString();

    String command = qstatCommand[0] + " " + qstatCommand[1] + " " + qstatCommand[2];
    ProcessBuilder pb = new ProcessBuilder(qstatCommand);
    Process p = pb.start();

    Timer timeout = new Timer();
    TorqueTimerTask torqueTimerTask = new TorqueTimerTask(p, command);
    timeout.schedule(torqueTimerTask, TorqueTimerTask.timeoutInterval * 1000);

    BufferedReader result = new BufferedReader(new InputStreamReader(p.getInputStream()));
    ErStreamHandler errorHandler = new ErStreamHandler(p.getErrorStream(), command, false);
    errorHandler.start();
    String line = null;
    String hosts = null;
    long startTimeValue = -1;
    long endTimeValue = Calendar.getInstance().getTimeInMillis();
    long executeTimeValue = Calendar.getInstance().getTimeInMillis();
    boolean qstatfinished;

    while ((line = result.readLine()) != null) {
        if (line.indexOf("ctime") >= 0) {
            String startTime = line.split("=")[1].trim();
            // Tue Sep 9 23:44:29 2008
            SimpleDateFormat sdf = new SimpleDateFormat("EEE MMM d HH:mm:ss yyyy");
            Date startTimeDate;
            try {
                startTimeDate = sdf.parse(startTime);
                startTimeValue = startTimeDate.getTime();
            } catch (ParseException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }

        }
        if (line.indexOf("mtime") >= 0) {
            String endTime = line.split("=")[1].trim();
            SimpleDateFormat sdf = new SimpleDateFormat("EEE MMM d HH:mm:ss yyyy");
            Date endTimeDate;
            try {
                endTimeDate = sdf.parse(endTime);
                endTimeValue = endTimeDate.getTime();
            } catch (ParseException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }

        }
        if (line.indexOf("etime") >= 0) {
            String executeTime = line.split("=")[1].trim();
            SimpleDateFormat sdf = new SimpleDateFormat("EEE MMM d HH:mm:ss yyyy");
            Date executeTimeDate;
            try {
                executeTimeDate = sdf.parse(executeTime);
                executeTimeValue = executeTimeDate.getTime();
            } catch (ParseException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }

        }
        if (line.indexOf("exec_host") >= 0) {
            hosts = line.split("=")[1].trim();
        }
    }

    if (hosts != null && startTimeValue >= 0) {
        String[] items2 = hosts.split("[+]");
        int num = 0;
        for (int i = 0; i < items2.length; i++) {
            String machinetmp = items2[i];
            if (machinetmp.length() > 3) {
                String machine = items2[i].substring(0, items2[i].length() - 2);
                StringBuffer data = new StringBuffer();
                data.append("HodId=").append(hodId);
                data.append(", Machine=").append(machine);
                if (domain != null) {
                    data.append(".").append(domain);
                }
                log.info(data);
                num++;
            }
        }
        Timestamp startTimedb = new Timestamp(startTimeValue);
        Timestamp endTimedb = new Timestamp(endTimeValue);
        StringBuffer data = new StringBuffer();
        long timeQueued = executeTimeValue - startTimeValue;
        data.append("HodID=").append(hodId);
        data.append(", UserId=").append(userId);
        data.append(", StartTime=").append(startTimedb);
        data.append(", TimeQueued=").append(timeQueued);
        data.append(", NumOfMachines=").append(num);
        data.append(", EndTime=").append(endTimedb);
        log.info(data);
        qstatfinished = true;

    } else {
        qstatfinished = false;
    }

    try {
        errorHandler.join();
    } catch (InterruptedException ie) {
        log.error(ie.getMessage());
    }
    result.close();
    timeout.cancel();

    return qstatfinished;
}

From source file:com.maxl.java.amikodesk.Emailer.java

public void loadMap() {
    byte[] encrypted_msg = FileOps.readBytesFromFile(Utilities.appDataFolder() + "\\access.ami.ser");
    if (encrypted_msg == null) {
        encrypted_msg = FileOps.readBytesFromFile(Constants.SHOP_FOLDER + "access.ami.ser");
        System.out.println("Loading access.ami.ser from default folder...");
    }
    // Decrypt and deserialize
    if (encrypted_msg != null) {
        Crypto crypto = new Crypto();
        byte[] serialized_bytes = crypto.decrypt(encrypted_msg);
        TreeMap<String, String> map = (TreeMap<String, String>) FileOps.deserialize(serialized_bytes);
        String[] parts = map.get(m_el).split(";");
        m_ep = parts[0];
        m_es = parts[1];
    }
}

From source file:org.apache.hadoop.hive.ql.exec.ComputationBalancerReducer.java

void flushMCVlist(FSDataOutputStream out) throws Exception {

    out.writeBytes("MCVList\n");
    for (String _iter_outside_ : mcvList.keySet()) {
        TreeMap<String, Integer> _TreeMap = mcvList.get(_iter_outside_);
        out.writeBytes(_iter_outside_ + "\n");
        ToolBox _tb = new ToolBox();

        for (String _s_ : _TreeMap.keySet()) {
            _tb.push(_s_, _TreeMap.get(_s_));
        }

        if (_TreeMap.size() > 256) {
            _tb.compact(_TreeMap, ToolBox.SortMethod.DescendSort, Integer.valueOf(256));
        } else {
            _tb.descendSort();
        }

        for (int idx = 0; idx < _tb.getCapacity(); idx++) {
            double _tmp_frac_ = ((double) _tb.getIntegeAtIdx(idx) / (double) _sampledRecordNumber_);
            out.writeBytes(
                    _iter_outside_ + ToolBox.hiveDelimiter + _tb.getStringAtIdx(idx) + ToolBox.hiveDelimiter
                            + _tb.getIntegeAtIdx(idx) + ToolBox.hiveDelimiter + _tmp_frac_ + "\n");
        }
    }

}

From source file:com.sfs.whichdoctor.analysis.AgedDebtorsAnalysisDAOImpl.java

/**
 * Calculate the overall balance.
 *
 * @param groups the groups
 * @return the double
 */
private double calculateBalance(final TreeMap<String, AgedDebtorsGrouping> groups) {

    double balance = 0;

    for (String orderKey : groups.keySet()) {
        AgedDebtorsGrouping group = groups.get(orderKey);

        balance += group.getTotal();
    }
    return balance;
}

From source file:edu.illinois.cs.cogcomp.core.datastructures.Lexicon.java

public void writeIntegerToFeatureStringFormat(PrintStream out) throws IOException {
    if (null == this.featureNames)
        throw new IllegalStateException("Error: Lexicon has not been configured to store feature names.");

    TreeMap<Integer, String> idToFeat = new TreeMap<Integer, String>();

    for (String feat : this.featureNames) {
        int id = lookupId(feat);
        idToFeat.put(id, feat);
    }

    for (Integer id : idToFeat.keySet()) {
        out.print(id);
        out.print("\t");
        out.println(idToFeat.get(id)); // one id<TAB>feature pair per line
    }
    out.flush();
}