Example usage for java.util HashMap get

List of usage examples for java.util HashMap get

Introduction

On this page you can find example usage of java.util.HashMap.get.

Prototype

public V get(Object key) 

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
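
Before the usage examples below, here is a minimal, self-contained sketch of that contract (class and variable names are illustrative, not taken from the examples): get returns the mapped value when the key is present and null when it is not.

import java.util.HashMap;

public class HashMapGetDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> ages = new HashMap<String, Integer>();
        ages.put("alice", 30);
        ages.put("bob", 25);

        // key present: get returns the mapped value
        Integer aliceAge = ages.get("alice"); // 30

        // key absent: get returns null, so guard before unboxing
        Integer carolAge = ages.get("carol"); // null

        System.out.println("alice -> " + aliceAge);
        System.out.println("carol -> " + (carolAge == null ? "no mapping" : carolAge));
    }
}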

Usage

From source file:edu.kit.cockpit.valuationserver.valuation.AotearoaAdapter.java

public static Matrix createMatrixFromVotes(List<AttributeType> attrList, List<PreferenceE> prefList) {

    int attrListSize = attrList.size();
    Double[][] matrix = new Double[attrListSize][attrListSize];

    HashMap<String, Integer> orderingMap = new HashMap<String, Integer>();
    for (int x = 0; x < attrListSize; x++) {
        orderingMap.put(attrList.get(x).getName(), x);
        // System.out.println(attrList.get(x).getName() +", x: " + x);
    }

    for (PreferenceE pref : prefList) {
        double value = pref.getPreferenceAoverB().doubleValue();
        if (value < 0) {
            value = 1D / (-1D * value);
        }
        if (value == 0) {
            value = 1D;
        }
        matrix[orderingMap.get(pref.getAttributeTypeA())][orderingMap.get(pref.getAttributeTypeB())] = value;
    }

    for (int x = 0; x < attrListSize; x++) {
        matrix[x][x] = 1D;
    }

    for (int x = 0; x < attrListSize; x++) {
        for (int y = 0; y < attrListSize; y++) {
            if (matrix[x][y] == null) {
                matrix[x][y] = 1D / matrix[y][x];
            }
        }
    }

    double[][] matrix2 = new double[attrListSize][attrListSize];

    for (int x = 0; x < attrListSize; x++) {
        for (int y = 0; y < attrListSize; y++) {
            matrix2[x][y] = matrix[x][y];
        }
    }

    Matrix m = new Matrix(matrix2);

    return m;
}

From source file:com.clustercontrol.jobmanagement.util.JobMultiplicityCache.java

/**
 * Queues the given job session node so that its status can move to 100 (StatusConstant.TYPE_RUNNING).
 *
 * If the node is in neither the running queue nor the waiting queue of its facility,
 * it is appended to the waiting queue, and kick() is then called to start waiting jobs.
 *
 * @param pk the primary key of the job session node
 */
public static boolean toRunning(JobSessionNodeEntityPK pk) {
    m_log.info("toRunning " + pk);

    String facilityId = pk.getFacilityId();

    try {
        _lock.writeLock();

        HashMap<String, Queue<JobSessionNodeEntityPK>> waitingCache = getWaitingCache();
        HashMap<String, Queue<JobSessionNodeEntityPK>> runningCache = getRunningCache();

        Queue<JobSessionNodeEntityPK> waitingQueue = waitingCache.get(facilityId);
        Queue<JobSessionNodeEntityPK> runningQueue = runningCache.get(facilityId);

        if (waitingQueue == null) {
            waitingQueue = new LinkedList<JobSessionNodeEntityPK>();
            waitingCache.put(facilityId, waitingQueue);
        }

        if (runningQueue == null) {
            runningQueue = new LinkedList<JobSessionNodeEntityPK>();
            runningCache.put(facilityId, runningQueue);
        }

        if ((runningQueue == null || !runningQueue.contains(pk)) && !waitingQueue.contains(pk)) {
            m_log.debug("toRunning add waitQueue : " + pk);
            waitingQueue.offer(pk);
        }

        storeWaitingCache(waitingCache);
        storeRunningCache(runningCache);

        if (m_log.isDebugEnabled()) {
            for (JobSessionNodeEntityPK q : runningQueue) {
                m_log.debug("toRunning runningQueue : " + q);
            }
            for (JobSessionNodeEntityPK q : waitingQueue) {
                m_log.debug("toRunning waitQueue : " + q);
            }
        }

        // try to start waiting jobs for this facility
        kick(facilityId);
    } finally {
        _lock.writeUnlock();
    }
    return true;
}

From source file:com.nec.nsgui.action.cifs.CommonUtil.java

/**
 * Sets the corresponding message for the specified property of every object in the list.
 * @param objList - the list of objects
 * @param protertyName - the target property
 * @param value_msgKey - maps property values to the corresponding message keys in the resource file
 * @param msgResources - MessageResources
 * @param request - http servlet request
 */
static public void setMsgInObj(List objList, String protertyName, HashMap value_msgKey,
        MessageResources msgResources, HttpServletRequest request) throws Exception {
    int objNumbers = objList.size();
    Object object;
    Object objValue;
    for (int i = 0; i < objNumbers; i++) {
        object = objList.get(i);
        try {
            objValue = PropertyUtils.getProperty(object, protertyName);
        } catch (Exception e) {
            throw e;
        }
        if (value_msgKey.containsKey(objValue)) {
            // replace the value with the corresponding message from the resources
            PropertyUtils.setProperty(object, protertyName,
                    msgResources.getMessage(request.getLocale(), (String) value_msgKey.get(objValue)));
        }
    }
}

From source file:net.sf.maltcms.chromaui.project.spi.DBProjectFactory.java

private static void addNormalizationDescriptors(Map<String, Object> props, Map<File, File> importFileMap,
        LinkedHashMap<File, IChromatogramDescriptor> fileToDescriptor) {
    HashMap<File, INormalizationDescriptor> normalizationDescriptors = (HashMap<File, INormalizationDescriptor>) props
            .get(DBProjectVisualPanel3.PROP_FILE_TO_NORMALIZATION);
    for (File file : normalizationDescriptors.keySet()) {
        fileToDescriptor.get(importFileMap.get(file))
                .setNormalizationDescriptor(normalizationDescriptors.get(file));
    }
}

From source file:com.clustercontrol.repository.util.FacilityTreeCache.java

/**
 * Returns the FacilityInfo cached for the given facility ID.
 */
public static FacilityInfo getFacilityInfo(String facilityId) {
    m_log.debug("getFacilityInfo() : facilityId " + facilityId);

    // fetch the current snapshot of the facility cache
    HashMap<String, FacilityInfo> facilityCache = getFacilityCache();

    return facilityCache.get(facilityId);
}

From source file:canreg.client.dataentry.Convert.java

private static Object lookUpPatientID(String mpCodeString, Object patientIDNumber, HashMap mpCodes) {
    Object IDNumberObj = mpCodes.get(mpCodeString);
    Object id = patientIDNumber;
    if (IDNumberObj == null) {
        mpCodes.put(mpCodeString, patientIDNumber);
    } else {
        id = IDNumberObj;
    }
    return id;
}

From source file:com.liusoft.dlog4j.search.SearchProxy.java

/**
 * Performs a full-text search with the given parameters.
 * @param params the search parameters
 * @return the list of matching result objects, or null if there is nothing to search
 * @throws Exception
 */
public static List search(SearchParameter params) throws Exception {
    if (params == null)
        return null;

    SearchEnabled searching = (SearchEnabled) params.getSearchObject().newInstance();

    StringBuffer path = new StringBuffer(_baseIndexPath);
    path.append(searching.name());
    File f = new File(path.toString());
    if (!f.exists())
        return null;

    IndexSearcher searcher = new IndexSearcher(path.toString());

    // build the combined query from the search keywords
    BooleanQuery comboQuery = new BooleanQuery();
    int _query_count = 0;
    StringTokenizer st = new StringTokenizer(params.getSearchKey());
    while (st.hasMoreElements()) {
        String q = st.nextToken();
        String[] indexFields = searching.getIndexFields();
        for (int i = 0; i < indexFields.length; i++) {
            QueryParser qp = new QueryParser(indexFields[i], analyzer);
            try {
                Query subjectQuery = qp.parse(q);
                comboQuery.add(subjectQuery, BooleanClause.Occur.SHOULD);
                _query_count++;
            } catch (Exception e) {
                log.error("Add query parameter failed. key=" + q, e);
            }
        }
    }

    if (_query_count == 0) // no valid query terms
        return null;

    // build filters from the additional search conditions
    MultiFilter multiFilter = null;
    HashMap conds = params.getConditions();
    if (conds != null) {
        Iterator keys = conds.keySet().iterator();
        while (keys.hasNext()) {
            if (multiFilter == null)
                multiFilter = new MultiFilter(0);
            String key = (String) keys.next();
            multiFilter.add(new FieldFilter(key, conds.get(key).toString()));
        }
    }

    /*
     * Creates a sort, possibly in reverse,
     * by terms in the given field with the type of term values explicitly given.
     */
    SortField[] s_fields = new SortField[2];
    s_fields[0] = SortField.FIELD_SCORE;
    s_fields[1] = new SortField(searching.getKeywordField(), SortField.INT, true);
    Sort sort = new Sort(s_fields);

    Hits hits = searcher.search(comboQuery, multiFilter, sort);
    int numResults = hits.length();
    //System.out.println(numResults + " found............................");
    int result_count = Math.min(numResults, MAX_RESULT_COUNT);
    List results = new ArrayList(result_count);
    for (int i = 0; i < result_count; i++) {
        Document doc = (Document) hits.doc(i);
        // construct the Java result object to hold this document's fields
        Object result = params.getSearchObject().newInstance();
        Enumeration fields = doc.fields();
        while (fields.hasMoreElements()) {
            Field field = (Field) fields.nextElement();
            //System.out.println(field.name()+" -- "+field.stringValue());
            if (CLASSNAME_FIELD.equals(field.name()))
                continue;
            // skip fields that are not stored in the index
            if (!field.isStored())
                continue;
            //System.out.println("=========== begin to mapping ============");
            //String --> anything
            Class fieldType = getNestedPropertyType(result, field.name());
            //System.out.println(field.name()+", class = " + fieldType.getName());
            Object fieldValue = null;
            if (fieldType.equals(Date.class))
                fieldValue = new Date(Long.parseLong(field.stringValue()));
            else
                fieldValue = ConvertUtils.convert(field.stringValue(), fieldType);
            //System.out.println(fieldValue+", class = " + fieldValue.getClass().getName());
            setNestedProperty(result, field.name(), fieldValue);
        }
        results.add(result);
    }

    return results;
}

From source file:anslab2.AnsLab2.java

public static DefaultTableModel generateTable(boolean fdt, boolean rft, int dataType, ArrayList labels,
        String label) {
    //DefaultTableModel model = new DefaultTableModel();

    HashMap<String, Integer> map = new HashMap<String, Integer>();

    for (Object temp : labels) {
        Integer count = map.get(String.valueOf(temp));
        map.put(String.valueOf(temp), (count == null) ? 1 : count + 1);
    }

    Vector _label = new Vector();
    Vector _freq = new Vector();
    Vector _rel_freq = new Vector();

    if (dataType == 1 || dataType == 3) {
        for (Map.Entry<String, Integer> entry : map.entrySet()) {
            _label.add(entry.getKey());
            _freq.add(entry.getValue());
            _rel_freq.add(((double) entry.getValue() / labels.size()) * (100));
        }
        string_maps = map;
        model.addColumn(label, _label);
    }

    else if (dataType == 2) {
        TreeMap<Double, Integer> num_map = new TreeMap<Double, Integer>();

        for (Map.Entry<String, Integer> entry : map.entrySet()) {
            num_map.put(Double.valueOf(entry.getKey()), entry.getValue());
        }

        for (Map.Entry<Double, Integer> entry1 : num_map.entrySet()) {
            _label.add(entry1.getKey());
            _freq.add(entry1.getValue());
            _rel_freq.add(round(((double) entry1.getValue() / labels.size()) * (100), 2));
        }
        double_maps = num_map;
        model.addColumn(label, _label);
    }

    if (fdt == true) {
        model.addColumn("Frequency", _freq);
    }
    if (rft == true) {
        model.addColumn("Relative Frequency", _rel_freq);
    }

    return model;
}

From source file:com.clustercontrol.jobmanagement.util.JobMultiplicityCache.java

/**
 * Tries to start waiting jobs for the given facility.
 * While another job is allowed to run, the head of the waiting queue is moved to the running queue.
 *
 * @param facilityId
 */
public static void kick(String facilityId) {
    m_log.debug("kick " + facilityId);

    boolean kickFlag = false;

    try {
        _lock.writeLock();

        HashMap<String, Queue<JobSessionNodeEntityPK>> waitingCache = getWaitingCache();
        HashMap<String, Queue<JobSessionNodeEntityPK>> runningCache = getRunningCache();

        Queue<JobSessionNodeEntityPK> waitingQueue = waitingCache.get(facilityId);
        Queue<JobSessionNodeEntityPK> runningQueue = runningCache.get(facilityId);

        if (waitingQueue == null || waitingQueue.size() == 0) {
            return;
        }

        if (runningQueue == null) {
            runningQueue = new LinkedList<JobSessionNodeEntityPK>();
            runningCache.put(facilityId, runningQueue);
        }

        if (isRunNow(facilityId)) {
            JpaTransactionManager jtm = new JpaTransactionManager();
            try {
                jtm.begin();
                JobSessionNodeEntityPK pk = waitingQueue.peek(); //// peek at the head of the waitQueue (not removed yet)
                m_log.debug("kick remove waitQueue : " + pk);
                int status = new JobSessionNodeImpl().wait2running(pk);
                // the node started successfully
                if (status == 0) {
                    m_log.debug("kick add runningQueue : " + pk);
                    waitingQueue.poll(); //// remove it from the waitQueue
                    runningQueue.offer(pk); //// add it to the runningQueue
                    kickFlag = true;
                }
                // the node is not started; drop it from the waitQueue anyway
                else if (status == 1) {
                    m_log.debug("kick not add runningQueue : " + pk);
                    waitingQueue.poll(); //// remove it from the waitQueue
                    kickFlag = true;
                }
                jtm.commit();
            } catch (Exception e) {
                m_log.warn("kick : " + e.getClass().getSimpleName() + ", " + e.getMessage(), e);
                jtm.rollback();
            } finally {
                jtm.close();
            }
        }

        storeWaitingCache(waitingCache);
        storeRunningCache(runningCache);

        if (m_log.isDebugEnabled()) {
            for (JobSessionNodeEntityPK q : runningQueue) {
                m_log.debug("kick runningQueue : " + q);
            }
            for (JobSessionNodeEntityPK q : waitingQueue) {
                m_log.debug("kick waitQueue : " + q);
            }
        }

        if (kickFlag) {
            kick(facilityId);
        }
    } finally {
        _lock.writeUnlock();
    }
}

From source file:com.ibm.bi.dml.hops.globalopt.GDFEnumOptimizer.java

private static void rSetRuntimePlanConfig(Plan p, HashMap<Long, Plan> memo) {
    ExecType CLUSTER = OptimizerUtils.isSparkExecutionMode() ? ExecType.SPARK : ExecType.MR;

    //basic memoization including containment check 
    if (memo.containsKey(p.getNode().getID())) {
        Plan pmemo = memo.get(p.getNode().getID());
        if (!p.getInterestingProperties().equals(pmemo.getInterestingProperties())) {
            //replace plan in memo with new plan
            //TODO this would require additional cleanup in special cases
            if (_resolve.resolveMismatch(pmemo.getRewriteConfig(), p.getRewriteConfig()))
                memo.put(p.getNode().getID(), p);
            //log the encountered plan mismatch
            LOG.warn("Configuration mismatch on shared node (" + p.getNode().getHop().getHopID()
                    + "). Falling back to heuristic '" + _resolve.getName() + "'.");
            LOG.warn(p.getInterestingProperties().toString());
            LOG.warn(memo.get(p.getNode().getID()).getInterestingProperties());
            _planMismatches++;
            return;
        }
    }

    //set plan configuration
    Hop hop = p.getNode().getHop();
    if (hop != null) {
        RewriteConfig rc = p.getRewriteConfig();
        //set exec type
        hop.setForcedExecType(rc.getExecType());
        //set blocksizes and reblock
        hop.setRowsInBlock(rc.getBlockSize());
        hop.setColsInBlock(rc.getBlockSize());
        if (rc.getExecType() == CLUSTER) //after blocksize update
        {
            //TODO double check dataop condition - side effect from plan validity
            boolean reblock = HopRewriteUtils.alwaysRequiresReblock(hop)
                    || (hop.hasMatrixInputWithDifferentBlocksizes() && !(hop instanceof DataOp));
            hop.setRequiresReblock(reblock);
        } else
            hop.setRequiresReblock(false);
    }

    //process children
    if (p.getChilds() != null)
        for (Plan c : p.getChilds())
            rSetRuntimePlanConfig(c, memo);

    //memoization (mark as processed)
    memo.put(p.getNode().getID(), p);
}