Example usage for java.util LinkedHashMap get

List of usage examples for java.util LinkedHashMap get

Introduction

On this page you can find example usage for java.util LinkedHashMap get.

Prototype

public V get(Object key) 

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
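
As a quick, self-contained illustration (not taken from the examples below), the following sketch shows both outcomes of get: the mapped value for a present key and null for an absent one.

import java.util.LinkedHashMap;

public class LinkedHashMapGetDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, Integer> counts = new LinkedHashMap<String, Integer>();
        counts.put("alpha", 1);
        counts.put("beta", 2);

        // get returns the value mapped to an existing key
        Integer alpha = counts.get("alpha");   // 1

        // get returns null when the map contains no mapping for the key
        Integer missing = counts.get("gamma"); // null

        System.out.println(alpha + ", " + missing); // prints: 1, null
    }
}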

Usage

From source file:com.cloudmine.api.db.RequestDBOpenHelper.java

/**
 * Convert Cursor contents to a map from the request id to the request. The map will have the same iteration order
 * as the Cursor.
 * @param cursor the Cursor to read request rows from
 * @return a LinkedHashMap from request id to the corresponding RequestDBObject
 */
private LinkedHashMap<Integer, RequestDBObject> createRequestMapping(Cursor cursor) {
    int idIndex = cursor.getColumnIndexOrThrow(KEY_REQUEST_ID);
    int jsonIndex = cursor.getColumnIndexOrThrow(KEY_REQUEST_JSON_BODY);
    int urlIndex = cursor.getColumnIndexOrThrow(KEY_REQUEST_TARGET_URL);
    int verbIndex = cursor.getColumnIndexOrThrow(KEY_REQUEST_VERB);
    int syncedIndex = cursor.getColumnIndexOrThrow(KEY_REQUEST_SYNCHRONIZED);
    int headerNameIndex = cursor.getColumnIndexOrThrow(KEY_HEADER_NAME);
    int headerValueIndex = cursor.getColumnIndexOrThrow(KEY_HEADER_VALUE);
    int objectIdIndex = cursor.getColumnIndexOrThrow(KEY_REQUEST_OBJECT_ID);
    int fileIdIndex = cursor.getColumnIndexOrThrow(KEY_REQUEST_FILE_ID);
    LinkedHashMap<Integer, RequestDBObject> requestMapping = new LinkedHashMap<Integer, RequestDBObject>();
    while (cursor.moveToNext()) {
        Integer id = cursor.getInt(idIndex);

        RequestDBObject request = requestMapping.get(id);
        if (request == null) {
            String json = cursor.getString(jsonIndex);
            String url = cursor.getString(urlIndex);
            String verb = cursor.getString(verbIndex);
            String objectId = cursor.getString(objectIdIndex);
            String fileId = cursor.getString(fileIdIndex);
            int syncOrdinal = cursor.getInt(syncedIndex);
            RequestDBObject.SyncStatus status = RequestDBObject.SyncStatus.getSyncStatus(syncOrdinal);
            request = new RequestDBObject(url, RequestDBObject.Verb.getVerb(verb), json, objectId, fileId, id,
                    status, new ArrayList<Header>());
            requestMapping.put(id, request);
        }
        String headerName = cursor.getString(headerNameIndex);
        String headerValue = cursor.getString(headerValueIndex);
        request.addHeader(new BasicHeader(headerName, headerValue));
    }
    return requestMapping;
}
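
The method above relies on a common get-then-put grouping idiom: get returns null the first time an id is seen, which triggers creation of the aggregate entry. A stripped-down sketch of the same idiom, using made-up data instead of the Cursor and RequestDBObject types from the source:

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;

public class GroupByKeyDemo {
    public static void main(String[] args) {
        // Group header names by request id while preserving the order ids are first seen
        LinkedHashMap<Integer, List<String>> headersById = new LinkedHashMap<Integer, List<String>>();
        String[][] rows = { { "1", "Accept" }, { "2", "Content-Type" }, { "1", "Authorization" } };

        for (String[] row : rows) {
            Integer id = Integer.valueOf(row[0]);
            List<String> headers = headersById.get(id); // null the first time the id is seen
            if (headers == null) {
                headers = new ArrayList<String>();
                headersById.put(id, headers);
            }
            headers.add(row[1]);
        }

        // Prints {1=[Accept, Authorization], 2=[Content-Type]} -- insertion order preserved
        System.out.println(headersById);
    }
}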

From source file:annis.visualizers.component.grid.EventExtractor.java

/**
 * Converts a Salt document graph to rows.
 *
 * @param input the visualizer input that carries the Salt document graph
 * @param annotationNames the annotation names defined by the user
 * @param startTokenIndex token index of the first token in the match
 * @param endTokenIndex token index of the last token in the match
 * @return a map from annotation name to the rows built for that annotation
 */
public static LinkedHashMap<String, ArrayList<Row>> parseSalt(VisualizerInput input,
        List<String> annotationNames, long startTokenIndex, long endTokenIndex) {

    SDocumentGraph graph = input.getDocument().getSDocumentGraph();

    // only look at annotations which were defined by the user
    LinkedHashMap<String, ArrayList<Row>> rowsByAnnotation = new LinkedHashMap<String, ArrayList<Row>>();

    for (String anno : annotationNames) {
        rowsByAnnotation.put(anno, new ArrayList<Row>());
    }

    int eventCounter = 0;

    PDFPageHelper pageNumberHelper = new PDFPageHelper(input);

    for (SSpan span : graph.getSSpans()) {
        // calculate the left and right values of a span
        // TODO: how to get these numbers with Salt?
        long leftLong = span.getSFeature(ANNIS_NS, FEAT_LEFTTOKEN).getSValueSNUMERIC();
        long rightLong = span.getSFeature(ANNIS_NS, FEAT_RIGHTTOKEN).getSValueSNUMERIC();

        leftLong = clip(leftLong, startTokenIndex, endTokenIndex);
        rightLong = clip(rightLong, startTokenIndex, endTokenIndex);

        int left = (int) (leftLong - startTokenIndex);
        int right = (int) (rightLong - startTokenIndex);

        for (SAnnotation anno : span.getSAnnotations()) {
            ArrayList<Row> rows = rowsByAnnotation.get(anno.getQName());
            if (rows == null) {
                // try again with only the name
                rows = rowsByAnnotation.get(anno.getSName());
            }
            if (rows != null) {
                // only do something if the annotation was defined before

                // 1. give each annotation of each span an own row
                Row r = new Row();

                String id = "event_" + eventCounter++;
                GridEvent event = new GridEvent(id, left, right, anno.getSValueSTEXT());

                // check if the span is a matched node
                SFeature featMatched = span.getSFeature(ANNIS_NS, FEAT_MATCHEDNODE);
                Long match = featMatched == null ? null : featMatched.getSValueSNUMERIC();
                event.setMatch(match);

                // calculate overlapped SToken
                EList<Edge> outEdges = graph.getOutEdges(span.getSId());
                if (outEdges != null) {
                    for (Edge e : outEdges) {
                        if (e instanceof SSpanningRelation) {
                            SSpanningRelation spanRel = (SSpanningRelation) e;

                            SToken tok = spanRel.getSToken();
                            event.getCoveredIDs().add(tok.getSId());

                            // get the STextualDS of this token and add it to the event
                            EList<Edge> tokenOutEdges = graph.getOutEdges(tok.getSId());
                            if (tokenOutEdges != null) {
                                for (Edge tokEdge : tokenOutEdges) {
                                    if (tokEdge instanceof STextualRelation) {
                                        event.setTextID(((STextualRelation) tokEdge).getSTextualDS().getSId());
                                        break;
                                    }
                                }
                            }
                        }
                    }
                }

                // try to get time annotations
                double[] startEndTime = TimeHelper.getOverlappedTime(span);
                if (startEndTime.length == 1) {
                    event.setStartTime(startEndTime[0]);
                } else if (startEndTime.length == 2) {
                    event.setStartTime(startEndTime[0]);
                    event.setEndTime(startEndTime[1]);
                }

                r.addEvent(event);
                rows.add(r);

                String page = pageNumberHelper.getPageFromAnnotation(span);
                if (page != null) {
                    event.setPage(page);
                }
            }
        } // end for each annotation of span
    } // end for each span

    // 2. merge rows when possible
    for (Map.Entry<String, ArrayList<Row>> e : rowsByAnnotation.entrySet()) {
        mergeAllRowsIfPossible(e.getValue());
    }

    // 3. sort events on one row by left token index
    for (Map.Entry<String, ArrayList<Row>> e : rowsByAnnotation.entrySet()) {
        for (Row r : e.getValue()) {
            sortEventsByTokenIndex(r);
        }
    }

    // 4. split up events if they have gaps
    for (Map.Entry<String, ArrayList<Row>> e : rowsByAnnotation.entrySet()) {
        for (Row r : e.getValue()) {
            splitRowsOnGaps(r, graph, startTokenIndex, endTokenIndex);
        }
    }
    return rowsByAnnotation;
}

From source file:be.ugent.maf.cellmissy.gui.controller.analysis.doseresponse.area.AreaDRInitialController.java

/**
 * Prepare data for fitting starting from the analysis group.
 *
 * @param dRAnalysisGroup the dose-response analysis group
 * @return a list of DoseResponsePair objects that map the (log-transformed!) concentration to
 * the replicate velocities
 */
private List<DoseResponsePair> prepareFittingData(AreaDoseResponseAnalysisGroup dRAnalysisGroup) {
    List<DoseResponsePair> result = new ArrayList<>();

    List<List<Double>> allVelocities = new ArrayList<>();
    List<Double> allLogConcentrations = new ArrayList<>();

    //put concentrations of treatment to analyze (control not included!) in list
    LinkedHashMap<Double, String> nestedMap = dRAnalysisGroup.getConcentrationsMap()
            .get(dRAnalysisGroup.getTreatmentToAnalyse());
    for (Double concentration : nestedMap.keySet()) {
        //each key can only be linked with a single value; if one concentration is set up to have more than one associated concentration unit, only the last will remain
        String unit = nestedMap.get(concentration);

        Double logConcentration = AnalysisUtils.logTransform(concentration, unit);
        allLogConcentrations.add(logConcentration);
    }

    Double lowestLogConc = Collections.min(allLogConcentrations);
    //iterate through conditions
    int x = 0;
    for (PlateCondition plateCondition : dRAnalysisGroup.getVelocitiesMap().keySet()) {
        List<Double> replicateVelocities = dRAnalysisGroup.getVelocitiesMap().get(plateCondition);

        //check if this platecondition is the control
        for (Treatment treatment : plateCondition.getTreatmentList()) {
            if (treatment.getTreatmentType().getName().contains("ontrol")) {
                allLogConcentrations.add(x, lowestLogConc - 1.0);
            }
        }

        allVelocities.add(replicateVelocities);
        x++;
    }

    for (int i = 0; i < allVelocities.size(); i++) {
        result.add(new DoseResponsePair(allLogConcentrations.get(i), allVelocities.get(i)));
    }
    return result;
}

From source file:com.alibaba.wasp.plan.parser.druid.DruidDQLParser.java

private List<ColumnStruct> buildAllConditionColumns(FTable table, QueryInfo queryInfo)
        throws UnsupportedException {
    List<ColumnStruct> columns = new ArrayList<ColumnStruct>();
    LinkedHashMap<String, Field> fields = table.getColumns();
    for (String fieldname : queryInfo.getAllConditionFieldName()) {
        Field field = fields.get(fieldname);
        if (field != null) {
            ColumnStruct column = buildColumnStruct(table, queryInfo, field);
            columns.add(column);
        }
    }
    return columns;
}

From source file:gov.llnl.lc.infiniband.opensm.plugin.gui.chart.PortCounterXYplotPanel.java

/**
 * Creates a time series dataset of the selected port counter from the OMS history.
 *
 * @param history the collection of OpenSM monitor service snapshots to iterate over
 *
 * @return The dataset.
 */
private XYDataset createDataset(OMS_Collection history) {
    // iterate through the collection, and build up a time series
    for (int j = 0; j < history.getSize(); j++) {
        OpenSmMonitorService osm = history.getOMS(j);

        // find the desired port counter, in this instance
        LinkedHashMap<String, OSM_Port> pL = osm.getFabric().getOSM_Ports();
        OSM_Port p = pL.get(OSM_Port.getOSM_PortKey(Port));
        long lValue = prevVal;
        TimeStamp ts = prevTS;
        if ((p != null) && (p.pfmPort != null)) {
            lValue = p.pfmPort.getCounter(PortCounter);
            ts = p.pfmPort.getCounterTimeStamp();
            prevVal = lValue;
            prevTS = ts;
        }

        RegularTimePeriod ms = new FixedMillisecond(ts.getTimeInMillis());
        //      TSeries.add(ms, (double)lValue);
        TSeries.addOrUpdate(ms, (double) lValue);
    }
    TimeSeriesCollection dataset = new TimeSeriesCollection();
    dataset.addSeries(TSeries);

    return dataset;
}

From source file:com.opengamma.analytics.financial.interestrate.capletstripping.CapletStrippingFunction.java

@Override
public DoubleMatrix1D evaluate(final DoubleMatrix1D x) {

    final LinkedHashMap<String, InterpolatedDoublesCurve> curves = _curveBuilder.evaluate(x);

    // set any known (i.e. fixed) curves
    if (_knownParameterTermStructures != null) {
        curves.putAll(_knownParameterTermStructures);
    }

    //TODO for now this is tied to SABRTermStructureParameters - we want to be able to drop in any volatility model that has a term structure of parameters
    final VolatilityModel1D volModel = new SABRTermStructureParameters(curves.get(ALPHA), curves.get(BETA),
            curves.get(RHO), curves.get(NU));

    final double[] res = new double[_capPricers.size()];
    for (int i = 0; i < _capPricers.size(); i++) {
        res[i] = _capPricers.get(i).impliedVol(volModel);
    }

    return new DoubleMatrix1D(res);
}

From source file:gov.nih.nci.logging.api.persistence.LogMessageDAOImpl.java

/**
 * Populates the Hibernate Criteria object with Expression and sort order details based on the SearchCriteria object.
 * @param searchCriteria the search criteria holding the filter values and the sort order sequence
 * @param criteria the Hibernate Criteria to populate
 */
private void populateCriteria(SearchCriteria searchCriteria, Criteria criteria) {

    criteria.add(createExpressionForDate(searchCriteria));

    if (!StringUtils.isBlank(searchCriteria.getApplication())) {
        criteria.add(Expression.eq(_APPLICATION, searchCriteria.getApplication().trim()));
    }
    if (!StringUtils.isBlank(searchCriteria.getLogLevel())) {
        criteria.add(Expression.eq(_LOG_LEVEL, searchCriteria.getLogLevel().trim()));
    }
    if (!StringUtils.isBlank(searchCriteria.getMessage())) {
        criteria.add(Expression.like(_MESSAGE, "%" + searchCriteria.getMessage() + "%"));
    }
    if (!StringUtils.isBlank(searchCriteria.getNdc())) {
        criteria.add(Expression.like(_NDC, "%" + searchCriteria.getNdc() + "%"));
    }
    if (!StringUtils.isBlank(searchCriteria.getObjectID())) {
        criteria.add(Expression.eq(_OBJECT_ID, searchCriteria.getObjectID().trim()));
    }
    if (!StringUtils.isBlank(searchCriteria.getObjectName())) {
        criteria.add(Expression.eq(_OBJECT_NAME, searchCriteria.getObjectName().trim()));
    }
    if (!StringUtils.isBlank(searchCriteria.getOperation())) {
        criteria.add(Expression.eq(_OPERATION, searchCriteria.getOperation().trim()));
    }
    if (!StringUtils.isBlank(searchCriteria.getOrganization())) {
        criteria.add(Expression.like(_ORGANIZATION, "%" + searchCriteria.getOrganization() + "%"));
    }
    if (!StringUtils.isBlank(searchCriteria.getServer())) {
        criteria.add(Expression.eq(_SERVER, searchCriteria.getServer().trim()));
    }
    if (!StringUtils.isBlank(searchCriteria.getSessionID())) {
        criteria.add(Expression.eq(_SESSION_ID, searchCriteria.getSessionID().trim()));
    }
    if (!StringUtils.isBlank(searchCriteria.getThrowable())) {
        criteria.add(Expression.eq(_THROWABLE, searchCriteria.getThrowable().trim()));
    }
    if (!StringUtils.isBlank(searchCriteria.getThreadName())) {
        criteria.add(Expression.like(_THREAD, "%" + searchCriteria.getThreadName() + "%"));
    }
    if (!StringUtils.isBlank(searchCriteria.getUserName())) {
        criteria.add(Expression.eq(_USERNAME, searchCriteria.getUserName().trim()));
    }

    // Sort By criteria.
    LinkedHashMap lhp = (LinkedHashMap) searchCriteria.getSortByOrderSequence();
    if (lhp != null) {
        Iterator iter = lhp.keySet().iterator();
        while (iter.hasNext()) {
            String key = (String) iter.next();
            String value = (String) lhp.get(key);
            if (SORT_ORDER_ASCENDING.equalsIgnoreCase(value)) {
                criteria.addOrder(Order.asc(key));
            }
            if (SORT_ORDER_DESCENDING.equalsIgnoreCase(value)) {
                criteria.addOrder(Order.desc(key));
            }
        }
    }
}

From source file:com.qwazr.connectors.TableRealmConnector.java

@Override
public Account verify(String id, Credential credential) {

    // This realm only supports one type of credential
    if (!(credential instanceof PasswordCredential))
        throw new RuntimeException("Unsupported credential type: " + credential.getClass().getName());

    PasswordCredential passwordCredential = (PasswordCredential) credential;

    // Query the database for the user row
    final LinkedHashMap<String, Object> row;
    try {
        row = tableService.getRow(table_name, id, columns);
        if (row == null)
            return null;
    } catch (WebApplicationException e) {
        if (e.getResponse().getStatusInfo().getFamily() == Response.Status.Family.CLIENT_ERROR)
            return authenticationFailure("Unknown user: " + id);
        throw e;
    }

    Object password = row.get(password_column);
    if (password == null)
        return null;
    if (password instanceof String[]) {
        String[] passwordArray = (String[]) password;
        if (passwordArray.length == 0)
            return null;
        password = passwordArray[0];
    }

    // The password is stored hashed
    final String passwd = new String(passwordCredential.getPassword());
    String digest = DigestUtils.sha256Hex(passwd);
    if (!digest.equals(password))
        return authenticationFailure("Wrong password: " + id + " " + digest + '/' + passwd + '/' + password);

    //We retrieve the roles
    Object object = row.get(roles_column);
    LinkedHashSet<String> roles = new LinkedHashSet<String>();
    if (object instanceof String[]) {
        for (Object o : (String[]) object)
            roles.add(o.toString());
    } else
        roles.add(object.toString());

    return new Account() {
        @Override
        public Principal getPrincipal() {
            return new Principal() {
                @Override
                public String getName() {
                    return id;
                }
            };
        }

        @Override
        public Set<String> getRoles() {
            return roles;
        }
    };
}

From source file:com.sapito.db.dao.AbstractDao.java

/**
 * Retrieves all records for a given entity, sorted with the criteria
 * established through the <code>ordering</code> map.
 *
 * @param ordering : ["fieldName", "ASC" | "DESC"] <br>
 *                   (if a value is not "ASC", "DESC" is used by default)
 *
 * @return the retrieved records, sorted by the given criteria
 */
public List<T> findAll(LinkedHashMap<String, String> ordering) {
    CriteriaQuery cq = entityManager.getCriteriaBuilder().createQuery();
    Root<T> root = cq.from(entityClass);
    cq.select(root);

    if (ordering != null) {
        CriteriaBuilder cb = entityManager.getCriteriaBuilder();
        Set<String> set = ordering.keySet();
        List<Order> orders = new ArrayList<>();
        for (String orderingField : set) {
            Order order = (ordering.get(orderingField).equals("ASC")) ? cb.asc(root.get(orderingField))
                    : cb.desc(root.get(orderingField));
            orders.add(order);
        }

        cq.orderBy(orders);
    }

    return entityManager.createQuery(cq).setMaxResults(MAX_RECORDS_RETURNED).getResultList();
}