Example usage for java.util LinkedHashMap keySet

Introduction

On this page you can find usage examples for java.util.LinkedHashMap.keySet().

Prototype

public Set<K> keySet() 

Document

Returns a Set view of the keys contained in this map.
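
The returned set is backed by the map: changes to the map are reflected in the set, and removing a key through the set removes the corresponding entry. For a LinkedHashMap, the set's iterator additionally walks the keys in insertion order (or access order, if the map was constructed that way). A minimal self-contained sketch of typical usage:

import java.util.LinkedHashMap;
import java.util.Map;

public class KeySetDemo {
    public static void main(String[] args) {
        Map<String, Integer> map = new LinkedHashMap<>();
        map.put("alpha", 1);
        map.put("beta", 2);
        map.put("gamma", 3);

        // Insertion order is preserved: alpha, beta, gamma
        for (String key : map.keySet()) {
            System.out.println(key + " -> " + map.get(key));
        }

        // The view is live: removing a key from the set removes the entry
        map.keySet().remove("beta");
        System.out.println(map); // prints {alpha=1, gamma=3}
    }
}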

Usage

From source file:com.miuhouse.yourcompany.student.view.widget.date.datepicker.DayPickerPagerAdapter.java

public void setCalendar(LinkedHashMap<Integer, List<Integer>> countDate, boolean isfill) {
    isFill = isfill;
    this.countDate = countDate;

    for (Integer integer : countDate.keySet()) {
        monthCount.add(integer);
    }

}
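
Since keySet() returns a Collection, the copy loop above can be collapsed into a single call; a one-line sketch, assuming monthCount is a collection of Integer (its declaration is not shown in the snippet):

    monthCount.addAll(countDate.keySet());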

From source file:org.egov.collection.integration.pgi.AxisAdaptor.java

private String hashAllFields(final LinkedHashMap<String, String> fields) {

    final String axisSecureSecret = collectionApplicationProperties.axisSecureSecret();
    byte[] decodedKey;
    byte[] hashValue = null;
    // Sort the field names in ascending order
    final List<String> fieldNames = new ArrayList<>(fields.keySet());
    Collections.sort(fieldNames);

    // Iterate through the field names and build the message to hash. Format: fieldname1=fieldvalue1&fieldname2=fieldvalue2
    final Iterator<String> itr = fieldNames.iterator();
    final StringBuilder hashingMessage = new StringBuilder();
    int i = 0;
    while (itr.hasNext()) {
        final String fieldName = itr.next();
        final String fieldValue = fields.get(fieldName);
        if (fieldValue != null && fieldValue.length() > 0) {
            if (i != 0)
                hashingMessage.append("&");
            hashingMessage.append(fieldName).append("=").append(fieldValue);
            i++;
        }
    }
    try {
        decodedKey = Hex.decodeHex(axisSecureSecret.toCharArray());
        SecretKeySpec keySpec = new SecretKeySpec(decodedKey, "HmacSHA256");
        Mac mac = Mac.getInstance("HmacSHA256");
        mac.init(keySpec);
        byte[] hashingMessageBytes = hashingMessage.toString().getBytes(UTF8);
        hashValue = mac.doFinal(hashingMessageBytes);
    } catch (DecoderException | NoSuchAlgorithmException | InvalidKeyException
            | UnsupportedEncodingException e) {
        // Note: if any step fails, hashValue remains null and the
        // printHexBinary call below throws a NullPointerException.
        e.printStackTrace();
    }
    return DatatypeConverter.printHexBinary(hashValue);
}
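
An aside on the sort step: for String keys, copying the key set into a list and sorting it produces the same ascending order as iterating a TreeMap view of the map, so the loop could equivalently be written as below. This is a sketch, not the project's code; the HMAC logic is unchanged:

// Same ascending key order without the explicit list and sort
for (Map.Entry<String, String> e : new TreeMap<>(fields).entrySet()) {
    final String fieldName = e.getKey();
    final String fieldValue = e.getValue();
    // ... append to hashingMessage as above
}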

From source file:ubic.gemma.datastructure.matrix.ExpressionDataMatrixColumnSort.java

/**
 * Sort biomaterials according to a list of ordered factors.
 *
 * @param start biomaterials to sort
 * @param factors sorted list of factors defining the sort order for the biomaterials; cannot be null
 * @return the biomaterials ordered by the given factors
 */
public static List<BioMaterial> orderBiomaterialsBySortedFactors(List<BioMaterial> start,
        List<ExperimentalFactor> factors) {

    if (start.size() == 1) {
        return start;
    }

    if (start.size() == 0) {
        throw new IllegalArgumentException("Must provide some biomaterials");
    }
    if (factors == null) {
        throw new IllegalArgumentException("Must provide sorted factors, or at least an empty list");
    }
    if (factors.isEmpty()) {
        // we're done.
        return start;
    }

    ExperimentalFactor simplest = factors.get(0);

    if (simplest == null) {
        // we're done.
        return start;
    }

    /*
     * Order this chunk by the selected factor
     */

    Map<FactorValue, List<BioMaterial>> fv2bms = buildFv2BmMap(start);
    List<BioMaterial> ordered = orderByFactor(simplest, fv2bms, start,
            new HashMap<ExperimentalFactor, Collection<BioMaterial>>());

    LinkedList<ExperimentalFactor> factorsStillToDo = new LinkedList<ExperimentalFactor>();
    factorsStillToDo.addAll(factors);
    factorsStillToDo.remove(simplest);

    if (factorsStillToDo.size() == 0) {
        /*
         * No more ordering is necessary.
         */
        return ordered;
    }

    log.debug("Factors: " + factors.size());

    /*
     * Recurse in and order each chunk. First split it up, but retaining the order we just made.
     */
    LinkedHashMap<FactorValue, List<BioMaterial>> chunks = chunkOnFactor(simplest, ordered);

    if (chunks == null) {
        // this means we should bail, gracefully.
        return start;
    }

    /*
     * Process each chunk.
     */
    List<BioMaterial> result = new ArrayList<BioMaterial>();
    for (FactorValue fv : chunks.keySet()) {
        List<BioMaterial> chunk = chunks.get(fv);

        if (chunk.size() < 2) {
            result.addAll(chunk);
        } else {
            List<BioMaterial> orderedChunk = orderBiomaterialsBySortedFactors(chunk, factorsStillToDo);
            result.addAll(orderedChunk);
        }
    }

    return result;

}
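
One note on the chunk loop: iterating keySet() and calling get() inside the loop performs a map lookup per key; when both the key and the value are needed, entrySet() avoids the extra lookup. A behavior-preserving sketch of the same loop:

for (Map.Entry<FactorValue, List<BioMaterial>> e : chunks.entrySet()) {
    List<BioMaterial> chunk = e.getValue();
    // ... process chunk exactly as above
}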

From source file:com.alibaba.wasp.plan.parser.druid.DruidDMLParser.java

/**
 * Process a DELETE statement and generate a QueryPlan.
 */
private void getDeletePlan(ParseContext context, SQLDeleteStatement sqlDeleteStatement,
        MetaEventOperation metaEventOperation) throws IOException {
    // DELETE FROM users WHERE id = 123;

    // Parse The FROM clause
    String wtableName = parseFromClause(sqlDeleteStatement.getTableSource());
    LOG.debug("UPDATE SQL From clause " + sqlDeleteStatement.getTableSource());
    // check if table exists and get Table info
    FTable table = metaEventOperation.checkAndGetTable(wtableName, false);

    // Parse The WHERE clause
    SQLExpr where = sqlDeleteStatement.getWhere();
    LOG.debug("UPDATE SQL where " + where);
    LinkedHashMap<String, Condition> eqConditions = new LinkedHashMap<String, Condition>();
    LinkedHashMap<String, Condition> ranges = new LinkedHashMap<String, Condition>();
    ParserUtils.parse(where, eqConditions, ranges);
    if (ranges.size() > 0) {
        throw new UnsupportedException("RANGE is not supported!");
    }

    // check if table has this columns
    metaEventOperation.checkAndGetFields(table, eqConditions.keySet());

    List<Pair<String, byte[]>> primaryKeyPairs = metaEventOperation.getPrimaryKeyPairList(table, eqConditions,
            null);
    if (primaryKeyPairs == null) {
        throw new NotMatchPrimaryKeyException("Not match primary key.");
    }

    byte[] primaryKey = RowBuilder.build().genRowkey(primaryKeyPairs);
    DeleteAction action = new DeleteAction(wtableName, primaryKey);
    if (context.isGenWholePlan()) {
        Condition entityGroupKeyCondition = ParserUtils.getCondition(table.getEntityGroupKey().getName(),
                eqConditions);
        // Get entityGroupLocation according to entity group key
        EntityGroupLocation entityGroupLocation = this.connection.locateEntityGroup(
                Bytes.toBytes(table.getTableName()),
                DruidParser.convert(table.getColumn(entityGroupKeyCondition.getFieldName()),
                        entityGroupKeyCondition.getValue()));
        action.setEntityGroupLocation(entityGroupLocation);
    }
    List<DeleteAction> actions = new ArrayList<DeleteAction>();
    actions.add(action);
    DeletePlan deletePlan = new DeletePlan(actions);
    context.setPlan(deletePlan);
    LOG.debug("DeletePlan " + deletePlan.toString());
}

From source file:com.vmware.photon.controller.cloudstore.dcp.monitors.CloudStoreCache.java

public CloudStoreCache(DcpRestClient dcpRestClient, LinkedHashMap<String, Class> paths) {
    this.dcpRestClient = dcpRestClient;

    // The cache holds multiple paths and updates them according to the
    // order in which the paths were supplied. Order matters when retrieving
    // the data because some resources reference other documents, which must
    // be retrieved before the event is triggered.

    this.currentData = new LinkedHashMap();

    this.pathTypes = new HashMap();
    for (String typeName : paths.keySet()) {
        this.pathTypes.put(typeName, paths.get(typeName));
    }

    /*
     * Since the CloudStoreCache can cache multiple paths at the same
     * time, a prefix mapping is needed to map a prefix with all its
     * suffixes (i.e. its children). For example, if the paths variable is
     * {A,B}, and A has a1 and a2 as children, and B has b1 as a child, then
     * the prefix map would look like this:
     *    A -> map1
     *    B -> map2
     *
     *    where map1 is :
     *    a1 -> a1 Document
     *    a2 -> a2 Document
     *
     *    and map2 is:
     *    b1 -> b1 Document
     */
    for (String prefixPath : paths.keySet()) {
        this.currentData.put(prefixPath, new HashMap());
    }
}
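
The copy loop in this constructor is equivalent to a single Map.putAll call:

    this.pathTypes.putAll(paths);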

From source file:shuffle.fwk.config.ConfigManager.java

public <T extends Object> List<T> getValues(EntryType type, Class<T> c) {
    List<T> ret = new ArrayList<T>();
    synchronized (data) {
        LinkedHashMap<String, ConfigEntry> mappings = data.get(type);
        for (String key : mappings.keySet()) {
            ConfigEntry entry = mappings.get(key);
            Object value = entry.getValue();
            if (c.isInstance(value)) {
                T casted = c.cast(value);
                ret.add(casted);
            }
        }
    }
    return ret;
}

From source file:com.impetus.ankush.common.dao.impl.GenericDaoJpa.java

/**
 * Creates the clause list.
 * 
 * @param queryMap
 *            the query map
 * @param startIndex
 *            the start index
 * @param joiner
 *            the joiner
 * @return the string
 */
private String createClauseList(LinkedHashMap<String, Object> queryMap, int startIndex, String joiner) {
    List<StringBuilder> clauses = new ArrayList<StringBuilder>(queryMap.size());

    for (String property : queryMap.keySet()) {
        clauses.add(new StringBuilder("obj.").append(property).append(" = ?").append(startIndex++));
    }
    return StringUtils.join(clauses, joiner);
}
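
For illustration: because the LinkedHashMap iterates its keys in insertion order, the generated placeholders are numbered predictably. With a hypothetical queryMap holding the keys name and age (in that order), startIndex = 1, and joiner = " AND ", the method returns:

    obj.name = ?1 AND obj.age = ?2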

From source file:shuffle.fwk.config.ConfigManager.java

/**
 * @return a map from entry-type name to the list of lines to write for that type
 */
private LinkedHashMap<String, List<String>> getDataStrings() {
    LinkedHashMap<String, List<String>> dataToWrite = new LinkedHashMap<String, List<String>>();
    for (EntryType type : EntryType.values()) {
        if (!data.containsKey(type)) {
            continue;
        }
        List<String> linesToWrite = new ArrayList<String>();
        LinkedHashMap<String, ConfigEntry> typeMappings = data.get(type);
        for (String key : typeMappings.keySet()) {
            ConfigEntry entry = typeMappings.get(key);
            String entryString = safelyGetSaveString(entry);
            linesToWrite.add(String.format("%s %s", key, entryString));
        }
        dataToWrite.put(type.toString(), linesToWrite);
    }
    return dataToWrite;
}

From source file:org.cytoscape.kddn.internal.KddnMethods.java

/**
 * Variable selection based on a t-test.
 *
 * @param var variable names (duplicates allowed; the occurrence with the smallest p-value wins)
 * @param d1 data for condition 1, one column per variable
 * @param d2 data for condition 2, one column per variable
 * @return column indices of the selected variables, in first-seen name order
 */
public static int[] variableSelection(String[] var, double[][] d1, double[][] d2) {

    LinkedHashMap<String, Integer> selectedVar = new LinkedHashMap<String, Integer>();
    LinkedHashMap<String, Double> varPvalue = new LinkedHashMap<String, Double>();

    TTest aTest = new TTest();

    for (int i = 0; i < var.length; i++) {
        double pval = 0;

        // equal variance
        //pval = aTest.homoscedasticTTest(getColumn(d1,i), getColumn(d2,i));
        // unequal variance
        pval = aTest.tTest(getColumn(d1, i), getColumn(d2, i));

        if (selectedVar.containsKey(var[i])) {
            if (varPvalue.get(var[i]) > pval) {
                selectedVar.put(var[i], i);
                varPvalue.put(var[i], pval);
            }
        } else {
            selectedVar.put(var[i], i);
            varPvalue.put(var[i], pval);
        }
    }

    int[] idx = new int[selectedVar.size()];
    int i = 0;
    for (String s : selectedVar.keySet()) {
        idx[i] = selectedVar.get(s);
        i++;
    }

    return idx;
}
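
Because selectedVar is a LinkedHashMap, keySet() iterates the names in first-seen order, so idx preserves the original variable order. Since only the values are used in the final loop, it can equally be written over values(), which iterates in the same order:

int[] idx = new int[selectedVar.size()];
int i = 0;
for (int columnIndex : selectedVar.values()) {
    idx[i++] = columnIndex;
}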

From source file:net.sf.maltcms.chromaui.normalization.spi.charts.PeakGroupRtBoxPlot.java

protected String getPeakName(IPeakGroupDescriptor pgd) {
    String rt = "mean rt: " + String.format("%.2f", pgd.getMeanApexTime()) + "+/-"
            + String.format("%.2f", pgd.getApexTimeStdDev()) + "; median rt: "
            + String.format("%.2f", pgd.getMedianApexTime()) + ": ";
    LinkedHashMap<String, Integer> names = new LinkedHashMap<>();
    if (!pgd.getDisplayName().equals(pgd.getName())) {
        return rt + pgd.getDisplayName();
    }
    for (IPeakAnnotationDescriptor ipad : pgd.getPeakAnnotationDescriptors()) {
        if (names.containsKey(ipad.getName())) {
            names.put(ipad.getName(), names.get(ipad.getName()) + 1);
        } else {
            names.put(ipad.getName(), 1);
        }
    }
    if (names.isEmpty()) {
        return rt + "<NA>";
    }
    if (names.size() > 1) {
        StringBuilder sb = new StringBuilder();
        for (String key : names.keySet()) {
            sb.append(key);
            sb.append(" (" + names.get(key) + ")");
            sb.append(" | ");
        }
        // Drop the trailing " | " separator before returning
        sb.setLength(sb.length() - 3);
        return rt + sb.toString();
    } else {
        return rt + names.keySet().toArray(new String[0])[0];
    }
}
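
The name-counting loop is the classic frequency-count idiom; on Java 8 and later it can be written with Map.merge, with identical behavior:

for (IPeakAnnotationDescriptor ipad : pgd.getPeakAnnotationDescriptors()) {
    names.merge(ipad.getName(), 1, Integer::sum);
}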