List of usage examples for java.util.LinkedHashMap.keySet()
public Set<K> keySet()
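Before the project examples below, here is a minimal self-contained sketch (not taken from any of the listed projects) of the property these examples rely on: the keySet() view of a LinkedHashMap iterates in insertion order, and removing a key through the view removes the mapping from the map.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

public class KeySetDemo {
    public static void main(String[] args) {
        Map<String, Integer> scores = new LinkedHashMap<>();
        scores.put("alice", 3);
        scores.put("bob", 1);
        scores.put("carol", 2);

        // keySet() is a view over the map, iterated in insertion order.
        Set<String> keys = scores.keySet();
        System.out.println(keys);   // [alice, bob, carol]

        // Removing from the view also removes the entry from the map.
        keys.remove("bob");
        System.out.println(scores); // {alice=3, carol=2}
    }
}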
From source file:com.pimp.companionforband.utils.jsontocsv.writer.CSVWriter.java
private LinkedHashSet<String> collectHeaders(List<LinkedHashMap<String, String>> flatJson) {
    // Union of all keys across the flattened JSON rows, preserving the order
    // in which each key is first seen.
    LinkedHashSet<String> headers = new LinkedHashSet<>();
    for (LinkedHashMap<String, String> map : flatJson) {
        headers.addAll(map.keySet());
    }
    return headers;
}
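A hypothetical, self-contained illustration of the pattern above (the row data and class name are invented, not part of CSVWriter): taking the union of keySet() views across flattened JSON rows yields the CSV header columns in the order each key first appears.

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;

public class HeaderUnionDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, String> row1 = new LinkedHashMap<>();
        row1.put("id", "1");
        row1.put("name", "a");

        LinkedHashMap<String, String> row2 = new LinkedHashMap<>();
        row2.put("id", "2");
        row2.put("email", "b@example.com");

        // Same pattern as collectHeaders: union of keySet() views, ordered by first appearance.
        LinkedHashSet<String> headers = new LinkedHashSet<>();
        for (LinkedHashMap<String, String> row : Arrays.asList(row1, row2)) {
            headers.addAll(row.keySet());
        }
        System.out.println(headers); // [id, name, email]
    }
}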
From source file:com.espertech.esper.epl.join.plan.NStreamOuterQueryPlanBuilder.java
private static List<LookupInstructionPlan> buildLookupInstructions(int rootStreamNum,
        LinkedHashMap<Integer, int[]> substreamsPerStream, boolean[] requiredPerStream, String[] streamNames,
        QueryGraph queryGraph, QueryPlanIndex[] indexSpecs, EventType[] typesPerStream,
        OuterJoinDesc[] outerJoinDescList, boolean[] isHistorical,
        HistoricalStreamIndexList[] historicalStreamIndexLists, ExprEvaluatorContext exprEvaluatorContext) {
    List<LookupInstructionPlan> result = new LinkedList<LookupInstructionPlan>();

    for (int fromStream : substreamsPerStream.keySet()) {
        int[] substreams = substreamsPerStream.get(fromStream);

        // for streams with no substreams we don't need to look up
        if (substreams.length == 0) {
            continue;
        }

        TableLookupPlan plans[] = new TableLookupPlan[substreams.length];
        HistoricalDataPlanNode historicalPlans[] = new HistoricalDataPlanNode[substreams.length];

        for (int i = 0; i < substreams.length; i++) {
            int toStream = substreams[i];

            if (isHistorical[toStream]) {
                // There may not be an outer-join descriptor, use if provided to build the associated expression
                ExprNode outerJoinExpr = null;
                if (outerJoinDescList.length > 0) {
                    OuterJoinDesc outerJoinDesc;
                    if (toStream == 0) {
                        outerJoinDesc = outerJoinDescList[0];
                    } else {
                        outerJoinDesc = outerJoinDescList[toStream - 1];
                    }
                    outerJoinExpr = outerJoinDesc.makeExprNode(exprEvaluatorContext);
                }

                if (historicalStreamIndexLists[toStream] == null) {
                    historicalStreamIndexLists[toStream] = new HistoricalStreamIndexList(toStream, typesPerStream, queryGraph);
                }
                historicalStreamIndexLists[toStream].addIndex(fromStream);
                historicalPlans[i] = new HistoricalDataPlanNode(toStream, rootStreamNum, fromStream,
                        typesPerStream.length, outerJoinExpr);
            } else {
                plans[i] = NStreamQueryPlanBuilder.createLookupPlan(queryGraph, fromStream, toStream,
                        indexSpecs[toStream], typesPerStream);
            }
        }

        String fromStreamName = streamNames[fromStream];
        LookupInstructionPlan instruction = new LookupInstructionPlan(fromStream, fromStreamName, substreams,
                plans, historicalPlans, requiredPerStream);
        result.add(instruction);
    }

    return result;
}
From source file:com.glaf.shiro.MyShiroFilterFactoryBean.java
@Override
public Map<String, String> getFilterChainDefinitionMap() {
    logger.debug("load system security properties...");
    logger.debug("filterChain size:" + filterChainDefinitionMap.size());
    LinkedHashMap<String, String> props = SecurityConfig.getProperties();
    Iterator<String> it = props.keySet().iterator();
    while (it.hasNext()) {
        String key = it.next();
        String value = props.get(key);
        // skip catch-all wildcard keys
        if (StringUtils.startsWith(key, "**") || StringUtils.startsWith(key, "/**")) {
            continue;
        }
        filterChainDefinitionMap.put(key, value);
        logger.debug("add security filter chain:" + key + "=" + value);
    }

    // require authentication for the REST and mx endpoints
    filterChainDefinitionMap.put("/rs/**", "authc");
    filterChainDefinitionMap.put("/mx/**", "authc");
    logger.debug(filterChainDefinitionMap);
    return filterChainDefinitionMap;
}
From source file:com.vmware.bdd.cli.commands.CommandsUtils.java
/**
 * Show a table (including table column names and table contents) by left-justifying.
 * More specifically, the {@code columnNamesWithGetMethodNames} argument is a map
 * whose keys are the table column names and whose values are lists of method names
 * to be invoked by reflection. The {@code entities} argument is the traversed entity
 * array; it is the source of the table data. In addition, each method name must be a
 * member of the {@code entities} argument's elements. The {@code spacesBeforeStart}
 * argument is the whitespace in front of each row.
 * <p>
 *
 * @param columnNamesWithGetMethodNames
 *           the container of table column names and invoked method names.
 * @param entities
 *           the traversed entity array.
 * @param spacesBeforeStart
 *           the whitespace in front of the row.
 * @throws Exception
 */
public static void printInTableFormat(LinkedHashMap<String, List<String>> columnNamesWithGetMethodNames,
        Object[] entities, String spacesBeforeStart) throws Exception {
    if (entities != null && entities.length > 0) {
        // get number of columns
        int columnNum = columnNamesWithGetMethodNames.size();

        String[][] table = new String[entities.length + 1][columnNum];

        // build table header: column names
        String[] tableHeader = new String[columnNum];
        Set<String> columnNames = columnNamesWithGetMethodNames.keySet();
        columnNames.toArray(tableHeader);

        // put table column names into the first row
        table[0] = tableHeader;

        // build table contents
        Collection<List<String>> getMethodNamesCollect = columnNamesWithGetMethodNames.values();
        int i = 1;
        for (Object entity : entities) {
            int j = 0;
            for (List<String> getMethodNames : getMethodNamesCollect) {
                Object tempValue = null;
                int k = 0;
                for (String methodName : getMethodNames) {
                    if (tempValue == null)
                        tempValue = entity;
                    Object value = tempValue.getClass().getMethod(methodName).invoke(tempValue);
                    if (k == getMethodNames.size() - 1) {
                        table[i][j] = value == null ? ""
                                : ((value instanceof Double)
                                        ? String.valueOf(round(((Double) value).doubleValue(), 2, BigDecimal.ROUND_FLOOR))
                                        : value.toString());
                        if (isJansiAvailable() && !isBlank(table[i][j])) {
                            table[i][j] = transferEncoding(table[i][j]);
                        }
                        j++;
                    } else {
                        tempValue = value;
                        k++;
                    }
                }
            }
            i++;
        }
        printTable(table, spacesBeforeStart);
    }
}
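A hedged caller-side sketch of how the column map for this method might be assembled; the NodeInfo entity and its getters are invented for illustration, and the call assumes the CommandsUtils class shown above is available on the classpath.

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;

public class TableDemo {

    // Hypothetical entity; only its public getters matter, since printInTableFormat
    // invokes them by reflection.
    public static class NodeInfo {
        private final String name;
        private final Double cpuUsage;

        public NodeInfo(String name, Double cpuUsage) {
            this.name = name;
            this.cpuUsage = cpuUsage;
        }

        public String getName() { return name; }
        public Double getCpuUsage() { return cpuUsage; }
    }

    public static void main(String[] args) throws Exception {
        // Column order in the printed table follows the insertion order of the
        // LinkedHashMap, because the header row is built from keySet().
        LinkedHashMap<String, List<String>> columns = new LinkedHashMap<>();
        columns.put("NAME", Arrays.asList("getName"));
        columns.put("CPU", Arrays.asList("getCpuUsage"));

        Object[] entities = { new NodeInfo("node-1", 0.75), new NodeInfo("node-2", 0.5) };
        CommandsUtils.printInTableFormat(columns, entities, "  ");
    }
}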
From source file:ai.susi.json.JsonTray.java
public JSONObject toJSON() {
    JSONObject j = new JSONObject();
    // copy the persistent entries
    for (String key : this.per.keySet()) {
        j.put(key, this.per.get(key));
    }
    // copy the volatile entries while holding the lock on the cache
    synchronized (this.vol) {
        LinkedHashMap<String, JSONObject> map = this.vol.getMap();
        for (String key : map.keySet()) {
            j.put(key, map.get(key));
        }
    }
    return j;
}
From source file:de.roderick.weberknecht.WebSocketHandshake.java
private String generateHeader(LinkedHashMap<String, String> headers) {
    String header = new String();
    for (String fieldName : headers.keySet()) {
        header += fieldName + ": " + headers.get(fieldName) + "\r\n";
    }
    return header;
}
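A hypothetical stand-alone sketch (not part of weberknecht) of the same pattern, showing why a LinkedHashMap fits here: the header fields are emitted in exactly the order they were inserted.

import java.util.LinkedHashMap;

public class HandshakeHeaderDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, String> headers = new LinkedHashMap<>();
        headers.put("Host", "example.org");
        headers.put("Upgrade", "websocket");
        headers.put("Connection", "Upgrade");

        // Same pattern as generateHeader: iterate keySet() in insertion order.
        StringBuilder header = new StringBuilder();
        for (String fieldName : headers.keySet()) {
            header.append(fieldName).append(": ").append(headers.get(fieldName)).append("\r\n");
        }
        System.out.print(header);
        // Host: example.org
        // Upgrade: websocket
        // Connection: Upgrade
    }
}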
From source file:ubic.gemma.core.datastructure.matrix.ExpressionDataMatrixColumnSort.java
/**
 * Divide the biomaterials up into chunks based on the experimental factor given, keeping everybody in order. If the
 * factor is continuous, there is just one chunk.
 *
 * @return ordered map of fv->bm where fv is of ef, or null if it couldn't be done properly.
 */
private static LinkedHashMap<FactorValue, List<BioMaterial>> chunkOnFactor(ExperimentalFactor ef,
        List<BioMaterial> bms) {

    if (bms == null) {
        return null;
    }

    LinkedHashMap<FactorValue, List<BioMaterial>> chunks = new LinkedHashMap<>();

    /*
     * Get the factor values in the order we have things right now
     */
    for (BioMaterial bm : bms) {
        for (FactorValue fv : bm.getFactorValues()) {
            if (!ef.getFactorValues().contains(fv)) {
                continue;
            }
            if (chunks.keySet().contains(fv)) {
                continue;
            }
            chunks.put(fv, new ArrayList<BioMaterial>());
        }
    }

    /*
     * What if bm doesn't have a value for the factorvalue. Need a dummy value.
     */
    FactorValue dummy = FactorValue.Factory.newInstance(ef);
    dummy.setValue("");
    dummy.setId(-1L);
    chunks.put(dummy, new ArrayList<BioMaterial>());

    for (BioMaterial bm : bms) {
        boolean found = false;
        for (FactorValue fv : bm.getFactorValues()) {
            if (ef.getFactorValues().contains(fv)) {
                found = true;
                assert chunks.containsKey(fv);
                chunks.get(fv).add(bm);
            }
        }
        if (!found) {
            if (ExpressionDataMatrixColumnSort.log.isDebugEnabled())
                ExpressionDataMatrixColumnSort.log
                        .debug(bm + " has no value for factor=" + ef + "; using dummy value");
            chunks.get(dummy).add(bm);
        }
    }

    if (chunks.get(dummy).size() == 0) {
        if (ExpressionDataMatrixColumnSort.log.isDebugEnabled())
            ExpressionDataMatrixColumnSort.log.debug("removing dummy");
        chunks.remove(dummy);
    }

    ExpressionDataMatrixColumnSort.log
            .debug(chunks.size() + " chunks for " + ef + ", from current chunk of size " + bms.size());

    /*
     * Sanity check
     */
    int total = 0;
    for (FactorValue fv : chunks.keySet()) {
        List<BioMaterial> chunk = chunks.get(fv);
        total += chunk.size();
    }

    assert total == bms.size() : "expected " + bms.size() + ", got " + total;

    return chunks;
}
From source file:ubic.gemma.core.datastructure.matrix.ExpressionDataMatrixColumnSort.java
/**
 * Sort biomaterials according to a list of ordered factors.
 *
 * @param start   biomaterials to sort
 * @param factors sorted list of factors to define sort order for biomaterials, cannot be null
 */
private static List<BioMaterial> orderBiomaterialsBySortedFactors(List<BioMaterial> start,
        List<ExperimentalFactor> factors) {

    if (start.size() == 1) {
        return start;
    }

    if (start.size() == 0) {
        throw new IllegalArgumentException("Must provide some biomaterials");
    }
    if (factors == null) {
        throw new IllegalArgumentException("Must provide sorted factors, or at least an empty list");
    }
    if (factors.isEmpty()) {
        // we're done.
        return start;
    }

    ExperimentalFactor simplest = factors.get(0);

    if (simplest == null) {
        // we're done.
        return start;
    }

    /*
     * Order this chunk by the selected factor
     */
    Map<FactorValue, List<BioMaterial>> fv2bms = ExpressionDataMatrixColumnSort.buildFv2BmMap(start);

    List<BioMaterial> ordered = ExpressionDataMatrixColumnSort.orderByFactor(simplest, fv2bms, start);

    // Abort ordering, so we are ordered only by the first continuous factor.
    if (ExperimentalDesignUtils.isContinuous(simplest)) {
        assert ordered != null;
        return ordered;
    }

    LinkedList<ExperimentalFactor> factorsStillToDo = new LinkedList<>();
    factorsStillToDo.addAll(factors);
    factorsStillToDo.remove(simplest);

    if (factorsStillToDo.size() == 0) {
        /*
         * No more ordering is necessary.
         */
        return ordered;
    }

    ExpressionDataMatrixColumnSort.log.debug("Factors: " + factors.size());

    /*
     * Recurse in and order each chunk. First split it up, but retaining the order we just made.
     */
    LinkedHashMap<FactorValue, List<BioMaterial>> chunks = ExpressionDataMatrixColumnSort
            .chunkOnFactor(simplest, ordered);

    if (chunks == null) {
        // this means we should bail, gracefully.
        return start;
    }

    /*
     * Process each chunk.
     */
    List<BioMaterial> result = new ArrayList<>();
    for (FactorValue fv : chunks.keySet()) {
        List<BioMaterial> chunk = chunks.get(fv);

        if (chunk.size() < 2) {
            result.addAll(chunk);
        } else {
            List<BioMaterial> orderedChunk = ExpressionDataMatrixColumnSort
                    .orderBiomaterialsBySortedFactors(chunk, factorsStillToDo);
            if (orderedChunk != null) {
                result.addAll(orderedChunk);
            }
        }
    }

    return result;
}
From source file:org.obiba.mica.core.service.SchemaFormContentFileService.java
private void cleanFileJsonArrays(JSONArray... arrays) {
    if (arrays != null) {
        Arrays.stream(arrays).forEach(s -> s.forEach(a -> {
            if (a instanceof LinkedHashMap) {
                LinkedHashMap<String, String> jsonMap = (LinkedHashMap<String, String>) a;
                jsonMap.keySet().stream().filter(k -> k.contains("$")).collect(Collectors.toList())
                        .forEach(jsonMap::remove);
            }
        }));
    }
}
From source file:com.arkea.jenkins.openstack.heat.orchestration.template.Bundle.java
/**
 * Convert an object value from the yaml to string
 *
 * @param value
 *            the object value from the yaml
 * @return the object in string format
 */
@SuppressWarnings("unchecked")
private String convertValue(Object value) {
    if (value instanceof java.util.LinkedHashMap) {
        StringBuilder rtn = new StringBuilder("{");
        java.util.LinkedHashMap<String, String> data = (java.util.LinkedHashMap<String, String>) value;
        for (String key : data.keySet()) {
            rtn.append(key).append(":").append(data.get(key)).append(",");
        }
        rtn.deleteCharAt(rtn.length() - 1);
        rtn.append("}");
        return rtn.toString();
    }
    return String.valueOf(value);
}