List of usage examples for java.util.LinkedHashMap.entrySet()
public Set<Map.Entry<K, V>> entrySet()
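The entry set of a LinkedHashMap iterates in a predictable order: insertion order, or access order if the map was constructed that way. A minimal, self-contained sketch (keys and values are illustrative):

import java.util.LinkedHashMap;
import java.util.Map;

public class EntrySetDemo {
    public static void main(String[] args) {
        Map<String, Integer> map = new LinkedHashMap<>();
        map.put("first", 1);
        map.put("second", 2);
        map.put("third", 3);

        // Unlike HashMap, LinkedHashMap's entrySet() iterates in insertion order.
        for (Map.Entry<String, Integer> entry : map.entrySet()) {
            System.out.println(entry.getKey() + " = " + entry.getValue());
        }
        // Prints: first = 1, second = 2, third = 3
    }
}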
From source file:org.jboss.pressgang.ccms.contentspec.utils.CSTransformer.java
/**
 * Transforms a content spec datasource entity into a generic content spec object.
 *
 * @param spec The content spec entity to be transformed.
 * @param providerFactory
 * @return The generic Content Spec object that was transformed from the entity.
 */
public static ContentSpec transform(final ContentSpecWrapper spec, final DataProviderFactory providerFactory,
        final boolean includeChecksum) {
    // Local variables that are used to map transformed content.
    Map<Integer, Node> nodes = new HashMap<Integer, Node>();
    Map<String, SpecTopic> topicTargets = new HashMap<String, SpecTopic>();
    List<CSNodeWrapper> relationshipFromNodes = new ArrayList<CSNodeWrapper>();
    List<Process> processes = new ArrayList<Process>();

    // Start the transformation.
    final ContentSpec contentSpec = new ContentSpec();
    contentSpec.setId(spec.getId());
    transformGlobalOptions(spec, contentSpec);

    // Add all the levels/topics.
    boolean localeFound = false;
    if (spec.getChildren() != null) {
        final List<CSNodeWrapper> childNodes = spec.getChildren().getItems();
        final HashMap<CSNodeWrapper, Node> levelNodes = new HashMap<CSNodeWrapper, Node>();
        for (final CSNodeWrapper childNode : childNodes) {
            if (childNode.getNodeType() == CommonConstants.CS_NODE_TOPIC) {
                final SpecTopic topic = transformSpecTopic(childNode, nodes, topicTargets, relationshipFromNodes);
                levelNodes.put(childNode, topic);
            } else if (childNode.getNodeType() == CommonConstants.CS_NODE_COMMENT) {
                final Comment comment = transformComment(childNode);
                levelNodes.put(childNode, comment);
            } else if (childNode.getNodeType() == CommonConstants.CS_NODE_COMMON_CONTENT) {
                final CommonContent commonContent = transformCommonContent(childNode);
                levelNodes.put(childNode, commonContent);
            } else if (childNode.getNodeType() == CommonConstants.CS_NODE_META_DATA
                    || childNode.getNodeType() == CommonConstants.CS_NODE_META_DATA_TOPIC) {
                if (!IGNORE_META_DATA.contains(childNode.getTitle().toLowerCase())) {
                    final KeyValueNode<?> metaDataNode = transformMetaData(childNode, nodes, topicTargets,
                            relationshipFromNodes);
                    levelNodes.put(childNode, metaDataNode);
                }
                if (CommonConstants.CS_LOCALE_TITLE.equalsIgnoreCase(childNode.getTitle())) {
                    localeFound = true;
                }
            } else {
                final Level level = transformLevel(childNode, nodes, topicTargets, relationshipFromNodes, processes);
                levelNodes.put(childNode, level);
            }
        }

        // Sort the level nodes so that they are in the right order based on next/prev values.
        final LinkedHashMap<CSNodeWrapper, Node> sortedMap = CSNodeSorter.sortMap(levelNodes);

        // Add the child nodes to the content spec now that they are in the right order.
        boolean addToBaseLevel = false;
        final Iterator<Map.Entry<CSNodeWrapper, Node>> iter = sortedMap.entrySet().iterator();
        while (iter.hasNext()) {
            final Map.Entry<CSNodeWrapper, Node> entry = iter.next();

            // If a level or spec topic is found then start adding to the base level instead of the content spec.
            if ((entry.getValue() instanceof Level || entry.getValue() instanceof SpecTopic) && !addToBaseLevel) {
                addToBaseLevel = true;
                // Add the locale if it wasn't specified.
                if (!localeFound) {
                    contentSpec.setLocale(spec.getLocale() == null ? null : spec.getLocale().getValue());
                }
                // Add a space between the base metadata and optional metadata.
                contentSpec.appendChild(new TextNode("\n"));
            }

            // Add the node to the right component.
            if (addToBaseLevel) {
                contentSpec.getBaseLevel().appendChild(entry.getValue());
                // Add a new line to separate chapters/parts.
                if (isNodeASeparatorLevel(entry.getValue()) && iter.hasNext()) {
                    contentSpec.getBaseLevel().appendChild(new TextNode("\n"));
                }
            } else {
                contentSpec.appendChild(entry.getValue());
            }
        }
    }

    // Apply the relationships to the nodes.
    applyRelationships(contentSpec, nodes, topicTargets, relationshipFromNodes, processes, providerFactory);

    // Set the line numbers.
    setLineNumbers(contentSpec, includeChecksum ? 2 : 1);

    return contentSpec;
}
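The loop above walks the sorted LinkedHashMap through an explicit entrySet() iterator rather than a for-each, so it can call iter.hasNext() mid-loop and avoid emitting a separator after the last entry. A stripped-down sketch of that pattern (map contents are illustrative):

import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;

public class SeparatorDemo {
    public static void main(String[] args) {
        Map<String, String> chapters = new LinkedHashMap<>();
        chapters.put("ch1", "Introduction");
        chapters.put("ch2", "Usage");
        chapters.put("ch3", "Appendix");

        StringBuilder out = new StringBuilder();
        Iterator<Map.Entry<String, String>> iter = chapters.entrySet().iterator();
        while (iter.hasNext()) {
            Map.Entry<String, String> entry = iter.next();
            out.append(entry.getValue());
            // hasNext() tells us whether a separator is still needed,
            // so no trailing separator appears after the final entry.
            if (iter.hasNext()) {
                out.append("\n\n");
            }
        }
        System.out.println(out);
    }
}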
From source file:org.gumtree.vis.mask.ChartMaskingUtilities.java
public static void drawText(Graphics2D g2, Rectangle2D imageArea,
        LinkedHashMap<Rectangle2D, String> textContentMap, JFreeChart chart) {
    if (textContentMap == null || textContentMap.size() == 0) {
        return;
    }
//  for (Entry<Rectangle2D, String> textEntry : textMap.entrySet()) {
//      Rectangle2D rect = textEntry.getKey();
//      String text = textEntry.getValue();
//      drawText(g2, imageArea, rect, text, chart);
//  }
    Color oldColor = g2.getColor();
    g2.setColor(Color.BLACK);
    for (Entry<Rectangle2D, String> entry : textContentMap.entrySet()) {
        Rectangle2D rect = entry.getKey();
        Point2D screenPoint = ChartMaskingUtilities.translateChartPoint(
                new Point2D.Double(rect.getX(), rect.getY()), imageArea, chart);
        String text = entry.getValue();
        if (text == null) {
            continue;
        }
        String[] lines = text.split("\n");
        g2.setColor(Color.BLACK);
        for (int i = 0; i < lines.length; i++) {
            g2.drawString(lines[i], (int) screenPoint.getX() + 3, (int) screenPoint.getY() - 3 + i * 15);
        }
//      if (rect == selectedTextWrapper) {
//          FontMetrics fm = g2.getFontMetrics();
//          int maxWidth = 0;
//          int maxHeight = 0;
//          for (int i = 0; i < lines.length; i++) {
//              int lineWidth = fm.stringWidth(lines[i]);
//              if (lineWidth > maxWidth) {
//                  maxWidth = lineWidth;
//              }
//          }
//          maxHeight = 15 * lines.length;
//          if (maxWidth < 100) {
//              maxWidth = 100;
//          }
//          Rectangle2D inputBox = new Rectangle2D.Double(screenPoint.getX(), screenPoint.getY() - 15, maxWidth + 8, maxHeight);
//          Color fillColor = new Color(250, 250, 50, 30);
//          g2.setPaint(fillColor);
//          g2.fill(inputBox);
//          g2.setColor(Color.ORANGE);
//          g2.drawRect((int) screenPoint.getX(), (int) screenPoint.getY() - 15, maxWidth + 8, maxHeight);
//      }
//      g2.drawString(text == null ? "" : text, (int) screenPoint.getX() + 3, (int) screenPoint.getY() - 3);
    }
    g2.setColor(oldColor);
}
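The method above uses mutable Rectangle2D objects as keys, which is safe here because the map is only iterated. A minimal sketch of the hazard that pattern skirts (values are illustrative): Rectangle2D's hashCode() depends on its coordinates, so mutating a key after insertion breaks lookups, while entrySet() iteration still reaches every entry.

import java.awt.geom.Rectangle2D;
import java.util.LinkedHashMap;
import java.util.Map;

public class MutableKeyDemo {
    public static void main(String[] args) {
        Map<Rectangle2D, String> textMap = new LinkedHashMap<>();
        Rectangle2D.Double box = new Rectangle2D.Double(0, 0, 10, 10);
        textMap.put(box, "label");

        // Mutating the key changes its hash, so the lookup usually misses.
        box.x = 50;
        System.out.println(textMap.get(box));        // usually null

        // Iteration does not re-hash keys, so draw loops like the one above stay safe.
        for (Map.Entry<Rectangle2D, String> entry : textMap.entrySet()) {
            System.out.println(entry.getValue());    // prints "label"
        }
    }
}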
From source file:org.apache.jackrabbit.oak.plugins.segment.file.TarReader.java
/**
 * Regenerates a tar file from a list of entries.
 *
 * @param entries
 * @param file
 * @throws IOException
 */
private static void generateTarFile(LinkedHashMap<UUID, byte[]> entries, File file) throws IOException {
    log.info("Regenerating tar file " + file);
    TarWriter writer = new TarWriter(file);
    for (Map.Entry<UUID, byte[]> entry : entries.entrySet()) {
        UUID uuid = entry.getKey();
        byte[] data = entry.getValue();
        writer.writeEntry(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits(), data, 0, data.length);
    }
    writer.close();
}
From source file:gov.llnl.lc.smt.command.route.SmtRoute.java
public static String getPortSummaryString(RT_Node node, RT_Table table, OSM_Fabric fabric) {
    // Report the number of ports with routes, and then each port summary.
    StringBuffer buff = new StringBuffer();
    LinkedHashMap<String, RT_Port> portRouteMap = RT_Node.sortPortRouteTable(node.getPortRouteMap(), true);
    // Guard against null before dereferencing (the original called size() before its null check).
    if (portRouteMap != null) {
        buff.append("(" + portRouteMap.size() + ") Ports with routes [total routes=" + node.getNumRoutes() + "]\n");
        for (Map.Entry<String, RT_Port> entry : portRouteMap.entrySet()) {
            RT_Port rp = entry.getValue();
            int portNum = rp.getPortNumber();
            IB_Guid g = node.getGuid();
            buff.append(getPortRouteLine(fabric, table, g, portNum));
        }
    }
    return buff.toString();
}
From source file:org.bimserver.charting.SupportFunctions.java
public static ArrayList<LinkedHashMap<String, Object>> getIfcMaterialsByNameWithTreeStructure(
        String structureKeyword, IfcModelInterface model, Chart chart, MutableInt subChartCount) {
    // Derive the column name.
    String leafColumnName = structureKeyword;
    // Update the chart configuration.
    chart.setDimensionLookupKey(structureKeyword, leafColumnName);
    chart.setDimensionLookupKey("date", "date");
    chart.setDimensionLookupKey("size", "size");
    // Prepare to iterate the relationships.
    LinkedHashMap<String, ArrayList<Double>> materialNameWithSizes = new LinkedHashMap<>();
    // Iterate only the relationships.
    for (IfcRelAssociatesMaterial ifcRelAssociatesMaterial : model
            .getAllWithSubTypes(IfcRelAssociatesMaterial.class)) {
        // IfcMaterialSelect: IfcMaterial, IfcMaterialList, IfcMaterialLayerSetUsage, IfcMaterialLayerSet, IfcMaterialLayer.
        IfcMaterialSelect materialLike = ifcRelAssociatesMaterial.getRelatingMaterial();
        // If there was a material-like object, sum the names of what it decomposes into across X individually.
        if (materialLike != null) {
            // First, get size data from IFC products.
            ArrayList<Double> sizes = new ArrayList<>();
            // Iterate objects.
            EList<IfcRoot> ifcRoots = ifcRelAssociatesMaterial.getRelatedObjects();
            for (IfcRoot ifcRoot : ifcRoots) {
                Double size = 0.0;
                if (ifcRoot instanceof IfcObjectDefinition) {
                    IfcObjectDefinition ifcObjectDefinition = (IfcObjectDefinition) ifcRoot;
                    if (ifcObjectDefinition instanceof IfcObject) {
                        IfcObject ifcObject = (IfcObject) ifcObjectDefinition;
                        if (ifcObject instanceof IfcProduct) {
                            IfcProduct ifcProduct = (IfcProduct) ifcObject;
                            Double volume = getRoughVolumeEstimateFromIfcProduct(ifcProduct);
                            size = volume;
                        }
                    }
                }
                if (size != null && size > 0)
                    sizes.add(size);
            }
            // Get material names with percentages, like: Material Name -> 0.5
            LinkedHashMap<String, Double> materials = getNameOfMaterialsFromMaterialLikeWithPercents(
                    materialLike, false);
            // Second, iterate materials, realizing the percentage of the sizes onto the collection of sizes for each material name.
            for (Entry<String, Double> materialEntry : materials.entrySet()) {
                String materialName = materialEntry.getKey();
                Double percent = materialEntry.getValue();
                // Use material name if available. Otherwise, use OID of top-level material-like object.
                String name = (materialName != null) ? materialName : String.format("%d", materialLike.getOid());
                // Add entry if it doesn't exist.
                if (!materialNameWithSizes.containsKey(name))
                    materialNameWithSizes.put(name, new ArrayList<Double>());
                // Get existing size data.
                ArrayList<Double> theseSizes = materialNameWithSizes.get(name);
                if (percent != null && percent > 0) {
                    // If no alteration is required, clone into the stack.
                    if (percent == 1.0)
                        theseSizes.addAll(sizes);
                    // Otherwise, realize the percent of the size.
                    else
                        for (Double size : sizes)
                            theseSizes.add(size * percent);
                }
            }
        }
    }
    subChartCount.setValue(materialNameWithSizes.size());
    ArrayList<LinkedHashMap<String, Object>> rawData = new ArrayList<>();
    for (Entry<String, ArrayList<Double>> entry : materialNameWithSizes.entrySet()) {
        String name = entry.getKey();
        // Get existing size data.
        ArrayList<Double> sizes = materialNameWithSizes.get(name);
        // Sort, value ascending.
        Collections.sort(sizes, sortSmallerValuesToFront);
        sizes.add(0, 0.0);
        if (sizes.size() == 1)
            sizes.add(0, 0.0);
        // Count including empty first entry.
        double count = Math.max(1, sizes.size() - 1);
        double step = 10000.0 / count;
        double runningSize = 0.0;
        // Add sum of zero at entry zero.
        int i = 0;
        // Iterate objects, summing them across 0 to 10000 (an arbitrary range, a way to relate to other sums along X).
        for (Double size : sizes) {
            double someMeasurement = (size != null) ? size : 0.0;
            runningSize += someMeasurement;
            // Prepare to store this raw data entry.
            LinkedHashMap<String, Object> dataEntry = new LinkedHashMap<>();
            // Name the group.
            dataEntry.put(leafColumnName, name);
            dataEntry.put("date", i * step);
            dataEntry.put("size", runningSize);
            // Push the entry into the data pool.
            rawData.add(dataEntry);
            i += 1;
        }
    }
    // Send it all back.
    return rawData;
}
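The containsKey/put sequence above predates Java 8; a minimal sketch of the same grouping idiom using computeIfAbsent (material names and sizes are illustrative). The LinkedHashMap keeps groups in the order their keys first appeared:

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class GroupingDemo {
    public static void main(String[] args) {
        Map<String, List<Double>> sizesByMaterial = new LinkedHashMap<>();
        String[] materials = { "steel", "concrete", "steel", "glass" };
        double[] sizes = { 1.5, 20.0, 2.5, 0.75 };

        for (int i = 0; i < materials.length; i++) {
            // computeIfAbsent creates the list on first sight of a key.
            sizesByMaterial.computeIfAbsent(materials[i], k -> new ArrayList<>()).add(sizes[i]);
        }

        for (Map.Entry<String, List<Double>> entry : sizesByMaterial.entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }
        // Prints: steel -> [1.5, 2.5], concrete -> [20.0], glass -> [0.75]
    }
}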
From source file:com.stratio.crossdata.sh.utils.ConsoleUtils.java
/**
 * In order to print the result, this method calculates the maximum width of every column.
 *
 * @param resultSet structure representing the result of an execution.
 * @return Map<String, Integer> where the key is the name of the column and Integer is the maximum width.
 */
private static Map<String, Integer> calculateColWidths(ResultSet resultSet) {
    LinkedHashMap<String, Integer> colWidths = new LinkedHashMap<>();

    // Get column names or aliases width
    for (ColumnMetadata columnMetadata : resultSet.getColumnMetadata()) {
        colWidths.put(columnMetadata.getName().getColumnNameToShow(),
                columnMetadata.getName().getColumnNameToShow().length());
    }

    // Find widest cell content of every column
    for (Row row : resultSet) {
        int pos = 0;
        for (String key : row.getCells().keySet()) {
            String cellContent = String.valueOf(row.getCell(key).getValue());
            int currentWidth;
            if (colWidths.containsKey(key)) {
                currentWidth = colWidths.get(key);
            } else {
                // Fall back to the column at the same position: LinkedHashMap preserves
                // insertion order, so advancing the entrySet() iterator pos times
                // lands on the pos-th column.
                Iterator<Map.Entry<String, Integer>> iter = colWidths.entrySet().iterator();
                int limit = 0;
                while (limit < pos) {
                    iter.next();
                    limit++;
                }
                currentWidth = iter.next().getKey().length();
            }
            if (cellContent.length() > currentWidth) {
                colWidths.put(key, cellContent.length());
            }
            pos++;
        }
    }
    return colWidths;
}
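The fallback branch above is positional access, which is only meaningful because LinkedHashMap iterates in insertion order. A minimal sketch of that idiom in isolation (column names are illustrative):

import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;

public class PositionalAccessDemo {
    // Returns the entry at position pos by advancing the entrySet() iterator.
    // Valid for LinkedHashMap, whose iteration order is the insertion order;
    // with a plain HashMap the "pos-th" entry would be meaningless.
    static <K, V> Map.Entry<K, V> entryAt(LinkedHashMap<K, V> map, int pos) {
        Iterator<Map.Entry<K, V>> iter = map.entrySet().iterator();
        for (int i = 0; i < pos; i++) {
            iter.next();
        }
        return iter.next();
    }

    public static void main(String[] args) {
        LinkedHashMap<String, Integer> colWidths = new LinkedHashMap<>();
        colWidths.put("id", 2);
        colWidths.put("name", 4);
        colWidths.put("email", 5);
        System.out.println(entryAt(colWidths, 1).getKey()); // prints "name"
    }
}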
From source file:com.google.gwt.emultest.java.util.LinkedHashMapTest.java
/**
 * Check the state of a newly constructed, empty LinkedHashMap.
 *
 * @param hashMap
 */
private static void checkEmptyLinkedHashMapAssumptions(LinkedHashMap<?, ?> hashMap) {
    assertNotNull(hashMap);
    assertTrue(hashMap.isEmpty());

    assertNotNull(hashMap.values());
    assertTrue(hashMap.values().isEmpty());
    assertTrue(hashMap.values().size() == 0);

    assertNotNull(hashMap.keySet());
    assertTrue(hashMap.keySet().isEmpty());
    assertTrue(hashMap.keySet().size() == 0);

    assertNotNull(hashMap.entrySet());
    assertTrue(hashMap.entrySet().isEmpty());
    assertTrue(hashMap.entrySet().size() == 0);

    assertNotNull(hashMap.entrySet().iterator());
    assertFalse(hashMap.entrySet().iterator().hasNext());
}
From source file:org.apache.jackrabbit.oak.segment.file.TarReader.java
/**
 * Regenerates a tar file from a list of entries.
 *
 * @param entries
 * @param file
 * @throws IOException
 */
private static void generateTarFile(LinkedHashMap<UUID, byte[]> entries, File file) throws IOException {
    log.info("Regenerating tar file {}", file);
    TarWriter writer = new TarWriter(file);
    for (Map.Entry<UUID, byte[]> entry : entries.entrySet()) {
        UUID uuid = entry.getKey();
        byte[] data = entry.getValue();
        int generation = getGcGeneration(wrap(data), uuid);
        writer.writeEntry(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits(), data, 0, data.length,
                generation);
    }
    writer.close();
}
From source file:org.jahia.loganalyzer.writers.internal.JSONLogEntryWriter.java
public void write(LogEntry logEntry) {
    try {
        jsonGenerator.writeStartObject();
        LinkedHashMap<String, Object> values = logEntry.getValues();
        for (Map.Entry<String, Object> valueEntry : values.entrySet()) {
            jsonGenerator.writeObjectField(valueEntry.getKey(), valueEntry.getValue());
        }
        /*
        String[] fieldValues = logEntry.toStringArray(dateFormat);
        String[] fieldNames = logEntry.getColumnKeys();
        for (int i = 0; i < fieldNames.length; i++) {
            jsonGenerator.writeStringField(fieldNames[i], fieldValues[i]);
        }
        */
        jsonGenerator.writeEndObject();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
From source file:com.evolveum.midpoint.wf.impl.processors.primary.policy.ProcessSpecifications.java
static ProcessSpecifications createFromRules(List<EvaluatedPolicyRule> rules, PrismContext prismContext)
        throws ObjectNotFoundException {
    // Step 1: plain list of approval actions -> map: process-spec -> list of related actions/rules ("collected")
    LinkedHashMap<WfProcessSpecificationType, List<Pair<ApprovalPolicyActionType, EvaluatedPolicyRule>>> collectedSpecifications = new LinkedHashMap<>();
    for (EvaluatedPolicyRule rule : rules) {
        for (ApprovalPolicyActionType approvalAction : rule.getEnabledActions(ApprovalPolicyActionType.class)) {
            WfProcessSpecificationType spec = approvalAction.getProcessSpecification();
            collectedSpecifications.computeIfAbsent(spec, s -> new ArrayList<>())
                    .add(new ImmutablePair<>(approvalAction, rule));
        }
    }
    // Step 2: resolve references
    for (WfProcessSpecificationType spec : new HashSet<>(collectedSpecifications.keySet())) { // cloned to avoid concurrent modification exception
        if (spec != null && spec.getRef() != null) {
            List<Map.Entry<WfProcessSpecificationType, List<Pair<ApprovalPolicyActionType, EvaluatedPolicyRule>>>> matching = collectedSpecifications
                    .entrySet().stream()
                    .filter(e -> e.getKey() != null && spec.getRef().equals(e.getKey().getName()))
                    .collect(Collectors.toList());
            if (matching.isEmpty()) {
                throw new IllegalStateException("Process specification named '" + spec.getRef()
                        + "' referenced from an approval action couldn't be found");
            } else if (matching.size() > 1) {
                throw new IllegalStateException("More than one process specification named '" + spec.getRef()
                        + "' referenced from an approval action: " + matching);
            } else {
                // move all actions/rules to the referenced process specification
                List<Pair<ApprovalPolicyActionType, EvaluatedPolicyRule>> referencedSpecActions = matching
                        .get(0).getValue();
                referencedSpecActions.addAll(collectedSpecifications.get(spec));
                collectedSpecifications.remove(spec);
            }
        }
    }
    Map<String, Pair<ApprovalPolicyActionType, EvaluatedPolicyRule>> actionsMap = null;
    // Step 3: include other actions
    for (Map.Entry<WfProcessSpecificationType, List<Pair<ApprovalPolicyActionType, EvaluatedPolicyRule>>> processSpecificationEntry : collectedSpecifications
            .entrySet()) {
        WfProcessSpecificationType spec = processSpecificationEntry.getKey();
        if (spec == null || spec.getIncludeAction().isEmpty() && spec.getIncludeActionIfPresent().isEmpty()) {
            continue;
        }
        if (actionsMap == null) {
            actionsMap = createActionsMap(collectedSpecifications.values());
        }
        for (String actionToInclude : spec.getIncludeAction()) {
            processActionToInclude(actionToInclude, actionsMap, processSpecificationEntry, true);
        }
        for (String actionToInclude : spec.getIncludeActionIfPresent()) {
            processActionToInclude(actionToInclude, actionsMap, processSpecificationEntry, false);
        }
    }
    // Step 4: sorts process specifications and wraps into ProcessSpecification objects
    ProcessSpecifications rv = new ProcessSpecifications(prismContext);
    collectedSpecifications.entrySet().stream().sorted((ps1, ps2) -> {
        WfProcessSpecificationType key1 = ps1.getKey();
        WfProcessSpecificationType key2 = ps2.getKey();
        if (key1 == null) {
            return key2 == null ? 0 : 1; // non-empty (key2) records first
        } else if (key2 == null) {
            return -1; // non-empty (key1) record first
        }
        int order1 = defaultIfNull(key1.getOrder(), Integer.MAX_VALUE);
        int order2 = defaultIfNull(key2.getOrder(), Integer.MAX_VALUE);
        return Integer.compare(order1, order2);
    }).forEach(e -> rv.specifications.add(rv.new ProcessSpecification(e)));
    return rv;
}
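Step 4 above sorts the entrySet() with a stream; when the sorted result needs to stay a Map, the usual companion idiom is to collect back into a LinkedHashMap, the only standard Map that preserves the sorted encounter order. A minimal sketch, with illustrative keys and ordering:

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Collectors;

public class SortEntriesDemo {
    public static void main(String[] args) {
        Map<String, Integer> specOrders = new LinkedHashMap<>();
        specOrders.put("review", 2);
        specOrders.put("approve", 1);
        specOrders.put("notify", 3);

        // Sort entries by value, then collect into a LinkedHashMap so the
        // sorted order survives the collection step.
        Map<String, Integer> sorted = specOrders.entrySet().stream()
                .sorted(Map.Entry.comparingByValue())
                .collect(Collectors.toMap(
                        Map.Entry::getKey,
                        Map.Entry::getValue,
                        (a, b) -> a,           // merge function (no duplicate keys here)
                        LinkedHashMap::new));  // preserve sorted order

        System.out.println(sorted); // {approve=1, review=2, notify=3}
    }
}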