List of usage examples for java.util.LinkedHashMap.entrySet()
public Set<Map.Entry<K, V>> entrySet()
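entrySet() returns a Set view of the mappings in the map; for a LinkedHashMap the view iterates in the map's encounter order (insertion order, or access order if the map was constructed with accessOrder = true). Before the project examples below, here is a minimal standalone sketch of typical iteration; the keys and values are illustrative only:

import java.util.LinkedHashMap;
import java.util.Map;

public class EntrySetExample {
    public static void main(String[] args) {
        Map<String, Integer> counts = new LinkedHashMap<>();
        counts.put("alpha", 1);
        counts.put("beta", 2);
        counts.put("gamma", 3);

        // For a LinkedHashMap, entrySet() iterates in insertion order:
        // alpha=1, beta=2, gamma=3
        for (Map.Entry<String, Integer> entry : counts.entrySet()) {
            System.out.println(entry.getKey() + "=" + entry.getValue());
        }
    }
}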
From source file: com.google.gwt.emultest.java.util.LinkedHashMapTest.java

public void testLRU() {
    LinkedHashMap<String, String> m = new LinkedHashMap<String, String>(10, .5f, true);
    m.put("A", "A");
    m.put("B", "B");
    m.put("C", "C");
    m.put("D", "D");
    Iterator<Entry<String, String>> entry = m.entrySet().iterator();
    assertEquals("A", entry.next().getValue());
    assertEquals("B", entry.next().getValue());
    assertEquals("C", entry.next().getValue());
    assertEquals("D", entry.next().getValue());
    m.get("B");
    m.get("D");
    entry = m.entrySet().iterator();
    assertEquals("A", entry.next().getValue());
    assertEquals("C", entry.next().getValue());
    assertEquals("B", entry.next().getValue());
    assertEquals("D", entry.next().getValue());
}
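The third constructor argument (accessOrder = true) puts the map in access order, so each get() moves the accessed entry to the end of the order seen through entrySet(); that is what the second round of assertions checks (A, C, B, D after touching B and D). A full LRU cache additionally overrides removeEldestEntry. A minimal sketch under that assumption; the class name and capacity are illustrative, not part of the test above:

import java.util.LinkedHashMap;
import java.util.Map;

// Access-order LinkedHashMap that evicts the least recently used entry
// once the map grows beyond MAX_ENTRIES.
class LruCache<K, V> extends LinkedHashMap<K, V> {
    private static final int MAX_ENTRIES = 100; // illustrative capacity

    LruCache() {
        super(16, 0.75f, true); // accessOrder = true
    }

    @Override
    protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
        return size() > MAX_ENTRIES;
    }
}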
From source file: com.opengamma.financial.analytics.ircurve.calcconfig.MultiCurveCalculationConfig.java

public MultiCurveCalculationConfig(final String calculationConfigName, final String[] yieldCurveNames,
        final ComputationTargetSpecification target, final String calculationMethod,
        final LinkedHashMap<String, CurveInstrumentConfig> curveExposuresForInstruments,
        final LinkedHashMap<String, String[]> exogenousConfigAndCurveNames) {
    ArgumentChecker.notNull(calculationConfigName, "calculation configuration name");
    ArgumentChecker.notNull(yieldCurveNames, "yield curve names");
    ArgumentChecker.notNull(target, "target");
    ArgumentChecker.notNull(calculationMethod, "calculation methods");
    ArgumentChecker.notEmpty(yieldCurveNames, "yield curve names");
    ArgumentChecker.noNulls(yieldCurveNames, "yield curve names");
    if (curveExposuresForInstruments != null) {
        ArgumentChecker.notEmpty(curveExposuresForInstruments, "curve exposures for instruments");
    }
    if (exogenousConfigAndCurveNames != null) {
        ArgumentChecker.notEmpty(exogenousConfigAndCurveNames, "exogenous config names");
        ArgumentChecker.noNulls(exogenousConfigAndCurveNames.entrySet(), "exogenous config names");
    }
    _calculationConfigName = calculationConfigName;
    _yieldCurveNames = yieldCurveNames;
    _target = target;
    _calculationMethod = calculationMethod;
    _curveExposuresForInstruments = curveExposuresForInstruments;
    _exogenousConfigAndCurveNames = exogenousConfigAndCurveNames;
}
From source file: com.cburch.logisim.gui.main.SelectionAttributes.java

private void updateList(boolean ignoreIfSelectionSame) {
    Selection sel = selection;
    Set<Component> oldSel = selected;
    Set<Component> newSel;
    if (sel == null)
        newSel = Collections.emptySet();
    else
        newSel = createSet(sel.getComponents());
    if (haveSameElements(newSel, oldSel)) {
        if (ignoreIfSelectionSame)
            return;
        newSel = oldSel;
    } else {
        for (Component o : oldSel) {
            if (!newSel.contains(o)) {
                o.getAttributeSet().removeAttributeListener(listener);
            }
        }
        for (Component o : newSel) {
            if (!oldSel.contains(o)) {
                o.getAttributeSet().addAttributeListener(listener);
            }
        }
    }

    LinkedHashMap<Attribute<Object>, Object> attrMap = computeAttributes(newSel);
    boolean same = isSame(attrMap, this.attrs, this.values);

    if (same) {
        if (newSel != oldSel)
            this.selected = newSel;
    } else {
        Attribute<?>[] oldAttrs = this.attrs;
        Object[] oldValues = this.values;
        Attribute<?>[] newAttrs = new Attribute[attrMap.size()];
        Object[] newValues = new Object[newAttrs.length];
        boolean[] newReadOnly = new boolean[newAttrs.length];
        int i = -1;
        for (Map.Entry<Attribute<Object>, Object> entry : attrMap.entrySet()) {
            i++;
            newAttrs[i] = entry.getKey();
            newValues[i] = entry.getValue();
            newReadOnly[i] = computeReadOnly(newSel, newAttrs[i]);
        }
        if (newSel != oldSel)
            this.selected = newSel;
        this.attrs = newAttrs;
        this.attrsView = UnmodifiableList.decorate(Arrays.asList(newAttrs));
        this.values = newValues;
        this.readOnly = newReadOnly;

        boolean listSame = oldAttrs != null && oldAttrs.length == newAttrs.length;
        if (listSame) {
            for (i = 0; i < oldAttrs.length; i++) {
                if (!oldAttrs[i].equals(newAttrs[i])) {
                    listSame = false;
                    break;
                }
            }
        }

        if (listSame) {
            for (i = 0; i < oldValues.length; i++) {
                Object oldVal = oldValues[i];
                Object newVal = newValues[i];
                boolean sameVals = oldVal == null ? newVal == null : oldVal.equals(newVal);
                if (!sameVals) {
                    @SuppressWarnings("unchecked")
                    Attribute<Object> attr = (Attribute<Object>) oldAttrs[i];
                    fireAttributeValueChanged(attr, newVal);
                }
            }
        } else {
            fireAttributeListChanged();
        }
    }
}
From source file: com.google.gwt.emultest.java.util.LinkedHashMapTest.java

@SuppressWarnings("unchecked")
public void testClone() {
    LinkedHashMap<String, String> srcMap = new LinkedHashMap<String, String>();
    checkEmptyLinkedHashMapAssumptions(srcMap);

    // Check empty clone behavior
    LinkedHashMap<String, String> dstMap = (LinkedHashMap<String, String>) srcMap.clone();
    assertNotNull(dstMap);
    assertEquals(dstMap.size(), srcMap.size());
    assertEquals(dstMap.keySet().toArray(), srcMap.keySet().toArray());
    assertEquals(dstMap.entrySet().toArray(), srcMap.entrySet().toArray());

    // Check non-empty clone behavior
    srcMap.put(KEY_1, VALUE_1);
    srcMap.put(KEY_2, VALUE_2);
    srcMap.put(KEY_3, VALUE_3);
    srcMap.put(KEY_4, VALUE_4);
    dstMap = (LinkedHashMap<String, String>) srcMap.clone();
    assertNotNull(dstMap);
    assertEquals(dstMap.size(), srcMap.size());
    assertEquals(dstMap.keySet().toArray(), srcMap.keySet().toArray());
    assertEquals(dstMap.entrySet().toArray(), srcMap.entrySet().toArray());
}
From source file: org.slage.SlageGame.java

/**
 * Add a list of objects to the draw list... This method is provided as a
 * convenience and optimization to add all objects in a Room to the list.
 *
 * @param listDraw objects to draw
 */
public void addToDrawList(java.util.LinkedHashMap<String, SlageObject> listDraw) {
    Set<Map.Entry<String, SlageObject>> set = listDraw.entrySet();
    for (Map.Entry<String, SlageObject> me : set)
        listToDraw.add(me.getValue());

    bDrawListNeedsSort = true;
}
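Since the loop only uses the values, the entrySet() view is not strictly needed here; the values() view of a LinkedHashMap visits the same objects in the same insertion order. A sketch of the equivalent loop, assuming the same listToDraw and bDrawListNeedsSort fields as the method above:

// Equivalent iteration over the values view; LinkedHashMap.values()
// preserves insertion order, so the draw order is unchanged.
for (SlageObject obj : listDraw.values()) {
    listToDraw.add(obj);
}
bDrawListNeedsSort = true;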
From source file: citation_prediction.CitationCore.java

/**
 * This function will print a list containing the calculated values for the Newton-Raphson method in
 * a formatted way that is easy to read.
 *
 * @param l The list containing the Newton-Raphson calculated values.
 */
public void printList(LinkedHashMap<String, Double> l) {
    for (Entry<String, Double> e : l.entrySet()) {
        System.out.println(e.getKey() + "=" + e.getValue());
    }
}
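On Java 8 and later the same traversal can be written with Map.forEach, which also follows the LinkedHashMap insertion order; a minimal equivalent sketch:

// Equivalent to the entrySet() loop above; forEach visits entries in insertion order.
l.forEach((key, value) -> System.out.println(key + "=" + value));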
From source file: com.versusoft.packages.ooo.odt2daisy.addon.gui.UnoGUI.java

/**
 * Save as DAISY XML.
 *
 * @return true if the content could be saved as valid DAISY XML, false otherwise (e.g. an error occurred).
 */
public boolean saveAsXML() {
    Odt2Daisy odt2daisy = null;
    String tmpOdtUrl = null;
    String tmpOdtUnoUrl = null;
    String exportUnoUrl = null;

    try {
        // Start status bar
        xStatusIndicator.start(L10N_StatusIndicator_Step_1, 100);
        xStatusIndicator.setValue(5);

        // Request a temporary file
        xStatusIndicator.setText(L10N_StatusIndicator_Step_2);
        xStatusIndicator.setValue(10);
        logger.fine("request a temporary file");
        File tmpFile = File.createTempFile(TMP_ODT_PREFIX, TMP_ODT_SUFFIX);
        tmpFile.deleteOnExit();
        tmpOdtUrl = tmpFile.getAbsolutePath();
        tmpOdtUnoUrl = UnoUtils.createUnoFileURL(tmpOdtUrl, m_xContext);
        logger.fine("tmpOdtUrl:" + tmpOdtUrl);
        logger.fine("tmpOdtUnoUrl:" + tmpOdtUnoUrl);

        // Export in ODT format using the UNO API
        xStatusIndicator.setText(L10N_StatusIndicator_Step_3);
        xStatusIndicator.setValue(15);
        logger.fine("save current document in ODT using UNO API");
        PropertyValue[] conversionProperties = new PropertyValue[1];
        conversionProperties[0] = new PropertyValue();
        conversionProperties[0].Name = "FilterName";
        conversionProperties[0].Value = FLAT_XML_FILTER_NAME; // Daisy DTBook OpenDocument XML
        XStorable storable = (XStorable) UnoRuntime.queryInterface(XStorable.class,
                m_xFrame.getController().getModel());
        storable.storeToURL(tmpOdtUnoUrl, conversionProperties);

        // Create and init odt2daisy
        xStatusIndicator.setText(L10N_StatusIndicator_Step_4);
        xStatusIndicator.setValue(20);
        logger.fine("create and init odt2daisy");
        odt2daisy = new Odt2Daisy(tmpOdtUrl); // @todo add initial output directory URL
        odt2daisy.init();
        xStatusIndicator.setText(L10N_StatusIndicator_Step_5);
        xStatusIndicator.setValue(40);

        // Stop progress bar during user input
        xStatusIndicator.end();

        // Show an alert if the ODT is empty
        if (odt2daisy.isEmptyDocument()) {
            String messageBoxTitle = L10N_MessageBox_Info_Title;
            String message = "\n" + L10N_Empty_Document_Message + " \n";
            UnoAwtUtils.showInfoMessageBox(parentWindowPeer, messageBoxTitle, message);
            return false;
        }

        // Show a warning if the ODT does not contain any headings (Heading 1)
        if (!odt2daisy.isUsingHeadings()) {
            String messageBoxTitle = L10N_MessageBox_Warning_Title;
            String message = L10N_No_Headings_Warning + "\n";
            Short result = UnoAwtUtils.showYesNoWarningMessageBox(parentWindowPeer, messageBoxTitle, message);
            // Abort on Cancel
            if (result == (short) 3) {
                logger.fine("User cancelled export");
                return false;
            }
        }

        // @todo Ideally, the warning about ODT images in formats not supported by DAISY 3 should be here,
        //       instead of after the Save As dialog.

        // Raise File Export dialog  @todo add initial output directory URL
        exportUnoUrl = UnoAwtUtils.showSaveAsDialog(L10N_Default_Export_Filename, "DAISY DTBook XML", "*.xml",
                m_xContext);
        logger.fine("exportUnoUrl=" + exportUnoUrl);
        if (exportUnoUrl.length() < 1) {
            logger.info("user cancelled export");
            return false;
        }

        // Append the extension manually because autoextension crashes on OOo beta 3 on Mac OS X
        if (!exportUnoUrl.endsWith(".xml")) {
            exportUnoUrl = exportUnoUrl.concat(".xml");
        }
        exportUrl = UnoUtils.UnoURLtoURL(exportUnoUrl, m_xContext);
        logger.fine("exportUrl=" + exportUrl);

        // Raise Export Options dialog
        dialog = new ExportDialog(m_xContext, isFullExport);
        dialog.setUid(odt2daisy.getUidParam());
        dialog.setDoctitle(odt2daisy.getTitleParam());
        dialog.setCreator(odt2daisy.getCreatorParam());
        dialog.setPublisher(odt2daisy.getPublisherParam());
        dialog.setProducer(odt2daisy.getProducerParam());
        dialog.setLang(odt2daisy.getLangParam());
        dialog.setAlternateLevelMarkup(odt2daisy.isUseAlternateLevelParam());

        boolean retDialog = dialog.execute();
        if (!retDialog) {
            logger.info("user cancelled export");
            return false;
        }

        xStatusIndicator.start(L10N_StatusIndicator_Step_6, 100);
        xStatusIndicator.setValue(45);

        if (dialog.isPaginationEnable()) {
            logger.info("Pagination process started");
            odt2daisy.paginationProcessing();
            logger.info("Pagination process end");
        }

        // Correction processing
        xStatusIndicator.setText(L10N_StatusIndicator_Step_7);
        xStatusIndicator.setValue(60);
        logger.fine("Trying ODF XML correction"); // was: "daisy correction"??
        odt2daisy.correctionProcessing();

        // Set params according to the DAISY Export dialog
        odt2daisy.setUidParam(dialog.getUid());
        odt2daisy.setTitleParam(dialog.getDoctitle());
        odt2daisy.setCreatorParam(dialog.getCreator());
        odt2daisy.setPublisherParam(dialog.getPublisher());
        odt2daisy.setProducerParam(dialog.getProducer());
        odt2daisy.setUseAlternateLevelParam(dialog.isAlternateLevelMarkup());
        odt2daisy.setWriteCSSParam(dialog.isWriteCSS());

        // Convert as DAISY XML
        xStatusIndicator.setText(L10N_StatusIndicator_Step_8);
        xStatusIndicator.setValue(70);
        logger.fine("Trying daisy translation");
        odt2daisy.convertAsDTBook(exportUrl, IMAGE_DIR);

        // @todo Check whether imagesProcessing(dtbookFile, imageDir) [in the odt2daisy object] can be moved
        //       to a method called before odt2daisy.convertAsDTBook(exportUrl, IMAGE_DIR), or just check the
        //       merged XML instead (i.e. without checking file names stored inside the ODF).
        if (odt2daisy.containsIncompatibleImages()) {
            LinkedHashMap<String, String> incompatibleImg = odt2daisy.getIncompatibleImages();
            StringBuffer messageWithImgList = new StringBuffer(L10N_Incompatible_Images_Error);
            for (java.util.Map.Entry<String, String> entry : incompatibleImg.entrySet()) {
                messageWithImgList.append("* ").append(entry.getKey()).append("\n");
            }
            String message = messageWithImgList.toString();
            Short result = UnoAwtUtils.showErrorMessageBox(parentWindowPeer, L10N_MessageBox_Error_Title,
                    message);
            logger.severe(message);
            return false;
        } else {
            logger.fine("No incompatible images.");
        }

        // DTD validation
        xStatusIndicator.setText(L10N_StatusIndicator_Step_9);
        xStatusIndicator.setValue(90);
        logger.fine("Trying daisy DTD validation");
        odt2daisy.validateDTD(exportUrl);
        if (odt2daisy.getErrorHandler().hadError()) {
            String messageBoxTitle = L10N_MessageBox_Error_Title;
            String message = L10N_DTD_Error_Message + "\n\n"
                    + L10N_Line + ": " + odt2daisy.getErrorHandler().getLineNumber() + "\n"
                    + L10N_Message + ": " + odt2daisy.getErrorHandler().getMessage() + "\n" + "\n";
            UnoAwtUtils.showErrorMessageBox(parentWindowPeer, messageBoxTitle, message);
            logger.severe(message);
            return false;
        }

        xStatusIndicator.setText(L10N_StatusIndicator_Step_10);
        xStatusIndicator.setValue(100);
        return true;

    } catch (Exception e) {
        String messageBoxTitle = L10N_MessageBox_InternalError_Title;
        String message = L10N_Export_Aborted_Message + " " + logFile.getAbsolutePath() + "\n";
        UnoAwtUtils.showErrorMessageBox(parentWindowPeer, messageBoxTitle, message);
        if (logger != null) {
            logger.log(Level.SEVERE, null, e);
        }
        return false;
    }
}
From source file: gov.llnl.lc.infiniband.opensm.plugin.data.RT_NodeBalance.java

private boolean initialize() {
    boolean balanced = false;

    // if Node == null, then this balance is for the entire Table, not just a switch
    if ((Table != null) && (Fabric != null) && (VertexMap != null)) {
        if (Node != null) {
            // find the matching IB_Vertex for this node
            Vertex = VertexMap.get(IB_Vertex.getVertexKey(Node.getGuid()));
            NumCaRoutes = Node.getNumCaRoutes(Table);

            LinkedHashMap<String, RT_Port> PortRouteMap = RT_Node.sortPortRouteTable(Node.getPortRouteMap(), true);
            if (PortRouteMap != null) {
                OSM_Node node = Fabric.getOSM_Node(Node.getGuid());
                NumPortsTotal += node.sbnNode.num_ports;

                // bin up the ports by the number of (non-zero) CA routes
                for (Map.Entry<String, RT_Port> entry : PortRouteMap.entrySet()) {
                    RT_Port rp = entry.getValue();
                    int nCAs = rp.getNumCaRoutes(Table);
                    if (nCAs > 0) {
                        if (Vertex.isDownLink(rp.getPortNumber()))
                            DownCA_Bins.add(rp, Integer.toString(nCAs));
                        else if (Vertex.isUpLink(rp.getPortNumber()))
                            UpCA_Bins.add(rp, Integer.toString(nCAs));
                        else {
                            System.err.println("Unknow type of Port (not up or down)");
                            System.err.println(rp.getPortNumber());
                            System.err.println(rp.toIB_RouteString(Table, Fabric));
                        }
                    }
                }

                // now I have the up and down CA Bins, calculate their stats separately
                UpStats = calculateStatistics(UpCA_Bins);
                DownStats = calculateStatistics(DownCA_Bins);

                getPortsExceedingSubscription(UpCA_Bins, UpStats);
                getPortsExceedingSubscription(DownCA_Bins, DownStats);
            }

            // System.err.println("**************************************");
            // System.err.println("Levels: " + NumFabricLevels + ", this node level: " + NodeLevel);
            // System.err.println("U/D Balanced: " + isUpDownBalanced());
            // System.err.println("Down Balanced: " + isDownLinksBalanced());
            // System.err.println("Up Balanced: " + isUpLinksBalanced());
            // System.err.println("Up average: " + UpStats.getMean() + ", and std dev: " + UpStats.getStandardDeviation());
            // System.err.println("Down average: " + DownStats.getMean() + ", and std dev: " + DownStats.getStandardDeviation());
            // System.err.println("Num Under Subscribed: " + UnderSubscribedPorts.size() + ", " + UnderSubscribedPorts.toString());
            // System.err.println("Num Over Subscribed: " + OverSubscribedPorts.size() + ", " + OverSubscribedPorts.toString());
            // System.err.println("Balanced: " + isBalanced());
            // System.err.println("**************************************");
        }
    }

    // System.err.println("Up Bins: " + UpCA_Bins.size() + ", balanced? " + isBinListBalanced(UpCA_Bins));
    // System.err.println("Down Bins: " + DownCA_Bins.size() + ", balanced? " + isBinListBalanced(DownCA_Bins));

    return balanced;
}
From source file: nl.nn.adapterframework.webcontrol.api.ShowConfigurationStatus.java

@SuppressWarnings({ "unchecked", "rawtypes" })
@PUT
@RolesAllowed({ "ObserverAccess", "IbisTester", "AdminAccess" })
@Path("/adapters/")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response updateAdapters(LinkedHashMap<String, Object> json) throws ApiException {
    initBase(servletConfig);

    Response.ResponseBuilder response = Response.status(Response.Status.NO_CONTENT); // PUT defaults to no content
    String action = null;
    ArrayList<String> adapters = new ArrayList<String>();

    for (Entry<String, Object> entry : json.entrySet()) {
        String key = entry.getKey();
        Object value = entry.getValue();
        if (key.equalsIgnoreCase("action")) { // Start or stop an adapter!
            if (value.equals("stop")) {
                action = "stopadapter";
            }
            if (value.equals("start")) {
                action = "startadapter";
            }
        }
        if (key.equalsIgnoreCase("adapters")) {
            try {
                adapters.addAll((ArrayList) value);
            } catch (Exception e) {
                return response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
            }
        }
    }

    if (action != null) {
        response.status(Response.Status.ACCEPTED);
        if (adapters.size() == 0) {
            ibisManager.handleAdapter(action, "*ALL*", "*ALL*", null, null, false);
        } else {
            for (Iterator<String> iterator = adapters.iterator(); iterator.hasNext();) {
                String adapterName = iterator.next();
                ibisManager.handleAdapter(action, "", adapterName, null, null, false);
            }
        }
    }

    return response.build();
}
From source file: nl.systemsgenetics.eqtlannotation.EncodeMultipleTfbsOverlap.java

private static LinkedHashMap<String, HashMap<String, ArrayList<EncodeNarrowPeak>>> readMultipleTfbsInformation(
        String inputFolderTfbsData) throws IOException {
    LinkedHashMap<String, HashMap<String, ArrayList<EncodeNarrowPeak>>> data = new LinkedHashMap<>();

    File file = new File(inputFolderTfbsData);
    File[] files = file.listFiles();
    ArrayList<String> vecFiles = new ArrayList<>();
    for (File f : files) {
        // System.out.println(f.getAbsolutePath());
        vecFiles.add(f.getAbsolutePath());
    }

    for (String fileToRead : vecFiles) {
        TextFile reader = new TextFile(fileToRead, TextFile.R);

        String[] storingInformation = fileToRead.split("_");
        // String cellLine = storingInformation[1].replace("TFBS\\", "");
        String transcriptionFactor = storingInformation[2].replace(".narrowPeak", "");
        if (storingInformation.length > 4) {
            for (int i = 3; i < (storingInformation.length - 1); ++i) {
                transcriptionFactor = transcriptionFactor + "_" + storingInformation[i].replace(".narrowPeak", "");
            }
        }

        String row;
        while ((row = reader.readLine()) != null) {
            String[] parts = StringUtils.split(row, '\t');
            if (!data.containsKey(transcriptionFactor)) {
                data.put(transcriptionFactor, new HashMap<String, ArrayList<EncodeNarrowPeak>>());
            }
            if (!data.get(transcriptionFactor).containsKey(parts[0])) {
                data.get(transcriptionFactor).put(parts[0], new ArrayList<EncodeNarrowPeak>());
            }
            data.get(transcriptionFactor).get(parts[0]).add(new EncodeNarrowPeak(parts, fileToRead));
        }
        reader.close();
    }

    ArrayList<String> cleanList = new ArrayList<>();
    for (Entry<String, HashMap<String, ArrayList<EncodeNarrowPeak>>> tfInformation : data.entrySet()) {
        System.out.println("Transcription factor: " + tfInformation.getKey());
        int counter = 0;
        for (Entry<String, ArrayList<EncodeNarrowPeak>> tfEntry : tfInformation.getValue().entrySet()) {
            Collections.sort(tfEntry.getValue());
            counter += tfEntry.getValue().size();
        }
        System.out.println("\tcontacts: " + counter);
        // remove all with less than 750 contacts
        // if (counter < 750) {
        //     cleanList.add(tfInformation.getKey());
        // }
    }

    for (String k : cleanList) {
        data.remove(k);
    }

    return data;
}
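On Java 8 and later, the containsKey/put pairs that populate the nested map can be collapsed with Map.computeIfAbsent; a sketch of the equivalent inner-loop body, assuming the same variable names as above:

// Equivalent population of the nested structure: each missing level is created
// on demand, and the insertion order of the outer LinkedHashMap is preserved.
data.computeIfAbsent(transcriptionFactor, tf -> new HashMap<>())
    .computeIfAbsent(parts[0], chr -> new ArrayList<>())
    .add(new EncodeNarrowPeak(parts, fileToRead));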