List of usage examples for java.util.HashMap.size()
Method signature: public int size(). Returns the number of key-value mappings in the map.
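Before the project-level examples below, here is a minimal, self-contained sketch (not taken from any of the listed projects) showing what size() reports as entries are added, overwritten, and removed:

import java.util.HashMap;

public class HashMapSizeDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> counts = new HashMap<>();
        System.out.println(counts.size());   // 0: a new map is empty

        counts.put("alpha", 1);
        counts.put("beta", 2);
        counts.put("alpha", 3);               // overwrites the existing key, does not add a mapping
        System.out.println(counts.size());   // 2: duplicate keys are counted once

        counts.remove("beta");
        System.out.println(counts.size());   // 1

        // size() is also the usual emptiness / threshold check seen in the examples below
        if (counts.size() == 0) {
            System.out.println("map is empty");
        }
    }
}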
From source file:com.bigdata.rockstor.console.UploadServlet.java
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    if (!ServletFileUpload.isMultipartContent(req)) {
        LOG.error("It is not a MultipartContent, return error.");
        resp.sendError(500, "It is not a MultipartContent, return error.");
        return;
    }

    FileItemFactory factory = new DiskFileItemFactory();
    ServletFileUpload upload = new ServletFileUpload(factory);
    upload.setFileSizeMax(1024 * 1024 * 512);

    List<FileItem> fileItems = null;
    try {
        fileItems = upload.parseRequest(req);
        LOG.info("parse requeset success : items num : " + fileItems.size());
    } catch (FileUploadException e) {
        LOG.error("parse requeset failed !");
        resp.sendError(500, "parse requeset failed !");
        return;
    }

    HashMap<String, String> headMap = new HashMap<String, String>();
    FileItem theFile = null;
    long size = -1;
    URI uri = null;

    Iterator<FileItem> iter = fileItems.iterator();
    while (iter.hasNext()) {
        FileItem item = (FileItem) iter.next();
        if (item.isFormField()) {
            String name = item.getFieldName();
            String value = null;
            try {
                value = item.getString("UTF-8").trim();
            } catch (UnsupportedEncodingException e1) {
                e1.printStackTrace();
            }
            LOG.info("Parse head info : " + name + " -- " + value);
            if (name.equals("ObjName")) {
                try {
                    uri = new URI(value);
                } catch (URISyntaxException e) {
                    LOG.info("Parse uri info error : " + value);
                    uri = null;
                }
            } else if (name.equals("ObjSize")) {
                try {
                    size = Long.parseLong(value);
                } catch (Exception e) {
                    LOG.error("Parse objSize error : " + value);
                }
            } else {
                headMap.put(name, value);
            }
        } else {
            theFile = item;
        }
    }

    if (size == -1 || uri == null || theFile == null || headMap.size() == 0) {
        LOG.error("Parse upload info error : size==-1 || uri == null || theFile == null || headMap.size()==0");
        resp.sendError(500, "Parse upload info error : size==-1 || uri == null || theFile == null || headMap.size()==0");
        return;
    }

    HttpPut put = new HttpPut();
    put.setURI(uri);
    for (Map.Entry<String, String> e : headMap.entrySet()) {
        if ("Filename".equals(e.getKey()))
            continue;
        put.setHeader(e.getKey(), e.getValue());
    }
    put.setEntity(new InputStreamEntity(theFile.getInputStream(), size));

    DefaultHttpClient client = new DefaultHttpClient();
    HttpResponse response = client.execute(put);
    if (200 != response.getStatusLine().getStatusCode()) {
        LOG.error("Put object error : " + response.getStatusLine().getStatusCode() + " : "
                + response.getStatusLine().getReasonPhrase());
        resp.sendError(response.getStatusLine().getStatusCode(), response.getStatusLine().getReasonPhrase());
        return;
    }
    LOG.info("Put object OK : " + uri);
    response.setStatusCode(200);
}
From source file:com.ibm.bi.dml.test.utils.TestUtils.java
/**
 * Compares two matrices given as HashMaps. The matrix containing more nnz
 * is iterated and each cell value compared against the corresponding cell
 * in the smaller matrix, to ensure that all values are compared.<br/>
 * This method does not assert. Instead statistics are added to
 * AssertionBuffer, at the end of the test you should call
 * {@link TestUtils#displayAssertionBuffer()}.
 *
 * @param m1
 * @param m2
 * @param tolerance
 * @return True if matrices are identical regarding tolerance.
 */
public static boolean compareMatrices(HashMap<CellIndex, Double> m1, HashMap<CellIndex, Double> m2,
        double tolerance, String name1, String name2, boolean ignoreNaN) {
    HashMap<CellIndex, Double> first = m2;
    HashMap<CellIndex, Double> second = m1;
    String namefirst = name2;
    String namesecond = name1;
    boolean flag = true;

    /** to ensure that always the matrix with more nnz is iterated */
    if (m1.size() > m2.size()) {
        first = m1;
        second = m2;
        namefirst = name1;
        namesecond = name2;
        flag = false;
    }

    int countErrorWithinTolerance = 0;
    int countIdentical = 0;
    double minerr = -1;
    double maxerr = 0;

    for (CellIndex index : first.keySet()) {
        Double v1 = first.get(index);
        Double v2 = second.get(index);
        if (v1 == null)
            v1 = 0.0;
        if (v2 == null)
            v2 = 0.0;
        if (Math.abs(v1 - v2) < minerr || minerr == -1)
            minerr = Math.abs(v1 - v2);
        if (Math.abs(v1 - v2) > maxerr)
            maxerr = Math.abs(v1 - v2);

        if (!compareCellValue(first.get(index), second.get(index), 0, ignoreNaN)) {
            if (!compareCellValue(first.get(index), second.get(index), tolerance, ignoreNaN)) {
                countErrorWithinTolerance++;
                if (!flag)
                    System.out.println(index + ": " + first.get(index) + " <--> " + second.get(index));
                else
                    System.out.println(index + ": " + second.get(index) + " <--> " + first.get(index));
            }
        } else {
            countIdentical++;
        }
    }

    String assertPrefix = (countErrorWithinTolerance == 0) ? " " : "! ";
    _AssertInfos.add(assertPrefix + name1 + "<->" + name2 + " # stored values in " + namefirst + ": "
            + first.size());
    _AssertInfos.add(assertPrefix + name1 + "<->" + name2 + " # stored values in " + namesecond + ": "
            + second.size());
    _AssertInfos.add(assertPrefix + name1 + "<->" + name2 + " identical values(z=0): " + countIdentical);
    _AssertInfos.add(assertPrefix + name1 + "<->" + name2 + " wrong values(z=" + tolerance + "): "
            + countErrorWithinTolerance);
    _AssertInfos.add(assertPrefix + name1 + "<->" + name2 + " min error: " + minerr);
    _AssertInfos.add(assertPrefix + name1 + "<->" + name2 + " max error: " + maxerr);

    if (countErrorWithinTolerance == 0)
        return true;

    _AssertOccured = true;
    return false;
}
From source file:edu.illinois.cs.cogcomp.transliteration.WikiTransliteration.java
/**
 * Helper function.
 * @param word1
 * @param maxSubstringLength
 * @param probMap
 * @param probs
 * @param memoizationTable
 * @param pruneToSize
 * @return
 */
public static HashMap<String, Double> Predict2(String word1, int maxSubstringLength,
        Map<String, HashSet<String>> probMap, HashMap<Production, Double> probs,
        HashMap<String, HashMap<String, Double>> memoizationTable, int pruneToSize) {
    HashMap<String, Double> result;
    if (word1.length() == 0) {
        result = new HashMap<>(1);
        result.put("", 1.0);
        return result;
    }

    if (memoizationTable.containsKey(word1)) {
        return memoizationTable.get(word1);
    }

    result = new HashMap<>();
    int maxSubstringLength1 = Math.min(word1.length(), maxSubstringLength);
    for (int i = 1; i <= maxSubstringLength1; i++) {
        String substring1 = word1.substring(0, i);
        if (probMap.containsKey(substring1)) {
            // recursion right here.
            HashMap<String, Double> appends = Predict2(word1.substring(i), maxSubstringLength, probMap, probs,
                    memoizationTable, pruneToSize);
            //int segmentations = Segmentations( word1.Length - i );
            for (String tgt : probMap.get(substring1)) {
                Production alignment = new Production(substring1, tgt);
                double alignmentProb = probs.get(alignment);
                for (String key : appends.keySet()) {
                    Double value = appends.get(key);
                    String word = alignment.getSecond() + key;
                    //double combinedProb = (pair.Value/segmentations) * alignmentProb;
                    double combinedProb = (value) * alignmentProb;
                    // I hope this is an accurate translation...
                    Dictionaries.IncrementOrSet(result, word, combinedProb, combinedProb);
                }
            }
        }
    }

    if (result.size() > pruneToSize) {
        Double[] valuesArray = result.values().toArray(new Double[result.values().size()]);
        String[] data = result.keySet().toArray(new String[result.size()]);

        //Array.Sort<Double, String> (valuesArray, data);
        TreeMap<Double, String> sorted = new TreeMap<>();
        for (int i = 0; i < valuesArray.length; i++) {
            sorted.put(valuesArray[i], data[i]);
        }

        // FIXME: is this sorted in the correct order???

        //double sum = 0;
        //for (int i = data.Length - pruneToSize; i < data.Length; i++)
        //    sum += valuesArray[i];

        result = new HashMap<>(pruneToSize);
        // for (int i = data.length - pruneToSize; i < data.length; i++)
        //     result.put(data[i], valuesArray[i]);

        int i = 0;
        for (Double d : sorted.descendingKeySet()) {
            result.put(sorted.get(d), d);
            if (i++ > pruneToSize) {
                break;
            }
        }
    }
    memoizationTable.put(word1, result);
    return result;
}
From source file:com.surevine.alfresco.user.AlfrescoUserManagerTest.java
/**
 * Test the
 * {@link AlfrescoUserManager#setUserDashboard(String, com.surevine.alfresco.dashboard.DashboardDefinition)}
 * method with a successful call
 *
 * @throws JSONException
 * @throws AlfrescoException
 */
@Test
public void testSetUserDashboardSuccess() throws AlfrescoException, JSONException {
    final String testTemplateId = "test_template_id";
    final String testUsername = "test_username";

    // Construct a mocked dashboard definition
    DashboardDefinition dashboardDef = Mockito.mock(DashboardDefinition.class);

    int[] rowCounts = { 2, 0, 4 };
    Map<String, List<String>> dashlets = DashboardDefinitionTest.createTestDashlets(rowCounts);

    when(dashboardDef.getDashlets()).thenReturn(dashlets);
    when(dashboardDef.getTemplateId()).thenReturn(testTemplateId);

    JSONObject successJSON = new JSONObject();
    successJSON.put("success", true);

    when(alfrescoConnector.doSharePost(eq(SET_USER_DASHBOARD_SERVICE), Matchers.any(JSONObject.class)))
            .thenReturn(successJSON);

    // This will capture the parameters sent to the http service
    ArgumentCaptor<JSONObject> jsonCaptor = ArgumentCaptor.forClass(JSONObject.class);

    // Do that actual test call
    alfrescoUserManager.setUserDashboard(testUsername, dashboardDef);

    // Verify various stuff
    verify(dashboardDef).getDashlets();
    verify(alfrescoConnector).doSharePost(eq(SET_USER_DASHBOARD_SERVICE), jsonCaptor.capture());

    JSONObject jsonResult = jsonCaptor.getValue();

    assertEquals("Template ID is wrong", testTemplateId, jsonResult.getString("templateId"));
    assertEquals("dashboardPage", "user/" + testUsername + "/dashboard", jsonResult.getString("dashboardPage"));

    JSONArray resultDashlets = jsonResult.getJSONArray("dashlets");

    // This will be a map of all the expected dashlet urls which should be in the post
    HashMap<String, String> expectedDashlets = new HashMap<String, String>();

    for (Entry<String, List<String>> entry : dashlets.entrySet()) {
        int col = Integer.valueOf(entry.getKey()) + 1;
        for (int i = 0; i < entry.getValue().size(); ++i) {
            expectedDashlets.put("component-" + col + "-" + (i + 1), entry.getValue().get(i));
        }
    }

    // We firstly check that all the resulting dashlets are contained in the original dashboard definition
    for (int i = 0; i < resultDashlets.length(); ++i) {
        JSONObject resultDashlet = resultDashlets.getJSONObject(i);

        String regionId = resultDashlet.getString("regionId");
        String url = resultDashlet.getString("url");

        assertTrue("Unexpected dashlet regionId: " + regionId, expectedDashlets.containsKey(regionId));
        assertEquals("Wrong url for regionId: " + regionId, expectedDashlets.get(regionId), url);

        // We will remove it from the map, then at the end if there are any left over we know something has gone wrong!
        expectedDashlets.remove(regionId);
    }

    // If there are expected dashlets left then they were not all included in the post
    assertTrue("Dashlets were expected but not delivered", expectedDashlets.size() == 0);
}
From source file:edu.ku.brc.specify.config.FixAttachments.java
/**
 * @param tableList
 * @param tableIdList
 * @param resultsHashMap
 * @param tblTypeHash
 * @param tableHash
 * @param totalFiles
 */
private void displayBadAttachments(final ArrayList<JTable> tableList, final ArrayList<Integer> tableIdList,
        final HashMap<Integer, Vector<Object[]>> resultsHashMap, final HashMap<Integer, String> tblTypeHash,
        final HashMap<Integer, AttchTableModel> tableHash, final int totalFiles) {
    CellConstraints cc = new CellConstraints();

    int maxWidth = 200;
    int y = 1;
    String rowDef = tableList.size() == 1 ? "f:p:g"
            : UIHelper.createDuplicateJGoodiesDef("p", "10px", tableList.size());
    PanelBuilder pb = new PanelBuilder(new FormLayout("f:p:g", rowDef));

    if (tableList.size() > 1) {
        int i = 0;
        for (JTable table : tableList) {
            Integer tblId = tableIdList.get(i++);
            int numRows = table.getModel().getRowCount();
            PanelBuilder pb2 = new PanelBuilder(new FormLayout("f:p:g", "p,2px,f:p:g"));
            if (resultsHashMap.size() > 1) {
                UIHelper.calcColumnWidths(table, numRows < 15 ? numRows + 1 : 15, maxWidth);
            } else {
                UIHelper.calcColumnWidths(table, 15, maxWidth);
            }

            pb2.addSeparator(tblTypeHash.get(tblId), cc.xy(1, 1));
            pb2.add(UIHelper.createScrollPane(table), cc.xy(1, 3));
            pb.add(pb2.getPanel(), cc.xy(1, y));
            y += 2;
        }
    } else {
        UIHelper.calcColumnWidths(tableList.get(0), 15, maxWidth);
        pb.add(UIHelper.createScrollPane(tableList.get(0)), cc.xy(1, 1));
    }
    tableList.clear();

    pb.setDefaultDialogBorder();

    JScrollPane panelSB = UIHelper.createScrollPane(pb.getPanel());
    panelSB.setBorder(BorderFactory.createEmptyBorder());

    Dimension dim = panelSB.getPreferredSize();
    panelSB.setPreferredSize(new Dimension(dim.width + 10, 600));

    final int totFiles = totalFiles;

    String title = String.format("Attachment Information - %d files to recover.", totalFiles);
    CustomDialog dlg = new CustomDialog((Dialog) null, title, true, CustomDialog.OKCANCELAPPLYHELP, panelSB) {
        @Override
        protected void helpButtonPressed() {
            File file = produceSummaryReport(resultsHashMap, tableHash, totFiles);
            try {
                AttachmentUtils.openURI(file.toURI());
            } catch (Exception e) {
            }
        }

        @Override
        protected void applyButtonPressed() {
            boolean isOK = UIRegistry.displayConfirm("Clean up",
                    "Are you sure you want to remove all references to the missing attachments?", "Remove",
                    "Cancel", JOptionPane.WARNING_MESSAGE);
            if (isOK) {
                super.applyButtonPressed();
            }
        }
    };
    dlg.setCloseOnApplyClk(true);
    dlg.setCancelLabel("Skip");
    dlg.setOkLabel("Recover Files");
    dlg.setHelpLabel("Show Summary");
    dlg.setApplyLabel("Delete References");
    dlg.createUI();
    dlg.pack();
    dlg.setVisible(true);

    if (dlg.getBtnPressed() == CustomDialog.OK_BTN) {
        reattachFiles(resultsHashMap, tableHash, totalFiles);
    } else if (dlg.getBtnPressed() == CustomDialog.APPLY_BTN) {
        doAttachmentRefCleanup(resultsHashMap, tableHash, totFiles);
    }
}
From source file:io.seldon.recommendation.RecentCategoryItemsRecommender.java
@Override
public ItemRecommendationResultSet recommend(String client, Long user, Set<Integer> dimensions,
        int maxRecsCount, RecommendationContext ctxt, List<Long> recentItemInteractions) {
    HashMap<Long, Double> recommendations = new HashMap<>();

    Set<Long> exclusions;
    if (ctxt.getMode() == RecommendationContext.MODE.INCLUSION) {
        logger.warn("Can't run RecentICategorytemsRecommender in inclusion context mode");
        return new ItemRecommendationResultSet(name);
    } else {
        exclusions = ctxt.getContextItems();
    }

    Integer dimId = getDimensionForAttrName(ctxt.getCurrentItem(), client, ctxt);
    if (dimId != null) {
        Collection<Long> recList = itemStorage.retrieveRecentlyAddedItemsTwoDimensions(client,
                maxRecsCount + exclusions.size(), dimensions, dimId).getItems();
        if (recList.size() > 0) {
            double scoreIncr = 1.0 / (double) recList.size();
            int count = 0;
            for (Long item : recList) {
                if (count >= maxRecsCount)
                    break;
                else if (!exclusions.contains(item))
                    recommendations.put(item, 1.0 - (count++ * scoreIncr));
            }
            List<ItemRecommendationResultSet.ItemRecommendationResult> results = new ArrayList<>();
            for (Map.Entry<Long, Double> entry : recommendations.entrySet()) {
                results.add(new ItemRecommendationResultSet.ItemRecommendationResult(entry.getKey(),
                        entry.getValue().floatValue()));
            }
            if (logger.isDebugEnabled())
                logger.debug("Recent items algorithm returned " + recommendations.size() + " items");
            return new ItemRecommendationResultSet(results, name);
        } else {
            logger.warn("No items returned for recent items of dimension " + StringUtils.join(dimensions, ",")
                    + " for " + client);
        }
    } else
        logger.info("Can't get dimension for item " + ctxt.getCurrentItem());

    return new ItemRecommendationResultSet(Collections.EMPTY_LIST, name);
}
From source file:edu.utexas.cs.tactex.UtilityEstimatorTest.java
@Test
public void testAddAndRemoveTariffEvaluation() {
    CustomerInfo customer1 = new CustomerInfo("Austin", 2);
    CustomerInfo customer2 = new CustomerInfo("Dallas", 4);

    TariffSpecification spec1 = new TariffSpecification(brokerContext.getBroker(), PowerType.CONSUMPTION);
    TariffSpecification spec2 = new TariffSpecification(brokerContext.getBroker(), PowerType.CONSUMPTION);

    ArrayRealVector energy1 = new ArrayRealVector(7 * 24, 6.0);
    ArrayRealVector energy2 = new ArrayRealVector(7 * 24, 8.0);

    // allocate data structures used for utility computation
    HashMap<CustomerInfo, HashMap<TariffSpecification, Double>> customer2estimatedTariffCharges =
            new HashMap<CustomerInfo, HashMap<TariffSpecification, Double>>();
    HashMap<CustomerInfo, ArrayRealVector> customer2energy = new HashMap<CustomerInfo, ArrayRealVector>();
    customer2estimatedTariffCharges.put(customer1, new HashMap<TariffSpecification, Double>());
    customer2estimatedTariffCharges.put(customer2, new HashMap<TariffSpecification, Double>());

    // populate data structures with data from above
    //
    // tariff charges - customer2 has 2x consumption so we did
    // 2x (it's probably not necessary for testing purposes)
    // remember that charge is per single population member of a customer
    customer2estimatedTariffCharges.get(customer1).put(spec1, -10.0);
    customer2estimatedTariffCharges.get(customer1).put(spec2, -20.0);
    customer2estimatedTariffCharges.get(customer2).put(spec1, -30.0);
    customer2estimatedTariffCharges.get(customer2).put(spec2, -40.0);

    // energy consumption vector of customers
    customer2energy.put(customer1, energy1);
    customer2energy.put(customer2, energy2);

    // verify initial state
    assertEquals("number of customers in initial set", 2, customer2estimatedTariffCharges.size());
    assertEquals("number of specs per customer1", 2, customer2estimatedTariffCharges.get(customer1).size());
    assertEquals("number of specs per customer2", 2, customer2estimatedTariffCharges.get(customer2).size());
}
From source file:org.powertac.common.TariffEvaluatorTest.java
@Test
public void revokeSuperseding() {
    TariffSpecification bobTS = new TariffSpecification(bob, PowerType.CONSUMPTION)
            .addRate(new Rate().withValue(-0.4));
    Tariff bobTariff = new Tariff(bobTS);
    initTariff(bobTariff);
    TariffSpecification jimTS = new TariffSpecification(jim, PowerType.CONSUMPTION)
            .withMinDuration(TimeService.DAY * 5).addRate(new Rate().withValue(-0.4));
    Tariff jimTariff = new Tariff(jimTS);
    initTariff(jimTariff);

    double[] profile = { 1.0, 2.0 };
    cma.capacityProfile = profile;
    cma.setChoiceSamples(0.45, 0.55);

    // distribute all customers across jim & bob
    subscribeTo(bobTariff, 5000);
    subscribeTo(jimTariff, 5000);

    // revoke Jim's tariff, supersede it
    jimTariff.setState(Tariff.State.KILLED);
    TariffSpecification jimSTS = new TariffSpecification(jim, PowerType.CONSUMPTION)
            .withMinDuration(TimeService.DAY * 5).addSupersedes(jimTariff.getId())
            .addRate(new Rate().withValue(-0.4));
    Tariff jimSuper = new Tariff(jimSTS);
    when(tariffRepo.findTariffById(jimTariff.getId())).thenReturn(jimTariff);
    initTariff(jimSuper);

    // capture calls to tariffMarket
    final HashMap<Tariff, Integer> calls = new HashMap<Tariff, Integer>();
    doAnswer(new Answer<Object>() {
        @Override
        public Object answer(InvocationOnMock invocation) {
            Object[] args = invocation.getArguments();
            assertEquals("correct customer", customer, args[1]);
            calls.put((Tariff) args[0], (Integer) args[2]);
            return null;
        }
    }).when(tariffMarket).subscribeToTariff(any(Tariff.class), any(CustomerInfo.class), anyInt());

    ArrayList<Tariff> tariffs = new ArrayList<Tariff>();
    tariffs.add(defaultConsumption);
    tariffs.add(bobTariff);
    tariffs.add(jimSuper);
    when(tariffRepo.findRecentActiveTariffs(anyInt(), any(PowerType.class))).thenReturn(tariffs);

    evaluator.evaluateTariffs();
    assertEquals("three calls", 3, calls.size());
    assertEquals("-5000 for jim", new Integer(-5000), calls.get(jimTariff));
    assertEquals("+2500 for jimSuper", new Integer(2500), calls.get(jimSuper));
    assertEquals("+2500 for bob", new Integer(2500), calls.get(bobTariff));
}
From source file:org.powertac.common.TariffEvaluatorTest.java
@Test
public void revokeToKilledSuperseding() {
    TariffSpecification bobTS = new TariffSpecification(bob, PowerType.CONSUMPTION)
            .addRate(new Rate().withValue(-0.4));
    Tariff bobTariff = new Tariff(bobTS);
    initTariff(bobTariff);
    TariffSpecification jimTS = new TariffSpecification(jim, PowerType.CONSUMPTION)
            .withMinDuration(TimeService.DAY * 5).addRate(new Rate().withValue(-0.4));
    Tariff jimTariff = new Tariff(jimTS);
    initTariff(jimTariff);

    double[] profile = { 1.0, 2.0 };
    cma.capacityProfile = profile;
    cma.setChoiceSamples(0.45, 0.55);

    // distribute all customers across jim & bob
    subscribeTo(bobTariff, 5000);
    subscribeTo(jimTariff, 5000);

    // revoke Jim's tariff, supersede it
    jimTariff.setState(Tariff.State.KILLED);
    TariffSpecification jimSTS = new TariffSpecification(jim, PowerType.CONSUMPTION)
            .withMinDuration(TimeService.DAY * 5).addSupersedes(jimTariff.getId())
            .addRate(new Rate().withValue(-0.4));
    Tariff jimSuper = new Tariff(jimSTS);
    when(tariffRepo.findTariffById(jimTariff.getId())).thenReturn(jimTariff);
    initTariff(jimSuper);

    // Revoke the superseding tariff
    jimSuper.setState(Tariff.State.KILLED);

    // capture calls to tariffMarket
    final HashMap<Tariff, Integer> calls = new HashMap<Tariff, Integer>();
    doAnswer(new Answer<Object>() {
        @Override
        public Object answer(InvocationOnMock invocation) {
            Object[] args = invocation.getArguments();
            assertEquals("correct customer", customer, args[1]);
            calls.put((Tariff) args[0], (Integer) args[2]);
            return null;
        }
    }).when(tariffMarket).subscribeToTariff(any(Tariff.class), any(CustomerInfo.class), anyInt());

    ArrayList<Tariff> tariffs = new ArrayList<Tariff>();
    tariffs.add(defaultConsumption);
    tariffs.add(bobTariff);
    //tariffs.add(jimSuper);
    when(tariffRepo.findRecentActiveTariffs(anyInt(), any(PowerType.class))).thenReturn(tariffs);

    evaluator.evaluateTariffs();
    assertEquals("two calls", 2, calls.size());
    assertEquals("-5000 for jim", new Integer(-5000), calls.get(jimTariff));
    assertEquals("+5000 for defaultConsumption", new Integer(5000), calls.get(bobTariff));
}
From source file:org.eurocarbdb.application.glycoworkbench.plugin.PeakListChartPanel.java
public void addIsotopeCurves(TreeMap<Peak, Collection<Annotation>> annotations) {
    if (theDocument.size() == 0)
        return;

    // remove old curves
    removeIsotopeCurves();

    // add curves
    if (annotations != null) {
        // set renderer
        if (show_all_isotopes) {
            thePlot.setRenderer(1, new StandardXYItemRenderer(StandardXYItemRenderer.SHAPES));
            thePlot.getRenderer(1).setShape(new Ellipse2D.Double(0, 0, 7, 7));
        } else
            thePlot.setRenderer(1, new StandardXYItemRenderer(StandardXYItemRenderer.LINES));

        MSUtils.IsotopeList isotope_list = new MSUtils.IsotopeList(show_all_isotopes);
        for (Map.Entry<Peak, Collection<Annotation>> pa : annotations.entrySet()) {
            Peak p = pa.getKey();

            // get compositions
            HashSet<Molecule> compositions = new HashSet<Molecule>();
            for (Annotation a : pa.getValue()) {
                try {
                    compositions.add(a.getFragmentEntry().fragment.computeIon());
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }

            // collect curves for this peak
            HashMap<String, double[][]> all_curves = new HashMap<String, double[][]>();
            for (Molecule m : compositions) {
                try {
                    double[][] data = MSUtils.getIsotopesCurve(1, m, show_all_isotopes);

                    // overlay the distribution with the existing list of isotopes
                    isotope_list.adjust(data, p.getMZ(), p.getIntensity());

                    all_curves.put(m.toString(), data);
                } catch (Exception e) {
                    LogUtils.report(e);
                }
            }

            // add average curve for this peak
            if (all_curves.size() > 1) {
                double[][] data = MSUtils.average(all_curves.values(), show_all_isotopes);

                // add the average to the chart
                String name = "average-" + p.getMZ();
                theIsotopesDataset.addSeries(name, data);
                thePlot.getRenderer(1).setSeriesPaint(theIsotopesDataset.indexOf(name), Color.magenta);
                thePlot.getRenderer(1).setSeriesStroke(theIsotopesDataset.indexOf(name), new BasicStroke(2));

                // add the average to the isotope list
                isotope_list.add(data, false);
            } else if (all_curves.size() == 1) {
                // add the only curve to the isotope list
                isotope_list.add(all_curves.values().iterator().next(), false);
            }

            // add the other curves
            for (Map.Entry<String, double[][]> e : all_curves.entrySet()) {
                String name = e.getKey() + "-" + p.getMZ();
                theIsotopesDataset.addSeries(name, e.getValue());
                thePlot.getRenderer(1).setSeriesPaint(theIsotopesDataset.indexOf(name), Color.blue);
            }
        }
    }

    updateIntensityAxis();
}