List of usage examples for the java.util.TreeMap.get(Object) method
public V get(Object key)
From source file:com.itemanalysis.jmetrik.graph.nicc.NonparametricCurveAnalysis.java
/** * Called from done on EDT// ww w .j a va2 s .c o m * * @throws IllegalArgumentException */ private void publishAllSeries() throws IllegalArgumentException { double[] tcc = new double[gridPoints]; double[] values; double[] points = uniformDistributionApproximation.getPoints(); double tccMin = 0; double tccMax = 0; for (VariableAttributes v : categoryRegression.keySet()) { KernelRegressionCategories kCategories; XYSeries series; //add lines to this collection repeat for each item XYSeriesCollection xyCollection = new XYSeriesCollection(); //increment TCC for focal group, also create expected value series series = new XYSeries(""); kCategories = categoryRegression.get(v); tccMin += kCategories.getMinimumPossibleScore(); tccMax += kCategories.getMaximumPossibleScore(); values = kCategories.getExpectedValues(); for (int i = 0; i < tcc.length; i++) { tcc[i] += values[i]; series.add(points[i], values[i]); } XYSeries catSeries; //add line for every category TreeMap<Object, KernelRegression> kregMap = kCategories.getRegressionMap(); for (Object o : kregMap.keySet()) { catSeries = new XYSeries(o.toString() + "(" + kCategories.getScoreValue(o) + ")"); values = kregMap.get(o).value(); for (int i = 0; i < points.length; i++) catSeries.add(points[i], values[i]); xyCollection.addSeries(catSeries); } if (allCategories) { nonparametricPanel.updateDatasetFor(v.getName().toString(), 0, 1, xyCollection); } else { nonparametricPanel.updateDatasetFor(v.getName().toString(), kCategories.getMinimumPossibleScore(), kCategories.getMaximumPossibleScore(), xyCollection); } } //end loop over items //add series for test characteristic curve XYSeriesCollection xyCollection = new XYSeriesCollection(); XYSeries tccSeries1 = new XYSeries("TCC"); for (int i = 0; i < tcc.length; i++) { tccSeries1.add(points[i], tcc[i]); } xyCollection.addSeries(tccSeries1); nonparametricPanel.updateDatasetFor("tcc", tccMin, tccMax, xyCollection); }
From source file:com.sfs.dao.GadgetPreferencesDAOImpl.java
/** * Converts the key/value TreeMap into a string for persisting to the * data-store./* w w w. j a va2s .c o m*/ * * @param keyValues the key values * * @return the string */ private String writeGadgetPreferences(final TreeMap<String, String> keyValues) { StringBuffer userPrefs = new StringBuffer(); if (keyValues != null) { for (String key : keyValues.keySet()) { String value = keyValues.get(key); // Add key and value to StringBuffer userPrefs.append(key); userPrefs.append("="); userPrefs.append(value); userPrefs.append("\n"); } } return userPrefs.toString(); }
From source file:org.apache.camel.dataformat.bindy.BindyCsvFactory.java
public String unbind(Map<String, Object> model) throws Exception { StringBuilder buffer = new StringBuilder(); results = new HashMap<Integer, List>(); // Check if separator exists ObjectHelper.notNull(this.separator, "The separator has not been instantiated or property not defined in the @CsvRecord annotation"); char separator = Converter.getCharDelimitor(this.getSeparator()); if (LOG.isDebugEnabled()) { LOG.debug("Separator converted : '0x" + Integer.toHexString(separator) + "', from : " + this.getSeparator()); }//w w w.j a va 2 s .c om for (Class clazz : models) { if (model.containsKey(clazz.getName())) { Object obj = model.get(clazz.getName()); if (LOG.isDebugEnabled()) { LOG.debug("Model object : " + obj + ", class : " + obj.getClass().getName()); } if (obj != null) { // Generate Csv table generateCsvPositionMap(clazz, obj); } } } // Transpose result List<List> l = new ArrayList<List>(); if (isOneToMany) { l = product(results); } else { // Convert Map<Integer, List> into List<List> TreeMap<Integer, List> sortValues = new TreeMap<Integer, List>(results); List<String> temp = new ArrayList<String>(); for (Integer key : sortValues.keySet()) { // Get list of values List<String> val = sortValues.get(key); // For one to one relation // There is only one item in the list String value = (String) val.get(0); // Add the value to the temp array if (value != null) { temp.add(value); } else { temp.add(""); } } l.add(temp); } if (l != null) { Iterator it = l.iterator(); while (it.hasNext()) { List<String> tokens = (ArrayList<String>) it.next(); Iterator itx = tokens.iterator(); while (itx.hasNext()) { String res = (String) itx.next(); if (res != null) { buffer.append(res); } else { buffer.append(""); } if (itx.hasNext()) { buffer.append(separator); } } if (it.hasNext()) { buffer.append(Converter.getStringCarriageReturn(getCarriageReturn())); } } } return buffer.toString(); }
From source file:com.sfs.whichdoctor.analysis.RevenueAnalysisDAOImpl.java
/**
 * Consolidates the per-key revenue lists into one summary RevenueBean per
 * key (summing values, net values, per-rate GST totals and merging
 * receipts/payments), then aggregates overall totals into the returned
 * analysis bean.
 *
 * @param revenueMap map of grouping key to the revenue beans in that group
 * @return the revenue analysis bean holding overall totals and the
 *         processed summary list
 */
private RevenueAnalysisBean consolidateSummary(final TreeMap<Object, ArrayList<RevenueBean>> revenueMap) {
    final RevenueAnalysisBean result = new RevenueAnalysisBean();
    final Collection<RevenueBean> summary = new ArrayList<RevenueBean>();
    for (Object key : revenueMap.keySet()) {
        // One summary bean per group; descriptive fields take the value of
        // the last revenue bean iterated (beans in a group share them —
        // presumably; confirm against the caller).
        RevenueBean summaryRevenue = new RevenueBean();
        for (RevenueBean revenue : revenueMap.get(key)) {
            summaryRevenue.setBatchReference(revenue.getBatchReference());
            summaryRevenue.setBatchNo(revenue.getBatchNo());
            summaryRevenue.setRevenueType(revenue.getRevenueType());
            summaryRevenue.setRevenueClass(revenue.getRevenueClass());
            final double summaryValue = summaryRevenue.getValue();
            final double summaryNetValue = summaryRevenue.getNetValue();
            /* Update the summary revenue totals for this batch */
            summaryRevenue.setValue(summaryValue + revenue.getValue());
            summaryRevenue.setNetValue(summaryNetValue + revenue.getNetValue());
            // Accumulate GST values per GST rate.
            for (Double gstRate : revenue.getGSTValues().keySet()) {
                final double gstValue = revenue.getGSTValues().get(gstRate);
                double gstSubtotal = 0;
                if (summaryRevenue.getGSTValues().containsKey(gstRate)) {
                    gstSubtotal = summaryRevenue.getGSTValues().get(gstRate);
                }
                summaryRevenue.setGSTValue(gstRate, gstSubtotal + gstValue);
            }
            /* Add receipts/payments to this revenue batch */
            TreeMap<Integer, ReceiptBean> receipts = summaryRevenue.getReceipts();
            if (receipts == null) {
                receipts = new TreeMap<Integer, ReceiptBean>();
            }
            for (Integer receiptId : revenue.getReceipts().keySet()) {
                ReceiptBean receipt = revenue.getReceipts().get(receiptId);
                if (receipts.containsKey(receiptId)) {
                    // Receipt already seen: merge this revenue's payments
                    // into the existing summary receipt.
                    ReceiptBean summaryReceipt = receipts.get(receiptId);
                    ArrayList<PaymentBean> payments = (ArrayList<PaymentBean>) summaryReceipt.getPayments();
                    if (payments == null) {
                        payments = new ArrayList<PaymentBean>();
                    }
                    if (receipt.getPayments() != null) {
                        for (PaymentBean payment : receipt.getPayments()) {
                            payments.add(payment);
                        }
                    }
                    summaryReceipt.setPayments(payments);
                    receipts.put(receiptId, summaryReceipt);
                } else {
                    // First time this receipt appears: keep it as-is.
                    receipts.put(receiptId, receipt);
                }
            }
        }
        if (dataLogger.isDebugEnabled()) {
            dataLogger.debug("Summary value: " + summaryRevenue.getValue());
            dataLogger.debug("Summary net value: " + summaryRevenue.getNetValue());
        }
        summary.add(summaryRevenue);
    }
    // Calculate the totals for the revenue analysis
    for (RevenueBean summaryRevenue : summary) {
        /* Update the overall running revenue totals */
        result.setValue(result.getValue() + summaryRevenue.getValue());
        result.setNetValue(result.getNetValue() + summaryRevenue.getNetValue());
        /* Update the GST totals */
        // gstRate is unboxed here and re-boxed for the map lookups below.
        for (double gstRate : summaryRevenue.getGSTValues().keySet()) {
            final double gstValue = summaryRevenue.getGSTValues().get(gstRate);
            double currentGSTValue = 0;
            if (result.getGSTValues().containsKey(gstRate)) {
                currentGSTValue = result.getGSTValues().get(gstRate);
            }
            result.setGSTValue(gstRate, currentGSTValue + gstValue);
        }
    }
    // Ensure the revenue beans have the same GST fields
    result.setRevenue(processGSTRates(summary));
    if (dataLogger.isDebugEnabled()) {
        dataLogger.debug("Total calculated value: " + result.getValue());
        dataLogger.debug("Total calculated net value: " + result.getNetValue());
    }
    return result;
}
From source file:com.datatorrent.contrib.hdht.PurgeTest.java
/**
 * Purge data from start, middle and end of the file.
 *
 * Writes keys 100..999, purges four (partly overlapping) ranges, then
 * verifies that keys inside the purged ranges are gone while the keys
 * immediately outside each boundary survive.
 */
@Test
public void purgeDataFromMiddleOfFile() throws IOException {
    // Start from a clean directory for this test.
    File file = new File(testInfo.getDir());
    FileUtils.deleteDirectory(file);
    FileAccessFSImpl fa = new MockFileAccess();
    fa.setBasePath(file.getAbsolutePath());
    HDHTWriter hds = new HDHTWriter();
    hds.setFileStore(fa);
    hds.setFlushSize(0); // flush after every key
    hds.setFlushIntervalCount(0);
    hds.setup(new OperatorContextTestHelper.TestIdOperatorContext(0, new DefaultAttributeMap()));
    hds.writeExecutor = MoreExecutors.sameThreadExecutor(); // synchronous flush

    // Window 1: write keys 100..999 into bucket 1 and commit.
    hds.beginWindow(1);
    for (int i = 100; i < 1000; i++) {
        hds.put(1, newSlice(i), newData(i));
    }
    hds.endWindow();
    hds.checkpointed(1);
    hds.committed(1);

    // Window 2: purge ranges [150,250], [200,400] (overlaps the first),
    // [450,700], and [950,1500] (extends past the last key) and commit.
    hds.beginWindow(2);
    hds.purge(1, newSlice(150), newSlice(250));
    hds.purge(1, newSlice(200), newSlice(400));
    hds.purge(1, newSlice(450), newSlice(700));
    hds.purge(1, newSlice(950), newSlice(1500));
    hds.endWindow();
    hds.checkpointed(2);
    hds.committed(2);

    // Read back the rewritten data file for bucket 1.
    HDHTReader.BucketFileMeta fmeta = hds.loadBucketMeta(1).files.firstEntry().getValue();
    TreeMap<Slice, Slice> data = getData(fa, 1, fmeta.name);
    int startKey = sliceToInt(data.firstKey());
    Assert.assertEquals("The start key in new file", 100, startKey);
    int endKey = sliceToInt(data.lastKey());
    // Boundary checks around each purged range: the key just outside
    // survives, keys on/inside the boundary are removed.
    Assert.assertArrayEquals("Key 149 is present in file ", newData(149), data.get(newSlice(149)).toByteArray());
    Assert.assertEquals("Key 150 is removed from file ", null, data.get(newSlice(150)));
    Assert.assertEquals("Key 160 is removed from file ", null, data.get(newSlice(160)));
    Assert.assertEquals("Key 220 is removed from file ", null, data.get(newSlice(220)));
    Assert.assertEquals("Key 400 is removed from file ", null, data.get(newSlice(400)));
    Assert.assertArrayEquals("Key 401 is present in file ", newData(401), data.get(newSlice(401)).toByteArray());
    Assert.assertArrayEquals("Key 449 is present in file ", newData(449), data.get(newSlice(449)).toByteArray());
    Assert.assertEquals("Key 450 is removed from file ", null, data.get(newSlice(450)));
    Assert.assertEquals("Key 500 is removed from file ", null, data.get(newSlice(500)));
    Assert.assertEquals("Key 700 is removed from file ", null, data.get(newSlice(700)));
    Assert.assertArrayEquals("Key 701 is present in file ", newData(701), data.get(newSlice(701)).toByteArray());
    Assert.assertArrayEquals("Key 949 is present in file ", newData(949), data.get(newSlice(949)).toByteArray());
    Assert.assertEquals("Key 950 is removed from file ", null, data.get(newSlice(950)));
    Assert.assertEquals("Key 999 is removed from file ", null, data.get(newSlice(999)));
    // The final purge trimmed the tail, so the last surviving key is 949.
    Assert.assertEquals("The end key in new file", 949, endKey);
}
From source file:com.cloudfoundry.bae.cloudpush.Channel.java
/** * genSign//from w w w . j ava2 s . c o m * * ? * * ?method, url, ? ??? */ private String genSign(String method, String url, Map<String, String> opt) { String gather = method + url; TreeMap<String, String> sortOpt = new TreeMap<String, String>(opt); NavigableSet<String> keySet = sortOpt.navigableKeySet(); Iterator<String> it = keySet.iterator(); while (it.hasNext()) { String key = it.next(); String value = sortOpt.get(key); gather += key + "=" + value; } gather += secretKey; logger.info("sign source content: " + gather); String encodedGather; try { // encodedGather = new URLCodec("utf8").encode(gather); encodedGather = URLEncoder.encode(gather, "utf8"); } catch (UnsupportedEncodingException ex) { throw new ChannelException("wrong params are seted: " + gather, CHANNEL_SDK_PARAM); } String sign = DigestUtils.md5Hex(encodedGather); return sign; }
From source file:model.plate.ANATestResult.java
public boolean initPosCtrl2(double negControl) { final Comparator<DiagnosisConstant.ANA_Titer> titerComparator = new Comparator<DiagnosisConstant.ANA_Titer>() { @Override/*from ww w .ja v a 2s .c o m*/ public int compare(DiagnosisConstant.ANA_Titer t, DiagnosisConstant.ANA_Titer t1) { if (t.getId() < 0) { throw new RuntimeException("Titer: " + t.name()); } if (t1.getId() < 0) { throw new RuntimeException("Titer: " + t.name()); } if (t.getId() > 6) { throw new RuntimeException("Titer: " + t.name()); } if (t1.getId() > 6) { throw new RuntimeException("Titer: " + t.name()); } return t.getId() < t1.getId() ? -1 : t.getId() == t1.getId() ? 0 : 1; } }; TreeMap<DiagnosisConstant.ANA_Titer, Double> decreasingSignals = new TreeMap<>(titerComparator); decreasingSignals.putAll(signals); SimpleRegression regression = new SimpleRegression(); Iterator<DiagnosisConstant.ANA_Titer> it = decreasingSignals.keySet().iterator(); DiagnosisConstant.ANA_Titer t; double signal, posCtrl = getFirstPlateSignal(); while (it.hasNext()) { t = it.next(); signal = decreasingSignals.get(t); // posCtrl=signal>posCtrl?signal:posCtrl; ??1:40, regression.addData((double) t.getId(), signal); if (signal > posCtrl * PlateConstants.PositiveCutOffRatio || signal > negControl * PlateConstants.NegativeCutOffRatio) { titer = t; } } if (titer.getId() >= DiagnosisConstant.ANA_Titer.ANA_1_320.getId()) { positivity = DiagnosisConstant.ANA_Result.POSITIVE; System.out.println("found titer for " + plateID + " : " + titer); System.out.println(); System.out.println(); } r2 = regression.getRSquare(); if (r2 < PlateConstants.R2_TH) { warningMessage.add(WarningMessage.PositiveControlLinearity.getId()); } if (titer == null || titer.getId() < DiagnosisConstant.ANA_Titer.ANA_1_320.getId()) {//1:40 titer = DiagnosisConstant.ANA_Titer.ANA_LESS_1_40; System.out.println(); for (DiagnosisConstant.ANA_Titer t1 : decreasingSignals.keySet()) { System.out.println(plateID + " Control Sample Compare"); System.out.println(t1 + ": 
posCtrl=" + decreasingSignals.get(t1) + "\tv.s.\tnegCtrl=" + negControl + " (" + decreasingSignals.get(t1) / negControl + ")"); } System.out.println(); positivity = DiagnosisConstant.ANA_Result.NEGATIVE; System.out.println("barcode " + this.julien_barcode); warningMessage.add(WarningMessage.PositiveNegativeControlComparison.getId()); } else { positivity = DiagnosisConstant.ANA_Result.POSITIVE; } if (posCtrl < negControl * PlateConstants.CTRL_RATIO_TH) { this.warningMessage.add(WarningMessage.PosCtrlFailed.getId()); return false; } return true; }
From source file:net.spfbl.core.Analise.java
protected static void dumpClusterTLD(StringBuilder builder) { TreeMap<String, Short[]> map = getClusterMap(); for (String token : map.keySet()) { Short[] dist = map.get(token); int spam = dist[1]; if (spam > 512) { int ham = dist[0]; float total = ham + spam; float reputation = spam / total; if (reputation > CLUSTER_RED) { if (Domain.isOfficialTLD(token)) { if (!Block.contains(token)) { builder.append(token); builder.append(' '); builder.append(ham); builder.append(' '); builder.append(spam); builder.append('\n'); }//from w w w.j a va2s. co m } } } } }
From source file:net.spfbl.core.Analise.java
/**
 * Appends to {@code builder} one "token ham spam" line for every CPF owner
 * token in the cluster map whose spam count exceeds 512, whose spam
 * reputation exceeds CLUSTER_RED and which is not already blocked.
 *
 * @param builder destination for the dump lines
 */
protected static void dumpClusterCPF(StringBuilder builder) {
    TreeMap<String, Short[]> map = getClusterMap();
    // entrySet avoids a second map lookup per token.
    for (java.util.Map.Entry<String, Short[]> entry : map.entrySet()) {
        String token = entry.getKey();
        Short[] dist = entry.getValue();
        int spam = dist[1];
        if (spam > 512) {
            int ham = dist[0];
            float total = ham + spam;
            // Fraction of observations that were spam.
            float reputation = spam / total;
            if (reputation > CLUSTER_RED && Owner.isOwnerCPF(token) && !Block.contains(token)) {
                builder.append(token);
                builder.append(' ');
                builder.append(ham);
                builder.append(' ');
                builder.append(spam);
                builder.append('\n');
            }
        }
    }
}
From source file:net.spfbl.core.Analise.java
protected static void dumpClusterCNPJ(StringBuilder builder) { TreeMap<String, Short[]> map = getClusterMap(); for (String token : map.keySet()) { Short[] dist = map.get(token); int spam = dist[1]; if (spam > 512) { int ham = dist[0]; float total = ham + spam; float reputation = spam / total; if (reputation > CLUSTER_RED) { if (Owner.isOwnerCNPJ(token)) { if (!Block.contains(token)) { builder.append(token); builder.append(' '); builder.append(ham); builder.append(' '); builder.append(spam); builder.append('\n'); }//from w w w . ja v a 2 s . c om } } } } }