List of usage examples for java.util.TreeMap#get(Object key)
public V get(Object key)
From source file:com.clust4j.algo.KMeans.java
/** * Reorder the labels in order of appearance using the * {@link LabelEncoder}. Also reorder the centroids to correspond * with new label order//from w w w. ja va2s.c o m */ @Override protected void reorderLabelsAndCentroids() { boolean wss_null = null == wss; /* * reorder labels... */ final LabelEncoder encoder = new LabelEncoder(labels).fit(); labels = encoder.getEncodedLabels(); // also reorder centroids... takes O(2K) passes TreeMap<Integer, double[]> tmpCentroids = new TreeMap<>(); double[] new_wss = new double[k]; /* * We have to be delicate about this--KMedoids stores * labels as indices pointing to which record is the medoid, * whereas KMeans uses 0 thru K. Thus we can simply index in * KMeans, but will get an IndexOOB exception in Kmedoids, so * we need to come up with a universal solution which might * look ugly at a glance, but is robust to both. */ int encoded; for (int i = 0; i < k; i++) { encoded = encoder.reverseEncodeOrNull(i); tmpCentroids.put(i, centroids.get(encoded)); new_wss[i] = wss_null ? Double.NaN : wss[encoded]; } for (int i = 0; i < k; i++) centroids.set(i, tmpCentroids.get(i)); // reset wss this.wss = new_wss; }
From source file:byps.test.TestRemoteStreams.java
private void internalTestRemoteStreamsManyStreams(int nbOfStreams) throws InterruptedException, IOException { Map<Integer, InputStream> streams = new TreeMap<Integer, InputStream>(); Map<Integer, byte[]> streamBytes = new TreeMap<Integer, byte[]>(); for (int i = 0; i < nbOfStreams; i++) { byte[] bytes = new byte[1]; ByteArrayInputStream bis = new ByteArrayInputStream(bytes); streamBytes.put(i, bytes);//from w ww .ja v a 2 s. c o m InputStream istrm = new BContentStreamWrapper(bis, "application/octet-stream", bytes.length); streams.put(i, istrm); } remote.setImages(streams, -1); TreeMap<Integer, InputStream> istrmsR = remote.getImages(); TestUtils.assertEquals(log, "streams", streams, istrmsR); // Does not // compare // streams. for (int i = 0; i < nbOfStreams; i++) { InputStream istrm = new ByteArrayInputStream(streamBytes.get(i)); InputStream istrmR = istrmsR.get(i); TestUtils.assertEquals(log, "stream-" + i, istrm, istrmR); istrmR.close(); } }
From source file:web.diva.server.unused.PCAGenerator.java
/** * * * @return dataset.// w ww. j a v a 2s .c o m */ private XYDataset createDataset(TreeMap<Integer, PCAPoint> points, int[] subSelectionData, int[] selection, boolean zoom, DivaDataset divaDataset) { final XYSeriesCollection dataset = new XYSeriesCollection(); seriesList = new TreeMap<String, XYSeries>(); seriesList.put("#000000", new XYSeries("#000000")); seriesList.put("unGrouped", new XYSeries("LIGHT_GRAY")); for (Group g : divaDataset.getRowGroups()) { if (g.isActive() && !g.getName().equalsIgnoreCase("all")) { seriesList.put(g.getHashColor(), new XYSeries(g.getHashColor())); } } if (!zoom && (selection == null || selection.length == 0) && subSelectionData == null) { for (int key : points.keySet()) { PCAPoint point = points.get(key); if (seriesList.containsKey(point.getColor())) { // seriesList.get(divaDataset.getGeneColorArr()[point.getGeneIndex()]).add(point.getX(), point.getY()); } else { seriesList.get("unGrouped").add(point.getX(), point.getY()); } } } else if (zoom) { selectionSet.clear(); for (int i : selection) { selectionSet.add(i); } for (int x : subSelectionData) { PCAPoint point = points.get(x); if (selectionSet.contains(point.getGeneIndex())) { if (seriesList.containsKey(point.getColor())) { // seriesList.get(divaDataset.getGeneColorArr()[point.getGeneIndex()]).add(point.getX(), point.getY()); } else { seriesList.get("#000000").add(point.getX(), point.getY()); } } else { seriesList.get("unGrouped").add(point.getX(), point.getY()); } } } else if (subSelectionData != null) { selectionSet.clear(); for (int i : selection) { selectionSet.add(i); } // for (int key : subSelectionData) { // PCAPoint point = points.get(key); // if (selectionSet.contains(point.getGeneIndex())) { // if (seriesList.containsKey(divaDataset.getGeneColorArr()[point.getGeneIndex()])) { // seriesList.get(divaDataset.getGeneColorArr()[point.getGeneIndex()]).add(point.getX(), point.getY()); // // } else { // // seriesList.get("#000000").add(point.getX(), point.getY()); // } // 
// } else { // // seriesList.get("unGrouped").add(point.getX(), point.getY()); // } // // } } else //selection without zoom { selectionSet.clear(); for (int i : selection) { selectionSet.add(i); } // for (int key : points.keySet()) { // PCAPoint point = points.get(key); // // if (selectionSet.contains(point.getGeneIndex())) { // if (seriesList.containsKey(divaDataset.getGeneColorArr()[point.getGeneIndex()])) { // seriesList.get(divaDataset.getGeneColorArr()[point.getGeneIndex()]).add(point.getX(), point.getY()); // // } else { // // seriesList.get("#000000").add(point.getX(), point.getY()); // } // // } else { // // seriesList.get("unGrouped").add(point.getX(), point.getY()); // } // // } } for (XYSeries ser : seriesList.values()) { dataset.addSeries(ser); } return dataset; }
From source file:com.alibaba.wasp.master.handler.TableEventHandler.java
public boolean reOpenAllEntityGroups(List<EntityGroupInfo> entityGroups) throws IOException { boolean done = false; LOG.info("Bucketing entityGroups by entityGroup server..."); TreeMap<ServerName, List<EntityGroupInfo>> serverToEntityGroups = Maps.newTreeMap(); NavigableMap<EntityGroupInfo, ServerName> egiHserverMapping = FMetaScanner .allTableEntityGroups(server.getConfiguration(), tableName, false); List<EntityGroupInfo> reEntityGroups = new ArrayList<EntityGroupInfo>(); for (EntityGroupInfo egi : entityGroups) { ServerName egLocation = egiHserverMapping.get(egi); // Skip the offlined split parent EntityGroup if (null == egLocation) { LOG.info("Skip " + egi); continue; }// w w w . j av a 2s.c om if (!serverToEntityGroups.containsKey(egLocation)) { LinkedList<EntityGroupInfo> egiList = Lists.newLinkedList(); serverToEntityGroups.put(egLocation, egiList); } reEntityGroups.add(egi); serverToEntityGroups.get(egLocation).add(egi); } LOG.info("Reopening " + reEntityGroups.size() + " entityGroups on " + serverToEntityGroups.size() + " fservers."); this.fMasterServices.getAssignmentManager().setEntityGroupsToReopen(reEntityGroups); BulkReOpen bulkReopen = new BulkReOpen(this.server, serverToEntityGroups, this.fMasterServices.getAssignmentManager()); while (true) { try { if (bulkReopen.bulkReOpen()) { done = true; break; } else { LOG.warn("Timeout before reopening all entityGroups"); } } catch (InterruptedException e) { LOG.warn("Reopen was interrupted"); // Preserve the interrupt. Thread.currentThread().interrupt(); break; } } return done; }
From source file:edu.nyu.vida.data_polygamy.standard_techniques.CorrelationTechniquesReducer.java
/**
 * Computes three similarity scores between the two selected time series:
 * [0] Pearson's correlation, [1] mutual information (both over the time
 * steps common to both series), and [2] a DTW score over the full,
 * normalized series.
 *
 * Returns null when fewer than two aligned time steps exist.
 * NOTE(review): the input maps are cleared as a side effect in every case,
 * including the null-return path -- the caller's data is destroyed.
 */
private double[] computeCorrelationTechniques(ArrayList<TreeMap<Integer, Float>>[] timeSeries, int index1,
        int index2, boolean temporalPermutation) {
    double[] values = { 0.0, 0.0, 0.0 };

    TreeMap<Integer, Float> map1 = timeSeries[index1].get(dataset1Key);
    TreeMap<Integer, Float> map2 = timeSeries[index2].get(dataset2Key);

    // Aligned values: only the time steps present in BOTH series.
    ArrayList<Double> array1 = new ArrayList<Double>();
    ArrayList<Double> array2 = new ArrayList<Double>();
    for (int temp : map1.keySet()) {
        if (map2.containsKey(temp)) {
            array1.add((double) map1.get(temp));
            array2.add((double) map2.get(temp));
        }
    }

    // Full (unaligned) copies of each series, in key order, used for DTW.
    double[] completeTempArray1 = new double[map1.keySet().size()];
    int index = 0;
    for (int temp : map1.keySet()) {
        completeTempArray1[index] = map1.get(temp);
        index++;
    }
    double[] completeTempArray2 = new double[map2.keySet().size()];
    index = 0;
    for (int temp : map2.keySet()) {
        completeTempArray2[index] = map2.get(temp);
        index++;
    }

    // Free the (large) input maps as soon as their values are copied out.
    map1.clear();
    map2.clear();

    // At least two aligned points are required for a correlation.
    if (array1.size() < 2)
        return null;

    // Pearson's Correlation
    double[] tempDoubleArray1 = new double[array1.size()];
    double[] tempDoubleArray2 = new double[array2.size()];
    // Under temporal permutation each series is rotated by an independent
    // random offset; otherwise the arrays are copied as-is (offset 0).
    int indexD1 = (temporalPermutation) ? new Random().nextInt(array1.size()) : 0;
    int indexD2 = (temporalPermutation) ? new Random().nextInt(array2.size()) : 0;
    for (int i = 0; i < array1.size(); i++) {
        int j = (indexD1 + i) % array1.size();
        int k = (indexD2 + i) % array2.size();
        tempDoubleArray1[i] = array1.get(j);
        tempDoubleArray2[i] = array2.get(k);
    }
    array1 = null;
    array2 = null;

    PearsonsCorrelation pearsonsCorr = new PearsonsCorrelation();
    values[0] = pearsonsCorr.correlation(tempDoubleArray1, tempDoubleArray2);

    // Mutual Information
    try {
        values[1] = getMIScore(tempDoubleArray1, tempDoubleArray2);
    } catch (Exception e) {
        // NOTE(review): aborts the entire JVM on failure -- presumably
        // intentional in this Hadoop reducer context; TODO confirm.
        e.printStackTrace();
        System.exit(-1);
    }
    tempDoubleArray1 = null;
    tempDoubleArray2 = null;

    // DTW -- operates on the full series, rotated when permuting.
    double[] completeTempDoubleArray1 = new double[completeTempArray1.length];
    double[] completeTempDoubleArray2 = new double[completeTempArray2.length];
    if (temporalPermutation) {
        indexD1 = new Random().nextInt(completeTempArray1.length);
        for (int i = 0; i < completeTempArray1.length; i++) {
            int j = (indexD1 + i) % completeTempArray1.length;
            completeTempDoubleArray1[i] = completeTempArray1[j];
        }
        indexD2 = new Random().nextInt(completeTempArray2.length);
        for (int i = 0; i < completeTempArray2.length; i++) {
            int j = (indexD2 + i) % completeTempArray2.length;
            completeTempDoubleArray2[i] = completeTempArray2[j];
        }
    } else {
        System.arraycopy(completeTempArray1, 0, completeTempDoubleArray1, 0, completeTempArray1.length);
        System.arraycopy(completeTempArray2, 0, completeTempDoubleArray2, 0, completeTempArray2.length);
    }
    completeTempArray1 = null;
    completeTempArray2 = null;

    // Both series are normalized before the DTW distance is computed.
    completeTempDoubleArray1 = normalize(completeTempDoubleArray1);
    completeTempDoubleArray2 = normalize(completeTempDoubleArray2);
    values[2] = getDTWScore(completeTempDoubleArray1, completeTempDoubleArray2);

    return values;
}
From source file:web.diva.server.model.PCAGenerator.java
/** * * * @return dataset.//from w ww . j a va 2s . c om */ private XYDataset createDataset(TreeMap<Integer, PCAPoint> points, int[] subSelectionData, int[] selection, boolean zoom, DivaDataset divaDataset) { final XYSeriesCollection dataset = new XYSeriesCollection(); seriesList = new TreeMap<String, XYSeries>(); seriesList.put("#000000", new XYSeries("#000000")); seriesList.put("unGrouped", new XYSeries("LIGHT_GRAY")); for (Group g : divaDataset.getRowGroups()) { if (g.isActive() && !g.getName().equalsIgnoreCase("all")) { seriesList.put(g.getHashColor(), new XYSeries(g.getHashColor())); } } if (!zoom && (selection == null || selection.length == 0) && subSelectionData == null) { for (int key : points.keySet()) { PCAPoint point = points.get(key); if (seriesList.containsKey(point.getColor())) { seriesList.get(divaDataset.getGeneColorArr()[point.getGeneIndex()]).add(point.getX(), point.getY()); } else { seriesList.get("unGrouped").add(point.getX(), point.getY()); } } } else if (zoom) { selectionSet.clear(); for (int i : selection) { selectionSet.add(i); } for (int x : subSelectionData) { PCAPoint point = points.get(x); if (selectionSet.contains(point.getGeneIndex())) { if (seriesList.containsKey(point.getColor())) { seriesList.get(divaDataset.getGeneColorArr()[point.getGeneIndex()]).add(point.getX(), point.getY()); } else { seriesList.get("#000000").add(point.getX(), point.getY()); } } else { seriesList.get("unGrouped").add(point.getX(), point.getY()); } } } else if (subSelectionData != null) { selectionSet.clear(); for (int i : selection) { selectionSet.add(i); } for (int key : subSelectionData) { PCAPoint point = points.get(key); if (selectionSet.contains(point.getGeneIndex())) { if (seriesList.containsKey(divaDataset.getGeneColorArr()[point.getGeneIndex()])) { seriesList.get(divaDataset.getGeneColorArr()[point.getGeneIndex()]).add(point.getX(), point.getY()); } else { seriesList.get("#000000").add(point.getX(), point.getY()); } } else { 
seriesList.get("unGrouped").add(point.getX(), point.getY()); } } } else //selection without zoom { selectionSet.clear(); for (int i : selection) { selectionSet.add(i); } for (int key : points.keySet()) { PCAPoint point = points.get(key); if (selectionSet.contains(point.getGeneIndex())) { if (seriesList.containsKey(divaDataset.getGeneColorArr()[point.getGeneIndex()])) { seriesList.get(divaDataset.getGeneColorArr()[point.getGeneIndex()]).add(point.getX(), point.getY()); } else { seriesList.get("#000000").add(point.getX(), point.getY()); } } else { seriesList.get("unGrouped").add(point.getX(), point.getY()); } } } for (XYSeries ser : seriesList.values()) { dataset.addSeries(ser); } return dataset; }
From source file:com.eucalyptus.tests.awssdk.S3ListMpuTests.java
/**
 * Verifies paged listing of in-progress multipart uploads: initiates several
 * MPUs across several keys, then lists them page by page with a max-uploads
 * limit smaller than the total, checking page sizes, key/upload-id order,
 * and truncation flags.
 */
@Test
public void maxKeys() throws Exception {
    testInfo(this.getClass().getSimpleName() + " - maxKeys");
    try {
        int numKeys = 3 + random.nextInt(3); // 3-5 keys
        int numUploads = 3 + random.nextInt(3); // 3-5 uploads
        // One less than the per-key count, forcing the listing to paginate.
        int maxUploads = numUploads - 1;
        int totalUploads = numKeys * numUploads;
        // Number of pages required to walk every upload.
        int counter = (totalUploads % maxUploads == 0) ? (totalUploads / maxUploads)
                : ((totalUploads / maxUploads) + 1);
        print("Number of keys: " + numKeys);
        print("Number of uploads per key: " + numUploads);
        print("Number of mpus per listing: " + maxUploads);

        // Generate some mpus
        TreeMap<String, List<String>> keyUploadIdMap = initiateMpusForMultipleKeys(s3ClientA, accountA, numKeys,
                numUploads, new String());

        // Walk keys and upload ids in the same (sorted) order the listing
        // is expected to return them.
        Iterator<String> keyIterator = keyUploadIdMap.keySet().iterator();
        String key = keyIterator.next();
        Iterator<String> uploadIdIterator = keyUploadIdMap.get(key).iterator();
        String uploadId = null;
        String nextKeyMarker = null;
        String nextUploadIdMarker = null;
        MultipartUploadListing listing = null;

        for (int i = 1; i <= counter; i++) {
            if (i != counter) {
                // Intermediate page: expect exactly maxUploads entries,
                // listing truncated.
                listing = listMpu(s3ClientA, accountA, bucketName, nextKeyMarker, nextUploadIdMarker, null, null,
                        maxUploads, true);
                assertTrue(
                        "Expected " + maxUploads + " mpu listings, but got " + listing.getMultipartUploads().size(),
                        maxUploads == listing.getMultipartUploads().size());
            } else {
                // Final page: whatever remains, listing not truncated.
                // NOTE(review): totalUploads is decremented in the inner loop
                // below, so this asserts the REMAINING count, not the
                // original total.
                listing = listMpu(s3ClientA, accountA, bucketName, nextKeyMarker, nextUploadIdMarker, null, null,
                        maxUploads, false);
                assertTrue(
                        "Expected " + totalUploads + " mpu listings, but got "
                                + listing.getMultipartUploads().size(),
                        totalUploads == listing.getMultipartUploads().size());
            }

            for (MultipartUpload mpu : listing.getMultipartUploads()) {
                // Advance to the next key once its upload ids are exhausted.
                if (!uploadIdIterator.hasNext()) {
                    key = keyIterator.next();
                    uploadIdIterator = keyUploadIdMap.get(key).iterator();
                }
                uploadId = uploadIdIterator.next();
                assertTrue("Expected key to be " + key + ", but got " + mpu.getKey(), mpu.getKey().equals(key));
                assertTrue("Expected upload ID to be " + uploadId + ", but got " + mpu.getUploadId(),
                        mpu.getUploadId().equals(uploadId));
                verifyCommonElements(mpu);
                totalUploads--;
            }
            // Resume the next page after the last (key, uploadId) seen.
            nextKeyMarker = key;
            nextUploadIdMarker = uploadId;
        }
    } catch (AmazonServiceException ase) {
        printException(ase);
        assertThat(false, "Failed to run maxKeys");
    }
}
From source file:com.sfs.whichdoctor.analysis.RevenueAnalysisDAOImpl.java
/** * Stream analysis./* w ww . j av a 2s.co m*/ * * @param search the search * * @return the revenue analysis bean * * @throws WhichDoctorAnalysisDaoException the which doctor analysis dao * exception */ @SuppressWarnings("unchecked") public final RevenueAnalysisBean streamAnalysis(final RevenueAnalysisBean search) throws WhichDoctorAnalysisDaoException { /* Zero out values in revenueanalysis bean */ search.setValue(0); search.setNetValue(0); /* Set ordering system of returned results */ String sqlORDER = " ORDER BY RevenueType, receipt.ReceiptNo"; final StringBuffer sqlWHERE = new StringBuffer(); Collection<Object> parameters = new ArrayList<Object>(); if (search.getSQLWhereStatement() != null) { if (search.getSQLWhereStatement().compareTo("") != 0) { sqlWHERE.append(" AND "); sqlWHERE.append(search.getSQLWhereStatement()); } } if (search.getSearchParameters() != null) { parameters = search.getSearchParameters(); } // BUILD SQL Statement final StringBuffer searchSQL = new StringBuffer(); searchSQL.append(this.getSQL().getValue("revenue")); searchSQL.append(sqlWHERE.toString()); searchSQL.append(" GROUP BY payment.PaymentId, RevenueType "); searchSQL.append(sqlORDER); dataLogger.info("SQL Query: " + searchSQL.toString()); Collection<RevenueBean> results = new ArrayList<RevenueBean>(); try { results = this.getJdbcTemplateReader().query(searchSQL.toString(), parameters.toArray(), new RowMapper() { public Object mapRow(final ResultSet rs, final int rowNum) throws SQLException { return loadStreamRevenue(rs); } }); } catch (IncorrectResultSizeDataAccessException ie) { // No results found for this search dataLogger.debug("No results found for search: " + ie.getMessage()); } TreeMap<Object, ArrayList<RevenueBean>> revenueTypeMap = new TreeMap<Object, ArrayList<RevenueBean>>(); for (RevenueBean revenue : results) { if (dataLogger.isDebugEnabled()) { dataLogger.debug("Net value: " + revenue.getNetValue()); dataLogger.debug("Value: " + revenue.getValue()); } 
ArrayList<RevenueBean> revenueList = new ArrayList<RevenueBean>(); if (revenueTypeMap.containsKey(revenue.getRevenueType())) { revenueList = revenueTypeMap.get(revenue.getRevenueType()); } revenueList.add(revenue); revenueTypeMap.put(revenue.getRevenueType(), revenueList); } final RevenueAnalysisBean summary = consolidateSummary(revenueTypeMap); search.setValue(summary.getValue()); search.setNetValue(summary.getNetValue()); search.setGSTValues(summary.getGSTValues()); search.setRevenue(summary.getRevenue()); return search; }
From source file:com.jsonstore.database.DatabaseSchema.java
/**
 * Compares this schema against a map of search-field name to mapped-type
 * string. The maps are considered equal when the sizes match and every
 * entry resolves to a field whose mapped type equals the supplied value.
 *
 * NOTE: this overloads (not overrides) Object.equals.
 */
public boolean equals(TreeMap<String, String> schema_compare) {
    // Quick size check against the combined regular + internal nodes.
    int expectedSize = this.nodes.size() + this.internalNodes.size();
    if (schema_compare.size() != expectedSize) {
        return false;
    }

    for (String key : schema_compare.keySet()) {
        String safeKey = JSONStoreUtil.getDatabaseSafeSearchFieldName(key);

        // Resolve the field type: regular nodes first (falling back to the
        // safe-node entry), otherwise the internal nodes.
        SearchFieldType fieldType;
        if (this.safeNodes.containsKey(safeKey)) {
            fieldType = this.nodes.get(safeKey);
            if (fieldType == null) {
                fieldType = this.safeNodes.get(safeKey);
            }
        } else {
            fieldType = this.internalNodes.get(safeKey);
        }

        // Unknown field, or mapped type mismatch: not equal.
        if (fieldType == null || !fieldType.getMappedType().equals(schema_compare.get(key))) {
            return false;
        }
    }
    return true;
}
From source file:com.sfs.whichdoctor.analysis.GroupAnalysisDAOImpl.java
/**
 * Builds the order map for the supplied reference objects.
 *
 * @param referenceObjects map of reference GUID to reference object
 * @param groupClass the group class used to derive each order key
 * @return tree map of order key to reference GUID
 * @throws NullPointerException if either argument is null
 */
private TreeMap<String, Integer> buildOrder(final TreeMap<Integer, Object> referenceObjects,
        final String groupClass) {
    if (referenceObjects == null) {
        throw new NullPointerException("The reference objects map cannot be null");
    }
    if (groupClass == null) {
        throw new NullPointerException("The group class string cannot be null");
    }
    dataLogger.debug("Building order for " + groupClass + ", with a size of: " + referenceObjects.size());

    TreeMap<String, Integer> orderMap = new TreeMap<String, Integer>();
    // IMPROVED: iterate entries directly rather than keySet() + get(),
    // halving the tree lookups per element. (Fully-qualified Entry type so
    // no import change is required.)
    // NOTE(review): duplicate order keys silently overwrite earlier GUIDs --
    // same as the original behaviour; confirm keys are unique per group.
    for (java.util.Map.Entry<Integer, Object> entry : referenceObjects.entrySet()) {
        String orderKey = GroupAnalysisBean.getOrderKey(entry.getValue(), groupClass);
        orderMap.put(orderKey, entry.getKey());
    }
    return orderMap;
}