List of usage examples for java.util.TreeMap.entrySet()
Method signature: Set<Map.Entry<K, V>> entrySet()
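Before the repository examples, here is a minimal, self-contained sketch of the pattern they all build on: iterating a TreeMap's entrySet(), which is backed by the map and yields entries in ascending key order. The class name and map contents are made up for illustration.

import java.util.Map;
import java.util.TreeMap;

public class EntrySetDemo {
    public static void main(String[] args) {
        TreeMap<String, Integer> scores = new TreeMap<>();
        scores.put("beta", 2);
        scores.put("alpha", 1);
        scores.put("gamma", 3);

        // entrySet() iterates in ascending key order: alpha=1, beta=2, gamma=3
        for (Map.Entry<String, Integer> entry : scores.entrySet()) {
            System.out.println(entry.getKey() + "=" + entry.getValue());
        }
    }
}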
From source file:decision_tree_learning.Matrix.java
// Counts occurrences of each non-missing value in a column, then scans entrySet() for the most frequent one.
double mostCommonValue(int col) {
    TreeMap<Double, Integer> tm = new TreeMap<Double, Integer>();
    for (int i = 0; i < rows(); i++) {
        double v = get(i, col);
        if (v != MISSING) {
            Integer count = tm.get(v);
            if (count == null)
                tm.put(v, new Integer(1));
            else
                tm.put(v, new Integer(count.intValue() + 1));
        }
    }
    int maxCount = 0;
    double val = MISSING;
    Iterator<Entry<Double, Integer>> it = tm.entrySet().iterator();
    while (it.hasNext()) {
        Entry<Double, Integer> e = it.next();
        if (e.getValue() > maxCount) {
            maxCount = e.getValue();
            val = e.getKey();
        }
    }
    return val;
}
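On Java 8+, the same count-then-scan idea can be written more compactly with Map.merge and a stream over entrySet(). This is only an alternative sketch, not part of the Matrix class above; the standalone signature and the missing-value sentinel parameter are assumptions made to keep it self-contained.

import java.util.Map;
import java.util.TreeMap;

// Hypothetical standalone variant of the frequency count above.
static double mostCommonValue(double[] column, double missing) {
    TreeMap<Double, Integer> counts = new TreeMap<>();
    for (double v : column) {
        if (v != missing) {
            counts.merge(v, 1, Integer::sum); // insert 1 on first sight, otherwise increment
        }
    }
    return counts.entrySet().stream()
            .max(Map.Entry.comparingByValue()) // entry with the highest count
            .map(Map.Entry::getKey)
            .orElse(missing);
}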
From source file:com.espertech.esper.rowregex.EventRowRegexNFAView.java
private void removeSkippedEndStates(TreeMap<Integer, Object> endStatesPerEndEvent, int skipPastRow) {
    // Walk the entrySet(): each value is either a list of unranked end states or a single end state.
    for (Map.Entry<Integer, Object> entry : endStatesPerEndEvent.entrySet()) {
        Object value = entry.getValue();
        if (value instanceof List) {
            List<RegexNFAStateEntry> endStatesUnranked = (List<RegexNFAStateEntry>) value;
            Iterator<RegexNFAStateEntry> it = endStatesUnranked.iterator();
            for (; it.hasNext();) {
                RegexNFAStateEntry endState = it.next();
                if (endState.getMatchBeginEventSeqNo() <= skipPastRow) {
                    it.remove();
                }
            }
        } else {
            RegexNFAStateEntry endState = (RegexNFAStateEntry) value;
            if (endState.getMatchBeginEventSeqNo() <= skipPastRow) {
                endStatesPerEndEvent.put(entry.getKey(), null);
            }
        }
    }
}
From source file:com.eucalyptus.tests.awssdk.S3ListMpuTests.java
@Test
public void prefix() throws Exception {
    testInfo(this.getClass().getSimpleName() + " - prefix");
    try {
        int numKeys = 3 + random.nextInt(3); // 3-5 keys
        int numUploads = 3 + random.nextInt(3); // 3-5 uploads
        print("Number of keys: " + numKeys);
        print("Number of uploads per key: " + numUploads);

        // Generate some mpus
        TreeMap<String, List<String>> keyUploadIdMap = initiateMpusForMultipleKeys(s3ClientA, accountA, numKeys,
                numUploads, new String());

        // Using each key as the prefix in the listing, verify the listing for that specific key
        for (Entry<String, List<String>> mapEntry : keyUploadIdMap.entrySet()) {
            MultipartUploadListing listing = listMpu(s3ClientA, accountA, bucketName, null, null,
                    mapEntry.getKey(), null, null, false);
            assertTrue(
                    "Expected " + numUploads + " mpu listings, but got " + listing.getMultipartUploads().size(),
                    numUploads == listing.getMultipartUploads().size());
            for (int i = 0; i < numUploads; i++) {
                MultipartUpload mpu = listing.getMultipartUploads().get(i);
                assertTrue("Expected key to be " + mapEntry.getKey() + ", but got " + mpu.getKey(),
                        mpu.getKey().equals(mapEntry.getKey()));
                assertTrue(
                        "Expected upload ID to be " + mapEntry.getValue().get(i) + ", but got " + mpu.getUploadId(),
                        mpu.getUploadId().equals(mapEntry.getValue().get(i)));
                verifyCommonElements(mpu);
            }
        }

        // Verify the entire mpu listing
        MultipartUploadListing listing = listMpu(s3ClientA, accountA, bucketName, null, null, null, null, null,
                false);
        assertTrue(
                "Expected " + (numKeys * numUploads) + " mpu listings, but got "
                        + listing.getMultipartUploads().size(),
                (numKeys * numUploads) == listing.getMultipartUploads().size());
        Iterator<MultipartUpload> mpuIterator = listing.getMultipartUploads().iterator();
        for (Entry<String, List<String>> mapEntry : keyUploadIdMap.entrySet()) {
            for (String uploadId : mapEntry.getValue()) {
                MultipartUpload mpu = mpuIterator.next();
                assertTrue("Expected key to be " + mapEntry.getKey() + ", but got " + mpu.getKey(),
                        mpu.getKey().equals(mapEntry.getKey()));
                assertTrue("Expected upload ID to be " + uploadId + ", but got " + mpu.getUploadId(),
                        mpu.getUploadId().equals(uploadId));
                verifyCommonElements(mpu);
            }
        }
        assertTrue("Expected mpu iterator to be empty", !mpuIterator.hasNext());
    } catch (AmazonServiceException ase) {
        printException(ase);
        assertThat(false, "Failed to run prefix");
    }
}
From source file:org.sonatype.nexus.rest.AbstractResourceStoreContentPlexusResource.java
protected ContentListDescribeResponseResource describeResponse(Context context, Request req, Response res,
        Variant variant, ResourceStoreRequest request, StorageItem item, Throwable e) {
    ContentListDescribeResponseResource result = new ContentListDescribeResponseResource();

    result.getProcessedRepositoriesList().addAll(request.getProcessedRepositories());

    // applied mappings
    for (Map.Entry<String, List<String>> mappingEntry : request.getAppliedMappings().entrySet()) {
        result.addAppliedMapping(mappingEntry.getKey() + " repository applied " + mappingEntry.getValue());
    }

    if (item == null) {
        result.setResponseType("NOT_FOUND");
        if (e != null) {
            result.addNotFoundReasoning(buildNotFoundReasoning(null, e));
        }
        return result;
    }

    if (item instanceof StorageFileItem) {
        result.setResponseType("FILE");
    } else if (item instanceof StorageCollectionItem) {
        result.setResponseType("COLL");
    } else if (item instanceof StorageLinkItem) {
        result.setResponseType("LINK");
    } else {
        result.setResponseType(item.getClass().getName());
    }

    result.setResponseActualClass(item.getClass().getName());
    result.setResponsePath(item.getPath());

    if (!item.isVirtual()) {
        result.setResponseUid(item.getRepositoryItemUid().toString());
        result.setOriginatingRepositoryId(item.getRepositoryItemUid().getRepository().getId());
        result.setOriginatingRepositoryName(item.getRepositoryItemUid().getRepository().getName());
        result.setOriginatingRepositoryMainFacet(
                item.getRepositoryItemUid().getRepository().getRepositoryKind().getMainFacet().getName());
    } else {
        result.setResponseUid("virtual");
    }

    // properties
    result.addProperty("created=" + item.getCreated());
    result.addProperty("modified=" + item.getModified());
    result.addProperty("lastRequested=" + item.getLastRequested());
    result.addProperty("remoteChecked=" + item.getRemoteChecked());
    result.addProperty("remoteUrl=" + item.getRemoteUrl());
    result.addProperty("storedLocally=" + item.getStoredLocally());
    result.addProperty("isExpired=" + item.isExpired());
    result.addProperty("readable=" + item.isReadable());
    result.addProperty("writable=" + item.isWritable());
    result.addProperty("virtual=" + item.isVirtual());

    // attributes
    final TreeMap<String, String> sortedAttributes = Maps.newTreeMap();
    sortedAttributes.putAll(item.getRepositoryItemAttributes().asMap());
    for (Map.Entry<String, String> entry : sortedAttributes.entrySet()) {
        result.addAttribute(entry.toString());
    }

    // sources
    if (item instanceof StorageCompositeItem) {
        StorageCompositeItem composite = (StorageCompositeItem) item;
        for (StorageItem source : composite.getSources()) {
            if (!source.isVirtual()) {
                result.addSource(source.getRepositoryItemUid().toString());
            } else {
                result.addSource(source.getPath());
            }
        }
    }

    return result;
}
From source file:com.hp.mqm.atrf.core.configuration.FetchConfiguration.java
// Logs the loaded configuration, using a TreeMap copy so the properties come out sorted and with passwords removed.
public void logProperties() {
    // put in TreeMap for sorting
    TreeMap<String, String> props = new TreeMap<>(this.properties);
    props.remove(ALM_PASSWORD_PARAM);
    props.remove(OCTANE_PASSWORD_PARAM);
    if (Integer.toString(SYNC_BULK_SIZE_DEFAULT).equals(getSyncBulkSize())) {
        props.remove(SYNC_BULK_SIZE_PARAM);
    }
    if (Integer.toString(SYNC_SLEEP_BETWEEN_POSTS_DEFAULT).equals(getSyncSleepBetweenPosts())) {
        props.remove(SYNC_SLEEP_BETWEEN_POSTS_PARAM);
    }
    if (Integer.toString(ALM_RUN_FILTER_FETCH_LIMIT_DEFAULT).equals(getRunFilterFetchLimit())) {
        props.remove(ALM_RUN_FILTER_FETCH_LIMIT_PARAM);
    }
    logger.info("Loaded configuration : " + (props.entrySet().toString()));
}
From source file:com.eucalyptus.tests.awssdk.S3ListMpuTests.java
@Test
public void keyMarkerUploadIdMarker() throws Exception {
    testInfo(this.getClass().getSimpleName() + " - keyMarkerUploadIdMarker");
    try {
        int numKeys = 3 + random.nextInt(3); // 3-5 keys
        int numUploads = 3 + random.nextInt(3); // 3-5 uploads
        print("Number of keys: " + numKeys);
        print("Number of uploads per key: " + numUploads);

        // Generate some mpus
        TreeMap<String, List<String>> keyUploadIdMap = initiateMpusForMultipleKeys(s3ClientA, accountA, numKeys,
                numUploads, new String());

        // Starting with every key and upload ID in ascending order, list the mpus using the pair and verify the results.
        for (Map.Entry<String, List<String>> mapEntry : keyUploadIdMap.entrySet()) {
            // Compute what the sorted mpus should look like
            NavigableMap<String, List<String>> tailMap = keyUploadIdMap.tailMap(mapEntry.getKey(), false);
            for (int i = 0; i < numUploads; i++) {
                // Compute what the sorted upload IDs should look like for this key
                List<String> tailList = mapEntry.getValue().subList(i + 1, numUploads);

                // List mpus using the key marker and upload ID marker and verify
                MultipartUploadListing listing = listMpu(s3ClientA, accountA, bucketName, mapEntry.getKey(),
                        mapEntry.getValue().get(i), null, null, null, false);
                assertTrue(
                        "Expected " + ((tailMap.size() * numUploads) + (numUploads - i - 1))
                                + " mpu listings, but got " + listing.getMultipartUploads().size(),
                        ((tailMap.size() * numUploads) + (numUploads - i - 1)) == listing.getMultipartUploads()
                                .size());

                Iterator<MultipartUpload> mpuIterator = listing.getMultipartUploads().iterator();
                for (String uploadId : tailList) {
                    MultipartUpload mpu = mpuIterator.next();
                    assertTrue("Expected key to be " + mapEntry.getKey() + ", but got " + mpu.getKey(),
                            mpu.getKey().equals(mapEntry.getKey()));
                    assertTrue("Expected upload ID to be " + uploadId + ", but got " + mpu.getUploadId(),
                            mpu.getUploadId().equals(uploadId));
                    verifyCommonElements(mpu);
                }
                for (Entry<String, List<String>> tailMapEntry : tailMap.entrySet()) {
                    for (String uploadId : tailMapEntry.getValue()) {
                        MultipartUpload mpu = mpuIterator.next();
                        assertTrue("Expected key to be " + tailMapEntry.getKey() + ", but got " + mpu.getKey(),
                                mpu.getKey().equals(tailMapEntry.getKey()));
                        assertTrue("Expected upload ID to be " + uploadId + ", but got " + mpu.getUploadId(),
                                mpu.getUploadId().equals(uploadId));
                        verifyCommonElements(mpu);
                    }
                }
                assertTrue("Expected mpu iterator to be empty", !mpuIterator.hasNext());
            }
        }
    } catch (AmazonServiceException ase) {
        printException(ase);
        assertThat(false, "Failed to run keyMarkerUploadIdMarker");
    }
}
From source file:org.apache.hadoop.streaming.MapStreamJob.java
/**
 * Prints out the jobconf properties on stdout when verbose is specified.
 */
protected void listJobConfProperties() {
    msg("==== JobConf properties:");
    Iterator it = jobConf_.iterator();
    TreeMap sorted = new TreeMap();
    while (it.hasNext()) {
        Map.Entry en = (Map.Entry) it.next();
        sorted.put(en.getKey(), en.getValue());
    }
    it = sorted.entrySet().iterator();
    while (it.hasNext()) {
        Map.Entry en = (Map.Entry) it.next();
        msg(en.getKey() + "=" + en.getValue());
    }
    msg("====");
}
From source file:org.apache.hadoop.hive.common.ndv.hll.HyperLogLog.java
/**
 * Estimate bias from the lookup table.
 *
 * @param count
 *          - cardinality before bias correction
 * @return cardinality after bias correction
 */
private long estimateBias(long count) {
    double[] rawEstForP = HLLConstants.rawEstimateData[p - 4];

    // compute distance and store it in a sorted map
    TreeMap<Double, Integer> estIndexMap = new TreeMap<>();
    double distance = 0;
    for (int i = 0; i < rawEstForP.length; i++) {
        distance = Math.pow(count - rawEstForP[i], 2);
        estIndexMap.put(distance, i);
    }

    // take the top-k closest neighbors and compute the bias-corrected cardinality;
    // the TreeMap's entrySet() iterates in ascending distance order, so the first
    // k entries are the nearest neighbors
    long result = 0;
    double[] biasForP = HLLConstants.biasData[p - 4];
    double biasSum = 0;
    int kNeighbors = HLLConstants.K_NEAREST_NEIGHBOR;
    for (Map.Entry<Double, Integer> entry : estIndexMap.entrySet()) {
        biasSum += biasForP[entry.getValue()];
        kNeighbors--;
        if (kNeighbors <= 0) {
            break;
        }
    }

    // 0.5 added for rounding off
    result = (long) ((biasSum / HLLConstants.K_NEAREST_NEIGHBOR) + 0.5);
    return result;
}
From source file:com.amalto.workbench.utils.XSDAnnotationsStructure.java
public void removeSchematron(String pattern) {
    TreeMap<String, String> infos = getSchematrons();
    // remove the first entry whose value matches the given pattern
    for (Entry<String, String> entry : infos.entrySet()) {
        if (pattern.equals(entry.getValue())) {
            infos.remove(entry.getKey());
            break;
        }
    }
    setSchematrons(infos.values());
}
From source file:com.amalto.workbench.utils.XSDAnnotationsStructure.java
public void removeWorkflow(String pattern) {
    TreeMap<String, String> infos = getSchematrons();
    for (Entry<String, String> entry : infos.entrySet()) {
        if (pattern.equals(entry.getValue())) {
            infos.remove(entry.getKey());
            break;
        }
    }
    setSchematrons(infos.values());
}
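Both removeSchematron and removeWorkflow above mutate the map while iterating its entrySet() and stay safe only because they break immediately after the removal. When several matching entries may need to go, a common pattern is to remove through the entrySet() iterator instead, which writes through to the backing TreeMap without a ConcurrentModificationException. The helper below is only a sketch and is not part of XSDAnnotationsStructure.

import java.util.Iterator;
import java.util.Map;
import java.util.TreeMap;

// Hypothetical helper: drop every entry whose value equals the given pattern.
static void removeByValue(TreeMap<String, String> infos, String pattern) {
    Iterator<Map.Entry<String, String>> it = infos.entrySet().iterator();
    while (it.hasNext()) {
        if (pattern.equals(it.next().getValue())) {
            it.remove(); // removes the current entry from the underlying map
        }
    }
}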