List of usage examples for java.util.TreeMap.entrySet()

Method signature: public Set<Map.Entry<K, V>> entrySet()

entrySet() returns a Set view of the mappings contained in the map; its iterator returns the entries in ascending key order, and changes to the view (or to an entry via setValue) write through to the map.
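Before the project examples below, here is a minimal, self-contained sketch of the call this page indexes. The class name and sample data are illustrative only and are not taken from any of the listed projects.

import java.util.Map;
import java.util.TreeMap;

public class TreeMapEntrySetExample {
    public static void main(String[] args) {
        TreeMap<String, Integer> wordCounts = new TreeMap<String, Integer>();
        wordCounts.put("banana", 2);
        wordCounts.put("apple", 5);
        wordCounts.put("cherry", 1);

        // entrySet() is a view: iteration follows the map's key order (here, alphabetical).
        for (Map.Entry<String, Integer> entry : wordCounts.entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }

        // setValue() on an entry writes through to the backing map.
        for (Map.Entry<String, Integer> entry : wordCounts.entrySet()) {
            entry.setValue(entry.getValue() * 10);
        }
        System.out.println(wordCounts); // {apple=50, banana=20, cherry=10}
    }
}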
From source file:org.springframework.security.oauth.consumer.CoreOAuthConsumerSupport.java
/**
 * Get the signature base string for the specified parameters. It is presumed the parameters are NOT OAuth-encoded.
 *
 * @param oauthParams The parameters (NOT oauth-encoded).
 * @param requestURL  The request URL.
 * @param httpMethod  The http method.
 * @return The signature base string.
 */
protected String getSignatureBaseString(Map<String, Set<CharSequence>> oauthParams, URL requestURL,
        String httpMethod) {
    TreeMap<String, TreeSet<String>> sortedParameters = new TreeMap<String, TreeSet<String>>();

    for (Map.Entry<String, Set<CharSequence>> param : oauthParams.entrySet()) {
        //first encode all parameter names and values (spec section 9.1)
        String key = oauthEncode(param.getKey());

        //add the encoded parameters sorted according to the spec.
        TreeSet<String> sortedValues = sortedParameters.get(key);
        if (sortedValues == null) {
            sortedValues = new TreeSet<String>();
            sortedParameters.put(key, sortedValues);
        }
        for (CharSequence value : param.getValue()) {
            sortedValues.add(oauthEncode(value.toString()));
        }
    }

    //now concatenate them into a single query string according to the spec.
    StringBuilder queryString = new StringBuilder();
    Iterator<Map.Entry<String, TreeSet<String>>> sortedIt = sortedParameters.entrySet().iterator();
    while (sortedIt.hasNext()) {
        Map.Entry<String, TreeSet<String>> sortedParameter = sortedIt.next();
        for (String parameterValue : sortedParameter.getValue()) {
            if (parameterValue == null) {
                parameterValue = "";
            }
            queryString.append(sortedParameter.getKey()).append('=').append(parameterValue);
            if (sortedIt.hasNext()) {
                queryString.append('&');
            }
        }
    }

    StringBuilder url = new StringBuilder(requestURL.getProtocol().toLowerCase()).append("://")
            .append(requestURL.getHost().toLowerCase());
    if ((requestURL.getPort() >= 0) && (requestURL.getPort() != requestURL.getDefaultPort())) {
        url.append(":").append(requestURL.getPort());
    }
    url.append(requestURL.getPath());

    return new StringBuilder(httpMethod.toUpperCase()).append('&').append(oauthEncode(url.toString()))
            .append('&').append(oauthEncode(queryString.toString())).toString();
}
From source file:net.tsquery.DataEndpoint.java
@SuppressWarnings("unchecked") private JSONObject PlotToStandardJSON(Plot plot, long tsFrom, long tsTo, int topN) { final JSONObject plotObject = new JSONObject(); JSONArray seriesArray = new JSONArray(); final TreeMap<Double, JSONObject> weightMap = new TreeMap<>(Collections.reverseOrder()); for (DataPoints dataPoints : plot.getDataPoints()) { double weight = 0; JSONArray dataArray = new JSONArray(); StringBuilder nameBuilder = new StringBuilder(); nameBuilder.append(dataPoints.metricName()).append(": "); Map<String, String> tags = dataPoints.getTags(); for (String s : tags.keySet()) { nameBuilder.append(String.format("%s=%s, ", s, tags.get(s))); }// w w w . j a va2s . c o m nameBuilder.setLength(nameBuilder.length() - 2); for (DataPoint point : dataPoints) { long timestamp = point.timestamp(); if (timestamp < tsFrom || timestamp > tsTo) continue; double dpValue = getValue(point); JSONArray values = new JSONArray(); values.add(timestamp * 1000); values.add(dpValue); weight += ((dpValue) / 1000000.0); dataArray.add(values); } JSONObject series = new JSONObject(); series.put("name", nameBuilder.toString()); series.put("data", dataArray); while (weightMap.containsKey(weight)) weight -= 0.00000001; weightMap.put(weight, series); } int counter = 0; for (Map.Entry<Double, JSONObject> entry : weightMap.entrySet()) { seriesArray.add(entry.getValue()); ++counter; if ((topN > 0) && (counter >= topN)) break; } plotObject.put("plot", seriesArray); return plotObject; }
From source file:com.alibaba.rocketmq.tools.command.consumer.ConsumerSubCommand.java
@Override
public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) {
    DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);
    defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));

    try {
        defaultMQAdminExt.start();

        String group = commandLine.getOptionValue('g').trim();
        ConsumerConnection cc = defaultMQAdminExt.examineConsumerConnectionInfo(group);
        boolean jstack = commandLine.hasOption('s');

        if (!commandLine.hasOption('i')) {
            int i = 1;
            long now = System.currentTimeMillis();
            final TreeMap<String/* clientId */, ConsumerRunningInfo> criTable =
                    new TreeMap<String, ConsumerRunningInfo>();
            for (Connection conn : cc.getConnectionSet()) {
                try {
                    ConsumerRunningInfo consumerRunningInfo =
                            defaultMQAdminExt.getConsumerRunningInfo(group, conn.getClientId(), jstack);
                    if (consumerRunningInfo != null) {
                        criTable.put(conn.getClientId(), consumerRunningInfo);
                        String filePath = now + "/" + conn.getClientId();
                        MixAll.string2FileNotSafe(consumerRunningInfo.formatString(), filePath);
                        System.out.printf("%03d %-40s %-20s %s%n", //
                                i++, //
                                conn.getClientId(), //
                                MQVersion.getVersionDesc(conn.getVersion()), //
                                filePath);
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }

            if (!criTable.isEmpty()) {
                boolean subSame = ConsumerRunningInfo.analyzeSubscription(criTable);
                boolean rebalanceOK = subSame && ConsumerRunningInfo.analyzeRebalance(criTable);

                if (subSame) {
                    // printf so that %n is expanded (println would print it literally)
                    System.out.printf("%n%nSame subscription in the same group of consumer%n");
                    System.out.printf("%n%nRebalance %s%n", rebalanceOK ? "OK" : "Failed");

                    Iterator<Entry<String, ConsumerRunningInfo>> it = criTable.entrySet().iterator();
                    while (it.hasNext()) {
                        Entry<String, ConsumerRunningInfo> next = it.next();
                        String result = ConsumerRunningInfo.analyzeProcessQueue(next.getKey(), next.getValue());
                        if (result.length() > 0) {
                            System.out.println(result);
                        }
                    }
                } else {
                    System.out.println("\n\nWARN: Different subscription in the same group of consumer!!!");
                }
            }
        } else {
            String clientId = commandLine.getOptionValue('i').trim();
            ConsumerRunningInfo consumerRunningInfo =
                    defaultMQAdminExt.getConsumerRunningInfo(group, clientId, jstack);
            if (consumerRunningInfo != null) {
                System.out.println(consumerRunningInfo.formatString());
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        defaultMQAdminExt.shutdown();
    }
}
From source file:com.alibaba.rocketmq.tools.command.consumer.ConsumerStatusSubCommand.java
@Override
public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) {
    DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);
    defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));

    try {
        defaultMQAdminExt.start();

        String group = commandLine.getOptionValue('g').trim();
        ConsumerConnection cc = defaultMQAdminExt.examineConsumerConnectionInfo(group);
        boolean jstack = commandLine.hasOption('s');

        if (!commandLine.hasOption('i')) {
            int i = 1;
            long now = System.currentTimeMillis();
            final TreeMap<String/* clientId */, ConsumerRunningInfo> criTable =
                    new TreeMap<String, ConsumerRunningInfo>();
            for (Connection conn : cc.getConnectionSet()) {
                try {
                    ConsumerRunningInfo consumerRunningInfo =
                            defaultMQAdminExt.getConsumerRunningInfo(group, conn.getClientId(), jstack);
                    if (consumerRunningInfo != null) {
                        criTable.put(conn.getClientId(), consumerRunningInfo);
                        String filePath = now + "/" + conn.getClientId();
                        MixAll.string2FileNotSafe(consumerRunningInfo.formatString(), filePath);
                        System.out.printf("%03d %-40s %-20s %s\n", //
                                i++, //
                                conn.getClientId(), //
                                MQVersion.getVersionDesc(conn.getVersion()), //
                                filePath);
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }

            if (!criTable.isEmpty()) {
                boolean subSame = ConsumerRunningInfo.analyzeSubscription(criTable);
                boolean rebalanceOK = subSame && ConsumerRunningInfo.analyzeRebalance(criTable);

                if (subSame) {
                    System.out.println("\n\nSame subscription in the same group of consumer");
                    System.out.printf("\n\nRebalance %s\n", rebalanceOK ? "OK" : "Failed");

                    Iterator<Entry<String, ConsumerRunningInfo>> it = criTable.entrySet().iterator();
                    while (it.hasNext()) {
                        Entry<String, ConsumerRunningInfo> next = it.next();
                        String result = ConsumerRunningInfo.analyzeProcessQueue(next.getKey(), next.getValue());
                        if (result.length() > 0) {
                            System.out.println(result);
                        }
                    }
                } else {
                    System.out.println("\n\nWARN: Different subscription in the same group of consumer!!!");
                }
            }
        } else {
            String clientId = commandLine.getOptionValue('i').trim();
            ConsumerRunningInfo consumerRunningInfo =
                    defaultMQAdminExt.getConsumerRunningInfo(group, clientId, jstack);
            if (consumerRunningInfo != null) {
                System.out.println(consumerRunningInfo.formatString());
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        defaultMQAdminExt.shutdown();
    }
}
From source file:org.springframework.security.oauth.consumer.client.CoreOAuthConsumerSupport.java
/**
 * Get the signature base string for the specified parameters. It is presumed the parameters are NOT OAuth-encoded.
 *
 * @param oauthParams The parameters (NOT oauth-encoded).
 * @param requestURL  The request URL.
 * @param httpMethod  The http method.
 * @return The signature base string.
 */
protected String getSignatureBaseString(Map<String, Set<CharSequence>> oauthParams, URL requestURL,
        String httpMethod) {
    TreeMap<String, TreeSet<String>> sortedParameters = new TreeMap<String, TreeSet<String>>();

    for (Map.Entry<String, Set<CharSequence>> param : oauthParams.entrySet()) {
        //first encode all parameter names and values (spec section 9.1)
        String key = oauthEncode(param.getKey());

        //add the encoded parameters sorted according to the spec.
        TreeSet<String> sortedValues = sortedParameters.get(key);
        if (sortedValues == null) {
            sortedValues = new TreeSet<String>();
            sortedParameters.put(key, sortedValues);
        }
        for (CharSequence value : param.getValue()) {
            sortedValues.add(oauthEncode(value.toString()));
        }
    }

    //now concatenate them into a single query string according to the spec.
    StringBuilder queryString = new StringBuilder();
    Iterator<Map.Entry<String, TreeSet<String>>> sortedIt = sortedParameters.entrySet().iterator();
    while (sortedIt.hasNext()) {
        Map.Entry<String, TreeSet<String>> sortedParameter = sortedIt.next();
        for (Iterator<String> sortedParametersIterator = sortedParameter.getValue().iterator();
                sortedParametersIterator.hasNext();) {
            String parameterValue = sortedParametersIterator.next();
            if (parameterValue == null) {
                parameterValue = "";
            }
            queryString.append(sortedParameter.getKey()).append('=').append(parameterValue);
            if (sortedIt.hasNext() || sortedParametersIterator.hasNext()) {
                queryString.append('&');
            }
        }
    }

    StringBuilder url = new StringBuilder(requestURL.getProtocol().toLowerCase()).append("://")
            .append(requestURL.getHost().toLowerCase());
    if ((requestURL.getPort() >= 0) && (requestURL.getPort() != requestURL.getDefaultPort())) {
        url.append(":").append(requestURL.getPort());
    }
    url.append(requestURL.getPath());

    return new StringBuilder(httpMethod.toUpperCase()).append('&').append(oauthEncode(url.toString()))
            .append('&').append(oauthEncode(queryString.toString())).toString();
}
From source file:org.apache.whirr.service.accumulo.AccumuloClusterTest.java
@Test
@Override
public void testBootstrapAndConfigure() throws Exception {
    // override the default behaviour to create some scripts which we can
    // run on docker containers
    ClusterSpec cookbookWithDefaultRecipe = newClusterSpecForProperties(ImmutableMap.of(
            "whirr.instance-templates",
            "1 hadoop-namenode+hadoop-jobtracker+zookeeper+accumulo-master+accumulo-monitor+accumulo-gc, 1 hadoop-datanode+hadoop-tasktracker+accumulo-tserver+accumulo-tracer",
            "whirr.hadoop.version", "1.2.1",
            "whirr.zookeeper.tarball.url",
            "http://archive.apache.org/dist/zookeeper/zookeeper-3.4.5/zookeeper-3.4.5.tar.gz",
            "hadoop-hdfs.dfs.durable.sync", "true"));

    DryRun dryRun = launchWithClusterSpec(cookbookWithDefaultRecipe);

    File tgtDir = new File("target");
    TreeMap<String, String> nodePropMap = new TreeMap<String, String>();
    Set<String> nodes = new TreeSet<String>();
    int n = -1;
    int i = 0;
    for (Entry<NodeMetadata, Statement> exe : dryRun.getExecutions().entries()) {
        NodeMetadata nodeMeta = exe.getKey();
        System.err.println(ToStringBuilder.reflectionToString(exe.getKey()));
        if (!nodes.contains(nodeMeta.getName())) {
            i = 0;
            n++;
            nodes.add(nodeMeta.getName());
        }
        String filename = String.format("%s-%s-%d.sh", nodeMeta.getPrivateAddresses().iterator().next(),
                nodeMeta.getPublicAddresses().iterator().next(), i);
        PrintStream ps = new PrintStream(new File(tgtDir, filename));
        ps.println(exe.getValue().render(OsFamily.UNIX));
        ps.close();
        // System.err.println("\t" + exe.getValue().render(OsFamily.UNIX));
        i++;
        nodePropMap.put(String.format("%d\tPUBLIC_IP", n), nodeMeta.getPublicAddresses().iterator().next());
        nodePropMap.put(String.format("%d\tPRIVATE_IP", n), nodeMeta.getPrivateAddresses().iterator().next());
        nodePropMap.put(String.format("%d\tHOSTNAME", n), nodeMeta.getName());
        nodePropMap.put(String.format("%d\tSCRIPTS", n), String.valueOf(i));
    }
    nodePropMap.put("NODES", String.valueOf(nodes.size()));

    // write jclouds assigned properties to file so we can update with the
    // actual ones assigned to each docker container
    File propFile = new File(tgtDir, "cluster.properties");
    PrintStream ps = new PrintStream(propFile);
    for (Entry<String, String> entry : nodePropMap.entrySet()) {
        ps.println(entry.getKey() + "\t" + entry.getValue());
    }
    ps.close();
}
From source file:org.mozilla.gecko.sync.repositories.android.AndroidBrowserBookmarksRepositorySession.java
/**
 * Retrieve the child array for a record, repositioning and updating the database as necessary.
 *
 * @param folderID
 *        The database ID of the folder.
 * @param persist
 *        True if generated positions should be written to the database. The modified
 *        time of the parent folder is only bumped if this is true.
 * @return
 *        An array of GUIDs.
 * @throws NullCursorException
 */
@SuppressWarnings("unchecked")
private JSONArray getChildrenArray(long folderID, boolean persist) throws NullCursorException {
    trace("Calling getChildren for androidID " + folderID);
    JSONArray childArray = new JSONArray();
    Cursor children = dataAccessor.getChildren(folderID);
    try {
        if (!children.moveToFirst()) {
            trace("No children: empty cursor.");
            return childArray;
        }
        final int positionIndex = children.getColumnIndex(BrowserContract.Bookmarks.POSITION);
        final int count = children.getCount();
        Logger.debug(LOG_TAG, "Expecting " + count + " children.");

        // Sorted by requested position.
        TreeMap<Long, ArrayList<String>> guids = new TreeMap<Long, ArrayList<String>>();

        while (!children.isAfterLast()) {
            final String childGuid = getGUID(children);
            final long childPosition = getPosition(children, positionIndex);
            trace("  Child GUID: " + childGuid);
            trace("  Child position: " + childPosition);
            Utils.addToIndexBucketMap(guids, Math.abs(childPosition), childGuid);
            children.moveToNext();
        }

        // This will suffice for taking a jumble of records and indices and
        // producing a sorted sequence that preserves some kind of order --
        // from the abs of the position, falling back on cursor order (that
        // is, creation time and ID).
        // Note that this code is not intended to merge values from two sources!
        boolean changed = false;
        int i = 0;
        for (Entry<Long, ArrayList<String>> entry : guids.entrySet()) {
            long pos = entry.getKey().longValue();
            int atPos = entry.getValue().size();

            // If every element has a different index, and the indices are
            // in strict natural order, then changed will be false.
            if (atPos > 1 || pos != i) {
                changed = true;
            }
            // Advance the expected index for the next bucket.
            ++i;

            for (String guid : entry.getValue()) {
                if (!forbiddenGUID(guid)) {
                    childArray.add(guid);
                }
            }
        }

        if (Logger.logVerbose(LOG_TAG)) {
            // Don't JSON-encode unless we're logging.
            Logger.trace(LOG_TAG, "Output child array: " + childArray.toJSONString());
        }

        if (!changed) {
            Logger.debug(LOG_TAG, "Nothing moved! Database reflects child array.");
            return childArray;
        }

        if (!persist) {
            return childArray;
        }

        Logger.debug(LOG_TAG, "Generating child array required moving records. Updating DB.");
        final long time = now();
        if (0 < dataAccessor.updatePositions(childArray)) {
            Logger.debug(LOG_TAG, "Bumping parent time to " + time + ".");
            dataAccessor.bumpModified(folderID, time);
        }
    } finally {
        children.close();
    }
    return childArray;
}
From source file:org.opencms.workplace.tools.content.CmsElementRename.java
/**
 * Builds the html for the template select box.<p>
 *
 * @param attributes optional attributes for the <select> tag
 * @return the html for the template select box
 */
public String buildSelectTemplates(String attributes) {
    List options = new ArrayList();
    List values = new ArrayList();
    TreeMap templates = null;
    int selectedIndex = -1;
    try {
        // get all available templates
        templates = CmsNewResourceXmlPage.getTemplates(getCms(), null);
    } catch (CmsException e) {
        // can usually be ignored
        if (LOG.isInfoEnabled()) {
            LOG.info(e);
        }
    }
    if (templates == null) {
        // no templates found, return empty String
        return "";
    } else {
        // templates found, create option and value lists
        CmsMessages messages = Messages.get().getBundle(getLocale());
        options.add(messages.key(Messages.GUI_PLEASE_SELECT_0));
        values.add("");
        options.add(messages.key(Messages.GUI_BUTTON_ALL_0));
        values.add(ALL);
        if (ALL.equals(getParamTemplate())) {
            selectedIndex = 1;
        }
        Iterator i = templates.entrySet().iterator();
        int counter = 2;
        while (i.hasNext()) {
            Map.Entry entry = (Map.Entry) i.next();
            String key = (String) entry.getKey();
            String path = (String) entry.getValue();
            if (path.equals(getParamTemplate())) {
                selectedIndex = counter;
            }
            options.add(key);
            values.add(path);
            counter++;
        }
    }
    return buildSelect(attributes, options, values, selectedIndex, false);
}
From source file:org.commoncrawl.service.listcrawler.CrawlHistoryManager.java
private static void syncAndValidateItems(TreeMap<URLFP, ProxyCrawlHistoryItem> items,
        CrawlHistoryManager logManager) throws IOException {
    // ok now sync the list
    final TreeMap<URLFP, ProxyCrawlHistoryItem> syncedItemList = new TreeMap<URLFP, ProxyCrawlHistoryItem>();

    try {
        logManager.syncList(0L, Sets.newTreeSet(items.keySet()), new ItemUpdater() {

            @Override
            public void updateItemState(URLFP fingerprint, ProxyCrawlHistoryItem item) throws IOException {
                try {
                    syncedItemList.put((URLFP) fingerprint.clone(), (ProxyCrawlHistoryItem) item.clone());
                } catch (CloneNotSupportedException e) {
                    e.printStackTrace();
                }
            }
        });
    } catch (IOException e) {
        LOG.error(CCStringUtils.stringifyException(e));
        Assert.assertTrue(false);
    }

    // assert that the key set is equal
    Assert.assertEquals(items.keySet(), syncedItemList.keySet());

    // ok now validate that the values are equal
    for (Map.Entry<URLFP, ProxyCrawlHistoryItem> item : items.entrySet()) {
        ProxyCrawlHistoryItem other = syncedItemList.get(item.getKey());
        Assert.assertEquals(item.getValue(), other);
    }
}
From source file:org.apache.hadoop.mapred.HFSPScheduler.java
/** Create a first estimation of the job duration based on available data */
private JobDurationInfo createInitialJobDurationInfo(JobInProgress jip, TaskType type) {
    TreeMap<JobDurationInfo, JobInProgress> sizeBasedJobQueue = this.getSizeBasedJobQueue(type);

    long singleTaskDuration = 0;
    if (!sizeBasedJobQueue.isEmpty()) {
        long taskDuration = 0;
        int trained = 0;
        for (Entry<JobDurationInfo, JobInProgress> entry : sizeBasedJobQueue.entrySet()) {
            if (this.isTrained(entry.getValue(), type)) {
                JobDurationInfo jdi = entry.getKey();
                int numVirtualTasks = jdi.getTasks().size();
                if (entry.getKey().getPhaseTotalDuration() > 0 && numVirtualTasks > 0) {
                    trained += 1;
                    taskDuration += Math.ceil(entry.getKey().getPhaseTotalDuration() / numVirtualTasks);
                }
            }
        }
        if (trained > 0) {
            singleTaskDuration = (long) Math.ceil(taskDuration / trained);
        }
    }

    if (singleTaskDuration == 0) {
        singleTaskDuration = (type == TaskType.MAP) ? initialMapTaskDuration : initialReduceTaskDuration;
    }

    float durationModifier = (type == TaskType.MAP) ? this.durationModifierMap : this.durationModifierReduce;
    LOG.debug(jip.getJobID() + ":" + type + " singleTaskDuration: " + singleTaskDuration
            + " durationModifier: " + durationModifier + " => singleTaskDuration: "
            + (long) (singleTaskDuration * durationModifier));
    singleTaskDuration = (long) (singleTaskDuration * durationModifier);

    return new UniformJobDurationInfo(jip, singleTaskDuration, type);
}