List of usage examples for java.util.Map#toString()
public String toString()
From source file:org.hawkular.client.test.metrics.openshift.CollectionRateDetailTest.java
private void getData(String metricID, String testID, long start, long end, Duration timeBucket) { Reporter.log("Fetching large data set... may take a couple minutes", true); List<DataPoint<Double>> rawData = client().metrics().gauge() .findGaugeDataWithId(metricID, String.valueOf(start), String.valueOf(end), null, null, null) .getEntity();/* w ww. j av a 2 s.c o m*/ Assert.assertNotNull(rawData, testID); Reporter.log("raw datapoints: " + rawData.size(), true); List<Long> zeroList = findZeroValues(rawData); Assert.assertTrue(zeroList == null || zeroList.size() == 0, testID); Map<Long, Integer> hist = OpenshiftBaseTest.makeHistogram(rawData, timeBucket); Double[] result = hist.entrySet().stream().map(x -> new Double(x.getValue())) .toArray(size -> new Double[size]); double[] d = ArrayUtils.toPrimitive(result); // drop the first and last as they are usually outliers double[] samples = Arrays.copyOfRange(d, 1, d.length - 1); DescriptiveStatistics stats = new DescriptiveStatistics(samples); Reporter.log(hist.toString(), true); Reporter.log("size: " + stats.getN(), true); Reporter.log("min/max: " + stats.getMin() + "/" + stats.getMax(), true); Reporter.log("mean: " + stats.getMean(), true); Reporter.log("variance: " + stats.getVariance(), true); Reporter.log("stddev: " + stats.getStandardDeviation(), true); }
From source file:de.zib.vold.userInterface.RESTController.java
/**
 * Handles GET requests.
 *
 * This method is used by clients to look up some keys. Each requested key is
 * resolved independently; keys that fail with a {@link VoldException} are
 * logged and skipped rather than failing the whole request.
 *
 * @param keys The URL arguments of the request (one entry per key to look up).
 * @param request Request information, used to derive the lookup scope from the URI.
 * @return A map of found keys with their associated values (HTTP 200 even if empty).
 */
@RequestMapping(method = RequestMethod.GET)
public ResponseEntity<Map<Key, Set<String>>> lookup(@RequestParam Map<String, String> keys,
        HttpServletRequest request) {
    // guard: log the request and verify the controller is properly initialized
    {
        logger.debug("GET: " + keys.toString());
        checkState();
    }

    Map<Key, Set<String>> merged_result = new HashMap<Key, Set<String>>();

    // get actual scope: the request URI minus the configured prefix,
    // context path and servlet path
    String scope;
    {
        scope = request.getRequestURI();
        String removepath = removePrefix + request.getContextPath() + request.getServletPath();
        scope = scope.substring(removepath.length(), scope.length());
    }

    // process each key
    for (Map.Entry<String, String> entry : keys.entrySet()) {
        URIKey urikey;
        Key k;

        // build key: decode the URI key, then prepend the request scope to the
        // key's own scope; java.io.File normalizes redundant path separators
        {
            urikey = URIKey.fromURIString(entry.getKey(), enc);
            File path_correction = new File(scope + "/" + urikey.getKey().get_scope());
            k = new Key(path_correction.getPath(), urikey.getKey().get_type(), urikey.getKey().get_keyname());
        }

        // lookup and remember result
        {
            Map<Key, Set<String>> _result;
            try {
                _result = frontend.lookup(k);
            } catch (VoldException e) {
                // best-effort semantics: log and continue with the remaining
                // keys instead of aborting with an INTERNAL_SERVER_ERROR
                logger.error("Error on lookup for key " + k + " (" + entry.getKey() + "): ", e);
                continue;
            }

            // found something
            if (null != _result) {
                merged_result.putAll(_result);
            }
        }
    }

    return new ResponseEntity<Map<Key, Set<String>>>(merged_result, HttpStatus.OK);
}
From source file:edu.harvard.iq.dataverse.datasetutility.FileUploadTestPage.java
/**
 * Page initializer: reads the "ds_id" and "fid" request parameters, loading
 * the referenced dataset (and running the retrieval check) and/or the file
 * to be replaced. Sets {@code replaceOperation} according to whether a file
 * to replace was resolved.
 *
 * @return always null (no JSF navigation outcome)
 */
public String init() {
    Map<String, String> params = FacesContext.getCurrentInstance().getExternalContext()
            .getRequestParameterMap();
    msgt("params: " + params.toString());

    if (params.containsKey("ds_id")) {
        String dsIdParam = params.get("ds_id");
        // only accept a non-empty, purely numeric id
        if (!dsIdParam.isEmpty() && StringUtils.isNumeric(dsIdParam)) {
            datasetId = Long.parseLong(dsIdParam);
            dataset = datasetService.find(datasetId);
            datasetVersion = dataset.getLatestVersion();
            checkRetrievalTest();
        }
    }

    if (params.containsKey("fid")) {
        String fidParam = params.get("fid");
        if (!fidParam.isEmpty() && StringUtils.isNumeric(fidParam)) {
            fileToReplace = datafileService.find(Long.parseLong(fidParam));
        }
    }

    // replace mode is active exactly when a file to replace was found
    replaceOperation = (fileToReplace != null);

    return null;
}
From source file:org.kew.rmf.core.lucene.LuceneDataLoader.java
private Document indexRecord(Map<String, String> record) throws Exception { Document doc = new Document(); String idFieldName = Configuration.ID_FIELD_NAME; logger.trace("rawRecord: {}", record.toString()); doc.add(new StringField(idFieldName, record.get(idFieldName), Field.Store.YES)); // The remainder of the columns are added as specified in the properties for (Property p : this.config.getProperties()) { String authorityName = p.getAuthorityColumnName() + Configuration.TRANSFORMED_SUFFIX; String value = record.get(p.getAuthorityColumnName()); // super-csv treats blank as null, we don't for now value = (value != null) ? value : ""; // Index the value in its original state, pre transformation.. Field f = new TextField(p.getAuthorityColumnName(), value, Field.Store.YES); doc.add(f);//from w ww . j a va 2 s .com // ..*then* transform the value if necessary.. for (Transformer t : p.getAuthorityTransformers()) { value = t.transform(value); } //.. and add this one to the index Field f1 = new TextField(authorityName, value, Field.Store.YES); doc.add(f1); // For some fields (those which will be passed into a fuzzy matcher like Levenshtein), we index the length if (p.isIndexLength()) { int length = 0; if (value != null) length = value.length(); Field fl = new StringField(authorityName + Configuration.LENGTH_SUFFIX, String.format("%02d", length), Field.Store.YES); doc.add(fl); } if (p.isIndexInitial() & StringUtils.isNotBlank(value)) { Field finit = new TextField(authorityName + Configuration.INITIAL_SUFFIX, value.substring(0, 1), Field.Store.YES); doc.add(finit); } } logger.trace("Document to be indexed: {}", doc.toString()); this.indexWriter.addDocument(doc); return doc; }
From source file:nl.surfnet.coin.janus.JanusRestClient.java
@Override public JanusEntity getEntity(String entityId) { Map<String, String> parameters = new HashMap<String, String>(); parameters.put("entityid", entityId); URI signedUri = null;// ww w .ja v a2 s.c o m try { signedUri = sign("getEntity", parameters); if (LOG.isTraceEnabled()) { LOG.trace("Signed Janus-request is: {}", signedUri); } } catch (IOException e) { LOG.error("Could not do getEntity request to Janus", e); } @SuppressWarnings("unchecked") final Map<String, Object> restResponse = restTemplate.getForObject(signedUri, Map.class); if (LOG.isTraceEnabled()) { LOG.trace("Janus-request returned: {}", restResponse.toString()); } return restResponse == null ? null : JanusEntity.fromJanusResponse(restResponse); }
From source file:org.apache.hadoop.hashtable.HashTableBenchmark.java
private String getHistogram(int[] entries) { Map<Integer, Integer> hist = new HashMap<Integer, Integer>(); for (int i = 0; i < entries.length; i++) { Integer count = hist.get(entries[i]); if (count == null) { hist.put(entries[i], 1);//www . j a v a 2 s . c o m } else { hist.put(entries[i], count + 1); } } return "HISTOGRAM: entriesLen: " + entries.length + " -- " + hist.toString(); }
From source file:org.encuestame.comet.services.TweetPollCometService.java
/**
 * Autosave process for the tweetpoll editor comet channel.
 *
 * Parses the incoming message payload (a map under the "tweetPoll" key,
 * expected to carry "hashtags", "answers", "question", "options" and an
 * optional "tweetPollId" — TODO confirm the exact client payload shape),
 * then either creates a new TweetPoll (no id present) or updates the
 * existing one, and finally delivers the result back to the client session.
 *
 * @param remote the client server session the result is delivered to
 * @param message the comet message carrying the autosave payload
 */
@PreAuthorize("hasRole('ENCUESTAME_USER')")
@Listener("/service/tweetpoll/autosave")
@SuppressWarnings("unchecked")
@Deprecated
public void processAutoSave(final ServerSession remote, final ServerMessage.Mutable message) {
    ; // stray empty statement kept as-is (no behavioral effect)
    log.debug("--------- TweetPoll COMMET AUTOSAVE ----------");
    final Map<String, Object> inputMessage = message.getDataAsMap();
    Map<String, Object> outPutMessage = new HashedMap();
    if (log.isDebugEnabled()) {
        log.debug("Messages content:{" + inputMessage.toString());
        log.debug("Messages content JSON:{" + message.getJSON());
        log.debug("Messages content TweetPoll:{" + inputMessage.get("tweetPoll"));
    }
    // NOTE(review): no null check — a payload without "tweetPoll" would NPE below
    final Map<String, Object> tweetPollJson = (Map<String, Object>) inputMessage.get("tweetPoll");
    List<String> hastagsArray = new ArrayList<String>();
    List<Long> answerArray = new ArrayList<Long>();
    final Object[] hashtags = (Object[]) tweetPollJson.get("hashtags");
    if (log.isDebugEnabled()) {
        log.debug("Array of hashtags: ---->" + tweetPollJson.get("hashtags"));
        log.debug("Array of hashtags: ---->" + hashtags);
        log.debug("Array of hashtags: ---->" + hashtags.length);
    }
    // example element shape: {"hashtags":[{"id":null,"newValue":true,"label":"nicaragua"}
    for (int i = 0; i < hashtags.length; i++) {
        HashMap<String, String> hashtagsMap = (HashMap<String, String>) hashtags[i];
        if (log.isDebugEnabled()) {
            log.debug("Hashtag: ---->" + hashtagsMap.get("label"));
            log.debug(hashtagsMap.get("newValue"));
        }
        // only keep hashtags that actually carry a label
        if (hashtagsMap.get("label") != null) {
            hastagsArray.add(hashtagsMap.get("label"));
        }
    }
    // answers arrive as an array of (possibly null) Long ids
    final Object[] answers = (Object[]) tweetPollJson.get("answers");
    if (log.isDebugEnabled()) {
        log.debug("Array of Answer: ---->" + tweetPollJson.get("answers"));
        log.debug("Array of Answer: ---->" + answers.length);
    }
    for (int i = 0; i < answers.length; i++) {
        Long answersMap = (Long) answers[i];
        if (answersMap != null) {
            answerArray.add(Long.valueOf(answersMap));
        }
    }
    if (log.isDebugEnabled()) {
        log.debug("review answerArray: " + answerArray.size());
        log.debug("review hastagsArray: " + hastagsArray.size());
    }
    // question text; missing value is treated as the empty string
    final HashMap<String, String> questionMap = (HashMap<String, String>) tweetPollJson.get("question");
    final String question = filterValue(questionMap.get("value") == null ? "" : questionMap.get("value"));
    // Options: fall back to an empty map when the payload has none
    final Options options = new Options((tweetPollJson.get("options") == null ? new HashedMap()
            : (Map<String, Object>) tweetPollJson.get("options")));
    if (log.isDebugEnabled()) {
        log.debug("review options: " + options.toString());
    }
    try {
        // get user account from session.
        final UserAccount user = getUserAccount();
        if (user != null) {
            // null id => first autosave of a brand-new tweetpoll
            final Long tweetPollId = tweetPollJson.get("tweetPollId") == null ? null
                    : Long.valueOf(tweetPollJson.get("tweetPollId").toString());
            if (tweetPollId == null) {
                final TweetPollBean tweetPollBean = this.fillTweetPoll(options, question, user, hastagsArray,
                        null);
                // new tweetpoll domain; return the generated id to the client
                // so subsequent autosaves become updates
                final TweetPoll tweetPoll = createTweetPoll(tweetPollBean);
                outPutMessage.put("tweetPollId", tweetPoll.getTweetPollId());
                log.debug("tweet poll created.");
            } else {
                log.debug("updated tweetPoll:{" + tweetPollJson.get("tweetPollId"));
                // update existing tweetPoll in place
                final TweetPollBean tweetPollBean = this.fillTweetPoll(options, question, user, hastagsArray,
                        tweetPollId);
                updateTweetPoll(tweetPollBean);
                // echo the input back to the client on update
                outPutMessage = inputMessage;
                log.debug("updated tweetPoll:{" + tweetPollJson.get("tweetPollId"));
            }
        } else {
            log.warn("forbiden access");
        }
    } catch (EnMeExpcetion e) {
        log.error(e);
    } catch (ParseException e) {
        log.error(e);
    }
    log.debug("tweetPoll content:{" + outPutMessage);
    remote.deliver(getServerSession(), message.getChannel(), outPutMessage, null);
}
From source file:org.nuxeo.segment.io.SegmentIOComponent.java
public void track(NuxeoPrincipal principal, String eventName, Map<String, Serializable> metadata) { SegmentIODataWrapper wrapper = new SegmentIODataWrapper(principal, metadata); if (Framework.isTestModeSet()) { pushForTest("track", wrapper.getUserId(), eventName, metadata); } else {//from ww w .j a va2s . c o m if (debugMode) { log.info("send track for " + eventName + " user : " + wrapper.getUserId() + " with meta : " + metadata.toString()); } else { log.debug("send track with " + metadata.toString()); EventProperties eventProperties = new EventProperties(); eventProperties.putAll(wrapper.getMetadata()); Analytics.track(wrapper.getUserId(), eventName, eventProperties); } } }
From source file:com.hybris.datahub.service.impl.DefaultMarketplaceIntegrationService.java
@Override public void processRawItem(final String rawItemType, final Map<String, String> csv) { if (null == csv) { return;//from www. j a v a 2 s .c om } final List<Map<String, String>> rawFragments = new LinkedList<Map<String, String>>(); rawFragments.add(csv); final boolean result = rawFragmentInputChannel.send(new GenericMessage<List<Map<String, String>>>( rawFragments, constructMessageHeader(rawItemType, getFeedName(rawItemType)))); if (LOG.isInfoEnabled()) { LOG.info("Process result : " + result + ", item type :" + rawItemType + " fragment: " + csv.toString()); } }
From source file:org.apache.metron.pcapservice.PcapGetterHBaseImplTest.java
/** * Test_create start and stop row keys./* w ww.j ava2 s .co m*/ * * @throws IOException * Signals that an I/O exception has occurred. */ @Test public void test_createStartAndStopRowKeys() throws IOException { PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl.getInstance(); String key = "18800006-1800000b-11-0035-3810"; Map<String, String> map = pcapGetter.createStartAndStopRowKeys(key, false, false); System.out.println("map =" + map.toString()); String lastRowKey = "18800006-1800000b-11-0035-3810-23234-32423"; Map<String, String> map1 = pcapGetter.createStartAndStopRowKeys(lastRowKey, true, false); System.out.println("map1 =" + map1.toString()); String lastRowKey2 = "18800006-1800000b-11-0035-3810-23234-32423"; Map<String, String> map2 = pcapGetter.createStartAndStopRowKeys(lastRowKey2, true, true); System.out.println("map2 =" + map2.toString()); }