List of usage examples for java.util.Map toString()
public String toString()
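Before the project examples below, a minimal self-contained sketch of the output Map.toString() produces. The rendering shown is the one inherited from AbstractMap (used by HashMap, LinkedHashMap, TreeMap); the map contents here are purely illustrative.

import java.util.LinkedHashMap;
import java.util.Map;

public class MapToStringDemo {
    public static void main(String[] args) {
        // LinkedHashMap keeps insertion order, so the printed form is predictable.
        Map<String, Integer> ports = new LinkedHashMap<>();
        ports.put("http", 80);
        ports.put("https", 443);

        // AbstractMap.toString() renders entries as key=value, comma-separated, inside braces.
        System.out.println(ports.toString()); // prints: {http=80, https=443}
    }
}

The examples that follow rely on exactly this behavior, typically to dump a map's contents into log messages, exception text, or debug output.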
From source file:nl.surfnet.coin.janus.JanusRestClient.java
/**
 * {@inheritDoc}
 */
@Override
public EntityMetadata getMetadataByEntityId(String entityId) {
    Map<String, String> parameters = new HashMap<String, String>();
    parameters.put("entityid", entityId);
    final Collection metadataAsStrings = CollectionUtils.collect(Arrays.asList(Metadata.values()),
            new Transformer() {
                @Override
                public Object transform(Object input) {
                    return ((Metadata) input).val();
                }
            });
    parameters.put("keys", StringUtils.join(metadataAsStrings, ','));
    URI signedUri;
    try {
        signedUri = sign("getMetadata", parameters);
        if (LOG.isTraceEnabled()) {
            LOG.trace("Signed Janus-request is: {}", signedUri);
        }
        @SuppressWarnings("unchecked")
        final Map<String, Object> restResponse = restTemplate.getForObject(signedUri, Map.class);
        Assert.notNull(restResponse, "Rest response from Janus should not be null");
        if (LOG.isTraceEnabled()) {
            LOG.trace("Janus-request returned: {}", restResponse.toString());
        }
        final EntityMetadata entityMetadata = EntityMetadata.fromMetadataMap(restResponse);
        entityMetadata.setAppEntityId(entityId);
        return entityMetadata;
    } catch (IOException e) {
        LOG.error("While doing Janus-request", e);
    }
    return null;
}
From source file:org.opendaylight.controller.topology.web.Topology.java
/**
 * Re-position nodes in circular layout
 */
private void repositionTopology(String containerName) {
    Graph<String, String> graph = new SparseMultigraph<String, String>();
    metaCache.get(containerName).clear();
    metaCache.get(containerName).putAll(stagedNodes);
    metaCache.get(containerName).putAll(newNodes);
    for (Map<String, Object> on : metaCache.get(containerName).values()) {
        graph.addVertex(on.toString());
        List<Map<String, Object>> adjacencies = (List<Map<String, Object>>) on.get("adjacencies");
        for (Map<String, Object> adj : adjacencies) {
            graph.addEdge(adj.toString(), adj.get("nodeFrom").toString(), adj.get("nodeTo").toString());
        }
    }
    CircleLayout<String, String> layout = new CircleLayout<String, String>(graph);
    layout.setSize(new Dimension(1200, 365));
    for (Map.Entry<String, Map<String, Object>> v : newNodes.entrySet()) {
        Double x = layout.transform(v.getKey()).getX();
        Double y = layout.transform(v.getKey()).getY();
        Map<String, String> nodeData = (HashMap<String, String>) v.getValue().get("data");
        nodeData.put("$x", (x - 600) + "");
        nodeData.put("$y", (y - 225) + "");
        newNodes.get(v.getKey()).put("data", nodeData);
    }
}
From source file:org.apache.ofbiz.accounting.thirdparty.sagepay.SagePayServices.java
private static Map<String, String> buildSagePayProperties(Map<String, Object> context, Delegator delegator) {
    Map<String, String> sagePayConfig = new HashMap<String, String>();
    String paymentGatewayConfigId = (String) context.get("paymentGatewayConfigId");
    if (UtilValidate.isNotEmpty(paymentGatewayConfigId)) {
        try {
            GenericValue sagePay = EntityQuery.use(delegator).from("PaymentGatewaySagePay")
                    .where("paymentGatewayConfigId", paymentGatewayConfigId).queryOne();
            if (sagePay != null) {
                Map<String, Object> tmp = sagePay.getAllFields();
                Set<String> keys = tmp.keySet();
                for (String key : keys) {
                    String value = tmp.get(key).toString();
                    sagePayConfig.put(key, value);
                }
            }
        } catch (GenericEntityException e) {
            Debug.logError(e, module);
        }
    }
    Debug.logInfo("SagePay Configuration : " + sagePayConfig.toString(), module);
    return sagePayConfig;
}
From source file:org.apache.hadoop.hdfs.TestGetBlocks.java
@Test
public void testBlockKey() {
    Map<Block, Long> map = new HashMap<>();
    final Random RAN = new Random();
    final long seed = RAN.nextLong();
    System.out.println("seed=" + seed);
    RAN.setSeed(seed);
    long[] blkids = new long[10];
    for (int i = 0; i < blkids.length; i++) {
        blkids[i] = 1000L + RAN.nextInt(100000);
        map.put(new Block(blkids[i], 0, blkids[i]), blkids[i]);
    }
    System.out.println("map=" + map.toString().replace(",", "\n  "));
    for (long blkid : blkids) {
        Block b = new Block(blkid, 0, GenerationStamp.GRANDFATHER_GENERATION_STAMP);
        Long v = map.get(b);
        System.out.println(b + " => " + v);
        assertEquals(v.longValue(), blkid);
    }
}
From source file:com.parse.interceptors.ParseLogInterceptor.java
private void logRequestInfo(Logger logger, String requestId, ParseHttpRequest request) throws IOException {
    logger.lock();
    logger.writeLine(KEY_TYPE, TYPE_REQUEST);
    logger.writeLine(KEY_REQUEST_ID, requestId);
    logger.writeLine(KEY_URL, request.getUrl());
    logger.writeLine(KEY_METHOD, request.getMethod().toString());
    // Add missing headers
    Map<String, String> headers = new HashMap<>(request.getAllHeaders());
    if (request.getBody() != null) {
        headers.put(KEY_CONTENT_LENGTH, String.valueOf(request.getBody().getContentLength()));
        headers.put(KEY_CONTENT_TYPE, request.getBody().getContentType());
    }
    logger.writeLine(KEY_HEADERS, headers.toString());
    // Body
    if (request.getBody() != null) {
        String requestBodyInfo;
        String contentType = request.getBody().getContentType();
        if (isContentTypePrintable(contentType)) {
            ByteArrayOutputStream output = new ByteArrayOutputStream();
            request.getBody().writeTo(output);
            requestBodyInfo = formatBytes(output.toByteArray(), contentType);
        } else {
            requestBodyInfo = IGNORED_BODY_INFO;
        }
        logger.writeLine(KEY_BODY, requestBodyInfo);
    }
    logger.writeLine(LOG_PARAGRAPH_BREAKER);
    logger.unlock();
}
From source file:com.amazonaws.services.kinesis.leases.impl.LeaseManager.java
/**
 * List with the given page size. Package access for integration testing.
 *
 * @param limit number of items to consider at a time - used by integration tests to force paging.
 * @return list of leases
 * @throws InvalidStateException if table does not exist
 * @throws DependencyException if DynamoDB scan fail in an unexpected way
 * @throws ProvisionedThroughputException if DynamoDB scan fail due to exceeded capacity
 */
List<T> list(Integer limit) throws DependencyException, InvalidStateException, ProvisionedThroughputException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Listing leases from table " + table);
    }
    ScanRequest scanRequest = new ScanRequest();
    scanRequest.setTableName(table);
    if (limit != null) {
        scanRequest.setLimit(limit);
    }
    try {
        ScanResult scanResult = dynamoDBClient.scan(scanRequest);
        List<T> result = new ArrayList<T>();
        while (scanResult != null) {
            for (Map<String, AttributeValue> item : scanResult.getItems()) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Got item " + item.toString() + " from DynamoDB.");
                }
                result.add(serializer.fromDynamoRecord(item));
            }
            Map<String, AttributeValue> lastEvaluatedKey = scanResult.getLastEvaluatedKey();
            if (lastEvaluatedKey == null) {
                // Signify that we're done.
                scanResult = null;
                if (LOG.isDebugEnabled()) {
                    LOG.debug("lastEvaluatedKey was null - scan finished.");
                }
            } else {
                // Make another request, picking up where we left off.
                scanRequest.setExclusiveStartKey(lastEvaluatedKey);
                if (LOG.isDebugEnabled()) {
                    LOG.debug("lastEvaluatedKey was " + lastEvaluatedKey + ", continuing scan.");
                }
                scanResult = dynamoDBClient.scan(scanRequest);
            }
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("Listed " + result.size() + " leases from table " + table);
        }
        return result;
    } catch (ResourceNotFoundException e) {
        throw new InvalidStateException("Cannot scan lease table " + table + " because it does not exist.", e);
    } catch (ProvisionedThroughputExceededException e) {
        throw new ProvisionedThroughputException(e);
    } catch (AmazonClientException e) {
        throw new DependencyException(e);
    }
}
From source file:org.deri.iris.queryrewriting.PositionDependenciesTest.java
@Test
public void testPositionDependenciesLinearCyclic() throws Exception {
    //
    // Theory:
    //
    // [R1] t(X,Y) -> s(X).
    // [R2] t(X,Y) -> t(Y,X).
    //
    final IPredicate t = Factory.BASIC.createPredicate("t", 2);
    final IPredicate s = Factory.BASIC.createPredicate("s", 1);

    final ILiteral txy = Factory.BASIC.createLiteral(true, t, tupleXY);
    final ILiteral tyx = Factory.BASIC.createLiteral(true, t, tupleYX);
    final ILiteral sx = Factory.BASIC.createLiteral(true, s, tupleX);

    // Input structure
    final List<ILiteral> h1 = new LinkedList<ILiteral>();
    final List<ILiteral> b1 = new LinkedList<ILiteral>();
    b1.add(txy);
    h1.add(sx);

    final List<ILiteral> h2 = new LinkedList<ILiteral>();
    final List<ILiteral> b2 = new LinkedList<ILiteral>();
    b2.add(txy);
    h2.add(tyx);

    final IRule r1 = Factory.BASIC.createRule(h1, b1);
    final IRule r2 = Factory.BASIC.createRule(h2, b2);

    final List<IRule> in = ImmutableList.of(r1, r2);

    //
    // Comparison Structure:
    //
    // t[1] -> t[1] {<>, <R2,R2>}
    // t[2] -> t[2] {<>, <R2,R2>}
    // s[1] -> s[1] {<>}
    //
    // t[1] -> s[1] {<R1>}
    // t[1] -> t[2] {<R2>}
    // t[2] -> t[1] {<R2>}
    //
    // t[2] -> s[1] {<R2,R1>}
    //
    final Map<Pair<IPosition, IPosition>, Set<List<IRule>>> cmp =
            new HashMap<Pair<IPosition, IPosition>, Set<List<IRule>>>();

    final IPosition t1 = new Position(t.getPredicateSymbol(), 1);
    final IPosition t2 = new Position(t.getPredicateSymbol(), 2);
    final IPosition s1 = new Position(s.getPredicateSymbol(), 1);

    final List<IRule> lEmpty = ImmutableList.of();
    final List<IRule> lr2r2 = ImmutableList.of(r2, r2);

    final Set<List<IRule>> st1t1 = Sets.newHashSet();
    st1t1.add(lEmpty);
    st1t1.add(lr2r2);

    final Set<List<IRule>> st2t2 = Sets.newHashSet();
    st2t2.add(lEmpty);
    st2t2.add(lr2r2);

    final Set<List<IRule>> ss1s1 = Sets.newHashSet();
    ss1s1.add(lEmpty);

    // t[1] -> t[1] {<>, <R2,R2>}
    cmp.put(Pair.of(t1, t1), st1t1);
    // t[2] -> t[2] {<>, <R2,R2>}
    cmp.put(Pair.of(t2, t2), st2t2);
    // s[1] -> s[1] {<>}
    cmp.put(Pair.of(s1, s1), ss1s1);

    // t[1] -> s[1] {<R1>}
    final List<IRule> lr1 = ImmutableList.of(r1);
    final Set<List<IRule>> slr1 = Sets.newHashSet();
    slr1.add(lr1);
    cmp.put(Pair.of(t1, s1), slr1);

    // t[2] -> s[1] {<R2,R1>}
    final List<IRule> lr2r1 = ImmutableList.of(r2, r1);
    final Set<List<IRule>> slr2r1 = Sets.newHashSet();
    slr2r1.add(lr2r1);
    cmp.put(Pair.of(t2, s1), slr2r1);

    // t[1] -> t[2] {<R2>}
    // t[2] -> t[1] {<R2>}
    final List<IRule> lr2 = ImmutableList.of(r2);
    final Set<List<IRule>> slr2 = Sets.newHashSet();
    slr2.add(lr2);
    cmp.put(Pair.of(t1, t2), slr2);
    cmp.put(Pair.of(t2, t1), slr2);

    final Map<Pair<IPosition, IPosition>, Set<List<IRule>>> depGraph = DepGraphUtils
            .computePositionDependencyGraph(in);

    LOGGER.debug(depGraph.toString());
    System.out.println("Actual:" + depGraph.toString());
    System.out.println("Expected:" + cmp.toString());

    assertEquals(true, depGraph.equals(cmp));
}
From source file:ezbake.deployer.utilities.YamlManifestFileReader.java
@Override
public List<ArtifactManifest> readFile(Reader reader) throws IOException, IllegalStateException {
    YamlReader artifactReader = new YamlReader(reader);
    // the thing to be returned with yaml filled stuff
    ArtifactManifest applicationManifest = new ArtifactManifest();
    applicationManifest.setApplicationInfo(new ApplicationInfo());
    applicationManifest.setUser(userProvider.getUser());
    List<ArtifactManifest> result = Lists.newArrayList();
    try {
        Map<String, Object> ymlTopManifest = (Map<String, Object>) artifactReader.read();
        Map<String, Object> ymlAppManifest = (Map<String, Object>) ymlTopManifest.get("Application");
        Preconditions.checkState(ymlAppManifest != null,
                "The root node must be called Application: " + ymlTopManifest.toString());
        setApplicationInfo(ymlAppManifest, applicationManifest.getApplicationInfo(), true);
        Preconditions.checkState(applicationManifest.getApplicationInfo().isSetApplicationId(),
                "name must be set in application");
        List<Map<String, Object>> services = (List<Map<String, Object>>) ymlAppManifest.get("Services");
        Preconditions.checkState(services != null && !services.isEmpty(),
                "the Services block must be set under the Application block and have more than 1 item");
        for (Map<String, Object> ymlServiceManifest : services) {
            ArtifactManifest manifest = new ArtifactManifest(applicationManifest);
            manifest.setArtifactInfo(new ArtifactInfo());
            setArtifactType(ymlServiceManifest, manifest);
            setApplicationInfo(ymlServiceManifest, manifest.getApplicationInfo(), false);
            setArtifactInfo(ymlServiceManifest, manifest.getArtifactInfo(), manifest.getApplicationInfo());
            setScaling(ymlServiceManifest, manifest);
            setWebAppInfo(ymlServiceManifest, manifest);
            setDatabaseInfo(ymlServiceManifest, manifest);
            setThriftService(ymlServiceManifest, manifest);
            setFrackService(ymlServiceManifest, manifest);
            setCustomService(ymlServiceManifest, manifest);
            setBatchService(ymlServiceManifest, manifest);
            validateManifest(manifest);
            result.add(manifest);
        }
        return result;
    } catch (YamlException | IllegalArgumentException e) {
        throw new IOException(e);
    }
}
From source file:org.cloudifysource.rest.interceptors.ApiVersionValidationAndRestResponseBuilderInterceptor.java
/**
 * Filters the modelAndView object and retrieves the actual object returned by the controller.
 * This implementation assumes the model consists of just one returned object and a BindingResult.
 * If the model is empty, the supported return types are String (the view name) or void.
 */
private Object filterModel(final ModelAndView modelAndView, final Object handler) throws RestErrorException {
    Object methodReturnObject = null;
    Map<String, Object> model = modelAndView.getModel();
    if (MapUtils.isNotEmpty(model)) {
        // the model is not empty. The return value is the first value that is not a BindingResult
        for (Map.Entry<String, Object> entry : model.entrySet()) {
            Object value = entry.getValue();
            if (!(value instanceof BindingResult)) {
                methodReturnObject = value;
                break;
            }
        }
        if (methodReturnObject == null) {
            logger.warning("return object not found in model: " + model.toString());
            throw new RestErrorException("return object not found in model: " + model.toString());
        }
    } else {
        // the model is empty, this means the return type is String or void
        if (handler instanceof HandlerMethod) {
            Class<?> returnType = ((HandlerMethod) handler).getMethod().getReturnType();
            if (returnType == Void.TYPE) {
                methodReturnObject = null;
            } else if (returnType == String.class) {
                String viewName = modelAndView.getViewName();
                methodReturnObject = viewName;
            } else {
                logger.warning("return type not supported: " + returnType);
                throw new RestErrorException("return type not supported: " + returnType);
            }
        } else {
            logger.warning("handler object is not a HandlerMethod: " + handler);
            throw new RestErrorException("handler object is not a HandlerMethod: " + handler);
        }
    }
    return methodReturnObject;
}