List of usage examples for java.util.Map#toString()
public String toString()
From source file:logdruid.util.DataMiner.java
public static ChartData gatherSourceData(final Repository repo) { PatternCache patternCache = new PatternCache(); ChartData cd = new ChartData(); List<File> listOfFiles = null; logger.info("Base file path: " + repo.getBaseSourcePath()); if (repo.getBaseSourcePath() == null) return null; File folder = new File(repo.getBaseSourcePath()); try {// www.j a v a 2 s. co m if (repo.isRecursiveMode()) { listOfFiles = FileListing.getFileListing(folder); } else { listOfFiles = Arrays.asList(folder.listFiles()); } } catch (FileNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); } logger.info("number of files: " + listOfFiles.size()); cd.sourceArrayList = repo.getSources(); Iterator<Source> sourceIterator = cd.sourceArrayList.iterator(); while (sourceIterator.hasNext()) { final Source source = sourceIterator.next(); cd.selectedSourceFiles = new HashMap<Integer, FileRecord>(); // sourceFiles contains all the matched files for a given source if (source.getActive()) { for (int i = 0; i < listOfFiles.size(); i++) { if (listOfFiles.get(i).isFile()) { String s1 = source.getSourcePattern(); try { Matcher matcher = patternCache.getPattern(s1) .matcher(new File(repo.getBaseSourcePath()).toURI() .relativize(new File(listOfFiles.get(i).getCanonicalPath()).toURI()) .getPath()); if (logger.isDebugEnabled()) { logger.debug(i + " matching file: " + new File(repo.getBaseSourcePath()).toURI() .relativize(new File(listOfFiles.get(i).getCanonicalPath()).toURI()) .getPath() + " with pattern: " + s1); } if (matcher.find()) { FileRecord tempFileRecord = new FileRecord(i, new File((String) listOfFiles.get(i).getCanonicalPath())); cd.selectedSourceFiles.put(i, tempFileRecord); if (logger.isDebugEnabled()) { logger.debug("Source: " + source.getSourceName() + " file: " + listOfFiles.get(i).getCanonicalPath()); logger.debug(" Graphpanel file: " + new File(repo.getBaseSourcePath()).toURI() .relativize(new File(listOfFiles.get(i).getCanonicalPath()).toURI()) .getPath()); 
logger.debug(tempFileRecord.getCompletePath()); } } } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } } if (logger.isEnabledFor(Level.INFO)) logger.info("matched file: " + cd.selectedSourceFiles.size() + " to source " + source.getSourceName()); } cd.sourceFileArrayListMap.put(source, cd.selectedSourceFiles); } Map<String, ArrayList<FileRecord>> sourceFileGroup = null; Iterator<Entry<Source, Map<Integer, FileRecord>>> ite = cd.sourceFileArrayListMap.entrySet().iterator(); while (ite.hasNext()) { final Map.Entry sourcePairs = ite.next(); final Source src = (Source) sourcePairs.getKey(); Map<Integer, FileRecord> sourceFiles = (Map<Integer, FileRecord>) sourcePairs.getValue(); sourceFileGroup = getSourceFileGroup(sourceFiles, src, repo); if (logger.isEnabledFor(Level.INFO)) logger.info("matched groups: " + sourceFileGroup.keySet().size() + " for source " + src.getSourceName()); logger.debug(sourceFileGroup.toString()); cd.setGroupFilesArrayListMap(src, sourceFileGroup); } return cd; }
From source file:edu.ucuenca.authorsdisambiguation.Distance.java
public synchronized String Http2(String s, Map<String, String> mp) throws SQLException, IOException { String md = s + mp.toString(); Statement stmt = conn.createStatement(); String sql;//from ww w. ja v a 2s.c om sql = "SELECT * FROM cache where cache.key='" + getMD5(md) + "'"; java.sql.ResultSet rs = stmt.executeQuery(sql); String resp = ""; if (rs.next()) { resp = rs.getString("value"); rs.close(); stmt.close(); } else { rs.close(); stmt.close(); HttpClient client = new HttpClient(); PostMethod method = new PostMethod(s); //Add any parameter if u want to send it with Post req. for (Entry<String, String> mcc : mp.entrySet()) { method.addParameter(mcc.getKey(), mcc.getValue()); } int statusCode = client.executeMethod(method); if (statusCode != -1) { InputStream in = method.getResponseBodyAsStream(); final Scanner reader = new Scanner(in, "UTF-8"); while (reader.hasNextLine()) { final String line = reader.nextLine(); resp += line + "\n"; } reader.close(); try { JsonParser parser = new JsonParser(); parser.parse(resp); PreparedStatement stmt2 = conn.prepareStatement("INSERT INTO cache (key, value) values (?, ?)"); stmt2.setString(1, getMD5(md)); stmt2.setString(2, resp); stmt2.executeUpdate(); stmt2.close(); } catch (Exception e) { System.out.printf("Error al insertar en la DB: " + e); } } } return resp; }
From source file:be.integrationarchitects.web.dragdrop.servlet.impl.DragDropServlet.java
/**
 * Splits an uploaded multipart file into one ".inf" file (part parameters plus
 * request parameters plus user) and one ".dat" file (part body) per form-data part,
 * optionally verifying the md5 hash supplied with each part, then passes the
 * collected files to the configured handler (when present) and writes its
 * response content back to the client.
 *
 * @param mimeRequest request wrapper; receives the list of extracted files
 * @param f           temp file containing the raw multipart upload
 * @param response    servlet response; may receive the handler's HTML content
 * @throws IOException on servlet I/O errors
 */
protected void prepareMultiPartFile(DragDropMimeHandlerRequest mimeRequest, File f, HttpServletResponse response)
        throws IOException {
    logger.logTrace("process multipart");
    mimeRequest.setFiles(new ArrayList<DragDropMimeFile>());
    PrintWriter out = response.getWriter();
    // NOTE(review): fin is never closed, not even on the exception paths — consider
    // try-with-resources; confirm whether something else owns this stream.
    FileInputStream fin = new FileInputStream(f);
    int partcount = 0;
    try {
        MultipartStream multipartStream = new MultipartStream(fin, mimeRequest.getMimeBoundary().getBytes(),
                1000, null);
        boolean nextPart = multipartStream.skipPreamble();
        int filecount = 0;
        while (nextPart) {
            partcount++;
            String header = multipartStream.readHeaders();
            logger.logTrace("PART HEADER:" + header);
            if (header.startsWith("Content-Disposition: form-data")) {
                filecount++;
                // Part parameters from the part header...
                Map<String, String> params = utils.getFilePartParams(header);
                // ...merged with the request parameters, so everything lands in the .inf file.
                for (String key : mimeRequest.getRequestParams().keySet()) {
                    params.put(key, mimeRequest.getRequestParams().get(key));
                }
                params.put("user", mimeRequest.getCtx().getUser());
                // Write the merged parameter map (its toString() form) to "<name>.<n>.inf".
                ByteArrayOutputStream bout = new ByteArrayOutputStream();
                bout.write(params.toString().getBytes());
                File f2 = new File(cfg.getFolder(), f.getName() + "." + filecount + ".inf");
                FileOutputStream output = new FileOutputStream(f2);
                output.write(bout.toByteArray());
                output.close();
                // Stream the part body to "<name>.<n>.dat".
                File f3 = new File(cfg.getFolder(), f.getName() + "." + filecount + ".dat");
                FileOutputStream output3 = new FileOutputStream(f3);
                multipartStream.readBodyData(output3);
                output3.close();
                String hash = utils.getHash(f3);
                // NOTE(review): params.get("md5") may be null when the client sent no md5
                // parameter — the trim() calls below would then throw NPE even when
                // cfg.checkHash() is off; confirm clients always supply it.
                logger.logTrace("hash:" + hash + ":" + params.get("md5") + ", equals:"
                        + hash.trim().equalsIgnoreCase(params.get("md5").trim()));
                if (cfg.checkHash()) {
                    if (!hash.trim().equalsIgnoreCase(params.get("md5").trim())) {
                        logger.logError("Invalid hash:" + params.get("md5"), null);
                        throw new IllegalArgumentException("Invalid hash:" + params.get("md5"));
                    }
                }
                if (cfg.getMaxFileSizePerFile() > 0 && f3.length() > cfg.getMaxFileSizePerFile()) {
                    // Oversized parts are logged and skipped, not treated as fatal.
                    logger.logError("File too big hash:" + f3.length(), null);
                } else {
                    DragDropMimeFile tf = new DragDropMimeFile();
                    tf.setFile(f3);
                    tf.setHash(hash);
                    tf.setPrepareParams(params);
                    tf.setFileName(params.get("filename"));
                    mimeRequest.getFiles().add(tf);
                }
            } else {
                // Non form-data part: drain its body so the stream stays positioned correctly.
                System.err.println("skipping part:" + header);
                multipartStream.readBodyData(new ByteArrayOutputStream());
            }
            nextPart = multipartStream.readBoundary();
        }
        // f.delete();//delete multipart upload file since already splitted in info and data
    } catch (MultipartStream.MalformedStreamException e) {
        logger.logError(e.getMessage(), e);
        e.printStackTrace();
        throw new IllegalArgumentException(e);
    } catch (IOException e) {
        logger.logError(e.getMessage(), e);
        e.printStackTrace();
        throw new IllegalArgumentException(e);
    }
    if (cfg.getHandler() != null) {
        DragDropMimeHandlerResponse mimeResponse = cfg.getHandler().prepare(mimeRequest);
        if (mimeResponse.getResponseContent() != null) {
            logger.logTrace("setting html response content...");
            response.setContentType(mimeResponse.getResponseContentType().getMimeType());
            out.println(mimeResponse.getResponseContent());
        } else {
            //TODO check redirects
        }
        //handler.handleFile(f2,params,reqparams,reqheaders);
    }
}
From source file:com.tobedevoured.naether.NaetherTest.java
@Test public void getDependencyGraph() throws NaetherException { Dependency dependency = new Dependency( new DefaultArtifact("org.springframework:org.springframework.orm:3.0.5.RELEASE"), "compile"); naether.addRemoteRepositoryByUrl("http://repository.springsource.com/maven/bundles/release"); naether.addRemoteRepositoryByUrl("http://repository.springsource.com/maven/bundles/external"); naether.addDependency(dependency);/*from ww w . j ava 2s .c om*/ naether.resolveDependencies(false); /* * This is the expected out: * org.springframework:org.springframework.orm:jar:3.0.5.RELEASE={ org.springframework:org.springframework.jdbc:jar:3.0.5.RELEASE={}, org.springframework:org.springframework.transaction:jar:3.0.5.RELEASE={ org.springframework:org.springframework.context:jar:3.0.5.RELEASE={ org.springframework:org.springframework.expression:jar:3.0.5.RELEASE={} }, org.springframework:org.springframework.aop:jar:3.0.5.RELEASE={}, org.aopalliance:com.springsource.org.aopalliance:jar:1.0.0={} }, org.springframework:org.springframework.core:jar:3.0.5.RELEASE={}, org.springframework:org.springframework.beans:jar:3.0.5.RELEASE={ org.springframework:org.springframework.asm:jar:3.0.5.RELEASE={} } } */ Map<String, Map> ormDeps = new LinkedHashMap<String, Map>(); ormDeps.put("org.springframework:org.springframework.jdbc:jar:3.0.5.RELEASE", new HashMap()); Map<String, Map> expressionDeps = new HashMap<String, Map>(); expressionDeps.put("org.springframework:org.springframework.expression:jar:3.0.5.RELEASE", new HashMap()); Map<String, Map> transactionDeps = new LinkedHashMap<String, Map>(); transactionDeps.put("org.springframework:org.springframework.context:jar:3.0.5.RELEASE", expressionDeps); transactionDeps.put("org.springframework:org.springframework.aop:jar:3.0.5.RELEASE", new HashMap()); transactionDeps.put("org.aopalliance:com.springsource.org.aopalliance:jar:1.0.0", new HashMap()); ormDeps.put("org.springframework:org.springframework.transaction:jar:3.0.5.RELEASE", 
transactionDeps); ormDeps.put("org.springframework:org.springframework.core:jar:3.0.5.RELEASE", new HashMap()); Map<String, Map> beansDeps = new HashMap<String, Map>(); beansDeps.put("org.springframework:org.springframework.asm:jar:3.0.5.RELEASE", new HashMap()); ormDeps.put("org.springframework:org.springframework.beans:jar:3.0.5.RELEASE", beansDeps); Map<String, Map> results = new HashMap<String, Map>(); results.put("org.springframework:org.springframework.orm:jar:3.0.5.RELEASE", ormDeps); Map<String, Map> graph = naether.getDependenciesGraph(); // XXX: Map equality wont place nice (even with Map instead of LinkedHashMap). assertEquals(results.toString(), graph.toString()); Set<String> resolvedDependencies = naether.getDependenciesNotation(); Set<String> dependencies = new HashSet<String>(); dependencies.add("org.springframework:org.springframework.orm:jar:3.0.5.RELEASE"); dependencies.add("org.springframework:org.springframework.beans:jar:3.0.5.RELEASE"); dependencies.add("org.springframework:org.springframework.asm:jar:3.0.5.RELEASE"); dependencies.add("org.springframework:org.springframework.core:jar:3.0.5.RELEASE"); dependencies.add("org.springframework:org.springframework.jdbc:jar:3.0.5.RELEASE"); dependencies.add("org.springframework:org.springframework.transaction:jar:3.0.5.RELEASE"); dependencies.add("org.aopalliance:com.springsource.org.aopalliance:jar:1.0.0"); dependencies.add("org.springframework:org.springframework.aop:jar:3.0.5.RELEASE"); dependencies.add("org.springframework:org.springframework.context:jar:3.0.5.RELEASE"); dependencies.add("org.springframework:org.springframework.expression:jar:3.0.5.RELEASE"); assertEquals(dependencies, resolvedDependencies); }
From source file:org.apache.cxf.fediz.service.idp.beans.SigninParametersCacheAction.java
public void store(RequestContext context) { Map<String, Object> signinParams = new HashMap<String, Object>(); String uuidKey = UUID.randomUUID().toString(); Object value = WebUtils.getAttributeFromFlowScope(context, FederationConstants.PARAM_REPLY); if (value != null) { signinParams.put(FederationConstants.PARAM_REPLY, value); }//from w w w . j a v a 2s.c om value = WebUtils.getAttributeFromFlowScope(context, FederationConstants.PARAM_TREALM); if (value != null) { signinParams.put(FederationConstants.PARAM_TREALM, value); } value = WebUtils.getAttributeFromFlowScope(context, FederationConstants.PARAM_HOME_REALM); if (value != null) { signinParams.put(FederationConstants.PARAM_HOME_REALM, value); } WebUtils.putAttributeInExternalContext(context, uuidKey, signinParams); LOG.debug("SignIn parameters cached: {}", signinParams.toString()); WebUtils.putAttributeInFlowScope(context, FederationConstants.PARAM_CONTEXT, uuidKey); LOG.info( "SignIn parameters cached and " + FederationConstants.PARAM_CONTEXT + " set to [" + uuidKey + "]."); }
From source file:org.apache.kylin.rest.controller2.CubeControllerV2.java
/** * Initiate the very beginning of a streaming cube. Will seek the latest offests of each partition from streaming * source (kafka) and record in the cube descriptor; In the first build job, it will use these offests as the start point. * @param cubeName/*from ww w .j a v a2 s .c o m*/ * @return */ @RequestMapping(value = "/{cubeName}/init_start_offsets", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" }) @ResponseBody public EnvelopeResponse initStartOffsetsV2(@PathVariable String cubeName) throws IOException { Message msg = MsgPicker.getMsg(); checkCubeNameV2(cubeName); CubeInstance cubeInstance = cubeService.getCubeManager().getCube(cubeName); if (cubeInstance.getSourceType() != ISourceAware.ID_STREAMING) { throw new BadRequestException(String.format(msg.getNOT_STREAMING_CUBE(), cubeName)); } final GeneralResponse response = new GeneralResponse(); final Map<Integer, Long> startOffsets = KafkaClient.getLatestOffsets(cubeInstance); CubeDesc desc = cubeInstance.getDescriptor(); desc.setPartitionOffsetStart(startOffsets); cubeService.getCubeDescManager().updateCubeDesc(desc); response.setProperty("result", "success"); response.setProperty("offsets", startOffsets.toString()); return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, response, ""); }
From source file:monasca.persister.repository.vertica.VerticaMetricRepo.java
private String getMeta(String name, Metric metric, Map<String, Object> meta, String id) { if (meta.containsKey(name)) { return (String) meta.get(name); } else {// w w w . j ava 2 s .c o m logger.warn("[{}]: failed to find {} in message envelope meta data. metric message may be malformed. " + "setting {} to empty string.", id, name); logger.warn("[{}]: metric: {}", id, metric.toString()); logger.warn("[{}]: meta: {}", id, meta.toString()); return ""; } }
From source file:yodlee.ysl.api.io.HTTP.java
public static String doPostRegisterUser(String url, String registerParam, Map<String, String> cobTokens, boolean isEncodingNeeded) throws IOException { //String processedURL = url+"?registerParam="+registerParam; /* String registerUserJson="{" + " \"user\" : {"//from w w w . j a v a2 s.co m + " \"loginName\" : \"TestT749\"," + " \"password\" : \"TESTT@123\"," + " \"email\" : \"testet@yodlee.com\"," + " \"firstName\" : \"Rehhsty\"," + " \"lastName\" :\"ysl\"" +" } ," + " \"preference\" : {" + " \"address\" : {" + " \"street\" : \"abcd\"," + " \"state\" : \"CA\"," + " \"city\" : \"RWS\"," + " \"postalCode\" : \"98405\"," + " \"countryIsoCode\" : \"USA\"" +" } ," + " \"currency\" : \"USD\"," + " \"timeZone\" : \"PST\"," + " \"dateFormat\" : \"MM/dd/yyyy\"" + "}" + "}";*/ //encoding url registerParam = java.net.URLEncoder.encode(registerParam, "UTF-8"); String processedURL = url + "?registerParam=" + registerParam; String mn = "doIO(POST : " + processedURL + ", " + registerParam + "sessionTokens : " + " )"; System.out.println(fqcn + " :: " + mn); URL restURL = new URL(processedURL); HttpURLConnection conn = (HttpURLConnection) restURL.openConnection(); conn.setRequestMethod("POST"); conn.setRequestProperty("User-Agent", userAgent); conn.setRequestProperty("Content-Type", contentTypeURLENCODED); conn.setRequestProperty("Content-Type", "text/plain;charset=UTF-8"); conn.setRequestProperty("Authorization", cobTokens.toString()); conn.setDoOutput(true); conn.setRequestProperty("Accept-Charset", "UTF-8"); int responseCode = conn.getResponseCode(); if (responseCode == 200) { System.out.println(fqcn + " :: " + mn + " : " + "Sending 'HTTP POST' request"); System.out.println(fqcn + " :: " + mn + " : " + "Response Code : " + responseCode); BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream())); String inputLine; StringBuilder jsonResponse = new StringBuilder(); while ((inputLine = in.readLine()) != null) { jsonResponse.append(inputLine); } 
in.close(); System.out.println(fqcn + " :: " + mn + " : " + jsonResponse.toString()); return new String(jsonResponse); } else { System.out.println("Invalid input"); return new String(); } }
From source file:org.hyperic.hq.product.Collector.java
/**
 * Renders a config map as a String, masking values whose keys the ConfigSchema marks
 * as secret so credentials never reach the logs.
 *
 * @param map properties to render; keys and values are expected to be Strings
 * @return the toString() of a copy of the map with secret values masked
 */
String mapToString(Map map) {
    // The raw Map signature is kept for compatibility with existing callers.
    Map<String, String> props = new HashMap<String, String>();
    // Enhanced for-loop replaces the manual Iterator boilerplate of the original.
    for (Object o : map.entrySet()) {
        Map.Entry entry = (Map.Entry) o;
        String key = (String) entry.getKey();
        String val = (String) entry.getValue();
        if (ConfigSchema.isSecret(key)) {
            // Never expose secret values (e.g. passwords) in string form.
            val = Metric.mask(val);
        }
        props.put(key, val);
    }
    return props.toString();
}
From source file:com.prey.net.PreyRestHttpClient.java
public PreyHttpResponse postStatusAutentication(String url, String status, Map<String, String> params, PreyConfig preyConfig) throws IOException { HttpPost method = new HttpPost(url); method.setHeader("Accept", "*/*"); method.setEntity(new UrlEncodedFormEntity(getHttpParamsFromMap(params), HTTP.UTF_8)); method.addHeader("Authorization", "Basic " + getCredentials(preyConfig.getApiKey(), "X")); method.addHeader("X-Prey-Status", status); PreyLogger.i("status " + status); // method.setParams(getHttpParamsFromMap(params)); PreyLogger.d("Sending using 'POST' - URI: " + url + " - parameters: " + params.toString()); httpclient.setRedirectHandler(new NotRedirectHandler()); HttpResponse httpResponse = httpclient.execute(method); PreyHttpResponse response = new PreyHttpResponse(httpResponse); //PreyLogger.d("Response from server: " + response.toString()); return response; }