List of usage examples for java.io PrintStream print
public void print(Object obj)
From source file:jp.ikedam.jenkins.plugins.jobcopy_builder.ReplaceOperation.java
/** * Returns modified XML Document of the job configuration. * //from www . ja v a2s. co m * Replace the strings in the job configuration: * only applied to strings in text nodes, so the XML structure is never destroyed. * * @param doc XML Document of the job to be copied (job/NAME/config.xml) * @param env Variables defined in the build. * @param logger The output stream to log. * @return modified XML Document. Return null if an error occurs. * @see jp.ikedam.jenkins.plugins.jobcopy_builder.AbstractXmlJobcopyOperation#perform(org.w3c.dom.Document, hudson.EnvVars, java.io.PrintStream) */ @Override public Document perform(Document doc, EnvVars env, PrintStream logger) { String fromStr = getFromStr(); String toStr = getToStr(); if (StringUtils.isEmpty(fromStr)) { logger.println("From String is empty"); return null; } if (toStr == null) { toStr = ""; } String expandedFromStr = isExpandFromStr() ? env.expand(fromStr) : fromStr; String expandedToStr = isExpandToStr() ? env.expand(toStr) : toStr; if (StringUtils.isEmpty(expandedFromStr)) { logger.println("From String got to be empty"); return null; } if (expandedToStr == null) { expandedToStr = ""; } logger.print("Replacing: " + expandedFromStr + " -> " + expandedToStr); try { // Retrieve all text nodes. NodeList textNodeList = getNodeList(doc, "//text()"); // Perform replacing to all text nodes. // NodeList does not implement Collection, and foreach is not usable. for (int i = 0; i < textNodeList.getLength(); ++i) { Node node = textNodeList.item(i); node.setNodeValue(StringUtils.replace(node.getNodeValue(), expandedFromStr, expandedToStr)); } logger.println(""); return doc; } catch (Exception e) { logger.print("Error occured in XML operation"); e.printStackTrace(logger); return null; } }
From source file:com.qut.middleware.spep.authn.bindings.impl.AuthnPostBindingImpl.java
private void handleAuthnRequest(HttpServletRequest request, HttpServletResponse response, AuthnProcessorData data, SPEP spep) throws AuthenticationException { try {//from w ww .j a va 2s . c om String remoteAddress = request.getRemoteAddr(); this.logger.info("[Authn for {}] Initiating HTTP POST binding. Creating AuthnRequest", remoteAddress); String document = buildAuthnRequestDocument(request.getParameter("redirectURL"), request, response, data, spep); PrintStream out = new PrintStream(response.getOutputStream()); /* Set cookie to allow javascript enabled browsers to autosubmit, ensures navigation with the back button is not broken because auto submit is active for only a very short period */ Cookie autoSubmit = new Cookie("spepAutoSubmit", "enabled"); autoSubmit.setMaxAge(172800); //set expiry to be 48 hours just to make sure we still work with badly configured clocks skewed from GMT autoSubmit.setPath("/"); response.addCookie(autoSubmit); response.setStatus(HttpServletResponse.SC_OK); response.setHeader("Content-Type", "text/html"); out.print(document); out.close(); this.logger.info("[Authn for {}] Sent AuthnRequest successfully", remoteAddress); } catch (IOException e) { throw new AuthenticationException("Unable to send response due to an I/O error.", e); } }
From source file:edu.umn.cs.spatialHadoop.operations.DistributedJoin.java
private static long selfJoinLocal(Path in, Path out, OperationsParams params) throws IOException { if (isOneShotReadMode) { // Ensure all objects are read in one shot params.setInt(SpatialSite.MaxBytesInOneRead, -1); params.setInt(SpatialSite.MaxShapesInOneRead, -1); } else {// ww w . ja v a 2 s. c o m params.setInt(SpatialSite.MaxBytesInOneRead, maxBytesInOneRead); params.setInt(SpatialSite.MaxShapesInOneRead, maxShapesInOneRead); } ShapeArrayInputFormat inputFormat = new ShapeArrayInputFormat(); JobConf job = new JobConf(params); FileInputFormat.addInputPath(job, in); InputSplit[] splits = inputFormat.getSplits(job, 1); FileSystem outFs = out.getFileSystem(params); final PrintStream writer = new PrintStream(outFs.create(out)); // Process all input files long resultSize = 0; for (InputSplit split : splits) { ShapeArrayRecordReader reader = new ShapeArrayRecordReader(job, (FileSplit) split); final Text temp = new Text(); Rectangle key = reader.createKey(); ArrayWritable value = reader.createValue(); if (reader.next(key, value)) { Shape[] writables = (Shape[]) value.get(); resultSize += SpatialAlgorithms.SelfJoin_planeSweep(writables, true, new OutputCollector<Shape, Shape>() { @Override public void collect(Shape r, Shape s) throws IOException { writer.print(r.toText(temp)); writer.print(","); writer.println(s.toText(temp)); } }, null); if (reader.next(key, value)) { throw new RuntimeException("Error! Not all values read in one shot."); } } reader.close(); } writer.close(); return resultSize; }
From source file:com.act.lcms.plotter.WriteAndPlotMS1Results.java
private List<String> writeFeedMS1Values(List<Pair<Double, List<XZ>>> ms1s, Double maxIntensity, OutputStream os) throws IOException { // Write data output to outfile PrintStream out = new PrintStream(os); List<String> plotID = new ArrayList<>(ms1s.size()); for (Pair<Double, List<XZ>> ms1ForFeed : ms1s) { Double feedingConcentration = ms1ForFeed.getLeft(); List<XZ> ms1 = ms1ForFeed.getRight(); plotID.add(String.format("concentration: %5e", feedingConcentration)); // print out the spectra to outDATA for (XZ xz : ms1) { out.format("%.4f\t%.4f\n", xz.getTime(), xz.getIntensity()); out.flush();//from ww w.java 2 s . c o m } // delimit this dataset from the rest out.print("\n\n"); } return plotID; }
From source file:org.broadinstitute.gatk.tools.walkers.cancer.contamination.ContaminationResults.java
/*
 * Writes the contamination curves as CSV: a header row of percentage labels
 * (emitted once, sized from the first stats encountered), then one row per
 * (name, population) with that population's bin values.
 */
public void writeCurves(PrintStream out) {
    boolean headerWritten = false;
    for (Map.Entry<String, Map<String, ContaminationStats>> entry : stats.entrySet()) {
        for (ContaminationStats contaminationStats : entry.getValue().values()) {
            int binCount = contaminationStats.getContamination().getBins().length;

            if (!headerWritten) {
                // Header labels run from 100% down toward 0% in binCount steps.
                String[] headerLabels = new String[binCount];
                for (int index = 0; index < binCount; index++) {
                    headerLabels[index] = String.valueOf(100.0 * (1 - (double) index / binCount));
                }
                headerWritten = true;
                out.print("name,pop,");
                out.println(Utils.join(",", headerLabels));
            }

            String[] binValues = new String[binCount];
            int index = 0;
            for (double value : contaminationStats.getContamination().getBins()) {
                binValues[index++] = String.valueOf(value);
            }
            out.print(entry.getKey() + ",\"" + contaminationStats.getContamination().getPopultationName() + "\",");
            out.println(Utils.join(",", binValues));
        }
    }
}
From source file:net.rim.ejde.internal.ui.views.profiler.ProfilerView.java
/** * Writes the profile data to <code>file</code>. * * @param file/* www .j av a2 s . c o m*/ * Destination file. * @throws IDEError */ private void saveContents(File file) throws IDEError { if (file == null) { return; } RIA ria = RIA.getCurrentDebugger(); if (ria == null) { return; } String debugAttachedTo = ria.getDebugAttachTo(); if (debugAttachedTo == null || debugAttachedTo.isEmpty()) { return; } PrintStream out = null; try { out = new PrintStream(new FileOutputStream(file)); out.print(RIA.getString("ProfileCSVFileHeader1")); //$NON-NLS-1$ out.print(ria.profileGetTypes()[_whatToProfile].getDescription()); out.print(RIA.getString("ProfileCSVFileHeader2")); //$NON-NLS-1$ out.println(); ProfileItem[] modules = sortedElements(_pd, null); for (int i = 0; i < modules.length; i++) { ProfileItem module = modules[i]; Object moduleName = module; ProfileItem[] methods = sortedElements(module, null); for (int j = 0; j < methods.length; j++) { ProfileItem method = methods[j]; out.print(moduleName); out.print(", "); //$NON-NLS-1$ String methodStr = method.toString(); Object handle = method.getMethodHandle(); if (handle != null && handle instanceof DebugMethod) { methodStr = ((DebugMethod) handle).getFullName(); } out.print(Util.replace(methodStr, ",", "")); //$NON-NLS-1$ //$NON-NLS-2$ out.print(", "); //$NON-NLS-1$ out.print(method.getTicks()); out.print(", "); //$NON-NLS-1$ out.print(method.getCount()); out.println(); } } out.close(); } catch (IOException e) { log.error("", e); } }
From source file:jenkins.plugins.asqatasun.AsqatasunRunnerBuilder.java
/**
 * Registers the audit run with the Asqatasun webapp by materializing a SQL
 * stored-procedure script and an insert-act shell script into the context
 * directory, executing the latter, and removing both temp files afterwards.
 *
 * @param asqatasunRunner runner holding the audit id to link.
 * @param scenarioName    display name of the scenario.
 * @param scenario        raw scenario content.
 * @param contextDir      working directory for temp files and the process.
 * @param printStream     build log sink for debug output.
 * @param isDebug         when true, echoes the generated procedure file.
 * @param projectName     Jenkins project name recorded with the act.
 * @throws IOException          on resource or file failures.
 * @throws InterruptedException if waiting for the insert process is interrupted.
 */
private void linkToWebapp(AsqatasunRunner asqatasunRunner, String scenarioName, String scenario, File contextDir,
        PrintStream printStream, boolean isDebug, String projectName) throws IOException, InterruptedException {
    File insertProcedureFile = AsqatasunRunnerBuilder.createTempFile(contextDir, SQL_PROCEDURE_SCRIPT_NAME,
            IOUtils.toString(getClass().getResourceAsStream(SQL_PROCEDURE_NAME)));
    File insertActFile = null;
    try {
        if (isDebug) {
            printStream.print("insertProcedureFile created : " + insertProcedureFile.getAbsolutePath());
            printStream.print("with content : " + FileUtils.readFileToString(insertProcedureFile));
        }
        // NOTE(review): the database password is interpolated into a script on
        // disk; the file is short-lived but consider a safer credential path.
        String script = IOUtils.toString(getClass().getResourceAsStream(INSERT_ACT_NAME))
                .replace("$host", AsqatasunInstallation.get().getDatabaseHost())
                .replace("$user", AsqatasunInstallation.get().getDatabaseLogin())
                .replace("$port", AsqatasunInstallation.get().getDatabasePort())
                .replace("$passwd", AsqatasunInstallation.get().getDatabasePassword())
                .replace("$db", AsqatasunInstallation.get().getDatabaseName())
                .replace("$procedureFileName", TMP_FOLDER_NAME + SQL_PROCEDURE_SCRIPT_NAME);
        insertActFile = AsqatasunRunnerBuilder.createTempFile(contextDir, INSERT_ACT_SCRIPT_NAME, script);

        ProcessBuilder pb = new ProcessBuilder(TMP_FOLDER_NAME + INSERT_ACT_SCRIPT_NAME,
                AsqatasunInstallation.get().getAsqatasunLogin(), projectName.replaceAll("'", QUOTES),
                scenarioName.replaceAll("'", QUOTES),
                AsqatasunRunnerBuilder.forceVersion1ToScenario(scenario.replaceAll("'", QUOTES)),
                asqatasunRunner.getAuditId());
        pb.directory(contextDir);
        pb.redirectErrorStream(true);
        Process p = pb.start();
        // NOTE(review): the exit code is ignored — a failed insert goes unnoticed;
        // confirm whether it should be checked.
        p.waitFor();
    } finally {
        // FIX: delete the temp scripts even when script generation or the
        // process fails; the original only cleaned up on the happy path.
        if (insertActFile != null) {
            FileUtils.forceDelete(insertActFile);
        }
        FileUtils.forceDelete(insertProcedureFile);
    }
}
From source file:hudson.cli.CLI.java
/**
 * Opens a Remoting {@link Channel} to the Jenkins CLI TCP port, optionally
 * tunnelling through an HTTPS proxy, and performs the protocol handshake
 * (plain for version 1, encrypted + identity-verified for version 2).
 *
 * @deprecated Specific to {@link Mode#REMOTING}.
 */
@Deprecated
private Channel connectViaCliPort(URL jenkins, CliPort clip) throws IOException {
    LOGGER.log(FINE, "Trying to connect directly via Remoting over TCP/IP to {0}", clip.endpoint);

    if (authorization != null) {
        LOGGER.warning("-auth ignored when using JNLP agent port");
    }

    final Socket s = new Socket();
    // this prevents a connection from silently terminated by the router in between or the other peer
    // and that goes without unnoticed. However, the time out is often very long (for example 2 hours
    // by default in Linux) that this alone is enough to prevent that.
    s.setKeepAlive(true);
    // we take care of buffering on our own
    s.setTcpNoDelay(true);
    OutputStream out;

    if (httpsProxyTunnel != null) {
        // Tunnel through the proxy with an explicit HTTP CONNECT.
        String[] tokens = httpsProxyTunnel.split(":");
        LOGGER.log(Level.FINE, "Using HTTP proxy {0}:{1} to connect to CLI port",
                new Object[] { tokens[0], tokens[1] });
        s.connect(new InetSocketAddress(tokens[0], Integer.parseInt(tokens[1])));
        PrintStream o = new PrintStream(s.getOutputStream());
        o.print("CONNECT " + clip.endpoint.getHostString() + ":" + clip.endpoint.getPort() + " HTTP/1.0\r\n\r\n");

        // read the response from the proxy, byte by byte, until the blank line
        // that terminates the response headers.
        ByteArrayOutputStream rsp = new ByteArrayOutputStream();
        while (!rsp.toString("ISO-8859-1").endsWith("\r\n\r\n")) {
            int ch = s.getInputStream().read();
            if (ch < 0)
                throw new IOException("Failed to read the HTTP proxy response: " + rsp);
            rsp.write(ch);
        }
        String head = new BufferedReader(new StringReader(rsp.toString("ISO-8859-1"))).readLine();

        if (head == null) {
            throw new IOException("Unexpected empty response");
        }
        // Any status other than 200 means the tunnel was refused.
        if (!(head.startsWith("HTTP/1.0 200 ") || head.startsWith("HTTP/1.1 200 "))) {
            s.close();
            LOGGER.log(Level.SEVERE, "Failed to tunnel the CLI port through the HTTP proxy. Falling back to HTTP.");
            throw new IOException("Failed to establish a connection through HTTP proxy: " + rsp);
        }

        // HTTP proxies (at least the one I tried --- squid) doesn't seem to do half-close very well.
        // So instead of relying on it, we'll just send the close command and then let the server
        // cut their side, then close the socket after the join.
        out = new SocketOutputStream(s) {
            @Override
            public void close() throws IOException {
                // ignore
            }
        };
    } else {
        // Direct connection, 3-second timeout.
        s.connect(clip.endpoint, 3000);
        out = SocketChannelStream.out(s);
    }

    // Register the socket so it is closed when the CLI shuts down.
    closables.add(new Closeable() {
        public void close() throws IOException {
            s.close();
        }
    });

    Connection c = new Connection(SocketChannelStream.in(s), out);

    switch (clip.version) {
    case 1:
        DataOutputStream dos = new DataOutputStream(s.getOutputStream());
        dos.writeUTF("Protocol:CLI-connect");
        // we aren't checking greeting from the server here because I'm too lazy. It gets ignored by
        // Channel constructor.
        break;
    case 2:
        DataInputStream dis = new DataInputStream(s.getInputStream());
        dos = new DataOutputStream(s.getOutputStream());
        dos.writeUTF("Protocol:CLI2-connect");
        String greeting = dis.readUTF();
        if (!greeting.equals("Welcome"))
            throw new IOException("Handshaking failed: " + greeting);
        try {
            // Derive a shared AES session key via Diffie-Hellman and switch the
            // connection to encrypted mode before anything sensitive flows.
            byte[] secret = c.diffieHellman(false).generateSecret();
            SecretKey sessionKey = new SecretKeySpec(Connection.fold(secret, 128 / 8), "AES");
            c = c.encryptConnection(sessionKey, "AES/CFB8/NoPadding");

            // validate the instance identity, so that we can be sure that we are talking to the same server
            // and there's no one in the middle.
            byte[] signature = c.readByteArray();

            if (clip.identity != null) {
                Signature verifier = Signature.getInstance("SHA1withRSA");
                verifier.initVerify(clip.getIdentity());
                verifier.update(secret);
                if (!verifier.verify(signature))
                    throw new IOException("Server identity signature validation failed.");
            }
        } catch (GeneralSecurityException e) {
            throw (IOException) new IOException("Failed to negotiate transport security").initCause(e);
        }
        // NOTE(review): no default case — presumably unknown protocol versions fall
        // through and are rejected later in the handshake; confirm against CliPort.
    }

    return new Channel("CLI connection to " + jenkins, pool, new BufferedInputStream(c.in),
            new BufferedOutputStream(c.out));
}
From source file:com.google.cloud.dataflow.sdk.runners.worker.TextReaderTest.java
/*
 * Verifies TextReader's handling of a given line separator: writes a fixture
 * file of known lines, reads it back, and checks both the decoded lines
 * (stripped or unstripped) and the byte sizes reported to the observer.
 */
private void testNewlineHandling(String separator, boolean stripNewlines) throws Exception {
    File tmpFile = tmpFolder.newFile();
    List<String> expected = Arrays.asList("", " hi there ", "bob", "", " ", "--zowie!--", "");
    List<Integer> expectedSizes = new ArrayList<>();

    // Write each expected line followed by the separator, recording its size.
    try (PrintStream fixtureWriter = new PrintStream(new FileOutputStream(tmpFile))) {
        for (String line : expected) {
            fixtureWriter.print(line);
            fixtureWriter.print(separator);
            expectedSizes.add(line.length() + separator.length());
        }
    }

    TextReader<String> textReader = new TextReader<>(tmpFile.getPath(), stripNewlines, null, null,
            StringUtf8Coder.of(), TextIO.CompressionType.UNCOMPRESSED);
    ExecutorTestUtils.TestReaderObserver observer = new ExecutorTestUtils.TestReaderObserver(textReader);

    // Drain the reader into a list.
    List<String> actual = new ArrayList<>();
    try (Reader.ReaderIterator<String> iterator = textReader.iterator()) {
        while (iterator.hasNext()) {
            actual.add(iterator.next());
        }
    }

    if (!stripNewlines) {
        // Without stripping, each decoded line retains its separator.
        List<String> unstripped = new LinkedList<>();
        for (String s : expected) {
            unstripped.add(s + separator);
        }
        assertEquals(unstripped, actual);
    } else {
        assertEquals(expected, actual);
    }
    assertEquals(expectedSizes, observer.getActualSizes());
}
From source file:hudson.plugins.jobConfigHistory.FileHistoryDao.java
/** * Creates a new history entry and saves the slave configuration. * * @param node node./* w w w . j a v a 2s. c om*/ * @param content content. * @param operation operation. */ void createNewHistoryEntryAndSaveConfig(Node node, String content, final String operation) { final File timestampedDir = createNewHistoryEntry(node, operation); final File nodeConfigHistoryFile = new File(timestampedDir, "config.xml"); PrintStream stream = null; try { stream = new PrintStream(nodeConfigHistoryFile); stream.print(content); } catch (IOException ex) { throw new RuntimeException("Unable to write " + nodeConfigHistoryFile, ex); } finally { if (stream != null) { stream.close(); } } }