List of usage examples for java.io PrintStream println
public void println(Object x)
From source file:hu.bme.mit.sette.run.Run.java
private static String readScenario(String[] args, BufferedReader in, PrintStream out) throws IOException { String scenario = null;//from w ww . j a v a2 s . co m if (args.length > 1) { out.println("Usage: java -jar SETTE.jar [scenario]"); out.println("Available scenarios:"); for (int i = 0; i < Run.scenarios.length; i++) { out.println(String.format(" [%d] %s", i, Run.scenarios[i])); } } else if (args.length == 1) { scenario = Run.parseScenario(args[0]); if (scenario == null) { out.println("Invalid scenario: " + args[0].trim()); out.println("Available scenarios:"); for (int i = 0; i < Run.scenarios.length; i++) { out.println(String.format(" [%d] %s", i, Run.scenarios[i])); } } } else { while (scenario == null) { out.println("Available scenarios:"); for (int i = 0; i < Run.scenarios.length; i++) { out.println(String.format(" [%d] %s", i, Run.scenarios[i])); } out.print("Select scenario: "); String line = in.readLine(); if (line == null) { out.println("EOF detected, exiting"); return null; } else if (StringUtils.isBlank(line)) { out.println("Exiting"); return null; } scenario = Run.parseScenario(line); if (scenario == null) { out.println("Invalid scenario: " + line.trim()); } } } out.println("Selected scenario: " + scenario); return scenario; }
From source file:com.simiacryptus.util.Util.java
/** * Report./* w w w . j a v a2s .com*/ * * @param fragments the fragments * @throws IOException the io exception */ public static void report(@javax.annotation.Nonnull final Stream<String> fragments) throws IOException { @javax.annotation.Nonnull final File outDir = new File("reports"); outDir.mkdirs(); final StackTraceElement caller = com.simiacryptus.util.Util .getLast(Arrays.stream(Thread.currentThread().getStackTrace())// .filter(x -> x.getClassName().contains("simiacryptus"))); @javax.annotation.Nonnull final File report = new File(outDir, caller.getClassName() + "_" + caller.getLineNumber() + ".html"); @javax.annotation.Nonnull final PrintStream out = new PrintStream(new FileOutputStream(report)); out.println("<html><head></head><body>"); fragments.forEach(out::println); out.println("</body></html>"); out.close(); Desktop.getDesktop().browse(report.toURI()); }
From source file:Armadillo.Analytics.Base.FastMathCalc.java
/** * Print an array./*from w ww . jav a2 s . c o m*/ * @param out text output stream where output should be printed * @param name array name * @param expectedLen expected length of the array * @param array array data */ static void printarray(PrintStream out, String name, int expectedLen, double[] array) { out.println(name + "="); checkLen(expectedLen, array.length); out.println(TABLE_START_DECL); for (double d : array) { out.printf(" %s%n", format(d)); // one entry per line } out.println(TABLE_END_DECL); }
From source file:Armadillo.Analytics.Base.FastMathCalc.java
/** * Print an array.//w ww .j a v a2 s . c o m * @param out text output stream where output should be printed * @param name array name * @param expectedLen expected length of the array * @param array2d array data */ static void printarray(PrintStream out, String name, int expectedLen, double[][] array2d) { out.println(name); checkLen(expectedLen, array2d.length); out.println(TABLE_START_DECL + " "); int i = 0; for (double[] array : array2d) { // "double array[]" causes PMD parsing error out.print(" {"); for (double d : array) { // assume inner array has very few entries out.printf("%-25.25s", format(d)); // multiple entries per line } out.println("}, // " + i++); } out.println(TABLE_END_DECL); }
From source file:ie.aib.nbp.zosresttest.RunTest.java
private static void runPerformanceTest(String username, String password, PrintStream out, boolean attachResponse, boolean formatResponse) { ZosRestServicePerformanceRunner runner = new ZosRestServicePerformanceRunner(config.getProperty("HOST"), Integer.parseInt(config.getProperty("PORT")), out); try {/*ww w .j a v a 2 s. c o m*/ readConfig(RUN_PARAM_FILE_PATH); String url = config.getProperty("URL"); String payload = config.getProperty("PAYLOAD"); int loops = Integer.parseInt(config.getProperty("LOOPS")); SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss z"); out.println("z/OS performance test started at: " + sdf.format(new Date())); out.println(" "); List<Long> elapsedTimes = new ArrayList<>(); for (int i = 0; i < loops; i++) { long elapsedTime = runner.executeServices(url, username, password, payload, out, attachResponse, formatResponse); elapsedTimes.add(elapsedTime); } // calculating avg. time Long totalTime = 0L; for (Long time : elapsedTimes) totalTime = totalTime + time; Double avgTime = totalTime.doubleValue() / elapsedTimes.size(); out.println("Average run time: " + avgTime + " miliseconds"); out.println(""); out.println("z/OS performance test ended at: " + sdf.format(new Date())); } catch (NumberFormatException ex) { out.println("Unable to run the test, elther the number of parameters or their values are incorrect."); } }
From source file:com.phantom.hadoop.examples.BaileyBorweinPlouffe.java
/**
 * Run a map/reduce job to compute Pi digits.
 *
 * @param startDigit first digit position to compute
 * @param nDigits    number of digits to compute
 * @param nMaps      number of map tasks to split the digit range across
 * @param workingDir job working directory; must not already exist
 * @param conf       Hadoop configuration used to create the job
 * @param out        stream for progress messages
 * @throws IOException if the working directory exists or cannot be created
 */
private static void compute(int startDigit, int nDigits, int nMaps, String workingDir, Configuration conf,
        PrintStream out) throws IOException {
    final String name = startDigit + "_" + nDigits;

    // setup working directory (refuses to reuse an existing one so stale
    // output cannot be mistaken for this run's results)
    out.println("Working Directory = " + workingDir);
    out.println();
    final FileSystem fs = FileSystem.get(conf);
    final Path dir = fs.makeQualified(new Path(workingDir));
    if (fs.exists(dir)) {
        throw new IOException("Working directory " + dir + " already exists. Please remove it first.");
    } else if (!fs.mkdirs(dir)) {
        throw new IOException("Cannot create working directory " + dir);
    }

    out.println("Start Digit = " + startDigit);
    out.println("Number of Digits = " + nDigits);
    out.println("Number of Maps = " + nMaps);

    // setup a job
    final Job job = createJob(name, conf);
    final Path hexfile = new Path(dir, "pi_" + name + ".hex");
    FileOutputFormat.setOutputPath(job, new Path(dir, "out"));

    // setup custom properties so mappers/reducers can find their parameters
    job.getConfiguration().set(WORKING_DIR_PROPERTY, dir.toString());
    job.getConfiguration().set(HEX_FILE_PROPERTY, hexfile.toString());

    job.getConfiguration().setInt(DIGIT_START_PROPERTY, startDigit);
    job.getConfiguration().setInt(DIGIT_SIZE_PROPERTY, nDigits);
    job.getConfiguration().setInt(DIGIT_PARTS_PROPERTY, nMaps);

    // start a map/reduce job
    out.println("\nStarting Job ...");
    final long startTime = System.currentTimeMillis();
    try {
        if (!job.waitForCompletion(true)) {
            out.println("Job failed.");
            // NOTE(review): terminates the whole JVM on job failure rather
            // than throwing — confirm callers expect this.
            System.exit(1);
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    } finally {
        // wall-clock duration is reported even when the job throws
        final double duration = (System.currentTimeMillis() - startTime) / 1000.0;
        out.println("Duration is " + duration + " seconds.");
    }
    out.println("Output file: " + hexfile);
}
From source file:cn.jpush.hdfs.mr.example.BaileyBorweinPlouffe.java
/**
 * Run a map/reduce job to compute Pi digits.
 *
 * @param startDigit first digit position to compute
 * @param nDigits    number of digits to compute
 * @param nMaps      number of map tasks to split the digit range across
 * @param workingDir job working directory; must not already exist
 * @param conf       Hadoop configuration used to create the job
 * @param out        stream for progress messages
 * @throws IOException if the working directory exists or cannot be created
 */
private static void compute(int startDigit, int nDigits, int nMaps, String workingDir, Configuration conf,
        PrintStream out) throws IOException {
    final String name = startDigit + "_" + nDigits;

    // setup working directory
    out.println("Working Directory = " + workingDir);
    out.println();
    // final FileSystem fs = FileSystem.get(conf);
    // NOTE(review): the default-FS lookup above was replaced by resolving the
    // file system from the working path itself — presumably so workingDir may
    // live on a non-default file system; confirm with the original author.
    final FileSystem fs = new Path(workingDir, "part-r-00000").getFileSystem(conf);
    final Path dir = fs.makeQualified(new Path(workingDir));
    if (fs.exists(dir)) {
        throw new IOException("Working directory " + dir + " already exists. Please remove it first.");
    } else if (!fs.mkdirs(dir)) {
        throw new IOException("Cannot create working directory " + dir);
    }

    out.println("Start Digit = " + startDigit);
    out.println("Number of Digits = " + nDigits);
    out.println("Number of Maps = " + nMaps);

    // setup a job
    final Job job = createJob(name, conf);
    final Path hexfile = new Path(dir, "pi_" + name + ".hex");
    FileOutputFormat.setOutputPath(job, new Path(dir, "out"));

    // setup custom properties so mappers/reducers can find their parameters
    job.getConfiguration().set(WORKING_DIR_PROPERTY, dir.toString());
    job.getConfiguration().set(HEX_FILE_PROPERTY, hexfile.toString());

    job.getConfiguration().setInt(DIGIT_START_PROPERTY, startDigit);
    job.getConfiguration().setInt(DIGIT_SIZE_PROPERTY, nDigits);
    job.getConfiguration().setInt(DIGIT_PARTS_PROPERTY, nMaps);

    // start a map/reduce job
    out.println("\nStarting Job ...");
    final long startTime = System.currentTimeMillis();
    try {
        if (!job.waitForCompletion(true)) {
            out.println("Job failed.");
            // NOTE(review): terminates the whole JVM on job failure rather
            // than throwing — confirm callers expect this.
            System.exit(1);
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    } finally {
        // wall-clock duration is reported even when the job throws
        final double duration = (System.currentTimeMillis() - startTime) / 1000.0;
        out.println("Duration is " + duration + " seconds.");
    }
    out.println("Output file: " + hexfile);
}
From source file:disko.DU.java
/**
 * Recursively prints a scope atom and everything scoped under it, one atom
 * per line, indenting one extra tab per nesting level.
 *
 * @param out                    stream to print to
 * @param graph                  the hypergraph containing the atoms
 * @param scope                  handle of the scope atom to print
 * @param stopRecursionPredicate if non-null, scoped atoms satisfying this
 *                               predicate are printed by their parent but
 *                               not descended into
 * @param indent                 current indentation prefix (grows by "\t"
 *                               per recursion level)
 */
public static void printScope(PrintStream out, HyperGraph graph, HGHandle scope,
        HGAtomPredicate stopRecursionPredicate, String indent) {
    Object scopeAtom = graph.get(scope);
    // SynRel atoms need the graph to render; everything else uses toString()
    if (scopeAtom instanceof SynRel)
        out.println(indent + ((SynRel) scopeAtom).toString(graph));
    else
        out.println(indent + scopeAtom);
    // Find all ScopeLink atoms whose first target is this scope; their second
    // target is the scoped (child) atom.
    List<HGHandle> scopeLinks = hg.findAll(graph, hg.and(hg.type(ScopeLink.class), hg.incident(scope),
            hg.orderedLink(scope, graph.getHandleFactory().anyHandle())));
    for (HGHandle lh : scopeLinks) {
        ScopeLink link = graph.get(lh);
        if (link == null) {
            // dangling handle: warn and keep printing the remaining children
            System.err.println("Opps, missing atom for " + lh);
            continue;
        }
        HGHandle scoped = link.getTargetAt(1);
        if (stopRecursionPredicate == null || !stopRecursionPredicate.satisfies(graph, scoped))
            printScope(out, graph, scoped, stopRecursionPredicate, indent + "\t");
    }
}
From source file:hu.bme.mit.sette.run.Run.java
private static Tool readTool(BufferedReader in, PrintStream out) throws IOException { // select tool Tool[] tools = ToolRegister.toArray(); Tool tool = null;/* w ww . j a v a 2 s . c o m*/ while (tool == null) { out.println("Available tools:"); for (int i = 0; i < tools.length; i++) { out.println(String.format(" [%d] %s", i + 1, tools[i].getName())); } out.print("Select tool: "); String line = in.readLine(); if (line == null) { out.println("EOF detected, exiting"); return null; } else if (StringUtils.isBlank(line)) { out.println("Exiting"); return null; } line = line.trim(); int idx = -1; for (int i = 0; i < tools.length; i++) { if (tools[i].getName().equalsIgnoreCase(line)) { idx = i; break; } } if (idx >= 0) { tool = tools[idx]; } else { try { tool = tools[Integer.parseInt(line) - 1]; } catch (Exception e) { tool = null; } } if (tool == null) { out.println("Invalid tool: " + line.trim()); } } out.println("Selected tool: " + tool.getName()); return tool; }
From source file:hudson.os.solaris.ZFSInstaller.java
/**
 * Migrates $HUDSON_HOME to a new ZFS file system.
 *
 * TODO: do this in a separate JVM to elevate the privilege.
 *
 * @param listener
 *      Log of migration goes here.
 * @param target
 *      Dataset to move the data to.
 * @return
 *      false if a migration failed.
 */
private static boolean migrate(TaskListener listener, String target) throws IOException, InterruptedException {
    PrintStream out = listener.getLogger();

    File home = Hudson.getInstance().getRootDir();
    // do the migration
    LibZFS zfs = new LibZFS();
    ZFSFileSystem existing = zfs.getFileSystemByMountPoint(home);
    if (existing != null) {
        // nothing to do — the home directory already lives on ZFS
        out.println(home + " is already on ZFS. Doing nothing");
        return true;
    }

    File tmpDir = Util.createTempDir();

    // mount a new file system to a temporary location
    out.println("Opening " + target);
    ZFSFileSystem hudson = zfs.open(target, ZFSFileSystem.class);
    hudson.setMountPoint(tmpDir);
    hudson.setProperty("hudson:managed-by", "hudson"); // mark this file system as "managed by Hudson"
    hudson.mount();

    // copy all the files (cp -pR preserves permissions/timestamps)
    out.println("Copying all existing data files");
    if (system(home, listener, "/usr/bin/cp", "-pR", ".", tmpDir.getAbsolutePath()) != 0) {
        out.println("Failed to copy " + home + " to " + tmpDir);
        return false;
    }

    // unmount
    out.println("Unmounting " + target);
    hudson.unmount(MountFlags.MS_FORCE);

    // move the original directory to the side
    File backup = new File(home.getPath() + ".backup");
    out.println("Moving " + home + " to " + backup);
    if (backup.exists())
        Util.deleteRecursive(backup);
    if (!home.renameTo(backup)) {
        // NOTE(review): the failure is only logged — migration continues and
        // the mkdir below will then fail; confirm this is intentional.
        out.println("Failed to move your current data " + home + " out of the way");
    }

    // update the mount point: remount the new file system at the real home
    out.println("Creating a new mount point at " + home);
    if (!home.mkdir())
        throw new IOException("Failed to create mount point " + home);

    out.println("Mounting " + target);
    hudson.setMountPoint(home);
    hudson.mount();

    out.println("Sharing " + target);
    try {
        hudson.setProperty("sharesmb", "on");
        hudson.setProperty("sharenfs", "on");
        hudson.share();
    } catch (ZFSException e) {
        // sharing is best-effort: the migration itself already succeeded
        listener.error("Failed to share the file systems: " + e.getCode());
    }

    // delete back up
    out.println("Deleting " + backup);
    if (system(new File("/"), listener, "/usr/bin/rm", "-rf", backup.getAbsolutePath()) != 0) {
        out.println("Failed to delete " + backup.getAbsolutePath());
        return false;
    }

    out.println("Migration completed");
    return true;
}