Example usage for java.io PrintStream PrintStream

Introduction

This page collects usage examples for the java.io.PrintStream constructor that takes an OutputStream, an autoFlush flag, and a charset.

Prototype

public PrintStream(OutputStream out, boolean autoFlush, Charset charset) 

Document

Creates a new print stream with the specified OutputStream, automatic line flushing, and charset.
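
Before the full examples, here is a minimal, self-contained sketch of this constructor in isolation (the class and variable names are illustrative, not taken from the examples below). Note that the Charset overload shown in the prototype was added in Java 10; most of the examples below use the older overload that takes the charset name as a String and must therefore handle its checked UnsupportedEncodingException.

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;

public class PrintStreamCharsetSketch {
    public static void main(String[] args) {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        // autoFlush = true flushes the stream on every println();
        // passing a Charset (Java 10+) avoids the checked
        // UnsupportedEncodingException thrown by the String-name overload.
        PrintStream out = new PrintStream(buffer, true, StandardCharsets.UTF_8);
        out.println("caf\u00e9");
        // ByteArrayOutputStream.toString(Charset) is likewise Java 10+.
        System.out.println(buffer.toString(StandardCharsets.UTF_8));
    }
}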

Usage

From source file:au.csiro.casda.sodalint.ValidateServiceDescriptorTest.java

/**
 * Test a v1.2 VOTable service descriptor document.
 *
 * @throws IOException
 *             If the data file cannot be read.
 */
@Test
public void testVerifyV12ServiceDescriptor() throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    PrintStream ps = new PrintStream(baos, false, CHARSET_UTF_8);
    Reporter reporter = new TextOutputReporter(ps, ReportType.values(), 10, false, 1024);
    String xmlContent = FileUtils
            .readFileToString(new File("src/test/resources/service-descriptor-v1_2-good.xml"));
    vsd.verifyServiceDescriptor(reporter, xmlContent);

    String result = baos.toString(CHARSET_UTF_8);
    System.out.println(result);
    assertEquals("No message should have been reported", "", result);
}

From source file:com.sonarsource.lits.Dump.java

/**
 * Writes the sorted issues to one file per rule key under {@code dir},
 * opening and closing one block per component within each rule file.
 */
static void save(List<IssueKey> issues, File dir) {
    try {
        FileUtils.forceMkdir(dir);
    } catch (IOException e) {
        throw Throwables.propagate(e);
    }

    Collections.sort(issues, new IssueKeyComparator());

    PrintStream out = null;
    String prevRuleKey = null;
    String prevComponentKey = null;
    for (IssueKey issueKey : issues) {
        if (!issueKey.ruleKey.equals(prevRuleKey)) {
            if (out != null) {
                endRule(out);
            }
            try {
                out = new PrintStream(new FileOutputStream(new File(dir, ruleKeyToFileName(issueKey.ruleKey))),
                        /* autoFlush: */ true, StandardCharsets.UTF_8.name());
            } catch (IOException e) {
                throw Throwables.propagate(e);
            }
            out.print("{\n");
            startComponent(out, issueKey.componentKey);
        } else if (!issueKey.componentKey.equals(prevComponentKey)) {
            endComponent(out);
            startComponent(out, issueKey.componentKey);
        }
        out.print(issueKey.line + ",\n");
        prevComponentKey = issueKey.componentKey;
        prevRuleKey = issueKey.ruleKey;
    }
    if (out != null) {
        endRule(out);
    }
}

From source file:hudson.plugins.clearcase.changelog.UcmChangeLogSet.java

@Override
public void saveToFile(File changeLogFile) throws IOException {

    FileOutputStream fileOutputStream = new FileOutputStream(changeLogFile);

    PrintStream stream = new PrintStream(fileOutputStream, false, "UTF-8");

    stream.println("<?xml version='1.0' encoding='UTF-8'?>");
    stream.println("<history>");
    for (UcmActivity activity : history) {
        stream.println("\t<entry>");
        stream.println("\t\t<name>" + escapeXml(activity.getName()) + "</name>");
        stream.println("\t\t<headline>" + escapeXml(activity.getHeadline()) + "</headline>");
        stream.println("\t\t<stream>" + escapeXml(activity.getStream()) + "</stream>");
        stream.println("\t\t<user>" + escapeXml(activity.getUser()) + "</user>");
        for (UcmActivity subActivity : activity.getSubActivities()) {
            printSubActivity(stream, subActivity);
        }
        for (hudson.plugins.clearcase.objects.AffectedFile file : activity.getFiles()) {
            printFile(stream, file);
        }
        stream.println("\t</entry>");
    }
    stream.println("</history>");

    stream.close();
    fileOutputStream.close();
}

From source file:com.bjond.Main.java

/**
*  Given an input stream _in_ to an audit log, the unobfuscated log will be streamed to _out_.
*
* @param in the audit log input stream
* @param out the destination stream for the unobfuscated log
*
* @throws IOException
* @throws SQLException
*/
public static void process(final InputStream in, final OutputStream out) throws IOException, SQLException {
    log.info("Execution begins...");

    // Generate the PostgreSQL URL from system environment variables.
    POSTGRESQL_URL = String.format("jdbc:postgresql://%s:%s/%s", OPENSHIFT_POSTGRESQL_DB_HOST,
            OPENSHIFT_POSTGRESQL_DB_PORT, OPENSHIFT_APP_NAME);

    try (final Connection db = DriverManager.getConnection(POSTGRESQL_URL, OPENSHIFT_POSTGRESQL_DB_USERNAME,
            OPENSHIFT_POSTGRESQL_DB_PASSWORD)) {

        final PrintStream outPrintStream = new PrintStream(out, true, "UTF-8");
        final Reader inReader = new InputStreamReader(in, "UTF-8");
        final Iterable<CSVRecord> records = CSVFormat.DEFAULT.withQuote('\'').parse(inReader);

        log.info("PostgreSQL DB connectiion valid: {}", db.isValid(1000));

        records.forEach(record -> {
            record.iterator().forEachRemaining(e -> {
                try {
                    if (!e.isEmpty()) {
                        final String[] tuple = keyValueSplitter(e);
                        outPrintStream.printf("%s='%s',", tuple[0], resolve(db, tuple[0], tuple[1]));
                    }
                } catch (final Exception exception) {
                    log.error("unexpected error on " + e, exception);
                }
            });

            outPrintStream.printf("%n"); // EOL
        });
    }

    log.info("Execution ends...");
}

From source file:aos.lucene.search.advanced.SortingExample.java

public void displayResults(Query query, Sort sort) throws IOException {
    IndexSearcher searcher = new IndexSearcher(directory);

    searcher.setDefaultFieldSortScoring(true, false);

    TopDocs results = searcher.search(query, null, 20, sort);

    LOGGER.info("\nResults for: " + query.toString() + " sorted by " + sort);

    LOGGER.info(StringUtils.rightPad("Title", 30) + StringUtils.rightPad("pubmonth", 10)
            + StringUtils.center("id", 4) + StringUtils.center("score", 15));

    PrintStream out = new PrintStream(System.out, true, "UTF-8");

    DecimalFormat scoreFormatter = new DecimalFormat("0.######");
    for (ScoreDoc sd : results.scoreDocs) {
        int docID = sd.doc;
        float score = sd.score;
        Document doc = searcher.doc(docID);
        out.println(StringUtils.rightPad(StringUtils.abbreviate(doc.get("title"), 29), 30)
                + StringUtils.rightPad(doc.get("pubmonth"), 10)
                + StringUtils.center("" + docID, 4)
                + StringUtils.leftPad(scoreFormatter.format(score), 12));
        out.println("   " + doc.get("category"));
        //out.println(searcher.explain(query, docID));
    }

    searcher.close();
}

From source file:cc.wikitools.lucene.hadoop.FetchWikipediaArticleHdfs.java

@SuppressWarnings("static-access")
@Override
public int run(String[] args) throws Exception {
    Options options = new Options();
    options.addOption(
            OptionBuilder.withArgName("path").hasArg().withDescription("index location").create(INDEX_OPTION));
    options.addOption(OptionBuilder.withArgName("num").hasArg().withDescription("id").create(ID_OPTION));
    options.addOption(
            OptionBuilder.withArgName("string").hasArg().withDescription("title").create(TITLE_OPTION));

    CommandLine cmdline = null;
    CommandLineParser parser = new GnuParser();
    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        System.exit(-1);
    }

    if (!(cmdline.hasOption(ID_OPTION) || cmdline.hasOption(TITLE_OPTION))
            || !cmdline.hasOption(INDEX_OPTION)) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(FetchWikipediaArticleHdfs.class.getName(), options);
        System.exit(-1);
    }

    String indexLocation = cmdline.getOptionValue(INDEX_OPTION);

    HdfsWikipediaSearcher searcher = new HdfsWikipediaSearcher(new Path(indexLocation), getConf());
    PrintStream out = new PrintStream(System.out, true, "UTF-8");

    if (cmdline.hasOption(ID_OPTION)) {
        int id = Integer.parseInt(cmdline.getOptionValue(ID_OPTION));
        Document doc = searcher.getArticle(id);

        if (doc == null) {
            System.err.print("id " + id + " doesn't exist!\n");
        } else {
            out.println(doc.getField(IndexField.TEXT.name).stringValue());
        }
    } else {
        String title = cmdline.getOptionValue(TITLE_OPTION);
        Document doc = searcher.getArticle(title);

        if (doc == null) {
            System.err.print("article \"" + title + "\" doesn't exist!\n");
        } else {
            out.println(doc.getField(IndexField.TEXT.name).stringValue());
        }
    }

    searcher.close();
    out.close();

    return 0;
}

From source file:com.sk89q.craftapi.streaming.StreamingServerClient.java

/**
 * Construct the instance.
 * 
 * @param server
 * @param socket
 */
public StreamingServerClient(StreamingServer server, Socket socket) throws Throwable {
    this.server = server;
    this.socket = socket;

    random = SecureRandom.getInstance("SHA1PRNG");
    challenge = new byte[32];
    random.nextBytes(challenge);

    InputStreamReader inReader = new InputStreamReader(socket.getInputStream(), "utf-8");
    in = new BufferedReader(inReader);
    out = new PrintStream(socket.getOutputStream(), true, "utf-8");
}

From source file:azkaban.jobtype.ReportalHiveRunner.java

@Override
protected void runReportal() throws Exception {
    System.out.println("Reportal Hive: Setting up Hive");
    HiveConf conf = new HiveConf(SessionState.class);

    if (System.getenv("HADOOP_TOKEN_FILE_LOCATION") != null) {
        conf.set("mapreduce.job.credentials.binary", System.getenv("HADOOP_TOKEN_FILE_LOCATION"));
    }

    File tempTSVFile = new File("./temp.tsv");
    OutputStream tsvTempOutputStream = new BoundedOutputStream(
            new BufferedOutputStream(new FileOutputStream(tempTSVFile)), outputCapacity);
    PrintStream logOut = System.out;

    // NOTE: It is critical to do this here so that log4j is reinitialized
    // before any of the other core hive classes are loaded
    // criccomini@linkedin.com: I disabled this because it appears to swallow
    // all future logging (even outside of hive).
    // SessionState.initHiveLog4j();

    String orig = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);

    CliSessionState sessionState = new CliSessionState(conf);
    sessionState.in = System.in;
    sessionState.out = new PrintStream(tsvTempOutputStream, true, "UTF-8");
    sessionState.err = new PrintStream(logOut, true, "UTF-8");

    OptionsProcessor oproc = new OptionsProcessor();

    // Feed in Hive Args
    String[] args = buildHiveArgs();
    if (!oproc.process_stage1(args)) {
        throw new Exception("unable to parse options stage 1");
    }

    if (!oproc.process_stage2(sessionState)) {
        throw new Exception("unable to parse options stage 2");
    }

    // Set all properties specified via command line
    for (Map.Entry<Object, Object> item : sessionState.cmdProperties.entrySet()) {
        conf.set((String) item.getKey(), (String) item.getValue());
    }

    SessionState.start(sessionState);

    String expanded = expandHiveAuxJarsPath(orig);
    if (orig == null || orig.equals(expanded)) {
        System.out.println("Hive aux jars variable not expanded");
    } else {
        System.out.println("Expanded aux jars variable from [" + orig + "] to [" + expanded + "]");
        HiveConf.setVar(conf, HiveConf.ConfVars.HIVEAUXJARS, expanded);
    }

    if (!ShimLoader.getHadoopShims().usesJobShell()) {
        // hadoop-20 and above - we need to augment classpath using hiveconf
        // components
        // see also: code in ExecDriver.java
        ClassLoader loader = conf.getClassLoader();
        String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);

        System.out.println("Got auxJars = " + auxJars);

        if (StringUtils.isNotBlank(auxJars)) {
            loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
        }
        conf.setClassLoader(loader);
        Thread.currentThread().setContextClassLoader(loader);
    }

    CliDriver cli = new CliDriver();
    int returnValue = 0;
    String prefix = "";

    returnValue = cli.processLine("set hive.cli.print.header=true;");
    String[] queries = jobQuery.split("\n");
    for (String line : queries) {
        if (!prefix.isEmpty()) {
            prefix += '\n';
        }
        if (line.trim().endsWith(";") && !line.trim().endsWith("\\;")) {
            line = prefix + line;
            line = injectVariables(line);
            System.out.println("Reportal Hive: Running Hive Query: " + line);
            System.out.println("Reportal Hive: HiveConf HIVEAUXJARS: "
                    + HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS));
            returnValue = cli.processLine(line);
            prefix = "";
        } else {
            prefix = prefix + line;
            continue;
        }
    }

    tsvTempOutputStream.close();

    // convert tsv to csv and write it to disk
    System.out.println("Reportal Hive: Converting output");
    InputStream tsvTempInputStream = new BufferedInputStream(new FileInputStream(tempTSVFile));
    Scanner rowScanner = new Scanner(tsvTempInputStream);
    PrintStream csvOutputStream = new PrintStream(outputStream);
    while (rowScanner.hasNextLine()) {
        String tsvLine = rowScanner.nextLine();
        // strip all quotes, and then quote the columns
        csvOutputStream.println("\"" + tsvLine.replace("\"", "").replace("\t", "\",\"") + "\"");
    }
    rowScanner.close();
    csvOutputStream.close();

    // Remove the temp file
    tempTSVFile.delete();

    if (returnValue != 0) {
        throw new Exception("Hive query finished with a non zero return code");
    }

    System.out.println("Reportal Hive: Ended successfully");
}

From source file:cc.wikitools.lucene.hadoop.ScoreWikipediaArticleHdfs.java

@SuppressWarnings("static-access")
@Override
public int run(String[] args) throws Exception {
    Options options = new Options();
    options.addOption(
            OptionBuilder.withArgName("path").hasArg().withDescription("index location").create(INDEX_OPTION));
    options.addOption(OptionBuilder.withArgName("num").hasArg().withDescription("id").create(ID_OPTION));
    options.addOption(
            OptionBuilder.withArgName("string").hasArg().withDescription("title").create(TITLE_OPTION));
    options.addOption(
            OptionBuilder.withArgName("string").hasArg().withDescription("query text").create(QUERY_OPTION));

    CommandLine cmdline = null;
    CommandLineParser parser = new GnuParser();
    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        System.exit(-1);
    }

    if (!(cmdline.hasOption(ID_OPTION) || cmdline.hasOption(TITLE_OPTION)) || !cmdline.hasOption(INDEX_OPTION)
            || !cmdline.hasOption(QUERY_OPTION)) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(ScoreWikipediaArticleHdfs.class.getName(), options);
        System.exit(-1);
    }

    String indexLocation = cmdline.getOptionValue(INDEX_OPTION);
    String queryText = cmdline.getOptionValue(QUERY_OPTION);

    HdfsWikipediaSearcher searcher = new HdfsWikipediaSearcher(new Path(indexLocation), getConf());
    PrintStream out = new PrintStream(System.out, true, "UTF-8");

    if (cmdline.hasOption(ID_OPTION)) {
        out.println("score: "
                + searcher.scoreArticle(queryText, Integer.parseInt(cmdline.getOptionValue(ID_OPTION))));
    } else {
        out.println("score: " + searcher.scoreArticle(queryText, cmdline.getOptionValue(TITLE_OPTION)));
    }

    searcher.close();
    out.close();

    return 0;
}

From source file:DatabaseTest.java

@Before
public void importDataSet() throws Exception {
    dbConn = DriverManager.getConnection(JDBC_URL, USER, PASSWORD);
    createSchema();
    IDataSet dataSet = readDataSet();
    cleanlyInsert(dataSet);
    //dbConn.setAutoCommit(false); // Emulate command line use

    testOutput = new ByteArrayOutputStream();
    props = new Properties();
    props.setProperty("db.driver", JDBC_DRIVER);
    props.setProperty("db.database", JDBC_URL);
    props.setProperty("db.user", USER);
    props.setProperty("db.password", PASSWORD);
    engine = new CLI(props);
    engine.setOutputStream(new PrintStream(testOutput, false, UTF8_ENCODING));
    engine.openConnection();
}