Example usage for java.io.PrintStream.flush()

List of usage examples for java.io.PrintStream.flush()

Introduction

This page collects usage examples of java.io.PrintStream.flush() drawn from the source files listed below.

Prototype

public void flush() 

Document

Flushes the stream.
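
As a quick orientation, here is a minimal, self-contained sketch (not taken from any of the source files below) showing the two common reasons to call flush(): making a prompt visible before the program blocks for input, and pushing buffered output through to its destination without closing the stream. The file name demo.txt is only an illustrative placeholder.

import java.io.BufferedOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;

public class FlushDemo {
    public static void main(String[] args) throws IOException {
        // System.out may buffer output; flush() guarantees the prompt
        // appears before the program blocks waiting for user input.
        System.out.print("name ? ");
        System.out.flush();

        // A PrintStream over a BufferedOutputStream keeps bytes in memory;
        // flush() hands them to the underlying file stream immediately,
        // without closing the stream.
        try (PrintStream out = new PrintStream(
                new BufferedOutputStream(new FileOutputStream("demo.txt")))) {
            out.println("hello");
            out.flush();
        }
    }
}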

Usage

From source file:ArrayDictionary.java

/**
 * A kludge for testing ArrayDictionary
 */
public static void main(String[] args) {
    try {
        PrintStream out = System.out;
        // BufferedReader provides readLine(); DataInputStream.readLine() is deprecated
        BufferedReader in = new BufferedReader(new InputStreamReader(System.in));

        String line = null;

        out.print("n ? ");
        out.flush();
        line = in.readLine();
        int n = Integer.parseInt(line);
        ArrayDictionary ad = new ArrayDictionary(n);

        String key = null, value = null;
        while (true) {
            out.print("action ? ");
            out.flush();
            line = in.readLine();

            switch (line.charAt(0)) {
            case 'p':
            case 'P':
                out.print("key ? ");
                out.flush();
                key = in.readLine();
                out.print("value ? ");
                out.flush();
                value = in.readLine();
                value = (String) ad.put(key, value);
                out.println("old: " + value);
                break;
            case 'r':
            case 'R':
                out.print("key ? ");
                out.flush();
                key = in.readLine();
                value = (String) ad.remove(key);
                out.println("old: " + value);
                break;
            case 'g':
            case 'G':
                out.print("key ? ");
                out.flush();
                key = in.readLine();
                value = (String) ad.get(key);
                out.println("value: " + value);
                break;
            case 'd':
            case 'D':
                out.println(ad.toString());
                break;
            case 'q':
            case 'Q':
                return;
            }
        }
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}

From source file:Messenger.TorLib.java

public static void main(String[] args) {
    String req = "-r";
    String targetHostname = "tor.eff.org";
    String targetDir = "index.html";
    int targetPort = 80;

    if (args.length > 0 && args[0].equals("-h")) {
        System.out.println("Tinfoil/TorLib - interface for using Tor from Java\n"
                + "By Joe Foley<foley@mit.edu>\n" + "Usage: java Tinfoil.TorLib <cmd> <args>\n"
                + "<cmd> can be: -h for help\n" + "              -r for resolve\n"
                + "              -w for wget\n" + "For -r, the arg is:\n"
                + "  <hostname> Hostname to DNS resolve\n" + "For -w, the args are:\n"
                + "   <host> <path> <optional port>\n"
                + " for example, http://tor.eff.org:80/index.html would be\n" + "   tor.eff.org index.html 80\n"
                + " Since this is a demo, the default is the tor website.\n");
        System.exit(2);
    }

    if (args.length >= 4)
        targetPort = Integer.parseInt(args[3]);
    if (args.length >= 3)
        targetDir = args[2];
    if (args.length >= 2)
        targetHostname = args[1];
    if (args.length >= 1)
        req = args[0];

    if (req.equals("-r")) {
        System.out.println(TorResolve(targetHostname));
    } else if (req.equals("-w")) {
        try {
            Socket s = TorSocket(targetHostname, targetPort);
            DataInputStream is = new DataInputStream(s.getInputStream());
            PrintStream out = new java.io.PrintStream(s.getOutputStream());

            //Construct an HTTP request
            out.print("GET  /" + targetDir + " HTTP/1.0\r\n");
            out.print("Host: " + targetHostname + ":" + targetPort + "\r\n");
            out.print("Accept: */*\r\n");
            out.print("Connection: Keep-Aliv\r\n");
            out.print("Pragma: no-cache\r\n");
            out.print("\r\n");
            out.flush();

            // this is from Java Examples In a Nutshell
            final InputStreamReader from_server = new InputStreamReader(is);
            char[] buffer = new char[1024];
            int chars_read;

            // read until stream closes
            while ((chars_read = from_server.read(buffer)) != -1) {
                // loop through array of chars
                // change \n to local platform terminator
                // this is a naive implementation
                for (int j = 0; j < chars_read; j++) {
                    if (buffer[j] == '\n')
                        System.out.println();
                    else
                        System.out.print(buffer[j]);
                }
                System.out.flush();
            }
            s.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

From source file:org.apache.streams.sysomos.provider.SysomosProvider.java

/**
 * To use from command line:
 * <p/>
 * Supply configuration similar to src/test/resources/rss.conf
 * <p/>
 * Launch using:
 * <p/>
 * mvn exec:java -Dexec.mainClass=org.apache.streams.sysomos.provider.SysomosProvider -Dexec.args="rss.conf articles.json"
 * @param args args
 * @throws Exception Exception
 */
public static void main(String[] args) throws Exception {

    Preconditions.checkArgument(args.length >= 2);

    String configfile = args[0];
    String outfile = args[1];

    Config reference = ConfigFactory.load();
    File file = new File(configfile);
    assert (file.exists());
    Config testResourceConfig = ConfigFactory.parseFileAnySyntax(file,
            ConfigParseOptions.defaults().setAllowMissing(false));

    Config typesafe = testResourceConfig.withFallback(reference).resolve();

    StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
    SysomosConfiguration config = new ComponentConfigurator<>(SysomosConfiguration.class)
            .detectConfiguration(typesafe, "rss");
    SysomosProvider provider = new SysomosProvider(config);

    ObjectMapper mapper = StreamsJacksonMapper.getInstance();

    PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
    provider.prepare(config);
    provider.startStream();
    do {
        Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(),
                TimeUnit.MILLISECONDS);
        for (StreamsDatum datum : provider.readCurrent()) {
            String json;
            try {
                json = mapper.writeValueAsString(datum.getDocument());
                outStream.println(json);
            } catch (JsonProcessingException ex) {
                System.err.println(ex.getMessage());
            }
        }
    } while (provider.isRunning());
    provider.cleanUp();
    outStream.flush();
}

From source file:org.apache.streams.twitter.provider.TwitterTimelineProvider.java

/**
 * To use from command line:
 *
 * <p/>
 * Supply (at least) the following required configuration in application.conf:
 *
 * <p/>
 * twitter.oauth.consumerKey
 * twitter.oauth.consumerSecret
 * twitter.oauth.accessToken
 * twitter.oauth.accessTokenSecret
 * twitter.info
 *
 * <p/>
 * Launch using:
 *
 * <p/>
 * mvn exec:java -Dexec.mainClass=org.apache.streams.twitter.provider.TwitterTimelineProvider -Dexec.args="application.conf tweets.json"
 *
 * @param args args
 * @throws Exception Exception
 */
public static void main(String[] args) throws Exception {

    Preconditions.checkArgument(args.length >= 2);

    String configfile = args[0];
    String outfile = args[1];

    Config reference = ConfigFactory.load();
    File file = new File(configfile);
    assert (file.exists());
    Config testResourceConfig = ConfigFactory.parseFileAnySyntax(file,
            ConfigParseOptions.defaults().setAllowMissing(false));

    Config typesafe = testResourceConfig.withFallback(reference).resolve();

    StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
    TwitterTimelineProviderConfiguration config = new ComponentConfigurator<>(
            TwitterTimelineProviderConfiguration.class).detectConfiguration(typesafe, "twitter");
    TwitterTimelineProvider provider = new TwitterTimelineProvider(config);

    ObjectMapper mapper = new StreamsJacksonMapper(
            Stream.of(TwitterDateTimeFormat.TWITTER_FORMAT).collect(Collectors.toList()));

    PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
    provider.prepare(config);
    provider.startStream();
    do {
        Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(),
                TimeUnit.MILLISECONDS);
        for (StreamsDatum datum : provider.readCurrent()) {
            String json;
            try {
                json = mapper.writeValueAsString(datum.getDocument());
                outStream.println(json);
            } catch (JsonProcessingException ex) {
                System.err.println(ex.getMessage());
            }
        }
    } while (provider.isRunning());
    provider.cleanUp();
    outStream.flush();
}

From source file:org.apache.streams.twitter.provider.TwitterUserInformationProvider.java

/**
 * To use from command line:
 *
 * <p/>
 * Supply (at least) the following required configuration in application.conf:
 *
 * <p/>
 * twitter.oauth.consumerKey
 * twitter.oauth.consumerSecret
 * twitter.oauth.accessToken
 * twitter.oauth.accessTokenSecret
 * twitter.info
 *
 * <p/>
 * Launch using:
 *
 * <p/>
 * mvn exec:java -Dexec.mainClass=org.apache.streams.twitter.provider.TwitterUserInformationProvider -Dexec.args="application.conf tweets.json"
 *
 * @param args args
 * @throws Exception Exception
 */
public static void main(String[] args) throws Exception {

    Preconditions.checkArgument(args.length >= 2);

    String configfile = args[0];
    String outfile = args[1];

    Config reference = ConfigFactory.load();
    File file = new File(configfile);
    assert (file.exists());
    Config testResourceConfig = ConfigFactory.parseFileAnySyntax(file,
            ConfigParseOptions.defaults().setAllowMissing(false));

    Config typesafe = testResourceConfig.withFallback(reference).resolve();

    StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
    TwitterUserInformationConfiguration config = new ComponentConfigurator<>(
            TwitterUserInformationConfiguration.class).detectConfiguration(typesafe, "twitter");
    TwitterUserInformationProvider provider = new TwitterUserInformationProvider(config);

    PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
    provider.prepare(config);
    provider.startStream();
    do {
        Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(),
                TimeUnit.MILLISECONDS);
        for (StreamsDatum datum : provider.readCurrent()) {
            String json;
            try {
                json = MAPPER.writeValueAsString(datum.getDocument());
                outStream.println(json);
            } catch (JsonProcessingException ex) {
                System.err.println(ex.getMessage());
            }
        }
    } while (provider.isRunning());
    provider.cleanUp();
    outStream.flush();
}

From source file:org.apache.streams.twitter.provider.TwitterStreamProvider.java

/**
 * To use from command line:
 *
 * <p/>
 * Supply (at least) the following required configuration in application.conf:
 *
 * <p/>
 * twitter.oauth.consumerKey
 * twitter.oauth.consumerSecret
 * twitter.oauth.accessToken
 * twitter.oauth.accessTokenSecret
 *
 * <p/>
 * Launch using:
 *
 * <p/>
 * mvn exec:java -Dexec.mainClass=org.apache.streams.twitter.provider.TwitterStreamProvider -Dexec.args="application.conf tweets.json"
 *
 * @param args
 */
public static void main(String[] args) {

    Preconditions.checkArgument(args.length >= 2);

    String configfile = args[0];
    String outfile = args[1];

    Config reference = ConfigFactory.load();
    File file = new File(configfile);
    assert (file.exists());
    Config testResourceConfig = ConfigFactory.parseFileAnySyntax(file,
            ConfigParseOptions.defaults().setAllowMissing(false));

    Config typesafe = testResourceConfig.withFallback(reference).resolve();

    StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
    TwitterStreamConfiguration config = new ComponentConfigurator<>(TwitterStreamConfiguration.class)
            .detectConfiguration(typesafe, "twitter");
    TwitterStreamProvider provider = new TwitterStreamProvider(config);

    ObjectMapper mapper = StreamsJacksonMapper
            .getInstance(Collections.singletonList(TwitterDateTimeFormat.TWITTER_FORMAT));

    PrintStream outStream;
    try {
        outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
    } catch (FileNotFoundException ex) {
        LOGGER.error("FileNotFoundException", ex);
        return;
    }
    provider.prepare(config);
    provider.startStream();
    do {
        Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(),
                TimeUnit.MILLISECONDS);
        for (StreamsDatum datum : provider.readCurrent()) {
            String json;
            try {
                json = mapper.writeValueAsString(datum.getDocument());
                outStream.println(json);
            } catch (JsonProcessingException ex) {
                System.err.println(ex.getMessage());
            }
        }
    } while (provider.isRunning());
    provider.cleanUp();
    outStream.flush();
}

From source file:nab.detectors.htmjava.HTMModel.java

/**
 * Launch htm.java NAB detector
 *
 * Usage:
 *      As a standalone application (for debug purpose only):
 *
 *          java -jar htm.java-nab.jar "{\"modelParams\":{....}}" < nab_data.csv > anomalies.out
 *
 *      For complete list of command line options use:
 *
 *          java -jar htm.java-nab.jar --help
 *
 *      As a NAB detector (see 'htmjava_detector.py'):
 *
 *          python run.py --detect --score --normalize -d htmjava
 *
 *      Logging options, see "log4j.properties":
 *
 *          - "LOGLEVEL": Controls log output (default: "OFF")
 *          - "LOGGER": Either "CONSOLE" or "FILE" (default: "CONSOLE")
 *          - "LOGFILE": Log file destination (default: "htmjava.log")
 *
 *      For example:
 *
 *          java -DLOGLEVEL=TRACE -DLOGGER=FILE -jar htm.java-nab.jar "{\"modelParams\":{....}}" < nab_data.csv > anomalies.out
 *
 */
@SuppressWarnings("resource")
public static void main(String[] args) {
    try {
        LOGGER.trace("main({})", Arrays.asList(args));
        // Parse command line args
        OptionParser parser = new OptionParser();
        parser.nonOptions("OPF parameters object (JSON)");
        parser.acceptsAll(Arrays.asList("p", "params"),
                "OPF parameters file (JSON).\n(default: first non-option argument)").withOptionalArg()
                .ofType(File.class);
        parser.acceptsAll(Arrays.asList("i", "input"), "Input data file (csv).\n(default: stdin)")
                .withOptionalArg().ofType(File.class);
        parser.acceptsAll(Arrays.asList("o", "output"), "Output results file (csv).\n(default: stdout)")
                .withOptionalArg().ofType(File.class);
        parser.acceptsAll(Arrays.asList("s", "skip"), "Header lines to skip").withOptionalArg()
                .ofType(Integer.class).defaultsTo(0);
        parser.acceptsAll(Arrays.asList("h", "?", "help"), "Help");
        OptionSet options = parser.parse(args);
        if (args.length == 0 || options.has("h")) {
            parser.printHelpOn(System.out);
            return;
        }

        // Get in/out files
        final PrintStream output;
        final InputStream input;
        if (options.has("i")) {
            input = new FileInputStream((File) options.valueOf("i"));
        } else {
            input = System.in;
        }
        if (options.has("o")) {
            output = new PrintStream((File) options.valueOf("o"));
        } else {
            output = System.out;
        }

        // Parse OPF Model Parameters
        JsonNode params;
        ObjectMapper mapper = new ObjectMapper();
        if (options.has("p")) {
            params = mapper.readTree((File) options.valueOf("p"));
        } else if (options.nonOptionArguments().isEmpty()) {
            try {
                input.close();
            } catch (Exception ignore) {
            }
            if (options.has("o")) {
                try {
                    output.flush();
                    output.close();
                } catch (Exception ignore) {
                }
            }
            throw new IllegalArgumentException("Expecting OPF parameters. See 'help' for more information");
        } else {
            params = mapper.readTree((String) options.nonOptionArguments().get(0));
        }

        // Number of header lines to skip
        int skip = (int) options.valueOf("s");

        // Force timezone to UTC
        DateTimeZone.setDefault(DateTimeZone.UTC);

        // Create NAB Network Model
        HTMModel model = new HTMModel(params);
        Network network = model.getNetwork();
        network.observe().subscribe((inference) -> {
            double score = inference.getAnomalyScore();
            int record = inference.getRecordNum();
            LOGGER.trace("record = {}, score = {}", record, score);
            // Output raw anomaly score
            output.println(score);
        }, (error) -> {
            LOGGER.error("Error processing data", error);
        }, () -> {
            LOGGER.trace("Done processing data");
            if (LOGGER.isDebugEnabled()) {
                model.showDebugInfo();
            }
        });
        network.start();

        // Pipe data to network
        Publisher publisher = model.getPublisher();
        BufferedReader in = new BufferedReader(new InputStreamReader(input));
        String line;
        while ((line = in.readLine()) != null && line.trim().length() > 0) {
            // Skip header lines
            if (skip > 0) {
                skip--;
                continue;
            }
            publisher.onNext(line);
        }
        publisher.onComplete();
        in.close();
        LOGGER.trace("Done publishing data");
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:org.apache.jackrabbit.oak.plugins.document.rdb.RDBExport.java

public static void main(String[] args) throws ClassNotFoundException, SQLException, IOException {

    String url = null, user = null, pw = null, table = "nodes", query = null, dumpfile = null, lobdir = null;
    List<String> fieldList = Collections.emptyList();
    Format format = Format.JSON;
    PrintStream out = System.out;
    Set<String> excl = new HashSet<String>();
    excl.add(Document.ID);
    RDBDocumentSerializer ser = new RDBDocumentSerializer(new MemoryDocumentStore(), excl);
    String columns = null;

    String param = null;
    try {
        for (int i = 0; i < args.length; i++) {
            param = args[i];
            if ("-u".equals(param) || "--username".equals(param)) {
                user = args[++i];
            } else if ("-p".equals(param) || "--password".equals(param)) {
                pw = args[++i];
            } else if ("-c".equals(param) || "--collection".equals(param)) {
                table = args[++i];
            } else if ("-j".equals(param) || "--jdbc-url".equals(param)) {
                url = args[++i];
            } else if ("-q".equals(param) || "--query".equals(param)) {
                query = args[++i];
            } else if ("-o".equals(param) || "--out".equals(param)) {
                OutputStream os = new FileOutputStream(args[++i]);
                out = new PrintStream(os, true, "UTF-8");
            } else if ("--from-db2-dump".equals(param)) {
                dumpfile = args[++i];
            } else if ("--lobdir".equals(param)) {
                lobdir = args[++i];
            } else if ("--jsonArray".equals(param)) {
                format = Format.JSONARRAY;
            } else if ("--csv".equals(param)) {
                format = Format.CSV;
            } else if ("--columns".equals(param)) {
                columns = args[++i];
            } else if ("--fields".equals(param)) {
                String fields = args[++i];
                fieldList = Arrays.asList(fields.split(","));
            } else if ("--version".equals(param)) {
                System.out.println(RDBExport.class.getName() + " version " + OakVersion.getVersion());
                System.exit(0);
            } else if ("--help".equals(param)) {
                printHelp();
                System.exit(0);
            } else {
                System.err.println(RDBExport.class.getName() + ": invalid parameter " + args[i]);
                printUsage();
                System.exit(2);
            }
        }
    } catch (IndexOutOfBoundsException ex) {
        System.err.println(RDBExport.class.getName() + ": value missing for parameter " + param);
        printUsage();
        System.exit(2);
    }

    if (format == Format.CSV && fieldList.isEmpty()) {
        System.err.println(RDBExport.class.getName() + ": csv output requires specification of field list");
        System.exit(2);
    }

    // JSON output with fieldList missing "_id"
    if ((format == Format.JSON || format == Format.JSONARRAY) && !fieldList.isEmpty()
            && !fieldList.contains("_id")) {
        fieldList = new ArrayList<String>(fieldList);
        fieldList.add(0, "_id");
    }

    if (dumpfile == null && url == null) {
        System.err.println(RDBExport.class.getName() + ": must use either dump file or JDBC URL");
        printUsage();
        System.exit(2);
    } else if (dumpfile != null) {
        columns = (columns == null)
                ? "id, modified, hasbinary, deletedonce, cmodcount, modcount, dsize, data, bdata"
                : columns;
        List<String> columnList = Arrays
                .asList(columns.toLowerCase(Locale.ENGLISH).replace(" ", "").split(","));
        dumpFile(dumpfile, lobdir, format, out, fieldList, columnList, ser);
    } else {
        if (columns != null) {
            System.err.println(RDBExport.class.getName() + ": column names ignored when using JDBC");
        }
        dumpJDBC(url, user, pw, table, query, format, out, fieldList, ser);
    }

    out.flush();
    out.close();
}

From source file:Main.java

/**
 * @param throwable
 * @return return the stack-trace
 */
public static String getStackTrace(Throwable throwable) {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    PrintStream pstream = new PrintStream(baos);
    String stack = null;
    throwable.printStackTrace(pstream);
    pstream.flush();
    stack = baos.toString();
    return stack;
}

From source file:Main.java

private static String asString(final PrintStream ps, ByteArrayOutputStream os)
        throws UnsupportedEncodingException {
    if (ps != null) {
        ps.flush();
        return os.toString("UTF-8");
    } else {
        return "";
    }
}