Example usage for java.io BufferedWriter newLine

List of usage examples for java.io BufferedWriter newLine

Introduction

On this page you can find example usage for java.io.BufferedWriter newLine.

Prototype

public void newLine() throws IOException 

Document

Writes a line separator. The line separator string is defined by the system property line.separator, and is not necessarily a single newline ('\n') character.
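
Below is a minimal, self-contained sketch of typical newLine() usage (the file name and content are illustrative only, not taken from any project below). Each newLine() call appends the platform line separator rather than a hard-coded "\n":

import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

public class NewLineExample {
    public static void main(String[] args) throws IOException {
        // try-with-resources flushes and closes the writer automatically
        try (BufferedWriter out = Files.newBufferedWriter(Paths.get("example.txt"), StandardCharsets.UTF_8)) {
            out.write("first line");
            out.newLine(); // platform-specific line separator
            out.write("second line");
            out.newLine();
        }
    }
}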

Usage

From source file:net.arccotangent.pacchat.net.ConnectionHandler.java

public void run() {
    try {
        String line1 = input.readLine();
        switch (line1) {
        case "101 ping":
            ch_log.i("Client pinged us, responding with an acknowledgement.");
            output.write("102 pong");
            output.newLine();
            output.flush();
            output.close();
            break;
        case "302 request key update":
            ch_log.i("Client is requesting a key update.");
            KeyUpdate update = new KeyUpdate(ip);
            KeyUpdateManager.addPendingUpdate(connection_id, update);
            // wait until the key update has been accepted or rejected
            while (!KeyUpdateManager.getUpdate(connection_id).isProcessed()) {
                try {
                    Thread.sleep(50);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }

            boolean accepted = KeyUpdateManager.getUpdate(connection_id).isAccepted();
            KeyUpdateManager.completeIncomingUpdate(connection_id, KeyUpdateManager.getUpdate(connection_id));
            if (accepted) {
                ch_log.i("Accepting key update");
                try {
                    output.write("303 update");
                    output.newLine();
                    output.flush();

                    String pubkeyB64 = input.readLine();

                    byte[] pubEncoded = Base64.decodeBase64(pubkeyB64);
                    X509EncodedKeySpec pubSpec = new X509EncodedKeySpec(pubEncoded);
                    KeyFactory keyFactory = KeyFactory.getInstance("RSA");

                    output.close();
                    input.close();

                    KeyManager.saveKeyByIP(ip, keyFactory.generatePublic(pubSpec));
                } catch (NoSuchAlgorithmException | InvalidKeySpecException e) {
                    ch_log.e("Error updating sender's key!");
                    e.printStackTrace();
                }
            } else {
                ch_log.i("Rejecting key update.");
                output.write("304 no update");
                output.newLine();
                output.flush();
                output.close();
            }
            break;
        case "301 getkey":
            ch_log.i("Client requested our public key, sending.");
            String pubkeyB64 = Base64.encodeBase64String(Main.getKeypair().getPublic().getEncoded());
            output.write(pubkeyB64);
            output.newLine();
            output.flush();
            output.close();
            break;
        case "200 encrypted message": //incoming encrypted message
            ch_log.i("Client sent an encrypted message, attempting verification and decryption.");
            PrivateKey privkey = Main.getKeypair().getPrivate();
            String cryptedMsg = input.readLine() + "\n" + input.readLine() + "\n" + input.readLine();

            ch_log.i("Checking for sender's public key.");
            if (KeyManager.checkIfIPKeyExists(ip)) {
                ch_log.i("Public key found.");
            } else {
                ch_log.i("Public key not found, requesting key from their server.");
                try {
                    Socket socketGetkey = new Socket();
                    socketGetkey.connect(new InetSocketAddress(InetAddress.getByName(ip), Server.PORT), 1000);
                    BufferedReader inputGetkey = new BufferedReader(
                            new InputStreamReader(socketGetkey.getInputStream()));
                    BufferedWriter outputGetkey = new BufferedWriter(
                            new OutputStreamWriter(socketGetkey.getOutputStream()));

                    outputGetkey.write("301 getkey");
                    outputGetkey.newLine();
                    outputGetkey.flush();

                    String sender_pubkeyB64 = inputGetkey.readLine();
                    byte[] pubEncoded = Base64.decodeBase64(sender_pubkeyB64);
                    X509EncodedKeySpec pubSpec = new X509EncodedKeySpec(pubEncoded);
                    KeyFactory keyFactory = KeyFactory.getInstance("RSA");

                    outputGetkey.close();
                    inputGetkey.close();

                    KeyManager.saveKeyByIP(ip, keyFactory.generatePublic(pubSpec));
                } catch (IOException | NoSuchAlgorithmException | InvalidKeySpecException e) {
                    ch_log.e("Error saving sender's key!");
                    e.printStackTrace();
                }
            }

            PacchatMessage message = MsgCrypto.decryptAndVerifyMessage(cryptedMsg, privkey,
                    KeyManager.loadKeyByIP(ip));

            String msg = message.getMessage();
            boolean verified = message.isVerified();
            boolean decrypted = message.isDecryptedSuccessfully();

            String ANSI_RESET = "\u001B[0m";
            String ANSI_CYAN = "\u001B[36m";
            String ANSI_BOLD = "\u001B[1m";
            if (verified && decrypted) {
                ch_log.i("Acknowledging message.");
                output.write("201 message acknowledgement");
                output.newLine();
                output.flush();
                output.close();
                System.out.println(ANSI_BOLD + ANSI_CYAN + "-----BEGIN MESSAGE-----" + ANSI_RESET);
                System.out.println(ANSI_BOLD + ANSI_CYAN + msg + ANSI_RESET);
                System.out.println(ANSI_BOLD + ANSI_CYAN + "-----END MESSAGE-----" + ANSI_RESET);
            } else if (!verified && decrypted) {
                ch_log.w("Notifying client that message authenticity was not verified.");
                output.write("203 unable to verify");
                output.newLine();
                output.flush();
                output.close();
                System.out.println(ANSI_BOLD + ANSI_CYAN + "-----BEGIN MESSAGE-----" + ANSI_RESET);
                System.out.println(ANSI_BOLD + ANSI_CYAN + msg + ANSI_RESET);
                System.out.println(ANSI_BOLD + ANSI_CYAN + "-----END MESSAGE-----" + ANSI_RESET);
            } else if (!verified) {
                ch_log.w("Notifying client that message could not be decrypted.");
                output.write("202 unable to decrypt");
                output.newLine();
                output.flush();
                output.close();
            }
            break;
        case "201 message acknowledgement":
            ch_log.i("Client sent an invalid message acknowledgement.");
            output.write("400 invalid transmission header");
            output.newLine();
            output.flush();
            output.close();
            break;
        case "202 unable to decrypt":
            ch_log.i("Client sent an invalid 'unable to decrypt' transmission.");
            output.write("400 invalid transmission header");
            output.newLine();
            output.flush();
            output.close();
            break;
        case "203 unable to verify":
            ch_log.i("Client sent an invalid 'unable to verify' transmission.");
            output.write("400 invalid transmission header");
            output.newLine();
            output.flush();
            output.close();
            break;
        default:
            ch_log.i("Client sent an invalid request header: " + line1);
            output.write("400 invalid transmission header");
            output.newLine();
            output.flush();
            output.close();
            break;
        }
    } catch (IOException e) {
        ch_log.e("Error in connection handler " + connection_id);
        e.printStackTrace();
    }
}

From source file:jade.core.messaging.FileMessageStorage.java

private void createMessageFile(File toStore, GenericMessage msg, AID receiver) throws IOException {

    BufferedWriter out = null;
    try {
        toStore.createNewFile();
        out = new BufferedWriter(new FileWriter(toStore));

        // Write the number of message copies (of course is 1 to begin with)
        out.write("1", 0, 1);
        out.newLine();

        // NL (23/01/04) Now write a serialized GenericMessage
        ByteArrayOutputStream ostream = new ByteArrayOutputStream();
        ObjectOutputStream p = new ObjectOutputStream(ostream);
        p.writeObject(msg);
        String strMessage = new String(Base64.encodeBase64(ostream.toByteArray()), "US-ASCII");
        ostream.close();
        out.write(strMessage, 0, strMessage.length());
        out.newLine();

        // Write the receiver AID in string format
        String strReceiver = receiver.toString();
        out.write(strReceiver, 0, strReceiver.length());
        out.newLine();

    } catch (NoClassDefFoundError er) {
        myLogger.log(Logger.WARNING,
                "*********** Cannot store message: the Persistent Delivery FileMessageStorage requires the commons-codec jar file to be in the classpath ***********");
    } finally {
        if (out != null) {
            out.close();
        }
    }
}

From source file:com.iyonger.apm.web.service.MonitorCollectorPlugin.java

@Override
public void startSampling(final ISingleConsole singleConsole, PerfTest perfTest,
        IPerfTestService perfTestService) {
    final List<String> targetHostIP = perfTest.getTargetHostIP();
    final Integer samplingInterval = perfTest.getSamplingInterval();
    for (final String target : targetHostIP) {
        scheduledTaskService.runAsync(new Runnable() {
            @Override
            public void run() {
                LOGGER.info("Start JVM monitoring for IP:{}", target);
                MonitorClientService client = new MonitorClientService(target,
                        MonitorCollectorPlugin.this.port);
                client.init();
                if (client.isConnected()) {
                    File testReportDir = singleConsole.getReportPath();
                    File dataFile = null;
                    FileWriter fw = null;
                    BufferedWriter bw = null;
                    try {
                        dataFile = new File(testReportDir, MONITOR_FILE_PREFIX + target + ".data");
                        fw = new FileWriter(dataFile, false);
                        bw = new BufferedWriter(fw);
                        // write header info
                        bw.write(SystemInfo.HEADER);
                        bw.newLine();
                        bw.flush();
                        clientMap.put(client, bw);
                    } catch (IOException e) {
                        LOGGER.error("Error to write to file:{}, Error:{}", dataFile.getPath(), e.getMessage());
                    } finally {
                        closeQuietly(bw);
                        closeQuietly(fw);
                    }
                }
            }
        });
    }
    assignScheduledTask(samplingInterval);
}

From source file:net.stuxcrystal.simpledev.configuration.parser.generators.xml.CommentAwareDumper.java

/**
 * Dumps the content of a single node.
 *
 * @param writer The writer to write data.
 * @param node   The node.
 * @param indent The indent.
 * @throws java.io.IOException If an I/O-Operation fails.
 */
private void dumpNode(BufferedWriter writer, Node<?> node, int indent) throws IOException {
    String name = (node.getName() != null && !node.getName().isEmpty()) ? node.getName() : "item";

    writeIndent(writer, indent);

    writer.write("<");
    writer.write(name);
    writer.write(">");

    this.dump(writer, node, indent + 2);

    writer.write("</");
    writer.write(name);
    writer.write(">");

    writer.newLine();
}

From source file:org.apache.hadoop.fs.TestJHLA.java

@Before
public void setUp() throws Exception {
    File logFile = new File(historyLog);
    if (!logFile.getParentFile().exists())
        if (!logFile.getParentFile().mkdirs())
            LOG.error("Cannot create dirs for history log file: " + historyLog);
    if (!logFile.createNewFile())
        LOG.error("Cannot create history log file: " + historyLog);
    BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(historyLog)));
    writer.write("$!!FILE=file1.log!!");
    writer.newLine();
    writer.write("Meta VERSION=\"1\" .");
    writer.newLine();
    writer.write(
            "Job JOBID=\"job_200903250600_0004\" JOBNAME=\"streamjob21364.jar\" USER=\"hadoop\" SUBMIT_TIME=\"1237962008012\" JOBCONF=\"hdfs:///job_200903250600_0004/job.xml\" .");
    writer.newLine();
    writer.write("Job JOBID=\"job_200903250600_0004\" JOB_PRIORITY=\"NORMAL\" .");
    writer.newLine();
    writer.write(
            "Job JOBID=\"job_200903250600_0004\" LAUNCH_TIME=\"1237962008712\" TOTAL_MAPS=\"2\" TOTAL_REDUCES=\"0\" JOB_STATUS=\"PREP\" .");
    writer.newLine();
    writer.write(
            "Task TASKID=\"task_200903250600_0004_m_000003\" TASK_TYPE=\"SETUP\" START_TIME=\"1237962008736\" SPLITS=\"\" .");
    writer.newLine();
    writer.write(
            "MapAttempt TASK_TYPE=\"SETUP\" TASKID=\"task_200903250600_0004_m_000003\" TASK_ATTEMPT_ID=\"attempt_200903250600_0004_m_000003_0\" START_TIME=\"1237962010929\" TRACKER_NAME=\"tracker_50445\" HTTP_PORT=\"50060\" .");
    writer.newLine();
    writer.write(
            "MapAttempt TASK_TYPE=\"SETUP\" TASKID=\"task_200903250600_0004_m_000003\" TASK_ATTEMPT_ID=\"attempt_200903250600_0004_m_000003_0\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237962012459\" RPC_ADDRESSES=\"host.com\" STATE_STRING=\"setup\" COUNTERS=\"{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(SPILLED_RECORDS)(Spilled Records)(0)]}\" .");
    writer.newLine();
    writer.write(
            "Task TASKID=\"task_200903250600_0004_m_000003\" TASK_TYPE=\"SETUP\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237962023824\" COUNTERS=\"{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(SPILLED_RECORDS)(Spilled Records)(0)]}\" .");
    writer.newLine();
    writer.write("Job JOBID=\"job_200903250600_0004\" JOB_STATUS=\"RUNNING\" .");
    writer.newLine();
    writer.write(
            "Task TASKID=\"task_200903250600_0004_m_000000\" TASK_TYPE=\"MAP\" START_TIME=\"1237962024049\" SPLITS=\"host1.com,host2.com,host3.com\" .");
    writer.newLine();
    writer.write(
            "Task TASKID=\"task_200903250600_0004_m_000001\" TASK_TYPE=\"MAP\" START_TIME=\"1237962024065\" SPLITS=\"host1.com,host2.com,host3.com\" .");
    writer.newLine();
    writer.write(
            "MapAttempt TASK_TYPE=\"MAP\" TASKID=\"task_200903250600_0004_m_000000\" TASK_ATTEMPT_ID=\"attempt_200903250600_0004_m_000000_0\" START_TIME=\"1237962026157\" TRACKER_NAME=\"tracker_50524\" HTTP_PORT=\"50060\" .");
    writer.newLine();
    writer.write(
            "MapAttempt TASK_TYPE=\"MAP\" TASKID=\"task_200903250600_0004_m_000000\" TASK_ATTEMPT_ID=\"attempt_200903250600_0004_m_000000_0\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237962041307\" RPC_ADDRESSES=\"host.com\" STATE_STRING=\"Records R/W=2681/1\" COUNTERS=\"{(FileSystemCounters)(FileSystemCounters)[(HDFS_BYTES_READ)(HDFS_BYTES_READ)(56630)][(HDFS_BYTES_WRITTEN)(HDFS_BYTES_WRITTEN)(28327)]}{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(MAP_INPUT_RECORDS)(Map input records)(2681)][(SPILLED_RECORDS)(Spilled Records)(0)][(MAP_INPUT_BYTES)(Map input bytes)(28327)][(MAP_OUTPUT_RECORDS)(Map output records)(2681)]}\" .");
    writer.newLine();
    writer.write(
            "Task TASKID=\"task_200903250600_0004_m_000000\" TASK_TYPE=\"MAP\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237962054138\" COUNTERS=\"{(FileSystemCounters)(FileSystemCounters)[(HDFS_BYTES_READ)(HDFS_BYTES_READ)(56630)][(HDFS_BYTES_WRITTEN)(HDFS_BYTES_WRITTEN)(28327)]}{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(MAP_INPUT_RECORDS)(Map input records)(2681)][(SPILLED_RECORDS)(Spilled Records)(0)][(MAP_INPUT_BYTES)(Map input bytes)(28327)][(MAP_OUTPUT_RECORDS)(Map output records)(2681)]}\" .");
    writer.newLine();
    writer.write(
            "MapAttempt TASK_TYPE=\"MAP\" TASKID=\"task_200903250600_0004_m_000001\" TASK_ATTEMPT_ID=\"attempt_200903250600_0004_m_000001_0\" START_TIME=\"1237962026077\" TRACKER_NAME=\"tracker_50162\" HTTP_PORT=\"50060\" .");
    writer.newLine();
    writer.write(
            "MapAttempt TASK_TYPE=\"MAP\" TASKID=\"task_200903250600_0004_m_000001\" TASK_ATTEMPT_ID=\"attempt_200903250600_0004_m_000001_0\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237962041030\" RPC_ADDRESSES=\"host.com\" STATE_STRING=\"Records R/W=2634/1\" COUNTERS=\"{(FileSystemCounters)(FileSystemCounters)[(HDFS_BYTES_READ)(HDFS_BYTES_READ)(28316)][(HDFS_BYTES_WRITTEN)(HDFS_BYTES_WRITTEN)(28303)]}{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(MAP_INPUT_RECORDS)(Map input records)(2634)][(SPILLED_RECORDS)(Spilled Records)(0)][(MAP_INPUT_BYTES)(Map input bytes)(28303)][(MAP_OUTPUT_RECORDS)(Map output records)(2634)]}\" .");
    writer.newLine();
    writer.write(
            "Task TASKID=\"task_200903250600_0004_m_000001\" TASK_TYPE=\"MAP\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237962054187\" COUNTERS=\"{(FileSystemCounters)(FileSystemCounters)[(HDFS_BYTES_READ)(HDFS_BYTES_READ)(28316)][(HDFS_BYTES_WRITTEN)(HDFS_BYTES_WRITTEN)(28303)]}{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(MAP_INPUT_RECORDS)(Map input records)(2634)][(SPILLED_RECORDS)(Spilled Records)(0)][(MAP_INPUT_BYTES)(Map input bytes)(28303)][(MAP_OUTPUT_RECORDS)(Map output records)(2634)]}\" .");
    writer.newLine();
    writer.write(
            "Task TASKID=\"task_200903250600_0004_m_000002\" TASK_TYPE=\"CLEANUP\" START_TIME=\"1237962054187\" SPLITS=\"\" .");
    writer.newLine();
    writer.write(
            "MapAttempt TASK_TYPE=\"CLEANUP\" TASKID=\"task_200903250600_0004_m_000002\" TASK_ATTEMPT_ID=\"attempt_200903250600_0004_m_000002_0\" START_TIME=\"1237962055578\" TRACKER_NAME=\"tracker_50162\" HTTP_PORT=\"50060\" .");
    writer.newLine();
    writer.write(
            "MapAttempt TASK_TYPE=\"CLEANUP\" TASKID=\"task_200903250600_0004_m_000002\" TASK_ATTEMPT_ID=\"attempt_200903250600_0004_m_000002_0\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237962056782\" RPC_ADDRESSES=\"host.com\" STATE_STRING=\"cleanup\" COUNTERS=\"{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(SPILLED_RECORDS)(Spilled Records)(0)]}\" .");
    writer.newLine();
    writer.write(
            "Task TASKID=\"task_200903250600_0004_m_000002\" TASK_TYPE=\"CLEANUP\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237962069193\" COUNTERS=\"{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(SPILLED_RECORDS)(Spilled Records)(0)]}\" .");
    writer.newLine();
    writer.write(
            "Job JOBID=\"job_200903250600_0004\" FINISH_TIME=\"1237962069193\" JOB_STATUS=\"SUCCESS\" FINISHED_MAPS=\"2\" FINISHED_REDUCES=\"0\" FAILED_MAPS=\"0\" FAILED_REDUCES=\"0\" COUNTERS=\"{(org.apache.hadoop.mapred.JobInProgress$Counter)(Job Counters )[(TOTAL_LAUNCHED_MAPS)(Launched map tasks)(2)]}{(FileSystemCounters)(FileSystemCounters)[(HDFS_BYTES_READ)(HDFS_BYTES_READ)(84946)][(HDFS_BYTES_WRITTEN)(HDFS_BYTES_WRITTEN)(56630)]}{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(MAP_INPUT_RECORDS)(Map input records)(5315)][(SPILLED_RECORDS)(Spilled Records)(0)][(MAP_INPUT_BYTES)(Map input bytes)(56630)][(MAP_OUTPUT_RECORDS)(Map output records)(5315)]}\" .");
    writer.newLine();
    writer.write("$!!FILE=file2.log!!");
    writer.newLine();
    writer.write("Meta VERSION=\"1\" .");
    writer.newLine();
    writer.write(
            "Job JOBID=\"job_200903250600_0023\" JOBNAME=\"TestJob\" USER=\"hadoop2\" SUBMIT_TIME=\"1237964779799\" JOBCONF=\"hdfs:///job_200903250600_0023/job.xml\" .");
    writer.newLine();
    writer.write("Job JOBID=\"job_200903250600_0023\" JOB_PRIORITY=\"NORMAL\" .");
    writer.newLine();
    writer.write(
            "Job JOBID=\"job_200903250600_0023\" LAUNCH_TIME=\"1237964780928\" TOTAL_MAPS=\"2\" TOTAL_REDUCES=\"0\" JOB_STATUS=\"PREP\" .");
    writer.newLine();
    writer.write(
            "Task TASKID=\"task_200903250600_0023_r_000001\" TASK_TYPE=\"SETUP\" START_TIME=\"1237964780940\" SPLITS=\"\" .");
    writer.newLine();
    writer.write(
            "ReduceAttempt TASK_TYPE=\"SETUP\" TASKID=\"task_200903250600_0023_r_000001\" TASK_ATTEMPT_ID=\"attempt_200903250600_0023_r_000001_0\" START_TIME=\"1237964720322\" TRACKER_NAME=\"tracker_3065\" HTTP_PORT=\"50060\" .");
    writer.newLine();
    writer.write(
            "ReduceAttempt TASK_TYPE=\"SETUP\" TASKID=\"task_200903250600_0023_r_000001\" TASK_ATTEMPT_ID=\"attempt_200903250600_0023_r_000001_0\" TASK_STATUS=\"SUCCESS\" SHUFFLE_FINISHED=\"1237964722118\" SORT_FINISHED=\"1237964722118\" FINISH_TIME=\"1237964722118\" RPC_ADDRESSES=\"host.com\" STATE_STRING=\"setup\" COUNTERS=\"{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(REDUCE_INPUT_GROUPS)(Reduce input groups)(0)][(COMBINE_OUTPUT_RECORDS)(Combine output records)(0)][(REDUCE_SHUFFLE_BYTES)(Reduce shuffle bytes)(0)][(REDUCE_OUTPUT_RECORDS)(Reduce output records)(0)][(SPILLED_RECORDS)(Spilled Records)(0)][(REDUCE_INPUT_RECORDS)(Reduce input records)(0)]}\" .");
    writer.newLine();
    writer.write(
            "Task TASKID=\"task_200903250600_0023_r_000001\" TASK_TYPE=\"SETUP\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237964796054\" COUNTERS=\"{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(REDUCE_INPUT_GROUPS)(Reduce input groups)(0)][(COMBINE_OUTPUT_RECORDS)(Combine output records)(0)][(REDUCE_SHUFFLE_BYTES)(Reduce shuffle bytes)(0)][(REDUCE_OUTPUT_RECORDS)(Reduce output records)(0)][(SPILLED_RECORDS)(Spilled Records)(0)][(REDUCE_INPUT_RECORDS)(Reduce input records)(0)]}\" .");
    writer.newLine();
    writer.write("Job JOBID=\"job_200903250600_0023\" JOB_STATUS=\"RUNNING\" .");
    writer.newLine();
    writer.write(
            "Task TASKID=\"task_200903250600_0023_m_000000\" TASK_TYPE=\"MAP\" START_TIME=\"1237964796176\" SPLITS=\"\" .");
    writer.newLine();
    writer.write(
            "Task TASKID=\"task_200903250600_0023_m_000001\" TASK_TYPE=\"MAP\" START_TIME=\"1237964796176\" SPLITS=\"\" .");
    writer.newLine();
    writer.write(
            "MapAttempt TASK_TYPE=\"MAP\" TASKID=\"task_200903250600_0023_m_000000\" TASK_ATTEMPT_ID=\"attempt_200903250600_0023_m_000000_0\" START_TIME=\"1237964809765\" TRACKER_NAME=\"tracker_50459\" HTTP_PORT=\"50060\" .");
    writer.newLine();
    writer.write(
            "MapAttempt TASK_TYPE=\"MAP\" TASKID=\"task_200903250600_0023_m_000000\" TASK_ATTEMPT_ID=\"attempt_200903250600_0023_m_000000_0\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237964911772\" RPC_ADDRESSES=\"host.com\" STATE_STRING=\"\" COUNTERS=\"{(FileSystemCounters)(FileSystemCounters)[(HDFS_BYTES_WRITTEN)(HDFS_BYTES_WRITTEN)(500000000)]}{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(MAP_INPUT_RECORDS)(Map input records)(5000000)][(SPILLED_RECORDS)(Spilled Records)(0)][(MAP_INPUT_BYTES)(Map input bytes)(5000000)][(MAP_OUTPUT_RECORDS)(Map output records)(5000000)]}\" .");
    writer.newLine();
    writer.write(
            "Task TASKID=\"task_200903250600_0023_m_000000\" TASK_TYPE=\"MAP\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237964916534\" COUNTERS=\"{(FileSystemCounters)(FileSystemCounters)[(HDFS_BYTES_WRITTEN)(HDFS_BYTES_WRITTEN)(500000000)]}{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(MAP_INPUT_RECORDS)(Map input records)(5000000)][(SPILLED_RECORDS)(Spilled Records)(0)][(MAP_INPUT_BYTES)(Map input bytes)(5000000)][(MAP_OUTPUT_RECORDS)(Map output records)(5000000)]}\" .");
    writer.newLine();
    writer.write(
            "MapAttempt TASK_TYPE=\"MAP\" TASKID=\"task_200903250600_0023_m_000001\" TASK_ATTEMPT_ID=\"attempt_200903250600_0023_m_000001_0\" START_TIME=\"1237964798169\" TRACKER_NAME=\"tracker_1524\" HTTP_PORT=\"50060\" .");
    writer.newLine();
    writer.write(
            "MapAttempt TASK_TYPE=\"MAP\" TASKID=\"task_200903250600_0023_m_000001\" TASK_ATTEMPT_ID=\"attempt_200903250600_0023_m_000001_0\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237964962960\" RPC_ADDRESSES=\"host.com\" STATE_STRING=\"\" COUNTERS=\"{(FileSystemCounters)(FileSystemCounters)[(HDFS_BYTES_WRITTEN)(HDFS_BYTES_WRITTEN)(500000000)]}{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(MAP_INPUT_RECORDS)(Map input records)(5000000)][(SPILLED_RECORDS)(Spilled Records)(0)][(MAP_INPUT_BYTES)(Map input bytes)(5000000)][(MAP_OUTPUT_RECORDS)(Map output records)(5000000)]}\" .");
    writer.newLine();
    writer.write(
            "Task TASKID=\"task_200903250600_0023_m_000001\" TASK_TYPE=\"MAP\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237964976870\" COUNTERS=\"{(FileSystemCounters)(FileSystemCounters)[(HDFS_BYTES_WRITTEN)(HDFS_BYTES_WRITTEN)(500000000)]}{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(MAP_INPUT_RECORDS)(Map input records)(5000000)][(SPILLED_RECORDS)(Spilled Records)(0)][(MAP_INPUT_BYTES)(Map input bytes)(5000000)][(MAP_OUTPUT_RECORDS)(Map output records)(5000000)]}\" .");
    writer.newLine();
    writer.write(
            "Task TASKID=\"task_200903250600_0023_r_000000\" TASK_TYPE=\"CLEANUP\" START_TIME=\"1237964976871\" SPLITS=\"\" .");
    writer.newLine();
    writer.write(
            "ReduceAttempt TASK_TYPE=\"CLEANUP\" TASKID=\"task_200903250600_0023_r_000000\" TASK_ATTEMPT_ID=\"attempt_200903250600_0023_r_000000_0\" START_TIME=\"1237964977208\" TRACKER_NAME=\"tracker_1524\" HTTP_PORT=\"50060\" .");
    writer.newLine();
    writer.write(
            "ReduceAttempt TASK_TYPE=\"CLEANUP\" TASKID=\"task_200903250600_0023_r_000000\" TASK_ATTEMPT_ID=\"attempt_200903250600_0023_r_000000_0\" TASK_STATUS=\"SUCCESS\" SHUFFLE_FINISHED=\"1237964979031\" SORT_FINISHED=\"1237964979031\" FINISH_TIME=\"1237964979032\" RPC_ADDRESSES=\"host.com\" STATE_STRING=\"cleanup\" COUNTERS=\"{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(REDUCE_INPUT_GROUPS)(Reduce input groups)(0)][(COMBINE_OUTPUT_RECORDS)(Combine output records)(0)][(REDUCE_SHUFFLE_BYTES)(Reduce shuffle bytes)(0)][(REDUCE_OUTPUT_RECORDS)(Reduce output records)(0)][(SPILLED_RECORDS)(Spilled Records)(0)][(REDUCE_INPUT_RECORDS)(Reduce input records)(0)]}\" .");
    writer.newLine();
    writer.write(
            "Task TASKID=\"task_200903250600_0023_r_000000\" TASK_TYPE=\"CLEANUP\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237964991879\" COUNTERS=\"{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(REDUCE_INPUT_GROUPS)(Reduce input groups)(0)][(COMBINE_OUTPUT_RECORDS)(Combine output records)(0)][(REDUCE_SHUFFLE_BYTES)(Reduce shuffle bytes)(0)][(REDUCE_OUTPUT_RECORDS)(Reduce output records)(0)][(SPILLED_RECORDS)(Spilled Records)(0)][(REDUCE_INPUT_RECORDS)(Reduce input records)(0)]}\" .");
    writer.newLine();
    writer.write(
            "Job JOBID=\"job_200903250600_0023\" FINISH_TIME=\"1237964991879\" JOB_STATUS=\"SUCCESS\" FINISHED_MAPS=\"2\" FINISHED_REDUCES=\"0\" FAILED_MAPS=\"0\" FAILED_REDUCES=\"0\" COUNTERS=\"{(org.apache.hadoop.mapred.JobInProgress$Counter)(Job Counters )[(TOTAL_LAUNCHED_MAPS)(Launched map tasks)(2)]}{(FileSystemCounters)(FileSystemCounters)[(HDFS_BYTES_WRITTEN)(HDFS_BYTES_WRITTEN)(1000000000)]}{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(MAP_INPUT_RECORDS)(Map input records)(10000000)][(SPILLED_RECORDS)(Spilled Records)(0)][(MAP_INPUT_BYTES)(Map input bytes)(10000000)][(MAP_OUTPUT_RECORDS)(Map output records)(10000000)]}\" .");
    writer.newLine();
    writer.write("$!!FILE=file3.log!!");
    writer.newLine();
    writer.write("Meta VERSION=\"1\" .");
    writer.newLine();
    writer.write(
            "Job JOBID=\"job_200903250600_0034\" JOBNAME=\"TestJob\" USER=\"hadoop3\" SUBMIT_TIME=\"1237966370007\" JOBCONF=\"hdfs:///job_200903250600_0034/job.xml\" .");
    writer.newLine();
    writer.write("Job JOBID=\"job_200903250600_0034\" JOB_PRIORITY=\"NORMAL\" .");
    writer.newLine();
    writer.write(
            "Job JOBID=\"job_200903250600_0034\" LAUNCH_TIME=\"1237966371076\" TOTAL_MAPS=\"2\" TOTAL_REDUCES=\"0\" JOB_STATUS=\"PREP\" .");
    writer.newLine();
    writer.write(
            "Task TASKID=\"task_200903250600_0034_m_000003\" TASK_TYPE=\"SETUP\" START_TIME=\"1237966371093\" SPLITS=\"\" .");
    writer.newLine();
    writer.write(
            "MapAttempt TASK_TYPE=\"SETUP\" TASKID=\"task_200903250600_0034_m_000003\" TASK_ATTEMPT_ID=\"attempt_200903250600_0034_m_000003_0\" START_TIME=\"1237966371524\" TRACKER_NAME=\"tracker_50118\" HTTP_PORT=\"50060\" .");
    writer.newLine();
    writer.write(
            "MapAttempt TASK_TYPE=\"SETUP\" TASKID=\"task_200903250600_0034_m_000003\" TASK_ATTEMPT_ID=\"attempt_200903250600_0034_m_000003_0\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237966373174\" RPC_ADDRESSES=\"host.com\" STATE_STRING=\"setup\" COUNTERS=\"{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(SPILLED_RECORDS)(Spilled Records)(0)]}\" .");
    writer.newLine();
    writer.write(
            "Task TASKID=\"task_200903250600_0034_m_000003\" TASK_TYPE=\"SETUP\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237966386098\" COUNTERS=\"{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(SPILLED_RECORDS)(Spilled Records)(0)]}\" .");
    writer.newLine();
    writer.write("Job JOBID=\"job_200903250600_0034\" JOB_STATUS=\"RUNNING\" .");
    writer.newLine();
    writer.write(
            "Task TASKID=\"task_200903250600_0034_m_000000\" TASK_TYPE=\"MAP\" START_TIME=\"1237966386111\" SPLITS=\"\" .");
    writer.newLine();
    writer.write(
            "Task TASKID=\"task_200903250600_0034_m_000001\" TASK_TYPE=\"MAP\" START_TIME=\"1237966386124\" SPLITS=\"\" .");
    writer.newLine();
    writer.write(
            "MapAttempt TASK_TYPE=\"MAP\" TASKID=\"task_200903250600_0034_m_000001\" TASK_ATTEMPT_ID=\"attempt_200903250600_0034_m_000001_0\" TASK_STATUS=\"FAILED\" FINISH_TIME=\"1237967174546\" RPC_ADDRESSES=\"host.com\" ERROR=\"java.io.IOException: Task process exit with nonzero status of 15.");
    writer.newLine();
    writer.write("  at org.apache.hadoop.mapred.TaskRunner.run(TaskRunner.java:424)");
    writer.newLine();
    writer.write(",java.io.IOException: Task process exit with nonzero status of 15.");
    writer.newLine();
    writer.write("  at org.apache.hadoop.mapred.TaskRunner.run(TaskRunner.java:424)");
    writer.newLine();
    writer.write("\" .");
    writer.newLine();
    writer.write(
            "Task TASKID=\"task_200903250600_0034_m_000002\" TASK_TYPE=\"CLEANUP\" START_TIME=\"1237967170815\" SPLITS=\"\" .");
    writer.newLine();
    writer.write(
            "MapAttempt TASK_TYPE=\"CLEANUP\" TASKID=\"task_200903250600_0034_m_000002\" TASK_ATTEMPT_ID=\"attempt_200903250600_0034_m_000002_0\" START_TIME=\"1237967168653\" TRACKER_NAME=\"tracker_3105\" HTTP_PORT=\"50060\" .");
    writer.newLine();
    writer.write(
            "MapAttempt TASK_TYPE=\"CLEANUP\" TASKID=\"task_200903250600_0034_m_000002\" TASK_ATTEMPT_ID=\"attempt_200903250600_0034_m_000002_0\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237967171301\" RPC_ADDRESSES=\"host.com\" STATE_STRING=\"cleanup\" COUNTERS=\"{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(SPILLED_RECORDS)(Spilled Records)(0)]}\" .");
    writer.newLine();
    writer.write(
            "Task TASKID=\"task_200903250600_0034_m_000002\" TASK_TYPE=\"CLEANUP\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237967185818\" COUNTERS=\"{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(SPILLED_RECORDS)(Spilled Records)(0)]}\" .");
    writer.newLine();
    writer.write(
            "Job JOBID=\"job_200903250600_0034\" FINISH_TIME=\"1237967185818\" JOB_STATUS=\"KILLED\" FINISHED_MAPS=\"0\" FINISHED_REDUCES=\"0\" .");
    writer.newLine();
    writer.close();
}

From source file:com.elevenpaths.googleindexretriever.GoogleSearch.java

public void saveKeywords() throws IOException {
    File file = new File("keywords.txt");
    // if the file doesn't exist, create it
    if (!file.exists()) {
        file.createNewFile();
    }

    FileWriter fw = new FileWriter(file.getAbsoluteFile());
    BufferedWriter bw = new BufferedWriter(fw);
    for (String k : keywords) {
        bw.write(k);
        bw.newLine();
    }
    bw.close();
    fw.close();

}
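
For contrast with the example above, here is a minimal sketch (a hypothetical helper, not part of GoogleSearch) of the same keyword-dump logic using try-with-resources, which guarantees the writer is flushed and closed even if write() or newLine() throws partway through the loop; it assumes the same java.io imports as above:

public void saveKeywordsSafely(Iterable<String> keywords) throws IOException {
    // try-with-resources closes the BufferedWriter (and the underlying FileWriter)
    // on both the success and failure paths
    try (BufferedWriter bw = new BufferedWriter(new FileWriter("keywords.txt"))) {
        for (String k : keywords) {
            bw.write(k);
            bw.newLine();
        }
    }
}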

From source file:com.day.cq.replication.dispatcher.DispatcherFlushContentBuilder.java

/**
 * Create the replication content, containing one or more URIs to be
 * re-fetched immediately upon flushing a page.
 *
 * @param factory the replication content factory
 * @param uris URIs to re-fetch
 * @return replication content
 *
 * @throws ReplicationException if an error occurs
 */
private ReplicationContent create(ReplicationContentFactory factory, String[] uris)
        throws ReplicationException {

    File tmpFile;
    BufferedWriter out = null;

    try {
        tmpFile = File.createTempFile("cq5", ".post");
    } catch (IOException e) {
        throw new ReplicationException("Unable to create temp file", e);
    }

    try {
        out = new BufferedWriter(new FileWriter(tmpFile));
        for (int i = 0; i < uris.length; i++) {
            out.write(uris[i]);
            out.newLine();
            logger.debug("adding " + uris[i]);
        }
        out.close();
        IOUtils.closeQuietly(out);
        out = null;
        return factory.create("text/plain", tmpFile, true);
    } catch (IOException e) {
        if (out != null) {
            IOUtils.closeQuietly(out);
        }
        tmpFile.delete();
        throw new ReplicationException("Unable to create repository content", e);
    }
}

From source file:com.elevenpaths.googleindexretriever.GoogleSearch.java

public void saveSpamKeywords() throws IOException {
    File file = new File("spamKeywords.txt");
    // if the file doesn't exist, create it
    if (!file.exists()) {
        file.createNewFile();
    }

    FileWriter fw = new FileWriter(file.getAbsoluteFile());
    BufferedWriter bw = new BufferedWriter(fw);
    for (String k : keywordsSpam) {
        bw.write(k);
        bw.newLine();
    }
    bw.close();
    fw.close();
}

From source file:edu.ucdenver.ccp.nlp.ae.dict_util.OboToDictionary.java

public void convert(File oboFile, File outputFile)
            throws IOException, FileNotFoundException, OBOParseException {

        DefaultOBOParser parser = new DefaultOBOParser();
        OBOParseEngine engine = new OBOParseEngine(parser);
        List<String> paths = new ArrayList<String>();
        paths.add(oboFile.getAbsolutePath());
        engine.setPaths(paths);
        engine.parse();
        OBOSession session = parser.getSession();
        BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(outputFile, true),
                Charset.forName("UTF-8").newEncoder().onMalformedInput(CodingErrorAction.REPORT)
                        .onUnmappableCharacter(CodingErrorAction.REPORT)));

        writer.write("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n<synonym>");
        writer.newLine();

        buildEntries(session.getObjects(), writer);

        writer.write("</synonym>\n");
        writer.close();
    }

From source file:com.mitre.holdshort.AlertLogger.java

private void endAlert(Location loc, AlertEndType endType) {

    currentAlert.setStopTime(System.currentTimeMillis());
    currentAlert.setStopLatLon(String.valueOf(loc.getLatitude()), String.valueOf(loc.getLongitude()));
    currentAlert.setStopHeading(loc.getBearing());
    currentAlert.setStopSpeed((float) (loc.getSpeed() * MainActivity.MS_TO_KNOTS));
    currentAlert.setStopAltitude(loc.getAltitude() * MainActivity.M_TO_FEET);
    currentAlert.setEndType(endType);

    try {
        alertFTPClient.connect(this.ftpHost, 21);
        if (alertFTPClient.login(this.ftpUser, this.ftpPassword)) {
            alertFTPClient.enterLocalPassiveMode();
            writeDataToFtpServer(currentAlert);
            currentAlert = null;
        }

    } catch (UnknownHostException e) {
        System.err.println("No Connection For FTP Client");

        // Now write the alert to oldAlerts.dat instead
        File oldAlerts = new File(ctx.getFilesDir() + "/oldAlerts.dat");
        FileWriter logWriter = null;
        try {
            logWriter = new FileWriter(oldAlerts, true);
            BufferedWriter outer = new BufferedWriter(logWriter);
            outer.append(currentAlert.getAlertString());
            outer.newLine();
            outer.close();
            System.out.println(String.valueOf(oldAlerts.length()));

        } catch (IOException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        }

    } catch (FileNotFoundException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (IOException e) {
        System.err.println("No Connection For FTP Client");

        // Now write the alert to oldAlerts.dat instead
        File oldAlerts = new File(ctx.getFilesDir() + "/oldAlerts.dat");
        FileWriter logWriter = null;
        try {
            logWriter = new FileWriter(oldAlerts, true);
            BufferedWriter outer = new BufferedWriter(logWriter);
            outer.append(currentAlert.getAlertString());
            outer.newLine();
            outer.close();
            System.out.println(String.valueOf(oldAlerts.length()));

        } catch (IOException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        }

    }

}