Example usage for java.io Writer append

Introduction

On this page you can find example usages of the java.io.Writer.append method, taken from open-source projects.

Prototype

public Writer append(char c) throws IOException 

Document

Appends the specified character to this writer.
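
Before the project-level examples below, here is a minimal, self-contained sketch of the call in isolation (the file name is illustrative):

import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;

public class WriterAppendExample {
    public static void main(String[] args) throws IOException {
        // try-with-resources ensures the writer is closed even on failure
        try (Writer writer = new FileWriter("example.txt")) {
            writer.append('H');    // append(char) writes a single character
            writer.append("ello"); // the CharSequence overload composes with it
            writer.append('\n');
        }
    }
}

Because append returns the Writer itself, the calls above can also be chained: writer.append('H').append("ello").append('\n').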

Usage

From source file:org.netbeans.util.source.minify.MinifyUtil.java

public MinifyFileResult compressCss(String inputFilename, String outputFilename, MinifyProperty minifyProperty)
        throws IOException {
    Reader in = null;
    Writer out = null;
    MinifyFileResult minifyFileResult = new MinifyFileResult();
    try {
        File inputFile = new File(inputFilename);
        File outputFile = new File(outputFilename);
        in = new InputStreamReader(new FileInputStream(inputFile), minifyProperty.getCharset());
        minifyFileResult.setInputFileSize(inputFile.length());

        CssCompressor compressor = new CssCompressor(in);
        in.close();
        in = null;

        out = new OutputStreamWriter(new FileOutputStream(outputFile), minifyProperty.getCharset());
        compressor.compress(out, minifyProperty.getLineBreakPosition());
        out.flush();
        minifyFileResult.setOutputFileSize(outputFile.length());
        if (minifyProperty.isAppendLogToFile()) {
            out.append("\n/*Size: " + minifyFileResult.getInputFileSize() + "->"
                    + minifyFileResult.getOutputFileSize() + "Bytes " + "\n Saved "
                    + minifyFileResult.getSavedPercentage() + "%*/");
        }
        out.flush();

    } finally {
        IOUtils.closeQuietly(in);
        IOUtils.closeQuietly(out);
    }
    return minifyFileResult;
}

From source file:com.github.hateoas.forms.spring.xhtml.XhtmlResourceMessageConverter.java

/**
 * From {@link ServletServerHttpRequest}:
 * Use {@link javax.servlet.ServletRequest#getParameterMap()} to reconstruct the
 * body of a form 'POST' providing a predictable outcome as opposed to reading
 * from the body, which can fail if any other code has used ServletRequest
 * to access a parameter thus causing the input stream to be "consumed".
 */
private InputStream getBodyFromServletRequestParameters(HttpServletRequest request, String charset)
        throws IOException {

    ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
    Writer writer = new OutputStreamWriter(bos, charset);
    @SuppressWarnings("unchecked")
    Map<String, String[]> form = request.getParameterMap();
    for (Iterator<String> nameIterator = form.keySet().iterator(); nameIterator.hasNext();) {
        String name = nameIterator.next();
        List<String> values = Arrays.asList(form.get(name));
        for (Iterator<String> valueIterator = values.iterator(); valueIterator.hasNext();) {
            String value = valueIterator.next();
            writer.write(URLEncoder.encode(name, charset));
            if (value != null) {
                writer.write('=');
                writer.write(URLEncoder.encode(value, charset));
                if (valueIterator.hasNext()) {
                    writer.write('&');
                }
            }
        }
        if (nameIterator.hasNext()) {
            writer.append('&');
        }
    }
    writer.flush();

    return new ByteArrayInputStream(bos.toByteArray());
}

From source file:com.emc.vipr.sync.CasMigrationTest.java

protected List<String> createTestClips(FPPool pool, int maxBlobSize, int thisMany, Writer summaryWriter)
        throws Exception {
    ExecutorService service = Executors.newFixedThreadPool(CAS_SETUP_THREADS);

    System.out.print("Creating clips");

    List<String> clipIds = Collections.synchronizedList(new ArrayList<String>());
    List<String> summaries = Collections.synchronizedList(new ArrayList<String>());
    for (int clipIdx = 0; clipIdx < thisMany; clipIdx++) {
        service.submit(new ClipWriter(pool, clipIds, maxBlobSize, summaries));
    }

    service.shutdown();
    service.awaitTermination(CAS_SETUP_WAIT_MINUTES, TimeUnit.MINUTES);
    service.shutdownNow();

    Collections.sort(summaries);
    for (String summary : summaries) {
        summaryWriter.append(summary);
    }

    System.out.println();

    return clipIds;
}

From source file:com.github.rvesse.airline.help.cli.bash.BashCompletionGenerator.java

@Override
public void usage(GlobalMetadata<T> global, OutputStream output) throws IOException {
    Writer writer = new OutputStreamWriter(output);

    // Script header
    writeHeader(writer);
    writeHelperFunctions(writer);

    // If there are multiple groups then we will need to generate a function
    // for each
    boolean hasGroups = global.getCommandGroups().size() > 1 || global.getDefaultGroupCommands().size() == 0;
    if (hasGroups) {
        generateGroupFunctions(global, writer);
    }
    // Need to generate functions for default group commands regardless
    generateCommandFunctions(global, writer);

    // Start main completion function
    writeFunctionName(writer, global, true);

    indent(writer, 2);
    writer.append("# Get completion data").append(NEWLINE);
    indent(writer, 2);
    writer.append("CURR_WORD=${COMP_WORDS[COMP_CWORD]}").append(NEWLINE);
    indent(writer, 2);
    writer.append("PREV_WORD=${COMP_WORDS[COMP_CWORD-1]}").append(NEWLINE);
    indent(writer, 2);
    writer.append("CURR_CMD=").append(NEWLINE);
    indent(writer, 2);
    writer.append("if [[ ${COMP_CWORD} -ge 1 ]]; then").append(NEWLINE);
    indent(writer, 4);
    writer.append("CURR_CMD=${COMP_WORDS[1]}").append(NEWLINE);
    indent(writer, 2);
    writer.append("fi").append(DOUBLE_NEWLINE);

    // Prepare list of top level commands and groups
    Set<String> commandNames = new HashSet<>();
    for (CommandMetadata command : global.getDefaultGroupCommands()) {
        if (command.isHidden() && !this.includeHidden())
            continue;
        commandNames.add(command.getName());
    }
    if (hasGroups) {
        for (CommandGroupMetadata group : global.getCommandGroups()) {
            if (group.isHidden() && !this.includeHidden())
                continue;

            commandNames.add(group.getName());
        }
    }
    if (global.getDefaultCommand() != null)
        commandNames.add(global.getDefaultCommand().getName());
    writeWordListVariable(writer, 2, "COMMANDS", commandNames.iterator());

    // Firstly check whether we are only completing the group or command
    indent(writer, 2);
    writer.append("if [[ ${COMP_CWORD} -eq 1 ]]; then").append(NEWLINE);

    // Include the default command directly if present
    if (global.getDefaultCommand() != null) {
        // Need to call the completion function and combine its output
        // with that of the list of available commands
        writeCommandFunctionCall(writer, global, null, global.getDefaultCommand(), 4);
        indent(writer, 4);
        writer.append("DEFAULT_COMMAND_COMPLETIONS=(${COMPREPLY[@]})").append(NEWLINE);
    }
    indent(writer, 4);
    writer.append("COMPREPLY=()").append(NEWLINE);
    if (global.getDefaultCommand() != null) {
        writeCompletionGeneration(writer, 4, false, null, "COMMANDS", "DEFAULT_COMMAND_COMPLETIONS");
    } else {
        writeCompletionGeneration(writer, 4, false, null, "COMMANDS");
    }
    indent(writer, 2);
    writer.append("fi").append(DOUBLE_NEWLINE);

    // Otherwise we must be in a specific group/command
    // Use a switch statement to provide group/command specific completion
    writer.append("  case ${CURR_CMD} in ").append(NEWLINE);
    if (hasGroups) {
        Set<String> groups = new HashSet<String>();

        // Add a case for each group
        for (CommandGroupMetadata group : global.getCommandGroups()) {
            if (group.isHidden() && !this.includeHidden())
                continue;

            // Add case for the group
            writeGroupCase(writer, global, group, 4);

            // Track which groups we've generated completion functions for
            groups.add(group.getName());
        }

        // Include commands in the default group directly provided there
        // isn't a conflicting group
        for (CommandMetadata command : global.getDefaultGroupCommands()) {
            if (groups.contains(command.getName()))
                continue;

            groups.add(command.getName());

            if (command.isHidden() && !this.includeHidden())
                continue;

            // Add case for the command
            writeCommandCase(writer, global, null, command, 4, false);

            groups.add(command.getName());
        }
    } else {
        // Add a case for each command
        for (CommandMetadata command : global.getDefaultGroupCommands()) {
            if (command.isHidden() && !this.includeHidden())
                continue;

            // Add case for the command
            writeCommandCase(writer, global, null, command, 4, false);
        }
    }

    indent(writer, 2);
    writer.append("esac").append(DOUBLE_NEWLINE);

    // End Function
    if (this.withDebugging) {
        indent(writer, 2);
        writer.append("set +o xtrace").append(NEWLINE);
    }
    writer.append("}").append(DOUBLE_NEWLINE);

    // Completion setup
    writer.append("complete -F ");
    writeFunctionName(writer, global, false);
    writer.append(" ").append(global.getName());

    // Flush the output
    writer.flush();
    output.flush();
}

From source file:org.gradle.api.publish.internal.ModuleMetadataFileGenerator.java

public void generateTo(PublicationInternal publication, Collection<? extends PublicationInternal> publications,
        Writer writer) throws IOException {
    // Collect a map from component to coordinates. This might be better to move to the component or some publications model
    Map<SoftwareComponent, ComponentData> coordinates = new HashMap<SoftwareComponent, ComponentData>();
    collectCoordinates(publications, coordinates);

    // Collect a map from component to its owning component. This might be better to move to the component or some publications model
    Map<SoftwareComponent, SoftwareComponent> owners = new HashMap<SoftwareComponent, SoftwareComponent>();
    collectOwners(publications, owners);

    // Write the output
    JsonWriter jsonWriter = new JsonWriter(writer);
    jsonWriter.setHtmlSafe(false);
    jsonWriter.setIndent("  ");
    writeComponentWithVariants(publication, publication.getComponent(), coordinates, owners, jsonWriter);
    jsonWriter.flush();
    writer.append('\n');
}

From source file:org.apache.hadoop.hbase.mob.compactions.PartitionedMobCompactor.java

/**
 * Compacts the del file in a batch.
 * @param request The compaction request.
 * @param delFiles The del files.
 * @return The path of new del file after merging.
 * @throws IOException
 */
private Path compactDelFilesInBatch(PartitionedMobCompactionRequest request, List<StoreFile> delFiles)
        throws IOException {
    // create a scanner for the del files.
    StoreScanner scanner = createScanner(delFiles, ScanType.COMPACT_RETAIN_DELETES);
    Writer writer = null;
    Path filePath = null;
    try {
        writer = MobUtils.createDelFileWriter(conf, fs, column,
                MobUtils.formatDate(new Date(request.selectionTime)), tempPath, Long.MAX_VALUE,
                column.getCompactionCompression(), HConstants.EMPTY_START_ROW, compactionCacheConfig,
                cryptoContext);
        filePath = writer.getPath();
        List<Cell> cells = new ArrayList<Cell>();
        boolean hasMore = false;
        ScannerContext scannerContext = ScannerContext.newBuilder().setBatchLimit(compactionKVMax).build();
        do {
            hasMore = scanner.next(cells, scannerContext);
            for (Cell cell : cells) {
                writer.append(cell);
            }
            cells.clear();
        } while (hasMore);
    } finally {
        scanner.close();
        if (writer != null) {
            try {
                writer.close();
            } catch (IOException e) {
                LOG.error("Failed to close the writer of the file " + filePath, e);
            }
        }
    }
    // commit the new del file
    Path path = MobUtils.commitFile(conf, fs, filePath, mobFamilyDir, compactionCacheConfig);
    // archive the old del files
    try {
        MobUtils.removeMobFiles(conf, fs, tableName, mobTableDir, column.getName(), delFiles);
    } catch (IOException e) {
        LOG.error("Failed to archive the old del files " + delFiles, e);
    }
    return path;
}

From source file:net.pandoragames.far.ui.MimeConfParser.java

/**
 * Format the current mime definitions as xml.
 *
 * @param output where to write to.
 * @throws IOException
 */
public void format(OutputStream output) throws IOException {
    Writer writer = new OutputStreamWriter(output, "UTF-8");
    Map<String, MimeTreeNode> nodeSet = new HashMap<String, MimeTreeNode>();
    MimeTreeNode rootNode = null;
    for (BUILDIN fileTypeID : BUILDIN.values()) {
        FileType fileType = FileType.getType(fileTypeID.name());
        MimeTreeNode node = new MimeTreeNode(fileType);
        if (fileTypeID == BUILDIN.FILE) {
            rootNode = node;
        } else {
            nodeSet.get(fileType.getParentType().getName()).getChildren().add(node);
        }
        nodeSet.put(node.getName(), node);
    }
    List<MimeType> mimelist = MimeType.MimeRegistry.listAll();
    Collections.sort(mimelist, new Comparator<MimeType>() {
        public int compare(MimeType a, MimeType b) {
            return a.getName().compareTo(b.getName());
        }
    });
    for (MimeType mimeType : mimelist) {
        addMimeType(mimeType, nodeSet);
    }
    writer.write("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>");
    writer.append('\n');
    writeNode(rootNode, writer, 0);
    writer.flush();
}

From source file:org.netbeans.util.source.minify.MinifyUtil.java

public MinifyFileResult compressJson(String inputFilename, String outputFilename, MinifyProperty minifyProperty)
        throws IOException {
    InputStreamReader in = null;
    Writer out = null;
    MinifyFileResult minifyFileResult = new MinifyFileResult();
    try {
        File inputFile = new File(inputFilename);
        File outputFile = new File(outputFilename);
        in = new InputStreamReader(new FileInputStream(inputFile), minifyProperty.getCharset());
        minifyFileResult.setInputFileSize(inputFile.length());

        JSONMinifyUtil compressor = new JSONMinifyUtil();
        String output = compressor.minify(fromStream(in));

        in.close();
        in = null;

        out = new OutputStreamWriter(new FileOutputStream(outputFile), minifyProperty.getCharset());
        out.write(output);

        out.flush();
        minifyFileResult.setOutputFileSize(outputFile.length());
        if (minifyProperty.isAppendLogToFile()) {
            out.append("\n<!--Size: " + minifyFileResult.getInputFileSize() + "=>"
                    + minifyFileResult.getOutputFileSize() + "Bytes " + "\n Saved "
                    + minifyFileResult.getSavedPercentage() + "%-->");
        }
        out.flush();

    } finally {
        IOUtils.closeQuietly(in);
        IOUtils.closeQuietly(out);
    }
    return minifyFileResult;
}

From source file:org.apache.mahout.utils.vectors.VectorDumper.java

@Override
public int run(String[] args) throws Exception {
    /**
     Option seqOpt = obuilder.withLongName("seqFile").withRequired(false).withArgument(
     abuilder.withName("seqFile").withMinimum(1).withMaximum(1).create()).withDescription(
     "The Sequence File containing the Vectors").withShortName("s").create();
     Option dirOpt = obuilder.withLongName("seqDirectory").withRequired(false).withArgument(
     abuilder.withName("seqDirectory").withMinimum(1).withMaximum(1).create())
     .withDescription("The directory containing Sequence File of Vectors")
     .withShortName("d").create();
     */
    addInputOption();
    addOutputOption();
    addOption("useKey", "u", "If the Key is a vector than dump that instead");
    addOption("printKey", "p", "Print out the key as well, delimited by tab (or the value if useKey is true");
    addOption("dictionary", "d", "The dictionary file.", false);
    addOption("dictionaryType", "dt", "The dictionary file type (text|seqfile)", false);
    addOption("csv", "c",
            "Output the Vector as CSV.  Otherwise it substitutes in the terms for vector cell entries");
    addOption("namesAsComments", "n", "If using CSV output, optionally add a comment line for each NamedVector "
            + "(if the vector is one) printing out the name");
    addOption("nameOnly", "N", "Use the name as the value for each NamedVector (skip other vectors)");
    addOption("sortVectors", "sort",
            "Sort output key/value pairs of the vector entries in abs magnitude " + "descending order");
    addOption("quiet", "q", "Print only file contents");
    addOption("sizeOnly", "sz", "Dump only the size of the vector");
    addOption("numItems", "ni", "Output at most <n> vecors", false);
    addOption("vectorSize", "vs",
            "Truncate vectors to <vs> length when dumping (most useful when in" + " conjunction with -sort",
            false);
    addOption(buildOption("filter", "fi",
            "Only dump out those vectors whose name matches the filter."
                    + "  Multiple items may be specified by repeating the argument.",
            true, 1, Integer.MAX_VALUE, false, null));

    if (parseArguments(args, false, true) == null) {
        return -1;
    }

    Path[] pathArr;
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path input = getInputPath();
    FileStatus fileStatus = fs.getFileStatus(input);
    if (fileStatus.isDir()) {
        pathArr = FileUtil.stat2Paths(fs.listStatus(input, PathFilters.logsCRCFilter()));
    } else {
        FileStatus[] inputPaths = fs.globStatus(input);
        pathArr = new Path[inputPaths.length];
        int i = 0;
        for (FileStatus fstatus : inputPaths) {
            pathArr[i++] = fstatus.getPath();
        }
    }

    String dictionaryType = getOption("dictionaryType", "text");

    boolean sortVectors = hasOption("sortVectors");
    boolean quiet = hasOption("quiet");
    if (!quiet) {
        log.info("Sort? {}", sortVectors);
    }

    String[] dictionary = null;
    if (hasOption("dictionary")) {
        String dictFile = getOption("dictionary");
        switch (dictionaryType) {
        case "text":
            dictionary = VectorHelper.loadTermDictionary(new File(dictFile));
            break;
        case "sequencefile":
            dictionary = VectorHelper.loadTermDictionary(conf, dictFile);
            break;
        default:
            //TODO: support Lucene's FST as a dictionary type
            throw new IOException("Invalid dictionary type: " + dictionaryType);
        }
    }

    Set<String> filters;
    if (hasOption("filter")) {
        filters = Sets.newHashSet(getOptions("filter"));
    } else {
        filters = null;
    }

    boolean useCSV = hasOption("csv");

    boolean sizeOnly = hasOption("sizeOnly");
    boolean nameOnly = hasOption("nameOnly");
    boolean namesAsComments = hasOption("namesAsComments");
    boolean transposeKeyValue = hasOption("vectorAsKey");
    Writer writer;
    boolean shouldClose;
    File output = getOutputFile();
    if (output != null) {
        shouldClose = true;
        log.info("Output file: {}", output);
        Files.createParentDirs(output);
        writer = Files.newWriter(output, Charsets.UTF_8);
    } else {
        shouldClose = false;
        writer = new OutputStreamWriter(System.out, Charsets.UTF_8);
    }
    try {
        boolean printKey = hasOption("printKey");
        if (useCSV && dictionary != null) {
            writer.write("#");
            for (int j = 0; j < dictionary.length; j++) {
                writer.write(dictionary[j]);
                if (j < dictionary.length - 1) {
                    writer.write(',');
                }
            }
            writer.write('\n');
        }
        Long numItems = null;
        if (hasOption("numItems")) {
            numItems = Long.parseLong(getOption("numItems"));
            if (quiet) {
                writer.append("#Max Items to dump: ").append(String.valueOf(numItems)).append('\n');
            }
        }
        int maxIndexesPerVector = hasOption("vectorSize") ? Integer.parseInt(getOption("vectorSize"))
                : Integer.MAX_VALUE;
        long itemCount = 0;
        int fileCount = 0;
        for (Path path : pathArr) {
            if (numItems != null && numItems <= itemCount) {
                break;
            }
            if (quiet) {
                log.info("Processing file '{}' ({}/{})", path, ++fileCount, pathArr.length);
            }
            SequenceFileIterable<Writable, Writable> iterable = new SequenceFileIterable<>(path, true, conf);
            Iterator<Pair<Writable, Writable>> iterator = iterable.iterator();
            long i = 0;
            while (iterator.hasNext() && (numItems == null || itemCount < numItems)) {
                Pair<Writable, Writable> record = iterator.next();
                Writable keyWritable = record.getFirst();
                Writable valueWritable = record.getSecond();
                if (printKey) {
                    Writable notTheVectorWritable = transposeKeyValue ? valueWritable : keyWritable;
                    writer.write(notTheVectorWritable.toString());
                    writer.write('\t');
                }
                Vector vector;
                try {
                    vector = ((VectorWritable) (transposeKeyValue ? keyWritable : valueWritable)).get();
                } catch (ClassCastException e) {
                    if ((transposeKeyValue ? keyWritable
                            : valueWritable) instanceof WeightedPropertyVectorWritable) {
                        vector = ((WeightedPropertyVectorWritable) (transposeKeyValue ? keyWritable
                                : valueWritable)).getVector();
                    } else {
                        throw e;
                    }
                }
                if (filters == null || !(vector instanceof NamedVector)
                        || filters.contains(((NamedVector) vector).getName())) {
                    if (sizeOnly) {
                        if (vector instanceof NamedVector) {
                            writer.write(((NamedVector) vector).getName());
                            writer.write(":");
                        } else {
                            writer.write(String.valueOf(i++));
                            writer.write(":");
                        }
                        writer.write(String.valueOf(vector.size()));
                        writer.write('\n');
                    } else if (nameOnly) {
                        if (vector instanceof NamedVector) {
                            writer.write(((NamedVector) vector).getName());
                            writer.write('\n');
                        }
                    } else {
                        String fmtStr;
                        if (useCSV) {
                            fmtStr = VectorHelper.vectorToCSVString(vector, namesAsComments);
                        } else {
                            fmtStr = VectorHelper.vectorToJson(vector, dictionary, maxIndexesPerVector,
                                    sortVectors);
                        }
                        writer.write(fmtStr);
                        writer.write('\n');
                    }
                    itemCount++;
                }
            }
        }
        writer.flush();
    } finally {
        if (shouldClose) {
            Closeables.close(writer, false);
        }
    }

    return 0;
}

From source file:org.wso2.carbon.registry.ws.client.registry.WSRegistryServiceClient.java

public void dump(String s, Writer writer) throws RegistryException {
    try {
        DataHandler dataHandler = stub.wsDump(s);
        ByteArrayInputStream inputStream = new ByteArrayInputStream(
                WSRegistryClientUtils.makeBytesFromDataHandler(dataHandler));
        BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));

        String inputLine;
        while ((inputLine = reader.readLine()) != null) {
            writer.append(inputLine);
        }
        writer.flush();
        reader.close();

    } catch (Exception e) {
        String msg = "Failed to perform dump operation.";
        log.error(msg, e);
        throw new RegistryException(msg, e);
    }
}