Example usage for org.apache.commons.io FileUtils writeLines

List of usage examples for org.apache.commons.io FileUtils writeLines

Introduction

On this page you can find example usage of org.apache.commons.io FileUtils.writeLines.

Prototype

public static void writeLines(File file, Collection lines) throws IOException 

Source Link

Document

Writes the toString() value of each item in a collection to the specified File line by line.

Usage

From source file:org.apache.lucene.analysis.kr.test.CNounsUtils.java

/**
 * Writes every value of the given map to the named file, one value per line
 * (each value's toString() representation becomes a line).
 *
 * @param map   source map whose values are written; iteration order follows
 *              the map implementation
 * @param fName path of the output file (created or overwritten)
 * @throws IOException if the file cannot be written
 */
private void writeResult(Map map, String fName) throws IOException {
    // map.values() yields the same elements in the same order as iterating
    // keySet() and looking each key up again, without the per-key lookup.
    List list = new ArrayList(map.values());
    FileUtils.writeLines(new File(fName), list);
}

From source file:org.apache.lucene.analysis.kr.test.MorphAnalyzerTest.java

/**
 * One-off analysis utility (ignored in normal runs): reads a word list, runs
 * MorphAnalyzer over each entry, accumulates the rendered analyses per stem,
 * merges them against a second word list, and writes the sorted results to
 * data/cheon.txt, data/youngon.txt and data/all.txt.
 *
 * NOTE(review): the Korean string literals in this method (file names, the
 * endsWith() suffixes, the @Ignore reason) were mangled into "?" by a bad
 * encoding conversion. In particular endsWith("") is always true, so the
 * guard below can never take its continue branch as written — restore the
 * original literals before trusting this test.
 *
 * @throws Exception if any data file cannot be read/written or analysis fails
 */
@Test
@Ignore(" ?? ?? .")
public void yongonAnalysis() throws Exception {

    String fname = "data/_?.txt";

    List<String> list = FileUtils.readLines(new File(fname));
    Map<String, String> younons = new HashMap();

    MorphAnalyzer analyzer = new MorphAnalyzer();
    long start = 0; // NOTE(review): unused
    List youngOutputs = new ArrayList();
    for (String input : list) {

        // Entries without the expected word ending are passed through as-is.
        if (!input.endsWith("") && !input.endsWith("?")) {
            youngOutputs.add(input);
            continue;
        }
        // Stem (eogan): the input minus its two-character ending.
        String eogan = input.substring(0, input.length() - 2);

        // Render the best (first) analysis as "<output>-><noun>/<noun>/...<score>".
        List<AnalysisOutput> outputs = analyzer.analyze(input);
        AnalysisOutput o = outputs.get(0);
        String result = o.toString() + "->";
        for (int i = 0; i < o.getCNounList().size(); i++) {
            result += o.getCNounList().get(i).getWord() + "/";
        }
        result += "<" + o.getScore() + ">";

        // Accumulate multiple analyses per stem, separated by "| ".
        String tmp = younons.get(eogan);
        if (tmp == null) {
            younons.put(eogan, result);
        } else {
            younons.put(eogan, tmp + "| " + result);
        }
    }

    fname = "data/_?.txt";
    String cheonOutfile = "data/cheon.txt";
    String youngOutfile = "data/youngon.txt";

    // Pair each entry of the second word list with its accumulated analysis,
    // removing matched stems from the map as we go.
    List<String> cheons = FileUtils.readLines(new File(fname));
    List<String> outputs = new ArrayList();
    System.out.println(younons.size());
    for (String cheon : cheons) {
        String str = younons.remove(cheon);
        if (str != null) {
            cheon += "=> " + str;
        }
        outputs.add(cheon);
    }

    // Stems that were never matched above are appended on their own.
    Iterator<String> iter = younons.keySet().iterator();
    while (iter.hasNext()) {
        String key = iter.next();
        outputs.add(key + "=> " + younons.get(key));
    }

    Collections.sort(outputs);
    Collections.sort(youngOutputs);

    FileUtils.writeLines(new File(cheonOutfile), outputs);
    FileUtils.writeLines(new File(youngOutfile), youngOutputs);

    // all.txt = merged, re-sorted union of both result lists.
    outputs.addAll(youngOutputs);
    Collections.sort(outputs);
    FileUtils.writeLines(new File("data/all.txt"), outputs);
}

From source file:org.apache.maven.plugin.resources.filters.ItFilter.java

/**
 * Writes a small marker file "foo.txt" into the execution's output directory
 * containing a fixed line, the project version, and the "toto" execution
 * property.
 *
 * @see org.apache.maven.shared.filtering.MavenResourcesFiltering#filterResources(org.apache.maven.shared.filtering.MavenResourcesExecution)
 */
public void filterResources(MavenResourcesExecution mavenResourcesExecution) throws MavenFilteringException {
    System.out.println("ItFilter filterResources");
    try {
        File target = new File(mavenResourcesExecution.getOutputDirectory(), "foo.txt");
        String version = mavenResourcesExecution.getMavenProject().getVersion();
        String toto = mavenResourcesExecution.getMavenSession().getExecutionProperties().getProperty("toto");

        List<String> content = new ArrayList<String>();
        content.add("foo");
        content.add("version=" + version);
        content.add("toto=" + toto);

        FileUtils.writeLines(target, content);
    } catch (IOException e) {
        // Wrap as the checked exception type declared by the interface.
        throw new MavenFilteringException(e.getMessage(), e);
    }

}

From source file:org.apache.maven.plugins.jdee.JdeeMojo.java

/**
 * Returns the union of the cached .jdee_sources entries and the project's
 * compile/test source roots, persisting the merged set back to .jdee_sources.
 *
 * @return merged set of source directory paths
 * @throws Exception if the cache file cannot be read or written
 */
private Set getSourceDirs() throws Exception {
    Set<String> results = new HashSet<String>();
    File file = new File(".jdee_sources");
    // Seed with any previously cached entries. FileUtils.writeLines below
    // creates the file when missing, so no explicit createNewFile() (whose
    // return value was previously ignored) is needed.
    if (file.exists()) {
        results.addAll(FileUtils.readLines(file));
    }

    results.addAll(emitPaths(project.getCompileSourceRoots()));
    results.addAll(emitPaths(project.getTestCompileSourceRoots()));
    FileUtils.writeLines(file, results);
    return results;
}

From source file:org.apache.maven.plugins.jdee.JdeeMojo.java

/**
 * Returns the union of the cached .jdee_classpath entries and the project's
 * compile/test/system classpath elements plus dependencies, persisting the
 * merged set back to .jdee_classpath.
 *
 * @return merged set of classpath element paths
 * @throws Exception if the cache file cannot be read or written
 */
private Set getGlobalClasspath() throws Exception {
    Set<String> results = new HashSet<String>();
    File file = new File(".jdee_classpath");
    // Seed with any previously cached entries. FileUtils.writeLines below
    // creates the file when missing, so no explicit createNewFile() (whose
    // return value was previously ignored) is needed.
    if (file.exists()) {
        results.addAll(FileUtils.readLines(file));
    }

    results.addAll(emitPaths(project.getCompileClasspathElements()));
    results.addAll(emitPaths(project.getTestClasspathElements()));
    results.addAll(emitPaths(project.getSystemClasspathElements()));
    results.addAll(getDependencies());
    FileUtils.writeLines(file, results);
    return results;
}

From source file:org.apache.qpid.proton.apireconciliation.CFunctionNameListReaderTest.java

/**
 * Creates a temporary file containing the given function names, one per line.
 * The file is deleted automatically when the JVM exits.
 *
 * @param functionNames lines to write into the file
 * @return absolute path of the created file
 * @throws Exception if the file cannot be created or written
 */
private String createTestFileContaining(String... functionNames) throws Exception {
    // ".txt" with the dot: File.createTempFile uses the suffix verbatim, so
    // the previous "txt" produced names like "...1234txt" with no extension.
    File file = File.createTempFile(CFunctionNameListReader.class.getSimpleName(), ".txt");
    file.deleteOnExit();
    FileUtils.writeLines(file, Arrays.asList(functionNames));
    return file.getAbsolutePath();
}

From source file:org.apache.qpid.proton.apireconciliation.reportwriter.ReconciliationReportWriter.java

/**
 * Renders the reconciliation report to the given file: a fixed title line
 * followed by one formatted line per report row (C function, fully-qualified
 * Java method name, and the method's annotation value, blanks when absent).
 *
 * @param outputFile path of the report file to create or overwrite
 * @param report     report whose rows are rendered
 * @throws IOException if the report file cannot be written
 */
public void write(String outputFile, ReconciliationReport report) throws IOException {
    List<String> lines = new ArrayList<String>();
    lines.add(REPORT_TITLE);

    for (Iterator<ReportRow> rows = report.rowIterator(); rows.hasNext();) {
        ReportRow row = rows.next();
        String cFunction = defaultString(row.getCFunction());

        // Both Java-side columns stay empty when the row has no Java method.
        String qualifiedName = "";
        String annotationCFunction = "";
        Method javaMethod = row.getJavaMethod();
        if (javaMethod != null) {
            qualifiedName = createFullyQualifiedJavaMethodName(javaMethod);
            annotationCFunction = defaultString(_annotationAccessor.getAnnotationValue(javaMethod));
        }

        lines.add(format(ROW_FORMAT, cFunction, qualifiedName, annotationCFunction));
    }

    FileUtils.writeLines(new File(outputFile), lines);
}

From source file:org.apache.sqoop.test.minicluster.SqoopMiniCluster.java

/**
 * Prepare temporary directory for starting Sqoop server: recreates the tmp,
 * configuration and log directories, then generates sqoop_bootstrap.properties,
 * sqoop.properties and hadoop-site.xml inside the configuration directory.
 *
 * NOTE(review): configuration file paths are built by plain string
 * concatenation, so getConfigurationPath() is assumed to end with a path
 * separator — confirm against its implementation.
 *
 * @throws IOException if a directory or configuration file cannot be created
 */
protected void prepareTemporaryPath() throws IOException {
    File tmpDir = new File(getTemporaryPath());
    File configDir = new File(getConfigurationPath());
    File logDir = new File(getLogPath());

    // Start from a clean slate.
    FileUtils.deleteDirectory(tmpDir);
    FileUtils.forceMkdir(tmpDir);
    FileUtils.forceMkdir(configDir);
    FileUtils.forceMkdir(logDir);

    // Create configuration files
    System.setProperty(ConfigurationConstants.SYSPROP_CONFIG_DIR, getConfigurationPath());

    // sqoop_bootstrap.properties
    FileUtils.writeStringToFile(new File(getConfigurationPath() + "sqoop_bootstrap.properties"),
            "sqoop.config.provider=org.apache.sqoop.core.PropertiesConfigurationProvider");

    // sqoop.properties
    // TODO: This should be generated more dynamically so that user can specify Repository, Submission and Execution engines
    File f = new File(getConfigurationPath() + "sqoop.properties");

    List<String> sqoopProperties = new LinkedList<String>();
    mapToProperties(sqoopProperties, getLoggerConfiguration());
    mapToProperties(sqoopProperties, getRepositoryConfiguration());
    mapToProperties(sqoopProperties, getSubmissionEngineConfiguration());
    mapToProperties(sqoopProperties, getExecutionEngineConfiguration());

    FileUtils.writeLines(f, sqoopProperties);

    // Hadoop configuration: close the stream even when writeXml() throws,
    // so the file handle is never leaked.
    OutputStream stream = FileUtils.openOutputStream(new File(getConfigurationPath() + "hadoop-site.xml"));
    try {
        configuration.writeXml(stream);
    } finally {
        stream.close();
    }
}

From source file:org.apache.synapse.transport.vfs.VFSTransportListener.java

/**
 * Records a failed file transfer in the poll entry's failed-record file as
 * "&lt;baseName&gt;&lt;delimiter&gt;&lt;timeString&gt;". Creates the record
 * file on first use; otherwise appends the record only if it is not already
 * present. I/O failures are logged at fatal level rather than propagated.
 */
private synchronized void addFailedRecord(PollTableEntry pollTableEntry, FileObject failedObject,
        String timeString) {
    try {
        String record = failedObject.getName().getBaseName() + VFSConstants.FAILED_RECORD_DELIMITER
                + timeString;
        String recordFile = pollTableEntry.getFailedRecordFileDestination()
                + pollTableEntry.getFailedRecordFileName();
        File failedRecordFile = new File(recordFile);

        if (failedRecordFile.exists()) {
            // Rewrite the file with the record added, skipping duplicates.
            List<String> existing = FileUtils.readLines(failedRecordFile);
            if (!existing.contains(record)) {
                existing.add(record);
            }
            FileUtils.writeLines(failedRecordFile, existing);
        } else {
            FileUtils.writeStringToFile(failedRecordFile, record);
            if (log.isDebugEnabled()) {
                log.debug("Added fail record '" + VFSUtils.maskURLPassword(record)
                        + "' into the record file '" + recordFile + "'");
            }
        }
    } catch (IOException e) {
        log.fatal("Failure while writing the failed records!", e);
    }
}

From source file:org.apereo.portal.tools.dbloader.HibernateDbLoader.java

/**
 * Runs the configured drop/create/populate steps against the database, or —
 * when a script file is configured — collects the generated SQL into that
 * file instead of executing it:
 *
 * scriptFile == null: statements execute via jdbcOperations and table
 * population runs if requested; scriptFile != null: statements are recorded
 * into the script list and written out (each terminated with ';'), and
 * population is skipped.
 *
 * @param configuration drop/create/populate flags plus the optional script file path
 * @throws ParserConfigurationException if the table definition XML cannot be parsed
 * @throws SAXException if the table definition XML is malformed
 * @throws IOException if the script file cannot be written
 */
@Override
public void process(DbLoaderConfig configuration)
        throws ParserConfigurationException, SAXException, IOException {
    final String scriptFile = configuration.getScriptFile();
    // A non-null script acts as a sink: SQL is recorded instead of executed.
    final List<String> script;
    if (scriptFile == null) {
        script = null;
    } else {
        script = new LinkedList<String>();
    }

    final ITableDataProvider tableData = this.loadTables(configuration, dialect);

    //Handle table drop/create
    if (configuration.isDropTables() || configuration.isCreateTables()) {
        //Load Table object model
        final Map<String, Table> tables = tableData.getTables();

        final Mapping mapping = this.configuration.buildMapping();
        final String defaultCatalog = this.configuration.getProperty(Environment.DEFAULT_CATALOG);
        final String defaultSchema = this.configuration.getProperty(Environment.DEFAULT_SCHEMA);

        // Drop failures are collected and logged after the loop rather than
        // aborting the run (non-transient resource errors are rethrown).
        final Map<String, DataAccessException> failedSql = new LinkedHashMap<String, DataAccessException>();

        //Generate and execute drop table scripts
        if (configuration.isDropTables()) {
            final List<String> dropScript = this.dropScript(tables.values(), dialect, defaultCatalog,
                    defaultSchema);

            if (script == null) {
                this.logger.info("Dropping existing tables");
                for (final String sql : dropScript) {
                    this.logger.info(sql);
                    try {
                        jdbcOperations.update(sql);
                    } catch (NonTransientDataAccessResourceException dae) {
                        // Connection-level problem: no point continuing.
                        throw dae;
                    } catch (DataAccessException dae) {
                        // E.g. table did not exist — record and continue.
                        failedSql.put(sql, dae);
                    }
                }
            } else {
                script.addAll(dropScript);
            }
        }

        //Log any drop/create statements that failed
        for (final Map.Entry<String, DataAccessException> failedSqlEntry : failedSql.entrySet()) {
            this.logger.warn(
                    "'" + failedSqlEntry.getKey() + "' failed to execute due to " + failedSqlEntry.getValue());
        }

        //Generate and execute create table scripts
        if (configuration.isCreateTables()) {
            final List<String> createScript = this.createScript(tables.values(), dialect, mapping,
                    defaultCatalog, defaultSchema);

            if (script == null) {
                this.logger.info("Creating tables");
                for (final String sql : createScript) {
                    this.logger.info(sql);
                    jdbcOperations.update(sql);
                }
            } else {
                script.addAll(createScript);
            }
        }
    }

    //Perform database population (only when executing directly, not scripting)
    if (script == null && configuration.isPopulateTables()) {
        this.logger.info("Populating database");
        final Map<String, Map<String, Integer>> tableColumnTypes = tableData.getTableColumnTypes();
        this.populateTables(configuration, tableColumnTypes);
    }

    //Write out the script file
    if (script != null) {
        // Terminate every statement with ';' so the file can be fed to a SQL client.
        for (final ListIterator<String> iterator = script.listIterator(); iterator.hasNext();) {
            final String sql = iterator.next();
            iterator.set(sql + ";");
        }

        final File outputFile = new File(scriptFile);
        FileUtils.writeLines(outputFile, script);
        this.logger.info("Saved DDL to: " + outputFile.getAbsolutePath());
    }
}