Example usage for java.io FileWriter append

List of usage examples for java.io FileWriter append

Introduction

On this page you can find usage examples for java.io FileWriter append.

Prototype

@Override
public Writer append(CharSequence csq) throws IOException
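
A minimal, self-contained sketch of typical usage (the file name and values are illustrative, not taken from the examples below). Because append returns the Writer itself, calls can be chained:

import java.io.FileWriter;
import java.io.IOException;

public class FileWriterAppendExample {
    public static void main(String[] args) {
        // try-with-resources closes the writer even if append throws
        try (FileWriter writer = new FileWriter("example.csv")) {
            writer.append("header1,header2\n");
            writer.append("value1,").append("value2\n"); // append returns this Writer, so calls chain
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}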

Usage

From source file:com.opengamma.analytics.financial.provider.curve.MulticurveBuildingHullWhiteDiscountFuturesEUR3Test.java

/**
 * Analyzes the shape of the forward curve.
 */
@Test(enabled = false)
public void forwardAnalysis() {
    final HullWhiteOneFactorProvider marketDsc = CURVES_PAR_SPREAD_MQ_WITHOUT_TODAY_BLOCK.get(0).getFirst();
    final int jump = 1;
    final int startIndex = 0;
    final int nbDate = 2750;
    ZonedDateTime startDate = ScheduleCalculator.getAdjustedDate(NOW,
            EURIBOR3M.getSpotLag() + startIndex * jump, TARGET);
    final double[] rateDsc = new double[nbDate];
    final double[] startTime = new double[nbDate];
    try {
        final FileWriter writer = new FileWriter("fwd-dsc.csv");
        for (int loopdate = 0; loopdate < nbDate; loopdate++) {
            startTime[loopdate] = TimeCalculator.getTimeBetween(NOW, startDate);
            final ZonedDateTime endDate = ScheduleCalculator.getAdjustedDate(startDate, EURIBOR3M, TARGET);
            final double endTime = TimeCalculator.getTimeBetween(NOW, endDate);
            final double accrualFactor = EURIBOR3M.getDayCount().getDayCountFraction(startDate, endDate);
            rateDsc[loopdate] = marketDsc.getMulticurveProvider().getForwardRate(EURIBOR3M, startTime[loopdate],
                    endTime, accrualFactor);
            startDate = ScheduleCalculator.getAdjustedDate(startDate, jump, TARGET);
            writer.append(0.0 + "," + startTime[loopdate] + "," + rateDsc[loopdate] + "\n");
        }
        writer.flush();
        writer.close();
    } catch (final IOException e) {
        e.printStackTrace();
    }
}
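
The example above closes the writer only on the success path; if append throws inside the loop, the file handle is never released. A minimal sketch of the same CSV-append pattern with try-with-resources, which guarantees the writer is closed (the array values are illustrative, not OpenGamma data):

import java.io.FileWriter;
import java.io.IOException;

public class CsvAppendSketch {
    public static void main(String[] args) {
        final double[] startTime = { 0.25, 0.50, 0.75 }; // illustrative times
        final double[] rateDsc = { 0.012, 0.013, 0.014 }; // illustrative rates
        // the writer is closed even when append throws mid-loop
        try (FileWriter writer = new FileWriter("fwd-dsc.csv")) {
            for (int i = 0; i < startTime.length; i++) {
                writer.append(0.0 + "," + startTime[i] + "," + rateDsc[i] + "\n");
            }
        } catch (final IOException e) {
            e.printStackTrace();
        }
    }
}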

From source file:com.opengamma.analytics.financial.provider.curve.MulticurveBuildingDiscountingDiscountEURCommittee2Test.java

/**
 * Analyzes the shape of the EURIBOR6M forward rate curve.
 */
@Test(enabled = false)
public void forwardAnalysisTenor() {
    final MulticurveProviderInterface multicurve = CURVES_PAR_SPREAD_MQ_WITHOUT_TODAY_BLOCK.get(1).getFirst();
    final int jump = 1;
    //      final int startIndex = 0;
    final int nbDate = 750;
    ZonedDateTime startDate = NOW;
    final double[] rateDsc = new double[nbDate];
    final double[] startTime = new double[nbDate];
    try {
        final FileWriter writer = new FileWriter("dsc-committee.csv");
        for (int loopdate = 0; loopdate < nbDate; loopdate++) {
            startTime[loopdate] = TimeCalculator.getTimeBetween(NOW, startDate);
            final ZonedDateTime endDate = ScheduleCalculator.getAdjustedDate(startDate, EURIBOR6M, TARGET);
            final double endTime = TimeCalculator.getTimeBetween(NOW, endDate);
            final double accrualFactor = EURIBOR6M.getDayCount().getDayCountFraction(startDate, endDate);
            rateDsc[loopdate] = multicurve.getForwardRate(EURIBOR6M, startTime[loopdate], endTime,
                    accrualFactor);
            startDate = ScheduleCalculator.getAdjustedDate(startDate, jump, TARGET);
            writer.append(0.0 + "," + startTime[loopdate] + "," + rateDsc[loopdate] + "\n");
        }
        writer.flush();
        writer.close();
    } catch (final IOException e) {
        e.printStackTrace();
    }
}

From source file:org.clothocad.phagebook.controllers.OrdersController.java

@RequestMapping(value = "/exportOrderCSV", method = RequestMethod.GET)
public void exportOrderCSV(@RequestParam Map<String, String> params, HttpServletResponse response)
        throws IOException, ServletException {

    ClothoConnection conn = new ClothoConnection(Args.clothoLocation);
    Clotho clothoObject = new Clotho(conn);
    String username = this.backendPhagebookUser;
    String password = this.backendPhagebookPassword;
    Map<String, String> loginMap = new HashMap<>();
    loginMap.put("username", username);
    loginMap.put("credentials", password);

    clothoObject.login(loginMap);

    Order order = ClothoAdapter.getOrder(params.get("orderId"), clothoObject);
    System.out.println(order.getId());
    List<String> cartItems = new ArrayList<String>();
    List<OrderColumns> ColumnList = new ArrayList<OrderColumns>();
    List<String> CList = new ArrayList<String>();

    CList.add("ITEM");
    CList.add("QTY.");
    CList.add("UNIT PRICE");
    CList.add("CUSTOM UNIT PRICE");
    CList.add("TOTAL PRICE");

    ColumnList.add(OrderColumns.SERIAL_NUMBER);

    for (String cartItem : CList) {

        switch (cartItem) { //can add all of them for a customizable form
        case "ITEM":
            ColumnList.add(OrderColumns.PRODUCT_NAME);
            break;
        case "QTY.":
            ColumnList.add(OrderColumns.QUANTITY);
            break;
        case "UNIT PRICE":
            ColumnList.add(OrderColumns.UNIT_PRICE);
            break;
        case "CUSTOM UNIT PRICE":
            ColumnList.add(OrderColumns.CUSTOM_UNIT_PRICE);
            break;
        case "TOTAL PRICE":
            ColumnList.add(OrderColumns.TOTAL_PRICE);
            break;
        }
    }

    cartItems = org.clothocad.phagebook.controller.OrderController.createOrderForm(order, ColumnList);
    String path = Utilities.getFilepath();
    FileWriter file = new FileWriter(
            new File(path + "src/main/webapp/resources/OrderSheets/", "Order_" + order.getId() + ".csv"));

    for (String cartItem : cartItems) {
        file.append(cartItem);
    }
    file.flush();
    file.close();

    PrintWriter writer = response.getWriter();

    writer.println(order.getId());
    writer.flush();
    writer.close();
    conn.closeConnection();
}

From source file:SimpleGui.QueryGenerator.java

/**
 * The following piece of code checks how many distinct queries are present in the list.
 * It should find only a single query in this case; the count should be 3000.
 */
public void countQueries(int epoch) throws IOException {

    int independentQuery = -1;
    Query_Count[] distinctQueries = new Query_Count[10000];
    ArrayList<String> queryList = new ArrayList<>();
    ArrayList<String> arrivalTimes = new ArrayList<>();
    ArrayList<String> userLocations = new ArrayList<>();

    try (BufferedReader br = new BufferedReader(
            new FileReader("//home//santhilata//Dropbox//CacheLearning//QGen//src//main//java//QueryInput//"
                    + epoch + "_queryRepeat_10.csv"))) {

        String sCurrentLine;

        while ((sCurrentLine = br.readLine()) != null) {
            String[] query = sCurrentLine.split(Pattern.quote("@("));
            queryList.add("(" + query[1]);
            //each line contain <arrivalTime, userLocation, query> ==<int, int, string>
            arrivalTimes.add(query[0].split(Pattern.quote("@"))[0]);
            userLocations.add(query[0].split(Pattern.quote("@"))[1]);
        }

    } catch (IOException e) {
        e.printStackTrace();
    }

    for (String query : queryList) {
        boolean present = false;
        int z = 0;
        while (z <= independentQuery) {
            if (query.equals(distinctQueries[z].getQuery())) {
                distinctQueries[z].addCount(1);
                present = true;
                break;
            } else {
                z++;
            }
            // System.out.println("z = "+z);
        }
        if (!present) {
            independentQuery++;
            distinctQueries[independentQuery] = new Query_Count(query, queryList.indexOf(query));

        }
    }
    String fileOut = "//home//santhilata//Desktop//QueryOutput//CountQueries//test.csv";

    File outFile11 = new File(fileOut);
    FileWriter writer1 = new FileWriter(outFile11);

    writer1.flush();
    writer1.append("QueryNumber,QRepetitions");
    writer1.append('\n');

    int total = 0;
    for (int i = 0; i <= independentQuery; i++) { // <= : independentQuery holds the index of the last distinct query

        if (distinctQueries[i].getCount() > 0) {
            System.out.println(distinctQueries[i].getIndex() + " " + distinctQueries[i].getQuery() + " "
                    + distinctQueries[i].getCount());
            writer1.append(distinctQueries[i].getIndex() + "," + distinctQueries[i].getCount() + "\n");
            total += distinctQueries[i].getCount();
        }

    }
    System.out.println("total queries " + total);
    writer1.close();

    String arrivalTime = "//home//santhilata//Desktop//QueryOutput//CountQueries//arrivalTimes.csv";
    File aTFile = new File(arrivalTime);
    FileWriter Atwriter = new FileWriter(aTFile);
    Atwriter.flush();
    Atwriter.append("ArrivalTimes,noOfTimes" + "\n");
    int noOfTimes = 1;
    arrivalTimes.sort(Comparator.comparingInt(Integer::parseInt)); // numeric sort; a plain string sort would put "10" before "2"
    for (int i = 0; i < arrivalTimes.size(); i++) {

        if (i > 0) {

            if (Integer.parseInt(arrivalTimes.get(i)) == Integer.parseInt(arrivalTimes.get(i - 1))) {
                noOfTimes++;

            } else {
                Atwriter.append(arrivalTimes.get(i - 1) + "," + noOfTimes + "\n");
                noOfTimes = 1;
            }
        }
    }

    if (!arrivalTimes.isEmpty()) {
        // write the count for the final arrival-time group; the loop above only
        // emits a group when the next, different value is seen
        Atwriter.append(arrivalTimes.get(arrivalTimes.size() - 1) + "," + noOfTimes + "\n");
    }

    for (int i = 0; i < queryList.size(); i++) {
        if (!arrivalTimes.contains(i + "")) {
            Atwriter.append(i + "," + 0 + "\n");
        }
    }

    Atwriter.close();

}

From source file:com.opengamma.analytics.financial.provider.curve.MulticurveBuildingDiscountingDiscountEURCommittee2Test.java

/**
 * Analyzes the shape of the pseudo-overnight (ON) forward rates for the EURIBOR6M forward curve.
 */
@Test(enabled = false)
public void forwardAnalysisON() {
    final MulticurveProviderInterface multicurve = CURVES_PAR_SPREAD_MQ_WITHOUT_TODAY_BLOCK.get(1).getFirst();
    final int jump = 1;
    //      final int startIndex = 0;
    final int nbDate = 500;
    ZonedDateTime startDate = NOW;
    final double[] rateDsc = new double[nbDate];
    final double[] startTime = new double[nbDate];
    try {
        final FileWriter writer = new FileWriter("dsc-committee.csv");
        for (int loopdate = 0; loopdate < nbDate; loopdate++) {
            startTime[loopdate] = TimeCalculator.getTimeBetween(NOW, startDate);
            final ZonedDateTime endDate = ScheduleCalculator.getAdjustedDate(startDate, 1, TARGET);
            final double endTime = TimeCalculator.getTimeBetween(NOW, endDate);
            final double accrualFactor = EONIA.getDayCount().getDayCountFraction(startDate, endDate);
            rateDsc[loopdate] = multicurve.getForwardRate(EONIA, startTime[loopdate], endTime, accrualFactor); // EONIA curve
            //        rateDsc[loopdate] = multicurve.getForwardRate(EURIBOR6M, startTime[loopdate], endTime, accrualFactor); // EURIBOR6M curve
            startDate = ScheduleCalculator.getAdjustedDate(startDate, jump, TARGET);
            writer.append(0.0 + "," + startTime[loopdate] + "," + rateDsc[loopdate] + "\n");
        }
        writer.flush();
        writer.close();
    } catch (final IOException e) {
        e.printStackTrace();
    }
}

From source file:org.apache.sqoop.manager.sqlserver.SQLServerDatatypeImportSequenceFileManualTest.java

public synchronized void addToReport(MSSQLTestData td, Object result) {
    System.out.println("called");
    try {
        FileWriter fr = new FileWriter(getResportFileName(), true);
        String offset = td.getData(KEY_STRINGS.OFFSET);
        String res = "_";
        if (result == null) {
            res = "Success";
        } else {
            try {
                res = "FAILED " + removeNewLines(((AssertionError) result).getMessage());
            } catch (Exception ae) {
                if (result instanceof Exception) { // instanceof already guarantees non-null
                    res = "FAILED " + removeNewLines(((Exception) result).getMessage());
                } else {
                    res = "FAILED " + result.toString();
                }
            }
        }

        fr.append(offset + "\t" + res + "\n");
        fr.close();
    } catch (Exception e) {
        LOG.error(StringUtils.stringifyException(e));
    }
}

From source file:org.jahia.tools.maven.plugins.LegalArtifactAggregator.java

void execute() {
    Collection<File> jarFiles = FileUtils.listFiles(scanDirectory, new String[] { "jar" }, true);
    for (File jarFile : jarFiles) {
        FileInputStream jarInputStream = null;
        try {
            jarInputStream = new FileInputStream(jarFile);
            processJarFile(jarInputStream, jarFile.getPath(), null, true, 0, true, true, false);
        } catch (IOException e) {
            output(START_INDENT, "Error handling JAR " + jarFile.getPath() + ":" + e.getMessage()
                    + ". This file will be ignored.", true, true);
            e.printStackTrace();
        } finally {
            IOUtils.closeQuietly(jarInputStream);
        }
    }

    if (verbose || outputDiagnostics) {
        outputDiagnostics(false);
        outputDiagnostics(true);
    }

    output(START_INDENT, "Processed projects: ");
    List<String> allNoticeLines = new LinkedList<>();
    for (Map.Entry<String, JarMetadata> entry : jarDatabase.entrySet()) {
        final String project = entry.getKey();
        output(START_INDENT, project);
        final Collection<Notice> notices = entry.getValue().getNoticeFiles().values();
        if (!notices.isEmpty()) {
            allNoticeLines.add("");
            allNoticeLines.add(getStartTitle("Notice for " + project));
            for (Notice notice : notices) {
                allNoticeLines.add(notice.toString());
            }
            allNoticeLines.add(getEndTitle("End of notice for " + project));
        }
    }

    FileWriter writer = null;
    try {
        File aggregatedNoticeFile = new File(outputDirectory, "NOTICE-aggregated");
        writer = new FileWriter(aggregatedNoticeFile);
        for (String noticeLine : allNoticeLines) {
            writer.append(noticeLine);
            writer.append("\n");
        }

        output(START_INDENT, "Aggregated NOTICE created at " + aggregatedNoticeFile.getPath());
    } catch (IOException e) {
        e.printStackTrace();
    }
    IOUtils.closeQuietly(writer);

    try {
        File aggregatedLicenseFile = new File(outputDirectory, "LICENSE-aggregated");
        writer = new FileWriter(aggregatedLicenseFile);
        for (Map.Entry<KnownLicense, SortedSet<LicenseFile>> foundKnownLicenseEntry : knownLicensesFound
                .entrySet()) {
            output(START_INDENT, "Adding license " + foundKnownLicenseEntry.getKey().getName());
            SortedSet<LicenseFile> licenseFiles = foundKnownLicenseEntry.getValue();
            writer.append("License for:\n");
            for (LicenseFile licenseFile : licenseFiles) {
                writer.append("  " + FilenameUtils.getBaseName(licenseFile.getProjectOrigin()) + "\n");
            }
            writer.append(getStartTitle(foundKnownLicenseEntry.getKey().getName()));
            writer.append("\n");
            writer.append(foundKnownLicenseEntry.getKey().getTextToUse());
            writer.append(getEndTitle("End of " + foundKnownLicenseEntry.getKey().getName()));
            writer.append("\n\n");
        }

        output(START_INDENT, "Aggregated LICENSE created at " + aggregatedLicenseFile.getPath());
    } catch (IOException e) {
        e.printStackTrace();
    }
    IOUtils.closeQuietly(writer);

    if (updateKnownLicenses) {
        saveKnownLicenses();
    }

    File jarPackagesFile = new File(outputDirectory, "jar-database.json");
    try {
        mapper.enable(SerializationFeature.INDENT_OUTPUT);
        mapper.writeValue(jarPackagesFile, jarDatabase);
    } catch (IOException e) {
        e.printStackTrace();
    }

    outputPackageLicenses();

}

From source file:org.occiware.clouddesigner.occi.docker.connector.dockerjava.DockerContainerManager.java

public void addHost(final String key, final String ip, final String knowHosts) {
    try {
        final String newLine = ip + " ssh-rsa " + key + "\n";
        if (!this.hostAlreadyExist(newLine, knowHosts)) {
            // the second constructor argument (true) opens the file in append mode;
            // opening it only when the entry is new avoids leaking the writer
            final FileWriter tmpwriter = new FileWriter(knowHosts, true);
            tmpwriter.append(newLine);
            DockerContainerManager.LOGGER.info(ip + " ssh-rsa " + key);
            tmpwriter.flush();
            tmpwriter.close();
        }
    } catch (final Throwable _t) {
        if (_t instanceof IOException) {
            final IOException e = (IOException) _t;
            e.printStackTrace();
        } else {
            throw Exceptions.sneakyThrow(_t);
        }
    }
}
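
The two-argument FileWriter constructor used above takes a boolean append flag: when it is true, writes go to the end of an existing file instead of truncating it. A minimal sketch of that mode (the file name and entry are illustrative):

import java.io.FileWriter;
import java.io.IOException;

public class AppendModeSketch {
    public static void main(String[] args) throws IOException {
        // second argument true = append mode: existing contents are preserved
        try (FileWriter hosts = new FileWriter("known_hosts.txt", true)) {
            hosts.append("192.0.2.10 ssh-rsa AAAA...example-key\n"); // illustrative entry
        }
    }
}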

From source file:it.infn.ct.aleph_portlet.java

public void getRecordsOAR(String search, int jrec, int num_rec) {
    String responseXML = null;
    HttpClient client = new HttpClient();
    HttpMethod method = callAPIOAR(search, jrec, num_rec);
    try {
        client.executeMethod(method);
        if (method.getStatusCode() == HttpStatus.SC_OK) {
            method.getResponseBody();
            responseXML = convertStreamToString(method.getResponseBodyAsStream());
            FileWriter fw = new FileWriter(
                    appServerPath + "datatable/marcXML_OAR_" + jrec + "_" + num_rec + ".xml");
            fw.append(responseXML);
            fw.close();
        }
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        method.releaseConnection();
    }
}

From source file:org.opennms.systemreport.system.ThreadReportPlugin.java

@Override
public TreeMap<String, Resource> getEntries() {
    final TreeMap<String, Resource> map = new TreeMap<String, Resource>();

    LOG.trace("starting thread dump");
    triggerThreadDump();
    LOG.trace("thread dump finished");

    final String outputLog = System.getProperty("opennms.home") + File.separator + "logs" + File.separator
            + "daemon" + File.separator + "output.log";
    LOG.debug("reading file {}", outputLog);
    final File outputLogFile = new File(outputLog);
    FileReader fr = null;
    BufferedReader bfr = null;
    FileWriter fw = null;
    File threadDumpFile = null;
    String threadDump = null;
    if (outputLogFile.exists()) {
        try {
            threadDumpFile = File.createTempFile("ThreadReportPlugin", null);
            threadDumpFile.deleteOnExit();
            fr = new FileReader(outputLogFile);
            bfr = new BufferedReader(fr);
            fw = new FileWriter(threadDumpFile);
            String line;
            StringBuffer sb = null;
            boolean endOnCarriageReturn = false;
            while ((line = bfr.readLine()) != null) {
                if (line.startsWith("Full thread dump")) {
                    LOG.debug("found full thread dump");
                    sb = new StringBuffer();
                    sb.append(line).append("\n");
                } else if (sb != null) {
                    if (endOnCarriageReturn && line.length() == 0) {
                        endOnCarriageReturn = false;
                        threadDump = sb.toString();
                        sb = null;
                    } else if (line.startsWith("Heap")) {
                        endOnCarriageReturn = true;
                        sb.append(line).append("\n");
                    } else {
                        sb.append(line).append("\n");
                    }
                }
            }
            if (threadDump == null) {
                LOG.debug("No thread dump was found.");
            } else {
                fw.append(threadDump);
                map.put("ThreadDump.txt", new FileSystemResource(threadDumpFile));
            }
        } catch (final Exception e) {
            LOG.debug("Unable to read from '{}'", outputLog, e);
        } finally {
            IOUtils.closeQuietly(fw);
            IOUtils.closeQuietly(bfr);
            IOUtils.closeQuietly(fr);
        }
    } else {
        LOG.warn("could not find output.log in '{}'", outputLog);
    }

    return map;
}