Usage examples for java.io.FileWriter#append(CharSequence)
@Override public Writer append(CharSequence csq) throws IOException
From source file:de.tudarmstadt.dvs.myhealthassistant.myhealthhub.fragments.EventGeneratorFragment.java
/**
 * Appends the given text to "EventGeneratorLogDelivery.txt" in external storage,
 * creating the file if it does not already exist. Failures are reported to the
 * user via a Toast and logged; they are never rethrown (best-effort logging).
 *
 * @param text the text to append to the delivery log
 */
private void writeStringToLogDelivery(String text) {
    File root = Environment.getExternalStorageDirectory();
    File file = new File(root, "EventGeneratorLogDelivery.txt");
    // try-with-resources closes the writer even when append() throws, fixing a
    // FileWriter leak in the original (close() was skipped on exception).
    // new FileWriter(file, true) creates the file if absent, so the original
    // explicit createNewFile() call was redundant and has been dropped.
    try (FileWriter filewriter = new FileWriter(file, true)) { // true = append mode
        filewriter.append(text);
    } catch (IOException e) {
        Toast.makeText(getActivity().getApplicationContext(),
                "Unable to write file: " + e.toString(), Toast.LENGTH_SHORT).show();
        e.printStackTrace();
    }
}
From source file:org.shareok.data.documentProcessor.CsvHandler.java
/**
 * Writes one CSV record to the given writer: each value is formatted via
 * followCVSformat, values are joined with DEFAULT_SEPARATOR, and the record
 * is terminated with a "\n". The writer is neither flushed nor closed here.
 *
 * @param w      destination writer
 * @param values cell values for this record, in column order
 * @throws IOException if the underlying writer fails
 */
public void writeCsvLine(FileWriter w, List<String> values) throws IOException {
    StringBuilder line = new StringBuilder();
    for (int i = 0; i < values.size(); i++) {
        if (i > 0) {
            line.append(DEFAULT_SEPARATOR); // separator goes only between cells
        }
        line.append(followCVSformat(values.get(i)));
    }
    line.append("\n");
    w.append(line.toString());
}
From source file:dk.netarkivet.archive.arcrepositoryadmin.ReplicaCacheDatabaseTester.java
License: unspecified (placeholder text in the original listing)
private File makeTemporaryChecksumFile1() throws Exception { File res = new File(TestInfo.TEST_DIR, "checksum_1.out"); FileWriter fw = new FileWriter(res); StringBuilder fileContent = new StringBuilder(); fileContent.append("TEST1##1234567890"); fileContent.append("\n"); fileContent.append("TEST2##0987654321"); fileContent.append("\n"); fileContent.append("TEST3##1029384756"); fileContent.append("\n"); fileContent.append("TEST4##0192837465"); fw.append(fileContent.toString()); fw.flush();//from w ww. ja v a 2 s .c om fw.close(); return res; }
From source file:dk.netarkivet.archive.arcrepositoryadmin.ReplicaCacheDatabaseTester.java
License: unspecified (placeholder text in the original listing)
private File makeTemporaryChecksumFile2() throws Exception { File res = new File(TestInfo.TEST_DIR, "checksum_2.out"); FileWriter fw = new FileWriter(res); StringBuilder fileContent = new StringBuilder(); fileContent.append("TEST1##ABCDEFGHIJ"); fileContent.append("\n"); fileContent.append("TEST2##JIHGFEDCBA"); fileContent.append("\n"); fileContent.append("TEST3##AJIBHCGDFE"); fileContent.append("\n"); fileContent.append("TEST4##JABICHDGEF"); fw.append(fileContent.toString()); fw.flush();// w w w . j a va2 s .c o m fw.close(); return res; }
From source file:org.apache.oozie.service.TestZKXLogStreamingService.java
// NOTE(review): the scraped listing collapsed this test onto a few very long
// lines and even splits one string literal across listing lines; the code is
// left byte-identical because the assertions depend on the exact bytes of the
// embedded log-fixture strings. Reformatting here would be unsafe.
/*
 * Verifies log streaming when multiple Oozie servers hold log fragments:
 *  1. Writes a local oozie.log with entries tagged _L3_/_L4_/_L6_ and sets an
 *     XLogFilter for one job at level WARN|INFO — streaming with no other
 *     servers must return exactly those three matching local lines.
 *  2. Registers a DummyZKOozie entry whose URL points at a
 *     DummyLogStreamingServlet serving remote entries _L1_, _L2_, _L5_ and a
 *     multi-line stack trace (_L7_.._L16_) — streaming must merge local and
 *     remote logs into 16 lines in _L1_.._L16_ order, and the servlet must be
 *     queried with "show=log&allservers=false".
 *  3. Stops the servlet container while leaving the ZK entry — streaming must
 *     still return the 3 local lines plus a header ("Unable…", the server id
 *     "9876", and a blank line) naming the unreachable server, 6 lines total.
 * Cleanup tears down the DummyZKOozie and stops the container.
 */
public void testStreamingWithMultipleOozieServers() throws Exception { XLogFilter.reset();//from w ww.ja v a 2 s . co m XLogFilter.defineParameter("USER"); XLogFilter.defineParameter("GROUP"); XLogFilter.defineParameter("TOKEN"); XLogFilter.defineParameter("APP"); XLogFilter.defineParameter("JOB"); XLogFilter.defineParameter("ACTION"); XLogFilter xf = new XLogFilter(); xf.setParameter("JOB", "0000003-130610102426873-oozie-rkan-W"); xf.setLogLevel("WARN|INFO"); File log4jFile = new File(getTestCaseConfDir(), "test-log4j.properties"); ClassLoader cl = Thread.currentThread().getContextClassLoader(); InputStream is = cl.getResourceAsStream("test-no-dash-log4j.properties"); Properties log4jProps = new Properties(); log4jProps.load(is); // prevent conflicts with other tests by changing the log file location log4jProps.setProperty("log4j.appender.oozie.File", getTestCaseDir() + "/oozie.log"); log4jProps.store(new FileOutputStream(log4jFile), ""); setSystemProperty(XLogService.LOG4J_FILE, log4jFile.getName()); assertFalse(doStreamDisabledCheck()); File logFile = new File(Services.get().get(XLogService.class).getOozieLogPath(), Services.get().get(XLogService.class).getOozieLogName()); logFile.getParentFile().mkdirs(); FileWriter logWriter = new FileWriter(logFile); // local logs logWriter.append( "2013-06-10 10:25:44,008 WARN HiveActionExecutor:542 SERVER[foo] USER[rkanter] GROUP[-] TOKEN[] " + "APP[hive-wf] JOB[0000003-130610102426873-oozie-rkan-W] ACTION[0000003-130610102426873-oozie-rkan-W@hive-node] " + "credentials is null for the action _L3_") .append("\n") .append("2013-06-10 10:26:10,008 INFO HiveActionExecutor:539 SERVER[foo] USER[rkanter] GROUP[-] TOKEN[] " + "APP[hive-wf] JOB[0000003-130610102426873-oozie-rkan-W] ACTION[0000003-130610102426873-oozie-rkan-W@hive-node] " + "action completed, external ID [job_201306101021_0005] _L4_") .append("\n") .append("2013-06-10 10:26:10,341 WARN ActionStartXCommand:542 USER[rkanter] GROUP[-] TOKEN[] " + "APP[hive-wf] 
JOB[0000003-130610102426873-oozie-rkan-W] ACTION[0000003-130610102426873-oozie-rkan-W@end] " + "[***0000003-130610102426873-oozie-rkan-W@end***]Action updated in DB! _L6_") .append("\n"); logWriter.close(); // logs to be returned by another "Oozie server" DummyLogStreamingServlet.logs = "2013-06-10 10:25:43,575 WARN ActionStartXCommand:542 SERVER[foo] USER[rkanter] GROUP[-] TOKEN[] APP[hive-wf] " + "JOB[0000003-130610102426873-oozie-rkan-W] ACTION[0000003-130610102426873-oozie-rkan-W@:start:] " + "[***0000003-130610102426873-oozie-rkan-W@:start:***]Action status=DONE _L1_" + "\n" + "2013-06-10 10:25:43,575 WARN ActionStartXCommand:542 SERVER[foo] USER[rkanter] GROUP[-] TOKEN[] APP[hive-wf] " + "JOB[0000003-130610102426873-oozie-rkan-W] ACTION[0000003-130610102426873-oozie-rkan-W@:start:] " + "[***0000003-130610102426873-oozie-rkan-W@:start:***]Action updated in DB! _L2_" + "\n" + "2013-06-10 10:26:10,148 INFO HiveActionExecutor:539 SERVER[foo] USER[rkanter] GROUP[-] TOKEN[] APP[hive-wf] " + "JOB[0000003-130610102426873-oozie-rkan-W] ACTION[0000003-130610102426873-oozie-rkan-W@hive-node] action produced" + " output _L5_" + "\n" // a multiline message with a stack trace + "2013-06-10 10:26:30,202 WARN ActionStartXCommand:542 - SERVER[foo] USER[rkanter] GROUP[-] TOKEN[] APP[hive-wf] " + "JOB[0000003-130610102426873-oozie-rkan-W] ACTION[0000003-130610102426873-oozie-rkan-W@hive-node] Error starting " + "action [hive-node]. 
ErrorType [TRANSIENT], ErrorCode [JA009], Message [JA009: java.io.IOException: Unknown " + "protocol to name node: org.apache.hadoop.mapred.JobSubmissionProtocol _L7_\n" + " at org.apache.hadoop.hdfs.server.namenode.NameNode.getProtocolVersion(NameNode.java:156) _L8_\n" + " at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)_L9_\n" + " at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1190) _L10_\n" + " at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1426) _L11_\n" + "] _L12_\n" + "org.apache.oozie.action.ActionExecutorException: JA009: java.io.IOException: Unknown protocol to name node: " + "org.apache.hadoop.mapred.JobSubmissionProtocol _L13_\n" + " at org.apache.hadoop.hdfs.server.namenode.NameNode.getProtocolVersion(NameNode.java:156) _L14_\n" + " at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) _L15_\n" + " at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39) _L16_\n"; String out = doStreamLog(xf); String[] outArr = out.split("\n"); assertEquals(3, outArr.length); assertFalse(out.contains("_L1_")); assertFalse(out.contains("_L2_")); assertTrue(outArr[0].contains("_L3_")); assertTrue(outArr[1].contains("_L4_")); assertFalse(out.contains("_L5_")); assertTrue(outArr[2].contains("_L6_")); // We'll use a DummyZKOozie to create an entry in ZK and then set its url to an (unrelated) servlet that will simply return // some log messages DummyZKOozie dummyOozie = null; EmbeddedServletContainer container = new EmbeddedServletContainer("oozie"); container.addServletEndpoint("/other-oozie-server/*", DummyLogStreamingServlet.class); try { container.start(); dummyOozie = new DummyZKOozie("9876", container.getServletURL("/other-oozie-server/*")); DummyLogStreamingServlet.lastQueryString = null; out = doStreamLog(xf); outArr = out.split("\n"); assertEquals(16, outArr.length); assertTrue(outArr[0].contains("_L1_")); assertTrue(outArr[1].contains("_L2_")); 
assertTrue(outArr[2].contains("_L3_")); assertTrue(outArr[3].contains("_L4_")); assertTrue(outArr[4].contains("_L5_")); assertTrue(outArr[5].contains("_L6_")); assertTrue(outArr[6].contains("_L7_")); assertTrue(outArr[7].contains("_L8_")); assertTrue(outArr[8].contains("_L9_")); assertTrue(outArr[9].contains("_L10_")); assertTrue(outArr[10].contains("_L11_")); assertTrue(outArr[11].contains("_L12_")); assertTrue(outArr[12].contains("_L13_")); assertTrue(outArr[13].contains("_L14_")); assertTrue(outArr[14].contains("_L15_")); assertTrue(outArr[15].contains("_L16_")); assertEquals("show=log&allservers=false", DummyLogStreamingServlet.lastQueryString); // If we stop the container but leave the DummyZKOozie running, it will simulate if that server is down but still has // info in ZK; we should be able to get the logs from other servers (in this case, this server) and a message about // which servers it couldn't reach container.stop(); out = doStreamLog(xf); outArr = out.split("\n"); assertEquals(6, outArr.length); assertTrue(outArr[0].startsWith("Unable")); assertEquals("9876", outArr[1].trim()); assertEquals("", outArr[2]); assertFalse(out.contains("_L1_")); assertFalse(out.contains("_L2_")); assertTrue(outArr[3].contains("_L3_")); assertTrue(outArr[4].contains("_L4_")); assertFalse(out.contains("_L5_")); assertTrue(outArr[5].contains("_L6_")); } finally { if (dummyOozie != null) { dummyOozie.teardown(); } container.stop(); } }
From source file:org.ala.spatial.util.AnalysisJobMaxent.java
private void writeToFile(String text, String filename) { try {//from w ww . j a v a2 s . c om FileWriter fw = new FileWriter(filename); fw.append(text); fw.close(); } catch (Exception e) { e.printStackTrace(); } }
From source file:ml.shifu.shifu.core.processor.ComboModelProcessor.java
/** * Create configuration file for sub-model * if the configuration for parent model exists, it will copy that parent configuration firstly * and append new content.//w w w . j a v a 2 s .c o m * * @param subModelName sub model name * @param namesPrefix prefix of name * @param parentNamesFile parent names of files * @param varNames var names * @return model file name * @throws IOException any io exception */ private String createModelNamesFile(String subModelName, String namesPrefix, String parentNamesFile, String... varNames) throws IOException { String modelNamesCfg = namesPrefix + ".names"; File mnFile = new File(subModelName + File.separator + modelNamesCfg); // copy existing meta file if (StringUtils.isNotBlank(parentNamesFile)) { FileUtils.copyFile(new File(parentNamesFile), mnFile); } // append uid column as meta FileWriter writer = new FileWriter(mnFile); try { for (String var : varNames) { writer.append(var + "\n"); } } catch (IOException e) { // skip it } finally { writer.close(); } return modelNamesCfg; }
From source file:dk.netarkivet.archive.arcrepositoryadmin.ReplicaCacheDatabaseTester.java
License: unspecified (placeholder text in the original listing)
/**
 * Creates the test fixture "filelist.out" in the test directory: five
 * newline-terminated filename entries in which "TEST1" appears three times,
 * deliberately producing duplicates.
 *
 * @return the created filelist file
 * @throws Exception if the file cannot be written
 */
private File makeTemporaryDuplicateFilelistFile() throws Exception {
    File res = new File(TestInfo.TEST_DIR, "filelist.out");
    // try-with-resources closes the writer even if a write fails, fixing a
    // FileWriter leak in the original (close() was skipped on exception).
    try (FileWriter fw = new FileWriter(res)) {
        StringBuilder fileContent = new StringBuilder();
        fileContent.append("TEST1").append("\n");
        fileContent.append("TEST2").append("\n");
        fileContent.append("TEST1").append("\n"); // duplicate (2nd occurrence)
        fileContent.append("TEST3").append("\n");
        fileContent.append("TEST1").append("\n"); // duplicate (3rd occurrence)
        fw.append(fileContent.toString());
        fw.flush();
    }
    return res;
}
From source file:org.apache.accumulo.minicluster.impl.MiniAccumuloClusterImpl.java
private void writeConfig(File file, Iterable<Map.Entry<String, String>> settings) throws IOException { FileWriter fileWriter = new FileWriter(file); fileWriter.append("<configuration>\n"); for (Entry<String, String> entry : settings) { String value = entry.getValue().replace("&", "&").replace("<", "<").replace(">", ">"); fileWriter.append(//from ww w .ja va2 s.c o m "<property><name>" + entry.getKey() + "</name><value>" + value + "</value></property>\n"); } fileWriter.append("</configuration>\n"); fileWriter.close(); }